# Prompt for the OpenAI API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("OPENAI_API_KEY"):
    os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter API key for OpenAI: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("gpt-4o-mini", model_provider="openai")
Anthropic
Copy
pip install -qU "langchain[anthropic]"
Copy
# Prompt for the Anthropic API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("ANTHROPIC_API_KEY"):
    os.environ["ANTHROPIC_API_KEY"] = getpass.getpass("Enter API key for Anthropic: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("claude-3-5-sonnet-latest", model_provider="anthropic")
Azure
Copy
pip install -qU "langchain[azure]"
Copy
# Prompt for the Azure OpenAI API key if it is not already in the environment,
# then construct the Azure chat model directly. Endpoint, deployment name, and
# API version are read from environment variables and must be set beforehand.
import getpass
import os

if not os.environ.get("AZURE_OPENAI_API_KEY"):
    os.environ["AZURE_OPENAI_API_KEY"] = getpass.getpass("Enter API key for Azure: ")

from langchain_openai import AzureChatOpenAI

model = AzureChatOpenAI(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],
    openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],
)
Google Gemini
Copy
pip install -qU "langchain[google-genai]"
Copy
# Prompt for the Google Gemini API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("GOOGLE_API_KEY"):
    os.environ["GOOGLE_API_KEY"] = getpass.getpass("Enter API key for Google Gemini: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("gemini-2.5-flash", model_provider="google_genai")
Google Vertex
Copy
pip install -qU "langchain[google-vertexai]"
Copy
# Ensure your VertexAI credentials are configured (e.g. via gcloud application
# default credentials) before running this snippet.
from langchain.chat_models import init_chat_model

model = init_chat_model("gemini-2.5-flash", model_provider="google_vertexai")
AWS
Copy
pip install -qU "langchain[aws]"
Copy
# Ensure your AWS credentials are configured (e.g. via environment variables or
# a shared credentials file) before running this snippet.
from langchain.chat_models import init_chat_model

model = init_chat_model(
    "anthropic.claude-3-5-sonnet-20240620-v1:0", model_provider="bedrock_converse"
)
Groq
Copy
pip install -qU "langchain[groq]"
Copy
# Prompt for the Groq API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("GROQ_API_KEY"):
    os.environ["GROQ_API_KEY"] = getpass.getpass("Enter API key for Groq: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("llama3-8b-8192", model_provider="groq")
Cohere
Copy
pip install -qU "langchain[cohere]"
Copy
# Prompt for the Cohere API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("COHERE_API_KEY"):
    os.environ["COHERE_API_KEY"] = getpass.getpass("Enter API key for Cohere: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("command-r-plus", model_provider="cohere")
# NVIDIA — NOTE(review): the section header and pip-install line for this
# provider are missing from the scraped page; presumably
# `pip install -qU "langchain[nvidia]"` — confirm against the upstream docs.
# Prompt for the NVIDIA API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("NVIDIA_API_KEY"):
    os.environ["NVIDIA_API_KEY"] = getpass.getpass("Enter API key for NVIDIA: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("meta/llama3-70b-instruct", model_provider="nvidia")
Fireworks AI
Copy
pip install -qU "langchain[fireworks]"
Copy
# Prompt for the Fireworks AI API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("FIREWORKS_API_KEY"):
    os.environ["FIREWORKS_API_KEY"] = getpass.getpass("Enter API key for Fireworks AI: ")

from langchain.chat_models import init_chat_model

model = init_chat_model(
    "accounts/fireworks/models/llama-v3p1-70b-instruct", model_provider="fireworks"
)
Mistral AI
Copy
pip install -qU "langchain[mistralai]"
Copy
# Prompt for the Mistral AI API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("MISTRAL_API_KEY"):
    os.environ["MISTRAL_API_KEY"] = getpass.getpass("Enter API key for Mistral AI: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("mistral-large-latest", model_provider="mistralai")
Together AI
Copy
pip install -qU "langchain[together]"
Copy
# Prompt for the Together AI API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("TOGETHER_API_KEY"):
    os.environ["TOGETHER_API_KEY"] = getpass.getpass("Enter API key for Together AI: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("mistralai/Mixtral-8x7B-Instruct-v0.1", model_provider="together")
IBM watsonx
Copy
pip install -qU "langchain[langchain-ibm]"
Copy
# Prompt for the IBM watsonx API key if it is not already in the environment,
# then construct the watsonx chat model directly. Replace the project_id
# placeholder with your actual watsonx project id.
import getpass
import os

if not os.environ.get("WATSONX_APIKEY"):
    os.environ["WATSONX_APIKEY"] = getpass.getpass("Enter API key for IBM watsonx: ")

from langchain_ibm import ChatWatsonx

model = ChatWatsonx(
    model_id="ibm/granite-34b-code-instruct",
    url="https://us-south.ml.cloud.ibm.com",
    project_id="<WATSONX PROJECT_ID>",
)
Databricks
Copy
pip install -qU "langchain[databricks-langchain]"
Copy
# Prompt for the Databricks token if it is not already in the environment, set
# the serving-endpoint host, then construct the Databricks chat model. Replace
# the example host with your workspace's serving-endpoints URL.
import getpass
import os

if not os.environ.get("DATABRICKS_TOKEN"):
    os.environ["DATABRICKS_TOKEN"] = getpass.getpass("Enter API key for Databricks: ")

from databricks_langchain import ChatDatabricks

os.environ["DATABRICKS_HOST"] = "https://example.staging.cloud.databricks.com/serving-endpoints"

model = ChatDatabricks(endpoint="databricks-meta-llama-3-1-70b-instruct")
xAI
Copy
pip install -qU "langchain[langchain-xai]"
Copy
# Prompt for the xAI API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("XAI_API_KEY"):
    os.environ["XAI_API_KEY"] = getpass.getpass("Enter API key for xAI: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("grok-2", model_provider="xai")
Perplexity
Copy
pip install -qU "langchain[langchain-perplexity]"
Copy
# Prompt for the Perplexity API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("PPLX_API_KEY"):
    os.environ["PPLX_API_KEY"] = getpass.getpass("Enter API key for Perplexity: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("llama-3.1-sonar-small-128k-online", model_provider="perplexity")
DeepSeek
Copy
pip install -qU "langchain[langchain-deepseek]"
Copy
# Prompt for the DeepSeek API key if it is not already in the environment,
# then initialize a chat model via LangChain's provider-agnostic helper.
import getpass
import os

if not os.environ.get("DEEPSEEK_API_KEY"):
    os.environ["DEEPSEEK_API_KEY"] = getpass.getpass("Enter API key for DeepSeek: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("deepseek-chat", model_provider="deepseek")
While all these LangChain classes support the indicated advanced feature, you may have to open the provider-specific documentation to learn which hosted models or backends support the feature.