import getpass
import os

# Prompt for the OpenAI API key only if it is not already set in the environment.
if not os.environ.get("OPENAI_API_KEY"):
    os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter API key for OpenAI: ")

from langchain_openai import OpenAIEmbeddings

# Embedding model instance used by the vector-store examples below.
embeddings = OpenAIEmbeddings(model="text-embedding-3-large")
pip install -qU "langchain[azure]"
import getpass
import os

# Prompt for the Azure OpenAI API key only if it is not already set in the environment.
if not os.environ.get("AZURE_OPENAI_API_KEY"):
    os.environ["AZURE_OPENAI_API_KEY"] = getpass.getpass("Enter API key for Azure: ")

from langchain_openai import AzureOpenAIEmbeddings

# Endpoint, deployment name, and API version are read from the environment;
# they must be set before running this snippet.
embeddings = AzureOpenAIEmbeddings(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],
    openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],
)
pip install -qU langchain-google-genai
import getpass
import os

# Prompt for the Google Gemini API key only if it is not already set in the environment.
if not os.environ.get("GOOGLE_API_KEY"):
    os.environ["GOOGLE_API_KEY"] = getpass.getpass("Enter API key for Google Gemini: ")

from langchain_google_genai import GoogleGenerativeAIEmbeddings

# Embedding model instance used by the vector-store examples below.
embeddings = GoogleGenerativeAIEmbeddings(model="models/gemini-embedding-001")
pip install -qU langchain-google-vertexai
from langchain_google_vertexai import VertexAIEmbeddings

# Vertex AI uses application-default credentials; no API-key prompt is needed here.
embeddings = VertexAIEmbeddings(model="text-embedding-005")
pip install -qU langchain-aws
from langchain_aws import BedrockEmbeddings

# Bedrock picks up AWS credentials from the standard environment/config chain.
embeddings = BedrockEmbeddings(model_id="amazon.titan-embed-text-v2:0")
pip install -qU langchain-huggingface
from langchain_huggingface import HuggingFaceEmbeddings

# Runs the sentence-transformers model locally; no API key required.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
pip install -qU langchain-ollama
from langchain_ollama import OllamaEmbeddings

# Requires a local Ollama server with the named model pulled.
embeddings = OllamaEmbeddings(model="llama3")
pip install -qU langchain-cohere
import getpass
import os

# Prompt for the Cohere API key only if it is not already set in the environment.
if not os.environ.get("COHERE_API_KEY"):
    os.environ["COHERE_API_KEY"] = getpass.getpass("Enter API key for Cohere: ")

from langchain_cohere import CohereEmbeddings

embeddings = CohereEmbeddings(model="embed-english-v3.0")
pip install -qU langchain-mistralai
import getpass
import os

# Prompt for the MistralAI API key only if it is not already set in the environment.
# NOTE(review): the integration client typically reads MISTRAL_API_KEY — confirm the
# variable name used here matches what MistralAIEmbeddings expects.
if not os.environ.get("MISTRALAI_API_KEY"):
    os.environ["MISTRALAI_API_KEY"] = getpass.getpass("Enter API key for MistralAI: ")

from langchain_mistralai import MistralAIEmbeddings

embeddings = MistralAIEmbeddings(model="mistral-embed")
pip install -qU langchain-nomic
import getpass
import os

# Prompt for the Nomic API key only if it is not already set in the environment.
if not os.environ.get("NOMIC_API_KEY"):
    os.environ["NOMIC_API_KEY"] = getpass.getpass("Enter API key for Nomic: ")

from langchain_nomic import NomicEmbeddings

embeddings = NomicEmbeddings(model="nomic-embed-text-v1.5")
pip install -qU langchain-nvidia-ai-endpoints
import getpass
import os

# Prompt for the NVIDIA API key only if it is not already set in the environment.
if not os.environ.get("NVIDIA_API_KEY"):
    os.environ["NVIDIA_API_KEY"] = getpass.getpass("Enter API key for NVIDIA: ")

from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings

embeddings = NVIDIAEmbeddings(model="NV-Embed-QA")
pip install -qU langchain-voyageai
import getpass
import os

# Prompt for the Voyage AI API key only if it is not already set in the environment.
if not os.environ.get("VOYAGE_API_KEY"):
    os.environ["VOYAGE_API_KEY"] = getpass.getpass("Enter API key for Voyage AI: ")

# The module name uses an underscore ("langchain_voyageai"); the hyphenated
# form in the original ("langchain-voyageai") is not a valid Python identifier.
from langchain_voyageai import VoyageAIEmbeddings

embeddings = VoyageAIEmbeddings(model="voyage-3")
pip install -qU langchain-ibm
import getpass
import os

# Prompt for the IBM watsonx API key only if it is not already set in the environment.
if not os.environ.get("WATSONX_APIKEY"):
    os.environ["WATSONX_APIKEY"] = getpass.getpass("Enter API key for IBM watsonx: ")

from langchain_ibm import WatsonxEmbeddings

# Replace the project_id placeholder with your own watsonx project ID.
embeddings = WatsonxEmbeddings(
    model_id="ibm/slate-125m-english-rtrvr",
    url="https://us-south.ml.cloud.ibm.com",
    project_id="<WATSONX PROJECT_ID>",
)
pip install -qU langchain-core
from langchain_core.embeddings import DeterministicFakeEmbedding

# Deterministic fake embeddings (4096-dimensional) for offline testing — no API key needed.
embeddings = DeterministicFakeEmbedding(size=4096)
pip install -qU "langchain[xai]"
import getpass
import os

# Prompt for the xAI API key only if it is not already set in the environment.
if not os.environ.get("XAI_API_KEY"):
    os.environ["XAI_API_KEY"] = getpass.getpass("Enter API key for xAI: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("grok-2", model_provider="xai")
pip install -qU "langchain[perplexity]"
import getpass
import os

# Prompt for the Perplexity API key only if it is not already set in the environment.
if not os.environ.get("PPLX_API_KEY"):
    os.environ["PPLX_API_KEY"] = getpass.getpass("Enter API key for Perplexity: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("llama-3.1-sonar-small-128k-online", model_provider="perplexity")
pip install -qU "langchain[deepseek]"
import getpass
import os

# Prompt for the DeepSeek API key only if it is not already set in the environment.
if not os.environ.get("DEEPSEEK_API_KEY"):
    os.environ["DEEPSEEK_API_KEY"] = getpass.getpass("Enter API key for DeepSeek: ")

from langchain.chat_models import init_chat_model

model = init_chat_model("deepseek-chat", model_provider="deepseek")
Select vector store:
In-memory
AstraDB
Chroma
FAISS
Milvus
MongoDB
PGVector
PGVectorStore
Pinecone
Qdrant
pip install -qU langchain-core
from langchain_core.vectorstores import InMemoryVectorStore

# `embeddings` is the embedding model instantiated in the provider snippet above.
vector_store = InMemoryVectorStore(embeddings)
from langchain_chroma import Chroma

# `embeddings` is the embedding model instantiated in the provider snippet above.
vector_store = Chroma(
    collection_name="example_collection",
    embedding_function=embeddings,
    persist_directory="./chroma_langchain_db",  # Where to save data locally, remove if not necessary
)