OllamaEmbeddings has been moved to the @langchain/ollama package. Install it with npm install @langchain/ollama.
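
A minimal migration sketch: the new package exports a class with the same name, and the constructor options shown below (model, baseUrl) assume the current @langchain/ollama export; the model name and URL are only examples.

import { OllamaEmbeddings } from "@langchain/ollama";

// Connects to a locally running Ollama server and selects the embedding model.
const embeddings = new OllamaEmbeddings({
  model: "llama2",
  baseUrl: "http://localhost:11434",
});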

Hierarchy

Constructors

Properties

baseUrl: string = "http://localhost:11434"
keepAlive: string = "5m"
model: string = "llama2"
headers?: Record<string, string>
requestOptions?: {
    embedding_only?: boolean;
    f16_kv?: boolean;
    frequency_penalty?: number;
    logits_all?: boolean;
    low_vram?: boolean;
    main_gpu?: number;
    mirostat?: number;
    mirostat_eta?: number;
    mirostat_tau?: number;
    num_batch?: number;
    num_ctx?: number;
    num_gpu?: number;
    num_gqa?: number;
    num_keep?: number;
    num_predict?: number;
    num_thread?: number;
    penalize_newline?: boolean;
    presence_penalty?: number;
    repeat_last_n?: number;
    repeat_penalty?: number;
    rope_frequency_base?: number;
    rope_frequency_scale?: number;
    stop?: string[];
    temperature?: number;
    tfs_z?: number;
    top_k?: number;
    top_p?: number;
    typical_p?: number;
    use_mlock?: boolean;
    use_mmap?: boolean;
    vocab_only?: boolean;
}
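
A construction sketch using the properties above. It assumes the legacy import path @langchain/community/embeddings/ollama and that the constructor accepts the camel-cased form of the request options, converting them to the snake_case keys listed here; the exact parameter spellings and values are illustrative, not authoritative.

import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama";

const embeddings = new OllamaEmbeddings({
  model: "llama2",                    // default: "llama2"
  baseUrl: "http://localhost:11434",  // default: local Ollama server
  keepAlive: "5m",                    // default: keep the model loaded for 5 minutes
  requestOptions: {
    temperature: 0, // passed through to Ollama as a request option
    numCtx: 2048,   // assumed camelCase spelling of num_ctx
  },
});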

Methods

""