
add langfuse support with env vars - cursor test #1

Open · wants to merge 4 commits into base: master
@@ -1,5 +1,6 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { OllamaEmbeddings } from '@langchain/ollama';
import { LangfuseCallbackHandler } from 'langfuse-langchain';
import {
NodeConnectionType,
type INodeType,
@@ -38,9 +39,7 @@ export class EmbeddingsOllama implements INodeType {
],
},
},
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
outputs: [NodeConnectionType.AiEmbedding],
outputNames: ['Embeddings'],
properties: [getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]), ollamaModel],
@@ -51,9 +50,18 @@
const modelName = this.getNodeParameter('model', itemIndex) as string;
const credentials = await this.getCredentials('ollamaApi');

const options = this.getNodeParameter('options', itemIndex, {}) as object;
const langfuseOptions = (options as any).langfuse || {};
const langfuseHandler = new LangfuseCallbackHandler({
publicKey: langfuseOptions.publicKey || process.env.LANGFUSE_PUBLIC_KEY,
secretKey: langfuseOptions.secretKey || process.env.LANGFUSE_SECRET_KEY,
baseUrl: langfuseOptions.baseUrl || process.env.LANGFUSE_HOST,
});

const embeddings = new OllamaEmbeddings({
baseUrl: credentials.baseUrl as string,
model: modelName,
callbacks: [langfuseHandler]
});

return {
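Each node builds the handler inline from the node's `langfuse` options, falling back to environment variables when a field is empty. A minimal standalone sketch of that resolution, assuming the same `LangfuseCallbackHandler` export this diff imports; the helper name and interface are illustrative, not part of the change:

```ts
import { LangfuseCallbackHandler } from 'langfuse-langchain';

// Illustrative shape of the node's Langfuse options; every field is optional
// because each one falls back to its LANGFUSE_* environment variable.
interface LangfuseOptions {
	publicKey?: string;
	secretKey?: string;
	baseUrl?: string;
}

// Hypothetical helper mirroring the construction repeated in each node:
// node-level options win, environment variables are the fallback.
function createLangfuseHandler(options: LangfuseOptions = {}) {
	return new LangfuseCallbackHandler({
		publicKey: options.publicKey || process.env.LANGFUSE_PUBLIC_KEY,
		secretKey: options.secretKey || process.env.LANGFUSE_SECRET_KEY,
		baseUrl: options.baseUrl || process.env.LANGFUSE_HOST,
	});
}
```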
@@ -2,6 +2,7 @@

import type { ChatOllamaInput } from '@langchain/ollama';
import { ChatOllama } from '@langchain/ollama';
import { LangfuseCallbackHandler } from 'langfuse-langchain';
import {
NodeConnectionType,
type INodeType,
@@ -61,12 +62,19 @@ export class LmChatOllama implements INodeType {
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as ChatOllamaInput;

const langfuseOptions = (options as any).langfuse || {};
const langfuseHandler = new LangfuseCallbackHandler({
publicKey: langfuseOptions.publicKey || process.env.LANGFUSE_PUBLIC_KEY,
secretKey: langfuseOptions.secretKey || process.env.LANGFUSE_SECRET_KEY,
baseUrl: langfuseOptions.baseUrl || process.env.LANGFUSE_HOST,
});

const model = new ChatOllama({
...options,
baseUrl: credentials.baseUrl as string,
model: modelName,
format: options.format === 'default' ? undefined : options.format,
callbacks: [new N8nLlmTracing(this)],
callbacks: [new N8nLlmTracing(this), langfuseHandler],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

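In the chat node the new handler is appended to the existing callbacks array, so both N8nLlmTracing and Langfuse observe the same run. A hedged sketch of the equivalent wiring outside of n8n, assuming a locally running Ollama instance and the same exports used in this diff:

```ts
import { ChatOllama } from '@langchain/ollama';
import { LangfuseCallbackHandler } from 'langfuse-langchain';

// Handler configured purely from environment variables, the fallback path in the diff.
const langfuseHandler = new LangfuseCallbackHandler({
	publicKey: process.env.LANGFUSE_PUBLIC_KEY,
	secretKey: process.env.LANGFUSE_SECRET_KEY,
	baseUrl: process.env.LANGFUSE_HOST,
});

const model = new ChatOllama({
	baseUrl: 'http://localhost:11434', // assumed default Ollama endpoint
	model: 'llama3', // assumed model name
	// Every handler in this array receives the callbacks for each run.
	callbacks: [langfuseHandler],
});

// Each invocation then shows up as a trace in Langfuse.
const reply = await model.invoke('Summarize what a callback handler does in one sentence.');
console.log(reply.content);
```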
@@ -1,6 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */

import { Ollama } from '@langchain/community/llms/ollama';
import { LangfuseCallbackHandler } from 'langfuse-langchain';
import {
NodeConnectionType,
type INodeType,
@@ -60,11 +61,18 @@ export class LmOllama implements INodeType {
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as object;

const langfuseOptions = (options as any).langfuse || {};
const langfuseHandler = new LangfuseCallbackHandler({
publicKey: langfuseOptions.publicKey || process.env.LANGFUSE_PUBLIC_KEY,
secretKey: langfuseOptions.secretKey || process.env.LANGFUSE_SECRET_KEY,
baseUrl: langfuseOptions.baseUrl || process.env.LANGFUSE_HOST,
});

const model = new Ollama({
baseUrl: credentials.baseUrl as string,
model: modelName,
...options,
callbacks: [new N8nLlmTracing(this)],
callbacks: [new N8nLlmTracing(this), langfuseHandler],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

32 changes: 32 additions & 0 deletions packages/@n8n/nodes-langchain/nodes/llms/LMOllama/description.ts
@@ -229,5 +229,37 @@ export const ollamaOptions: INodeProperties = {
default: 'default',
description: 'Specifies the format of the API response',
},
{
displayName: 'Langfuse',
name: 'langfuse',
placeholder: 'Add Langfuse Options',
description: 'Options for Langfuse tracing',
type: 'collection',
default: {},
options: [
{
displayName: 'Public Key',
name: 'publicKey',
type: 'string',
default: '',
description: 'Your Langfuse public key. Falls back to LANGFUSE_PUBLIC_KEY environment variable if not set.',
},
{
displayName: 'Secret Key',
name: 'secretKey',
type: 'string',
// typeOptions: { password: true },
default: '',
description: 'Your Langfuse secret key. Falls back to LANGFUSE_SECRET_KEY environment variable if not set.',
},
{
displayName: 'Host',
name: 'baseUrl',
type: 'string',
default: '',
description: 'Your Langfuse host URL. Falls back to LANGFUSE_HOST environment variable if not set.',
},
],
},
],
};
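At runtime the collection above arrives on the node's options as a `langfuse` object, which is what the node code reads via `(options as any).langfuse`. A hedged example of the object a user-filled collection would produce; the values are placeholders, not real keys:

```ts
// Placeholder values only; a self-hosted base URL is shown for illustration.
const options = {
	langfuse: {
		publicKey: 'pk-lf-...',
		secretKey: 'sk-lf-...',
		baseUrl: 'https://langfuse.example.com',
	},
};
```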
1 change: 1 addition & 0 deletions packages/@n8n/nodes-langchain/package.json
@@ -151,6 +151,7 @@
"@langchain/qdrant": "0.1.1",
"@langchain/redis": "0.1.0",
"@langchain/textsplitters": "0.1.0",
"langfuse-langchain": "^0.1.0",
"@mozilla/readability": "0.5.0",
"@n8n/json-schema-to-zod": "workspace:*",
"@n8n/typeorm": "0.3.20-12",