From 4348cda7c579e2eeec55c3ca45f9e619ad1503f9 Mon Sep 17 00:00:00 2001
From: Chau Nguyen
Date: Sun, 3 Nov 2024 00:41:37 -0400
Subject: [PATCH] add context caching support and an integration test

---
 .../langchain-google-genai/src/chat_models.ts | 28 ++-----
 .../src/context_caching.ts                    | 51 +++++++++++++
 .../src/tests/context_caching.int.test.ts     | 76 +++++++++++++++++++
 3 files changed, 133 insertions(+), 22 deletions(-)
 create mode 100644 libs/langchain-google-genai/src/context_caching.ts
 create mode 100644 libs/langchain-google-genai/src/tests/context_caching.int.test.ts

diff --git a/libs/langchain-google-genai/src/chat_models.ts b/libs/langchain-google-genai/src/chat_models.ts
index 0648e4e9fad6..d3b9e2cdeaad 100644
--- a/libs/langchain-google-genai/src/chat_models.ts
+++ b/libs/langchain-google-genai/src/chat_models.ts
@@ -9,11 +9,8 @@ import {
   Part as GenerativeAIPart,
   ModelParams,
   RequestOptions,
+  CachedContent,
 } from "@google/generative-ai";
-import {
-  CachedContentCreateParams,
-  GoogleAICacheManager as CacheManager,
-} from "@google/generative-ai/server";
 import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import {
   AIMessageChunk,
@@ -568,8 +565,6 @@ export class ChatGoogleGenerativeAI
 
   streamUsage = true;
 
-  cacheManager?: CacheManager;
-
   private client: GenerativeModel;
 
   get _isMultimodalModel() {
@@ -658,23 +653,12 @@ export class ChatGoogleGenerativeAI
     this.streamUsage = fields?.streamUsage ?? this.streamUsage;
   }
 
-  async addCachedContent(cachedContentCreateParams: CachedContentCreateParams,
-    modelParams?: Partial<ModelParams>, requestOptions?: RequestOptions
-  ) {
-    if (!this.apiKey) {
-      throw new Error(
-        "Please set an API key for Google GenerativeAI " +
-          "in the environment variable GOOGLE_API_KEY " +
-          "or in the `apiKey` field of the " +
-          "ChatGoogleGenerativeAI constructor"
-      );
-    }
-    this.cacheManager = this.cacheManager ?
-      this.cacheManager : new CacheManager(this.apiKey);
+  enableCachedContent(cachedContent: CachedContent,
+    modelParams?: ModelParams, requestOptions?: RequestOptions
+  ): void {
+    if (!this.apiKey) return;
     this.client = new GenerativeAI(this.apiKey)
-      .getGenerativeModelFromCachedContent(
-        await this.cacheManager.create(cachedContentCreateParams),
-        modelParams, requestOptions
-      );
+      .getGenerativeModelFromCachedContent(cachedContent, modelParams, requestOptions);
   }
 
   getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
diff --git a/libs/langchain-google-genai/src/context_caching.ts b/libs/langchain-google-genai/src/context_caching.ts
new file mode 100644
index 000000000000..33e7f79885b2
--- /dev/null
+++ b/libs/langchain-google-genai/src/context_caching.ts
@@ -0,0 +1,51 @@
+import { CachedContentCreateParams, CachedContentUpdateParams, FileMetadata, FileMetadataResponse, GoogleAICacheManager, ListCacheResponse, ListFilesResponse, ListParams, UploadFileResponse } from "@google/generative-ai/server";
+import { GoogleAIFileManager } from "@google/generative-ai/server";
+import { CachedContent, RequestOptions, SingleRequestOptions } from "@google/generative-ai";
+
+export class GoogleGenerativeAIContextCache {
+  private fileManager: GoogleAIFileManager;
+  private cacheManager: GoogleAICacheManager;
+
+  constructor(apiKey: string, fileManagerRequestOptions?: RequestOptions,
+    cacheManagerRequestOptions?: RequestOptions
+  ) {
+    this.fileManager = new GoogleAIFileManager(apiKey, fileManagerRequestOptions);
+    this.cacheManager = new GoogleAICacheManager(apiKey, cacheManagerRequestOptions);
+  }
+
+  uploadFile(filePath: string, fileMetadata: FileMetadata): Promise<UploadFileResponse> {
+    return this.fileManager.uploadFile(filePath, fileMetadata);
+  }
+
+  listFiles(listParams?: ListParams, requestOptions?: SingleRequestOptions): Promise<ListFilesResponse> {
+    return this.fileManager.listFiles(listParams, requestOptions);
+  }
+
+  getFile(fileId: string, requestOptions?: SingleRequestOptions): Promise<FileMetadataResponse> {
+    return this.fileManager.getFile(fileId, requestOptions);
+  }
+
+  deleteFile(fileId: string): Promise<void> {
+    return this.fileManager.deleteFile(fileId);
+  }
+
+  createCache(createOptions: CachedContentCreateParams): Promise<CachedContent> {
+    return this.cacheManager.create(createOptions);
+  }
+
+  listCaches(listParams?: ListParams): Promise<ListCacheResponse> {
+    return this.cacheManager.list(listParams);
+  }
+
+  getCache(name: string): Promise<CachedContent> {
+    return this.cacheManager.get(name);
+  }
+
+  updateCache(name: string, updateParams: CachedContentUpdateParams): Promise<CachedContent> {
+    return this.cacheManager.update(name, updateParams);
+  }
+
+  deleteCache(name: string): Promise<void> {
+    return this.cacheManager.delete(name);
+  }
+}
\ No newline at end of file
diff --git a/libs/langchain-google-genai/src/tests/context_caching.int.test.ts b/libs/langchain-google-genai/src/tests/context_caching.int.test.ts
new file mode 100644
index 000000000000..b2188525f1af
--- /dev/null
+++ b/libs/langchain-google-genai/src/tests/context_caching.int.test.ts
@@ -0,0 +1,76 @@
+/* eslint-disable no-process-env */
+
+import { beforeAll, expect, test } from "@jest/globals";
+
+import { ChatGoogleGenerativeAI } from "../chat_models.js";
+import { fileURLToPath } from "node:url";
+import * as path from "node:path";
+
+import {
+  FileState,
+  UploadFileResponse,
+} from "@google/generative-ai/server";
+import { GoogleGenerativeAIContextCache } from "../context_caching.js";
+
+const model = new ChatGoogleGenerativeAI({});
+let fileResult: UploadFileResponse;
+
+beforeAll(async () => {
+  const displayName = "hotdog image";
+
+  const filename = fileURLToPath(import.meta.url);
+  const dirname = path.dirname(filename);
+  const pathToImageFile = path.join(dirname, "/data/hotdog.jpg");
+
+  const contextCache = new GoogleGenerativeAIContextCache(process.env.GOOGLE_API_KEY || "");
+  fileResult = await contextCache.uploadFile(pathToImageFile, {
+    displayName,
+    mimeType: "image/jpeg",
+  });
+
+  const { name, uri } = fileResult.file;
+
+  // Poll getFile() on a set interval (2 seconds here) to check file state.
+  let file = await contextCache.getFile(name);
+  while (file.state === FileState.PROCESSING) {
+    console.log("Waiting for the file to be processed.");
+    // Sleep for 2 seconds
+    await new Promise((resolve) => setTimeout(resolve, 2_000));
+    file = await contextCache.getFile(name);
+  }
+  console.log(`File processing complete: ${uri}`);
+
+  const systemInstruction =
+    "You are an expert image analyzer, and your job is to answer " +
+    "the user's query based on the image file you have access to.";
+  const cachedContent = await contextCache.createCache({
+    model: "models/gemini-1.5-flash-001",
+    displayName: "hotdog image",
+    systemInstruction,
+    contents: [
+      {
+        role: "user",
+        parts: [
+          {
+            fileData: {
+              mimeType: fileResult.file.mimeType,
+              fileUri: fileResult.file.uri,
+            },
+          },
+        ],
+      },
+    ],
+    ttlSeconds: 300,
+  });
+
+  model.enableCachedContent(cachedContent);
+});
+
+test("Test Google AI context caching", async () => {
+  const res = await model.invoke(
+    "Describe the image you have access to in detail."
+  );
+
+  console.log(res);
+  expect(res).toBeTruthy();
+});
\ No newline at end of file
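
Note for reviewers (not part of the commit): a minimal usage sketch joining the two pieces above, assuming GOOGLE_API_KEY is set. The import path for GoogleGenerativeAIContextCache is hypothetical (this patch adds the module but does not wire up a package entry point for it), and the file path, display names, model name, and prompts are illustrative. The Gemini API also enforces a minimum token count for cached content, so very small inputs may be rejected.

    import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
    // Hypothetical import path; adjust to wherever context_caching.ts is exposed.
    import { GoogleGenerativeAIContextCache } from "@langchain/google-genai/context_caching";

    const apiKey = process.env.GOOGLE_API_KEY ?? "";
    const contextCache = new GoogleGenerativeAIContextCache(apiKey);

    // Upload a large file once via the Files API...
    const uploaded = await contextCache.uploadFile("./big-transcript.txt", {
      mimeType: "text/plain",
      displayName: "big transcript", // illustrative name and path
    });

    // ...then cache it server-side together with a system instruction.
    const cache = await contextCache.createCache({
      model: "models/gemini-1.5-flash-001",
      systemInstruction: "Answer questions using the attached transcript.",
      contents: [
        {
          role: "user",
          parts: [
            {
              fileData: {
                mimeType: uploaded.file.mimeType,
                fileUri: uploaded.file.uri,
              },
            },
          ],
        },
      ],
      ttlSeconds: 300,
    });

    // Point the chat model at the cached content; subsequent invocations are
    // served by a model bound to the cache instead of re-sending the file.
    const model = new ChatGoogleGenerativeAI({ apiKey });
    model.enableCachedContent(cache);
    const res = await model.invoke("Summarize the transcript in three bullets.");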