Skip to content

Commit

Permalink
Add integration test for Gemini context caching
Browse files Browse the repository at this point in the history
  • Loading branch information
Chau Nguyen authored and KevinZJN committed Nov 13, 2024
1 parent 5146e14 commit 4348cda
Show file tree
Hide file tree
Showing 3 changed files with 133 additions and 22 deletions.
28 changes: 6 additions & 22 deletions libs/langchain-google-genai/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,8 @@ import {
Part as GenerativeAIPart,
ModelParams,
RequestOptions,
CachedContent,
} from "@google/generative-ai";
import {
CachedContentCreateParams,
GoogleAICacheManager as CacheManager,
} from '@google/generative-ai/server';
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import {
AIMessageChunk,
Expand Down Expand Up @@ -568,8 +565,6 @@ export class ChatGoogleGenerativeAI

streamUsage = true;

cacheManager?: CacheManager;

private client: GenerativeModel;

get _isMultimodalModel() {
Expand Down Expand Up @@ -658,23 +653,12 @@ export class ChatGoogleGenerativeAI
this.streamUsage = fields?.streamUsage ?? this.streamUsage;
}

/**
 * Points this chat model at previously created cached content so that
 * subsequent calls reuse the cached context instead of resending it.
 *
 * Replaces the underlying `GenerativeModel` client with one built from
 * `cachedContent` via `getGenerativeModelFromCachedContent`.
 *
 * @param cachedContent - Cached content previously created with the Google
 *   AI cache manager (e.g. `GoogleGenerativeAIContextCache.createCache`).
 * @param modelParams - Optional model parameters for the replacement client.
 * @param requestOptions - Optional request options for the replacement client.
 */
enableCachedContent(
  cachedContent: CachedContent,
  modelParams?: ModelParams,
  requestOptions?: RequestOptions
): void {
  // Without an API key we cannot construct a client; keep the existing
  // client rather than throwing from this setter-style method.
  if (!this.apiKey) return;
  this.client = new GenerativeAI(this.apiKey).getGenerativeModelFromCachedContent(
    cachedContent,
    modelParams,
    requestOptions
  );
}

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
Expand Down
51 changes: 51 additions & 0 deletions libs/langchain-google-genai/src/context_caching.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import { CachedContentCreateParams, CachedContentUpdateParams, FileMetadata, FileMetadataResponse, GoogleAICacheManager, ListCacheResponse, ListFilesResponse, ListParams, UploadFileResponse } from "@google/generative-ai/server";
import { GoogleAIFileManager } from "@google/generative-ai/server";
import { CachedContent, RequestOptions, SingleRequestOptions } from "@google/generative-ai";

/**
 * Convenience wrapper around the Google Generative AI file and cache
 * managers, exposing file upload/lookup and context-cache CRUD through a
 * single object constructed from one API key.
 *
 * Each method is a thin delegation to the underlying
 * `GoogleAIFileManager` / `GoogleAICacheManager` instance.
 */
export class GoogleGenerativeAIContextCache {
  // Both managers are created once in the constructor and never
  // reassigned, so they are marked readonly.
  private readonly fileManager: GoogleAIFileManager;

  private readonly cacheManager: GoogleAICacheManager;

  /**
   * @param apiKey - Google AI API key used for both managers.
   * @param fileManagerRequestOptions - Optional request options for the file manager.
   * @param cacheManagerRequestOptions - Optional request options for the cache manager.
   */
  constructor(
    apiKey: string,
    fileManagerRequestOptions?: RequestOptions,
    cacheManagerRequestOptions?: RequestOptions
  ) {
    this.fileManager = new GoogleAIFileManager(apiKey, fileManagerRequestOptions);
    this.cacheManager = new GoogleAICacheManager(apiKey, cacheManagerRequestOptions);
  }

  /** Uploads a local file so it can be referenced from cached content. */
  uploadFile(filePath: string, fileMetadata: FileMetadata): Promise<UploadFileResponse> {
    return this.fileManager.uploadFile(filePath, fileMetadata);
  }

  /** Lists previously uploaded files. */
  listFiles(listParams?: ListParams, requestOptions?: SingleRequestOptions): Promise<ListFilesResponse> {
    return this.fileManager.listFiles(listParams, requestOptions);
  }

  /** Fetches metadata for a single uploaded file by its id. */
  getFile(fileId: string, requestOptions?: SingleRequestOptions): Promise<FileMetadataResponse> {
    return this.fileManager.getFile(fileId, requestOptions);
  }

  /** Deletes an uploaded file by its id. */
  deleteFile(fileId: string): Promise<void> {
    return this.fileManager.deleteFile(fileId);
  }

  /** Creates a new cached-content entry. */
  createCache(createOptions: CachedContentCreateParams): Promise<CachedContent> {
    return this.cacheManager.create(createOptions);
  }

  /** Lists existing cached-content entries. */
  listCaches(listParams?: ListParams): Promise<ListCacheResponse> {
    return this.cacheManager.list(listParams);
  }

  /** Fetches a cached-content entry by name. */
  getCache(name: string): Promise<CachedContent> {
    return this.cacheManager.get(name);
  }

  /** Updates a cached-content entry (e.g. its TTL) by name. */
  updateCache(name: string, updateParams: CachedContentUpdateParams): Promise<CachedContent> {
    return this.cacheManager.update(name, updateParams);
  }

  /** Deletes a cached-content entry by name. */
  deleteCache(name: string): Promise<void> {
    return this.cacheManager.delete(name);
  }
}
76 changes: 76 additions & 0 deletions libs/langchain-google-genai/src/tests/context_caching.int.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
/* eslint-disable no-process-env */

import { test } from "@jest/globals";

import { ChatGoogleGenerativeAI } from "../chat_models.js";
import { fileURLToPath } from "node:url";
import * as path from "node:path";

import {
FileState,
UploadFileResponse,
} from '@google/generative-ai/server';
import { GoogleGenerativeAIContextCache } from "../context_caching.js";

// Shared fixtures for this integration test: the chat model under test
// (API key comes from the GOOGLE_API_KEY env var) and the upload result
// populated by beforeAll and read by the test body.
const model = new ChatGoogleGenerativeAI({});
let fileResult: UploadFileResponse;

// One-time setup: upload a fixture file, wait for Google to finish
// processing it, create a cached-content entry referencing it, and point
// the shared model at that cache.
beforeAll(async () => {
  const displayName = 'Sherlock Jr. video';

  const filename = fileURLToPath(import.meta.url);
  const dirname = path.dirname(filename);
  // NOTE(review): the fixture is a JPEG image, not a video, despite the
  // surrounding "video" naming — confirm whether a real video fixture
  // (e.g. a Sherlock Jr. clip) was intended here.
  const pathToVideoFile = path.join(dirname, "/data/hotdog.jpg");

  const contextCache = new GoogleGenerativeAIContextCache(process.env.GOOGLE_API_KEY || "");
  fileResult = await contextCache.uploadFile(pathToVideoFile, {
    displayName,
    // BUG FIX: the uploaded file is a .jpg, so its MIME type must be
    // image/jpeg; the previous 'video/mp4' misdescribed the payload to
    // the file API.
    mimeType: 'image/jpeg',
  });

  const { name, uri } = fileResult.file;

  // Poll getFile() on a set interval (2 seconds here) until the service
  // reports the file has left the PROCESSING state.
  let file = await contextCache.getFile(name);
  while (file.state === FileState.PROCESSING) {
    console.log('Waiting for video to be processed.');
    // Sleep for 2 seconds
    await new Promise((resolve) => setTimeout(resolve, 2_000));
    file = await contextCache.getFile(name);
  }
  console.log(`Video processing complete: ${uri}`);

  const systemInstruction =
    'You are an expert video analyzer, and your job is to answer ' +
    "the user's query based on the video file you have access to.";
  // Cache the uploaded file plus the system instruction so the model can
  // answer follow-up queries against it without resending the content.
  // The entry expires after ttlSeconds.
  const cachedContent = await contextCache.createCache({
    model: 'models/gemini-1.5-flash-001',
    displayName: 'sherlock jr movie',
    systemInstruction,
    contents: [
      {
        role: 'user',
        parts: [
          {
            fileData: {
              mimeType: fileResult.file.mimeType,
              fileUri: fileResult.file.uri,
            },
          },
        ],
      },
    ],
    ttlSeconds: 300,
  });

  model.enableCachedContent(cachedContent);
});

// Queries the model against the content cached in beforeAll and checks
// that a non-empty response comes back.
test("Test Google AI", async () => {
  const prompt =
    'Introduce different characters in the movie by describing ' +
    'their personality, looks, and names. Also list the ' +
    'timestamps they were introduced for the first time.';

  const res = await model.invoke(prompt);
  console.log(res);
  expect(res).toBeTruthy();
});

0 comments on commit 4348cda

Please sign in to comment.