
Commit 6658acb

lint and format
Chau Nguyen authored and KevinZJN committed Nov 13, 2024
1 parent a447435 commit 6658acb
Showing 3 changed files with 89 additions and 41 deletions.
32 changes: 20 additions & 12 deletions libs/langchain-google-genai/src/chat_models.ts
@@ -80,7 +80,7 @@ export interface GoogleGenerativeAIChatCallOptions
  */
 export interface GoogleGenerativeAIChatInput
   extends BaseChatModelParams,
-  Pick<GoogleGenerativeAIChatCallOptions, "streamUsage"> {
+    Pick<GoogleGenerativeAIChatCallOptions, "streamUsage"> {
   /**
    * @deprecated Use "model" instead.
    *
@@ -522,7 +522,8 @@ export interface GoogleGenerativeAIChatInput
  */
 export class ChatGoogleGenerativeAI
   extends BaseChatModel<GoogleGenerativeAIChatCallOptions, AIMessageChunk>
-  implements GoogleGenerativeAIChatInput {
+  implements GoogleGenerativeAIChatInput
+{
   static lc_name() {
     return "ChatGoogleGenerativeAI";
   }
@@ -611,9 +612,9 @@ export class ChatGoogleGenerativeAI
     if (!this.apiKey) {
       throw new Error(
         "Please set an API key for Google GenerativeAI " +
-        "in the environment variable GOOGLE_API_KEY " +
-        "or in the `apiKey` field of the " +
-        "ChatGoogleGenerativeAI constructor"
+          "in the environment variable GOOGLE_API_KEY " +
+          "or in the `apiKey` field of the " +
+          "ChatGoogleGenerativeAI constructor"
       );
     }
 
@@ -653,12 +654,19 @@ export class ChatGoogleGenerativeAI
     this.streamUsage = fields?.streamUsage ?? this.streamUsage;
   }
 
-  enableCachedContent(cachedContent: CachedContent,
-    modelParams?: ModelParams, requestOptions?: RequestOptions
+  enableCachedContent(
+    cachedContent: CachedContent,
+    modelParams?: ModelParams,
+    requestOptions?: RequestOptions
   ): void {
     if (!this.apiKey) return;
-    this.client = new GenerativeAI(this.apiKey)
-      .getGenerativeModelFromCachedContent(cachedContent, modelParams, requestOptions);
+    this.client = new GenerativeAI(
+      this.apiKey
+    ).getGenerativeModelFromCachedContent(
+      cachedContent,
+      modelParams,
+      requestOptions
+    );
   }
 
   getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
@@ -897,9 +905,9 @@ export class ChatGoogleGenerativeAI
   ):
     | Runnable<BaseLanguageModelInput, RunOutput>
     | Runnable<
-      BaseLanguageModelInput,
-      { raw: BaseMessage; parsed: RunOutput }
-    > {
+        BaseLanguageModelInput,
+        { raw: BaseMessage; parsed: RunOutput }
+      > {
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
     const schema: z.ZodType<RunOutput> | Record<string, any> = outputSchema;
     const name = config?.name;
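Aside: the reformatted enableCachedContent method above is what points the chat model at previously cached content. A minimal usage sketch, not part of this commit; it assumes a CachedContent value created elsewhere (e.g. with the wrapper in context_caching.ts below), and the model name and prompt are illustrative:

import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import type { CachedContent } from "@google/generative-ai";

declare const cachedContent: CachedContent; // created elsewhere, e.g. via the context-caching wrapper below

const model = new ChatGoogleGenerativeAI({
  apiKey: process.env.GOOGLE_API_KEY,
  model: "gemini-1.5-flash-001", // illustrative model name
});
// Swaps the underlying client for one bound to the cached content; no-op when no API key is set.
model.enableCachedContent(cachedContent);
const res = await model.invoke("Answer using the cached context."); // ESM top-level await assumed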
61 changes: 49 additions & 12 deletions libs/langchain-google-genai/src/context_caching.ts
@@ -1,35 +1,69 @@
-import { CachedContentCreateParams, CachedContentUpdateParams, FileMetadata, FileMetadataResponse, GoogleAICacheManager, ListCacheResponse, ListFilesResponse, ListParams, UploadFileResponse } from "@google/generative-ai/server";
-import { GoogleAIFileManager } from "@google/generative-ai/server";
-import { CachedContent, RequestOptions, SingleRequestOptions } from "@google/generative-ai";
+import {
+  CachedContentCreateParams,
+  CachedContentUpdateParams,
+  FileMetadata,
+  FileMetadataResponse,
+  GoogleAICacheManager,
+  ListCacheResponse,
+  ListFilesResponse,
+  ListParams,
+  UploadFileResponse,
+  GoogleAIFileManager,
+} from "@google/generative-ai/server";
+import {
+  CachedContent,
+  RequestOptions,
+  SingleRequestOptions,
+} from "@google/generative-ai";
 
 export class GoogleGenerativeAIContextCache {
   private fileManager: GoogleAIFileManager;
 
   private cacheManager: GoogleAICacheManager;
 
-  constructor(apiKey: string, fileManagerRequestOptions?: RequestOptions,
+  constructor(
+    apiKey: string,
+    fileManagerRequestOptions?: RequestOptions,
     cacheManagerRequestOptions?: RequestOptions
   ) {
-    this.fileManager = new GoogleAIFileManager(apiKey, fileManagerRequestOptions);
-    this.cacheManager = new GoogleAICacheManager(apiKey, cacheManagerRequestOptions);
+    this.fileManager = new GoogleAIFileManager(
+      apiKey,
+      fileManagerRequestOptions
+    );
+    this.cacheManager = new GoogleAICacheManager(
+      apiKey,
+      cacheManagerRequestOptions
+    );
   }
 
-  uploadFile(filePath: string, fileMetadata: FileMetadata): Promise<UploadFileResponse> {
+  uploadFile(
+    filePath: string,
+    fileMetadata: FileMetadata
+  ): Promise<UploadFileResponse> {
     return this.fileManager.uploadFile(filePath, fileMetadata);
   }
 
-  listFiles(listParams?: ListParams, requestOptions?: SingleRequestOptions): Promise<ListFilesResponse> {
+  listFiles(
+    listParams?: ListParams,
+    requestOptions?: SingleRequestOptions
+  ): Promise<ListFilesResponse> {
     return this.fileManager.listFiles(listParams, requestOptions);
   }
 
-  getFile(fileId: string, requestOptions?: SingleRequestOptions): Promise<FileMetadataResponse> {
+  getFile(
+    fileId: string,
+    requestOptions?: SingleRequestOptions
+  ): Promise<FileMetadataResponse> {
     return this.fileManager.getFile(fileId, requestOptions);
   }
 
   deleteFile(fileId: string): Promise<void> {
     return this.fileManager.deleteFile(fileId);
   }
 
-  createCache(createOptions: CachedContentCreateParams): Promise<CachedContent> {
+  createCache(
+    createOptions: CachedContentCreateParams
+  ): Promise<CachedContent> {
     return this.cacheManager.create(createOptions);
   }
 
@@ -41,11 +75,14 @@ export class GoogleGenerativeAIContextCache
     return this.cacheManager.get(name);
   }
 
-  updateCache(name: string, updateParams: CachedContentUpdateParams): Promise<CachedContent> {
+  updateCache(
+    name: string,
+    updateParams: CachedContentUpdateParams
+  ): Promise<CachedContent> {
     return this.cacheManager.update(name, updateParams);
   }
 
   deleteCache(name: string): Promise<void> {
     return this.cacheManager.delete(name);
   }
-}
\ No newline at end of file
+}
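Aside: combined with enableCachedContent in chat_models.ts, the wrapper supports a create-then-attach flow along these lines. A hedged sketch, not part of this commit: the display name, instruction, and contents payload are illustrative, and the integration test below exercises the full upload-and-poll variant:

import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
// Relative import as in the integration test below; the public export path may differ.
import { GoogleGenerativeAIContextCache } from "../context_caching.js";

const apiKey = process.env.GOOGLE_API_KEY ?? "";
const contextCache = new GoogleGenerativeAIContextCache(apiKey);

// Cache the expensive shared context once (uploaded file parts would go in `contents` too)...
const cachedContent = await contextCache.createCache({
  model: "models/gemini-1.5-flash-001",
  displayName: "demo cache", // illustrative name
  systemInstruction: "Answer questions about the cached material.",
  contents: [{ role: "user", parts: [{ text: "..." }] }], // illustrative payload
});

// ...then attach it to a chat model instead of resending the context on every call.
const model = new ChatGoogleGenerativeAI({ apiKey, model: "gemini-1.5-flash-001" });
model.enableCachedContent(cachedContent);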
37 changes: 20 additions & 17 deletions libs/langchain-google-genai/src/tests/context_caching.int.test.ts
@@ -2,32 +2,31 @@
 
 import { test } from "@jest/globals";
 
-import { ChatGoogleGenerativeAI } from "../chat_models.js";
 import { fileURLToPath } from "node:url";
 import * as path from "node:path";
 
-import {
-  FileState,
-  UploadFileResponse,
-} from '@google/generative-ai/server';
+import { FileState, UploadFileResponse } from "@google/generative-ai/server";
 import { GoogleGenerativeAIContextCache } from "../context_caching.js";
+import { ChatGoogleGenerativeAI } from "../chat_models.js";
 
 const model = new ChatGoogleGenerativeAI({});
 let fileResult: UploadFileResponse;
 
 beforeAll(async () => {
   // Download video file and save in src/tests/data
   // curl -O https://storage.googleapis.com/generativeai-downloads/data/Sherlock_Jr_FullMovie.mp4
-  const displayName = 'Sherlock Jr. video';
+  const displayName = "Sherlock Jr. video";
 
   const filename = fileURLToPath(import.meta.url);
   const dirname = path.dirname(filename);
   const pathToVideoFile = path.join(dirname, "/data/Sherlock_Jr_FullMovie.mp4");
 
-  const contextCache = new GoogleGenerativeAIContextCache(process.env.GOOGLE_API_KEY || "");
+  const contextCache = new GoogleGenerativeAIContextCache(
+    process.env.GOOGLE_API_KEY || ""
+  );
   fileResult = await contextCache.uploadFile(pathToVideoFile, {
     displayName,
-    mimeType: 'video/mp4',
+    mimeType: "video/mp4",
   });
 
   const { name } = fileResult.file;
@@ -36,20 +35,22 @@ beforeAll(async () => {
   let file = await contextCache.getFile(name);
   while (file.state === FileState.PROCESSING) {
     // Sleep for 2 seconds
-    await new Promise((resolve) => setTimeout(resolve, 2_000));
+    await new Promise((resolve) => {
+      setTimeout(resolve, 2_000);
+    });
     file = await contextCache.getFile(name);
   }
 
   const systemInstruction =
-    'You are an expert video analyzer, and your job is to answer ' +
+    "You are an expert video analyzer, and your job is to answer " +
     "the user's query based on the video file you have access to.";
   const cachedContent = await contextCache.createCache({
-    model: 'models/gemini-1.5-flash-001',
-    displayName: 'sherlock jr movie',
+    model: "models/gemini-1.5-flash-001",
+    displayName: "sherlock jr movie",
     systemInstruction,
     contents: [
       {
-        role: 'user',
+        role: "user",
         parts: [
           {
             fileData: {
@@ -67,10 +68,12 @@ beforeAll(async () => {
 }, 10 * 60 * 1000); // Set timeout to 10 minutes to upload file
 
 test("Test Google AI", async () => {
-  const res = await model.invoke('Introduce different characters in the movie by describing ' +
-    'their personality, looks, and names. Also list the ' +
-    'timestamps they were introduced for the first time.');
+  const res = await model.invoke(
+    "Introduce different characters in the movie by describing " +
+      "their personality, looks, and names. Also list the " +
+      "timestamps they were introduced for the first time."
+  );
 
   console.log(res);
   expect(res).toBeTruthy();
-});
\ No newline at end of file
+});