From bcea967f5578932ede73dbd3fe85f2b3f274635f Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 28 Aug 2024 13:09:12 +0200 Subject: [PATCH 01/95] add boilerplate --- packages/langchain/internal.d.ts | 3 +++ packages/langchain/internal.js | 15 +++++++++++ packages/langchain/jest.config.mjs | 5 ++++ packages/langchain/package.json | 38 ++++++++++++++++++++++++++++ packages/langchain/src/index.ts | 1 + packages/langchain/src/internal.ts | 0 packages/langchain/src/openai.ts | 0 packages/langchain/tsconfig.cjs.json | 8 ++++++ packages/langchain/tsconfig.json | 13 ++++++++++ 9 files changed, 83 insertions(+) create mode 100644 packages/langchain/internal.d.ts create mode 100644 packages/langchain/internal.js create mode 100644 packages/langchain/jest.config.mjs create mode 100644 packages/langchain/package.json create mode 100644 packages/langchain/src/index.ts create mode 100644 packages/langchain/src/internal.ts create mode 100644 packages/langchain/src/openai.ts create mode 100644 packages/langchain/tsconfig.cjs.json create mode 100644 packages/langchain/tsconfig.json diff --git a/packages/langchain/internal.d.ts b/packages/langchain/internal.d.ts new file mode 100644 index 000000000..bf1fe07d1 --- /dev/null +++ b/packages/langchain/internal.d.ts @@ -0,0 +1,3 @@ +// eslint-disable-next-line import/no-internal-modules +export * from './dist/internal.js'; +// # sourceMappingURL=internal.d.ts.map diff --git a/packages/langchain/internal.js b/packages/langchain/internal.js new file mode 100644 index 000000000..065a92458 --- /dev/null +++ b/packages/langchain/internal.js @@ -0,0 +1,15 @@ +'use strict'; +function __export(m) { + for (const p in m) { + if (!exports.hasOwnProperty(p)) { + exports[p] = m[p]; + } + } +} +Object.defineProperty(exports, '__esModule', { value: true }); +/** + * @packageDocumentation + * @experimental The internal module is related to sdk-metadata types which are used only internally. 
+ */ +__export(require('./dist/internal')); +// # sourceMappingURL=internal.js.map diff --git a/packages/langchain/jest.config.mjs b/packages/langchain/jest.config.mjs new file mode 100644 index 000000000..b09fd0973 --- /dev/null +++ b/packages/langchain/jest.config.mjs @@ -0,0 +1,5 @@ +import config from '../../jest.config.mjs'; +export default { + ...config, + displayName: 'langchain', +}; diff --git a/packages/langchain/package.json b/packages/langchain/package.json new file mode 100644 index 000000000..e23dd82b8 --- /dev/null +++ b/packages/langchain/package.json @@ -0,0 +1,38 @@ +{ + "name": "@sap-ai-sdk/langchain", + "version": "0.0.0", + "description": "", + "license": "Apache-2.0", + "keywords": [ + "sap-ai-sdk", + "gen-ai-hub", + "orchestration", + "llm-access" + ], + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "internal.js", + "internal.d.ts" + ], + "scripts": { + "compile": "tsc", + "compile:cjs": "tsc -p tsconfig.cjs.json", + "test": "NODE_OPTIONS=--experimental-vm-modules jest", + "lint": "eslint \"**/*.ts\" && prettier . --config ../../.prettierrc --ignore-path ../../.prettierignore -c", + "lint:fix": "eslint \"**/*.ts\" --fix && prettier . 
--config ../../.prettierrc --ignore-path ../../.prettierignore -w --log-level error" + }, + "dependencies": { + "@sap-ai-sdk/ai-core": "workspace:^", + "@sap-ai-sdk/gen-ai-hub": "workspace:^" + }, + "devDependencies": { + "typescript": "^5.5.4", + } + } + \ No newline at end of file diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts new file mode 100644 index 000000000..f01045b8f --- /dev/null +++ b/packages/langchain/src/index.ts @@ -0,0 +1 @@ +export * from './openai.js' \ No newline at end of file diff --git a/packages/langchain/src/internal.ts b/packages/langchain/src/internal.ts new file mode 100644 index 000000000..e69de29bb diff --git a/packages/langchain/src/openai.ts b/packages/langchain/src/openai.ts new file mode 100644 index 000000000..e69de29bb diff --git a/packages/langchain/tsconfig.cjs.json b/packages/langchain/tsconfig.cjs.json new file mode 100644 index 000000000..3a6e73061 --- /dev/null +++ b/packages/langchain/tsconfig.cjs.json @@ -0,0 +1,8 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "module": "CommonJS", + "outDir": "./dist-cjs" + } +} + \ No newline at end of file diff --git a/packages/langchain/tsconfig.json b/packages/langchain/tsconfig.json new file mode 100644 index 000000000..43fd2cdd6 --- /dev/null +++ b/packages/langchain/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./dist", + "tsBuildInfoFile": "./dist/.tsbuildinfo", + "composite": true + }, + "include": ["src/**/*.ts"], + "exclude": ["dist/**/*", "test/**/*", "**/*.test.ts", "node_modules/**/*"], + "references": [{ "path": "../gen-ai-hub" }] + } + \ No newline at end of file From 47c99e8595b49724d86b354ec0734ca7597482e8 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 28 Aug 2024 13:18:42 +0200 Subject: [PATCH 02/95] add dependencies --- packages/langchain/package.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/packages/langchain/package.json b/packages/langchain/package.json index e23dd82b8..7a507c407 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -29,7 +29,9 @@ }, "dependencies": { "@sap-ai-sdk/ai-core": "workspace:^", - "@sap-ai-sdk/gen-ai-hub": "workspace:^" + "@sap-ai-sdk/gen-ai-hub": "workspace:^", + "@langchain/core": "^0.2.30", + "@langchain/openai": "^0.2.8" }, "devDependencies": { "typescript": "^5.5.4", From 4e04a330a6044bce660fcd8a300fbc747e03268c Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 29 Aug 2024 15:17:03 +0200 Subject: [PATCH 03/95] commit temp --- eslint.config.js | 6 + packages/langchain/package.json | 4 +- packages/langchain/src/openai.ts | 122 ++++++++++++++ pnpm-lock.yaml | 270 +++++++++++++++++++++++++++++++ pnpm-workspace.yaml | 3 +- 5 files changed, 402 insertions(+), 3 deletions(-) diff --git a/eslint.config.js b/eslint.config.js index c63dabe48..49095d295 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -21,5 +21,11 @@ export default [ rules: { '@typescript-eslint/explicit-module-boundary-types': 'off' } + }, + { + files: ['packages/langchain/**/*.ts'], + rules: { + 'import/no-internal-modules': 'off' + } } ]; diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 7a507c407..7d61b9558 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -34,7 +34,7 @@ "@langchain/openai": "^0.2.8" }, "devDependencies": { - "typescript": "^5.5.4", + "typescript": "^5.5.4" } - } +} \ No newline at end of file diff --git a/packages/langchain/src/openai.ts b/packages/langchain/src/openai.ts index e69de29bb..92f4fe3f7 100644 --- a/packages/langchain/src/openai.ts +++ b/packages/langchain/src/openai.ts @@ -0,0 +1,122 @@ +/** + * TODO: + * 1. Decide on constructor (what defaults to set) + * 2. Overwrite _generate method + * 3. Call our OpenAI client + * 4. (Optional) Handle streaming + * 5. 
Parse response + */ + +import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; +import { BaseLLMParams } from '@langchain/core/language_models/llms'; +import { LLMResult } from '@langchain/core/outputs'; +import { OpenAI, OpenAIInput } from '@langchain/openai'; +import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; +import { OpenAiChatCompletionParameters, OpenAiChatMessage, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; + +/** + * async chatCompletion( + model: OpenAiChatModel | { name: OpenAiChatModel; version: string }, + data: OpenAiChatCompletionParameters, + deploymentResolver?: DeploymentResolver, + requestConfig?: CustomRequestConfig + */ + +interface ModelDeployment { + modelName: OpenAiChatModel | { name: OpenAiChatModel, version: string } +} + +/** + * Input for Text generation for OpenAI GPT. + */ +// ASSUMPTION: We remove all these types from OpenAIInput, because we assume that the types in OpenAiChatCompletionParameters are the correct ones +export interface OpenAIInputParameters + extends Omit, + OpenAiChatCompletionParameters, + BaseLLMParams, + ModelDeployment {} + +/** + * OpenAI GPT Language Model Wrapper to generate texts. + */ +export class BTPOpenAIGPT extends OpenAI implements OpenAIInputParameters { + private openAiClient: OpenAiClient; + + deployment_id: BTPOpenAIGPTTextModel; + modelName: OpenAiChatModel + messages: OpenAiChatMessage[]; + + constructor(fields: Partial) { + super({ ...fields, stop: [], n: 1, modelName: '', openAIApiKey: 'dummy' }); + this.modelName = fields?.modelName as OpenAiChatModel; + this.stop = fields?.stop as any; + this.n = fields?.n as any; + this.messages = fields.messages; + + + this.deployment_id = fields?.deployment_id ?? 
'text-davinci-003'; + + // LLM client + this.openAiClient = new OpenAiClient(); + } + + override async _generate( + prompts: string[], + options: this['ParsedCallOptions'], + runManager?: CallbackManagerForLLMRun, + ): Promise { + const res = await this.caller.callWithOptions( + { + signal: options.signal, + }, + () => + this.openAiClient.chatCompletion(this.modelName, + { + // replace with messages thingy + messages: this.messages, + max_tokens: this.maxTokens === -1 ? undefined : this.maxTokens, + temperature: this.temperature, + top_p: this.topP, + logit_bias: this.logitBias, + n: this.n, + stop: options?.stop ?? this.stop, + presence_penalty: this.presencePenalty, + frequency_penalty: this.frequencyPenalty, + } + ), + ); + + // currently BTP LLM Proxy for OpenAI doesn't support streaming + // ASSUMPTION: assuming we only offer chat models -> we can cast the response to string as this is always the expected output + await runManager?.handleLLMNewToken(res.choices[0].message.content as string); + + return { + generations: res.choices.map((c) => [ + { + text: c.text, + generationInfo: { + finish_reason: c.finish_reason, + index: c.index, + logprobs: c.logprobs, + }, + }, + ]), + llmOutput: { + created: res.created, + id: res.id, + model: res.model, + object: res.object, + tokenUsage: { + completionTokens: res.usage.completion_tokens, + promptTokens: res.usage.prompt_tokens, + totalTokens: res.usage.total_tokens, + }, + }, + }; + } +} + +/** + * @deprecated Use {@link BTPOpenAIGPT} instead. 
+ */ +export const BTPOpenAI = BTPOpenAIGPT; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2415ef5d8..e5fb0584d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -131,6 +131,25 @@ importers: specifier: ^5.5.4 version: 5.5.4 + packages/langchain: + dependencies: + '@langchain/core': + specifier: ^0.2.30 + version: 0.2.30(openai@4.56.1(zod@3.23.8)) + '@langchain/openai': + specifier: ^0.2.8 + version: 0.2.8 + '@sap-ai-sdk/ai-core': + specifier: workspace:^ + version: link:../ai-core + '@sap-ai-sdk/gen-ai-hub': + specifier: workspace:^ + version: link:../gen-ai-hub + devDependencies: + typescript: + specifier: ^5.5.4 + version: 5.5.4 + sample-code: dependencies: '@sap-ai-sdk/ai-core': @@ -653,6 +672,14 @@ packages: '@jsdevtools/ono@7.1.3': resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} + '@langchain/core@0.2.30': + resolution: {integrity: sha512-jeLmLTxnEq9zSq0J/fMlBCMT5Ix8tbZriqNYTm3oS7CPM2uHBcRQhV3fpsh4G8FnE7Pxa4sWfrFzc2jykhlk7A==} + engines: {node: '>=18'} + + '@langchain/openai@0.2.8': + resolution: {integrity: sha512-p5fxEAKuR8UV9jWIxkZ6AY/vAPSYxJI0Pf/UM4T3FKk/dn99G/mAEDLhfI4pBf7B8o8TudSVyBW2hRjZqlQu7g==} + engines: {node: '>=18'} + '@manypkg/find-root@1.1.0': resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} @@ -820,9 +847,15 @@ packages: '@types/minimist@1.2.5': resolution: {integrity: sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==} + '@types/node-fetch@2.6.11': + resolution: {integrity: sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==} + '@types/node@12.20.55': resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} + '@types/node@18.19.47': + resolution: {integrity: 
sha512-1f7dB3BL/bpd9tnDJrrHb66Y+cVrhxSOTGorRNdHwYTUlTay3HuTDPKo9a/4vX9pMQkhYBcAbL4jQdNlhCFP9A==} + '@types/node@20.16.0': resolution: {integrity: sha512-vDxceJcoZhIVh67S568bm1UGZO0DX0hpplJZxzeXMKwIPLn190ec5RRxQ69BKhX44SUGIxxgMdDY557lGLKprQ==} @@ -838,6 +871,9 @@ packages: '@types/range-parser@1.2.7': resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==} + '@types/retry@0.12.0': + resolution: {integrity: sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==} + '@types/semver@7.5.8': resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==} @@ -853,6 +889,9 @@ packages: '@types/triple-beam@1.3.5': resolution: {integrity: sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==} + '@types/uuid@10.0.0': + resolution: {integrity: sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==} + '@types/yargs-parser@21.0.3': resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} @@ -920,6 +959,10 @@ packages: '@ungap/structured-clone@1.2.0': resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==} + abort-controller@3.0.0: + resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} + engines: {node: '>=6.5'} + accepts@1.3.8: resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} engines: {node: '>= 0.6'} @@ -938,6 +981,10 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + agentkeepalive@4.5.0: + resolution: {integrity: sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==} + engines: {node: '>= 8.0.0'} + ajv-draft-04@1.0.0: 
resolution: {integrity: sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==} peerDependencies: @@ -1088,6 +1135,9 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + better-path-resolve@1.0.0: resolution: {integrity: sha512-pbnl5XzGBdrFU/wT4jqmJVPn2B6UHPBOhzMQkY/SPUPB6QtUXtmBHBIwCbXJol93mOpGMnQyP/+BB19q04xj7g==} engines: {node: '>=4'} @@ -1221,6 +1271,10 @@ packages: resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} + commander@10.0.1: + resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} + engines: {node: '>=14'} + comment-parser@1.4.1: resolution: {integrity: sha512-buhp5kePrmda3vhc5B9t7pUQXAb2Tnd0qgpkIhPhkHXxJpiPJ11H0ZEU0oBpJ2QztSbzG/ZxMj/CHsYJqRHmyg==} engines: {node: '>= 12.0.0'} @@ -1614,6 +1668,13 @@ packages: resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} engines: {node: '>= 0.6'} + event-target-shim@5.0.1: + resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} + engines: {node: '>=6'} + + eventemitter3@4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + execa@5.1.1: resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} engines: {node: '>=10'} @@ -1732,10 +1793,17 @@ packages: resolution: {integrity: sha512-PXUUyLqrR2XCWICfv6ukppP96sdFwWbNEnfEMt7jNsISjMsvaLNinAHNDYyvkyU+SZG2BTSbT5NjG+vZslfGTA==} engines: {node: '>=14'} + 
form-data-encoder@1.7.2: + resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==} + form-data@4.0.0: resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} engines: {node: '>= 6'} + formdata-node@4.4.1: + resolution: {integrity: sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==} + engines: {node: '>= 12.20'} + forwarded@0.2.0: resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} engines: {node: '>= 0.6'} @@ -1897,6 +1965,9 @@ packages: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} engines: {node: '>=10.17.0'} + humanize-ms@1.2.1: + resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} + iconv-lite@0.4.24: resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} engines: {node: '>=0.10.0'} @@ -2260,6 +2331,9 @@ packages: node-notifier: optional: true + js-tiktoken@1.0.14: + resolution: {integrity: sha512-Pk3l3WOgM9joguZY2k52+jH82RtABRgB5RdGFZNUGbOKGMVlNmafcPA3b0ITcCZPu1L9UclP1tne6aw7ZI4Myg==} + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -2337,6 +2411,20 @@ packages: kuler@2.0.0: resolution: {integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==} + langsmith@0.1.48: + resolution: {integrity: sha512-lh98dZeShVPG1VzENpbjFWiburyYpChsO7nehGwxuqQ5/E1BBFKpsDCxLTcgXYpgWFPJxRvMqq7bLeq/txjakw==} + peerDependencies: + '@langchain/core': '*' + langchain: '*' + openai: '*' + peerDependenciesMeta: + '@langchain/core': + optional: true + langchain: + optional: true + openai: + optional: 
true + leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} @@ -2512,6 +2600,10 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + mustache@4.2.0: + resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} + hasBin: true + natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} @@ -2527,6 +2619,10 @@ packages: resolution: {integrity: sha512-t1QzWwnk4sjLWaQAS8CHgOJ+RAfmHpxFWmc36IWTiWHQfs0w5JDMBS1b1ZxQteo0vVVuWJvIUKHDkkeK7vIGCg==} engines: {node: '>= 8.0.0'} + node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + node-fetch-h2@2.3.0: resolution: {integrity: sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==} engines: {node: 4.x || >=6.0.0} @@ -2618,6 +2714,15 @@ packages: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} + openai@4.56.1: + resolution: {integrity: sha512-XMsxdjrWBYgbP6EsDIwbhkQEgeyL2C41te/QrJm8kdfho22exhTUJ/cFJSmCTToam/RSOC1BlOylHvD6i/bmsA==} + hasBin: true + peerDependencies: + zod: ^3.23.8 + peerDependenciesMeta: + zod: + optional: true + openapi-types@12.1.3: resolution: {integrity: sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==} @@ -2640,6 +2745,10 @@ packages: resolution: {integrity: sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw==} engines: {node: '>=8'} + p-finally@1.0.0: + resolution: {integrity: 
sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} + engines: {node: '>=4'} + p-limit@2.3.0: resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} engines: {node: '>=6'} @@ -2660,6 +2769,18 @@ packages: resolution: {integrity: sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==} engines: {node: '>=6'} + p-queue@6.6.2: + resolution: {integrity: sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==} + engines: {node: '>=8'} + + p-retry@4.6.2: + resolution: {integrity: sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==} + engines: {node: '>=8'} + + p-timeout@3.2.0: + resolution: {integrity: sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==} + engines: {node: '>=8'} + p-try@2.2.0: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} @@ -3269,6 +3390,9 @@ packages: unbox-primitive@1.0.2: resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} + undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + undici-types@6.19.6: resolution: {integrity: sha512-e/vggGopEfTKSvj4ihnOLTsqhrKRN3LeO6qSN/GxohhuRv8qH9bNQ4B8W7e/vFL+0XTnmHPB4/kegunZGA4Org==} @@ -3300,6 +3424,10 @@ packages: resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} engines: {node: '>= 0.4.0'} + uuid@10.0.0: + resolution: {integrity: sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==} + hasBin: true + v8-compile-cache-lib@3.0.1: resolution: {integrity: 
sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} @@ -3324,6 +3452,10 @@ packages: walker@1.0.8: resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} + web-streams-polyfill@4.0.0-beta.3: + resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} + engines: {node: '>= 14'} + webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} @@ -3418,6 +3550,14 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + zod-to-json-schema@3.23.2: + resolution: {integrity: sha512-uSt90Gzc/tUfyNqxnjlfBs8W6WSGpNBv0rVsNxP/BVSMHMKGdthPYff4xtCHYloJGM0CFxFsb3NbC0eqPhfImw==} + peerDependencies: + zod: ^3.23.3 + + zod@3.23.8: + resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} + snapshots: '@ampproject/remapping@2.3.0': @@ -4171,6 +4311,34 @@ snapshots: '@jsdevtools/ono@7.1.3': {} + '@langchain/core@0.2.30(openai@4.56.1(zod@3.23.8))': + dependencies: + ansi-styles: 5.2.0 + camelcase: 6.3.0 + decamelize: 1.2.0 + js-tiktoken: 1.0.14 + langsmith: 0.1.48(@langchain/core@0.2.30(openai@4.56.1(zod@3.23.8)))(openai@4.56.1(zod@3.23.8)) + mustache: 4.2.0 + p-queue: 6.6.2 + p-retry: 4.6.2 + uuid: 10.0.0 + zod: 3.23.8 + zod-to-json-schema: 3.23.2(zod@3.23.8) + transitivePeerDependencies: + - langchain + - openai + + '@langchain/openai@0.2.8': + dependencies: + '@langchain/core': 0.2.30(openai@4.56.1(zod@3.23.8)) + js-tiktoken: 1.0.14 + openai: 4.56.1(zod@3.23.8) + zod: 3.23.8 + zod-to-json-schema: 3.23.2(zod@3.23.8) + transitivePeerDependencies: + - encoding + - langchain + '@manypkg/find-root@1.1.0': dependencies: '@babel/runtime': 7.25.0 @@ -4474,8 +4642,17 @@ 
snapshots: '@types/minimist@1.2.5': {} + '@types/node-fetch@2.6.11': + dependencies: + '@types/node': 20.16.1 + form-data: 4.0.0 + '@types/node@12.20.55': {} + '@types/node@18.19.47': + dependencies: + undici-types: 5.26.5 + '@types/node@20.16.0': dependencies: undici-types: 6.19.6 @@ -4490,6 +4667,8 @@ snapshots: '@types/range-parser@1.2.7': {} + '@types/retry@0.12.0': {} + '@types/semver@7.5.8': {} '@types/send@0.17.4': @@ -4507,6 +4686,8 @@ snapshots: '@types/triple-beam@1.3.5': {} + '@types/uuid@10.0.0': {} + '@types/yargs-parser@21.0.3': {} '@types/yargs@17.0.32': @@ -4596,6 +4777,10 @@ snapshots: '@ungap/structured-clone@1.2.0': {} + abort-controller@3.0.0: + dependencies: + event-target-shim: 5.0.1 + accepts@1.3.8: dependencies: mime-types: 2.1.35 @@ -4611,6 +4796,10 @@ snapshots: acorn@8.12.1: {} + agentkeepalive@4.5.0: + dependencies: + humanize-ms: 1.2.1 + ajv-draft-04@1.0.0(ajv@8.16.0): optionalDependencies: ajv: 8.16.0 @@ -4805,6 +4994,8 @@ snapshots: balanced-match@1.0.2: {} + base64-js@1.5.1: {} + better-path-resolve@1.0.0: dependencies: is-windows: 1.0.2 @@ -4948,6 +5139,8 @@ snapshots: dependencies: delayed-stream: 1.0.0 + commander@10.0.1: {} + comment-parser@1.4.1: {} concat-map@0.0.1: {} @@ -5365,6 +5558,10 @@ snapshots: etag@1.8.1: {} + event-target-shim@5.0.1: {} + + eventemitter3@4.0.7: {} + execa@5.1.1: dependencies: cross-spawn: 7.0.3 @@ -5535,12 +5732,19 @@ snapshots: cross-spawn: 7.0.3 signal-exit: 4.1.0 + form-data-encoder@1.7.2: {} + form-data@4.0.0: dependencies: asynckit: 0.4.0 combined-stream: 1.0.8 mime-types: 2.1.35 + formdata-node@4.4.1: + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 4.0.0-beta.3 + forwarded@0.2.0: {} fresh@0.5.2: {} @@ -5699,6 +5903,10 @@ snapshots: human-signals@2.1.0: {} + humanize-ms@1.2.1: + dependencies: + ms: 2.1.3 + iconv-lite@0.4.24: dependencies: safer-buffer: 2.1.2 @@ -6289,6 +6497,10 @@ snapshots: - supports-color - ts-node + js-tiktoken@1.0.14: + dependencies: + base64-js: 1.5.1 + 
js-tokens@4.0.0: {} js-yaml@3.14.1: @@ -6366,6 +6578,18 @@ snapshots: kuler@2.0.0: {} + langsmith@0.1.48(@langchain/core@0.2.30(openai@4.56.1(zod@3.23.8)))(openai@4.56.1(zod@3.23.8)): + dependencies: + '@types/uuid': 10.0.0 + commander: 10.0.1 + p-queue: 6.6.2 + p-retry: 4.6.2 + semver: 7.6.3 + uuid: 10.0.0 + optionalDependencies: + '@langchain/core': 0.2.30(openai@4.56.1(zod@3.23.8)) + openai: 4.56.1(zod@3.23.8) + leven@3.1.0: {} levn@0.4.1: @@ -6525,6 +6749,8 @@ snapshots: ms@2.1.3: {} + mustache@4.2.0: {} + natural-compare@1.4.0: {} negotiator@0.6.3: {} @@ -6541,6 +6767,8 @@ snapshots: dependencies: clone: 2.1.2 + node-domexception@1.0.0: {} + node-fetch-h2@2.3.0: dependencies: http2-client: 1.3.5 @@ -6654,6 +6882,20 @@ snapshots: dependencies: mimic-fn: 2.1.0 + openai@4.56.1(zod@3.23.8): + dependencies: + '@types/node': 18.19.47 + '@types/node-fetch': 2.6.11 + abort-controller: 3.0.0 + agentkeepalive: 4.5.0 + form-data-encoder: 1.7.2 + formdata-node: 4.4.1 + node-fetch: 2.7.0 + optionalDependencies: + zod: 3.23.8 + transitivePeerDependencies: + - encoding + openapi-types@12.1.3: {} opossum@8.1.4: {} @@ -6675,6 +6917,8 @@ snapshots: dependencies: p-map: 2.1.0 + p-finally@1.0.0: {} + p-limit@2.3.0: dependencies: p-try: 2.2.0 @@ -6693,6 +6937,20 @@ snapshots: p-map@2.1.0: {} + p-queue@6.6.2: + dependencies: + eventemitter3: 4.0.7 + p-timeout: 3.2.0 + + p-retry@4.6.2: + dependencies: + '@types/retry': 0.12.0 + retry: 0.13.1 + + p-timeout@3.2.0: + dependencies: + p-finally: 1.0.0 + p-try@2.2.0: {} package-json-from-dist@1.0.0: {} @@ -7315,6 +7573,8 @@ snapshots: has-symbols: 1.0.3 which-boxed-primitive: 1.0.2 + undici-types@5.26.5: {} + undici-types@6.19.6: {} universalify@0.1.2: {} @@ -7337,6 +7597,8 @@ snapshots: utils-merge@1.0.1: {} + uuid@10.0.0: {} + v8-compile-cache-lib@3.0.1: {} v8-to-istanbul@9.3.0: @@ -7364,6 +7626,8 @@ snapshots: dependencies: makeerror: 1.0.12 + web-streams-polyfill@4.0.0-beta.3: {} + webidl-conversions@3.0.1: {} whatwg-url@5.0.0: @@ 
-7473,3 +7737,9 @@ snapshots: yn@3.1.1: {} yocto-queue@0.1.0: {} + + zod-to-json-schema@3.23.2(zod@3.23.8): + dependencies: + zod: 3.23.8 + + zod@3.23.8: {} diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 3139fac47..96eeb0a84 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -3,7 +3,8 @@ packages: - 'packages/ai-core' - 'packages/gen-ai-hub' - 'packages/core' + - 'packages/langchain' - 'sample-code' - 'tests/e2e-tests' - 'tests/type-tests' - - 'tests/smoke-tests' + - 'tests/smoke-tests' \ No newline at end of file From 4ab7b4e9fcf1c3f978334cd09001c9cadd90e967 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 29 Aug 2024 17:55:00 +0200 Subject: [PATCH 04/95] fix lockfile --- pnpm-lock.yaml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5c52c5f6a..bad3e3629 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -853,6 +853,9 @@ packages: '@types/node@12.20.55': resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} + '@types/node@18.19.47': + resolution: {integrity: sha512-1f7dB3BL/bpd9tnDJrrHb66Y+cVrhxSOTGorRNdHwYTUlTay3HuTDPKo9a/4vX9pMQkhYBcAbL4jQdNlhCFP9A==} + '@types/node@20.16.1': resolution: {integrity: sha512-zJDo7wEadFtSyNz5QITDfRcrhqDvQI1xQNQ0VoizPjM/dVAODqqIUWbJPkvsxmTI0MYRGRikcdjMPhOssnPejQ==} @@ -4641,11 +4644,15 @@ snapshots: '@types/node-fetch@2.6.11': dependencies: - '@types/node': 20.16.1 + '@types/node': 20.16.2 form-data: 4.0.0 '@types/node@12.20.55': {} + '@types/node@18.19.47': + dependencies: + undici-types: 5.26.5 + '@types/node@20.16.1': dependencies: undici-types: 6.19.6 From b9bde164c788c1537285584946e1c087ec7316c3 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 30 Aug 2024 16:01:11 +0200 Subject: [PATCH 05/95] change structure --- packages/langchain/internal.d.ts | 3 - packages/langchain/internal.js | 15 --- packages/langchain/src/chat/index.ts | 1 + .../src/{internal.ts => 
chat/openai.ts} | 0 packages/langchain/src/embedding/index.ts | 1 + packages/langchain/src/embedding/openai.ts | 0 packages/langchain/src/index.ts | 3 +- packages/langchain/src/openai.ts | 122 ------------------ packages/langchain/tsconfig.json | 3 +- 9 files changed, 5 insertions(+), 143 deletions(-) delete mode 100644 packages/langchain/internal.d.ts delete mode 100644 packages/langchain/internal.js create mode 100644 packages/langchain/src/chat/index.ts rename packages/langchain/src/{internal.ts => chat/openai.ts} (100%) create mode 100644 packages/langchain/src/embedding/index.ts create mode 100644 packages/langchain/src/embedding/openai.ts delete mode 100644 packages/langchain/src/openai.ts diff --git a/packages/langchain/internal.d.ts b/packages/langchain/internal.d.ts deleted file mode 100644 index bf1fe07d1..000000000 --- a/packages/langchain/internal.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -// eslint-disable-next-line import/no-internal-modules -export * from './dist/internal.js'; -// # sourceMappingURL=internal.d.ts.map diff --git a/packages/langchain/internal.js b/packages/langchain/internal.js deleted file mode 100644 index 065a92458..000000000 --- a/packages/langchain/internal.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict'; -function __export(m) { - for (const p in m) { - if (!exports.hasOwnProperty(p)) { - exports[p] = m[p]; - } - } -} -Object.defineProperty(exports, '__esModule', { value: true }); -/** - * @packageDocumentation - * @experimental The internal module is related to sdk-metadata types which are used only internally. 
- */ -__export(require('./dist/internal')); -// # sourceMappingURL=internal.js.map diff --git a/packages/langchain/src/chat/index.ts b/packages/langchain/src/chat/index.ts new file mode 100644 index 000000000..754274f47 --- /dev/null +++ b/packages/langchain/src/chat/index.ts @@ -0,0 +1 @@ +export * from './openai.js'; diff --git a/packages/langchain/src/internal.ts b/packages/langchain/src/chat/openai.ts similarity index 100% rename from packages/langchain/src/internal.ts rename to packages/langchain/src/chat/openai.ts diff --git a/packages/langchain/src/embedding/index.ts b/packages/langchain/src/embedding/index.ts new file mode 100644 index 000000000..754274f47 --- /dev/null +++ b/packages/langchain/src/embedding/index.ts @@ -0,0 +1 @@ +export * from './openai.js'; diff --git a/packages/langchain/src/embedding/openai.ts b/packages/langchain/src/embedding/openai.ts new file mode 100644 index 000000000..e69de29bb diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index f01045b8f..e762bc704 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1 +1,2 @@ -export * from './openai.js' \ No newline at end of file +export * from './chat/index.js'; +export * from './embedding/index.js'; diff --git a/packages/langchain/src/openai.ts b/packages/langchain/src/openai.ts deleted file mode 100644 index 92f4fe3f7..000000000 --- a/packages/langchain/src/openai.ts +++ /dev/null @@ -1,122 +0,0 @@ -/** - * TODO: - * 1. Decide on constructor (what defaults to set) - * 2. Overwrite _generate method - * 3. Call our OpenAI client - * 4. (Optional) Handle streaming - * 5. 
Parse response - */ - -import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; -import { BaseLLMParams } from '@langchain/core/language_models/llms'; -import { LLMResult } from '@langchain/core/outputs'; -import { OpenAI, OpenAIInput } from '@langchain/openai'; -import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; -import { OpenAiChatCompletionParameters, OpenAiChatMessage, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; - -/** - * async chatCompletion( - model: OpenAiChatModel | { name: OpenAiChatModel; version: string }, - data: OpenAiChatCompletionParameters, - deploymentResolver?: DeploymentResolver, - requestConfig?: CustomRequestConfig - */ - -interface ModelDeployment { - modelName: OpenAiChatModel | { name: OpenAiChatModel, version: string } -} - -/** - * Input for Text generation for OpenAI GPT. - */ -// ASSUMPTION: We remove all these types from OpenAIInput, because we assume that the types in OpenAiChatCompletionParameters are the correct ones -export interface OpenAIInputParameters - extends Omit, - OpenAiChatCompletionParameters, - BaseLLMParams, - ModelDeployment {} - -/** - * OpenAI GPT Language Model Wrapper to generate texts. - */ -export class BTPOpenAIGPT extends OpenAI implements OpenAIInputParameters { - private openAiClient: OpenAiClient; - - deployment_id: BTPOpenAIGPTTextModel; - modelName: OpenAiChatModel - messages: OpenAiChatMessage[]; - - constructor(fields: Partial) { - super({ ...fields, stop: [], n: 1, modelName: '', openAIApiKey: 'dummy' }); - this.modelName = fields?.modelName as OpenAiChatModel; - this.stop = fields?.stop as any; - this.n = fields?.n as any; - this.messages = fields.messages; - - - this.deployment_id = fields?.deployment_id ?? 
'text-davinci-003'; - - // LLM client - this.openAiClient = new OpenAiClient(); - } - - override async _generate( - prompts: string[], - options: this['ParsedCallOptions'], - runManager?: CallbackManagerForLLMRun, - ): Promise { - const res = await this.caller.callWithOptions( - { - signal: options.signal, - }, - () => - this.openAiClient.chatCompletion(this.modelName, - { - // replace with messages thingy - messages: this.messages, - max_tokens: this.maxTokens === -1 ? undefined : this.maxTokens, - temperature: this.temperature, - top_p: this.topP, - logit_bias: this.logitBias, - n: this.n, - stop: options?.stop ?? this.stop, - presence_penalty: this.presencePenalty, - frequency_penalty: this.frequencyPenalty, - } - ), - ); - - // currently BTP LLM Proxy for OpenAI doesn't support streaming - // ASSUMPTION: assuming we only offer chat models -> we can cast the response to string as this is always the expected output - await runManager?.handleLLMNewToken(res.choices[0].message.content as string); - - return { - generations: res.choices.map((c) => [ - { - text: c.text, - generationInfo: { - finish_reason: c.finish_reason, - index: c.index, - logprobs: c.logprobs, - }, - }, - ]), - llmOutput: { - created: res.created, - id: res.id, - model: res.model, - object: res.object, - tokenUsage: { - completionTokens: res.usage.completion_tokens, - promptTokens: res.usage.prompt_tokens, - totalTokens: res.usage.total_tokens, - }, - }, - }; - } -} - -/** - * @deprecated Use {@link BTPOpenAIGPT} instead. 
- */ -export const BTPOpenAI = BTPOpenAIGPT; diff --git a/packages/langchain/tsconfig.json b/packages/langchain/tsconfig.json index 43fd2cdd6..f8793365f 100644 --- a/packages/langchain/tsconfig.json +++ b/packages/langchain/tsconfig.json @@ -9,5 +9,4 @@ "include": ["src/**/*.ts"], "exclude": ["dist/**/*", "test/**/*", "**/*.test.ts", "node_modules/**/*"], "references": [{ "path": "../gen-ai-hub" }] - } - \ No newline at end of file +} From ae95a75ee9402d36d5ba1ce18de46c7ed26c436a Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 30 Aug 2024 16:14:33 +0200 Subject: [PATCH 06/95] WIP --- packages/langchain/src/chat/openai.ts | 235 +++++++++++++++++++++ packages/langchain/src/embedding/openai.ts | 62 ++++++ 2 files changed, 297 insertions(+) diff --git a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts index e69de29bb..3a4844216 100644 --- a/packages/langchain/src/chat/openai.ts +++ b/packages/langchain/src/chat/openai.ts @@ -0,0 +1,235 @@ +import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; +import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; +import { AIMessage, BaseMessage, ChatMessage, ToolMessage } from '@langchain/core/messages'; +import { ChatResult } from '@langchain/core/outputs'; +import { StructuredTool } from '@langchain/core/tools'; +import { ChatOpenAI, ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; +import { zodToJsonSchema } from 'zod-to-json-schema'; +import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; +import { BTPBaseLLMParameters } from '../../client/base.js'; +import { + BTPOpenAIGPTAssistantMessage, + BTPOpenAIGPTChatCompletionResult, + BTPOpenAIGPTChatModel, + BTPOpenAIGPTFunction, + BTPOpenAIGPTFunctionCall, + BTPOpenAIGPTMessage, + BTPOpenAIGPTTool, + BTPOpenAIGPTToolMessage, +} from '../../client/openai.js'; +import { BTPLLMError } from '../../core/error.js'; + +/** + * Input for Text generation for OpenAI GPT. 
+ */ +export interface BTPOpenAIGPTChatInput + extends Omit, + BTPBaseLLMParameters, + BaseChatModelParams {} + +/** + * Chat Call options. + */ +interface BTPOpenAIChatCallOptions + extends Omit { + functions?: BTPOpenAIGPTFunction[]; + function_call?: BTPOpenAIGPTFunctionCall; + tools?: BTPOpenAIGPTTool[]; +} + +/** + * OpenAI Language Model Wrapper to generate texts. + */ +export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInput { + declare CallOptions: BTPOpenAIChatCallOptions; + + private btpOpenAIClient: OpenAiClient; + + deployment_id: BTPOpenAIGPTChatModel; + + constructor(fields?: Partial) { + super({ ...fields, openAIApiKey: 'dummy' }); + + this.deployment_id = fields?.deployment_id ?? 'gpt-35-turbo'; + + // LLM client + this.btpOpenAIClient = new OpenAiClient(); + } + + override get callKeys(): (keyof BTPOpenAIChatCallOptions)[] { + return [ + ...(super.callKeys as (keyof BTPOpenAIChatCallOptions)[]), + 'options', + 'function_call', + 'functions', + 'tools', + 'tool_choice', + 'response_format', + 'seed', + ]; + } + + override get lc_secrets(): { [key: string]: string } | undefined { + // overrides default keys as not applicable in BTP + return {}; + } + + override get lc_aliases(): Record { + // overrides default keys as not applicable in BTP + return {}; + } + + override async _generate( + messages: BaseMessage[], + options: this['CallOptions'], + runManager?: CallbackManagerForLLMRun, + ): Promise { + function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { + return tools !== undefined && tools.every((tool) => Array.isArray((tool as StructuredTool).lc_namespace)); + } + const res = await this.caller.callWithOptions( + { + signal: options.signal, + }, + () => + this.btpOpenAIClient.chatCompletion( + { + messages: messages.map(this.mapBaseMessageToBTPOpenAIMessage.bind(this)), + deployment_id: this.deployment_id, + max_tokens: this.maxTokens === -1 ? 
undefined : this.maxTokens, + temperature: this.temperature, + top_p: this.topP, + logit_bias: this.logitBias, + n: this.n, + stop: options?.stop ?? this.stop, + presence_penalty: this.presencePenalty, + frequency_penalty: this.frequencyPenalty, + functions: isStructuredToolArray(options?.functions) + ? options?.functions.map(this.mapToolToBTPOpenAIFunction.bind(this)) + : options?.functions, + tools: isStructuredToolArray(options?.tools) + ? options?.tools.map(this.mapToolToBTPOpenAITool.bind(this)) + : options?.tools, + tool_choice: options?.tool_choice, + response_format: options?.response_format, + seed: options?.seed, + ...this.modelKwargs, + }, + this.deployment_id + ), + ); + + // currently BTP LLM Proxy for OpenAI doesn't support streaming + await runManager?.handleLLMNewToken( + typeof res.choices[0].message.content === 'string' ? res.choices[0].message.content : '', + ); + + return this.mapBTPOpenAIMessagesToChatResult(res); + } + + /** + * Maps a LangChain {@link StructuredTool} to {@link BTPOpenAIGPTFunction}. + */ + protected mapToolToBTPOpenAIFunction(tool: StructuredTool): BTPOpenAIGPTFunction { + return { + name: tool.name, + description: tool.description, + parameters: zodToJsonSchema(tool.schema), + }; + } + + /** + * Maps a LangChain {@link StructuredTool} to {@link BTPOpenAIGPTTool}. + */ + protected mapToolToBTPOpenAITool(tool: StructuredTool): BTPOpenAIGPTTool { + return { + type: 'function', + function: { + name: tool.name, + description: tool.description, + parameters: zodToJsonSchema(tool.schema), + }, + }; + } + + /** + * Maps a {@link BaseMessage} to OpenAI's Message Role. 
+ */ + protected mapBaseMessageToRole(message: BaseMessage): BTPOpenAIGPTMessage['role'] { + switch (message._getType()) { + case 'ai': + return 'assistant'; + case 'human': + return 'user'; + case 'system': + return 'system'; + case 'function': + return 'function'; + case 'tool': + return 'tool'; + case 'generic': + return (message as ChatMessage).role as BTPOpenAIGPTMessage['role']; + default: + throw new BTPLLMError(`Unknown message type: ${message._getType()}`); + } + } + + /** + * Maps {@link BaseMessage} to OpenAI Messages. + */ + protected mapBaseMessageToBTPOpenAIMessage(message: BaseMessage): BTPOpenAIGPTMessage { + return { + content: message.content, + name: message.name, + role: this.mapBaseMessageToRole(message), + function_call: message.additional_kwargs.function_call, + tool_calls: message.additional_kwargs.tool_calls, + tool_call_id: (message as ToolMessage).tool_call_id, + } as BTPOpenAIGPTMessage; + } + + /** + * Maps OpenAI messages to LangChain's {@link ChatResult}. + */ + protected mapBTPOpenAIMessagesToChatResult(res: BTPOpenAIGPTChatCompletionResult): ChatResult { + return { + generations: res.choices.map((c) => ({ + text: (c.message as BTPOpenAIGPTAssistantMessage).content || '', + message: new AIMessage({ + content: (c.message as BTPOpenAIGPTAssistantMessage).content || '', + additional_kwargs: { + finish_reason: c.finish_reason, + index: c.index, + logprobs: c.logprobs, + function_call: (c.message as BTPOpenAIGPTAssistantMessage).function_call, // add `function_call` parameter + tool_calls: (c.message as BTPOpenAIGPTAssistantMessage).tool_calls, + tool_call_id: (c.message as BTPOpenAIGPTToolMessage).tool_call_id, + }, + }), + generationInfo: { + finish_reason: c.finish_reason, + index: c.index, + logprobs: c.logprobs, + function_call: (c.message as BTPOpenAIGPTAssistantMessage).function_call, // add `function_call` parameter + tool_calls: (c.message as BTPOpenAIGPTAssistantMessage).tool_calls, + }, + })), + llmOutput: { + created: 
res.created, + id: res.id, + model: res.model, + object: res.object, + tokenUsage: { + completionTokens: res.usage.completion_tokens, + promptTokens: res.usage.prompt_tokens, + totalTokens: res.usage.total_tokens, + }, + }, + }; + } +} + +/** + * @deprecated Use {@link BTPOpenAIGPTChat} instead. + */ +export const BTPOpenAIChat = BTPOpenAIGPTChat; diff --git a/packages/langchain/src/embedding/openai.ts b/packages/langchain/src/embedding/openai.ts index e69de29bb..30300c618 100644 --- a/packages/langchain/src/embedding/openai.ts +++ b/packages/langchain/src/embedding/openai.ts @@ -0,0 +1,62 @@ +import { BaseLLMParams } from '@langchain/core/language_models/llms'; +import { OpenAIEmbeddingsParams, OpenAIEmbeddings } from '@langchain/openai'; +import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; +import { BTPBaseLLMParameters } from '../../client/base.js'; +import { BTPOpenAIGPTEmbeddingParameters, BTPOpenAIGPTEmbeddingModel } from '../../client/openai.js'; +import { chunkArray } from '../../core/utils.js'; + +/** + * Input for Text generation for OpenAI GPT. + */ +export interface BTPOpenAIGPTEmbeddingInput + extends Omit, + BTPBaseLLMParameters, + BaseLLMParams {} + +/** + * OpenAI GPT Language Model Wrapper to embed texts. + */ +export class BTPOpenAIGPTEmbedding extends OpenAIEmbeddings implements BTPOpenAIGPTEmbeddingInput { + private btpOpenAIClient: OpenAiClient; + + deployment_id: BTPOpenAIGPTEmbeddingModel; + + constructor(fields?: Partial) { + super({ ...fields, openAIApiKey: 'dummy' }); + + this.deployment_id = fields?.deployment_id ?? 'text-embedding-ada-002-v2'; + + // LLM client + this.btpOpenAIClient = new OpenAiClient(); + } + + override async embedDocuments(documents: string[]): Promise { + const chunkedPrompts = chunkArray( + this.stripNewLines ? 
documents.map((t) => t.replace(/\n/g, ' ')) : documents, + this.batchSize, + ); + const embeddings: number[][] = []; + for await (const promptChunk of chunkedPrompts) { + const resArr = await this.createEmbedding(promptChunk); + resArr.forEach((res) => embeddings.push(res.embedding)); + } + return embeddings; + } + + override async embedQuery(query: string): Promise { + const resArr = await this.createEmbedding(this.stripNewLines ? query.replace(/\n/g, ' ') : query); + return resArr[0].embedding; + } + + private async createEmbedding(query: BTPOpenAIGPTEmbeddingParameters['input']) { + const res = await this.caller.callWithOptions({}, () => + this.btpOpenAIClient.embeddings(query, this.deployment_id), + ); + return res.data; + } +} + +/** + * @deprecated Use {@link BTPOpenAIGPTEmbedding} instead. + */ +export const BTPOpenAIEmbedding = BTPOpenAIGPTEmbedding; From 7f2fbc2e3422ff87cd02297c28d8b2d9bb2c9b56 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 2 Sep 2024 13:06:43 +0200 Subject: [PATCH 07/95] temp --- packages/langchain/src/chat/openai.ts | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts index 3a4844216..d870185b9 100644 --- a/packages/langchain/src/chat/openai.ts +++ b/packages/langchain/src/chat/openai.ts @@ -5,17 +5,14 @@ import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; import { ChatOpenAI, ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; import { zodToJsonSchema } from 'zod-to-json-schema'; -import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; +import { OpenAiChatAssistantMessage, OpenAiChatModel, OpenAiChatToolMessage, OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; import { BTPBaseLLMParameters } from '../../client/base.js'; import { - BTPOpenAIGPTAssistantMessage, BTPOpenAIGPTChatCompletionResult, - BTPOpenAIGPTChatModel, BTPOpenAIGPTFunction, 
BTPOpenAIGPTFunctionCall, BTPOpenAIGPTMessage, BTPOpenAIGPTTool, - BTPOpenAIGPTToolMessage, } from '../../client/openai.js'; import { BTPLLMError } from '../../core/error.js'; @@ -24,7 +21,7 @@ import { BTPLLMError } from '../../core/error.js'; */ export interface BTPOpenAIGPTChatInput extends Omit, - BTPBaseLLMParameters, + BTPBaseLLMParameters, BaseChatModelParams {} /** @@ -45,7 +42,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu private btpOpenAIClient: OpenAiClient; - deployment_id: BTPOpenAIGPTChatModel; + deployment_id: OpenAiChatModel; constructor(fields?: Partial) { super({ ...fields, openAIApiKey: 'dummy' }); @@ -194,24 +191,24 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu protected mapBTPOpenAIMessagesToChatResult(res: BTPOpenAIGPTChatCompletionResult): ChatResult { return { generations: res.choices.map((c) => ({ - text: (c.message as BTPOpenAIGPTAssistantMessage).content || '', + text: (c.message as OpenAiChatAssistantMessage).content || '', message: new AIMessage({ - content: (c.message as BTPOpenAIGPTAssistantMessage).content || '', + content: (c.message as OpenAiChatAssistantMessage).content || '', additional_kwargs: { finish_reason: c.finish_reason, index: c.index, logprobs: c.logprobs, - function_call: (c.message as BTPOpenAIGPTAssistantMessage).function_call, // add `function_call` parameter - tool_calls: (c.message as BTPOpenAIGPTAssistantMessage).tool_calls, - tool_call_id: (c.message as BTPOpenAIGPTToolMessage).tool_call_id, + function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter + tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, + tool_call_id: (c.message as OpenAiChatToolMessage).tool_call_id, }, }), generationInfo: { finish_reason: c.finish_reason, index: c.index, logprobs: c.logprobs, - function_call: (c.message as BTPOpenAIGPTAssistantMessage).function_call, // add `function_call` parameter - 
tool_calls: (c.message as BTPOpenAIGPTAssistantMessage).tool_calls, + function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter + tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, }, })), llmOutput: { From 739f287b7f2c91d2bc3fde88e0871c7d65f250ef Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 2 Sep 2024 16:24:56 +0200 Subject: [PATCH 08/95] replace most types --- packages/langchain/src/chat/openai.ts | 48 ++++++++++++++------------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts index d870185b9..a13e37877 100644 --- a/packages/langchain/src/chat/openai.ts +++ b/packages/langchain/src/chat/openai.ts @@ -5,15 +5,18 @@ import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; import { ChatOpenAI, ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; import { zodToJsonSchema } from 'zod-to-json-schema'; -import { OpenAiChatAssistantMessage, OpenAiChatModel, OpenAiChatToolMessage, OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; -import { BTPBaseLLMParameters } from '../../client/base.js'; import { - BTPOpenAIGPTChatCompletionResult, - BTPOpenAIGPTFunction, - BTPOpenAIGPTFunctionCall, - BTPOpenAIGPTMessage, - BTPOpenAIGPTTool, -} from '../../client/openai.js'; + OpenAiChatAssistantMessage, + OpenAiChatModel, + OpenAiChatToolMessage, + OpenAiClient, + OpenAiChatCompletionFunction, + OpenAiChatFunctionCall, + OpenAiChatMessage, + OpenAiChatCompletionTool, + OpenAiChatCompletionOutput +} from '@sap-ai-sdk/gen-ai-hub'; +import { BTPBaseLLMParameters } from '../../client/base.js'; import { BTPLLMError } from '../../core/error.js'; /** @@ -29,9 +32,9 @@ export interface BTPOpenAIGPTChatInput */ interface BTPOpenAIChatCallOptions extends Omit { - functions?: BTPOpenAIGPTFunction[]; - function_call?: BTPOpenAIGPTFunctionCall; - tools?: BTPOpenAIGPTTool[]; 
+ functions?: OpenAiChatCompletionFunction[]; + function_call?: OpenAiChatFunctionCall; + tools?: OpenAiChatCompletionTool[]; } /** @@ -40,11 +43,10 @@ interface BTPOpenAIChatCallOptions export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInput { declare CallOptions: BTPOpenAIChatCallOptions; - private btpOpenAIClient: OpenAiClient; - deployment_id: OpenAiChatModel; + private btpOpenAIClient: OpenAiClient; - constructor(fields?: Partial) { + constructor(fields: BTPOpenAIGPTChatInput) { super({ ...fields, openAIApiKey: 'dummy' }); this.deployment_id = fields?.deployment_id ?? 'gpt-35-turbo'; @@ -125,9 +127,9 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu } /** - * Maps a LangChain {@link StructuredTool} to {@link BTPOpenAIGPTFunction}. + * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. */ - protected mapToolToBTPOpenAIFunction(tool: StructuredTool): BTPOpenAIGPTFunction { + protected mapToolToBTPOpenAIFunction(tool: StructuredTool): OpenAiChatCompletionFunction { return { name: tool.name, description: tool.description, @@ -136,9 +138,9 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu } /** - * Maps a LangChain {@link StructuredTool} to {@link BTPOpenAIGPTTool}. + * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionTool}. */ - protected mapToolToBTPOpenAITool(tool: StructuredTool): BTPOpenAIGPTTool { + protected mapToolToBTPOpenAITool(tool: StructuredTool): OpenAiChatCompletionTool { return { type: 'function', function: { @@ -152,7 +154,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu /** * Maps a {@link BaseMessage} to OpenAI's Message Role. 
*/ - protected mapBaseMessageToRole(message: BaseMessage): BTPOpenAIGPTMessage['role'] { + protected mapBaseMessageToRole(message: BaseMessage): OpenAiChatMessage['role'] { switch (message._getType()) { case 'ai': return 'assistant'; @@ -165,7 +167,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu case 'tool': return 'tool'; case 'generic': - return (message as ChatMessage).role as BTPOpenAIGPTMessage['role']; + return (message as ChatMessage).role as OpenAiChatMessage['role']; default: throw new BTPLLMError(`Unknown message type: ${message._getType()}`); } @@ -174,7 +176,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu /** * Maps {@link BaseMessage} to OpenAI Messages. */ - protected mapBaseMessageToBTPOpenAIMessage(message: BaseMessage): BTPOpenAIGPTMessage { + protected mapBaseMessageToBTPOpenAIMessage(message: BaseMessage): OpenAiChatMessage { return { content: message.content, name: message.name, @@ -182,13 +184,13 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, tool_call_id: (message as ToolMessage).tool_call_id, - } as BTPOpenAIGPTMessage; + } as OpenAiChatMessage; } /** * Maps OpenAI messages to LangChain's {@link ChatResult}. 
*/ - protected mapBTPOpenAIMessagesToChatResult(res: BTPOpenAIGPTChatCompletionResult): ChatResult { + protected mapBTPOpenAIMessagesToChatResult(res: OpenAiChatCompletionOutput): ChatResult { return { generations: res.choices.map((c) => ({ text: (c.message as OpenAiChatAssistantMessage).content || '', From f8025f17a4c655f9579266410299ab035143b6cd Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 2 Sep 2024 16:29:44 +0200 Subject: [PATCH 09/95] adjust most types --- packages/langchain/src/embedding/openai.ts | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/packages/langchain/src/embedding/openai.ts b/packages/langchain/src/embedding/openai.ts index 30300c618..391ac8758 100644 --- a/packages/langchain/src/embedding/openai.ts +++ b/packages/langchain/src/embedding/openai.ts @@ -1,8 +1,7 @@ import { BaseLLMParams } from '@langchain/core/language_models/llms'; import { OpenAIEmbeddingsParams, OpenAIEmbeddings } from '@langchain/openai'; -import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; +import { OpenAiClient, OpenAiEmbeddingModel, OpenAiEmbeddingParameters } from '@sap-ai-sdk/gen-ai-hub'; import { BTPBaseLLMParameters } from '../../client/base.js'; -import { BTPOpenAIGPTEmbeddingParameters, BTPOpenAIGPTEmbeddingModel } from '../../client/openai.js'; import { chunkArray } from '../../core/utils.js'; /** @@ -10,17 +9,16 @@ import { chunkArray } from '../../core/utils.js'; */ export interface BTPOpenAIGPTEmbeddingInput extends Omit, - BTPBaseLLMParameters, + BTPBaseLLMParameters, BaseLLMParams {} /** * OpenAI GPT Language Model Wrapper to embed texts. 
*/ export class BTPOpenAIGPTEmbedding extends OpenAIEmbeddings implements BTPOpenAIGPTEmbeddingInput { + deployment_id: OpenAiEmbeddingModel; private btpOpenAIClient: OpenAiClient; - deployment_id: BTPOpenAIGPTEmbeddingModel; - constructor(fields?: Partial) { super({ ...fields, openAIApiKey: 'dummy' }); @@ -48,7 +46,7 @@ export class BTPOpenAIGPTEmbedding extends OpenAIEmbeddings implements BTPOpenAI return resArr[0].embedding; } - private async createEmbedding(query: BTPOpenAIGPTEmbeddingParameters['input']) { + private async createEmbedding(query: OpenAiEmbeddingParameters['input']) { const res = await this.caller.callWithOptions({}, () => this.btpOpenAIClient.embeddings(query, this.deployment_id), ); From c4a44670451dc0dfd607d01fa37f0168dfb40d30 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 10:57:42 +0200 Subject: [PATCH 10/95] add dependency and update methods --- packages/langchain/src/chat/openai.ts | 15 ++++++------ pnpm-lock.yaml | 35 +++++++++++++-------------- 2 files changed, 24 insertions(+), 26 deletions(-) diff --git a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts index a13e37877..9fa195a79 100644 --- a/packages/langchain/src/chat/openai.ts +++ b/packages/langchain/src/chat/openai.ts @@ -17,7 +17,6 @@ import { OpenAiChatCompletionOutput } from '@sap-ai-sdk/gen-ai-hub'; import { BTPBaseLLMParameters } from '../../client/base.js'; -import { BTPLLMError } from '../../core/error.js'; /** * Input for Text generation for OpenAI GPT. @@ -93,7 +92,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu () => this.btpOpenAIClient.chatCompletion( { - messages: messages.map(this.mapBaseMessageToBTPOpenAIMessage.bind(this)), + messages: messages.map(this.mapBaseMessageToOpenAIChatMessage.bind(this)), deployment_id: this.deployment_id, max_tokens: this.maxTokens === -1 ? 
undefined : this.maxTokens, temperature: this.temperature, @@ -107,7 +106,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu ? options?.functions.map(this.mapToolToBTPOpenAIFunction.bind(this)) : options?.functions, tools: isStructuredToolArray(options?.tools) - ? options?.tools.map(this.mapToolToBTPOpenAITool.bind(this)) + ? options?.tools.map(this.mapToolToOpenAITool.bind(this)) : options?.tools, tool_choice: options?.tool_choice, response_format: options?.response_format, @@ -123,7 +122,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu typeof res.choices[0].message.content === 'string' ? res.choices[0].message.content : '', ); - return this.mapBTPOpenAIMessagesToChatResult(res); + return this.mapResponseToChatResult(res); } /** @@ -140,7 +139,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionTool}. */ - protected mapToolToBTPOpenAITool(tool: StructuredTool): OpenAiChatCompletionTool { + protected mapToolToOpenAITool(tool: StructuredTool): OpenAiChatCompletionTool { return { type: 'function', function: { @@ -169,14 +168,14 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu case 'generic': return (message as ChatMessage).role as OpenAiChatMessage['role']; default: - throw new BTPLLMError(`Unknown message type: ${message._getType()}`); + throw new Error(`Unknown message type: ${message._getType()}`); } } /** * Maps {@link BaseMessage} to OpenAI Messages. */ - protected mapBaseMessageToBTPOpenAIMessage(message: BaseMessage): OpenAiChatMessage { + protected mapBaseMessageToOpenAIChatMessage(message: BaseMessage): OpenAiChatMessage { return { content: message.content, name: message.name, @@ -190,7 +189,7 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu /** * Maps OpenAI messages to LangChain's {@link ChatResult}. 
*/ - protected mapBTPOpenAIMessagesToChatResult(res: OpenAiChatCompletionOutput): ChatResult { + protected mapResponseToChatResult(res: OpenAiChatCompletionOutput): ChatResult { return { generations: res.choices.map((c) => ({ text: (c.message as OpenAiChatAssistantMessage).content || '', diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1fe110bf4..5cc3ec915 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -160,6 +160,9 @@ importers: '@sap-ai-sdk/gen-ai-hub': specifier: workspace:^ version: link:../gen-ai-hub + zod-to-json-schema: + specifier: ^3.23.2 + version: 3.23.2(zod@3.23.8) devDependencies: typescript: specifier: ^5.5.4 @@ -203,10 +206,10 @@ importers: dependencies: '@sap-ai-sdk/ai-core': specifier: canary - version: 0.0.1-20240901013059.0 + version: 0.0.1-20240903013041.0 '@sap-ai-sdk/gen-ai-hub': specifier: canary - version: 0.0.1-20240901013059.0 + version: 0.0.1-20240903013041.0 express: specifier: ^4.19.2 version: 4.19.2 @@ -721,14 +724,14 @@ packages: resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@sap-ai-sdk/ai-core@0.0.1-20240901013059.0': - resolution: {integrity: sha512-03lJxw/X15KuMIItU/Id8iiniQ7tg7c1jbcatLsaPaHihhaucYJvN1EhqktmLoQzkln/ZbRzffitmBwF9qI8+A==} + '@sap-ai-sdk/ai-core@0.0.1-20240903013041.0': + resolution: {integrity: sha512-8H43rXFPlAsQse2Ir9gEOZJ2XcugG2O6MZbvEcRJi+k3InOvvf4oUVtvlU7ihRNbD8QBmUSfk7pvXXJe19lq1Q==} - '@sap-ai-sdk/core@0.0.1-20240901013059.0': - resolution: {integrity: sha512-svtHctZI9XJVoH1cZhiUXbICbt7INP0phTbLMpmLaDsJNGfBe9edu8bB+tXchlh/ywxfO0wERFNDOQ2whShzUA==} + '@sap-ai-sdk/core@0.0.1-20240903013041.0': + resolution: {integrity: sha512-Dt/QAIjZueI/1l5I1ofJyZC9xF+AmJSfH6TSMKWzOvFmyPt6UE0z2gICJMi/2pK5+1HaD7T+e5SeXGLubK/jCA==} - '@sap-ai-sdk/gen-ai-hub@0.0.1-20240901013059.0': - resolution: {integrity: 
sha512-i5S56ZXMWXEC5rL4e3yKiqerNSuEr+mYXV1ZVoLyx1wf8JfN9bzeR064T8BYMW9Wr9IFLLxFz2bKieKzbj+YnQ==} + '@sap-ai-sdk/gen-ai-hub@0.0.1-20240903013041.0': + resolution: {integrity: sha512-LOJ3ji2j+7ezWKAHv/81l0LE0S8RZPFV/s7VfFQ/9lK0PZc18ZEIyENLlG1umWCqnMOT1vCw8XByOBA0kJfQAw==} '@sap-cloud-sdk/connectivity@3.20.0': resolution: {integrity: sha512-H9jWH6+owUu0vDiz1WWgB+o/1LzFnmmvELUHakdQSU1n930giPOBT9wwCmdbQgsQ+MJ4G6GURyqo9eKberBdXg==} @@ -4393,15 +4396,15 @@ snapshots: '@pkgr/core@0.1.1': {} - '@sap-ai-sdk/ai-core@0.0.1-20240901013059.0': + '@sap-ai-sdk/ai-core@0.0.1-20240903013041.0': dependencies: - '@sap-ai-sdk/core': 0.0.1-20240901013059.0 + '@sap-ai-sdk/core': 0.0.1-20240903013041.0 transitivePeerDependencies: - debug - encoding - supports-color - '@sap-ai-sdk/core@0.0.1-20240901013059.0': + '@sap-ai-sdk/core@0.0.1-20240903013041.0': dependencies: '@sap-cloud-sdk/connectivity': 3.20.0 '@sap-cloud-sdk/http-client': 3.20.0 @@ -4412,10 +4415,10 @@ snapshots: - encoding - supports-color - '@sap-ai-sdk/gen-ai-hub@0.0.1-20240901013059.0': + '@sap-ai-sdk/gen-ai-hub@0.0.1-20240903013041.0': dependencies: - '@sap-ai-sdk/ai-core': 0.0.1-20240901013059.0 - '@sap-ai-sdk/core': 0.0.1-20240901013059.0 + '@sap-ai-sdk/ai-core': 0.0.1-20240903013041.0 + '@sap-ai-sdk/core': 0.0.1-20240903013041.0 '@sap-cloud-sdk/connectivity': 3.20.0 '@sap-cloud-sdk/http-client': 3.20.0 '@sap-cloud-sdk/openapi': 3.20.0 @@ -4678,10 +4681,6 @@ snapshots: dependencies: undici-types: 5.26.5 - '@types/node@20.16.1': - dependencies: - undici-types: 5.26.5 - '@types/node@20.16.2': dependencies: undici-types: 6.19.6 From f8184abb9182d3a997cee83c702a8565cdedadea Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 11:00:16 +0200 Subject: [PATCH 11/95] remove redundant exports and change names --- packages/langchain/src/chat/openai.ts | 19 +++++++------------ packages/langchain/src/embedding/openai.ts | 7 +------ 2 files changed, 8 insertions(+), 18 deletions(-) diff --git 
a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts index 9fa195a79..f07aceb41 100644 --- a/packages/langchain/src/chat/openai.ts +++ b/packages/langchain/src/chat/openai.ts @@ -21,7 +21,7 @@ import { BTPBaseLLMParameters } from '../../client/base.js'; /** * Input for Text generation for OpenAI GPT. */ -export interface BTPOpenAIGPTChatInput +export interface OpenAIGPTChatInput extends Omit, BTPBaseLLMParameters, BaseChatModelParams {} @@ -29,7 +29,7 @@ export interface BTPOpenAIGPTChatInput /** * Chat Call options. */ -interface BTPOpenAIChatCallOptions +interface OpenAIChatCallOptions extends Omit { functions?: OpenAiChatCompletionFunction[]; function_call?: OpenAiChatFunctionCall; @@ -39,13 +39,13 @@ interface BTPOpenAIChatCallOptions /** * OpenAI Language Model Wrapper to generate texts. */ -export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInput { - declare CallOptions: BTPOpenAIChatCallOptions; +export class OpenAIGPTChat extends ChatOpenAI implements OpenAIGPTChatInput { + declare CallOptions: OpenAIChatCallOptions; deployment_id: OpenAiChatModel; private btpOpenAIClient: OpenAiClient; - constructor(fields: BTPOpenAIGPTChatInput) { + constructor(fields: OpenAIGPTChatInput) { super({ ...fields, openAIApiKey: 'dummy' }); this.deployment_id = fields?.deployment_id ?? 'gpt-35-turbo'; @@ -54,9 +54,9 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu this.btpOpenAIClient = new OpenAiClient(); } - override get callKeys(): (keyof BTPOpenAIChatCallOptions)[] { + override get callKeys(): (keyof OpenAIChatCallOptions)[] { return [ - ...(super.callKeys as (keyof BTPOpenAIChatCallOptions)[]), + ...(super.callKeys as (keyof OpenAIChatCallOptions)[]), 'options', 'function_call', 'functions', @@ -226,8 +226,3 @@ export class BTPOpenAIGPTChat extends ChatOpenAI implements BTPOpenAIGPTChatInpu }; } } - -/** - * @deprecated Use {@link BTPOpenAIGPTChat} instead. 
- */ -export const BTPOpenAIChat = BTPOpenAIGPTChat; diff --git a/packages/langchain/src/embedding/openai.ts b/packages/langchain/src/embedding/openai.ts index 391ac8758..cce7683fd 100644 --- a/packages/langchain/src/embedding/openai.ts +++ b/packages/langchain/src/embedding/openai.ts @@ -15,7 +15,7 @@ export interface BTPOpenAIGPTEmbeddingInput /** * OpenAI GPT Language Model Wrapper to embed texts. */ -export class BTPOpenAIGPTEmbedding extends OpenAIEmbeddings implements BTPOpenAIGPTEmbeddingInput { +export class OpenAIGPTEmbedding extends OpenAIEmbeddings implements BTPOpenAIGPTEmbeddingInput { deployment_id: OpenAiEmbeddingModel; private btpOpenAIClient: OpenAiClient; @@ -53,8 +53,3 @@ export class BTPOpenAIGPTEmbedding extends OpenAIEmbeddings implements BTPOpenAI return res.data; } } - -/** - * @deprecated Use {@link BTPOpenAIGPTEmbedding} instead. - */ -export const BTPOpenAIEmbedding = BTPOpenAIGPTEmbedding; From 2402f3b59692e48038c719782225297e1fd798c0 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 11:20:12 +0200 Subject: [PATCH 12/95] merge main --- pnpm-lock.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 40408a72b..0ea04d099 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -160,9 +160,6 @@ importers: '@sap-ai-sdk/gen-ai-hub': specifier: workspace:^ version: link:../gen-ai-hub - zod-to-json-schema: - specifier: ^3.23.2 - version: 3.23.2(zod@3.23.8) devDependencies: typescript: specifier: ^5.5.4 From 810284fbfb2bb569bd7ba73ebcce2a1be4511d6f Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 15:52:50 +0200 Subject: [PATCH 13/95] save --- packages/langchain/src/chat/openai.ts | 78 ++++++++++++++++++++++----- 1 file changed, 65 insertions(+), 13 deletions(-) diff --git a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts index f07aceb41..b867721bc 100644 --- a/packages/langchain/src/chat/openai.ts +++ b/packages/langchain/src/chat/openai.ts @@ -14,17 
+14,46 @@ import { OpenAiChatFunctionCall, OpenAiChatMessage, OpenAiChatCompletionTool, - OpenAiChatCompletionOutput + OpenAiChatCompletionOutput, + OpenAiChatCompletionParameters, + DeploymentIdConfiguration, + ModelDeployment } from '@sap-ai-sdk/gen-ai-hub'; -import { BTPBaseLLMParameters } from '../../client/base.js'; /** * Input for Text generation for OpenAI GPT. */ -export interface OpenAIGPTChatInput - extends Omit, - BTPBaseLLMParameters, - BaseChatModelParams {} +export interface OpenAIChatModelInterface + extends Omit, + Omit, + BaseChatModelParams { + /** + * The deployment ID of the model. + */ + deploymentId?: DeploymentIdConfiguration; + /** + * The version of the model. + */ + modelVersion?: string; + } + +/** + * Input for Text generation for OpenAI GPT. + */ +export type OpenAIChatModelInput = Omit & + Omit & + BaseChatModelParams & + ModelDeployment; /** * Chat Call options. @@ -39,16 +68,35 @@ interface OpenAIChatCallOptions /** * OpenAI Language Model Wrapper to generate texts. */ -export class OpenAIGPTChat extends ChatOpenAI implements OpenAIGPTChatInput { +export class OpenAIChatModel extends ChatOpenAI implements OpenAIChatModelInterface { declare CallOptions: OpenAIChatCallOptions; - deployment_id: OpenAiChatModel; + deploymentId?: DeploymentIdConfiguration; + modelVersion?: string; private btpOpenAIClient: OpenAiClient; - constructor(fields: OpenAIGPTChatInput) { - super({ ...fields, openAIApiKey: 'dummy' }); + constructor(fields: OpenAIChatModelInput) { + const defaultValues = new ChatOpenAI(); + const n = fields.n ?? defaultValues.n; + const stop = fields.stop ? Array.isArray(fields.stop) ? fields.stop : [fields.stop] : defaultValues.stop; + const temperature = fields.temperature ?? defaultValues.temperature; + const frequencyPenalty = fields.frequency_penalty ?? defaultValues.frequencyPenalty; + const presencePenalty = fields.presence_penalty ?? defaultValues.presencePenalty; + const topP = fields.top_p ?? 
defaultValues.topP; + + super({ + ...fields, + n, + stop, + temperature, + openAIApiKey: 'dummy', + frequencyPenalty, + presencePenalty, + topP + }); - this.deployment_id = fields?.deployment_id ?? 'gpt-35-turbo'; + this.deploymentId = fields.deploymenId; + this.modelVersion = fields.modelVersion; // LLM client this.btpOpenAIClient = new OpenAiClient(); @@ -93,7 +141,7 @@ export class OpenAIGPTChat extends ChatOpenAI implements OpenAIGPTChatInput { this.btpOpenAIClient.chatCompletion( { messages: messages.map(this.mapBaseMessageToOpenAIChatMessage.bind(this)), - deployment_id: this.deployment_id, + deployment_id: this.deploymentId, max_tokens: this.maxTokens === -1 ? undefined : this.maxTokens, temperature: this.temperature, top_p: this.topP, @@ -113,7 +161,11 @@ export class OpenAIGPTChat extends ChatOpenAI implements OpenAIGPTChatInput { seed: options?.seed, ...this.modelKwargs, }, - this.deployment_id + { + modelName: this.modelName ?? this.model, + deploymentId: this.deploymentId, + modelVersion: this.modelVersion + }, ), ); From 3b67a7c3c2f6662f2d81cfe2f192d588a4edcbe2 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 16:06:12 +0200 Subject: [PATCH 14/95] fix 99% problems --- packages/langchain/src/chat/openai.ts | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts index b867721bc..f2c17f854 100644 --- a/packages/langchain/src/chat/openai.ts +++ b/packages/langchain/src/chat/openai.ts @@ -7,7 +7,6 @@ import { ChatOpenAI, ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/o import { zodToJsonSchema } from 'zod-to-json-schema'; import { OpenAiChatAssistantMessage, - OpenAiChatModel, OpenAiChatToolMessage, OpenAiClient, OpenAiChatCompletionFunction, @@ -17,7 +16,7 @@ import { OpenAiChatCompletionOutput, OpenAiChatCompletionParameters, DeploymentIdConfiguration, - ModelDeployment + OpenAiChatModel } from 
'@sap-ai-sdk/gen-ai-hub'; /** @@ -30,7 +29,7 @@ export interface OpenAIChatModelInterface /** * The deployment ID of the model. */ - deploymentId?: DeploymentIdConfiguration; + deploymentId?: string; /** * The version of the model. */ @@ -53,7 +52,18 @@ export type OpenAIChatModelInput = Omit & Omit & BaseChatModelParams & - ModelDeployment; + DeploymentIdConfiguration & + { + /** + * The name of the model. + */ + modelName: OpenAiChatModel; + /** + * The version of the model. + */ + modelVersion?: string; + deploymentId?: string; + }; /** * Chat Call options. @@ -68,10 +78,10 @@ interface OpenAIChatCallOptions /** * OpenAI Language Model Wrapper to generate texts. */ -export class OpenAIChatModel extends ChatOpenAI implements OpenAIChatModelInterface { +export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { declare CallOptions: OpenAIChatCallOptions; - deploymentId?: DeploymentIdConfiguration; + deploymentId?: string; modelVersion?: string; private btpOpenAIClient: OpenAiClient; @@ -95,7 +105,7 @@ export class OpenAIChatModel extends ChatOpenAI implements OpenAIChatModelInterf topP }); - this.deploymentId = fields.deploymenId; + this.deploymentId = fields.deploymentId; this.modelVersion = fields.modelVersion; // LLM client From 92c63ef5863aff617aad610ecb701a7a9f87706a Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 16:12:27 +0200 Subject: [PATCH 15/95] add typedoc --- packages/langchain/src/chat/openai.ts | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts index f2c17f854..5bc3eb8ac 100644 --- a/packages/langchain/src/chat/openai.ts +++ b/packages/langchain/src/chat/openai.ts @@ -189,6 +189,8 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. 
+ * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. + * @returns The OpenAI Chat Completion Function. */ protected mapToolToBTPOpenAIFunction(tool: StructuredTool): OpenAiChatCompletionFunction { return { @@ -200,6 +202,8 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionTool}. + * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. + * @returns The OpenAI Chat Completion Tool. */ protected mapToolToOpenAITool(tool: StructuredTool): OpenAiChatCompletionTool { return { @@ -214,6 +218,8 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { /** * Maps a {@link BaseMessage} to OpenAI's Message Role. + * @param message - The message to map. + * @returns The OpenAI Message Role. */ protected mapBaseMessageToRole(message: BaseMessage): OpenAiChatMessage['role'] { switch (message._getType()) { @@ -236,6 +242,8 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { /** * Maps {@link BaseMessage} to OpenAI Messages. + * @param message - The message to map. + * @returns The OpenAI Chat Message. */ protected mapBaseMessageToOpenAIChatMessage(message: BaseMessage): OpenAiChatMessage { return { @@ -250,6 +258,8 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { /** * Maps OpenAI messages to LangChain's {@link ChatResult}. + * @param res - The OpenAI Chat Completion Output. + * @returns The LangChain Chat Result. 
*/ protected mapResponseToChatResult(res: OpenAiChatCompletionOutput): ChatResult { return { From ba8dec051a9098266ca2c03f32401d6b4767640f Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 17:22:04 +0200 Subject: [PATCH 16/95] restructure --- .../src/client/openai/openai-types.ts | 5 +- packages/gen-ai-hub/src/index.ts | 1 + packages/langchain/src/chat/index.ts | 2 +- packages/langchain/src/chat/openai.ts | 300 ------------------ packages/langchain/src/chat/openai/index.ts | 1 + packages/langchain/src/chat/openai/openai.ts | 137 ++++++++ packages/langchain/src/chat/openai/types.ts | 58 ++++ packages/langchain/src/chat/openai/util.ts | 114 +++++++ 8 files changed, 316 insertions(+), 302 deletions(-) delete mode 100644 packages/langchain/src/chat/openai.ts create mode 100644 packages/langchain/src/chat/openai/index.ts create mode 100644 packages/langchain/src/chat/openai/openai.ts create mode 100644 packages/langchain/src/chat/openai/types.ts create mode 100644 packages/langchain/src/chat/openai/util.ts diff --git a/packages/gen-ai-hub/src/client/openai/openai-types.ts b/packages/gen-ai-hub/src/client/openai/openai-types.ts index 13d432f85..cb0686491 100644 --- a/packages/gen-ai-hub/src/client/openai/openai-types.ts +++ b/packages/gen-ai-hub/src/client/openai/openai-types.ts @@ -434,7 +434,10 @@ interface OpenAiCompletionChoice { content_filter_results?: OpenAiContentFilterPromptResults; } -interface OpenAiChatCompletionChoice extends OpenAiCompletionChoice { +/** + * OpenAI chat completion choice. + */ +export interface OpenAiChatCompletionChoice extends OpenAiCompletionChoice { /** * Completion chat message. 
*/ diff --git a/packages/gen-ai-hub/src/index.ts b/packages/gen-ai-hub/src/index.ts index 1d8450cc9..b7e4b4b17 100644 --- a/packages/gen-ai-hub/src/index.ts +++ b/packages/gen-ai-hub/src/index.ts @@ -11,6 +11,7 @@ export type { OpenAiChatCompletionTool, OpenAiChatFunctionCall, OpenAiChatToolCall, + OpenAiChatCompletionChoice, OpenAiCompletionParameters, OpenAiChatCompletionParameters, OpenAiEmbeddingParameters, diff --git a/packages/langchain/src/chat/index.ts b/packages/langchain/src/chat/index.ts index 754274f47..06718ab5d 100644 --- a/packages/langchain/src/chat/index.ts +++ b/packages/langchain/src/chat/index.ts @@ -1 +1 @@ -export * from './openai.js'; +export * from './openai/index.js'; diff --git a/packages/langchain/src/chat/openai.ts b/packages/langchain/src/chat/openai.ts deleted file mode 100644 index 5bc3eb8ac..000000000 --- a/packages/langchain/src/chat/openai.ts +++ /dev/null @@ -1,300 +0,0 @@ -import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; -import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; -import { AIMessage, BaseMessage, ChatMessage, ToolMessage } from '@langchain/core/messages'; -import { ChatResult } from '@langchain/core/outputs'; -import { StructuredTool } from '@langchain/core/tools'; -import { ChatOpenAI, ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; -import { zodToJsonSchema } from 'zod-to-json-schema'; -import { - OpenAiChatAssistantMessage, - OpenAiChatToolMessage, - OpenAiClient, - OpenAiChatCompletionFunction, - OpenAiChatFunctionCall, - OpenAiChatMessage, - OpenAiChatCompletionTool, - OpenAiChatCompletionOutput, - OpenAiChatCompletionParameters, - DeploymentIdConfiguration, - OpenAiChatModel -} from '@sap-ai-sdk/gen-ai-hub'; - -/** - * Input for Text generation for OpenAI GPT. - */ -export interface OpenAIChatModelInterface - extends Omit, - Omit, - BaseChatModelParams { - /** - * The deployment ID of the model. 
- */ - deploymentId?: string; - /** - * The version of the model. - */ - modelVersion?: string; - } - -/** - * Input for Text generation for OpenAI GPT. - */ -export type OpenAIChatModelInput = Omit & - Omit & - BaseChatModelParams & - DeploymentIdConfiguration & - { - /** - * The name of the model. - */ - modelName: OpenAiChatModel; - /** - * The version of the model. - */ - modelVersion?: string; - deploymentId?: string; - }; - -/** - * Chat Call options. - */ -interface OpenAIChatCallOptions - extends Omit { - functions?: OpenAiChatCompletionFunction[]; - function_call?: OpenAiChatFunctionCall; - tools?: OpenAiChatCompletionTool[]; -} - -/** - * OpenAI Language Model Wrapper to generate texts. - */ -export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { - declare CallOptions: OpenAIChatCallOptions; - - deploymentId?: string; - modelVersion?: string; - private btpOpenAIClient: OpenAiClient; - - constructor(fields: OpenAIChatModelInput) { - const defaultValues = new ChatOpenAI(); - const n = fields.n ?? defaultValues.n; - const stop = fields.stop ? Array.isArray(fields.stop) ? fields.stop : [fields.stop] : defaultValues.stop; - const temperature = fields.temperature ?? defaultValues.temperature; - const frequencyPenalty = fields.frequency_penalty ?? defaultValues.frequencyPenalty; - const presencePenalty = fields.presence_penalty ?? defaultValues.presencePenalty; - const topP = fields.top_p ?? 
defaultValues.topP; - - super({ - ...fields, - n, - stop, - temperature, - openAIApiKey: 'dummy', - frequencyPenalty, - presencePenalty, - topP - }); - - this.deploymentId = fields.deploymentId; - this.modelVersion = fields.modelVersion; - - // LLM client - this.btpOpenAIClient = new OpenAiClient(); - } - - override get callKeys(): (keyof OpenAIChatCallOptions)[] { - return [ - ...(super.callKeys as (keyof OpenAIChatCallOptions)[]), - 'options', - 'function_call', - 'functions', - 'tools', - 'tool_choice', - 'response_format', - 'seed', - ]; - } - - override get lc_secrets(): { [key: string]: string } | undefined { - // overrides default keys as not applicable in BTP - return {}; - } - - override get lc_aliases(): Record { - // overrides default keys as not applicable in BTP - return {}; - } - - override async _generate( - messages: BaseMessage[], - options: this['CallOptions'], - runManager?: CallbackManagerForLLMRun, - ): Promise { - function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { - return tools !== undefined && tools.every((tool) => Array.isArray((tool as StructuredTool).lc_namespace)); - } - const res = await this.caller.callWithOptions( - { - signal: options.signal, - }, - () => - this.btpOpenAIClient.chatCompletion( - { - messages: messages.map(this.mapBaseMessageToOpenAIChatMessage.bind(this)), - deployment_id: this.deploymentId, - max_tokens: this.maxTokens === -1 ? undefined : this.maxTokens, - temperature: this.temperature, - top_p: this.topP, - logit_bias: this.logitBias, - n: this.n, - stop: options?.stop ?? this.stop, - presence_penalty: this.presencePenalty, - frequency_penalty: this.frequencyPenalty, - functions: isStructuredToolArray(options?.functions) - ? options?.functions.map(this.mapToolToBTPOpenAIFunction.bind(this)) - : options?.functions, - tools: isStructuredToolArray(options?.tools) - ? 
options?.tools.map(this.mapToolToOpenAITool.bind(this)) - : options?.tools, - tool_choice: options?.tool_choice, - response_format: options?.response_format, - seed: options?.seed, - ...this.modelKwargs, - }, - { - modelName: this.modelName ?? this.model, - deploymentId: this.deploymentId, - modelVersion: this.modelVersion - }, - ), - ); - - // currently BTP LLM Proxy for OpenAI doesn't support streaming - await runManager?.handleLLMNewToken( - typeof res.choices[0].message.content === 'string' ? res.choices[0].message.content : '', - ); - - return this.mapResponseToChatResult(res); - } - - /** - * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. - * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. - * @returns The OpenAI Chat Completion Function. - */ - protected mapToolToBTPOpenAIFunction(tool: StructuredTool): OpenAiChatCompletionFunction { - return { - name: tool.name, - description: tool.description, - parameters: zodToJsonSchema(tool.schema), - }; - } - - /** - * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionTool}. - * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. - * @returns The OpenAI Chat Completion Tool. - */ - protected mapToolToOpenAITool(tool: StructuredTool): OpenAiChatCompletionTool { - return { - type: 'function', - function: { - name: tool.name, - description: tool.description, - parameters: zodToJsonSchema(tool.schema), - }, - }; - } - - /** - * Maps a {@link BaseMessage} to OpenAI's Message Role. - * @param message - The message to map. - * @returns The OpenAI Message Role. 
- */ - protected mapBaseMessageToRole(message: BaseMessage): OpenAiChatMessage['role'] { - switch (message._getType()) { - case 'ai': - return 'assistant'; - case 'human': - return 'user'; - case 'system': - return 'system'; - case 'function': - return 'function'; - case 'tool': - return 'tool'; - case 'generic': - return (message as ChatMessage).role as OpenAiChatMessage['role']; - default: - throw new Error(`Unknown message type: ${message._getType()}`); - } - } - - /** - * Maps {@link BaseMessage} to OpenAI Messages. - * @param message - The message to map. - * @returns The OpenAI Chat Message. - */ - protected mapBaseMessageToOpenAIChatMessage(message: BaseMessage): OpenAiChatMessage { - return { - content: message.content, - name: message.name, - role: this.mapBaseMessageToRole(message), - function_call: message.additional_kwargs.function_call, - tool_calls: message.additional_kwargs.tool_calls, - tool_call_id: (message as ToolMessage).tool_call_id, - } as OpenAiChatMessage; - } - - /** - * Maps OpenAI messages to LangChain's {@link ChatResult}. - * @param res - The OpenAI Chat Completion Output. - * @returns The LangChain Chat Result. 
- */ - protected mapResponseToChatResult(res: OpenAiChatCompletionOutput): ChatResult { - return { - generations: res.choices.map((c) => ({ - text: (c.message as OpenAiChatAssistantMessage).content || '', - message: new AIMessage({ - content: (c.message as OpenAiChatAssistantMessage).content || '', - additional_kwargs: { - finish_reason: c.finish_reason, - index: c.index, - logprobs: c.logprobs, - function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter - tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, - tool_call_id: (c.message as OpenAiChatToolMessage).tool_call_id, - }, - }), - generationInfo: { - finish_reason: c.finish_reason, - index: c.index, - logprobs: c.logprobs, - function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter - tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, - }, - })), - llmOutput: { - created: res.created, - id: res.id, - model: res.model, - object: res.object, - tokenUsage: { - completionTokens: res.usage.completion_tokens, - promptTokens: res.usage.prompt_tokens, - totalTokens: res.usage.total_tokens, - }, - }, - }; - } -} diff --git a/packages/langchain/src/chat/openai/index.ts b/packages/langchain/src/chat/openai/index.ts new file mode 100644 index 000000000..754274f47 --- /dev/null +++ b/packages/langchain/src/chat/openai/index.ts @@ -0,0 +1 @@ +export * from './openai.js'; diff --git a/packages/langchain/src/chat/openai/openai.ts b/packages/langchain/src/chat/openai/openai.ts new file mode 100644 index 000000000..2dd3e0531 --- /dev/null +++ b/packages/langchain/src/chat/openai/openai.ts @@ -0,0 +1,137 @@ +import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; +import { BaseMessage } from '@langchain/core/messages'; +import { ChatResult } from '@langchain/core/outputs'; +import { StructuredTool } from '@langchain/core/tools'; +import { ChatOpenAI } from '@langchain/openai'; +import { 
+ OpenAiClient, + OpenAiChatModel +} from '@sap-ai-sdk/gen-ai-hub'; +import { + mapBaseMessageToOpenAIChatMessage, + mapResponseToChatResult, + mapToolToOpenAIFunction, + mapToolToOpenAITool +} from './util.js'; +import { + OpenAIChatModelInput, + OpenAIChatModelInterface, + OpenAIChatCallOptions +} from './types.js'; + +/** + * OpenAI Language Model Wrapper to generate texts. + */ +export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { + declare CallOptions: OpenAIChatCallOptions; + + deploymentId?: string; + modelVersion?: string; + modelName: OpenAiChatModel; + model: OpenAiChatModel; + private btpOpenAIClient: OpenAiClient; + + constructor(fields: OpenAIChatModelInput) { + const defaultValues = new ChatOpenAI(); + const n = fields.n ?? defaultValues.n; + const stop = fields.stop ? Array.isArray(fields.stop) ? fields.stop : [fields.stop] : defaultValues.stop; + const temperature = fields.temperature ?? defaultValues.temperature; + const frequencyPenalty = fields.frequency_penalty ?? defaultValues.frequencyPenalty; + const presencePenalty = fields.presence_penalty ?? defaultValues.presencePenalty; + const topP = fields.top_p ?? 
defaultValues.topP; + const model = defaultValues.model; + const modelName = model; + + super({ + ...fields, + model, + modelName, + n, + stop, + temperature, + openAIApiKey: 'dummy', + frequencyPenalty, + presencePenalty, + topP + }); + + this.model = fields.modelName; + this.modelName = fields.modelName; + this.modelVersion = fields.modelVersion; + + this.btpOpenAIClient = new OpenAiClient(); + } + + override get callKeys(): (keyof OpenAIChatCallOptions)[] { + return [ + ...(super.callKeys as (keyof OpenAIChatCallOptions)[]), + 'options', + 'functions', + 'tools', + 'tool_choice', + 'response_format', + 'seed', + ]; + } + + override get lc_secrets(): { [key: string]: string } | undefined { + // overrides default keys as not applicable in BTP + return {}; + } + + override get lc_aliases(): Record { + // overrides default keys as not applicable in BTP + return {}; + } + + override async _generate( + messages: BaseMessage[], + options: this['CallOptions'], + runManager?: CallbackManagerForLLMRun, + ): Promise { + function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { + return tools !== undefined && tools.every((tool) => Array.isArray((tool as StructuredTool).lc_namespace)); + } + const res = await this.caller.callWithOptions( + { + signal: options.signal, + }, + () => + this.btpOpenAIClient.chatCompletion( + { + messages: messages.map(mapBaseMessageToOpenAIChatMessage), + max_tokens: this.maxTokens === -1 ? undefined : this.maxTokens, + temperature: this.temperature, + top_p: this.topP, + logit_bias: this.logitBias, + n: this.n, + stop: options?.stop ?? this.stop, + presence_penalty: this.presencePenalty, + frequency_penalty: this.frequencyPenalty, + functions: isStructuredToolArray(options?.functions) + ? options?.functions.map(mapToolToOpenAIFunction) + : options?.functions, + tools: isStructuredToolArray(options?.tools) + ? 
options?.tools.map(mapToolToOpenAITool) + : options?.tools, + tool_choice: options?.tool_choice, + response_format: options?.response_format, + seed: options?.seed, + ...this.modelKwargs, + }, + { + modelName: this.modelName ?? this.model, + deploymentId: this.deploymentId, + modelVersion: this.modelVersion + }, + ), + ); + + // currently BTP LLM Proxy for OpenAI doesn't support streaming + await runManager?.handleLLMNewToken( + typeof res.choices[0].message.content === 'string' ? res.choices[0].message.content : '', + ); + + return mapResponseToChatResult(res); + } +} diff --git a/packages/langchain/src/chat/openai/types.ts b/packages/langchain/src/chat/openai/types.ts new file mode 100644 index 000000000..3b63a6056 --- /dev/null +++ b/packages/langchain/src/chat/openai/types.ts @@ -0,0 +1,58 @@ +import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; +import { ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; +import { OpenAiChatCompletionParameters, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; + +/** + * Input for Text generation for OpenAI GPT. + */ +export interface OpenAIChatModelInterface + extends Omit, + Omit, + BaseChatModelParams { + /** + * The name of the model. + */ + modelName: OpenAiChatModel; + /** + * The name of the model. Alias for `modelName`. + */ + model: OpenAiChatModel; + /** + * The version of the model. + */ + modelVersion?: string; + } + +/** + * Input for Text generation for OpenAI GPT. + */ +export type OpenAIChatModelInput = Omit & + Omit & + BaseChatModelParams & + { + /** + * The name of the model. + */ + modelName: OpenAiChatModel; + /** + * The version of the model. + */ + modelVersion?: string; + }; + +/** + * Chat Call options. 
+ */ +export interface OpenAIChatCallOptions + extends Omit, + Pick {} diff --git a/packages/langchain/src/chat/openai/util.ts b/packages/langchain/src/chat/openai/util.ts new file mode 100644 index 000000000..8ebcb2a85 --- /dev/null +++ b/packages/langchain/src/chat/openai/util.ts @@ -0,0 +1,114 @@ +import { AIMessage, BaseMessage, ChatMessage, ToolMessage } from '@langchain/core/messages'; +import { ChatResult } from '@langchain/core/outputs'; +import { StructuredTool } from '@langchain/core/tools'; +import { OpenAiChatAssistantMessage, OpenAiChatCompletionChoice, OpenAiChatCompletionFunction, OpenAiChatCompletionOutput, OpenAiChatCompletionTool, OpenAiChatMessage, OpenAiChatToolMessage } from '@sap-ai-sdk/gen-ai-hub'; +import { zodToJsonSchema } from 'zod-to-json-schema'; + +/** + * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. + * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. + * @returns The OpenAI Chat Completion Function. + */ +export function mapToolToOpenAIFunction(tool: StructuredTool): OpenAiChatCompletionFunction { + return { + name: tool.name, + description: tool.description, + parameters: zodToJsonSchema(tool.schema), + }; +} + +/** + * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionTool}. + * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. + * @returns The OpenAI Chat Completion Tool. + */ +export function mapToolToOpenAITool(tool: StructuredTool): OpenAiChatCompletionTool { + return { + type: 'function', + function: { + name: tool.name, + description: tool.description, + parameters: zodToJsonSchema(tool.schema), + }, + }; +} + +/** + * Maps a {@link BaseMessage} to OpenAI's Message Role. + * @param message - The message to map. + * @returns The OpenAI Message Role. 
+ */ +export function mapBaseMessageToRole(message: BaseMessage): OpenAiChatMessage['role'] { + switch (message._getType()) { + case 'ai': + return 'assistant'; + case 'human': + return 'user'; + case 'system': + return 'system'; + case 'function': + return 'function'; + case 'tool': + return 'tool'; + case 'generic': + return (message as ChatMessage).role as OpenAiChatMessage['role']; + default: + throw new Error(`Unknown message type: ${message._getType()}`); + } +} + +/** + * Maps OpenAI messages to LangChain's {@link ChatResult}. + * @param res - The OpenAI Chat Completion Output. + * @returns The LangChain Chat Result. + */ +export function mapResponseToChatResult(res: OpenAiChatCompletionOutput): ChatResult { + return { + generations: res.choices.map((c: OpenAiChatCompletionChoice) => ({ + text: (c.message as OpenAiChatAssistantMessage).content || '', + message: new AIMessage({ + content: (c.message as OpenAiChatAssistantMessage).content || '', + additional_kwargs: { + finish_reason: c.finish_reason, + index: c.index, + function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter + tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, + tool_call_id: (c.message as OpenAiChatToolMessage).tool_call_id, + }, + }), + generationInfo: { + finish_reason: c.finish_reason, + index: c.index, + function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter + tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, + }, + })), + llmOutput: { + created: res.created, + id: res.id, + model: res.model, + object: res.object, + tokenUsage: { + completionTokens: res.usage.completion_tokens, + promptTokens: res.usage.prompt_tokens, + totalTokens: res.usage.total_tokens, + }, + }, + }; + } + +/** + * Maps {@link BaseMessage} to OpenAI Messages. + * @param message - The message to map. + * @returns The OpenAI Chat Message. 
+ */ +export function mapBaseMessageToOpenAIChatMessage(message: BaseMessage): OpenAiChatMessage { + return { + content: message.content, + name: message.name, + role: mapBaseMessageToRole(message), + function_call: message.additional_kwargs.function_call, + tool_calls: message.additional_kwargs.tool_calls, + tool_call_id: (message as ToolMessage).tool_call_id, + } as OpenAiChatMessage; +} From 71027120a7384a203843f138ce666377e9625ef0 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 17:55:01 +0200 Subject: [PATCH 17/95] 1.0 --- packages/langchain/package.json | 3 +- packages/langchain/src/embedding/openai.ts | 53 ++++++++++++++-------- packages/langchain/src/index.ts | 1 + packages/langchain/src/util/chunk-array.ts | 14 ++++++ packages/langchain/src/util/index.ts | 1 + pnpm-lock.yaml | 3 ++ 6 files changed, 55 insertions(+), 20 deletions(-) create mode 100644 packages/langchain/src/util/chunk-array.ts create mode 100644 packages/langchain/src/util/index.ts diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 7d61b9558..84ec8d089 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -31,7 +31,8 @@ "@sap-ai-sdk/ai-core": "workspace:^", "@sap-ai-sdk/gen-ai-hub": "workspace:^", "@langchain/core": "^0.2.30", - "@langchain/openai": "^0.2.8" + "@langchain/openai": "^0.2.8", + "zod-to-json-schema": "^3.23.2" }, "devDependencies": { "typescript": "^5.5.4" diff --git a/packages/langchain/src/embedding/openai.ts b/packages/langchain/src/embedding/openai.ts index cce7683fd..a9b836c17 100644 --- a/packages/langchain/src/embedding/openai.ts +++ b/packages/langchain/src/embedding/openai.ts @@ -1,31 +1,43 @@ import { BaseLLMParams } from '@langchain/core/language_models/llms'; import { OpenAIEmbeddingsParams, OpenAIEmbeddings } from '@langchain/openai'; -import { OpenAiClient, OpenAiEmbeddingModel, OpenAiEmbeddingParameters } from '@sap-ai-sdk/gen-ai-hub'; -import { BTPBaseLLMParameters } from 
'../../client/base.js'; -import { chunkArray } from '../../core/utils.js'; +import { OpenAiClient, OpenAiEmbeddingModel, OpenAiEmbeddingOutput, OpenAiEmbeddingParameters } from '@sap-ai-sdk/gen-ai-hub'; +import { chunkArray } from '../util/index.js'; /** * Input for Text generation for OpenAI GPT. */ -export interface BTPOpenAIGPTEmbeddingInput +export interface OpenAIEmbeddingInput extends Omit, - BTPBaseLLMParameters, - BaseLLMParams {} + BaseLLMParams { + /** + * The name of the model. + */ + modelName: OpenAiEmbeddingModel; + /** + * The name of the model. Alias for `modelName`. + */ + model: OpenAiEmbeddingModel; + /** + * The version of the model. + */ + modelVersion?: string; + } /** * OpenAI GPT Language Model Wrapper to embed texts. */ -export class OpenAIGPTEmbedding extends OpenAIEmbeddings implements BTPOpenAIGPTEmbeddingInput { - deployment_id: OpenAiEmbeddingModel; +export class OpenAIEmbedding extends OpenAIEmbeddings implements OpenAIEmbeddingInput { + modelName: OpenAiEmbeddingModel; + model: OpenAiEmbeddingModel; + private btpOpenAIClient: OpenAiClient; - constructor(fields?: Partial) { + constructor(fields: OpenAIEmbeddingInput) { super({ ...fields, openAIApiKey: 'dummy' }); - this.deployment_id = fields?.deployment_id ?? 
'text-embedding-ada-002-v2'; - - // LLM client this.btpOpenAIClient = new OpenAiClient(); + this.model = fields.model; + this.modelName = fields.modelName; } override async embedDocuments(documents: string[]): Promise { @@ -35,21 +47,24 @@ export class OpenAIGPTEmbedding extends OpenAIEmbeddings implements BTPOpenAIGPT ); const embeddings: number[][] = []; for await (const promptChunk of chunkedPrompts) { - const resArr = await this.createEmbedding(promptChunk); - resArr.forEach((res) => embeddings.push(res.embedding)); + const resArr = await this.createEmbedding({ input: promptChunk }); + resArr.data.forEach((res) => embeddings.push(res.embedding)); } return embeddings; } override async embedQuery(query: string): Promise { - const resArr = await this.createEmbedding(this.stripNewLines ? query.replace(/\n/g, ' ') : query); - return resArr[0].embedding; + const resArr = await this.createEmbedding( + { + input: this.stripNewLines ? query.replace(/\n/g, ' ') : query, + }); + return resArr.data[0].embedding; } - private async createEmbedding(query: OpenAiEmbeddingParameters['input']) { + private async createEmbedding(query: OpenAiEmbeddingParameters): Promise { const res = await this.caller.callWithOptions({}, () => - this.btpOpenAIClient.embeddings(query, this.deployment_id), + this.btpOpenAIClient.embeddings(query, this.model), ); - return res.data; + return res; } } diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index e762bc704..584778d29 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1,2 +1,3 @@ export * from './chat/index.js'; export * from './embedding/index.js'; +export * from './util/index.js'; diff --git a/packages/langchain/src/util/chunk-array.ts b/packages/langchain/src/util/chunk-array.ts new file mode 100644 index 000000000..b32e97748 --- /dev/null +++ b/packages/langchain/src/util/chunk-array.ts @@ -0,0 +1,14 @@ +/** + * Chunk an array into smaller arrays of specified chunk size. 
+ * @param arr - Input array to be chunked. + * @param chunkSize - Size of each chunk. + * @returns Array of chunks. + */ +export const chunkArray = (arr: T[], chunkSize: number): T[][] => + arr.reduce((chunks, elem, index) => { + const chunkIndex = Math.floor(index / chunkSize); + const chunk = chunks[chunkIndex] || []; + + chunks[chunkIndex] = chunk.concat([elem]); + return chunks; + }, [] as T[][]); diff --git a/packages/langchain/src/util/index.ts b/packages/langchain/src/util/index.ts new file mode 100644 index 000000000..6d2048a2a --- /dev/null +++ b/packages/langchain/src/util/index.ts @@ -0,0 +1 @@ +export * from './chunk-array.js'; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0ea04d099..40408a72b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -160,6 +160,9 @@ importers: '@sap-ai-sdk/gen-ai-hub': specifier: workspace:^ version: link:../gen-ai-hub + zod-to-json-schema: + specifier: ^3.23.2 + version: 3.23.2(zod@3.23.8) devDependencies: typescript: specifier: ^5.5.4 From d98e1c7c507e45df4484bbdc772a90e52b71daf2 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 3 Sep 2024 18:21:43 +0200 Subject: [PATCH 18/95] rename etc --- packages/langchain/src/chat/openai/{openai.ts => client.ts} | 0 packages/langchain/src/chat/openai/index.ts | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename packages/langchain/src/chat/openai/{openai.ts => client.ts} (100%) diff --git a/packages/langchain/src/chat/openai/openai.ts b/packages/langchain/src/chat/openai/client.ts similarity index 100% rename from packages/langchain/src/chat/openai/openai.ts rename to packages/langchain/src/chat/openai/client.ts diff --git a/packages/langchain/src/chat/openai/index.ts b/packages/langchain/src/chat/openai/index.ts index 754274f47..37fd896c4 100644 --- a/packages/langchain/src/chat/openai/index.ts +++ b/packages/langchain/src/chat/openai/index.ts @@ -1 +1 @@ -export * from './openai.js'; +export * from './client.js'; From 0b309a91c0a04af3d36179691a01832ff51ec452 Mon Sep 
17 00:00:00 2001 From: Tom Frenken Date: Wed, 4 Sep 2024 12:47:20 +0200 Subject: [PATCH 19/95] lint --- packages/langchain/package.json | 77 ++++---- packages/langchain/src/chat/openai/client.ts | 44 +++-- packages/langchain/src/chat/openai/types.ts | 103 ++++++----- packages/langchain/src/chat/openai/util.ts | 178 +++++++++++-------- packages/langchain/src/embedding/openai.ts | 59 +++--- packages/langchain/src/util/chunk-array.ts | 12 +- packages/langchain/tsconfig.cjs.json | 11 +- packages/langchain/tsconfig.json | 20 +-- 8 files changed, 285 insertions(+), 219 deletions(-) diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 84ec8d089..695f6a072 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -1,41 +1,40 @@ { - "name": "@sap-ai-sdk/langchain", - "version": "0.0.0", - "description": "", - "license": "Apache-2.0", - "keywords": [ - "sap-ai-sdk", - "gen-ai-hub", - "orchestration", - "llm-access" - ], - "type": "module", - "main": "./dist/index.js", - "types": "./dist/index.d.ts", - "files": [ - "dist/**/*.js", - "dist/**/*.js.map", - "dist/**/*.d.ts", - "dist/**/*.d.ts.map", - "internal.js", - "internal.d.ts" - ], - "scripts": { - "compile": "tsc", - "compile:cjs": "tsc -p tsconfig.cjs.json", - "test": "NODE_OPTIONS=--experimental-vm-modules jest", - "lint": "eslint \"**/*.ts\" && prettier . --config ../../.prettierrc --ignore-path ../../.prettierignore -c", - "lint:fix": "eslint \"**/*.ts\" --fix && prettier . 
--config ../../.prettierrc --ignore-path ../../.prettierignore -w --log-level error" - }, - "dependencies": { - "@sap-ai-sdk/ai-core": "workspace:^", - "@sap-ai-sdk/gen-ai-hub": "workspace:^", - "@langchain/core": "^0.2.30", - "@langchain/openai": "^0.2.8", - "zod-to-json-schema": "^3.23.2" - }, - "devDependencies": { - "typescript": "^5.5.4" - } + "name": "@sap-ai-sdk/langchain", + "version": "0.0.0", + "description": "", + "license": "Apache-2.0", + "keywords": [ + "sap-ai-sdk", + "gen-ai-hub", + "orchestration", + "llm-access" + ], + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "internal.js", + "internal.d.ts" + ], + "scripts": { + "compile": "tsc", + "compile:cjs": "tsc -p tsconfig.cjs.json", + "test": "NODE_OPTIONS=--experimental-vm-modules jest", + "lint": "eslint \"**/*.ts\" && prettier . --config ../../.prettierrc --ignore-path ../../.prettierignore -c", + "lint:fix": "eslint \"**/*.ts\" --fix && prettier . 
--config ../../.prettierrc --ignore-path ../../.prettierignore -w --log-level error" + }, + "dependencies": { + "@sap-ai-sdk/ai-core": "workspace:^", + "@sap-ai-sdk/gen-ai-hub": "workspace:^", + "@langchain/core": "^0.2.30", + "@langchain/openai": "^0.2.8", + "zod-to-json-schema": "^3.23.2" + }, + "devDependencies": { + "typescript": "^5.5.4" + } } - \ No newline at end of file diff --git a/packages/langchain/src/chat/openai/client.ts b/packages/langchain/src/chat/openai/client.ts index 2dd3e0531..2d1077d02 100644 --- a/packages/langchain/src/chat/openai/client.ts +++ b/packages/langchain/src/chat/openai/client.ts @@ -3,10 +3,7 @@ import { BaseMessage } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; import { ChatOpenAI } from '@langchain/openai'; -import { - OpenAiClient, - OpenAiChatModel -} from '@sap-ai-sdk/gen-ai-hub'; +import { OpenAiClient, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; import { mapBaseMessageToOpenAIChatMessage, mapResponseToChatResult, @@ -34,10 +31,16 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { constructor(fields: OpenAIChatModelInput) { const defaultValues = new ChatOpenAI(); const n = fields.n ?? defaultValues.n; - const stop = fields.stop ? Array.isArray(fields.stop) ? fields.stop : [fields.stop] : defaultValues.stop; + const stop = fields.stop + ? Array.isArray(fields.stop) + ? fields.stop + : [fields.stop] + : defaultValues.stop; const temperature = fields.temperature ?? defaultValues.temperature; - const frequencyPenalty = fields.frequency_penalty ?? defaultValues.frequencyPenalty; - const presencePenalty = fields.presence_penalty ?? defaultValues.presencePenalty; + const frequencyPenalty = + fields.frequency_penalty ?? defaultValues.frequencyPenalty; + const presencePenalty = + fields.presence_penalty ?? defaultValues.presencePenalty; const topP = fields.top_p ?? 
defaultValues.topP; const model = defaultValues.model; const modelName = model; @@ -70,7 +73,7 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { 'tools', 'tool_choice', 'response_format', - 'seed', + 'seed' ]; } @@ -87,14 +90,21 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { override async _generate( messages: BaseMessage[], options: this['CallOptions'], - runManager?: CallbackManagerForLLMRun, + runManager?: CallbackManagerForLLMRun ): Promise { - function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { - return tools !== undefined && tools.every((tool) => Array.isArray((tool as StructuredTool).lc_namespace)); + function isStructuredToolArray( + tools?: unknown[] + ): tools is StructuredTool[] { + return ( + tools !== undefined && + tools.every(tool => + Array.isArray((tool as StructuredTool).lc_namespace) + ) + ); } const res = await this.caller.callWithOptions( { - signal: options.signal, + signal: options.signal }, () => this.btpOpenAIClient.chatCompletion( @@ -117,19 +127,21 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { tool_choice: options?.tool_choice, response_format: options?.response_format, seed: options?.seed, - ...this.modelKwargs, + ...this.modelKwargs }, { modelName: this.modelName ?? this.model, deploymentId: this.deploymentId, modelVersion: this.modelVersion - }, - ), + } + ) ); // currently BTP LLM Proxy for OpenAI doesn't support streaming await runManager?.handleLLMNewToken( - typeof res.choices[0].message.content === 'string' ? res.choices[0].message.content : '', + typeof res.choices[0].message.content === 'string' + ? 
res.choices[0].message.content + : '' ); return mapResponseToChatResult(res); diff --git a/packages/langchain/src/chat/openai/types.ts b/packages/langchain/src/chat/openai/types.ts index 3b63a6056..44d438ca1 100644 --- a/packages/langchain/src/chat/openai/types.ts +++ b/packages/langchain/src/chat/openai/types.ts @@ -1,58 +1,79 @@ import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; import { ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; -import { OpenAiChatCompletionParameters, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; +import { + OpenAiChatCompletionParameters, + OpenAiChatModel +} from '@sap-ai-sdk/gen-ai-hub'; /** * Input for Text generation for OpenAI GPT. */ export interface OpenAIChatModelInterface - extends Omit, - Omit, + extends Omit< + OpenAIChatInput, + 'openAIApiKey' | 'streaming' | 'model' | 'modelName' + >, + Omit< + OpenAiChatCompletionParameters, + 'n' | 'stop' | 'messages' | 'temperature' + >, BaseChatModelParams { - /** - * The name of the model. - */ - modelName: OpenAiChatModel; - /** - * The name of the model. Alias for `modelName`. - */ - model: OpenAiChatModel; - /** - * The version of the model. - */ - modelVersion?: string; - } + /** + * The name of the model. + */ + modelName: OpenAiChatModel; + /** + * The name of the model. Alias for `modelName`. + */ + model: OpenAiChatModel; + /** + * The version of the model. + */ + modelVersion?: string; +} /** * Input for Text generation for OpenAI GPT. */ -export type OpenAIChatModelInput = Omit & - Omit & - BaseChatModelParams & - { - /** - * The name of the model. - */ - modelName: OpenAiChatModel; - /** - * The version of the model. 
- */ - modelVersion?: string; - }; +export type OpenAIChatModelInput = Omit< + OpenAIChatInput, + | 'frequencyPenalty' + | 'presencePenalty' + | 'topP' + | 'temperature' + | 'stop' + | 'n' + | 'modelName' + | 'model' + | 'openAIApiKey' + | 'streaming' +> & + Omit & + BaseChatModelParams & { + /** + * The name of the model. + */ + modelName: OpenAiChatModel; + /** + * The version of the model. + */ + modelVersion?: string; + }; /** * Chat Call options. */ export interface OpenAIChatCallOptions - extends Omit, - Pick {} + extends Omit< + ChatOpenAICallOptions, + | 'tool_choice' + | 'promptIndex' + | 'functions' + | 'function_call' + | 'tools' + | 'response_format' + >, + Pick< + OpenAiChatCompletionParameters, + 'tool_choice' | 'functions' | 'tools' | 'response_format' + > {} diff --git a/packages/langchain/src/chat/openai/util.ts b/packages/langchain/src/chat/openai/util.ts index 8ebcb2a85..266c13812 100644 --- a/packages/langchain/src/chat/openai/util.ts +++ b/packages/langchain/src/chat/openai/util.ts @@ -1,7 +1,20 @@ -import { AIMessage, BaseMessage, ChatMessage, ToolMessage } from '@langchain/core/messages'; +import { + AIMessage, + BaseMessage, + ChatMessage, + ToolMessage +} from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; -import { OpenAiChatAssistantMessage, OpenAiChatCompletionChoice, OpenAiChatCompletionFunction, OpenAiChatCompletionOutput, OpenAiChatCompletionTool, OpenAiChatMessage, OpenAiChatToolMessage } from '@sap-ai-sdk/gen-ai-hub'; +import { + OpenAiChatAssistantMessage, + OpenAiChatCompletionChoice, + OpenAiChatCompletionFunction, + OpenAiChatCompletionOutput, + OpenAiChatCompletionTool, + OpenAiChatMessage, + OpenAiChatToolMessage +} from '@sap-ai-sdk/gen-ai-hub'; import { zodToJsonSchema } from 'zod-to-json-schema'; /** @@ -9,12 +22,14 @@ import { zodToJsonSchema } from 'zod-to-json-schema'; * @param tool - Base class for Tools that accept input of any 
shape defined by a Zod schema. * @returns The OpenAI Chat Completion Function. */ -export function mapToolToOpenAIFunction(tool: StructuredTool): OpenAiChatCompletionFunction { - return { - name: tool.name, - description: tool.description, - parameters: zodToJsonSchema(tool.schema), - }; +export function mapToolToOpenAIFunction( + tool: StructuredTool +): OpenAiChatCompletionFunction { + return { + name: tool.name, + description: tool.description, + parameters: zodToJsonSchema(tool.schema) + }; } /** @@ -22,15 +37,17 @@ export function mapToolToOpenAIFunction(tool: StructuredTool): OpenAiChatComplet * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. * @returns The OpenAI Chat Completion Tool. */ -export function mapToolToOpenAITool(tool: StructuredTool): OpenAiChatCompletionTool { - return { - type: 'function', - function: { - name: tool.name, - description: tool.description, - parameters: zodToJsonSchema(tool.schema), - }, - }; +export function mapToolToOpenAITool( + tool: StructuredTool +): OpenAiChatCompletionTool { + return { + type: 'function', + function: { + name: tool.name, + description: tool.description, + parameters: zodToJsonSchema(tool.schema) + } + }; } /** @@ -38,23 +55,25 @@ export function mapToolToOpenAITool(tool: StructuredTool): OpenAiChatCompletionT * @param message - The message to map. * @returns The OpenAI Message Role. 
*/ -export function mapBaseMessageToRole(message: BaseMessage): OpenAiChatMessage['role'] { - switch (message._getType()) { - case 'ai': - return 'assistant'; - case 'human': - return 'user'; - case 'system': - return 'system'; - case 'function': - return 'function'; - case 'tool': - return 'tool'; - case 'generic': - return (message as ChatMessage).role as OpenAiChatMessage['role']; - default: - throw new Error(`Unknown message type: ${message._getType()}`); - } +export function mapBaseMessageToRole( + message: BaseMessage +): OpenAiChatMessage['role'] { + switch (message._getType()) { + case 'ai': + return 'assistant'; + case 'human': + return 'user'; + case 'system': + return 'system'; + case 'function': + return 'function'; + case 'tool': + return 'tool'; + case 'generic': + return (message as ChatMessage).role as OpenAiChatMessage['role']; + default: + throw new Error(`Unknown message type: ${message._getType()}`); + } } /** @@ -62,53 +81,58 @@ export function mapBaseMessageToRole(message: BaseMessage): OpenAiChatMessage['r * @param res - The OpenAI Chat Completion Output. * @returns The LangChain Chat Result. 
*/ -export function mapResponseToChatResult(res: OpenAiChatCompletionOutput): ChatResult { - return { - generations: res.choices.map((c: OpenAiChatCompletionChoice) => ({ - text: (c.message as OpenAiChatAssistantMessage).content || '', - message: new AIMessage({ - content: (c.message as OpenAiChatAssistantMessage).content || '', - additional_kwargs: { - finish_reason: c.finish_reason, - index: c.index, - function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter - tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, - tool_call_id: (c.message as OpenAiChatToolMessage).tool_call_id, - }, - }), - generationInfo: { - finish_reason: c.finish_reason, - index: c.index, - function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter - tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, - }, - })), - llmOutput: { - created: res.created, - id: res.id, - model: res.model, - object: res.object, - tokenUsage: { - completionTokens: res.usage.completion_tokens, - promptTokens: res.usage.prompt_tokens, - totalTokens: res.usage.total_tokens, - }, - }, - }; - } +export function mapResponseToChatResult( + res: OpenAiChatCompletionOutput +): ChatResult { + return { + generations: res.choices.map((c: OpenAiChatCompletionChoice) => ({ + text: (c.message as OpenAiChatAssistantMessage).content || '', + message: new AIMessage({ + content: (c.message as OpenAiChatAssistantMessage).content || '', + additional_kwargs: { + finish_reason: c.finish_reason, + index: c.index, + function_call: (c.message as OpenAiChatAssistantMessage) + .function_call, // add `function_call` parameter + tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, + tool_call_id: (c.message as OpenAiChatToolMessage).tool_call_id + } + }), + generationInfo: { + finish_reason: c.finish_reason, + index: c.index, + function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add 
`function_call` parameter + tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls + } + })), + llmOutput: { + created: res.created, + id: res.id, + model: res.model, + object: res.object, + tokenUsage: { + completionTokens: res.usage.completion_tokens, + promptTokens: res.usage.prompt_tokens, + totalTokens: res.usage.total_tokens + } + } + }; +} /** * Maps {@link BaseMessage} to OpenAI Messages. * @param message - The message to map. * @returns The OpenAI Chat Message. */ -export function mapBaseMessageToOpenAIChatMessage(message: BaseMessage): OpenAiChatMessage { - return { - content: message.content, - name: message.name, - role: mapBaseMessageToRole(message), - function_call: message.additional_kwargs.function_call, - tool_calls: message.additional_kwargs.tool_calls, - tool_call_id: (message as ToolMessage).tool_call_id, - } as OpenAiChatMessage; +export function mapBaseMessageToOpenAIChatMessage( + message: BaseMessage +): OpenAiChatMessage { + return { + content: message.content, + name: message.name, + role: mapBaseMessageToRole(message), + function_call: message.additional_kwargs.function_call, + tool_calls: message.additional_kwargs.tool_calls, + tool_call_id: (message as ToolMessage).tool_call_id + } as OpenAiChatMessage; } diff --git a/packages/langchain/src/embedding/openai.ts b/packages/langchain/src/embedding/openai.ts index a9b836c17..fb90486de 100644 --- a/packages/langchain/src/embedding/openai.ts +++ b/packages/langchain/src/embedding/openai.ts @@ -1,6 +1,11 @@ import { BaseLLMParams } from '@langchain/core/language_models/llms'; import { OpenAIEmbeddingsParams, OpenAIEmbeddings } from '@langchain/openai'; -import { OpenAiClient, OpenAiEmbeddingModel, OpenAiEmbeddingOutput, OpenAiEmbeddingParameters } from '@sap-ai-sdk/gen-ai-hub'; +import { + OpenAiClient, + OpenAiEmbeddingModel, + OpenAiEmbeddingOutput, + OpenAiEmbeddingParameters +} from '@sap-ai-sdk/gen-ai-hub'; import { chunkArray } from '../util/index.js'; /** @@ -9,24 +14,27 @@ 
import { chunkArray } from '../util/index.js'; export interface OpenAIEmbeddingInput extends Omit, BaseLLMParams { - /** - * The name of the model. - */ - modelName: OpenAiEmbeddingModel; - /** - * The name of the model. Alias for `modelName`. - */ - model: OpenAiEmbeddingModel; - /** - * The version of the model. - */ - modelVersion?: string; - } + /** + * The name of the model. + */ + modelName: OpenAiEmbeddingModel; + /** + * The name of the model. Alias for `modelName`. + */ + model: OpenAiEmbeddingModel; + /** + * The version of the model. + */ + modelVersion?: string; +} /** * OpenAI GPT Language Model Wrapper to embed texts. */ -export class OpenAIEmbedding extends OpenAIEmbeddings implements OpenAIEmbeddingInput { +export class OpenAIEmbedding + extends OpenAIEmbeddings + implements OpenAIEmbeddingInput +{ modelName: OpenAiEmbeddingModel; model: OpenAiEmbeddingModel; @@ -42,28 +50,31 @@ export class OpenAIEmbedding extends OpenAIEmbeddings implements OpenAIEmbedding override async embedDocuments(documents: string[]): Promise { const chunkedPrompts = chunkArray( - this.stripNewLines ? documents.map((t) => t.replace(/\n/g, ' ')) : documents, - this.batchSize, + this.stripNewLines + ? documents.map(t => t.replace(/\n/g, ' ')) + : documents, + this.batchSize ); const embeddings: number[][] = []; for await (const promptChunk of chunkedPrompts) { const resArr = await this.createEmbedding({ input: promptChunk }); - resArr.data.forEach((res) => embeddings.push(res.embedding)); + resArr.data.forEach(res => embeddings.push(res.embedding)); } return embeddings; } override async embedQuery(query: string): Promise { - const resArr = await this.createEmbedding( - { - input: this.stripNewLines ? query.replace(/\n/g, ' ') : query, - }); + const resArr = await this.createEmbedding({ + input: this.stripNewLines ? 
query.replace(/\n/g, ' ') : query + }); return resArr.data[0].embedding; } - private async createEmbedding(query: OpenAiEmbeddingParameters): Promise { + private async createEmbedding( + query: OpenAiEmbeddingParameters + ): Promise { const res = await this.caller.callWithOptions({}, () => - this.btpOpenAIClient.embeddings(query, this.model), + this.btpOpenAIClient.embeddings(query, this.model) ); return res; } diff --git a/packages/langchain/src/util/chunk-array.ts b/packages/langchain/src/util/chunk-array.ts index b32e97748..23c89cfec 100644 --- a/packages/langchain/src/util/chunk-array.ts +++ b/packages/langchain/src/util/chunk-array.ts @@ -5,10 +5,10 @@ * @returns Array of chunks. */ export const chunkArray = (arr: T[], chunkSize: number): T[][] => - arr.reduce((chunks, elem, index) => { - const chunkIndex = Math.floor(index / chunkSize); - const chunk = chunks[chunkIndex] || []; + arr.reduce((chunks, elem, index) => { + const chunkIndex = Math.floor(index / chunkSize); + const chunk = chunks[chunkIndex] || []; - chunks[chunkIndex] = chunk.concat([elem]); - return chunks; - }, [] as T[][]); + chunks[chunkIndex] = chunk.concat([elem]); + return chunks; + }, [] as T[][]); diff --git a/packages/langchain/tsconfig.cjs.json b/packages/langchain/tsconfig.cjs.json index 3a6e73061..a302e4463 100644 --- a/packages/langchain/tsconfig.cjs.json +++ b/packages/langchain/tsconfig.cjs.json @@ -1,8 +1,7 @@ { - "extends": "./tsconfig.json", - "compilerOptions": { - "module": "CommonJS", - "outDir": "./dist-cjs" - } + "extends": "./tsconfig.json", + "compilerOptions": { + "module": "CommonJS", + "outDir": "./dist-cjs" + } } - \ No newline at end of file diff --git a/packages/langchain/tsconfig.json b/packages/langchain/tsconfig.json index f8793365f..e6501eced 100644 --- a/packages/langchain/tsconfig.json +++ b/packages/langchain/tsconfig.json @@ -1,12 +1,12 @@ { - "extends": "../../tsconfig.json", - "compilerOptions": { - "rootDir": "./src", - "outDir": "./dist", - 
"tsBuildInfoFile": "./dist/.tsbuildinfo", - "composite": true - }, - "include": ["src/**/*.ts"], - "exclude": ["dist/**/*", "test/**/*", "**/*.test.ts", "node_modules/**/*"], - "references": [{ "path": "../gen-ai-hub" }] + "extends": "../../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./dist", + "tsBuildInfoFile": "./dist/.tsbuildinfo", + "composite": true + }, + "include": ["src/**/*.ts"], + "exclude": ["dist/**/*", "test/**/*", "**/*.test.ts", "node_modules/**/*"], + "references": [{ "path": "../gen-ai-hub" }] } From 027715740abc20b4c2a6211e0e9c0dc89959458f Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 5 Sep 2024 14:02:45 +0200 Subject: [PATCH 20/95] update tests --- packages/gen-ai-hub/src/index.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gen-ai-hub/src/index.ts b/packages/gen-ai-hub/src/index.ts index d4408f29a..013a30b0f 100644 --- a/packages/gen-ai-hub/src/index.ts +++ b/packages/gen-ai-hub/src/index.ts @@ -9,7 +9,6 @@ export type { OpenAiChatFunctionMessage, OpenAiCompletionChoice, OpenAiErrorBase, - OpenAiChatCompletionChoice, OpenAiCompletionOutput, OpenAiUsage, OpenAiChatCompletionFunction, From f4f771a39b62d04640bf1f190345cc9e5bc236f0 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 5 Sep 2024 14:25:26 +0200 Subject: [PATCH 21/95] save --- packages/langchain/src/chat/openai/client.ts | 25 +++++++++++--------- packages/langchain/src/chat/openai/types.ts | 16 +++++++++++++ 2 files changed, 30 insertions(+), 11 deletions(-) diff --git a/packages/langchain/src/chat/openai/client.ts b/packages/langchain/src/chat/openai/client.ts index 2d1077d02..9d5a3f631 100644 --- a/packages/langchain/src/chat/openai/client.ts +++ b/packages/langchain/src/chat/openai/client.ts @@ -3,7 +3,7 @@ import { BaseMessage } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; import { ChatOpenAI } from '@langchain/openai'; -import { 
OpenAiClient, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; +import { OpenAiChatClient, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; import { mapBaseMessageToOpenAIChatMessage, mapResponseToChatResult, @@ -24,9 +24,10 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { deploymentId?: string; modelVersion?: string; + resourceGroup?: string; modelName: OpenAiChatModel; model: OpenAiChatModel; - private btpOpenAIClient: OpenAiClient; + private btpOpenAIClient: OpenAiChatClient; constructor(fields: OpenAIChatModelInput) { const defaultValues = new ChatOpenAI(); @@ -43,12 +44,10 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { fields.presence_penalty ?? defaultValues.presencePenalty; const topP = fields.top_p ?? defaultValues.topP; const model = defaultValues.model; - const modelName = model; super({ ...fields, model, - modelName, n, stop, temperature, @@ -61,8 +60,17 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { this.model = fields.modelName; this.modelName = fields.modelName; this.modelVersion = fields.modelVersion; + this.deploymentId = fields.deploymentId; + this.resourceGroup = fields.resourceGroup; - this.btpOpenAIClient = new OpenAiClient(); + this.btpOpenAIClient = new OpenAiChatClient( + { + modelName: this.modelName, + modelVersion: this.modelVersion, + deploymentId: this.deploymentId, + resourceGroup: this.resourceGroup, + } + ); } override get callKeys(): (keyof OpenAIChatCallOptions)[] { @@ -107,7 +115,7 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { signal: options.signal }, () => - this.btpOpenAIClient.chatCompletion( + this.btpOpenAIClient.run( { messages: messages.map(mapBaseMessageToOpenAIChatMessage), max_tokens: this.maxTokens === -1 ? 
undefined : this.maxTokens, @@ -128,11 +136,6 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { response_format: options?.response_format, seed: options?.seed, ...this.modelKwargs - }, - { - modelName: this.modelName ?? this.model, - deploymentId: this.deploymentId, - modelVersion: this.modelVersion } ) ); diff --git a/packages/langchain/src/chat/openai/types.ts b/packages/langchain/src/chat/openai/types.ts index 44d438ca1..6e3127bd9 100644 --- a/packages/langchain/src/chat/openai/types.ts +++ b/packages/langchain/src/chat/openai/types.ts @@ -30,6 +30,14 @@ export interface OpenAIChatModelInterface * The version of the model. */ modelVersion?: string; + /** + * The deployment ID of the model. + */ + deploymentId?: string; + /** + * The resource group of the model. + */ + resourceGroup?: string; } /** @@ -58,6 +66,14 @@ export type OpenAIChatModelInput = Omit< * The version of the model. */ modelVersion?: string; + /** + * The deployment ID of the model. + */ + deploymentId?: string; + /** + * The resource group of the model. 
+ */ + resourceGroup?: string; }; /** From c6ac61531914a93dc8f53ba0d10612d1e7238693 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 5 Sep 2024 15:36:23 +0200 Subject: [PATCH 22/95] refactor --- packages/gen-ai-hub/src/index.ts | 3 +- .../src/utils/deployment-resolver.ts | 10 +++- packages/langchain/src/chat/openai/client.ts | 34 +++++--------- packages/langchain/src/chat/openai/types.ts | 46 ++----------------- 4 files changed, 27 insertions(+), 66 deletions(-) diff --git a/packages/gen-ai-hub/src/index.ts b/packages/gen-ai-hub/src/index.ts index 013a30b0f..4453532c8 100644 --- a/packages/gen-ai-hub/src/index.ts +++ b/packages/gen-ai-hub/src/index.ts @@ -34,7 +34,8 @@ export type { ModelDeployment, DeploymentIdConfiguration, ModelConfiguration, - ResourceGroupConfiguration + ResourceGroupConfiguration, + ConfigurationOptions } from './utils/index.js'; export type { diff --git a/packages/gen-ai-hub/src/utils/deployment-resolver.ts b/packages/gen-ai-hub/src/utils/deployment-resolver.ts index e0cc5e2e6..2490bd003 100644 --- a/packages/gen-ai-hub/src/utils/deployment-resolver.ts +++ b/packages/gen-ai-hub/src/utils/deployment-resolver.ts @@ -43,8 +43,14 @@ export interface ResourceGroupConfiguration { */ export type ModelDeployment = | ModelNameT - | ((ModelConfiguration | DeploymentIdConfiguration) & - ResourceGroupConfiguration); + | ConfigurationOptions; + +/** + * The configuration options for a model deployment. + * @typeParam ModelNameT - String literal type representing the name of the model. + */ +export type ConfigurationOptions = (ModelConfiguration | DeploymentIdConfiguration) & +ResourceGroupConfiguration; /** * Type guard to check if the given deployment configuration is a deployment ID configuration. 
diff --git a/packages/langchain/src/chat/openai/client.ts b/packages/langchain/src/chat/openai/client.ts index 9d5a3f631..2a851f119 100644 --- a/packages/langchain/src/chat/openai/client.ts +++ b/packages/langchain/src/chat/openai/client.ts @@ -3,7 +3,7 @@ import { BaseMessage } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; import { ChatOpenAI } from '@langchain/openai'; -import { OpenAiChatClient, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; +import { OpenAiChatClient } from '@sap-ai-sdk/gen-ai-hub'; import { mapBaseMessageToOpenAIChatMessage, mapResponseToChatResult, @@ -21,12 +21,6 @@ import { */ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { declare CallOptions: OpenAIChatCallOptions; - - deploymentId?: string; - modelVersion?: string; - resourceGroup?: string; - modelName: OpenAiChatModel; - model: OpenAiChatModel; private btpOpenAIClient: OpenAiChatClient; constructor(fields: OpenAIChatModelInput) { @@ -43,11 +37,12 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { const presencePenalty = fields.presence_penalty ?? defaultValues.presencePenalty; const topP = fields.top_p ?? defaultValues.topP; - const model = defaultValues.model; + const modelName = fields.modelName ?? defaultValues.modelName; super({ ...fields, - model, + modelName, + model: modelName, n, stop, temperature, @@ -57,20 +52,15 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { topP }); - this.model = fields.modelName; - this.modelName = fields.modelName; - this.modelVersion = fields.modelVersion; - this.deploymentId = fields.deploymentId; - this.resourceGroup = fields.resourceGroup; - - this.btpOpenAIClient = new OpenAiChatClient( + this.btpOpenAIClient = fields.modelName ? 
new OpenAiChatClient( { - modelName: this.modelName, - modelVersion: this.modelVersion, - deploymentId: this.deploymentId, - resourceGroup: this.resourceGroup, - } - ); + modelName: fields.modelName, + modelVersion: fields.modelVersion, + resourceGroup: fields.resourceGroup, + }) : new OpenAiChatClient({ + deploymentId: fields.deploymentId, + resourceGroup: fields.resourceGroup + }); } override get callKeys(): (keyof OpenAIChatCallOptions)[] { diff --git a/packages/langchain/src/chat/openai/types.ts b/packages/langchain/src/chat/openai/types.ts index 6e3127bd9..a5a4ba085 100644 --- a/packages/langchain/src/chat/openai/types.ts +++ b/packages/langchain/src/chat/openai/types.ts @@ -1,6 +1,7 @@ import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; import { ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; import { + ConfigurationOptions, OpenAiChatCompletionParameters, OpenAiChatModel } from '@sap-ai-sdk/gen-ai-hub'; @@ -11,34 +12,13 @@ import { export interface OpenAIChatModelInterface extends Omit< OpenAIChatInput, - 'openAIApiKey' | 'streaming' | 'model' | 'modelName' + 'openAIApiKey' | 'streaming' >, Omit< OpenAiChatCompletionParameters, 'n' | 'stop' | 'messages' | 'temperature' >, - BaseChatModelParams { - /** - * The name of the model. - */ - modelName: OpenAiChatModel; - /** - * The name of the model. Alias for `modelName`. - */ - model: OpenAiChatModel; - /** - * The version of the model. - */ - modelVersion?: string; - /** - * The deployment ID of the model. - */ - deploymentId?: string; - /** - * The resource group of the model. - */ - resourceGroup?: string; -} + BaseChatModelParams {} /** * Input for Text generation for OpenAI GPT. @@ -57,24 +37,8 @@ export type OpenAIChatModelInput = Omit< | 'streaming' > & Omit & - BaseChatModelParams & { - /** - * The name of the model. - */ - modelName: OpenAiChatModel; - /** - * The version of the model. 
- */ - modelVersion?: string; - /** - * The deployment ID of the model. - */ - deploymentId?: string; - /** - * The resource group of the model. - */ - resourceGroup?: string; - }; + BaseChatModelParams & + ConfigurationOptions; /** * Chat Call options. From 13439eefc27cf5bad629bcc01df6f2738d61d1a0 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 6 Sep 2024 10:55:07 +0200 Subject: [PATCH 23/95] stash --- packages/langchain/src/chat/openai/client.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/langchain/src/chat/openai/client.ts b/packages/langchain/src/chat/openai/client.ts index 2a851f119..cf45aa3ae 100644 --- a/packages/langchain/src/chat/openai/client.ts +++ b/packages/langchain/src/chat/openai/client.ts @@ -37,6 +37,7 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { const presencePenalty = fields.presence_penalty ?? defaultValues.presencePenalty; const topP = fields.top_p ?? defaultValues.topP; + // add typeguard for assignment const modelName = fields.modelName ?? 
defaultValues.modelName; super({ From fbf6e9aac336a38e3091804e5c65bda1309e6e50 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 6 Sep 2024 16:22:00 +0200 Subject: [PATCH 24/95] update chat client --- packages/langchain/src/chat/openai/client.ts | 53 +++++--------------- packages/langchain/src/chat/openai/types.ts | 6 +-- packages/langchain/src/chat/openai/util.ts | 29 +++++++---- 3 files changed, 36 insertions(+), 52 deletions(-) diff --git a/packages/langchain/src/chat/openai/client.ts b/packages/langchain/src/chat/openai/client.ts index cf45aa3ae..562cb273a 100644 --- a/packages/langchain/src/chat/openai/client.ts +++ b/packages/langchain/src/chat/openai/client.ts @@ -1,16 +1,16 @@ import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; import { BaseMessage } from '@langchain/core/messages'; -import { ChatResult } from '@langchain/core/outputs'; -import { StructuredTool } from '@langchain/core/tools'; +import type { ChatResult } from '@langchain/core/outputs'; import { ChatOpenAI } from '@langchain/openai'; import { OpenAiChatClient } from '@sap-ai-sdk/gen-ai-hub'; import { + isStructuredToolArray, mapBaseMessageToOpenAIChatMessage, mapResponseToChatResult, mapToolToOpenAIFunction, mapToolToOpenAITool } from './util.js'; -import { +import type { OpenAIChatModelInput, OpenAIChatModelInterface, OpenAIChatCallOptions @@ -21,47 +21,30 @@ import { */ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { declare CallOptions: OpenAIChatCallOptions; - private btpOpenAIClient: OpenAiChatClient; + private openAiChatClient: OpenAiChatClient; constructor(fields: OpenAIChatModelInput) { const defaultValues = new ChatOpenAI(); - const n = fields.n ?? defaultValues.n; const stop = fields.stop ? Array.isArray(fields.stop) ? fields.stop : [fields.stop] : defaultValues.stop; - const temperature = fields.temperature ?? defaultValues.temperature; - const frequencyPenalty = - fields.frequency_penalty ?? 
defaultValues.frequencyPenalty; - const presencePenalty = - fields.presence_penalty ?? defaultValues.presencePenalty; - const topP = fields.top_p ?? defaultValues.topP; - // add typeguard for assignment - const modelName = fields.modelName ?? defaultValues.modelName; super({ + temperature: defaultValues.temperature, + modelName: defaultValues.modelName, + model: defaultValues.model, + n: defaultValues.n, + frequencyPenalty: defaultValues.frequencyPenalty, + presencePenalty: defaultValues.presencePenalty, + topP: defaultValues.topP, ...fields, - modelName, - model: modelName, - n, stop, - temperature, openAIApiKey: 'dummy', - frequencyPenalty, - presencePenalty, - topP }); - this.btpOpenAIClient = fields.modelName ? new OpenAiChatClient( - { - modelName: fields.modelName, - modelVersion: fields.modelVersion, - resourceGroup: fields.resourceGroup, - }) : new OpenAiChatClient({ - deploymentId: fields.deploymentId, - resourceGroup: fields.resourceGroup - }); + this.openAiChatClient = new OpenAiChatClient({ ...fields }); } override get callKeys(): (keyof OpenAIChatCallOptions)[] { @@ -91,22 +74,12 @@ export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { options: this['CallOptions'], runManager?: CallbackManagerForLLMRun ): Promise { - function isStructuredToolArray( - tools?: unknown[] - ): tools is StructuredTool[] { - return ( - tools !== undefined && - tools.every(tool => - Array.isArray((tool as StructuredTool).lc_namespace) - ) - ); - } const res = await this.caller.callWithOptions( { signal: options.signal }, () => - this.btpOpenAIClient.run( + this.openAiChatClient.run( { messages: messages.map(mapBaseMessageToOpenAIChatMessage), max_tokens: this.maxTokens === -1 ? 
undefined : this.maxTokens, diff --git a/packages/langchain/src/chat/openai/types.ts b/packages/langchain/src/chat/openai/types.ts index a5a4ba085..7e5a962a7 100644 --- a/packages/langchain/src/chat/openai/types.ts +++ b/packages/langchain/src/chat/openai/types.ts @@ -1,6 +1,6 @@ -import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; -import { ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; -import { +import type { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; +import type { ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; +import type { ConfigurationOptions, OpenAiChatCompletionParameters, OpenAiChatModel diff --git a/packages/langchain/src/chat/openai/util.ts b/packages/langchain/src/chat/openai/util.ts index 266c13812..0cd149988 100644 --- a/packages/langchain/src/chat/openai/util.ts +++ b/packages/langchain/src/chat/openai/util.ts @@ -6,15 +6,10 @@ import { } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; -import { - OpenAiChatAssistantMessage, - OpenAiChatCompletionChoice, - OpenAiChatCompletionFunction, - OpenAiChatCompletionOutput, - OpenAiChatCompletionTool, - OpenAiChatMessage, - OpenAiChatToolMessage -} from '@sap-ai-sdk/gen-ai-hub'; +import type { OpenAiChatAssistantMessage, OpenAiChatCompletionChoice, + OpenAiChatCompletionFunction, OpenAiChatCompletionOutput, OpenAiChatCompletionTool, + OpenAiChatToolMessage, OpenAiChatMessage + } from '@sap-ai-sdk/gen-ai-hub'; import { zodToJsonSchema } from 'zod-to-json-schema'; /** @@ -136,3 +131,19 @@ export function mapBaseMessageToOpenAIChatMessage( tool_call_id: (message as ToolMessage).tool_call_id } as OpenAiChatMessage; } + +/** + * Checks if a given array is a structured tool array. + * @param tools - The array to check. + * @returns Whether the array is a structured tool array. 
+ */ +export function isStructuredToolArray( + tools?: unknown[] +): tools is StructuredTool[] { + return ( + tools !== undefined && + tools.every(tool => + Array.isArray((tool as StructuredTool).lc_namespace) + ) + ); +} From fd18586c548df6a506048433d89ebc487de7bea8 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 12:10:42 +0200 Subject: [PATCH 25/95] adjust api --- packages/langchain/src/embedding/openai.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/langchain/src/embedding/openai.ts b/packages/langchain/src/embedding/openai.ts index fb90486de..e9e41ecb1 100644 --- a/packages/langchain/src/embedding/openai.ts +++ b/packages/langchain/src/embedding/openai.ts @@ -1,7 +1,7 @@ import { BaseLLMParams } from '@langchain/core/language_models/llms'; import { OpenAIEmbeddingsParams, OpenAIEmbeddings } from '@langchain/openai'; import { - OpenAiClient, + OpenAiEmbeddingClient, OpenAiEmbeddingModel, OpenAiEmbeddingOutput, OpenAiEmbeddingParameters @@ -38,12 +38,12 @@ export class OpenAIEmbedding modelName: OpenAiEmbeddingModel; model: OpenAiEmbeddingModel; - private btpOpenAIClient: OpenAiClient; + private btpOpenAIClient: OpenAiEmbeddingClient; constructor(fields: OpenAIEmbeddingInput) { super({ ...fields, openAIApiKey: 'dummy' }); - this.btpOpenAIClient = new OpenAiClient(); + this.btpOpenAIClient = new OpenAiEmbeddingClient({ ...fields }); this.model = fields.model; this.modelName = fields.modelName; } @@ -74,7 +74,7 @@ export class OpenAIEmbedding query: OpenAiEmbeddingParameters ): Promise { const res = await this.caller.callWithOptions({}, () => - this.btpOpenAIClient.embeddings(query, this.model) + this.btpOpenAIClient.run(query) ); return res; } From 737c6df81577d8c53a88b2e3825b284cb47611e3 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 12:14:23 +0200 Subject: [PATCH 26/95] adjust structure --- packages/langchain/src/chat/index.ts | 1 - packages/langchain/src/chat/openai/index.ts | 1 - 
packages/langchain/src/embedding/index.ts | 1 - packages/langchain/src/index.ts | 3 +-- .../langchain/src/{chat/openai/client.ts => openai/chat.ts} | 0 .../src/{embedding/openai.ts => openai/embedding.ts} | 0 packages/langchain/src/openai/index.ts | 4 ++++ packages/langchain/src/{chat => }/openai/types.ts | 0 packages/langchain/src/{chat => }/openai/util.ts | 0 9 files changed, 5 insertions(+), 5 deletions(-) delete mode 100644 packages/langchain/src/chat/index.ts delete mode 100644 packages/langchain/src/chat/openai/index.ts delete mode 100644 packages/langchain/src/embedding/index.ts rename packages/langchain/src/{chat/openai/client.ts => openai/chat.ts} (100%) rename packages/langchain/src/{embedding/openai.ts => openai/embedding.ts} (100%) create mode 100644 packages/langchain/src/openai/index.ts rename packages/langchain/src/{chat => }/openai/types.ts (100%) rename packages/langchain/src/{chat => }/openai/util.ts (100%) diff --git a/packages/langchain/src/chat/index.ts b/packages/langchain/src/chat/index.ts deleted file mode 100644 index 06718ab5d..000000000 --- a/packages/langchain/src/chat/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './openai/index.js'; diff --git a/packages/langchain/src/chat/openai/index.ts b/packages/langchain/src/chat/openai/index.ts deleted file mode 100644 index 37fd896c4..000000000 --- a/packages/langchain/src/chat/openai/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './client.js'; diff --git a/packages/langchain/src/embedding/index.ts b/packages/langchain/src/embedding/index.ts deleted file mode 100644 index 754274f47..000000000 --- a/packages/langchain/src/embedding/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './openai.js'; diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index 584778d29..6392f5ea0 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1,3 +1,2 @@ -export * from './chat/index.js'; -export * from './embedding/index.js'; +export * from 
'./openai/index.js'; export * from './util/index.js'; diff --git a/packages/langchain/src/chat/openai/client.ts b/packages/langchain/src/openai/chat.ts similarity index 100% rename from packages/langchain/src/chat/openai/client.ts rename to packages/langchain/src/openai/chat.ts diff --git a/packages/langchain/src/embedding/openai.ts b/packages/langchain/src/openai/embedding.ts similarity index 100% rename from packages/langchain/src/embedding/openai.ts rename to packages/langchain/src/openai/embedding.ts diff --git a/packages/langchain/src/openai/index.ts b/packages/langchain/src/openai/index.ts new file mode 100644 index 000000000..56f5dcd97 --- /dev/null +++ b/packages/langchain/src/openai/index.ts @@ -0,0 +1,4 @@ +export * from './chat.js'; +export * from './embedding.js'; +export * from './types.js'; +export * from './util.js'; diff --git a/packages/langchain/src/chat/openai/types.ts b/packages/langchain/src/openai/types.ts similarity index 100% rename from packages/langchain/src/chat/openai/types.ts rename to packages/langchain/src/openai/types.ts diff --git a/packages/langchain/src/chat/openai/util.ts b/packages/langchain/src/openai/util.ts similarity index 100% rename from packages/langchain/src/chat/openai/util.ts rename to packages/langchain/src/openai/util.ts From 8e2c2ac69de704a452f9a5e52bb695909dc5f91b Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 14:10:13 +0200 Subject: [PATCH 27/95] final refactor before pr --- packages/langchain/src/index.ts | 1 - packages/langchain/src/openai/chat.ts | 3 +- packages/langchain/src/openai/embedding.ts | 37 +++------------------- packages/langchain/src/openai/types.ts | 29 ++++++++--------- packages/langchain/src/openai/util.ts | 15 +++++++++ packages/langchain/src/util/chunk-array.ts | 14 -------- packages/langchain/src/util/index.ts | 1 - 7 files changed, 33 insertions(+), 67 deletions(-) delete mode 100644 packages/langchain/src/util/chunk-array.ts delete mode 100644 
packages/langchain/src/util/index.ts diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index 6392f5ea0..06718ab5d 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1,2 +1 @@ export * from './openai/index.js'; -export * from './util/index.js'; diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 562cb273a..97912aa86 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -12,14 +12,13 @@ import { } from './util.js'; import type { OpenAIChatModelInput, - OpenAIChatModelInterface, OpenAIChatCallOptions } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. */ -export class OpenAIChat extends ChatOpenAI implements OpenAIChatModelInterface { +export class OpenAIChat extends ChatOpenAI { declare CallOptions: OpenAIChatCallOptions; private openAiChatClient: OpenAiChatClient; diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index e9e41ecb1..213c20bd7 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -1,51 +1,22 @@ -import { BaseLLMParams } from '@langchain/core/language_models/llms'; -import { OpenAIEmbeddingsParams, OpenAIEmbeddings } from '@langchain/openai'; +import { OpenAIEmbeddings } from '@langchain/openai'; import { OpenAiEmbeddingClient, - OpenAiEmbeddingModel, OpenAiEmbeddingOutput, OpenAiEmbeddingParameters } from '@sap-ai-sdk/gen-ai-hub'; -import { chunkArray } from '../util/index.js'; - -/** - * Input for Text generation for OpenAI GPT. - */ -export interface OpenAIEmbeddingInput - extends Omit, - BaseLLMParams { - /** - * The name of the model. - */ - modelName: OpenAiEmbeddingModel; - /** - * The name of the model. Alias for `modelName`. - */ - model: OpenAiEmbeddingModel; - /** - * The version of the model. 
- */ - modelVersion?: string; -} +import { chunkArray } from './util.js'; +import { OpenAIEmbeddingInput } from './types.js'; /** * OpenAI GPT Language Model Wrapper to embed texts. */ -export class OpenAIEmbedding - extends OpenAIEmbeddings - implements OpenAIEmbeddingInput -{ - modelName: OpenAiEmbeddingModel; - model: OpenAiEmbeddingModel; - +export class OpenAIEmbedding extends OpenAIEmbeddings { private btpOpenAIClient: OpenAiEmbeddingClient; constructor(fields: OpenAIEmbeddingInput) { super({ ...fields, openAIApiKey: 'dummy' }); this.btpOpenAIClient = new OpenAiEmbeddingClient({ ...fields }); - this.model = fields.model; - this.modelName = fields.modelName; } override async embedDocuments(documents: string[]): Promise { diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 7e5a962a7..cc1bf48dc 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -1,25 +1,14 @@ import type { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; -import type { ChatOpenAICallOptions, OpenAIChatInput } from '@langchain/openai'; +import { BaseLLMParams } from '@langchain/core/language_models/llms'; +import type { ChatOpenAICallOptions, OpenAIChatInput, OpenAIEmbeddingsParams } from '@langchain/openai'; import type { ConfigurationOptions, OpenAiChatCompletionParameters, - OpenAiChatModel + OpenAiChatModel, + OpenAiEmbeddingModel, + OpenAiEmbeddingParameters } from '@sap-ai-sdk/gen-ai-hub'; -/** - * Input for Text generation for OpenAI GPT. - */ -export interface OpenAIChatModelInterface - extends Omit< - OpenAIChatInput, - 'openAIApiKey' | 'streaming' - >, - Omit< - OpenAiChatCompletionParameters, - 'n' | 'stop' | 'messages' | 'temperature' - >, - BaseChatModelParams {} - /** * Input for Text generation for OpenAI GPT. 
*/ @@ -57,3 +46,11 @@ export interface OpenAIChatCallOptions OpenAiChatCompletionParameters, 'tool_choice' | 'functions' | 'tools' | 'response_format' > {} + +/** + * Input for Text generation for OpenAI GPT. + */ +export type OpenAIEmbeddingInput = Omit & +OpenAiEmbeddingParameters & +ConfigurationOptions & +BaseLLMParams; diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 0cd149988..b155659c9 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -147,3 +147,18 @@ export function isStructuredToolArray( ) ); } + +/** + * Chunk an array into smaller arrays of specified chunk size. + * @param arr - Input array to be chunked. + * @param chunkSize - Size of each chunk. + * @returns Array of chunks. + */ +export const chunkArray = (arr: T[], chunkSize: number): T[][] => + arr.reduce((chunks, elem, index) => { + const chunkIndex = Math.floor(index / chunkSize); + const chunk = chunks[chunkIndex] || []; + + chunks[chunkIndex] = chunk.concat([elem]); + return chunks; + }, [] as T[][]); diff --git a/packages/langchain/src/util/chunk-array.ts b/packages/langchain/src/util/chunk-array.ts deleted file mode 100644 index 23c89cfec..000000000 --- a/packages/langchain/src/util/chunk-array.ts +++ /dev/null @@ -1,14 +0,0 @@ -/** - * Chunk an array into smaller arrays of specified chunk size. - * @param arr - Input array to be chunked. - * @param chunkSize - Size of each chunk. - * @returns Array of chunks. 
- */ -export const chunkArray = (arr: T[], chunkSize: number): T[][] => - arr.reduce((chunks, elem, index) => { - const chunkIndex = Math.floor(index / chunkSize); - const chunk = chunks[chunkIndex] || []; - - chunks[chunkIndex] = chunk.concat([elem]); - return chunks; - }, [] as T[][]); diff --git a/packages/langchain/src/util/index.ts b/packages/langchain/src/util/index.ts deleted file mode 100644 index 6d2048a2a..000000000 --- a/packages/langchain/src/util/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './chunk-array.js'; From 002b904aae4d8378287d5ebce97e7ec3bb0194b5 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 14:23:53 +0200 Subject: [PATCH 28/95] fix most type issues --- packages/ai-core/src/index.ts | 3 ++- packages/langchain/src/openai/chat.ts | 4 ++-- packages/langchain/src/openai/types.ts | 4 +++- packages/langchain/src/openai/util.ts | 23 ++++++++++++----------- 4 files changed, 19 insertions(+), 15 deletions(-) diff --git a/packages/ai-core/src/index.ts b/packages/ai-core/src/index.ts index f186b6973..7d54d8007 100644 --- a/packages/ai-core/src/index.ts +++ b/packages/ai-core/src/index.ts @@ -4,5 +4,6 @@ export type { ModelDeployment, DeploymentIdConfiguration, ModelConfiguration, - ResourceGroupConfiguration + ResourceGroupConfiguration, + ConfigurationOptions } from './utils/index.js'; diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 97912aa86..bfe88fb1a 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -105,8 +105,8 @@ export class OpenAIChat extends ChatOpenAI { // currently BTP LLM Proxy for OpenAI doesn't support streaming await runManager?.handleLLMNewToken( - typeof res.choices[0].message.content === 'string' - ? res.choices[0].message.content + typeof res.data.choices[0].message.content === 'string' + ? 
res.data.choices[0].message.content : '' ); diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index cc1bf48dc..e242354a6 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -2,12 +2,14 @@ import type { BaseChatModelParams } from '@langchain/core/language_models/chat_m import { BaseLLMParams } from '@langchain/core/language_models/llms'; import type { ChatOpenAICallOptions, OpenAIChatInput, OpenAIEmbeddingsParams } from '@langchain/openai'; import type { - ConfigurationOptions, OpenAiChatCompletionParameters, OpenAiChatModel, OpenAiEmbeddingModel, OpenAiEmbeddingParameters } from '@sap-ai-sdk/gen-ai-hub'; +import type { + ConfigurationOptions +}from '@sap-ai-sdk/ai-core'; /** * Input for Text generation for OpenAI GPT. diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index b155659c9..021db7c9d 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -7,8 +7,9 @@ import { import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; import type { OpenAiChatAssistantMessage, OpenAiChatCompletionChoice, - OpenAiChatCompletionFunction, OpenAiChatCompletionOutput, OpenAiChatCompletionTool, - OpenAiChatToolMessage, OpenAiChatMessage + OpenAiChatCompletionFunction, OpenAiChatCompletionTool, + OpenAiChatToolMessage, OpenAiChatMessage, + OpenAiChatCompletionResponse } from '@sap-ai-sdk/gen-ai-hub'; import { zodToJsonSchema } from 'zod-to-json-schema'; @@ -77,10 +78,10 @@ export function mapBaseMessageToRole( * @returns The LangChain Chat Result. 
*/ export function mapResponseToChatResult( - res: OpenAiChatCompletionOutput + res: OpenAiChatCompletionResponse ): ChatResult { return { - generations: res.choices.map((c: OpenAiChatCompletionChoice) => ({ + generations: res.data.choices.map((c: OpenAiChatCompletionChoice) => ({ text: (c.message as OpenAiChatAssistantMessage).content || '', message: new AIMessage({ content: (c.message as OpenAiChatAssistantMessage).content || '', @@ -101,14 +102,14 @@ export function mapResponseToChatResult( } })), llmOutput: { - created: res.created, - id: res.id, - model: res.model, - object: res.object, + created: res.data.created, + id: res.data.id, + model: res.data.model, + object: res.data.object, tokenUsage: { - completionTokens: res.usage.completion_tokens, - promptTokens: res.usage.prompt_tokens, - totalTokens: res.usage.total_tokens + completionTokens: res.data.usage.completion_tokens, + promptTokens: res.data.usage.prompt_tokens, + totalTokens: res.data.usage.total_tokens } } }; From 54216439519dd3ad2f4e3a4d52d4d91c55ae4ea6 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 14:38:55 +0200 Subject: [PATCH 29/95] anotha one --- packages/langchain/src/openai/chat.ts | 10 ++---- packages/langchain/src/openai/util.ts | 47 ++++++++++++++------------- 2 files changed, 27 insertions(+), 30 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index bfe88fb1a..6f9e95921 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -31,13 +31,7 @@ export class OpenAIChat extends ChatOpenAI { : defaultValues.stop; super({ - temperature: defaultValues.temperature, - modelName: defaultValues.modelName, - model: defaultValues.model, - n: defaultValues.n, - frequencyPenalty: defaultValues.frequencyPenalty, - presencePenalty: defaultValues.presencePenalty, - topP: defaultValues.topP, + ...defaultValues, ...fields, stop, openAIApiKey: 'dummy', @@ -110,6 +104,6 @@ export class OpenAIChat 
extends ChatOpenAI { : '' ); - return mapResponseToChatResult(res); + return mapResponseToChatResult(res.data); } } diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 021db7c9d..1d945d103 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -6,10 +6,10 @@ import { } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; -import type { OpenAiChatAssistantMessage, OpenAiChatCompletionChoice, +import type { OpenAiChatCompletionChoice, OpenAiChatCompletionFunction, OpenAiChatCompletionTool, OpenAiChatToolMessage, OpenAiChatMessage, - OpenAiChatCompletionResponse + OpenAiChatCompletionOutput } from '@sap-ai-sdk/gen-ai-hub'; import { zodToJsonSchema } from 'zod-to-json-schema'; @@ -66,6 +66,7 @@ export function mapBaseMessageToRole( case 'tool': return 'tool'; case 'generic': + // TODO: refactor? return (message as ChatMessage).role as OpenAiChatMessage['role']; default: throw new Error(`Unknown message type: ${message._getType()}`); @@ -78,38 +79,39 @@ export function mapBaseMessageToRole( * @returns The LangChain Chat Result. 
*/ export function mapResponseToChatResult( - res: OpenAiChatCompletionResponse + res: OpenAiChatCompletionOutput ): ChatResult { return { - generations: res.data.choices.map((c: OpenAiChatCompletionChoice) => ({ - text: (c.message as OpenAiChatAssistantMessage).content || '', + generations: res.choices.map((choice: OpenAiChatCompletionChoice) => ({ + text: choice.message.content || '', message: new AIMessage({ - content: (c.message as OpenAiChatAssistantMessage).content || '', + content: choice.message.content || '', additional_kwargs: { - finish_reason: c.finish_reason, - index: c.index, - function_call: (c.message as OpenAiChatAssistantMessage) + finish_reason: choice.finish_reason, + index: choice.index, + function_call: choice.message .function_call, // add `function_call` parameter - tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls, - tool_call_id: (c.message as OpenAiChatToolMessage).tool_call_id + tool_calls: choice.message.tool_calls, + // TODO: refactor? + tool_call_id: (choice.message as OpenAiChatToolMessage).tool_call_id } }), generationInfo: { - finish_reason: c.finish_reason, - index: c.index, - function_call: (c.message as OpenAiChatAssistantMessage).function_call, // add `function_call` parameter - tool_calls: (c.message as OpenAiChatAssistantMessage).tool_calls + finish_reason: choice.finish_reason, + index: choice.index, + function_call: choice.message.function_call, // add `function_call` parameter + tool_calls: choice.message.tool_calls } })), llmOutput: { - created: res.data.created, - id: res.data.id, - model: res.data.model, - object: res.data.object, + created: res.created, + id: res.id, + model: res.model, + object: res.object, tokenUsage: { - completionTokens: res.data.usage.completion_tokens, - promptTokens: res.data.usage.prompt_tokens, - totalTokens: res.data.usage.total_tokens + completionTokens: res.usage.completion_tokens, + promptTokens: res.usage.prompt_tokens, + totalTokens: res.usage.total_tokens } } }; @@ -129,6 
+131,7 @@ export function mapBaseMessageToOpenAIChatMessage( role: mapBaseMessageToRole(message), function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, + // TODO: refactor? tool_call_id: (message as ToolMessage).tool_call_id } as OpenAiChatMessage; } From 41e644b379a037c08e2e6ca582510925ea121085 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 15:07:23 +0200 Subject: [PATCH 30/95] 'final' cleanup --- packages/langchain/src/openai/util.ts | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 1d945d103..fb352c717 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -7,8 +7,7 @@ import { import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; import type { OpenAiChatCompletionChoice, - OpenAiChatCompletionFunction, OpenAiChatCompletionTool, - OpenAiChatToolMessage, OpenAiChatMessage, + OpenAiChatCompletionFunction, OpenAiChatCompletionTool, OpenAiChatMessage, OpenAiChatCompletionOutput } from '@sap-ai-sdk/gen-ai-hub'; import { zodToJsonSchema } from 'zod-to-json-schema'; @@ -90,16 +89,15 @@ export function mapResponseToChatResult( finish_reason: choice.finish_reason, index: choice.index, function_call: choice.message - .function_call, // add `function_call` parameter + .function_call, tool_calls: choice.message.tool_calls, - // TODO: refactor? 
- tool_call_id: (choice.message as OpenAiChatToolMessage).tool_call_id + tool_call_id: '', } }), generationInfo: { finish_reason: choice.finish_reason, index: choice.index, - function_call: choice.message.function_call, // add `function_call` parameter + function_call: choice.message.function_call, tool_calls: choice.message.tool_calls } })), @@ -131,8 +129,7 @@ export function mapBaseMessageToOpenAIChatMessage( role: mapBaseMessageToRole(message), function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, - // TODO: refactor? - tool_call_id: (message as ToolMessage).tool_call_id + tool_call_id: message._getType() === 'tool' ? (message as ToolMessage).tool_call_id : '' } as OpenAiChatMessage; } From 57d688437d2f7201c2567d0301699a7d06e7fd31 Mon Sep 17 00:00:00 2001 From: cloud-sdk-js Date: Mon, 9 Sep 2024 13:08:05 +0000 Subject: [PATCH 31/95] fix: Changes from lint --- .../ai-core/src/utils/deployment-resolver.ts | 7 ++- packages/gen-ai-hub/src/index.ts | 6 ++- packages/langchain/src/openai/chat.ts | 51 +++++++++---------- packages/langchain/src/openai/types.ts | 16 +++--- packages/langchain/src/openai/util.ts | 21 ++++---- 5 files changed, 53 insertions(+), 48 deletions(-) diff --git a/packages/ai-core/src/utils/deployment-resolver.ts b/packages/ai-core/src/utils/deployment-resolver.ts index adf169fb8..ff66f1800 100644 --- a/packages/ai-core/src/utils/deployment-resolver.ts +++ b/packages/ai-core/src/utils/deployment-resolver.ts @@ -52,8 +52,11 @@ export type ModelDeployment = * The configuration options for a model deployment. * @typeParam ModelNameT - String literal type representing the name of the model. 
*/ -export type ConfigurationOptions = (ModelConfiguration | DeploymentIdConfiguration) & -ResourceGroupConfiguration; +export type ConfigurationOptions = ( + | ModelConfiguration + | DeploymentIdConfiguration +) & + ResourceGroupConfiguration; /** * Type guard to check if the given deployment configuration is a deployment ID configuration. diff --git a/packages/gen-ai-hub/src/index.ts b/packages/gen-ai-hub/src/index.ts index 3a08e9899..24dd9fc62 100644 --- a/packages/gen-ai-hub/src/index.ts +++ b/packages/gen-ai-hub/src/index.ts @@ -29,7 +29,11 @@ export type { OpenAiEmbeddingOutput } from './client/index.js'; -export { OpenAiChatClient, OpenAiEmbeddingClient, OpenAiChatCompletionResponse } from './client/index.js'; +export { + OpenAiChatClient, + OpenAiEmbeddingClient, + OpenAiChatCompletionResponse +} from './client/index.js'; export type { OrchestrationModuleConfig, diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 6f9e95921..a2f37660f 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -10,10 +10,7 @@ import { mapToolToOpenAIFunction, mapToolToOpenAITool } from './util.js'; -import type { - OpenAIChatModelInput, - OpenAIChatCallOptions -} from './types.js'; +import type { OpenAIChatModelInput, OpenAIChatCallOptions } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. @@ -34,7 +31,7 @@ export class OpenAIChat extends ChatOpenAI { ...defaultValues, ...fields, stop, - openAIApiKey: 'dummy', + openAIApiKey: 'dummy' }); this.openAiChatClient = new OpenAiChatClient({ ...fields }); @@ -72,29 +69,27 @@ export class OpenAIChat extends ChatOpenAI { signal: options.signal }, () => - this.openAiChatClient.run( - { - messages: messages.map(mapBaseMessageToOpenAIChatMessage), - max_tokens: this.maxTokens === -1 ? undefined : this.maxTokens, - temperature: this.temperature, - top_p: this.topP, - logit_bias: this.logitBias, - n: this.n, - stop: options?.stop ?? 
this.stop, - presence_penalty: this.presencePenalty, - frequency_penalty: this.frequencyPenalty, - functions: isStructuredToolArray(options?.functions) - ? options?.functions.map(mapToolToOpenAIFunction) - : options?.functions, - tools: isStructuredToolArray(options?.tools) - ? options?.tools.map(mapToolToOpenAITool) - : options?.tools, - tool_choice: options?.tool_choice, - response_format: options?.response_format, - seed: options?.seed, - ...this.modelKwargs - } - ) + this.openAiChatClient.run({ + messages: messages.map(mapBaseMessageToOpenAIChatMessage), + max_tokens: this.maxTokens === -1 ? undefined : this.maxTokens, + temperature: this.temperature, + top_p: this.topP, + logit_bias: this.logitBias, + n: this.n, + stop: options?.stop ?? this.stop, + presence_penalty: this.presencePenalty, + frequency_penalty: this.frequencyPenalty, + functions: isStructuredToolArray(options?.functions) + ? options?.functions.map(mapToolToOpenAIFunction) + : options?.functions, + tools: isStructuredToolArray(options?.tools) + ? 
options?.tools.map(mapToolToOpenAITool) + : options?.tools, + tool_choice: options?.tool_choice, + response_format: options?.response_format, + seed: options?.seed, + ...this.modelKwargs + }) ); // currently BTP LLM Proxy for OpenAI doesn't support streaming diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index e242354a6..a8fff5dc8 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -1,15 +1,17 @@ import type { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; import { BaseLLMParams } from '@langchain/core/language_models/llms'; -import type { ChatOpenAICallOptions, OpenAIChatInput, OpenAIEmbeddingsParams } from '@langchain/openai'; +import type { + ChatOpenAICallOptions, + OpenAIChatInput, + OpenAIEmbeddingsParams +} from '@langchain/openai'; import type { OpenAiChatCompletionParameters, OpenAiChatModel, OpenAiEmbeddingModel, OpenAiEmbeddingParameters } from '@sap-ai-sdk/gen-ai-hub'; -import type { - ConfigurationOptions -}from '@sap-ai-sdk/ai-core'; +import type { ConfigurationOptions } from '@sap-ai-sdk/ai-core'; /** * Input for Text generation for OpenAI GPT. @@ -53,6 +55,6 @@ export interface OpenAIChatCallOptions * Input for Text generation for OpenAI GPT. 
*/ export type OpenAIEmbeddingInput = Omit & -OpenAiEmbeddingParameters & -ConfigurationOptions & -BaseLLMParams; + OpenAiEmbeddingParameters & + ConfigurationOptions & + BaseLLMParams; diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index fb352c717..d3a620daf 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -6,10 +6,13 @@ import { } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; -import type { OpenAiChatCompletionChoice, - OpenAiChatCompletionFunction, OpenAiChatCompletionTool, OpenAiChatMessage, +import type { + OpenAiChatCompletionChoice, + OpenAiChatCompletionFunction, + OpenAiChatCompletionTool, + OpenAiChatMessage, OpenAiChatCompletionOutput - } from '@sap-ai-sdk/gen-ai-hub'; +} from '@sap-ai-sdk/gen-ai-hub'; import { zodToJsonSchema } from 'zod-to-json-schema'; /** @@ -88,10 +91,9 @@ export function mapResponseToChatResult( additional_kwargs: { finish_reason: choice.finish_reason, index: choice.index, - function_call: choice.message - .function_call, + function_call: choice.message.function_call, tool_calls: choice.message.tool_calls, - tool_call_id: '', + tool_call_id: '' } }), generationInfo: { @@ -129,7 +131,8 @@ export function mapBaseMessageToOpenAIChatMessage( role: mapBaseMessageToRole(message), function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, - tool_call_id: message._getType() === 'tool' ? (message as ToolMessage).tool_call_id : '' + tool_call_id: + message._getType() === 'tool' ? 
(message as ToolMessage).tool_call_id : '' } as OpenAiChatMessage; } @@ -143,9 +146,7 @@ export function isStructuredToolArray( ): tools is StructuredTool[] { return ( tools !== undefined && - tools.every(tool => - Array.isArray((tool as StructuredTool).lc_namespace) - ) + tools.every(tool => Array.isArray((tool as StructuredTool).lc_namespace)) ); } From 74f1460d9a86b2b77ce22563a2d3ae4a74ada364 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 16:11:09 +0200 Subject: [PATCH 32/95] switch to azure types --- packages/langchain/src/openai/chat.ts | 4 ++-- packages/langchain/src/openai/embedding.ts | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index a2f37660f..9173bd95d 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -1,7 +1,7 @@ import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; import { BaseMessage } from '@langchain/core/messages'; import type { ChatResult } from '@langchain/core/outputs'; -import { ChatOpenAI } from '@langchain/openai'; +import { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai'; import { OpenAiChatClient } from '@sap-ai-sdk/gen-ai-hub'; import { isStructuredToolArray, @@ -15,7 +15,7 @@ import type { OpenAIChatModelInput, OpenAIChatCallOptions } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. 
*/ -export class OpenAIChat extends ChatOpenAI { +export class OpenAIChat extends AzureChatOpenAI { declare CallOptions: OpenAIChatCallOptions; private openAiChatClient: OpenAiChatClient; diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 213c20bd7..143198099 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -1,4 +1,4 @@ -import { OpenAIEmbeddings } from '@langchain/openai'; +import { AzureOpenAIEmbeddings } from '@langchain/openai'; import { OpenAiEmbeddingClient, OpenAiEmbeddingOutput, @@ -10,11 +10,11 @@ import { OpenAIEmbeddingInput } from './types.js'; /** * OpenAI GPT Language Model Wrapper to embed texts. */ -export class OpenAIEmbedding extends OpenAIEmbeddings { +export class OpenAIEmbedding extends AzureOpenAIEmbeddings { private btpOpenAIClient: OpenAiEmbeddingClient; constructor(fields: OpenAIEmbeddingInput) { - super({ ...fields, openAIApiKey: 'dummy' }); + super({ ...fields }); this.btpOpenAIClient = new OpenAiEmbeddingClient({ ...fields }); } From ea2037f762c4a76cc6f777c710cee55c902b9a92 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 16:24:57 +0200 Subject: [PATCH 33/95] lint an new lines --- packages/langchain/src/openai/chat.ts | 6 +++--- packages/langchain/src/openai/embedding.ts | 2 +- packages/langchain/src/openai/types.ts | 5 ++++- pnpm-workspace.yaml | 3 ++- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 9173bd95d..7470981e6 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -1,7 +1,7 @@ import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; import { BaseMessage } from '@langchain/core/messages'; import type { ChatResult } from '@langchain/core/outputs'; -import { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai'; +import { AzureChatOpenAI, 
AzureOpenAI } from '@langchain/openai'; import { OpenAiChatClient } from '@sap-ai-sdk/gen-ai-hub'; import { isStructuredToolArray, @@ -20,17 +20,17 @@ export class OpenAIChat extends AzureChatOpenAI { private openAiChatClient: OpenAiChatClient; constructor(fields: OpenAIChatModelInput) { - const defaultValues = new ChatOpenAI(); + const defaultValues = new AzureOpenAI(); const stop = fields.stop ? Array.isArray(fields.stop) ? fields.stop : [fields.stop] : defaultValues.stop; - super({ ...defaultValues, ...fields, stop, + azureOpenAIApiKey: 'dummy', openAIApiKey: 'dummy' }); diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 143198099..2adf24b94 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -14,7 +14,7 @@ export class OpenAIEmbedding extends AzureOpenAIEmbeddings { private btpOpenAIClient: OpenAiEmbeddingClient; constructor(fields: OpenAIEmbeddingInput) { - super({ ...fields }); + super({ ...fields, azureOpenAIApiKey: 'dummy' }); this.btpOpenAIClient = new OpenAiEmbeddingClient({ ...fields }); } diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index a8fff5dc8..5cf0def13 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -54,7 +54,10 @@ export interface OpenAIChatCallOptions /** * Input for Text generation for OpenAI GPT. 
*/ -export type OpenAIEmbeddingInput = Omit & +export type OpenAIEmbeddingInput = Omit< + OpenAIEmbeddingsParams, + 'modelName' | 'model' +> & OpenAiEmbeddingParameters & ConfigurationOptions & BaseLLMParams; diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 96eeb0a84..94edfedf6 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -7,4 +7,5 @@ packages: - 'sample-code' - 'tests/e2e-tests' - 'tests/type-tests' - - 'tests/smoke-tests' \ No newline at end of file + - 'tests/smoke-tests' + \ No newline at end of file From 968584229a77db717c77ce4efd81c58153e65d80 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 16:25:27 +0200 Subject: [PATCH 34/95] dumbo --- pnpm-workspace.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 94edfedf6..edb92b1aa 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -8,4 +8,3 @@ packages: - 'tests/e2e-tests' - 'tests/type-tests' - 'tests/smoke-tests' - \ No newline at end of file From 4c077ae880b2aa998a1d36f220c225331add32d4 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 9 Sep 2024 17:15:09 +0200 Subject: [PATCH 35/95] adjust --- packages/langchain/tsconfig.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/langchain/tsconfig.json b/packages/langchain/tsconfig.json index 513e39869..c4babcce9 100644 --- a/packages/langchain/tsconfig.json +++ b/packages/langchain/tsconfig.json @@ -8,5 +8,5 @@ }, "include": ["src/**/*.ts"], "exclude": ["dist/**/*", "test/**/*", "**/*.test.ts", "node_modules/**/*"], - "references": [{ "path": "../foundation-models" }] + "references": [{ "path": "../foundation-models" }, { "path": "../ai-api" }] } From b2f40a05d0f03fd146490eaab4f601c632c7c62c Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 10 Sep 2024 11:20:11 +0200 Subject: [PATCH 36/95] change visibility, merge main --- packages/langchain/src/index.ts | 7 ++++++- packages/langchain/src/internal.ts | 1 + 
packages/langchain/src/openai/chat.ts | 3 ++- packages/langchain/src/openai/util.ts | 7 +++++++ 4 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 packages/langchain/src/internal.ts diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index 06718ab5d..e0e2e0b1c 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1 +1,6 @@ -export * from './openai/index.js'; +export { OpenAIChat, OpenAIEmbedding } from './openai/index.js'; +export type { + OpenAIChatModelInput, + OpenAIEmbeddingInput, + OpenAIChatCallOptions +} from './openai/index.js'; diff --git a/packages/langchain/src/internal.ts b/packages/langchain/src/internal.ts new file mode 100644 index 000000000..06718ab5d --- /dev/null +++ b/packages/langchain/src/internal.ts @@ -0,0 +1 @@ +export * from './openai/index.js'; diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 06ee59b79..349775955 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -30,6 +30,7 @@ export class OpenAIChat extends AzureChatOpenAI { ...defaultValues, ...fields, stop, + // overrides the apikey values as not applicable in BTP azureOpenAIApiKey: 'dummy', openAIApiKey: 'dummy' }); @@ -92,7 +93,7 @@ export class OpenAIChat extends AzureChatOpenAI { }) ); - // currently BTP LLM Proxy for OpenAI doesn't support streaming + // we currently do not support streaming await runManager?.handleLLMNewToken( typeof res.data.choices[0].message.content === 'string' ? res.data.choices[0].message.content diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 027e6b02f..5ed1bb8b2 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -19,6 +19,7 @@ import { zodToJsonSchema } from 'zod-to-json-schema'; * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. 
* @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. * @returns The OpenAI Chat Completion Function. + * @internal */ export function mapToolToOpenAIFunction( tool: StructuredTool @@ -34,6 +35,7 @@ export function mapToolToOpenAIFunction( * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionTool}. * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. * @returns The OpenAI Chat Completion Tool. + * @internal */ export function mapToolToOpenAITool( tool: StructuredTool @@ -52,6 +54,7 @@ export function mapToolToOpenAITool( * Maps a {@link BaseMessage} to OpenAI's Message Role. * @param message - The message to map. * @returns The OpenAI Message Role. + * @internal */ export function mapBaseMessageToRole( message: BaseMessage @@ -79,6 +82,7 @@ export function mapBaseMessageToRole( * Maps OpenAI messages to LangChain's {@link ChatResult}. * @param res - The OpenAI Chat Completion Output. * @returns The LangChain Chat Result. + * @internal */ export function mapResponseToChatResult( res: OpenAiChatCompletionOutput @@ -121,6 +125,7 @@ export function mapResponseToChatResult( * Maps {@link BaseMessage} to OpenAI Messages. * @param message - The message to map. * @returns The OpenAI Chat Message. + * @internal */ export function mapBaseMessageToOpenAIChatMessage( message: BaseMessage @@ -140,6 +145,7 @@ export function mapBaseMessageToOpenAIChatMessage( * Checks if a given array is a structured tool array. * @param tools - The array to check. * @returns Whether the array is a structured tool array. + * @internal */ export function isStructuredToolArray( tools?: unknown[] @@ -155,6 +161,7 @@ export function isStructuredToolArray( * @param arr - Input array to be chunked. * @param chunkSize - Size of each chunk. * @returns Array of chunks. 
+ * @internal */ export const chunkArray = (arr: T[], chunkSize: number): T[][] => arr.reduce((chunks, elem, index) => { From 40c933a618a0e9edce22163bf40edcbd40ce42f2 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 10 Sep 2024 13:30:16 +0200 Subject: [PATCH 37/95] simplify --- packages/langchain/src/openai/chat.ts | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 349775955..3edd18abb 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -39,15 +39,7 @@ export class OpenAIChat extends AzureChatOpenAI { } override get callKeys(): (keyof OpenAIChatCallOptions)[] { - return [ - ...(super.callKeys as (keyof OpenAIChatCallOptions)[]), - 'options', - 'functions', - 'tools', - 'tool_choice', - 'response_format', - 'seed' - ]; + return [...(super.callKeys as (keyof OpenAIChatCallOptions)[])]; } override get lc_secrets(): { [key: string]: string } | undefined { From 7e6d9287d51105985c77cc0b8cb0cc4f801d37b0 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 10 Sep 2024 13:31:51 +0200 Subject: [PATCH 38/95] update docs --- packages/langchain/src/openai/chat.ts | 6 +++--- packages/langchain/src/openai/embedding.ts | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 3edd18abb..bc192ca3b 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -30,7 +30,7 @@ export class OpenAIChat extends AzureChatOpenAI { ...defaultValues, ...fields, stop, - // overrides the apikey values as not applicable in BTP + // overrides the apikey values as they are not applicable for BTP azureOpenAIApiKey: 'dummy', openAIApiKey: 'dummy' }); @@ -43,12 +43,12 @@ export class OpenAIChat extends AzureChatOpenAI { } override get lc_secrets(): { [key: string]: string } | undefined { - // overrides default keys 
as not applicable in BTP + // overrides default keys as they are not applicable for BTP return {}; } override get lc_aliases(): Record { - // overrides default keys as not applicable in BTP + // overrides default keys as they are not applicable for BTP return {}; } diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 2b2075861..89f8f54ac 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -14,6 +14,7 @@ export class OpenAIEmbedding extends AzureOpenAIEmbeddings { private btpOpenAIClient: OpenAiEmbeddingClient; constructor(fields: OpenAIEmbeddingInput) { + // overrides the apikey value as it is not applicable in BTP super({ ...fields, azureOpenAIApiKey: 'dummy' }); this.btpOpenAIClient = new OpenAiEmbeddingClient({ ...fields }); From 187daee168e73dd6e5be8809c55f1e866a40aadf Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 10 Sep 2024 14:45:34 +0200 Subject: [PATCH 39/95] add public api check --- packages/langchain/internal.d.ts | 3 +++ packages/langchain/internal.js | 2 ++ packages/langchain/package.json | 3 ++- 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 packages/langchain/internal.d.ts create mode 100644 packages/langchain/internal.js diff --git a/packages/langchain/internal.d.ts b/packages/langchain/internal.d.ts new file mode 100644 index 000000000..bf1fe07d1 --- /dev/null +++ b/packages/langchain/internal.d.ts @@ -0,0 +1,3 @@ +// eslint-disable-next-line import/no-internal-modules +export * from './dist/internal.js'; +// # sourceMappingURL=internal.d.ts.map diff --git a/packages/langchain/internal.js b/packages/langchain/internal.js new file mode 100644 index 000000000..0c80210d2 --- /dev/null +++ b/packages/langchain/internal.js @@ -0,0 +1,2 @@ +export * from './dist/internal.js'; +//# sourceMappingURL=internal.js.map diff --git a/packages/langchain/package.json b/packages/langchain/package.json index b4ce4feed..ee1bcb078 
100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -25,7 +25,8 @@ "compile:cjs": "tsc -p tsconfig.cjs.json", "test": "NODE_OPTIONS=--experimental-vm-modules jest", "lint": "eslint \"**/*.ts\" && prettier . --config ../../.prettierrc --ignore-path ../../.prettierignore -c", - "lint:fix": "eslint \"**/*.ts\" --fix && prettier . --config ../../.prettierrc --ignore-path ../../.prettierignore -w --log-level error" + "lint:fix": "eslint \"**/*.ts\" --fix && prettier . --config ../../.prettierrc --ignore-path ../../.prettierignore -w --log-level error", + "check:public-api": "node --loader ts-node/esm ../../scripts/check-public-api-cli.ts" }, "dependencies": { "@sap-ai-sdk/ai-api": "workspace:^", From 5c826ff23cf373507edce9ff4067c88ce490f2b6 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 10 Sep 2024 17:12:09 +0200 Subject: [PATCH 40/95] add baseline docs --- packages/ai-api/package.json | 3 +- .../ai-api/src/utils/deployment-resolver.ts | 10 +- packages/langchain/README.md | 92 +++++++++++++++++++ packages/langchain/package.json | 9 +- packages/langchain/src/openai/types.ts | 4 +- pnpm-lock.yaml | 3 + 6 files changed, 111 insertions(+), 10 deletions(-) create mode 100644 packages/langchain/README.md diff --git a/packages/ai-api/package.json b/packages/ai-api/package.json index 649fa8f3b..2f721ac62 100644 --- a/packages/ai-api/package.json +++ b/packages/ai-api/package.json @@ -29,7 +29,8 @@ "check:public-api": "node --loader ts-node/esm ../../scripts/check-public-api-cli.ts" }, "dependencies": { - "@sap-ai-sdk/core": "workspace:^" + "@sap-ai-sdk/core": "workspace:^", + "@sap-cloud-sdk/util": "^3.20.0" }, "devDependencies": { "typescript": "^5.5.4", diff --git a/packages/ai-api/src/utils/deployment-resolver.ts b/packages/ai-api/src/utils/deployment-resolver.ts index ff66f1800..587088b53 100644 --- a/packages/ai-api/src/utils/deployment-resolver.ts +++ b/packages/ai-api/src/utils/deployment-resolver.ts @@ -1,10 +1,10 @@ +import { 
Xor } from '@sap-cloud-sdk/util'; import { type AiDeployment, DeploymentApi } from '../client/AI_CORE_API/index.js'; import { deploymentCache } from './deployment-cache.js'; import { extractModel, type FoundationModel } from './model.js'; - /** * The model deployment configuration when using a model. * @typeParam ModelNameT - String literal type representing the name of the model. @@ -52,10 +52,10 @@ export type ModelDeployment = * The configuration options for a model deployment. * @typeParam ModelNameT - String literal type representing the name of the model. */ -export type ConfigurationOptions = ( - | ModelConfiguration - | DeploymentIdConfiguration -) & +export type ConfigurationOptions = Xor< + ModelConfiguration, + DeploymentIdConfiguration +> & ResourceGroupConfiguration; /** diff --git a/packages/langchain/README.md b/packages/langchain/README.md new file mode 100644 index 000000000..e0a31e3d7 --- /dev/null +++ b/packages/langchain/README.md @@ -0,0 +1,92 @@ +# @sap-ai-sdk/langchain + +This package contains langchain compliant models, based on the @sap-ai-sdk clients. + +### Installation + +``` +$ npm install @sap-ai-sdk/langchain + +$ npm install @langchain/openai // if you want to use OpenAI models +``` + +## Pre-requisites + +- [Enable the AI Core service in BTP](https://help.sap.com/docs/sap-ai-core/sap-ai-core-service-guide/initial-setup). +- Project configured with Node.js v20 or higher and native ESM support enabled. +- For testing your application locally: + - Download a service key for your AI Core service instance. + - Create a `.env` file in the sample-code directory. + - Add an entry `AICORE_SERVICE_KEY=''`. + +## Usage + +All client's comply with langchains interface, therefore you should be able to use them as per usual. 
+ +The only difference is in the initialization of the client, where you have to option to pass either: + +```ts + modelName: string, + modelVersion?: string, + resourceGroup?: string, + ...others +``` + +or + +```ts + deploymentId: string, + resourceGroup?: string + ...others +``` + +Below are are the usage of OpenAI's chat and embedding client. + +### OpenAI + +#### Chat + +There are two common APIs, `.invoke()` for simple text completion and `.generate()` for chat completion. +You can also combine them with the usual langchain functionality, e.g. prompt templates. + +A simple text completion might look like: + +```ts +const client = new OpenAIChat({ modelName: 'gpt-4o' }); + +const response = await client.invoke("What's the capital of france?'"); +``` + +A chat completion example might be: + +```ts +const response = await client.generate([ + [new SystemMessage('You are acting super cool.')], + [new HumanMessage('Whats up')] +]); +``` + +#### Embedding + +You have the option to either embed a text, or a document (an array of strings). + +Below are two examples. + +```ts +const client = new OpenAIEmbedding({ modelName: 'text-embedding-ada-002' }); +const embedding = await client.embedQuery('Paris is the capitol of France'); +const embeddedDocument = await client.embedDocuments([ + 'Page 1: Paris is the capitol of France', + 'Page 2: It is a beautiful city' +]); +``` + +## Support, Feedback, Contribution + +This project is open to feature requests/suggestions, bug reports etc. via [GitHub issues](https://github.com/SAP/ai-sdk-js/issues). + +Contribution and feedback are encouraged and always welcome. For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](https://github.com/SAP/ai-sdk-js/blob/main/CONTRIBUTING.md). 
+ +## License + +The SAP Cloud SDK for AI is released under the [Apache License Version 2.0.](http://www.apache.org/licenses/) diff --git a/packages/langchain/package.json b/packages/langchain/package.json index ee1bcb078..f7d64d63d 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -32,9 +32,16 @@ "@sap-ai-sdk/ai-api": "workspace:^", "@sap-ai-sdk/foundation-models": "workspace:^", "@langchain/core": "^0.2.30", - "@langchain/openai": "^0.2.8", "zod-to-json-schema": "^3.23.2" }, + "peerDependencies": { + "@langchain/openai": "^0.2.8" + }, + "peerDependenciesMeta": { + "@langchain/openai": { + "optional": true + } + }, "devDependencies": { "typescript": "^5.5.4" } diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 8a416cbe5..1c207e40e 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -8,8 +8,7 @@ import type { import type { OpenAiChatCompletionParameters, OpenAiChatModel, - OpenAiEmbeddingModel, - OpenAiEmbeddingParameters + OpenAiEmbeddingModel } from '@sap-ai-sdk/foundation-models'; import type { ConfigurationOptions } from '@sap-ai-sdk/ai-api'; @@ -58,6 +57,5 @@ export type OpenAIEmbeddingInput = Omit< OpenAIEmbeddingsParams, 'modelName' | 'model' > & - OpenAiEmbeddingParameters & ConfigurationOptions & BaseLLMParams; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7a8ed8cad..4b39ec4f2 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -91,6 +91,9 @@ importers: '@sap-ai-sdk/core': specifier: workspace:^ version: link:../core + '@sap-cloud-sdk/util': + specifier: ^3.20.0 + version: 3.20.0 devDependencies: '@sap-cloud-sdk/openapi-generator': specifier: ^3.20.0 From de5fdd78d7997c26a7c0efa56e53723828726536 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 11 Sep 2024 15:08:01 +0200 Subject: [PATCH 41/95] adjust type --- packages/langchain/src/openai/chat.ts | 3 ++- packages/langchain/src/openai/types.ts | 5 ++++- 2 files 
changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index bc192ca3b..dcbb65ecc 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -32,7 +32,8 @@ export class OpenAIChat extends AzureChatOpenAI { stop, // overrides the apikey values as they are not applicable for BTP azureOpenAIApiKey: 'dummy', - openAIApiKey: 'dummy' + openAIApiKey: 'dummy', + apiKey: 'dummy' }); this.openAiChatClient = new OpenAiChatClient({ ...fields }); diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 1c207e40e..7f71fadcd 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -27,6 +27,9 @@ export type OpenAIChatModelInput = Omit< | 'model' | 'openAIApiKey' | 'streaming' + | 'azureOpenAIApiKey' + | 'openAIApiKey' + | 'apiKey' > & Omit & BaseChatModelParams & @@ -55,7 +58,7 @@ export interface OpenAIChatCallOptions */ export type OpenAIEmbeddingInput = Omit< OpenAIEmbeddingsParams, - 'modelName' | 'model' + 'modelName' | 'model' | 'azureOpenAIApiKey' | 'apiKey' > & ConfigurationOptions & BaseLLMParams; From 4405f25816d6d5e8e1a876e1634b977c1804e6a4 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 11 Sep 2024 15:23:14 +0200 Subject: [PATCH 42/95] rename clients --- packages/langchain/src/index.ts | 2 +- packages/langchain/src/openai/chat.ts | 8 ++++---- packages/langchain/src/openai/embedding.ts | 8 ++++---- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index e0e2e0b1c..191b8f8ef 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1,4 +1,4 @@ -export { OpenAIChat, OpenAIEmbedding } from './openai/index.js'; +export { OpenAiChatClient, OpenAiEmbeddingClient } from './openai/index.js'; export type { OpenAIChatModelInput, OpenAIEmbeddingInput, diff --git 
a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index dcbb65ecc..48803a83a 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -2,7 +2,7 @@ import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; import { BaseMessage } from '@langchain/core/messages'; import type { ChatResult } from '@langchain/core/outputs'; import { AzureChatOpenAI, AzureOpenAI } from '@langchain/openai'; -import { OpenAiChatClient } from '@sap-ai-sdk/foundation-models'; +import { OpenAiChatClient as OpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; import { isStructuredToolArray, mapBaseMessageToOpenAIChatMessage, @@ -15,9 +15,9 @@ import type { OpenAIChatModelInput, OpenAIChatCallOptions } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. */ -export class OpenAIChat extends AzureChatOpenAI { +export class OpenAiChatClient extends AzureChatOpenAI { declare CallOptions: OpenAIChatCallOptions; - private openAiChatClient: OpenAiChatClient; + private openAiChatClient: OpenAiChatClientBase; constructor(fields: OpenAIChatModelInput) { const defaultValues = new AzureOpenAI(); @@ -36,7 +36,7 @@ export class OpenAIChat extends AzureChatOpenAI { apiKey: 'dummy' }); - this.openAiChatClient = new OpenAiChatClient({ ...fields }); + this.openAiChatClient = new OpenAiChatClientBase({ ...fields }); } override get callKeys(): (keyof OpenAIChatCallOptions)[] { diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 89f8f54ac..ca2e1b3d8 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -1,6 +1,6 @@ import { AzureOpenAIEmbeddings } from '@langchain/openai'; import { - OpenAiEmbeddingClient, + OpenAiEmbeddingClient as OpenAiEmbeddingClientBase, OpenAiEmbeddingOutput, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; @@ -10,14 +10,14 @@ import { 
OpenAIEmbeddingInput } from './types.js'; /** * OpenAI GPT Language Model Wrapper to embed texts. */ -export class OpenAIEmbedding extends AzureOpenAIEmbeddings { - private btpOpenAIClient: OpenAiEmbeddingClient; +export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings { + private btpOpenAIClient: OpenAiEmbeddingClientBase; constructor(fields: OpenAIEmbeddingInput) { // overrides the apikey value as it is not applicable in BTP super({ ...fields, azureOpenAIApiKey: 'dummy' }); - this.btpOpenAIClient = new OpenAiEmbeddingClient({ ...fields }); + this.btpOpenAIClient = new OpenAiEmbeddingClientBase({ ...fields }); } override async embedDocuments(documents: string[]): Promise { From 9680f53ab6046bcc2f197dec4fb581100b96a0e1 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 12 Sep 2024 11:31:14 +0200 Subject: [PATCH 43/95] update docs --- packages/langchain/README.md | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index e0a31e3d7..a9c406554 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -21,9 +21,9 @@ $ npm install @langchain/openai // if you want to use OpenAI models ## Usage -All client's comply with langchains interface, therefore you should be able to use them as per usual. +All client's comply with [langchain's interface](https://python.langchain.com/v0.2/api_reference/openai/chat_models/langchain_openai.chat_models.azure.AzureChatOpenAI.html#langchain_openai.chat_models.azure.AzureChatOpenAI), therefore you should be able to use them as per usual. -The only difference is in the initialization of the client, where you have to option to pass either: +The only difference is in the initialization of the client, where you have th option to pass either: ```ts modelName: string, @@ -40,10 +40,14 @@ or ...others ``` -Below are are the usage of OpenAI's chat and embedding client. 
+If you pass API Keys they are ignored, since you're not inteded to call the vendor's endpoints directly. +Instead, the credentials in the binding are used to call SAP's LLM Proxy. ### OpenAI +We offer two types of clients for OpenAI models. +Currenty these are chat and embedding models. + #### Chat There are two common APIs, `.invoke()` for simple text completion and `.generate()` for chat completion. @@ -52,32 +56,32 @@ You can also combine them with the usual langchain functionality, e.g. prompt te A simple text completion might look like: ```ts -const client = new OpenAIChat({ modelName: 'gpt-4o' }); +const chatClient = new OpenAIChatClient({ modelName: 'gpt-4o' }); -const response = await client.invoke("What's the capital of france?'"); +const response = await chatClient.invoke("What's the capital of France?'"); ``` A chat completion example might be: ```ts -const response = await client.generate([ - [new SystemMessage('You are acting super cool.')], - [new HumanMessage('Whats up')] +const response = await chatClient.generate([ + [new SystemMessage('You are an IT support agent answering questions.')], + [new HumanMessage('Why is my internet not working?')] ]); ``` #### Embedding -You have the option to either embed a text, or a document (an array of strings). +You have the option to either embed a text, or a document, which has to be represented as an array of strings. Below are two examples. 
```ts -const client = new OpenAIEmbedding({ modelName: 'text-embedding-ada-002' }); -const embedding = await client.embedQuery('Paris is the capitol of France'); -const embeddedDocument = await client.embedDocuments([ - 'Page 1: Paris is the capitol of France', - 'Page 2: It is a beautiful city' +const embeddingClient = new OpenAIEmbeddingClient({ modelName: 'text-embedding-ada-002' }); +const embeddedText = await embeddingClient.embedQuery('Paris is the capitol of France.'); +const embeddedDocument = await embeddingClient.embedDocuments([ + 'Page 1: Paris is the capitol of France.', + 'Page 2: It is a beautiful city.' ]); ``` From 62c3e926167b688b18068f5fff02d9e7f560703a Mon Sep 17 00:00:00 2001 From: cloud-sdk-js Date: Thu, 12 Sep 2024 09:32:04 +0000 Subject: [PATCH 44/95] fix: Changes from lint --- packages/langchain/README.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index a9c406554..182c97f82 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -77,8 +77,12 @@ You have the option to either embed a text, or a document, which has to be repre Below are two examples. ```ts -const embeddingClient = new OpenAIEmbeddingClient({ modelName: 'text-embedding-ada-002' }); -const embeddedText = await embeddingClient.embedQuery('Paris is the capitol of France.'); +const embeddingClient = new OpenAIEmbeddingClient({ + modelName: 'text-embedding-ada-002' +}); +const embeddedText = await embeddingClient.embedQuery( + 'Paris is the capitol of France.' +); const embeddedDocument = await embeddingClient.embedDocuments([ 'Page 1: Paris is the capitol of France.', 'Page 2: It is a beautiful city.' 
From e04298df0135c6ad15617b5caaeac05c8d80195d Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 12 Sep 2024 13:06:50 +0200 Subject: [PATCH 45/95] add docs --- packages/langchain/README.md | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index a9c406554..b11bd3620 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -40,7 +40,7 @@ or ...others ``` -If you pass API Keys they are ignored, since you're not inteded to call the vendor's endpoints directly. +If you pass API keys they are ignored, since you're not inteded to call the vendor's endpoints directly. Instead, the credentials in the binding are used to call SAP's LLM Proxy. ### OpenAI @@ -55,9 +55,13 @@ You can also combine them with the usual langchain functionality, e.g. prompt te A simple text completion might look like: +##### Initialization ```ts const chatClient = new OpenAIChatClient({ modelName: 'gpt-4o' }); +``` +##### Usage +```ts const response = await chatClient.invoke("What's the capital of France?'"); ``` @@ -72,12 +76,18 @@ const response = await chatClient.generate([ #### Embedding -You have the option to either embed a text, or a document, which has to be represented as an array of strings. +You have the option to either embed a text or a document. +Documents have to be represented as an array of strings. Below are two examples. 
+##### Initialization ```ts const embeddingClient = new OpenAIEmbeddingClient({ modelName: 'text-embedding-ada-002' }); +``` + +##### Usage +```ts const embeddedText = await embeddingClient.embedQuery('Paris is the capitol of France.'); const embeddedDocument = await embeddingClient.embedDocuments([ 'Page 1: Paris is the capitol of France.', From 8536a17df5786cde1ced541fdfca2b85317b1306 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 12 Sep 2024 13:36:37 +0200 Subject: [PATCH 46/95] add type tests --- pnpm-lock.yaml | 6 +++++ tests/type-tests/package.json | 2 ++ tests/type-tests/test/langchain.test-d.ts | 32 +++++++++++++++++++++++ 3 files changed, 40 insertions(+) create mode 100644 tests/type-tests/test/langchain.test-d.ts diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4b39ec4f2..f8245fa50 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -272,12 +272,18 @@ importers: tests/type-tests: devDependencies: + '@langchain/core': + specifier: ^0.2.30 + version: 0.2.30(openai@4.56.1(zod@3.23.8)) '@sap-ai-sdk/core': specifier: workspace:^ version: link:../../packages/core '@sap-ai-sdk/foundation-models': specifier: workspace:^ version: link:../../packages/foundation-models + '@sap-ai-sdk/langchain': + specifier: workspace:^ + version: link:../../packages/langchain '@sap-ai-sdk/orchestration': specifier: workspace:^ version: link:../../packages/orchestration diff --git a/tests/type-tests/package.json b/tests/type-tests/package.json index f48c5b7b9..0556cfd82 100644 --- a/tests/type-tests/package.json +++ b/tests/type-tests/package.json @@ -16,6 +16,8 @@ "@sap-ai-sdk/foundation-models": "workspace:^", "@sap-ai-sdk/orchestration": "workspace:^", "@sap-ai-sdk/core": "workspace:^", + "@sap-ai-sdk/langchain": "workspace:^", + "@langchain/core": "^0.2.30", "tsd": "^0.31.2" } } diff --git a/tests/type-tests/test/langchain.test-d.ts b/tests/type-tests/test/langchain.test-d.ts new file mode 100644 index 000000000..3574a9c8b --- /dev/null +++ 
b/tests/type-tests/test/langchain.test-d.ts @@ -0,0 +1,32 @@ +import { expectError, expectType } from 'tsd'; +import { OpenAiChatClient, OpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; +import { AIMessageChunk } from '@langchain/core/messages'; +import { LLMResult } from '@langchain/core/outputs'; + +expectError( + new OpenAiChatClient({ deploymentId: 'test', modelName: 'test' }).invoke('Test') +); + +expectError( + new OpenAiChatClient({ modelName: 'my-cool-chat-model' }) +); + +expectError( + new OpenAiChatClient({ deploymentId: 'test', apiKey: 'test' }) +); + +expectType>( + new OpenAiChatClient({ modelName: 'gpt-35-turbo' }).invoke('Test') +); + +expectType>( + new OpenAiChatClient({ modelName: 'gpt-35-turbo' }).generate([['Test']]) +); + +expectType>( + new OpenAiEmbeddingClient({ modelName: 'text-embedding-3-large' }).embedQuery('test') +); + +expectError( + new OpenAiEmbeddingClient({ modelName: 'my-cool-embedding-model' }) +); From f28693ac17fb01fadd567ac256abbab9e7b93c0f Mon Sep 17 00:00:00 2001 From: cloud-sdk-js Date: Thu, 12 Sep 2024 11:37:20 +0000 Subject: [PATCH 47/95] fix: Changes from lint --- tests/type-tests/test/langchain.test-d.ts | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/type-tests/test/langchain.test-d.ts b/tests/type-tests/test/langchain.test-d.ts index 3574a9c8b..4b9836622 100644 --- a/tests/type-tests/test/langchain.test-d.ts +++ b/tests/type-tests/test/langchain.test-d.ts @@ -4,29 +4,29 @@ import { AIMessageChunk } from '@langchain/core/messages'; import { LLMResult } from '@langchain/core/outputs'; expectError( - new OpenAiChatClient({ deploymentId: 'test', modelName: 'test' }).invoke('Test') + new OpenAiChatClient({ deploymentId: 'test', modelName: 'test' }).invoke( + 'Test' + ) ); -expectError( - new OpenAiChatClient({ modelName: 'my-cool-chat-model' }) -); +expectError(new OpenAiChatClient({ modelName: 'my-cool-chat-model' })); -expectError( - new OpenAiChatClient({ 
deploymentId: 'test', apiKey: 'test' }) -); +expectError(new OpenAiChatClient({ deploymentId: 'test', apiKey: 'test' })); expectType>( - new OpenAiChatClient({ modelName: 'gpt-35-turbo' }).invoke('Test') + new OpenAiChatClient({ modelName: 'gpt-35-turbo' }).invoke('Test') ); expectType>( - new OpenAiChatClient({ modelName: 'gpt-35-turbo' }).generate([['Test']]) + new OpenAiChatClient({ modelName: 'gpt-35-turbo' }).generate([['Test']]) ); expectType>( - new OpenAiEmbeddingClient({ modelName: 'text-embedding-3-large' }).embedQuery('test') + new OpenAiEmbeddingClient({ modelName: 'text-embedding-3-large' }).embedQuery( + 'test' + ) ); expectError( - new OpenAiEmbeddingClient({ modelName: 'my-cool-embedding-model' }) + new OpenAiEmbeddingClient({ modelName: 'my-cool-embedding-model' }) ); From 68112136650ea0d909e381cb7e23a128c6ed7662 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 12 Sep 2024 14:09:24 +0200 Subject: [PATCH 48/95] add docs --- packages/langchain/README.md | 10 +++++++--- packages/langchain/src/openai/util.test.ts | 0 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 packages/langchain/src/openai/util.test.ts diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 9d86fbf16..201497fd1 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -94,14 +94,18 @@ const embeddingClient = new OpenAIEmbeddingClient({ ##### Usage ```ts -const embeddingClient = new OpenAIEmbeddingClient({ - modelName: 'text-embedding-ada-002' -}); const embeddedText = await embeddingClient.embedQuery( 'Paris is the capitol of France.' ); ``` +```ts +const embeddedDocument = await embeddingClient.embedDocuments([ + 'Page 1: Paris is the capitol of France.', + 'Page 2: It is a beautiful city.' +]); +``` + ## Support, Feedback, Contribution This project is open to feature requests/suggestions, bug reports etc. via [GitHub issues](https://github.com/SAP/ai-sdk-js/issues). 
diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts new file mode 100644 index 000000000..e69de29bb From bab7d81a0b874ba3223ad8c4b2f53c91dde49986 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 12 Sep 2024 14:35:14 +0200 Subject: [PATCH 49/95] improve chunking --- packages/langchain/src/openai/embedding.ts | 2 +- packages/langchain/src/openai/util.ts | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index ca2e1b3d8..139ce0723 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -21,7 +21,7 @@ export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings { } override async embedDocuments(documents: string[]): Promise { - const chunkedPrompts = chunkArray( + const chunkedPrompts = chunkArray( this.stripNewLines ? documents.map(t => t.replace(/\n/g, ' ')) : documents, diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 5ed1bb8b2..3bfa0f933 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -163,11 +163,12 @@ export function isStructuredToolArray( * @returns Array of chunks. 
* @internal */ -export const chunkArray = (arr: T[], chunkSize: number): T[][] => - arr.reduce((chunks, elem, index) => { +export function chunkArray(arr: string[], chunkSize: number): string[][] { + return arr.reduce((chunks, elem, index) => { const chunkIndex = Math.floor(index / chunkSize); const chunk = chunks[chunkIndex] || []; chunks[chunkIndex] = chunk.concat([elem]); return chunks; - }, [] as T[][]); + }, [] as string[][]); +} From 4c46d34c84ec5648baab6cb7b0337e6d84aa6081 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 12 Sep 2024 14:56:55 +0200 Subject: [PATCH 50/95] move another mapping --- packages/langchain/src/openai/chat.ts | 32 +++------------------ packages/langchain/src/openai/util.ts | 41 ++++++++++++++++++++++++++- 2 files changed, 44 insertions(+), 29 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 48803a83a..2d05c8e5c 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -3,13 +3,7 @@ import { BaseMessage } from '@langchain/core/messages'; import type { ChatResult } from '@langchain/core/outputs'; import { AzureChatOpenAI, AzureOpenAI } from '@langchain/openai'; import { OpenAiChatClient as OpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; -import { - isStructuredToolArray, - mapBaseMessageToOpenAIChatMessage, - mapResponseToChatResult, - mapToolToOpenAIFunction, - mapToolToOpenAITool -} from './util.js'; +import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; import type { OpenAIChatModelInput, OpenAIChatCallOptions } from './types.js'; /** @@ -63,27 +57,9 @@ export class OpenAiChatClient extends AzureChatOpenAI { signal: options.signal }, () => - this.openAiChatClient.run({ - messages: messages.map(mapBaseMessageToOpenAIChatMessage), - max_tokens: this.maxTokens === -1 ? 
undefined : this.maxTokens, - temperature: this.temperature, - top_p: this.topP, - logit_bias: this.logitBias, - n: this.n, - stop: options?.stop ?? this.stop, - presence_penalty: this.presencePenalty, - frequency_penalty: this.frequencyPenalty, - functions: isStructuredToolArray(options?.functions) - ? options?.functions.map(mapToolToOpenAIFunction) - : options?.functions, - tools: isStructuredToolArray(options?.tools) - ? options?.tools.map(mapToolToOpenAITool) - : options?.tools, - tool_choice: options?.tool_choice, - response_format: options?.response_format, - seed: options?.seed, - ...this.modelKwargs - }) + this.openAiChatClient.run( + mapLangchainToAiClient(this, options, messages) + ) ); // we currently do not support streaming diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 3bfa0f933..675c96e85 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -11,9 +11,12 @@ import type { OpenAiChatCompletionFunction, OpenAiChatCompletionTool, OpenAiChatMessage, - OpenAiChatCompletionOutput + OpenAiChatCompletionOutput, + OpenAiChatCompletionParameters } from '@sap-ai-sdk/foundation-models'; import { zodToJsonSchema } from 'zod-to-json-schema'; +import { OpenAiChatClient } from './chat.js'; +import { OpenAIChatCallOptions } from './types.js'; /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. 
@@ -172,3 +175,39 @@ export function chunkArray(arr: string[], chunkSize: number): string[][] { return chunks; }, [] as string[][]); } + +/** + * Maps the langchain's input interface to our own client's input interface + * @param client The Langchain OpenAI client + * @param options The Langchain call options + * @param messages The messages to be send + * @returns A AI SDK compatibile request + * @internal + */ +export function mapLangchainToAiClient( + client: OpenAiChatClient, + options: OpenAIChatCallOptions, + messages: BaseMessage[] +): OpenAiChatCompletionParameters { + return { + messages: messages.map(mapBaseMessageToOpenAIChatMessage), + max_tokens: client.maxTokens === -1 ? undefined : client.maxTokens, + temperature: client.temperature, + top_p: client.topP, + logit_bias: client.logitBias, + n: client.n, + stop: options?.stop ?? client.stop, + presence_penalty: client.presencePenalty, + frequency_penalty: client.frequencyPenalty, + functions: isStructuredToolArray(options?.functions) + ? options?.functions.map(mapToolToOpenAIFunction) + : options?.functions, + tools: isStructuredToolArray(options?.tools) + ? 
options?.tools.map(mapToolToOpenAITool) + : options?.tools, + tool_choice: options?.tool_choice, + response_format: options?.response_format, + seed: options?.seed, + ...client.modelKwargs + }; +} From a4ef25cf764dcf1f1426f5b3f0cbc3b0d21719ce Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 12 Sep 2024 15:51:55 +0200 Subject: [PATCH 51/95] add sample code and e2e tests --- packages/langchain/README.md | 15 +++++++---- packages/langchain/src/openai/util.test.ts | 2 ++ packages/langchain/src/openai/util.ts | 1 - pnpm-lock.yaml | 6 +++++ sample-code/package.json | 2 ++ sample-code/src/index.ts | 4 +++ sample-code/src/langchain-openai.ts | 26 ++++++++++++++++++ sample-code/src/server.ts | 27 +++++++++++++++++++ tests/e2e-tests/src/open-ai-langchain.test.ts | 23 ++++++++++++++++ 9 files changed, 100 insertions(+), 6 deletions(-) create mode 100644 sample-code/src/langchain-openai.ts create mode 100644 tests/e2e-tests/src/open-ai-langchain.test.ts diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 201497fd1..6b9df9e60 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -1,6 +1,6 @@ # @sap-ai-sdk/langchain -This package contains langchain compliant models, based on the @sap-ai-sdk clients. +This package contains Langchain compliant models, based on the @sap-ai-sdk clients. ### Installation @@ -21,7 +21,7 @@ $ npm install @langchain/openai // if you want to use OpenAI models ## Usage -All client's comply with [langchain's interface](https://python.langchain.com/v0.2/api_reference/openai/chat_models/langchain_openai.chat_models.azure.AzureChatOpenAI.html#langchain_openai.chat_models.azure.AzureChatOpenAI), therefore you should be able to use them as per usual. 
+All client's comply with [Langchain's interface](https://python.langchain.com/v0.2/api_reference/openai/chat_models/langchain_openai.chat_models.azure.AzureChatOpenAI.html#langchain_openai.chat_models.azure.AzureChatOpenAI), therefore you should be able to use them as per usual. The only difference is in the initialization of the client, where you have th option to pass either: @@ -48,17 +48,20 @@ Instead, the credentials in the binding are used to call SAP's LLM Proxy. We offer two types of clients for OpenAI models. Currenty these are chat and embedding models. +All clients assume the same set of default values as [Langchain's default OpenAI client](https://www.npmjs.com/package/@langchain/openai) does. + #### Chat There are two common APIs, `.invoke()` for simple text completion and `.generate()` for chat completion. -You can also combine them with the usual langchain functionality, e.g. prompt templates. +You can also combine them with the usual Langchain functionality, e.g. prompt templates. A simple text completion might look like: ##### Initialization ```ts -const chatClient = new OpenAIChatClient({ modelName: 'gpt-4o' }); +import { OpenAiChatClient } from '@sap-ai-sdk/langchain' +const chatClient = new OpenAiChatClient({ modelName: 'gpt-4o' }); ``` ##### Usage @@ -86,7 +89,9 @@ Below are two examples. 
##### Initialization ```ts -const embeddingClient = new OpenAIEmbeddingClient({ +import { OpenAiEmbeddingClient } from '@sap-ai-sdk/langchain' + +const embeddingClient = new OpenAiEmbeddingClient({ modelName: 'text-embedding-ada-002' }); ``` diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index e69de29bb..ef4c0ae14 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -0,0 +1,2 @@ +// mapResponseToChatResult +// mapLangchainToAiClient diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 675c96e85..c98364825 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -74,7 +74,6 @@ export function mapBaseMessageToRole( case 'tool': return 'tool'; case 'generic': - // TODO: refactor? return (message as ChatMessage).role as OpenAiChatMessage['role']; default: throw new Error(`Unknown message type: ${message._getType()}`); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f8245fa50..0b9d96d63 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -207,12 +207,18 @@ importers: sample-code: dependencies: + '@langchain/core': + specifier: ^0.2.30 + version: 0.2.30(openai@4.56.1(zod@3.23.8)) '@sap-ai-sdk/ai-api': specifier: workspace:^ version: link:../packages/ai-api '@sap-ai-sdk/foundation-models': specifier: workspace:^ version: link:../packages/foundation-models + '@sap-ai-sdk/langchain': + specifier: workspace:^ + version: link:../packages/langchain '@sap-ai-sdk/orchestration': specifier: workspace:^ version: link:../packages/orchestration diff --git a/sample-code/package.json b/sample-code/package.json index ff7991a78..250fef8a0 100644 --- a/sample-code/package.json +++ b/sample-code/package.json @@ -26,6 +26,8 @@ "@sap-ai-sdk/ai-api": "workspace:^", "@sap-ai-sdk/foundation-models": "workspace:^", "@sap-ai-sdk/orchestration": "workspace:^", + "@sap-ai-sdk/langchain": "workspace:^", + 
"@langchain/core": "^0.2.30", "@types/express": "^4.17.21", "express": "^4.19.2" } diff --git a/sample-code/src/index.ts b/sample-code/src/index.ts index aa2767c75..63370a033 100644 --- a/sample-code/src/index.ts +++ b/sample-code/src/index.ts @@ -3,3 +3,7 @@ export { chatCompletion, computeEmbedding } from './foundation-models-openai.js'; +export { + embedQuery, + generate +} from './langchain-openai.js'; diff --git a/sample-code/src/langchain-openai.ts b/sample-code/src/langchain-openai.ts new file mode 100644 index 000000000..e6d9c31de --- /dev/null +++ b/sample-code/src/langchain-openai.ts @@ -0,0 +1,26 @@ +import { HumanMessage } from '@langchain/core/messages'; +import { + OpenAiChatClient, + OpenAiEmbeddingClient +} from '@sap-ai-sdk/langchain'; + +/** + * Ask GPT about the capital of France. + * @returns The answer from GPT. + */ +export async function generate(): Promise { + const client = new OpenAiChatClient({ modelName: 'gpt-35-turbo' }); + const response = await client.generate([[new HumanMessage('What is the capital of France?')]]); + return response.generations[0][0].text; +} + +/** + * Embed 'Hello, world!' using the OpenAI ADA model. + * @returns An embedding vector. 
+ */ +export async function embedQuery(): Promise { + const client = new OpenAiEmbeddingClient({ modelName: 'text-embedding-ada-002' }); + const response = await client.embedQuery('Hello, world!'); + + return response; +} diff --git a/sample-code/src/server.ts b/sample-code/src/server.ts index 290b4eab5..714d247c3 100644 --- a/sample-code/src/server.ts +++ b/sample-code/src/server.ts @@ -6,6 +6,7 @@ import { } from './foundation-models-openai.js'; import { orchestrationCompletion } from './orchestration.js'; import { getDeployments } from './ai-api.js'; +import { embedQuery, generate } from './langchain-openai.js'; const app = express(); const port = 8080; @@ -62,6 +63,32 @@ app.get('/ai-api/get-deployments', async (req, res) => { } }); +app.get('/langchain/chat', async (req, res) => { + try { + res.send(await generate()); + } catch (error: any) { + console.error(error); + res + .status(500) + .send('Yikes, vibes are off apparently 😬 -> ' + error.message); + } +}); + +app.get('/langchain/embedding', async (req, res) => { + try { + const result = await embedQuery(); + if (result.length === 0) { + throw new Error('No embedding vector returned'); + } + res.send('Number crunching success, got a nice vector.'); + } catch (error: any) { + console.error(error); + res + .status(500) + .send('Yikes, vibes are off apparently 😬 -> ' + error.message); + } +}); + app.listen(port, () => { console.log(`Server running at http://localhost:${port}`); }); diff --git a/tests/e2e-tests/src/open-ai-langchain.test.ts b/tests/e2e-tests/src/open-ai-langchain.test.ts new file mode 100644 index 000000000..e5381f9e7 --- /dev/null +++ b/tests/e2e-tests/src/open-ai-langchain.test.ts @@ -0,0 +1,23 @@ +import path from 'path'; +import { fileURLToPath } from 'url'; +import dotenv from 'dotenv'; +import { embedQuery, generate } from '@sap-ai-sdk/sample-code'; + +// Pick .env file from root directory +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); 
+dotenv.config({ path: path.resolve(__dirname, '../.env') }); + +describe('Langchain OpenAI Access', () => { + it('should complete a chat', async () => { + const result = await generate(); + expect(result).toBeDefined(); + expect(result).toContain('Paris'); + }); + + it('should compute an embedding vector', async () => { + const result = await embedQuery(); + expect(result).toBeDefined(); + expect(result).not.toHaveLength(0); + }); +}); From 8d96f55dfa633bbff954476743c6af8668e2db8b Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 12 Sep 2024 15:52:21 +0200 Subject: [PATCH 52/95] lint --- packages/langchain/README.md | 4 ++-- sample-code/src/index.ts | 5 +---- sample-code/src/langchain-openai.ts | 13 +++++++------ 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 6b9df9e60..82e7e4aed 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -60,7 +60,7 @@ A simple text completion might look like: ##### Initialization ```ts -import { OpenAiChatClient } from '@sap-ai-sdk/langchain' +import { OpenAiChatClient } from '@sap-ai-sdk/langchain'; const chatClient = new OpenAiChatClient({ modelName: 'gpt-4o' }); ``` @@ -89,7 +89,7 @@ Below are two examples. 
##### Initialization ```ts -import { OpenAiEmbeddingClient } from '@sap-ai-sdk/langchain' +import { OpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; const embeddingClient = new OpenAiEmbeddingClient({ modelName: 'text-embedding-ada-002' diff --git a/sample-code/src/index.ts b/sample-code/src/index.ts index 63370a033..a7b2f3c2a 100644 --- a/sample-code/src/index.ts +++ b/sample-code/src/index.ts @@ -3,7 +3,4 @@ export { chatCompletion, computeEmbedding } from './foundation-models-openai.js'; -export { - embedQuery, - generate -} from './langchain-openai.js'; +export { embedQuery, generate } from './langchain-openai.js'; diff --git a/sample-code/src/langchain-openai.ts b/sample-code/src/langchain-openai.ts index e6d9c31de..bd36990d1 100644 --- a/sample-code/src/langchain-openai.ts +++ b/sample-code/src/langchain-openai.ts @@ -1,8 +1,5 @@ import { HumanMessage } from '@langchain/core/messages'; -import { - OpenAiChatClient, - OpenAiEmbeddingClient -} from '@sap-ai-sdk/langchain'; +import { OpenAiChatClient, OpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; /** * Ask GPT about the capital of France. @@ -10,7 +7,9 @@ import { */ export async function generate(): Promise { const client = new OpenAiChatClient({ modelName: 'gpt-35-turbo' }); - const response = await client.generate([[new HumanMessage('What is the capital of France?')]]); + const response = await client.generate([ + [new HumanMessage('What is the capital of France?')] + ]); return response.generations[0][0].text; } @@ -19,7 +18,9 @@ export async function generate(): Promise { * @returns An embedding vector. 
*/ export async function embedQuery(): Promise { - const client = new OpenAiEmbeddingClient({ modelName: 'text-embedding-ada-002' }); + const client = new OpenAiEmbeddingClient({ + modelName: 'text-embedding-ada-002' + }); const response = await client.embedQuery('Hello, world!'); return response; From f80afccfd4ae93f7c2deb93b0394aac2db8aa45a Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 13 Sep 2024 14:56:41 +0200 Subject: [PATCH 53/95] merge and rename --- packages/langchain/src/openai/chat.ts | 10 +++++----- packages/langchain/src/openai/embedding.ts | 4 ++-- packages/langchain/src/openai/types.ts | 10 +++++----- packages/langchain/src/openai/util.test.ts | 15 +++++++++++++++ packages/langchain/src/openai/util.ts | 16 ++++++++-------- 5 files changed, 35 insertions(+), 20 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 2d05c8e5c..5b7643a7b 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -4,16 +4,16 @@ import type { ChatResult } from '@langchain/core/outputs'; import { AzureChatOpenAI, AzureOpenAI } from '@langchain/openai'; import { OpenAiChatClient as OpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; -import type { OpenAIChatModelInput, OpenAIChatCallOptions } from './types.js'; +import type { OpenAiChatModelInput, OpenAiChatCallOptions } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. */ export class OpenAiChatClient extends AzureChatOpenAI { - declare CallOptions: OpenAIChatCallOptions; + declare CallOptions: OpenAiChatCallOptions; private openAiChatClient: OpenAiChatClientBase; - constructor(fields: OpenAIChatModelInput) { + constructor(fields: OpenAiChatModelInput) { const defaultValues = new AzureOpenAI(); const stop = fields.stop ? 
Array.isArray(fields.stop) @@ -33,8 +33,8 @@ export class OpenAiChatClient extends AzureChatOpenAI { this.openAiChatClient = new OpenAiChatClientBase({ ...fields }); } - override get callKeys(): (keyof OpenAIChatCallOptions)[] { - return [...(super.callKeys as (keyof OpenAIChatCallOptions)[])]; + override get callKeys(): (keyof OpenAiChatCallOptions)[] { + return [...(super.callKeys as (keyof OpenAiChatCallOptions)[])]; } override get lc_secrets(): { [key: string]: string } | undefined { diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 139ce0723..0ac1f15ca 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -5,7 +5,7 @@ import { OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; import { chunkArray } from './util.js'; -import { OpenAIEmbeddingInput } from './types.js'; +import { OpenAiEmbeddingInput } from './types.js'; /** * OpenAI GPT Language Model Wrapper to embed texts. @@ -13,7 +13,7 @@ import { OpenAIEmbeddingInput } from './types.js'; export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings { private btpOpenAIClient: OpenAiEmbeddingClientBase; - constructor(fields: OpenAIEmbeddingInput) { + constructor(fields: OpenAiEmbeddingInput) { // overrides the apikey value as it is not applicable in BTP super({ ...fields, azureOpenAIApiKey: 'dummy' }); diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 7f71fadcd..573c9ba41 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -13,9 +13,9 @@ import type { import type { ConfigurationOptions } from '@sap-ai-sdk/ai-api'; /** - * Input for Text generation for OpenAI GPT. + * Input for Text generation for OpenAi GPT. 
*/ -export type OpenAIChatModelInput = Omit< +export type OpenAiChatModelInput = Omit< OpenAIChatInput, | 'frequencyPenalty' | 'presencePenalty' @@ -38,7 +38,7 @@ export type OpenAIChatModelInput = Omit< /** * Chat Call options. */ -export interface OpenAIChatCallOptions +export interface OpenAiChatCallOptions extends Omit< ChatOpenAICallOptions, | 'tool_choice' @@ -54,9 +54,9 @@ export interface OpenAIChatCallOptions > {} /** - * Input for Text generation for OpenAI GPT. + * Input for Text generation for OpenAi GPT. */ -export type OpenAIEmbeddingInput = Omit< +export type OpenAiEmbeddingInput = Omit< OpenAIEmbeddingsParams, 'modelName' | 'model' | 'azureOpenAIApiKey' | 'apiKey' > & diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index ef4c0ae14..5394e1807 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,2 +1,17 @@ // mapResponseToChatResult // mapLangchainToAiClient + +describe('Mapping Functions', () => { + const testObject = {}; + it('should complete a chat', async () => { + const result = await generate(); + expect(result).toBeDefined(); + expect(result).toContain('Paris'); + }); + + it('should compute an embedding vector', async () => { + const result = await embedQuery(); + expect(result).toBeDefined(); + expect(result).not.toHaveLength(0); + }); + }); diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index c98364825..6bba31161 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -16,7 +16,7 @@ import type { } from '@sap-ai-sdk/foundation-models'; import { zodToJsonSchema } from 'zod-to-json-schema'; import { OpenAiChatClient } from './chat.js'; -import { OpenAIChatCallOptions } from './types.js'; +import { OpenAiChatCallOptions } from './types.js'; /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. 
@@ -24,7 +24,7 @@ import { OpenAIChatCallOptions } from './types.js'; * @returns The OpenAI Chat Completion Function. * @internal */ -export function mapToolToOpenAIFunction( +export function mapToolToOpenAiFunction( tool: StructuredTool ): OpenAiChatCompletionFunction { return { @@ -40,7 +40,7 @@ export function mapToolToOpenAIFunction( * @returns The OpenAI Chat Completion Tool. * @internal */ -export function mapToolToOpenAITool( +export function mapToolToOpenAiTool( tool: StructuredTool ): OpenAiChatCompletionTool { return { @@ -129,7 +129,7 @@ export function mapResponseToChatResult( * @returns The OpenAI Chat Message. * @internal */ -export function mapBaseMessageToOpenAIChatMessage( +export function mapBaseMessageToOpenAiChatMessage( message: BaseMessage ): OpenAiChatMessage { return { @@ -185,11 +185,11 @@ export function chunkArray(arr: string[], chunkSize: number): string[][] { */ export function mapLangchainToAiClient( client: OpenAiChatClient, - options: OpenAIChatCallOptions, + options: OpenAiChatCallOptions, messages: BaseMessage[] ): OpenAiChatCompletionParameters { return { - messages: messages.map(mapBaseMessageToOpenAIChatMessage), + messages: messages.map(mapBaseMessageToOpenAiChatMessage), max_tokens: client.maxTokens === -1 ? undefined : client.maxTokens, temperature: client.temperature, top_p: client.topP, @@ -199,10 +199,10 @@ export function mapLangchainToAiClient( presence_penalty: client.presencePenalty, frequency_penalty: client.frequencyPenalty, functions: isStructuredToolArray(options?.functions) - ? options?.functions.map(mapToolToOpenAIFunction) + ? options?.functions.map(mapToolToOpenAiFunction) : options?.functions, tools: isStructuredToolArray(options?.tools) - ? options?.tools.map(mapToolToOpenAITool) + ? 
options?.tools.map(mapToolToOpenAiTool) : options?.tools, tool_choice: options?.tool_choice, response_format: options?.response_format, From cd81c734911963c9f0fbd081888b914335f49856 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 13 Sep 2024 15:01:09 +0200 Subject: [PATCH 54/95] update types --- packages/langchain/src/openai/types.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 573c9ba41..1f115cdae 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -7,9 +7,11 @@ import type { } from '@langchain/openai'; import type { OpenAiChatCompletionParameters, - OpenAiChatModel, - OpenAiEmbeddingModel } from '@sap-ai-sdk/foundation-models'; +import type { + AzureOpenAiChatModel, + AzureOpenAiEmbeddingModel +} from '@sap-ai-sdk/core'; import type { ConfigurationOptions } from '@sap-ai-sdk/ai-api'; /** @@ -33,7 +35,7 @@ export type OpenAiChatModelInput = Omit< > & Omit & BaseChatModelParams & - ConfigurationOptions; + ConfigurationOptions; /** * Chat Call options. 
@@ -60,5 +62,5 @@ export type OpenAiEmbeddingInput = Omit< OpenAIEmbeddingsParams, 'modelName' | 'model' | 'azureOpenAIApiKey' | 'apiKey' > & - ConfigurationOptions & + ConfigurationOptions & BaseLLMParams; From 115d6e9fc62508c3919ad95f66e03c8f5b8763b4 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 13 Sep 2024 15:23:08 +0200 Subject: [PATCH 55/95] start with snapshot tests --- packages/langchain/src/openai/util.test.ts | 26 +++++++++++++--------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index 5394e1807..7c8bbc49e 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,17 +1,23 @@ // mapResponseToChatResult // mapLangchainToAiClient +import { OpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models'; +import { parseMockResponse } from '../../../../test-util/mock-http.js'; +import { mapResponseToChatResult } from './util.js'; + describe('Mapping Functions', () => { - const testObject = {}; - it('should complete a chat', async () => { - const result = await generate(); - expect(result).toBeDefined(); - expect(result).toContain('Paris'); + const openAiMockResponse = parseMockResponse( + 'foundation-models', + 'openai-chat-completion-success-response.json' + ); + it('should parse an OpenAi response to a (Langchain) chat response', async () => { + const result = mapResponseToChatResult(openAiMockResponse); + expect(result).toMatchInlineSnapshot(); }); - it('should compute an embedding vector', async () => { - const result = await embedQuery(); - expect(result).toBeDefined(); - expect(result).not.toHaveLength(0); - }); + // it('should compute an embedding vector', async () => { + // const result = await embedQuery(); + // expect(result).toBeDefined(); + // expect(result).not.toHaveLength(0); + // }); }); From 87a7cde1dc2279af660e570fac19e53d56aff30a Mon Sep 17 00:00:00 2001 From: Tom Frenken 
Date: Fri, 13 Sep 2024 17:23:57 +0200 Subject: [PATCH 56/95] add another mapping test --- .../src/openai/openai-response.test.ts | 6 +- .../openai/__snapshots__/util.test.ts.snap | 50 +++++++++++++++ packages/langchain/src/openai/chat.ts | 6 +- packages/langchain/src/openai/embedding.ts | 2 +- packages/langchain/src/openai/util.test.ts | 64 ++++++++++++++----- 5 files changed, 106 insertions(+), 22 deletions(-) create mode 100644 packages/langchain/src/openai/__snapshots__/util.test.ts.snap diff --git a/packages/foundation-models/src/openai/openai-response.test.ts b/packages/foundation-models/src/openai/openai-response.test.ts index b108ff157..9ff6f7979 100644 --- a/packages/foundation-models/src/openai/openai-response.test.ts +++ b/packages/foundation-models/src/openai/openai-response.test.ts @@ -1,6 +1,6 @@ -import { parseMockResponse } from '../../../../test-util/mock-http'; -import { OpenAiChatCompletionResponse } from './openai-response'; -import { OpenAiChatCompletionOutput } from './openai-types'; +import { parseMockResponse } from '../../../../test-util/mock-http.js'; +import { OpenAiChatCompletionResponse } from './openai-response.js'; +import { OpenAiChatCompletionOutput } from './openai-types.js'; describe('OpenAI response', () => { const mockResponse = parseMockResponse( diff --git a/packages/langchain/src/openai/__snapshots__/util.test.ts.snap b/packages/langchain/src/openai/__snapshots__/util.test.ts.snap new file mode 100644 index 000000000..6556ecb33 --- /dev/null +++ b/packages/langchain/src/openai/__snapshots__/util.test.ts.snap @@ -0,0 +1,50 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Mapping Functions should parse an OpenAi response to a (Langchain) chat response 1`] = ` +{ + "generations": [ + { + "generationInfo": { + "finish_reason": "stop", + "function_call": undefined, + "index": 0, + "tool_calls": undefined, + }, + "message": { + "id": [ + "langchain_core", + "messages", + "AIMessage", + ], + "kwargs": { + 
"additional_kwargs": { + "finish_reason": "stop", + "function_call": undefined, + "index": 0, + "tool_call_id": "", + "tool_calls": undefined, + }, + "content": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.", + "invalid_tool_calls": [], + "response_metadata": {}, + "tool_calls": [], + }, + "lc": 1, + "type": "constructor", + }, + "text": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.", + }, + ], + "llmOutput": { + "created": 1725457796, + "id": "chatcmpl-A3kgOwg9B6j87n0IkoCFCUCxRSwQZ", + "model": "gpt-4-32k", + "object": "chat.completion", + "tokenUsage": { + "completionTokens": 22, + "promptTokens": 15, + "totalTokens": 37, + }, + }, +} +`; diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 5b7643a7b..cee8f8b61 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -14,7 +14,7 @@ export class OpenAiChatClient extends AzureChatOpenAI { private openAiChatClient: OpenAiChatClientBase; constructor(fields: OpenAiChatModelInput) { - const defaultValues = new AzureOpenAI(); + const defaultValues = new AzureOpenAI({ apiKey: 'dummy' }); const stop = fields.stop ? Array.isArray(fields.stop) ? 
fields.stop @@ -25,8 +25,8 @@ export class OpenAiChatClient extends AzureChatOpenAI { ...fields, stop, // overrides the apikey values as they are not applicable for BTP - azureOpenAIApiKey: 'dummy', - openAIApiKey: 'dummy', + azureOpenAIApiKey: undefined, + openAIApiKey: undefined, apiKey: 'dummy' }); diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 0ac1f15ca..6c3589318 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -15,7 +15,7 @@ export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings { constructor(fields: OpenAiEmbeddingInput) { // overrides the apikey value as it is not applicable in BTP - super({ ...fields, azureOpenAIApiKey: 'dummy' }); + super({ ...fields, apiKey: 'dummy', azureOpenAIApiKey: undefined }); this.btpOpenAIClient = new OpenAiEmbeddingClientBase({ ...fields }); } diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index 7c8bbc49e..0e85f80c4 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,23 +1,57 @@ // mapResponseToChatResult // mapLangchainToAiClient -import { OpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models'; -import { parseMockResponse } from '../../../../test-util/mock-http.js'; +import { OpenAiChatCompletionOutput , OpenAiChatClient as OpenAiChatClientBase, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; +import { jest } from '@jest/globals'; +import nock from 'nock'; +import { mockClientCredentialsGrantCall, mockInference, parseMockResponse } from '../../../../test-util/mock-http.js'; import { mapResponseToChatResult } from './util.js'; +import { OpenAiChatClient } from './chat.js'; describe('Mapping Functions', () => { - const openAiMockResponse = parseMockResponse( - 'foundation-models', - 'openai-chat-completion-success-response.json' + const openAiMockResponse = 
parseMockResponse( + 'foundation-models', + 'openai-chat-completion-success-response.json' + ); + + const chatCompletionEndpoint = { + url: 'inference/deployments/1234/chat/completions', + apiVersion: '2024-02-01' + }; + + beforeEach(() => { + mockClientCredentialsGrantCall(); + }); + + afterEach(() => { + nock.cleanAll(); + }); + + it('should parse an OpenAi response to a (Langchain) chat response', async () => { + const result = mapResponseToChatResult(openAiMockResponse); + expect(result).toMatchSnapshot(); + }); + + it('should parse a Langchain input to an ai sdk input', async () => { + const prompt = { + input: ['AI is fascinating'] + } as OpenAiEmbeddingParameters; + + mockInference( + { + data: prompt + }, + { + data: openAiMockResponse, + status: 200 + }, + chatCompletionEndpoint ); - it('should parse an OpenAi response to a (Langchain) chat response', async () => { - const result = mapResponseToChatResult(openAiMockResponse); - expect(result).toMatchInlineSnapshot(); - }); - - // it('should compute an embedding vector', async () => { - // const result = await embedQuery(); - // expect(result).toBeDefined(); - // expect(result).not.toHaveLength(0); - // }); + + const client = new OpenAiChatClient({ deploymentId: '1234' }); + const runSpy = jest.spyOn(OpenAiChatClientBase.prototype, 'run'); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const response = await client.invoke('Test'); + expect(runSpy).toHaveBeenCalled(); }); +}); From 43a4db817d2b786a0f176868a877083c71f2680d Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 13 Sep 2024 17:41:57 +0200 Subject: [PATCH 57/95] finish unit tests --- packages/langchain/src/openai/util.test.ts | 74 +++++++++++++++++----- 1 file changed, 57 insertions(+), 17 deletions(-) diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index 0e85f80c4..a4a3f0d74 100644 --- a/packages/langchain/src/openai/util.test.ts +++ 
b/packages/langchain/src/openai/util.test.ts @@ -1,24 +1,69 @@ // mapResponseToChatResult // mapLangchainToAiClient -import { OpenAiChatCompletionOutput , OpenAiChatClient as OpenAiChatClientBase, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; +import { OpenAiChatClient as OpenAiChatClientBase, OpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models'; import { jest } from '@jest/globals'; import nock from 'nock'; +import { HumanMessage } from '@langchain/core/messages'; import { mockClientCredentialsGrantCall, mockInference, parseMockResponse } from '../../../../test-util/mock-http.js'; import { mapResponseToChatResult } from './util.js'; import { OpenAiChatClient } from './chat.js'; -describe('Mapping Functions', () => { - const openAiMockResponse = parseMockResponse( - 'foundation-models', - 'openai-chat-completion-success-response.json' - ); +const openAiMockResponse = parseMockResponse( + 'foundation-models', + 'openai-chat-completion-success-response.json' +); + +const chatCompletionEndpoint = { + url: 'inference/deployments/1234/chat/completions', + apiVersion: '2024-02-01' +}; + +const prompt = { + messages: [ + { + role: 'user', + content: 'Where is the deepest place on earth located', + tool_call_id: '' + } + ], + max_tokens: 256, + temperature: 0.7, + top_p: 1, + n: 1, + presence_penalty: 0, + frequency_penalty: 0 +}; - const chatCompletionEndpoint = { - url: 'inference/deployments/1234/chat/completions', - apiVersion: '2024-02-01' - }; +const langchainPrompt = new HumanMessage('Where is the deepest place on earth located'); +const request = { + frequency_penalty: 0, + functions: undefined, + logit_bias: undefined, + max_tokens: 256, + messages: [ + { + content: 'Where is the deepest place on earth located', + function_call: undefined, + name: undefined, + role: 'user', + tool_call_id: '', + tool_calls: undefined + } + ], + n: 1, + presence_penalty: 0, + response_format: undefined, + seed: undefined, + stop: undefined, + 
temperature: 0.7, + tool_choice: undefined, + tools: undefined, + top_p: 1 +}; + +describe('Mapping Functions', () => { beforeEach(() => { mockClientCredentialsGrantCall(); }); @@ -33,10 +78,6 @@ describe('Mapping Functions', () => { }); it('should parse a Langchain input to an ai sdk input', async () => { - const prompt = { - input: ['AI is fascinating'] - } as OpenAiEmbeddingParameters; - mockInference( { data: prompt @@ -50,8 +91,7 @@ describe('Mapping Functions', () => { const client = new OpenAiChatClient({ deploymentId: '1234' }); const runSpy = jest.spyOn(OpenAiChatClientBase.prototype, 'run'); - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const response = await client.invoke('Test'); - expect(runSpy).toHaveBeenCalled(); + await client.generate([[langchainPrompt]]); + expect(runSpy).toHaveBeenCalledWith(request); }); }); From 1902dec47ae395fdaed551b09bf8539a610e586f Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 13 Sep 2024 19:07:15 +0200 Subject: [PATCH 58/95] lint --- packages/langchain/src/openai/types.ts | 4 +--- packages/langchain/src/openai/util.test.ts | 15 ++++++++++++--- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 1f115cdae..561649762 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -5,9 +5,7 @@ import type { OpenAIChatInput, OpenAIEmbeddingsParams } from '@langchain/openai'; -import type { - OpenAiChatCompletionParameters, -} from '@sap-ai-sdk/foundation-models'; +import type { OpenAiChatCompletionParameters } from '@sap-ai-sdk/foundation-models'; import type { AzureOpenAiChatModel, AzureOpenAiEmbeddingModel diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index a4a3f0d74..8ce5cc62a 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,11 +1,18 @@ // 
mapResponseToChatResult // mapLangchainToAiClient -import { OpenAiChatClient as OpenAiChatClientBase, OpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models'; +import { + OpenAiChatClient as OpenAiChatClientBase, + OpenAiChatCompletionOutput +} from '@sap-ai-sdk/foundation-models'; import { jest } from '@jest/globals'; import nock from 'nock'; import { HumanMessage } from '@langchain/core/messages'; -import { mockClientCredentialsGrantCall, mockInference, parseMockResponse } from '../../../../test-util/mock-http.js'; +import { + mockClientCredentialsGrantCall, + mockInference, + parseMockResponse +} from '../../../../test-util/mock-http.js'; import { mapResponseToChatResult } from './util.js'; import { OpenAiChatClient } from './chat.js'; @@ -35,7 +42,9 @@ const prompt = { frequency_penalty: 0 }; -const langchainPrompt = new HumanMessage('Where is the deepest place on earth located'); +const langchainPrompt = new HumanMessage( + 'Where is the deepest place on earth located' +); const request = { frequency_penalty: 0, From 2625cd226c761575ccefdc4e2e81def663bf4568 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Fri, 13 Sep 2024 19:34:51 +0200 Subject: [PATCH 59/95] fix exports --- packages/langchain/src/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index 191b8f8ef..f279b95ae 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1,6 +1,6 @@ export { OpenAiChatClient, OpenAiEmbeddingClient } from './openai/index.js'; export type { - OpenAIChatModelInput, - OpenAIEmbeddingInput, - OpenAIChatCallOptions + OpenAiChatModelInput, + OpenAiEmbeddingInput, + OpenAiChatCallOptions } from './openai/index.js'; From bc66a45955c715fa7c12d96cf4fc8203481dcf53 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 16 Sep 2024 15:30:26 +0200 Subject: [PATCH 60/95] update readme --- packages/langchain/README.md | 17 ++++++++--------- 
packages/langchain/package.json | 7 +++---- packages/langchain/src/openai/embedding.ts | 4 ++-- packages/langchain/src/openai/types.ts | 6 +++--- packages/langchain/src/openai/util.ts | 17 ----------------- pnpm-lock.yaml | 3 +++ sample-code/src/langchain-openai.ts | 4 +--- 7 files changed, 20 insertions(+), 38 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 82e7e4aed..0eb2f2a4c 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -1,4 +1,4 @@ -# @sap-ai-sdk/langchain +# @sap-ai-sdk/LangChain This package contains Langchain compliant models, based on the @sap-ai-sdk clients. @@ -21,9 +21,9 @@ $ npm install @langchain/openai // if you want to use OpenAI models ## Usage -All client's comply with [Langchain's interface](https://python.langchain.com/v0.2/api_reference/openai/chat_models/langchain_openai.chat_models.azure.AzureChatOpenAI.html#langchain_openai.chat_models.azure.AzureChatOpenAI), therefore you should be able to use them as per usual. +All clients comply with [LangChain's interface](https://python.langchain.com/v0.2/api_reference/openai/chat_models/langchain_openai.chat_models.azure.AzureChatOpenAI.html#langchain_openai.chat_models.azure.AzureChatOpenAI). -The only difference is in the initialization of the client, where you have th option to pass either: +To initialize the client, you can pass either: ```ts modelName: string, @@ -32,7 +32,7 @@ The only difference is in the initialization of the client, where you have th op ...others ``` -or +or alternatively: ```ts deploymentId: string, @@ -40,20 +40,19 @@ or ...others ``` -If you pass API keys they are ignored, since you're not inteded to call the vendor's endpoints directly. +If you pass API keys, they are ignored, as it is not intended to call the vendor's endpoints directly. Instead, the credentials in the binding are used to call SAP's LLM Proxy. ### OpenAI -We offer two types of clients for OpenAI models. 
-Currenty these are chat and embedding models. +We offer two types of clients for OpenAI models: chat and embedding. -All clients assume the same set of default values as [Langchain's default OpenAI client](https://www.npmjs.com/package/@langchain/openai) does. +All clients assume the same set of default values as [LangChain's default OpenAI client](https://www.npmjs.com/package/@langchain/openai) does. #### Chat There are two common APIs, `.invoke()` for simple text completion and `.generate()` for chat completion. -You can also combine them with the usual Langchain functionality, e.g. prompt templates. +You can also combine them with the usual LangChain functionality, e.g. prompt templates. A simple text completion might look like: diff --git a/packages/langchain/package.json b/packages/langchain/package.json index f7d64d63d..4ba85204c 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -5,9 +5,7 @@ "license": "Apache-2.0", "keywords": [ "sap-ai-sdk", - "gen-ai-hub", - "orchestration", - "llm-access" + "langchain" ], "type": "module", "main": "./dist/index.js", @@ -32,7 +30,8 @@ "@sap-ai-sdk/ai-api": "workspace:^", "@sap-ai-sdk/foundation-models": "workspace:^", "@langchain/core": "^0.2.30", - "zod-to-json-schema": "^3.23.2" + "zod-to-json-schema": "^3.23.2", + "@sap-cloud-sdk/util": "^3.20.0" }, "peerDependencies": { "@langchain/openai": "^0.2.8" diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 6c3589318..82e9c1595 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -4,7 +4,7 @@ import { OpenAiEmbeddingOutput, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; -import { chunkArray } from './util.js'; +import { splitInChunks } from '@sap-cloud-sdk/util'; import { OpenAiEmbeddingInput } from './types.js'; /** @@ -21,7 +21,7 @@ export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings { } override 
async embedDocuments(documents: string[]): Promise { - const chunkedPrompts = chunkArray( + const chunkedPrompts = splitInChunks( this.stripNewLines ? documents.map(t => t.replace(/\n/g, ' ')) : documents, diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 561649762..bab272671 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -13,7 +13,7 @@ import type { import type { ConfigurationOptions } from '@sap-ai-sdk/ai-api'; /** - * Input for Text generation for OpenAi GPT. + * Input type for OpenAI Chat models. */ export type OpenAiChatModelInput = Omit< OpenAIChatInput, @@ -36,7 +36,7 @@ export type OpenAiChatModelInput = Omit< ConfigurationOptions; /** - * Chat Call options. + * Chat model call options for OpenAI. */ export interface OpenAiChatCallOptions extends Omit< @@ -54,7 +54,7 @@ export interface OpenAiChatCallOptions > {} /** - * Input for Text generation for OpenAi GPT. + * Input type for OpenAI Embedding models. */ export type OpenAiEmbeddingInput = Omit< OpenAIEmbeddingsParams, diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 6bba31161..2d1b989ac 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -158,23 +158,6 @@ export function isStructuredToolArray( ); } -/** - * Chunk an array into smaller arrays of specified chunk size. - * @param arr - Input array to be chunked. - * @param chunkSize - Size of each chunk. - * @returns Array of chunks. 
- * @internal - */ -export function chunkArray(arr: string[], chunkSize: number): string[][] { - return arr.reduce((chunks, elem, index) => { - const chunkIndex = Math.floor(index / chunkSize); - const chunk = chunks[chunkIndex] || []; - - chunks[chunkIndex] = chunk.concat([elem]); - return chunks; - }, [] as string[][]); -} - /** * Maps the langchain's input interface to our own client's input interface * @param client The Langchain OpenAI client diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a2792e6d7..1fe401a9c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -166,6 +166,9 @@ importers: '@sap-ai-sdk/foundation-models': specifier: workspace:^ version: link:../foundation-models + '@sap-cloud-sdk/util': + specifier: ^3.20.0 + version: 3.20.0 zod-to-json-schema: specifier: ^3.23.2 version: 3.23.2(zod@3.23.8) diff --git a/sample-code/src/langchain-openai.ts b/sample-code/src/langchain-openai.ts index bd36990d1..7500bf9c7 100644 --- a/sample-code/src/langchain-openai.ts +++ b/sample-code/src/langchain-openai.ts @@ -21,7 +21,5 @@ export async function embedQuery(): Promise { const client = new OpenAiEmbeddingClient({ modelName: 'text-embedding-ada-002' }); - const response = await client.embedQuery('Hello, world!'); - - return response; + return client.embedQuery('Hello, world!'); } From a62dba0917cd42bda7ecc25b257237eda9600036 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 16 Sep 2024 16:45:45 +0200 Subject: [PATCH 61/95] add ultility and merge main --- packages/langchain/README.md | 6 +----- packages/langchain/src/index.ts | 2 +- packages/langchain/src/openai/chat.ts | 13 ++++--------- packages/langchain/src/openai/embedding.ts | 5 ++--- packages/langchain/src/openai/util.test.ts | 4 ++-- packages/langchain/src/openai/util.ts | 20 ++++++++++++++++---- 6 files changed, 26 insertions(+), 24 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 0eb2f2a4c..cc787984b 100644 --- a/packages/langchain/README.md +++ 
b/packages/langchain/README.md @@ -13,11 +13,7 @@ $ npm install @langchain/openai // if you want to use OpenAI models ## Pre-requisites - [Enable the AI Core service in BTP](https://help.sap.com/docs/sap-ai-core/sap-ai-core-service-guide/initial-setup). -- Project configured with Node.js v20 or higher and native ESM support enabled. -- For testing your application locally: - - Download a service key for your AI Core service instance. - - Create a `.env` file in the sample-code directory. - - Add an entry `AICORE_SERVICE_KEY=''`. +- Ensure the project is configured with Node.js v20 or higher, along with native ESM support. ## Usage diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index f279b95ae..75d19020b 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1,4 +1,4 @@ -export { OpenAiChatClient, OpenAiEmbeddingClient } from './openai/index.js'; +export { AzureOpenAiChatClient, AzureOpenAiEmbeddingClient } from './openai/index.js'; export type { OpenAiChatModelInput, OpenAiEmbeddingInput, diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index cee8f8b61..4546f3375 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -3,31 +3,26 @@ import { BaseMessage } from '@langchain/core/messages'; import type { ChatResult } from '@langchain/core/outputs'; import { AzureChatOpenAI, AzureOpenAI } from '@langchain/openai'; import { OpenAiChatClient as OpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; -import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; +import { mapLangchainToAiClient, mapResponseToChatResult, toArrayOrUndefined } from './util.js'; import type { OpenAiChatModelInput, OpenAiChatCallOptions } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. 
*/ -export class OpenAiChatClient extends AzureChatOpenAI { +export class AzureOpenAiChatClient extends AzureChatOpenAI { declare CallOptions: OpenAiChatCallOptions; private openAiChatClient: OpenAiChatClientBase; constructor(fields: OpenAiChatModelInput) { const defaultValues = new AzureOpenAI({ apiKey: 'dummy' }); - const stop = fields.stop - ? Array.isArray(fields.stop) - ? fields.stop - : [fields.stop] - : defaultValues.stop; + const stop = toArrayOrUndefined(fields.stop); super({ ...defaultValues, ...fields, stop, // overrides the apikey values as they are not applicable for BTP azureOpenAIApiKey: undefined, - openAIApiKey: undefined, - apiKey: 'dummy' + openAIApiKey: undefined }); this.openAiChatClient = new OpenAiChatClientBase({ ...fields }); diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 82e9c1595..ffc5610f8 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -10,7 +10,7 @@ import { OpenAiEmbeddingInput } from './types.js'; /** * OpenAI GPT Language Model Wrapper to embed texts. 
*/ -export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings { +export class AzureOpenAiEmbeddingClient extends AzureOpenAIEmbeddings { private btpOpenAIClient: OpenAiEmbeddingClientBase; constructor(fields: OpenAiEmbeddingInput) { @@ -45,9 +45,8 @@ export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings { private async createEmbedding( query: OpenAiEmbeddingParameters ): Promise { - const res = await this.caller.callWithOptions({}, () => + return this.caller.callWithOptions({}, () => this.btpOpenAIClient.run(query) ); - return res; } } diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index 8ce5cc62a..e00d191c4 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -14,7 +14,7 @@ import { parseMockResponse } from '../../../../test-util/mock-http.js'; import { mapResponseToChatResult } from './util.js'; -import { OpenAiChatClient } from './chat.js'; +import { AzureOpenAiChatClient } from './chat.js'; const openAiMockResponse = parseMockResponse( 'foundation-models', @@ -98,7 +98,7 @@ describe('Mapping Functions', () => { chatCompletionEndpoint ); - const client = new OpenAiChatClient({ deploymentId: '1234' }); + const client = new AzureOpenAiChatClient({ deploymentId: '1234' }); const runSpy = jest.spyOn(OpenAiChatClientBase.prototype, 'run'); await client.generate([[langchainPrompt]]); expect(runSpy).toHaveBeenCalledWith(request); diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 2d1b989ac..2a30ec570 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -15,7 +15,7 @@ import type { OpenAiChatCompletionParameters } from '@sap-ai-sdk/foundation-models'; import { zodToJsonSchema } from 'zod-to-json-schema'; -import { OpenAiChatClient } from './chat.js'; +import { AzureOpenAiChatClient } from './chat.js'; import { OpenAiChatCallOptions } from './types.js'; /** 
@@ -143,6 +143,18 @@ export function mapBaseMessageToOpenAiChatMessage( } as OpenAiChatMessage; } +/** + * Converts a value to an array or returns undefined. + * @param value - The value to convert. + * @returns The value as an array, undefined if the input is falsy, or the original array if input is already an array. + */ +export function toArrayOrUndefined(value?: T | T[]): T[] | undefined { + if(value === undefined) { + return undefined; + } + return Array.isArray(value) ? value : [value]; +} + /** * Checks if a given array is a structured tool array. * @param tools - The array to check. @@ -159,15 +171,15 @@ export function isStructuredToolArray( } /** - * Maps the langchain's input interface to our own client's input interface + * Maps Langchain's input interface to our own client's input interface * @param client The Langchain OpenAI client * @param options The Langchain call options * @param messages The messages to be send - * @returns A AI SDK compatibile request + * @returns An AI SDK compatibile request * @internal */ export function mapLangchainToAiClient( - client: OpenAiChatClient, + client: AzureOpenAiChatClient, options: OpenAiChatCallOptions, messages: BaseMessage[] ): OpenAiChatCompletionParameters { From 2d41b661296b92ff28927fa5483764bcaf8998cf Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 16 Sep 2024 17:53:39 +0200 Subject: [PATCH 62/95] bump version and other suggestions --- packages/langchain/package.json | 4 +- packages/langchain/src/index.ts | 5 +- packages/langchain/src/openai/chat.ts | 6 +- packages/langchain/src/openai/embedding.ts | 6 +- packages/langchain/src/openai/util.ts | 39 +++++----- pnpm-lock.yaml | 84 ++++++++++++++++------ sample-code/package.json | 2 +- sample-code/src/langchain-openai.ts | 6 +- sample-code/src/server.ts | 4 +- 9 files changed, 100 insertions(+), 56 deletions(-) diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 4ba85204c..4b00cf5ee 100644 --- 
a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -29,12 +29,12 @@ "dependencies": { "@sap-ai-sdk/ai-api": "workspace:^", "@sap-ai-sdk/foundation-models": "workspace:^", - "@langchain/core": "^0.2.30", + "@langchain/core": "0.3.1", "zod-to-json-schema": "^3.23.2", "@sap-cloud-sdk/util": "^3.20.0" }, "peerDependencies": { - "@langchain/openai": "^0.2.8" + "@langchain/openai": "^0.3.0" }, "peerDependenciesMeta": { "@langchain/openai": { diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index 75d19020b..8cde18617 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -1,4 +1,7 @@ -export { AzureOpenAiChatClient, AzureOpenAiEmbeddingClient } from './openai/index.js'; +export { + AzureOpenAiChatClient, + AzureOpenAiEmbeddingClient +} from './openai/index.js'; export type { OpenAiChatModelInput, OpenAiEmbeddingInput, diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 4546f3375..14eea5bb8 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -3,7 +3,11 @@ import { BaseMessage } from '@langchain/core/messages'; import type { ChatResult } from '@langchain/core/outputs'; import { AzureChatOpenAI, AzureOpenAI } from '@langchain/openai'; import { OpenAiChatClient as OpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; -import { mapLangchainToAiClient, mapResponseToChatResult, toArrayOrUndefined } from './util.js'; +import { + mapLangchainToAiClient, + mapResponseToChatResult, + toArrayOrUndefined +} from './util.js'; import type { OpenAiChatModelInput, OpenAiChatCallOptions } from './types.js'; /** diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index ffc5610f8..6fa7be803 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -11,13 +11,13 @@ import { OpenAiEmbeddingInput } from './types.js'; 
* OpenAI GPT Language Model Wrapper to embed texts. */ export class AzureOpenAiEmbeddingClient extends AzureOpenAIEmbeddings { - private btpOpenAIClient: OpenAiEmbeddingClientBase; + private btpOpenAiClient: OpenAiEmbeddingClientBase; constructor(fields: OpenAiEmbeddingInput) { // overrides the apikey value as it is not applicable in BTP super({ ...fields, apiKey: 'dummy', azureOpenAIApiKey: undefined }); - this.btpOpenAIClient = new OpenAiEmbeddingClientBase({ ...fields }); + this.btpOpenAiClient = new OpenAiEmbeddingClientBase({ ...fields }); } override async embedDocuments(documents: string[]): Promise { @@ -46,7 +46,7 @@ export class AzureOpenAiEmbeddingClient extends AzureOpenAIEmbeddings { query: OpenAiEmbeddingParameters ): Promise { return this.caller.callWithOptions({}, () => - this.btpOpenAIClient.run(query) + this.btpOpenAiClient.run(query) ); } } diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 2a30ec570..ab7b767fc 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -20,8 +20,8 @@ import { OpenAiChatCallOptions } from './types.js'; /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. - * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. - * @returns The OpenAI Chat Completion Function. + * @param tool - Base class for tools that accept input of any shape defined by a Zod schema. + * @returns The OpenAI chat completion function. * @internal */ export function mapToolToOpenAiFunction( @@ -36,8 +36,8 @@ export function mapToolToOpenAiFunction( /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionTool}. - * @param tool - Base class for Tools that accept input of any shape defined by a Zod schema. - * @returns The OpenAI Chat Completion Tool. + * @param tool - Base class for tools that accept input of any shape defined by a Zod schema. 
+ * @returns The OpenAI chat completion tool. * @internal */ export function mapToolToOpenAiTool( @@ -45,18 +45,14 @@ export function mapToolToOpenAiTool( ): OpenAiChatCompletionTool { return { type: 'function', - function: { - name: tool.name, - description: tool.description, - parameters: zodToJsonSchema(tool.schema) - } + function: mapToolToOpenAiFunction(tool) }; } /** - * Maps a {@link BaseMessage} to OpenAI's Message Role. + * Maps a {@link BaseMessage} to OpenAI's message role. * @param message - The message to map. - * @returns The OpenAI Message Role. + * @returns The OpenAI meessage Role. * @internal */ export function mapBaseMessageToRole( @@ -82,8 +78,8 @@ export function mapBaseMessageToRole( /** * Maps OpenAI messages to LangChain's {@link ChatResult}. - * @param res - The OpenAI Chat Completion Output. - * @returns The LangChain Chat Result. + * @param res - The OpenAI chat completion output. + * @returns The LangChain chat result. * @internal */ export function mapResponseToChatResult( @@ -124,9 +120,9 @@ export function mapResponseToChatResult( } /** - * Maps {@link BaseMessage} to OpenAI Messages. + * Maps {@link BaseMessage} to OpenAI messages. * @param message - The message to map. - * @returns The OpenAI Chat Message. + * @returns The OpenAI chat Message. * @internal */ export function mapBaseMessageToOpenAiChatMessage( @@ -149,7 +145,7 @@ export function mapBaseMessageToOpenAiChatMessage( * @returns The value as an array, undefined if the input is falsy, or the original array if input is already an array. */ export function toArrayOrUndefined(value?: T | T[]): T[] | undefined { - if(value === undefined) { + if (value === undefined) { return undefined; } return Array.isArray(value) ? 
value : [value]; @@ -164,16 +160,15 @@ export function toArrayOrUndefined(value?: T | T[]): T[] | undefined { export function isStructuredToolArray( tools?: unknown[] ): tools is StructuredTool[] { - return ( - tools !== undefined && - tools.every(tool => Array.isArray((tool as StructuredTool).lc_namespace)) + return !!tools?.every(tool => + Array.isArray((tool as StructuredTool).lc_namespace) ); } /** - * Maps Langchain's input interface to our own client's input interface - * @param client The Langchain OpenAI client - * @param options The Langchain call options + * Maps LangChain's input interface to our own client's input interface + * @param client The LangChain OpenAI client + * @param options The LangChain call options * @param messages The messages to be send * @returns An AI SDK compatibile request * @internal diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1fe401a9c..603326698 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -155,11 +155,11 @@ importers: packages/langchain: dependencies: '@langchain/core': - specifier: ^0.2.30 - version: 0.2.30(openai@4.56.1(zod@3.23.8)) + specifier: 0.3.1 + version: 0.3.1(openai@4.61.1(zod@3.23.8)) '@langchain/openai': - specifier: ^0.2.8 - version: 0.2.8 + specifier: ^0.3.0 + version: 0.3.0(@langchain/core@0.3.1(openai@4.61.1(zod@3.23.8))) '@sap-ai-sdk/ai-api': specifier: workspace:^ version: link:../ai-api @@ -211,8 +211,8 @@ importers: sample-code: dependencies: '@langchain/core': - specifier: ^0.2.30 - version: 0.2.30(openai@4.56.1(zod@3.23.8)) + specifier: 0.3.1 + version: 0.3.1(openai@4.61.1(zod@3.23.8)) '@sap-ai-sdk/ai-api': specifier: workspace:^ version: link:../packages/ai-api @@ -283,7 +283,7 @@ importers: devDependencies: '@langchain/core': specifier: ^0.2.30 - version: 0.2.30(openai@4.56.1(zod@3.23.8)) + version: 0.2.30(openai@4.61.1(zod@3.23.8)) '@sap-ai-sdk/core': specifier: workspace:^ version: link:../../packages/core @@ -766,10 +766,16 @@ packages: resolution: {integrity: 
sha512-jeLmLTxnEq9zSq0J/fMlBCMT5Ix8tbZriqNYTm3oS7CPM2uHBcRQhV3fpsh4G8FnE7Pxa4sWfrFzc2jykhlk7A==} engines: {node: '>=18'} - '@langchain/openai@0.2.8': - resolution: {integrity: sha512-p5fxEAKuR8UV9jWIxkZ6AY/vAPSYxJI0Pf/UM4T3FKk/dn99G/mAEDLhfI4pBf7B8o8TudSVyBW2hRjZqlQu7g==} + '@langchain/core@0.3.1': + resolution: {integrity: sha512-xYdTAgS9hYPt+h0/OwpyRcMB5HKR40LXutbSr2jw3hMVIOwD1DnvhnUEnWgBK4lumulVW2jrosNPyBKMhRZAZg==} engines: {node: '>=18'} + '@langchain/openai@0.3.0': + resolution: {integrity: sha512-yXrz5Qn3t9nq3NQAH2l4zZOI4ev2CFdLC5kvmi5SdW4bggRuM40SXTUAY3VRld4I5eocYfk82VbrlA+6dvN5EA==} + engines: {node: '>=18'} + peerDependencies: + '@langchain/core': '>=0.2.26 <0.4.0' + '@manypkg/find-root@1.1.0': resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} @@ -2528,6 +2534,14 @@ packages: openai: optional: true + langsmith@0.1.56-rc.1: + resolution: {integrity: sha512-XsOxlhBAlTCGR9hNEL2VSREmiz8v6czNuX3CIwec9fH9T0WbNPle8Q/7Jy/h9UCbS9vuzTjfgc4qO5Dc9cu5Ig==} + peerDependencies: + openai: '*' + peerDependenciesMeta: + openai: + optional: true + leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} @@ -2825,8 +2839,8 @@ packages: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} - openai@4.56.1: - resolution: {integrity: sha512-XMsxdjrWBYgbP6EsDIwbhkQEgeyL2C41te/QrJm8kdfho22exhTUJ/cFJSmCTToam/RSOC1BlOylHvD6i/bmsA==} + openai@4.61.1: + resolution: {integrity: sha512-jZ2WRn+f4QWZkYnrUS+xzEUIBllsGN75dUCaXmMIHcv2W9yn7O8amaReTbGHCNEYkL43vuDOcxPUWfNPUmoD3Q==} hasBin: true peerDependencies: zod: ^3.23.8 @@ -4415,13 +4429,13 @@ snapshots: '@jsdevtools/ono@7.1.3': {} - '@langchain/core@0.2.30(openai@4.56.1(zod@3.23.8))': + '@langchain/core@0.2.30(openai@4.61.1(zod@3.23.8))': dependencies: ansi-styles: 5.2.0 camelcase: 
6.3.0 decamelize: 1.2.0 js-tiktoken: 1.0.14 - langsmith: 0.1.48(@langchain/core@0.2.30(openai@4.56.1(zod@3.23.8)))(openai@4.56.1(zod@3.23.8)) + langsmith: 0.1.48(@langchain/core@0.2.30(openai@4.61.1(zod@3.23.8)))(openai@4.61.1(zod@3.23.8)) mustache: 4.2.0 p-queue: 6.6.2 p-retry: 4.6.2 @@ -4432,16 +4446,31 @@ snapshots: - langchain - openai - '@langchain/openai@0.2.8': + '@langchain/core@0.3.1(openai@4.61.1(zod@3.23.8))': dependencies: - '@langchain/core': 0.2.30(openai@4.56.1(zod@3.23.8)) + ansi-styles: 5.2.0 + camelcase: 6.3.0 + decamelize: 1.2.0 js-tiktoken: 1.0.14 - openai: 4.56.1(zod@3.23.8) + langsmith: 0.1.56-rc.1(openai@4.61.1(zod@3.23.8)) + mustache: 4.2.0 + p-queue: 6.6.2 + p-retry: 4.6.2 + uuid: 10.0.0 + zod: 3.23.8 + zod-to-json-schema: 3.23.2(zod@3.23.8) + transitivePeerDependencies: + - openai + + '@langchain/openai@0.3.0(@langchain/core@0.3.1(openai@4.61.1(zod@3.23.8)))': + dependencies: + '@langchain/core': 0.3.1(openai@4.61.1(zod@3.23.8)) + js-tiktoken: 1.0.14 + openai: 4.61.1(zod@3.23.8) zod: 3.23.8 zod-to-json-schema: 3.23.2(zod@3.23.8) transitivePeerDependencies: - encoding - - langchain '@manypkg/find-root@1.1.0': dependencies: @@ -6707,7 +6736,19 @@ snapshots: kuler@2.0.0: {} - langsmith@0.1.48(@langchain/core@0.2.30(openai@4.56.1(zod@3.23.8)))(openai@4.56.1(zod@3.23.8)): + langsmith@0.1.48(@langchain/core@0.2.30(openai@4.61.1(zod@3.23.8)))(openai@4.61.1(zod@3.23.8)): + dependencies: + '@types/uuid': 10.0.0 + commander: 10.0.1 + p-queue: 6.6.2 + p-retry: 4.6.2 + semver: 7.6.3 + uuid: 10.0.0 + optionalDependencies: + '@langchain/core': 0.2.30(openai@4.61.1(zod@3.23.8)) + openai: 4.61.1(zod@3.23.8) + + langsmith@0.1.56-rc.1(openai@4.61.1(zod@3.23.8)): dependencies: '@types/uuid': 10.0.0 commander: 10.0.1 @@ -6716,8 +6757,7 @@ snapshots: semver: 7.6.3 uuid: 10.0.0 optionalDependencies: - '@langchain/core': 0.2.30(openai@4.56.1(zod@3.23.8)) - openai: 4.56.1(zod@3.23.8) + openai: 4.61.1(zod@3.23.8) leven@3.1.0: {} @@ -7012,15 +7052,17 @@ snapshots: 
dependencies: mimic-fn: 2.1.0 - openai@4.56.1(zod@3.23.8): + openai@4.61.1(zod@3.23.8): dependencies: '@types/node': 18.19.47 '@types/node-fetch': 2.6.11 + '@types/qs': 6.9.15 abort-controller: 3.0.0 agentkeepalive: 4.5.0 form-data-encoder: 1.7.2 formdata-node: 4.4.1 node-fetch: 2.7.0 + qs: 6.13.0 optionalDependencies: zod: 3.23.8 transitivePeerDependencies: diff --git a/sample-code/package.json b/sample-code/package.json index 315a3b3a2..b49615475 100644 --- a/sample-code/package.json +++ b/sample-code/package.json @@ -27,7 +27,7 @@ "@sap-ai-sdk/foundation-models": "workspace:^", "@sap-ai-sdk/orchestration": "workspace:^", "@sap-ai-sdk/langchain": "workspace:^", - "@langchain/core": "^0.2.30", + "@langchain/core": "0.3.1", "@types/express": "^4.17.21", "express": "^4.21.0" } diff --git a/sample-code/src/langchain-openai.ts b/sample-code/src/langchain-openai.ts index 7500bf9c7..44a21c9c2 100644 --- a/sample-code/src/langchain-openai.ts +++ b/sample-code/src/langchain-openai.ts @@ -1,12 +1,12 @@ import { HumanMessage } from '@langchain/core/messages'; -import { OpenAiChatClient, OpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; +import { AzureOpenAiChatClient, AzureOpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; /** * Ask GPT about the capital of France. * @returns The answer from GPT. */ export async function generate(): Promise { - const client = new OpenAiChatClient({ modelName: 'gpt-35-turbo' }); + const client = new AzureOpenAiChatClient({ modelName: 'gpt-35-turbo' }); const response = await client.generate([ [new HumanMessage('What is the capital of France?')] ]); @@ -18,7 +18,7 @@ export async function generate(): Promise { * @returns An embedding vector. 
*/ export async function embedQuery(): Promise { - const client = new OpenAiEmbeddingClient({ + const client = new AzureOpenAiEmbeddingClient({ modelName: 'text-embedding-ada-002' }); return client.embedQuery('Hello, world!'); diff --git a/sample-code/src/server.ts b/sample-code/src/server.ts index 714d247c3..4c86eb84e 100644 --- a/sample-code/src/server.ts +++ b/sample-code/src/server.ts @@ -29,7 +29,7 @@ app.get('/llm', async (req, res) => { app.get('/embedding', async (req, res) => { try { const result = await computeEmbedding(); - if (result.length === 0) { + if (!result.length) { throw new Error('No embedding vector returned'); } res.send('Number crunching success, got a nice vector.'); @@ -77,7 +77,7 @@ app.get('/langchain/chat', async (req, res) => { app.get('/langchain/embedding', async (req, res) => { try { const result = await embedQuery(); - if (result.length === 0) { + if (!result.length) { throw new Error('No embedding vector returned'); } res.send('Number crunching success, got a nice vector.'); From 21907d1af988a698bcc9ff1c62af2d10f91f833d Mon Sep 17 00:00:00 2001 From: cloud-sdk-js Date: Mon, 16 Sep 2024 15:54:28 +0000 Subject: [PATCH 63/95] fix: Changes from lint --- sample-code/src/langchain-openai.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sample-code/src/langchain-openai.ts b/sample-code/src/langchain-openai.ts index 44a21c9c2..182facc45 100644 --- a/sample-code/src/langchain-openai.ts +++ b/sample-code/src/langchain-openai.ts @@ -1,5 +1,8 @@ import { HumanMessage } from '@langchain/core/messages'; -import { AzureOpenAiChatClient, AzureOpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; +import { + AzureOpenAiChatClient, + AzureOpenAiEmbeddingClient +} from '@sap-ai-sdk/langchain'; /** * Ask GPT about the capital of France. 
From e64243e36850cc03df42d162b840b7811a9eb2df Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Mon, 16 Sep 2024 18:35:36 +0200 Subject: [PATCH 64/95] change visibility, naming, docs --- packages/ai-api/src/index.ts | 2 +- packages/ai-api/src/utils/deployment-resolver.ts | 4 ++-- packages/langchain/src/openai/embedding.ts | 8 ++++---- packages/langchain/src/openai/types.ts | 6 +++--- packages/langchain/src/openai/util.test.ts | 3 --- packages/langchain/src/openai/util.ts | 1 + sample-code/src/server.ts | 10 ++++++---- 7 files changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/ai-api/src/index.ts b/packages/ai-api/src/index.ts index 7d54d8007..195b1ca00 100644 --- a/packages/ai-api/src/index.ts +++ b/packages/ai-api/src/index.ts @@ -5,5 +5,5 @@ export type { DeploymentIdConfiguration, ModelConfiguration, ResourceGroupConfiguration, - ConfigurationOptions + ModelDeploymentConfig } from './utils/index.js'; diff --git a/packages/ai-api/src/utils/deployment-resolver.ts b/packages/ai-api/src/utils/deployment-resolver.ts index 587088b53..b22f75cbd 100644 --- a/packages/ai-api/src/utils/deployment-resolver.ts +++ b/packages/ai-api/src/utils/deployment-resolver.ts @@ -46,13 +46,13 @@ export interface ResourceGroupConfiguration { */ export type ModelDeployment = | ModelNameT - | ConfigurationOptions; + | ModelDeploymentConfig; /** * The configuration options for a model deployment. * @typeParam ModelNameT - String literal type representing the name of the model. 
*/ -export type ConfigurationOptions = Xor< +export type ModelDeploymentConfig = Xor< ModelConfiguration, DeploymentIdConfiguration > & diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 6fa7be803..a4f5093d2 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -29,17 +29,17 @@ export class AzureOpenAiEmbeddingClient extends AzureOpenAIEmbeddings { ); const embeddings: number[][] = []; for await (const promptChunk of chunkedPrompts) { - const resArr = await this.createEmbedding({ input: promptChunk }); - resArr.data.forEach(res => embeddings.push(res.embedding)); + const embeddingResponse = await this.createEmbedding({ input: promptChunk }); + embeddingResponse.data.forEach(entry => embeddings.push(entry.embedding)); } return embeddings; } override async embedQuery(query: string): Promise { - const resArr = await this.createEmbedding({ + const embeddingResponse = await this.createEmbedding({ input: this.stripNewLines ? query.replace(/\n/g, ' ') : query }); - return resArr.data[0].embedding; + return embeddingResponse.data[0].embedding; } private async createEmbedding( diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index bab272671..74f2adca4 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -10,7 +10,7 @@ import type { AzureOpenAiChatModel, AzureOpenAiEmbeddingModel } from '@sap-ai-sdk/core'; -import type { ConfigurationOptions } from '@sap-ai-sdk/ai-api'; +import type { ModelDeploymentConfig } from '@sap-ai-sdk/ai-api'; /** * Input type for OpenAI Chat models. @@ -33,7 +33,7 @@ export type OpenAiChatModelInput = Omit< > & Omit & BaseChatModelParams & - ConfigurationOptions; + ModelDeploymentConfig; /** * Chat model call options for OpenAI. 
@@ -60,5 +60,5 @@ export type OpenAiEmbeddingInput = Omit< OpenAIEmbeddingsParams, 'modelName' | 'model' | 'azureOpenAIApiKey' | 'apiKey' > & - ConfigurationOptions & + ModelDeploymentConfig & BaseLLMParams; diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index e00d191c4..aa0683d89 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,6 +1,3 @@ -// mapResponseToChatResult -// mapLangchainToAiClient - import { OpenAiChatClient as OpenAiChatClientBase, OpenAiChatCompletionOutput diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index ab7b767fc..5204bb67c 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -143,6 +143,7 @@ export function mapBaseMessageToOpenAiChatMessage( * Converts a value to an array or returns undefined. * @param value - The value to convert. * @returns The value as an array, undefined if the input is falsy, or the original array if input is already an array. 
+ * @internal */ export function toArrayOrUndefined(value?: T | T[]): T[] | undefined { if (value === undefined) { diff --git a/sample-code/src/server.ts b/sample-code/src/server.ts index 4c86eb84e..01a96ebb3 100644 --- a/sample-code/src/server.ts +++ b/sample-code/src/server.ts @@ -30,9 +30,10 @@ app.get('/embedding', async (req, res) => { try { const result = await computeEmbedding(); if (!result.length) { - throw new Error('No embedding vector returned'); + res.status(500).send('No embedding vector returned.'); + } else { + res.send('Number crunching success, got a nice vector.'); } - res.send('Number crunching success, got a nice vector.'); } catch (error: any) { console.error(error); res @@ -78,9 +79,10 @@ app.get('/langchain/embedding', async (req, res) => { try { const result = await embedQuery(); if (!result.length) { - throw new Error('No embedding vector returned'); + res.status(500).send('No embedding vector returned.'); + } else { + res.send('Number crunching success, got a nice vector.'); } - res.send('Number crunching success, got a nice vector.'); } catch (error: any) { console.error(error); res From bf5d5d648b79c7ae39e7d82727718f52f355563d Mon Sep 17 00:00:00 2001 From: cloud-sdk-js Date: Mon, 16 Sep 2024 16:36:23 +0000 Subject: [PATCH 65/95] fix: Changes from lint --- packages/langchain/src/openai/embedding.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index a4f5093d2..9fc37004b 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -29,7 +29,9 @@ export class AzureOpenAiEmbeddingClient extends AzureOpenAIEmbeddings { ); const embeddings: number[][] = []; for await (const promptChunk of chunkedPrompts) { - const embeddingResponse = await this.createEmbedding({ input: promptChunk }); + const embeddingResponse = await this.createEmbedding({ + input: promptChunk + }); 
embeddingResponse.data.forEach(entry => embeddings.push(entry.embedding)); } return embeddings; From 198cabdffca708d96d3b48b6e1ecad6046e080b5 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 17 Sep 2024 16:23:33 +0200 Subject: [PATCH 66/95] move to basechatmodel --- packages/ai-api/src/index.ts | 1 - .../ai-api/src/utils/deployment-resolver.ts | 14 +---- packages/langchain/README.md | 2 +- packages/langchain/src/openai/chat.ts | 34 +++------- packages/langchain/src/openai/embedding.ts | 39 +++++------- packages/langchain/src/openai/types.ts | 62 +++++-------------- packages/langchain/src/openai/util.ts | 4 +- pnpm-lock.yaml | 40 ++++++------ 8 files changed, 61 insertions(+), 135 deletions(-) diff --git a/packages/ai-api/src/index.ts b/packages/ai-api/src/index.ts index 195b1ca00..0244d149f 100644 --- a/packages/ai-api/src/index.ts +++ b/packages/ai-api/src/index.ts @@ -5,5 +5,4 @@ export type { DeploymentIdConfiguration, ModelConfiguration, ResourceGroupConfiguration, - ModelDeploymentConfig } from './utils/index.js'; diff --git a/packages/ai-api/src/utils/deployment-resolver.ts b/packages/ai-api/src/utils/deployment-resolver.ts index b22f75cbd..d3c538220 100644 --- a/packages/ai-api/src/utils/deployment-resolver.ts +++ b/packages/ai-api/src/utils/deployment-resolver.ts @@ -1,4 +1,3 @@ -import { Xor } from '@sap-cloud-sdk/util'; import { type AiDeployment, DeploymentApi @@ -46,17 +45,8 @@ export interface ResourceGroupConfiguration { */ export type ModelDeployment = | ModelNameT - | ModelDeploymentConfig; - -/** - * The configuration options for a model deployment. - * @typeParam ModelNameT - String literal type representing the name of the model. - */ -export type ModelDeploymentConfig = Xor< - ModelConfiguration, - DeploymentIdConfiguration -> & - ResourceGroupConfiguration; + | ((ModelConfiguration | DeploymentIdConfiguration) & + ResourceGroupConfiguration); /** * Type guard to check if the given deployment configuration is a deployment ID configuration. 
diff --git a/packages/langchain/README.md b/packages/langchain/README.md index cc787984b..48f31d76d 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -17,7 +17,7 @@ $ npm install @langchain/openai // if you want to use OpenAI models ## Usage -All clients comply with [LangChain's interface](https://python.langchain.com/v0.2/api_reference/openai/chat_models/langchain_openai.chat_models.azure.AzureChatOpenAI.html#langchain_openai.chat_models.azure.AzureChatOpenAI). +All clients comply with [LangChain's interface](https://js.langchain.com/docs/introduction). To initialize the client, you can pass either: diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 14eea5bb8..c74f02135 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -1,51 +1,33 @@ import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; import { BaseMessage } from '@langchain/core/messages'; import type { ChatResult } from '@langchain/core/outputs'; -import { AzureChatOpenAI, AzureOpenAI } from '@langchain/openai'; import { OpenAiChatClient as OpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; +import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { mapLangchainToAiClient, - mapResponseToChatResult, - toArrayOrUndefined + mapResponseToChatResult } from './util.js'; import type { OpenAiChatModelInput, OpenAiChatCallOptions } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. 
*/ -export class AzureOpenAiChatClient extends AzureChatOpenAI { +export class AzureOpenAiChatClient extends BaseChatModel { declare CallOptions: OpenAiChatCallOptions; private openAiChatClient: OpenAiChatClientBase; - constructor(fields: OpenAiChatModelInput) { - const defaultValues = new AzureOpenAI({ apiKey: 'dummy' }); - const stop = toArrayOrUndefined(fields.stop); - super({ - ...defaultValues, - ...fields, - stop, - // overrides the apikey values as they are not applicable for BTP - azureOpenAIApiKey: undefined, - openAIApiKey: undefined - }); + super(fields); + this.openAiChatClient = new OpenAiChatClientBase(fields); + } - this.openAiChatClient = new OpenAiChatClientBase({ ...fields }); + _llmType(): string { + return 'azure_openai'; } override get callKeys(): (keyof OpenAiChatCallOptions)[] { return [...(super.callKeys as (keyof OpenAiChatCallOptions)[])]; } - override get lc_secrets(): { [key: string]: string } | undefined { - // overrides default keys as they are not applicable for BTP - return {}; - } - - override get lc_aliases(): Record { - // overrides default keys as they are not applicable for BTP - return {}; - } - override async _generate( messages: BaseMessage[], options: this['CallOptions'], diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 9fc37004b..c7425a336 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -1,46 +1,35 @@ -import { AzureOpenAIEmbeddings } from '@langchain/openai'; import { OpenAiEmbeddingClient as OpenAiEmbeddingClientBase, OpenAiEmbeddingOutput, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; -import { splitInChunks } from '@sap-cloud-sdk/util'; +import { Embeddings } from '@langchain/core/embeddings'; import { OpenAiEmbeddingInput } from './types.js'; /** * OpenAI GPT Language Model Wrapper to embed texts. 
*/ -export class AzureOpenAiEmbeddingClient extends AzureOpenAIEmbeddings { - private btpOpenAiClient: OpenAiEmbeddingClientBase; +export class AzureOpenAiEmbeddingClient extends Embeddings { + private openAiEmbeddingClient: OpenAiEmbeddingClientBase; constructor(fields: OpenAiEmbeddingInput) { - // overrides the apikey value as it is not applicable in BTP - super({ ...fields, apiKey: 'dummy', azureOpenAIApiKey: undefined }); + super(fields); - this.btpOpenAiClient = new OpenAiEmbeddingClientBase({ ...fields }); + this.openAiEmbeddingClient = new OpenAiEmbeddingClientBase(fields); } override async embedDocuments(documents: string[]): Promise { - const chunkedPrompts = splitInChunks( - this.stripNewLines - ? documents.map(t => t.replace(/\n/g, ' ')) - : documents, - this.batchSize + return Promise.all( + documents + .map(async document => (await this.createEmbedding({ input: document })).data + .map(embeddingResponse => embeddingResponse.embedding) + .flat() + ) ); - const embeddings: number[][] = []; - for await (const promptChunk of chunkedPrompts) { - const embeddingResponse = await this.createEmbedding({ - input: promptChunk - }); - embeddingResponse.data.forEach(entry => embeddings.push(entry.embedding)); - } - return embeddings; } - override async embedQuery(query: string): Promise { - const embeddingResponse = await this.createEmbedding({ - input: this.stripNewLines ? 
query.replace(/\n/g, ' ') : query - }); + override async embedQuery(input: string): Promise { + const embeddingResponse = await this.createEmbedding({ input }); return embeddingResponse.data[0].embedding; } @@ -48,7 +37,7 @@ export class AzureOpenAiEmbeddingClient extends AzureOpenAIEmbeddings { query: OpenAiEmbeddingParameters ): Promise { return this.caller.callWithOptions({}, () => - this.btpOpenAiClient.run(query) + this.openAiEmbeddingClient.run(query) ); } } diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 74f2adca4..71f03ae46 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -1,64 +1,32 @@ import type { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; import { BaseLLMParams } from '@langchain/core/language_models/llms'; +import type { OpenAiChatCompletionParameters, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; import type { - ChatOpenAICallOptions, - OpenAIChatInput, - OpenAIEmbeddingsParams -} from '@langchain/openai'; -import type { OpenAiChatCompletionParameters } from '@sap-ai-sdk/foundation-models'; -import type { - AzureOpenAiChatModel, - AzureOpenAiEmbeddingModel + AzureOpenAiChatModel } from '@sap-ai-sdk/core'; -import type { ModelDeploymentConfig } from '@sap-ai-sdk/ai-api'; +import type { ModelConfiguration, ResourceGroupConfiguration } from '@sap-ai-sdk/ai-api'; +import { BaseFunctionCallOptions, BaseLanguageModelCallOptions } from '@langchain/core/language_models/base'; /** - * Input type for OpenAI Chat models. + * Input type for OpenAI chat models. 
*/ -export type OpenAiChatModelInput = Omit< - OpenAIChatInput, - | 'frequencyPenalty' - | 'presencePenalty' - | 'topP' - | 'temperature' - | 'stop' - | 'n' - | 'modelName' - | 'model' - | 'openAIApiKey' - | 'streaming' - | 'azureOpenAIApiKey' - | 'openAIApiKey' - | 'apiKey' -> & - Omit & +export type OpenAiChatModelInput = Omit & BaseChatModelParams & - ModelDeploymentConfig; + ModelConfiguration & + ResourceGroupConfiguration; /** * Chat model call options for OpenAI. */ export interface OpenAiChatCallOptions - extends Omit< - ChatOpenAICallOptions, - | 'tool_choice' - | 'promptIndex' - | 'functions' - | 'function_call' - | 'tools' - | 'response_format' - >, - Pick< - OpenAiChatCompletionParameters, - 'tool_choice' | 'functions' | 'tools' | 'response_format' - > {} + extends Omit, + BaseLanguageModelCallOptions, + BaseFunctionCallOptions {} /** - * Input type for OpenAI Embedding models. + * Input type for OpenAI embedding models. */ -export type OpenAiEmbeddingInput = Omit< - OpenAIEmbeddingsParams, - 'modelName' | 'model' | 'azureOpenAIApiKey' | 'apiKey' -> & - ModelDeploymentConfig & +export type OpenAiEmbeddingInput = ModelConfiguration & + ResourceGroupConfiguration & + OpenAiEmbeddingParameters & BaseLLMParams; diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 5204bb67c..c3d921f1b 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -52,7 +52,7 @@ export function mapToolToOpenAiTool( /** * Maps a {@link BaseMessage} to OpenAI's message role. * @param message - The message to map. - * @returns The OpenAI meessage Role. + * @returns The OpenAI message Role. * @internal */ export function mapBaseMessageToRole( @@ -187,8 +187,6 @@ export function mapLangchainToAiClient( logit_bias: client.logitBias, n: client.n, stop: options?.stop ?? 
client.stop, - presence_penalty: client.presencePenalty, - frequency_penalty: client.frequencyPenalty, functions: isStructuredToolArray(options?.functions) ? options?.functions.map(mapToolToOpenAiFunction) : options?.functions, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 603326698..e073a632e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -258,13 +258,13 @@ importers: dependencies: '@sap-ai-sdk/ai-api': specifier: canary - version: 0.1.1-20240912013039.0 + version: 0.1.1-20240917012643.0 '@sap-ai-sdk/foundation-models': specifier: canary - version: 0.1.1-20240912013039.0 + version: 0.1.1-20240917012643.0 '@sap-ai-sdk/orchestration': specifier: canary - version: 0.1.1-20240912013039.0 + version: 0.1.1-20240917012643.0 express: specifier: ^4.21.0 version: 4.21.0 @@ -802,17 +802,17 @@ packages: resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@sap-ai-sdk/ai-api@0.1.1-20240912013039.0': - resolution: {integrity: sha512-S1rePmkw0QVrs9xopu31+B51upIpavxe7wiUy+0wfOsT2VsTvNT7TiJfEWWCnZilcwIejZUKGiuvBDTTuu530g==} + '@sap-ai-sdk/ai-api@0.1.1-20240917012643.0': + resolution: {integrity: sha512-96gQayCBFjJPKbX1bsN+9icqopTzbHdfH/tdF9YHwE+NxqpAb9aYiK8fB+mADRJoIdR0HgIOBSA47qrOqtP7IQ==} - '@sap-ai-sdk/core@0.1.1-20240912013039.0': - resolution: {integrity: sha512-LzNQ1nFHc54Y0rTELccNPl22Z+j0gEQGjgHpETPvcZc++qGnJ34VFDKcgJJH0ZGOJDuVnDtmVaOW6vMfmNwOTQ==} + '@sap-ai-sdk/core@0.1.1-20240917012643.0': + resolution: {integrity: sha512-7dPg4Ll36BytMCHEwmjRSaDnBch/g+UPFyswVe4Z/l1SL2gGwQ4TpQHECCuqTDHKvB6NzqTvrmWM3hUzxnmeUA==} - '@sap-ai-sdk/foundation-models@0.1.1-20240912013039.0': - resolution: {integrity: sha512-sbCwvklbMtQUX6VIgTiwf6BjRO2cUoiB1VUZrbzaw5q3WI72cIxXXkkyt1HHagpSgN7wXGg5STAIco6QBat9Mw==} + '@sap-ai-sdk/foundation-models@0.1.1-20240917012643.0': + resolution: {integrity: 
sha512-iP2hd/LmXKyKOn/Dh8WWI4njNMijmBmU7FuM1aKJs7opEJURO1UvCe5Hx+bV56deN75qeJ4lFMeIIYDWLqQpLQ==} - '@sap-ai-sdk/orchestration@0.1.1-20240912013039.0': - resolution: {integrity: sha512-sk3+BjnDHUwC/tEUVZF5jGiqaBQDhf2ZdTiWO61NDRjsRL+LIoqrpTHOwDz/hVvLtRY9mgPTf8jWeRqnAPx+dQ==} + '@sap-ai-sdk/orchestration@0.1.1-20240917012643.0': + resolution: {integrity: sha512-TB3v4mD4Gp8jk/VXh+Juj+9CZ27vVkXWMQiDlJXbl34/NEV2dsv/NM+WDe+X8AENc3ZDD9y1c/B0nFbDXExXtg==} '@sap-cloud-sdk/connectivity@3.20.0': resolution: {integrity: sha512-H9jWH6+owUu0vDiz1WWgB+o/1LzFnmmvELUHakdQSU1n930giPOBT9wwCmdbQgsQ+MJ4G6GURyqo9eKberBdXg==} @@ -4505,15 +4505,15 @@ snapshots: '@pkgr/core@0.1.1': {} - '@sap-ai-sdk/ai-api@0.1.1-20240912013039.0': + '@sap-ai-sdk/ai-api@0.1.1-20240917012643.0': dependencies: - '@sap-ai-sdk/core': 0.1.1-20240912013039.0 + '@sap-ai-sdk/core': 0.1.1-20240917012643.0 transitivePeerDependencies: - debug - encoding - supports-color - '@sap-ai-sdk/core@0.1.1-20240912013039.0': + '@sap-ai-sdk/core@0.1.1-20240917012643.0': dependencies: '@sap-cloud-sdk/connectivity': 3.20.0 '@sap-cloud-sdk/http-client': 3.20.0 @@ -4524,10 +4524,10 @@ snapshots: - encoding - supports-color - '@sap-ai-sdk/foundation-models@0.1.1-20240912013039.0': + '@sap-ai-sdk/foundation-models@0.1.1-20240917012643.0': dependencies: - '@sap-ai-sdk/ai-api': 0.1.1-20240912013039.0 - '@sap-ai-sdk/core': 0.1.1-20240912013039.0 + '@sap-ai-sdk/ai-api': 0.1.1-20240917012643.0 + '@sap-ai-sdk/core': 0.1.1-20240917012643.0 '@sap-cloud-sdk/connectivity': 3.20.0 '@sap-cloud-sdk/http-client': 3.20.0 '@sap-cloud-sdk/openapi': 3.20.0 @@ -4537,10 +4537,10 @@ snapshots: - encoding - supports-color - '@sap-ai-sdk/orchestration@0.1.1-20240912013039.0': + '@sap-ai-sdk/orchestration@0.1.1-20240917012643.0': dependencies: - '@sap-ai-sdk/ai-api': 0.1.1-20240912013039.0 - '@sap-ai-sdk/core': 0.1.1-20240912013039.0 + '@sap-ai-sdk/ai-api': 0.1.1-20240917012643.0 + '@sap-ai-sdk/core': 0.1.1-20240917012643.0 '@sap-cloud-sdk/connectivity': 
3.20.0 '@sap-cloud-sdk/http-client': 3.20.0 '@sap-cloud-sdk/openapi': 3.20.0 From 50b902a7072ef489d6b3963a52b9d8ce8a49f83f Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 17 Sep 2024 19:04:12 +0200 Subject: [PATCH 67/95] adjust input --- packages/langchain/src/openai/chat.ts | 35 +++++++++++++++++----- packages/langchain/src/openai/embedding.ts | 11 +++---- packages/langchain/src/openai/types.ts | 5 +--- packages/langchain/src/openai/util.ts | 6 ++-- 4 files changed, 35 insertions(+), 22 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index c74f02135..5bedbda88 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -3,34 +3,53 @@ import { BaseMessage } from '@langchain/core/messages'; import type { ChatResult } from '@langchain/core/outputs'; import { OpenAiChatClient as OpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; -import type { OpenAiChatModelInput, OpenAiChatCallOptions } from './types.js'; +import type { OpenAiChatCallOptions, OpenAiChatModelInput } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. 
*/ -export class AzureOpenAiChatClient extends BaseChatModel { - declare CallOptions: OpenAiChatCallOptions; +export class AzureOpenAiChatClient extends BaseChatModel implements OpenAiChatModelInput { + modelName: AzureOpenAiChatModel; + modelVersion?: string; + resourceGroup?: string; + temperature?: number; + top_p?: number; + logit_bias?: Record; + user?: string; + n?: number; + presence_penalty?: number; + frequency_penalty?: number; + stop?: string | string[]; private openAiChatClient: OpenAiChatClientBase; + constructor(fields: OpenAiChatModelInput) { super(fields); this.openAiChatClient = new OpenAiChatClientBase(fields); + this.modelName = fields.modelName; + this.modelVersion = fields.modelVersion; + this.resourceGroup = fields.resourceGroup; + this.temperature = fields.temperature; + this.top_p = fields.top_p; + this.logit_bias = fields.logit_bias; + this.user = fields.user; + this.n = fields.n; + this.stop = fields.stop; + this.presence_penalty = fields.presence_penalty; + this.frequency_penalty = fields.frequency_penalty; } _llmType(): string { return 'azure_openai'; } - override get callKeys(): (keyof OpenAiChatCallOptions)[] { - return [...(super.callKeys as (keyof OpenAiChatCallOptions)[])]; - } - override async _generate( messages: BaseMessage[], - options: this['CallOptions'], + options: typeof this.ParsedCallOptions, runManager?: CallbackManagerForLLMRun ): Promise { const res = await this.caller.callWithOptions( diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index c7425a336..15ea694ad 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -7,7 +7,7 @@ import { Embeddings } from '@langchain/core/embeddings'; import { OpenAiEmbeddingInput } from './types.js'; /** - * OpenAI GPT Language Model Wrapper to embed texts. + * OpenAI GPT Embedding Model Wrapper to embed texts. 
*/ export class AzureOpenAiEmbeddingClient extends Embeddings { private openAiEmbeddingClient: OpenAiEmbeddingClientBase; @@ -19,13 +19,10 @@ export class AzureOpenAiEmbeddingClient extends Embeddings { } override async embedDocuments(documents: string[]): Promise { - return Promise.all( - documents - .map(async document => (await this.createEmbedding({ input: document })).data - .map(embeddingResponse => embeddingResponse.embedding) - .flat() - ) + const documentEmbeddings = await Promise.all( + documents.map(document => this.createEmbedding({ input: document })) ); + return documentEmbeddings.map(embedding => embedding.data.map(entry => entry.embedding)).flat(); } override async embedQuery(input: string): Promise { diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 71f03ae46..08c67d2a5 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -5,7 +5,6 @@ import type { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; import type { ModelConfiguration, ResourceGroupConfiguration } from '@sap-ai-sdk/ai-api'; -import { BaseFunctionCallOptions, BaseLanguageModelCallOptions } from '@langchain/core/language_models/base'; /** * Input type for OpenAI chat models. @@ -19,9 +18,7 @@ export type OpenAiChatModelInput = Omit, - BaseLanguageModelCallOptions, - BaseFunctionCallOptions {} + extends Omit, BaseChatModelParams {}; /** * Input type for OpenAI embedding models. diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index c3d921f1b..840a15959 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -181,10 +181,10 @@ export function mapLangchainToAiClient( ): OpenAiChatCompletionParameters { return { messages: messages.map(mapBaseMessageToOpenAiChatMessage), - max_tokens: client.maxTokens === -1 ? undefined : client.maxTokens, + max_tokens: client.maxTokens === -1 ? 
undefined : client.max_tokens, temperature: client.temperature, - top_p: client.topP, - logit_bias: client.logitBias, + top_p: client.top_p, + logit_bias: client.logit_bias, n: client.n, stop: options?.stop ?? client.stop, functions: isStructuredToolArray(options?.functions) From e8dd09f38fa9e30f97d9e43e09d936a50ec634ed Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Tue, 17 Sep 2024 20:35:48 +0200 Subject: [PATCH 68/95] almost there --- packages/langchain/src/index.ts | 4 ++-- packages/langchain/src/openai/chat.ts | 11 +++++++---- packages/langchain/src/openai/embedding.ts | 15 +++++++++++---- packages/langchain/src/openai/types.ts | 14 +++++++------- packages/langchain/src/openai/util.ts | 8 ++------ 5 files changed, 29 insertions(+), 23 deletions(-) diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index 8cde18617..ba7dc4aec 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -3,7 +3,7 @@ export { AzureOpenAiEmbeddingClient } from './openai/index.js'; export type { - OpenAiChatModelInput, - OpenAiEmbeddingInput, + OpenAiChatModelParams, + OpenAiEmbeddingModelParams, OpenAiChatCallOptions } from './openai/index.js'; diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 5bedbda88..35a5353a5 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -8,12 +8,12 @@ import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; -import type { OpenAiChatCallOptions, OpenAiChatModelInput } from './types.js'; +import type { OpenAiChatCallOptions, OpenAiChatModelParams } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. 
*/ -export class AzureOpenAiChatClient extends BaseChatModel implements OpenAiChatModelInput { +export class AzureOpenAiChatClient extends BaseChatModel implements OpenAiChatModelParams { modelName: AzureOpenAiChatModel; modelVersion?: string; resourceGroup?: string; @@ -25,9 +25,10 @@ export class AzureOpenAiChatClient extends BaseChatModel presence_penalty?: number; frequency_penalty?: number; stop?: string | string[]; + max_tokens?: number; private openAiChatClient: OpenAiChatClientBase; - constructor(fields: OpenAiChatModelInput) { + constructor(fields: OpenAiChatModelParams) { super(fields); this.openAiChatClient = new OpenAiChatClientBase(fields); this.modelName = fields.modelName; @@ -41,6 +42,7 @@ export class AzureOpenAiChatClient extends BaseChatModel this.stop = fields.stop; this.presence_penalty = fields.presence_penalty; this.frequency_penalty = fields.frequency_penalty; + this.max_tokens = fields.max_tokens; } _llmType(): string { @@ -58,7 +60,7 @@ export class AzureOpenAiChatClient extends BaseChatModel }, () => this.openAiChatClient.run( - mapLangchainToAiClient(this, options, messages) + mapLangchainToAiClient(this, options, messages), options ) ); @@ -72,3 +74,4 @@ export class AzureOpenAiChatClient extends BaseChatModel return mapResponseToChatResult(res.data); } } + diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 15ea694ad..2b77ff571 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -4,18 +4,25 @@ import { OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; import { Embeddings } from '@langchain/core/embeddings'; -import { OpenAiEmbeddingInput } from './types.js'; +import { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; +import { OpenAiEmbeddingModelParams } from './types.js'; /** * OpenAI GPT Embedding Model Wrapper to embed texts. 
*/ -export class AzureOpenAiEmbeddingClient extends Embeddings { +export class AzureOpenAiEmbeddingClient extends Embeddings implements OpenAiEmbeddingModelParams { + modelName: AzureOpenAiChatModel; + modelVersion?: string; + resourceGroup?: string; + private openAiEmbeddingClient: OpenAiEmbeddingClientBase; - constructor(fields: OpenAiEmbeddingInput) { + constructor(fields: OpenAiEmbeddingModelParams) { super(fields); - this.openAiEmbeddingClient = new OpenAiEmbeddingClientBase(fields); + this.modelName = fields.modelName; + this.modelVersion = fields.modelVersion; + this.resourceGroup = fields.resourceGroup; } override async embedDocuments(documents: string[]): Promise { diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 08c67d2a5..6561fe209 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -1,15 +1,16 @@ -import type { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; +import type { BaseChatModelCallOptions, BaseChatModelParams } from '@langchain/core/language_models/chat_models'; import { BaseLLMParams } from '@langchain/core/language_models/llms'; -import type { OpenAiChatCompletionParameters, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models'; +import type { OpenAiChatCompletionParameters } from '@sap-ai-sdk/foundation-models'; import type { - AzureOpenAiChatModel + AzureOpenAiChatModel, + CustomRequestConfig } from '@sap-ai-sdk/core'; import type { ModelConfiguration, ResourceGroupConfiguration } from '@sap-ai-sdk/ai-api'; /** * Input type for OpenAI chat models. */ -export type OpenAiChatModelInput = Omit & +export type OpenAiChatModelParams = Omit & BaseChatModelParams & ModelConfiguration & ResourceGroupConfiguration; @@ -18,12 +19,11 @@ export type OpenAiChatModelInput = Omit, BaseChatModelParams {}; + extends CustomRequestConfig, BaseChatModelCallOptions {}; /** * Input type for OpenAI embedding models. 
*/ -export type OpenAiEmbeddingInput = ModelConfiguration & +export type OpenAiEmbeddingModelParams = ModelConfiguration & ResourceGroupConfiguration & - OpenAiEmbeddingParameters & BaseLLMParams; diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 840a15959..00e1bedd7 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -1,7 +1,6 @@ import { AIMessage, BaseMessage, - ChatMessage, ToolMessage } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; @@ -69,8 +68,6 @@ export function mapBaseMessageToRole( return 'function'; case 'tool': return 'tool'; - case 'generic': - return (message as ChatMessage).role as OpenAiChatMessage['role']; default: throw new Error(`Unknown message type: ${message._getType()}`); } @@ -181,7 +178,7 @@ export function mapLangchainToAiClient( ): OpenAiChatCompletionParameters { return { messages: messages.map(mapBaseMessageToOpenAiChatMessage), - max_tokens: client.maxTokens === -1 ? undefined : client.max_tokens, + max_tokens: client.max_tokens === -1 ? 
undefined : client.max_tokens, temperature: client.temperature, top_p: client.top_p, logit_bias: client.logit_bias, @@ -195,7 +192,6 @@ export function mapLangchainToAiClient( : options?.tools, tool_choice: options?.tool_choice, response_format: options?.response_format, - seed: options?.seed, - ...client.modelKwargs + seed: options?.seed }; } From 19c8ba5f91adf62605356ba3101a5ff51c78c950 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 18 Sep 2024 09:16:46 +0200 Subject: [PATCH 69/95] update visibility and mapping --- packages/langchain/src/openai/types.ts | 30 ++++++++++++++-- packages/langchain/src/openai/util.ts | 49 +++++++++++++------------- 2 files changed, 52 insertions(+), 27 deletions(-) diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 6561fe209..4ebdfd26c 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -18,8 +18,7 @@ export type OpenAiChatModelParams = Omit & ResourceGroupConfiguration & BaseLLMParams; + +/** + * OpenAI toolchoice type. + */ +export type ToolChoice = + | 'none' + | 'auto' + | { + /** + * The type of the tool. + */ + type: 'function'; + /** + * Use to force the model to call a specific function. + */ + function: { + /** + * The name of the function to call. + */ + name: string; + }; + }; + +/** + * LangChain's toolchoice type. 
+ */ +export type LangChainToolChoice = string | Record | 'auto' | 'any'; diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 00e1bedd7..22b6eb9ad 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -15,15 +15,14 @@ import type { } from '@sap-ai-sdk/foundation-models'; import { zodToJsonSchema } from 'zod-to-json-schema'; import { AzureOpenAiChatClient } from './chat.js'; -import { OpenAiChatCallOptions } from './types.js'; +import { LangChainToolChoice, OpenAiChatCallOptions, ToolChoice } from './types.js'; /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. * @param tool - Base class for tools that accept input of any shape defined by a Zod schema. * @returns The OpenAI chat completion function. - * @internal */ -export function mapToolToOpenAiFunction( +function mapToolToOpenAiFunction( tool: StructuredTool ): OpenAiChatCompletionFunction { return { @@ -37,9 +36,8 @@ export function mapToolToOpenAiFunction( * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionTool}. * @param tool - Base class for tools that accept input of any shape defined by a Zod schema. * @returns The OpenAI chat completion tool. - * @internal */ -export function mapToolToOpenAiTool( +function mapToolToOpenAiTool( tool: StructuredTool ): OpenAiChatCompletionTool { return { @@ -52,9 +50,8 @@ export function mapToolToOpenAiTool( * Maps a {@link BaseMessage} to OpenAI's message role. * @param message - The message to map. * @returns The OpenAI message Role. - * @internal */ -export function mapBaseMessageToRole( +function mapBaseMessageToRole( message: BaseMessage ): OpenAiChatMessage['role'] { switch (message._getType()) { @@ -120,9 +117,8 @@ export function mapResponseToChatResult( * Maps {@link BaseMessage} to OpenAI messages. * @param message - The message to map. * @returns The OpenAI chat Message. 
- * @internal */ -export function mapBaseMessageToOpenAiChatMessage( +function mapBaseMessageToOpenAiChatMessage( message: BaseMessage ): OpenAiChatMessage { return { @@ -136,26 +132,12 @@ export function mapBaseMessageToOpenAiChatMessage( } as OpenAiChatMessage; } -/** - * Converts a value to an array or returns undefined. - * @param value - The value to convert. - * @returns The value as an array, undefined if the input is falsy, or the original array if input is already an array. - * @internal - */ -export function toArrayOrUndefined(value?: T | T[]): T[] | undefined { - if (value === undefined) { - return undefined; - } - return Array.isArray(value) ? value : [value]; -} - /** * Checks if a given array is a structured tool array. * @param tools - The array to check. * @returns Whether the array is a structured tool array. - * @internal */ -export function isStructuredToolArray( +function isStructuredToolArray( tools?: unknown[] ): tools is StructuredTool[] { return !!tools?.every(tool => @@ -163,6 +145,23 @@ export function isStructuredToolArray( ); } +function mapToolChoice(toolChoice?: LangChainToolChoice): ToolChoice | undefined { + if (!toolChoice) { + return undefined; + } + + if (toolChoice === 'auto' || toolChoice === 'none') { + return toolChoice; + } + + if (typeof toolChoice === 'string') { + return { + type: 'function', + function: { name: toolChoice } + }; + } +} + /** * Maps LangChain's input interface to our own client's input interface * @param client The LangChain OpenAI client @@ -190,7 +189,7 @@ export function mapLangchainToAiClient( tools: isStructuredToolArray(options?.tools) ? 
options?.tools.map(mapToolToOpenAiTool) : options?.tools, - tool_choice: options?.tool_choice, + tool_choice: mapToolChoice(options?.tool_choice), response_format: options?.response_format, seed: options?.seed }; From d0c50f2bf714f09c8538ba3eeef016a71b372984 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 18 Sep 2024 11:28:58 +0200 Subject: [PATCH 70/95] fix mocking function, improve tests, update mapping --- packages/langchain/README.md | 2 +- packages/langchain/src/openai/util.test.ts | 34 +++++----------------- packages/langchain/src/openai/util.ts | 20 +++++++++---- 3 files changed, 24 insertions(+), 32 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 48f31d76d..f0f8c2237 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -1,4 +1,4 @@ -# @sap-ai-sdk/LangChain +# @sap-ai-sdk/langchain This package contains Langchain compliant models, based on the @sap-ai-sdk clients. diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index aa0683d89..fecfc1f71 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -7,6 +7,7 @@ import nock from 'nock'; import { HumanMessage } from '@langchain/core/messages'; import { mockClientCredentialsGrantCall, + mockDeploymentsList, mockInference, parseMockResponse } from '../../../../test-util/mock-http.js'; @@ -28,15 +29,8 @@ const prompt = { { role: 'user', content: 'Where is the deepest place on earth located', - tool_call_id: '' } ], - max_tokens: 256, - temperature: 0.7, - top_p: 1, - n: 1, - presence_penalty: 0, - frequency_penalty: 0 }; const langchainPrompt = new HumanMessage( @@ -44,29 +38,12 @@ const langchainPrompt = new HumanMessage( ); const request = { - frequency_penalty: 0, - functions: undefined, - logit_bias: undefined, - max_tokens: 256, messages: [ { - content: 'Where is the deepest place on earth located', - function_call: undefined, - name: 
undefined, role: 'user', - tool_call_id: '', - tool_calls: undefined + content: 'Where is the deepest place on earth located', } ], - n: 1, - presence_penalty: 0, - response_format: undefined, - seed: undefined, - stop: undefined, - temperature: 0.7, - tool_choice: undefined, - tools: undefined, - top_p: 1 }; describe('Mapping Functions', () => { @@ -84,6 +61,11 @@ describe('Mapping Functions', () => { }); it('should parse a Langchain input to an ai sdk input', async () => { + mockDeploymentsList( + { scenarioId: 'foundation-models', executableId: 'azure-openai' }, + { id: '1234', model: { name: 'gpt-35-turbo' } } + ); + mockInference( { data: prompt @@ -95,7 +77,7 @@ describe('Mapping Functions', () => { chatCompletionEndpoint ); - const client = new AzureOpenAiChatClient({ deploymentId: '1234' }); + const client = new AzureOpenAiChatClient({ modelName: 'gpt-35-turbo' }); const runSpy = jest.spyOn(OpenAiChatClientBase.prototype, 'run'); await client.generate([[langchainPrompt]]); expect(runSpy).toHaveBeenCalledWith(request); diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 22b6eb9ad..22c1e3f88 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -121,15 +121,15 @@ export function mapResponseToChatResult( function mapBaseMessageToOpenAiChatMessage( message: BaseMessage ): OpenAiChatMessage { - return { + return removeUndefinedProperties({ content: message.content, name: message.name, role: mapBaseMessageToRole(message), function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, tool_call_id: - message._getType() === 'tool' ? (message as ToolMessage).tool_call_id : '' - } as OpenAiChatMessage; + message._getType() === 'tool' ? 
(message as ToolMessage).tool_call_id : undefined + } as OpenAiChatMessage); } /** @@ -175,7 +175,7 @@ export function mapLangchainToAiClient( options: OpenAiChatCallOptions, messages: BaseMessage[] ): OpenAiChatCompletionParameters { - return { + return removeUndefinedProperties({ messages: messages.map(mapBaseMessageToOpenAiChatMessage), max_tokens: client.max_tokens === -1 ? undefined : client.max_tokens, temperature: client.temperature, @@ -192,5 +192,15 @@ export function mapLangchainToAiClient( tool_choice: mapToolChoice(options?.tool_choice), response_format: options?.response_format, seed: options?.seed - }; + }); +} + +function removeUndefinedProperties(obj: T): T { + const result = { ...obj }; + for (const key in result) { + if (result[key as keyof T] === undefined) { + delete result[key as keyof T]; + } + } + return result; } From a0c44e3186e9c101a98ea2e586e18d75fe6341b6 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 18 Sep 2024 11:29:04 +0200 Subject: [PATCH 71/95] fix mocking function, improve tests, update mapping --- test-util/mock-http.ts | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/test-util/mock-http.ts b/test-util/mock-http.ts index 59beaa779..f6a0026b1 100644 --- a/test-util/mock-http.ts +++ b/test-util/mock-http.ts @@ -119,17 +119,15 @@ export function mockDeploymentsList( opts: DeploymentResolutionOptions, ...deployments: { id: string; model?: FoundationModel }[] ): nock.Scope { - const nockOpts = opts?.resourceGroup - ? { - reqheaders: { - 'ai-resource-group': opts?.resourceGroup - } - } - : undefined; + const nockOpts = { + reqheaders: { + 'ai-resource-group': opts?.resourceGroup ?? 
'default', + } + }; const query = { status: 'RUNNING', scenarioId: opts.scenarioId, - ...(opts.executableId && { executableIds: [opts.executableId] }) + ...(opts.executableId && { executableIds: [opts.executableId].toString() }) }; return nock(aiCoreDestination.url, nockOpts) .get('/v2/lm/deployments') From cc3a0079ea7f557f02050f8f679108309b1320b7 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 18 Sep 2024 12:37:20 +0200 Subject: [PATCH 72/95] remove redundant dependencies --- packages/langchain/README.md | 4 ---- packages/langchain/package.json | 8 -------- packages/langchain/src/openai/chat.ts | 2 +- packages/langchain/src/openai/types.ts | 4 +++- 4 files changed, 4 insertions(+), 14 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index f0f8c2237..c06d06ee8 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -6,8 +6,6 @@ This package contains Langchain compliant models, based on the @sap-ai-sdk clien ``` $ npm install @sap-ai-sdk/langchain - -$ npm install @langchain/openai // if you want to use OpenAI models ``` ## Pre-requisites @@ -43,8 +41,6 @@ Instead, the credentials in the binding are used to call SAP's LLM Proxy. We offer two types of clients for OpenAI models: chat and embedding. -All clients assume the same set of default values as [LangChain's default OpenAI client](https://www.npmjs.com/package/@langchain/openai) does. - #### Chat There are two common APIs, `.invoke()` for simple text completion and `.generate()` for chat completion. 
diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 4b00cf5ee..80c098c08 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -33,14 +33,6 @@ "zod-to-json-schema": "^3.23.2", "@sap-cloud-sdk/util": "^3.20.0" }, - "peerDependencies": { - "@langchain/openai": "^0.3.0" - }, - "peerDependenciesMeta": { - "@langchain/openai": { - "optional": true - } - }, "devDependencies": { "typescript": "^5.5.4" } diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 35a5353a5..ff8bdd978 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -60,7 +60,7 @@ export class AzureOpenAiChatClient extends BaseChatModel }, () => this.openAiChatClient.run( - mapLangchainToAiClient(this, options, messages), options + mapLangchainToAiClient(this, options, messages), options.requestConfig ) ); diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 4ebdfd26c..c1fc23405 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -18,7 +18,9 @@ export type OpenAiChatModelParams = Omit Date: Wed, 18 Sep 2024 13:12:03 +0200 Subject: [PATCH 73/95] adjust api --- packages/ai-api/src/index.ts | 2 +- packages/langchain/package.json | 1 + packages/langchain/src/openai/chat.ts | 14 ++++----- packages/langchain/src/openai/embedding.ts | 9 ++++-- packages/langchain/src/openai/types.ts | 32 +++++++++++++++------ packages/langchain/src/openai/util.test.ts | 10 +++---- packages/langchain/src/openai/util.ts | 33 +++++++++++----------- pnpm-lock.yaml | 2 +- 8 files changed, 61 insertions(+), 42 deletions(-) diff --git a/packages/ai-api/src/index.ts b/packages/ai-api/src/index.ts index 0244d149f..f186b6973 100644 --- a/packages/ai-api/src/index.ts +++ b/packages/ai-api/src/index.ts @@ -4,5 +4,5 @@ export type { ModelDeployment, DeploymentIdConfiguration, ModelConfiguration, - 
ResourceGroupConfiguration, + ResourceGroupConfiguration } from './utils/index.js'; diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 80c098c08..43a526128 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -29,6 +29,7 @@ "dependencies": { "@sap-ai-sdk/ai-api": "workspace:^", "@sap-ai-sdk/foundation-models": "workspace:^", + "@langchain/openai": "0.3.0", "@langchain/core": "0.3.1", "zod-to-json-schema": "^3.23.2", "@sap-cloud-sdk/util": "^3.20.0" diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index ff8bdd978..3c80f29f9 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -4,16 +4,16 @@ import type { ChatResult } from '@langchain/core/outputs'; import { OpenAiChatClient as OpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; -import { - mapLangchainToAiClient, - mapResponseToChatResult -} from './util.js'; +import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; import type { OpenAiChatCallOptions, OpenAiChatModelParams } from './types.js'; /** * OpenAI Language Model Wrapper to generate texts. 
*/ -export class AzureOpenAiChatClient extends BaseChatModel implements OpenAiChatModelParams { +export class AzureOpenAiChatClient + extends BaseChatModel + implements OpenAiChatModelParams +{ modelName: AzureOpenAiChatModel; modelVersion?: string; resourceGroup?: string; @@ -60,7 +60,8 @@ export class AzureOpenAiChatClient extends BaseChatModel }, () => this.openAiChatClient.run( - mapLangchainToAiClient(this, options, messages), options.requestConfig + mapLangchainToAiClient(this, options, messages), + options.requestConfig ) ); @@ -74,4 +75,3 @@ export class AzureOpenAiChatClient extends BaseChatModel return mapResponseToChatResult(res.data); } } - diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 2b77ff571..3d5a507cf 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -10,7 +10,10 @@ import { OpenAiEmbeddingModelParams } from './types.js'; /** * OpenAI GPT Embedding Model Wrapper to embed texts. 
*/ -export class AzureOpenAiEmbeddingClient extends Embeddings implements OpenAiEmbeddingModelParams { +export class AzureOpenAiEmbeddingClient + extends Embeddings + implements OpenAiEmbeddingModelParams +{ modelName: AzureOpenAiChatModel; modelVersion?: string; resourceGroup?: string; @@ -29,7 +32,9 @@ export class AzureOpenAiEmbeddingClient extends Embeddings implements OpenAiEmbe const documentEmbeddings = await Promise.all( documents.map(document => this.createEmbedding({ input: document })) ); - return documentEmbeddings.map(embedding => embedding.data.map(entry => entry.embedding)).flat(); + return documentEmbeddings + .map(embedding => embedding.data.map(entry => entry.embedding)) + .flat(); } override async embedQuery(input: string): Promise { diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index c1fc23405..f2eb29390 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -1,16 +1,25 @@ -import type { BaseChatModelCallOptions, BaseChatModelParams } from '@langchain/core/language_models/chat_models'; +import type { + BaseChatModelCallOptions, + BaseChatModelParams +} from '@langchain/core/language_models/chat_models'; import { BaseLLMParams } from '@langchain/core/language_models/llms'; import type { OpenAiChatCompletionParameters } from '@sap-ai-sdk/foundation-models'; import type { AzureOpenAiChatModel, CustomRequestConfig } from '@sap-ai-sdk/core'; -import type { ModelConfiguration, ResourceGroupConfiguration } from '@sap-ai-sdk/ai-api'; +import type { + ModelConfiguration, + ResourceGroupConfiguration +} from '@sap-ai-sdk/ai-api'; /** * Input type for OpenAI chat models. 
*/ -export type OpenAiChatModelParams = Omit & +export type OpenAiChatModelParams = Omit< + OpenAiChatCompletionParameters, + 'messages' | 'response_format' | 'seed' | 'functions' | 'tools' | 'tool_choice' +> & BaseChatModelParams & ModelConfiguration & ResourceGroupConfiguration; @@ -18,16 +27,21 @@ export type OpenAiChatModelParams = Omit & { + requestConfig?: CustomRequestConfig; + }; /** * Input type for OpenAI embedding models. */ -export type OpenAiEmbeddingModelParams = ModelConfiguration & - ResourceGroupConfiguration & - BaseLLMParams; +export type OpenAiEmbeddingModelParams = + ModelConfiguration & + ResourceGroupConfiguration & + BaseLLMParams; /** * OpenAI toolchoice type. diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index fecfc1f71..b9f1f8d9e 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -28,9 +28,9 @@ const prompt = { messages: [ { role: 'user', - content: 'Where is the deepest place on earth located', + content: 'Where is the deepest place on earth located' } - ], + ] }; const langchainPrompt = new HumanMessage( @@ -41,9 +41,9 @@ const request = { messages: [ { role: 'user', - content: 'Where is the deepest place on earth located', + content: 'Where is the deepest place on earth located' } - ], + ] }; describe('Mapping Functions', () => { @@ -80,6 +80,6 @@ describe('Mapping Functions', () => { const client = new AzureOpenAiChatClient({ modelName: 'gpt-35-turbo' }); const runSpy = jest.spyOn(OpenAiChatClientBase.prototype, 'run'); await client.generate([[langchainPrompt]]); - expect(runSpy).toHaveBeenCalledWith(request); + expect(runSpy).toHaveBeenCalledWith(request, undefined); }); }); diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 22c1e3f88..be6cd8da7 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -1,8 +1,4 @@ -import { - AIMessage, - 
BaseMessage, - ToolMessage -} from '@langchain/core/messages'; +import { AIMessage, BaseMessage, ToolMessage } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; import type { @@ -15,7 +11,11 @@ import type { } from '@sap-ai-sdk/foundation-models'; import { zodToJsonSchema } from 'zod-to-json-schema'; import { AzureOpenAiChatClient } from './chat.js'; -import { LangChainToolChoice, OpenAiChatCallOptions, ToolChoice } from './types.js'; +import { + LangChainToolChoice, + OpenAiChatCallOptions, + ToolChoice +} from './types.js'; /** * Maps a LangChain {@link StructuredTool} to {@link OpenAiChatCompletionFunction}. @@ -37,9 +37,7 @@ function mapToolToOpenAiFunction( * @param tool - Base class for tools that accept input of any shape defined by a Zod schema. * @returns The OpenAI chat completion tool. */ -function mapToolToOpenAiTool( - tool: StructuredTool -): OpenAiChatCompletionTool { +function mapToolToOpenAiTool(tool: StructuredTool): OpenAiChatCompletionTool { return { type: 'function', function: mapToolToOpenAiFunction(tool) @@ -51,9 +49,7 @@ function mapToolToOpenAiTool( * @param message - The message to map. * @returns The OpenAI message Role. 
*/ -function mapBaseMessageToRole( - message: BaseMessage -): OpenAiChatMessage['role'] { +function mapBaseMessageToRole(message: BaseMessage): OpenAiChatMessage['role'] { switch (message._getType()) { case 'ai': return 'assistant'; @@ -121,6 +117,7 @@ export function mapResponseToChatResult( function mapBaseMessageToOpenAiChatMessage( message: BaseMessage ): OpenAiChatMessage { + // TODO: remove type casting, improve message.content handling return removeUndefinedProperties({ content: message.content, name: message.name, @@ -128,7 +125,9 @@ function mapBaseMessageToOpenAiChatMessage( function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, tool_call_id: - message._getType() === 'tool' ? (message as ToolMessage).tool_call_id : undefined + message._getType() === 'tool' + ? (message as ToolMessage).tool_call_id + : undefined } as OpenAiChatMessage); } @@ -137,15 +136,15 @@ function mapBaseMessageToOpenAiChatMessage( * @param tools - The array to check. * @returns Whether the array is a structured tool array. 
*/ -function isStructuredToolArray( - tools?: unknown[] -): tools is StructuredTool[] { +function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { return !!tools?.every(tool => Array.isArray((tool as StructuredTool).lc_namespace) ); } -function mapToolChoice(toolChoice?: LangChainToolChoice): ToolChoice | undefined { +function mapToolChoice( + toolChoice?: LangChainToolChoice +): ToolChoice | undefined { if (!toolChoice) { return undefined; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e073a632e..6e94309b5 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -158,7 +158,7 @@ importers: specifier: 0.3.1 version: 0.3.1(openai@4.61.1(zod@3.23.8)) '@langchain/openai': - specifier: ^0.3.0 + specifier: 0.3.0 version: 0.3.0(@langchain/core@0.3.1(openai@4.61.1(zod@3.23.8))) '@sap-ai-sdk/ai-api': specifier: workspace:^ From d4b53440f92e0c9d3dca5b069aebc9f819bd026b Mon Sep 17 00:00:00 2001 From: cloud-sdk-js Date: Wed, 18 Sep 2024 11:37:19 +0000 Subject: [PATCH 74/95] fix: Changes from lint --- .../src/openai/openai-response.test.ts | 1 - packages/langchain/src/openai/embedding.ts | 6 ++++-- packages/langchain/src/openai/types.ts | 15 ++++++++------- packages/langchain/src/openai/util.ts | 8 ++++++-- sample-code/src/index.ts | 2 +- 5 files changed, 19 insertions(+), 13 deletions(-) diff --git a/packages/foundation-models/src/openai/openai-response.test.ts b/packages/foundation-models/src/openai/openai-response.test.ts index c2d587f37..9ff6f7979 100644 --- a/packages/foundation-models/src/openai/openai-response.test.ts +++ b/packages/foundation-models/src/openai/openai-response.test.ts @@ -45,4 +45,3 @@ describe('OpenAI response', () => { expect(openAiChatClientResponse.getContent(1)).toBeUndefined(); }); }); - diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index 3908b755d..f8360bac1 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -40,8 
+40,10 @@ export class AzureOpenAiEmbeddingClient private async createEmbedding( query: AzureOpenAiEmbeddingParameters ): Promise { - return this.caller.callWithOptions({}, async () => - (await this.openAiEmbeddingClient.run(query)).getEmbedding() ?? [] + return this.caller.callWithOptions( + {}, + async () => + (await this.openAiEmbeddingClient.run(query)).getEmbedding() ?? [] ); } } diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 4153f55cd..dd7386d66 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -8,17 +8,19 @@ import type { AzureOpenAiChatModel, CustomRequestConfig } from '@sap-ai-sdk/core'; -import type { - ModelConfig, - ResourceGroupConfig -} from '@sap-ai-sdk/ai-api'; +import type { ModelConfig, ResourceGroupConfig } from '@sap-ai-sdk/ai-api'; /** * Input type for OpenAI chat models. */ export type OpenAiChatModelParams = Omit< AzureOpenAiChatCompletionParameters, - 'messages' | 'response_format' | 'seed' | 'functions' | 'tools' | 'tool_choice' + | 'messages' + | 'response_format' + | 'seed' + | 'functions' + | 'tools' + | 'tool_choice' > & BaseChatModelParams & ModelConfig & @@ -38,8 +40,7 @@ export type OpenAiChatCallOptions = BaseChatModelCallOptions & /** * Input type for OpenAI embedding models. */ -export type OpenAiEmbeddingModelParams = - ModelConfig & +export type OpenAiEmbeddingModelParams = ModelConfig & ResourceGroupConfig & BaseLLMParams; diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 534ec270c..28a007eda 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -37,7 +37,9 @@ function mapToolToOpenAiFunction( * @param tool - Base class for tools that accept input of any shape defined by a Zod schema. * @returns The OpenAI chat completion tool. 
*/ -function mapToolToOpenAiTool(tool: StructuredTool): AzureOpenAiChatCompletionTool { +function mapToolToOpenAiTool( + tool: StructuredTool +): AzureOpenAiChatCompletionTool { return { type: 'function', function: mapToolToOpenAiFunction(tool) @@ -49,7 +51,9 @@ function mapToolToOpenAiTool(tool: StructuredTool): AzureOpenAiChatCompletionToo * @param message - The message to map. * @returns The OpenAI message Role. */ -function mapBaseMessageToRole(message: BaseMessage): AzureOpenAiChatMessage['role'] { +function mapBaseMessageToRole( + message: BaseMessage +): AzureOpenAiChatMessage['role'] { switch (message._getType()) { case 'ai': return 'assistant'; diff --git a/sample-code/src/index.ts b/sample-code/src/index.ts index 0bd517f75..1b8c4d2af 100644 --- a/sample-code/src/index.ts +++ b/sample-code/src/index.ts @@ -1,7 +1,7 @@ // exported for e2e tests export { chatCompletion, - computeEmbedding, + computeEmbedding } from './foundation-models-azure-openai.js'; export { embedQuery, generate } from './langchain-azure-openai.js'; From addf4deb9bff00a68615e38a051edb59cc099efb Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 18 Sep 2024 13:46:21 +0200 Subject: [PATCH 75/95] review --- packages/langchain/src/openai/chat.ts | 4 ++-- packages/langchain/src/openai/util.test.ts | 8 ++++---- tests/e2e-tests/src/foundation-models.test.ts | 2 -- tests/e2e-tests/src/open-ai-langchain.test.ts | 2 -- 4 files changed, 6 insertions(+), 10 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index ec6846703..801ee03c5 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -67,8 +67,8 @@ export class AzureOpenAiChatClient // we currently do not support streaming await runManager?.handleLLMNewToken( - typeof res.data.choices[0].message.content === 'string' - ? res.data.choices[0].message.content + typeof res.getContent() === 'string' + ? 
res.getContent() as string : '' ); diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index 722cdd913..ca8fb5187 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,6 +1,6 @@ import { AzureOpenAiChatClient as AzureOpenAiChatClientBase, - OpenAiChatCompletionOutput + AzureOpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models'; import { jest } from '@jest/globals'; import nock from 'nock'; @@ -14,7 +14,7 @@ import { import { mapResponseToChatResult } from './util.js'; import { AzureOpenAiChatClient } from './chat.js'; -const openAiMockResponse = parseMockResponse( +const openAiMockResponse = parseMockResponse( 'foundation-models', 'openai-chat-completion-success-response.json' ); @@ -55,12 +55,12 @@ describe('Mapping Functions', () => { nock.cleanAll(); }); - it('should parse an OpenAi response to a (Langchain) chat response', async () => { + it('should parse an OpenAI response to a (LangChain) chat response', async () => { const result = mapResponseToChatResult(openAiMockResponse); expect(result).toMatchSnapshot(); }); - it('should parse a Langchain input to an ai sdk input', async () => { + it('should parse a LangChain input to an AI SDK input', async () => { mockDeploymentsList( { scenarioId: 'foundation-models', executableId: 'azure-openai' }, { id: '1234', model: { name: 'gpt-35-turbo' } } diff --git a/tests/e2e-tests/src/foundation-models.test.ts b/tests/e2e-tests/src/foundation-models.test.ts index 8909fc3ca..635cb9382 100644 --- a/tests/e2e-tests/src/foundation-models.test.ts +++ b/tests/e2e-tests/src/foundation-models.test.ts @@ -11,13 +11,11 @@ dotenv.config({ path: path.resolve(__dirname, '../.env') }); describe('Azure OpenAI Foundation Model Access', () => { it('should complete a chat', async () => { const result = await chatCompletion(); - expect(result).toBeDefined(); expect(result).toContain('Paris'); }); it('should compute an 
embedding vector', async () => { const result = await computeEmbedding(); - expect(result).toBeDefined(); expect(result).not.toHaveLength(0); }); }); diff --git a/tests/e2e-tests/src/open-ai-langchain.test.ts b/tests/e2e-tests/src/open-ai-langchain.test.ts index e5381f9e7..d45f180ca 100644 --- a/tests/e2e-tests/src/open-ai-langchain.test.ts +++ b/tests/e2e-tests/src/open-ai-langchain.test.ts @@ -11,13 +11,11 @@ dotenv.config({ path: path.resolve(__dirname, '../.env') }); describe('Langchain OpenAI Access', () => { it('should complete a chat', async () => { const result = await generate(); - expect(result).toBeDefined(); expect(result).toContain('Paris'); }); it('should compute an embedding vector', async () => { const result = await embedQuery(); - expect(result).toBeDefined(); expect(result).not.toHaveLength(0); }); }); From 9d2abcc6af03f70d8013220c534ea2d8b811518e Mon Sep 17 00:00:00 2001 From: cloud-sdk-js Date: Wed, 18 Sep 2024 11:47:14 +0000 Subject: [PATCH 76/95] fix: Changes from lint --- packages/langchain/src/openai/chat.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 801ee03c5..9c01c3293 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -67,9 +67,7 @@ export class AzureOpenAiChatClient // we currently do not support streaming await runManager?.handleLLMNewToken( - typeof res.getContent() === 'string' - ? res.getContent() as string - : '' + typeof res.getContent() === 'string' ? 
(res.getContent() as string) : '' ); return mapResponseToChatResult(res.data); From 9cb519546d955a16c7a1b8b87bc2e6d36a3de1bc Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 18 Sep 2024 13:51:19 +0200 Subject: [PATCH 77/95] naming --- packages/langchain/src/index.ts | 6 +++--- packages/langchain/src/openai/chat.ts | 10 +++++----- packages/langchain/src/openai/embedding.ts | 6 +++--- packages/langchain/src/openai/types.ts | 6 +++--- packages/langchain/src/openai/util.ts | 8 ++------ 5 files changed, 16 insertions(+), 20 deletions(-) diff --git a/packages/langchain/src/index.ts b/packages/langchain/src/index.ts index ba7dc4aec..5e1fcb6ce 100644 --- a/packages/langchain/src/index.ts +++ b/packages/langchain/src/index.ts @@ -3,7 +3,7 @@ export { AzureOpenAiEmbeddingClient } from './openai/index.js'; export type { - OpenAiChatModelParams, - OpenAiEmbeddingModelParams, - OpenAiChatCallOptions + AzureOpenAiChatModelParams, + AzureOpenAiEmbeddingModelParams, + AzureOpenAiChatCallOptions } from './openai/index.js'; diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 801ee03c5..6d90ab9e4 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -5,14 +5,14 @@ import { AzureOpenAiChatClient as AzureOpenAiChatClientBase } from '@sap-ai-sdk/ import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; -import type { OpenAiChatCallOptions, OpenAiChatModelParams } from './types.js'; +import type { AzureOpenAiChatCallOptions, AzureOpenAiChatModelParams } from './types.js'; /** - * OpenAI Language Model Wrapper to generate texts. + * OpenAI language model wrapper to generate texts. 
*/ export class AzureOpenAiChatClient - extends BaseChatModel - implements OpenAiChatModelParams + extends BaseChatModel + implements AzureOpenAiChatModelParams { modelName: AzureOpenAiChatModel; modelVersion?: string; @@ -28,7 +28,7 @@ export class AzureOpenAiChatClient max_tokens?: number; private openAiChatClient: AzureOpenAiChatClientBase; - constructor(fields: OpenAiChatModelParams) { + constructor(fields: AzureOpenAiChatModelParams) { super(fields); this.openAiChatClient = new AzureOpenAiChatClientBase(fields); this.modelName = fields.modelName; diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index f8360bac1..fff002315 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -4,14 +4,14 @@ import { } from '@sap-ai-sdk/foundation-models'; import { Embeddings } from '@langchain/core/embeddings'; import { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; -import { OpenAiEmbeddingModelParams } from './types.js'; +import { AzureOpenAiEmbeddingModelParams } from './types.js'; /** * OpenAI GPT Embedding Model Wrapper to embed texts. 
*/ export class AzureOpenAiEmbeddingClient extends Embeddings - implements OpenAiEmbeddingModelParams + implements AzureOpenAiEmbeddingModelParams { modelName: AzureOpenAiChatModel; modelVersion?: string; @@ -19,7 +19,7 @@ export class AzureOpenAiEmbeddingClient private openAiEmbeddingClient: AzureOpenAiEmbeddingClientBase; - constructor(fields: OpenAiEmbeddingModelParams) { + constructor(fields: AzureOpenAiEmbeddingModelParams) { super(fields); this.openAiEmbeddingClient = new AzureOpenAiEmbeddingClientBase(fields); this.modelName = fields.modelName; diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index dd7386d66..e7076ba51 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -13,7 +13,7 @@ import type { ModelConfig, ResourceGroupConfig } from '@sap-ai-sdk/ai-api'; /** * Input type for OpenAI chat models. */ -export type OpenAiChatModelParams = Omit< +export type AzureOpenAiChatModelParams = Omit< AzureOpenAiChatCompletionParameters, | 'messages' | 'response_format' @@ -29,7 +29,7 @@ export type OpenAiChatModelParams = Omit< /** * Chat model call options for OpenAI. */ -export type OpenAiChatCallOptions = BaseChatModelCallOptions & +export type AzureOpenAiChatCallOptions = BaseChatModelCallOptions & Pick< AzureOpenAiChatCompletionParameters, 'response_format' | 'seed' | 'functions' | 'tools' | 'tool_choice' @@ -40,7 +40,7 @@ export type OpenAiChatCallOptions = BaseChatModelCallOptions & /** * Input type for OpenAI embedding models. 
*/ -export type OpenAiEmbeddingModelParams = ModelConfig & +export type AzureOpenAiEmbeddingModelParams = ModelConfig & ResourceGroupConfig & BaseLLMParams; diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 28a007eda..4712f8786 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -13,7 +13,7 @@ import { zodToJsonSchema } from 'zod-to-json-schema'; import { AzureOpenAiChatClient } from './chat.js'; import { LangChainToolChoice, - OpenAiChatCallOptions, + AzureOpenAiChatCallOptions, ToolChoice } from './types.js'; @@ -149,10 +149,6 @@ function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { function mapToolChoice( toolChoice?: LangChainToolChoice ): ToolChoice | undefined { - if (!toolChoice) { - return undefined; - } - if (toolChoice === 'auto' || toolChoice === 'none') { return toolChoice; } @@ -175,7 +171,7 @@ function mapToolChoice( */ export function mapLangchainToAiClient( client: AzureOpenAiChatClient, - options: OpenAiChatCallOptions, + options: AzureOpenAiChatCallOptions, messages: BaseMessage[] ): AzureOpenAiChatCompletionParameters { return removeUndefinedProperties({ From 3e7a25e6b420dbaa05299c315f7ebb15836e19f5 Mon Sep 17 00:00:00 2001 From: cloud-sdk-js Date: Wed, 18 Sep 2024 11:52:14 +0000 Subject: [PATCH 78/95] fix: Changes from lint --- packages/langchain/src/openai/chat.ts | 5 ++++- packages/langchain/src/openai/types.ts | 5 ++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index ec2476790..c5adce6c8 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -5,7 +5,10 @@ import { AzureOpenAiChatClient as AzureOpenAiChatClientBase } from '@sap-ai-sdk/ import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; import { 
mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; -import type { AzureOpenAiChatCallOptions, AzureOpenAiChatModelParams } from './types.js'; +import type { + AzureOpenAiChatCallOptions, + AzureOpenAiChatModelParams +} from './types.js'; /** * OpenAI language model wrapper to generate texts. diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index e7076ba51..4e201b1c9 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -40,9 +40,8 @@ export type AzureOpenAiChatCallOptions = BaseChatModelCallOptions & /** * Input type for OpenAI embedding models. */ -export type AzureOpenAiEmbeddingModelParams = ModelConfig & - ResourceGroupConfig & - BaseLLMParams; +export type AzureOpenAiEmbeddingModelParams = + ModelConfig & ResourceGroupConfig & BaseLLMParams; /** * OpenAI toolchoice type. From 03c1b522d7685c666ec097bb9708ceac5ad527f6 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 18 Sep 2024 15:47:00 +0200 Subject: [PATCH 79/95] adjust everything --- packages/langchain/src/openai/types.ts | 4 +- pnpm-lock.yaml | 3 ++ sample-code/package.json | 1 + sample-code/src/index.ts | 2 +- sample-code/src/langchain-azure-openai.ts | 38 ++++++++++++++++--- sample-code/src/server.ts | 33 ++++++++++++++-- tests/e2e-tests/src/open-ai-langchain.test.ts | 18 +++++++-- tests/type-tests/test/langchain.test-d.ts | 32 ---------------- 8 files changed, 84 insertions(+), 47 deletions(-) delete mode 100644 tests/type-tests/test/langchain.test-d.ts diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index e7076ba51..de7fca7a5 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -3,7 +3,7 @@ import type { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; import { BaseLLMParams } from '@langchain/core/language_models/llms'; -import type { AzureOpenAiChatCompletionParameters } from 
'@sap-ai-sdk/foundation-models'; +import type { AzureOpenAiChatCompletionParameters, AzureOpenAiEmbeddingModel } from '@sap-ai-sdk/foundation-models'; import type { AzureOpenAiChatModel, CustomRequestConfig @@ -40,7 +40,7 @@ export type AzureOpenAiChatCallOptions = BaseChatModelCallOptions & /** * Input type for OpenAI embedding models. */ -export type AzureOpenAiEmbeddingModelParams = ModelConfig & +export type AzureOpenAiEmbeddingModelParams = ModelConfig & ResourceGroupConfig & BaseLLMParams; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c53ef694a..89e7ce8e1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -219,6 +219,9 @@ importers: '@langchain/core': specifier: 0.3.1 version: 0.3.1(openai@4.61.1(zod@3.23.8)) + '@langchain/openai': + specifier: 0.3.0 + version: 0.3.0(@langchain/core@0.3.1(openai@4.61.1(zod@3.23.8))) '@sap-ai-sdk/ai-api': specifier: workspace:^ version: link:../packages/ai-api diff --git a/sample-code/package.json b/sample-code/package.json index b49615475..c936b234f 100644 --- a/sample-code/package.json +++ b/sample-code/package.json @@ -27,6 +27,7 @@ "@sap-ai-sdk/foundation-models": "workspace:^", "@sap-ai-sdk/orchestration": "workspace:^", "@sap-ai-sdk/langchain": "workspace:^", + "@langchain/openai": "0.3.0", "@langchain/core": "0.3.1", "@types/express": "^4.17.21", "express": "^4.21.0" diff --git a/sample-code/src/index.ts b/sample-code/src/index.ts index 1b8c4d2af..c9c4b4e32 100644 --- a/sample-code/src/index.ts +++ b/sample-code/src/index.ts @@ -4,4 +4,4 @@ export { computeEmbedding } from './foundation-models-azure-openai.js'; -export { embedQuery, generate } from './langchain-azure-openai.js'; +export { embedQuery, embedDocument, simpleInvoke, complexInvoke } from './langchain-azure-openai.js'; diff --git a/sample-code/src/langchain-azure-openai.ts b/sample-code/src/langchain-azure-openai.ts index 182facc45..d02f7a41f 100644 --- a/sample-code/src/langchain-azure-openai.ts +++ b/sample-code/src/langchain-azure-openai.ts @@ -1,4 
+1,5 @@ -import { HumanMessage } from '@langchain/core/messages'; +import { StringOutputParser } from '@langchain/core/output_parsers'; +import { ChatPromptTemplate } from '@langchain/core/prompts'; import { AzureOpenAiChatClient, AzureOpenAiEmbeddingClient @@ -8,12 +9,25 @@ import { * Ask GPT about the capital of France. * @returns The answer from GPT. */ -export async function generate(): Promise { +export async function simpleInvoke(): Promise { const client = new AzureOpenAiChatClient({ modelName: 'gpt-35-turbo' }); - const response = await client.generate([ - [new HumanMessage('What is the capital of France?')] + const parser = new StringOutputParser(); + return client.pipe(parser).invoke('What is the capital of France?'); +} + +/** + * Ask GPT about the capital of France, with a more complex prompt. + * @returns The answer from GPT. + */ +export async function complexInvoke(): Promise { + const client = new AzureOpenAiChatClient({ modelName: 'gpt-35-turbo' }); + const promptTemplate = ChatPromptTemplate.fromMessages([ + ['system', 'Answer the following in {language}:'], + ['user', '{text}'], ]); - return response.generations[0][0].text; + const parser = new StringOutputParser(); + const llmChain = promptTemplate.pipe(client).pipe(parser); + return llmChain.invoke({ language: 'german', text: 'What is the capital of France?' }); } /** @@ -26,3 +40,17 @@ export async function embedQuery(): Promise { }); return client.embedQuery('Hello, world!'); } + +/** + * Embed 'Hello, world!' and 'Goodbye, world!' using the OpenAI ADA model. + * @returns An array of embedding vectors. + */ +export async function embedDocument(): Promise { + const client = new AzureOpenAiEmbeddingClient({ + modelName: 'text-embedding-ada-002' + }); + return client.embedDocuments([ + 'Hello, world!', + 'Goodbye, world!' 
+ ]); +} diff --git a/sample-code/src/server.ts b/sample-code/src/server.ts index afa3a281d..145e07329 100644 --- a/sample-code/src/server.ts +++ b/sample-code/src/server.ts @@ -6,7 +6,7 @@ import { } from './foundation-models-azure-openai.js'; import { orchestrationCompletion } from './orchestration.js'; import { getDeployments } from './ai-api.js'; -import { embedQuery, generate } from './langchain-azure-openai.js'; +import { complexInvoke, embedDocument, embedQuery, simpleInvoke } from './langchain-azure-openai.js'; const app = express(); const port = 8080; @@ -66,7 +66,7 @@ app.get('/ai-api/get-deployments', async (req, res) => { app.get('/langchain/chat', async (req, res) => { try { - res.send(await generate()); + res.send(await simpleInvoke()); } catch (error: any) { console.error(error); res @@ -75,7 +75,18 @@ app.get('/langchain/chat', async (req, res) => { } }); -app.get('/langchain/embedding', async (req, res) => { +app.get('/langchain/complex-chat', async (req, res) => { + try { + res.send(await complexInvoke()); + } catch (error: any) { + console.error(error); + res + .status(500) + .send('Yikes, vibes are off apparently 😬 -> ' + error.message); + } +}); + +app.get('/langchain/embed-query', async (req, res) => { try { const result = await embedQuery(); if (!result.length) { @@ -91,6 +102,22 @@ app.get('/langchain/embedding', async (req, res) => { } }); +app.get('/langchain/embed-document', async (req, res) => { + try { + const result = await embedDocument(); + if (!result.length) { + res.status(500).send('No embedding vector returned.'); + } else { + res.send('Number crunching success, got a nice vector.'); + } + } catch (error: any) { + console.error(error); + res + .status(500) + .send('Yikes, vibes are off apparently 😬 -> ' + error.message); + } +}); + app.listen(port, () => { console.log(`Server running at http://localhost:${port}`); }); diff --git a/tests/e2e-tests/src/open-ai-langchain.test.ts b/tests/e2e-tests/src/open-ai-langchain.test.ts index 
d45f180ca..1daf06fda 100644 --- a/tests/e2e-tests/src/open-ai-langchain.test.ts +++ b/tests/e2e-tests/src/open-ai-langchain.test.ts @@ -1,7 +1,7 @@ import path from 'path'; import { fileURLToPath } from 'url'; import dotenv from 'dotenv'; -import { embedQuery, generate } from '@sap-ai-sdk/sample-code'; +import { complexInvoke, embedDocument, embedQuery, simpleInvoke } from '@sap-ai-sdk/sample-code'; // Pick .env file from root directory const __filename = fileURLToPath(import.meta.url); @@ -9,13 +9,23 @@ const __dirname = path.dirname(__filename); dotenv.config({ path: path.resolve(__dirname, '../.env') }); describe('Langchain OpenAI Access', () => { - it('should complete a chat', async () => { - const result = await generate(); + it('executes a basic invoke', async () => { + const result = await simpleInvoke(); expect(result).toContain('Paris'); }); - it('should compute an embedding vector', async () => { + it('executes invoke as part of a chain ', async () => { + const result = await complexInvoke(); + expect(result).toContain('Paris'); + }); + + it('should compute an embedding vector based on a string', async () => { const result = await embedQuery(); expect(result).not.toHaveLength(0); }); + + it('should compute an embedding vector based on a string array', async () => { + const result = await embedDocument(); + expect(result).not.toHaveLength(0); + }); }); diff --git a/tests/type-tests/test/langchain.test-d.ts b/tests/type-tests/test/langchain.test-d.ts deleted file mode 100644 index 4b9836622..000000000 --- a/tests/type-tests/test/langchain.test-d.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { expectError, expectType } from 'tsd'; -import { OpenAiChatClient, OpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; -import { AIMessageChunk } from '@langchain/core/messages'; -import { LLMResult } from '@langchain/core/outputs'; - -expectError( - new OpenAiChatClient({ deploymentId: 'test', modelName: 'test' }).invoke( - 'Test' - ) -); - -expectError(new 
OpenAiChatClient({ modelName: 'my-cool-chat-model' })); - -expectError(new OpenAiChatClient({ deploymentId: 'test', apiKey: 'test' })); - -expectType>( - new OpenAiChatClient({ modelName: 'gpt-35-turbo' }).invoke('Test') -); - -expectType>( - new OpenAiChatClient({ modelName: 'gpt-35-turbo' }).generate([['Test']]) -); - -expectType>( - new OpenAiEmbeddingClient({ modelName: 'text-embedding-3-large' }).embedQuery( - 'test' - ) -); - -expectError( - new OpenAiEmbeddingClient({ modelName: 'my-cool-embedding-model' }) -); From 16d771f7dd9863fc1e4811d6ff91e734e47759d9 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Wed, 18 Sep 2024 16:12:05 +0200 Subject: [PATCH 80/95] adjust mapping test --- packages/langchain/package.json | 1 - packages/langchain/src/openai/util.test.ts | 77 +++++++--------------- packages/langchain/src/openai/util.ts | 2 +- pnpm-lock.yaml | 3 - 4 files changed, 23 insertions(+), 60 deletions(-) diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 43a526128..80c098c08 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -29,7 +29,6 @@ "dependencies": { "@sap-ai-sdk/ai-api": "workspace:^", "@sap-ai-sdk/foundation-models": "workspace:^", - "@langchain/openai": "0.3.0", "@langchain/core": "0.3.1", "zod-to-json-schema": "^3.23.2", "@sap-cloud-sdk/util": "^3.20.0" diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index ca8fb5187..e6cbe0c8f 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,51 +1,18 @@ -import { - AzureOpenAiChatClient as AzureOpenAiChatClientBase, - AzureOpenAiChatCompletionOutput -} from '@sap-ai-sdk/foundation-models'; -import { jest } from '@jest/globals'; +import { AzureOpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models'; import nock from 'nock'; import { HumanMessage } from '@langchain/core/messages'; import { mockClientCredentialsGrantCall, - 
mockDeploymentsList, - mockInference, parseMockResponse } from '../../../../test-util/mock-http.js'; -import { mapResponseToChatResult } from './util.js'; +import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; import { AzureOpenAiChatClient } from './chat.js'; const openAiMockResponse = parseMockResponse( 'foundation-models', - 'openai-chat-completion-success-response.json' -); - -const chatCompletionEndpoint = { - url: 'inference/deployments/1234/chat/completions', - apiVersion: '2024-02-01' -}; - -const prompt = { - messages: [ - { - role: 'user', - content: 'Where is the deepest place on earth located' - } - ] -}; - -const langchainPrompt = new HumanMessage( - 'Where is the deepest place on earth located' + 'azure-openai-chat-completion-success-response.json' ); -const request = { - messages: [ - { - role: 'user', - content: 'Where is the deepest place on earth located' - } - ] -}; - describe('Mapping Functions', () => { beforeEach(() => { mockClientCredentialsGrantCall(); @@ -61,25 +28,25 @@ describe('Mapping Functions', () => { }); it('should parse a LangChain input to an AI SDK input', async () => { - mockDeploymentsList( - { scenarioId: 'foundation-models', executableId: 'azure-openai' }, - { id: '1234', model: { name: 'gpt-35-turbo' } } - ); - - mockInference( - { - data: prompt - }, - { - data: openAiMockResponse, - status: 200 - }, - chatCompletionEndpoint - ); - + const langchainPrompt = [ + new HumanMessage('Where is the deepest place on earth located') + ]; + + const request = { + messages: [ + { + role: 'user', + content: 'Where is the deepest place on earth located' + } + ] + }; const client = new AzureOpenAiChatClient({ modelName: 'gpt-35-turbo' }); - const runSpy = jest.spyOn(AzureOpenAiChatClientBase.prototype, 'run'); - await client.generate([[langchainPrompt]]); - expect(runSpy).toHaveBeenCalledWith(request, undefined); + const defaultOptions = { signal: undefined, promptIndex: 0 }; + const mapping = 
mapLangchainToAiClient( + client, + defaultOptions, + langchainPrompt + ); + expect(mapping).toMatchObject(request); }); }); diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 4712f8786..6e331fa66 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -171,7 +171,7 @@ function mapToolChoice( */ export function mapLangchainToAiClient( client: AzureOpenAiChatClient, - options: AzureOpenAiChatCallOptions, + options: AzureOpenAiChatCallOptions & { promptIndex?: number }, messages: BaseMessage[] ): AzureOpenAiChatCompletionParameters { return removeUndefinedProperties({ diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 89e7ce8e1..6d3ec4c84 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -163,9 +163,6 @@ importers: '@langchain/core': specifier: 0.3.1 version: 0.3.1(openai@4.61.1(zod@3.23.8)) - '@langchain/openai': - specifier: 0.3.0 - version: 0.3.0(@langchain/core@0.3.1(openai@4.61.1(zod@3.23.8))) '@sap-ai-sdk/ai-api': specifier: workspace:^ version: link:../ai-api From db7477aa92b5e3d03539fbd7c1fad1e888e1144f Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 13:55:15 +0200 Subject: [PATCH 81/95] update docs --- packages/langchain/README.md | 114 ++++++++++----------- packages/langchain/package.json | 2 +- packages/langchain/src/openai/types.ts | 27 ----- packages/langchain/src/openai/util.test.ts | 8 +- packages/langchain/src/openai/util.ts | 41 +++++++- 5 files changed, 94 insertions(+), 98 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index c06d06ee8..3ae22c331 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -1,113 +1,103 @@ # @sap-ai-sdk/langchain +This package provides LangChain model clients, built on top of the `@sap-ai-sdk`. -This package contains Langchain compliant models, based on the @sap-ai-sdk clients. +## Table of Contents +1. [Installation](#installation) +2. 
[Pre-requisites](#pre-requisites) +3. [Usage](#usage) + - [Client Initialization](#client-initialization) + - [Chat Clients](#chat-clients) + - [Embedding Clients](#embedding-clients) +4. [Support, Feedback, Contribution](#support-feedback-contribution) +5. [License](#license) -### Installation +## Installation ``` $ npm install @sap-ai-sdk/langchain ``` ## Pre-requisites - - [Enable the AI Core service in BTP](https://help.sap.com/docs/sap-ai-core/sap-ai-core-service-guide/initial-setup). +- Bind the service to your application. - Ensure the project is configured with Node.js v20 or higher, along with native ESM support. +- For testing your application locally: + - Download a service key for your AI Core service instance. + - Create a `.env` file in the sample-code directory. + - Add an entry `AICORE_SERVICE_KEY=''`. ## Usage - All clients comply with [LangChain's interface](https://js.langchain.com/docs/introduction). +We offer chat and embedding clients, currently only for Azure OpenAI. -To initialize the client, you can pass either: - -```ts - modelName: string, - modelVersion?: string, - resourceGroup?: string, - ...others -``` - -or alternatively: +### Client Initialization +To initialize a client, you only need to provide the model name: ```ts - deploymentId: string, - resourceGroup?: string - ...others +import { AzureOpenAiChatClient } from '@sap-ai-sdk/langchain'; +const chatClient = new AzureOpenAiChatClient({ modelName: 'gpt-4o' }); ``` -If you pass API keys, they are ignored, as it is not intended to call the vendor's endpoints directly. -Instead, the credentials in the binding are used to call SAP's LLM Proxy. - -### OpenAI - -We offer two types of clients for OpenAI models: chat and embedding. - -#### Chat - -There are two common APIs, `.invoke()` for simple text completion and `.generate()` for chat completion. -You can also combine them with the usual LangChain functionality, e.g. prompt templates. 
- -A simple text completion might look like: - -##### Initialization +In addition to the default parameters of the model vendor (e.g. OpenAI) and LangChain, there are also SDK-specific parameters, which you can use to narrow down the search for the model you want to use: ```ts -import { OpenAiChatClient } from '@sap-ai-sdk/langchain'; -const chatClient = new OpenAiChatClient({ modelName: 'gpt-4o' }); +const chatClient = new AzureOpenAiChatClient({ + modelName: 'gpt-4o', + modelVersion: '24-07-2021', + resourceGroup: 'my-resource-group' +}); ``` -##### Usage +### Chat Clients +The chat clients allow you to interact with Azure OpenAI chat models, accessible via SAP Gen AI Hub. +To invoke the client, you only have a to pass a prompt. +#### Simple Example ```ts -const response = await chatClient.invoke("What's the capital of France?'"); +const response = await chatClient.invoke("What's the capital of France?"); ``` -A chat completion example might be: - +#### Advanced Example with Templating and Output Parsing ```ts -const response = await chatClient.generate([ - [new SystemMessage('You are an IT support agent answering questions.')], - [new HumanMessage('Why is my internet not working?')] +import { AzureOpenAiChatClient } from '@sap-ai-sdk/langchain'; +import { StringOutputParser } from '@langchain/core/output_parsers'; +import { ChatPromptTemplate } from '@langchain/core/prompts'; + +const client = new AzureOpenAiChatClient({ modelName: 'gpt-35-turbo' }); +const promptTemplate = ChatPromptTemplate.fromMessages([ + ['system', 'Answer the following in {language}:'], + ['user', '{text}'] ]); -``` - -#### Embedding - -You have the option to either embed a text or a document. -Documents have to be represented as an array of strings. - -Below are two examples. 
- -##### Initialization - -```ts -import { OpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; - -const embeddingClient = new OpenAiEmbeddingClient({ - modelName: 'text-embedding-ada-002' +const parser = new StringOutputParser(); +const llmChain = promptTemplate.pipe(client).pipe(parser); +const response = llmChain.invoke({ + language: 'german', + text: 'What is the capital of France?' }); ``` -##### Usage +### Embedding Clients +Embedding clients allow embedding either text or documents (represented as arrays of strings). +#### Embed Text ```ts const embeddedText = await embeddingClient.embedQuery( - 'Paris is the capitol of France.' + 'Paris is the capital of France.' ); ``` +#### Embed Documents ```ts const embeddedDocument = await embeddingClient.embedDocuments([ - 'Page 1: Paris is the capitol of France.', + 'Page 1: Paris is the capital of France.', 'Page 2: It is a beautiful city.' ]); ``` ## Support, Feedback, Contribution - This project is open to feature requests/suggestions, bug reports etc. via [GitHub issues](https://github.com/SAP/ai-sdk-js/issues). Contribution and feedback are encouraged and always welcome. For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](https://github.com/SAP/ai-sdk-js/blob/main/CONTRIBUTING.md). 
## License - The SAP Cloud SDK for AI is released under the [Apache License Version 2.0.](http://www.apache.org/licenses/) diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 80c098c08..d00dff13d 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -1,7 +1,7 @@ { "name": "@sap-ai-sdk/langchain", "version": "0.0.0", - "description": "", + "description": "LangChain clients based on the @sap-ai-sdk", "license": "Apache-2.0", "keywords": [ "sap-ai-sdk", diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 4e201b1c9..52d7a4d98 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -42,30 +42,3 @@ export type AzureOpenAiChatCallOptions = BaseChatModelCallOptions & */ export type AzureOpenAiEmbeddingModelParams = ModelConfig & ResourceGroupConfig & BaseLLMParams; - -/** - * OpenAI toolchoice type. - */ -export type ToolChoice = - | 'none' - | 'auto' - | { - /** - * The type of the tool. - */ - type: 'function'; - /** - * Use to force the model to call a specific function. - */ - function: { - /** - * The name of the function to call. - */ - name: string; - }; - }; - -/** - * LangChain's toolchoice type. 
- */ -export type LangChainToolChoice = string | Record | 'auto' | 'any'; diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index e6cbe0c8f..fe181da3e 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,6 +1,6 @@ -import { AzureOpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models'; +import { AzureOpenAiChatCompletionOutput, AzureOpenAiChatCompletionParameters } from '@sap-ai-sdk/foundation-models'; import nock from 'nock'; -import { HumanMessage } from '@langchain/core/messages'; +import { BaseMessage, HumanMessage } from '@langchain/core/messages'; import { mockClientCredentialsGrantCall, parseMockResponse @@ -28,11 +28,11 @@ describe('Mapping Functions', () => { }); it('should parse a LangChain input to an AI SDK input', async () => { - const langchainPrompt = [ + const langchainPrompt: BaseMessage[] = [ new HumanMessage('Where is the deepest place on earth located') ]; - const request = { + const request: AzureOpenAiChatCompletionParameters = { messages: [ { role: 'user', diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 6e331fa66..4d8c60d7c 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -12,11 +12,33 @@ import type { import { zodToJsonSchema } from 'zod-to-json-schema'; import { AzureOpenAiChatClient } from './chat.js'; import { - LangChainToolChoice, AzureOpenAiChatCallOptions, - ToolChoice } from './types.js'; +type ToolChoice = + | 'none' + | 'auto' + | { + /** + * The type of the tool. + */ + type: 'function'; + /** + * Use to force the model to call a specific function. + */ + function: { + /** + * The name of the function to call. + */ + name: string; + }; + }; + +/** + * LangChain's toolchoice type. 
+ */ +type LangChainToolChoice = string | Record | 'auto' | 'any'; + /** * Maps a LangChain {@link StructuredTool} to {@link AzureOpenAiChatCompletionFunction}. * @param tool - Base class for tools that accept input of any shape defined by a Zod schema. @@ -123,8 +145,8 @@ function mapBaseMessageToAzureOpenAiChatMessage( ): AzureOpenAiChatMessage { // TODO: remove type casting, improve message.content handling return removeUndefinedProperties({ - content: message.content, name: message.name, + content: message.content, role: mapBaseMessageToRole(message), function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, @@ -132,7 +154,18 @@ function mapBaseMessageToAzureOpenAiChatMessage( message._getType() === 'tool' ? (message as ToolMessage).tool_call_id : undefined - } as AzureOpenAiChatMessage); + }); +} + +function mapBaseMessageToContent(baseMessage: BaseMessage): AzureOpenAiChatMessage['content'] { + if (typeof baseMessage.content === 'object' && ('text' in baseMessage.content || 'image_url' in baseMessage.content)) { + const { text, image_url, ...rest } = baseMessage.content; + if (rest) { + // log warning + return; + } + } + return baseMessage.content as AzureOpenAiChatMessage['content']; } /** From 126279624282f7db3215b14308bed18f30309329 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 14:43:50 +0200 Subject: [PATCH 82/95] update typedocs --- packages/langchain/README.md | 5 +- packages/langchain/src/openai/chat.ts | 6 +-- packages/langchain/src/openai/embedding.ts | 2 +- packages/langchain/src/openai/types.ts | 6 +-- packages/langchain/src/openai/util.test.ts | 4 +- packages/langchain/src/openai/util.ts | 61 ++++++++++------------ 6 files changed, 39 insertions(+), 45 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 3ae22c331..7ca1742a5 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -11,7 +11,6 @@ This package provides 
LangChain model clients, built on top of the `@sap-ai-sdk` 4. [Support, Feedback, Contribution](#support-feedback-contribution) 5. [License](#license) - ## Installation ``` $ npm install @sap-ai-sdk/langchain @@ -27,8 +26,8 @@ $ npm install @sap-ai-sdk/langchain - Add an entry `AICORE_SERVICE_KEY=''`. ## Usage +This package provides both chat and embedding clients, currently supporting Azure OpenAI. All clients comply with [LangChain's interface](https://js.langchain.com/docs/introduction). -We offer chat and embedding clients, currently only for Azure OpenAI. ### Client Initialization To initialize a client, you only need to provide the model name: @@ -70,7 +69,7 @@ const promptTemplate = ChatPromptTemplate.fromMessages([ ]); const parser = new StringOutputParser(); const llmChain = promptTemplate.pipe(client).pipe(parser); -const response = llmChain.invoke({ +const response = await llmChain.invoke({ language: 'german', text: 'What is the capital of France?' }); diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index c5adce6c8..4aac6da48 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -4,14 +4,14 @@ import type { ChatResult } from '@langchain/core/outputs'; import { AzureOpenAiChatClient as AzureOpenAiChatClientBase } from '@sap-ai-sdk/foundation-models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; -import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; +import { mapLangchainToAiClient, mapOutputToChatResult } from './util.js'; import type { AzureOpenAiChatCallOptions, AzureOpenAiChatModelParams } from './types.js'; /** - * OpenAI language model wrapper to generate texts. + * LangChain chat client for Azure OpenAI consumption on SAP BTP. 
*/ export class AzureOpenAiChatClient extends BaseChatModel @@ -73,6 +73,6 @@ export class AzureOpenAiChatClient typeof res.getContent() === 'string' ? (res.getContent() as string) : '' ); - return mapResponseToChatResult(res.data); + return mapOutputToChatResult(res.data); } } diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts index fff002315..c82472aaf 100644 --- a/packages/langchain/src/openai/embedding.ts +++ b/packages/langchain/src/openai/embedding.ts @@ -7,7 +7,7 @@ import { AzureOpenAiChatModel } from '@sap-ai-sdk/core'; import { AzureOpenAiEmbeddingModelParams } from './types.js'; /** - * OpenAI GPT Embedding Model Wrapper to embed texts. + * LangChain embedding client for Azure OpenAI consumption on SAP BTP. */ export class AzureOpenAiEmbeddingClient extends Embeddings diff --git a/packages/langchain/src/openai/types.ts b/packages/langchain/src/openai/types.ts index 52d7a4d98..26d94d7fa 100644 --- a/packages/langchain/src/openai/types.ts +++ b/packages/langchain/src/openai/types.ts @@ -11,7 +11,7 @@ import type { import type { ModelConfig, ResourceGroupConfig } from '@sap-ai-sdk/ai-api'; /** - * Input type for OpenAI chat models. + * Input type for {@link AzureOpenAiChatClient} initialization. */ export type AzureOpenAiChatModelParams = Omit< AzureOpenAiChatCompletionParameters, @@ -27,7 +27,7 @@ export type AzureOpenAiChatModelParams = Omit< ResourceGroupConfig; /** - * Chat model call options for OpenAI. + * Call options for the {@link AzureOpenAiChatClient}. */ export type AzureOpenAiChatCallOptions = BaseChatModelCallOptions & Pick< @@ -38,7 +38,7 @@ export type AzureOpenAiChatCallOptions = BaseChatModelCallOptions & }; /** - * Input type for OpenAI embedding models. + * Input type for {@link AzureOpenAiEmbeddingClient} initialization. 
*/ export type AzureOpenAiEmbeddingModelParams = ModelConfig & ResourceGroupConfig & BaseLLMParams; diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index fe181da3e..2bbd776b2 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -5,7 +5,7 @@ import { mockClientCredentialsGrantCall, parseMockResponse } from '../../../../test-util/mock-http.js'; -import { mapLangchainToAiClient, mapResponseToChatResult } from './util.js'; +import { mapLangchainToAiClient, mapOutputToChatResult } from './util.js'; import { AzureOpenAiChatClient } from './chat.js'; const openAiMockResponse = parseMockResponse( @@ -23,7 +23,7 @@ describe('Mapping Functions', () => { }); it('should parse an OpenAI response to a (LangChain) chat response', async () => { - const result = mapResponseToChatResult(openAiMockResponse); + const result = mapOutputToChatResult(openAiMockResponse); expect(result).toMatchSnapshot(); }); diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 4d8c60d7c..1324c617d 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -34,9 +34,6 @@ type ToolChoice = }; }; -/** - * LangChain's toolchoice type. - */ type LangChainToolChoice = string | Record | 'auto' | 'any'; /** @@ -69,9 +66,9 @@ function mapToolToOpenAiTool( } /** - * Maps a {@link BaseMessage} to OpenAI's message role. - * @param message - The message to map. - * @returns The OpenAI message Role. + * Maps a {@link BaseMessage} to{@link AzureOpenAiChatMessage} message role. + * @param message - The {@link BaseMessage} to map. + * @returns The {@link AzureOpenAiChatMessage} message Role. */ function mapBaseMessageToRole( message: BaseMessage @@ -93,16 +90,16 @@ function mapBaseMessageToRole( } /** - * Maps OpenAI messages to LangChain's {@link ChatResult}. - * @param res - The OpenAI chat completion output. 
- * @returns The LangChain chat result. + * Maps {@link AzureOpenAiChatCompletionOutput} to LangChain's {@link ChatResult}. + * @param completionResponse - The {@link AzureOpenAiChatCompletionOutput} response. + * @returns The LangChain {@link ChatResult} * @internal */ -export function mapResponseToChatResult( - res: AzureOpenAiChatCompletionOutput +export function mapOutputToChatResult( + completionResponse: AzureOpenAiChatCompletionOutput ): ChatResult { return { - generations: res.choices.map((choice: AzureOpenAiChatCompletionChoice) => ({ + generations: completionResponse.choices.map((choice: AzureOpenAiChatCompletionChoice) => ({ text: choice.message.content || '', message: new AIMessage({ content: choice.message.content || '', @@ -122,23 +119,23 @@ export function mapResponseToChatResult( } })), llmOutput: { - created: res.created, - id: res.id, - model: res.model, - object: res.object, + created: completionResponse.created, + id: completionResponse.id, + model: completionResponse.model, + object: completionResponse.object, tokenUsage: { - completionTokens: res.usage.completion_tokens, - promptTokens: res.usage.prompt_tokens, - totalTokens: res.usage.total_tokens + completionTokens: completionResponse.usage.completion_tokens, + promptTokens: completionResponse.usage.prompt_tokens, + totalTokens: completionResponse.usage.total_tokens } } }; } /** - * Maps {@link BaseMessage} to OpenAI messages. + * Maps {@link BaseMessage} to {@link AzureOpenAiChatMessage}. * @param message - The message to map. - * @returns The OpenAI chat Message. + * @returns The {@link AzureOpenAiChatMessage}. */ function mapBaseMessageToAzureOpenAiChatMessage( message: BaseMessage @@ -150,10 +147,7 @@ function mapBaseMessageToAzureOpenAiChatMessage( role: mapBaseMessageToRole(message), function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, - tool_call_id: - message._getType() === 'tool' - ? 
(message as ToolMessage).tool_call_id - : undefined + tool_call_id: mapToolCallId(message) }); } @@ -168,17 +162,18 @@ function mapBaseMessageToContent(baseMessage: BaseMessage): AzureOpenAiChatMessa return baseMessage.content as AzureOpenAiChatMessage['content']; } -/** - * Checks if a given array is a structured tool array. - * @param tools - The array to check. - * @returns Whether the array is a structured tool array. - */ function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { return !!tools?.every(tool => Array.isArray((tool as StructuredTool).lc_namespace) ); } +function mapToolCallId(message: BaseMessage): string | undefined { + if (message._getType() === 'tool') { + return (message as ToolMessage).tool_call_id; + } +} + function mapToolChoice( toolChoice?: LangChainToolChoice ): ToolChoice | undefined { @@ -195,9 +190,9 @@ function mapToolChoice( } /** - * Maps LangChain's input interface to our own client's input interface - * @param client The LangChain OpenAI client - * @param options The LangChain call options + * Maps LangChain's input interface to the AI SDK client's input interface + * @param client The LangChain Azure OpenAI client + * @param options The {@link AzureOpenAiChatCallOptions} * @param messages The messages to be send * @returns An AI SDK compatibile request * @internal From e91e0c5743f3d73f96e285efa7438e97dac9bd44 Mon Sep 17 00:00:00 2001 From: Marika Marszalkowski <868536+marikaner@users.noreply.github.com> Date: Thu, 19 Sep 2024 15:01:18 +0200 Subject: [PATCH 83/95] Update packages/ai-api/src/utils/deployment-resolver.ts --- packages/ai-api/src/utils/deployment-resolver.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/ai-api/src/utils/deployment-resolver.ts b/packages/ai-api/src/utils/deployment-resolver.ts index a6b0b0e13..8a4c1ddac 100644 --- a/packages/ai-api/src/utils/deployment-resolver.ts +++ b/packages/ai-api/src/utils/deployment-resolver.ts @@ -4,6 +4,7 @@ import { } from 
'../client/AI_CORE_API/index.js'; import { deploymentCache } from './deployment-cache.js'; import { extractModel, type FoundationModel } from './model.js'; + /** * The model deployment configuration when using a model. * @typeParam ModelNameT - String literal type representing the name of the model. From 2370aa365cda387308ddc65e3c46967521c50e6d Mon Sep 17 00:00:00 2001 From: Deeksha Sinha <88374536+deekshas8@users.noreply.github.com> Date: Thu, 19 Sep 2024 16:08:13 +0200 Subject: [PATCH 84/95] Update packages/langchain/README.md Co-authored-by: Marika Marszalkowski <868536+marikaner@users.noreply.github.com> --- packages/langchain/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 7ca1742a5..1a2e63ac1 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -30,7 +30,7 @@ This package provides both chat and embedding clients, currently supporting Azur All clients comply with [LangChain's interface](https://js.langchain.com/docs/introduction). 
### Client Initialization -To initialize a client, you only need to provide the model name: +To initialize a client, provide the model name: ```ts import { AzureOpenAiChatClient } from '@sap-ai-sdk/langchain'; From 925b9d6454fb592ef1ba8dc862e1951cddb5c620 Mon Sep 17 00:00:00 2001 From: Deeksha Sinha <88374536+deekshas8@users.noreply.github.com> Date: Thu, 19 Sep 2024 16:08:40 +0200 Subject: [PATCH 85/95] Update packages/langchain/README.md Co-authored-by: Marika Marszalkowski <868536+marikaner@users.noreply.github.com> --- packages/langchain/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 1a2e63ac1..8784d7aa2 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -17,7 +17,7 @@ $ npm install @sap-ai-sdk/langchain ``` ## Pre-requisites -- [Enable the AI Core service in BTP](https://help.sap.com/docs/sap-ai-core/sap-ai-core-service-guide/initial-setup). +- [Enable the AI Core service in SAP BTP](https://help.sap.com/docs/sap-ai-core/sap-ai-core-service-guide/initial-setup). - Bind the service to your application. - Ensure the project is configured with Node.js v20 or higher, along with native ESM support. 
- For testing your application locally: From 2522a9a4f1fd8e52f1af6022ce89eff0e6467b9e Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 16:11:35 +0200 Subject: [PATCH 86/95] review --- eslint.config.js | 8 -------- packages/langchain/README.md | 7 +++---- packages/langchain/src/openai/chat.ts | 4 +++- packages/langchain/src/openai/util.ts | 27 +++++++++++++-------------- 4 files changed, 19 insertions(+), 27 deletions(-) diff --git a/eslint.config.js b/eslint.config.js index dc7400ec8..31c2e20fa 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -33,14 +33,6 @@ export default [ 'jsdoc/require-jsdoc': 'off' } }, - { - files: [ - '**/packages/orchestration/src/client/api/default-api.ts' - ], - rules: { - '@typescript-eslint/explicit-module-boundary-types': 'off' - } - }, { files: ['packages/langchain/**/*.ts'], rules: { diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 7ca1742a5..a7b2a2a25 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -47,11 +47,10 @@ const chatClient = new AzureOpenAiChatClient({ }); ``` -### Chat Clients +### Chat Client The chat clients allow you to interact with Azure OpenAI chat models, accessible via SAP Gen AI Hub. -To invoke the client, you only have a to pass a prompt. +To invoke the client, you only have a to pass a prompt: -#### Simple Example ```ts const response = await chatClient.invoke("What's the capital of France?"); ``` @@ -75,7 +74,7 @@ const response = await llmChain.invoke({ }); ``` -### Embedding Clients +### Embedding Client Embedding clients allow embedding either text or documents (represented as arrays of strings). 
#### Embed Text diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts index 4aac6da48..6fb7a6e39 100644 --- a/packages/langchain/src/openai/chat.ts +++ b/packages/langchain/src/openai/chat.ts @@ -68,9 +68,11 @@ export class AzureOpenAiChatClient ) ); + const content = res.getContent(); + // we currently do not support streaming await runManager?.handleLLMNewToken( - typeof res.getContent() === 'string' ? (res.getContent() as string) : '' + typeof content === 'string' ? content : '' ); return mapOutputToChatResult(res.data); diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 1324c617d..91a225ea8 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -73,20 +73,20 @@ function mapToolToOpenAiTool( function mapBaseMessageToRole( message: BaseMessage ): AzureOpenAiChatMessage['role'] { - switch (message._getType()) { - case 'ai': - return 'assistant'; - case 'human': - return 'user'; - case 'system': - return 'system'; - case 'function': - return 'function'; - case 'tool': - return 'tool'; - default: - throw new Error(`Unknown message type: ${message._getType()}`); + const messageTypeToRoleMap = new Map([ + ['human', 'user'], + ['ai', 'assistant'], + ['system', 'system'], + ['function', 'function'], + ['tool', 'tool'] + ]); + + const messageType = message._getType(); + const role = messageTypeToRoleMap.get(messageType); + if(!role) { + throw new Error(`Unsupported message type: ${messageType}`); } + return role; } /** @@ -155,7 +155,6 @@ function mapBaseMessageToContent(baseMessage: BaseMessage): AzureOpenAiChatMessa if (typeof baseMessage.content === 'object' && ('text' in baseMessage.content || 'image_url' in baseMessage.content)) { const { text, image_url, ...rest } = baseMessage.content; if (rest) { - // log warning return; } } From c97c22ff4ef3ff2c9f7309446d09a33c4a850308 Mon Sep 17 00:00:00 2001 From: Deeksha Sinha 
<88374536+deekshas8@users.noreply.github.com> Date: Thu, 19 Sep 2024 16:13:52 +0200 Subject: [PATCH 87/95] Apply suggestions from code review Co-authored-by: Marika Marszalkowski <868536+marikaner@users.noreply.github.com> --- packages/langchain/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 17de95ff5..68d449915 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -1,5 +1,5 @@ # @sap-ai-sdk/langchain -This package provides LangChain model clients, built on top of the `@sap-ai-sdk`. +This package provides LangChain model clients, built on top of the foundation model clients of the SAP Cloud SDK for AI. ## Table of Contents 1. [Installation](#installation) @@ -37,7 +37,7 @@ import { AzureOpenAiChatClient } from '@sap-ai-sdk/langchain'; const chatClient = new AzureOpenAiChatClient({ modelName: 'gpt-4o' }); ``` -In addition to the default parameters of the model vendor (e.g. OpenAI) and LangChain, there are also SDK-specific parameters, which you can use to narrow down the search for the model you want to use: +In addition to the default parameters of the model vendor (e.g. OpenAI) and LangChain, there are additional parameters, which you can use to narrow down the search for the model you want to use: ```ts const chatClient = new AzureOpenAiChatClient({ @@ -48,7 +48,7 @@ const chatClient = new AzureOpenAiChatClient({ ``` ### Chat Client -The chat clients allow you to interact with Azure OpenAI chat models, accessible via SAP Gen AI Hub. +The chat clients allow you to interact with Azure OpenAI chat models, accessible via generative AI hub of SAP AI Core. 
To invoke the client, you only have a to pass a prompt: ```ts From 659f8fbae1c45c0e4dd403029b11905b7f904eb3 Mon Sep 17 00:00:00 2001 From: Deeksha Sinha <88374536+deekshas8@users.noreply.github.com> Date: Thu, 19 Sep 2024 16:14:54 +0200 Subject: [PATCH 88/95] Update packages/langchain/README.md --- packages/langchain/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 68d449915..cebb67f85 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -98,4 +98,4 @@ This project is open to feature requests/suggestions, bug reports etc. via [GitH Contribution and feedback are encouraged and always welcome. For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](https://github.com/SAP/ai-sdk-js/blob/main/CONTRIBUTING.md). ## License -The SAP Cloud SDK for AI is released under the [Apache License Version 2.0.](http://www.apache.org/licenses/) +The SAP Cloud SDK for AI is released under the [Apache License Version 2.0.](http://www.apache.org/licenses/). 
From e04d63a62e8dec298456a1ae79966f96900d0497 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 16:18:24 +0200 Subject: [PATCH 89/95] more review comments --- packages/ai-api/package.json | 3 +-- packages/langchain/README.md | 8 ++++---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/packages/ai-api/package.json b/packages/ai-api/package.json index 2f721ac62..649fa8f3b 100644 --- a/packages/ai-api/package.json +++ b/packages/ai-api/package.json @@ -29,8 +29,7 @@ "check:public-api": "node --loader ts-node/esm ../../scripts/check-public-api-cli.ts" }, "dependencies": { - "@sap-ai-sdk/core": "workspace:^", - "@sap-cloud-sdk/util": "^3.20.0" + "@sap-ai-sdk/core": "workspace:^" }, "devDependencies": { "typescript": "^5.5.4", diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 17de95ff5..6fe7a98a0 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -22,7 +22,7 @@ $ npm install @sap-ai-sdk/langchain - Ensure the project is configured with Node.js v20 or higher, along with native ESM support. - For testing your application locally: - Download a service key for your AI Core service instance. - - Create a `.env` file in the sample-code directory. + - Create a `.env` file in the root of your directory. - Add an entry `AICORE_SERVICE_KEY=''`. ## Usage @@ -37,7 +37,7 @@ import { AzureOpenAiChatClient } from '@sap-ai-sdk/langchain'; const chatClient = new AzureOpenAiChatClient({ modelName: 'gpt-4o' }); ``` -In addition to the default parameters of the model vendor (e.g. OpenAI) and LangChain, there are also SDK-specific parameters, which you can use to narrow down the search for the model you want to use: +In addition to the default parameters of the model vendor (e.g. 
OpenAI) and LangChain, there are also additional parameters, which you can use to narrow down the search for the model you want to use: ```ts const chatClient = new AzureOpenAiChatClient({ @@ -48,7 +48,7 @@ const chatClient = new AzureOpenAiChatClient({ ``` ### Chat Client -The chat clients allow you to interact with Azure OpenAI chat models, accessible via SAP Gen AI Hub. +The chat clients allow you to interact with Azure OpenAI chat models, accessible via the generative AI hub of SAP AI Core. To invoke the client, you only have a to pass a prompt: ```ts @@ -98,4 +98,4 @@ This project is open to feature requests/suggestions, bug reports etc. via [GitH Contribution and feedback are encouraged and always welcome. For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](https://github.com/SAP/ai-sdk-js/blob/main/CONTRIBUTING.md). ## License -The SAP Cloud SDK for AI is released under the [Apache License Version 2.0.](http://www.apache.org/licenses/) +The SAP Cloud SDK for AI is released under the [Apache License Version 2.0.](http://www.apache.org/licenses/). 
From 5e9debe01a54b9cd5cb305211f7ce813c414bcff Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 18:19:13 +0200 Subject: [PATCH 90/95] update everything --- packages/langchain/README.md | 6 ++- packages/langchain/package.json | 3 +- packages/langchain/src/openai/util.test.ts | 6 +-- packages/langchain/src/openai/util.ts | 62 ++++++++++++++-------- tests/type-tests/package.json | 2 - 5 files changed, 50 insertions(+), 29 deletions(-) diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 4c3ed51ec..3937b688d 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -33,8 +33,12 @@ All clients comply with [LangChain's interface](https://js.langchain.com/docs/in To initialize a client, provide the model name: ```ts -import { AzureOpenAiChatClient } from '@sap-ai-sdk/langchain'; +import { AzureOpenAiChatClient, AzureOpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; + +// For a chat client const chatClient = new AzureOpenAiChatClient({ modelName: 'gpt-4o' }); +// For an embedding client +const embeddingClient = new AzureOpenAiEmbeddingClient({ modelName: 'gpt-4o' }); ``` In addition to the default parameters of the model vendor (e.g. 
OpenAI) and LangChain, there are additional parameters, which you can use to narrow down the search for the model you want to use: diff --git a/packages/langchain/package.json b/packages/langchain/package.json index 815230430..c955be7b7 100644 --- a/packages/langchain/package.json +++ b/packages/langchain/package.json @@ -30,8 +30,7 @@ "@sap-ai-sdk/ai-api": "workspace:^", "@sap-ai-sdk/foundation-models": "workspace:^", "@langchain/core": "0.3.1", - "zod-to-json-schema": "^3.23.2", - "@sap-cloud-sdk/util": "^3.20.0" + "zod-to-json-schema": "^3.23.2" }, "devDependencies": { "typescript": "^5.5.4" diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts index 2bbd776b2..e22e445d4 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,4 +1,4 @@ -import { AzureOpenAiChatCompletionOutput, AzureOpenAiChatCompletionParameters } from '@sap-ai-sdk/foundation-models'; +import { AzureOpenAiCreateChatCompletionResponse, AzureOpenAiCreateChatCompletionRequest } from '@sap-ai-sdk/foundation-models'; import nock from 'nock'; import { BaseMessage, HumanMessage } from '@langchain/core/messages'; import { @@ -8,7 +8,7 @@ import { import { mapLangchainToAiClient, mapOutputToChatResult } from './util.js'; import { AzureOpenAiChatClient } from './chat.js'; -const openAiMockResponse = parseMockResponse( +const openAiMockResponse = parseMockResponse( 'foundation-models', 'azure-openai-chat-completion-success-response.json' ); @@ -32,7 +32,7 @@ describe('Mapping Functions', () => { new HumanMessage('Where is the deepest place on earth located') ]; - const request: AzureOpenAiChatCompletionParameters = { + const request: AzureOpenAiCreateChatCompletionRequest = { messages: [ { role: 'user', diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index c78f21b6b..bd4a066c3 100644 --- a/packages/langchain/src/openai/util.ts +++ 
b/packages/langchain/src/openai/util.ts @@ -1,13 +1,17 @@ import { AIMessage, BaseMessage, ToolMessage } from '@langchain/core/messages'; import { ChatResult } from '@langchain/core/outputs'; import { StructuredTool } from '@langchain/core/tools'; -import type { - AzureOpenAiChatCompletionFunction, - AzureOpenAiChatCompletionTool, - AzureOpenAiChatCompletionRequestMessage, - AzureOpenAiCreateChatCompletionResponse, - AzureOpenAiCreateChatCompletionRequest, - AzureOpenAiChatCompletionFunctionParameters +import { + type AzureOpenAiChatCompletionTool, + type AzureOpenAiChatCompletionRequestMessage, + type AzureOpenAiCreateChatCompletionResponse, + type AzureOpenAiCreateChatCompletionRequest, + type AzureOpenAiChatCompletionFunctionParameters, + AzureOpenAiChatCompletionRequestMessageSystem, + AzureOpenAiChatCompletionRequestMessageUser, + AzureOpenAiChatCompletionRequestMessageAssistant, + AzureOpenAiChatCompletionRequestMessageTool, + AzureOpenAiChatCompletionRequestMessageFunction, } from '@sap-ai-sdk/foundation-models'; import { zodToJsonSchema } from 'zod-to-json-schema'; import { AzureOpenAiChatClient } from './chat.js'; @@ -144,25 +148,40 @@ export function mapOutputToChatResult( function mapBaseMessageToAzureOpenAiChatMessage( message: BaseMessage ): AzureOpenAiChatCompletionRequestMessage { - // TODO: remove type casting, improve message.content handling return removeUndefinedProperties({ name: message.name, - content: message.content, - role: mapBaseMessageToRole(message), + ...mapRoleAndContent(message), function_call: message.additional_kwargs.function_call, tool_calls: message.additional_kwargs.tool_calls, tool_call_id: mapToolCallId(message) }); } -function mapBaseMessageToContent(baseMessage: BaseMessage): AzureOpenAiChatCompletionRequestMessage['content'] { - if (typeof baseMessage.content === 'object' && ('text' in baseMessage.content || 'image_url' in baseMessage.content)) { - const { text, image_url, ...rest } = baseMessage.content; - if (rest) { - 
return; - } +// The following types are used to match a role to its specific content, otherwise TypeScript would not be able to infer the content type. + +type Role = 'system' | 'user' | 'assistant' | 'tool' | 'function'; + +type ContentType = + T extends 'system' ? AzureOpenAiChatCompletionRequestMessageSystem['content'] : + T extends 'user' ? AzureOpenAiChatCompletionRequestMessageUser['content'] : + T extends 'assistant' ? AzureOpenAiChatCompletionRequestMessageAssistant['content'] : + T extends 'tool' ? AzureOpenAiChatCompletionRequestMessageTool['content'] : + T extends 'function' ? AzureOpenAiChatCompletionRequestMessageFunction['content'] : + never; + +type RoleAndContent = { + [T in Role]: { role: T; content: ContentType } +}[Role]; + +function mapRoleAndContent(baseMessage: BaseMessage): RoleAndContent { + const role = mapBaseMessageToRole(baseMessage); + if (!['system', 'user', 'assistant', 'tool', 'function'].includes(role)) { + throw new Error(`Unsupported message role: ${role}`); } - return baseMessage.content as AzureOpenAiChatCompletionRequestMessage['content']; + return { + role, + content: baseMessage.content as ContentType + } as RoleAndContent; } function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { @@ -171,10 +190,11 @@ function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { ); } -function mapToolCallId(message: BaseMessage): string | undefined { - if (message._getType() === 'tool') { - return (message as ToolMessage).tool_call_id; - } +/** + * Has to return an empty string to match one of the types of {@link AzureOpenAiChatCompletionRequestMessage}. + */ +function mapToolCallId(message: BaseMessage): string { + return message._getType() === 'tool' ? 
(message as ToolMessage).tool_call_id : ''; } function mapToolChoice( diff --git a/tests/type-tests/package.json b/tests/type-tests/package.json index 0556cfd82..f48c5b7b9 100644 --- a/tests/type-tests/package.json +++ b/tests/type-tests/package.json @@ -16,8 +16,6 @@ "@sap-ai-sdk/foundation-models": "workspace:^", "@sap-ai-sdk/orchestration": "workspace:^", "@sap-ai-sdk/core": "workspace:^", - "@sap-ai-sdk/langchain": "workspace:^", - "@langchain/core": "^0.2.30", "tsd": "^0.31.2" } } From e4155c7c3c6d79d366b24ab3363090ada45b8c08 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 18:21:55 +0200 Subject: [PATCH 91/95] lockfile and lint --- packages/langchain/src/openai/util.ts | 8 ++-- pnpm-lock.yaml | 56 --------------------------- 2 files changed, 4 insertions(+), 60 deletions(-) diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index bd4a066c3..71904640d 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -161,7 +161,7 @@ function mapBaseMessageToAzureOpenAiChatMessage( type Role = 'system' | 'user' | 'assistant' | 'tool' | 'function'; -type ContentType = +type ContentType = T extends 'system' ? AzureOpenAiChatCompletionRequestMessageSystem['content'] : T extends 'user' ? AzureOpenAiChatCompletionRequestMessageUser['content'] : T extends 'assistant' ? 
AzureOpenAiChatCompletionRequestMessageAssistant['content'] : @@ -178,9 +178,9 @@ function mapRoleAndContent(baseMessage: BaseMessage): RoleAndContent { if (!['system', 'user', 'assistant', 'tool', 'function'].includes(role)) { throw new Error(`Unsupported message role: ${role}`); } - return { - role, - content: baseMessage.content as ContentType + return { + role, + content: baseMessage.content as ContentType } as RoleAndContent; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3c9d5b5ce..4ce26dfd0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -163,9 +163,6 @@ importers: '@sap-ai-sdk/foundation-models': specifier: workspace:^ version: link:../foundation-models - '@sap-cloud-sdk/util': - specifier: ^3.20.0 - version: 3.21.0 zod-to-json-schema: specifier: ^3.23.2 version: 3.23.3(zod@3.23.8) @@ -284,18 +281,12 @@ importers: tests/type-tests: devDependencies: - '@langchain/core': - specifier: ^0.2.30 - version: 0.2.30(openai@4.61.1(zod@3.23.8)) '@sap-ai-sdk/core': specifier: workspace:^ version: link:../../packages/core '@sap-ai-sdk/foundation-models': specifier: workspace:^ version: link:../../packages/foundation-models - '@sap-ai-sdk/langchain': - specifier: workspace:^ - version: link:../../packages/langchain '@sap-ai-sdk/orchestration': specifier: workspace:^ version: link:../../packages/orchestration @@ -761,10 +752,6 @@ packages: '@jsdevtools/ono@7.1.3': resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} - '@langchain/core@0.2.30': - resolution: {integrity: sha512-jeLmLTxnEq9zSq0J/fMlBCMT5Ix8tbZriqNYTm3oS7CPM2uHBcRQhV3fpsh4G8FnE7Pxa4sWfrFzc2jykhlk7A==} - engines: {node: '>=18'} - '@langchain/core@0.3.1': resolution: {integrity: sha512-xYdTAgS9hYPt+h0/OwpyRcMB5HKR40LXutbSr2jw3hMVIOwD1DnvhnUEnWgBK4lumulVW2jrosNPyBKMhRZAZg==} engines: {node: '>=18'} @@ -2627,20 +2614,6 @@ packages: kuler@2.0.0: resolution: {integrity: 
sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==} - langsmith@0.1.48: - resolution: {integrity: sha512-lh98dZeShVPG1VzENpbjFWiburyYpChsO7nehGwxuqQ5/E1BBFKpsDCxLTcgXYpgWFPJxRvMqq7bLeq/txjakw==} - peerDependencies: - '@langchain/core': '*' - langchain: '*' - openai: '*' - peerDependenciesMeta: - '@langchain/core': - optional: true - langchain: - optional: true - openai: - optional: true - langsmith@0.1.56-rc.1: resolution: {integrity: sha512-XsOxlhBAlTCGR9hNEL2VSREmiz8v6czNuX3CIwec9fH9T0WbNPle8Q/7Jy/h9UCbS9vuzTjfgc4qO5Dc9cu5Ig==} peerDependencies: @@ -4607,23 +4580,6 @@ snapshots: '@jsdevtools/ono@7.1.3': {} - '@langchain/core@0.2.30(openai@4.61.1(zod@3.23.8))': - dependencies: - ansi-styles: 5.2.0 - camelcase: 6.3.0 - decamelize: 1.2.0 - js-tiktoken: 1.0.14 - langsmith: 0.1.48(@langchain/core@0.2.30(openai@4.61.1(zod@3.23.8)))(openai@4.61.1(zod@3.23.8)) - mustache: 4.2.0 - p-queue: 6.6.2 - p-retry: 4.6.2 - uuid: 10.0.0 - zod: 3.23.8 - zod-to-json-schema: 3.23.3(zod@3.23.8) - transitivePeerDependencies: - - langchain - - openai - '@langchain/core@0.3.1(openai@4.61.1(zod@3.23.8))': dependencies: ansi-styles: 5.2.0 @@ -7035,18 +6991,6 @@ snapshots: kuler@2.0.0: {} - langsmith@0.1.48(@langchain/core@0.2.30(openai@4.61.1(zod@3.23.8)))(openai@4.61.1(zod@3.23.8)): - dependencies: - '@types/uuid': 10.0.0 - commander: 10.0.1 - p-queue: 6.6.2 - p-retry: 4.6.2 - semver: 7.6.3 - uuid: 10.0.0 - optionalDependencies: - '@langchain/core': 0.2.30(openai@4.61.1(zod@3.23.8)) - openai: 4.61.1(zod@3.23.8) - langsmith@0.1.56-rc.1(openai@4.61.1(zod@3.23.8)): dependencies: '@types/uuid': 10.0.0 From 3118383395e6aba243661219d96b75781ee8e644 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 18:33:23 +0200 Subject: [PATCH 92/95] snap --- .../src/openai/openai-response.test.ts | 47 ------------ packages/langchain/README.md | 26 +++++-- .../openai/__snapshots__/util.test.ts.snap | 49 ++++++++++++ 
packages/langchain/src/openai/util.test.ts | 14 ++-- packages/langchain/src/openai/util.ts | 74 ++++++++++--------- 5 files changed, 120 insertions(+), 90 deletions(-) delete mode 100644 packages/foundation-models/src/openai/openai-response.test.ts diff --git a/packages/foundation-models/src/openai/openai-response.test.ts b/packages/foundation-models/src/openai/openai-response.test.ts deleted file mode 100644 index 9ff6f7979..000000000 --- a/packages/foundation-models/src/openai/openai-response.test.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { parseMockResponse } from '../../../../test-util/mock-http.js'; -import { OpenAiChatCompletionResponse } from './openai-response.js'; -import { OpenAiChatCompletionOutput } from './openai-types.js'; - -describe('OpenAI response', () => { - const mockResponse = parseMockResponse( - 'foundation-models', - 'openai-chat-completion-success-response.json' - ); - const rawResponse = { - data: mockResponse, - status: 200, - headers: {}, - request: {} - }; - const openAiChatClientResponse = new OpenAiChatCompletionResponse( - rawResponse - ); - - it('should initialize with raw response', () => { - expect(openAiChatClientResponse.rawResponse).toBe(rawResponse); - }); - - it('should return the completion response', () => { - expect(openAiChatClientResponse.data).toBe(mockResponse); - }); - - it('should get token usage', () => { - expect(openAiChatClientResponse.getTokenUsage()).toMatchObject({ - completion_tokens: expect.any(Number), - prompt_tokens: expect.any(Number), - total_tokens: expect.any(Number) - }); - }); - - it('should return default choice index with convenience functions', () => { - expect(openAiChatClientResponse.getFinishReason()).toBe('stop'); - expect(openAiChatClientResponse.getContent()).toBe( - 'The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.' 
- ); - }); - - it('should return undefined when convenience function is called with incorrect index', () => { - expect(openAiChatClientResponse.getFinishReason(1)).toBeUndefined(); - expect(openAiChatClientResponse.getContent(1)).toBeUndefined(); - }); -}); diff --git a/packages/langchain/README.md b/packages/langchain/README.md index 3937b688d..331a53378 100644 --- a/packages/langchain/README.md +++ b/packages/langchain/README.md @@ -1,22 +1,26 @@ # @sap-ai-sdk/langchain + This package provides LangChain model clients, built on top of the foundation model clients of the SAP Cloud SDK for AI. ## Table of Contents + 1. [Installation](#installation) 2. [Pre-requisites](#pre-requisites) 3. [Usage](#usage) - - [Client Initialization](#client-initialization) - - [Chat Clients](#chat-clients) - - [Embedding Clients](#embedding-clients) + - [Client Initialization](#client-initialization) + - [Chat Clients](#chat-clients) + - [Embedding Clients](#embedding-clients) 4. [Support, Feedback, Contribution](#support-feedback-contribution) 5. [License](#license) ## Installation + ``` $ npm install @sap-ai-sdk/langchain ``` ## Pre-requisites + - [Enable the AI Core service in SAP BTP](https://help.sap.com/docs/sap-ai-core/sap-ai-core-service-guide/initial-setup). - Bind the service to your application. - Ensure the project is configured with Node.js v20 or higher, along with native ESM support. @@ -26,14 +30,19 @@ $ npm install @sap-ai-sdk/langchain - Add an entry `AICORE_SERVICE_KEY=''`. ## Usage + This package provides both chat and embedding clients, currently supporting Azure OpenAI. All clients comply with [LangChain's interface](https://js.langchain.com/docs/introduction). 
### Client Initialization + To initialize a client, provide the model name: ```ts -import { AzureOpenAiChatClient, AzureOpenAiEmbeddingClient } from '@sap-ai-sdk/langchain'; +import { + AzureOpenAiChatClient, + AzureOpenAiEmbeddingClient +} from '@sap-ai-sdk/langchain'; // For a chat client const chatClient = new AzureOpenAiChatClient({ modelName: 'gpt-4o' }); @@ -52,7 +61,8 @@ const chatClient = new AzureOpenAiChatClient({ ``` ### Chat Client -The chat clients allow you to interact with Azure OpenAI chat models, accessible via the generative AI hub of SAP AI Core. + +The chat clients allow you to interact with Azure OpenAI chat models, accessible via the generative AI hub of SAP AI Core. To invoke the client, you only have a to pass a prompt: ```ts @@ -60,6 +70,7 @@ const response = await chatClient.invoke("What's the capital of France?"); ``` #### Advanced Example with Templating and Output Parsing + ```ts import { AzureOpenAiChatClient } from '@sap-ai-sdk/langchain'; import { StringOutputParser } from '@langchain/core/output_parsers'; @@ -79,9 +90,11 @@ const response = await llmChain.invoke({ ``` ### Embedding Client + Embedding clients allow embedding either text or documents (represented as arrays of strings). #### Embed Text + ```ts const embeddedText = await embeddingClient.embedQuery( 'Paris is the capital of France.' @@ -89,6 +102,7 @@ const embeddedText = await embeddingClient.embedQuery( ``` #### Embed Documents + ```ts const embeddedDocument = await embeddingClient.embedDocuments([ 'Page 1: Paris is the capital of France.', @@ -97,9 +111,11 @@ const embeddedDocument = await embeddingClient.embedDocuments([ ``` ## Support, Feedback, Contribution + This project is open to feature requests/suggestions, bug reports etc. via [GitHub issues](https://github.com/SAP/ai-sdk-js/issues). Contribution and feedback are encouraged and always welcome. 
For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](https://github.com/SAP/ai-sdk-js/blob/main/CONTRIBUTING.md). ## License + The SAP Cloud SDK for AI is released under the [Apache License Version 2.0.](http://www.apache.org/licenses/). diff --git a/packages/langchain/src/openai/__snapshots__/util.test.ts.snap b/packages/langchain/src/openai/__snapshots__/util.test.ts.snap index 6556ecb33..ec5325db1 100644 --- a/packages/langchain/src/openai/__snapshots__/util.test.ts.snap +++ b/packages/langchain/src/openai/__snapshots__/util.test.ts.snap @@ -1,5 +1,54 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP +exports[`Mapping Functions should parse an OpenAI response to a (LangChain) chat response 1`] = ` +{ + "generations": [ + { + "generationInfo": { + "finish_reason": "stop", + "function_call": undefined, + "index": 0, + "tool_calls": undefined, + }, + "message": { + "id": [ + "langchain_core", + "messages", + "AIMessage", + ], + "kwargs": { + "additional_kwargs": { + "finish_reason": "stop", + "function_call": undefined, + "index": 0, + "tool_call_id": "", + "tool_calls": undefined, + }, + "content": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.", + "invalid_tool_calls": [], + "response_metadata": {}, + "tool_calls": [], + }, + "lc": 1, + "type": "constructor", + }, + "text": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.", + }, + ], + "llmOutput": { + "created": 1725457796, + "id": "chatcmpl-A3kgOwg9B6j87n0IkoCFCUCxRSwQZ", + "model": "gpt-4-32k", + "object": "chat.completion", + "tokenUsage": { + "completionTokens": 22, + "promptTokens": 15, + "totalTokens": 37, + }, + }, +} +`; + exports[`Mapping Functions should parse an OpenAi response to a (Langchain) chat response 1`] = ` { "generations": [ diff --git a/packages/langchain/src/openai/util.test.ts 
b/packages/langchain/src/openai/util.test.ts index e22e445d4..efeaa6e64 100644 --- a/packages/langchain/src/openai/util.test.ts +++ b/packages/langchain/src/openai/util.test.ts @@ -1,4 +1,7 @@ -import { AzureOpenAiCreateChatCompletionResponse, AzureOpenAiCreateChatCompletionRequest } from '@sap-ai-sdk/foundation-models'; +import { + AzureOpenAiCreateChatCompletionResponse, + AzureOpenAiCreateChatCompletionRequest +} from '@sap-ai-sdk/foundation-models'; import nock from 'nock'; import { BaseMessage, HumanMessage } from '@langchain/core/messages'; import { @@ -8,10 +11,11 @@ import { import { mapLangchainToAiClient, mapOutputToChatResult } from './util.js'; import { AzureOpenAiChatClient } from './chat.js'; -const openAiMockResponse = parseMockResponse( - 'foundation-models', - 'azure-openai-chat-completion-success-response.json' -); +const openAiMockResponse = + parseMockResponse( + 'foundation-models', + 'azure-openai-chat-completion-success-response.json' + ); describe('Mapping Functions', () => { beforeEach(() => { diff --git a/packages/langchain/src/openai/util.ts b/packages/langchain/src/openai/util.ts index 71904640d..a0eabe224 100644 --- a/packages/langchain/src/openai/util.ts +++ b/packages/langchain/src/openai/util.ts @@ -11,13 +11,11 @@ import { AzureOpenAiChatCompletionRequestMessageUser, AzureOpenAiChatCompletionRequestMessageAssistant, AzureOpenAiChatCompletionRequestMessageTool, - AzureOpenAiChatCompletionRequestMessageFunction, + AzureOpenAiChatCompletionRequestMessageFunction } from '@sap-ai-sdk/foundation-models'; import { zodToJsonSchema } from 'zod-to-json-schema'; import { AzureOpenAiChatClient } from './chat.js'; -import { - AzureOpenAiChatCallOptions, -} from './types.js'; +import { AzureOpenAiChatCallOptions } from './types.js'; type ToolChoice = | 'none' @@ -45,9 +43,7 @@ type LangChainToolChoice = string | Record | 'auto' | 'any'; * @param tool - Base class for tools that accept input of any shape defined by a Zod schema. 
* @returns The OpenAI chat completion function. */ -function mapToolToOpenAiFunction( - tool: StructuredTool -): { +function mapToolToOpenAiFunction(tool: StructuredTool): { description?: string; name: string; parameters: AzureOpenAiChatCompletionFunctionParameters; @@ -81,7 +77,10 @@ function mapToolToOpenAiTool( function mapBaseMessageToRole( message: BaseMessage ): AzureOpenAiChatCompletionRequestMessage['role'] { - const messageTypeToRoleMap = new Map([ + const messageTypeToRoleMap = new Map< + string, + AzureOpenAiChatCompletionRequestMessage['role'] + >([ ['human', 'user'], ['ai', 'assistant'], ['system', 'system'], @@ -91,7 +90,7 @@ function mapBaseMessageToRole( const messageType = message._getType(); const role = messageTypeToRoleMap.get(messageType); - if(!role) { + if (!role) { throw new Error(`Unsupported message type: ${messageType}`); } return role; @@ -107,25 +106,27 @@ export function mapOutputToChatResult( completionResponse: AzureOpenAiCreateChatCompletionResponse ): ChatResult { return { - generations: completionResponse.choices.map((choice: typeof completionResponse['choices'][0]) => ({ - text: choice.message?.content || '', - message: new AIMessage({ - content: choice.message?.content || '', - additional_kwargs: { + generations: completionResponse.choices.map( + (choice: (typeof completionResponse)['choices'][0]) => ({ + text: choice.message?.content || '', + message: new AIMessage({ + content: choice.message?.content || '', + additional_kwargs: { + finish_reason: choice.finish_reason, + index: choice.index, + function_call: choice.message?.function_call, + tool_calls: choice.message?.tool_calls, + tool_call_id: '' + } + }), + generationInfo: { finish_reason: choice.finish_reason, index: choice.index, function_call: choice.message?.function_call, - tool_calls: choice.message?.tool_calls, - tool_call_id: '' + tool_calls: choice.message?.tool_calls } - }), - generationInfo: { - finish_reason: choice.finish_reason, - index: choice.index, - 
function_call: choice.message?.function_call, - tool_calls: choice.message?.tool_calls - } - })), + }) + ), llmOutput: { created: completionResponse.created, id: completionResponse.id, @@ -161,16 +162,20 @@ function mapBaseMessageToAzureOpenAiChatMessage( type Role = 'system' | 'user' | 'assistant' | 'tool' | 'function'; -type ContentType = - T extends 'system' ? AzureOpenAiChatCompletionRequestMessageSystem['content'] : - T extends 'user' ? AzureOpenAiChatCompletionRequestMessageUser['content'] : - T extends 'assistant' ? AzureOpenAiChatCompletionRequestMessageAssistant['content'] : - T extends 'tool' ? AzureOpenAiChatCompletionRequestMessageTool['content'] : - T extends 'function' ? AzureOpenAiChatCompletionRequestMessageFunction['content'] : - never; +type ContentType = T extends 'system' + ? AzureOpenAiChatCompletionRequestMessageSystem['content'] + : T extends 'user' + ? AzureOpenAiChatCompletionRequestMessageUser['content'] + : T extends 'assistant' + ? AzureOpenAiChatCompletionRequestMessageAssistant['content'] + : T extends 'tool' + ? AzureOpenAiChatCompletionRequestMessageTool['content'] + : T extends 'function' + ? AzureOpenAiChatCompletionRequestMessageFunction['content'] + : never; type RoleAndContent = { - [T in Role]: { role: T; content: ContentType } + [T in Role]: { role: T; content: ContentType }; }[Role]; function mapRoleAndContent(baseMessage: BaseMessage): RoleAndContent { @@ -192,9 +197,12 @@ function isStructuredToolArray(tools?: unknown[]): tools is StructuredTool[] { /** * Has to return an empty string to match one of the types of {@link AzureOpenAiChatCompletionRequestMessage}. + * @internal */ function mapToolCallId(message: BaseMessage): string { - return message._getType() === 'tool' ? (message as ToolMessage).tool_call_id : ''; + return message._getType() === 'tool' + ? 
(message as ToolMessage).tool_call_id + : ''; } function mapToolChoice( From facc79c48f5233f6763c5c319c97d426059f1c26 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 18:37:13 +0200 Subject: [PATCH 93/95] update snapshot --- package.json | 1 + .../openai/__snapshots__/util.test.ts.snap | 49 ------------------- 2 files changed, 1 insertion(+), 49 deletions(-) diff --git a/package.json b/package.json index 379ae309d..c04fa6731 100644 --- a/package.json +++ b/package.json @@ -22,6 +22,7 @@ "foundation-models": "pnpm -F=@sap-ai-sdk/foundation-models", "orchestration": "pnpm -F=@sap-ai-sdk/orchestration", "core": "pnpm -F=@sap-ai-sdk/core", + "langchain": "pnpm -F=@sap-ai-sdk/langchain", "e2e-tests": "pnpm -F=@sap-ai-sdk/e2e-tests", "type-tests": "pnpm -F=@sap-ai-sdk/type-tests", "smoke-tests": "pnpm -F=@sap-ai-sdk/smoke-tests", diff --git a/packages/langchain/src/openai/__snapshots__/util.test.ts.snap b/packages/langchain/src/openai/__snapshots__/util.test.ts.snap index ec5325db1..09fa9e71d 100644 --- a/packages/langchain/src/openai/__snapshots__/util.test.ts.snap +++ b/packages/langchain/src/openai/__snapshots__/util.test.ts.snap @@ -48,52 +48,3 @@ exports[`Mapping Functions should parse an OpenAI response to a (LangChain) chat }, } `; - -exports[`Mapping Functions should parse an OpenAi response to a (Langchain) chat response 1`] = ` -{ - "generations": [ - { - "generationInfo": { - "finish_reason": "stop", - "function_call": undefined, - "index": 0, - "tool_calls": undefined, - }, - "message": { - "id": [ - "langchain_core", - "messages", - "AIMessage", - ], - "kwargs": { - "additional_kwargs": { - "finish_reason": "stop", - "function_call": undefined, - "index": 0, - "tool_call_id": "", - "tool_calls": undefined, - }, - "content": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.", - "invalid_tool_calls": [], - "response_metadata": {}, - "tool_calls": [], - }, - "lc": 1, - "type": "constructor", 
- }, - "text": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.", - }, - ], - "llmOutput": { - "created": 1725457796, - "id": "chatcmpl-A3kgOwg9B6j87n0IkoCFCUCxRSwQZ", - "model": "gpt-4-32k", - "object": "chat.completion", - "tokenUsage": { - "completionTokens": 22, - "promptTokens": 15, - "totalTokens": 37, - }, - }, -} -`; From 5be9a501846ce646e7baccd9d763e6e6b5a9e3e2 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 18:52:37 +0200 Subject: [PATCH 94/95] add langchain to root readme --- README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.md b/README.md index bba78674f..f09666fd8 100644 --- a/README.md +++ b/README.md @@ -11,6 +11,7 @@ Integrate chat completion into your business applications with SAP Cloud SDK for - [@sap-ai-sdk/ai-api](#sap-ai-sdkai-api) - [@sap-ai-sdk/foundation-models](#sap-ai-sdkfoundation-models) - [@sap-ai-sdk/orchestration](#sap-ai-sdkorchestration) + - [@sap-ai-sdk/langchain](#sap-ai-sdklangchain) - [SAP Cloud SDK for AI Sample Project](#sap-cloud-sdk-for-ai-sample-project) - [Support, Feedback, Contribution](#support-feedback-contribution) - [Security / Disclosure](#security--disclosure) @@ -50,6 +51,16 @@ $ npm install @sap-ai-sdk/ai-api This package incorporates generative AI foundation models into your AI activities in SAP AI Core and SAP AI Launchpad. +### @sap-ai-sdk/langchain + +This package provides LangChain model clients, built on top of the foundation model clients of the SAP Cloud SDK for AI. + +#### Installation + +``` +$ npm install @sap-ai-sdk/langchain +``` + #### Installation ``` From 09f86997d4211579199ebea04c7385ffeeb945e5 Mon Sep 17 00:00:00 2001 From: Tom Frenken Date: Thu, 19 Sep 2024 18:55:25 +0200 Subject: [PATCH 95/95] fix root readme ... 
--- README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index f09666fd8..11ec50b95 100644 --- a/README.md +++ b/README.md @@ -51,20 +51,20 @@ $ npm install @sap-ai-sdk/ai-api This package incorporates generative AI foundation models into your AI activities in SAP AI Core and SAP AI Launchpad. -### @sap-ai-sdk/langchain - -This package provides LangChain model clients, built on top of the foundation model clients of the SAP Cloud SDK for AI. - #### Installation ``` -$ npm install @sap-ai-sdk/langchain +$ npm install @sap-ai-sdk/foundation-models ``` +### @sap-ai-sdk/langchain + +This package provides LangChain model clients, built on top of the foundation model clients of the SAP Cloud SDK for AI. + #### Installation ``` -$ npm install @sap-ai-sdk/foundation-models +$ npm install @sap-ai-sdk/langchain ``` ## SAP Cloud SDK for AI Sample Project