Skip to content

Commit

Permalink
chore: Update Azure OpenAI types (#141)
Browse files Browse the repository at this point in the history
* update types

* zod generated schema

* fix: Changes from lint

* fix type test

* adjust public api check

* remove zod

* Revert "remove zod"

This reverts commit d4ecd6c.

* fix test + zod for embedding

* fix: Changes from lint

* regenerate with discriminator

* temp fix to skip linting

* import apiVersion in tests

* add missing error types

---------

Co-authored-by: cloud-sdk-js <[email protected]>
  • Loading branch information
deekshas8 and cloud-sdk-js authored Sep 19, 2024
1 parent ba9133b commit 54ba205
Show file tree
Hide file tree
Showing 113 changed files with 4,211 additions and 1,040 deletions.
9 changes: 8 additions & 1 deletion eslint.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ export default [
'**/*.d.ts',
'**/dist/**/*',
'**/coverage/**/*',
'packages/ai-api/src/client/**/*'
'packages/ai-api/src/client/**/*',
'packages/foundation-models/src/azure-openai/client/inference/schema/on-your-data-system-assigned-managed-identity-authentication-options.ts',
]
},
{
Expand All @@ -33,6 +34,12 @@ export default [
'jsdoc/require-jsdoc': 'off'
}
},
{
files: ['packages/foundation-models/src/azure-openai/client/inference/schema/*.ts'],
rules: {
'jsdoc/check-indentation': 'off'
}
},
{
ignores: ['**/dist-cjs/**/*']
}
Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
"@sap-cloud-sdk/generator-common": "^3.21.0",
"@sap-cloud-sdk/http-client": "^3.21.0",
"@sap-cloud-sdk/util": "^3.21.0",
"@sap-cloud-sdk/openapi-generator": "^3.21.0",
"@types/jest": "^29.5.13",
"@types/jsonwebtoken": "^9.0.7",
"@types/mock-fs": "^4.13.4",
Expand Down
3 changes: 1 addition & 2 deletions packages/ai-api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
"@sap-ai-sdk/core": "workspace:^"
},
"devDependencies": {
"typescript": "^5.6.2",
"@sap-cloud-sdk/openapi-generator": "^3.21.0"
"typescript": "^5.6.2"
}
}
11 changes: 6 additions & 5 deletions packages/foundation-models/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,14 +20,16 @@
"internal.d.ts"
],
"scripts": {
"compile": "pnpm run generate-zod && tsc",
"compile": "tsc",
"compile:cjs": "tsc -p tsconfig.cjs.json",
"test": "pnpm run generate-zod && NODE_OPTIONS=--experimental-vm-modules jest",
"test": "NODE_OPTIONS=--experimental-vm-modules jest",
"lint": "eslint \"**/*.ts\" && prettier . --config ../../.prettierrc --ignore-path ../../.prettierignore -c",
"lint:fix": "eslint \"**/*.ts\" --fix && prettier . --config ../../.prettierrc --ignore-path ../../.prettierignore -w --log-level error",
"check:public-api": "node --loader ts-node/esm ../../scripts/check-public-api-cli.ts",
"generate-zod": "ts-to-zod src/azure-openai/azure-openai-types.ts src/azure-openai/azure-openai-types-schema.ts",
"postgenerate-zod": "sed -i'' -e \"s|export const|\\/**\\n * @internal\\n *\\/\\nexport const|\" src/azure-openai/azure-openai-types-schema.ts"
"generate:azure-openai": "openapi-generator --generateESM --clearOutputDir -i ./src/azure-openai/spec/inference.yaml -o ./src/azure-openai/client --schemaPrefix AzureOpenAi",
"postgenerate:azure-openai": "rm ./src/azure-openai/client/inference/*.ts && pnpm lint:fix",
"generate-zod": "ts-to-zod src/azure-openai/azure-openai-embedding-types.ts src/azure-openai/ts-to-zod/azure-openai-embedding-types.zod.ts",
"postgenerate-zod": "sed -i'' -e \"s|export const|\\/**\\n * @internal\\n *\\/\\nexport const|\" src/azure-openai/ts-to-zod/azure-openai-embedding-types.zod.ts"
},
"dependencies": {
"@sap-ai-sdk/ai-api": "workspace:^",
Expand All @@ -38,7 +40,6 @@
"@sap-cloud-sdk/util": "^3.21.0"
},
"devDependencies": {
"@sap-cloud-sdk/openapi-generator": "^3.21.0",
"nock": "^13.5.5",
"ts-to-zod": "^3.13.0",
"typescript": "^5.6.2",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,14 @@ import {
mockInference,
parseMockResponse
} from '../../../../test-util/mock-http.js';
import {
AzureOpenAiChatCompletionOutput,
AzureOpenAiChatMessage
} from './azure-openai-types.js';
import { AzureOpenAiChatClient } from './azure-openai-chat-client.js';
import type { AzureOpenAiCreateChatCompletionResponse } from './client/inference/schema/index.js';
import { apiVersion } from './model-types.js';

describe('Azure OpenAI chat client', () => {
const chatCompletionEndpoint = {
url: 'inference/deployments/1234/chat/completions',
apiVersion: '2024-02-01'
apiVersion
};

const client = new AzureOpenAiChatClient({ deploymentId: '1234' });
Expand All @@ -30,16 +28,17 @@ describe('Azure OpenAI chat client', () => {
const prompt = {
messages: [
{
role: 'user',
role: 'user' as const,
content: 'Where is the deepest place on earth located'
}
] as AzureOpenAiChatMessage[]
]
};

const mockResponse = parseMockResponse<AzureOpenAiChatCompletionOutput>(
'foundation-models',
'azure-openai-chat-completion-success-response.json'
);
const mockResponse =
parseMockResponse<AzureOpenAiCreateChatCompletionResponse>(
'foundation-models',
'azure-openai-chat-completion-success-response.json'
);

mockInference(
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,10 @@ import {
getDeploymentId,
type ModelDeployment
} from '@sap-ai-sdk/ai-api/internal.js';
import type { AzureOpenAiChatModel } from './model-types.js';
import type { AzureOpenAiChatCompletionParameters } from './azure-openai-types.js';
import type { AzureOpenAiCreateChatCompletionRequest } from './client/inference/schema/index.js';
import { apiVersion, type AzureOpenAiChatModel } from './model-types.js';
import { AzureOpenAiChatCompletionResponse } from './azure-openai-chat-completion-response.js';

const apiVersion = '2024-02-01';

/**
* Azure OpenAI client for chat completion.
*/
Expand All @@ -26,7 +24,7 @@ export class AzureOpenAiChatClient {
* @returns The completion result.
*/
async run(
data: AzureOpenAiChatCompletionParameters,
data: AzureOpenAiCreateChatCompletionRequest,
requestConfig?: CustomRequestConfig
): Promise<AzureOpenAiChatCompletionResponse> {
const deploymentId = await getDeploymentId(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import { parseMockResponse } from '../../../../test-util/mock-http.js';
import { AzureOpenAiChatCompletionResponse } from './azure-openai-chat-completion-response.js';
import { AzureOpenAiChatCompletionOutput } from './azure-openai-types.js';
import { azureOpenAiChatCompletionOutputSchema } from './azure-openai-types-schema.js';
import { AzureOpenAiCreateChatCompletionResponse } from './client/inference/schema/index.js';

describe('OpenAI chat completion response', () => {
const mockResponse = parseMockResponse<AzureOpenAiChatCompletionOutput>(
'foundation-models',
'azure-openai-chat-completion-success-response.json'
);
const mockResponse =
parseMockResponse<AzureOpenAiCreateChatCompletionResponse>(
'foundation-models',
'azure-openai-chat-completion-success-response.json'
);
const rawResponse = {
data: mockResponse,
status: 200,
Expand All @@ -17,7 +17,7 @@ describe('OpenAI chat completion response', () => {
const response = new AzureOpenAiChatCompletionResponse(rawResponse);

it('should return the completion response', () => {
const data = azureOpenAiChatCompletionOutputSchema.parse(response.data);
expect(data).toStrictEqual(mockResponse);
// TODO: Use zod schema to validate the response
expect(response.data).toStrictEqual(mockResponse);
});
});
Original file line number Diff line number Diff line change
@@ -1,12 +1,9 @@
import { HttpResponse } from '@sap-cloud-sdk/http-client';
import { createLogger } from '@sap-cloud-sdk/util';
import {
AzureOpenAiChatCompletionOutput,
AzureOpenAiUsage
} from './azure-openai-types.js';
import type { AzureOpenAiCreateChatCompletionResponse } from './client/inference/schema/index.js';

const logger = createLogger({
package: 'gen-ai-hub',
package: 'foundation-models',
messageContext: 'azure-openai-chat-completion-response'
});

Expand All @@ -17,8 +14,7 @@ export class AzureOpenAiChatCompletionResponse {
/**
* The chat completion response.
*/
public readonly data: AzureOpenAiChatCompletionOutput;

public readonly data: AzureOpenAiCreateChatCompletionResponse;
constructor(public readonly rawResponse: HttpResponse) {
this.data = rawResponse.data;
}
Expand All @@ -27,7 +23,7 @@ export class AzureOpenAiChatCompletionResponse {
* Usage of tokens in the response.
* @returns Token usage.
*/
getTokenUsage(): AzureOpenAiUsage {
getTokenUsage(): this['data']['usage'] {
return this.data.usage;
}

Expand All @@ -36,7 +32,9 @@ export class AzureOpenAiChatCompletionResponse {
* @param choiceIndex - The index of the choice to parse.
* @returns The finish reason.
*/
getFinishReason(choiceIndex = 0): string | undefined {
getFinishReason(
choiceIndex = 0
): this['data']['choices'][0]['finish_reason'] {
this.logInvalidChoiceIndex(choiceIndex);
return this.data.choices[choiceIndex]?.finish_reason;
}
Expand All @@ -46,7 +44,7 @@ export class AzureOpenAiChatCompletionResponse {
* @param choiceIndex - The index of the choice to parse.
* @returns The message content.
*/
getContent(choiceIndex = 0): string | undefined {
getContent(choiceIndex = 0): string | undefined | null {
this.logInvalidChoiceIndex(choiceIndex);
return this.data.choices[choiceIndex]?.message?.content;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,14 @@ import {
import {
AzureOpenAiEmbeddingOutput,
AzureOpenAiEmbeddingParameters
} from './azure-openai-types.js';
} from './azure-openai-embedding-types.js';
import { AzureOpenAiEmbeddingClient } from './azure-openai-embedding-client.js';
import { apiVersion } from './model-types.js';

describe('Azure OpenAI embedding client', () => {
const embeddingsEndpoint = {
url: 'inference/deployments/1234/embeddings',
apiVersion: '2024-02-01'
apiVersion
};

const client = new AzureOpenAiEmbeddingClient({ deploymentId: '1234' });
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,8 @@ import {
type ModelDeployment
} from '@sap-ai-sdk/ai-api/internal.js';
import { AzureOpenAiEmbeddingResponse } from './azure-openai-embedding-response.js';
import type { AzureOpenAiEmbeddingParameters } from './azure-openai-types.js';
import type { AzureOpenAiEmbeddingModel } from './model-types.js';

const apiVersion = '2024-02-01';
import type { AzureOpenAiEmbeddingParameters } from './azure-openai-embedding-types.js';
import { apiVersion, type AzureOpenAiEmbeddingModel } from './model-types.js';

/**
* Azure OpenAI client for embeddings.
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { parseMockResponse } from '../../../../test-util/mock-http.js';
import { AzureOpenAiEmbeddingResponse } from './azure-openai-embedding-response.js';
import { azureOpenAiEmbeddingOutputSchema } from './azure-openai-types-schema.js';
import { azureOpenAiEmbeddingOutputSchema } from './ts-to-zod/azure-openai-embedding-types.zod.js';

describe('Azure OpenAI embedding response', () => {
const mockResponse = parseMockResponse<AzureOpenAiEmbeddingResponse>(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { HttpResponse } from '@sap-cloud-sdk/http-client';
import { createLogger } from '@sap-cloud-sdk/util';
import { AzureOpenAiEmbeddingOutput } from './azure-openai-types.js';
import { AzureOpenAiEmbeddingOutput } from './azure-openai-embedding-types.js';

const logger = createLogger({
package: 'gen-ai-hub',
package: 'foundation-models',
messageContext: 'azure-openai-embedding-response'
});

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
/**
 * Azure OpenAI embedding input parameters.
 */
export interface AzureOpenAiEmbeddingParameters {
  /**
   * Input text to get embeddings for, encoded as a string or an array of strings.
   * The number of input tokens varies depending on what model you are using.
   * Unless you're embedding code, we suggest replacing newlines (`\n`) in your
   * input with a single space, as we have observed inferior results when
   * newlines are present.
   */
  input: string[] | string;
  /**
   * A unique identifier representing your end-user. This helps Azure OpenAI
   * monitor and detect abuse. Do not pass PII identifiers; instead use
   * pseudonymized values such as GUIDs.
   */
  user?: string;
}

/**
 * Azure OpenAI embedding output.
 */
export interface AzureOpenAiEmbeddingOutput {
  /**
   * Object type, always `'list'`.
   */
  object: 'list';
  /**
   * Model used for the embedding.
   */
  model: string;
  /**
   * Array of embedding results, one entry per input item.
   * Declared as an array (not a single-element tuple) because the request
   * accepts `input: string[]` and the service returns one embedding per input.
   */
  data: {
    /**
     * Object type, always `'embedding'`.
     */
    object: 'embedding';
    /**
     * Embedding vector (e.g. of size `1536` for Azure OpenAI's default
     * embedding models).
     */
    embedding: number[];
    /**
     * Index of the input item this embedding corresponds to.
     */
    index: number;
  }[];
  /**
   * Token usage.
   */
  usage: {
    /**
     * Tokens consumed for the input prompt.
     */
    prompt_tokens: number;
    /**
     * Total tokens consumed.
     */
    total_tokens: number;
  };
}
Loading

0 comments on commit 54ba205

Please sign in to comment.