Merge pull request #20 from ferenci84/add_temperature
add OPENAI_TEMPERATURE configuration option
Sitoi authored Feb 5, 2025
2 parents 4aee0bc + ee9c9dd commit 7197e4c
Showing 5 changed files with 23 additions and 11 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -67,6 +67,7 @@ In the VSCode settings, locate the "ai-commit" configuration options and configu
| AZURE_API_VERSION | string | None | No | AZURE_API_VERSION |
| AI_COMMIT_LANGUAGE | string | en | Yes | Supports 19 languages |
| SYSTEM_PROMPT | string | None | No | Custom system prompt |
| OPENAI_TEMPERATURE | number | 0.7 | No | Controls randomness in the output. Range: 0-2. Lower values: more focused; higher values: more creative |

## ⌨️ Local Development

1 change: 1 addition & 0 deletions README.zh_CN.md
@@ -67,6 +67,7 @@
| AZURE_API_VERSION | string | None || AZURE_API_VERSION |
| AI_COMMIT_LANGUAGE | string | en || Supports 19 languages |
| SYSTEM_PROMPT | string | None || Custom system prompt |
| OPENAI_TEMPERATURE | number | 0.7 || Controls randomness in the output. Range: 0-2. Lower values: more focused; higher values: more creative |

## ⌨️ Local Development

7 changes: 7 additions & 0 deletions package.json
@@ -117,6 +117,13 @@
"type": "string",
"default": "",
"description": "Custom system prompt for generating commit messages"
},
"ai-commit.OPENAI_TEMPERATURE": {
"type": "number",
"default": 0.7,
"minimum": 0,
"maximum": 2,
"description": "OpenAI temperature setting (0-2). Higher values make output more random, lower values more deterministic."
}
},
"title": "AI Commit"
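For context, a setting contributed this way is read at runtime through the VS Code configuration API. A minimal sketch of how the extension side might pick it up (the `readTemperature` helper name is illustrative and not part of this commit; the `ai-commit` section and `OPENAI_TEMPERATURE` key come from the contribution above):

```ts
import * as vscode from 'vscode';

// Illustrative helper (not from this commit): read the contributed setting,
// falling back to the declared default when the user has not set it.
function readTemperature(): number {
  const config = vscode.workspace.getConfiguration('ai-commit');
  return config.get<number>('OPENAI_TEMPERATURE', 0.7);
}
```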
16 changes: 9 additions & 7 deletions src/config.ts
@@ -10,14 +10,16 @@ import { createOpenAIApi } from './openai-utils';
* @property {string} AZURE_API_VERSION - The version of Azure API.
* @property {string} AI_COMMIT_LANGUAGE - The language for AI commit messages.
* @property {string} SYSTEM_PROMPT - The system prompt for generating commit messages.
* @property {number} OPENAI_TEMPERATURE - The temperature setting for the OpenAI API.
*/
export enum ConfigKeys {
OPENAI_API_KEY = 'OPENAI_API_KEY',
OPENAI_BASE_URL = 'OPENAI_BASE_URL',
OPENAI_MODEL = 'OPENAI_MODEL',
AZURE_API_VERSION = 'AZURE_API_VERSION',
AI_COMMIT_LANGUAGE = 'AI_COMMIT_LANGUAGE',
SYSTEM_PROMPT = 'AI_COMMIT_SYSTEM_PROMPT'
SYSTEM_PROMPT = 'AI_COMMIT_SYSTEM_PROMPT',
OPENAI_TEMPERATURE = 'OPENAI_TEMPERATURE'
}

/**
@@ -34,9 +36,9 @@ export class ConfigurationManager {
this.disposable = vscode.workspace.onDidChangeConfiguration((event) => {
if (event.affectsConfiguration('ai-commit')) {
this.configCache.clear();
if (event.affectsConfiguration('ai-commit.OPENAI_BASE_URL') ||
event.affectsConfiguration('ai-commit.OPENAI_API_KEY')) {

if (event.affectsConfiguration('ai-commit.OPENAI_BASE_URL') ||
event.affectsConfiguration('ai-commit.OPENAI_API_KEY')) {
this.updateModelList();
}
}
@@ -69,14 +71,14 @@ export class ConfigurationManager {
try {
const openai = createOpenAIApi();
const models = await openai.models.list();

// Save available models to extension state
await this.context.globalState.update('availableModels', models.data.map(model => model.id));

// Get the current selected model
const config = vscode.workspace.getConfiguration('ai-commit');
const currentModel = config.get<string>('OPENAI_MODEL');

// If the current selected model is not in the available list, set it to the default value
const availableModels = models.data.map(model => model.id);
if (!availableModels.includes(currentModel)) {
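The call sites in `src/openai-utils.ts` below pass a fallback default as a second argument, so `getConfig` presumably resolves the key from the `ai-commit` settings section and caches it until the `onDidChangeConfiguration` handler above clears `configCache`. A hedged sketch of such a helper under those assumptions (the real implementation is not shown in this diff):

```ts
import * as vscode from 'vscode';

// Sketch only (assumption, not the committed code): a cached
// getConfig(key, defaultValue) helper in the spirit of ConfigurationManager.
class ConfigReaderSketch {
  private configCache = new Map<string, unknown>();

  getConfig<T>(key: string, defaultValue?: T): T {
    if (!this.configCache.has(key)) {
      const config = vscode.workspace.getConfiguration('ai-commit');
      // WorkspaceConfiguration.get() returns the user/workspace value,
      // or the supplied default when the key is unset.
      const value =
        defaultValue === undefined ? config.get<T>(key) : config.get<T>(key, defaultValue);
      this.configCache.set(key, value);
    }
    return this.configCache.get(key) as T;
  }
}
```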
9 changes: 5 additions & 4 deletions src/openai-utils.ts
@@ -53,13 +53,14 @@ export function createOpenAIApi() {
*/
export async function ChatGPTAPI(messages: ChatCompletionMessageParam[]) {
const openai = createOpenAIApi();
const model = ConfigurationManager.getInstance().getConfig<string>(
ConfigKeys.OPENAI_MODEL
);
const configManager = ConfigurationManager.getInstance();
const model = configManager.getConfig<string>(ConfigKeys.OPENAI_MODEL);
const temperature = configManager.getConfig<number>(ConfigKeys.OPENAI_TEMPERATURE, 0.7);

const completion = await openai.chat.completions.create({
model,
messages: messages as ChatCompletionMessageParam[]
messages: messages as ChatCompletionMessageParam[],
temperature
});

return completion.choices[0]!.message?.content;
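With the change above, every completion request now carries the configured temperature (defaulting to 0.7). A hypothetical usage sketch; the messages shown are made up for illustration, and only `ChatGPTAPI` itself comes from `src/openai-utils.ts`:

```ts
import { ChatGPTAPI } from './openai-utils';

async function example() {
  // Illustrative messages; ChatGPTAPI resolves the model and temperature
  // from the ai-commit configuration internally.
  const commitMessage = await ChatGPTAPI([
    { role: 'system', content: 'Generate a conventional commit message for the staged diff.' },
    { role: 'user', content: 'diff --git a/src/config.ts b/src/config.ts ...' }
  ]);
  console.log(commitMessage);
}
```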
