Skip to content

Commit

Permalink
Fix update issues and streamline AI service architecture
Browse files Browse the repository at this point in the history
Bug Fixes:
- Resolved update issues experienced by some users in version 0.5.1

Architecture:
- Consolidated AI services into UnifiedAIService for improved maintainability
- Removed individual service files (OpenAI, Groq, LocalAI, OpenRouter) in favor of unified approach

Settings:
- Implemented EndpointManager for centralized API endpoint and key management
- Removed redundant API key and endpoint settings for individual services

UI/UX:
- Updated template list styling and removed separate CSS file
- Improved license key validation and template download process

Performance:
- Optimized template handling by removing separate functions for filtering and parsing

Cleanup:
- Removed deprecated files and functions related to max tokens and status bar updates
- Consolidated template-related settings and functions for better organization

Documentation:
- Updated manifest.json and versions.json to reflect latest changes and fixes
  • Loading branch information
SystemSculpt committed Jul 25, 2024
1 parent 4de3010 commit 4534a2f
Show file tree
Hide file tree
Showing 38 changed files with 269 additions and 3,028 deletions.
2 changes: 1 addition & 1 deletion manifest.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,6 @@
"authorUrl": "systemsculpt.com",
"fundingUrl": "https://www.patreon.com/SystemSculpt",
"minAppVersion": "1.5.0",
"version": "0.5.1",
"version": "0.5.2",
"isDesktopOnly": true
}
173 changes: 123 additions & 50 deletions src/api/AIService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,18 @@ export class AIService {
private static instance: AIService;
private services: { [key in AIProvider]: UnifiedAIService };
private cachedModels: { [key: string]: Model[] } = {};
private settings: {
openAIApiKey: string;
groqAPIKey: string;
openRouterAPIKey: string;
apiEndpoint: string;
localEndpoint?: string;
temperature: number;
showopenAISetting: boolean;
showgroqSetting: boolean;
showlocalEndpointSetting: boolean;
showopenRouterSetting: boolean;
};

private constructor(settings: {
openAIApiKey: string;
Expand All @@ -16,7 +28,12 @@ export class AIService {
apiEndpoint: string;
localEndpoint?: string;
temperature: number;
showopenAISetting: boolean;
showgroqSetting: boolean;
showlocalEndpointSetting: boolean;
showopenRouterSetting: boolean;
}) {
this.settings = settings;
this.services = {
openai: new UnifiedAIService(
settings.openAIApiKey,
Expand Down Expand Up @@ -50,17 +67,33 @@ export class AIService {
apiEndpoint: string;
localEndpoint?: string;
temperature: number;
showopenAISetting: boolean;
showgroqSetting: boolean;
showlocalEndpointSetting: boolean;
showopenRouterSetting: boolean;
},
forceNewInstance: boolean = false
): Promise<AIService> {
if (!AIService.instance || forceNewInstance) {
AIService.instance = new AIService(settings);
await AIService.instance.initializeModelCache();
} else {
AIService.instance.updateSettings(settings);
logger.log('AIService.getInstance called');
try {
if (!AIService.instance || forceNewInstance) {
logger.log('Creating new AIService instance');
AIService.instance = new AIService(settings);
logger.log('AIService instance created, initializing model cache');
await AIService.instance.initializeModelCache();
logger.log(
'AIService.getInstance: AIService instance created and model cache initialized'
);
} else {
logger.log('Updating existing AIService instance');
AIService.instance.updateSettings(settings);
logger.log('AIService.getInstance: AIService instance updated');
}
return AIService.instance;
} catch (error) {
logger.error('Error in AIService.getInstance:', error);
throw error; // Re-throw the error after logging
}

return AIService.instance;
}

public async ensureModelCacheInitialized(): Promise<void> {
Expand Down Expand Up @@ -257,55 +290,95 @@ export class AIService {
}

public async initializeModelCache(): Promise<void> {
if (Object.keys(this.cachedModels).length > 0) return;
logger.log('Initializing model cache');
if (Object.keys(this.cachedModels).length > 0) {
logger.log('Model cache already initialized, skipping');
return;
}

const providers: AIProvider[] = ['local', 'openai', 'groq', 'openRouter'];
const fetchPromises: Promise<void>[] = [];

await Promise.all(
providers.map(async provider => {
try {
// Skip providers without valid API keys
if (!this.services[provider].hasValidApiKey()) {
logger.log(
`Skipping ${provider} model fetch: No valid API key provided`
);
this.cachedModels[provider] = [];
return;
}
for (const provider of providers) {
fetchPromises.push(this.fetchModelsForProvider(provider));
}

await Promise.all(fetchPromises);

logger.log('Model cache initialization completed');
}

this.cachedModels[provider] = await this.services[
provider
].getModels();
logger.log(`Successfully fetched models for ${provider}`);
} catch (error: unknown) {
if (error instanceof Error) {
logger.error(`Error fetching ${provider} models:`, error.message);
} else {
logger.error(`Unknown error fetching ${provider} models`);
}
if (
typeof error === 'object' &&
error !== null &&
'status' in error
) {
if (error.status === 404) {
logger.error(
`${provider} API endpoint not found. Please check the API documentation and your settings.`
);
} else if (error.status === 401) {
logger.error(
`Invalid ${provider} API key. Please check your settings.`
);
}
}
this.cachedModels[provider] = [];
private async fetchModelsForProvider(provider: AIProvider): Promise<void> {
logger.log(`Initializing models for provider: ${provider}`);
try {
const isEnabled = this.isProviderEnabled(provider);
const hasValidApiKey = this.services[provider].hasValidApiKey();
const hasValidEndpoint = this.hasValidEndpoint(provider);

if (!isEnabled || !hasValidApiKey || !hasValidEndpoint) {
logger.log(
`Skipping ${provider} model fetch: Provider is disabled or no valid API key/endpoint provided`
);
this.cachedModels[provider] = [];
return;
}

logger.log(`Fetching models for ${provider}`);
const models = await Promise.race([
this.services[provider].getModels(),
new Promise<Model[]>((_, reject) =>
setTimeout(() => reject(new Error('Timeout')), 10000)
),
]);
this.cachedModels[provider] = models;
logger.log(
`Successfully fetched ${models.length} models for ${provider}`
);
} catch (error: unknown) {
logger.error(`Error fetching ${provider} models:`, error);
if (error instanceof Error && 'status' in error) {
const statusError = error as { status: number };
if (statusError.status === 404) {
logger.error(
`${provider} API endpoint not found. Please check the API documentation and your settings.`
);
} else if (statusError.status === 401) {
logger.error(
`Invalid ${provider} API key. Please check your settings.`
);
}
})
);
}
this.cachedModels[provider] = [];
}
}

// Log the number of models fetched for each provider
for (const [provider, models] of Object.entries(this.cachedModels)) {
logger.log(`Fetched ${models.length} models for ${provider}`);
/**
 * Whether the user has toggled the given provider on in the plugin settings.
 * Any provider not covered by an explicit settings flag is treated as disabled.
 */
private isProviderEnabled(provider: AIProvider): boolean {
  if (provider === 'openai') return this.settings.showopenAISetting;
  if (provider === 'groq') return this.settings.showgroqSetting;
  if (provider === 'openRouter') return this.settings.showopenRouterSetting;
  if (provider === 'local') return this.settings.showlocalEndpointSetting;
  // Unknown provider: conservatively report it as disabled.
  return false;
}

/**
 * Whether a usable API endpoint is available for the given provider.
 * groq and openRouter ship with fixed endpoints and always pass; openai and
 * local require a non-blank endpoint string in settings. Unknown providers
 * are reported as having no valid endpoint.
 */
private hasValidEndpoint(provider: AIProvider): boolean {
  if (provider === 'groq' || provider === 'openRouter') {
    // Endpoint is baked into the service — nothing to validate.
    return true;
  }
  if (provider === 'openai') {
    return this.settings.apiEndpoint.trim().length > 0;
  }
  if (provider === 'local') {
    // localEndpoint is optional; missing, empty, or whitespace-only fails.
    return (this.settings.localEndpoint ?? '').trim().length > 0;
  }
  return false;
}
}
34 changes: 0 additions & 34 deletions src/api/AIServiceInterface.ts

This file was deleted.

Loading

0 comments on commit 4534a2f

Please sign in to comment.