Skip to content

Commit

Permalink
Refactor Model limit logic
Browse files Browse the repository at this point in the history
  • Loading branch information
ahaapple committed Jan 12, 2025
1 parent a6b9b05 commit 446ad07
Show file tree
Hide file tree
Showing 20 changed files with 88 additions and 50 deletions.
22 changes: 6 additions & 16 deletions frontend/app/api/search/route.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { auth } from '@/auth';
import { NextRequest, NextResponse } from 'next/server';

import { isProModel, O1_MIMI, O1_PREVIEW, validModel } from '@/lib/model';
import { O1_MIMI, O1_PREVIEW, validModel } from '@/lib/llm/model';
import { logError } from '@/lib/log';
import { streamController } from '@/lib/llm/utils';
import { SearchCategory } from '@/lib/types';
Expand All @@ -12,7 +12,7 @@ import { o1Answer } from '@/lib/tools/o1-answer';
import { productSearch } from '@/lib/tools/product';
import { indieMakerSearch } from '@/lib/tools/indie';
import { handleRateLimit } from '@/lib/ratelimit';
import { isProUser } from '@/lib/shared-utils';
import { checkModelAccess, isProUser } from '@/lib/shared-utils';
import { chat } from '@/lib/tools/chat';

const updateSource = function (model, source, messages, isSearch) {
Expand Down Expand Up @@ -44,22 +44,12 @@ export async function POST(req: NextRequest) {

let { model, source, messages, profile, isSearch, questionLanguage, answerLanguage, summary } = await req.json();

if (isProModel(model) && !isPro) {
return NextResponse.json(
{
error: 'You need to upgrade a pro plan',
},
{ status: 429 },
);
if (!validModel(model)) {
return NextResponse.json({ error: 'Please choose a valid model' }, { status: 400 });
}

if (!validModel(model)) {
return NextResponse.json(
{
error: 'Please choose a valid model',
},
{ status: 400 },
);
if (!checkModelAccess(model, session?.user)) {
return NextResponse.json({ error: 'You need to upgrade your plan to use this model' }, { status: 429 });
}

source = updateSource(model, source, messages, isSearch);
Expand Down
2 changes: 1 addition & 1 deletion frontend/components/search/model-selection.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { RowSelectItem, Select, SelectContent, SelectItem, SelectTrigger, Select
import { Box } from 'lucide-react';
import { useModelStore, useUserStore } from '@/lib/store/local-store';
import { useSigninModal } from '@/hooks/use-signin-modal';
import { Claude_35_Haiku, Claude_35_Sonnet, DEEPSEEK, GEMIMI_2, GPT_4o, GPT_4o_MIMI, O1_MIMI, O1_PREVIEW } from '@/lib/model';
import { Claude_35_Haiku, Claude_35_Sonnet, DEEPSEEK, GEMIMI_2, GPT_4o, GPT_4o_MIMI, O1_MIMI, O1_PREVIEW } from '@/lib/llm/model';
import { isProUser, isPremiumUser } from '@/lib/shared-utils';
import { useUpgradeModal } from '@/hooks/use-upgrade-modal';

Expand Down
2 changes: 1 addition & 1 deletion frontend/components/search/search-bar.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import { Switch } from '@/components/ui/switch';
import { Label } from '@/components/ui/label';
import { SearchType } from '@/lib/types';
import WebImageModal, { WebImageFile } from '@/components/modal/web-images-model';
import { isImageInputModel } from '@/lib/model';
import { isImageInputModel } from '@/lib/llm/model';
import { SearchSettingsDialog } from '@/components/search/search-settings';
import { useCompressHistory } from '@/hooks/use-compress-history';

Expand Down
2 changes: 1 addition & 1 deletion frontend/hooks/use-compress-history.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { isProModel } from '@/lib/model';
import { isProModel } from '@/lib/llm/model';
import { useSearchStore } from '@/lib/store/local-history';
import { useConfigStore, useSearchState } from '@/lib/store/local-store';
import { compressHistory } from '@/lib/tools/compress-history';
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/llm/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import 'server-only';
import { CoreMessage, CoreUserMessage, ImagePart, LanguageModel, TextPart } from 'ai';
import { createOpenAI } from '@ai-sdk/openai';
import { createAnthropic } from '@ai-sdk/anthropic';
import { Claude_35_Sonnet, DEEPSEEK, GPT_4o, GPT_4o_MIMI, O1_MIMI, O1_PREVIEW } from '@/lib/model';
import { Claude_35_Sonnet, DEEPSEEK, GPT_4o, GPT_4o_MIMI, O1_MIMI, O1_PREVIEW } from '@/lib/llm/model';
import { google } from '@ai-sdk/google';
import { Message } from '@/lib/types';
import { DEEPSEEK_API_KEY, OPENAI_BASE_URL } from '@/lib/env';
Expand Down
48 changes: 48 additions & 0 deletions frontend/lib/llm/model.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
// Canonical model identifier strings.
// NOTE(review): "MIMI" / "GEMIMI" look like typos for "MINI" / "GEMINI", but
// these names are the public API of this module — renaming would break every
// importer, so they are kept as-is.
export const GPT_4o_MIMI = 'gpt-4o-mini';
export const GPT_4o = 'gpt-4o';
export const O1_MIMI = 'o1-mini';
export const O1_PREVIEW = 'o1-preview';
export const Claude_35_Haiku = 'claude-3-5-haiku-20241022';
export const Claude_35_Sonnet = 'claude-3-5-sonnet-20241022';
export const DEEPSEEK = 'deepseek-chat';
export const GEMIMI_2 = 'gemini-2.0-flash-exp';

// Subscription tier required to use a model.
export enum ModelType {
    FREE = 'FREE',
    PRO = 'PRO',
    PREMIUM = 'PREMIUM',
}

// Per-model capability/limit configuration.
export interface ModelConfig {
    readonly type: ModelType;
    readonly hasImageInput: boolean;
}

// Single source of truth mapping model id -> tier + capabilities.
export const MODEL_CONFIG = {
    [GPT_4o_MIMI]: { type: ModelType.FREE, hasImageInput: true },
    [DEEPSEEK]: { type: ModelType.FREE, hasImageInput: false },
    [GPT_4o]: { type: ModelType.PRO, hasImageInput: true },
    [O1_MIMI]: { type: ModelType.PRO, hasImageInput: false },
    [O1_PREVIEW]: { type: ModelType.PREMIUM, hasImageInput: false },
    [Claude_35_Sonnet]: { type: ModelType.PRO, hasImageInput: true },
    [Claude_35_Haiku]: { type: ModelType.PRO, hasImageInput: false },
    [GEMIMI_2]: { type: ModelType.PRO, hasImageInput: true },
} as const;

/**
 * Looks up the configuration for a model id.
 *
 * Guards with an own-property check so inherited `Object.prototype` members
 * ('toString', 'constructor', …) are never treated as valid models — plain
 * bracket indexing would return a truthy function for those keys, which
 * previously made `validModel('toString')` return true.
 *
 * @param model - model identifier (untrusted; may come from a request body)
 * @returns the model's config, or null when the model is unknown
 */
export function getModelAccess(model: string): ModelConfig | null {
    if (!Object.prototype.hasOwnProperty.call(MODEL_CONFIG, model)) return null;
    return (MODEL_CONFIG as Record<string, ModelConfig>)[model] ?? null;
}

/** True when `model` is a known model id. */
export const validModel = (model: string): boolean => getModelAccess(model) !== null;

/** True when `model` requires the PRO tier. */
export const isProModel = (model: string): boolean => getModelAccess(model)?.type === ModelType.PRO;

/** True when `model` requires the PREMIUM tier. */
export const isPremiumModel = (model: string): boolean => getModelAccess(model)?.type === ModelType.PREMIUM;

/** True when `model` accepts image input. */
export const isImageInputModel = (model: string): boolean => !!getModelAccess(model)?.hasImageInput;
18 changes: 0 additions & 18 deletions frontend/lib/model.ts

This file was deleted.

18 changes: 18 additions & 0 deletions frontend/lib/shared-utils.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
// The utility functions in this file are shared between the client and the server.

import { getModelAccess, ModelType } from '@/lib/llm/model';

export function isValidUrl(input: string): boolean {
// return early if the url cannot be parsed
if ('canParse' in URL && !URL.canParse(input)) return false;
Expand Down Expand Up @@ -65,6 +67,22 @@ export function isPremiumUser(user: any): boolean {
return user?.level === 2 && isSubscriptionActive(user);
}

/**
 * Whether `user` may use `model`, based on the model's required
 * subscription tier: FREE models are open to everyone, PRO models need an
 * active pro subscription, PREMIUM models need an active premium one.
 * Unknown models are always rejected.
 */
export function checkModelAccess(model: string, user: any): boolean {
    const access = getModelAccess(model);
    if (!access) return false;

    if (access.type === ModelType.FREE) return true;
    if (access.type === ModelType.PRO) return isProUser(user);
    if (access.type === ModelType.PREMIUM) return isPremiumUser(user);

    // Unreachable for the current ModelType enum; fail closed regardless.
    return false;
}

export function extractFirstImageUrl(text: string): string | null {
const regex = /https?:\/\/[^ ]+\.(jpg|jpeg|png|gif|bmp|webp)/i;
const match = text.match(regex);
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/store/local-store.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { create } from 'zustand';
import { User } from '@/lib/types';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { persist } from 'zustand/middleware';

interface ProfileState {
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/auto.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { convertToCoreMessages, getLLM, getMaxOutputToken } from '@/lib/llm/llm'
import { AutoAnswerPrompt } from '@/lib/llm/prompt';
import { getHistory, getHistoryMessages, streamResponse } from '@/lib/llm/utils';
import { logError } from '@/lib/log';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { getSearchEngine } from '@/lib/search/search';
import { extractErrorMessage, saveMessages } from '@/lib/server-utils';
import { accessWebPage } from '@/lib/tools/access';
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { convertToCoreMessages, getLLM, getMaxOutputToken } from '@/lib/llm/llm'
import { ChatPrompt } from '@/lib/llm/prompt';
import { getHistoryMessages, streamResponse } from '@/lib/llm/utils';
import { logError } from '@/lib/log';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { extractErrorMessage, saveMessages } from '@/lib/server-utils';
import { Message as StoreMessage, SearchCategory, TextSource, VideoSource } from '@/lib/types';
import { streamText } from 'ai';
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/compress-history.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import { getLLM } from '@/lib/llm/llm';
import { formatHistoryMessages } from '@/lib/llm/utils';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { Message } from '@/lib/types';
import { generateText } from 'ai';

Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/generate-title.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { getLLM } from '@/lib/llm/llm';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { generateText } from 'ai';

export async function generateTitle(query: string): Promise<string> {
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/generate-ui.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { convertToCoreMessages, getLLM } from '@/lib/llm/llm';
import { log, logError } from '@/lib/log';
import { streamText } from 'ai';
import { SearchCategory, Message as StoreMessage } from '@/lib/types';
import { Claude_35_Sonnet } from '@/lib/model';
import { Claude_35_Sonnet } from '@/lib/llm/model';
import { extractErrorMessage, saveMessages } from '@/lib/server-utils';
import { getSearchEngine, TEXT_LIMIT } from '@/lib/search/search';
import util from 'util';
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/improve-image-prompt.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { getLLM } from '@/lib/llm/llm';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { format } from '@/lib/server-utils';
import { generateText } from 'ai';

Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/indie.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import 'server-only';
import { getLLM } from '@/lib/llm/llm';
import { getHistory, streamResponse } from '@/lib/llm/utils';
import { logError } from '@/lib/log';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { getSearchEngine, TEXT_LIMIT } from '@/lib/search/search';
import { saveMessages } from '@/lib/server-utils';
import { directlyAnswer } from '@/lib/tools/answer';
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/knowledge-base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import 'server-only';
import { getLLM } from '@/lib/llm/llm';
import { getHistory, streamResponse } from '@/lib/llm/utils';
import { logError } from '@/lib/log';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { getVectorSearch } from '@/lib/search/search';
import { saveMessages } from '@/lib/server-utils';
import { directlyAnswer } from '@/lib/tools/answer';
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/o1-answer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { getLLM } from '@/lib/llm/llm';
import { DirectAnswerPrompt } from '@/lib/llm/prompt';
import { getHistoryMessages, streamResponse } from '@/lib/llm/utils';
import { logError } from '@/lib/log';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { getSearchEngine } from '@/lib/search/search';
import { extractErrorMessage, saveMessages } from '@/lib/server-utils';
import { getRelatedQuestions } from '@/lib/tools/related';
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/product.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import 'server-only';
import { getLLM } from '@/lib/llm/llm';
import { getHistory, streamResponse } from '@/lib/llm/utils';
import { logError } from '@/lib/log';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { getSearchEngine, TEXT_LIMIT } from '@/lib/search/search';
import { saveMessages } from '@/lib/server-utils';
import { directlyAnswer } from '@/lib/tools/answer';
Expand Down
2 changes: 1 addition & 1 deletion frontend/lib/tools/related.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { getLLM, StreamHandler } from '@/lib/llm/llm';
import { MoreQuestionsPrompt } from '@/lib/llm/prompt';
import { logError } from '@/lib/log';
import { GPT_4o_MIMI } from '@/lib/model';
import { GPT_4o_MIMI } from '@/lib/llm/model';
import { TextSource } from '@/lib/types';
import { streamText } from 'ai';
import util from 'util';
Expand Down

0 comments on commit 446ad07

Please sign in to comment.