From 27d4ccb483b7d2bb82762792dd137b1572780813 Mon Sep 17 00:00:00 2001 From: OSH212 Date: Wed, 15 May 2024 17:31:11 -0400 Subject: [PATCH 1/6] Adding LiteLLM client --- src/llm/litellm_client.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 src/llm/litellm_client.py diff --git a/src/llm/litellm_client.py b/src/llm/litellm_client.py new file mode 100644 index 00000000..70fca5b0 --- /dev/null +++ b/src/llm/litellm_client.py @@ -0,0 +1,28 @@ +import os +from litellm import completion +from src.config import Config + +class LiteLLM: + def __init__(self): + self.config = Config() + + def set_api_key(self, model_id: str): + if "openrouter" in model_id: + os.environ["OPENROUTER_API_KEY"] = self.config.get_openrouter_api_key() #tested + elif "deepinfra" in model_id: + os.environ["DEEPINFRA_API_KEY"] = self.config.get_deepinfra_api_key() #tested + elif "openai" in model_id: + os.environ["OPENAI_API_KEY"] = self.config.get_openai_api_key() # not tested + elif "anthropic" in model_id: + os.environ["ANTHROPIC_API_KEY"] = self.config.get_anthropic_api_key() # not tested + elif "mistral" in model_id: + os.environ["MISTRAL_API_KEY"] = self.config.get_mistral_api_key() # not tested + elif "cohere" in model_id: + os.environ["COHERE_API_KEY"] = self.config.get_cohere_api_key() # not tested + # Add other providers as needed + + def inference(self, model_id: str, prompt: str) -> str: + self.set_api_key(model_id) + messages = [{"role": "user", "content": prompt.strip()}] + response = completion(model=model_id, messages=messages) + return response['choices'][0]['message']['content'] \ No newline at end of file From 261a6e2409fcd8c156956b553b7bbfa552fac1a0 Mon Sep 17 00:00:00 2001 From: OSH212 Date: Wed, 15 May 2024 17:33:03 -0400 Subject: [PATCH 2/6] Adding Lite LLM and a few providers/models. 
Tested with OpenRouter and DeepInfra models --- src/llm/llm.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/src/llm/llm.py b/src/llm/llm.py index b7ad6a73..fbbdd203 100644 --- a/src/llm/llm.py +++ b/src/llm/llm.py @@ -10,6 +10,7 @@ from .gemini_client import Gemini from .mistral_client import MistralAi from .groq_client import Groq +from .litellm_client import LiteLLM from src.state import AgentState @@ -42,6 +43,8 @@ def __init__(self, model_id: str = None): ], "GOOGLE": [ ("Gemini 1.0 Pro", "gemini-pro"), + ("Gemini 1.5 Flash", "gemini-1.5-flash"), + ("Gemini 1.5 Pro", "gemini-1.5-pro-latest"), ], "MISTRAL": [ ("Mistral 7b", "open-mistral-7b"), @@ -57,6 +60,20 @@ def __init__(self, model_id: str = None): ("Mixtral", "mixtral-8x7b-32768"), ("GEMMA 7B", "gemma-7b-it"), ], + "LITELLM": [ + ("OpenRouter: Llama 3 8B Instruct", "openrouter/meta-llama/llama-3-8b-instruct:extended"), + ("OpenRouter: Claude 3 Haiku", "openrouter/anthropic/claude-3-haiku"), + ("OpenRouter: Claude 3 Sonnet", "openrouter/anthropic/claude-3-sonnet"), + ("OpenRouter: Claude 3 Sonnet 20240229", "openrouter/anthropic/claude-3-sonnet-20240229"), + ("DeepInfra: Mixtral 8x7B Instruct", "deepinfra/mistralai/Mixtral-8x7B-Instruct-v0.1"), + ("DeepInfra: Dolphin 2.6 Mixtral", "deepinfra/cognitivecomputations/dolphin-2.6-mixtral-8x7b"), + ("OpenRouter: Gemini Pro 1.5", "openrouter/google/gemini-pro-1.5"), + ("OpenRouter: Gemini Flash 1.5", "openrouter/google/gemini-flash-1.5"), + ("OpenRouter: Gemma 7B", "openrouter/google/gemma-7b-it"), + ("OpenRouter: DeepSeek Coder", "openrouter/deepseek/deepseek-coder"), + ("OpenRouter: Palm 2 CodeChat Bison 32k", "openrouter/google/palm-2-codechat-bison-32k"), + ("OpenRouter: CodeLLama 34B Instruct", "openrouter/meta-llama/codellama-34b-instruct"), + ], "OLLAMA": [] } if ollama.client: @@ -96,7 +113,8 @@ def inference(self, prompt: str, project_name: str) -> str: "OPENAI": OpenAi(), "GOOGLE": Gemini(), "MISTRAL": 
MistralAi(), - "GROQ": Groq() + "GROQ": Groq(), + "LITELLM": LiteLLM(), } try: From 96d7136ba2dc78718c0fff135d5af93bdb36b89c Mon Sep 17 00:00:00 2001 From: OSH212 Date: Wed, 15 May 2024 17:40:12 -0400 Subject: [PATCH 3/6] added LiteLLM --- requirements.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requirements.txt b/requirements.txt index d2bdc164..2a83bd0b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -30,3 +30,5 @@ duckduckgo-search orjson gevent gevent-websocket +litellm + From 924e4f5098a53035f4d54921ff31d8cab3290d09 Mon Sep 17 00:00:00 2001 From: OSH212 Date: Wed, 15 May 2024 17:41:05 -0400 Subject: [PATCH 4/6] Added OpenRouter and DeepInfra Api keys for litellm --- sample.config.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sample.config.toml b/sample.config.toml index cd796133..1c86a1cd 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -15,6 +15,8 @@ OPENAI = "" GEMINI = "" MISTRAL = "" GROQ = "" +OPENROUTER = "" +DEEPINFRA = "" NETLIFY = "" [API_ENDPOINTS] From ce72afbab1dd645ae7bc79961ce86e8c4b31a7be Mon Sep 17 00:00:00 2001 From: OSH212 Date: Wed, 15 May 2024 17:44:32 -0400 Subject: [PATCH 5/6] LiteLLM client - tested with OpenRouter and DeepInfra hosted models --- src/llm/litellm_client.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/llm/litellm_client.py b/src/llm/litellm_client.py index 70fca5b0..40fae516 100644 --- a/src/llm/litellm_client.py +++ b/src/llm/litellm_client.py @@ -8,18 +8,18 @@ def __init__(self): def set_api_key(self, model_id: str): if "openrouter" in model_id: - os.environ["OPENROUTER_API_KEY"] = self.config.get_openrouter_api_key() #tested + os.environ["OPENROUTER_API_KEY"] = self.config.get_openrouter_api_key() elif "deepinfra" in model_id: - os.environ["DEEPINFRA_API_KEY"] = self.config.get_deepinfra_api_key() #tested + os.environ["DEEPINFRA_API_KEY"] = self.config.get_deepinfra_api_key() elif "openai" in model_id: - os.environ["OPENAI_API_KEY"] = 
self.config.get_openai_api_key() # not tested + os.environ["OPENAI_API_KEY"] = self.config.get_openai_api_key() elif "anthropic" in model_id: - os.environ["ANTHROPIC_API_KEY"] = self.config.get_anthropic_api_key() # not tested + os.environ["ANTHROPIC_API_KEY"] = self.config.get_anthropic_api_key() elif "mistral" in model_id: - os.environ["MISTRAL_API_KEY"] = self.config.get_mistral_api_key() # not tested + os.environ["MISTRAL_API_KEY"] = self.config.get_mistral_api_key() elif "cohere" in model_id: - os.environ["COHERE_API_KEY"] = self.config.get_cohere_api_key() # not tested - # Add other providers as needed + os.environ["COHERE_API_KEY"] = self.config.get_cohere_api_key() + def inference(self, model_id: str, prompt: str) -> str: self.set_api_key(model_id) From a52fc547b0261cab7a5af949866155d947a2e9c7 Mon Sep 17 00:00:00 2001 From: OSH212 Date: Wed, 15 May 2024 17:50:10 -0400 Subject: [PATCH 6/6] OpenRouter + DeepInfra --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 63c8961d..5977777a 100644 --- a/README.md +++ b/README.md @@ -141,6 +141,8 @@ when you first time run Devika, it will create a `config.toml` file for you in t - `CLAUDE`: Your Anthropic API key for accessing Claude models. - `MISTRAL`: Your Mistral API key for accessing Mistral models. - `GROQ`: Your Groq API key for accessing Groq models. + - `OPENROUTER`: Your OpenRouter API key for accessing OpenRouter models. + - `DEEPINFRA`: Your DeepInfra API key for accessing DeepInfra models. - `NETLIFY`: Your Netlify API key for deploying and managing web projects. - API_ENDPOINTS