Switch to better working llamacpp provider (#289)
Josh-XT authored May 10, 2023
1 parent f925bf9 commit 87f5e47
Showing 3 changed files with 12 additions and 9 deletions.
16 changes: 9 additions & 7 deletions provider/llamacpp.py
@@ -1,7 +1,7 @@
 try:
-    from pyllamacpp.model import Model
+    from llama_cpp import Llama
 except:
-    print("Failed to import pyllamacpp.")
+    print("Failed to import llama-cpp-python.")
 
 
 class LlamacppProvider:
@@ -13,18 +13,20 @@ def __init__(
         AI_MODEL: str = "default",
         **kwargs
     ):
-        self.requirements = ["pyllamacpp"]
+        self.requirements = ["llama-cpp-python"]
         self.AI_TEMPERATURE = AI_TEMPERATURE
         self.MAX_TOKENS = MAX_TOKENS
         self.AI_MODEL = AI_MODEL
+
         if MODEL_PATH:
             try:
                 self.MAX_TOKENS = int(self.MAX_TOKENS)
             except:
                 self.MAX_TOKENS = 2000
-            self.model = Model(ggml_model=MODEL_PATH, n_ctx=self.MAX_TOKENS)
+
+            self.model = Llama(model_path=MODEL_PATH)
 
     def instruct(self, prompt, tokens: int = 0):
-        return self.model.generate(
-            prompt, n_predict=55, n_threads=8, temp=float(self.AI_TEMPERATURE)
-        )
+        return self.model(
+            prompt, max_tokens=55, stop=["\n"], temperature=float(self.AI_TEMPERATURE)
+        )["choices"][0]["text"]
3 changes: 2 additions & 1 deletion requirements-mac.txt
@@ -34,4 +34,5 @@ webdriver_manager
 spacy
 discord
 EdgeGPT
-nomic
+nomic
+llama-cpp-python==0.1.48
2 changes: 1 addition & 1 deletion requirements.txt
@@ -31,7 +31,7 @@ sentence-transformers
 tiktoken
 tweepy
 webdriver_manager
-pyllamacpp==1.0.7
+llama-cpp-python==0.1.48
 spacy
 discord
 gpt4free
