From 631090b32dea9e487a946c424aa2e9bb50d03b7e Mon Sep 17 00:00:00 2001
From: uommou
Date: Sat, 23 Mar 2024 20:42:53 +0900
Subject: [PATCH] [fix] remove await
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/routers/chat.py           | 10 ++++++----
 app/routers/recommendation.py |  2 +-
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/app/routers/chat.py b/app/routers/chat.py
index 0e9d66f..8e32c10 100644
--- a/app/routers/chat.py
+++ b/app/routers/chat.py
@@ -43,7 +43,7 @@ async def get_langchain_case(data: openai_dto.PromptRequest):
     my_template = openai_prompt.Template.case_classify_template
 
     prompt = PromptTemplate.from_template(my_template)
-    case = await chat_model.predict(prompt.format(question=question))
+    case = chat_model.predict(prompt.format(question=question))
     print(case)
     case = int(case)
 
@@ -73,12 +73,14 @@ async def get_langchain_normal(data: openai_dto.PromptRequest): # case 1 : norma
         openai_api_key=OPENAI_API_KEY  # API key
     )
     question = data.prompt
+    # The previous model setup started causing problems
+    # chat_model = LangchainOpenAI(openai_api_key=OPENAI_API_KEY)
 
     # description: give NESS's ideal instruction as template
     my_template = openai_prompt.Template.case1_template
 
     prompt = PromptTemplate.from_template(my_template)
-    response = await chat_model.predict(prompt.format(output_language="Korean", question=question))
+    response = chat_model.predict(prompt.format(output_language="Korean", question=question))
     print(response)
     return response
 
@@ -96,7 +98,7 @@ async def get_langchain_schedule(data: openai_dto.PromptRequest):
     case2_template = openai_prompt.Template.case2_template
 
     prompt = PromptTemplate.from_template(case2_template)
-    response = await chat_model.predict(prompt.format(output_language="Korean", question=question))
+    response = chat_model.predict(prompt.format(output_language="Korean", question=question))
     print(response)
     return response
 
@@ -122,6 +124,6 @@ async def get_langchain_rag(data: openai_dto.PromptRequest):
     # Handling here may differ depending on whether chat_model.predict is an async or a sync function.
     # If it is async, await must be used. The code below assumes a synchronous function.
     # If it is async, change it to e.g. response = await chat_model.predict(...).
-    response = await chat_model.predict(prompt.format(output_language="Korean", question=question, schedule=schedule))
+    response = chat_model.predict(prompt.format(output_language="Korean", question=question, schedule=schedule))
     print(response)
     return response
diff --git a/app/routers/recommendation.py b/app/routers/recommendation.py
index 3d133d3..d3116a6 100644
--- a/app/routers/recommendation.py
+++ b/app/routers/recommendation.py
@@ -37,4 +37,4 @@ async def get_recommendation():
     recommendation_template = openai_prompt.Template.recommendation_template
 
     prompt = PromptTemplate.from_template(recommendation_template)
-    return await chat_model.predict(prompt.format())
\ No newline at end of file
+    return chat_model.predict(prompt.format())
\ No newline at end of file
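
Note on the change (not part of the patch itself): the `await` keywords are dropped because LangChain's `predict` on this chat model is a synchronous call returning a plain string, so awaiting its result fails at runtime. The sketch below is a minimal illustration of the two call styles under that assumption; the model and prompt objects are stand-ins mirroring the routers above, and `apredict` is assumed to exist only in LangChain versions that ship the async counterpart.

    from langchain.chat_models import ChatOpenAI
    from langchain.prompts import PromptTemplate

    # Assumed stand-ins for the routers' chat_model and templates; the real ones
    # come from openai_prompt and the OPENAI_API_KEY setting.
    chat_model = ChatOpenAI(openai_api_key="sk-...")  # placeholder key
    prompt = PromptTemplate.from_template("Answer in {output_language}: {question}")


    def predict_sync(question: str) -> str:
        # predict() is synchronous and returns str, so it is called without await,
        # exactly as the patched routers now do.
        return chat_model.predict(prompt.format(output_language="Korean", question=question))


    async def predict_async(question: str) -> str:
        # If the FastAPI handlers should not block the event loop, the async
        # variant (apredict, where available) can be awaited instead.
        return await chat_model.apredict(prompt.format(output_language="Korean", question=question))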