Skip to content

Commit

Permalink
Merge pull request #33 from studio-recoding/feat/cicd
Browse files Browse the repository at this point in the history
[fix] await 삭제
  • Loading branch information
uommou authored Mar 23, 2024
2 parents b918493 + 631090b commit b80a56f
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 5 deletions.
10 changes: 6 additions & 4 deletions app/routers/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ async def get_langchain_case(data: openai_dto.PromptRequest):
my_template = openai_prompt.Template.case_classify_template

prompt = PromptTemplate.from_template(my_template)
case = await chat_model.predict(prompt.format(question=question))
case = chat_model.predict(prompt.format(question=question))

print(case)
case = int(case)
Expand Down Expand Up @@ -73,12 +73,14 @@ async def get_langchain_normal(data: openai_dto.PromptRequest): # case 1 : norma
openai_api_key=OPENAI_API_KEY # API 키
)
question = data.prompt
# NOTE: the previous model setup below caused problems, so it was replaced
# by the ChatOpenAI instance above.
# chat_model = LangchainOpenAI(openai_api_key=OPENAI_API_KEY)

# description: give NESS's ideal instruction as template
my_template = openai_prompt.Template.case1_template

prompt = PromptTemplate.from_template(my_template)
response = await chat_model.predict(prompt.format(output_language="Korean", question=question))
response = chat_model.predict(prompt.format(output_language="Korean", question=question))
print(response)
return response

Expand All @@ -96,7 +98,7 @@ async def get_langchain_schedule(data: openai_dto.PromptRequest):
case2_template = openai_prompt.Template.case2_template

prompt = PromptTemplate.from_template(case2_template)
response = await chat_model.predict(prompt.format(output_language="Korean", question=question))
response = chat_model.predict(prompt.format(output_language="Korean", question=question))
print(response)
return response

Expand All @@ -122,6 +124,6 @@ async def get_langchain_rag(data: openai_dto.PromptRequest):
# Whether to use `await` here depends on whether chat_model.predict is an
# async or a sync function. This code assumes predict is synchronous.
# If it is asynchronous, change to: response = await chat_model.predict(...)
response = await chat_model.predict(prompt.format(output_language="Korean", question=question, schedule=schedule))
response = chat_model.predict(prompt.format(output_language="Korean", question=question, schedule=schedule))
print(response)
return response
2 changes: 1 addition & 1 deletion app/routers/recommendation.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,4 @@ async def get_recommendation():
recommendation_template = openai_prompt.Template.recommendation_template

prompt = PromptTemplate.from_template(recommendation_template)
return await chat_model.predict(prompt.format())
return chat_model.predict(prompt.format())

0 comments on commit b80a56f

Please sign in to comment.