Merge pull request #37 from studio-recoding/dev
[feat] Complete the CI/CD code
uommou authored Mar 24, 2024
2 parents 44d9672 + 69396f9 commit 0117e37
Showing 9 changed files with 59 additions and 77 deletions.
54 changes: 24 additions & 30 deletions .github/workflows/cicd.yml
@@ -2,25 +2,33 @@ name: CI/CD

on:
push:
branches: [ dev ]
pull_request:
branches: [ main, dev ]
# pull_request:
# branches: [ main, dev ]

env:
DOCKER_IMAGE: ghcr.io/${{ github.actor }}/ness-fastapi
VERSION: ${{ github.sha }}
NAME: ness-fastapi
# When the Docker image is pushed to ghcr.io, it is stored as our-github-name/image-name; you only need to pick the image name.
# The Docker image name had been set to newproject; decide what to call it.

jobs:
build:
name: Build
runs-on: ubuntu-latest
env:
OPENAI_API_KEY: ${{secrets.OPENAI_API_KEY}}
CHROMA_DB_IP_ADDRESS: ${{secrets.CHROMA_DB_IP_ADDRESS}}

steps:
- name: Checkout
uses: actions/checkout@v2

- name: Create .env file
run: |
echo "CHROMA_DB_IP_ADDRESS=${{ secrets.CHROMA_DB_IP_ADDRESS }}" >> .env
echo "OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> .env
shell: bash

- name: Setup docker buildx
id: buildx
uses: docker/setup-buildx-action@v1
@@ -47,28 +55,6 @@ jobs:
push: true
tags: ${{ env.DOCKER_IMAGE }}:latest

# deploy:
# needs: build
# name: Deploy
# runs-on: [ self-hosted, label-NESS ]
# # the label, label-newproject, that was used when the AWS EC2 instance was registered to run the Runner
# steps:
# - name: Login to ghcr
# uses: docker/login-action@v1
# with:
# registry: ghcr.io
# username: ${{ github.actor }}
# password: ${{ secrets.GHCR_TOKEN }}
#
# - name: Docker run
# run: |
# docker stop ${{ env.NAME }} && docker rm ${{ env.NAME }} && docker rmi ${{ env.DOCKER_IMAGE }}:latest
# docker run -d -p 3000:3000 --name ness-fastapi --restart always ${{ env.DOCKER_IMAGE }}:latest
## docker pull ${{ env.DOCKER_IMAGE }}:latest
## docker stop ${{ env.NAME }} || true
## docker rm ${{ env.NAME }} || true
## docker rmi ${{ env.DOCKER_IMAGE }}:latest || true
## docker run -d -p 3000:3000 --name ${{ env.NAME }} --restart always ${{ env.DOCKER_IMAGE }}:latest
# Tested after increasing the EC2 volume to 16 GB -> the partition size still needs adjusting
deploy:
needs: build
@@ -82,12 +68,20 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GHCR_TOKEN }}

- name: Remove old Docker image
run: docker rmi -f $(docker images -aq) || true #docker rmi -f ${{ env.DOCKER_IMAGE }}:latest || true
- name: Create .env file
run: |
touch .env
echo "$DEV_ENV" > .env
env:
DEV_ENV: ${{ secrets.DEV_ENV }}

- name: Docker run
run: |
docker pull ${{ env.DOCKER_IMAGE }}:latest
docker stop ${{ env.NAME }} || true
docker rm ${{ env.NAME }} || true
docker run -d -p 3000:3000 --name ${{ env.NAME }} --restart always ${{ env.DOCKER_IMAGE }}:latest
docker rmi -f $(docker images -aq) || true
docker pull ${{ env.DOCKER_IMAGE }}:latest
docker run -d -p 80:80 --name ${{ env.NAME }} --restart always \
-e CHROMA_DB_IP_ADDRESS=${{ secrets.CHROMA_DB_IP_ADDRESS }} \
-e OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }} \
${{ env.DOCKER_IMAGE }}:latest
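
The deploy step above now injects CHROMA_DB_IP_ADDRESS and OPENAI_API_KEY into the container with -e flags instead of baking them into an image-side .env file. A minimal sketch of a startup guard on the application side, assuming only those two variable names; the fail-fast check itself is illustrative and not part of this commit:

import os

REQUIRED_VARS = ("CHROMA_DB_IP_ADDRESS", "OPENAI_API_KEY")

def assert_runtime_env() -> None:
    # Fail fast if the container was started without the expected -e values.
    missing = [name for name in REQUIRED_VARS if not os.getenv(name)]
    if missing:
        raise RuntimeError(f"missing environment variables: {', '.join(missing)}")
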
16 changes: 8 additions & 8 deletions app/database/chroma_db.py
@@ -3,6 +3,9 @@
from chromadb.utils import embedding_functions
from chromadb.config import Settings

# FAST API
from fastapi import Depends

# ETC
import os
import datetime
@@ -11,12 +14,6 @@

load_dotenv()
CHROMA_DB_IP_ADDRESS = os.getenv("CHROMA_DB_IP_ADDRESS")
# # If ChromaDB is stored locally
# DIR = os.path.dirname(os.path.abspath(__file__))
# DB_PATH = os.path.join(DIR, 'data')
# # Run ChromaDB from the local disk: allow_reset enables resetting the database, anonymized_telemetry=False disables telemetry collection
# chroma_client = chromadb.PersistentClient(path=DB_PATH,
# settings=Settings(allow_reset=True, anonymized_telemetry=False))

# description: connect to ChromaDB on a remote EC2 instance
chroma_client = chromadb.HttpClient(host=CHROMA_DB_IP_ADDRESS, port=8000)
@@ -53,6 +50,9 @@ async def add_db_data(schedule_data: AddScheduleDTO):
schedules.add(
documents=[schedule_data.data],
ids=[str(schedule_data.schedule_id)],
metadatas=[{"datetime": schedule_data.schedule_datetime, "member": schedule_data.member_id, "category": schedule_data.category}]
metadatas=[{"datetime_start": schedule_data.schedule_datetime_start, "datetime_end": schedule_data.schedule_datetime_end, "member": schedule_data.member_id, "category": schedule_data.category, "location": schedule_data.location, "person": schedule_data.person}]
)
return True
return True

def get_chroma_client():
return chroma_client
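
get_chroma_client simply hands out the module-level HttpClient, so routers can receive it through FastAPI's Depends instead of importing the client directly. A minimal sketch of a consumer, assuming a hypothetical /ping route; heartbeat() is the standard chromadb client health check:

from fastapi import APIRouter, Depends

from app.database.chroma_db import get_chroma_client

router = APIRouter(prefix="/chromadb")

@router.get("/ping")
async def ping(chroma_client=Depends(get_chroma_client)):
    # heartbeat() returns a nanosecond timestamp when the remote ChromaDB answers.
    return {"heartbeat": chroma_client.heartbeat()}
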
5 changes: 4 additions & 1 deletion app/dto/db_dto.py
@@ -3,8 +3,11 @@

class AddScheduleDTO(BaseModel):
data: str
schedule_datetime: str # the datetime.datetime type is used explicitly.
schedule_datetime_start: str # the datetime.datetime type is used explicitly.
schedule_datetime_end: str
schedule_id: int
member_id: int
category: str
location: str
person: str
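
With the start/end datetimes, location, and person fields added, a request built against this DTO would look roughly as follows; the field values are purely illustrative:

from app.dto.db_dto import AddScheduleDTO

example = AddScheduleDTO(
    data="Team meeting with the design squad",
    schedule_datetime_start="2024-03-25T10:00:00",
    schedule_datetime_end="2024-03-25T11:00:00",
    schedule_id=1,
    member_id=42,
    category="work",
    location="Seoul office",
    person="design team",
)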

3 changes: 3 additions & 0 deletions app/dto/openai_dto.py
@@ -3,3 +3,6 @@

class PromptRequest(BaseModel):
prompt: str

class ChatResponse(BaseModel):
ness: str
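
Because the chat endpoints now wrap the model output in ChatResponse, clients receive a JSON object with a single ness field rather than a bare string. A quick illustration of the serialized shape, using the Pydantic v2 API pinned in requirements.txt:

from app.dto.openai_dto import ChatResponse

resp = ChatResponse(ness="Sure, I added the meeting to your schedule.")
print(resp.model_dump_json())  # {"ness":"Sure, I added the meeting to your schedule."}
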
16 changes: 1 addition & 15 deletions app/main.py
@@ -3,21 +3,7 @@
from dotenv import load_dotenv

# BACKEND
from fastapi import FastAPI
from fastapi import FastAPI, HTTPException
import asyncio

# VECTOR DB Module
import app.database.chroma_db as vectordb

# AI
from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate

# DTO
from app.dto import openai_dto
from app.prompt import openai_prompt
from app.dto.db_dto import AddScheduleDTO
from fastapi import FastAPI, Depends

# ETC
import os
23 changes: 10 additions & 13 deletions app/routers/chat.py
@@ -3,11 +3,11 @@
import os

from dotenv import load_dotenv
from fastapi import APIRouter, HTTPException
from fastapi import APIRouter, HTTPException, status
from langchain_community.chat_models import ChatOpenAI
from langchain_core.prompts import PromptTemplate

from app.dto import openai_dto
from app.dto.openai_dto import PromptRequest, ChatResponse
from app.prompt import openai_prompt

import app.database.chroma_db as vectordb
@@ -26,8 +26,8 @@
config = configparser.ConfigParser()
config.read(CONFIG_FILE_PATH)

@router.post("/case")
async def get_langchain_case(data: openai_dto.PromptRequest):
@router.post("/case", status_code=status.HTTP_200_OK, response_model=ChatResponse)
async def get_langchain_case(data: PromptRequest) -> ChatResponse:
# description: use langchain

config_normal = config['NESS_NORMAL']
@@ -64,7 +64,7 @@ async def get_langchain_case(data: openai_dto.PromptRequest):

# case 1 : normal
#@router.post("/case/normal") # 테스트용 엔드포인트
async def get_langchain_normal(data: openai_dto.PromptRequest): # case 1 : normal
async def get_langchain_normal(data: PromptRequest) -> ChatResponse: # case 1 : normal
print("running case 1")
# description: use langchain
chat_model = ChatOpenAI(temperature=0, # creativity (0.0 ~ 2.0)
@@ -82,11 +82,11 @@ async def get_langchain_normal(data: openai_dto.PromptRequest): # case 1 : norma
prompt = PromptTemplate.from_template(my_template)
response = chat_model.predict(prompt.format(output_language="Korean", question=question))
print(response)
return response
return ChatResponse(ness=response)

# case 2 : schedule creation
#@router.post("/case/make_schedule") # endpoint for testing
async def get_langchain_schedule(data: openai_dto.PromptRequest):
async def get_langchain_schedule(data: PromptRequest) -> ChatResponse:
print("running case 2")
# description: use langchain
chat_model = ChatOpenAI(temperature=0, # creativity (0.0 ~ 2.0)
@@ -100,11 +100,11 @@ async def get_langchain_schedule(data: openai_dto.PromptRequest):
prompt = PromptTemplate.from_template(case2_template)
response = chat_model.predict(prompt.format(output_language="Korean", question=question))
print(response)
return response
return ChatResponse(ness=response)

# case 3 : rag
#@router.post("/case/rag") # 테스트용 엔드포인트
async def get_langchain_rag(data: openai_dto.PromptRequest):
async def get_langchain_rag(data: PromptRequest) -> ChatResponse:
print("running case 3")
# description: use langchain
chat_model = ChatOpenAI(temperature=0, # creativity (0.0 ~ 2.0)
@@ -121,9 +121,6 @@ async def get_langchain_rag(data: openai_dto.PromptRequest):
case3_template = openai_prompt.Template.case3_template

prompt = PromptTemplate.from_template(case3_template)
# Handling here can differ depending on whether chat_model.predict is an async or a sync function.
# If it is async, await must be used. The code below assumes a synchronous function.
# If it is async, it would need to be changed to, e.g., response = await chat_model.predict(...).
response = chat_model.predict(prompt.format(output_language="Korean", question=question, schedule=schedule))
print(response)
return response
return ChatResponse(ness=response)
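
With status_code and response_model declared on /case, the endpoint answers 200 OK and validates its body against ChatResponse before returning it. A hedged client-side sketch of the round trip; the /chat prefix, host, and port do not appear in this diff and are assumptions:

import requests

# Assumed URL: the chat router's prefix is not visible in this diff.
resp = requests.post(
    "http://localhost/chat/case",
    json={"prompt": "Schedule a team meeting tomorrow at 10am."},
)
resp.raise_for_status()
print(resp.json()["ness"])  # the assistant's reply as a plain string
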
14 changes: 7 additions & 7 deletions app/routers/chromadb.py
@@ -2,9 +2,10 @@
import os

from dotenv import load_dotenv
from fastapi import APIRouter, HTTPException
from fastapi import APIRouter, HTTPException, Depends, status

from app.dto.db_dto import AddScheduleDTO
from app.database.chroma_db import add_db_data, get_chroma_client

router = APIRouter(
prefix="/chromadb",
@@ -21,12 +22,11 @@
config.read(CONFIG_FILE_PATH)


@router.post("/add_schedule")
async def add_schedule_endpoint(schedule_data: AddScheduleDTO, vectordb=None):
@router.post("/add_schedule", status_code=status.HTTP_201_CREATED)
async def add_schedule_endpoint(schedule_data: AddScheduleDTO, chroma_client=Depends(get_chroma_client)):
try:
# call the vectordb.add_db_data function asynchronously.
await vectordb.add_db_data(schedule_data)
# call the `add_db_data` function directly, asynchronously.
await add_db_data(schedule_data)
return {"message": "Schedule added successfully"}
except Exception as e:
# error handling: return an HTTP 500 response if an error occurs.
raise HTTPException(status_code=500, detail=str(e))
raise HTTPException(status_code=500, detail=str(e))
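
Taking the client through Depends(get_chroma_client) instead of a vectordb=None default also makes it swappable in tests via FastAPI's dependency_overrides. A minimal sketch, assuming app.main exports the FastAPI instance as app; note that add_db_data still talks to the module-level client, so the override only replaces the injected handler argument:

from fastapi.testclient import TestClient

from app.database.chroma_db import get_chroma_client
from app.main import app  # assumption: the FastAPI instance is named `app`

class FakeChromaClient:
    """Illustrative stand-in; a real test would also stub the collection calls."""

app.dependency_overrides[get_chroma_client] = lambda: FakeChromaClient()
client = TestClient(app)  # requests made through `client` now receive the fake client
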
2 changes: 1 addition & 1 deletion app/routers/recommendation.py
@@ -23,7 +23,7 @@
config.read(CONFIG_FILE_PATH)

@router.get("/main")
def get_recommendation():
async def get_recommendation():

# model
chat_model = ChatOpenAI(temperature=0, # creativity (0.0 ~ 2.0)
3 changes: 1 addition & 2 deletions requirements.txt
@@ -1,10 +1,9 @@
chromadb==0.4.22
fastapi==0.109.0
uvicorn==0.26.0
langchain==0.1.1
langchain-community==0.0.13
openai==1.8.0
python-dotenv==1.0.0
starlette==0.35.1
pydantic==2.5.3
sentence-transformers==2.5.1
sentence-transformers==2.5.1
