Clean up logging
sanders41 committed Dec 24, 2024
1 parent 817bd44 commit 03d8290
Showing 2 changed files with 0 additions and 6 deletions.
backend/app/agents/agents.py (0 additions, 2 deletions)
@@ -38,8 +38,6 @@ def _create_prompt(self) -> ChatPromptTemplate:
         return ChatPromptTemplate.from_template(template)
 
     async def process(self, state: AgentState) -> JsonDict:
-        logger.debug(f"DLPFC Agent processing state: {state}")
-
         response = await self.llm.ainvoke(
             self.prompt.format_messages(
                 task=state.task,
backend/app/agents/base.py (0 additions, 4 deletions)
@@ -33,9 +33,7 @@ def _create_prompt(self) -> ChatPromptTemplate:
     async def process(self, state: AgentState) -> JsonDict:
         """Process the current state and return updated state."""
         try:
-            logger.debug(f"Processing state with prompt: {self.prompt}")
             result = await self._process_with_timeout(state)
-            logger.debug(f"Received response: {result}")
             return result
         except TimeoutError:
             error_msg = "Request timed out. Please try again."
@@ -53,7 +51,6 @@ async def process(self, state: AgentState) -> JsonDict:
     async def _process_with_timeout(self, state: AgentState) -> JsonDict:
         """Process with timeout handling."""
         try:
-            logger.debug("Sending request to OpenAI API...")
             response = await self.llm.ainvoke(
                 self.prompt.format_messages(
                     task=state.task,
@@ -63,7 +60,6 @@ async def _process_with_timeout(self, state: AgentState) -> JsonDict:
                     feedback_history=state.feedback_history,
                 ),
             )
-            logger.debug(f"Received API response: {response}")
             return self._format_response(response.content)
         except TimeoutError:
             logger.debug("API request timed out")
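The hunks above show only the deleted logger.debug calls and their immediate context. For orientation, here is a minimal sketch of what backend/app/agents/base.py plausibly looks like after this commit; the BaseAgent class name, the imports, the AgentState fields, the prompt template, and the _format_response helper are assumptions filled in for the sketch and do not appear in the diff.

# Rough reconstruction (not the actual file) of backend/app/agents/base.py after
# this commit. Only the lines visible in the diff hunks are known; the class name,
# imports, AgentState fields, prompt template, and _format_response are assumptions.
import logging
from dataclasses import dataclass, field
from typing import Any

from langchain_core.prompts import ChatPromptTemplate

logger = logging.getLogger(__name__)

JsonDict = dict[str, Any]  # assumed alias; the project defines its own type


@dataclass
class AgentState:  # assumed shape, inferred from the format_messages() keywords
    task: str
    feedback_history: list[str] = field(default_factory=list)
    # ...other fields are elided in the diff and omitted here


class BaseAgent:  # assumed class name
    def __init__(self, llm: Any) -> None:
        self.llm = llm
        self.prompt = self._create_prompt()

    def _create_prompt(self) -> ChatPromptTemplate:
        # Placeholder template; the real one is not shown in the diff.
        return ChatPromptTemplate.from_template(
            "Task: {task}\nFeedback so far: {feedback_history}"
        )

    async def process(self, state: AgentState) -> JsonDict:
        """Process the current state and return updated state."""
        try:
            result = await self._process_with_timeout(state)
            return result
        except TimeoutError:
            error_msg = "Request timed out. Please try again."
            return {"error": error_msg}  # assumed error shape

    async def _process_with_timeout(self, state: AgentState) -> JsonDict:
        """Process with timeout handling."""
        try:
            response = await self.llm.ainvoke(
                self.prompt.format_messages(
                    task=state.task,
                    # ...other keyword arguments are elided in the diff
                    feedback_history=state.feedback_history,
                ),
            )
            return self._format_response(response.content)
        except TimeoutError:
            logger.debug("API request timed out")
            raise  # assumed; the diff does not show how the timeout is surfaced

    def _format_response(self, content: str) -> JsonDict:
        # Assumed helper; it is called in the diff but its body is not shown.
        return {"response": content}

The net effect of the commit is that the five logger.debug calls in the request path are gone; the only debug log left in the visible hunks is the timeout message in _process_with_timeout.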
