Skip to content

Commit

Permalink
Fix type annotations: replace deprecated typing.Dict/List aliases with built-in generics (dict, list)
Browse files — browse the repository at this point in the history
  • Loading branch information
Alex-Karmazin committed Jul 3, 2024
1 parent e742d08 commit 6cc62c4
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 7 deletions.
10 changes: 5 additions & 5 deletions just_agents/llm_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,9 @@

@dataclass(kw_only=True)
class LLMSession:
llm_options: Dict[str, Any] = field(default_factory=lambda: LLAMA3)
tools: List[Callable] = field(default_factory=list)
available_tools: Dict[str, Callable] = field(default_factory=lambda: {})
llm_options: dict[str, Any] = field(default_factory=lambda: LLAMA3)
tools: list[Callable] = field(default_factory=list)
available_tools: dict[str, Callable] = field(default_factory=lambda: {})

on_response: list[OnCompletion] = field(default_factory=list)
memory: Memory = field(default_factory=lambda: Memory())
Expand Down Expand Up @@ -87,7 +87,7 @@ def stream_all(self, messages: list, run_callbacks: bool = True): # -> ContentSt
self.memory.add_messages(messages, run_callbacks)
return self._stream()

async def stream_async(self, prompt: str, run_callbacks: bool = True, output: Optional[Path] = None) -> List[Any]:
async def stream_async(self, prompt: str, run_callbacks: bool = True, output: Optional[Path] = None) -> list[Any]:
"""temporary function that allows testing the stream function which Alex wrote but I do not fully understand"""
collected_data = []
async for item in self.stream(prompt, run_callbacks, output):
Expand Down Expand Up @@ -176,7 +176,7 @@ def _process_function_calls(self, response: ModelResponse) -> Optional[ModelResp
return rotate_completion(messages=self.memory.messages, stream=False, options=self.llm_options)
return None

def _prepare_tools(self, functions: List[Any]):
def _prepare_tools(self, functions: list[Any]):
"""
Prepares functions as tools that LLM can call.
Note, the functions should have comments explaining LLM how to use them
Expand Down
4 changes: 2 additions & 2 deletions just_agents/streaming/openai_streaming.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@
class AsyncSession(AbstractStreaming):

async def resp_async_generator(self, memory: Memory,
options: Dict,
available_tools: Dict[str, Callable]
options: dict,
available_tools: dict[str, Callable]
) -> AsyncGenerator[str, None]:

response: ModelResponse = rotate_completion(messages=memory.messages, stream=True, options=options)
Expand Down

0 comments on commit 6cc62c4

Please sign in to comment.