From 57be5f94f5d906493608049f4081812d9421079d Mon Sep 17 00:00:00 2001
From: vsakkas
Date: Fri, 31 Mar 2023 10:01:50 +0300
Subject: [PATCH] Add stream support (#9)

- Add `ask_stream` method for streaming the answer tokens as they come.
- Rename `close` method to `close_conversation`.
---
 README.md        |   4 +-
 pyproject.toml   |   2 +-
 sydney/sydney.py | 181 +++++++++++++++++++++++++++++++----------------
 3 files changed, 123 insertions(+), 64 deletions(-)

diff --git a/README.md b/README.md
index 000bcff..d70ab39 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Sydney.py
 
-[![Latest Release](https://img.shields.io/github/v/release/vsakkas/sydney.py.svg)](https://github.com/vsakkas/sydney.py/releases/tag/v0.5.1)
+[![Latest Release](https://img.shields.io/github/v/release/vsakkas/sydney.py.svg)](https://github.com/vsakkas/sydney.py/releases/tag/v0.6.0)
 [![Python](https://img.shields.io/badge/python-3.10+-blue.svg)](https://www.python.org/downloads/)
 [![MIT License](https://img.shields.io/badge/license-MIT-blue)](https://github.com/vsakkas/sydney.py/blob/master/LICENSE)
 
@@ -54,7 +54,7 @@ async def main() -> None:
         response = await sydney.ask("Hello, how are you?")
         print(response)
 
-        await sydney.close()
+        await sydney.close_conversation()
 
 
 if __name__ == "__main__":
diff --git a/pyproject.toml b/pyproject.toml
index da3cb3b..c10393a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "sydney-py"
-version = "0.5.1"
+version = "0.6.0"
 description = "Python Client for Bing Chat, also known as Sydney."
 authors = ["vsakkas "]
 license = "MIT"
diff --git a/sydney/sydney.py b/sydney/sydney.py
index 43c268b..9bc8b0d 100644
--- a/sydney/sydney.py
+++ b/sydney/sydney.py
@@ -18,6 +18,90 @@ def __init__(self) -> None:
         self.conversation_style: ConversationStyle = None
         self.wss_client = None
 
+    def _build_request_arguments(self, prompt: str) -> dict:
+        return {
+            "arguments": [
+                {
+                    "source": "cib",
+                    "optionsSets": [
+                        "nlu_direct_response_filter",
+                        "deepleo",
+                        "enable_debug_commands",
+                        "disable_emoji_spoken_text",
+                        "responsible_ai_policy_235",
+                        "enablemm",
+                        self.conversation_style.value,
+                    ],
+                    "isStartOfSession": self.invocation_id == 0,
+                    "message": {
+                        "author": "user",
+                        "inputMethod": "Keyboard",
+                        "text": prompt,
+                        "messageType": "Chat",
+                    },
+                    "conversationSignature": self.conversation_signature,
+                    "participant": {
+                        "id": self.client_id,
+                    },
+                    "conversationId": self.conversation_id,
+                }
+            ],
+            "invocationId": str(self.invocation_id),
+            "target": "chat",
+            "type": 4,
+        }
+
+    async def _ask(
+        self,
+        prompt: str,
+        citations: bool = False,
+        raw: bool = False,
+        stream: bool = False,
+    ) -> str | dict:
+        if self.wss_client:
+            if not self.wss_client.closed:
+                await self.wss_client.close()
+
+        # Create a connection to Bing Chat.
+        self.wss_client = await websockets.connect(
+            BING_CHATHUB_URL, extra_headers=HEADERS, max_size=None
+        )
+        await self.wss_client.send(as_json({"protocol": "json", "version": 1}))
+        await self.wss_client.recv()
+
+        request = self._build_request_arguments(prompt)
+        self.invocation_id += 1
+
+        await self.wss_client.send(as_json(request))
+
+        while True:
+            objects = str(await self.wss_client.recv()).split(DELIMETER)
+            for obj in objects:
+                if not obj:
+                    continue
+                response = json.loads(obj)
+                # Handle type 1 messages when streaming is enabled.
+                if (
+                    stream
+                    and response.get("type") == 1
+                    and response["arguments"][0].get("messages")
+                ):
+                    if raw:
+                        yield response
+                    elif citations:
+                        yield response["arguments"][0]["messages"][0]["adaptiveCards"][0]["body"][0]["text"]
+                    else:
+                        yield response["arguments"][0]["messages"][0]["text"]
+                # Handle type 2 messages.
+                elif response.get("type") == 2:
+                    if raw:
+                        yield response
+                    if citations:
+                        yield response["item"]["messages"][1]["adaptiveCards"][0]["body"][0]["text"]
+                    yield response["item"]["messages"][1]["text"]
+                    # Exit, type 2 is the last message.
+                    return
+
     async def start_conversation(self, style: str = "balanced") -> None:
         """
         Connect to Bing Chat and create a new conversation.
@@ -56,9 +140,14 @@ async def start_conversation(self, style: str = "balanced") -> None:
 
         await session.close()
 
-    async def ask(self, prompt: str, citations: bool = False, raw: bool = False) -> str:
+    async def ask(
+        self,
+        prompt: str,
+        citations: bool = False,
+        raw: bool = False,
+    ) -> str | dict:
         """
-        Send a prompt to Bing Chat using the current conversation.
+        Send a prompt to Bing Chat using the current conversation and return the answer.
 
         Parameters
         ----------
@@ -75,65 +164,35 @@ async def ask(self, prompt: str, citations: bool = False, raw: bool = False) ->
             The text response from Bing Chat. If citations is True, the function returns the cited text.
             If raw is True, the function returns the entire response object in raw JSON format.
         """
-        if self.wss_client:
-            if not self.wss_client.closed:
-                await self.wss_client.close()
-
-        # Create a connection Bing Chat.
-        self.wss_client = await websockets.connect(
-            BING_CHATHUB_URL, extra_headers=HEADERS, max_size=None
-        )
-        await self.wss_client.send(as_json({"protocol": "json", "version": 1}))
-        await self.wss_client.recv()
+        async for response in self._ask(prompt, citations, raw, stream=False):
+            return response
+
+    async def ask_stream(
+        self,
+        prompt: str,
+        citations: bool = False,
+        raw: bool = False,
+    ) -> str | dict:
+        """
+        Send a prompt to Bing Chat using the current conversation and stream the answer.
 
-        request = {
-            "arguments": [
-                {
-                    "source": "cib",
-                    "optionsSets": [
-                        "nlu_direct_response_filter",
-                        "deepleo",
-                        "enable_debug_commands",
-                        "disable_emoji_spoken_text",
-                        "responsible_ai_policy_235",
-                        "enablemm",
-                        self.conversation_style.value,
-                    ],
-                    "isStartOfSession": self.invocation_id == 0,
-                    "message": {
-                        "author": "user",
-                        "inputMethod": "Keyboard",
-                        "text": prompt,
-                        "messageType": "Chat",
-                    },
-                    "conversationSignature": self.conversation_signature,
-                    "participant": {
-                        "id": self.client_id,
-                    },
-                    "conversationId": self.conversation_id,
-                }
-            ],
-            "invocationId": str(self.invocation_id),
-            "target": "chat",
-            "type": 4,
-        }
-        self.invocation_id += 1
+        Parameters
+        ----------
+        prompt : str
+            The prompt that needs to be sent to Bing Chat.
+        citations : bool, optional
+            Whether to return any cited text. Default is False.
+        raw : bool, optional
+            Whether to return the entire response object in raw JSON format. Default is False.
 
-        await self.wss_client.send(as_json(request))
-        while True:
-            objects = str(await self.wss_client.recv()).split(DELIMETER)
-            for obj in objects:
-                if not obj:
-                    continue
-                response = json.loads(obj)
-                if response.get("type") == 2:
-                    if raw:
-                        return response
-                    if citations:
-                        return response["item"]["messages"][1]["adaptiveCards"][0][
-                            "body"
-                        ][0]["text"]
-                    return response["item"]["messages"][1]["text"]
+        Yields
+        ------
+        str
+            The text response from Bing Chat, yielded as it is received. If citations is True, the cited text is yielded.
+            If raw is True, the raw JSON response objects are yielded instead.
+        """
+        async for response in self._ask(prompt, citations, raw, stream=True):
+            yield response
 
     async def reset_conversation(self, style: str = None) -> None:
         """
@@ -153,10 +212,10 @@ async def reset_conversation(self, style: str = None) -> None:
         """
         new_style = style if style else self.conversation_style
 
-        await self.close()
+        await self.close_conversation()
         await self.start_conversation(style=new_style)
 
-    async def close(self) -> None:
+    async def close_conversation(self) -> None:
         """
         Close all connections to Bing Chat. Clear conversation information.
         """
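
Usage sketch for the new streaming API: the example below drives `ask_stream` end to end and closes the session with the renamed `close_conversation` method. It is a minimal sketch rather than part of the patch; the `SydneyClient` class name, its argument-free constructor, and the preceding `start_conversation` call are assumptions (the README hunk above omits client construction), and chunks are printed incrementally on the assumption, per the commit message, that each yielded item is a new answer token.

import asyncio

from sydney import SydneyClient  # assumed import path; client construction is not shown in this diff


async def main() -> None:
    # Assumed: any required authentication/cookies are set up as described in the project README.
    sydney = SydneyClient()
    await sydney.start_conversation(style="balanced")

    # Stream the answer as it is generated instead of waiting for the full reply.
    async for chunk in sydney.ask_stream("Hello, how are you?"):
        # Assumed (per the commit message): each chunk is a new answer token.
        print(chunk, end="", flush=True)
    print()

    await sydney.close_conversation()


if __name__ == "__main__":
    asyncio.run(main())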