Skip to content

Commit

Permalink
Merge pull request #56 from royerlab/bug_fixes_aug24
Browse files Browse the repository at this point in the history
Bug fixes Aug 24
  • Loading branch information
royerloic authored Aug 31, 2024
2 parents 2efbb5d + 3603cc3 commit ef9df75
Show file tree
Hide file tree
Showing 42 changed files with 804 additions and 291 deletions.
48 changes: 0 additions & 48 deletions .github/workflows/just_deploy.yml

This file was deleted.

17 changes: 9 additions & 8 deletions setup.cfg
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[metadata]
name = napari-chatgpt
version = v2024.5.15
version = v2024.8.30
description = A napari plugin to process and analyse images with chatGPT.
long_description = file: README.md
long_description_content_type = text/markdown
Expand Down Expand Up @@ -36,12 +36,13 @@ install_requires =
scikit-image
qtpy
QtAwesome
langchain==0.2.0rc2
langchain-community==0.2.0rc1
langchain-openai==0.1.6
langchain-anthropic==0.1.11
openai==1.29.0
anthropic
langchain==0.2.15
langchain-community==0.2.14
langchain-openai==0.1.23
langchain-anthropic==0.1.23
# langchain-google-genai==1.0.10
openai==1.42.0
anthropic==0.34.1
fastapi
uvicorn
websockets
Expand All @@ -54,7 +55,7 @@ install_requires =
xarray
arbol
playwright
duckduckgo_search==5.3.0b4
duckduckgo-search==6.2.11
ome-zarr
transformers
cryptography
Expand Down
5 changes: 3 additions & 2 deletions src/microplugin/formating/black_formating.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
from pathlib import Path
from typing import Union

from black import FileMode, format_file_in_place, WriteBack


def format_code(code: str) -> str:
"""Format the code using black."""
Expand All @@ -29,6 +27,9 @@ def format_file(file_path: Union[str, Path]) -> None:
if isinstance(file_path, str):
file_path = Path(file_path)

                # Local import to avoid pollution of the global namespace:
from black import FileMode, format_file_in_place, WriteBack

# Format the file using Black
format_file_in_place(file_path,
fast=False,
Expand Down
39 changes: 6 additions & 33 deletions src/napari_chatgpt/_widget.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,13 @@
from qtpy.QtWidgets import QVBoxLayout, QComboBox

from microplugin.microplugin_window import MicroPluginMainWindow
from napari_chatgpt.utils.anthropic.model_list import get_anthropic_model_list
from napari_chatgpt.utils.configuration.app_configuration import \
AppConfiguration
from napari_chatgpt.utils.ollama.ollama_server import is_ollama_running, \
get_ollama_models
from napari_chatgpt.utils.openai.model_list import get_openai_model_list
from napari_chatgpt.utils.openai.model_list import get_openai_model_list, \
postprocess_openai_model_list
from napari_chatgpt.utils.python.installed_packages import \
is_package_installed
from napari_chatgpt.utils.qt.one_time_disclaimer_dialog import \
Expand Down Expand Up @@ -123,44 +125,15 @@ def _model_selection(self):

if is_package_installed('anthropic'):
# Add Anthropic models to the combo box:
model_list.append('claude-2.1')
model_list.append('claude-2.0')
model_list.append('claude-instant-1.2')
model_list.append('claude-3-sonnet-20240229')
model_list.append('claude-3-opus-20240229')

model_list.extend(get_anthropic_model_list())

if is_ollama_running():
ollama_models = get_ollama_models()
for ollama_model in ollama_models:
model_list.append('ollama_'+ollama_model)

# Postprocess model list:

# Special cases (common prefix):
if 'gpt-3.5-turbo' in model_list:
model_list.remove('gpt-3.5-turbo')

# get list of bad models for main LLM:
bad_models_filters = ['0613', 'vision', 'turbo-instruct', 'gpt-3.5-turbo-0301', 'gpt-3.5-turbo-16k']

# get list of best models for main LLM:
best_models_filters = ['0314', '0301', '1106', 'gpt-4']

# Ensure that some 'bad' or unsupported models are excluded:
bad_models = [m for m in model_list if any(bm in m for bm in bad_models_filters)]
for bad_model in bad_models:
if bad_model in model_list:
model_list.remove(bad_model)
# model_list.append(bad_model)

# Ensure that the best models are at the top of the list:
best_models = [m for m in model_list if any(bm in m for bm in best_models_filters)]
model_list = best_models + [m for m in model_list if m not in best_models]

# Ensure that the very best models are at the top of the list:
very_best_models = [m for m in model_list if ('gpt-4-turbo-2024-04-09' in m) ]
model_list = very_best_models + [m for m in model_list if m not in very_best_models]
# Postprocess OpenAI model list:
model_list = postprocess_openai_model_list(model_list)

# normalise list:
model_list = list(model_list)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,19 @@ async def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
if self.verbose:
aprint(f"CHAT on_agent_action: {action}")
tool = camel_case_to_lower_case(action.tool)
message = f"I am using the {tool} to tackle your request: '{action.tool_input}'"

# extract value for args key after checking if action.tool_input is a dict:
if isinstance(action.tool_input, dict):
argument = action.tool_input.get('args', '')

            # if argument is a singleton list, extract its single element:
if isinstance(argument, list):
argument = argument[0]

else:
argument = action.tool_input

message = f"I am using the {tool} to tackle your request: '{argument}'"

self.last_tool_used = tool
self.last_tool_input = action.tool_input
Expand Down
16 changes: 14 additions & 2 deletions src/napari_chatgpt/chat_server/chat_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,18 @@
from napari_chatgpt.utils.api_keys.api_key import set_api_key
from napari_chatgpt.utils.configuration.app_configuration import \
AppConfiguration
from napari_chatgpt.utils.network.port_available import \
find_first_port_available
from napari_chatgpt.utils.notebook.jupyter_notebook import JupyterNotebookFile
from napari_chatgpt.utils.openai.default_model import \
get_default_openai_model_name
from napari_chatgpt.utils.python.installed_packages import is_package_installed






class NapariChatServer:
def __init__(self,
notebook: JupyterNotebookFile,
Expand Down Expand Up @@ -71,8 +77,12 @@ def __init__(self,
# Get configuration
config = AppConfiguration('omega')

# port:
self.port = config.get('port', 9000)
        # Check whether the default port is available; if not, increment by one until an available port is found:
default_port = config.get('port', 9000)

# find first available port:
self.port = find_first_port_available(default_port, default_port+1000)
aprint(f"Using port: {self.port}")

# Mount static files:
static_files_path = os.path.join(
Expand All @@ -81,9 +91,11 @@ def __init__(self,
self.app.mount("/static", StaticFiles(directory=static_files_path),
name="static")

# Load templates:
templates_files_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates')

# Load Jinja2 templates:
templates = Jinja2Templates(directory=templates_files_path)

Expand Down
132 changes: 68 additions & 64 deletions src/napari_chatgpt/chat_server/static/marked-highlight.js
Original file line number Diff line number Diff line change
@@ -1,62 +1,69 @@
function markedHighlight(options) {
if (typeof options === 'function') {
options = {
highlight: options,
};
}

function markedHighlight(options)
{
if (typeof options === 'function') {
options = {
highlight: options
};
}
if (!options || typeof options.highlight !== 'function') {
throw new Error('Must provide highlight function');
}

if (!options || typeof options.highlight !== 'function') {
throw new Error('Must provide highlight function');
}
if (typeof options.langPrefix !== 'string') {
options.langPrefix = 'language-';
}

if (typeof options.langPrefix !== 'string') {
options.langPrefix = 'language-';
}
return {
async: !!options.async,
walkTokens(token) {
if (token.type !== 'code') {
return;
}

return {
async: !!options.async,
walkTokens(token) {
if (token.type !== 'code') {
return;
}
const lang = getLang(token.lang);

const lang = getLang(token);
if (options.async) {
return Promise.resolve(options.highlight(token.text, lang, token.lang || '')).then(updateToken(token));
}

if (options.async) {
return Promise.resolve(options.highlight(token.text, lang)).then(updateToken(token));
const code = options.highlight(token.text, lang, token.lang || '');
if (code instanceof Promise) {
throw new Error('markedHighlight is not set to async but the highlight function is async. Set the async option to true on markedHighlight to await the async highlight function.');
}
updateToken(token)(code);
},
useNewRenderer: true,
renderer: {
code(code, infoString, escaped) {
// istanbul ignore next
if (typeof code === 'object') {
escaped = code.escaped;
infoString = code.lang;
code = code.text;
}

const code = options.highlight(token.text, lang);
updateToken(token)(code);
const lang = getLang(infoString);
const classAttr = lang
? ` class="${options.langPrefix}${escape(lang)}"`
: '';
code = code.replace(/\n$/, '');
return `<pre><code${classAttr}>${escaped ? code : escape(code, true)}\n</code></pre>`;
},
renderer: {
code(code, infoString, escaped) {
const lang = (infoString || '').match(/\S*/)[0];
const classAttr = lang
? ` class="${options.langPrefix}${escape(lang)}"`
: '';
code = code.replace(/\n$/, '');
return `<pre><code${classAttr}>${escaped ? code : escape(code, true)}\n</code></pre>`;
}
}
};
},
};
}

function getLang(token)
{
return (token.lang || '').match(/\S*/)[0];
function getLang(lang) {
return (lang || '').match(/\S*/)[0];
}

function updateToken(token)
{
return (code) => {
if (typeof code === 'string' && code !== token.text) {
token.escaped = true;
token.text = code;
}
};
function updateToken(token) {
return (code) => {
if (typeof code === 'string' && code !== token.text) {
token.escaped = true;
token.text = code;
}
};
}

// copied from marked helpers
Expand All @@ -65,26 +72,23 @@ const escapeReplace = new RegExp(escapeTest.source, 'g');
const escapeTestNoEncode = /[<>"']|&(?!(#\d{1,7}|#[Xx][a-fA-F0-9]{1,6}|\w+);)/;
const escapeReplaceNoEncode = new RegExp(escapeTestNoEncode.source, 'g');
const escapeReplacements = {
'&': '&amp;',
'<': '&lt;',
'>': '&gt;',
'"': '&quot;',
"'": '&#39;'
'&': '&amp;',
'<': '&lt;',
'>': '&gt;',
'"': '&quot;',
"'": '&#39;',
};
const getEscapeReplacement = (ch) => escapeReplacements[ch];
function escape(html, encode) {
if (encode) {
if (escapeTest.test(html)) {
return html.replace(escapeReplace, getEscapeReplacement);
}
} else {
if (escapeTestNoEncode.test(html)) {
return html.replace(escapeReplaceNoEncode, getEscapeReplacement);
}
if (encode) {
if (escapeTest.test(html)) {
return html.replace(escapeReplace, getEscapeReplacement);
}
} else {
if (escapeTestNoEncode.test(html)) {
return html.replace(escapeReplaceNoEncode, getEscapeReplacement);
}
}

return html;
}

//exports.markedHighlight = markedHighlight;

return html;
}
Loading

0 comments on commit ef9df75

Please sign in to comment.