diff --git a/CMakeLists.txt b/CMakeLists.txt
index 444e671..afb8538 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -48,6 +48,7 @@ add_qtc_plugin(QodeAssist
     templates/CodeLlamaChat.hpp
     templates/Qwen.hpp
     templates/StarCoderChat.hpp
+    templates/Ollama.hpp
     providers/OllamaProvider.hpp providers/OllamaProvider.cpp
     providers/LMStudioProvider.hpp providers/LMStudioProvider.cpp
     providers/OpenAICompatProvider.hpp providers/OpenAICompatProvider.cpp
@@ -62,5 +63,3 @@ add_qtc_plugin(QodeAssist
     chat/NavigationPanel.hpp chat/NavigationPanel.cpp
     ConfigurationManager.hpp ConfigurationManager.cpp
 )
-
-target_link_libraries(QodeAssist PRIVATE )
diff --git a/ChatView/ClientInterface.cpp b/ChatView/ClientInterface.cpp
index 6235b3b..63f9051 100644
--- a/ChatView/ClientInterface.cpp
+++ b/ChatView/ClientInterface.cpp
@@ -73,10 +73,20 @@ void ClientInterface::sendMessage(const QString &message, bool includeCurrentFil
     auto providerName = Settings::generalSettings().caProvider();
     auto provider = LLMCore::ProvidersManager::instance().getProviderByName(providerName);
 
+    if (!provider) {
+        LOG_MESSAGE(QString("No provider found with name: %1").arg(providerName));
+        return;
+    }
+
     auto templateName = Settings::generalSettings().caTemplate();
     auto promptTemplate = LLMCore::PromptTemplateManager::instance().getChatTemplateByName(
         templateName);
 
+    if (!promptTemplate) {
+        LOG_MESSAGE(QString("No template found with name: %1").arg(templateName));
+        return;
+    }
+
     LLMCore::ContextData context;
     context.prefix = message;
     context.suffix = "";
diff --git a/LLMClientInterface.cpp b/LLMClientInterface.cpp
index 335d1ea..dd15605 100644
--- a/LLMClientInterface.cpp
+++ b/LLMClientInterface.cpp
@@ -152,10 +152,20 @@ void LLMClientInterface::handleCompletion(const QJsonObject &request)
     auto providerName = Settings::generalSettings().ccProvider();
     auto provider = LLMCore::ProvidersManager::instance().getProviderByName(providerName);
 
+    if (!provider) {
+        LOG_MESSAGE(QString("No provider found with name: %1").arg(providerName));
+        return;
+    }
+
     auto templateName = Settings::generalSettings().ccTemplate();
     auto promptTemplate = LLMCore::PromptTemplateManager::instance().getFimTemplateByName(
         templateName);
 
+    if (!promptTemplate) {
+        LOG_MESSAGE(QString("No template found with name: %1").arg(templateName));
+        return;
+    }
+
     LLMCore::LLMConfig config;
     config.requestType = LLMCore::RequestType::Fim;
     config.provider = provider;
@@ -163,10 +173,8 @@ void LLMClientInterface::handleCompletion(const QJsonObject &request)
     config.url = QUrl(
         QString("%1%2").arg(Settings::generalSettings().ccUrl(), provider->completionEndpoint()));
 
-    config.providerRequest = {{"model", Settings::generalSettings().ccModel()},
-                              {"stream", true},
-                              {"stop",
-                               QJsonArray::fromStringList(config.promptTemplate->stopWords())}};
+    config.providerRequest = {{"model", Settings::generalSettings().ccModel()}, {"stream", true}};
+
     config.multiLineCompletion = completeSettings.multiLineCompletion();
 
     QString systemPrompt;
@@ -174,8 +182,12 @@ void LLMClientInterface::handleCompletion(const QJsonObject &request)
         systemPrompt.append(completeSettings.systemPrompt());
     if (!updatedContext.fileContext.isEmpty())
         systemPrompt.append(updatedContext.fileContext);
+    if (!systemPrompt.isEmpty())
+        config.providerRequest["system"] = systemPrompt;
 
-    config.providerRequest["system"] = systemPrompt;
+    const auto stopWords = QJsonArray::fromStringList(config.promptTemplate->stopWords());
+    if (!stopWords.isEmpty())
+        config.providerRequest["stop"] = stopWords;
 
     config.promptTemplate->prepareRequest(config.providerRequest, updatedContext);
     config.provider->prepareRequest(config.providerRequest, LLMCore::RequestType::Fim);
diff --git a/qodeassist.cpp b/qodeassist.cpp
index a8b2b2a..6941e5a 100644
--- a/qodeassist.cpp
+++ b/qodeassist.cpp
@@ -54,6 +54,7 @@
 #include "templates/CustomFimTemplate.hpp"
 #include "templates/DeepSeekCoderChat.hpp"
 #include "templates/DeepSeekCoderFim.hpp"
+#include "templates/Ollama.hpp"
 #include "templates/Qwen.hpp"
 #include "templates/StarCoder2Fim.hpp"
 #include "templates/StarCoderChat.hpp"
@@ -99,6 +100,7 @@ class QodeAssistPlugin final : public ExtensionSystem::IPlugin
         templateManager.registerTemplate();
         templateManager.registerTemplate();
         templateManager.registerTemplate();
+        templateManager.registerTemplate<Templates::OllamaAutoFim>();
 
         Utils::Icon QCODEASSIST_ICON(
             {{":/resources/images/qoderassist-icon.png", Utils::Theme::IconsBaseColor}});
diff --git a/settings/GeneralSettings.cpp b/settings/GeneralSettings.cpp
index f638597..422a61b 100644
--- a/settings/GeneralSettings.cpp
+++ b/settings/GeneralSettings.cpp
@@ -70,7 +70,7 @@ GeneralSettings::GeneralSettings()
     ccModel.setHistoryCompleter(Constants::CC_MODEL_HISTORY);
     ccSelectModel.m_buttonText = TrConstants::SELECT;
 
-    initStringAspect(ccTemplate, Constants::CC_TEMPLATE, TrConstants::TEMPLATE, "CodeLlama FIM");
+    initStringAspect(ccTemplate, Constants::CC_TEMPLATE, TrConstants::TEMPLATE, "Ollama Auto FIM");
     ccTemplate.setReadOnly(true);
     ccSelectTemplate.m_buttonText = TrConstants::SELECT;
 
diff --git a/templates/Ollama.hpp b/templates/Ollama.hpp
new file mode 100644
index 0000000..6824564
--- /dev/null
+++ b/templates/Ollama.hpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 Petr Mironychev
+ *
+ * This file is part of QodeAssist.
+ *
+ * QodeAssist is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * QodeAssist is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with QodeAssist. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "llmcore/PromptTemplate.hpp"
+
+namespace QodeAssist::Templates {
+
+class OllamaAutoFim : public LLMCore::PromptTemplate
+{
+public:
+    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Fim; }
+    QString name() const override { return "Ollama Auto FIM"; }
+    QString promptTemplate() const override { return {}; }
+    QStringList stopWords() const override { return QStringList(); }
+
+    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    {
+        request["prompt"] = context.prefix;
+        request["suffix"] = context.suffix;
+    }
+};
+
+} // namespace QodeAssist::Templates