Merge pull request #1 from luveqz/2024-03-05-add-anthropic-provider
[feature] add anthropic provider
larxn authored Mar 10, 2024
2 parents f4c6f8c + da82610 commit a81f337
Showing 22 changed files with 3,624 additions and 1,191 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -29,4 +29,5 @@ results
yarn.lock

# Secrets
browserstack.json
browserstack.json
.vercel
2 changes: 1 addition & 1 deletion README.md
@@ -28,7 +28,7 @@ npm install
Start the development server on [`localhost:3000`](http://localhost:3000):

```bash
npm run dev
npm run vercel:dev
```

## Build
48 changes: 48 additions & 0 deletions api/index.js
@@ -0,0 +1,48 @@
import Anthropic from '@anthropic-ai/sdk'
import express from 'express'
import cors from 'cors'
const app = express()
const port = 3001

app.use(cors())
app.use(express.json())

app.post('/api/generate', async (req, res) => {
const apiKey = req.body.apiKey
const prompt = req.body.prompt
const model = req.body.model

const anthropic = new Anthropic({
apiKey,
})

const stream = await anthropic.messages.create({
max_tokens: 1024,
messages: [
{
role: 'user',
content: prompt,
},
],
model,
stream: true,
})

for await (const messageStreamEvent of stream) {
if (messageStreamEvent?.delta?.text) {
res.write(messageStreamEvent?.delta?.text)
}
}

res.end()
})

app.listen(port, () => {
console.log('[*] Anthropic endpoint running')
})

export default app

export const config = {
supportsResponseStreaming: true,
}
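
For context, a minimal sketch of how a client could consume this streaming endpoint. The request body mirrors what the handler above reads (`{ apiKey, prompt, model }`); the helper name and callback are hypothetical and not part of this diff:

```ts
// Hypothetical client-side helper for the /api/generate endpoint above.
// The body fields match what the Express handler destructures from req.body.
export async function streamCompletion(
  apiKey: string,
  prompt: string,
  model: string,
  onChunk: (text: string) => void,
): Promise<void> {
  const response = await fetch('/api/generate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ apiKey, prompt, model }),
  })

  if (!response.ok || !response.body) {
    throw new Error(`Request failed with status ${response.status}`)
  }

  // The server streams plain text chunks via res.write(), so decode
  // them as they arrive and hand each one to the caller.
  const reader = response.body.getReader()
  const decoder = new TextDecoder()

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    onChunk(decoder.decode(value, { stream: true }))
  }
}
```

Called as, say, `streamCompletion(key, 'Continue this scene', 'claude-3-haiku-20240307', appendToEditor)`, each decoded chunk can be appended to the document as it arrives.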
3 changes: 2 additions & 1 deletion assets/css/main.css
@@ -1,7 +1,7 @@
/*-------------------------------------
Fonts
-------------------------------------*/
@import url('https://fonts.googleapis.com/css2?family=Nunito:wght@500;600;700&display=swap');
@import url('https://fonts.googleapis.com/css2?family=Nunito:wght@400;500;600;700&display=swap');

/*-------------------------------------
Tailwind
@@ -29,6 +29,7 @@ body {

::-webkit-scrollbar-track {
border-radius: 2px;
margin: 8px 0px;
}

body::-webkit-scrollbar-track {
9 changes: 8 additions & 1 deletion components/TheNavigation.vue
@@ -6,7 +6,7 @@ import startCase from 'lodash/startCase'
const { toggle: toggleFullScreen, isFullscreen } = useFullscreen()
const { isSupported: supportsFileSystemAccess } = useFileSystemAccess()
const { $editor } = useNuxtApp()
const { $editor, $modal } = useNuxtApp()
type MenuList = {
label: string
@@ -47,6 +47,13 @@ const menuList: MenuList = [
$editor.save({ toFileSystem: true })
},
},
{
label: 'Config',
startsSection: true,
action() {
$modal.open('config')
},
},
],
},
{
7 changes: 6 additions & 1 deletion components/common/BaseInput.vue
@@ -4,6 +4,10 @@ defineProps({
type: String,
required: true,
},
variant: {
type: String as PropType<'small' | 'medium'>,
default: 'small',
},
})
defineEmits(['update:model-value'])
@@ -12,7 +16,8 @@ defineEmits(['update:model-value'])
<template>
<input
type="text"
class="border-blue-gray-200 h-10 rounded border bg-white p-2.5 px-2 text-sm font-medium placeholder:opacity-40"
class="h-10 rounded border border-blue-gray-200 bg-white p-2.5 font-medium placeholder:opacity-80"
:class="variant === 'small' ? 'px-2 text-sm' : 'px-4 text-base'"
:value="modelValue"
@input="
$emit('update:model-value', ($event.target as HTMLInputElement).value)
27 changes: 1 addition & 26 deletions components/common/PromptConfigPopover.vue
@@ -1,5 +1,5 @@
<script setup lang="ts">
import { type Prompt, ResponseModeLabels } from '@/lib/types/library'
import { type Prompt } from '@/lib/types/library'
import { PROMPT_ICON_CATALOG } from '@/lib/utils/library'
import { PROVIDERS } from '@/stores/llm'
@@ -19,16 +19,6 @@ const _groupName = ref(props.prompt.groupName)
const showAdvanced = ref(false)
const showIcons = ref(false)
const selectedResponseMode = ref({
id: props.prompt.responseMode,
label: ResponseModeLabels[props.prompt.responseMode],
})
const responseModes = Object.entries(ResponseModeLabels).map(([id, label]) => ({
id: Number(id),
label,
}))
const selectedProvider = ref({
id: props.prompt.providerId,
label: PROVIDERS[props.prompt.providerId].label,
@@ -199,21 +189,6 @@ const paginatedIcons = computed(() => {
</div>
</div>
</div>

<!-- Fourth Line -->
<div>
<div class="text-xs font-bold">
<label class="mb-1 block">Response mode</label>
<BaseSelect
v-model="selectedResponseMode"
:options="responseModes"
@update:model-value="
(option) => (prompt.responseMode = option.id)
"
class="w-full"
/>
</div>
</div>
</section>

<!-- Icons -->
101 changes: 101 additions & 0 deletions components/config/ModelConfigPanel.vue
@@ -0,0 +1,101 @@
<template>
<section class="p-10 px-14">
<h2 class="text-2xl font-bold">Models</h2>

<p class="mt-4 leading-[120%]">
You can connect <strong>Dear Ghost</strong> to different large language
model providers. These are the currently supported providers:
</p>
</section>

<hr class="border-0 border-b-2 border-[#DEE4EB]" />

<section class="p-10 px-14">
<h3 class="text-xl font-bold"> Anthropic </h3>

<p class="mt-4 leading-[120%]">
The company behind <strong>Claude</strong>, one of the most popular families
of models in the creative writing space.
</p>
<p class="mt-4 leading-[120%]">
To use Claude through the Dear Ghost interface, you will have to create an
API key on the
<a
class="font-semibold text-blue-600"
href="https://console.anthropic.com/dashboard"
target="_blank"
rel="noreferrer"
>Anthropic Console</a
>.
</p>

<label class="mb-1 mt-5 block font-bold">API key:</label>
<BaseInput
variant="medium"
v-model="$editor.providerConfig.anthropic.apiKey"
class="w-full max-w-72"
placeholder="sk-ant-api03-rV97zpCN..."
/>
</section>

<hr class="mx-auto max-w-20 border-0 border-b-2 border-[#DEE4EB]" />

<section class="p-10 px-14">
<h3 class="text-xl font-bold"> LM Studio </h3>

<p class="mt-4 leading-[120%]">
This is an app you can use to download and run models on your laptop.
<strong>Mistral 7B</strong>
is one of the many models in their catalog.
</p>
<p class="mt-4 leading-[120%]">
Once you have installed
<a
class="font-semibold text-blue-600"
href="https://lmstudio.ai/"
target="_blank"
rel="noreferrer"
>LM Studio</a
>
and downloaded your favorite model, run its local server.
</p>

<label class="mb-1 mt-5 block font-bold">Server port:</label>
<BaseInput
variant="medium"
v-model="$editor.providerConfig.lmStudio.port"
class="w-full max-w-72"
placeholder="4321"
/>
</section>

<hr class="mx-auto max-w-20 border-0 border-b-2 border-[#DEE4EB]" />

<section class="p-10 px-14 pb-20">
<h3 class="text-xl font-bold"> Ollama </h3>

<p class="mt-4 leading-[120%]">
A command-line tool you can use to download and run models on your laptop.
<strong>Mistral 7B</strong>
is one of the many models in their catalog.
</p>
<p class="mt-4 leading-[120%]">
Once you have installed
<a
class="font-semibold text-blue-600"
href="https://ollama.com/"
target="_blank"
rel="noreferrer"
>Ollama</a
>
and downloaded your favorite model, run its local server.
</p>

<label class="mb-1 mt-5 block font-bold">Server host:</label>
<BaseInput
variant="medium"
v-model="$editor.providerConfig.ollama.host"
class="w-full max-w-72"
placeholder="http://localhost:11434"
/>
</section>
</template>
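
The three inputs above only persist `apiKey`, `port`, and `host` on `$editor.providerConfig`. As a rough, hedged sketch of how those values could be turned into request targets (the helper below is illustrative and not part of this diff; it assumes LM Studio exposes its OpenAI-compatible local server on the configured port and Ollama serves its HTTP API on the configured host):

```ts
// Illustrative only: maps the config fields bound in ModelConfigPanel.vue
// to the endpoints each provider is typically reached at.
type ProviderConfig = {
  anthropic: { apiKey: string }
  lmStudio: { port: string }
  ollama: { host: string }
}

function buildEndpoints(config: ProviderConfig) {
  return {
    // Anthropic requests go through the project's own proxy (api/index.js),
    // which forwards the key to the SDK server-side.
    anthropic: '/api/generate',
    // LM Studio's local server is OpenAI-compatible (assumption), so chat
    // requests would target the port entered in the panel above.
    lmStudio: `http://localhost:${config.lmStudio.port}/v1/chat/completions`,
    // Ollama serves its HTTP API on the configured host; the placeholder
    // above shows its usual default, http://localhost:11434.
    ollama: `${config.ollama.host}/api/generate`,
  }
}
```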
17 changes: 17 additions & 0 deletions components/config/PatreonConfigPanel.vue
@@ -0,0 +1,17 @@
<template>
<div class="mt-52 flex flex-col items-center">
<HeartIcon class="h-6 w-6 text-orange-500" />

<p class="mx-6 mt-3 max-w-60 text-center leading-tight">
Become a
<a
class="font-bold text-orange-600"
href="https://www.patreon.com/DearGhost"
target="_blank"
rel="noreferrer"
>Patreon</a
>
and help make this possible.
</p>
</div>
</template>
37 changes: 37 additions & 0 deletions components/dialogs/ConfigDialog.vue
@@ -0,0 +1,37 @@
<script setup>
import { TabGroup, TabList, Tab, TabPanels, TabPanel } from '@headlessui/vue'
const tabs = ['Models', 'Prompts', 'Appearance']
</script>

<template>
<div
class="flex h-[calc(100vh_-_8rem)] w-full max-w-[48rem] overflow-hidden rounded bg-white font-normal"
>
<TabGroup>
<TabList
class="flex w-40 shrink-0 flex-col items-start gap-y-5 bg-[#DEE4EB] px-9 py-12"
>
<Tab v-for="tab in tabs" :key="tab" v-slot="{ selected }">
<span :class="{ 'font-bold': selected }">
{{ tab }}
</span>
</Tab>
</TabList>

<TabPanels class="mr-2 grow overflow-y-auto">
<TabPanel>
<ModelConfigPanel />
</TabPanel>

<TabPanel>
<PatreonConfigPanel />
</TabPanel>

<TabPanel>
<PatreonConfigPanel />
</TabPanel>
</TabPanels>
</TabGroup>
</div>
</template>
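
This dialog is opened from TheNavigation.vue via `$modal.open('config')`. The `$modal` plugin itself is not included in this diff; purely as a hypothetical sketch, a Nuxt plugin exposing that API could look like the following (all names below are assumptions):

```ts
// Hypothetical ~/plugins/modal.ts — not part of this pull request.
// Provides a $modal helper whose open()/close() API matches the
// $modal.open('config') call seen in TheNavigation.vue.
import { ref } from 'vue'
import { defineNuxtPlugin } from '#app'

export default defineNuxtPlugin(() => {
  const current = ref<string | null>(null)

  return {
    provide: {
      modal: {
        current,
        open(name: string) {
          current.value = name
        },
        close() {
          current.value = null
        },
      },
    },
  }
})
```

A dialog host component could then watch `$modal.current` and render `ConfigDialog` when it equals `'config'`.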
7 changes: 1 addition & 6 deletions lib/data/default-prompts.ts
@@ -1,4 +1,4 @@
import { type Prompt, ResponseMode } from '@/lib/types/library'
import { type Prompt } from '@/lib/types/library'
import { LLMProvider } from '@/stores/llm'

export const DEFAULT_PROMPTS: Prompt[] = [
@@ -12,7 +12,6 @@ export const DEFAULT_PROMPTS: Prompt[] = [
{{ selected_text }}
`.trim(),
responseMode: ResponseMode.InsertBelow,
groupName: 'Discovery',
providerId: LLMProvider.LMStudio,
modelId: 'mistral:instruct',
@@ -27,7 +26,6 @@ export const DEFAULT_PROMPTS: Prompt[] = [
{{ selected_text }}
`.trim(),
responseMode: ResponseMode.InsertBelow,
groupName: 'Discovery',
providerId: LLMProvider.LMStudio,
modelId: 'mistral:instruct',
@@ -42,7 +40,6 @@ export const DEFAULT_PROMPTS: Prompt[] = [
{{ selected_text }}
`.trim(),
responseMode: ResponseMode.InsertBelow,
groupName: 'Style Lab',
providerId: LLMProvider.LMStudio,
modelId: 'mistral:instruct',
@@ -57,7 +54,6 @@ export const DEFAULT_PROMPTS: Prompt[] = [
{{ selected_text }}
`.trim(),
responseMode: ResponseMode.InsertBelow,
groupName: 'Style Lab',
providerId: LLMProvider.LMStudio,
modelId: 'mistral:instruct',
@@ -68,7 +64,6 @@ export const DEFAULT_PROMPTS: Prompt[] = [
secondaryLabel: 'Ctrl + Enter',
iconName: 'BoltIcon',
template: '{{ selected_text }}',
responseMode: ResponseMode.InsertBelow,
groupName: 'Other',
providerId: LLMProvider.LMStudio,
modelId: 'mistral:instruct',