Add apis.do
sam-lippert committed Dec 13, 2023
1 parent 37675c2 commit 7048a44
Showing 5 changed files with 579 additions and 625 deletions.
4 changes: 3 additions & 1 deletion .gitignore
@@ -1 +1,3 @@
node_modules
node_modules
*.vars
.wrangler
8 changes: 5 additions & 3 deletions github-webhooks.js
@@ -1,4 +1,6 @@
export default (req, env) => {

import { json } from 'apis.do'

}
// TODO: Complete stub for GitHub webhook
export default (req, env) => {
return json({ success: true, user })
}
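
For orientation, a minimal sketch of how this stub might later handle a delivery, assuming the handler keeps the (req, env) signature and the json helper from apis.do; the x-github-event header is standard for GitHub webhooks, while the response fields shown are illustrative and not part of this commit:

import { json } from 'apis.do'

// Hypothetical completion of the stub: read the event name and payload,
// then acknowledge the delivery. Signature verification is omitted here.
export default async (req, env) => {
  const event = req.headers.get('x-github-event') // e.g. 'push', 'pull_request'
  const payload = await req.json().catch(() => ({}))
  return json({ success: true, event, repository: payload.repository?.full_name })
}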
5 changes: 4 additions & 1 deletion package.json
@@ -4,6 +4,9 @@
"deploy": "wrangler publish worker.js"
},
"devDependencies": {
"wrangler": "^3.2.0"
"wrangler": "^3.20.0"
},
"dependencies": {
"apis.do": "^2.3.3"
}
}
283 changes: 146 additions & 137 deletions worker.js
@@ -1,4 +1,7 @@
export const api = {
import { API, json } from 'apis.do'
import webhooks from './github-webhooks.js'

const api = new API({
icon: '🤖',
name: 'gpt.do',
description: 'GPT-3 Templates and Completions',
@@ -13,168 +16,168 @@ export const api = {
signup: 'https://gpt.do/signup',
subscribe: 'https://gpt.do/subscribe',
repo: 'https://github.com/drivly/gpt.do',
}
})

export default {
fetch: async (req, env) => {
async function getCompletion(options) {
return await fetch('https://api.openai.com/v1/chat/completions', {
method: 'post',
body: JSON.stringify(options),
headers: {
'content-type': 'application/json',
'authorization': 'Bearer ' + env.OPENAI_API_KEY
}
}).then(res => res.json()).catch(() => ({}))
}
const { user, json: data, hostname, pathname, pathSegments, query, } = await env.CTX.fetch(req).then(res => res.json())
if (pathname == '/favicon.ico') return new Response(null, { status: 404 })
// TODO: Complete stub for GitHub webhook
if (pathname == '/webhooks/github') return json({ success: true, user })
if (!user.authenticated) return Response.redirect('/login')
let { messages, functions, } = data || {}
let { n, max_tokens, model, store, } = query || {}
if (!n) n = data?.n
if (!max_tokens) max_tokens = data?.max_tokens
if (!model) model = data?.model
if (!store) store = data?.store
if (!messages) messages = []
let steps = [], input = { ...query }
if (['prompts', 'formats'].includes(pathSegments[0])) {
const [{ value: templates }, { value: file }] = await Promise.allSettled([
fetch('https://gpt.do/templates.json').then(res => res.json()),
query.fileUrl ? fetch(decodeURIComponent(query.fileUrl)).then(res => res.text()) : '',
])
const template = templates[pathSegments[0]][pathSegments[1]]
if (!template) return json({ api, error: 'Template not found.', user, }, 404)
if (!n) n = template.n
if (!max_tokens) max_tokens = template.max_tokens
if (!model) model = template.model
if (!messages?.length) messages = template.messages || formatMessages(template.list) || []
input = { ...template.input, ...query, file, }
if (template.forEach?.length)
steps = Array.isArray(template.forEach[0])
? template.forEach.map(formatMessages)
: [formatMessages(template.forEach)]
}
if (model && functions?.length && /03\d\d$/.test(model)) {
return json({ api, error: model + ' does not support functions.', user, }, 400)
}
if (n) n = parseInt(n)
if (max_tokens) {
max_tokens = parseInt(max_tokens)
if (user.role !== 'admin' && max_tokens > 1000) max_tokens = 1000
}
const id = env.CONVO.idFromName(hostname + user.id.toString())
const stub = env.CONVO.get(id)
if (store && !messages?.length) messages = await stub.fetch(new Request('/'))
if (pathSegments.length === 1 && pathSegments[0] || pathSegments.length === 3 && pathSegments[2]) {
input.item = decodeURIComponent(pathSegments[pathSegments.length - 1])
if (!messages.find(m => m.role === 'user'))
messages.push({ role: 'user', content: input.item })
}
if (input.file && !messages.find(m => m.content.match(/\{\{file\}\}/))) {
messages.push({ role: 'user', content: input.file })
}
if (!messages.find(m => m.role === 'system')) {
messages.unshift({
role: 'system',
content: query.system || data?.system
|| 'You are a helpful assistant who responds in Markdown. All lists should be Markdown checklists with `- [ ]` items.',
})
}
if (Object.keys(input).length) messages = fillMessageTemplate(messages, input)
const options = {
model: model && (!model.startsWith('gpt-4') || user.role === 'admin') ? model : 'gpt-3.5-turbo',
messages,
n,
max_tokens,
functions: functions?.length ? functions : undefined,
user: data?.user || undefined,
api.get('/favicon.ico', () => {
return new Response(null, { status: 404 })
})
api.get('/webhooks/github', webhooks)
api.get('/:message?', handler)
api.get('/:template/:templateId/:message?', handler)
api.createRoute('POST', '/api/:message?', handler)
api.createRoute('POST', '/:message?', handler)
api.createRoute('POST', '/:template/:templateId/:message?', handler)
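// For illustration only (not part of this commit): requests the routes above are intended
// to match, assuming apis.do resolves path parameters in the usual Express-style way.
//   GET  /hello%20world             -> '/:message?'            (the message becomes input.item below)
//   GET  /prompts/brainstorm/cars   -> '/:template/:templateId/:message?'
//   POST /api/summarize             -> 'POST /api/:message?'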

async function handler(req, env) {
async function getCompletion(options) {
return await fetch('https://api.openai.com/v1/chat/completions', {
method: 'post',
body: JSON.stringify(options),
headers: {
'content-type': 'application/json',
authorization: 'Bearer ' + env.OPENAI_API_KEY,
},
})
.then((res) => res.json())
.catch(console.error)
}
const { user, json: data, hostname, pathSegments, query } = await env.CTX.fetch(req).then((res) => res.json())
if (!user.authenticated) return Response.redirect('/login')
let { messages, functions } = data || {}
let { n, max_tokens, model, store } = query || {}
if (!n) n = data?.n
if (!max_tokens) max_tokens = data?.max_tokens
if (!model) model = data?.model
if (!store) store = data?.store
if (!messages) messages = []
let steps = [],
input = { ...query }
if (['prompts', 'formats'].includes(pathSegments[0])) {
const [{ value: templates }, { value: file }] = await Promise.allSettled([
fetch('https://gpt.do/templates.json').then((res) => res.json()),
query.fileUrl ? fetch(decodeURIComponent(query.fileUrl)).then((res) => res.text()) : '',
])
const template = templates[pathSegments[0]][pathSegments[1]]
if (!template) return json({ api, error: 'Template not found.', user }, 404)
if (!n) n = template.n
if (!max_tokens) max_tokens = template.max_tokens
if (!model) model = template.model
if (!messages?.length) messages = template.messages || formatMessages(template.list) || []
input = { ...template.input, ...query, file }
if (template.forEach?.length) steps = Array.isArray(template.forEach[0]) ? template.forEach.map(formatMessages) : [formatMessages(template.forEach)]
}
if (model && functions?.length && /03\d\d$/.test(model)) {
return json({ api, error: model + ' does not support functions.', user }, 400)
}
if (n) n = parseInt(n)
if (max_tokens) {
max_tokens = parseInt(max_tokens)
if (user.role !== 'admin' && max_tokens > 1000) max_tokens = 1000
}
const id = user.id && env.CONVO.idFromName(hostname + user.id.toString())
const stub = user.id && env.CONVO.get(id)
if (stub && store && !messages?.length) messages = await stub.fetch(new Request('/'))
if ((pathSegments.length === 1 && pathSegments[0]) || (pathSegments.length === 3 && pathSegments[2])) {
input.item = decodeURIComponent(pathSegments[pathSegments.length - 1])
if (!messages.find((m) => m.role === 'user')) {
messages.push({ role: 'user', content: input.item })
}
const completion = await getCompletion(options)
if (completion.error) {
console.error(completion.error)
return json({
}
if (input.file && !messages.find((m) => m.content.match(/\{\{file\}\}/))) {
messages.push({ role: 'user', content: input.file })
}
if (!messages.find((m) => m.role === 'system')) {
messages.unshift({
role: 'system',
content: query.system || data?.system || 'You are a helpful assistant who responds in Markdown. All lists should be Markdown checklists with `- [ ]` items.',
})
}
if (Object.keys(input).length) messages = fillMessageTemplate(messages, input)
const options = {
model: model && (!model.startsWith('gpt-4') || user.role === 'admin') ? model : 'gpt-3.5-turbo',
messages,
n,
max_tokens,
functions: functions?.length ? functions : undefined,
user: data?.user || undefined,
}
const completion = await getCompletion(options)
if (completion.error) {
console.error(completion.error)
return json(
{
api,
error: "An error occurred while processing your request.",
error: 'An error occurred while processing your request.',
completion: query.debug ? completion : undefined,
functions: query.debug ? functions : undefined,
inputMessages: query.debug ? [[messages]] : undefined,
user,
}, 500)
}
let message = completion.choices?.[0]?.message
let response = message?.content?.split('\n') || []
let responses = []
for (let stepIX = 0; stepIX < steps.length; stepIX++) {
const step = steps[stepIX]
const inputForks = (stepIX === 0 ? response : responses[stepIX - 1].flatMap(r => r.response))
.map(r => r.replace(/^- "?(.+?)"?$/, '$1').trim()) // remove markdown list formatting
.filter(r => r)
const promises = []
responses[stepIX] = []
for (let forkIX = 0; forkIX < inputForks.length; forkIX++) {
responses[stepIX][forkIX] = {
inputMessages: fillMessageTemplate(step, {
...input,
item: inputForks[forkIX],
})
}
promises.push(getCompletion({
},
500
)
}
let message = completion.choices?.[0]?.message
let response = message?.content?.split('\n') || []
let responses = []
for (let stepIX = 0; stepIX < steps.length; stepIX++) {
const step = steps[stepIX]
const inputForks = (stepIX === 0 ? response : responses[stepIX - 1].flatMap((r) => r.response))
.map((r) => r?.replace(/^- "?(.+?)"?$/, '$1')?.trim()) // remove markdown list formatting
.filter((r) => r)
const promises = []
responses[stepIX] = []
for (let forkIX = 0; forkIX < inputForks.length; forkIX++) {
responses[stepIX][forkIX] = {
inputMessages: fillMessageTemplate(step, {
...input,
item: inputForks[forkIX],
}),
}
promises.push(
getCompletion({
...options,
messages: responses[stepIX][forkIX].inputMessages,
}).then(c => {
}).then((c) => {
responses[stepIX][forkIX].completion = c
responses[stepIX][forkIX].response = c.choices?.[0]?.message?.content?.split('\n') || []
}))
}
await Promise.allSettled(promises)
}
if (store) {
await env.CONVO.fetch(id, new Request('/', {
body: messages.concat(message)
}))
})
)
}
return json({
response: responses.length
? responses[responses.length - 1].flatMap(r => r.response)
: response,
...(responses.length === 0 ? completion : {}),
completions: responses.length
? [[completion]].concat(responses.map(r => r.map(r => r.completion)))
: undefined,
functions: query.debug ? functions : undefined,
inputMessages: query.debug
? [[messages]].concat(responses.map(r => r.map(r => r.inputMessages)))
: undefined,
user,
})
},
await Promise.allSettled(promises)
}
if (store) {
await env.CONVO.fetch(
id,
new Request('/', {
body: messages.concat(message),
})
)
}
return json({
response: responses.length ? responses[responses.length - 1].flatMap((r) => r.response) : response,
...(responses.length === 0 ? completion : {}),
completions: responses.length ? [[completion]].concat(responses.map((r) => r.map((r) => r.completion))) : undefined,
functions: query.debug ? functions : undefined,
inputMessages: query.debug ? [[messages]].concat(responses.map((r) => r.map((r) => r.inputMessages))) : undefined,
user,
})
}
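
// Illustrative shape of the templates.json document fetched above; field names are
// inferred from how `template` is read in the handler and are not defined by this commit:
// {
//   "prompts": {
//     "brainstorm": {
//       "model": "gpt-3.5-turbo",
//       "n": 1,
//       "max_tokens": 500,
//       "input": { "tone": "concise" },
//       "messages": [{ "role": "user", "content": "List ideas about {{item}}" }],
//       "forEach": [{ "user": "Expand on {{item}}" }]
//     }
//   }
// }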

function fillMessageTemplate(messages, input) {
return messages.map(message => ({
return messages.map((message) => ({
role: message.role,
name: message.name,
content: message.content.replace(/\{\{([^}]+)\}\}/g, (_, key) => input[key]),
}))
}
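
// Usage sketch (illustrative): fillMessageTemplate substitutes {{key}} placeholders from `input`, e.g.
//   fillMessageTemplate([{ role: 'user', content: 'Summarize {{item}}' }], { item: 'the README' })
//   // -> [{ role: 'user', name: undefined, content: 'Summarize the README' }]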

function formatMessages(messageSet) {
return messageSet.map(Object.entries).map(i => ({
return messageSet.map(Object.entries).map((i) => ({
role: i[0][0],
content: i[0][1],
}))
}
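
// Usage sketch (illustrative): formatMessages turns a list of single-key { role: content }
// objects into chat messages, e.g.
//   formatMessages([{ system: 'You are terse.' }, { user: 'Hi' }])
//   // -> [{ role: 'system', content: 'You are terse.' }, { role: 'user', content: 'Hi' }]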

const json = (obj, status) =>
new Response(JSON.stringify(obj, null, 2), {
headers: { 'content-type': 'application/json; charset=utf-8' },
status,
})


export class Conversation {
constructor(state) {
this.state = state
@@ -185,7 +188,13 @@ export class Conversation {

async fetch(request) {
let { messages } = await request.json()
if (messages?.length) await this.state.storage.set('messages', this.messages = messages)
if (messages?.length) await this.state.storage.set('messages', (this.messages = messages))
return new Response(this.messages)
}
}

export default {
fetch: async (req, env, ctx) => {
return await api.fetch(req, env, ctx)
},
}
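
// Note (not part of this commit): env.CTX and env.CONVO referenced in worker.js are Worker
// bindings; CTX looks like a service binding that resolves request context, and CONVO a
// Durable Object namespace backed by the Conversation class. Both must be declared in the
// Worker's wrangler configuration for this code to run.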