-
Notifications
You must be signed in to change notification settings - Fork 11
/
Copy pathllama_chat.go
41 lines (34 loc) · 892 Bytes
/
llama_chat.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
package go_ernie
import (
"context"
"fmt"
"net/http"
)
// llamaChatURL is the base path of the Wenxin Workshop Llama chat endpoint.
// The model name from the request is appended to it to form the full URL.
const llamaChatURL = "/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/"
// LlamaChatRequest is the request payload for a Llama chat completion call.
type LlamaChatRequest struct {
// Messages is the conversation history sent to the model.
Messages []ChatCompletionMessage `json:"messages"`
// Stream requests a streaming response; not supported by
// CreateLlamaChatCompletion, which rejects it with an error.
Stream bool `json:"stream"`
// UserId identifies the end user on whose behalf the request is made.
// NOTE(review): Go convention would name this UserID, but renaming the
// exported field would break callers, so it is left as-is.
UserId string `json:"user_id"`
// Model selects the Llama model; it is appended to llamaChatURL to build
// the endpoint path and is not serialized into the JSON body.
Model string `json:"-"`
}
// LlamaChatResponse is the response of a Llama chat completion call.
// It embeds ErnieBotResponse, sharing its fields (including ErrorCode and
// APIError used for service-side error reporting).
type LlamaChatResponse struct {
ErnieBotResponse
}
// CreateLlamaChatCompletion sends a non-streaming chat completion request to
// the Llama endpoint selected by request.Model and decodes the result into
// response.
//
// Streaming is not supported by this method: a request with Stream set to
// true fails immediately with ErrChatCompletionStreamNotSupported. A non-zero
// ErrorCode in the decoded body is surfaced as *APIError.
func (c *Client) CreateLlamaChatCompletion(
ctx context.Context,
request LlamaChatRequest,
) (response LlamaChatResponse, err error) {
if request.Stream {
err = ErrChatCompletionStreamNotSupported
return
}
req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(fmt.Sprintf("%s%s", llamaChatURL, request.Model)), withBody(request))
if err != nil {
return
}
// Return transport-level failures immediately so they are not masked by
// the API-error check below (a failed request may leave response
// zero-valued, or partially populated in a way that misreports the cause).
if err = c.sendRequest(req, &response); err != nil {
return
}
// A non-zero ErrorCode signals a service-side failure carried in the body.
if response.ErrorCode != 0 {
err = &response.APIError
}
return
}