// main.go — D2A (by Niansuh)
// OpenAI-compatible HTTP proxy for DuckDuckGo AI Chat.
package main
import (
"bufio"
"bytes"
"encoding/json"
"io"
"log"
"net/http"
"time"
"github.com/gin-gonic/gin"
)
// Upstream DuckDuckGo AI Chat endpoints and the browser identity presented
// to them.
const (
	// statusURL hands out the "x-vqd-4" session token via a response header.
	statusURL = "https://duckduckgo.com/duckchat/v1/status"
	// chatURL accepts the chat payload and replies with an SSE event stream.
	chatURL = "https://duckduckgo.com/duckchat/v1/chat"
	// userAgent mimics Firefox on Ubuntu so requests look browser-originated.
	userAgent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0"
)
// OpenAIRequest is the subset of the OpenAI /v1/chat/completions request
// body that this proxy understands.
type OpenAIRequest struct {
	Model    string `json:"model"`
	Messages []struct {
		Role    string `json:"role"`
		Content string `json:"content"`
	} `json:"messages"`
	// Stream selects SSE chunked output (true) or one aggregated reply (false).
	Stream bool `json:"stream"`
}
// OpenAIResponse is the streaming-shaped completion envelope: each choice
// carries a "delta" fragment, mirroring OpenAI's chat.completion.chunk.
type OpenAIResponse struct {
	ID      string         `json:"id"`
	Object  string         `json:"object"`
	Created int64          `json:"created"`
	Model   string         `json:"model"`
	Choices []OpenAIChoice `json:"choices"`
}
// OpenAIChoice is a single streamed choice; FinishReason is nil until the
// final chunk, where it is set to "stop".
type OpenAIChoice struct {
	Index        int         `json:"index"`
	Delta        OpenAIDelta `json:"delta"`
	Logprobs     interface{} `json:"logprobs"`
	FinishReason *string     `json:"finish_reason"`
}
// OpenAIDelta holds a role/content pair. It doubles as the incremental
// "delta" in streamed chunks and as the full "message" in non-stream replies.
type OpenAIDelta struct {
	Role    string `json:"role,omitempty"`
	Content string `json:"content,omitempty"`
}
// OpenAINonStreamResponse is the aggregated (non-streaming) completion
// envelope: each choice carries a complete "message" instead of a delta.
type OpenAINonStreamResponse struct {
	ID      string                  `json:"id"`
	Object  string                  `json:"object"`
	Created int64                   `json:"created"`
	Model   string                  `json:"model"`
	Choices []OpenAINonStreamChoice `json:"choices"`
}
// OpenAINonStreamChoice is one choice of an aggregated reply, holding the
// whole assistant message and its finish reason.
type OpenAINonStreamChoice struct {
	Index        int         `json:"index"`
	Message      OpenAIDelta `json:"message"`
	FinishReason *string     `json:"finish_reason"`
}
// DuckDuckGoResponse is one decoded SSE "data:" event from the DuckDuckGo
// chat endpoint; Message holds the next text fragment of the reply.
type DuckDuckGoResponse struct {
	Role    string `json:"role"`
	Message string `json:"message"`
	Created int64  `json:"created"`
	ID      string `json:"id"`
	Action  string `json:"action"`
	Model   string `json:"model"`
}
// chatWithDuckDuckGo proxies one chat request to the DuckDuckGo AI backend
// and writes the reply to the gin context: as an SSE stream of
// chat.completion.chunk objects when stream is true, or as a single
// aggregated chat.completion object otherwise.
//
// Flow: fetch the anti-bot "x-vqd-4" token from the status endpoint, POST
// the conversation to the chat endpoint, then relay its SSE reply.
func chatWithDuckDuckGo(c *gin.Context, messages []struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}, stream bool) {
	client := &http.Client{Timeout: 10 * time.Second}

	// Step 1: obtain the x-vqd-4 session token required by the chat endpoint.
	req, err := http.NewRequest("GET", statusURL, nil)
	if err != nil {
		log.Println(err)
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"})
		return
	}
	req.Header.Set("x-vqd-accept", "1")
	setHeaders(req, userAgent)
	resp, err := client.Do(req)
	if err != nil {
		log.Println(err)
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get vqd_4"})
		return
	}
	vqd4 := resp.Header.Get("x-vqd-4")
	// Drain and close promptly so the keep-alive connection can be reused.
	io.Copy(io.Discard, resp.Body)
	resp.Body.Close()

	// Step 2: send the chat request with the token attached.
	payload := map[string]interface{}{
		"model":    "gpt-3.5-turbo-0125",
		"messages": messages,
	}
	payloadBytes, err := json.Marshal(payload)
	if err != nil {
		log.Println(err)
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to marshal payload"})
		return
	}
	req, err = http.NewRequest("POST", chatURL, bytes.NewBuffer(payloadBytes))
	if err != nil {
		log.Println(err)
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"})
		return
	}
	req.Header.Set("x-vqd-4", vqd4)
	setHeaders(req, userAgent)
	resp, err = client.Do(req)
	if err != nil {
		log.Println(err)
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to send chat request"})
		return
	}
	defer resp.Body.Close()
	// Surface upstream failures instead of trying to parse an error page as SSE.
	if resp.StatusCode != http.StatusOK {
		c.JSON(http.StatusBadGateway, gin.H{"error": "upstream status: " + resp.Status})
		return
	}

	// Step 3: relay the upstream SSE stream.
	// SSE headers only make sense for a streaming reply; the non-stream path
	// writes one JSON document at the end via c.JSON.
	if stream {
		c.Header("Content-Type", "text/event-stream")
		c.Header("Cache-Control", "no-cache")
		c.Header("Connection", "keep-alive")
	}
	// gin's ResponseWriter implements http.Flusher, but guard the assertion
	// rather than risk a nil-pointer panic.
	flusher, canFlush := c.Writer.(http.Flusher)

	reader := bufio.NewReader(resp.Body)
	var response OpenAIResponse
	response.Choices = make([]OpenAIChoice, 1)
	var responseContent string
	firstChunk := true

	for {
		line, err := reader.ReadBytes('\n')
		if err != nil {
			if err == io.EOF {
				break
			}
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}
		if !bytes.HasPrefix(line, []byte("data: ")) {
			continue
		}
		chunk := line[6:]

		if bytes.HasPrefix(chunk, []byte("[DONE]")) {
			finish := "stop"
			if stream {
				// Final chunk: empty delta carrying finish_reason, followed
				// by the OpenAI protocol-level "[DONE]" terminator.
				response.Object = "chat.completion.chunk"
				response.Choices[0].Delta = OpenAIDelta{}
				response.Choices[0].FinishReason = &finish
				if b, err := json.Marshal(response); err == nil {
					c.Writer.Write(append(append([]byte("data: "), b...), "\n\n"...))
				}
				c.Writer.Write([]byte("data: [DONE]\n\n"))
				if canFlush {
					flusher.Flush()
				}
			} else {
				// Aggregated shape: "message", not "delta" (OpenAI non-stream
				// format), using the dedicated non-stream types.
				c.JSON(http.StatusOK, OpenAINonStreamResponse{
					ID:      response.ID,
					Object:  "chat.completion",
					Created: response.Created,
					Model:   response.Model,
					Choices: []OpenAINonStreamChoice{{
						Message:      OpenAIDelta{Role: "assistant", Content: responseContent},
						FinishReason: &finish,
					}},
				})
			}
			return
		}

		var data DuckDuckGoResponse
		decoder := json.NewDecoder(bytes.NewReader(chunk))
		decoder.UseNumber() // keep numeric fields exact while decoding
		if err := decoder.Decode(&data); err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}
		response.ID = data.ID
		response.Created = data.Created
		response.Model = data.Model
		responseContent += data.Message

		if stream {
			response.Object = "chat.completion.chunk"
			response.Choices[0].Delta.Content = data.Message
			if firstChunk {
				// Per OpenAI convention, the first delta announces the role.
				response.Choices[0].Delta.Role = "assistant"
				firstChunk = false
			}
			b, err := json.Marshal(response)
			if err != nil {
				c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
				return
			}
			c.Writer.Write(append(append([]byte("data: "), b...), "\n\n"...))
			if canFlush {
				flusher.Flush()
			}
			response.Choices[0].Delta = OpenAIDelta{}
		}
	}
}
func setHeaders(req *http.Request, userAgent string) {
req.Header.Set("User-Agent", userAgent)
req.Header.Set("Accept", "text/event-stream")
req.Header.Set("Accept-Language", "de,en-US;q=0.7,en;q=0.3")
req.Header.Set("Accept-Encoding", "gzip, deflate, br")
req.Header.Set("Referer", "https://duckduckgo.com/")
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Origin", "https://duckduckgo.com")
req.Header.Set("Connection", "keep-alive")
req.Header.Set("Cookie", "dcm=1")
req.Header.Set("Sec-Fetch-Dest", "empty")
req.Header.Set("Sec-Fetch-Mode", "cors")
req.Header.Set("Sec-Fetch-Site", "same-origin")
req.Header.Set("Pragma", "no-cache")
req.Header.Set("TE", "trailers")
}
// main wires up the HTTP routes and starts the proxy on :3456.
func main() {
	gin.SetMode(gin.ReleaseMode)
	r := gin.Default()

	// CORS: answering the OPTIONS preflight alone is not enough — browsers
	// also require Access-Control-Allow-Origin on the actual POST/GET
	// response, so set the headers on every request via middleware.
	r.Use(func(c *gin.Context) {
		c.Header("Access-Control-Allow-Origin", "*")
		c.Header("Access-Control-Allow-Methods", "POST, OPTIONS")
		c.Header("Access-Control-Allow-Headers", "Content-Type, Accept, Origin, X-Requested-With")
		c.Next()
	})

	r.GET("/", func(c *gin.Context) {
		c.JSON(http.StatusOK, gin.H{
			"message": "Hello! Thank you for using FreeDuckDuckGo. Made by Vincent Yang. Repo: https://github.com/missuo/FreeDuckDuckGo",
		})
	})

	// Preflight endpoint; CORS headers come from the middleware above.
	r.OPTIONS("/v1/chat/completions", func(c *gin.Context) {
		c.Status(http.StatusOK)
	})

	r.POST("/v1/chat/completions", func(c *gin.Context) {
		var req OpenAIRequest
		if err := c.ShouldBindJSON(&req); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}
		// The upstream only supports the user role; fold system prompts in.
		for i := range req.Messages {
			if req.Messages[i].Role == "system" {
				req.Messages[i].Role = "user"
			}
		}
		// The upstream exposes exactly one model; force it regardless of input.
		req.Model = "gpt-3.5-turbo-0125"
		chatWithDuckDuckGo(c, req.Messages, req.Stream)
	})

	// Minimal /v1/models listing so OpenAI-compatible clients can discover
	// the single supported model.
	r.GET("/v1/models", func(c *gin.Context) {
		c.JSON(http.StatusOK, gin.H{
			"object": "list",
			"data": []gin.H{
				{
					"id":       "gpt-3.5-turbo-0125",
					"object":   "model",
					"created":  1692901427,
					"owned_by": "system",
				},
			},
		})
	})

	// Run blocks; report the reason if the server ever fails to start or dies.
	if err := r.Run(":3456"); err != nil {
		log.Fatal(err)
	}
}