package bogdanfinn

import (
	"aurora/httpclient"
	"encoding/json"
	"io"
	"net/http"
	"os"
	"strings"
	"testing"

	"github.com/joho/godotenv"
)

var BaseURL string

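// init loads optional settings from .env; BASE_URL falls back to the
// anonymous ChatGPT backend when it is not set.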
func init() {
	_ = godotenv.Load(".env")
	BaseURL = os.Getenv("BASE_URL")
	if BaseURL == "" {
		BaseURL = "https://chat.openai.com/backend-anon"
	}
}
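
// TestTlsClient_Request posts to the sentinel chat-requirements endpoint
// through a local HTTP proxy (assumed to be listening on 127.0.0.1:7990)
// and fails the test on a request error or a non-200 status.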
func TestTlsClient_Request(t *testing.T) {
	client := NewStdClient()
	userAgent := "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"
	proxy := "http://127.0.0.1:7990"
	client.SetProxy(proxy)

	apiUrl := BaseURL + "/sentinel/chat-requirements"
	payload := strings.NewReader(`{"conversation_mode_kind":"primary_assistant"}`)
	header := make(httpclient.AuroraHeaders)
	header.Set("Content-Type", "application/json")
	header.Set("User-Agent", userAgent)
	header.Set("Accept", "*/*")
	header.Set("oai-language", "en-US")
	header.Set("origin", "https://chat.openai.com")
	header.Set("referer", "https://chat.openai.com/")
	header.Set("oai-device-id", "c83b24f0-5a9e-4c43-8915-3f67d4332609")
	response, err := client.Request(http.MethodPost, apiUrl, header, nil, payload)
	if err != nil {
		t.Fatalf("request failed: %v", err)
	}
	defer response.Body.Close()
	t.Logf("status code: %d", response.StatusCode)
	if response.StatusCode != http.StatusOK {
		t.Errorf("unexpected status code: %d", response.StatusCode)
	}
}

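// TestChatGPTModel fetches the anonymous /models listing through the same
// local proxy and decodes the payload into the EnginesData struct below.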
func TestChatGPTModel(t *testing.T) {
	client := NewStdClient()
	userAgent := "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"
	proxy := "http://127.0.0.1:7990"
	client.SetProxy(proxy)
	apiUrl := BaseURL + "/models"

	header := make(httpclient.AuroraHeaders)
	header.Set("Content-Type", "application/json")
	header.Set("User-Agent", userAgent)
	header.Set("Accept", "*/*")
	header.Set("oai-language", "en-US")
	header.Set("origin", "https://chat.openai.com")
	header.Set("referer", "https://chat.openai.com/")
	header.Set("oai-device-id", "c83b24f0-5a9e-4c43-8915-3f67d4332609")
	response, err := client.Request(http.MethodGet, apiUrl, header, nil, nil)
	if err != nil {
		t.Fatalf("request failed: %v", err)
	}
	defer response.Body.Close()
	t.Logf("status code: %d", response.StatusCode)
	if response.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(response.Body)
		t.Fatalf("unexpected status code %d: %s", response.StatusCode, string(body))
	}

	type EnginesData struct {
		Models []struct {
			Slug         string   `json:"slug"`
			MaxTokens    int      `json:"max_tokens"`
			Title        string   `json:"title"`
			Description  string   `json:"description"`
			Tags         []string `json:"tags"`
			Capabilities struct {
			} `json:"capabilities,omitempty"`
			ProductFeatures struct {
			} `json:"product_features,omitempty"`
		} `json:"models"`
		Categories []struct {
			Category             string `json:"category"`
			HumanCategoryName    string `json:"human_category_name"`
			SubscriptionLevel    string `json:"subscription_level"`
			DefaultModel         string `json:"default_model"`
			CodeInterpreterModel string `json:"code_interpreter_model,omitempty"`
			PluginsModel         string `json:"plugins_model"`
		} `json:"categories"`
	}

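	// A minimal sketch of how the EnginesData type declared above could be
	// used: decode the /models payload and log each returned model. This
	// assumes the endpoint returns the JSON shape mirrored by the struct.
	var engines EnginesData
	if err := json.NewDecoder(response.Body).Decode(&engines); err != nil {
		t.Fatalf("failed to decode models response: %v", err)
	}
	for _, model := range engines.Models {
		t.Logf("model: %s (%s)", model.Slug, model.Title)
	}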
}