Spaces:
Runtime error
Upload useapi.py
useapi.py
CHANGED
@@ -13,6 +13,7 @@ character_url = "https://" + os.getenv("character_url")
 avatar_url = "https://" + os.getenv("avatar_url")
 image_url = "https://" + os.getenv("image_url")
 auth = os.getenv("auth")
+moment_url = os.getenv("moment_url")
 #headers
 def create_headers(language):
     # mapping
@@ -60,10 +61,10 @@ def get_models():
         for model_info in models:
            # filter out 'gpt-4o' and 'gpt-4o-mini'
            if model_info['model'] not in ['mythomax-13b']:
-                if model_info['model'] in ['
-                    weight =
+                if model_info['model'] in ['gemma-2-9b', 'llama-3.1-8b']:
+                    weight = 12  # Assign a low weight to reduce their frequency
                 else:
-                    weight = int(math.ceil(
+                    weight = int(math.ceil(25 / model_info['price'] + 0.5))
                 self.models.extend([model_info['model']] * weight)

 model_storage = ModelStorage()
@@ -78,26 +79,37 @@ def get_models():
     return model_storage.models

 # parse the recommendation JSON
-def extract_recommand(data):
-
-
+async def extract_recommand(data, language):
+    result = []
+    for item in data["data"]["moments"]:
+        opening = await get_moment_opening(item["_id"], language)
+        result.append({
             "character_id": item["character_id"],
-            "avatar_url"
+            "avatar_url": str(avatar_url + item["character_id"] + "_avatar.webp"),
             "_id": item["_id"],
-            "image_url"
+            "image_url": str(image_url + item["_id"] + "_large.webp"),
             "description": item["description"],
             "name": item["title"],
-            "opening":
-        }
-
-
+            "opening": opening
+        })
+    return result
+
+async def get_moment_opening(moment_id, language):
+    url = f"{moment_url}{moment_id}"
+    async with httpx.AsyncClient() as client:
+        response = await client.get(url, headers=create_headers(language))
+        if response.status_code == 200:
+            data = response.json()
+            return data['data']['opening']
+    return None

 # request the recommendation API
 async def recommand(char_id, language):
     recommand_url = str(recommand_base_url + char_id)
-
-
-
+    async with httpx.AsyncClient() as client:
+        response = await client.get(recommand_url, headers=create_headers(language))
+        json_data = response.json()
+        return await extract_recommand(json_data, language)

 async def fetch_stream(query, model, moment_id, session_id, bio, request_name, queue, language):
     payload = {"query": query, "model": model, "bio": bio, "moment_id": moment_id}
@@ -170,4 +182,4 @@ async def combine_streams(
             active_streams -= 1

     session_id_a = await task_a
-    session_id_b = await task_b
+    session_id_b = await task_b
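
The weight change in the second hunk makes model selection price-aware: each model name is repeated weight times in model_storage.models, so a uniform random draw over that list favours cheaper models, while gemma-2-9b and llama-3.1-8b are pinned to a small fixed weight. A minimal sketch of that idea, using a stripped-down ModelStorage and made-up prices (only the model names gemma-2-9b, llama-3.1-8b and mythomax-13b appear in the diff; everything else here is illustrative):

import math
import random

# Hypothetical price data for illustration; the real values come from the models API response.
models = [
    {"model": "mythomax-13b", "price": 1.0},      # skipped by the filter
    {"model": "gemma-2-9b", "price": 0.5},        # pinned to weight 12
    {"model": "some-larger-model", "price": 6.0}, # placeholder name
]

class ModelStorage:
    def __init__(self):
        self.models = []

storage = ModelStorage()
for model_info in models:
    if model_info['model'] not in ['mythomax-13b']:
        if model_info['model'] in ['gemma-2-9b', 'llama-3.1-8b']:
            weight = 12  # low fixed weight, as in the diff
        else:
            weight = int(math.ceil(25 / model_info['price'] + 0.5))  # cheaper => more copies
        storage.models.extend([model_info['model']] * weight)

# A uniform choice over the expanded list is effectively a weighted choice over the models.
print(random.choice(storage.models))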
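
The additions in the third hunk form a small pipeline: recommand() fetches the recommendation JSON for a character, extract_recommand() flattens each moment into a dict with avatar and image URLs, and get_moment_opening() makes one extra request per moment to fill in the opening line. A rough usage sketch, assuming this file is importable as a module named useapi, that the env vars it reads are set, and that the endpoint really returns {"data": {"moments": [...]}} as the new code expects:

import asyncio

from useapi import recommand  # assumed module name, taken from the file name

async def main():
    # Placeholder character id and language code, for illustration only.
    items = await recommand("some_character_id", "en")
    for item in items:
        # Each item carries character_id, _id, avatar_url, image_url,
        # description, name and opening, as built in extract_recommand().
        print(item["name"], "->", item["opening"])

asyncio.run(main())

Since get_moment_opening() is awaited inside the loop, the openings are fetched one at a time; wrapping those calls in asyncio.gather would be the usual way to fetch them concurrently if that ever becomes a bottleneck.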
|