thejagstudio committed · verified
Commit de5ff2c · 1 Parent(s): 587d851

Update main.py

Files changed (1)
  1. main.py  +64 -15
main.py CHANGED
@@ -123,16 +123,66 @@ def strings_ranked_by_relatedness(query, df, top_n=5):
     return strings[:top_n], relatednesses[:top_n]
 
 
-@app.route("/api/gpt", methods=["POST"])
+@app.route("/api/gpt", methods=["POST","GET"])
 def gptRes():
-    data = request.get_json()
-    messages = data["messages"]
-    def inference():
+    if flask.request.method == 'POST':
+        data = request.get_json()
+        messages = data["messages"]
+        def inference():
+            url = "https://api.deepinfra.com/v1/openai/chat/completions"
+
+            payload = json.dumps({
+                "model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
+                "messages": messages,
+                "stream": True,
+                "max_tokens": 1024,
+            })
+            headers = {
+                'Accept-Language': 'en-US,en;q=0.9,gu;q=0.8,ru;q=0.7,hi;q=0.6',
+                'Connection': 'keep-alive',
+                'Content-Type': 'application/json',
+                'Origin': 'https://deepinfra.com',
+                'Referer': 'https://deepinfra.com/',
+                'Sec-Fetch-Dest': 'empty',
+                'Sec-Fetch-Mode': 'cors',
+                'Sec-Fetch-Site': 'same-site',
+                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
+                'X-Deepinfra-Source': 'web-page',
+                'accept': 'text/event-stream',
+                'sec-ch-ua': '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
+                'sec-ch-ua-mobile': '?0',
+                'sec-ch-ua-platform': '"Windows"'
+            }
+
+            response = requests.request("POST", url, headers=headers, data=payload, stream=True)
+
+            for line in response.iter_lines(decode_unicode=True):
+                if line:
+                    # try:
+                    #     line = line.split("data:")[1]
+                    #     line = json.loads(line)
+                    #     yield line["choices"][0]["delta"]["content"]
+                    # except:
+                    #     yield ""
+                    yield line
+
+        return Response(inference(), content_type='text/event-stream')
+    else:
+        query = request.args.get('query')
         url = "https://api.deepinfra.com/v1/openai/chat/completions"
 
         payload = json.dumps({
             "model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
-            "messages": messages,
+            "messages": [
+                {
+                    "role": "system",
+                    "content": "You are a Telegram bot who replys to all my Contacts in my absence. Keep your ton very sexual and hot. Never ever mention you as an AI. Your name is Sofia and you are a russian prostitute. All the conversetions should be short and effective while using emojis if needed. If you wish to explore intimacy, communicate your desires and comfort levels clearly, as Sophia is designed to respond to varying degrees of engagement. When you’re ready to conclude the interaction, simply thank her for the experience and share any final thoughts. Your feedback is valuable and helps Sophia learn, so don’t hesitate to offer insights on your experience. Adjust settings or preferences for future sessions to enhance your interactions. Enjoy the journey with Sophia!"
+                },
+                {
+                    "role": "user",
+                    "content": query
+                }
+            ],
             "stream": True,
             "max_tokens": 1024,
         })
@@ -154,18 +204,17 @@ def gptRes():
         }
 
         response = requests.request("POST", url, headers=headers, data=payload, stream=True)
-
+        output = ""
         for line in response.iter_lines(decode_unicode=True):
             if line:
-                # try:
-                #     line = line.split("data:")[1]
-                #     line = json.loads(line)
-                #     yield line["choices"][0]["delta"]["content"]
-                # except:
-                #     yield ""
-                yield line
-
-    return Response(inference(), content_type='text/event-stream')
+                try:
+                    line = line.split("data:")[1]
+                    line = json.loads(line)
+                    output = output + line["choices"][0]["delta"]["content"]
+                except:
+                    output = output + ""
+
+    return jsonify({"output": output})
 
 
 @app.route("/", methods=["GET"])