r"""
Refer to https://linux.do/t/topic/120588/5.

# Flask glue API (a thin proxy in front of duck2api)
#
# Local test:
# curl -sS --location http://127.0.0.1:5000/hf/v1/chat/completions -H "Content-Type: application/json" --data "{\"model\": \"gpt-3.5-turbo\", \"messages\": [{\"role\": \"user\", \"content\": \"Say this is a test!\"}], \"stream\": false}"
#
# The same --data payload in single-quoted form:
# --data '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Say this is a test!"}], "stream": false}'
#
# Against the deployed Space:
# curl https://mikeee-duck2api.hf.space/hf/v1/chat/completions -H "Content-Type: application/json" --data "{\"model\": \"gpt-3.5-turbo\", \"messages\": [{\"role\": \"user\", \"content\": \"Say this is a test!\"}], \"stream\": false}"
"""

import os
from threading import Thread

import requests
from dotenv import load_dotenv, dotenv_values
from flask import Flask, jsonify, request
from python_run_cmd import run_cmd
from ycecream import y
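

# A small, hedged usage sketch of the proxy defined below: the same request as the
# curl commands in the module docstring, sent with `requests`. It is never called by
# this app; base_url assumes a local run on port 5000.
def sample_chat_completion(base_url="http://127.0.0.1:5000"):
    """POST the docstring's sample payload to the /hf proxy and return the JSON."""
    resp = requests.post(
        f"{base_url}/hf/v1/chat/completions",
        headers={"Content-Type": "application/json"},
        json={
            "model": "gpt-3.5-turbo",
            "messages": [{"role": "user", "content": "Say this is a test!"}],
            "stream": False,
        },
        timeout=60,
    )
    return resp.json()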


def run_duck2api():
    try:
        run_cmd("./duck2api")
    except Exception as exc:
        y(exc)


Thread(target=run_duck2api).start()

y.configure(sln=1)
load_dotenv()
y(dotenv_values())

# ./duck2api listens on the port given by SERVER_PORT in .env (8088 here); duck2api defaults to 8080.
PORT = os.getenv("SERVER_PORT")
if not PORT:
    PORT = "8080"
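
# An example .env for this setup (an assumption based on the comment above: both
# duck2api and this script read SERVER_PORT from the environment):
#
#     SERVER_PORT=8088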


y(PORT)

app = Flask(__name__)


@app.route("/", methods=["GET", "POST"])
def landing():
    y(request.url)
    y(request.path)
    if request.path.startswith("/hf"):
        url = f"http://127.0.0.1:{PORT}{request.path[3:]}"
    else:
        url = f"http://127.0.0.1:{PORT}{request.path}"

    response = requests.get(url)
    return response.text
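

# Both routes strip a leading "/hf" before forwarding to duck2api. A hedged,
# standalone sketch of that mapping as a plain function (not wired into the
# routes, which keep their original inline form):
def to_upstream_url(path, port=PORT):
    """Map an incoming request path to the local duck2api URL, dropping a leading /hf.

    e.g. "/hf/v1/chat/completions" -> f"http://127.0.0.1:{PORT}/v1/chat/completions"
    """
    if path.startswith("/hf"):
        path = path[3:]
    return f"http://127.0.0.1:{port}{path}"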


@app.route("/hf/v1/chat/completions", methods=["POST"])
def my_endpoint():
    # url = "https://api.anakin.ai/v1/apps/1344/chatbot/chat"
    # url = "http://acone:{PORT}/v1/chat/completions"
    # url = f"http://127.0.0.1:{PORT}/v1/chat/completions"

    y(request)
    # y(request.host)
    y(request.host_url)
    # y(request.path)
    # y(request.full_path)
    # y(request.json)
    # y(dir(request))
    headers = {
        "Content-Type": "application/json",
        # 'Authorization': 'Bearer ANAKINAI_API_ACCESS_TOKEN'
    }

    # remove prefix /hf
    y(request.url)
    y(request.path)
    if request.path.startswith("/hf"):
        url = f"http://127.0.0.1:{PORT}{request.path[3:]}"
    else:
        url = f"http://127.0.0.1:{PORT}{request.path}"

    y(url)

    err_msg = ""
    response = None
    try:
        response = requests.post(url, headers=headers, json=request.json)
    except Exception as exc:
        y(exc)
        err_msg = str(exc)
        # raise

    # Only touch the response if the upstream call actually returned one.
    if response is not None:
        y(response.json())
        if response.status_code == 200:
            return jsonify(response.json())
        err_msg = f"upstream returned HTTP {response.status_code}"

    return (
        jsonify(
            {
                "error": {
                    "message": f"An error occurred: {err_msg}",
                    "type": "server_error",
                    "code": 500,
                }
            }
        ),
        500,
    )


if __name__ == "__main__":
    y(" -- ")
    app.run(port=5000, debug=True)
    y(" -end- ")