import argparse
import json
import sys

import uvicorn
from fastapi import FastAPI
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse
from googletrans import Translator
from pydantic import BaseModel, Field


class ChatAPIApp:
    def __init__(self):
        self.app = FastAPI(
            docs_url="/",
            title="HuggingFace LLM API",
            swagger_ui_parameters={"defaultModelsExpandDepth": -1},
            version="1.0",
        )
        self.setup_routes()

    def get_available_models(self):
        # Load the list of supported languages from the bundled JSON file.
        with open("apis/lang_name.json", "r") as f:
            self.available_models = json.load(f)
        return self.available_models
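
    # NOTE: `apis/lang_name.json` is assumed to be a JSON array of language
    # entries, each carrying at least a `code` key (the only key
    # `chat_completions` below relies on), e.g.:
    #   [{"code": "en", "name": "English"}, {"code": "fr", "name": "French"}]
    # The `name` field shown here is illustrative.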

    class ChatCompletionsPostItem(BaseModel):
        from_language: str = Field(
            default="auto",
            description="(str) Source language code; `auto` to detect automatically",
        )
        to_language: str = Field(
            default="en",
            description="(str) Target language code, e.g. `en`",
        )
        text: str = Field(
            default="Hello",
            description="(str) Text to translate",
        )
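
    # Example request body for POST /translate (values are illustrative):
    #   {"from_language": "auto", "to_language": "fr", "text": "Hello"}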

    def chat_completions(self, item: ChatCompletionsPostItem):
        translator = Translator()
        with open("apis/lang_name.json", "r") as f:
            available_langs = json.load(f)

        # Fall back to English if the requested target language is not supported.
        to_lang = "en"
        for lang_item in available_langs:
            if item.to_language == lang_item["code"]:
                to_lang = item.to_language
                break

        translation = translator.translate(
            item.text, src=item.from_language, dest=to_lang
        )
        item_response = {
            "from_language": item.from_language,
            "to_language": item.to_language,
            "text": item.text,
            "translate": translation.text,
        }
        json_compatible_item_data = jsonable_encoder(item_response)
        return JSONResponse(content=json_compatible_item_data)

    def setup_routes(self):
        for prefix in ["", "/v1"]:
            self.app.get(
                prefix + "/models",
                summary="Get available languages",
            )(self.get_available_models)

            self.app.post(
                prefix + "/translate",
                summary="Translate text",
            )(self.chat_completions)


class ArgParser(argparse.ArgumentParser):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.add_argument(
            "-s",
            "--server",
            type=str,
            default="0.0.0.0",
            help="Server IP for HF LLM Chat API",
        )
        self.add_argument(
            "-p",
            "--port",
            type=int,
            default=23333,
            help="Server Port for HF LLM Chat API",
        )

        self.add_argument(
            "-d",
            "--dev",
            default=False,
            action="store_true",
            help="Run in dev mode",
        )

        self.args = self.parse_args(sys.argv[1:])


app = ChatAPIApp().app

if __name__ == "__main__":
    args = ArgParser().args
    # Auto-reload is enabled only in dev mode.
    uvicorn.run("__main__:app", host=args.server, port=args.port, reload=args.dev)

    # python -m apis.chat_api      # [Docker] production mode
    # python -m apis.chat_api -d   # [Dev]    development mode
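
    # Example request (a sketch; assumes the server is reachable locally on the
    # default port 23333):
    #   curl -X POST http://127.0.0.1:23333/translate \
    #     -H "Content-Type: application/json" \
    #     -d '{"from_language": "auto", "to_language": "fr", "text": "Hello"}'
    # Expected response shape (values illustrative):
    #   {"from_language": "auto", "to_language": "fr", "text": "Hello", "translate": "Bonjour"}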