# b_demo_hf / app.py
# Beracles — "unify interface" (commit 408959d, 3.89 kB)
import json
import os
from datetime import datetime
from zoneinfo import ZoneInfo
import gradio as gr
from extract import extract
import app_util
from pgsoft.pgconst.const import service_list, functionality_list, game_list
from pgsoft.pgdate.date_utils import beijing
import call_ai
from str_util import normalize_text
#######################
# proxy version
#######################
proxy_version = "1.0.0-2023-12-13-a"  # reconstruct ai calling

# Log the startup moment on both the Beijing and Seattle clocks.
t = datetime.now().astimezone(ZoneInfo("Asia/Shanghai"))
print(f"[Beijing]: {t.replace(microsecond=0)}")
t = t.astimezone(ZoneInfo("America/Los_Angeles"))
print(f"[Seattle]: {t.replace(microsecond=0)}")

# Map of deployment identity -> backing AI Space.
spaces = {
    "b_demo_hf": "stevez-ai",
    "pgdemo2": "stevez-ai2",
    "pgdemo3": "stevez-ai3",
}

# Resolve which Space to talk to from the environment; fall back to the
# default identity when the variable is unset or empty.
identity = os.environ.get("identity") or "b_demo_hf"
space = spaces.get(identity, "stevez-ai")
def run(hf_token, service, game, functionality, nlp_command):
    """
    Gradio event handler.

    Validates the request, dispatches it either to the cloud-disk service
    or to the AI backend, decorates the response with proxy metadata,
    logs it, and returns the result as a pretty-printed JSON string (or a
    plain error message string for invalid input).

    :param hf_token: reused as a JSON payload; parsed by ``extract()``
        into ``(token, user_name, redirect, source, _)``
    :param service: one of ``service_list``
    :param game: one of ``game_list`` (required for game-level services)
    :param functionality: one of ``functionality_list``
    :param nlp_command: free-text command forwarded to the backend
    """
    # reuse hf_token field as json string
    token, user_name, redirect, source, _ = extract(hf_token)
    if user_name is None:
        user_name = "__fake__"

    # BUGFIX: the original declared `global space` and overwrote the
    # module-level default whenever a request carried a redirect, so one
    # user's redirect leaked into every subsequent request. Resolve the
    # redirect into a local target instead.
    target_space = redirect if redirect is not None else space
    url = f"https://{target_space}.hf.space"

    if token is None or token == "":
        return "please specify hf token"
    # Services outside service_list[1:] are game-level requests and need
    # the extra fields checked below.
    if service not in service_list[1:]:
        if game is None:
            return "please specify which game"
        if functionality is None:
            return "please choose the AI functionality"
        if functionality == "AI":
            if nlp_command in ["", None]:
                return "please make sure the command is not empty"

    service_start = beijing()
    print(f"<<<<<<<<<<<<<< service starts at {service_start} <<<<<<<<<<<<<<")

    if service in ["download game", "upload game", "list games"]:
        # Cloud-disk services answer with a JSON string; normalize it
        # into the common {"status": ..., "result": ...} shape.
        res = json.loads(app_util.call_clouddisk(service, nlp_command, token))
        if res["result"] is None:
            outp = {"status": "Failure"}
        else:
            outp = {"status": "OK", "result": res["result"]}
    else:
        # Sanity check on the shared constant (stripped under `python -O`).
        assert "games" in service_list
        if service == "games":
            print(f"{beijing()} [{user_name}] [{game}] {nlp_command}")
        nlp_command = normalize_text(nlp_command)
        call_ai.from_cache = True
        outp = call_ai.call_ai(
            service,
            game,
            functionality,
            nlp_command,
            url,
            token,
        )
    # The backends return a plain string on error; pass it through as-is.
    if isinstance(outp, str):
        return outp

    # add proxy version info to the output
    outp["timestamp"] = str(beijing())
    outp["proxy-version"] = proxy_version
    outp["user"] = user_name
    outp["game"] = game
    if source:
        outp["source"] = source
    outp["cache"] = call_ai.from_cache
    app_util.call_logger(outp, identity, token)

    service_end = beijing()
    timecost = service_end.timestamp() - service_start.timestamp()
    print(
        f">>>>>>>>>>>>>>> service ends at {service_end}, "
        + f"costs {timecost:.2f}s >>>>>>>>>>>>>>>\n"
    )
    return json.dumps(outp, indent=4)
# Build the UI: hf-token text box, three radio selectors, and a free-text
# command box, wired to the `run` handler.
service_radio = gr.Radio(
    service_list,
    value=service_list[0],
    info="Shared services",
)
game_radio = gr.Radio(
    game_list,
    value=game_list[1],
    info="Which game you want the AI to support?",
)
functionality_radio = gr.Radio(
    functionality_list,
    value=functionality_list[0],
    # label = "What do you want to do?",
    info="What functionality?",
)

demo = gr.Interface(
    fn=run,
    inputs=["text", service_radio, game_radio, functionality_radio, "text"],
    outputs="text",
    title="Demo",
    allow_flagging="never",
)
demo.launch()