# b_demo_hf/app.py
import json
import os
from datetime import datetime
from zoneinfo import ZoneInfo
import gradio as gr
from extract import extract
import app_util
from pgsoft.pgconst.const import service_list, functionality_list, game_list
from pgsoft.pghost import ais
from pgsoft.pgdate.date_utils import beijing
import call_ai
from str_util import normalize_text
#######################
# proxy version
#######################
proxy_version = "1.0.0-2024-01-02-a" # reconstruct ai calling
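# log the startup time in both Beijing and Seattle at launch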
t = datetime.now()
t = t.astimezone(ZoneInfo("Asia/Shanghai"))
print(f"[Beijing]: {t.replace(microsecond=0)}")
t = t.astimezone(ZoneInfo("America/Los_Angeles"))
print(f"[Seattle]: {t.replace(microsecond=0)}")
identity = os.environ.get("identity")
print(f"identity: {identity}")
if not identity:
    identity = "pgdemo_dev"

ai = "stevez-ai-dev"
if identity in ais:
    ai = ais[identity]

db_token = os.environ.get("db_token")
if db_token:
    # log only the first few characters of the token, not the full secret
    print(db_token[:5])

def run(hf_token, service, game, functionality, nlp_command):
    """
    Event handler for the Gradio interface.

    The hf_token textbox is reused as a JSON string; extract() pulls the
    actual token, user name, optional redirect host, and traffic source
    out of it.
    """
    token, user_name, redirect, source, _ = extract(hf_token)
    if user_name is None:
        user_name = "__fake__"

    # redirect all traffic to the proxy server
    global ai
    if redirect is not None:
        ai = redirect
    ai_url = f"https://{ai}.hf.space"
    if token is None or token == "":
        return "please specify hf token"
    if service not in service_list[1:]:
        if game is None:
            return "please specify which game"
        if functionality is None:
            return "please choose the AI functionality"
        if functionality == "AI":
            if nlp_command in ["", None]:
                return "please make sure the command is not empty"

    service_start = beijing()
    print(f"<<<<<<<<<<<<<< service starts at {service_start} <<<<<<<<<<<<<<")
    if service in ["download game", "upload game", "list games"]:
        res = app_util.file_service(service, nlp_command, db_token)
        if not res:
            outp = {"status": "Failure"}
        else:
            outp = {"status": "OK", "result": res}
    else:
        assert "games" in service_list
        if service == "games":
            print(f"{beijing()} [{user_name}] [{game}] {nlp_command}")
        nlp_command = normalize_text(nlp_command)
        call_ai.from_cache = True
        outp = call_ai.call_ai(
            service,
            game,
            functionality,
            nlp_command,
            ai_url,
            token,
        )
        if isinstance(outp, str):
            return outp
    # add proxy version info to the output
    outp["timestamp"] = str(beijing())
    outp["proxy-version"] = proxy_version
    outp["user"] = user_name
    outp["game"] = game
    if source:
        outp["source"] = source
    outp["cache"] = call_ai.from_cache

    app_util.call_logger(outp, identity, token)

    service_end = beijing()
    timecost = service_end.timestamp() - service_start.timestamp()
    print(
        f">>>>>>>>>>>>>>> service ends at {service_end}, "
        + f"costs {timecost:.2f}s >>>>>>>>>>>>>>>\n"
    )
    return json.dumps(outp, indent=4)
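
# Gradio UI: the first textbox carries the hf_token JSON payload, the three
# radios pick service / game / functionality, and the last textbox is the
# natural-language command forwarded to the AI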
demo = gr.Interface(
    fn=run,
    inputs=[
        "text",
        gr.Radio(
            service_list,
            value=service_list[0],
            info="Shared services",
        ),
        gr.Radio(
            game_list,
            value=game_list[1],
            info="Which game do you want the AI to support?",
        ),
        gr.Radio(
            functionality_list,
            value=functionality_list[0],
            # label = "What do you want to do?",
            info="What functionality?",
        ),
        "text",
    ],
    outputs="text",
    title="Demo",
    allow_flagging="never",
)
demo.launch()
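
# A minimal remote-call sketch using gradio_client (assumptions: the Space id
# "<user>/b_demo_hf" and the exact JSON payload expected by extract() are
# placeholders, not verified here):
#
#   from gradio_client import Client
#   client = Client("<user>/b_demo_hf")
#   result = client.predict(
#       "<hf_token JSON payload>",    # reused hf_token field (see run())
#       service_list[0],              # service
#       game_list[1],                 # game
#       functionality_list[0],        # functionality
#       "describe the current game",  # nlp_command (illustrative only)
#       api_name="/predict",
#   )
#   print(result)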