steveagi committed
Commit 80ac3aa · unverified · 2 Parent(s): 90abc51 3818a89

Merge pull request #12 from east-and-west-magic/feature-call-ai

Files changed (5):
  1. app.py +27 -53
  2. app_util.py +8 -1
  3. cache.py +0 -40
  4. call_ai.py +37 -0
  5. str_util.py +5 -0
app.py CHANGED
@@ -1,21 +1,20 @@
-import requests
 import json
 import os
 from datetime import datetime
 from zoneinfo import ZoneInfo
 
 import gradio as gr
-from gradio_client import Client
 from extract import extract
 import app_util
-import cache
 from pgsoft.pgconst.const import service_list, functionality_list, game_list
 from pgsoft.pgdate.date_utils import beijing
+import call_ai
+from str_util import normalize_text
 
 #######################
 # proxy version
 #######################
-proxy_version = "1.0.0-2023-12-12-a" # use cache
+proxy_version = "1.0.0-2023-12-13-a" # reconstruct ai calling
 
 t = datetime.now()
 t = t.astimezone(ZoneInfo("Asia/Shanghai"))
@@ -23,8 +22,6 @@ print(f"[Beijing]: {t.replace(microsecond=0)}")
 t = t.astimezone(ZoneInfo("America/Los_Angeles"))
 print(f"[Seattle]: {t.replace(microsecond=0)}")
 
-# assert False
-
 
 spaces = {
     "b_demo_hf": "stevez-ai",
@@ -38,8 +35,6 @@ if not identity:
     space = "stevez-ai"
 if identity in spaces:
     space = spaces[identity]
-filepath = os.sep.join(["cache", "cached_ai.json"])
-cache.load_cache(filepath)
 
 
 def run(hf_token, service, game, functionality, nlp_command):
@@ -58,6 +53,7 @@ def run(hf_token, service, game, functionality, nlp_command):
         space = redirect
     url = f"https://{space}.hf.space"
 
+    nlp_command = normalize_text(nlp_command)
    if token is None or token == "":
        return "please specify hf token"
 
@@ -91,51 +87,29 @@ def run(hf_token, service, game, functionality, nlp_command):
        else:
            outp = {"status": "OK", "result": json.loads(res)["result"]}
    else:
-        try:
-            assert "games" in service_list
-            if service == "games":
-                print(f"{beijing()} [{user_name}] [{game}] {nlp_command}")
-            outp = cache.get_cache(nlp_command)
-            if outp is None:
-                client = Client(
-                    url,
-                    hf_token=token,
-                    verbose=False,
-                )
-                calling_start = beijing()
-                print(f"calling ai starts at {calling_start}")
-                res = client.predict(
-                    service,
-                    game,
-                    functionality,
-                    nlp_command, # hidden,
-                    api_name="/predict",
-                )
-                calling_end = beijing()
-                timecost = calling_end.timestamp() - calling_start.timestamp()
-                print(f"calling ai ends at {calling_end}, costs {timecost:.2f}s")
-                outp = json.loads(res)
-                outp["cache"] = False
-                cache.add_cache(nlp_command, outp)
-            else:
-                print(f"[cache] return from cache")
-            # add proxy version info to the output
-            outp["proxy-version"] = proxy_version
-            outp["user"] = user_name
-            outp["game"] = game
-            if source:
-                outp["source"] = source
-            calling_start = beijing()
-            print(f"calling logger starts at {beijing()}")
-            app_util.call_logger(outp, identity, token)
-            calling_end = beijing()
-            timecost = calling_end.timestamp() - calling_start.timestamp()
-            print(f"calling logger ends at {calling_end}, costs {timecost:.2f}s")
-        except Exception as e:
-            return (
-                f"{type(e)}, {str(e)}. \nyou may want to make "
-                + "sure your hf_token is correct"
-            )
+        assert "games" in service_list
+        if service == "games":
+            print(f"{beijing()} [{user_name}] [{game}] {nlp_command}")
+        call_ai.from_cache = True
+        outp = call_ai.call_ai(
+            service,
+            game,
+            functionality,
+            nlp_command,
+            url,
+            token,
+        )
+        if isinstance(outp, str):
+            return outp
+        # add proxy version info to the output
+        outp["timestamp"] = beijing().__str__()
+        outp["proxy-version"] = proxy_version
+        outp["user"] = user_name
+        outp["game"] = game
+        if source:
+            outp["source"] = source
+        outp["cache"] = call_ai.from_cache
+        app_util.call_logger(outp, identity, token)
    service_end = beijing()
    timecost = service_end.timestamp() - service_start.timestamp()
    print(
app_util.py CHANGED
@@ -1,17 +1,20 @@
 import json
 
 from gradio_client import Client
+from pgsoft.pgdate.date_utils import beijing
 
 
 def call_logger(log_info, caller, hf_token) -> None:
     #######################
     # logging
     #######################
+    calling_start = beijing()
+    print(f"calling logger starts at {beijing()}")
+    #################################################
     urls = [
         "https://hubei-hunan-logger.hf.space",
         "https://hubei-hunan-logger2.hf.space",
     ]
-
     for url in urls:
         try:
             client = Client(
@@ -23,6 +26,10 @@ def call_logger(log_info, caller, hf_token) -> None:
             print(f"[logging to {url}] OK")
         except Exception as e:
             print(f"[logging to {url}] error: {e}")
+    #################################################
+    calling_end = beijing()
+    timecost = calling_end.timestamp() - calling_start.timestamp()
+    print(f"calling logger ends at {calling_end}, costs {timecost:.2f}s")
 
 
 def call_clouddisk(service, arg, hf_token) -> str | None:
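
The timing added around the logging loop mirrors the pattern now used in call_ai.py: record beijing() before and after, then print the difference of the two .timestamp() values. If this wrapping keeps repeating, it could be pulled into a small context manager; the sketch below is a hypothetical refactor, not part of this commit (only the beijing import comes from the repo):

from contextlib import contextmanager

from pgsoft.pgdate.date_utils import beijing


@contextmanager
def timed(label: str):
    # print start/end timestamps and elapsed seconds around a block
    start = beijing()
    print(f"calling {label} starts at {start}")
    try:
        yield
    finally:
        end = beijing()
        timecost = end.timestamp() - start.timestamp()
        print(f"calling {label} ends at {end}, costs {timecost:.2f}s")


# would replace the inline timing in call_logger:
# with timed("logger"):
#     for url in urls:
#         ...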
cache.py DELETED
@@ -1,40 +0,0 @@
-import os
-import json
-from pgsoft.pgdate.date_utils import beijing
-
-cache_ai = {}
-
-
-def normalize_text(text: str) -> str:
-    text = text.lower()
-    tmp = text.split(" ")
-    tmp = [word for word in tmp if word != ""]
-    return " ".join(tmp)
-
-
-def load_cache(filepath: str):
-    """load cached ai calling from a json file"""
-    global cache_ai
-    if os.path.exists(filepath):
-        with open(filepath, "r+") as f:
-            cache_ai = json.load(f)
-
-
-def add_cache(command: str, result: dict):
-    """add a cache of ai calling"""
-    command = normalize_text(command)
-    result["command"] = command
-
-    global cache_ai
-    cache_ai[command] = result
-    print(f'[cache] added "{command}"')
-
-
-def get_cache(command: str) -> dict | None:
-    """return a cached ai calling with new "timestamp" """
-    command = normalize_text(command)
-    outp = cache_ai.get(command)
-    if outp:
-        outp["timestamp"] = beijing().__str__()
-        outp["cache"] = True
-    return outp
call_ai.py ADDED
@@ -0,0 +1,37 @@
+from functools import cache
+from gradio_client import Client
+from pgsoft.pgdate.date_utils import beijing
+import json
+
+from_cache = True
+
+
+@cache
+def call_ai(service, game, functionality, nlp_command, url, hf_token):
+    calling_start = beijing()
+    print(f"calling ai starts at {calling_start}")
+    try:
+        client = Client(
+            url,
+            hf_token=hf_token,
+            verbose=False,
+        )
+        res = client.predict(
+            service,
+            game,
+            functionality,
+            nlp_command, # hidden,
+            api_name="/predict",
+        )
+    except Exception as e:
+        return (
+            f"{type(e)}, {str(e)}. \nyou may want to make "
+            + "sure your hf_token is correct"
+        )
+    calling_end = beijing()
+    timecost = calling_end.timestamp() - calling_start.timestamp()
+    print(f"calling ai ends at {calling_end}, costs {timecost:.2f}s")
+    outp = json.loads(res)
+    global from_cache
+    from_cache = False
+    return outp
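
The new module pairs functools.cache with a module-level from_cache flag: the memoized call_ai body only runs on a cache miss, and only then flips from_cache to False, so a caller that resets the flag to True just before the call (as app.py now does) can read it afterwards to tell whether the result was served from the memo. A minimal self-contained sketch of that pattern, with a hypothetical expensive() standing in for the remote predict call:

from functools import cache

from_cache = True


@cache
def expensive(query: str) -> str:
    # body executes only on a cache miss
    global from_cache
    from_cache = False
    return query.upper()  # stand-in for the AI result


def answer(query: str) -> tuple[str, bool]:
    # mimics app.py: reset the flag, call, then read it back
    global from_cache
    from_cache = True
    result = expensive(query)
    return result, from_cache


print(answer("build a castle"))  # ('BUILD A CASTLE', False) - first call, cache miss
print(answer("build a castle"))  # ('BUILD A CASTLE', True)  - served from the memo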
str_util.py ADDED
@@ -0,0 +1,5 @@
+def normalize_text(text: str) -> str:
+    text = text.lower()
+    tmp = text.split(" ")
+    tmp = [word for word in tmp if word != ""]
+    return " ".join(tmp)
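
normalize_text lower-cases the command and collapses runs of plain spaces (it splits on " " rather than on all whitespace), so inputs that differ only in case or spacing become the same string before being passed to the memoized call_ai. For example:

from str_util import normalize_text

print(normalize_text("  Build   a  CASTLE "))  # -> "build a castle"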