Spaces:
Running
Running
broadfield
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -1,4 +1,4 @@
|
|
1 |
-
from huggingface_hub import InferenceClient, HfApi
|
2 |
#from html2image import Html2Image
|
3 |
import gradio as gr
|
4 |
#import markdown
|
@@ -15,7 +15,8 @@ import re
|
|
15 |
import os
|
16 |
loc_folder="chat_history"
|
17 |
loc_file="chat_json"
|
18 |
-
|
|
|
19 |
clients = [
|
20 |
{'type':'image','name':'black-forest-labs/FLUX.1-dev','rank':'op','max_tokens':16384,'schema':{'bos':'<|im_start|>','eos':'<|im_end|>'}},
|
21 |
{'type':'text','name':'deepseek-ai/DeepSeek-V2.5-1210','rank':'op','max_tokens':16384,'schema':{'bos':'<|im_start|>','eos':'<|im_end|>'}},
|
@@ -47,6 +48,8 @@ def format_prompt(message, mod, system):
|
|
47 |
return prompt
|
48 |
def generate(prompt,history,mod=2,tok=4000,seed=1,role="ASSISTANT",data=None):
|
49 |
#print("#####",history,"######")
|
|
|
|
|
50 |
gen_images=False
|
51 |
client=InferenceClient(clients[int(mod)]['name'])
|
52 |
client_tok=clients[int(mod)]['max_tokens']
|
@@ -64,10 +67,10 @@ def generate(prompt,history,mod=2,tok=4000,seed=1,role="ASSISTANT",data=None):
|
|
64 |
system_prompt = prompts.MANAGER.replace("**TIMELINE**",data[4]).replace("**HISTORY**",str(history))
|
65 |
formatted_prompt = format_prompt(prompt, mod, system_prompt)
|
66 |
elif role == "PATHMAKER":
|
67 |
-
system_prompt = prompts.PATH_MAKER.replace("**CURRENT_OR_NONE**",str(data[4])).replace("**PROMPT**",json.dumps(data[0],indent=4)).replace("**HISTORY**",str(history))
|
68 |
formatted_prompt = format_prompt(prompt, mod, system_prompt)
|
69 |
elif role == "CREATE_FILE":
|
70 |
-
system_prompt = prompts.CREATE_FILE.replace("**TIMELINE**",data[4]).replace("**FILENAME**",str(data[1])).replace("**TEMPLATE_OR_NONE**",str(data[2]))
|
71 |
formatted_prompt = format_prompt(prompt, mod, system_prompt)
|
72 |
elif role == "SEARCH":
|
73 |
system_prompt = prompts.SEARCH.replace("**DATA**",data)
|
@@ -174,7 +177,7 @@ def parse_json(inp):
|
|
174 |
return out1,out2
|
175 |
def build_space(repo_name,file_name,file_content,access_token=""):
|
176 |
try:
|
177 |
-
repo_path=
|
178 |
if not access_token:access_token=os.environ['HF_TOKEN']
|
179 |
|
180 |
api=HfApi(endpoint="https://huggingface.co", token=access_token)
|
@@ -223,7 +226,6 @@ def agent(prompt_in,history,mod=2):
|
|
223 |
MAX_DATA=int(clients[int(mod)]['max_tokens'])*2
|
224 |
if not history:history=[{'role':'user','content':prompt_in['text']}]
|
225 |
while go == True:
|
226 |
-
|
227 |
seed = random.randint(1,9999999999999)
|
228 |
c=0
|
229 |
#history = [history[-4:]]
|
@@ -263,7 +265,7 @@ def agent(prompt_in,history,mod=2):
|
|
263 |
ret1,ret2 = parse_json(out_w[2].split('<|im_end|>')[0])
|
264 |
print('ret1',ret1)
|
265 |
print('ret2',ret2)
|
266 |
-
build_out = build_space(
|
267 |
history+=[{'role':'system','content':f'observation:{build_out}'}]
|
268 |
yield history
|
269 |
elif 'IMAGE' in fn:
|
|
|
1 |
+
from huggingface_hub import InferenceClient, HfApi, list_files_info
|
2 |
#from html2image import Html2Image
|
3 |
import gradio as gr
|
4 |
#import markdown
|
|
|
15 |
import os
|
16 |
loc_folder="chat_history"
|
17 |
loc_file="chat_json"
|
18 |
+
user_="community-pool/"
|
19 |
+
repo_="test2"
|
20 |
clients = [
|
21 |
{'type':'image','name':'black-forest-labs/FLUX.1-dev','rank':'op','max_tokens':16384,'schema':{'bos':'<|im_start|>','eos':'<|im_end|>'}},
|
22 |
{'type':'text','name':'deepseek-ai/DeepSeek-V2.5-1210','rank':'op','max_tokens':16384,'schema':{'bos':'<|im_start|>','eos':'<|im_end|>'}},
|
|
|
48 |
return prompt
|
49 |
def generate(prompt,history,mod=2,tok=4000,seed=1,role="ASSISTANT",data=None):
|
50 |
#print("#####",history,"######")
|
51 |
+
file_list = list_files_info(f'{user_}{repo_}')
|
52 |
+
print('file list\n',file_list)
|
53 |
gen_images=False
|
54 |
client=InferenceClient(clients[int(mod)]['name'])
|
55 |
client_tok=clients[int(mod)]['max_tokens']
|
|
|
67 |
system_prompt = prompts.MANAGER.replace("**TIMELINE**",data[4]).replace("**HISTORY**",str(history))
|
68 |
formatted_prompt = format_prompt(prompt, mod, system_prompt)
|
69 |
elif role == "PATHMAKER":
|
70 |
+
system_prompt = prompts.PATH_MAKER.replace("**FILE_LIST**",file_list).replace("**CURRENT_OR_NONE**",str(data[4])).replace("**PROMPT**",json.dumps(data[0],indent=4)).replace("**HISTORY**",str(history))
|
71 |
formatted_prompt = format_prompt(prompt, mod, system_prompt)
|
72 |
elif role == "CREATE_FILE":
|
73 |
+
system_prompt = prompts.CREATE_FILE.replace("**FILE_LIST**",file_list).replace("**TIMELINE**",data[4]).replace("**FILENAME**",str(data[1])).replace("**TEMPLATE_OR_NONE**",str(data[2]))
|
74 |
formatted_prompt = format_prompt(prompt, mod, system_prompt)
|
75 |
elif role == "SEARCH":
|
76 |
system_prompt = prompts.SEARCH.replace("**DATA**",data)
|
|
|
177 |
return out1,out2
|
178 |
def build_space(repo_name,file_name,file_content,access_token=""):
|
179 |
try:
|
180 |
+
repo_path=user_+str(repo_name)
|
181 |
if not access_token:access_token=os.environ['HF_TOKEN']
|
182 |
|
183 |
api=HfApi(endpoint="https://huggingface.co", token=access_token)
|
|
|
226 |
MAX_DATA=int(clients[int(mod)]['max_tokens'])*2
|
227 |
if not history:history=[{'role':'user','content':prompt_in['text']}]
|
228 |
while go == True:
|
|
|
229 |
seed = random.randint(1,9999999999999)
|
230 |
c=0
|
231 |
#history = [history[-4:]]
|
|
|
265 |
ret1,ret2 = parse_json(out_w[2].split('<|im_end|>')[0])
|
266 |
print('ret1',ret1)
|
267 |
print('ret2',ret2)
|
268 |
+
build_out = build_space(repo_,ret1,ret2)
|
269 |
history+=[{'role':'system','content':f'observation:{build_out}'}]
|
270 |
yield history
|
271 |
elif 'IMAGE' in fn:
|