johann22 committed on
Commit
1d3019d
·
1 Parent(s): de4b17f

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +145 -0
app.py ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import random
3
+ from huggingface_hub import InferenceClient
4
+ import gradio as gr
5
+ #from utils import parse_action, parse_file_content, read_python_module_structure
6
+ from datetime import datetime
7
+ from PIL import Image
8
+ import agent
9
+ from models import models
10
+ import urllib.request
11
+ import uuid
12
+ import requests
13
+ import io
14
# --- Module-level setup (runs at import time, has network side effects) ---

# Eagerly load every diffusion model listed in `models` so the chat UI can
# dispatch to any of them by dropdown index.  Each gr.load() contacts the
# Hugging Face Hub, so startup cost grows linearly with len(models).
loaded_model = [gr.load(f'models/{model}') for model in models]
print(loaded_model)

# Timestamp of process start, formatted for logging/display.
now = datetime.now()
date_time_str = now.strftime("%Y-%m-%d %H:%M:%S")

# Text-generation backend used to turn the user's chat message into an
# image-generation prompt.
client = InferenceClient(
    "mistralai/Mixtral-8x7B-Instruct-v0.1"
)

############################################
# Default diffusion model, used by the `infer` helper below.
model = gr.load("models/stabilityai/sdxl-turbo")

VERBOSE = True
MAX_HISTORY = 10000
#MODEL = "gpt-3.5-turbo" # "gpt-4"
history = []
34
def infer(txt):
    """Run the default sdxl-turbo model on *txt* and return its output."""
    result = model(txt)
    return result
37
def format_prompt(message, history):
    """Assemble a Mixtral-instruct prompt from chat history plus a new message.

    Each (user, bot) turn becomes "[INST] user [/INST] bot</s> ", the whole
    prompt is prefixed with the BOS token "<s>", and *message* is appended
    as a final open "[INST] ... [/INST]" turn awaiting the model's reply.
    """
    pieces = ["<s>"]
    for user_turn, bot_turn in history:
        pieces.append(f"[INST] {user_turn} [/INST]")
        pieces.append(f" {bot_turn}</s> ")
    pieces.append(f"[INST] {message} [/INST]")
    return "".join(pieces)
45
+
46
+
47
def run_gpt(
    in_prompt,
    history,
):
    """Stream a Mixtral completion for *in_prompt* given the chat *history*.

    The chat-formatted prompt is prefixed with agent.GENERATE_PROMPT, sent to
    the inference client as a token stream, and the streamed token texts are
    accumulated into a single string that is returned.
    """
    print(f'history :: {history}')
    prompt = format_prompt(in_prompt, history)
    # Fresh random seed on every call so identical prompts still vary.
    seed = random.randint(1, 1111111111111111)
    print(seed)
    generate_kwargs = dict(
        temperature=1.0,
        max_new_tokens=1048,
        top_p=0.99,
        repetition_penalty=1.0,
        do_sample=True,
        seed=seed,
    )

    content = agent.GENERATE_PROMPT + prompt
    print(content)

    stream = client.text_generation(
        content,
        **generate_kwargs,
        stream=True,
        details=True,
        return_full_text=False,
    )
    # Join the streamed token texts once instead of repeated concatenation.
    return "".join(chunk.token.text for chunk in stream)
78
+
79
def run(purpose, history, model_drop):
    """Chat handler: generate an image prompt with Mixtral, then render it.

    Args:
        purpose: the user's chat message from the textbox.
        history: list of (user, bot) chat turns from the Chatbot component.
        model_drop: dropdown selection index (type="index") into `loaded_model`.

    Yields:
        (textbox_value, chatbot_pairs, image) tuples: first with image=None so
        the chat updates immediately, then again with the rendered image (or
        None again if the image fetch fails).
    """
    print(history)
    # Ask the LLM for a diffusion prompt derived from the user's message.
    out_prompt = run_gpt(
        purpose,
        history,
    )
    # First yield: clear the textbox and show the generated prompt right away.
    yield ("", [(purpose, out_prompt)], None)

    # Render with the model picked in the dropdown (type="index" -> int-like).
    diffusion_model = loaded_model[int(model_drop)]
    out_img = diffusion_model(out_prompt)
    print(out_img)
    # The loaded model returns a server-side file path; fetch it over HTTP.
    url = f'https://johann22-mixtral-diffusion.hf.space/file={out_img}'
    print(url)
    r = requests.get(url, stream=True)
    if r.status_code == 200:
        out = Image.open(io.BytesIO(r.content))
        yield ("", [(purpose, out_prompt)], out)
    else:
        # Keep the chat text visible even when the image fetch fails, instead
        # of silently ending the generator with no final update.
        print(f'image fetch failed with status {r.status_code}')
        yield ("", [(purpose, out_prompt)], None)
113
+
114
+
115
################################################
# Gradio UI: a chat panel whose bot replies are generated diffusion prompts,
# plus an image pane showing the rendered result.

with gr.Blocks() as iface:
    gr.HTML("""<center><h1>Chat Diffusion</h1><br><h3>This chatbot will generate images</h3></center>""")
    with gr.Row():
        with gr.Column():
            chatbot = gr.Chatbot()
            msg = gr.Textbox()
            # type="index" makes the dropdown pass the selected *index* to
            # `run`, which uses it to pick from `loaded_model`.
            model_drop = gr.Dropdown(
                label="Diffusion Models",
                type="index",
                choices=list(models),
                value=models[0],
            )
            with gr.Row():
                submit_b = gr.Button()
                stop_b = gr.Button("Stop")
                clear = gr.ClearButton([msg, chatbot])

        sumbox = gr.Image(label="Image")

    # Both the submit button and pressing Enter trigger the same generator.
    sub_b = submit_b.click(run, [msg, chatbot, model_drop], [msg, chatbot, sumbox])
    sub_e = msg.submit(run, [msg, chatbot, model_drop], [msg, chatbot, sumbox])
    # "Stop" cancels any in-flight generation from either trigger.
    stop_b.click(None, None, None, cancels=[sub_b, sub_e])
iface.launch()