Commit: Upload 2 files

Files changed:
- gradio_streamingllm.py (+6, -1)
- llama_cpp_python_streamingllm.py (+22, -6)

gradio_streamingllm.py (CHANGED)
@@ -10,12 +10,15 @@ from mods.btn_rag import init as btn_rag_init
 # ========== Common functions used by the buttons ==========
 from mods.btn_com import init as btn_com_init
 
-# ==========
+# ========== Output a reply ==========
 from mods.btn_submit import init as btn_submit_init
 
 # ========== Output a piece of narration ==========
 from mods.btn_vo import init as btn_vo_init
 
+# ========== Regenerate a reply ==========
+from mods.btn_retry import init as btn_retry_init
+
 # ========== Suggest a default reply for the user ==========
 from mods.btn_suggest import init as btn_suggest_init
 
@@ -117,6 +120,8 @@ with gr.Blocks() as chatting:
 
     btn_suggest_init(cfg)
 
+    btn_retry_init(cfg)
+
     # ========== For debugging ==========
     btn_reset_init(cfg)
 
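For orientation: mods/btn_retry.py itself is not included in this commit, but the wiring above suggests it exposes the same init(cfg) entry point as the other btn_* modules and presumably drives the new venv_revision method added to StreamingLLM below. A minimal sketch of how such a module could look; every cfg key ('model', 'btn_retry', 'chatbot') and the checkpoint name 'usr' are invented for illustration, and only Button.click() is standard Gradio API:

# mods/btn_retry.py -- illustrative sketch only, not the file from the repo
def init(cfg):
    def btn_retry(history):
        model = cfg['model']  # assumed: the StreamingLLM instance shared via cfg
        # Roll the KV cache back to the checkpoint taken at the user's last message
        # (assumed to be named 'usr'), discarding everything generated after it,
        # then drop the stale answer from the visible chat so it can be regenerated.
        if model.venv_revision('usr') and history:
            history = history[:-1]
        return history

    cfg['btn_retry'].click(      # assumed: a gr.Button stored in cfg
        fn=btn_retry,
        inputs=cfg['chatbot'],   # assumed: a gr.Chatbot stored in cfg
        outputs=cfg['chatbot'],
    )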
llama_cpp_python_streamingllm.py (CHANGED)
@@ -50,10 +50,10 @@ class StreamingLLM(Llama):
 
     def venv_disband(self, name_set):
         if len(self.venv) <= 1:
-            return
+            return False
         name_set = {x for x in name_set if x in self.venv_idx_map}
         if not name_set:
-            return
+            return False
         while self.venv_idx_map:
             if self.venv_idx_map[0] in name_set:
                 self.venv_idx_map.pop(0)  # delete
@@ -61,13 +61,29 @@ class StreamingLLM(Llama):
                 self.venv[0] += tmp
             else:
                 break
-        return
+        return True
+
+    def venv_revision(self, name: str):
+        if len(self.venv) <= 1:
+            return False
+        if name not in self.venv_idx_map:
+            return False
+        _s = 0
+        while self.venv_idx_map:
+            if self.venv_idx_map[-1] == name:
+                break
+            self.venv_idx_map.pop()  # delete
+            _s += self.venv.pop()
+        if _s:
+            self.n_tokens -= min(_s, self.n_tokens)
+            self.kv_cache_seq_trim()
+        return True
 
     def venv_remove(self, name: str):
         if len(self.venv) <= 1:
-            return
+            return False
         if name not in self.venv_idx_map:
-            return
+            return False
         venv_idx = self.venv_idx_map.index(name) + 1
         while self.venv_idx_map:
             self.venv_idx_map.pop(venv_idx - 1)  # delete
@@ -85,7 +101,7 @@ class StreamingLLM(Llama):
                 venv_idx = self.venv_idx_map.index(name, venv_idx - 1) + 1
             except ValueError:  # no more left
                 break
-        return
+        return True
 
     def venv_pop_token(self):
         self.n_tokens -= 1
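The new venv_revision complements venv_remove: rather than deleting the named checkpoint itself, it pops every checkpoint pushed after name, subtracts their token counts from n_tokens, and trims the KV cache, so generation can resume from the state recorded at that checkpoint; the bare returns in venv_disband and venv_remove also become True/False so callers can tell whether anything changed. Below is a self-contained toy of just that stack bookkeeping (VenvDemo, feed, and the checkpoint names are invented for illustration; the real class additionally manages a llama.cpp KV cache inherited from Llama):

# Toy model of the venv bookkeeping only -- not the real StreamingLLM.
class VenvDemo:
    def __init__(self):
        self.n_tokens = 0
        self.venv = [0]          # token counts: [unnamed, checkpoint 1, checkpoint 2, ...]
        self.venv_idx_map = []   # names of checkpoints 1..n, in push order

    def feed(self, name, n):
        # Pretend n tokens were decoded under a checkpoint called `name`.
        self.n_tokens += n
        self.venv.append(n)
        self.venv_idx_map.append(name)

    def kv_cache_seq_trim(self):
        pass                     # the real class trims the llama.cpp KV cache here

    def venv_revision(self, name: str):
        # Same logic as the method added in this commit.
        if len(self.venv) <= 1:
            return False
        if name not in self.venv_idx_map:
            return False
        _s = 0
        while self.venv_idx_map:
            if self.venv_idx_map[-1] == name:
                break
            self.venv_idx_map.pop()      # drop checkpoints newer than `name`
            _s += self.venv.pop()
        if _s:
            self.n_tokens -= min(_s, self.n_tokens)
            self.kv_cache_seq_trim()
        return True

demo = VenvDemo()
demo.feed('usr', 12)
demo.feed('char', 30)
demo.feed('vo', 8)
demo.venv_revision('char')                 # discards the 'vo' checkpoint and its 8 tokens
print(demo.venv_idx_map, demo.n_tokens)    # ['usr', 'char'] 42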