Download model from hub
Files changed:
- app.py +3 -2
- requirements.txt +2 -1
app.py
@@ -1,4 +1,5 @@
 from ctransformers import AutoModelForCausalLM
+from huggingface_hub import hf_hub_download
 
 import gradio as gr
 
@@ -19,8 +20,8 @@ def generate(history):
     return streamer
 
 
-
-llm = AutoModelForCausalLM.from_pretrained(
+model_path = hf_hub_download(repo_id="theodotus/llama-uk", subfolder="model", filename="model_q4_1.bin")
+llm = AutoModelForCausalLM.from_pretrained(model_path, model_type='llama')
 end_token = "</s>"
 
 
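For context, a short sketch of how the two added lines behave at runtime, using the exact repo_id, subfolder, and filename from this diff; the prompt and the streaming loop at the end are illustrative assumptions, not part of the Space's code:

from ctransformers import AutoModelForCausalLM
from huggingface_hub import hf_hub_download

# Download the quantized weights (or reuse the local Hugging Face cache);
# hf_hub_download returns the local filesystem path of the cached file.
model_path = hf_hub_download(
    repo_id="theodotus/llama-uk",
    subfolder="model",
    filename="model_q4_1.bin",
)

# Load the local file with ctransformers as a LLaMA-family model.
llm = AutoModelForCausalLM.from_pretrained(model_path, model_type="llama")

# Illustrative only: ctransformers models are callable and can stream tokens.
for token in llm("Привіт!", stream=True):
    print(token, end="", flush=True)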
requirements.txt
@@ -1 +1,2 @@
-ctransformers
+ctransformers
+huggingface_hub