update
- Dockerfile +1 -1
- main.py +3 -17
- toolbox/k2_sherpa/nn_models.py +6 -2
Dockerfile
CHANGED
@@ -18,7 +18,7 @@ USER user
 ENV HOME=/home/user \
     PATH=/home/user/.local/bin:$PATH
 
-ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib/python3.8/site-packages/k2/lib/
+#ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib/python3.8/site-packages/k2/lib/
 ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/home/user/.local/lib/python3.8/site-packages/k2/lib/
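The Dockerfile now extends LD_LIBRARY_PATH only with the per-user k2 library directory (the /usr/local variant is commented out), so that sherpa's native extensions can find k2's shared libraries at runtime. A minimal sanity check along these lines can confirm the variable is set as intended inside the container; it is an illustrative sketch, not part of the Space's code, and only the directory string is taken from the Dockerfile above.

import os

# Illustrative check (not part of this repo): verify that the k2 shared-library
# directory from the Dockerfile is on LD_LIBRARY_PATH before importing sherpa.
K2_LIB_DIR = "/home/user/.local/lib/python3.8/site-packages/k2/lib"

entries = [p.rstrip("/") for p in os.environ.get("LD_LIBRARY_PATH", "").split(":") if p]
if K2_LIB_DIR not in entries:
    raise RuntimeError(f"k2 lib dir {K2_LIB_DIR!r} is missing from LD_LIBRARY_PATH={entries!r}")
print("LD_LIBRARY_PATH contains the k2 lib directory")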
main.py
CHANGED
@@ -11,18 +11,6 @@ import platform
 import time
 import tempfile
 
-# os.system(
-#     "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib/python3.8/site-packages/k2/lib/"
-# )
-#
-# os.system(
-#     "cp -v /usr/local/lib/python3.8/site-packages/k2/lib/*.so /usr/local/lib/python3.8/site-packages/sherpa/lib/"
-# )
-#
-# os.system(
-#     "cp -v /home/user/.local/lib/python3.8/site-packages/k2/lib/*.so /home/user/.local/lib/python3.8/site-packages/sherpa/lib/"
-# )
-
 from project_settings import project_path, log_directory
 import log
 
@@ -113,15 +101,13 @@ def process(
 
     # load recognizer
     local_model_dir = pretrained_model_dir / "huggingface" / repo_id
-    nn_model_file = local_model_dir / m_dict["nn_model_file"]
-    tokens_file = local_model_dir / m_dict["tokens_file"]
 
    recognizer = nn_models.load_recognizer(
         repo_id=m_dict["repo_id"],
-        nn_model_file=nn_model_file
-        tokens_file=tokens_file
+        nn_model_file=m_dict["nn_model_file"],
+        tokens_file=m_dict["tokens_file"],
         sub_folder=m_dict["sub_folder"],
-        local_model_dir=local_model_dir
+        local_model_dir=local_model_dir,
         loader=m_dict["loader"],
         decoding_method=decoding_method,
         num_active_paths=num_active_paths,
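After this change main.py no longer builds the model and token paths itself; it forwards the raw file names from the model registry entry together with local_model_dir, and nn_models.load_recognizer() resolves them (see the nn_models.py diff below). A hypothetical registry entry shaped after the keys this diff reads from m_dict would look like the following; only the key names and the loader string come from the code above, every other value is a placeholder.

# Hypothetical m_dict entry; the key names mirror those used in main.py above,
# the values are placeholders and do not refer to a real checkpoint.
m_dict = {
    "repo_id": "some-org/some-asr-model",
    "nn_model_file": "model.pt",
    "tokens_file": "tokens.txt",
    "sub_folder": ".",
    "loader": "load_sherpa_offline_recognizer",
}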
toolbox/k2_sherpa/nn_models.py
CHANGED
@@ -3,6 +3,7 @@
 from enum import Enum
 from functools import lru_cache
 import os
+from pathlib import Path
 
 import huggingface_hub
 import sherpa
@@ -107,7 +108,7 @@ def load_recognizer(repo_id: str,
                     nn_model_file: str,
                     tokens_file: str,
                     sub_folder: str,
-                    local_model_dir:
+                    local_model_dir: Path,
                     loader: str,
                     decoding_method: str = "greedy_search",
                     num_active_paths: int = 4,
@@ -118,9 +119,12 @@ def load_recognizer(repo_id: str,
         nn_model_file=nn_model_file,
         tokens_file=tokens_file,
         sub_folder=sub_folder,
-        local_model_dir=local_model_dir,
+        local_model_dir=local_model_dir.as_posix(),
     )
 
+    nn_model_file = (local_model_dir / nn_model_file).as_posix()
+    tokens_file = (local_model_dir / tokens_file).as_posix()
+
     if loader == "load_sherpa_offline_recognizer":
         recognizer = load_sherpa_offline_recognizer(
             nn_model_file=nn_model_file,
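Inside load_recognizer() the paths are now resolved centrally: the Path-typed local_model_dir is converted with .as_posix() for the download step, and the model and token file names are joined onto it before the sherpa loader is called. Below is a small standalone sketch of that pathlib pattern; the directory and file names are placeholders, only the join-and-as_posix pattern mirrors the code added above.

from pathlib import Path

# Placeholder directory and file names; only the (dir / name).as_posix()
# pattern mirrors the lines added to load_recognizer() above.
local_model_dir = Path("pretrained_models/huggingface/some-org/some-asr-model")
nn_model_file = "model.pt"
tokens_file = "tokens.txt"

nn_model_file = (local_model_dir / nn_model_file).as_posix()
tokens_file = (local_model_dir / tokens_file).as_posix()

# Both are now plain forward-slash strings rather than Path objects,
# which is presumably what the downstream sherpa loaders expect.
print(nn_model_file)  # pretrained_models/huggingface/some-org/some-asr-model/model.pt
print(tokens_file)    # pretrained_models/huggingface/some-org/some-asr-model/tokens.txt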