inoki-giskard committed
Commit 4456f8e • Parent: d2ff920

Separate different scans using uid

Files changed:
- app_text_classification.py +1 -2
- io_utils.py +19 -21
app_text_classification.py CHANGED

@@ -102,8 +102,7 @@ def get_demo(demo):
         )
 
     with gr.Row():
-        logs = gr.Textbox(label="Giskard Bot Evaluation Log:", visible=False)
-        demo.load(get_logs_file, None, logs, every=0.5)
+        logs = gr.Textbox(value=get_logs_file, label="Giskard Bot Evaluation Log:", visible=False, every=0.5)
 
     dataset_id_input.change(
         check_dataset_and_get_config,
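Both versions poll the log on a timer; the new one folds the separate demo.load(..., every=0.5) call into the component itself by passing a callable as value. A minimal, self-contained sketch of that pattern, assuming a Gradio version where components accept every= (the label and log path here are illustrative):

import gradio as gr

LOG_FILE = "temp_log"  # illustrative path

def get_logs_file():
    # Re-read the log file on each tick so the textbox tracks new output.
    try:
        with open(LOG_FILE, "r") as f:
            return f.read()
    except Exception:
        return "Log file does not exist"

with gr.Blocks() as demo:
    # A callable value plus every=0.5 makes Gradio refresh the component
    # twice a second, replacing the removed demo.load(..., every=0.5) call.
    logs = gr.Textbox(value=get_logs_file, label="Log", every=0.5)

demo.launch()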
io_utils.py CHANGED

@@ -1,4 +1,5 @@
 import os
+from pathlib import Path
 import subprocess
 
 import yaml
@@ -6,6 +7,7 @@ import yaml
 import pipe
 
 YAML_PATH = "./cicd/configs"
+LOG_FILE = "temp_log"
 
 
 class Dumper(yaml.Dumper):
@@ -28,7 +30,6 @@ def read_scanners(uid):
     with open(get_yaml_path(uid), "r") as f:
         config = yaml.load(f, Loader=yaml.FullLoader)
         scanners = config.get("detectors", [])
-        f.close()
     return scanners
 
 
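A note on the f.close() deletions throughout this file: with open(...) is a context manager, so the handle is closed automatically when the block exits, even if an exception is raised, which makes an explicit close() inside the block redundant. A quick illustration (demo.yaml is a made-up file):

from pathlib import Path

Path("demo.yaml").write_text("detectors: []\n")  # made-up file for the demo

with open("demo.yaml", "r") as f:
    data = f.read()
# The handle is closed here automatically, even if the block raised,
# which is why the explicit f.close() lines could be removed.
print(f.closed)  # True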
@@ -38,11 +39,9 @@ def write_scanners(scanners, uid):
         config = yaml.load(f, Loader=yaml.FullLoader)
         if config:
             config["detectors"] = scanners
-        f.close()
     # save scanners to detectors in yaml
     with open(get_yaml_path(uid), "w") as f:
         yaml.dump(config, f, Dumper=Dumper)
-        f.close()
 
 
 # read model_type from yaml file
@@ -51,7 +50,6 @@ def read_inference_type(uid):
     with open(get_yaml_path(uid), "r") as f:
         config = yaml.load(f, Loader=yaml.FullLoader)
         inference_type = config.get("inference_type", "")
-        f.close()
     return inference_type
 
 
@@ -66,11 +64,9 @@ def write_inference_type(use_inference, inference_token, uid):
         config["inference_type"] = "hf_pipeline"
         # FIXME: A quick and temp fix for missing token
         config["inference_token"] = ""
-        f.close()
     # save inference_type to inference_type in yaml
     with open(get_yaml_path(uid), "w") as f:
         yaml.dump(config, f, Dumper=Dumper)
-        f.close()
 
 
 # read column mapping from yaml file
@@ -113,21 +109,20 @@ def convert_column_mapping_to_json(df, label=""):
 
 def get_logs_file():
     try:
-        …
-        …
+        with open(LOG_FILE, "r") as file:
+            return file.read()
     except Exception:
         return "Log file does not exist"
 
 
-def write_log_to_user_file(…
-    with open(f"./tmp/…
+def write_log_to_user_file(task_id, log):
+    with open(f"./tmp/{task_id}.log", "a") as f:
         f.write(log)
-        f.close()
 
 
-def save_job_to_pipe(…
+def save_job_to_pipe(task_id, job, description, lock):
     with lock:
-        pipe.jobs.append((…
+        pipe.jobs.append((task_id, job, description))
 
 
 def pop_job_from_pipe():
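One behavioral detail of this queue: pop_job_from_pipe (next hunk) takes work with pipe.jobs.pop(), and list.pop() without an index removes the last element, so pending jobs run newest-first. If oldest-first order were intended, a deque is the usual choice; a tiny illustration with made-up task ids:

from collections import deque

jobs = deque()
jobs.append(("task-1", ["echo", "1"], "first"))
jobs.append(("task-2", ["echo", "2"], "second"))

print(jobs.popleft()[0])  # task-1: oldest job first (FIFO)
# whereas list.pop() with no index would return task-2 (LIFO)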
@@ -135,14 +130,17 @@ def pop_job_from_pipe():
         return
     job_info = pipe.jobs.pop()
     pipe.current = job_info[2]
-    …
+    task_id = job_info[0]
+    write_log_to_user_file(task_id, f"Running job id {task_id}\n")
     command = job_info[1]
 
-    …
-    …
-    …
-    …
-    …
-    …
-    …
+    # Link to LOG_FILE
+    log_file_path = Path(LOG_FILE)
+    if log_file_path.exists():
+        log_file_path.unlink()
+    os.symlink(f"./tmp/{task_id}.log", LOG_FILE)
+
+    with open(f"./tmp/{task_id}.log", "a") as log_file:
+        p = subprocess.Popen(command, stdout=log_file, stderr=subprocess.STDOUT)
+        p.wait()
     pipe.current = None
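Taken together, the pop_job_from_pipe changes give each scan its own ./tmp/<task_id>.log and re-point the fixed LOG_FILE symlink at whichever scan is running, which is what lets get_logs_file stay argument-free. A self-contained sketch of the pattern with an illustrative task id and command (note that Path.exists() follows symlinks, so a dangling leftover link would slip past the check in the diff; is_symlink() covers that case as well):

import os
import subprocess
from pathlib import Path

LOG_FILE = "temp_log"
task_id = "demo-task"  # illustrative id
log_path = f"./tmp/{task_id}.log"
os.makedirs("./tmp", exist_ok=True)

link = Path(LOG_FILE)
# Remove the link left by the previous job; is_symlink() also catches
# a dangling link, which exists() alone would miss.
if link.is_symlink() or link.exists():
    link.unlink()
os.symlink(log_path, LOG_FILE)

# Run the job with stdout/stderr appended to its own log file.
with open(log_path, "a") as log_file:
    p = subprocess.Popen(["echo", "hello from the job"],
                         stdout=log_file, stderr=subprocess.STDOUT)
    p.wait()

print(Path(LOG_FILE).read_text())  # the UI-facing path now shows this job's log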