ZeroCommand committed
Commit a2810d6
1 Parent(s): 97c6f8e

add closing files

Files changed (1): io_utils.py (+12 −0)
io_utils.py CHANGED
@@ -28,6 +28,7 @@ def read_scanners(uid):
     with open(get_yaml_path(uid), "r") as f:
         config = yaml.load(f, Loader=yaml.FullLoader)
         scanners = config.get("detectors", [])
+        f.close()
     return scanners


@@ -37,9 +38,12 @@ def write_scanners(scanners, uid):
         config = yaml.load(f, Loader=yaml.FullLoader)
         if config:
             config["detectors"] = scanners
+        f.close()
     # save scanners to detectors in yaml
     with open(get_yaml_path(uid), "w") as f:
         yaml.dump(config, f, Dumper=Dumper)
+        f.close()
+


 # read model_type from yaml file
@@ -48,6 +52,7 @@ def read_inference_type(uid):
     with open(get_yaml_path(uid), "r") as f:
         config = yaml.load(f, Loader=yaml.FullLoader)
         inference_type = config.get("inference_type", "")
+        f.close()
     return inference_type


@@ -62,9 +67,11 @@ def write_inference_type(use_inference, inference_token, uid):
             config["inference_type"] = "hf_pipeline"
             # FIXME: A quick and temp fix for missing token
             config["inference_token"] = ""
+        f.close()
     # save inference_type to inference_type in yaml
     with open(get_yaml_path(uid), "w") as f:
         yaml.dump(config, f, Dumper=Dumper)
+        f.close()



@@ -75,6 +82,7 @@ def read_column_mapping(uid):
         config = yaml.load(f, Loader=yaml.FullLoader)
         if config:
             column_mapping = config.get("column_mapping", dict())
+        f.close()
     return column_mapping


@@ -82,6 +90,7 @@ def read_column_mapping(uid):
 def write_column_mapping(mapping, uid):
     with open(get_yaml_path(uid), "r") as f:
         config = yaml.load(f, Loader=yaml.FullLoader)
+        f.close()

     if config is None:
         return
@@ -92,6 +101,8 @@ def write_column_mapping(mapping, uid):

     with open(get_yaml_path(uid), "w") as f:
         yaml.dump(config, f, Dumper=Dumper)
+        f.close()
+


 # convert column mapping dataframe to json
@@ -114,6 +125,7 @@ def get_logs_file(uid):
 def write_log_to_user_file(id, log):
     with open(f"./tmp/{id}_log", "a") as f:
         f.write(log)
+        f.close()


 def save_job_to_pipe(id, job, lock):
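
For context, a minimal self-contained sketch of the read/write pattern this commit touches (not the actual io_utils.py; get_yaml_path and its config path are hypothetical stand-ins, and the real module may import Dumper differently). The with-block already closes the file when it exits, so the f.close() calls added here only close the handle slightly earlier; calling close() again afterwards is safe.

import yaml
from yaml import Dumper  # assumption: stand-in for however io_utils.py obtains its Dumper


def get_yaml_path(uid):
    # Hypothetical helper; the real io_utils.py defines its own get_yaml_path.
    return f"./tmp/{uid}_config.yaml"


def read_scanners(uid):
    # Read the "detectors" list from the per-uid YAML config.
    with open(get_yaml_path(uid), "r") as f:
        config = yaml.load(f, Loader=yaml.FullLoader) or {}
        scanners = config.get("detectors", [])
        f.close()  # explicit close as added by this commit; the with-block closes f anyway
    return scanners


def write_scanners(scanners, uid):
    # Read the existing config, update "detectors", and write it back.
    with open(get_yaml_path(uid), "r") as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
        if config:
            config["detectors"] = scanners
        f.close()
    # save scanners to detectors in yaml
    with open(get_yaml_path(uid), "w") as f:
        yaml.dump(config, f, Dumper=Dumper)
        f.close()

In CPython, file.close() is idempotent, so the context manager's own close on exit does not raise even though the file was already closed inside the block.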