ZeroCommand committed
Commit 8a5336f
1 Parent(s): ff77572

fix config file io bug

app_text_classification.py CHANGED
@@ -5,6 +5,7 @@ from io_utils import (
     write_scanners,
     read_inference_type,
     get_logs_file,
+    write_inference_type,
 )
 from wordings import INTRODUCTION_MD, CONFIRM_MAPPING_DETAILS_MD
 from text_classification_ui_helpers import (
@@ -96,7 +97,8 @@ def get_demo(demo):
     demo.load(get_logs_file, uid_label, logs, every=0.5)
 
     dataset_id_input.change(
-        check_dataset_and_get_config, inputs=[dataset_id_input, uid_label], outputs=[dataset_config_input]
+        check_dataset_and_get_config,
+        inputs=[dataset_id_input, uid_label], outputs=[dataset_config_input]
     )
 
     dataset_config_input.change(
@@ -107,9 +109,20 @@ def get_demo(demo):
 
     scanners.change(write_scanners, inputs=[scanners, uid_label])
 
-    run_inference.change(select_run_mode, inputs=[run_inference, uid_label], outputs=[inference_token, run_local])
-
-    run_local.change(deselect_run_inference, inputs=[run_local], outputs=[inference_token, run_inference])
+    run_inference.change(
+        select_run_mode,
+        inputs=[run_inference, inference_token, uid_label],
+        outputs=[inference_token, run_local])
+
+    run_local.change(
+        deselect_run_inference,
+        inputs=[run_local],
+        outputs=[inference_token, run_inference])
+
+    inference_token.change(
+        write_inference_type,
+        inputs=[run_inference, inference_token, uid_label])
+
     gr.on(
         triggers=[label.change for label in column_mappings],
         fn=write_column_mapping_to_config,
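
For context, the diff above passes the token component into select_run_mode and adds a separate inference_token.change listener that persists the choice via write_inference_type. A minimal, self-contained sketch of that wiring pattern follows; the component names, labels, and toy handlers are assumptions for illustration, not the app's real code.

# Minimal sketch of the Gradio event wiring shown above (assumed names).
import gradio as gr

def select_run_mode(run_inf, inf_token, uid):
    # reveal the token box when API inference is chosen, untick "run local"
    if run_inf:
        return gr.update(visible=True), gr.update(value=False)
    return gr.update(visible=False), gr.update(value=True)

def write_inference_type(use_inference, inference_token, uid):
    # stand-in for the io_utils helper; here it only logs
    print(f"[{uid}] inference={use_inference} token_set={bool(inference_token)}")

with gr.Blocks() as demo:
    uid_label = gr.Textbox(value="demo-uid", visible=False)
    run_inference = gr.Checkbox(label="Use HF Inference API")
    run_local = gr.Checkbox(label="Run pipeline locally", value=True)
    inference_token = gr.Textbox(label="HF API token", visible=False)

    run_inference.change(
        select_run_mode,
        inputs=[run_inference, inference_token, uid_label],
        outputs=[inference_token, run_local])

    inference_token.change(
        write_inference_type,
        inputs=[run_inference, inference_token, uid_label])
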
io_utils.py CHANGED
@@ -34,12 +34,14 @@ def read_scanners(uid):
 
 # convert a list of scanners to yaml file
 def write_scanners(scanners, uid):
-    with open(get_yaml_path(uid), "r+") as f:
+    with open(get_yaml_path(uid), "r") as f:
         config = yaml.load(f, Loader=yaml.FullLoader)
         if config:
             config["detectors"] = scanners
-        # save scanners to detectors in yaml
-        yaml.dump(config, f, Dumper=Dumper)
+    f.close()
+    # save scanners to detectors in yaml
+    with open(get_yaml_path(uid), "w") as f:
+        yaml.dump(config, f, Dumper=Dumper)
     f.close()
 
 
@@ -54,18 +56,22 @@ def read_inference_type(uid):
 
 
 # write model_type to yaml file
-def write_inference_type(use_inference, uid):
-    with open(get_yaml_path(uid), "r+") as f:
+def write_inference_type(use_inference, inference_token, uid):
+    with open(get_yaml_path(uid), "r") as f:
         config = yaml.load(f, Loader=yaml.FullLoader)
         if use_inference:
             config["inference_type"] = "hf_inference_api"
+            config["inference_token"] = inference_token
         else:
            config["inference_type"] = "hf_pipeline"
-        # save inference_type to inference_type in yaml
+    f.close()
+    # save inference_type to inference_type in yaml
+    with open(get_yaml_path(uid), "w") as f:
         yaml.dump(config, f, Dumper=Dumper)
     f.close()
 
 
+
 # read column mapping from yaml file
 def read_column_mapping(uid):
     column_mapping = {}
@@ -81,14 +87,16 @@ def read_column_mapping(uid):
 def write_column_mapping(mapping, uid):
     with open(get_yaml_path(uid), "r") as f:
         config = yaml.load(f, Loader=yaml.FullLoader)
+    f.close()
+
     if config is None:
         return
     if mapping is None and "column_mapping" in config.keys():
         del config["column_mapping"]
     else:
         config["column_mapping"] = mapping
+
     with open(get_yaml_path(uid), "w") as f:
-        # save column_mapping to column_mapping in yaml
         yaml.dump(config, f, Dumper=Dumper)
     f.close()
 
text_classification_ui_helpers.py CHANGED
@@ -42,9 +42,10 @@ def check_dataset_and_get_split(dataset_id, dataset_config):
         # gr.Warning(f"Failed to load dataset {dataset_id} with config {dataset_config}: {e}")
         pass
 
-def select_run_mode(run_inf, uid):
-    write_inference_type(run_inf, uid)
+def select_run_mode(run_inf, inf_token, uid):
     if run_inf:
+        if len(inf_token) > 0:
+            write_inference_type(run_inf, inf_token, uid)
         return (
             gr.update(visible=True),
             gr.update(value=False))
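
With this change the config is only written once a token is actually present: select_run_mode persists it when the inference option is toggled with a token already filled in, and the new inference_token.change listener persists it when the token is typed afterwards. A hypothetical round-trip check of the helper is sketched below; it assumes the io_utils helpers above are importable, a config file already exists for the uid, and the uid and token values are made up.

# Hypothetical round-trip check (assumed uid/token, not from the repo).
import yaml
from io_utils import write_inference_type, get_yaml_path

uid = "demo-uid"  # made-up uid with an existing config file
write_inference_type(True, "hf_example_token", uid)

with open(get_yaml_path(uid), "r") as f:
    cfg = yaml.safe_load(f)

assert cfg["inference_type"] == "hf_inference_api"
assert cfg["inference_token"] == "hf_example_token"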