Commit ff77572 by ZeroCommand (1 parent: 5927800)

fix inf token ui

app_text_classification.py CHANGED

@@ -4,7 +4,6 @@ from io_utils import (
     read_scanners,
     write_scanners,
     read_inference_type,
-    write_inference_type,
     get_logs_file,
 )
 from wordings import INTRODUCTION_MD, CONFIRM_MAPPING_DETAILS_MD
@@ -14,6 +13,8 @@ from text_classification_ui_helpers import (
     check_dataset_and_get_split,
     check_model_and_show_prediction,
     write_column_mapping_to_config,
+    select_run_mode,
+    deselect_run_inference,
 )

 MAX_LABELS = 20
@@ -70,7 +71,7 @@ def get_demo(demo):
     run_local = gr.Checkbox(value=True, label="Run in this Space")
     use_inference = read_inference_type(uid) == "hf_inference_api"
     run_inference = gr.Checkbox(value=use_inference, label="Run with Inference API")
-    inference_token = gr.Textbox(value="", label="HF Token for Inference API", visible=False)
+    inference_token = gr.Textbox(value="", label="HF Token for Inference API", visible=False, interactive=True)

     with gr.Accordion(label="Scanner Advance Config (optional)", open=False):
         selected = read_scanners(uid)
@@ -106,8 +107,9 @@ def get_demo(demo):

     scanners.change(write_scanners, inputs=[scanners, uid_label])

-    run_inference.change(write_inference_type, inputs=[run_inference, uid_label], outputs=[inference_token])
+    run_inference.change(select_run_mode, inputs=[run_inference, uid_label], outputs=[inference_token, run_local])

+    run_local.change(deselect_run_inference, inputs=[run_local], outputs=[inference_token, run_inference])
     gr.on(
         triggers=[label.change for label in column_mappings],
         fn=write_column_mapping_to_config,
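The new wiring makes "Run in this Space" and "Run with Inference API" mutually exclusive and only reveals the token box when the Inference API is selected. Below is a minimal, self-contained sketch of that pattern in plain Gradio; component names mirror the diff for readability, and the write_inference_type persistence call is left out:

import gradio as gr

def select_run_mode(run_inf):
    # Inference API ticked: show the token box and untick "Run in this Space".
    if run_inf:
        return gr.update(visible=True), gr.update(value=False)
    # Inference API unticked: hide the token box and fall back to running locally.
    return gr.update(visible=False), gr.update(value=True)

def deselect_run_inference(run_local):
    # Local run ticked: hide the token box and untick "Run with Inference API".
    if run_local:
        return gr.update(visible=False), gr.update(value=False)
    return gr.update(visible=True), gr.update(value=True)

with gr.Blocks() as demo:
    run_local = gr.Checkbox(value=True, label="Run in this Space")
    run_inference = gr.Checkbox(value=False, label="Run with Inference API")
    inference_token = gr.Textbox(value="", label="HF Token for Inference API", visible=False, interactive=True)

    run_inference.change(select_run_mode, inputs=[run_inference], outputs=[inference_token, run_local])
    run_local.change(deselect_run_inference, inputs=[run_local], outputs=[inference_token, run_inference])

if __name__ == "__main__":
    demo.launch()

Each handler returns one gr.update per component in its outputs list, in the same order, so the token box visibility and the opposite checkbox are updated in a single event.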
io_utils.py CHANGED
@@ -64,8 +64,6 @@ def write_inference_type(use_inference, uid):
     # save inference_type to inference_type in yaml
     yaml.dump(config, f, Dumper=Dumper)
     f.close()
-    return (gr.update(visible=(use_inference == "hf_inference_api")))
-


 # read column mapping from yaml file
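With the return statement removed, write_inference_type is now a pure side-effect writer: it records the choice in the per-user YAML config, and the gr.update for the token box is produced by select_run_mode instead. A hypothetical sketch of what the function presumably does after this commit (the config path, the "hf_pipeline" fallback value, and the exact YAML handling are assumptions, not taken from this diff):

import yaml

def write_inference_type(use_inference, uid):
    config_path = f"./{uid}_config.yaml"  # placeholder path; the real code resolves its own config location
    with open(config_path) as f:
        config = yaml.safe_load(f) or {}
    # save inference_type to inference_type in yaml
    config["inference_type"] = "hf_inference_api" if use_inference else "hf_pipeline"  # fallback value is an assumption
    with open(config_path, "w") as f:
        yaml.dump(config, f)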
text_classification_ui_helpers.py CHANGED
@@ -9,7 +9,8 @@ import gradio as gr
 from transformers.pipelines import TextClassificationPipeline

 from io_utils import (get_yaml_path, read_column_mapping, save_job_to_pipe,
-                      write_column_mapping, write_log_to_user_file)
+                      write_column_mapping, write_log_to_user_file,
+                      write_inference_type)
 from text_classification import (check_model, get_example_prediction,
                                  get_labels_and_features_from_dataset)
 from wordings import CONFIRM_MAPPING_DETAILS_FAIL_RAW, MAPPING_STYLED_ERROR_WARNING, CHECK_CONFIG_OR_SPLIT_RAW
@@ -41,7 +42,29 @@ def check_dataset_and_get_split(dataset_id, dataset_config):
         # gr.Warning(f"Failed to load dataset {dataset_id} with config {dataset_config}: {e}")
         pass

+def select_run_mode(run_inf, uid):
+    write_inference_type(run_inf, uid)
+    if run_inf:
+        return (
+            gr.update(visible=True),
+            gr.update(value=False))
+    else:
+        return (
+            gr.update(visible=False),
+            gr.update(value=True))

+def deselect_run_inference(run_local):
+    if run_local:
+        return (
+            gr.update(visible=False),
+            gr.update(value=False)
+        )
+    else:
+        return (
+            gr.update(visible=True),
+            gr.update(value=True)
+        )
+
 def write_column_mapping_to_config(
     dataset_id, dataset_config, dataset_split, uid, *labels
 ):
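Note that the order of each returned tuple has to match the outputs list passed to .change() in app_text_classification.py: select_run_mode maps onto [inference_token, run_local] and deselect_run_inference onto [inference_token, run_inference]. A quick way to check the pairing without touching the config file (only deselect_run_inference is side-effect free):

# Ticking "Run in this Space" should hide the token box and untick the API checkbox.
token_update, inference_update = deselect_run_inference(True)
print(token_update)      # update carrying visible=False for inference_token
print(inference_update)  # update carrying value=False for run_inference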