ksaramout committed on
Commit
404f2ee
·
1 Parent(s): b401ae1

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -5
app.py CHANGED
@@ -171,6 +171,8 @@ else:
171
  st.success('Running in Production mode!', icon="✅")
172
 
173
  st.subheader("Tell us about your Databricks and Labelbox environments", divider='grey')
 
 
174
  title = st.text_input('Enter Databricks Domain (e.g., <instance>.<cloud>.databricks.com)', '')
175
  databricks_api_key = st.text_input('Databricks API Key', type='password')
176
  labelbox_api_key = st.text_input('Labelbox API Key', type='password')
@@ -477,15 +479,21 @@ if new_dataset_name or selected_dataset_name:
477
  # Schema Map
478
  row_data_input = selected_row_data
479
  global_key_input = selected_global_key
 
 
480
  schema_map_dict = {'row_data': row_data_input}
481
  if global_key_input:
482
  schema_map_dict['global_key'] = global_key_input
483
 
484
- # Convert the dict to a stringified JSON
485
- schema_map_str = json.dumps(schema_map_dict)
486
-
 
 
 
487
 
488
  data = {
 
489
  "mode": mode,
490
  "databricks_instance": databricks_instance,
491
  "databricks_api_key": databricks_api_key,
@@ -496,7 +504,7 @@ if new_dataset_name or selected_dataset_name:
496
  "frequency": frequency,
497
  "new_cluster": 0,
498
  "cluster_id": cluster_id,
499
- "schema_map": schema_map_str
500
  }
501
 
502
 
@@ -504,7 +512,7 @@ if new_dataset_name or selected_dataset_name:
504
  # Ensure all fields are filled out
505
  required_fields = [
506
  mode, databricks_instance, databricks_api_key, new_dataset, dataset_id,
507
- table_path, labelbox_api_key, frequency, cluster_id, schema_map_str
508
  ]
509
 
510
 
 
171
  st.success('Running in Production mode!', icon="✅")
172
 
173
  st.subheader("Tell us about your Databricks and Labelbox environments", divider='grey')
174
+ cloud = "GCP"
175
+ #cloud = st.selectbox('Which cloud environment does your Databricks Workspace run in?', ['AWS', 'Azure', 'GCP'], index=None)
176
  title = st.text_input('Enter Databricks Domain (e.g., <instance>.<cloud>.databricks.com)', '')
177
  databricks_api_key = st.text_input('Databricks API Key', type='password')
178
  labelbox_api_key = st.text_input('Labelbox API Key', type='password')
 
479
  # Schema Map
480
  row_data_input = selected_row_data
481
  global_key_input = selected_global_key
482
+
483
+ # Create the initial dictionary
484
  schema_map_dict = {'row_data': row_data_input}
485
  if global_key_input:
486
  schema_map_dict['global_key'] = global_key_input
487
 
488
+ # Swap keys and values
489
+ reversed_schema_map_dict = {v: k for k, v in schema_map_dict.items()}
490
+
491
+ # Convert the reversed dictionary to a stringified JSON
492
+ reversed_schema_map_str = json.dumps(reversed_schema_map_dict)
493
+
494
 
495
  data = {
496
+ "cloud": cloud,
497
  "mode": mode,
498
  "databricks_instance": databricks_instance,
499
  "databricks_api_key": databricks_api_key,
 
504
  "frequency": frequency,
505
  "new_cluster": 0,
506
  "cluster_id": cluster_id,
507
+ "schema_map": reversed_schema_map_str
508
  }
509
 
510
 
 
512
  # Ensure all fields are filled out
513
  required_fields = [
514
  mode, databricks_instance, databricks_api_key, new_dataset, dataset_id,
515
+ table_path, labelbox_api_key, frequency, cluster_id, reversed_schema_map_str
516
  ]
517
 
518