Andrea Maldonado committed
Commit 57a7d1c · Parent(s): 2d34e0f
Removes dry unused variables
Files changed:
- .github/workflows/test_gedi.yml +2 -2
- gedi/generator.py +4 -4
- gedi/utils/io_helpers.py +4 -8
.github/workflows/test_gedi.yml
CHANGED
@@ -71,11 +71,11 @@ jobs:
 
       - name: Compare output 3
         run:
-          diff data/validation/genELexperiment3_04.json output/features/grid_feat/2_enself_rt20v/
+          diff data/validation/genELexperiment3_04.json output/features/grid_feat/2_enself_rt20v/genELexperiment3_04_nan.json
 
       - name: Compare output 4
         run:
-          diff data/validation/genELexperiment4_02.json output/features/grid_feat/2_enself_rt20v/
+          diff data/validation/genELexperiment4_02.json output/features/grid_feat/2_enself_rt20v/genELexperiment4_nan_02.json
 
   test_benchmark:
     runs-on: ubuntu-latest
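The updated comparison targets include the "_nan" part of the generated file names. To reproduce this check locally, assuming both files exist after a generator run (paths copied from the "Compare output 3" step above), one can compare the parsed JSON instead of relying on a byte-level diff:

import json

# Illustrative local check mirroring the CI "Compare output 3" step;
# unlike plain `diff`, it ignores key order and whitespace.
with open("data/validation/genELexperiment3_04.json") as expected_fp:
    expected = json.load(expected_fp)
with open("output/features/grid_feat/2_enself_rt20v/genELexperiment3_04_nan.json") as actual_fp:
    actual = json.load(actual_fp)
assert expected == actual, "generated features differ from the validation reference"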
gedi/generator.py
CHANGED
@@ -165,7 +165,7 @@ class GenerateEventLogs():
             with multiprocessing.Pool(num_cores) as p:
                 print(f"INFO: Generator starting at {start.strftime('%H:%M:%S')} using {num_cores} cores for {len(tasks)} tasks...")
                 random.seed(RANDOM_SEED)
-                log_config = p.map(self.generator_wrapper, [(index, row
+                log_config = p.map(self.generator_wrapper, [(index, row) for index, row in tasks.iterrows()])
                 self.log_config = log_config
 
         else:
@@ -194,7 +194,7 @@ class GenerateEventLogs():
             except IndexError:
                 identifier = task[0]+1
                 task = task[1].loc[lambda x, identifier=identifier: x!=identifier]
-            self.objectives = task.to_dict()
+            self.objectives = task.dropna().to_dict()
             random.seed(RANDOM_SEED)
             self.configs = self.optimize()
 
@@ -209,7 +209,7 @@ class GenerateEventLogs():
         if self.objectives.get('ratio_unique_traces_per_trace'):#HOTFIX
             self.objectives['ratio_variants_per_number_of_traces']=self.objectives.pop('ratio_unique_traces_per_trace')
 
-        save_path = get_output_key_value_location(
+        save_path = get_output_key_value_location(task.to_dict(),
                                                   self.output_path, identifier, self.feature_keys)+".xes"
 
         write_xes(log_config['log'], save_path)
@@ -221,7 +221,7 @@ class GenerateEventLogs():
         if features_to_dump.get('ratio_unique_traces_per_trace'):#HOTFIX
             features_to_dump['ratio_variants_per_number_of_traces']=features_to_dump.pop('ratio_unique_traces_per_trace')
         features_to_dump['log'] = identifier.replace('genEL', '')
-        dump_features_json(features_to_dump,
+        dump_features_json(features_to_dump, save_path)
         return log_config
 
     def generate_optimized_log(self, config):
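The corrected p.map call hands every DataFrame row to generator_wrapper as an (index, row) tuple. A minimal, self-contained sketch of that pattern; toy_wrapper and the toy task table are illustrative stand-ins, not GEDI code:

import multiprocessing
import pandas as pd

def toy_wrapper(task):
    # Unpack the (index, row) tuple the same way a worker like generator_wrapper would.
    index, row = task
    return index, row.to_dict()

if __name__ == "__main__":
    tasks = pd.DataFrame({"objective_a": [0.3, 0.5], "objective_b": [10, 20]})
    with multiprocessing.Pool(2) as p:
        results = p.map(toy_wrapper, [(index, row) for index, row in tasks.iterrows()])
    print(results)  # [(0, {...}), (1, {...})]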
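The switch from task.to_dict() to task.dropna().to_dict() means objectives left empty (NaN) in a task row no longer end up in self.objectives. A small illustration with invented objective names:

import pandas as pd

task = pd.Series({"objective_a": 0.4, "objective_b": float("nan")})
print(task.to_dict())           # {'objective_a': 0.4, 'objective_b': nan}
print(task.dropna().to_dict())  # {'objective_a': 0.4}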
gedi/utils/io_helpers.py
CHANGED
@@ -72,15 +72,11 @@ def get_output_key_value_location(obj, output_path, identifier, obj_keys=None):
     save_path = os.path.join(folder_path, generated_file_name)
     return save_path
 
-def dump_features_json(features: dict, output_path,
-    output_parts = PurePath(output_path).parts
-
-                                 *output_parts[1:])
-
-        json_path = get_output_key_value_location(objectives,
-                                                  feature_dir, identifier, obj_keys)+".json"
-    else:
-        json_path = os.path.join(feature_dir, identifier)+".json"
+def dump_features_json(features: dict, output_path, content_type="features"):
+    output_parts = PurePath(output_path.split(".xes")[0]).parts
+    features_path = os.path.join(output_parts[0], content_type,
+                                 *output_parts[1:])
+    json_path = features_path+'.json'
 
     os.makedirs(os.path.split(json_path)[0], exist_ok=True)
     with open(json_path, 'w') as fp:
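The rewritten dump_features_json derives the JSON location directly from the .xes output path by splicing a content_type directory into it. A standalone sketch of just that path arithmetic, using an invented example path rather than real GEDI output:

import os
from pathlib import PurePath

# Hypothetical .xes path, shaped like the ones returned by get_output_key_value_location.
output_path = "output/grid_feat/2_enself_rt20v/genELexperiment3_04.xes"
output_parts = PurePath(output_path.split(".xes")[0]).parts
features_path = os.path.join(output_parts[0], "features", *output_parts[1:])
print(features_path + ".json")
# -> output/features/grid_feat/2_enself_rt20v/genELexperiment3_04.json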