mriusero committed
Commit c587d34 · 1 Parent(s): 05a4c82

core: datashape

app.py CHANGED
@@ -1,3 +1,5 @@
+import json
+import pandas as pd
 import gradio as gr
 
 from src.chat import respond
@@ -33,20 +35,24 @@ with gr.Blocks(theme=custom_theme) as demo:
                 pause = gr.Button("⏸️ Pause")
                 reset = gr.Button("🔄 Reset")
 
-            with gr.Column(scale=2):
-                display_df = gr.DataFrame(
-                    label="Production Data",
-                    headers=[
-                        "Part ID", "Timestamp", "Position", "Orientation", "Tool ID",
-                        "Compliance", "Event", "Error Code", "Error Description",
-                        "Downtime Start", "Downtime End"
-                    ]
-                )
+            with gr.Column(scale=3):
+                df_outputs = {
+                    "DataFrame 1": pd.DataFrame(),
+                    "DataFrame 2": pd.DataFrame(),
+                    "DataFrame 3": pd.DataFrame(),
+                    "DataFrame 4": pd.DataFrame(),
+                    "DataFrame 5": pd.DataFrame(),
+                }
+                json_output = {}
+
+                df_components = [gr.DataFrame(label=df_name, visible=False) for df_name in df_outputs.keys()]
+                json_component = gr.JSON(label="Machine JSON", value=json_output, visible=False)
 
         play.click(
             fn=play_fn,
             inputs=None,
-            outputs=display_df,
-        )
+            outputs=df_components + [json_component]
+        )
         pause.click(
             fn=pause_fn,
             inputs=None,
@@ -58,6 +64,7 @@ with gr.Blocks(theme=custom_theme) as demo:
             outputs=None
         )
 
+
     with gr.Tab("Description"):
         gr.Markdown(
             """
src/production/flow.py CHANGED
@@ -1,4 +1,6 @@
+import os
 import time
+import json
 import random
 import numpy as np
 import pandas as pd
@@ -112,8 +114,8 @@ def play_fn():
     while PRODUCTION:
         data = synthetic_data()
         raw_data = compile(data)
-        yield raw_data
-        process(raw_data)
+        tools_dfs, machine_json = process(raw_data)
+        yield [tools_dfs[key] for key in tools_dfs.keys()] + [machine_json]
 
 
 def pause_fn():
@@ -129,6 +131,7 @@ def reset_fn():
     """
     Reset the production state and clear the data.
     """
+    os.system('clear')
    print("=== RESET DONE ===")
     global PRODUCTION, PROD_STATE
     PRODUCTION = False
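
Note that the list yielded by play_fn has to match the number of outputs wired in app.py (five DataFrame components plus the JSON one), while tools_dfs may contain fewer non-empty frames. A hedged sketch of how the yield could be padded to a fixed length; PAD_TO and pad_outputs are illustrative helpers, not part of this commit.

import pandas as pd

PAD_TO = 5  # assumed number of gr.DataFrame outputs wired in app.py

def pad_outputs(tools_dfs, machine_json):
    # Return a fixed-length list: PAD_TO DataFrames followed by the machine JSON string.
    frames = list(tools_dfs.values())[:PAD_TO]
    frames += [pd.DataFrame()] * (PAD_TO - len(frames))  # pad with empty frames
    return frames + [machine_json]
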
src/production/metrics/machine.py CHANGED
@@ -2,7 +2,7 @@ import pandas as pd
 import json
 import os
 
-def machine_metrics(raw_data):
+def get_machine_metrics(raw_data):
     """
     Calculate machine efficiency metrics from raw production data.
     :param raw_data: collection of raw production data containing timestamps, downtime, and compliance information.
@@ -49,7 +49,7 @@ def machine_metrics(raw_data):
     else:
         mttr = pd.Timedelta(0)
 
-    results = {
+    return {
         "opening_time": str(opening_time),
         "required_time": str(required_time),
         "unplanned_stop_time": str(unplanned_stop_time),
@@ -62,11 +62,4 @@ def machine_metrics(raw_data):
         "TRS": TRS,
         "MTBF": str(mtbf),
         "MTTR": str(mttr)
-    }
-
-    os.makedirs('data', exist_ok=True)
-
-    with open('data/efficiency.json', 'w') as json_file:
-        json.dump(results, json_file, indent=4)
-
-    return results
+    }
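
The metric values are wrapped in str() before being returned because pandas Timedelta objects are not JSON-serializable; a small illustration with a made-up value:

import json
import pandas as pd

mtbf = pd.Timedelta(minutes=42)
# json.dumps({"MTBF": mtbf}) would raise TypeError: Object of type Timedelta is not JSON serializable.
print(json.dumps({"MTBF": str(mtbf)}, indent=4))  # '0 days 00:42:00' serializes fine
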
src/production/metrics/tools.py CHANGED
@@ -21,7 +21,7 @@ def stats_metrics(data, column, usl, lsl):
     return rolling_mean, rolling_std, cp, cpk
 
 
-def process_unique_tool(tool, raw_data, file_id=None):
+def process_unique_tool(tool, raw_data):
     """
     Process data for a single tool and save the results to a CSV file.
     Args:
@@ -29,24 +29,29 @@ def process_unique_tool(tool, raw_data, file_id=None):
         raw_data (pd.DataFrame): DataFrame containing the raw production data.
     """
     tool_data = raw_data[raw_data['Tool ID'] == tool].copy()
+    tool_data = tool_data[tool_data['Tool ID'] != 'N/A']
     tool_data['pos_rolling_mean'], tool_data['pos_rolling_std'], tool_data['pos_rolling_cp'], tool_data['pos_rolling_cpk'] = stats_metrics(tool_data, 'Position', 0.5, 0.3)
     tool_data['ori_rolling_mean'], tool_data['ori_rolling_std'], tool_data['ori_rolling_cp'], tool_data['ori_rolling_cpk'] = stats_metrics(tool_data, 'Orientation', 0.6, 0.2)
-    tool_data.to_csv(f'./data/tool_{file_id}.csv', index=False)
+    return tool, tool_data
 
 
-def tools_metrics(raw_data):
+def get_tools_metrics(raw_data):
     """
     Process the raw production data to extract tool metrics in parallel.
     """
+    metrics = {}
     tools = raw_data['Tool ID'].unique()
 
     with ThreadPoolExecutor() as executor:
-        executor.map(lambda tool: process_unique_tool(tool, raw_data, file_id=tool), tools)
+        results = list(executor.map(lambda tool: process_unique_tool(tool, raw_data), tools))
+        for tool, tool_data in results:
+            metrics[f"tool_{tool}"] = tool_data
 
     # Calculate metrics for all tools together
     all_tools_data = raw_data.copy()
     all_tools_data = all_tools_data[all_tools_data['Tool ID'] != 'N/A']
-
     all_tools_data['pos_rolling_mean'], all_tools_data['pos_rolling_std'], all_tools_data['pos_rolling_cp'], all_tools_data['pos_rolling_cpk'] = stats_metrics(all_tools_data, 'Position', 0.5, 0.3)
     all_tools_data['ori_rolling_mean'], all_tools_data['ori_rolling_std'], all_tools_data['ori_rolling_cp'], all_tools_data['ori_rolling_cpk'] = stats_metrics(all_tools_data, 'Orientation', 0.6, 0.2)
-    all_tools_data.to_csv('./data/tool_all.csv', index=False)
+    metrics['all'] = all_tools_data
+
+    return metrics
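
A standalone sketch (with a stand-in for process_unique_tool) of the collection pattern used in get_tools_metrics: executor.map returns results in the order of the input iterable, so the (tool, tool_data) pairs fold into the dict deterministically.

from concurrent.futures import ThreadPoolExecutor

def fake_process(tool, raw_data):
    return tool, f"metrics for {tool}"  # stand-in for process_unique_tool(tool, raw_data)

tools = ["T1", "T2", "T3"]
with ThreadPoolExecutor() as executor:
    # map() yields results in the order of `tools`, regardless of thread scheduling.
    results = list(executor.map(lambda t: fake_process(t, raw_data=None), tools))

metrics = {f"tool_{tool}": data for tool, data in results}
print(metrics)  # {'tool_T1': 'metrics for T1', 'tool_T2': 'metrics for T2', 'tool_T3': 'metrics for T3'}
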
src/production/processing.py CHANGED
@@ -1,9 +1,25 @@
-from .metrics.tools import tools_metrics
-from .metrics.machine import machine_metrics
+import json
+import numpy as np
+
+from .metrics.tools import get_tools_metrics
+from .metrics.machine import get_machine_metrics
 
 def process(raw_data):
     """
     Process the raw production data to extract metrics.
     """
-    tools_metrics(raw_data)
-    machine_metrics(raw_data)
+    print("=== TOOLS METRICS ===\n")
+    tools_dfs = get_tools_metrics(raw_data)
+    tools_dfs = {tool: df for tool, df in tools_dfs.items() if not df.empty}
+
+    for tool, df in tools_dfs.items():
+        print(tool)
+        print(df.head())
+        print("\n")
+
+    print("=== MACHINE METRICS ===")
+    machine_results = get_machine_metrics(raw_data)
+    machine_json = json.dumps(machine_results, indent=4)
+    print(machine_json)
+
+    return tools_dfs, machine_json
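
For the downstream contract, a hedged sketch of the new return shape: process() hands back a dict of per-tool DataFrames plus a JSON string, which play_fn in flow.py flattens into the list Gradio consumes. The stand-in values below are invented.

import json
import pandas as pd

# Stand-ins mirroring the shapes returned by process(): DataFrames keyed by
# 'tool_<id>' / 'all', and a JSON string of machine metrics.
tools_dfs = {
    "tool_T1": pd.DataFrame({"Position": [0.41, 0.44]}),
    "all": pd.DataFrame({"Position": [0.41, 0.44]}),
}
machine_json = json.dumps({"TRS": 0.87, "MTBF": "0 days 00:42:00"}, indent=4)

# flow.py turns the pair into the flat list that feeds the Gradio outputs:
outputs = [tools_dfs[key] for key in tools_dfs.keys()] + [machine_json]
print(len(outputs), type(outputs[-1]))  # 3 <class 'str'>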