YchKhan committed
Commit 0cc706f · verified · 1 Parent(s): aa0448d

Update app.py

Files changed (1)
  1. app.py +77 -0
app.py CHANGED
@@ -23,6 +23,83 @@ class ReqGroupingCategory(BaseModel):
 class ReqGroupingResponse(BaseModel):
     categories: List[ReqGroupingCategory]
 
+    model_config = {
+        "categories": [
+            {
+                "id": 1,
+                "title": "Robustness & Resilience",
+                "requirements": [
+                    {
+                        "context": "Subject to the operator’s policy and regulatory requirements, an AI service provided by the 6G network or UE shall be able to provide information regarding robustness scores.",
+                        "requirement": "Expose an overall robustness score to service consumers."
+                    },
+                    {
+                        "context": "The network can respond with a missings-resilience score for the used AI application.",
+                        "requirement": "Report a missings-resilience score that quantifies tolerance to missing or corrupted input data."
+                    }
+                ]
+            },
+            {
+                "id": 2,
+                "title": "Environmental Sustainability",
+                "requirements": [
+                    {
+                        "context": "What is the level of energy consumption per information request (per inference run of the AI).",
+                        "requirement": "Report energy consumption per 1 000 inference requests."
+                    },
+                    {
+                        "context": "What is the portion of renewable energy of the energy consumed by the AI service.",
+                        "requirement": "Report the share of renewable energy in the AI service’s power mix."
+                    },
+                    {
+                        "context": "The application sets a requirement for the energy consumption needed for inference.",
+                        "requirement": "Allow the consumer to specify a maximum energy-per-inference threshold that must be met."
+                    }
+                ]
+            },
+            {
+                "id": 3,
+                "title": "Explainability & Transparency",
+                "requirements": [
+                    {
+                        "context": "Local explanation: The aim is to explain individual outputs provided by an ML model.",
+                        "requirement": "Support local explanations for single predictions."
+                    },
+                    {
+                        "context": "Global explanation: The aim is to explain the whole ML model behaviour.",
+                        "requirement": "Support global explanations that describe overall model logic."
+                    },
+                    {
+                        "context": "Third-party applications have explanations of AI agent reasoning.",
+                        "requirement": "Provide on-demand reasoning for predictions to authorised consumers."
+                    }
+                ]
+            },
+            {
+                "id": 4,
+                "title": "Service Discovery & Criteria Negotiation",
+                "requirements": [
+                    {
+                        "context": "A subscriber density prediction service is offered via an exposure interface.",
+                        "requirement": "Ensure AI services are discoverable through the exposure interface."
+                    },
+                    {
+                        "context": "The application requests further profile information regarding robustness, sustainability and explainability aspects.",
+                        "requirement": "Expose a profile that includes robustness, sustainability and explainability metrics."
+                    },
+                    {
+                        "context": "A service consumer shall be able to provide service criteria regarding robustness, environmental sustainability, and explainability when requesting an AI service to the 6G system.",
+                        "requirement": "Accept consumer-supplied criteria for robustness, sustainability and explainability."
+                    },
+                    {
+                        "context": "In some cases the AI service could not be fulfilled, or could fall back to a non-AI mechanism if the criteria cannot be met.",
+                        "requirement": "Support rejection or graceful fallback when agreed criteria are not satisfied."
+                    }
+                ]
+            }
+        ]
+    }
+
 
 # ---- Solution Models ----
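Note: the added block stores the example payload under an arbitrary "categories" key directly in model_config. What follows is a minimal sketch, not code from this repository, of the more conventional Pydantic v2 pattern, where example data sits under json_schema_extra so it stays out of model validation and is surfaced as an OpenAPI example by FastAPI. The RequirementInfo model and the exact fields of ReqGroupingCategory are assumptions inferred from the example payload, and the payload is trimmed to one category for brevity.

from typing import List

from pydantic import BaseModel, ConfigDict


class RequirementInfo(BaseModel):
    # Hypothetical shape inferred from the example payload in the diff.
    context: str
    requirement: str


class ReqGroupingCategory(BaseModel):
    # Hypothetical fields inferred from the example payload in the diff.
    id: int
    title: str
    requirements: List[RequirementInfo]


class ReqGroupingResponse(BaseModel):
    categories: List[ReqGroupingCategory]

    # "categories" is not a Pydantic config option; nesting the payload under
    # json_schema_extra leaves model behaviour unchanged and attaches it to the
    # generated JSON schema as example data.
    model_config = ConfigDict(
        json_schema_extra={
            "examples": [
                {
                    "categories": [
                        {
                            "id": 1,
                            "title": "Robustness & Resilience",
                            "requirements": [
                                {
                                    "context": "The network can respond with a missings-resilience score for the used AI application.",
                                    "requirement": "Report a missings-resilience score that quantifies tolerance to missing or corrupted input data.",
                                }
                            ],
                        }
                    ]
                }
            ]
        }
    )

With a layout like this, any FastAPI endpoint declaring response_model=ReqGroupingResponse would show the payload as an example in the generated /docs UI, and ReqGroupingResponse.model_validate(...) can be used to check that the example actually conforms to the model.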