Dev Scripts
Browse files- scripts/1_test_node_creation.py +51 -0
- scripts/2.1_test_graph_operation.py +52 -0
- scripts/2.2_test_graph_operation.py +60 -0
- scripts/2.3_test_graph_operation.py +67 -0
- scripts/2_test_graph_operation.py +74 -0
- scripts/3.0_sec_340b_test.py +72 -0
- scripts/3.10_test_graph.py +46 -0
- scripts/3.11_connect_the_dots.py +33 -0
- scripts/3.12_get-perfect_num.py +63 -0
- scripts/3.13_get_perfect.py +65 -0
- scripts/3.14.ipynb +0 -0
- scripts/3.15.ipynb +774 -0
- scripts/3.1_test_340b_traversal.py +56 -0
- scripts/3.2_visualize_340b.py +83 -0
- scripts/3.3_expanded_relationships.py +63 -0
- scripts/3.4_nodes.py +104 -0
- scripts/3.5_enhanced_nodes.py +77 -0
- scripts/3.6_test_visualisations.py +20 -0
- scripts/3.7_test_visualisations.py +36 -0
- scripts/3.8_expand_graph.py +62 -0
- scripts/3.9_pass_graph_file.py +26 -0
- scripts/4.0_sacred_geomitry.py +72 -0
scripts/1_test_node_creation.py
ADDED
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import requests

# Base URL of the graph API under test.
BASE_URL = "http://localhost:5000"


def test_load_graph():
    """POST /load_graph and print the server's response."""
    response = requests.post(f"{BASE_URL}/load_graph")
    print("Load Graph Response:", response.json())


def test_create_node():
    """POST /create_node with a sample Healthcare patient and print the response."""
    payload = {
        "node_id": "patient_123",
        "data": {
            "name": "John Doe",
            "age": 45,
            "medical_conditions": ["hypertension", "diabetes"],
        },
        "domain": "Healthcare",
        "type": "Patient",
    }
    # requests' json= parameter serializes the payload and sets the
    # Content-Type header, replacing the manual json.dumps(...) + headers dict.
    response = requests.post(f"{BASE_URL}/create_node", json=payload)
    print("Create Node Response:", response.json())


def test_query_node(node_id):
    """GET /query_node for *node_id* and print the response."""
    response = requests.get(f"{BASE_URL}/query_node", params={"node_id": node_id})
    print(f"Query Node {node_id} Response:", response.json())


def test_list_nodes():
    """GET /list_nodes and print the response."""
    response = requests.get(f"{BASE_URL}/list_nodes")
    print("List Nodes Response:", response.json())


def test_list_relationships():
    """GET /list_relationships and print the response."""
    response = requests.get(f"{BASE_URL}/list_relationships")
    print("List Relationships Response:", response.json())


if __name__ == "__main__":
    print("\n--- Testing Graph Loading ---")
    test_load_graph()

    print("\n--- Testing Node Creation ---")
    test_create_node()

    print("\n--- Testing Node Query ---")
    test_query_node("patient_123")

    print("\n--- Testing List All Nodes ---")
    test_list_nodes()

    print("\n--- Testing List All Relationships ---")
    test_list_relationships()
scripts/2.1_test_graph_operation.py
ADDED
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
|
3 |
+
def print_tree(node, prefix="", visited=None):
    """Print *node* and its descendants as an ASCII tree, skipping repeats.

    A node id already present in *visited* is reported once as
    "(already listed)" instead of being expanded again.
    """
    visited = set() if visited is None else visited

    node_id = node["node_id"]
    if node_id in visited:
        # Duplicate: mention it, but do not recurse into it again.
        print(f"{prefix}(already listed) {node_id}")
        return

    visited.add(node_id)
    print(f"{prefix}{node_id}")

    children = node.get("descendants", [])
    last = len(children) - 1
    for index, child in enumerate(children):
        branch = "└── " if index == last else "├── "
        print_tree(child, prefix + branch, visited)
21 |
+
|
22 |
+
# Test API Endpoints
base_url = "http://localhost:5000"

# Step 1: Load Graph — ask the server to (re)load its graph data.
print("\n--- Testing Graph Loading ---")
response = requests.post(f"{base_url}/load_graph")
print("Load Graph Response:", response.json())

# Step 2: Create Node — insert a sample Healthcare patient node.
print("\n--- Testing Node Creation ---")
create_data = {
    "node_id": "patient_123",
    "data": {"name": "John Doe", "age": 45, "medical_conditions": ["hypertension", "diabetes"]},
    "domain": "Healthcare",
    "type": "Patient"
}
response = requests.post(f"{base_url}/create_node", json=create_data)
print("Create Node Response:", response.json())

# Step 3: Inspect Relationships for Node (Healthcare)
print("\n--- Testing Inspect Relationships for Node (Healthcare) ---")
response = requests.get(f"{base_url}/inspect_relationships?node_id=Healthcare")
relationships = response.json()
print("Inspect Relationships for Healthcare:")

# Convert to tree-like structure: print_tree expects a dict with
# "node_id" and "descendants" keys.
# NOTE(review): assumes the response JSON has the shape
# {"node_id": ..., "relationships": {"child_relations": [...]}} — a KeyError
# here means the endpoint returned something else; confirm against the server.
root_node = {
    "node_id": relationships["node_id"],
    "descendants": relationships["relationships"]["child_relations"]
}
print_tree(root_node)
scripts/2.2_test_graph_operation.py
ADDED
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
|
3 |
+
def print_tree(node, prefix="", visited=None, is_inheritance=False):
    """Print a tree of *node* and its descendants, labelling each entry as
    "inherited" (reached via an ``inherits_from`` edge) or "related".

    Args:
        node: Mapping with a ``node_id`` and an optional ``descendants`` list;
            each descendant may carry a ``relationship`` key.
        prefix: Accumulated branch characters for the current depth.
        visited: Set of node ids already printed; duplicates are noted once.
        is_inheritance: Whether *node* itself was reached via inheritance.
    """
    if visited is None:
        visited = set()

    node_id = node["node_id"]
    # Avoid printing duplicate nodes by checking if node has been visited.
    if node_id in visited:
        print(f"{prefix}(already listed) {node_id}")
        return
    visited.add(node_id)

    relationship_type = "inherited" if is_inheritance else "related"
    print(f"{prefix}{node_id} ({relationship_type})")

    # Fetch the child list once. Use .get("relationship") so a descendant
    # without a "relationship" key is treated as non-inheritance instead of
    # raising KeyError (the original indexed desc["relationship"] directly,
    # which crashes on root-style dicts that carry no relationship).
    children = node.get("descendants", [])

    # Traverse inheritance edges first...
    descendants = [desc for desc in children if desc.get("relationship") == "inherits_from"]
    for i, child in enumerate(descendants):
        new_prefix = f"{prefix}├── " if i < len(descendants) - 1 else f"{prefix}└── "
        print_tree(child, new_prefix, visited, is_inheritance=True)

    # ...then every other relationship type.
    other_relations = [rel for rel in children if rel.get("relationship") != "inherits_from"]
    for j, rel in enumerate(other_relations):
        new_prefix = f"{prefix}├── " if j < len(other_relations) - 1 else f"{prefix}└── "
        print_tree(rel, new_prefix, visited, is_inheritance=False)
29 |
+
|
30 |
+
# Test API Endpoints
base_url = "http://localhost:5000"

# Step 1: Load Graph — ask the server to (re)load its graph data.
print("\n--- Testing Graph Loading ---")
response = requests.post(f"{base_url}/load_graph")
print("Load Graph Response:", response.json())

# Step 2: Create Node — insert a sample Healthcare patient node.
print("\n--- Testing Node Creation ---")
create_data = {
    "node_id": "patient_123",
    "data": {"name": "John Doe", "age": 45, "medical_conditions": ["hypertension", "diabetes"]},
    "domain": "Healthcare",
    "type": "Patient"
}
response = requests.post(f"{base_url}/create_node", json=create_data)
print("Create Node Response:", response.json())

# Step 3: Inspect Relationships for Node (Healthcare)
print("\n--- Testing Inspect Relationships for Node (Healthcare) ---")
response = requests.get(f"{base_url}/inspect_relationships?node_id=Healthcare")
relationships = response.json()
print("Inspect Relationships for Healthcare:")

# Convert to tree-like structure for print_tree ({"node_id", "descendants"}).
# NOTE(review): assumes the response JSON contains
# relationships["relationships"]["child_relations"] — confirm against the
# server's /inspect_relationships schema.
root_node = {
    "node_id": relationships["node_id"],
    "descendants": relationships["relationships"]["child_relations"]
}
print_tree(root_node)
scripts/2.3_test_graph_operation.py
ADDED
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
|
3 |
+
def print_tree(node, prefix="", visited=None, root_id="Healthcare"):
    """Recursively render a node and all of its descendants as an ASCII tree.

    Each node is printed at most once; revisiting it (or looping back to the
    root) prints an "(already listed)" marker instead of recursing further.
    """
    visited = visited if visited is not None else set()

    node_id = node.get("node_id")
    if not node_id:
        # Malformed entry with no id — note it and stop this branch.
        print(f"{prefix}(unknown node)")
        return

    # Cycle guard: skip anything printed before, and never re-expand the
    # root when it shows up below itself.
    already_seen = node_id in visited
    loops_to_root = node_id == root_id and prefix != ""
    if already_seen or loops_to_root:
        print(f"{prefix}(already listed) {node_id}")
        return
    visited.add(node_id)

    rel = node.get("relationship")
    label = f"({rel})" if rel else ""
    print(f"{prefix}{node_id} {label}")

    children = node.get("descendants", [])
    if not children:
        print(f"{prefix}(no further descendants)")
        return
    last = len(children) - 1
    for pos, child in enumerate(children):
        connector = "└── " if pos == last else "├── "
        print_tree(child, prefix + connector, visited, root_id)
29 |
+
|
30 |
+
def inspect_and_print_relationships(node_id):
    """Fetch the downward traversal for *node_id* and render it with print_tree."""
    url = f"{base_url}/traverse_node?node_id={node_id}&direction=down"
    hierarchy = requests.get(url).json()

    print(f"\nTraversal Response for {node_id}:", hierarchy)  # Debugging line

    # Normalise payloads that lack a top-level "node_id" into the shape
    # print_tree expects: {"node_id": ..., "descendants": [...]}.
    if "node_id" not in hierarchy:
        hierarchy = {"node_id": node_id, "descendants": hierarchy.get("descendants", [])}

    print(f"\nInspect Relationships for {node_id} (Full Hierarchy):")
    print_tree(hierarchy)
45 |
+
|
46 |
+
# Test API Endpoints
base_url = "http://localhost:5000"

# Step 1: Load Graph — ask the server to (re)load its graph data.
print("\n--- Testing Graph Loading ---")
response = requests.post(f"{base_url}/load_graph")
print("Load Graph Response:", response.json())

# Step 2: Create Node — insert a sample Healthcare patient node.
print("\n--- Testing Node Creation ---")
create_data = {
    "node_id": "patient_123",
    "data": {"name": "John Doe", "age": 45, "medical_conditions": ["hypertension", "diabetes"]},
    "domain": "Healthcare",
    "type": "Patient"
}
response = requests.post(f"{base_url}/create_node", json=create_data)
print("Create Node Response:", response.json())

# Step 3: Inspect Relationships for Node (Healthcare) - Full hierarchical tree
print("\n--- Testing Inspect Relationships for Node (Healthcare) ---")
inspect_and_print_relationships("Healthcare")
scripts/2_test_graph_operation.py
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# scripts/test_graph_operations.py
import requests

# Base URL of the graph API under test.
BASE_URL = "http://localhost:5000"


def test_load_graph():
    """Load the graph via POST /load_graph and echo the response."""
    print("\n--- Testing Graph Loading ---")
    print("Load Graph Response:", requests.post(f"{BASE_URL}/load_graph").json())


def test_create_node():
    """Create a sample Healthcare patient node and echo the response."""
    print("\n--- Testing Node Creation ---")
    payload = {
        "node_id": "patient_123",
        "data": {
            "name": "John Doe",
            "age": 45,
            "medical_conditions": ["hypertension", "diabetes"],
        },
        "domain": "Healthcare",
        "type": "Patient",
    }
    print("Create Node Response:", requests.post(f"{BASE_URL}/create_node", json=payload).json())


def test_query_node(node_id):
    """Query a single node by id and echo the response."""
    print(f"\n--- Testing Node Query ({node_id}) ---")
    reply = requests.get(f"{BASE_URL}/query_node", params={"node_id": node_id})
    print(f"Query Node {node_id} Response:", reply.json())


def test_list_nodes():
    """List every node in the graph and echo the response."""
    print("\n--- Testing List All Nodes ---")
    print("List Nodes Response:", requests.get(f"{BASE_URL}/list_nodes").json())


def test_list_relationships():
    """List every relationship in the graph and echo the response."""
    print("\n--- Testing List All Relationships ---")
    print("List Relationships Response:", requests.get(f"{BASE_URL}/list_relationships").json())


def test_traverse_node(node_id, direction):
    """Traverse from *node_id* in *direction* ("up" or "down") and echo the path."""
    print(f"\n--- Testing Node Traversal ({node_id}, {direction}) ---")
    reply = requests.get(
        f"{BASE_URL}/traverse_node",
        params={"node_id": node_id, "direction": direction},
    )
    print(f"Traversal Path ({node_id} - {direction}):", reply.json())


def test_inspect_relationships(node_id):
    """Inspect the immediate relationships of *node_id* and echo the response."""
    print(f"\n--- Testing Inspect Relationships for Node ({node_id}) ---")
    reply = requests.get(f"{BASE_URL}/inspect_relationships", params={"node_id": node_id})
    print(f"Inspect Relationships for {node_id}:", reply.json())


if __name__ == "__main__":
    # Step 1: Load the Graph
    test_load_graph()

    # Step 2: Create a sample node
    test_create_node()

    # Step 3: Query specific nodes
    test_query_node("patient_123")
    test_query_node("Healthcare")

    # Step 4: List all nodes
    test_list_nodes()

    # Step 5: List all relationships
    test_list_relationships()

    # Step 6: Traverse nodes (both up and down)
    test_traverse_node("patient_123", "up")
    test_traverse_node("Healthcare", "down")

    # Step 7: Inspect relationships, including hierarchy traversal
    test_inspect_relationships("Healthcare")
    test_inspect_relationships("patient_123")
scripts/3.0_sec_340b_test.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
|
3 |
+
def print_tree(node, prefix="", visited=None, root_id="Healthcare"):
    """Walk *node* depth-first and print each entry as a branch of an ASCII tree.

    Duplicate ids — and any reappearance of the root below itself — are
    printed once as "(already listed)" rather than expanded again.
    """
    if visited is None:
        visited = set()

    current = node.get("node_id")
    if not current:
        # Entry without an id: report it and abandon this branch.
        print(f"{prefix}(unknown node)")
        return

    if current in visited or (current == root_id and prefix != ""):
        print(f"{prefix}(already listed) {current}")
        return
    visited.add(current)

    relationship = node.get("relationship")
    label = f"({relationship})" if relationship else ""
    print(f"{prefix}{current} {label}")

    branches = node.get("descendants", [])
    if not branches:
        print(f"{prefix}(no further descendants)")
    else:
        final = len(branches) - 1
        for idx, sub in enumerate(branches):
            connector = "└── " if idx == final else "├── "
            print_tree(sub, prefix + connector, visited, root_id)
29 |
+
|
30 |
+
def inspect_and_print_relationships(node_id):
    """Inspect and display relationships for a specified node in a tree format."""
    # Fetch the relationship summary for this node from the API.
    response = requests.get(f"{base_url}/inspect_relationships?node_id={node_id}")
    traversal_hierarchy = response.json()

    print(f"\nTraversal Response for {node_id}:", traversal_hierarchy)  # Debugging line
    # Ensure the root node structure matches expectations
    # (print_tree needs {"node_id": ..., "descendants": [...]}).
    # NOTE(review): when "node_id" is absent this assumes the payload carries a
    # top-level "child_relations" list — confirm against the endpoint's schema.
    if "node_id" not in traversal_hierarchy:
        traversal_hierarchy = {
            "node_id": node_id,
            "descendants": traversal_hierarchy.get("child_relations", [])
        }

    print(f"\nInspect Relationships for {node_id} (Full Hierarchy):")
    print_tree(traversal_hierarchy)
45 |
+
|
46 |
+
# Base URL for API
base_url = "http://localhost:5000"

# Step 1: Load Graph (Specify the graph to load, e.g., PHSA/340B section)
print("\n--- Testing Graph Loading ---")
graph_data = {"graph_file": "graphs/PHSA/phsa_sec_340b.json"}
response = requests.post(f"{base_url}/load_graph", json=graph_data)
print("Load Graph Response:", response.json())

# Step 2: Create a Test Node — sample Healthcare patient.
print("\n--- Testing Node Creation ---")
create_data = {
    "node_id": "patient_123",
    "data": {"name": "John Doe", "age": 45, "medical_conditions": ["hypertension", "diabetes"]},
    "domain": "Healthcare",
    "type": "Patient"
}
response = requests.post(f"{base_url}/create_node", json=create_data)
print("Create Node Response:", response.json())

# Step 3: Inspect Relationships for Node (Healthcare) - Full hierarchical tree
print("\n--- Testing Inspect Relationships for Node (Healthcare) ---")
inspect_and_print_relationships("Healthcare")

# Step 4: Inspect Relationships for Specific Node (Section 340B)
print("\n--- Testing Inspect Relationships for Node (Section 340B) ---")
inspect_and_print_relationships("Section 340B")
scripts/3.10_test_graph.py
ADDED
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import json
import networkx as nx
import os
import sys

# Make the project root importable when running from scripts/.
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from app.services.agn_service.build_graph import build_graph
from app.services.agn_service.visualize_graph import visualize_graph

# Define the path to your index.json file
index_file_path = "graphs/index.json"  # Ensure this path is correct


def load_json_file(file_path):
    """Load and parse a JSON file; return None (after printing a message) on failure."""
    try:
        with open(file_path, "r") as file:
            return json.load(file)  # Parse JSON into a dictionary
    except json.JSONDecodeError as e:
        print(f"Error parsing JSON file: {e}")
        return None
    except FileNotFoundError:
        print(f"File not found: {file_path}")
        return None


# Load the JSON data
data = load_json_file(index_file_path)

# BUG FIX: G must exist even when loading or building fails; previously the
# `if G:` check below raised NameError whenever data was None or
# build_graph() threw, because G was never assigned.
G = None

# Check if data was loaded successfully
if data is None:
    print("Failed to load JSON data.")
else:
    # Build the graph using the loaded data
    try:
        G = build_graph(data)  # Pass the parsed JSON data (dictionary) to build_graph
        print("Graph built successfully.")
    except Exception as e:
        print(f"Error building graph: {e}")


output_image = "test_graph_visualization.png"

# Step 2: Generate and save the graph visualization
if G:
    visualize_graph(G, output_file=output_image)
    print(f"Graph visualization generated and saved as {output_image}")
else:
    print("Failed to build graph.")
scripts/3.11_connect_the_dots.py
ADDED
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os
import sys
import json
import networkx as nx
import matplotlib.pyplot as plt

# Path setup: make the repository root importable so the app package resolves.
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from app.services.agn_service.build_expanded_graph import build_expanded_graph
from app.services.agn_service.visualize_graph import visualize_graph

# Input config describing the graphs to combine, and the image to produce.
index_file_path = "graphs/config.json"  # Use the updated config file
output_image = "expanded_graph_visualization.png"


def load_index_data(file_path):
    """Read *file_path* and return its parsed JSON content."""
    with open(file_path, "r") as handle:
        return json.load(handle)


# Main execution to build and visualize the expanded graph
if __name__ == "__main__":
    # Load index data from the config, then build and render the graph.
    config = load_index_data(index_file_path)
    graph = build_expanded_graph(config)

    if graph:
        visualize_graph(graph, output_file=output_image)
        print(f"Expanded graph visualization saved as {output_image}")
    else:
        print("Failed to build expanded graph.")
scripts/3.12_get-perfect_num.py
ADDED
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import json

# Load original data with corrected file path
with open("graphs/prime_perfect_data.json", "r") as f:
    data = json.load(f)


def _infer_from_link(source, target, relationship):
    """Return the inferred relationship dict for one existing link, or None.

    Recognised patterns:
      * multiplied_to_generate   -> "multiplied_by_<k>" when target/source is whole
      * next_in_sequence         -> "add_<d>" with d = target - source
      * generates_Mersenne_prime -> "mersenne_equation" when target == 2**source - 1
      * generates_perfect_number -> "perfect_equation" when target matches
                                    Euclid's form 2**(p-1) * (2**p - 1)
    """
    if relationship == "multiplied_to_generate":
        # Guard against division by zero before probing for a whole factor
        # (the original `target / source` crashed when source == 0).
        if source != 0:
            factor = target / source
            if factor.is_integer():
                return {
                    "source": source,
                    "target": target,
                    "relationship": f"multiplied_by_{int(factor)}",
                }
    elif relationship == "next_in_sequence":
        diff = target - source
        return {"source": source, "target": target, "relationship": f"add_{int(diff)}"}
    elif relationship == "generates_Mersenne_prime":
        if source > 1 and target == 2 ** source - 1:
            return {"source": source, "target": target, "relationship": "mersenne_equation"}
    elif relationship == "generates_perfect_number":
        if source > 1 and target == 2 ** (source - 1) * (2 ** source - 1):
            return {"source": source, "target": target, "relationship": "perfect_equation"}
    return None


# Analyze existing relationships and collect any we can infer.
inferred_relationships = []
for link in data["relationships"]:
    inferred = _infer_from_link(link["source"], link["target"], link["relationship"])
    if inferred is not None:
        inferred_relationships.append(inferred)

# Combine original relationships with inferred relationships
combined_data = {
    "nodes": data["nodes"],
    "relationships": data["relationships"] + inferred_relationships,
}

# Save to new JSON file in the graphs directory
with open("graphs/inferred_prime_perfect_data.json", "w") as f:
    json.dump(combined_data, f, indent=4)

print("Inferred relationships added to graphs/inferred_prime_perfect_data.json")
scripts/3.13_get_perfect.py
ADDED
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
|
3 |
+
# Sample data structure based on your JSON
# Nodes are grouped by category; every node carries a numeric "value" and a
# "category" label. "relationships" holds predefined edges between values.
data = {
    "primes": [
        {"value": 2, "category": "prime"},
        {"value": 3, "category": "prime"},
        {"value": 5, "category": "prime"},
        # add other nodes as needed
    ],
    "perfect_numbers": [
        {"value": 6, "category": "perfect_number"},
        {"value": 28, "category": "perfect_number"},
        # add other nodes as needed
    ],
    "relationships": [
        {"source": 2, "target": 3, "relationship": "generates_Mersenne_prime"},
        # add other predefined relationships
    ]
}
21 |
+
|
22 |
+
# Inferred relationship function
|
23 |
+
def infer_relationships(nodes):
    """Derive arithmetic relationships between every ordered pair of nodes.

    For each ordered pair (source, target) with source != target:
      * "multiplied_by_<k>" when target is a whole multiple of source
      * "add_<d>" when target exceeds source by d > 0

    Args:
        nodes: list of dicts, each carrying a numeric "value" key.

    Returns:
        list of {"source", "target", "relationship"} dicts.
    """
    inferred_relationships = []

    # Iterate over node pairs to find possible relationships
    for source in nodes:
        for target in nodes:
            if source == target:
                continue

            source_value = source['value']
            target_value = target['value']

            # Check for multiplication relationship. Skip a zero source,
            # which would raise ZeroDivisionError in the modulo test
            # (the original did `target_value % source_value` unguarded).
            if source_value != 0 and target_value % source_value == 0:
                factor = target_value // source_value
                inferred_relationships.append({
                    "source": source_value,
                    "target": target_value,
                    "relationship": f"multiplied_by_{factor}"
                })

            # Check for addition relationship (increasing direction only)
            if target_value - source_value > 0:
                addend = target_value - source_value
                inferred_relationships.append({
                    "source": source_value,
                    "target": target_value,
                    "relationship": f"add_{addend}"
                })

    return inferred_relationships
55 |
+
|
56 |
+
# Infer relationships
# Pool every node (primes and perfect numbers) and derive arithmetic edges.
nodes = data["primes"] + data["perfect_numbers"]
new_relationships = infer_relationships(nodes)

# Append new relationships to the original data (mutates data in place).
data["relationships"].extend(new_relationships)

# Output the updated JSON
with open("updated_graph_data.json", "w") as file:
    json.dump(data, file, indent=4)
scripts/3.14.ipynb
ADDED
The diff for this file is too large to render.
See raw diff
|
|
scripts/3.15.ipynb
ADDED
@@ -0,0 +1,774 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"metadata": {},
|
6 |
+
"source": [
|
7 |
+
"# Quantum Circuit with Qiskit Runtime\n",
|
8 |
+
"\n",
|
9 |
+
"This notebook demonstrates how to set up a simple quantum circuit using Qiskit Runtime and measure performance using an Active Graph Network (AGN). The notebook is based on our discussion and includes dependency installation, circuit definition, benchmarking, and troubleshooting steps."
|
10 |
+
]
|
11 |
+
},
|
12 |
+
{
|
13 |
+
"cell_type": "markdown",
|
14 |
+
"metadata": {},
|
15 |
+
"source": [
|
16 |
+
"## 1. Install Qiskit and Qiskit IBM Runtime\n",
|
17 |
+
"\n",
|
18 |
+
"First, install the necessary dependencies for Qiskit and Qiskit IBM Runtime. Make sure you restart the kernel after installing these packages to avoid compatibility issues."
|
19 |
+
]
|
20 |
+
},
|
21 |
+
{
|
22 |
+
"cell_type": "code",
|
23 |
+
"execution_count": 1,
|
24 |
+
"metadata": {},
|
25 |
+
"outputs": [
|
26 |
+
{
|
27 |
+
"name": "stdout",
|
28 |
+
"output_type": "stream",
|
29 |
+
"text": [
|
30 |
+
"Requirement already satisfied: qiskit in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (1.2.4)\n",
|
31 |
+
"Requirement already satisfied: qiskit-ibm-runtime in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (0.32.0)\n",
|
32 |
+
"Requirement already satisfied: rustworkx>=0.15.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.15.1)\n",
|
33 |
+
"Requirement already satisfied: numpy<3,>=1.17 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.23.5)\n",
|
34 |
+
"Requirement already satisfied: scipy>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.13.1)\n",
|
35 |
+
"Requirement already satisfied: sympy>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.13.1)\n",
|
36 |
+
"Requirement already satisfied: dill>=0.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.3.9)\n",
|
37 |
+
"Requirement already satisfied: python-dateutil>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (2.9.0)\n",
|
38 |
+
"Requirement already satisfied: stevedore>=3.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (5.3.0)\n",
|
39 |
+
"Requirement already satisfied: typing-extensions in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (4.12.2)\n",
|
40 |
+
"Requirement already satisfied: symengine<0.14,>=0.11 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.13.0)\n",
|
41 |
+
"Requirement already satisfied: requests>=2.19 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.32.3)\n",
|
42 |
+
"Requirement already satisfied: requests-ntlm>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (1.1.0)\n",
|
43 |
+
"Requirement already satisfied: urllib3>=1.21.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.2.3)\n",
|
44 |
+
"Requirement already satisfied: websocket-client>=1.5.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (1.8.0)\n",
|
45 |
+
"Requirement already satisfied: ibm-platform-services>=0.22.6 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (0.59.0)\n",
|
46 |
+
"Requirement already satisfied: pydantic>=2.5.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.8.2)\n",
|
47 |
+
"Requirement already satisfied: ibm-cloud-sdk-core<4.0.0,>=3.22.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from ibm-platform-services>=0.22.6->qiskit-ibm-runtime) (3.22.0)\n",
|
48 |
+
"Requirement already satisfied: annotated-types>=0.4.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit-ibm-runtime) (0.6.0)\n",
|
49 |
+
"Requirement already satisfied: pydantic-core==2.20.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit-ibm-runtime) (2.20.1)\n",
|
50 |
+
"Requirement already satisfied: six>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from python-dateutil>=2.8.0->qiskit) (1.16.0)\n",
|
51 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (3.3.2)\n",
|
52 |
+
"Requirement already satisfied: idna<4,>=2.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (3.7)\n",
|
53 |
+
"Requirement already satisfied: certifi>=2017.4.17 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (2024.8.30)\n",
|
54 |
+
"Requirement already satisfied: ntlm-auth>=1.0.2 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests-ntlm>=1.1.0->qiskit-ibm-runtime) (1.5.0)\n",
|
55 |
+
"Requirement already satisfied: cryptography>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests-ntlm>=1.1.0->qiskit-ibm-runtime) (43.0.3)\n",
|
56 |
+
"Requirement already satisfied: pbr>=2.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from stevedore>=3.0.0->qiskit) (6.1.0)\n",
|
57 |
+
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from sympy>=1.3->qiskit) (1.3.0)\n",
|
58 |
+
"Requirement already satisfied: cffi>=1.12 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from cryptography>=1.3->requests-ntlm>=1.1.0->qiskit-ibm-runtime) (1.17.1)\n",
|
59 |
+
"Requirement already satisfied: PyJWT<3.0.0,>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from ibm-cloud-sdk-core<4.0.0,>=3.22.0->ibm-platform-services>=0.22.6->qiskit-ibm-runtime) (2.9.0)\n",
|
60 |
+
"Requirement already satisfied: pycparser in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from cffi>=1.12->cryptography>=1.3->requests-ntlm>=1.1.0->qiskit-ibm-runtime) (2.22)\n"
|
61 |
+
]
|
62 |
+
}
|
63 |
+
],
|
64 |
+
"source": [
|
65 |
+
"!pip install -U qiskit qiskit-ibm-runtime"
|
66 |
+
]
|
67 |
+
},
|
68 |
+
{
|
69 |
+
"cell_type": "markdown",
|
70 |
+
"metadata": {},
|
71 |
+
"source": [
|
72 |
+
"## 2. Initialize Qiskit Runtime Service\n",
|
73 |
+
"\n",
|
74 |
+
"After installing the dependencies, import Qiskit Runtime Service and initialize it. This service allows you to access IBM Quantum backends and execute circuits with improved efficiency."
|
75 |
+
]
|
76 |
+
},
|
77 |
+
{
|
78 |
+
"cell_type": "code",
|
79 |
+
"execution_count": 2,
|
80 |
+
"metadata": {},
|
81 |
+
"outputs": [
|
82 |
+
{
|
83 |
+
"ename": "ImportError",
|
84 |
+
"evalue": "Qiskit is installed in an invalid environment that has both Qiskit >=1.0 and an earlier version. You should create a new virtual environment, and ensure that you do not mix dependencies between Qiskit <1.0 and >=1.0. Any packages that depend on 'qiskit-terra' are not compatible with Qiskit 1.0 and will need to be updated. Qiskit unfortunately cannot enforce this requirement during environment resolution. See https://qisk.it/packaging-1-0 for more detail.",
|
85 |
+
"output_type": "error",
|
86 |
+
"traceback": [
|
87 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
88 |
+
"\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)",
|
89 |
+
"Cell \u001b[0;32mIn[2], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mqiskit_ibm_runtime\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m QiskitRuntimeService\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mos\u001b[39;00m\n\u001b[1;32m 4\u001b[0m os\u001b[38;5;241m.\u001b[39menviron[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mIBMQ_API_KEY\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m<REDACTED-IBMQ-API-KEY>\u001b[39m\u001b[38;5;124m'\u001b[39m\n",
|
90 |
+
"File \u001b[0;32m~/Git/AIMO/.conda/lib/python3.10/site-packages/qiskit_ibm_runtime/__init__.py:204\u001b[0m\n\u001b[1;32m 201\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mlogging\u001b[39;00m\n\u001b[1;32m 202\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mwarnings\u001b[39;00m\n\u001b[0;32m--> 204\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mqiskit_runtime_service\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m QiskitRuntimeService\n\u001b[1;32m 205\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mibm_backend\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m IBMBackend\n\u001b[1;32m 206\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mruntime_job\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m RuntimeJob\n",
|
91 |
+
"File \u001b[0;32m~/Git/AIMO/.conda/lib/python3.10/site-packages/qiskit_ibm_runtime/qiskit_runtime_service.py:23\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mcollections\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m OrderedDict\n\u001b[1;32m 21\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtyping\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Dict, Callable, Optional, Union, List, Any, Type, Sequence\n\u001b[0;32m---> 23\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mproviders\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mbackend\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m BackendV2 \u001b[38;5;28;01mas\u001b[39;00m Backend\n\u001b[1;32m 24\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mproviders\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mexceptions\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m QiskitBackendNotFoundError\n\u001b[1;32m 25\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mproviders\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mproviderutils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m filter_backends\n",
|
92 |
+
"File \u001b[0;32m~/Git/AIMO/.conda/lib/python3.10/site-packages/qiskit/__init__.py:38\u001b[0m\n\u001b[1;32m 36\u001b[0m _suppress_error \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39menviron\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mQISKIT_SUPPRESS_1_0_IMPORT_ERROR\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mFalse\u001b[39;00m) \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m1\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 37\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mint\u001b[39m(_major) \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m _suppress_error:\n\u001b[0;32m---> 38\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m(\n\u001b[1;32m 39\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mQiskit is installed in an invalid environment that has both Qiskit >=1.0\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 40\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m and an earlier version.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 41\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m You should create a new virtual environment, and ensure that you do not mix\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 42\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m dependencies between Qiskit <1.0 and >=1.0.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 43\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m Any packages that depend on \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mqiskit-terra\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m are not compatible with Qiskit 1.0 and\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 44\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m will need to be updated.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 45\u001b[0m 
\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m Qiskit unfortunately cannot enforce this requirement during environment resolution.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 46\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m See https://qisk.it/packaging-1-0 for more detail.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 47\u001b[0m )\n\u001b[1;32m 49\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m_accelerate\u001b[39;00m\n\u001b[1;32m 50\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m_numpy_compat\u001b[39;00m\n",
|
93 |
+
"\u001b[0;31mImportError\u001b[0m: Qiskit is installed in an invalid environment that has both Qiskit >=1.0 and an earlier version. You should create a new virtual environment, and ensure that you do not mix dependencies between Qiskit <1.0 and >=1.0. Any packages that depend on 'qiskit-terra' are not compatible with Qiskit 1.0 and will need to be updated. Qiskit unfortunately cannot enforce this requirement during environment resolution. See https://qisk.it/packaging-1-0 for more detail."
|
94 |
+
]
|
95 |
+
}
|
96 |
+
],
|
97 |
+
"source": [
|
98 |
+
"from qiskit_ibm_runtime import QiskitRuntimeService\n",
|
99 |
+
"\n",
|
100 |
+
"import os\n",
|
101 |
+
"os.environ['IBMQ_API_KEY'] = '<REDACTED-IBMQ-API-KEY>'  # SECURITY: real key was committed here in plaintext — rotate it in the IBM Quantum dashboard and load it from the environment or a .env file instead"
|
102 |
+
]
|
103 |
+
},
|
104 |
+
{
|
105 |
+
"cell_type": "code",
|
106 |
+
"execution_count": 4,
|
107 |
+
"id": "10d152b8",
|
108 |
+
"metadata": {},
|
109 |
+
"outputs": [],
|
110 |
+
"source": [
|
111 |
+
"import os\n",
|
112 |
+
"from qiskit_ibm_runtime import QiskitRuntimeService\n",
|
113 |
+
" \n",
|
114 |
+
"# Retrieve API key from environment variable\n",
|
115 |
+
"api_key = os.getenv('IBMQ_API_KEY')\n",
|
116 |
+
"\n",
|
117 |
+
"if not api_key:\n",
|
118 |
+
" raise EnvironmentError(\"IBMQ_API_KEY environment variable not set.\")\n",
|
119 |
+
" \n",
|
120 |
+
"# Initialize the Qiskit Runtime service using the API key\n",
|
121 |
+
"service = QiskitRuntimeService(channel=\"ibm_quantum\", token=api_key)"
|
122 |
+
]
|
123 |
+
},
|
124 |
+
{
|
125 |
+
"cell_type": "markdown",
|
126 |
+
"metadata": {},
|
127 |
+
"source": [
|
128 |
+
"## 3. Define the Quantum Circuit\n",
|
129 |
+
"\n",
|
130 |
+
"This section defines a quantum circuit based on our previous discussions. This circuit uses a few gates on four qubits. Adjust the `theta` values as needed to test different configurations."
|
131 |
+
]
|
132 |
+
},
|
133 |
+
{
|
134 |
+
"cell_type": "code",
|
135 |
+
"execution_count": 6,
|
136 |
+
"metadata": {},
|
137 |
+
"outputs": [],
|
138 |
+
"source": [
|
139 |
+
"from qiskit import QuantumCircuit\n",
|
140 |
+
"\n",
|
141 |
+
"# Define a simple quantum circuit\n",
|
142 |
+
"def qiskit_circuit(theta):\n",
|
143 |
+
" qc = QuantumCircuit(4)\n",
|
144 |
+
" qc.rx(theta[0], 0)\n",
|
145 |
+
" qc.ry(theta[1], 1)\n",
|
146 |
+
" qc.cx(0, 1)\n",
|
147 |
+
" qc.ry(theta[2], 2)\n",
|
148 |
+
" qc.cx(1, 2)\n",
|
149 |
+
" qc.rx(theta[3], 3)\n",
|
150 |
+
" qc.measure_all()\n",
|
151 |
+
" return qc\n",
|
152 |
+
"\n",
|
153 |
+
"# Define theta values and create the circuit\n",
|
154 |
+
"theta_values = [0.3, 0.7, 1.2, 0.5]\n",
|
155 |
+
"qc = qiskit_circuit(theta_values)"
|
156 |
+
]
|
157 |
+
},
|
158 |
+
{
|
159 |
+
"cell_type": "markdown",
|
160 |
+
"metadata": {},
|
161 |
+
"source": [
|
162 |
+
"## 4. Run the Circuit on IBM Quantum Backend with Qiskit Runtime\n",
|
163 |
+
"\n",
|
164 |
+
"Use the `Sampler` primitive to run the circuit on a suitable IBM Quantum backend. The `Sampler` manages the execution efficiently, and results are returned directly."
|
165 |
+
]
|
166 |
+
},
|
167 |
+
{
|
168 |
+
"cell_type": "code",
|
169 |
+
"execution_count": 12,
|
170 |
+
"metadata": {},
|
171 |
+
"outputs": [
|
172 |
+
{
|
173 |
+
"name": "stdout",
|
174 |
+
"output_type": "stream",
|
175 |
+
"text": [
|
176 |
+
"Available backends: [<IBMBackend('ibm_brisbane')>, <IBMBackend('ibm_kyiv')>, <IBMBackend('ibm_sherbrooke')>]\n"
|
177 |
+
]
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"ename": "AttributeError",
|
181 |
+
"evalue": "__enter__",
|
182 |
+
"output_type": "error",
|
183 |
+
"traceback": [
|
184 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
185 |
+
"\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)",
|
186 |
+
"Cell \u001b[0;32mIn[12], line 34\u001b[0m\n\u001b[1;32m 31\u001b[0m qc \u001b[38;5;241m=\u001b[39m qiskit_circuit(theta_values)\n\u001b[1;32m 33\u001b[0m \u001b[38;5;66;03m# Run the sampler using Qiskit Runtime\u001b[39;00m\n\u001b[0;32m---> 34\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m service \u001b[38;5;28;01mas\u001b[39;00m runtime_service:\n\u001b[1;32m 35\u001b[0m \u001b[38;5;66;03m# Initialize the sampler\u001b[39;00m\n\u001b[1;32m 36\u001b[0m sampler \u001b[38;5;241m=\u001b[39m Sampler()\n\u001b[1;32m 38\u001b[0m \u001b[38;5;66;03m# Execute the circuit using the sampler\u001b[39;00m\n",
|
187 |
+
"\u001b[0;31mAttributeError\u001b[0m: __enter__"
|
188 |
+
]
|
189 |
+
}
|
190 |
+
],
|
191 |
+
"source": [
|
192 |
+
"from qiskit import QuantumCircuit\n",
|
193 |
+
"from qiskit_ibm_runtime import QiskitRuntimeService, Sampler\n",
|
194 |
+
"import os\n",
|
195 |
+
"\n",
|
196 |
+
"# Retrieve API key from environment variable\n",
|
197 |
+
"api_key = os.getenv('IBMQ_API_KEY')\n",
|
198 |
+
"if not api_key:\n",
|
199 |
+
" raise EnvironmentError(\"IBMQ_API_KEY environment variable not set.\")\n",
|
200 |
+
"\n",
|
201 |
+
"# Initialize the Qiskit Runtime service using the API key\n",
|
202 |
+
"service = QiskitRuntimeService(channel=\"ibm_quantum\", token=api_key)\n",
|
203 |
+
"\n",
|
204 |
+
"# List available backends to find a suitable one\n",
|
205 |
+
"available_backends = service.backends()\n",
|
206 |
+
"print(\"Available backends:\", available_backends)\n",
|
207 |
+
"\n",
|
208 |
+
"# Define a simple quantum circuit\n",
|
209 |
+
"def qiskit_circuit(theta):\n",
|
210 |
+
" qc = QuantumCircuit(4)\n",
|
211 |
+
" qc.rx(theta[0], 0)\n",
|
212 |
+
" qc.ry(theta[1], 1)\n",
|
213 |
+
" qc.cx(0, 1)\n",
|
214 |
+
" qc.ry(theta[2], 2)\n",
|
215 |
+
" qc.cx(1, 2)\n",
|
216 |
+
" qc.rx(theta[3], 3)\n",
|
217 |
+
" qc.measure_all()\n",
|
218 |
+
" return qc\n",
|
219 |
+
"\n",
|
220 |
+
"# Define theta values and create the circuit\n",
|
221 |
+
"theta_values = [0.3, 0.7, 1.2, 0.5]\n",
|
222 |
+
"qc = qiskit_circuit(theta_values)\n",
|
223 |
+
"\n",
|
224 |
+
"# Run the sampler using Qiskit Runtime\n",
|
225 |
+
"with service as runtime_service:\n",
|
226 |
+
" # Initialize the sampler\n",
|
227 |
+
" sampler = Sampler()\n",
|
228 |
+
"\n",
|
229 |
+
" # Execute the circuit using the sampler\n",
|
230 |
+
" job = sampler.run(circuits=qc, shots=1024)\n",
|
231 |
+
" result = job.result()\n",
|
232 |
+
" counts = result.get_counts(qc)\n",
|
233 |
+
" print(\"Results using Qiskit Runtime Sampler:\", counts)"
|
234 |
+
]
|
235 |
+
},
|
236 |
+
{
|
237 |
+
"cell_type": "code",
|
238 |
+
"execution_count": 18,
|
239 |
+
"id": "b2140276",
|
240 |
+
"metadata": {},
|
241 |
+
"outputs": [
|
242 |
+
{
|
243 |
+
"name": "stdout",
|
244 |
+
"output_type": "stream",
|
245 |
+
"text": [
|
246 |
+
"Requirement already satisfied: qiskit in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (1.2.4)\n",
|
247 |
+
"Requirement already satisfied: qiskit-ibm-runtime in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (0.32.0)\n",
|
248 |
+
"Requirement already satisfied: matplotlib in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (3.9.2)\n",
|
249 |
+
"Collecting python-dotenv\n",
|
250 |
+
" Downloading python_dotenv-1.0.1-py3-none-any.whl.metadata (23 kB)\n",
|
251 |
+
"Requirement already satisfied: rustworkx>=0.15.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.15.1)\n",
|
252 |
+
"Requirement already satisfied: numpy<3,>=1.17 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.23.5)\n",
|
253 |
+
"Requirement already satisfied: scipy>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.13.1)\n",
|
254 |
+
"Requirement already satisfied: sympy>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.13.1)\n",
|
255 |
+
"Requirement already satisfied: dill>=0.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.3.9)\n",
|
256 |
+
"Requirement already satisfied: python-dateutil>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (2.9.0)\n",
|
257 |
+
"Requirement already satisfied: stevedore>=3.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (5.3.0)\n",
|
258 |
+
"Requirement already satisfied: typing-extensions in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (4.12.2)\n",
|
259 |
+
"Requirement already satisfied: symengine<0.14,>=0.11 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.13.0)\n",
|
260 |
+
"Requirement already satisfied: requests>=2.19 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.32.3)\n",
|
261 |
+
"Requirement already satisfied: requests-ntlm>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (1.1.0)\n",
|
262 |
+
"Requirement already satisfied: urllib3>=1.21.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.2.3)\n",
|
263 |
+
"Requirement already satisfied: websocket-client>=1.5.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (1.8.0)\n",
|
264 |
+
"Requirement already satisfied: ibm-platform-services>=0.22.6 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (0.59.0)\n",
|
265 |
+
"Requirement already satisfied: pydantic>=2.5.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.8.2)\n",
|
266 |
+
"Requirement already satisfied: contourpy>=1.0.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from matplotlib) (1.3.0)\n",
|
267 |
+
"Requirement already satisfied: cycler>=0.10 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from matplotlib) (0.12.1)\n",
|
268 |
+
"Requirement already satisfied: fonttools>=4.22.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from matplotlib) (4.54.1)\n",
|
269 |
+
"Requirement already satisfied: kiwisolver>=1.3.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from matplotlib) (1.4.7)\n",
|
270 |
+
"Requirement already satisfied: packaging>=20.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from matplotlib) (24.1)\n",
|
271 |
+
"Requirement already satisfied: pillow>=8 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from matplotlib) (11.0.0)\n",
|
272 |
+
"Requirement already satisfied: pyparsing>=2.3.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from matplotlib) (3.2.0)\n",
|
273 |
+
"Requirement already satisfied: ibm-cloud-sdk-core<4.0.0,>=3.22.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from ibm-platform-services>=0.22.6->qiskit-ibm-runtime) (3.22.0)\n",
|
274 |
+
"Requirement already satisfied: annotated-types>=0.4.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit-ibm-runtime) (0.6.0)\n",
|
275 |
+
"Requirement already satisfied: pydantic-core==2.20.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit-ibm-runtime) (2.20.1)\n",
|
276 |
+
"Requirement already satisfied: six>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from python-dateutil>=2.8.0->qiskit) (1.16.0)\n",
|
277 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (3.3.2)\n",
|
278 |
+
"Requirement already satisfied: idna<4,>=2.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (3.7)\n",
|
279 |
+
"Requirement already satisfied: certifi>=2017.4.17 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (2024.8.30)\n",
|
280 |
+
"Requirement already satisfied: ntlm-auth>=1.0.2 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests-ntlm>=1.1.0->qiskit-ibm-runtime) (1.5.0)\n",
|
281 |
+
"Requirement already satisfied: cryptography>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests-ntlm>=1.1.0->qiskit-ibm-runtime) (43.0.3)\n",
|
282 |
+
"Requirement already satisfied: pbr>=2.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from stevedore>=3.0.0->qiskit) (6.1.0)\n",
|
283 |
+
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from sympy>=1.3->qiskit) (1.3.0)\n",
|
284 |
+
"Requirement already satisfied: cffi>=1.12 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from cryptography>=1.3->requests-ntlm>=1.1.0->qiskit-ibm-runtime) (1.17.1)\n",
|
285 |
+
"Requirement already satisfied: PyJWT<3.0.0,>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from ibm-cloud-sdk-core<4.0.0,>=3.22.0->ibm-platform-services>=0.22.6->qiskit-ibm-runtime) (2.9.0)\n",
|
286 |
+
"Requirement already satisfied: pycparser in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from cffi>=1.12->cryptography>=1.3->requests-ntlm>=1.1.0->qiskit-ibm-runtime) (2.22)\n",
|
287 |
+
"Downloading python_dotenv-1.0.1-py3-none-any.whl (19 kB)\n",
|
288 |
+
"Installing collected packages: python-dotenv\n",
|
289 |
+
"Successfully installed python-dotenv-1.0.1\n"
|
290 |
+
]
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"source": [
|
294 |
+
"!pip install qiskit qiskit-ibm-runtime matplotlib python-dotenv"
|
295 |
+
]
|
296 |
+
},
|
297 |
+
{
|
298 |
+
"cell_type": "code",
|
299 |
+
"execution_count": 4,
|
300 |
+
"id": "d5b83a7a",
|
301 |
+
"metadata": {},
|
302 |
+
"outputs": [
|
303 |
+
{
|
304 |
+
"name": "stdout",
|
305 |
+
"output_type": "stream",
|
306 |
+
"text": [
|
307 |
+
"Found existing installation: qiskit 1.2.4\n",
|
308 |
+
"Uninstalling qiskit-1.2.4:\n",
|
309 |
+
" Successfully uninstalled qiskit-1.2.4\n",
|
310 |
+
"Found existing installation: qiskit-ibm-runtime 0.32.0\n",
|
311 |
+
"Uninstalling qiskit-ibm-runtime-0.32.0:\n",
|
312 |
+
" Successfully uninstalled qiskit-ibm-runtime-0.32.0\n",
|
313 |
+
"Found existing installation: qiskit-terra 0.46.3\n",
|
314 |
+
"Uninstalling qiskit-terra-0.46.3:\n",
|
315 |
+
" Successfully uninstalled qiskit-terra-0.46.3\n",
|
316 |
+
"\u001b[33mWARNING: Skipping qiskit-aer as it is not installed.\u001b[0m\u001b[33m\n",
|
317 |
+
"\u001b[0m\u001b[33mWARNING: Skipping qiskit-ignis as it is not installed.\u001b[0m\u001b[33m\n",
|
318 |
+
"\u001b[0mFound existing installation: qiskit-ibmq-provider 0.20.2\n",
|
319 |
+
"Uninstalling qiskit-ibmq-provider-0.20.2:\n",
|
320 |
+
" Successfully uninstalled qiskit-ibmq-provider-0.20.2\n",
|
321 |
+
"Collecting qiskit\n",
|
322 |
+
" Using cached qiskit-1.2.4-cp38-abi3-macosx_11_0_arm64.whl.metadata (12 kB)\n",
|
323 |
+
"Requirement already satisfied: rustworkx>=0.15.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.15.1)\n",
|
324 |
+
"Requirement already satisfied: numpy<3,>=1.17 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.23.5)\n",
|
325 |
+
"Requirement already satisfied: scipy>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.13.1)\n",
|
326 |
+
"Requirement already satisfied: sympy>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.13.1)\n",
|
327 |
+
"Requirement already satisfied: dill>=0.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.3.9)\n",
|
328 |
+
"Requirement already satisfied: python-dateutil>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (2.9.0)\n",
|
329 |
+
"Requirement already satisfied: stevedore>=3.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (5.3.0)\n",
|
330 |
+
"Requirement already satisfied: typing-extensions in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (4.12.2)\n",
|
331 |
+
"Requirement already satisfied: symengine<0.14,>=0.11 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.13.0)\n",
|
332 |
+
"Requirement already satisfied: six>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from python-dateutil>=2.8.0->qiskit) (1.16.0)\n",
|
333 |
+
"Requirement already satisfied: pbr>=2.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from stevedore>=3.0.0->qiskit) (6.1.0)\n",
|
334 |
+
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from sympy>=1.3->qiskit) (1.3.0)\n",
|
335 |
+
"Using cached qiskit-1.2.4-cp38-abi3-macosx_11_0_arm64.whl (4.5 MB)\n",
|
336 |
+
"Installing collected packages: qiskit\n",
|
337 |
+
"Successfully installed qiskit-1.2.4\n"
|
338 |
+
]
|
339 |
+
}
|
340 |
+
],
|
341 |
+
"source": [
|
342 |
+
"!pip uninstall qiskit qiskit-ibm-runtime qiskit-terra qiskit-aer qiskit-ignis qiskit-ibmq-provider -y\n",
|
343 |
+
"!pip install qiskit"
|
344 |
+
]
|
345 |
+
},
|
346 |
+
{
|
347 |
+
"cell_type": "code",
|
348 |
+
"execution_count": 3,
|
349 |
+
"id": "2dfabb36",
|
350 |
+
"metadata": {},
|
351 |
+
"outputs": [
|
352 |
+
{
|
353 |
+
"name": "stdout",
|
354 |
+
"output_type": "stream",
|
355 |
+
"text": [
|
356 |
+
"Requirement already satisfied: qiskit_ibm_runtime in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (0.32.0)\n",
|
357 |
+
"Requirement already satisfied: requests>=2.19 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (2.32.3)\n",
|
358 |
+
"Requirement already satisfied: requests-ntlm>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (1.1.0)\n",
|
359 |
+
"Requirement already satisfied: numpy>=1.13 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (1.23.5)\n",
|
360 |
+
"Requirement already satisfied: urllib3>=1.21.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (2.2.3)\n",
|
361 |
+
"Requirement already satisfied: python-dateutil>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (2.9.0)\n",
|
362 |
+
"Requirement already satisfied: websocket-client>=1.5.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (1.8.0)\n",
|
363 |
+
"Requirement already satisfied: ibm-platform-services>=0.22.6 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (0.59.0)\n",
|
364 |
+
"Requirement already satisfied: pydantic>=2.5.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (2.8.2)\n",
|
365 |
+
"Requirement already satisfied: qiskit>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit_ibm_runtime) (1.2.4)\n",
|
366 |
+
"Requirement already satisfied: ibm-cloud-sdk-core<4.0.0,>=3.22.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from ibm-platform-services>=0.22.6->qiskit_ibm_runtime) (3.22.0)\n",
|
367 |
+
"Requirement already satisfied: annotated-types>=0.4.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit_ibm_runtime) (0.6.0)\n",
|
368 |
+
"Requirement already satisfied: pydantic-core==2.20.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit_ibm_runtime) (2.20.1)\n",
|
369 |
+
"Requirement already satisfied: typing-extensions>=4.6.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit_ibm_runtime) (4.12.2)\n",
|
370 |
+
"Requirement already satisfied: six>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from python-dateutil>=2.8.0->qiskit_ibm_runtime) (1.16.0)\n",
|
371 |
+
"Requirement already satisfied: rustworkx>=0.15.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit>=1.1.0->qiskit_ibm_runtime) (0.15.1)\n",
|
372 |
+
"Requirement already satisfied: scipy>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit>=1.1.0->qiskit_ibm_runtime) (1.13.1)\n",
|
373 |
+
"Requirement already satisfied: sympy>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit>=1.1.0->qiskit_ibm_runtime) (1.13.1)\n",
|
374 |
+
"Requirement already satisfied: dill>=0.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit>=1.1.0->qiskit_ibm_runtime) (0.3.9)\n",
|
375 |
+
"Requirement already satisfied: stevedore>=3.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit>=1.1.0->qiskit_ibm_runtime) (5.3.0)\n",
|
376 |
+
"Requirement already satisfied: symengine<0.14,>=0.11 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit>=1.1.0->qiskit_ibm_runtime) (0.13.0)\n",
|
377 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit_ibm_runtime) (3.3.2)\n",
|
378 |
+
"Requirement already satisfied: idna<4,>=2.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit_ibm_runtime) (3.7)\n",
|
379 |
+
"Requirement already satisfied: certifi>=2017.4.17 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit_ibm_runtime) (2024.8.30)\n",
|
380 |
+
"Requirement already satisfied: ntlm-auth>=1.0.2 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests-ntlm>=1.1.0->qiskit_ibm_runtime) (1.5.0)\n",
|
381 |
+
"Requirement already satisfied: cryptography>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests-ntlm>=1.1.0->qiskit_ibm_runtime) (43.0.3)\n",
|
382 |
+
"Requirement already satisfied: cffi>=1.12 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from cryptography>=1.3->requests-ntlm>=1.1.0->qiskit_ibm_runtime) (1.17.1)\n",
|
383 |
+
"Requirement already satisfied: PyJWT<3.0.0,>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from ibm-cloud-sdk-core<4.0.0,>=3.22.0->ibm-platform-services>=0.22.6->qiskit_ibm_runtime) (2.9.0)\n",
|
384 |
+
"Requirement already satisfied: pbr>=2.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from stevedore>=3.0.0->qiskit>=1.1.0->qiskit_ibm_runtime) (6.1.0)\n",
|
385 |
+
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from sympy>=1.3->qiskit>=1.1.0->qiskit_ibm_runtime) (1.3.0)\n",
|
386 |
+
"Requirement already satisfied: pycparser in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from cffi>=1.12->cryptography>=1.3->requests-ntlm>=1.1.0->qiskit_ibm_runtime) (2.22)\n"
|
387 |
+
]
|
388 |
+
}
|
389 |
+
],
|
390 |
+
"source": [
|
391 |
+
"\n",
|
392 |
+
"!pip install qiskit_ibm_runtime"
|
393 |
+
]
|
394 |
+
},
|
395 |
+
{
|
396 |
+
"cell_type": "code",
|
397 |
+
"execution_count": 20,
|
398 |
+
"id": "e201d494",
|
399 |
+
"metadata": {},
|
400 |
+
"outputs": [
|
401 |
+
{
|
402 |
+
"name": "stdout",
|
403 |
+
"output_type": "stream",
|
404 |
+
"text": [
|
405 |
+
"1.2.4\n"
|
406 |
+
]
|
407 |
+
}
|
408 |
+
],
|
409 |
+
"source": [
|
410 |
+
"import qiskit\n",
|
411 |
+
"print(qiskit.__version__)"
|
412 |
+
]
|
413 |
+
},
|
414 |
+
{
|
415 |
+
"cell_type": "code",
|
416 |
+
"execution_count": 9,
|
417 |
+
"id": "a01acdd4",
|
418 |
+
"metadata": {},
|
419 |
+
"outputs": [
|
420 |
+
{
|
421 |
+
"name": "stdout",
|
422 |
+
"output_type": "stream",
|
423 |
+
"text": [
|
424 |
+
"Requirement already satisfied: qiskit in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (1.2.4)\n",
|
425 |
+
"Collecting qiskit-ibm-runtime\n",
|
426 |
+
" Using cached qiskit_ibm_runtime-0.32.0-py3-none-any.whl.metadata (19 kB)\n",
|
427 |
+
"Requirement already satisfied: rustworkx>=0.15.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.15.1)\n",
|
428 |
+
"Requirement already satisfied: numpy<3,>=1.17 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.23.5)\n",
|
429 |
+
"Requirement already satisfied: scipy>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.13.1)\n",
|
430 |
+
"Requirement already satisfied: sympy>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (1.13.1)\n",
|
431 |
+
"Requirement already satisfied: dill>=0.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.3.9)\n",
|
432 |
+
"Requirement already satisfied: python-dateutil>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (2.9.0)\n",
|
433 |
+
"Requirement already satisfied: stevedore>=3.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (5.3.0)\n",
|
434 |
+
"Requirement already satisfied: typing-extensions in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (4.12.2)\n",
|
435 |
+
"Requirement already satisfied: symengine<0.14,>=0.11 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit) (0.13.0)\n",
|
436 |
+
"Requirement already satisfied: requests>=2.19 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.32.3)\n",
|
437 |
+
"Requirement already satisfied: requests-ntlm>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (1.1.0)\n",
|
438 |
+
"Requirement already satisfied: urllib3>=1.21.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.2.3)\n",
|
439 |
+
"Requirement already satisfied: websocket-client>=1.5.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (1.8.0)\n",
|
440 |
+
"Requirement already satisfied: ibm-platform-services>=0.22.6 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (0.59.0)\n",
|
441 |
+
"Requirement already satisfied: pydantic>=2.5.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from qiskit-ibm-runtime) (2.8.2)\n",
|
442 |
+
"Requirement already satisfied: ibm-cloud-sdk-core<4.0.0,>=3.22.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from ibm-platform-services>=0.22.6->qiskit-ibm-runtime) (3.22.0)\n",
|
443 |
+
"Requirement already satisfied: annotated-types>=0.4.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit-ibm-runtime) (0.6.0)\n",
|
444 |
+
"Requirement already satisfied: pydantic-core==2.20.1 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from pydantic>=2.5.0->qiskit-ibm-runtime) (2.20.1)\n",
|
445 |
+
"Requirement already satisfied: six>=1.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from python-dateutil>=2.8.0->qiskit) (1.16.0)\n",
|
446 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (3.3.2)\n",
|
447 |
+
"Requirement already satisfied: idna<4,>=2.5 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (3.7)\n",
|
448 |
+
"Requirement already satisfied: certifi>=2017.4.17 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests>=2.19->qiskit-ibm-runtime) (2024.8.30)\n",
|
449 |
+
"Requirement already satisfied: ntlm-auth>=1.0.2 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests-ntlm>=1.1.0->qiskit-ibm-runtime) (1.5.0)\n",
|
450 |
+
"Requirement already satisfied: cryptography>=1.3 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from requests-ntlm>=1.1.0->qiskit-ibm-runtime) (43.0.3)\n",
|
451 |
+
"Requirement already satisfied: pbr>=2.0.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from stevedore>=3.0.0->qiskit) (6.1.0)\n",
|
452 |
+
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from sympy>=1.3->qiskit) (1.3.0)\n",
|
453 |
+
"Requirement already satisfied: cffi>=1.12 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from cryptography>=1.3->requests-ntlm>=1.1.0->qiskit-ibm-runtime) (1.17.1)\n",
|
454 |
+
"Requirement already satisfied: PyJWT<3.0.0,>=2.8.0 in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from ibm-cloud-sdk-core<4.0.0,>=3.22.0->ibm-platform-services>=0.22.6->qiskit-ibm-runtime) (2.9.0)\n",
|
455 |
+
"Requirement already satisfied: pycparser in /Users/callum/Git/AIMO/.conda/lib/python3.10/site-packages (from cffi>=1.12->cryptography>=1.3->requests-ntlm>=1.1.0->qiskit-ibm-runtime) (2.22)\n",
|
456 |
+
"Using cached qiskit_ibm_runtime-0.32.0-py3-none-any.whl (3.0 MB)\n",
|
457 |
+
"Installing collected packages: qiskit-ibm-runtime\n",
|
458 |
+
"Successfully installed qiskit-ibm-runtime-0.32.0\n",
|
459 |
+
"Note: you may need to restart the kernel to use updated packages.\n"
|
460 |
+
]
|
461 |
+
}
|
462 |
+
],
|
463 |
+
"source": [
|
464 |
+
"pip install -U qiskit qiskit-ibm-runtime"
|
465 |
+
]
|
466 |
+
},
|
467 |
+
{
|
468 |
+
"cell_type": "code",
|
469 |
+
"execution_count": 5,
|
470 |
+
"id": "588c9add",
|
471 |
+
"metadata": {},
|
472 |
+
"outputs": [
|
473 |
+
{
|
474 |
+
"ename": "ImportError",
|
475 |
+
"evalue": "Qiskit is installed in an invalid environment that has both Qiskit >=1.0 and an earlier version. You should create a new virtual environment, and ensure that you do not mix dependencies between Qiskit <1.0 and >=1.0. Any packages that depend on 'qiskit-terra' are not compatible with Qiskit 1.0 and will need to be updated. Qiskit unfortunately cannot enforce this requirement during environment resolution. See https://qisk.it/packaging-1-0 for more detail.",
|
476 |
+
"output_type": "error",
|
477 |
+
"traceback": [
|
478 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
479 |
+
"\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)",
|
480 |
+
"Cell \u001b[0;32mIn[5], line 6\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mlogging\u001b[39;00m\n\u001b[1;32m 4\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtyping\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m List\n\u001b[0;32m----> 6\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m QuantumCircuit\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mqiskit_ibm_runtime\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m QiskitRuntimeService, Sampler, Options\n\u001b[1;32m 8\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mproviders\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mibmq\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m IBMQBackend\n",
|
481 |
+
"File \u001b[0;32m~/Git/AIMO/.conda/lib/python3.10/site-packages/qiskit/__init__.py:38\u001b[0m\n\u001b[1;32m 36\u001b[0m _suppress_error \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39menviron\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mQISKIT_SUPPRESS_1_0_IMPORT_ERROR\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mFalse\u001b[39;00m) \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m1\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 37\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mint\u001b[39m(_major) \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m _suppress_error:\n\u001b[0;32m---> 38\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m(\n\u001b[1;32m 39\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mQiskit is installed in an invalid environment that has both Qiskit >=1.0\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 40\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m and an earlier version.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 41\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m You should create a new virtual environment, and ensure that you do not mix\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 42\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m dependencies between Qiskit <1.0 and >=1.0.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 43\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m Any packages that depend on \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mqiskit-terra\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m are not compatible with Qiskit 1.0 and\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 44\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m will need to be updated.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 45\u001b[0m 
\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m Qiskit unfortunately cannot enforce this requirement during environment resolution.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 46\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m See https://qisk.it/packaging-1-0 for more detail.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 47\u001b[0m )\n\u001b[1;32m 49\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m_accelerate\u001b[39;00m\n\u001b[1;32m 50\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mqiskit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m_numpy_compat\u001b[39;00m\n",
|
482 |
+
"\u001b[0;31mImportError\u001b[0m: Qiskit is installed in an invalid environment that has both Qiskit >=1.0 and an earlier version. You should create a new virtual environment, and ensure that you do not mix dependencies between Qiskit <1.0 and >=1.0. Any packages that depend on 'qiskit-terra' are not compatible with Qiskit 1.0 and will need to be updated. Qiskit unfortunately cannot enforce this requirement during environment resolution. See https://qisk.it/packaging-1-0 for more detail."
|
483 |
+
]
|
484 |
+
}
|
485 |
+
],
|
486 |
+
"source": [
|
487 |
+
"import os\n",
|
488 |
+
"import sys\n",
|
489 |
+
"import logging\n",
|
490 |
+
"from typing import List\n",
|
491 |
+
"\n",
|
492 |
+
"from qiskit import QuantumCircuit\n",
|
493 |
+
"from qiskit_ibm_runtime import QiskitRuntimeService, Sampler, Options\n",
|
494 |
+
"from qiskit.providers.ibmq import IBMQBackend\n",
|
495 |
+
"from qiskit.visualization import plot_histogram\n",
|
496 |
+
"import matplotlib.pyplot as plt\n",
|
497 |
+
"\n",
|
498 |
+
"# Configure logging\n",
|
499 |
+
"logging.basicConfig(\n",
|
500 |
+
" level=logging.INFO,\n",
|
501 |
+
" format='%(asctime)s [%(levelname)s] %(message)s',\n",
|
502 |
+
" handlers=[\n",
|
503 |
+
" logging.StreamHandler(sys.stdout)\n",
|
504 |
+
" ]\n",
|
505 |
+
")\n",
|
506 |
+
"logger = logging.getLogger(__name__)\n",
|
507 |
+
"\n",
|
508 |
+
"def get_api_key() -> str:\n",
|
509 |
+
" \"\"\"Retrieve the IBMQ API key from environment variables.\"\"\"\n",
|
510 |
+
" api_key = os.getenv('IBMQ_API_KEY')\n",
|
511 |
+
" if not api_key:\n",
|
512 |
+
" logger.error(\"IBMQ_API_KEY environment variable not set.\")\n",
|
513 |
+
" raise EnvironmentError(\"IBMQ_API_KEY environment variable not set.\")\n",
|
514 |
+
" logger.info(\"API key retrieved successfully.\")\n",
|
515 |
+
" return api_key\n",
|
516 |
+
"\n",
|
517 |
+
"def initialize_service(api_key: str) -> QiskitRuntimeService:\n",
|
518 |
+
" \"\"\"Initialize the Qiskit Runtime service with the provided API key.\"\"\"\n",
|
519 |
+
" try:\n",
|
520 |
+
" service = QiskitRuntimeService(channel=\"ibm_quantum\", token=api_key)\n",
|
521 |
+
" logger.info(\"Qiskit Runtime Service initialized successfully.\")\n",
|
522 |
+
" return service\n",
|
523 |
+
" except Exception as e:\n",
|
524 |
+
" logger.error(f\"Failed to initialize Qiskit Runtime Service: {e}\")\n",
|
525 |
+
" raise\n",
|
526 |
+
"\n",
|
527 |
+
"def list_available_backends(service: QiskitRuntimeService) -> List[IBMQBackend]:\n",
|
528 |
+
" \"\"\"List available IBM Quantum backends.\"\"\"\n",
|
529 |
+
" try:\n",
|
530 |
+
" backends = service.backends()\n",
|
531 |
+
" logger.info(f\"Available backends: {[backend.name() for backend in backends]}\")\n",
|
532 |
+
" return backends\n",
|
533 |
+
" except Exception as e:\n",
|
534 |
+
" logger.error(f\"Failed to retrieve backends: {e}\")\n",
|
535 |
+
" raise\n",
|
536 |
+
"\n",
|
537 |
+
"def select_best_backend(backends: List[IBMQBackend]) -> IBMQBackend:\n",
|
538 |
+
" \"\"\"\n",
|
539 |
+
" Select the best available backend based on certain criteria.\n",
|
540 |
+
" For demonstration, we'll select the least busy backend.\n",
|
541 |
+
" \"\"\"\n",
|
542 |
+
" try:\n",
|
543 |
+
" backend = service.backend.ibmq_least_busy(backends)\n",
|
544 |
+
" logger.info(f\"Selected backend: {backend.name()}\")\n",
|
545 |
+
" return backend\n",
|
546 |
+
" except Exception as e:\n",
|
547 |
+
" logger.error(f\"Failed to select the best backend: {e}\")\n",
|
548 |
+
" raise\n",
|
549 |
+
"\n",
|
550 |
+
"def create_quantum_circuit(theta: List[float]) -> QuantumCircuit:\n",
|
551 |
+
" \"\"\"Define and return a simple quantum circuit based on theta values.\"\"\"\n",
|
552 |
+
" qc = QuantumCircuit(4)\n",
|
553 |
+
" qc.rx(theta[0], 0)\n",
|
554 |
+
" qc.ry(theta[1], 1)\n",
|
555 |
+
" qc.cx(0, 1)\n",
|
556 |
+
" qc.ry(theta[2], 2)\n",
|
557 |
+
" qc.cx(1, 2)\n",
|
558 |
+
" qc.rx(theta[3], 3)\n",
|
559 |
+
" qc.measure_all()\n",
|
560 |
+
" logger.debug(f\"Quantum Circuit created with theta values: {theta}\")\n",
|
561 |
+
" return qc\n",
|
562 |
+
"\n",
|
563 |
+
"def run_sampler(service: QiskitRuntimeService, backend: IBMQBackend, circuit: QuantumCircuit, shots: int = 1024) -> dict:\n",
|
564 |
+
" \"\"\"Initialize the sampler, execute the circuit, and return the counts.\"\"\"\n",
|
565 |
+
" try:\n",
|
566 |
+
" # Configure sampler options if needed\n",
|
567 |
+
" sampler_options = Options(shots=shots)\n",
|
568 |
+
" sampler_options.backend = backend\n",
|
569 |
+
"\n",
|
570 |
+
" sampler = Sampler(options=sampler_options)\n",
|
571 |
+
" logger.info(\"Sampler initialized successfully.\")\n",
|
572 |
+
"\n",
|
573 |
+
" # Execute the circuit using the sampler\n",
|
574 |
+
" job = sampler.run(circuits=circuit)\n",
|
575 |
+
" logger.info(\"Sampler job submitted.\")\n",
|
576 |
+
"\n",
|
577 |
+
" # Retrieve the results\n",
|
578 |
+
" result = job.result()\n",
|
579 |
+
" counts = result.get_counts(circuit)\n",
|
580 |
+
" logger.info(\"Sampler job completed successfully.\")\n",
|
581 |
+
" return counts\n",
|
582 |
+
" except Exception as e:\n",
|
583 |
+
" logger.error(f\"Failed to run sampler: {e}\")\n",
|
584 |
+
" raise\n",
|
585 |
+
"\n",
|
586 |
+
"def visualize_results(counts: dict):\n",
|
587 |
+
" \"\"\"Visualize the measurement results as a histogram.\"\"\"\n",
|
588 |
+
" try:\n",
|
589 |
+
" plot_histogram(counts)\n",
|
590 |
+
" plt.title(\"Quantum Circuit Measurement Results\")\n",
|
591 |
+
" plt.show()\n",
|
592 |
+
" logger.info(\"Results visualized successfully.\")\n",
|
593 |
+
" except Exception as e:\n",
|
594 |
+
" logger.error(f\"Failed to visualize results: {e}\")\n",
|
595 |
+
" raise\n",
|
596 |
+
"\n",
|
597 |
+
"def main():\n",
|
598 |
+
" \"\"\"Main function to execute the quantum sampling workflow.\"\"\"\n",
|
599 |
+
" try:\n",
|
600 |
+
" api_key = get_api_key()\n",
|
601 |
+
" service = initialize_service(api_key)\n",
|
602 |
+
" available_backends = list_available_backends(service)\n",
|
603 |
+
"\n",
|
604 |
+
" if not available_backends:\n",
|
605 |
+
" logger.error(\"No available backends found.\")\n",
|
606 |
+
" sys.exit(1)\n",
|
607 |
+
"\n",
|
608 |
+
" backend = select_best_backend(available_backends)\n",
|
609 |
+
"\n",
|
610 |
+
" # Define theta values\n",
|
611 |
+
" theta_values = [0.3, 0.7, 1.2, 0.5]\n",
|
612 |
+
" logger.info(f\"Theta values: {theta_values}\")\n",
|
613 |
+
"\n",
|
614 |
+
" # Create quantum circuit\n",
|
615 |
+
" qc = create_quantum_circuit(theta_values)\n",
|
616 |
+
"\n",
|
617 |
+
" # Run sampler\n",
|
618 |
+
" counts = run_sampler(service, backend, qc, shots=1024)\n",
|
619 |
+
"\n",
|
620 |
+
" # Print and visualize results\n",
|
621 |
+
" logger.info(f\"Measurement Results: {counts}\")\n",
|
622 |
+
" visualize_results(counts)\n",
|
623 |
+
"\n",
|
624 |
+
" except Exception as e:\n",
|
625 |
+
" logger.critical(f\"An unexpected error occurred: {e}\")\n",
|
626 |
+
" sys.exit(1)\n",
|
627 |
+
"\n",
|
628 |
+
"if __name__ == \"__main__\":\n",
|
629 |
+
" main()"
|
630 |
+
]
|
631 |
+
},
|
632 |
+
{
|
633 |
+
"cell_type": "markdown",
|
634 |
+
"metadata": {},
|
635 |
+
"source": [
|
636 |
+
"## 5. Benchmarking Execution Time\n",
|
637 |
+
"\n",
|
638 |
+
"This section includes code to benchmark the execution time of running the quantum circuit multiple times to evaluate the AGN's performance. We will measure and plot the time taken for each execution."
|
639 |
+
]
|
640 |
+
},
|
641 |
+
{
|
642 |
+
"cell_type": "code",
|
643 |
+
"execution_count": null,
|
644 |
+
"metadata": {},
|
645 |
+
"outputs": [],
|
646 |
+
"source": [
|
647 |
+
"import time\n",
|
648 |
+
"import matplotlib.pyplot as plt\n",
|
649 |
+
"\n",
|
650 |
+
"def benchmark_circuit(sampler, qc, iterations=100):\n",
|
651 |
+
" exec_times = []\n",
|
652 |
+
" for _ in range(iterations):\n",
|
653 |
+
" start_time = time.time()\n",
|
654 |
+
" job = sampler.run(circuits=qc, shots=1024)\n",
|
655 |
+
" result = job.result()\n",
|
656 |
+
" counts = result.get_counts(qc)\n",
|
657 |
+
" end_time = time.time()\n",
|
658 |
+
" exec_times.append(end_time - start_time)\n",
|
659 |
+
" return exec_times\n",
|
660 |
+
"\n",
|
661 |
+
"# Run the benchmark\n",
|
662 |
+
"with Sampler(session=service) as sampler:\n",
|
663 |
+
" exec_times = benchmark_circuit(sampler, qc)\n",
|
664 |
+
"\n",
|
665 |
+
"# Plot the execution times\n",
|
666 |
+
"plt.plot(exec_times, label=\"Execution Time per Iteration\")\n",
|
667 |
+
"plt.xlabel(\"Iteration\")\n",
|
668 |
+
"plt.ylabel(\"Execution Time (s)\")\n",
|
669 |
+
"plt.title(\"AGN Execution Time Benchmark\")\n",
|
670 |
+
"plt.legend()\n",
|
671 |
+
"plt.show()"
|
672 |
+
]
|
673 |
+
},
|
674 |
+
{
|
675 |
+
"cell_type": "markdown",
|
676 |
+
"metadata": {},
|
677 |
+
"source": [
|
678 |
+
"## 6. Troubleshooting\n",
|
679 |
+
"\n",
|
680 |
+
"If you encounter any issues, consider the following steps:\n",
|
681 |
+
"\n",
|
682 |
+
"- **Update Qiskit**: Ensure you have the latest version of Qiskit and Qiskit IBM Runtime installed.\n",
|
683 |
+
"- **Restart Kernel**: After updating, restart the notebook kernel to load the latest packages.\n",
|
684 |
+
"- **Check Available Backends**: Verify available backends with `service.backends()` if needed."
|
685 |
+
]
|
686 |
+
},
|
687 |
+
{
|
688 |
+
"cell_type": "code",
|
689 |
+
"execution_count": null,
|
690 |
+
"metadata": {},
|
691 |
+
"outputs": [],
|
692 |
+
"source": [
|
693 |
+
"# List available backends to troubleshoot issues\n",
|
694 |
+
"print(service.backends())"
|
695 |
+
]
|
696 |
+
},
|
697 |
+
{
|
698 |
+
"cell_type": "code",
|
699 |
+
"execution_count": 7,
|
700 |
+
"id": "45f34dd4",
|
701 |
+
"metadata": {},
|
702 |
+
"outputs": [
|
703 |
+
{
|
704 |
+
"ename": "AttributeError",
|
705 |
+
"evalue": "module 'torch' has no attribute 'zeros'",
|
706 |
+
"output_type": "error",
|
707 |
+
"traceback": [
|
708 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
709 |
+
"\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)",
|
710 |
+
"Cell \u001b[0;32mIn[7], line 5\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;66;03m# Initialize the state tensor on the GPU\u001b[39;00m\n\u001b[1;32m 4\u001b[0m num_qubits \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m4\u001b[39m\n\u001b[0;32m----> 5\u001b[0m state \u001b[38;5;241m=\u001b[39m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mzeros\u001b[49m(\u001b[38;5;241m2\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mnum_qubits, dtype\u001b[38;5;241m=\u001b[39mtorch\u001b[38;5;241m.\u001b[39mcomplex64)\u001b[38;5;241m.\u001b[39mto(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmps\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m 6\u001b[0m state[\u001b[38;5;241m0\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m \u001b[38;5;66;03m# Start in |0...0> state\u001b[39;00m\n\u001b[1;32m 8\u001b[0m \u001b[38;5;66;03m# Define rotation gates as tensor operations\u001b[39;00m\n",
|
711 |
+
"\u001b[0;31mAttributeError\u001b[0m: module 'torch' has no attribute 'zeros'"
|
712 |
+
]
|
713 |
+
}
|
714 |
+
],
|
715 |
+
"source": [
|
716 |
+
"import torch\n",
|
717 |
+
"\n",
|
718 |
+
"# Initialize the state tensor on the GPU\n",
|
719 |
+
"num_qubits = 4\n",
|
720 |
+
"state = torch.zeros(2**num_qubits, dtype=torch.complex64).to('mps')\n",
|
721 |
+
"state[0] = 1 # Start in |0...0> state\n",
|
722 |
+
"\n",
|
723 |
+
"# Define rotation gates as tensor operations\n",
|
724 |
+
"def rx(theta):\n",
|
725 |
+
" return torch.tensor([\n",
|
726 |
+
" [torch.cos(theta/2), -1j * torch.sin(theta/2)],\n",
|
727 |
+
" [-1j * torch.sin(theta/2), torch.cos(theta/2)]\n",
|
728 |
+
" ], dtype=torch.complex64).to('cuda')\n",
|
729 |
+
"\n",
|
730 |
+
"# Apply an RX rotation to the first qubit\n",
|
731 |
+
"def apply_single_qubit_gate(state, gate, target_qubit, num_qubits):\n",
|
732 |
+
" tensor_op = torch.eye(1 << num_qubits, dtype=torch.complex64).to('cuda')\n",
|
733 |
+
" indices = torch.arange(1 << num_qubits)\n",
|
734 |
+
" \n",
|
735 |
+
" # Apply gate only to the target qubit's indices\n",
|
736 |
+
" for i in range(2):\n",
|
737 |
+
" tensor_op[(indices >> target_qubit & 1) == i, (indices >> target_qubit & 1) == i] = gate[i, i]\n",
|
738 |
+
"\n",
|
739 |
+
" return tensor_op @ state\n",
|
740 |
+
"\n",
|
741 |
+
"# Define an AGN-inspired operation sequence\n",
|
742 |
+
"theta_values = [0.3, 0.7, 1.2, 0.5]\n",
|
743 |
+
"for i, theta in enumerate(theta_values):\n",
|
744 |
+
" gate = rx(theta)\n",
|
745 |
+
" state = apply_single_qubit_gate(state, gate, target_qubit=i, num_qubits=num_qubits)\n",
|
746 |
+
"\n",
|
747 |
+
"# Compute probabilities (similar to measurement probabilities in quantum computing)\n",
|
748 |
+
"probs = torch.abs(state)**2\n",
|
749 |
+
"print(probs)"
|
750 |
+
]
|
751 |
+
}
|
752 |
+
],
|
753 |
+
"metadata": {
|
754 |
+
"kernelspec": {
|
755 |
+
"display_name": "Python 3",
|
756 |
+
"language": "python",
|
757 |
+
"name": "python3"
|
758 |
+
},
|
759 |
+
"language_info": {
|
760 |
+
"codemirror_mode": {
|
761 |
+
"name": "ipython",
|
762 |
+
"version": 3
|
763 |
+
},
|
764 |
+
"file_extension": ".py",
|
765 |
+
"mimetype": "text/x-python",
|
766 |
+
"name": "python",
|
767 |
+
"nbconvert_exporter": "python",
|
768 |
+
"pygments_lexer": "ipython3",
|
769 |
+
"version": "3.10.13"
|
770 |
+
}
|
771 |
+
},
|
772 |
+
"nbformat": 4,
|
773 |
+
"nbformat_minor": 5
|
774 |
+
}
|
scripts/3.1_test_340b_traversal.py
ADDED
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
|
3 |
+
def print_tree(node, prefix="", visited=None):
    """Recursively print a tree view of a traversal hierarchy without duplicates.

    Args:
        node: dict with optional keys "node_id", "relationship" and
            "descendants" (a list of child dicts in the same shape).
        prefix: drawing prefix for the current line (managed internally
            during recursion; callers normally omit it).
        visited: set of node ids already printed, used to break cycles and
            avoid repeating shared sub-trees.
    """
    if visited is None:
        visited = set()

    node_id = node.get("node_id")
    if not node_id:
        print(f"{prefix}(unknown node)")
        return

    # Avoid printing duplicate nodes by checking if node has been visited
    if node_id in visited:
        print(f"{prefix}(already listed) {node_id}")
        return
    visited.add(node_id)

    relationship_label = f"({node.get('relationship', '')})" if node.get("relationship") else ""
    print(f"{prefix}{node_id} {relationship_label}")

    # BUGFIX: the previous version appended "├── "/"└── " onto the *whole*
    # accumulated prefix, so grandchildren rendered as "├── ├── x". Replace
    # the connector of the current line with a continuation segment
    # ("│   " when more siblings follow, "    " after the last one) before
    # appending the child's own connector.
    if prefix.endswith("├── "):
        continuation = prefix[:-4] + "│   "
    elif prefix.endswith("└── "):
        continuation = prefix[:-4] + "    "
    else:
        continuation = prefix

    children = node.get("descendants", [])
    for i, child in enumerate(children):
        connector = "├── " if i < len(children) - 1 else "└── "
        print_tree(child, continuation + connector, visited)
|
26 |
+
|
27 |
+
def inspect_and_print_relationships(node_id):
    """Fetch and display a node's relationships in both directions.

    Performs a downward traversal (descendants) followed by an upward
    traversal (ancestors) against the API, printing the raw response and a
    tree rendering for each.
    """
    for direction, heading in (("down", "Descendants"), ("up", "Ancestors")):
        resp = requests.get(f"{base_url}/traverse_node?node_id={node_id}&direction={direction}")
        hierarchy = resp.json().get("traversal_path", {})

        print(f"\nTraversal Response for {node_id} ({heading}):", hierarchy)  # Debugging line
        print(f"\nInspect Relationships for {node_id} ({heading}):")
        print_tree(hierarchy)
|
44 |
+
|
45 |
+
# Root URL of the local graph API server.
base_url = "http://localhost:5000"

# Step 1: ask the server to load the PHSA section 340B graph.
print("\n--- Testing Graph Loading ---")
load_payload = {"graph_file": "graphs/PHSA/phsa_sec_340b.json"}
load_response = requests.post(f"{base_url}/load_graph", json=load_payload)
print("Load Graph Response:", load_response.json())

# Step 2: walk the 340B Program node both up (ancestors) and down (descendants).
print("\n--- Testing Inspect Relationships for Node (340B Program) ---")
inspect_and_print_relationships("340B Program")
|
scripts/3.2_visualize_340b.py
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
import networkx as nx
|
3 |
+
import matplotlib.pyplot as plt
|
4 |
+
|
5 |
+
# API Base URL
|
6 |
+
base_url = "http://localhost:5000"
|
7 |
+
|
8 |
+
def fetch_relationships(node_id, direction="down"):
    """Return the traversal hierarchy for *node_id* from the API.

    direction is "down" for descendants or "up" for ancestors. Returns an
    empty dict when the response carries no "traversal_path".
    """
    url = f"{base_url}/traverse_node?node_id={node_id}&direction={direction}"
    payload = requests.get(url).json()
    return payload.get("traversal_path", {})
|
12 |
+
|
13 |
+
def build_graph_from_relationships(node_id):
    """Build a directed NetworkX graph centred on *node_id*.

    Fetches both the descendant ("down") and ancestor ("up") hierarchies
    from the API, then recursively merges them into a single DiGraph.
    """
    graph = nx.DiGraph()

    # Fetch both directions first, then fold each hierarchy into the graph.
    hierarchies = [fetch_relationships(node_id, direction=d) for d in ("down", "up")]
    for hierarchy in hierarchies:
        add_nodes_and_edges(graph, hierarchy)

    return graph
|
27 |
+
|
28 |
+
def add_nodes_and_edges(G, node, visited=None):
    """Recursively add nodes and edges from a traversal hierarchy to G.

    Args:
        G: a networkx.DiGraph (or compatible object with add_node/add_edge);
            mutated in place.
        node: dict with optional keys "node_id", "relationship",
            "descendants" (child dicts) and "ancestors" (parent dicts),
            as produced by the traversal API.
        visited: set of node ids already processed, used to break cycles.
    """
    if visited is None:
        visited = set()

    node_id = node.get("node_id")
    if not node_id or node_id in visited:
        return
    visited.add(node_id)

    # Add node to graph
    G.add_node(node_id, label=node_id)

    # Process child (descendant) relationships
    for child in node.get("descendants", []):
        child_id = child.get("node_id")
        if not child_id:
            # ROBUSTNESS: previously a malformed entry produced an edge to a
            # None node; skip entries that carry no id instead.
            continue
        relationship = child.get("relationship", "related_to")
        G.add_edge(node_id, child_id, label=relationship)
        add_nodes_and_edges(G, child, visited)  # Recursive call for descendants

    # Process parent (ancestor) relationships
    for ancestor in node.get("ancestors", []):
        ancestor_id = ancestor.get("node_id")
        if not ancestor_id:
            continue  # skip malformed ancestor entries (see note above in this block)
        relationship = ancestor.get("relationship", "related_to")
        G.add_edge(ancestor_id, node_id, label=relationship)
        add_nodes_and_edges(G, ancestor, visited)  # Recursive call for ancestors
|
54 |
+
|
55 |
+
def visualize_graph(G, title="Graph Structure and Relationships"):
    """Render *G* with matplotlib: labelled nodes, grey directed edges,
    and red relationship labels on each edge."""
    plt.figure(figsize=(12, 8))
    layout = nx.spring_layout(G)

    # Nodes first, then their text labels on top.
    nx.draw_networkx_nodes(G, layout, node_size=3000, node_color="skyblue", alpha=0.8)
    nx.draw_networkx_labels(G, layout, font_size=10, font_color="black")

    # Directed edges annotated with the relationship stored on each edge.
    nx.draw_networkx_edges(G, layout, edge_color="gray", arrows=True)
    labels_by_edge = {(src, dst): attrs["label"] for src, dst, attrs in G.edges(data=True)}
    nx.draw_networkx_edge_labels(G, layout, edge_labels=labels_by_edge, font_color="red")

    # Title and display options
    plt.title(title)
    plt.axis("off")
    plt.show()
74 |
+
# Step 1: ask the server to load the PHSA/340B graph before traversing it.
print("\n--- Loading Graph ---")
load_payload = {"graph_file": "graphs/PHSA/phsa_sec_340b.json"}
load_response = requests.post(f"{base_url}/load_graph", json=load_payload)
print("Load Graph Response:", load_response.json())

# Step 2: build the local NetworkX graph from traversal data and draw it.
print("\n--- Building Graph for Visualization ---")
G = build_graph_from_relationships("340B Program")
visualize_graph(G, title="340B Program - Inferred Contextual Relationships")
|
scripts/3.3_expanded_relationships.py
ADDED
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import networkx as nx
|
3 |
+
import matplotlib.pyplot as plt
|
4 |
+
|
5 |
+
# Define the path to your index.json file
|
6 |
+
index_file_path = "graphs/index.json"
|
7 |
+
|
8 |
+
# Load the data from index.json
|
9 |
+
def load_index_data(file_path):
    """Read *file_path* and return the parsed JSON index document."""
    with open(file_path, "r") as handle:
        return json.load(handle)
14 |
+
|
15 |
+
def build_graph(data):
    """Create a ``nx.DiGraph`` from the index's entities and relationships."""
    graph = nx.DiGraph()

    # One node per entity. "label" is extracted first so it is not passed
    # twice to add_node via the keyword expansion.
    for entity_id, entity_info in data["entities"].items():
        extra_attrs = {key: value for key, value in entity_info.items() if key != "label"}
        graph.add_node(entity_id, label=entity_info.get("label", entity_id), **extra_attrs)

    # One directed edge per relationship record.
    for rel in data["relationships"]:
        edge_label = rel["attributes"].get("relationship", "related_to")
        graph.add_edge(rel["source"], rel["target"], label=edge_label)

    return graph
33 |
+
|
34 |
+
# Visualize the graph using Matplotlib
|
35 |
+
def visualize_graph(G, title="340B Program - Inferred Contextual Relationships"):
    """Draw *G* with spring-layout nodes and red relationship labels."""
    layout = nx.spring_layout(G)  # Position nodes with a spring layout

    # Nodes and their labels.
    plt.figure(figsize=(15, 10))
    nx.draw_networkx_nodes(G, layout, node_size=3000, node_color="lightblue", alpha=0.7)
    nx.draw_networkx_labels(G, layout, font_size=10, font_color="black", font_weight="bold")

    # Curved directed edges annotated with the relationship attribute.
    nx.draw_networkx_edges(G, layout, arrowstyle="->", arrowsize=20, edge_color="gray", connectionstyle="arc3,rad=0.1")
    edge_text = {(u, v): attrs["label"] for u, v, attrs in G.edges(data=True)}
    nx.draw_networkx_edge_labels(G, layout, edge_labels=edge_text, font_color="red", font_size=9)

    # Title and display.
    plt.title(title)
    plt.axis("off")
    plt.show()
53 |
+
|
54 |
+
# Main execution
|
55 |
+
if __name__ == "__main__":
    # Parse the index, assemble the graph, then render it.
    index_data = load_index_data(index_file_path)
    visualize_graph(build_graph(index_data))
|
scripts/3.4_nodes.py
ADDED
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import networkx as nx
|
3 |
+
import matplotlib.pyplot as plt
|
4 |
+
import os
|
5 |
+
|
6 |
+
# Define the path to your index.json file
|
7 |
+
index_file_path = "graphs/index.json"
|
8 |
+
|
9 |
+
# Load the data from index.json
|
10 |
+
def load_index_data(file_path):
    """Parse the JSON index located at *file_path* and return it."""
    with open(file_path, "r") as fh:
        contents = json.load(fh)
    return contents
15 |
+
|
16 |
+
def load_entity_file(entity_info):
    """Load the entity's own JSON document, if one is referenced.

    Returns the parsed data, or ``None`` when no ``file_path`` is set,
    the file does not exist, or its contents are not valid JSON.
    """
    file_path = entity_info.get("file_path")

    # No file referenced at all: nothing to load, stay silent.
    if not file_path:
        return None

    # Referenced but missing on disk: report and bail out.
    if not os.path.exists(file_path):
        print(f"File not found: {file_path}")
        return None

    try:
        with open(file_path, "r") as fh:
            return json.load(fh)
    except json.JSONDecodeError as e:
        print(f"Error loading JSON file at {file_path}: {e}")
        return None
30 |
+
|
31 |
+
def build_graph(data):
    """Assemble a DiGraph from index entities, their linked entity files,
    and the index-level relationship list."""
    graph = nx.DiGraph()

    # Placeholder index entries that must not become graph nodes.
    excluded_nodes = {"patient_protection._tmp", "phsa_sec_340b", "medicade_tmp"}

    for entity_id, entity_info in data["entities"].items():
        if entity_id in excluded_nodes:
            continue
        graph.add_node(
            entity_id,
            label=entity_info.get("label", entity_id),
            domain=entity_info.get("inherits_from", "Default"),
        )

        # Pull in relationships declared inside the entity's own file, if any.
        entity_data = load_entity_file(entity_info)
        if isinstance(entity_data, dict):  # Only dict payloads carry relationships
            for rel in entity_data.get("relationships", []):
                graph.add_edge(
                    rel["source"],
                    rel["target"],
                    label=rel["attributes"].get("relationship", "related_to"),
                )
        else:
            print(f"Skipping entity {entity_id} due to invalid data format.")

    # Finally, the relationships listed directly in index.json.
    for rel in data["relationships"]:
        graph.add_edge(
            rel["source"],
            rel["target"],
            label=rel["attributes"].get("relationship", "related_to"),
        )

    return graph
62 |
+
|
63 |
+
# Visualize the graph using Matplotlib
|
64 |
+
def visualize_graph(G, title="Inferred Contextual Relationships"):
    """Draw *G* with domain-coloured nodes (Kamada-Kawai layout) and
    red relationship labels on the edges."""
    # Colour palette keyed by each node's "domain" attribute.
    color_map = {
        "Legislation": "lightcoral",
        "Healthcare Systems": "lightgreen",
        "Healthcare Policies": "lightblue",
        "Default": "lightgrey",
    }
    node_colors = [
        color_map.get(G.nodes[n].get("domain", "Default"), "lightgrey")
        for n in G.nodes
    ]

    # Kamada-Kawai spreads nodes more evenly than the spring layout.
    layout = nx.kamada_kawai_layout(G)

    # Nodes with domain-specific colours, plus labels.
    plt.figure(figsize=(15, 10))
    nx.draw_networkx_nodes(G, layout, node_size=3000, node_color=node_colors, alpha=0.8)
    nx.draw_networkx_labels(G, layout, font_size=9, font_color="black", font_weight="bold")

    # Curved directed edges annotated with their relationship label.
    nx.draw_networkx_edges(G, layout, arrowstyle="->", arrowsize=15, edge_color="gray", connectionstyle="arc3,rad=0.1")
    edge_text = {(u, v): attrs["label"] for u, v, attrs in G.edges(data=True)}
    nx.draw_networkx_edge_labels(G, layout, edge_labels=edge_text, font_color="red", font_size=8)

    # Title and display.
    plt.title(title, fontsize=14)
    plt.axis("off")
    plt.show()
94 |
+
|
95 |
+
# Main execution
|
96 |
+
if __name__ == "__main__":
    # Load the index, build the combined graph, and display it.
    visualize_graph(build_graph(load_index_data(index_file_path)))
|
scripts/3.5_enhanced_nodes.py
ADDED
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import networkx as nx
|
3 |
+
import matplotlib.pyplot as plt
|
4 |
+
import os
|
5 |
+
|
6 |
+
# Define the path to your index.json file
|
7 |
+
index_file_path = "graphs/index.json"
|
8 |
+
|
9 |
+
# Define colors for domains
# Maps each "inherits_from" domain value to the node colour used when drawing;
# "Default" is the fallback for entities without a recognised domain.
domain_colors = {
    "Legislation": "red",
    "Healthcare Systems": "blue",
    "Healthcare Policies": "green",
    "Default": "grey"
}
16 |
+
|
17 |
+
# Load index data
|
18 |
+
def load_index_data(file_path):
    """Return the parsed contents of the JSON index at *file_path*."""
    with open(file_path, "r") as source:
        parsed = json.load(source)
    return parsed
+
|
22 |
+
# Load and parse entities
|
23 |
+
def build_graph(data):
    """Build a DiGraph from index entities and relationships.

    Nodes are coloured via ``domain_colors`` from their ``inherits_from``
    domain. Edges come from each entity's own JSON file (when ``file_path``
    is set) and from the index-level ``relationships`` list.
    """
    G = nx.DiGraph()
    for entity_id, entity_info in data["entities"].items():
        label = entity_info.get("label", entity_id)
        domain = entity_info.get("inherits_from", "Default")
        color = domain_colors.get(domain, "grey")  # Default to "grey" if domain is missing
        G.add_node(entity_id, label=label, color=color)

        # Load additional relationships if specified in the entity data.
        file_path = entity_info.get("file_path")
        if file_path and os.path.exists(file_path):
            try:
                with open(file_path, "r") as f:
                    entity_data = json.load(f)
            except json.JSONDecodeError as e:
                # A malformed entity file should not abort the whole build
                # (matches the error handling used by the sibling scripts).
                print(f"Error loading JSON file at {file_path}: {e}")
                continue
            for rel in entity_data.get("relationships", []):
                # .get with a default so a relationship without a label falls
                # back to "related_to", consistent with the index loop below.
                G.add_edge(rel["source"], rel["target"],
                           label=rel["attributes"].get("relationship", "related_to"))

    # Add relationships from index.json
    for relationship in data["relationships"]:
        G.add_edge(relationship["source"], relationship["target"],
                   label=relationship["attributes"].get("relationship", "related_to"))

    return G
44 |
+
|
45 |
+
# Enhanced visualization
|
46 |
+
def visualize_graph(G, title="Inferred Contextual Relationships"):
    """Draw *G*, export it to graph_visualization.pdf, then show it."""
    layout = nx.spring_layout(G)
    plt.figure(figsize=(15, 10))

    # Node colours were attached by build_graph; fall back to grey.
    palette = [G.nodes[n].get("color", "grey") for n in G.nodes]
    nx.draw_networkx_nodes(G, layout, node_size=3000, node_color=palette, alpha=0.8)

    # Labels on top of the nodes.
    nx.draw_networkx_labels(G, layout, font_size=10, font_weight="bold")

    # Curved directed edges with red relationship labels.
    nx.draw_networkx_edges(G, layout, arrowstyle="->", arrowsize=20, edge_color="gray", connectionstyle="arc3,rad=0.1")
    edge_text = {(u, v): attrs["label"] for u, v, attrs in G.edges(data=True)}
    nx.draw_networkx_edge_labels(G, layout, edge_labels=edge_text, font_color="red", font_size=8)

    # Export as PDF before showing (show() would clear the figure).
    plt.title(title)
    plt.axis("off")
    plt.savefig("graph_visualization.pdf")
    plt.show()
67 |
+
|
68 |
+
# Main execution
|
69 |
+
if __name__ == "__main__":
    index_data = load_index_data(index_file_path)  # parse graphs/index.json
    graph = build_graph(index_data)                # entities + relationships
    visualize_graph(graph)                         # draw and export
|
scripts/3.6_test_visualisations.py
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# test_script.py
import os
import sys

# Make the project root importable before pulling in the app services.
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from app.services.agn_service.build_graph import build_graph
from app.services.agn_service.visualize_graph import visualize_graph

# Input graph definition and output image path.
graph_file = "graphs/healthcare.json"  # Update this to your actual graph file path
output_image = "test_graph_visualization.png"

# Build the graph, then render it only if the build succeeded.
G = build_graph(graph_file)
if G:
    visualize_graph(G, output_file=output_image)
    print(f"Graph visualization generated and saved as {output_image}")
else:
    print("Failed to build graph.")
|
scripts/3.7_test_visualisations.py
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# test_build_and_visualize_graph.py
|
2 |
+
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
import json
|
6 |
+
import networkx as nx
|
7 |
+
|
8 |
+
# Add the path to access agn_service
|
9 |
+
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
|
10 |
+
from app.services.agn_service.load_graph import load_graph
|
11 |
+
from app.services.agn_service.build_graph_from_index import build_graph_from_index
|
12 |
+
from app.services.agn_service.visualize_graph import visualize_graph
|
13 |
+
|
14 |
+
# Define paths
|
15 |
+
index_file_path = "graphs/index.json" # Path to index.json file
|
16 |
+
output_image = "test_graph_visualization.png"
|
17 |
+
|
18 |
+
# Load index data
|
19 |
+
def load_index_data(file_path):
    """Parse and return the JSON document at *file_path*."""
    with open(file_path, "r") as src:
        index = json.load(src)
    return index
22 |
+
|
23 |
+
# Main execution
|
24 |
+
if __name__ == "__main__":
    # Step 1: read the index; Step 2: build; Step 3: render and save.
    index_data = load_index_data(index_file_path)
    graph = build_graph_from_index(index_data)
    if graph:
        visualize_graph(graph, output_file=output_image)
        print(f"Graph visualization generated and saved as {output_image}")
    else:
        print("Failed to build graph.")
|
scripts/3.8_expand_graph.py
ADDED
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import sys
|
3 |
+
import json
|
4 |
+
import networkx as nx
|
5 |
+
import matplotlib.pyplot as plt
|
6 |
+
|
7 |
+
|
8 |
+
# Colour assigned to each "inherits_from" domain when nodes are drawn;
# "Default" covers entities without a recognised domain.
domain_colors = {
    "Legislation": "red",
    "Healthcare Systems": "blue",
    "Healthcare Policies": "green",
    "Default": "grey"
}
|
14 |
+
|
15 |
+
|
16 |
+
# Path setup
|
17 |
+
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
|
18 |
+
from app.services.agn_service.build_graph import build_graph
|
19 |
+
from app.services.agn_service.visualize_graph import visualize_graph
|
20 |
+
|
21 |
+
# Paths to files
|
22 |
+
index_file_path = "graphs/index.json"
|
23 |
+
output_image = "expanded_graph_visualization.png"
|
24 |
+
|
25 |
+
# Load index data
|
26 |
+
def load_index_data(file_path):
    """Read the JSON index file and return its parsed contents."""
    with open(file_path, "r") as stream:
        return json.load(stream)
29 |
+
|
30 |
+
# Build the graph with entities and relationships from the updated index
|
31 |
+
def build_expanded_graph(data):
    """Build the expanded DiGraph from the updated index.

    Nodes carry a colour derived from their ``inherits_from`` domain via
    ``domain_colors``. Edges come from each entity's own JSON file (when
    ``file_path`` is set) and from the index-level ``relationships`` list.
    """
    G = nx.DiGraph()
    for entity_id, entity_info in data["entities"].items():
        label = entity_info.get("label", entity_id)
        domain = entity_info.get("inherits_from", "Default")
        color = domain_colors.get(domain, "grey")
        G.add_node(entity_id, label=label, color=color)

        # Load additional relationships if specified in the entity data.
        file_path = entity_info.get("file_path")
        if file_path and os.path.exists(file_path):
            try:
                with open(file_path, "r") as f:
                    entity_data = json.load(f)
            except json.JSONDecodeError as e:
                # A malformed entity file should not abort the whole build.
                print(f"Error loading JSON file at {file_path}: {e}")
                continue
            for rel in entity_data.get("relationships", []):
                # .get with a default so a label-less relationship falls back
                # to "related_to", consistent with the index loop below.
                G.add_edge(rel["source"], rel["target"],
                           label=rel["attributes"].get("relationship", "related_to"))

    # Add new relationships from index.json
    for relationship in data["relationships"]:
        G.add_edge(relationship["source"], relationship["target"],
                   label=relationship["attributes"].get("relationship", "related_to"))

    return G
|
52 |
+
|
53 |
+
# Main execution to build and visualize the expanded graph
|
54 |
+
if __name__ == "__main__":
    # Load the updated index, build the expanded graph, then render it.
    graph = build_expanded_graph(load_index_data(index_file_path))
    if graph:
        visualize_graph(graph, output_file=output_image)
        print(f"Expanded graph visualization saved as {output_image}")
    else:
        print("Failed to build expanded graph.")
|
scripts/3.9_pass_graph_file.py
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# test_script.py

import os
import sys

# Make the project root importable before pulling in the app services.
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from app.services.agn_service.build_graph import build_graph
from app.services.agn_service.visualize_graph import visualize_graph

# Define paths
index_file_path = "graphs/index.json"  # Path to your index.json file
output_image = "test_graph_visualization.png"  # single definition (was assigned twice)

# Step 1: Build the graph from the index file.
G = build_graph(index_file_path)

# Step 2: Generate and save the graph visualization.
if G:
    visualize_graph(G, output_file=output_image)
    print(f"Graph visualization generated and saved as {output_image}")
else:
    print("Failed to build graph.")
|
scripts/4.0_sacred_geomitry.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pandas as pd
import numpy as np
import openpyxl
from openpyxl.utils.dataframe import dataframe_to_rows
from openpyxl.styles import Font

# Sample OHLCV data for the dynamic Excel workbook with adjustable
# SMA and EMA periods.  ("min" replaces the deprecated "T" alias.)
sample_data = {
    "Timestamp": pd.date_range(start="2024-01-01", periods=10, freq="min"),
    "Open": np.random.uniform(30000, 40000, 10),
    "High": np.random.uniform(30000, 40000, 10),
    "Low": np.random.uniform(30000, 40000, 10),
    "Close": np.random.uniform(30000, 40000, 10),
    "Volume": np.random.uniform(100, 500, 10)
}

# Convert to DataFrame
df_raw_data = pd.DataFrame(sample_data)

# Pre-computed SMA/EMA reference columns with adjustable initial periods.
sma_period = 5
ema_period = 5
df_raw_data['SMA'] = df_raw_data['Close'].rolling(window=sma_period).mean()
df_raw_data['EMA'] = df_raw_data['Close'].ewm(span=ema_period, adjust=False).mean()

# Initialize workbook (drop the default empty sheet).
wb = openpyxl.Workbook()
wb.remove(wb.active)

# "Raw Data" sheet: header in row 1, data in rows 2..len+1.
ws_raw = wb.create_sheet("Raw Data")
for r in dataframe_to_rows(df_raw_data, index=False, header=True):
    ws_raw.append(r)

# "Feature Engineering" sheet with adjustable-period formulas.
ws_feature = wb.create_sheet("Feature Engineering")
ws_feature["A1"].value = "Adjustable SMA and EMA"
ws_feature["A1"].font = Font(bold=True)
ws_feature["A2"].value = "SMA Period:"
ws_feature["B2"].value = sma_period
ws_feature["A3"].value = "EMA Period:"
ws_feature["B3"].value = ema_period

# Column headers for the recalculated SMA and EMA.
ws_feature["D2"].value = "Close Price"
ws_feature["E2"].value = "SMA (Dynamic)"
ws_feature["F2"].value = "EMA (Dynamic)"

# Feature rows 3..len+2 map to raw-data rows 2..len+1, hence the i-1 offset.
# Sheet names containing spaces MUST be single-quoted inside formulas.
for i in range(3, len(df_raw_data) + 3):
    ws_feature[f"D{i}"] = f"='Raw Data'!E{i - 1}"  # Close price
    # SMA over the last $B$2 closes: OFFSET takes a reference plus a row
    # offset, so shift the window start up by $B$2-1 rows to END at row i.
    ws_feature[f"E{i}"] = f"=AVERAGE(OFFSET(D{i},1-$B$2,0,$B$2,1))"
    # Placeholder: Excel has no built-in EMA function; replace with a
    # recursive formula or add-in before shipping.
    ws_feature[f"F{i}"] = f"=EXPONENTIALMOVINGAVERAGE(D{i}, $B$3)"

# "Synthetic Relationships" sheet with a parameters section.
ws_synthetic = wb.create_sheet("Synthetic Relationships")
ws_synthetic["A1"].value = "Synthetic Relationships - Parameter Adjustments"
ws_synthetic["A1"].font = Font(bold=True)

# Header lands in row 2 (first empty row after A1); data rows start at 3.
ws_synthetic.append(["Query Date", "Found Close Price", "Adjusted SMA", "Adjusted EMA"])
for i in range(3, len(df_raw_data) + 3):
    ws_synthetic[f"A{i}"] = ws_raw[f"A{i - 1}"].value        # Query Date (raw row i-1)
    ws_synthetic[f"B{i}"] = f"='Raw Data'!E{i - 1}"          # Found Close Price
    ws_synthetic[f"C{i}"] = f"='Feature Engineering'!E{i}"   # Adjusted SMA
    ws_synthetic[f"D{i}"] = f"='Feature Engineering'!F{i}"   # Adjusted EMA

# Save the file
file_path = "ActiveGraphTheory.xlsx"
wb.save(file_path)

file_path
|