Spaces: Sleeping
Update app.py
app.py CHANGED
@@ -410,11 +410,11 @@ class NeuralNetworkSimulator:



-# Set up MediaPipe Pose
+# Set up MediaPipe Pose for humanoid detection
 mp_pose = mp.solutions.pose
 pose = mp_pose.Pose(static_image_mode=True, min_detection_confidence=0.7)

-#
+# Function to detect humanoid keypoints
 def detect_humanoid(image_path):
     image = cv2.imread(image_path)
     image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
@@ -427,7 +427,7 @@ def detect_humanoid(image_path):
         return keypoints
     return []

-#
+# Function to apply touch points on detected humanoid keypoints
 def apply_touch_points(image_path, keypoints):
     image = cv2.imread(image_path)
     image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
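The body of detect_humanoid between lines 420 and 427 is elided in this view. A minimal sketch of what that middle plausibly looks like, assuming only the documented MediaPipe Pose API (pose.process and results.pose_landmarks are real; the landmark-to-pixel scaling is an illustration, not the committed code):

    # Hedged sketch -- not the committed code. Fills in the elided middle of
    # detect_humanoid using the standard MediaPipe Pose API.
    import cv2
    import mediapipe as mp

    mp_pose = mp.solutions.pose
    pose = mp_pose.Pose(static_image_mode=True, min_detection_confidence=0.7)

    def detect_humanoid(image_path):
        image = cv2.imread(image_path)
        image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        results = pose.process(image_rgb)  # run pose estimation on the RGB frame
        if results.pose_landmarks:
            h, w, _ = image.shape
            # landmarks are normalized to [0, 1]; scale to pixel coordinates
            return [(int(lm.x * w), int(lm.y * h))
                    for lm in results.pose_landmarks.landmark]
        return []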
@@ -439,43 +439,35 @@ def apply_touch_points(image_path, keypoints):

     return image_pil

-#
+# Function to create a sensation map
 def create_sensation_map(width, height, keypoints):
     sensation_map = np.random.rand(height, width, 12) * 0.5 + 0.5

-    # Create coordinate grids for vectorized calculation
     x_grid, y_grid = np.meshgrid(np.arange(width), np.arange(height))

     for kp in keypoints:
         kp_x, kp_y = kp
-
-        # Using vectorized distance calculation
         dist = np.sqrt((x_grid - kp_x) ** 2 + (y_grid - kp_y) ** 2)
-
-
-        influence = np.exp(-dist / 100)  # Smoother, larger area of influence
-        sensation_map[:, :, :12] *= 1 + (influence[..., np.newaxis]) * 1.2  # Apply to all sensation channels
+        influence = np.exp(-dist / 100)
+        sensation_map[:, :, :12] *= 1 + (influence[..., np.newaxis]) * 1.2

     return sensation_map

-#
+# Function to create heatmap for a specific sensation type
 def create_heatmap(sensation_map, sensation_type):
     plt.figure(figsize=(10, 15))
     sns.heatmap(sensation_map[:, :, sensation_type], cmap='viridis')
-    plt.title(f'{["Pain", "Pleasure", "Pressure", "Temperature", "Texture", "EM Field", "Tickle", "Itch", "Quantum", "Neural", "Proprioception", "Synesthesia"][sensation_type]} Sensation Map')
     plt.axis('off')

-    # Save the heatmap to a buffer
     buf = io.BytesIO()
     plt.savefig(buf, format='png')
     buf.seek(0)
     plt.close()

-    # Create an image from the buffer
     heatmap_img = Image.open(buf)
     return heatmap_img

-#
+# Function to generate AI response
 def generate_ai_response(keypoints, sensation_map):
     num_keypoints = len(keypoints)
     avg_sensations = np.mean(sensation_map, axis=(0, 1))
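The exponential falloff kept by this hunk means each keypoint multiplies all 12 sensation channels by up to 2.2x at the keypoint itself, decaying smoothly with pixel distance. A quick worked check of the multiplier 1 + exp(-dist / 100) * 1.2:

    # Worked check of the influence term used in create_sensation_map.
    import numpy as np

    for d in (0, 50, 100, 200, 400):
        boost = 1 + np.exp(-d / 100) * 1.2  # the per-pixel multiplier
        print(f"dist={d:3d}  multiplier={boost:.3f}")
    # e.g. dist=0 -> 2.200, dist=100 -> 1.441, dist=400 -> 1.022:
    # strong boost near a keypoint, negligible beyond a few hundred pixels.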
@@ -488,11 +480,12 @@ def generate_ai_response(keypoints, sensation_map):

     return response

-
+### Streamlit UI Logic ###
+
 uploaded_file = st.file_uploader("Choose an image...", type=["jpg", "jpeg", "png"])

 if uploaded_file is not None:
-    #
+    # Save and read the uploaded image
     image_path = 'temp.jpg'
     with open(image_path, 'wb') as f:
         f.write(uploaded_file.getvalue())
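A side note on the fixed 'temp.jpg' path: on a shared Space, two concurrent sessions would overwrite each other's upload. A hedged sketch of a per-session alternative using only the standard library (not part of this commit; uploaded_file comes from the surrounding app):

    # Hedged sketch -- not the committed code. Writes each upload to its own
    # temporary file so concurrent users cannot clobber one another.
    import tempfile

    with tempfile.NamedTemporaryFile(delete=False, suffix='.jpg') as tmp:
        tmp.write(uploaded_file.getvalue())
        image_path = tmp.name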
@@ -508,36 +501,28 @@ if uploaded_file is not None:
     image_height, image_width, _ = image.shape
     sensation_map = create_sensation_map(image_width, image_height, keypoints)

-    # Display the
+    # Display the image with touch points
     fig, ax = plt.subplots()
     ax.imshow(processed_image)

-    # List of clicked points for interaction
     clicked_points = []

     def onclick(event):
-        if event.xdata
+        if event.xdata and event.ydata:
             clicked_points.append((int(event.xdata), int(event.ydata)))
             st.write(f"Clicked point: ({int(event.xdata)}, {int(event.ydata)})")

-            #
+            # Display sensation values at the clicked point
             sensation = sensation_map[int(event.ydata), int(event.xdata)]
-            (
-                pain, pleasure, pressure_sens, temp_sens, texture_sens,
-                em_sens, tickle_sens, itch_sens, quantum_sens, neural_sens,
-                proprioception_sens, synesthesia_sens
-            ) = sensation
-
             st.write("### Sensory Data Analysis")
-            st.write(f"
-            st.write(f"
-            st.write(f"
-            st.write(f"Tickle: {tickle_sens:.2f} | Itch: {itch_sens:.2f} | Quantum: {quantum_sens:.2f}")
-            st.write(f"Neural: {neural_sens:.2f} | Proprioception: {proprioception_sens:.2f} | Synesthesia: {synesthesia_sens:.2f}")
+            st.write(f"Pain: {sensation[0]:.2f} | Pleasure: {sensation[1]:.2f} | Pressure: {sensation[2]:.2f}")
+            st.write(f"Temperature: {sensation[3]:.2f} | Texture: {sensation[4]:.2f} | EM Field: {sensation[5]:.2f}")
+            st.write(f"Tickle: {sensation[6]:.2f} | Itch: {sensation[7]:.2f} | Quantum: {sensation[8]:.2f}")
+            st.write(f"Neural: {sensation[9]:.2f} | Proprioception: {sensation[10]:.2f} | Synesthesia: {sensation[11]:.2f}")

+    # Connect the click event to the matplotlib figure
     fig.canvas.mpl_connect('button_press_event', onclick)

-    # Display the plot
+    # Display the plot in Streamlit
     st.pyplot(fig)

     # Heatmap for different sensations
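One caveat with the hunk above: st.pyplot renders the figure as a static image, so the matplotlib button_press_event callback never fires in the deployed app and clicked_points stays empty. A hedged fallback sketch (not in this commit) that collects a touch point through Streamlit-native widgets instead:

    # Hedged sketch -- not the committed code. st.pyplot() shows a static PNG,
    # so a Streamlit input is one way to collect a touch point instead.
    # image_width / image_height / clicked_points come from the surrounding app.
    touch_x = st.number_input("Touch x (pixels)", min_value=0, max_value=image_width - 1, value=0)
    touch_y = st.number_input("Touch y (pixels)", min_value=0, max_value=image_height - 1, value=0)
    clicked_points.append((int(touch_x), int(touch_y)))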
@@ -564,7 +549,7 @@

     if st.button("Simulate Interaction"):
         if clicked_points:
-            #
+            # Get the last clicked point
             touch_x, touch_y = clicked_points[-1]

             # Retrieve the sensation values at the clicked location
@@ -615,35 +600,35 @@ if uploaded_file is not None:
     if show_heatmap:
         heatmap = create_heatmap(sensation_map, sensation_types.index("Pain"))  # Example for "Pain"
         st.image(heatmap, use_column_width=True)
-
+
     # Optionally, calculate and display the average pressure value in the image
     average_pressure = np.mean(sensation_map[:, :, 2])  # Pressure channel
     st.write(f"Average Pressure across the image: {average_pressure:.2f}")

-
     # Create a futuristic data display
-    data_display = (
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    data_display = (
+        "```\n"
+        "+---------------------------------------------+\n"
+        f"| Pressure : {average_pressure:.2f}".ljust(45) + "|\n"
+        f"| Temperature : {np.mean(sensation_map[:, :, 3]):.2f}°C".ljust(45) + "|\n"
+        f"| Texture : {np.mean(sensation_map[:, :, 4]):.2f}".ljust(45) + "|\n"
+        f"| EM Field : {np.mean(sensation_map[:, :, 5]):.2f} μT".ljust(45) + "|\n"
+        f"| Quantum State: {np.mean(sensation_map[:, :, 8]):.2f}".ljust(45) + "|\n"
+        "+---------------------------------------------+\n"
+        f"| Location: ({touch_x:.1f}, {touch_y:.1f})".ljust(45) + "|\n"
+        f"| Pain Level : {np.mean(sensation_map[:, :, 0]):.2f}".ljust(45) + "|\n"
+        f"| Pleasure : {np.mean(sensation_map[:, :, 1]):.2f}".ljust(45) + "|\n"
+        f"| Tickle : {np.mean(sensation_map[:, :, 6]):.2f}".ljust(45) + "|\n"
+        f"| Itch : {np.mean(sensation_map[:, :, 7]):.2f}".ljust(45) + "|\n"
+        f"| Proprioception: {np.mean(sensation_map[:, :, 10]):.2f}".ljust(44) + "|\n"
+        f"| Synesthesia : {np.mean(sensation_map[:, :, 11]):.2f}".ljust(45) + "|\n"
+        f"| Neural Response: {np.mean(sensation_map[:, :, 9]):.2f}".ljust(43) + "|\n"
+        "+---------------------------------------------+\n"
+        "```"
+    )
+
+    # Display the futuristic data display using Streamlit's code block feature
+    st.code(data_display, language="")

     # Generate description
     prompt = (
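The mixed .ljust(45) / .ljust(44) / .ljust(43) widths in the added data_display compensate by hand for labels of different lengths, so any future label change silently breaks the right border. A hedged sketch of a row helper (not in this commit; box_row is a hypothetical name) that pads every row uniformly:

    # Hedged sketch -- not the committed code. One padded-row helper keeps the
    # box border aligned regardless of label length.
    def box_row(label, value, width=45):
        return f"| {label}: {value}".ljust(width) + "|\n"

    # e.g. box_row("Pressure", f"{average_pressure:.2f}")
    #      box_row("EM Field", f"{np.mean(sensation_map[:, :, 5]):.2f} μT")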
|