Sephfox committed on
Commit 06ea1ab
1 Parent(s): 1bbe094

Update app.py

Files changed (1)
  1. app.py +58 -66
app.py CHANGED
@@ -412,75 +412,70 @@ class NeuralNetworkSimulator:
 
 # Set up MediaPipe Pose
 mp_pose = mp.solutions.pose
-pose = mp_pose.Pose(static_image_mode=True, min_detection_confidence=0.5)
+pose = mp_pose.Pose(static_image_mode=True, min_detection_confidence=0.7)
 
+# Humanoid Detection Function
 def detect_humanoid(image_path):
     image = cv2.imread(image_path)
     image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
     results = pose.process(image_rgb)
+
     if results.pose_landmarks:
         landmarks = results.pose_landmarks.landmark
         image_height, image_width, _ = image.shape
-        keypoints = []
-        for landmark in landmarks:
-            x = int(landmark.x * image_width)
-            y = int(landmark.y * image_height)
-            keypoints.append((x, y))
+        keypoints = [(int(landmark.x * image_width), int(landmark.y * image_height)) for landmark in landmarks]
         return keypoints
     return []
 
+# Apply touch points on detected humanoid keypoints
 def apply_touch_points(image_path, keypoints):
     image = cv2.imread(image_path)
-    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
-    image = Image.fromarray(image)
-    draw = ImageDraw.Draw(image)
+    image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+    image_pil = Image.fromarray(image_rgb)
+    draw = ImageDraw.Draw(image_pil)
+
     for point in keypoints:
-        draw.ellipse([point[0]-5, point[1]-5, point[0]+5, point[1]+5], fill='red')
-    return image
+        draw.ellipse([point[0] - 5, point[1] - 5, point[0] + 5, point[1] + 5], fill='red')
+
+    return image_pil
 
+# Create Sensation Map with Vectorized Computation for Speed
 def create_sensation_map(width, height, keypoints):
-    sensation_map = np.zeros((height, width, 12))
-    for y in range(height):
-        for x in range(width):
-            base_sensitivities = np.random.rand(12) * 0.5 + 0.5
-
-            # Enhance sensitivities near keypoints
-            for kp in keypoints:
-                distance = np.sqrt((x - kp[0])**2 + (y - kp[1])**2)
-                if distance < 30:  # Adjust this value to change the area of influence
-                    base_sensitivities *= 1.5
-
-            sensation_map[y, x, 0] = base_sensitivities[0] * np.random.rand()  # Pain
-            sensation_map[y, x, 1] = base_sensitivities[1] * np.random.rand()  # Pleasure
-            sensation_map[y, x, 2] = base_sensitivities[2] * np.random.rand()  # Pressure
-            sensation_map[y, x, 3] = base_sensitivities[3] * (np.random.rand() * 10 + 30)  # Temperature
-            sensation_map[y, x, 4] = base_sensitivities[4] * np.random.rand()  # Texture
-            sensation_map[y, x, 5] = base_sensitivities[5] * np.random.rand()  # EM field
-            sensation_map[y, x, 6] = base_sensitivities[6] * np.random.rand()  # Tickle
-            sensation_map[y, x, 7] = base_sensitivities[7] * np.random.rand()  # Itch
-            sensation_map[y, x, 8] = base_sensitivities[8] * np.random.rand()  # Quantum
-            sensation_map[y, x, 9] = base_sensitivities[9] * np.random.rand()  # Neural
-            sensation_map[y, x, 10] = base_sensitivities[10] * np.random.rand()  # Proprioception
-            sensation_map[y, x, 11] = base_sensitivities[11] * np.random.rand()  # Synesthesia
+    sensation_map = np.random.rand(height, width, 12) * 0.5 + 0.5
+
+    # Create coordinate grids for vectorized calculation
+    x_grid, y_grid = np.meshgrid(np.arange(width), np.arange(height))
+
+    for kp in keypoints:
+        kp_x, kp_y = kp
+
+        # Using vectorized distance calculation
+        dist = np.sqrt((x_grid - kp_x) ** 2 + (y_grid - kp_y) ** 2)
+
+        # Apply Gaussian influence on sensation based on distance
+        influence = np.exp(-dist / 100)  # Smoother, larger area of influence
+        sensation_map[:, :, :12] *= 1 + (influence[..., np.newaxis]) * 1.2  # Apply to all sensation channels
 
     return sensation_map
 
+# Create Heatmap for a Specific Sensation Type
 def create_heatmap(sensation_map, sensation_type):
     plt.figure(figsize=(10, 15))
     sns.heatmap(sensation_map[:, :, sensation_type], cmap='viridis')
     plt.title(f'{["Pain", "Pleasure", "Pressure", "Temperature", "Texture", "EM Field", "Tickle", "Itch", "Quantum", "Neural", "Proprioception", "Synesthesia"][sensation_type]} Sensation Map')
     plt.axis('off')
 
-    # Instead of displaying, save to a buffer
+    # Save the heatmap to a buffer
     buf = io.BytesIO()
     plt.savefig(buf, format='png')
     buf.seek(0)
-    plt.close()  # Close the figure to free up memory
+    plt.close()
 
     # Create an image from the buffer
     heatmap_img = Image.open(buf)
     return heatmap_img
 
+# Generate AI response based on keypoints and sensation map
 def generate_ai_response(keypoints, sensation_map):
     num_keypoints = len(keypoints)
     avg_sensations = np.mean(sensation_map, axis=(0, 1))
@@ -493,10 +488,11 @@ def generate_ai_response(keypoints, sensation_map):
 
     return response
 
+# Streamlit UI for Interaction
 uploaded_file = st.file_uploader("Choose an image...", type=["jpg", "jpeg", "png"])
 
 if uploaded_file is not None:
-    # Read the image
+    # Read and save uploaded image
     image_path = 'temp.jpg'
     with open(image_path, 'wb') as f:
         f.write(uploaded_file.getvalue())
@@ -512,13 +508,13 @@ if uploaded_file is not None:
     image_height, image_width, _ = image.shape
     sensation_map = create_sensation_map(image_width, image_height, keypoints)
 
-    # Display the processed image
+    # Display the processed image with touch points
     fig, ax = plt.subplots()
     ax.imshow(processed_image)
 
-    # Create a list to store the clicked points
+    # List of clicked points for interaction
     clicked_points = []
-
+
     def onclick(event):
         if event.xdata is not None and event.ydata is not None:
             clicked_points.append((int(event.xdata), int(event.ydata)))
@@ -544,7 +540,7 @@ if uploaded_file is not None:
     # Display the plot
     st.pyplot(fig)
 
-    # Display heatmaps for different sensations
+    # Heatmap for different sensations
    sensation_types = ["Pain", "Pleasure", "Pressure", "Temperature", "Texture", "EM Field",
                        "Tickle", "Itch", "Quantum", "Neural", "Proprioception", "Synesthesia"]
 
@@ -556,30 +552,21 @@ if uploaded_file is not None:
     if st.button("Generate AI Response"):
         response = generate_ai_response(keypoints, sensation_map)
         st.write("AI Response:", response)
-
-    # Touch controls and output
+
+    # Additional Neural Interface Controls for Interaction
     st.subheader("Neural Interface Controls")
 
-    # Touch duration
     touch_duration = st.slider("Interaction Duration (s)", 0.1, 5.0, 1.0, 0.1)
-
-    # Touch pressure
     touch_pressure = st.slider("Interaction Intensity", 0.1, 2.0, 1.0, 0.1)
-
-    # Toggle quantum feature
     use_quantum = st.checkbox("Enable Quantum Sensing", value=True)
-
-    # Toggle synesthesia
     use_synesthesia = st.checkbox("Enable Synesthesia", value=False)
-
-    # Add this with your other UI elements
     show_heatmap = st.checkbox("Show Sensation Heatmap", value=True)
 
     if st.button("Simulate Interaction"):
-        # Simulate interaction at the clicked point
-        if 'clicked_points' in locals() and clicked_points:
-            touch_x, touch_y = clicked_points[-1]
+        if clicked_points:
+            touch_x, touch_y = clicked_points[-1]
 
+            # Retrieve the sensation values at the clicked location
            sensation = sensation_map[touch_y, touch_x]
             (
                 pain, pleasure, pressure_sens, temp_sens, texture_sens,
@@ -587,31 +574,34 @@ if uploaded_file is not None:
                 proprioception_sens, synesthesia_sens
             ) = sensation
 
+            # Adjust the sensations based on user interaction settings
             measured_pressure = pressure_sens * touch_pressure
-            measured_temp = temp_sens  # Assuming temperature doesn't change
-            measured_texture = texture_sens  # Assuming texture doesn't change
-            measured_em = em_sens  # Assuming EM field doesn't change
+            measured_temp = temp_sens  # Assuming temperature doesn't change during touch
+            measured_texture = texture_sens  # Assuming texture is constant
+            measured_em = em_sens  # Assuming electromagnetic field remains constant
 
+            # Quantum sensation handling based on user selection
             if use_quantum:
                 quantum_state = quantum_sens
             else:
                 quantum_state = "N/A"
 
-            # Calculate overall sensations
+            # Calculate overall sensations with interaction modifiers
             pain_level = pain * measured_pressure * touch_pressure
             pleasure_level = pleasure * (measured_temp - 37) / 10
             tickle_level = tickle_sens * (1 - np.exp(-touch_duration / 0.5))
             itch_level = itch_sens * (1 - np.exp(-touch_duration / 1.5))
 
             # Proprioception (sense of body position)
-            proprioception = proprioception_sens * np.linalg.norm([touch_x - image_width/2, touch_y - image_height/2]) / (image_width/2)
+            proprioception = proprioception_sens * np.linalg.norm([touch_x - image_width / 2, touch_y - image_height / 2]) / (image_width / 2)
 
-            # Synesthesia (mixing of senses)
+            # Synesthesia (mixing of senses) handling based on user selection
             if use_synesthesia:
                 synesthesia = synesthesia_sens * (measured_pressure + measured_temp + measured_em) / 3
             else:
                 synesthesia = "N/A"
 
+            # Display simulated interaction results
             st.write("### Simulated Interaction Results")
             st.write(f"Interaction Point: ({touch_x:.1f}, {touch_y:.1f})")
             st.write(f"Duration: {touch_duration:.1f} s | Intensity: {touch_pressure:.2f}")
@@ -620,14 +610,16 @@ if uploaded_file is not None:
             st.write(f"Tickle: {tickle_level:.2f} | Itch: {itch_level:.2f} | Quantum: {quantum_state}")
             st.write(f"Neural: {neural_sens:.2f} | Proprioception: {proprioception:.2f} | Synesthesia: {synesthesia}")
 
-            # Display a heatmap of the sensations
+            # Optionally display heatmap of the sensations
             if show_heatmap:
-                heatmap = create_heatmap(sensation_map, sensation_types.index("Pain"))
+                heatmap = create_heatmap(sensation_map, sensation_types.index("Pain"))  # Example for "Pain"
                 st.image(heatmap, use_column_width=True)
 
-            # Calculate the average pressure value
-            average_pressure = np.mean(sensation_map[:, :, 2])
-
+            # Optionally, calculate and display the average pressure value in the image
+            average_pressure = np.mean(sensation_map[:, :, 2])  # Pressure channel
+            st.write(f"Average Pressure across the image: {average_pressure:.2f}")
+
+
             # Create a futuristic data display
             data_display = (
                 "```\n"