Simba committed on
Commit 81f2388
1 parent: 26fb51c

Challenger test

Files changed (2)
  1. app.py +44 -3
  2. neovision/core.py +1 -0
app.py CHANGED
@@ -7,6 +7,8 @@ import numpy as np
 
 import neovision
 
+import random
+
 MARKDOWN = """
 # Welcome to VisionB 🧠 + 📸
 
@@ -16,6 +18,27 @@ Meet VisionB, your Visual Agent that combines the power of advanced GPT models w
 connector = neovision.OpanAIConnector()
 
 
+def generate_liveness_challenge(image_details):
+    # Based on the image details, generate a challenge
+    challenges = []
+
+    if 'glasses' in image_details:
+        challenges.append("Please take off your glasses and hold them in your hand.")
+    if 'smiling' in image_details:
+        challenges.append("Please take another picture with a neutral expression.")
+
+    # You can add more contextual clues and corresponding challenges
+
+    # Generic challenges if no specific detail is detected
+    if not challenges:
+        challenges = [
+            "Please hold up 5 fingers.",
+            "Use your hand to cover one of your eyes.",
+            "Make an OK sign with your hand and hold it up to your chin."
+        ]
+
+    return random.choice(challenges)
+
 def save_image_to_drive(image: np.ndarray) -> str:
     image_filename = f"{uuid.uuid4()}.jpeg"
     image_directory = "data"
@@ -24,9 +47,8 @@ def save_image_to_drive(image: np.ndarray) -> str:
     cv2.imwrite(image_path, image)
     return image_path
 
-
-
 def respond(image: np.ndarray, prompt: str, chat_history=None):
+
     # Initialize chat_history as an empty list if it's None
     if chat_history is None:
         chat_history = []
@@ -34,11 +56,30 @@ def respond(image: np.ndarray, prompt: str, chat_history=None):
     image = np.fliplr(image)
     image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
     image_path = save_image_to_drive(image)
-    response = connector.simple_prompt(image=image, prompt=prompt)
+
+
+    # response = connector.simple_prompt(image=image, prompt=prompt)
+
+    # If the user's prompt is 'verify', we start the liveness challenge
+    if 'verify' in prompt.lower():
+        # Use the image details to generate a challenge
+        # This is where you'd use the AI's analysis of the image to tailor the challenge
+        # For simplicity, the details are hard-coded here
+        image_details = "A Person Wearing glasses"  # Placeholder for actual analysis
+        challenge = generate_liveness_challenge(image_details)
+        response = f"For liveness verification, {challenge}"
+    else:
+        # For any other prompt, just process normally
+        response = connector.simple_prompt(image=image, prompt=prompt)
+
+
     chat_history.append(((image_path,), None))
     chat_history.append((prompt, response))
     return "", chat_history
 
+
+
+
 with gr.Blocks() as demo:
     gr.Markdown(MARKDOWN)
     with gr.Row():
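
Note on the new respond() branch: the commit hard-codes image_details ("A Person Wearing glasses") where its own comments say the model's analysis of the image should go. A minimal sketch of how that hand-off could look, assuming only connector.simple_prompt (already called in app.py with image= and prompt= keyword arguments); the helper name and prompt text below are illustrative and not part of this commit:

# Hypothetical helper, not in this commit: asks the model for short visual
# attributes so generate_liveness_challenge() can key off them.
def describe_subject(image):
    return connector.simple_prompt(
        image=image,
        prompt="List short visual attributes of the person, e.g. glasses, smiling.",
    )

# Inside respond(), the placeholder line would then become:
#     image_details = describe_subject(image)
#     challenge = generate_liveness_challenge(image_details)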
neovision/core.py CHANGED
@@ -15,6 +15,7 @@ class OpanAIConnector:
         if api_key is None:
             raise ValueError("API_KEY is not set")
         self.api_key = api_key
+
 
     def simple_prompt(self, image: np.ndarray, prompt: str) -> str:
         headers = {
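
For a quick local check of the new 'verify' path, a sketch assuming app.py's functions are importable and the data/ directory used by save_image_to_drive exists; this branch never calls connector.simple_prompt, so no API key is needed here:

import numpy as np

frame = np.zeros((480, 640, 3), dtype=np.uint8)  # dummy BGR frame in place of a webcam capture

# 'verify' routes to generate_liveness_challenge() instead of the OpenAI call
_, chat_history = respond(frame, "verify")

# The last history entry pairs the prompt with the generated challenge, e.g.
# "For liveness verification, Please hold up 5 fingers."
print(chat_history[-1][1])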