root-sajjan committed on
Commit 94c0e24 · verified · 1 Parent(s): 81a28c0

indentation fix

Files changed (1): llm/inference.py +27 -27
llm/inference.py CHANGED
@@ -67,47 +67,47 @@ def extract_info(text):
     print(output)
 
 
-
 def get_name(url, object):
-
-messages = [
-{
-"role": "user",
-"content": [
-{
-"type": "text",
-"text": f"Is this a {object}?. Can you guess what it is and give me the closest brand it resembles to? or a model number? And give me its average price in today's market in USD. In output, give me its normal name, model name, model number and price. separated by commas. No description is needed."
-},
-{
-"type": "image_url",
-"image_url": {
-"url": url
-}
-}
-]
-}
-]
-
-completion = client.chat.completions.create(
-model="meta-llama/Llama-3.2-11B-Vision-Instruct",
-messages=messages,
-max_tokens=500
-)
-
+    messages = [
+        {
+            "role": "user",
+            "content": [
+                {
+                    "type": "text",
+                    "text": f"Is this a {object}?. Can you guess what it is and give me the closest brand it resembles to? or a model number? And give me its average price in today's market in USD. In output, give me its normal name, model name, model number and price. separated by commas. No description is needed."
+                },
+                {
+                    "type": "image_url",
+                    "image_url": {
+                        "url": url
+                    }
+                }
+            ]
+        }
+    ]
+
+    completion = client.chat.completions.create(
+        model="meta-llama/Llama-3.2-11B-Vision-Instruct",
+        messages=messages,
+        max_tokens=500
+    )
 
     print(f'\n\nNow output of LLM:\n')
     llm_result = completion.choices[0].message['content']
     print(llm_result)
+
     # print(f'\n\nThat is the output')
     print(f"Extracting from the output now, function calling")
     result = extract_product_info(llm_result)
     print(f'\n\nResult brand and price:{result}')
     print(f'\n\nThat is the output')
+
     # result2 = extract_info(llm_result)
     # print(f'\n\nFrom Google llm:{result2}')
-
+
     return result
 
+
 # url = "https://i.ibb.co/mNYvqDL/crop_39.jpg"
 # object="fridge"
 
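For readability, here is a minimal sketch of how get_name reads after the indentation fix, with the body nested under the def. It assumes client is an OpenAI-compatible chat-completions client (huggingface_hub.InferenceClient is used as one example that exposes client.chat.completions.create) and that extract_product_info is defined elsewhere in llm/inference.py; neither appears in this hunk, so the client setup and the stub below are stand-ins, not the repository's actual code.

    # Sketch only: the client construction and extract_product_info stub are
    # assumptions; neither is shown in this hunk of llm/inference.py.
    from huggingface_hub import InferenceClient

    client = InferenceClient(token="hf_...")  # placeholder token

    def extract_product_info(text):
        # Stand-in for the real parser defined elsewhere in llm/inference.py.
        return text

    def get_name(url, object):  # note: `object` shadows the builtin name
        # One user turn combining the text prompt and the image URL.
        messages = [
            {
                "role": "user",
                "content": [
                    {
                        "type": "text",
                        "text": f"Is this a {object}?. Can you guess what it is and give me the closest brand it resembles to? or a model number? And give me its average price in today's market in USD. In output, give me its normal name, model name, model number and price. separated by commas. No description is needed."
                    },
                    {
                        "type": "image_url",
                        "image_url": {"url": url}
                    }
                ]
            }
        ]

        # Ask the vision-instruct model for a short, comma-separated answer.
        completion = client.chat.completions.create(
            model="meta-llama/Llama-3.2-11B-Vision-Instruct",
            messages=messages,
            max_tokens=500,
        )

        llm_result = completion.choices[0].message.content  # the diff indexes this as ["content"]
        result = extract_product_info(llm_result)
        return result

The commented-out lines at the bottom of the hunk suggest the intended call pattern, e.g. get_name("https://i.ibb.co/mNYvqDL/crop_39.jpg", "fridge").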