shivanikerai committed
Commit 495346b · verified · 1 Parent(s): 992ac6b

Update app.py

Files changed (1): app.py (+15 -8)
app.py CHANGED
@@ -1,9 +1,13 @@
 import gradio as gr
-from transformers import pipeline
-pipe = pipeline("text-generation", model="shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0")
+import requests
+# from transformers import pipeline
+# pipe = pipeline("text-generation", model="shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0")
+API_URL = "https://api-inference.huggingface.co/models/shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0"
+def query(payload, api_token):
+    response = requests.post(API_URL, headers={"Authorization": f"Bearer {api_token}"}, json=payload)
+    return response.json()
 
-
-def my_function(keywords, product_info):
+def my_function(api_token, keywords, product_info):
     B_SYS, E_SYS = "<<SYS>>", "<</SYS>>"
     B_INST, E_INST = "[INST]", "[/INST]"
     B_in, E_in = "[Product Details]", "[/Product Details]"
@@ -11,8 +15,11 @@ def my_function(keywords, product_info):
     prompt = f"""{B_INST} {B_SYS} You are a helpful, respectful and honest assistant for ecommerce product title creation. {E_SYS}
     Create a SEO optimized e-commerce product title for the keywords:{keywords.strip()}
     {B_in}{product_info}{E_in}\n{E_INST}\n\n{B_out}"""
-    predictions = pipe(prompt)
-    output=((predictions[0]['generated_text']).split(B_out)[-1]).strip()
+    # predictions = pipe(prompt)
+    # output=((predictions[0]['generated_text']).split(B_out)[-1]).strip()
+    output = query({
+        "inputs": prompt,
+    },api_token)
     return (output)
 
     # Process the inputs (e.g., concatenate strings, perform calculations)
@@ -21,9 +28,9 @@ def my_function(keywords, product_info):
 
 # Create the Gradio interface
 interface = gr.Interface(fn=my_function,
-                         inputs=["text", "text"],
+                         inputs=["text", "text", "text"],
                          outputs="text",
                          title="SEO Optimised Title Suggestion",
                          description="Enter Keywords and Product Info:")
 
-interface.launch()
+interface.launch()
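
Note that with this change my_function returns the raw JSON from query instead of the title string the old pipeline path extracted with split(B_out). A minimal sketch of how that response could be unwrapped, assuming the Hosted Inference API's usual text-generation response shape; the extract_generated_text helper and the example token are illustrative and not part of the commit:

import requests

API_URL = "https://api-inference.huggingface.co/models/shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0"

def query(payload, api_token):
    # Same helper as in the commit: POST the JSON payload to the Hosted Inference API.
    response = requests.post(API_URL, headers={"Authorization": f"Bearer {api_token}"}, json=payload)
    return response.json()

def extract_generated_text(result):
    # Successful text-generation calls usually return [{"generated_text": "..."}];
    # anything else (e.g. {"error": "... is currently loading"}) is passed through as text.
    if isinstance(result, list) and result and "generated_text" in result[0]:
        return result[0]["generated_text"]
    return str(result)

# Example usage (hypothetical token):
# result = query({"inputs": "Create a SEO optimized e-commerce product title for the keywords: ..."}, "hf_xxx")
# print(extract_generated_text(result))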