lorenzoscottb committed
Commit 1ba6f25 · verified · 1 Parent(s): f02e17d

Update app.py

Files changed (1): app.py  +7 -1
app.py CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
 import pandas as pd
 from graph_utils import *
 from transformers import pipeline
+import dreamy
 
 languages = pd.read_csv("model_lang.csv", names=["Lang_acr"])
 
@@ -88,7 +89,7 @@ interface_anon = gr.Interface(
     description=description_main,
     examples=[
         "I was an exchange student in Slovakia, visiting someone who had been an exchange student here. The start of the dream was when I had just arrived at their house. Her family was very happy to see me, as well as excited to hear about my life in America. They asked me if I could make them a traditional American dessert to have with dinner, and I agreed, but I needed to go to the store to get some strawberries. The former exchange student, Zelka, said she would drive me to the store, so she took me out to her car which was a jeep like the ones in old WWII movies. The ride was very bumpy and when we got to town, I was very surprised to see how many American stores, etc., were there. It was terrible how much American culture had taken over their country. Then we were taken into this shop where a Chinese man was giving acupuncture lessons.",
-        "I was with my friends Mike and Ray. Lego blocks broke apart. Were perhaps in a junkyard.",
+        "I was with my friends Mike and Ray. Lego blocks broke apart. Were perhaps in a junkyard."
     ],
     cache_examples=True,
 )
@@ -102,10 +103,12 @@ pipe_L = pipeline(
     return_all_scores=True,
     truncation="do_not_truncate",
 )
+
 def predictL(text):
     t = pipe_L(text)
     t = {list(dct.values())[0] : list(dct.values())[1] for dct in t[0]}
     return t
+
 interface_model_L = gr.Interface(
     fn=predictL,
     inputs='text',
@@ -125,10 +128,12 @@ pipe_S = pipeline(
     return_all_scores=True,
     truncation="do_not_truncate",
 )
+
 def predict(text):
     t = pipe_S(text)
     t = {list(dct.values())[0] : list(dct.values())[1] for dct in t[0]}
     return t
+
 interface_model_S = gr.Interface(
     fn=predict,
     inputs='text',
@@ -163,6 +168,7 @@ pipe_N = pipeline(
     max_length=300,
     truncation="do_not_truncate",
 )
+
 def predictN(text):
     t = pipe_N(text)
     t = t[0]["generated_text"]