bambadij committed on
Commit
7cb9d32
·
verified ·
1 Parent(s): 9297d17
Files changed (1) hide show
  1. app.py +17 -13
app.py CHANGED
@@ -133,7 +133,6 @@ async def generate_text(request: RequestModel):
133
  generated_text += chunk.choices[0].delta.content
134
 
135
  return {"summary_text_2": generated_text}
136
-
137
  @app.post("/generate2/")
138
  async def generate_text(file: UploadFile = File(...)):
139
  # Read the uploaded CSV file
@@ -143,20 +142,25 @@ async def generate_text(file: UploadFile = File(...)):
143
  except Exception as e:
144
  return {"error": f"Error reading CSV file: {str(e)}"}
145
 
146
- # Assuming you want to generate text based on the CSV data
147
- # For demonstration, we'll use the first row of the CSV
148
- # Adjust as needed based on your actual requirements
149
- text_to_generate = df.iloc[0].to_string()
 
 
150
 
151
  # Create the request for the API
152
- completion = client.chat.completions.create(
153
- model="meta/llama-3.1-8b-instruct",
154
- messages=[{"role": "user", "content": prompt1 + text_to_generate}],
155
- temperature=0.2,
156
- top_p=0.9,
157
- # max_tokens=1024,
158
- stream=True
159
- )
 
 
 
160
 
161
  generated_text = ""
162
  for chunk in completion:
 
133
  generated_text += chunk.choices[0].delta.content
134
 
135
  return {"summary_text_2": generated_text}
 
136
  @app.post("/generate2/")
137
  async def generate_text(file: UploadFile = File(...)):
138
  # Read the uploaded CSV file
 
142
  except Exception as e:
143
  return {"error": f"Error reading CSV file: {str(e)}"}
144
 
145
+ # Concatenate all rows into a single string
146
+ try:
147
+ # Convert the entire DataFrame to a string
148
+ text_to_generate = df.to_string(index=False)
149
+ except Exception as e:
150
+ return {"error": f"Error converting DataFrame to string: {str(e)}"}
151
 
152
  # Create the request for the API
153
+ try:
154
+ completion = client.chat.completions.create(
155
+ model="meta/llama-3.1-8b-instruct",
156
+ messages=[{"role": "user", "content": prompt1 + text_to_generate}],
157
+ temperature=0.2,
158
+ top_p=0.9,
159
+ # max_tokens=1024,
160
+ stream=True
161
+ )
162
+ except Exception as e:
163
+ return {"error": f"Error generating text: {str(e)}"}
164
 
165
  generated_text = ""
166
  for chunk in completion: