Update app.py
app.py CHANGED
@@ -63,10 +63,10 @@ def text2story(text):
         story = tokenizer.decode(outputs[0], skip_special_tokens=True)
         return story
     except Exception as e:
-        # Fallback to
-        fallback_generator = AutoModelForCausalLM.from_pretrained("
+        # Fallback to openai-community/gpt2 if the advanced one fails
+        fallback_generator = AutoModelForCausalLM.from_pretrained("openai-community/gpt2")
         fallback_prompt = f"Please generate the story for the following and target audience for kids : {text}"
-        tokenizer = AutoTokenizer.from_pretrained("
+        tokenizer = AutoTokenizer.from_pretrained("openai-community/gpt2")
         inputs = tokenizer(fallback_prompt, return_tensors="pt")
         fallback_story = fallback_generator.generate(
             inputs.input_ids,
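For reference, below is a minimal, self-contained sketch of how the fallback path reads once this hunk is applied. Only the model id, the prompt string, and the calls visible in the diff come from the commit; the function name fallback_story and the generate() arguments after inputs.input_ids (where the hunk is truncated) are assumptions, not part of the change.

from transformers import AutoModelForCausalLM, AutoTokenizer

def fallback_story(text):
    # Fallback to openai-community/gpt2, mirroring the except branch in the diff.
    tokenizer = AutoTokenizer.from_pretrained("openai-community/gpt2")
    generator = AutoModelForCausalLM.from_pretrained("openai-community/gpt2")

    prompt = f"Please generate the story for the following and target audience for kids : {text}"
    inputs = tokenizer(prompt, return_tensors="pt")

    # The hunk is cut off after inputs.input_ids, so the remaining generate()
    # arguments here (max_new_tokens, pad_token_id) are illustrative guesses.
    outputs = generator.generate(
        inputs.input_ids,
        max_new_tokens=200,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

GPT-2 ships without a pad token, so passing pad_token_id=tokenizer.eos_token_id is a common way to silence the related warning during generation.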