gokaygokay committed
Commit 9405818 • Parent(s): 23ee02b

trial

Files changed:
- app.py +8 -1
- llm_inference.py +6 -2
app.py CHANGED

@@ -89,7 +89,14 @@ def create_interface():
         try:
             # Step 1: Generate Prompt
             dynamic_seed = random.randint(0, 1000000)
-
+            if custom_input and custom_input.strip():
+                prompt = llm_node.generate_prompt(dynamic_seed, prompt_type, custom_input)
+                print(f"Using Custom Input Prompt.")
+            else:
+                # Inform the system to create a random prompt based on the selected prompt_type
+                prompt = llm_node.generate_prompt(dynamic_seed, prompt_type, f"Create a random prompt based on the '{prompt_type}' type.")
+                print(f"No Custom Input Prompt provided. Generated prompt based on prompt_type: {prompt_type}")
+
             print(f"Generated Prompt: {prompt}")

             # Step 2: Generate Text with LLM
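Read together, the app.py change routes prompt generation through one of two paths: a user-typed description is forwarded as-is, while an empty input is replaced by a sentinel instruction asking for a random prompt of the selected type. The sketch below restates that branching as a standalone helper; it is illustrative only (the build_prompt wrapper does not exist in the repository) and assumes llm_node exposes generate_prompt(seed, prompt_type, custom_input) exactly as called in the diff.

import random

# Illustrative sketch of the branching added in app.py; build_prompt is a
# hypothetical wrapper, while the generate_prompt calls mirror the diff.
def build_prompt(llm_node, prompt_type, custom_input):
    dynamic_seed = random.randint(0, 1000000)
    if custom_input and custom_input.strip():
        # A custom description was typed: forward it unchanged.
        return llm_node.generate_prompt(dynamic_seed, prompt_type, custom_input)
    # No custom input: send a sentinel instruction asking for a random prompt
    # of the selected type, which llm_inference.py later recognizes by prefix.
    return llm_node.generate_prompt(
        dynamic_seed,
        prompt_type,
        f"Create a random prompt based on the '{prompt_type}' type.",
    )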
llm_inference.py CHANGED

@@ -28,7 +28,7 @@ class LLMInferenceNode:
         if custom_input and custom_input.strip():
             prompt = custom_input
         else:
-            prompt = f"
+            prompt = f"Create a random prompt based on the '{prompt_type}' type."

         # Additional logic can be added here if needed
         print(f"Generated prompt: {prompt}")  # Debug statement

@@ -107,7 +107,11 @@ You are allowed to make up film and branding names, and do them like 80's, 90's
 
         # Construct messages for the LLM
         system_message = "You are a helpful assistant. Try your best to give the best response possible to the user."
-
+
+        if input_text.startswith("Create a random prompt based on"):
+            user_message = input_text
+        else:
+            user_message = f"{base_prompt}\nDescription: {input_text}"

         # Select the appropriate provider
         if provider == "Hugging Face":
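On the llm_inference.py side, the new startswith check is what distinguishes that sentinel from a real description when the chat messages are assembled. Below is a minimal sketch of that routing; only the startswith branch, the f-string, and the system message mirror the diff, while the build_messages helper itself is hypothetical.

# Minimal sketch of the message routing added in llm_inference.py.
def build_messages(base_prompt, input_text):
    system_message = ("You are a helpful assistant. Try your best to give "
                      "the best response possible to the user.")
    if input_text.startswith("Create a random prompt based on"):
        # Sentinel from app.py: no user description exists, so pass the
        # instruction through and let the model invent the prompt itself.
        user_message = input_text
    else:
        # Normal path: wrap the user's description with the selected template.
        user_message = f"{base_prompt}\nDescription: {input_text}"
    return [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message},
    ]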