shaocongma committed
Commit: 510bfff · 1 Parent(s): 1457d21

Bug fix. Complete Prompts mode.
Files changed:
- app.py +14 -14
- auto_backgrounds.py +16 -1
- latex_templates/Default/template.tex +5 -0
- utils/prompts.py +1 -1
- utils/references.py +3 -0
app.py
CHANGED
@@ -134,7 +134,7 @@ def wrapped_generator(
         tldr=True, max_kw_refs=10, bib_refs=None, max_tokens_ref=2048,  # references
         knowledge_database=None, max_tokens_kd=2048, query_counts=10,  # domain knowledge
         paper_template="ICLR2022", selected_sections=None, model="gpt-4", prompts_mode=False,  # outputs parameters
-        cache_mode=
+        cache_mode=IS_CACHE_AVAILABLE  # handle cache mode
 ):
     # if `cache_mode` is True, then follow the following steps:
     # check if "title"+"description" have been generated before
@@ -271,16 +271,16 @@ with gr.Blocks(theme=theme) as demo:
                 clear_button_pp = gr.Button("Clear")
                 submit_button_pp = gr.Button("Submit", variant="primary")
 
-    with gr.Tab("文献搜索"):
-        gr.Markdown(REFERENCES)
-
-        title_refs = gr.Textbox(value="Playing Atari with Deep Reinforcement Learning", lines=1, max_lines=1,
-                                label="Title", info="论文标题")
-        slider_refs = gr.Slider(minimum=1, maximum=100, value=5, step=1,
-                                interactive=True, label="最相关的参考文献数目")
-        with gr.Row():
-            clear_button_refs = gr.Button("Clear")
-            submit_button_refs = gr.Button("Submit", variant="primary")
+    # with gr.Tab("文献搜索"):
+    #     gr.Markdown(REFERENCES)
+    #
+    #     title_refs = gr.Textbox(value="Playing Atari with Deep Reinforcement Learning", lines=1, max_lines=1,
+    #                             label="Title", info="论文标题")
+    #     slider_refs = gr.Slider(minimum=1, maximum=100, value=5, step=1,
+    #                             interactive=True, label="最相关的参考文献数目")
+    #     with gr.Row():
+    #         clear_button_refs = gr.Button("Clear")
+    #         submit_button_refs = gr.Button("Submit", variant="primary")
 
     with gr.Tab("文献综述 (Coming soon!)"):
         gr.Markdown('''
@@ -312,9 +312,9 @@ with gr.Blocks(theme=theme) as demo:
                                      domain_knowledge, max_tokens_kd_slider, query_counts_slider,
                                      template, sections, model_selection, prompts_mode], outputs=file_output)
 
-    clear_button_refs.click(fn=clear_inputs_refs, inputs=[title_refs, slider_refs], outputs=[title_refs, slider_refs])
-    submit_button_refs.click(fn=wrapped_references_generator,
-                             inputs=[title_refs, slider_refs, key], outputs=json_output)
+    # clear_button_refs.click(fn=clear_inputs_refs, inputs=[title_refs, slider_refs], outputs=[title_refs, slider_refs])
+    # submit_button_refs.click(fn=wrapped_references_generator,
+    #                          inputs=[title_refs, slider_refs, key], outputs=json_output)
 
 demo.queue(concurrency_count=1, max_size=5, api_open=False)
 demo.launch(show_error=True)
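The signature change above replaces an unfinished `cache_mode=` default with a module-level `IS_CACHE_AVAILABLE` flag. A minimal sketch of the idea, assuming the flag is derived from the environment (the variable name and check below are illustrative assumptions, not what app.py actually reads):

import os

# Assumption for illustration: the cache counts as available only when a
# storage endpoint is configured in the environment.
IS_CACHE_AVAILABLE = os.getenv("CACHE_ENDPOINT") is not None


def wrapped_generator(title, cache_mode=IS_CACHE_AVAILABLE):
    # With a module-level flag as the default, callers get caching whenever
    # the backing store is configured and an uncached run otherwise.
    if cache_mode:
        print(f"Cache enabled for '{title}'.")
    else:
        print(f"Running without cache for '{title}'.")

Binding the default to a flag evaluated at import time keeps every call site consistent without threading the configuration through the UI.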
auto_backgrounds.py
CHANGED
@@ -5,6 +5,7 @@ from utils.knowledge import Knowledge
 from utils.file_operations import hash_name, make_archive, copy_templates
 from utils.tex_processing import create_copies
 from section_generator import section_generation  # figures_generation, section_generation_bg, keywords_generation,
+from utils.prompts import generate_paper_prompts
 import logging
 import time
 from langchain.vectorstores import FAISS
@@ -145,6 +146,7 @@ def _generation_setup(title, description="", template="ICLR2022",
             print(f"Failed to query from FAISS. Error {e}. Use empty domain knowledge instead.")
             domain_knowledge = ""
     else:
+        print("Selected database doesn't exist or no database is selected.")
         domain_knowledge = ""
 
     ###################################################################################################################
@@ -249,8 +251,14 @@ def generate_draft(title, description="", # main input
                                  knowledge_database=knowledge_database)
 
     # main components
+    prompts_dict = {}
     print(f"================PROCESSING================")
     for section in sections:
+        if prompts_mode:
+            prompts = generate_paper_prompts(paper, section)
+            prompts_dict[section] = prompts
+            continue
+
         print(f"Generate {section} part...")
         max_attempts = 4
         attempts_count = 0
@@ -273,7 +281,14 @@ def generate_draft(title, description="", # main input
     input_dict = {"title": title, "description": description, "generator": "generate_draft"}
     filename = hash_name(input_dict) + ".zip"
     print("\nMission completed.\n")
-    return make_archive(destination_folder, filename)
+
+    if prompts_mode:
+        filename = hash_name(input_dict) + ".json"
+        with open(filename, "w") as f:
+            json.dump(prompts_dict, f)
+        return filename
+    else:
+        return make_archive(destination_folder, filename)
 
 
 if __name__ == "__main__":
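With `prompts_mode` enabled, `generate_draft` now skips generation entirely, collects the per-section prompts into `prompts_dict`, and returns a hash-named `.json` file instead of a zipped LaTeX project. A minimal sketch of how a caller might consume that file (the file name and the 200-character preview are placeholders, not a guaranteed schema):

import json

# Placeholder path; the real name is hash_name(input_dict) + ".json".
filename = "draft_prompts.json"

with open(filename, "r", encoding="utf-8") as f:
    prompts_dict = json.load(f)  # maps section name -> assembled prompt text

for section, prompt in prompts_dict.items():
    # Each value is the prompt that would otherwise have been sent to the
    # model to generate that section.
    print(f"--- {section} ---")
    print(prompt[:200])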
latex_templates/Default/template.tex
CHANGED
@@ -14,6 +14,11 @@
 \usepackage{algorithm}
 \usepackage{algpseudocode}
 
+\newlength\tindent
+\setlength{\tindent}{\parindent}
+\setlength{\parindent}{0pt}
+\renewcommand{\indent}{\hspace*{\tindent}}
+
 \title{TITLE}
 \author{GPT-4}
 
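The four preamble lines added to the template save the default paragraph indentation into `\tindent`, set `\parindent` to zero so paragraphs start flush left, and redefine `\indent` so the saved width can still be inserted explicitly. A minimal standalone illustration, assuming the stock `article` class (the body text is placeholder):

\documentclass{article}

% Save the class's paragraph indentation, then disable automatic indentation.
\newlength\tindent
\setlength{\tindent}{\parindent}
\setlength{\parindent}{0pt}
% \indent now re-inserts the saved width on demand.
\renewcommand{\indent}{\hspace*{\tindent}}

\begin{document}
This paragraph starts flush left because the automatic indent is zero.

\indent This paragraph is indented explicitly via the redefined command.
\end{document}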
utils/prompts.py
CHANGED
@@ -170,7 +170,7 @@ def generate_paper_prompts(paper_info, section):
     # ref_instruction_subprompt - give AI references
     # self_subprompt - give AI existing written parts
     # output_subprompt - tell AI how to output
-    fundamental_subprompt = "Your task is to write the {section} section of the paper with the title '{title}'. {description}\n"
+    fundamental_subprompt = "Your task is to write the {section} section of the paper with the title '{title}'. This paper has the following contributions: {description}\n"
     instruction_subprompt = "\n" \
                             "Your response should follow the following instructions:\n" \
                             "{instruction}\n" \
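The reworded `fundamental_subprompt` frames `{description}` explicitly as the paper's contributions rather than appending it without context. A quick sketch of how the placeholders expand (the title and description below are made-up examples):

# Template string from the commit, reproduced for illustration.
fundamental_subprompt = (
    "Your task is to write the {section} section of the paper with the title "
    "'{title}'. This paper has the following contributions: {description}\n"
)

# Hypothetical inputs, purely to show the expansion.
prompt = fundamental_subprompt.format(
    section="introduction",
    title="An Example Paper",
    description="(1) a new benchmark; (2) a simple baseline.",
)
print(prompt)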
utils/references.py
CHANGED
@@ -333,6 +333,8 @@ class References:
         # send (title, .bib file) to evaluate embeddings; recieve truncated papers
         papers = self._get_papers(keyword="_all")
 
+        l = len(papers)
+        print(f"{l} papers will be added to `ref.bib`.")
         # clear the bibtex file
         with open(path_to_bibtex, "w", encoding="utf-8") as file:
             file.write("")
@@ -358,6 +360,7 @@ class References:
             with open(path_to_bibtex, "a", encoding="utf-8") as file:
                 file.write(bibtex_entry)
                 file.write("\n\n")
+                # print(f'{paper["paper_id"]} has been added to `ref.bib`.')
         return paper_ids
 
     def _get_papers(self, keyword="_all"):
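The added print reports how many truncated papers are about to be written before `ref.bib` is regenerated; the commented-out per-entry print would do the same per paper. A minimal sketch of the surrounding clear-then-append pattern (the paper entries are fabricated placeholders, not real citations):

path_to_bibtex = "ref.bib"
papers = [
    {"paper_id": "placeholder_a", "bibtex": "@misc{placeholder_a, title={Placeholder A}}"},
    {"paper_id": "placeholder_b", "bibtex": "@misc{placeholder_b, title={Placeholder B}}"},
]

# Report the count up front, as the new print statement does.
print(f"{len(papers)} papers will be added to `ref.bib`.")

# Clear the bibtex file, then append one entry per paper.
with open(path_to_bibtex, "w", encoding="utf-8") as file:
    file.write("")

for paper in papers:
    with open(path_to_bibtex, "a", encoding="utf-8") as file:
        file.write(paper["bibtex"])
        file.write("\n\n")
        # print(f'{paper["paper_id"]} has been added to `ref.bib`.')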