# run_search.py
import os
import sys

import openai  # uses the pre-1.0 openai SDK, which still exposes openai.Completion

# Add "/actions" to sys.path so search_content.py can be imported
actions_path = os.path.abspath("/actions")
sys.path.insert(0, actions_path)

# Import search_content.py from the /actions folder
from search_content import main_search

# Read the OpenAI API key from the secret named "openai"
secret_value_0 = os.environ.get("openai")
openai.api_key = secret_value_0


def generate_openai_response(query, model_engine="text-davinci-002", max_tokens=124, temperature=0.8):
    """Generate a response using the OpenAI API."""
    # Run the main function from search_content.py and store the results
    results = main_search(query)

    # Build the context from the results, trimmed to 2014 characters; adjust as necessary
    context = "".join([f"#{str(i)}" for i in results])[:2014]
    prompt_template = f"Relevant context: {context}\n\nAnswer the question in detail: {query}"

    # Generate a response using the OpenAI API
    response = openai.Completion.create(
        engine=model_engine,
        prompt=prompt_template,
        max_tokens=max_tokens,
        temperature=temperature,
        n=1,
        stop=None,
    )
    return response.choices[0].text.strip()


def main():
    query = "What are Omdena local chapters, and how can a developer benefit from them?"
    response = generate_openai_response(query)
    print(response)


if __name__ == "__main__":
    main()
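The script imports main_search from /actions/search_content.py, which is not shown here. The sketch below is a hypothetical stand-in, assuming only what run_search.py relies on: that main_search(query) returns an iterable of text snippets that can be concatenated into the prompt context. A real implementation would more likely embed the query and search a vector index; the keyword-overlap ranking and the placeholder corpus are illustrative assumptions, not the Space's actual code.

# search_content.py (hypothetical sketch; the real /actions/search_content.py is not shown here)
# run_search.py only assumes that main_search(query) returns an iterable of text
# snippets, so this stand-in ranks a small placeholder corpus by keyword overlap.
CORPUS = [
    "Placeholder snippet describing Omdena local chapters.",
    "Placeholder snippet on how developers can benefit from joining a chapter.",
]


def main_search(query, top_k=2):
    """Return the top_k corpus snippets sharing the most words with the query."""
    query_words = set(query.lower().split())
    ranked = sorted(
        CORPUS,
        key=lambda doc: len(query_words & set(doc.lower().split())),
        reverse=True,
    )
    return ranked[:top_k]

With both files in place, store the API key in a secret named openai (which is what os.environ.get("openai") expects) and run python run_search.py. Note that openai.Completion.create exists only in the pre-1.0 openai SDK, so the requirements should pin openai<1.0, and text-davinci-002 is a legacy completions model that may no longer be served.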