import openai
from IPython.display import display, HTML

# NOTE(review): OPENAI_API_KEY, vector_search, SearchResultItem, and pd
# (pandas) must be defined/imported in an earlier cell or module section —
# they are referenced below but not defined here. Verify against the caller.
openai.api_key = OPENAI_API_KEY


def get_embedding(text):
    """Generate an embedding for the given text using OpenAI's API.

    Args:
        text: The input string to embed. Non-string or empty input is
            rejected up front.

    Returns:
        A 1536-dimensional embedding vector (list of floats), or ``None``
        when the input is invalid or the API call fails.
    """
    # Guard: reject empty / non-string input before spending an API call.
    if not text or not isinstance(text, str):
        return None

    try:
        # Call OpenAI API to get the embedding.
        embedding = openai.embeddings.create(
            input=text,
            model="text-embedding-3-small",
            dimensions=1536,
        ).data[0].embedding
        return embedding
    except Exception as e:
        # Deliberate best-effort: log and degrade to None rather than
        # propagating API/network failures to the caller.
        print(f"Error in get_embedding: {e}")
        return None


def handle_user_query(query, db, collection):
    """Answer a user query using vector search results as LLM context.

    Runs ``vector_search`` against the given db/collection, feeds the
    results to a chat completion as context, prints the question and
    answer, and displays the results table in the notebook.

    Args:
        query: The user's natural-language question.
        db: Database handle passed through to ``vector_search``.
        collection: Collection handle passed through to ``vector_search``.

    Returns:
        The model's response text, or a "No results found." message when
        the search returns nothing. Always a single string — the original
        no-results branch returned a 2-tuple, which was inconsistent with
        the success path and would break callers; fixed here.
    """
    # Assuming vector_search returns a list of dictionaries with keys
    # 'title' and 'plot' — TODO confirm against its definition.
    get_knowledge = vector_search(query, db, collection)

    # Bail out early when the search produced nothing.
    if not get_knowledge:
        # Fixed: return a single string (not a tuple) to match the
        # success path's return type.
        return "No results found."

    # Convert search results into a list of SearchResultItem models.
    search_results_models = [
        SearchResultItem(**result)
        for result in get_knowledge
    ]

    # Convert search results into a DataFrame for better rendering in
    # Jupyter. NOTE(review): .dict() is the Pydantic v1 API; on Pydantic
    # v2 this emits a deprecation warning (model_dump() is the successor).
    search_results_df = pd.DataFrame(
        [item.dict() for item in search_results_models]
    )

    # Generate system response using OpenAI's chat completion, passing the
    # search results (stringified DataFrame) as context.
    completion = openai.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {
                "role": "system",
                "content": "You are a airbnb listing recommendation system."},
            {
                "role": "user",
                "content": f"Answer this user query: {query} with the following context:\n{search_results_df}"
            }
        ]
    )

    system_response = completion.choices[0].message.content

    # Print User Question, System Response, and Source Information.
    print(f"- User Question:\n{query}\n")
    print(f"- System Response:\n{system_response}\n")

    # Display the DataFrame as an HTML table.
    display(HTML(search_results_df.to_html()))

    # Return the model's response text.
    return system_response