from langchain_openai import OpenAI
from transformers import AutoModelForCausalLM
import os

# Allow the HuggingFace tokenizers library to use parallelism.
os.environ["TOKENIZERS_PARALLELISM"] = "true"

# Load the OpenAI API key from a local config module if it is not already set.
if "OPENAI_API_KEY" not in os.environ:
    from config_key import OPENAI_API_KEY
    os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY

# Deterministic OpenAI completion model used as the default LLM.
openai_llm = OpenAI(temperature=0, model="gpt-3.5-turbo-instruct")

# Optional open-source alternative (LLaMA 2 model loaded via transformers).
# opensource_llm = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
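
# Quick usage sketch (an assumption, not part of the original setup): calling
# .invoke() on the langchain_openai OpenAI wrapper sends the prompt to the
# completions endpoint and returns the generated text as a plain string.
# The prompt below is an illustrative placeholder; uncomment to verify that
# the API key and model are configured correctly.
# print(openai_llm.invoke("Say hello in one short sentence."))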