from flaml import autogen

# Set up configurations
config_list = autogen.config_list_from_json(
    "OAI_CONFIG_LIST",
    filter_dict={
        "model": ["gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
    },
)
llm_config = {
    "request_timeout": 600,
    "seed": 42,
    "config_list": config_list,
    "temperature": 0,
}

# Construct the assistant agent
assistant = autogen.AssistantAgent(
    name="assistant",
    llm_config=llm_config,
)

# Construct the user proxy agent
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    human_input_mode="NEVER",
    max_consecutive_auto_reply=10,
    is_termination_msg=lambda x: x.get("content", "").rstrip().endswith("TERMINATE"),
    code_execution_config={
        "work_dir": "coding",
        "use_docker": False,  # set to True or an image name like "python:3" to run code in Docker
    },
)

# Example task: plot a chart
# Start a conversation between the user proxy and the assistant
user_proxy.initiate_chat(
    assistant,
    message="""Plot a chart of NVIDIA and TESLA stock price gain YTD""",
)
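
# Note: config_list_from_json above loads model configurations from an environment
# variable or a file named "OAI_CONFIG_LIST" containing a JSON list of entries.
# A minimal sketch of such a file is shown below (the api_key value is a placeholder,
# not a real credential; add or remove entries to match the models you have access to):
#
# [
#     {
#         "model": "gpt-4",
#         "api_key": "<your OpenAI API key here>"
#     },
#     {
#         "model": "gpt-4-32k",
#         "api_key": "<your OpenAI API key here>"
#     }
# ]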