from autogen import AssistantAgent, UserProxyAgent, config_list_from_json

# Load LLM inference endpoints from an env variable or a file
# See https://microsoft.github.io/autogen/docs/FAQ#set-your-api-endpoints
# and OAI_CONFIG_LIST_sample
config_list = config_list_from_json(env_or_file="OAI_CONFIG_LIST")
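# For reference, a minimal sketch of what OAI_CONFIG_LIST contains (see the docs
# linked above): a JSON list of endpoint configs. The model name and key below
# are placeholders, not values from this example.
# [
#     {
#         "model": "gpt-4",
#         "api_key": "<your OpenAI API key here>"
#     }
# ]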
# The assistant agent uses the LLM endpoints configured above to plan and write code.
assistant = AssistantAgent("assistant", llm_config={"config_list": config_list})
# The user proxy stands in for the human user: it executes the code blocks the
# assistant writes and feeds the results back into the conversation.
user_proxy = UserProxyAgent(
    "user_proxy", code_execution_config={"work_dir": "coding", "use_docker": False}
)  # IMPORTANT: set use_docker to True to run code in Docker, recommended
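# A Docker-based variant might look like the line below (a sketch: use_docker
# also accepts an image name, and "python:3" here is only an illustrative tag):
# code_execution_config={"work_dir": "coding", "use_docker": "python:3"}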
# Start the chat: the two agents message each other (executing code along the
# way) until the task is complete.
user_proxy.initiate_chat(assistant, message="Plot a chart of NVDA and TESLA stock price change YTD.")
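
# Fully automated variant (a sketch, not part of the original example):
# human_input_mode and max_consecutive_auto_reply are real UserProxyAgent
# parameters; the values below are illustrative assumptions.
# user_proxy = UserProxyAgent(
#     "user_proxy",
#     human_input_mode="NEVER",        # never pause to ask the human for input
#     max_consecutive_auto_reply=10,   # stop after 10 automatic replies
#     code_execution_config={"work_dir": "coding", "use_docker": False},
# )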