import os
import logging
from typing import Optional, Union, List, Dict, Any, Tuple
from unittest.mock import patch, Mock, MagicMock

import pytest
from prompthub import Prompt
from transformers import GenerationConfig, TextStreamer

from haystack import Document, Pipeline, BaseComponent, MultiLabel
from haystack.nodes.prompt import PromptTemplate, PromptNode, PromptModel
from haystack.nodes.prompt.prompt_template import LEGACY_DEFAULT_TEMPLATES
from haystack.nodes.prompt.invocation_layer import (
    HFLocalInvocationLayer,
    DefaultTokenStreamingHandler,
    AzureChatGPTInvocationLayer,
    AzureOpenAIInvocationLayer,
    OpenAIInvocationLayer,
    ChatGPTInvocationLayer,
)


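# The fixture below patches fetch_from_prompthub so these tests never reach the remote Prompt Hub:
# every fetch returns a small, fixed Prompt object, and the mock is yielded so tests can inspect calls.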
@pytest.fixture
def mock_prompthub():
    with patch("haystack.nodes.prompt.prompt_template.fetch_from_prompthub") as mock_prompthub:
        mock_prompthub.return_value = Prompt(
            name="deepset/test",
            tags=["test"],
            meta={"author": "test"},
            version="v0.0.0",
            text="This is a test prompt. Use your knowledge to answer this question: {question}",
            description="test prompt",
        )
        yield mock_prompthub


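# Helper used by integration tests: skips the test when no real API key is available
# (the "KEY_NOT_FOUND" placeholder is assumed to be set by the prompt_model fixture when the env var is missing).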
def skip_test_for_invalid_key(prompt_model):
    if prompt_model.api_key is not None and prompt_model.api_key == "KEY_NOT_FOUND":
        pytest.skip("No API key found, skipping test")


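# Indirect fixture: returns the provider-specific API key from the environment, or None if it is not set.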
@pytest.fixture
def get_api_key(request):
    if request.param == "openai":
        return os.environ.get("OPENAI_API_KEY", None)
    elif request.param == "azure":
        return os.environ.get("AZURE_OPENAI_API_KEY", None)


@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_prompt_passing_template(mock_model):
    # Make model always return something positive on invoke
    mock_model.return_value.invoke.return_value = ["positive"]

    # Create a template
    template = PromptTemplate(
        "Please give a sentiment for this context. Answer with positive, "
        "negative or neutral. Context: {documents}; Answer:"
    )

    # Execute prompt
    node = PromptNode()
    result = node.prompt(template, documents=["Berlin is an amazing city."])

    assert result == ["positive"]


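# The next few tests check that calling a PromptNode directly (node(...)) simply forwards to
# PromptNode.prompt() with the default template and passes any keyword arguments through unchanged.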
@pytest.mark.unit
@patch.object(PromptNode, "prompt")
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_prompt_call_with_no_kwargs(mock_model, mocked_prompt):
    node = PromptNode()
    node()
    mocked_prompt.assert_called_once_with(node.default_prompt_template)


@pytest.mark.unit
@patch.object(PromptNode, "prompt")
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_prompt_call_with_custom_kwargs(mock_model, mocked_prompt):
    node = PromptNode()
    node(some_kwarg="some_value")
    mocked_prompt.assert_called_once_with(node.default_prompt_template, some_kwarg="some_value")


@pytest.mark.unit
@patch.object(PromptNode, "prompt")
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_prompt_call_with_custom_template(mock_model, mocked_prompt):
    node = PromptNode()
    mock_template = Mock()
    node(prompt_template=mock_template)
    mocked_prompt.assert_called_once_with(mock_template)


@pytest.mark.unit
@patch.object(PromptNode, "prompt")
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_prompt_call_with_custom_kwargs_and_template(mock_model, mocked_prompt):
    node = PromptNode()
    mock_template = Mock()
    node(prompt_template=mock_template, some_kwarg="some_value")
    mocked_prompt.assert_called_once_with(mock_template, some_kwarg="some_value")


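# The tests below cover the different inputs get_prompt_template() accepts: no argument (default template),
# a legacy template name, a Prompt Hub name, a local YAML file, a PromptTemplate object, and raw prompt text.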
@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_get_prompt_template_no_default_template(mock_model):
    node = PromptNode()
    assert node.get_prompt_template() is None


@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_get_prompt_template_from_legacy_default_template(mock_model):
    node = PromptNode()
    template = node.get_prompt_template("question-answering")
    assert template.name == "question-answering"
    assert template.prompt_text == LEGACY_DEFAULT_TEMPLATES["question-answering"]["prompt"]


@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_get_prompt_template_with_default_template(mock_model, mock_prompthub):
    node = PromptNode()
    node.default_prompt_template = "deepset/test-prompt"

    template = node.get_prompt_template()
    assert template.name == "deepset/test-prompt"


@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_get_prompt_template_name_from_hub(mock_model, mock_prompthub):
    node = PromptNode()
    template = node.get_prompt_template("deepset/test-prompt")
    assert template.name == "deepset/test-prompt"


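# get_prompt_template() also accepts a path to a YAML file on disk; the test below writes a minimal
# template file into tmp_path and checks that its name and text are loaded correctly.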
@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_get_prompt_template_local_file(mock_model, tmp_path, mock_prompthub):
    with open(tmp_path / "local_prompt_template.yml", "w") as ptf:
        ptf.write(
            """
name: my_prompts/question-answering
text: |
    Given the context please answer the question. Context: {join(documents)};
    Question: {query};
    Answer:
description: A simple prompt to answer a question given a set of documents
tags:
  - question-answering
meta:
  authors:
    - vblagoje
version: v0.1.1
"""
        )
    node = PromptNode()
    template = node.get_prompt_template(str(tmp_path / "local_prompt_template.yml"))
    assert template.name == "my_prompts/question-answering"
    assert "Given the context" in template.prompt_text


@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_get_prompt_template_object(mock_model, mock_prompthub):
    node = PromptNode()
    original_template = PromptTemplate("fake-template")
    template = node.get_prompt_template(original_template)
    assert template == original_template


@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_get_prompt_template_wrong_template_name(mock_model):
    with patch("haystack.nodes.prompt.prompt_template.prompthub") as mock_prompthub:

        def not_found(*a, **k):
            raise ValueError("'some-unsupported-template' not supported!")

        mock_prompthub.fetch.side_effect = not_found
        node = PromptNode()
        with pytest.raises(ValueError, match="not supported"):
            node.get_prompt_template("some-unsupported-template")


@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_get_prompt_template_only_template_text(mock_model, mock_prompthub):
    node = PromptNode()
    template = node.get_prompt_template("some prompt")
    assert template.name == "custom-at-query-time"


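# Passing kwargs that do not match the template's declared parameters should raise a ValueError.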
@pytest.mark.unit
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_invalid_template_params(mock_model, mock_prompthub):
    node = PromptNode()
    with pytest.raises(ValueError, match="Expected prompt parameters"):
        node.prompt("question-answering-per-document", some_crazy_key="Berlin is the capital of Germany.")


@pytest.mark.unit
@patch("haystack.nodes.prompt.invocation_layer.open_ai.load_openai_tokenizer", lambda tokenizer_name: None)
def test_azure_vs_open_ai_invocation_layer_selection():
    """
    Tests that the correct invocation layer is selected based on the model name and additional parameters.
    As we support both OpenAI and Azure models, we need to make sure that the correct invocation layer is
    selected for each combination of model name and Azure-specific model kwargs.
    """
    azure_model_kwargs = {
        "azure_base_url": "https://some_unimportant_url",
        "azure_deployment_name": "https://some_unimportant_url.azurewebsites.net/api/prompt",
    }

    node = PromptNode("gpt-4", api_key="some_key", model_kwargs=azure_model_kwargs)
    assert isinstance(node.prompt_model.model_invocation_layer, AzureChatGPTInvocationLayer)

    node = PromptNode("text-davinci-003", api_key="some_key", model_kwargs=azure_model_kwargs)
    assert isinstance(node.prompt_model.model_invocation_layer, AzureOpenAIInvocationLayer)

    node = PromptNode("gpt-4", api_key="some_key")
    assert isinstance(node.prompt_model.model_invocation_layer, ChatGPTInvocationLayer) and not isinstance(
        node.prompt_model.model_invocation_layer, AzureChatGPTInvocationLayer
    )

    node = PromptNode("text-davinci-003", api_key="some_key")
    assert isinstance(node.prompt_model.model_invocation_layer, OpenAIInvocationLayer) and not isinstance(
        node.prompt_model.model_invocation_layer, AzureChatGPTInvocationLayer
    )


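# The integration test below runs a real pipeline; "hf" refers to a prompt_model fixture (assumed to be
# defined in conftest.py) that loads a local Hugging Face model, so no external API key is required.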
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["hf"], indirect=True)
def test_simple_pipeline(prompt_model):
    """
    Tests that a pipeline with a prompt node and prompt template has the right output structure.
    """
    output_variable_name = "out"
    node = PromptNode(prompt_model, default_prompt_template="sentiment-analysis", output_variable=output_variable_name)

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    result = pipe.run(query="not relevant", documents=[Document("Berlin is an amazing city.")])

    # validate output variable present
								
									
								 
							
							
								    assert  output_variable_name  in  result 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								    assert  len ( result [ output_variable_name ] )  ==  1 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								    # validate pipeline parameters are present 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								    assert  " query "  in  result 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								    assert  " documents "  in  result 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								    # and that so-called invocation context contains the right keys 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								    assert  " invocation_context "  in  result 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								    assert  all ( item  in  result [ " invocation_context " ]  for  item  in  [ " query " ,  " documents " ,  output_variable_name ,  " prompts " ] ) 
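
    # The `invocation_context` checked above is, as far as this test is concerned, the shared
    # dictionary the PromptNode carries through the pipeline run; it is expected to collect the
    # inputs ("query", "documents"), the configured output variable, and the rendered "prompts".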


@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["hf", "openai", "azure"], indirect=True)
def test_complex_pipeline(prompt_model):
    # TODO: This is a unit test?
    skip_test_for_invalid_key(prompt_model)

    node = PromptNode(prompt_model, default_prompt_template="question-generation", output_variable="query")
    node2 = PromptNode(prompt_model, default_prompt_template="question-answering-per-document")

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    pipe.add_node(component=node2, name="prompt_node_2", inputs=["prompt_node"])
    result = pipe.run(query="not relevant", documents=[Document("Berlin is the capital of Germany")])

    assert "berlin" in result["answers"][0].answer.casefold()
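
    # Two assumptions worth spelling out: `skip_test_for_invalid_key` is taken to be a helper
    # (presumably defined earlier in this module or in conftest) that calls pytest.skip() when no
    # valid API key is available for the selected provider, and the chaining works because the
    # first node writes its generated question into "query" (via output_variable), which the
    # "question-answering-per-document" node then consumes as its input.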


@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["hf", "openai", "azure"], indirect=True)
def test_simple_pipeline_with_topk(prompt_model):
    # TODO: This can be a unit test?
    skip_test_for_invalid_key(prompt_model)

    node = PromptNode(prompt_model, default_prompt_template="question-generation", output_variable="query", top_k=2)

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    result = pipe.run(query="not relevant", documents=[Document("Berlin is the capital of Germany")])

    assert len(result["query"]) == 2
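
    # `top_k=2` on the PromptNode is taken here to mean two independently generated completions
    # per input, which is why exactly two questions are expected under result["query"].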


@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["hf", "openai", "azure"], indirect=True)
def test_pipeline_with_standard_qa(prompt_model):
    # TODO: Unit test?
    skip_test_for_invalid_key(prompt_model)
    node = PromptNode(prompt_model, default_prompt_template="question-answering", top_k=1)

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    result = pipe.run(
        query="Who lives in Berlin?",  # this being a string instead of a list is what is being tested
        documents=[
            Document("My name is Carla and I live in Berlin", id="1"),
            Document("My name is Christelle and I live in Paris", id="2"),
        ],
    )

    assert len(result["answers"]) == 1
    assert "carla" in result["answers"][0].answer.casefold()

    assert result["answers"][0].document_ids == ["1", "2"]
    assert (
        result["answers"][0].meta["prompt"]
        == "Given the context please answer the question. Context: My name is Carla and I live in Berlin My name is Christelle and I live in Paris; "
        "Question: Who lives in Berlin?; Answer:"
    )
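
    # Both document ids show up above presumably because the plain "question-answering" template
    # has no reference pattern to parse citations from the answer, so the AnswerParser attributes
    # the answer to every document that went into the prompt.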


@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["openai", "azure"], indirect=True)
def test_pipeline_with_qa_with_references(prompt_model):
    skip_test_for_invalid_key(prompt_model)
    node = PromptNode(prompt_model, default_prompt_template="question-answering-with-references", top_k=1)

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    result = pipe.run(
        query="Who lives in Berlin?",  # this being a string instead of a list is what is being tested
        documents=[
            Document("My name is Carla and I live in Berlin", id="1"),
            Document("My name is Christelle and I live in Paris", id="2"),
        ],
    )

    assert len(result["answers"]) == 1
    assert "carla, as stated in document[1]" in result["answers"][0].answer.casefold()

    assert result["answers"][0].document_ids == ["1"]
    assert (
        result["answers"][0].meta["prompt"]
        == "Create a concise and informative answer (no more than 50 words) for a given question based solely on the given documents. "
        "You must only use information from the given documents. Use an unbiased and journalistic tone. Do not repeat text. Cite the documents using Document[number] notation. "
        "If multiple documents contain the answer, cite those documents like ‘as stated in Document[number], Document[number], etc.’. If the documents do not contain the answer to the question, "
        "say that ‘answering is not possible given the available information.’\n\nDocument[1]: My name is Carla and I live in Berlin\n\nDocument[2]: My name is Christelle and I live in Paris \n "
        "Question: Who lives in Berlin?; Answer: "
    )
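
    # Here, by contrast, only Document[1] is expected in `document_ids`: the
    # "question-answering-with-references" template presumably pairs its prompt with an
    # AnswerParser reference pattern, so only the documents actually cited in the answer
    # ("as stated in document[1]") are kept.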


@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["openai", "azure"], indirect=True)
def test_pipeline_with_prompt_text_at_query_time(prompt_model):
    skip_test_for_invalid_key(prompt_model)
    node = PromptNode(prompt_model, default_prompt_template="test prompt template text", top_k=1)

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    result = pipe.run(
        query="Who lives in Berlin?",  # this being a string instead of a list is what is being tested
        documents=[
            Document("My name is Carla and I live in Berlin", id="1"),
            Document("My name is Christelle and I live in Paris", id="2"),
        ],
        params={
            "prompt_template": "Create a concise and informative answer (no more than 50 words) for a given question based solely on the given documents. Cite the documents using Document[number] notation.\n\n{join(documents, delimiter=new_line+new_line, pattern='Document[$idx]: $content')}\n\nQuestion: {query}\n\nAnswer: "
        },
    )

    assert len(result["answers"]) == 1
    assert "carla" in result["answers"][0].answer.casefold()

    assert result["answers"][0].document_ids == ["1"]
    assert (
        result["answers"][0].meta["prompt"]
        == "Create a concise and informative answer (no more than 50 words) for a given question based solely on the given documents. Cite the documents using Document[number] notation.\n\n"
        "Document[1]: My name is Carla and I live in Berlin\n\nDocument[2]: My name is Christelle and I live in Paris\n\n"
        "Question: Who lives in Berlin?\n\nAnswer: "
    )
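
    # The point of this test is that a raw prompt string can be passed at query time through
    # `params={"prompt_template": "..."}` and overrides the node's default template for that
    # single run. A minimal, hypothetical variant of the same idea:
    #
    #     pipe.run(query="...", documents=[...], params={"prompt_template": "Answer briefly: {query}"})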


@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["openai", "azure"], indirect=True)
def test_pipeline_with_prompt_template_at_query_time(prompt_model):
    # TODO: This should be just an AnswerParser unit test and some PromptTemplate unit tests
    skip_test_for_invalid_key(prompt_model)
    node = PromptNode(prompt_model, default_prompt_template="question-answering-with-references", top_k=1)

    prompt_template_yaml = """
            name: "question-answering-with-references-custom"
            prompt_text: 'Create a concise and informative answer (no more than 50 words) for
                a given question based solely on the given documents. Cite the documents using Doc[number] notation.

                {join(documents, delimiter=new_line+new_line, pattern=''Doc[$idx]: $content'')}

                Question: {query}

                Answer: '
            output_parser:
                type: AnswerParser
                params:
                    reference_pattern: Doc\\[([^\\]]+)\\]
        """

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    result = pipe.run(
        query="Who lives in Berlin?",  # this being a string instead of a list is what is being tested
        documents=[
            Document("My name is Carla and I live in Berlin", id="doc-1"),
            Document("My name is Christelle and I live in Paris", id="doc-2"),
        ],
        params={"prompt_template": prompt_template_yaml},
    )

    assert len(result["answers"]) == 1
    assert "carla" in result["answers"][0].answer.casefold()

    assert result["answers"][0].document_ids == ["doc-1"]
    assert (
        result["answers"][0].meta["prompt"]
        == "Create a concise and informative answer (no more than 50 words) for a given question based solely on the given documents. Cite the documents using Doc[number] notation.\n\n"
        "Doc[1]: My name is Carla and I live in Berlin\n\nDoc[2]: My name is Christelle and I live in Paris\n\n"
        "Question: Who lives in Berlin?\n\nAnswer: "
    )
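
    # The custom YAML template above switches the citation marker to Doc[number] and sets
    # reference_pattern to Doc\[([^\]]+)\], so the AnswerParser is expected to map the cited
    # index (Doc[1]) back to the id of the first prompt document, hence document_ids == ["doc-1"].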


@pytest.mark.skip
@pytest.mark.integration
def test_pipeline_with_prompt_template_and_nested_shaper_yaml(tmp_path):
    # TODO: This can be a Shaper unit test?
    with open(tmp_path / "tmp_config_with_prompt_template.yml", "w") as tmp_file:
        tmp_file.write(
            f"""
            version: ignore
            components:
            - name: template_with_nested_shaper
              type: PromptTemplate
              params:
                prompt: "Given the context please answer the question. Context: {{documents}}; Question: {{query}}; Answer: "
                output_parser:
                  type: AnswerParser
            - name: p1
              params:
                model_name_or_path: google/flan-t5-small
                default_prompt_template: template_with_nested_shaper
              type: PromptNode
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
        """
        )

    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config_with_prompt_template.yml")
    result = pipeline.run(query="What is an amazing city?", documents=[Document("Berlin is an amazing city.")])
    answer = result["answers"][0].answer
    assert any(word for word in ["berlin", "germany", "population", "city", "amazing"] if word in answer.casefold())
    assert (
        result["answers"][0].meta["prompt"]
        == "Given the context please answer the question. Context: Berlin is an amazing city.; Question: What is an amazing city?; Answer: "
    )
											2023-04-06 14:47:44 +02:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							


@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["hf"], indirect=True)
def test_prompt_node_no_debug(prompt_model):
    # TODO: This is another unit test
    """Pipeline with PromptNode should not generate debug info if debug is false."""

    node = PromptNode(prompt_model, default_prompt_template="question-generation", top_k=2)
    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])

    # debug explicitly False
    result = pipe.run(query="not relevant", documents=[Document("Berlin is the capital of Germany")], debug=False)
    assert result.get("_debug", "No debug info") == "No debug info"

    # debug None
    result = pipe.run(query="not relevant", documents=[Document("Berlin is the capital of Germany")], debug=None)
    assert result.get("_debug", "No debug info") == "No debug info"

    # debug True
    result = pipe.run(query="not relevant", documents=[Document("Berlin is the capital of Germany")], debug=True)
    assert (
        result["_debug"]["prompt_node"]["runtime"]["prompts_used"][0]
        == "Given the context please generate a question. Context: Berlin is the capital of Germany; Question:"
    )
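
# Note on the debug assertions above: when a Pipeline is run with debug=True, the per-node debug
# payload is returned under result["_debug"], and for a node named "prompt_node" the prompts that
# were actually sent to the model appear under
# result["_debug"]["prompt_node"]["runtime"]["prompts_used"]; with debug=False or debug=None no
# "_debug" key is present at all, which is exactly what the three cases above check.
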


@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.parametrize("prompt_model", ["hf", "openai", "azure"], indirect=True)
def test_complex_pipeline_with_qa(prompt_model):
    # TODO: Not a PromptNode test, this maybe can be a unit test
    """Test the PromptNode when `query` is a string instead of the list the PromptNode expects:
    in a question-answering pipeline the retrievers need `query` as a string, so the PromptNode
    needs to be able to handle `query` being a string instead of a list."""
    skip_test_for_invalid_key(prompt_model)

    prompt_template = PromptTemplate(
        "Given the context please answer the question. Context: {documents}; Question: {query}; Answer:"
    )
    node = PromptNode(prompt_model, default_prompt_template=prompt_template)

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    result = pipe.run(
        query="Who lives in Berlin?",  # this being a string (rather than a list) is what is being tested
        documents=[
            Document("My name is Carla and I live in Berlin"),
            Document("My name is Christelle and I live in Paris"),
        ],
        debug=True,  # so we can verify that the constructed prompt is returned in debug
    )

    assert len(result["results"]) == 2
    assert "carla" in result["results"][0].casefold()

    # also verify that the PromptNode has included its constructed prompt LLM model input in the returned debug
    assert (
        result["_debug"]["prompt_node"]["runtime"]["prompts_used"][0]
        == "Given the context please answer the question. Context: My name is Carla and I live in Berlin; "
        "Question: Who lives in Berlin?; Answer:"
    )
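
# Observation based on the assertions above (not on additional PromptNode internals): with two input
# documents the custom template produces two results, and the first recorded prompt contains only the
# first document, i.e. one prompt is built per document while the single string `query` is reused in
# each of them.
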


@pytest.mark.skip
@pytest.mark.integration
def test_complex_pipeline_with_shared_model():
    # TODO: What is this testing? Can this be a unit test?
    model = PromptModel()
    node = PromptNode(model_name_or_path=model, default_prompt_template="question-generation", output_variable="query")
    node2 = PromptNode(model_name_or_path=model, default_prompt_template="question-answering-per-document")

    pipe = Pipeline()
    pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
    pipe.add_node(component=node2, name="prompt_node_2", inputs=["prompt_node"])
    result = pipe.run(query="not relevant", documents=[Document("Berlin is the capital of Germany")])

    assert result["answers"][0].answer == "Berlin"
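
# Here both PromptNodes are built around the same PromptModel instance, so one underlying model can
# serve both nodes instead of each node loading its own copy. A minimal sketch of the same pattern
# (assuming the default PromptModel is acceptable):
#
#   shared = PromptModel()
#   generator = PromptNode(model_name_or_path=shared, default_prompt_template="question-generation")
#   answerer = PromptNode(model_name_or_path=shared, default_prompt_template="question-answering-per-document")
#
# The YAML counterpart of this setup is exercised in test_complex_pipeline_with_shared_prompt_model_yaml below.
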


@pytest.mark.skip
@pytest.mark.integration
def test_simple_pipeline_yaml(tmp_path):
    # TODO: This can be a unit test just to verify that loading
    # PromptNode from yaml creates a correctly runnable Pipeline.
    # Also it could probably be renamed to test_prompt_node_yaml_loading
    with open(tmp_path / "tmp_config.yml", "w") as tmp_file:
        tmp_file.write(
            """
            version: ignore
            components:
            - name: p1
              params:
                default_prompt_template: sentiment-analysis
              type: PromptNode
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
        """
        )
    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config.yml")
    result = pipeline.run(query="not relevant", documents=[Document("Berlin is an amazing city.")])
    assert result["results"][0] == "positive"
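
# The YAML above is a minimal config of the kind this loader accepts: a `version`, a `components` list
# (each entry with `name`, `type`, and optional `params`), and a `pipelines` list wiring component
# names to their inputs. The later YAML tests in this file only add more components and edges to this
# same skeleton.
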


@pytest.mark.skip
@pytest.mark.integration
def test_simple_pipeline_yaml_with_default_params(tmp_path):
    # TODO: Is this testing yaml loading?
    with open(tmp_path / "tmp_config.yml", "w") as tmp_file:
        tmp_file.write(
            """
            version: ignore
            components:
            - name: p1
              type: PromptNode
              params:
                default_prompt_template: sentiment-analysis
                model_kwargs:
                  torch_dtype: torch.bfloat16
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
        """
        )
    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config.yml")
    assert pipeline.graph.nodes["p1"]["component"].prompt_model.model_kwargs == {"torch_dtype": "torch.bfloat16"}

    result = pipeline.run(query=None, documents=[Document("Berlin is an amazing city.")])
    assert result["results"][0] == "positive"
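
# Note: the YAML value `torch_dtype: torch.bfloat16` is kept as the plain string "torch.bfloat16" in
# PromptModel.model_kwargs, as asserted above; presumably it is resolved to an actual torch dtype
# later, when the model is invoked, rather than at YAML-loading time.
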


@pytest.mark.skip
@pytest.mark.integration
def test_complex_pipeline_yaml(tmp_path):
    # TODO: Is this testing PromptNode or Pipeline?
    with open(tmp_path / "tmp_config.yml", "w") as tmp_file:
        tmp_file.write(
            """
            version: ignore
            components:
            - name: p1
              params:
                default_prompt_template: question-generation
                output_variable: query
              type: PromptNode
            - name: p2
              params:
                default_prompt_template: question-answering-per-document
              type: PromptNode
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
              - name: p2
                inputs:
                - p1
        """
        )
    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config.yml")
    result = pipeline.run(query="not relevant", documents=[Document("Berlin is an amazing city.")])
    response = result["answers"][0].answer
    assert any(word for word in ["berlin", "germany", "population", "city", "amazing"] if word in response.casefold())
    assert len(result["invocation_context"]) > 0
    assert len(result["query"]) > 0
    assert "query" in result["invocation_context"] and len(result["invocation_context"]["query"]) > 0
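
# In the config above, p1 (question-generation) declares `output_variable: query`, so the questions it
# generates are fed to p2 (question-answering-per-document) as its `query` input; the final assertions
# check that this intermediate `query` also shows up in the returned `invocation_context`.
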


@pytest.mark.skip
@pytest.mark.integration
def test_complex_pipeline_with_shared_prompt_model_yaml(tmp_path):
    # TODO: Is this similar to test_complex_pipeline_with_shared_model?
    # Why are we testing this twice?
    with open(tmp_path / "tmp_config.yml", "w") as tmp_file:
        tmp_file.write(
            """
            version: ignore
            components:
            - name: pmodel
              type: PromptModel
            - name: p1
              params:
                model_name_or_path: pmodel
                default_prompt_template: question-generation
                output_variable: query
              type: PromptNode
            - name: p2
              params:
                model_name_or_path: pmodel
                default_prompt_template: question-answering-per-document
              type: PromptNode
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
              - name: p2
                inputs:
                - p1
        """
        )
    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config.yml")
    result = pipeline.run(query="not relevant", documents=[Document("Berlin is an amazing city.")])
    response = result["answers"][0].answer
    assert any(word for word in ["berlin", "germany", "population", "city", "amazing"] if word in response.casefold())
    assert len(result["invocation_context"]) > 0
    assert len(result["query"]) > 0
    assert "query" in result["invocation_context"] and len(result["invocation_context"]["query"]) > 0
@pytest.mark.skip
@pytest.mark.integration
def test_complex_pipeline_with_shared_prompt_model_and_prompt_template_yaml(tmp_path):
    # TODO: Is this testing PromptNode or Pipeline parsing?
    with open(tmp_path / "tmp_config_with_prompt_template.yml", "w") as tmp_file:
        tmp_file.write(
            """
            version: ignore
            components:
            - name: pmodel
              type: PromptModel
              params:
                model_name_or_path: google/flan-t5-small
                model_kwargs:
                  torch_dtype: auto
            - name: question_generation_template
              type: PromptTemplate
              params:
                prompt: "Given the context please generate a question. Context: {{documents}}; Question:"
            - name: p1
              params:
                model_name_or_path: pmodel
                default_prompt_template: question_generation_template
                output_variable: query
              type: PromptNode
            - name: p2
              params:
                model_name_or_path: pmodel
                default_prompt_template: question-answering-per-document
              type: PromptNode
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
              - name: p2
                inputs:
                - p1
        """
        )
    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config_with_prompt_template.yml")
    result = pipeline.run(query="not relevant", documents=[Document("Berlin is an amazing city.")])
    response = result["answers"][0].answer
    assert any(word for word in ["berlin", "germany", "population", "city", "amazing"] if word in response.casefold())
    assert len(result["invocation_context"]) > 0
    assert len(result["query"]) > 0
    assert "query" in result["invocation_context"] and len(result["invocation_context"]["query"]) > 0


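# Editor's note: an illustrative sketch (not part of the original test suite) of how the YAML
# configuration in the test above maps onto the programmatic API. The helper name below is made up
# for illustration; the constructor arguments are assumed to mirror the YAML params one-to-one.
def _build_shared_prompt_model_pipeline():
    # One PromptModel instance shared by both PromptNodes, mirroring the "pmodel" component above.
    pmodel = PromptModel(model_name_or_path="google/flan-t5-small", model_kwargs={"torch_dtype": "auto"})
    question_generation_template = PromptTemplate(
        prompt="Given the context please generate a question. Context: {{documents}}; Question:"
    )
    p1 = PromptNode(
        model_name_or_path=pmodel,
        default_prompt_template=question_generation_template,
        output_variable="query",
    )
    p2 = PromptNode(model_name_or_path=pmodel, default_prompt_template="question-answering-per-document")
    pipeline = Pipeline()
    pipeline.add_node(component=p1, name="p1", inputs=["Query"])
    pipeline.add_node(component=p2, name="p2", inputs=["p1"])
    return pipeline

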
@pytest.mark.skip
@pytest.mark.integration
def test_complex_pipeline_with_dummy_node_between_prompt_nodes_yaml(tmp_path):
    # TODO: This can be a unit test. Is it necessary though? Is it testing PromptNode?
    # test that we can stick some random node in between prompt nodes and that everything still works
    # most specifically, we want to ensure that invocation_context is still populated correctly and propagated
    class InBetweenNode(BaseComponent):
        outgoing_edges = 1

        def run(
            self,
            query: Optional[str] = None,
            file_paths: Optional[List[str]] = None,
            labels: Optional[MultiLabel] = None,
            documents: Optional[List[Document]] = None,
            meta: Optional[dict] = None,
        ) -> Tuple[Dict, str]:
            return {}, "output_1"

        def run_batch(
            self,
            queries: Optional[Union[str, List[str]]] = None,
            file_paths: Optional[List[str]] = None,
            labels: Optional[Union[MultiLabel, List[MultiLabel]]] = None,
            documents: Optional[Union[List[Document], List[List[Document]]]] = None,
            meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None,
            params: Optional[dict] = None,
            debug: Optional[bool] = None,
        ):
            return {}, "output_1"

    with open(tmp_path / "tmp_config_with_prompt_template.yml", "w") as tmp_file:
        tmp_file.write(
            """
            version: ignore
            components:
            - name: in_between
              type: InBetweenNode
            - name: pmodel
              type: PromptModel
              params:
                model_name_or_path: google/flan-t5-small
                model_kwargs:
                  torch_dtype: torch.bfloat16
            - name: question_generation_template
              type: PromptTemplate
              params:
                prompt: "Given the context please generate a question. Context: {{documents}}; Question:"
            - name: p1
              params:
                model_name_or_path: pmodel
                default_prompt_template: question_generation_template
                output_variable: query
              type: PromptNode
            - name: p2
              params:
                model_name_or_path: pmodel
                default_prompt_template: question-answering-per-document
              type: PromptNode
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
              - name: in_between
                inputs:
                - p1
              - name: p2
                inputs:
                - in_between
        """
        )
    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config_with_prompt_template.yml")
    result = pipeline.run(query="not relevant", documents=[Document("Berlin is an amazing city.")])
    response = result["answers"][0].answer
    assert any(word for word in ["berlin", "germany", "population", "city", "amazing"] if word in response.casefold())
    assert len(result["invocation_context"]) > 0
    assert len(result["query"]) > 0
    assert "query" in result["invocation_context"] and len(result["invocation_context"]["query"]) > 0


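# Editor's note (illustrative, not part of the original test): taken on its own, the dummy node
# defined inside the test above does nothing except route to its single edge, e.g.
#
#     node = InBetweenNode()
#     output, edge = node.run(query="irrelevant")
#     assert output == {} and edge == "output_1"
#
# The point of the test is that query and invocation_context still reach p2 even though the
# in-between node contributes nothing of its own to the output.

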
@pytest.mark.skip
@pytest.mark.parametrize("haystack_openai_config", ["openai", "azure"], indirect=True)
def test_complex_pipeline_with_all_features(tmp_path, haystack_openai_config):
    # TODO: Is this testing PromptNode or pipeline yaml parsing?
    if not haystack_openai_config:
        pytest.skip("No API key found, skipping test")

    if "azure_base_url" in haystack_openai_config:
        # don't change this indentation, it's important for the yaml to be valid
        azure_conf_yaml_snippet = f"""
                  azure_base_url: {haystack_openai_config['azure_base_url']}
                  azure_deployment_name: {haystack_openai_config['azure_deployment_name']}
        """
    else:
        azure_conf_yaml_snippet = ""

    with open(tmp_path / "tmp_config_with_prompt_template.yml", "w") as tmp_file:
        tmp_file.write(
            f"""
            version: ignore
            components:
            - name: pmodel
              type: PromptModel
              params:
                model_name_or_path: google/flan-t5-small
                model_kwargs:
                  torch_dtype: torch.bfloat16
            - name: pmodel_openai
              type: PromptModel
              params:
                model_name_or_path: text-davinci-003
                model_kwargs:
                  temperature: 0.9
                  max_tokens: 64
                  {azure_conf_yaml_snippet}
                api_key: {haystack_openai_config["api_key"]}
            - name: question_generation_template
              type: PromptTemplate
              params:
                prompt: "Given the context please generate a question. Context: {{documents}}; Question:"
            - name: p1
              params:
                model_name_or_path: pmodel_openai
                default_prompt_template: question_generation_template
                output_variable: query
              type: PromptNode
            - name: p2
              params:
                model_name_or_path: pmodel
                default_prompt_template: question-answering-per-document
              type: PromptNode
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
              - name: p2
                inputs:
                - p1
        """
        )
    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config_with_prompt_template.yml")
    result = pipeline.run(query="not relevant", documents=[Document("Berlin is a city in Germany.")])
    response = result["answers"][0].answer
    assert any(word for word in ["berlin", "germany", "population", "city", "amazing"] if word in response.casefold())
    assert len(result["invocation_context"]) > 0
    assert len(result["query"]) > 0
    assert "query" in result["invocation_context"] and len(result["invocation_context"]["query"]) > 0


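# Editor's note (illustrative, derived from the f-string pieces in the test above): for the "azure"
# parametrization, the interpolated model_kwargs block of the generated YAML ends up roughly as
#
#                 model_kwargs:
#                   temperature: 0.9
#                   max_tokens: 64
#                   azure_base_url: <value from haystack_openai_config>
#                   azure_deployment_name: <value from haystack_openai_config>
#                 api_key: <value from haystack_openai_config>
#
# which is why the in-test comment warns against changing the snippet's indentation.

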
								@pytest.mark.skip  
						 
					
						
							
								
									
										
										
										
											2023-02-27 15:00:19 +01:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							
								@pytest.mark.integration  
						 
					
						
							
								
									
										
										
										
											2023-01-26 13:38:35 +01:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							
								def  test_complex_pipeline_with_multiple_same_prompt_node_components_yaml ( tmp_path ) :  
						 
					
						
							
								
									
										
										
										
											2023-04-06 14:47:44 +02:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							
								    # TODO: Can this become a unit test? Is it actually worth as a test? 
							 
						 
					
						
							
								
									
										
										
										
											2023-01-26 13:38:35 +01:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							
								    # p2 and p3 are essentially the same PromptNode component, make sure we can use them both as is in the pipeline 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								    with  open ( tmp_path  /  " tmp_config.yml " ,  " w " )  as  tmp_file : 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								        tmp_file . write ( 
							 
						 
					
						
							
								
									
										
										
										
											2023-04-06 14:47:44 +02:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							
								            """ 
 
							 
						 
					
						
							
								
									
										
										
										
											2023-01-26 13:38:35 +01:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							
								            version :  ignore 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								            components : 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								            -  name :  p1 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								              params : 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								                default_prompt_template :  question - generation 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								              type :  PromptNode 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								            -  name :  p2 
							 
						 
					
						
							
								
							 
							
								
							 
							
								 
							
								
									
								 
							
							
								              params : 
							 
						 
					
						
							
								
									
										
										
										
											2023-03-27 12:14:11 +02:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							
								                default_prompt_template :  question - answering - per - document 
							 
						 
					
						
							
								
									
										
										
										
											2023-01-26 13:38:35 +01:00 
										
									 
								 
							 
							
								
									
										 
								
							 
							
								 
							
								
									
								 
							
							
              type: PromptNode
            - name: p3
              params:
                default_prompt_template: question-answering-per-document
              type: PromptNode
            pipelines:
            - name: query
              nodes:
              - name: p1
                inputs:
                - Query
              - name: p2
                inputs:
                - p1
              - name: p3
                inputs:
                - p2
        """
        )
    pipeline = Pipeline.load_from_yaml(path=tmp_path / "tmp_config.yml")
    assert pipeline is not None


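# The test below exercises PromptModel._ensure_token_limit on a local Hugging Face model and
# checks that the truncation warning reports the original and truncated token counts.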
@pytest.mark.integration
def test_hf_token_limit_warning(caplog):
    prompt = "Repeating text " * 200 + "Docs: Berlin is an amazing city.; Answer:"
    with caplog.at_level(logging.WARNING):
        node = PromptNode("google/flan-t5-small", devices=["cpu"])
        _ = node.prompt_model._ensure_token_limit(prompt=prompt)
        assert "The prompt has been truncated from 812 tokens to 412 tokens" in caplog.text
        assert "and answer length (100 tokens) fit within the max token limit (512 tokens)." in caplog.text


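# Pipeline.run_batch tests: the class below covers batch runs of a PromptNode-only pipeline with
# and without queries, with flat and nested document lists, and with a custom output variable.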
class TestRunBatch:
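    # The first test is parametrized over the "hf", "openai", and "azure" prompt_model fixtures but
    # is currently skipped because it proved extremely flaky.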
    @pytest.mark.skip(reason="Skipped as test is extremely flaky")
    @pytest.mark.integration
    @pytest.mark.parametrize("prompt_model", ["hf", "openai", "azure"], indirect=True)
    def test_simple_pipeline_batch_no_query_single_doc_list(self, prompt_model):
        skip_test_for_invalid_key(prompt_model)

        node = PromptNode(
            prompt_model,
            default_prompt_template="Please give a sentiment for this context. Answer with positive, negative or neutral. Context: {documents}; Answer:",
        )

        pipe = Pipeline()
        pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
        result = pipe.run_batch(
            queries=None, documents=[Document("Berlin is an amazing city."), Document("I am not feeling well.")]
        )
        assert isinstance(result["results"], list)
        assert isinstance(result["results"][0], list)
        assert isinstance(result["results"][0][0], str)
        assert "positive" in result["results"][0][0].casefold()
        assert "negative" in result["results"][1][0].casefold()

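    # Same scenario as above, but each entry in `documents` is itself a list of Documents and the
    # node writes its answers to the custom output variable "out" instead of "results".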
    @pytest.mark.integration
    @pytest.mark.parametrize("prompt_model", ["hf", "openai", "azure"], indirect=True)
    def test_simple_pipeline_batch_no_query_multiple_doc_list(self, prompt_model):
        skip_test_for_invalid_key(prompt_model)

        node = PromptNode(
            prompt_model,
            default_prompt_template="Please give a sentiment for this context. Answer with positive, negative or neutral. Context: {documents}; Answer:",
            output_variable="out",
        )

        pipe = Pipeline()
        pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
        result = pipe.run_batch(
            queries=None,
            documents=[
                [Document("Berlin is an amazing city."), Document("Paris is an amazing city.")],
                [Document("I am not feeling well.")],
            ],
        )
        assert isinstance(result["out"], list)
        assert isinstance(result["out"][0], list)
        assert isinstance(result["out"][0][0], str)
        assert all("positive" in x.casefold() for x in result["out"][0])
        assert "negative" in result["out"][1][0].casefold()

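    # The last batch test passes both queries and nested document lists, so each batch entry pairs a
    # query with its own list of documents; the run is executed with debug=True.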
    @pytest.mark.integration
    @pytest.mark.parametrize("prompt_model", ["hf", "openai", "azure"], indirect=True)
    def test_simple_pipeline_batch_query_multiple_doc_list(self, prompt_model):
        skip_test_for_invalid_key(prompt_model)

        prompt_template = PromptTemplate(
            "Given the context please answer the question. Context: {documents}; Question: {query}; Answer:"
        )
        node = PromptNode(prompt_model, default_prompt_template=prompt_template)

        pipe = Pipeline()
        pipe.add_node(component=node, name="prompt_node", inputs=["Query"])
        result = pipe.run_batch(
            queries=["Who lives in Berlin?"],
            documents=[
                [Document("My name is Carla and I live in Berlin"), Document("My name is James and I live in London")],
                [Document("My name is Christelle and I live in Paris")],
            ],
            debug=True,
        )
        assert isinstance(result["results"], list)
        assert isinstance(result["results"][0], list)
        assert isinstance(result["results"][0][0], str)


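# The two ChatGPT tests below are currently skipped: they prompt a real ChatGPT model through the
# chatgpt_prompt_model fixture and are marked for removal/refactoring (see the TODO comments).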
@pytest.mark.skip
@pytest.mark.integration
def test_chatgpt_direct_prompting(chatgpt_prompt_model):
    # TODO: This is testing ChatGPT, should be removed
    skip_test_for_invalid_key(chatgpt_prompt_model)
    pn = PromptNode(chatgpt_prompt_model)
    result = pn("Hey, I need some Python help. When should I use list comprehension?")
    assert len(result) == 1 and all(w in result[0] for w in ["comprehension", "list"])


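# Same as above, but the PromptNode is invoked with a list of OpenAI-style chat messages
# (system/user/assistant dicts) instead of a plain string prompt.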
@pytest.mark.skip
@pytest.mark.integration
def test_chatgpt_direct_prompting_w_messages(chatgpt_prompt_model):
    # TODO: This is a ChatGPTInvocationLayer unit test
    skip_test_for_invalid_key(chatgpt_prompt_model)
    pn = PromptNode(chatgpt_prompt_model)

    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Who won the world series in 2020?"},
        {"role": "assistant", "content": "The Los Angeles Dodgers won the World Series in 2020."},
        {"role": "user", "content": "Where was it played?"},
    ]

    result = pn(messages)
    assert len(result) == 1 and all(w in result[0].casefold() for w in ["arlington", "texas"])


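# The moderation test below patches check_openai_policy_violation and both invocation layers'
# _execute_openai_request, so no real OpenAI calls are made. Because mock_check has a side_effect
# (flagging only VIOLENT_TEXT), unittest.mock uses it in preference to the return_value set in case 1.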
@pytest.mark.unit
@patch("haystack.nodes.prompt.invocation_layer.open_ai.load_openai_tokenizer", lambda tokenizer_name: None)
@patch("haystack.nodes.prompt.prompt_model.PromptModel._ensure_token_limit", lambda self, prompt: prompt)
def test_content_moderation_gpt_3_and_gpt_3_5():
    """
    Check all possible cases of the moderation checks passing / failing in a PromptNode call
    for both ChatGPTInvocationLayer and OpenAIInvocationLayer.
    """
    prompt_node_gpt_3_5 = PromptNode(
        model_name_or_path="gpt-3.5-turbo", api_key="key", model_kwargs={"moderate_content": True}
    )
    prompt_node_gpt_3 = PromptNode(
        model_name_or_path="text-davinci-003", api_key="key", model_kwargs={"moderate_content": True}
    )
    with patch("haystack.nodes.prompt.invocation_layer.open_ai.check_openai_policy_violation") as mock_check, patch(
        "haystack.nodes.prompt.invocation_layer.chatgpt.ChatGPTInvocationLayer._execute_openai_request"
    ) as mock_execute_gpt_3_5, patch(
        "haystack.nodes.prompt.invocation_layer.open_ai.OpenAIInvocationLayer._execute_openai_request"
    ) as mock_execute_gpt_3:
        VIOLENT_TEXT = "some violent text"
        mock_check.side_effect = lambda input, headers: input == VIOLENT_TEXT or input == [VIOLENT_TEXT]
        # case 1: prompt fails the moderation check
        # prompt should not be sent to OpenAI and the call should return an empty list
        mock_check.return_value = True
        assert prompt_node_gpt_3_5(VIOLENT_TEXT) == prompt_node_gpt_3(VIOLENT_TEXT) == []
        # case 2: prompt passes the moderation check but the generated output fails the check
        # the call should also return an empty list
        mock_execute_gpt_3_5.return_value = mock_execute_gpt_3.return_value = [VIOLENT_TEXT]
        assert prompt_node_gpt_3_5("normal prompt") == prompt_node_gpt_3("normal prompt") == []
        # case 3: both prompt and output pass the moderation check
        # the call should return the output
        mock_execute_gpt_3_5.return_value = mock_execute_gpt_3.return_value = ["normal output"]
        assert prompt_node_gpt_3_5("normal prompt") == prompt_node_gpt_3("normal prompt") == ["normal output"]


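# The final test mocks out PromptModel entirely and checks that running a PromptNode whose template
# expects {documents} without supplying any logs a warning and continues with an empty document list.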
@patch("haystack.nodes.prompt.prompt_node.PromptModel")
def test_prompt_node_warns_about_missing_documents(mock_model, caplog):
    lfqa_prompt = PromptTemplate(
        prompt="""Synthesize a comprehensive answer from the following text for the given question.
        Provide a clear and concise response that summarizes the key points and information presented in the text.
        Your answer should be in your own words and be no longer than 50 words.
        If the answer is not in the text, say "I don't know".
        \n\n Related text: {join(documents)} \n\n Question: {query} \n\n Answer:"""
    )
    prompt_node = PromptNode(default_prompt_template=lfqa_prompt)

    with caplog.at_level(logging.WARNING):
        results, _ = prompt_node.run(query="non-matching query")
        assert (
            "Expected prompt parameter 'documents' to be provided but it is missing. "
            "Continuing with an empty list of documents." in caplog.text
        )