Add basic demo UI via streamlit (#671)

* Added starter code for frontend demo

* worked on comments

* Added Docker config for frontend

* update docker file. restructure folder structure. minimal renamings and defaults

* add screenshot to readme

Co-authored-by: Malte Pietsch <malte.pietsch@deepset.ai>
This commit is contained in:
Tanmay Laud 2020-12-27 18:06:09 +05:30 committed by GitHub
parent fc521fe293
commit 7cd9e09491
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 124 additions and 4 deletions

View File

@ -1,4 +1,4 @@
version: '3'
version: "3"
services:
haystack-api:
build:
@ -27,10 +27,16 @@ services:
command: "/bin/bash -c 'sleep 15 && gunicorn rest_api.application:app -b 0.0.0.0 -k uvicorn.workers.UvicornWorker --workers 1 --timeout 180 --preload'"
elasticsearch:
# This will start an empty elasticsearch instance (so you have to add your documents yourself)
image: "elasticsearch:7.6.1"
#image: "elasticsearch:7.6.1"
# If you want a demo image instead that is "ready-to-query" with some indexed Game of Thrones articles:
#image: "deepset/elasticsearch-game-of-thrones"
image: "deepset/elasticsearch-game-of-thrones"
ports:
- 9200:9200
environment:
- discovery.type=single-node
- discovery.type=single-node
ui:
image: "deepset/haystack-streamlit-ui"
ports:
- 8501:8501
environment:
- API_ENDPOINT=http://haystack-api:8000

18
ui/Dockerfile Normal file
View File

@ -0,0 +1,18 @@
FROM python:3.7.4-stretch

WORKDIR /home/user

# build/runtime tooling kept from the original recipe; skip recommended pkgs
# and drop the apt cache in the same layer so it never lands in the image
RUN apt-get update && apt-get install -y --no-install-recommends \
      cmake \
      curl \
      git \
      pkg-config \
    && rm -rf /var/lib/apt/lists/*

# install Python deps BEFORE copying source so this layer stays cached
# until requirements.txt itself changes
COPY requirements.txt /home/user/
RUN pip install --no-cache-dir -r requirements.txt

# copy application code
COPY utils.py /home/user/
COPY webapp.py /home/user/

# streamlit's default serving port (documentation only; publish via compose)
EXPOSE 8501

# cmd for running the streamlit UI
CMD ["streamlit", "run", "webapp.py"]

24
ui/README.md Normal file
View File

@ -0,0 +1,24 @@
## Demo UI
This is a minimal UI that you can spin up to test Haystack in your prototypes. It's based on streamlit and is very easy to extend for your purposes.
![Screenshot](https://raw.githubusercontent.com/deepset-ai/haystack/master/docs/_src/img/streamlit_ui_screenshot.png)
## Usage
### Option 1: Local
Execute in this folder:
```
streamlit run webapp.py
```
Requirements: This expects a running Haystack REST API at `http://localhost:8000`
### Option 2: Container
Just run
```
docker-compose up -d
```
in the root folder of the Haystack repository. This will start three containers (Elasticsearch, Haystack API, Haystack UI).
You can find the UI at `http://localhost:8501`

2
ui/requirements.txt Normal file
View File

@ -0,0 +1,2 @@
streamlit==0.72.0
st-annotated-text==1.0.1

42
ui/utils.py Normal file
View File

@ -0,0 +1,42 @@
import requests
import streamlit as st
import os
# Base URL of the Haystack REST API; override with the API_ENDPOINT env var
# (docker-compose sets it to http://haystack-api:8000)
API_ENDPOINT = os.getenv("API_ENDPOINT", "http://localhost:8000")
# Path segments used to build the query URL: /models/<MODEL_ID>/<DOC_REQUEST>
MODEL_ID = "1"
DOC_REQUEST = "doc-qa"
def format_request(question, filters=None, top_k_reader=5, top_k_retriever=5):
    """Build the JSON payload for the Haystack doc-qa REST endpoint.

    :param question: query string to send to the API.
    :param filters: optional filter value; when given it is wrapped as
        ``{"option1": [filters]}`` in the payload.
    :param top_k_reader: number of answers the reader should return.
    :param top_k_retriever: number of documents the retriever should fetch.
    :return: dict ready to be POSTed as JSON.
    """
    request = {
        "questions": [question],
        "top_k_retriever": top_k_retriever,
        "top_k_reader": top_k_reader,
    }
    # `is not None` instead of `== None` (PEP 8); build the common payload
    # once instead of duplicating the dict literal in both branches
    if filters is not None:
        request["filters"] = {"option1": [filters]}
    return request
@st.cache(show_spinner=False)
def retrieve_doc(question, filters=None, top_k_reader=5, top_k_retriever=5):
    """Query the Haystack REST API and reformat the raw answers for the UI.

    :param question: query string.
    :param filters: optional filter forwarded to :func:`format_request`.
    :param top_k_reader: max number of answers to request and format.
    :param top_k_retriever: number of documents the retriever should fetch.
    :return: tuple ``(result, response_raw)`` where ``result`` is a list of
        dicts with keys ``context``, ``answer``, ``source``, ``relevance``
        and ``response_raw`` is the unmodified JSON response.
    """
    # Query Haystack API
    url = API_ENDPOINT + "/models/" + MODEL_ID + "/" + DOC_REQUEST
    req = format_request(question, filters, top_k_reader=top_k_reader, top_k_retriever=top_k_retriever)
    response_raw = requests.post(url, json=req).json()

    # Format response. Slice instead of range(top_k_reader): the API may
    # return fewer answers than requested, and indexing answers[i] blindly
    # raised IndexError in that case.
    result = []
    answers = response_raw['results'][0]['answers']
    for candidate in answers[:top_k_reader]:
        answer = candidate['answer']
        if answer:  # skip "no answer" entries
            result.append({
                'context': '...' + candidate['context'] + '...',
                'answer': answer,
                'source': candidate['meta']['name'],
                'relevance': round(candidate['probability'] * 100, 2),
            })
    return result, response_raw

28
ui/webapp.py Normal file
View File

@ -0,0 +1,28 @@
import streamlit as st
from utils import retrieve_doc
from annotated_text import annotated_text
def annotate_answer(answer, context):
    """Render `context` with the `answer` span highlighted via st-annotated-text."""
    start_idx = context.find(answer)
    if start_idx == -1:
        # Answer string not found verbatim in the context; the original code
        # sliced with start_idx == -1 and produced a mangled highlight.
        # Fall back to showing the plain context.
        annotated_text(context)
        return
    end_idx = start_idx + len(answer)
    annotated_text(context[:start_idx], (answer, "ANSWER", "#8ef"), context[end_idx:])
# --- Page layout -----------------------------------------------------------
st.write("# Haystack Demo")
st.sidebar.header("Options")
top_k_reader = st.sidebar.slider("Number of answers", min_value=1, max_value=10, value=5, step=1)
top_k_retriever = st.sidebar.slider("Number of documents from retriever", min_value=1, max_value=10, value=3, step=1)
# fixed typo in the sample query: "Starck" -> "Stark"
question = st.text_input("Please provide your query:", value="Who is the father of Arya Stark?")
run_query = st.button("Run")
debug = st.sidebar.checkbox("Show debug info")

# --- Query & results -------------------------------------------------------
if run_query:
    with st.spinner("Performing neural search on documents... 🧠 \n "
                    "Do you want to optimize speed or accuracy? \n"
                    "Check out the docs: https://haystack.deepset.ai/docs/latest/optimizationmd "):
        results, raw_json = retrieve_doc(question, top_k_reader=top_k_reader, top_k_retriever=top_k_retriever)
    st.write("## Retrieved answers:")
    for result in results:
        annotate_answer(result['answer'], result['context'])
        # explicit st.write instead of Streamlit's "magic" bare-expression
        # rendering — same output, clearer intent
        st.write('**Relevance:** ', result['relevance'], '**source:** ', result['source'])
    if debug:
        st.subheader('REST API JSON response')
        st.write(raw_json)