Align TransformersReader defaults with FARMReader (#2490)

* Align TransformersReader defaults with FARMReader

* Update Documentation & Code Style

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Julian Risch 2022-05-04 10:04:18 +02:00 committed by GitHub
parent a0bf34036f
commit 970c476615
3 changed files with 5 additions and 5 deletions


@@ -564,7 +564,7 @@ With this reader, you can directly get predictions via predict()
#### \_\_init\_\_
```python
-def __init__(model_name_or_path: str = "distilbert-base-uncased-distilled-squad", model_version: Optional[str] = None, tokenizer: Optional[str] = None, context_window_size: int = 70, use_gpu: bool = True, top_k: int = 10, top_k_per_candidate: int = 4, return_no_answers: bool = True, max_seq_len: int = 256, doc_stride: int = 128)
+def __init__(model_name_or_path: str = "distilbert-base-uncased-distilled-squad", model_version: Optional[str] = None, tokenizer: Optional[str] = None, context_window_size: int = 70, use_gpu: bool = True, top_k: int = 10, top_k_per_candidate: int = 3, return_no_answers: bool = False, max_seq_len: int = 256, doc_stride: int = 128)
```
Load a QA model from Transformers.

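A minimal usage sketch of the aligned defaults, assuming the Haystack v1 import path `haystack.nodes.TransformersReader` (the instantiation below is illustrative, not part of this commit):

```python
from haystack.nodes import TransformersReader

# With the new defaults, the reader keeps at most 3 answer candidates per
# passage and does not return "no answer" predictions, matching FARMReader.
reader = TransformersReader(
    model_name_or_path="distilbert-base-uncased-distilled-squad",
    use_gpu=True,
)

# The previous behavior can still be requested explicitly:
legacy_reader = TransformersReader(top_k_per_candidate=4, return_no_answers=True)
```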

@@ -4174,12 +4174,12 @@
},
"top_k_per_candidate": {
"title": "Top K Per Candidate",
"default": 4,
"default": 3,
"type": "integer"
},
"return_no_answers": {
"title": "Return No Answers",
"default": true,
"default": false,
"type": "boolean"
},
"max_seq_len": {


@@ -26,8 +26,8 @@ class TransformersReader(BaseReader):
context_window_size: int = 70,
use_gpu: bool = True,
top_k: int = 10,
-        top_k_per_candidate: int = 4,
-        return_no_answers: bool = True,
+        top_k_per_candidate: int = 3,
+        return_no_answers: bool = False,
max_seq_len: int = 256,
doc_stride: int = 128,
):
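
A short sketch of what the new defaults mean at prediction time, assuming the Haystack v1 API (`haystack.Document`, `reader.predict`); the query and document text are made up for illustration:

```python
from haystack import Document
from haystack.nodes import TransformersReader

reader = TransformersReader()  # now top_k_per_candidate=3, return_no_answers=False

docs = [Document(content="Haystack is an open-source framework for building search systems.")]
prediction = reader.predict(query="What is Haystack?", documents=docs, top_k=3)

# With return_no_answers=False, the answers list contains only extracted spans,
# with no empty "no answer" entries interleaved.
for answer in prediction["answers"]:
    print(answer.answer, answer.score)
```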