fix names in workflows

Massimiliano Pippi 2023-11-24 14:59:31 +01:00
parent e6637f5ec2
commit 4a1fe163b6
No known key found for this signature in database
GPG Key ID: C69551A629D71A65
9 changed files with 29 additions and 333 deletions

View File

@@ -4,11 +4,8 @@ on:
workflow_run:
workflows:
- "end-to-end"
- "end-to-end (Preview)"
- "Linting"
- "Tests"
- "Tests (Preview)"
- "REST API Tests"
types:
- completed
pull_request:
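
The strings under `workflows:` must match the `name:` field of the workflows they listen to, not the workflow file names, which is why the renames in this commit have to be mirrored in this list. A minimal consumer sketch, assuming a workflow whose `name:` is "Tests" exists; the "CI metrics" name and the echo step are illustrative, not repository content:

name: CI metrics
on:
  workflow_run:
    # Each entry must equal another workflow's `name:` field exactly.
    workflows:
      - "Tests"
      - "end-to-end"
    types:
      - completed

jobs:
  report:
    runs-on: ubuntu-latest
    steps:
      - name: Show which run triggered us
        run: |
          echo "workflow:   ${{ github.event.workflow_run.name }}"
          echo "conclusion: ${{ github.event.workflow_run.conclusion }}"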

View File

@@ -4,7 +4,6 @@ on:
pull_request_target:
paths:
- "haystack/**/*.py"
- "rest_api/**/*.py"
jobs:
label:

View File

@@ -1,5 +1,5 @@
# If you change this name also do it in ci_metrics.yml
name: end-to-end (Preview)
name: end-to-end
on:
workflow_dispatch: # Activate this workflow manually
@@ -12,8 +12,8 @@ on:
- synchronize
- ready_for_review
paths:
- "e2e/preview/**/*.py"
- ".github/workflows/e2e_preview.yml"
- "e2e/**/*.py"
- ".github/workflows/e2e.yml"
env:
PYTHON_VERSION: "3.8"
@@ -36,7 +36,7 @@ jobs:
sudo apt install ffmpeg # for local Whisper tests
- name: Install Haystack
run: pip install .[dev,preview,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf tika 'azure-ai-formrecognizer>=3.2.0b2'
run: pip install .[dev,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf tika 'azure-ai-formrecognizer>=3.2.0b2'
- name: Run tests
run: pytest e2e/preview
run: pytest e2e

View File

@@ -92,223 +92,3 @@ jobs:
- "workflow:${{ github.workflow }}"
- "branch:${{ github.ref_name }}"
- "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
license_check_vanilla:
name: Core dependencies, no extras
env:
REQUIREMENTS_FILE: requirements_vanilla.txt
runs-on: ubuntu-latest
steps:
- name: Checkout the code
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Get explicit and transitive dependencies
run: |
pip install .
pip freeze > ${{ env.REQUIREMENTS_FILE }}
- name: Check Licenses
id: license_check_report
uses: pilosus/action-pip-license-checker@v2
with:
github-token: ${{ secrets.GH_ACCESS_TOKEN }}
requirements: ${{ env.REQUIREMENTS_FILE }}
fail: "Copyleft,Other,Error"
# Exclusions in the vanilla distribution must be explicitly motivated
#
# - certifi is pulled in by requests
# - num2words is pulled in by quantulum3
# - tqdm is MPL but there are no better alternatives
# - nvidia libraries are brought in by torch on Linux,
# FIXME: to be removed once we stop depending on torch with the vanilla install
exclude: "(?i)^(certifi|num2words|tqdm|nvidia-).*"
- name: Print report
if: ${{ always() }}
run: echo "${{ steps.license_check_report.outputs.report }}"
- name: Calculate alert data
id: calculator
shell: bash
if: (success() || failure())
run: |
if [ "${{ job.status }}" = "success" ]; then
echo "alert_type=success" >> "$GITHUB_OUTPUT";
else
echo "alert_type=error" >> "$GITHUB_OUTPUT";
fi
- name: Send event to Datadog
# This step would fail when running in PRs opened from forks since
# secrets are not accessible.
# To prevent showing bogus failures in those PRs we skip the step.
# The workflow will fail in any case if the actual check fails in the previous steps.
if: (success() || failure()) && env.CORE_DATADOG_API_KEY != ''
uses: masci/datadog@v1
with:
api-key: ${{ env.CORE_DATADOG_API_KEY }}
api-url: https://api.datadoghq.eu
events: |
- title: "${{ github.job }} in ${{ github.workflow }} workflow"
text: "License compliance check: core dependencies, no extras."
alert_type: "${{ steps.calculator.outputs.alert_type }}"
source_type_name: "Github"
host: ${{ github.repository_owner }}
tags:
- "project:${{ github.repository }}"
- "job:${{ github.job }}"
- "run_id:${{ github.run_id }}"
- "workflow:${{ github.workflow }}"
- "branch:${{ github.ref_name }}"
- "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
license_check_all:
name: All extras
env:
REQUIREMENTS_FILE: requirements_all.txt
runs-on: ubuntu-latest-4-cores
steps:
- name: Checkout the code
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Get explicit and transitive dependencies
run: |
pip install -U pip
pip install .[all]
pip freeze > ${{ env.REQUIREMENTS_FILE }}
- name: Check Licenses
id: license_check_report
uses: pilosus/action-pip-license-checker@v2
with:
github-token: ${{ secrets.GH_ACCESS_TOKEN }}
requirements: ${{ env.REQUIREMENTS_FILE }}
fail: "Copyleft,Other,Error"
# We allow incompatible licenses when they come from optional dependencies.
#
# Special cases:
# - pyzmq is flagged because dual-licensed, but we assume using BSD
# - tqdm is MPL but there are no better alternatives
exclude: "(?i)^(astroid|certifi|chardet|num2words|nvidia-|pathspec|pinecone-client|psycopg2|pylint|PyMuPDF|pyzmq|tqdm).*"
- name: Print report
if: ${{ always() }}
run: echo "${{ steps.license_check_report.outputs.report }}"
- name: Calculate alert data
id: calculator
shell: bash
if: (success() || failure())
run: |
if [ "${{ job.status }}" = "success" ]; then
echo "alert_type=success" >> "$GITHUB_OUTPUT";
else
echo "alert_type=error" >> "$GITHUB_OUTPUT";
fi
- name: Send event to Datadog
# This step would fail when running in PRs opened from forks since
# secrets are not accessible.
# To prevent showing bogus failures in those PRs we skip the step.
# The workflow will fail in any case if the actual check fails in the previous steps.
if: (success() || failure()) && env.CORE_DATADOG_API_KEY != ''
uses: masci/datadog@v1
with:
api-key: ${{ env.CORE_DATADOG_API_KEY }}
api-url: https://api.datadoghq.eu
events: |
- title: "${{ github.job }} in ${{ github.workflow }} workflow"
text: "License compliance check: all available extras."
alert_type: "${{ steps.calculator.outputs.alert_type }}"
source_type_name: "Github"
host: ${{ github.repository_owner }}
tags:
- "project:${{ github.repository }}"
- "job:${{ github.job }}"
- "run_id:${{ github.run_id }}"
- "workflow:${{ github.workflow }}"
- "branch:${{ github.ref_name }}"
- "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
license_check_all_GPU:
name: All extras, GPU version
env:
REQUIREMENTS_FILE: requirements_all_gpu.txt
runs-on: ubuntu-latest
steps:
- name: Checkout the code
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Get explicit and transitive dependencies
run: |
pip install -U pip
pip install .[all-gpu]
pip freeze > ${{ env.REQUIREMENTS_FILE }}
- name: Check Licenses
id: license_check_report
uses: pilosus/action-pip-license-checker@v2
with:
github-token: ${{ secrets.GH_ACCESS_TOKEN }}
requirements: ${{ env.REQUIREMENTS_FILE }}
fail: "Copyleft,Other,Error"
# We allow incompatible licenses when they come from optional dependencies.
#
# Special cases:
# - pyzmq is flagged because dual-licensed, but we assume using BSD
# - tqdm is MPL but there are no better alternatives
exclude: "(?i)^(astroid|certifi|chardet|num2words|nvidia-|pathspec|pinecone-client|psycopg2|pylint|PyMuPDF|pyzmq|tqdm).*"
- name: Print report
if: ${{ always() }}
run: echo "${{ steps.license_check_report.outputs.report }}"
- name: Calculate alert data
id: calculator
shell: bash
if: (success() || failure())
run: |
if [ "${{ job.status }}" = "success" ]; then
echo "alert_type=success" >> "$GITHUB_OUTPUT";
else
echo "alert_type=error" >> "$GITHUB_OUTPUT";
fi
- name: Send event to Datadog
# This step would fail when running in PRs opened from forks since
# secrets are not accessible.
# To prevent showing bogus failures in those PRs we skip the step.
# The workflow will fail in any case if the actual check fails in the previous steps.
if: (success() || failure()) && env.CORE_DATADOG_API_KEY != ''
uses: masci/datadog@v1
with:
api-key: ${{ env.CORE_DATADOG_API_KEY }}
api-url: https://api.datadoghq.eu
events: |
- title: "${{ github.job }} in ${{ github.workflow }} workflow"
text: "License compliance check: all available extras, GPU version."
alert_type: "${{ steps.calculator.outputs.alert_type }}"
source_type_name: "Github"
host: ${{ github.repository_owner }}
tags:
- "project:${{ github.repository }}"
- "job:${{ github.job }}"
- "run_id:${{ github.run_id }}"
- "workflow:${{ github.workflow }}"
- "branch:${{ github.ref_name }}"
- "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"

View File

@@ -1,13 +1,13 @@
# If you change this name also do it in linting-skipper.yml and ci_metrics.yml
name: Linting (Preview)
name: Linting
on:
pull_request:
paths:
- "haystack/preview/**/*.py"
- "test/preview/**/*.py"
- "e2e/preview/**/*.py"
- "**/pyproject.toml"
- "haystack/**/*.py"
- "test/**/*.py"
- "e2e/**/*.py"
- "pyproject.toml"
env:
PYTHON_VERSION: "3.8"
@@ -31,20 +31,18 @@ jobs:
**/*.py
files_ignore: |
test/**
rest_api/test/**
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install Haystack
run: pip install .[dev,preview,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
run: pip install .[dev,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
- name: Mypy
if: steps.files.outputs.any_changed == 'true'
run: |
mkdir .mypy_cache/
mypy --install-types --non-interactive ${{ steps.files.outputs.all_changed_files }} --exclude=rest_api/build/ --exclude=rest_api/test/
mypy --install-types --non-interactive ${{ steps.files.outputs.all_changed_files }}
pylint:
runs-on: ubuntu-latest
@@ -61,7 +59,7 @@ jobs:
uses: tj-actions/changed-files@v40
with:
files: |
haystack/preview/**/*.py
haystack/**/*.py
- uses: actions/setup-python@v4
with:
@@ -69,8 +67,7 @@ jobs:
- name: Install Haystack
run: |
pip install .[dev,preview,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
pip install ./haystack-linter
pip install .[dev,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
- name: Pylint
if: steps.files.outputs.any_changed == 'true'
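
Both lint jobs in this file gate on tj-actions/changed-files, so a PR that touches no relevant Python files skips the expensive steps entirely. A condensed sketch of the mypy job after this change; the checkout step and the trimmed install line are assumptions, while the action inputs and outputs come from the diff above:

mypy:
  runs-on: ubuntu-latest
  steps:
    - uses: actions/checkout@v4
    - name: Get changed files
      id: files
      uses: tj-actions/changed-files@v40
      with:
        files: |
          **/*.py
        files_ignore: |
          test/**
    - uses: actions/setup-python@v4
      with:
        python-version: "3.8"
    - name: Install Haystack
      run: pip install .[dev,audio]   # the real step pins several extra packages
    - name: Mypy
      # Skipped when the PR changes no Python files outside test/
      if: steps.files.outputs.any_changed == 'true'
      run: |
        mkdir .mypy_cache/
        mypy --install-types --non-interactive ${{ steps.files.outputs.all_changed_files }}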

View File

@@ -1,75 +0,0 @@
name: Sync docs with Readme
on:
pull_request:
paths:
- "docs/pydoc/**"
push:
branches:
- main
# release branches have the form v1.9.x
- "v[0-9].*[0-9].x"
jobs:
sync:
runs-on: ubuntu-latest
steps:
- name: Checkout this repo
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r docs/pydoc/requirements.txt
- name: Generate API docs
env:
# This is necessary to fetch the documentation categories
# from Readme.io as we need them to associate the slug
# in config files with their id.
README_API_KEY: ${{ secrets.README_API_KEY }}
run: ./.github/utils/pydoc-markdown.sh
- name: Get current version
id: current-version
if: github.event_name == 'push'
shell: bash
# We only need `major.minor` in Readme so we cut the full version string to the first two tokens
run: echo "minor=$(cut -d "." -f 1,2 < VERSION.txt)" >> "$GITHUB_OUTPUT"
- name: Sync docs with unstable release
# Instead of putting more logic into the previous step, let's just assume that commits on `main`
# will always be synced to the current `X.Y-unstable` version on Readme
id: sync-main
if: github.ref_name == 'main' && github.event_name == 'push'
uses: readmeio/rdme@8.3.1
env:
README_API_KEY: ${{ secrets.README_API_KEY }}
with:
rdme: docs ./docs/pydoc/temp --key="$README_API_KEY" --version=${{ steps.current-version.outputs.minor }}-unstable
- name: Sync preview docs with 2.0
# Sync the preview docs to the `2.0` version on Readme
id: sync-main-preview
if: github.ref_name == 'main' && github.event_name == 'push'
uses: readmeio/rdme@8.3.1
env:
README_API_KEY: ${{ secrets.README_API_KEY }}
with:
rdme: docs ./docs/pydoc/temp-preview --key="$README_API_KEY" --version=2.0
- name: Sync docs with current release
# Mutually exclusive with the previous one, this step is supposed to only run on version branches.
# Sync the current Haystack version `X.Y.Z` with its corresponding Readme version `X.Y`.
# See https://docs.github.com/en/actions/learn-github-actions/contexts#steps-context for the condition used
if: steps.sync-main.outcome == 'skipped' && github.event_name == 'push'
uses: readmeio/rdme@8.3.1
env:
README_API_KEY: ${{ secrets.README_API_KEY }}
with:
rdme: docs ./docs/pydoc/temp --key="$README_API_KEY" --version=${{ steps.current-version.outputs.minor }}
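
For reference, the version-cutting one-liner in the deleted sync job simply keeps the first two dot-separated tokens of VERSION.txt; the example value below is illustrative:

# VERSION.txt "1.23.0rc0"  ->  minor="1.23"  ->  Readme version "1.23" (or "1.23-unstable" on main)
- name: Get current version
  id: current-version
  shell: bash
  run: echo "minor=$(cut -d "." -f 1,2 < VERSION.txt)" >> "$GITHUB_OUTPUT"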

View File

@@ -13,7 +13,6 @@ on:
- "**.py"
- "pyproject.toml"
- "!.github/**/*.py"
- "!rest_api/**/*.py"
jobs:
reno:

View File

@@ -7,7 +7,7 @@ on:
- main
pull_request:
paths:
- examples/preview/**
- examples/**
types:
- opened
- reopened
@@ -33,14 +33,14 @@ jobs:
- name: Install snippets dependencies
run: |
pip install --upgrade pip
pip install ".[preview]" torch
pip install "." torch
- name: Get changed files
id: files
uses: tj-actions/changed-files@v40
with:
files: |
examples/preview/**.py
examples/**.py
- name: Run each snippet
run: |

View File

@@ -1,5 +1,4 @@
# If you change this name also do it in tests_preview_skipper.yml
name: Tests (Preview)
name: Tests
on:
workflow_dispatch: # Activate this workflow manually
@@ -15,8 +14,8 @@ on:
- synchronize
- ready_for_review
paths:
- "haystack/preview/**/*.py"
- "test/preview/**/*.py"
- "haystack/**/*.py"
- "test/**/*.py"
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
@@ -38,7 +37,7 @@ jobs:
- name: Install Black
run: |
pip install --upgrade pip
pip install .[formatting]
pip install .[dev]
- name: Check status
run: |
@@ -116,10 +115,10 @@ jobs:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install Haystack
run: pip install .[dev,preview,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
run: pip install .[dev,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
- name: Run
run: pytest -m "not integration" test/preview
run: pytest -m "not integration" test
- name: Calculate alert data
id: calculator
@@ -174,10 +173,10 @@ jobs:
sudo apt install ffmpeg # for local Whisper tests
- name: Install Haystack
run: pip install .[dev,preview,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
run: pip install .[dev,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
- name: Run
run: pytest --maxfail=5 -m "integration" test/preview
run: pytest --maxfail=5 -m "integration" test
- name: Calculate alert data
id: calculator
@@ -230,13 +229,13 @@ jobs:
colima start
- name: Install Haystack
run: pip install .[dev,preview,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
run: pip install .[dev,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
- name: Run Tika
run: docker run -d -p 9998:9998 apache/tika:2.9.0.0
- name: Run
run: pytest --maxfail=5 -m "integration" test/preview
run: pytest --maxfail=5 -m "integration" test
- name: Calculate alert data
id: calculator
@@ -281,10 +280,10 @@ jobs:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install Haystack
run: pip install .[dev,preview,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
run: pip install .[dev,audio] langdetect transformers[torch,sentencepiece]==4.35.2 'sentence-transformers>=2.2.0' pypdf markdown-it-py mdit_plain tika 'azure-ai-formrecognizer>=3.2.0b2' cohere
- name: Run
run: pytest --maxfail=5 -m "integration" test/preview -k 'not tika'
run: pytest --maxfail=5 -m "integration" test -k 'not tika'
- name: Calculate alert data
id: calculator
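
The pytest invocations in this file's diff slice the suite by marker and keyword expressions; an annotated recap, with illustrative step names and the commands mirroring the diff:

- name: Unit tests
  # -m "not integration" deselects every test carrying the `integration` marker
  run: pytest -m "not integration" test
- name: Integration tests
  # --maxfail=5 aborts the session after five failures to save runner time
  run: pytest --maxfail=5 -m "integration" test
- name: Integration tests without Tika
  # -k 'not tika' additionally deselects tests whose id matches "tika"
  run: pytest --maxfail=5 -m "integration" test -k 'not tika'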