diff --git a/.github/workflows/airflow-apis-tests-3_9.yml b/.github/workflows/airflow-apis-tests.yml similarity index 98% rename from .github/workflows/airflow-apis-tests-3_9.yml rename to .github/workflows/airflow-apis-tests.yml index df919e8b064..a37a62d2244 100644 --- a/.github/workflows/airflow-apis-tests-3_9.yml +++ b/.github/workflows/airflow-apis-tests.yml @@ -68,10 +68,10 @@ jobs: java-version: '17' distribution: 'temurin' - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.10' - name: Install Ubuntu dependencies run: | diff --git a/.github/workflows/py-checkstyle.yml b/.github/workflows/py-checkstyle.yml index daae28b7abb..c742a27d682 100644 --- a/.github/workflows/py-checkstyle.yml +++ b/.github/workflows/py-checkstyle.yml @@ -56,10 +56,10 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.10' - name: Install Ubuntu related dependencies run: | diff --git a/.github/workflows/py-cli-e2e-tests.yml b/.github/workflows/py-cli-e2e-tests.yml index 1d8e973f0e8..969d87305ab 100644 --- a/.github/workflows/py-cli-e2e-tests.yml +++ b/.github/workflows/py-cli-e2e-tests.yml @@ -74,7 +74,7 @@ jobs: - name: Setup Openmetadata Test Environment uses: ./.github/actions/setup-openmetadata-test-environment with: - python-version: 3.9 + python-version: '3.10' - name: Run Python Tests & record coverage @@ -217,10 +217,10 @@ jobs: - name: Checkout uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.10' - name: Install Ubuntu dependencies run: | diff --git a/.github/workflows/py-tests-postgres.yml b/.github/workflows/py-tests-postgres.yml index 54c4a58a1f9..91ab78fce1f 100644 --- a/.github/workflows/py-tests-postgres.yml +++ 
b/.github/workflows/py-tests-postgres.yml @@ -77,7 +77,6 @@ jobs: ingestion_dependency: "mysql,elasticsearch,sample-data" - name: Run Python Tests - if: ${{ matrix.py-version != '3.9' }} run: | source env/bin/activate make run_python_tests diff --git a/.github/workflows/py-tests-skip.yml b/.github/workflows/py-tests-skip.yml index d390a45a83b..06d86d6b3bb 100644 --- a/.github/workflows/py-tests-skip.yml +++ b/.github/workflows/py-tests-skip.yml @@ -26,6 +26,6 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - py-version: ['3.8', '3.9', '3.10', '3.11'] + py-version: ['3.9', '3.10', '3.11'] steps: - run: 'echo "Step is not required"' diff --git a/.github/workflows/py-tests.yml b/.github/workflows/py-tests.yml index 49cf642e7d5..b4d8353527e 100644 --- a/.github/workflows/py-tests.yml +++ b/.github/workflows/py-tests.yml @@ -36,7 +36,7 @@ jobs: strategy: fail-fast: false matrix: - py-version: ["3.8", "3.9", "3.10", "3.11"] + py-version: ["3.9", "3.10", "3.11"] steps: - name: Free Disk Space (Ubuntu) uses: jlumbroso/free-disk-space@main diff --git a/.github/workflows/python-packages-publish.yml b/.github/workflows/python-packages-publish.yml index 3c93e5389f5..725ac556f7c 100644 --- a/.github/workflows/python-packages-publish.yml +++ b/.github/workflows/python-packages-publish.yml @@ -20,10 +20,10 @@ jobs: environment: release steps: - uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.10' - name: Install Ubuntu related dependencies run: | sudo apt-get update && sudo apt-get install -y libsasl2-dev unixodbc-dev python3-venv diff --git a/ingestion/pyproject.toml b/ingestion/pyproject.toml index ec2b46ea578..b29776f544c 100644 --- a/ingestion/pyproject.toml +++ b/ingestion/pyproject.toml @@ -13,7 +13,7 @@ authors = [ ] license = { file = "LICENSE" } description = "Ingestion Framework for OpenMetadata" -requires-python = ">=3.8" +requires-python = ">=3.9" 
[project.urls] Homepage = "https://open-metadata.org/" diff --git a/openmetadata-airflow-apis/pyproject.toml b/openmetadata-airflow-apis/pyproject.toml index 349b97021ce..f488c69f5b8 100644 --- a/openmetadata-airflow-apis/pyproject.toml +++ b/openmetadata-airflow-apis/pyproject.toml @@ -13,7 +13,7 @@ authors = [ ] license = {file = "LICENSE"} description = "Airflow REST APIs to create and manage DAGS" -requires-python = ">=3.8" +requires-python = ">=3.9" dependencies = [ "pendulum~=3.0", "apache-airflow>=2.2.2", diff --git a/openmetadata-docs/content/partials/v1.7/deployment/upgrade/upgrade-prerequisites.md b/openmetadata-docs/content/partials/v1.7/deployment/upgrade/upgrade-prerequisites.md index bcda1577075..24a130badbd 100644 --- a/openmetadata-docs/content/partials/v1.7/deployment/upgrade/upgrade-prerequisites.md +++ b/openmetadata-docs/content/partials/v1.7/deployment/upgrade/upgrade-prerequisites.md @@ -103,83 +103,15 @@ After the migration is finished, you can revert this changes. # Backward Incompatible Changes -## 1.6.0 +## 1.7.0 -### Ingestion Workflow Status +### Removing support for Python 3.8 -We are updating how we compute the success percentage. Previously, we took into account for partial success the results -of the Source (e.g., the tables we were able to properly retrieve from Snowflake, Redshift, etc.). This means that we had -an error threshold in there were if up to 90% of the tables were successfully ingested, we would still consider the -workflow as successful. However, any errors when sending the information to OpenMetadata would be considered as a failure. +Python 3.8 was [officially EOL on 2024-10-07](https://devguide.python.org/versions/). Some of our dependencies have already +started removing support for it in their newer versions, and we are following suit to ensure we are using the latest and most stable +versions of our dependencies. -Now, we're changing this behavior to consider the success rate of all the steps involved in the workflow. 
The UI will -then show more `Partial Success` statuses rather than `Failed`, properly reflecting the real state of the workflow. - -# Database Metadata & Lineage Workflow - -With 1.6 Release we are moving the `View Lineage` & `Stored Procedure Lineage` computation from metadata workflow to lineage workflow. - -This means that we are removing the `overrideViewLineage` property from the `DatabaseServiceMetadataPipeline` schema which will be moved to the `DatabaseServiceQueryLineagePipeline` schema. - -### Profiler & Auto Classification Workflow - -We are creating a new `Auto Classification` workflow that will take care of managing the sample data and PII classification, -which was previously done by the Profiler workflow. This change will allow us to have a more modular and scalable system. - -The Profiler workflow will now only focus on the profiling part of the data, while the Auto Classification will take care -of the rest. - -This means that we are removing these properties from the `DatabaseServiceProfilerPipeline` schema: -- `generateSampleData` -- `processPiiSensitive` -- `confidence` -which will be moved to the new `DatabaseServiceAutoClassificationPipeline` schema. - -What you will need to do: -- If you are using the **EXTERNAL** ingestion for the profiler (YAML configuration), you will need to update your configuration, -removing these properties as well. -- If you still want to use the Auto PII Classification and sampling features, you can create the new workflow -from the UI. - -### RBAC Policy Updates for `EditTags` - -We have given more granularity to the `EditTags` policy. Previously, it was a single policy that allowed the user to manage -any kind of tagging to the assets, including adding tags, glossary terms, and Tiers. - -Now, we have split this policy to give further control on which kind of tagging the user can manage. The `EditTags` policy has been -split into: - -- `EditTags`: to add tags. -- `EditGlossaryTerms`: to add Glossary Terms. 
-- `EditTier`: to add Tier tags. - -### Collate - Metadata Actions for ML Tagging - Deprecation Notice - -Since we are introducing the `Auto Classification` workflow, **we are going to remove in 1.7 the `ML Tagging` action** -from the Metadata Actions. That feature will be covered already by the `Auto Classification` workflow, which even brings -more flexibility allow the on-the-fly usage of the sample data for classification purposes without having to store -it in the database. - -### Service Spec for the Ingestion Framework - -This impacts users who maintain their own connectors for the ingestion framework that are **NOT** part of the -[OpenMetadata python library (openmetadata-ingestion)](https://github.com/open-metadata/OpenMetadata/tree/ff261fb3738f3a56af1c31f7151af9eca7a602d5/ingestion/src/metadata/ingestion/source). -Introducing the ["connector specifcication class (`ServiceSpec`)"](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/service_spec/service_spec.py). -The `ServiceSpec` class serves as the entrypoint for the connector and holds the references for the classes that will be used -to ingest and process the metadata from the source. -You can see [postgres](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/postgres/service_spec.py) for an -implementation example. - - -### Fivetran - -The filtering of Fivetran pipelines now supports using their names instead of IDs. This change may affect existing configurations that rely on pipeline IDs for filtering. - -### DBT Cloud Pipeline Service - -We are removing the field `jobId` which we required to ingest dbt metadata from a specific job, instead of this we added a new field called `jobIds` which will accept multiple job ids to ingest metadata from multiple jobs. - -### MicroStrategy - -The `serviceType` for MicroStrategy connector is renamed from `Mstr` to `MicroStrategy`. 
+This means that for Release 1.7, the supported Python versions for the Ingestion Framework are 3.9, 3.10 and 3.11. +We were already shipping our Docker images with Python 3.10, so this change should not affect you if you are using our Docker images. +However, if you installed the `openmetadata-ingestion` package directly, please make sure to update your Python version to 3.9 or higher.