chore(python): test with python 3.11 (#11280)

Co-authored-by: Tamas Nemeth <treff7es@gmail.com>
Co-authored-by: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com>
Harshal Sheth 2024-12-27 13:46:49 -05:00 committed by GitHub
parent ac8e539457
commit ed8639e401
13 changed files with 24 additions and 34 deletions

View File

@@ -30,11 +30,11 @@ jobs:
DATAHUB_TELEMETRY_ENABLED: false
strategy:
matrix:
python-version: ["3.9", "3.10"]
python-version: ["3.9", "3.11"]
include:
- python-version: "3.9"
extraPythonRequirement: "dagster>=1.3.3"
- python-version: "3.10"
- python-version: "3.11"
extraPythonRequirement: "dagster>=1.3.3"
fail-fast: false
steps:
@@ -57,7 +57,7 @@ jobs:
if: always()
run: source metadata-ingestion-modules/dagster-plugin/venv/bin/activate && uv pip freeze
- uses: actions/upload-artifact@v4
- if: ${{ always() && matrix.python-version == '3.10' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }}
+ if: ${{ always() && matrix.python-version == '3.11' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }}
with:
name: Test Results (dagster Plugin ${{ matrix.python-version}})
path: |

View File

@@ -33,7 +33,7 @@ jobs:
# DATAHUB_LOOKML_GIT_TEST_SSH_KEY: ${{ secrets.DATAHUB_LOOKML_GIT_TEST_SSH_KEY }}
strategy:
matrix:
python-version: ["3.8", "3.10"]
python-version: ["3.8", "3.11"]
command:
[
"testQuick",
@@ -43,7 +43,7 @@
]
include:
- python-version: "3.8"
- python-version: "3.10"
- python-version: "3.11"
fail-fast: false
steps:
- name: Free up disk space

View File

@@ -30,7 +30,7 @@ jobs:
DATAHUB_TELEMETRY_ENABLED: false
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10"]
python-version: ["3.8", "3.9", "3.10", "3.11"]
fail-fast: false
steps:
- name: Set up JDK 17
@@ -52,7 +52,7 @@ jobs:
if: always()
run: source metadata-ingestion-modules/prefect-plugin/venv/bin/activate && uv pip freeze
- uses: actions/upload-artifact@v4
- if: ${{ always() && matrix.python-version == '3.10'}}
+ if: ${{ always() && matrix.python-version == '3.11'}}
with:
name: Test Results (Prefect Plugin ${{ matrix.python-version}})
path: |

View File

@@ -148,10 +148,6 @@ setuptools.setup(
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",

View File

@@ -1,4 +1,3 @@
# Datahub Dagster Plugin
- See the DataHub Dagster docs for details.
+ See the [DataHub Dagster docs](https://datahubproject.io/docs/lineage/dagster/) for details.

View File

@@ -107,9 +107,6 @@ setuptools.setup(
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",

View File

@@ -1,4 +1,3 @@
# Datahub GX Plugin
- See the DataHub GX docs for details.
+ See the [DataHub GX docs](https://datahubproject.io/docs/metadata-ingestion/integration_docs/great-expectations) for details.

View File

@@ -118,9 +118,6 @@ setuptools.setup(
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",

View File

@@ -28,7 +28,7 @@ The `prefect-datahub` collection allows you to easily integrate DataHub's metada
## Prerequisites
- - Python 3.7+
+ - Python 3.8+
- Prefect 2.0.0+ and < 3.0.0
- A running instance of DataHub

View File

@@ -103,10 +103,6 @@ setuptools.setup(
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
@@ -120,7 +116,7 @@ setuptools.setup(
],
# Package info.
zip_safe=False,
python_requires=">=3.7",
python_requires=">=3.8",
package_dir={"": "src"},
packages=setuptools.find_namespace_packages(where="./src"),
entry_points=entry_points,

View File

@@ -298,8 +298,8 @@ abs_base = {
}
data_lake_profiling = {
"pydeequ~=1.1.0",
"pyspark~=3.3.0",
"pydeequ>=1.1.0",
"pyspark~=3.5.0",
}
delta_lake = {
@@ -318,7 +318,7 @@ databricks = {
# 0.1.11 appears to have authentication issues with azure databricks
# 0.22.0 has support for `include_browse` in metadata list apis
"databricks-sdk>=0.30.0",
"pyspark~=3.3.0",
"pyspark~=3.5.0",
"requests",
# Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes
# Version 3.0.0 required SQLAlchemy > 2.0.21
@@ -874,9 +874,6 @@ See the [DataHub docs](https://datahubproject.io/docs/metadata-ingestion).
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
@@ -917,6 +914,7 @@ See the [DataHub docs](https://datahubproject.io/docs/metadata-ingestion).
"sync-file-emitter",
"sql-parser",
"iceberg",
"feast",
}
else set()
)

View File

@@ -225,7 +225,7 @@ class S3Source(StatefulIngestionSourceBase):
self.init_spark()
def init_spark(self):
os.environ.setdefault("SPARK_VERSION", "3.3")
os.environ.setdefault("SPARK_VERSION", "3.5")
spark_version = os.environ["SPARK_VERSION"]
# Importing here to avoid Deequ dependency for non profiling use cases

View File

@@ -1,3 +1,6 @@
+ import sys
import pytest
from freezegun import freeze_time
from datahub.ingestion.run.pipeline import Pipeline
@@ -6,6 +9,11 @@ from tests.test_helpers import mce_helpers
FROZEN_TIME = "2020-04-14 07:00:00"
+ # The test is skipped on python 3.11 due to conflicting dependencies in the installDev
+ # setup, which requires pydantic < 2 for the majority of plugins. Note that the test
+ # works on python 3.11 when run in a standalone virtual env with only the feast plugin
+ # installed via `pip install acryl-datahub[feast]`, since that allows pydantic > 2.
+ @pytest.mark.skipif(sys.version_info > (3, 11), reason="Skipped on Python 3.11+")
@freeze_time(FROZEN_TIME)
def test_feast_repository_ingest(pytestconfig, tmp_path, mock_time):
test_resources_dir = pytestconfig.rootpath / "tests/integration/feast"
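
For reference, a minimal sketch (not part of this commit) of how the version guard added above behaves: `sys.version_info` is a 5-tuple, so comparing it against the 2-tuple `(3, 11)` is true on every 3.11.x interpreter and newer, and false on 3.10.x and older.

import sys

# sys.version_info is a 5-tuple, e.g. (3, 11, 4, "final", 0), so tuple
# comparison against the 2-tuple (3, 11) is True on every 3.11.x (and newer)
# interpreter and False on 3.10.x and older, which is the property the
# skipif marker above relies on.
skip_on_311_plus = sys.version_info > (3, 11)
print(skip_on_311_plus)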