fix(datahub-ingestion): remove old jars, sync pyspark version (#9217)

This commit is contained in:
david-leifker 2023-11-10 22:10:00 -06:00 committed by GitHub
parent 0e3efabd2c
commit ebd2e2312b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 42 additions and 7 deletions

View File

@@ -9,20 +9,21 @@ ext {
docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry
docker_repo = 'datahub-ingestion-base'
docker_dir = 'datahub-ingestion-base'
docker_target = project.getProperties().getOrDefault("dockerTarget", "slim")
revision = 2 // increment to trigger rebuild
}
docker {
name "${docker_registry}/${docker_repo}:v${version}-slim"
version "v${version}-slim"
name "${docker_registry}/${docker_repo}:v${version}-${docker_target}"
version "v${version}-${docker_target}"
dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
files fileTree(rootProject.projectDir) {
include "docker/${docker_dir}/*"
}.exclude {
i -> i.file.isHidden() || i.file == buildDir
}
buildArgs([APP_ENV: 'slim'])
buildArgs([APP_ENV: docker_target])
}
tasks.getByName('docker').dependsOn('build')

View File

@@ -22,10 +22,22 @@ ENV PATH="/datahub-ingestion/.local/bin:$PATH"
FROM base as slim-install
RUN pip install --no-cache --user ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]"
FROM base as full-install
FROM base as full-install-build
USER 0
RUN apt-get update && apt-get install -y -qq maven
USER datahub
COPY ./docker/datahub-ingestion/pyspark_jars.sh .
RUN pip install --no-cache --user ".[base]" && \
pip install --no-cache --user "./airflow-plugin[acryl-datahub-airflow-plugin]" && \
pip install --no-cache --user ".[all]"
pip install --no-cache --user ".[all]" && \
./pyspark_jars.sh
FROM base as full-install
COPY --from=full-install-build /datahub-ingestion/.local /datahub-ingestion/.local
FROM base as dev-install
# Dummy stage for development. Assumes code is built on your machine and mounted to this image.

View File

@@ -0,0 +1,22 @@
#!/bin/bash
# Swap pyspark's bundled hadoop/zookeeper/slf4j jars for pinned versions.
# The maven coordinates may be overridden from the environment via
# HADOOP_CLIENT_DEPENDENCY and ZOOKEEPER_DEPENDENCY.
set -ex

HADOOP_CLIENT_DEPENDENCY="${HADOOP_CLIENT_DEPENDENCY:-org.apache.hadoop:hadoop-client:3.3.6}"
ZOOKEEPER_DEPENDENCY="${ZOOKEEPER_DEPENDENCY:-org.apache.zookeeper:zookeeper:3.7.2}"

# Jar directory of the user-site pyspark installation.
jar_dir="$(python -m site --user-site)/pyspark/jars"

# Delete the bundled jars that would clash with the versions fetched below
# (fails fast under `set -e` if an expected jar is absent).
echo "Removing version conflicts from $jar_dir"
for prefix in zookeeper hadoop- slf4j-; do
  rm "$jar_dir/$prefix"*.jar
done

# Resolve each pinned artifact, with its transitive closure, into ~/.m2.
for coordinate in "$HADOOP_CLIENT_DEPENDENCY" "$ZOOKEEPER_DEPENDENCY"; do
  mvn dependency:get -Dtransitive=true -Dartifact="$coordinate"
done

# Relocate everything maven downloaded into pyspark's jar directory.
echo "Moving jars to $jar_dir"
find "$HOME/.m2" -type f -name "*.jar" -exec mv {} "$jar_dir/" \;

View File

@@ -242,7 +242,7 @@ s3_base = {
}
data_lake_profiling = {
"pydeequ==1.1.0",
"pydeequ~=1.1.0",
"pyspark~=3.3.0",
}
@@ -256,7 +256,7 @@ powerbi_report_server = {"requests", "requests_ntlm"}
databricks = {
# 0.1.11 appears to have authentication issues with azure databricks
"databricks-sdk>=0.9.0",
"pyspark",
"pyspark~=3.3.0",
"requests",
}