diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 6fb1ed3..9be616e 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -45,6 +45,12 @@ jobs:
           echo ::set-output name=tags::${TAGS}
           echo ::set-output name=build_args::${BUILD_ARGS}
           echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
+
+      - name: Run Hadolint
+        uses: reviewdog/action-hadolint@v1
+        with:
+          github_token: ${{ secrets.github_token }}
+          reporter: github-pr-review
 
       - name: Build image
         uses: docker/build-push-action@v2
diff --git a/Dockerfile b/Dockerfile
index 7097389..fa7ae6e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -11,7 +11,7 @@ ENV PATH="/opt/miniconda3/bin:${PATH}"
 ENV PYSPARK_PYTHON="/opt/miniconda3/bin/python"
 
 RUN set -ex && \
-    apt-get update && \ 
+    apt-get update && \
     apt-get install -y curl bzip2 --no-install-recommends && \
     curl -s -L --url "https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh" --output /tmp/miniconda.sh && \
     bash /tmp/miniconda.sh -b -f -p "/opt/miniconda3" && \
@@ -30,7 +30,8 @@ RUN set -ex && \
     echo "spark.hadoop.fs.s3.impl=org.apache.hadoop.fs.s3a.S3AFileSystem" >> $SPARK_HOME/conf/spark-defaults.conf && \
     apt-get remove -y curl bzip2 && \
     apt-get autoremove -y && \
-    apt-get clean
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 
 ENTRYPOINT ["spark-submit"]
 CMD ["--help"]