Add recipes for installing a few common tools in Docker image (#13655)

(cherry picked from commit bfb7cb3)

mik-laj authored and kaxil committed Jan 21, 2021
1 parent 680f383 commit 8e7552b
Showing 3 changed files with 181 additions and 0 deletions.
41 changes: 41 additions & 0 deletions docs/apache-airflow/docker-images-recipes/gcloud.Dockerfile
@@ -0,0 +1,41 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
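
# Extend an existing Airflow image, passed in at build time via --build-arg BASE_AIRFLOW_IMAGE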
ARG BASE_AIRFLOW_IMAGE
FROM ${BASE_AIRFLOW_IMAGE}

SHELL ["/bin/bash", "-o", "pipefail", "-e", "-u", "-x", "-c"]

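# Switch to root so that software can be installed system-wide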
USER 0

ARG CLOUD_SDK_VERSION=322.0.0
ENV GCLOUD_HOME=/opt/google-cloud-sdk

ENV PATH="${GCLOUD_HOME}/bin/:${PATH}"

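# Download the Google Cloud SDK, install it together with the alpha, beta and kubectl
# components, and verify the installation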
RUN DOWNLOAD_URL="https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz" \
    && TMP_DIR="$(mktemp -d)" \
    && curl -fL "${DOWNLOAD_URL}" --output "${TMP_DIR}/google-cloud-sdk.tar.gz" \
    && mkdir -p "${GCLOUD_HOME}" \
    && tar xzf "${TMP_DIR}/google-cloud-sdk.tar.gz" -C "${GCLOUD_HOME}" --strip-components=1 \
    && "${GCLOUD_HOME}/install.sh" \
        --bash-completion=false \
        --path-update=false \
        --usage-reporting=false \
        --additional-components alpha beta kubectl \
        --quiet \
    && rm -rf "${TMP_DIR}" \
    && gcloud --version

USER ${AIRFLOW_UID}
88 changes: 88 additions & 0 deletions docs/apache-airflow/docker-images-recipes/hadoop.Dockerfile
@@ -0,0 +1,88 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ARG BASE_AIRFLOW_IMAGE

FROM ${BASE_AIRFLOW_IMAGE}

SHELL ["/bin/bash", "-o", "pipefail", "-e", "-u", "-x", "-c"]

USER 0

# Install Java
RUN mkdir -pv /usr/share/man/man1 \
    && mkdir -pv /usr/share/man/man7 \
    && curl -fsSL https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | apt-key add - \
    && echo 'deb https://adoptopenjdk.jfrog.io/adoptopenjdk/deb/ buster main' > \
        /etc/apt/sources.list.d/adoptopenjdk.list \
    && apt-get update \
    && apt-get install --no-install-recommends -y \
        adoptopenjdk-8-hotspot-jre \
    && apt-get autoremove -yqq --purge \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
ENV JAVA_HOME=/usr/lib/jvm/adoptopenjdk-8-hotspot-jre-amd64

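# Directory for additional jars, e.g. the GCS connector installed below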
RUN mkdir -p /opt/spark/jars

# Install Apache Hadoop
ARG HADOOP_VERSION=2.10.1
ENV HADOOP_HOME=/opt/hadoop
ENV HADOOP_CONF_DIR=/etc/hadoop
ENV MULTIHOMED_NETWORK=1
ENV USER=root

RUN HADOOP_URL="https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz" \
    && curl -fsSL 'https://dist.apache.org/repos/dist/release/hadoop/common/KEYS' | gpg --import - \
    && curl -fSL "$HADOOP_URL" -o /tmp/hadoop.tar.gz \
    && curl -fSL "$HADOOP_URL.asc" -o /tmp/hadoop.tar.gz.asc \
    && gpg --verify /tmp/hadoop.tar.gz.asc \
    && mkdir -p "${HADOOP_HOME}" \
    && tar -xvf /tmp/hadoop.tar.gz -C "${HADOOP_HOME}" --strip-components=1 \
    && rm /tmp/hadoop.tar.gz /tmp/hadoop.tar.gz.asc \
    && ln -s "${HADOOP_HOME}/etc/hadoop" "${HADOOP_CONF_DIR}" \
    && mkdir "${HADOOP_HOME}/logs" \
    && mkdir /hadoop-data

ENV PATH="$HADOOP_HOME/bin/:$PATH"

# Install Apache Hive
ARG HIVE_VERSION=2.3.7
ENV HIVE_HOME=/opt/hive
ENV HIVE_CONF_DIR=/etc/hive

RUN HIVE_URL="https://archive.apache.org/dist/hive/hive-${HIVE_VERSION}/apache-hive-${HIVE_VERSION}-bin.tar.gz" \
    && curl -fsSL 'https://downloads.apache.org/hive/KEYS' | gpg --import - \
    && curl -fSL "$HIVE_URL" -o /tmp/hive.tar.gz \
    && curl -fSL "$HIVE_URL.asc" -o /tmp/hive.tar.gz.asc \
    && gpg --verify /tmp/hive.tar.gz.asc \
    && mkdir -p "${HIVE_HOME}" \
    && tar -xf /tmp/hive.tar.gz -C "${HIVE_HOME}" --strip-components=1 \
    && rm /tmp/hive.tar.gz /tmp/hive.tar.gz.asc \
    && ln -s "${HIVE_HOME}/etc/hive" "${HIVE_CONF_DIR}" \
    && mkdir "${HIVE_HOME}/logs"

ENV PATH="$HIVE_HOME/bin/:$PATH"

# Install GCS connector for Apache Hadoop
# See: https://cloud.google.com/dataproc/docs/concepts/connectors/cloud-storage
ARG GCS_VARIANT="hadoop2"
ARG GCS_VERSION="2.1.5"

RUN GCS_JAR_PATH="/opt/spark/jars/gcs-connector-${GCS_VARIANT}-${GCS_VERSION}.jar" \
    && GCS_JAR_URL="https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-${GCS_VARIANT}-${GCS_VERSION}.jar" \
    && curl -fSL "${GCS_JAR_URL}" -o "${GCS_JAR_PATH}"

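# Put the connector on the Hadoop classpath so that Hadoop tools can use the gs:// scheme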
ENV HADOOP_CLASSPATH="/opt/spark/jars/gcs-connector-${GCS_VARIANT}-${GCS_VERSION}.jar:$HADOOP_CLASSPATH"

USER ${AIRFLOW_UID}
52 changes: 52 additions & 0 deletions docs/apache-airflow/production-deployment.rst
@@ -749,6 +749,58 @@ additional apt dev and runtime dependencies.
      --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless"

Recipes
-------

Users sometimes share interesting ways of using the Docker images. We encourage you to contribute
such recipes to the documentation by submitting a pull request, so that they can prove useful to
other members of the community. The sections below capture this knowledge.

Google Cloud SDK installation
.............................

Some operators, such as :class:`airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator`
and :class:`airflow.providers.google.cloud.operators.dataflow.DataflowStartSqlJobOperator`, require
the `Google Cloud SDK <https://cloud.google.com/sdk>`__ (which includes ``gcloud``) to be installed
in the image. You can also run ``gcloud`` commands directly in your tasks with the ``BashOperator``.

Create a new Dockerfile like the one shown below.

.. exampleinclude:: /docker-images-recipes/gcloud.Dockerfile
    :language: dockerfile

Then build a new image.

.. code-block:: bash

    docker build . \
      --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.0" \
      -t my-airflow-image
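
As a quick smoke test, you can, for example, start a disposable container from the new image and
check that ``gcloud`` is on the ``PATH``:

.. code-block:: bash

    docker run --rm my-airflow-image gcloud --version
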
Apache Hadoop Stack installation
................................

Airflow is often used to run tasks on a Hadoop cluster. This requires a Java Runtime Environment (JRE).
The recipe below installs tools that are frequently used in the Hadoop world:

- Java Runtime Environment (JRE)
- Apache Hadoop
- Apache Hive
- `Cloud Storage connector for Apache Hadoop <https://cloud.google.com/dataproc/docs/concepts/connectors/cloud-storage>`__


Create a new Dockerfile like the one shown below.

.. exampleinclude:: /docker-images-recipes/hadoop.Dockerfile
    :language: dockerfile

Then build a new image.

.. code-block:: bash

    docker build . \
      --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.0" \
      -t my-airflow-image
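
The recipe exposes the component versions as build arguments (``HADOOP_VERSION``, ``HIVE_VERSION``,
``GCS_VARIANT``, ``GCS_VERSION``), so you can, for example, pin different versions at build time and
then run a quick smoke test of the resulting image:

.. code-block:: bash

    docker build . \
      --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.0" \
      --build-arg HADOOP_VERSION="2.10.1" \
      --build-arg HIVE_VERSION="2.3.7" \
      -t my-airflow-image

    docker run --rm my-airflow-image hadoop version
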
More details about the images
-----------------------------
