Move Shipyard and Airflow Dockerfiles
This PS migrates the Shipyard and Airflow Dockerfiles into this repo and adds a Makefile. Run the following commands from the root directory to build the Airflow and Shipyard images:

$ sudo make build_airflow
$ sudo make build_shipyard

Change-Id: I9a9fb761ce193b1c5b9c5d9589982366eb73e396
commit 772b3b74e7 (parent 491a8571b6)
Makefile (new file, 26 lines)
@@ -0,0 +1,26 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

AIRFLOW_IMAGE_NAME ?= airflow
IMAGE_PREFIX ?= attcomdev
IMAGE_TAG ?= latest
SHIPYARD_IMAGE_NAME ?= shipyard

.PHONY: build_airflow
build_airflow:
	docker build -t $(IMAGE_PREFIX)/$(AIRFLOW_IMAGE_NAME):$(IMAGE_TAG) images/airflow/

.PHONY: build_shipyard
build_shipyard:
	docker build -t $(IMAGE_PREFIX)/$(SHIPYARD_IMAGE_NAME):$(IMAGE_TAG) -f images/shipyard/Dockerfile .
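Because the image variables are declared with `?=`, they can be overridden from the environment or on the make command line. A minimal sketch, assuming a hypothetical registry prefix and tag:

```
# Build the Airflow image under a custom prefix and tag (values are examples only)
sudo make build_airflow IMAGE_PREFIX=quay.io/myorg IMAGE_TAG=dev
```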
images/airflow/Dockerfile (new file, 93 lines)
@@ -0,0 +1,93 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Docker image to run Airflow on Kubernetes
FROM ubuntu:16.04

# Do not prompt user for choices on installation/configuration of packages
ENV DEBIAN_FRONTEND noninteractive
ENV container docker

# Airflow Home Directory
ARG AIRFLOW_HOME=/usr/local/airflow

# Kubectl version
ARG KUBECTL_VERSION=1.7.5

RUN set -ex && \
    apt-get -qq update && \
    apt-get -y install \
        ca-certificates \
        curl \
        gcc \
        git \
        g++ \
        libffi-dev \
        libssl-dev \
        libpq-dev \
        locales \
        netcat \
        netbase \
        python3 \
        python3-setuptools \
        python3-pip \
        python3-dev \
        python3-dateutil \
        make \
        --no-install-recommends \
    && python3 -m pip install -U pip \
    && apt-get clean \
    && rm -rf \
        /var/lib/apt/lists/* \
        /tmp/* \
        /var/tmp/* \
        /usr/share/man \
        /usr/share/doc \
        /usr/share/doc-base

# Copy dependency requirements
# Install DryDock libraries
# Install Armada libraries
COPY ./requirements.txt /tmp/
RUN pip3 install -r /tmp/requirements.txt
RUN pip3 install -e git+git://github.com/att-comdev/drydock.git#egg=drydock_provisioner
RUN pip3 install -e git+git://github.com/att-comdev/armada.git#egg=armada

# Create airflow user
RUN useradd -ms /bin/bash -d ${AIRFLOW_HOME} airflow

# Download and install kubectl
RUN curl -L -o /usr/local/bin/kubectl \
    https://storage.googleapis.com/kubernetes-release/release/v${KUBECTL_VERSION}/bin/linux/amd64/kubectl \
    && chmod +x /usr/local/bin/kubectl

# Copy entrypoint.sh and airflow_start_service.sh
COPY script/entrypoint.sh ${AIRFLOW_HOME}/entrypoint.sh
COPY script/airflow_start_service.sh ${AIRFLOW_HOME}/airflow_start_service.sh

# Change permissions
RUN chown -R airflow: ${AIRFLOW_HOME} \
    && chmod +x ${AIRFLOW_HOME}/entrypoint.sh

# Expose port 8080 for Airflow Web
# Expose port 5555 for Airflow Flower
# Expose port 8793 for Airflow Worker
EXPOSE 8080 5555 8793

# Set work directory
USER airflow
WORKDIR ${AIRFLOW_HOME}

# Execute entrypoint
ENTRYPOINT ["./entrypoint.sh"]
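The kubectl version is parameterized through the `KUBECTL_VERSION` build argument, and the container's command is passed straight through the entrypoint to `airflow_start_service.sh`. A minimal sketch of building and running the image outside the Makefile (image name matches the Makefile defaults; the port mapping is illustrative):

```
# Build from the repo root, pinning the kubectl version baked into the image
docker build --build-arg KUBECTL_VERSION=1.7.5 -t attcomdev/airflow:latest images/airflow/

# Start the webserver; "webserver" is forwarded to airflow_start_service.sh
docker run --rm -p 8080:8080 attcomdev/airflow:latest webserver
```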
images/airflow/README.md (new file, 7 lines)
@@ -0,0 +1,7 @@
## Docker Build ##

We can build the airflow image by executing the following command:

```
docker build -t attcomdev/airflow-helm:v0.1.0 .
```
images/airflow/requirements.txt (new file, 24 lines)
@@ -0,0 +1,24 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

pytz==2017.2
pyOpenSSL==17.3.0
ndg-httpsclient==0.4.3
pyasn1==0.3.6
psycopg2==2.7.3.1
docker-py==1.6.0
apache-airflow[crypto,celery,postgres,hive,hdfs,jdbc]==1.8.2
python-openstackclient==3.11.0
sphinx>=1.6.2
sphinx_rtd_theme==0.2.4
images/airflow/script/airflow_start_service.sh (new file, 48 lines)
@@ -0,0 +1,48 @@
#!/bin/bash
#
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

cmd=$1

# Initialize Airflow DB
if [[ $cmd == 'initdb' ]]; then
    airflow_cmd="/usr/bin/python3 /usr/local/bin/airflow initdb"
    eval $airflow_cmd
# Start the services based on argument from Airflow Helm Chart
elif [[ $cmd == 'webserver' ]]; then
    airflow_cmd="/usr/bin/python3 /usr/local/bin/airflow webserver"
    eval $airflow_cmd
elif [[ $cmd == 'flower' ]]; then
    airflow_cmd="/usr/bin/python3 /usr/local/bin/airflow flower"
    eval $airflow_cmd
elif [[ $cmd == 'worker' ]]; then
    airflow_cmd="/usr/bin/python3 /usr/local/bin/airflow worker"
    eval $airflow_cmd
# If command contains the word 'scheduler'
elif [[ $cmd == *scheduler* ]]; then
    while true; do
        # Start Airflow Scheduler
        # $2 and $3 will take on values '-n' and '-1' respectively
        # The value '-1' indicates that the airflow scheduler will run
        # continuously. Any other value will mean that the scheduler will
        # terminate and restart after x seconds.
        airflow_cmd="/usr/bin/python3 /usr/local/bin/airflow scheduler $2 $3"
        eval $airflow_cmd
    done
else
    echo "Invalid Command!"
    exit 0
fi
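For reference, the script dispatches on its first argument and, for the scheduler case, forwards `$2` and `$3` to the `airflow scheduler` invocation. Illustrative invocations, run inside the container where the script lives under `/usr/local/airflow`:

```
# One-off database initialization
bash /usr/local/airflow/airflow_start_service.sh initdb

# Scheduler in the restart loop; "-n -1" becomes $2 and $3 above
bash /usr/local/airflow/airflow_start_service.sh scheduler -n -1
```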
images/airflow/script/entrypoint.sh (new file, 20 lines)
@@ -0,0 +1,20 @@
#!/bin/bash
#
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Execute Airflow Start Service Script
CMD="bash /usr/local/airflow/airflow_start_service.sh"

exec $CMD "$@"
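Because the entrypoint simply execs the start script with `"$@"`, whatever command the container is launched with becomes the service selector. A hedged example, assuming the image built by the Makefile above:

```
# "flower" is passed through entrypoint.sh to airflow_start_service.sh
docker run --rm -p 5555:5555 attcomdev/airflow:latest flower
```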