Skip to content
This repository has been archived by the owner on Jul 23, 2024. It is now read-only.

Commit

Permalink
Added support for HAWQ and PXF installation
Browse files Browse the repository at this point in the history
  • Loading branch information
Mariano Gonzalez committed Sep 7, 2018
1 parent ae2af74 commit fda0229
Show file tree
Hide file tree
Showing 9 changed files with 374 additions and 4 deletions.
88 changes: 86 additions & 2 deletions contrib/hawq-docker/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,13 @@ OS_VERSION := centos7
# Do not use underscore "_" in CLUSTER_ID
CLUSTER_ID := $(OS_VERSION)
# Mount this local directory to /data in data container and share with other containers
LOCAL :=
LOCAL :=
# networks used in docker
NETWORK := $(CLUSTER_ID)_hawq_network
HAWQ_HOME := "/data/hawq-devel"
JAVA_TOOL_OPTIONS := -Dfile.encoding=UTF8

all:
all:
@echo " Usage:"
@echo " To setup a build and test environment: make run"
@echo " To start all containers: make start"
Expand Down Expand Up @@ -227,3 +229,85 @@ distclean:
docker network rm $(NETWORK) 2>&1 >/dev/null || true; \
fi
@echo "Distclean Done!"

# Build and install HAWQ inside the namenode container (runs make-hawq.sh
# as gpadmin).  Fails the make run if the container does not exist, matching
# the behaviour of the pxf-* targets.
hawq:
	@echo "Logging into $(CLUSTER_ID)-namenode container"
	@if [ ! -z "`docker ps -a --filter="name=$(CLUSTER_ID)-namenode" | grep -v CONTAINER`" ]; then \
		docker exec \
			-e "HAWQ_HOME=$(HAWQ_HOME)" \
			-e "JAVA_TOOL_OPTIONS=$(JAVA_TOOL_OPTIONS)" \
			-u gpadmin --privileged -it $(CLUSTER_ID)-namenode "make-hawq.sh"; \
	else \
		echo "$(CLUSTER_ID)-namenode container does not exist!" && exit 1; \
	fi

# Initialise and start the HAWQ cluster inside the namenode container (runs
# start-hawq.sh as gpadmin).  NAMENODE is exported so the script can reach
# the HDFS namenode.  Fails the make run if the container does not exist,
# matching the behaviour of the pxf-* targets.
start-hawq:
	@echo "Logging into $(CLUSTER_ID)-namenode container"
	@if [ ! -z "`docker ps -a --filter="name=$(CLUSTER_ID)-namenode" | grep -v CONTAINER`" ]; then \
		docker exec \
			-e "HAWQ_HOME=$(HAWQ_HOME)" \
			-e "NAMENODE=$(CLUSTER_ID)-namenode" \
			-u gpadmin --privileged -it $(CLUSTER_ID)-namenode "start-hawq.sh"; \
	else \
		echo "$(CLUSTER_ID)-namenode container does not exist!" && exit 1; \
	fi

# Build and install PXF on the namenode, then on every datanode
# (1..NDATANODES).  Uses $(MAKE) rather than a literal `make` so that
# command-line flags and the -j jobserver propagate to the sub-makes.
pxf:
	@$(MAKE) -f $(THIS_MAKEFILE_PATH) pxf-namenode
	@i=1; \
	while [ $$i -le $(NDATANODES) ] ; do \
		$(MAKE) -f $(THIS_MAKEFILE_PATH) CUR_DATANODE=$$i pxf-datanode; \
		i=$$((i+1)); \
	done

# Build and install PXF inside the namenode container (runs make-pxf.sh as
# gpadmin); exits non-zero when the container is missing.
pxf-namenode:
	@echo "Logging into $(CLUSTER_ID)-namenode container"
	@if [ -n "$$(docker ps -a --filter='name=$(CLUSTER_ID)-namenode' | grep -v CONTAINER)" ]; then \
		docker exec -u gpadmin --privileged -it \
			-e "HAWQ_HOME=$(HAWQ_HOME)" \
			-e "JAVA_TOOL_OPTIONS=$(JAVA_TOOL_OPTIONS)" \
			$(CLUSTER_ID)-namenode "make-pxf.sh"; \
	else \
		echo "$(CLUSTER_ID)-namenode container does not exist!" && exit 1; \
	fi

# Build and install PXF inside datanode number CUR_DATANODE (set by the
# `pxf` driver target); exits non-zero when the container is missing.
pxf-datanode:
	@echo "Logging into $(CLUSTER_ID)-datanode$(CUR_DATANODE) container"
	@if [ -n "$$(docker ps -a --filter='name=$(CLUSTER_ID)-datanode$(CUR_DATANODE)' | grep -v CONTAINER)" ]; then \
		docker exec -u gpadmin --privileged -it \
			-e "HAWQ_HOME=$(HAWQ_HOME)" \
			-e "JAVA_TOOL_OPTIONS=$(JAVA_TOOL_OPTIONS)" \
			$(CLUSTER_ID)-datanode$(CUR_DATANODE) "make-pxf.sh"; \
	else \
		echo "$(CLUSTER_ID)-datanode$(CUR_DATANODE) container does not exist!" && exit 1; \
	fi

# Start the PXF service on the namenode, then on every datanode
# (1..NDATANODES).  Uses $(MAKE) rather than a literal `make` so that
# command-line flags and the -j jobserver propagate to the sub-makes.
start-pxf:
	@$(MAKE) -f $(THIS_MAKEFILE_PATH) start-pxf-namenode
	@i=1; \
	while [ $$i -le $(NDATANODES) ] ; do \
		$(MAKE) -f $(THIS_MAKEFILE_PATH) CUR_DATANODE=$$i start-pxf-datanode; \
		i=$$((i+1)); \
	done

# Start the PXF service inside the namenode container (runs start-pxf.sh as
# gpadmin); exits non-zero when the container is missing.
start-pxf-namenode:
	@echo "Logging into $(CLUSTER_ID)-namenode container"
	@if [ -n "$$(docker ps -a --filter='name=$(CLUSTER_ID)-namenode' | grep -v CONTAINER)" ]; then \
		docker exec -u gpadmin --privileged -it \
			-e "HAWQ_HOME=$(HAWQ_HOME)" \
			-e "NAMENODE=$(CLUSTER_ID)-namenode" \
			$(CLUSTER_ID)-namenode "start-pxf.sh"; \
	else \
		echo "$(CLUSTER_ID)-namenode container does not exist!" && exit 1; \
	fi

# Start the PXF service inside datanode number CUR_DATANODE (set by the
# `start-pxf` driver target); exits non-zero when the container is missing.
start-pxf-datanode:
	@echo "Logging into $(CLUSTER_ID)-datanode$(CUR_DATANODE) container"
	@if [ -n "$$(docker ps -a --filter='name=$(CLUSTER_ID)-datanode$(CUR_DATANODE)' | grep -v CONTAINER)" ]; then \
		docker exec -u gpadmin --privileged -it \
			-e "HAWQ_HOME=$(HAWQ_HOME)" \
			-e "NAMENODE=$(CLUSTER_ID)-namenode" \
			$(CLUSTER_ID)-datanode$(CUR_DATANODE) "start-pxf.sh"; \
	else \
		echo "$(CLUSTER_ID)-datanode$(CUR_DATANODE) container does not exist!" && exit 1; \
	fi
2 changes: 1 addition & 1 deletion contrib/hawq-docker/centos7-docker/hawq-dev/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ RUN yum install -y epel-release && \
openldap-devel protobuf-devel readline-devel net-snmp-devel apr-devel \
libesmtp-devel python-pip json-c-devel \
java-1.7.0-openjdk-devel lcov cmake \
openssh-clients openssh-server perl-JSON && \
openssh-clients openssh-server perl-JSON unzip && \
yum clean all

RUN rpm -ivh --nodeps https://rpmfind.net/linux/centos/6.10/os/x86_64/Packages/bison-2.4.1-5.el6.x86_64.rpm
Expand Down
4 changes: 4 additions & 0 deletions contrib/hawq-docker/centos7-docker/hawq-test/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,10 @@ RUN ln -s /usr/hdp/current/hadoop-hdfs-namenode/../hadoop/sbin/hadoop-daemon.sh
COPY conf/* /etc/hadoop/conf/

COPY entrypoint.sh /usr/bin/entrypoint.sh
COPY make-hawq.sh /usr/bin/make-hawq.sh
COPY make-pxf.sh /usr/bin/make-pxf.sh
COPY start-hawq.sh /usr/bin/start-hawq.sh
COPY start-pxf.sh /usr/bin/start-pxf.sh
COPY start-hdfs.sh /usr/bin/start-hdfs.sh

USER gpadmin
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,6 @@
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://${hdfs.namenode}:8020</value>
<value>hdfs://@hdfs.namenode@:8020</value>
</property>
</configuration>
3 changes: 3 additions & 0 deletions contrib/hawq-docker/centos7-docker/hawq-test/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ if [ ! -f /etc/profile.d/hadoop.sh ]; then
sudo chmod a+x /etc/profile.d/hadoop.sh
fi

sudo chmod 777 /etc/hadoop/conf/core-site.xml
sudo sed "s/@hdfs.namenode@/$NAMENODE/g" -i /etc/hadoop/conf/core-site.xml

sudo start-hdfs.sh
sudo sysctl -p

Expand Down
36 changes: 36 additions & 0 deletions contrib/hawq-docker/centos7-docker/hawq-test/make-hawq.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
#!/bin/bash

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Build HAWQ from source inside the namenode container and install it into
# ${HAWQ_HOME} (exported by the invoking `make hawq` target).

# Abort on the first failed command so a broken build fails the make target
# instead of silently continuing.
set -e

# Clone only when the checkout is not already present, so the script can be
# re-run without failing on an existing /data/hawq directory.
if [ ! -d /data/hawq/.git ]; then
    git clone https://github.com/apache/incubator-hawq.git /data/hawq
fi

cd /data/hawq
./configure --prefix=${HAWQ_HOME}
make -j16
make install

# greenplum_path.sh sets GPHOME (the install prefix) and related PATH entries.
source ${HAWQ_HOME}/greenplum_path.sh

# Point the master at the namenode container and register the datanodes.
# NOTE(review): host names assume the default CLUSTER_ID (centos7) — confirm
# if a different CLUSTER_ID is used.
sudo sed 's|localhost|centos7-namenode|g' -i ${GPHOME}/etc/hawq-site.xml
# `sudo echo text > file` redirects as the invoking user, not as root, so
# write through `sudo tee` instead.
printf '%s\n' centos7-datanode1 centos7-datanode2 centos7-datanode3 \
    | sudo tee ${GPHOME}/etc/slaves > /dev/null

# Give gpadmin ownership of the HDFS root so `hawq init` can create its dirs.
sudo -u hdfs hdfs dfs -chown gpadmin /

echo "Make HAWQ Done!"

146 changes: 146 additions & 0 deletions contrib/hawq-docker/centos7-docker/hawq-test/make-pxf.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
#!/bin/bash

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Build and install the PXF extension from the existing HAWQ checkout
# (/data/hawq, created by make-hawq.sh) and generate its runtime
# configuration.  Expects HAWQ_HOME in the environment (exported by the
# invoking `make pxf*` target); greenplum_path.sh sets GPHOME from it.
source ${HAWQ_HOME}/greenplum_path.sh
export PXF_HOME=${GPHOME}/pxf

# NOTE(review): `sudo` here does not apply to the `>>` redirection (the
# shell performs it as the invoking user).  It works because gpadmin owns
# its own .bashrc, but the sudo is misleading — consider `sudo tee -a`.
sudo echo "source ${GPHOME}/greenplum_path.sh" >> /home/gpadmin/.bashrc

cd /data/hawq/pxf
make
make install

# Run the PXF service as gpadmin instead of the default pxf user.
sudo sed 's|-pxf|-gpadmin|g' -i ${PXF_HOME}/conf/pxf-env.sh

# Regenerate the log4j config.  The heredoc delimiter is unquoted, so
# ${PXF_HOME} below is expanded NOW, baking the absolute log path into the
# generated file.
rm -rf ${PXF_HOME}/conf/pxf-log4j.properties
cat <<EOF >>${PXF_HOME}/conf/pxf-log4j.properties
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
log4j.appender.ROLLINGFILE.File=${PXF_HOME}/pxf-service/logs/pxf-service.log
log4j.appender.ROLLINGFILE.MaxFileSize=10MB
log4j.appender.ROLLINGFILE.MaxBackupIndex=10
log4j.appender.ROLLINGFILE.layout=org.apache.log4j.PatternLayout
log4j.appender.ROLLINGFILE.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSSS} %p %t %c - %m%n
EOF

# Regenerate the internal classpath with paths hard-coded for this image's
# HDP 2.5.0.0-1245 Hadoop layout and the /data/hawq-devel install prefix.
rm -rf ${PXF_HOME}/conf/pxf-private.classpath
cat <<EOF >>${PXF_HOME}/conf/pxf-private.classpath
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################
# This file contains the internal classpaths required to run PXF.
# Edit to set the base paths according to your specific package layout
# Adding new resources should be done using pxf-public.classpath file.
##################################################################
# PXF Configuration
/data/hawq-devel/pxf/conf
# Hadoop Configuration
/usr/hdp/2.5.0.0-1245/hadoop/etc/hadoop
# Hive Configuration
# hive/conf
# Hbase Configuration
# base/conf
# PXF Libraries
/data/hawq-devel/pxf/lib/pxf-hbase-*[0-9].jar
/data/hawq-devel/pxf/lib/pxf-hdfs-*[0-9].jar
/data/hawq-devel/pxf/lib/pxf-hive-*[0-9].jar
/data/hawq-devel/pxf/lib/pxf-json-*[0-9].jar
/data/hawq-devel/pxf/lib/pxf-jdbc-*[0-9].jar
/data/hawq-devel/pxf/lib/pxf-ignite-*[0-9].jar
# Hadoop Libraries
/usr/hdp/2.5.0.0-1245/hadoop/client/hadoop-hdfs-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/hadoop-mapreduce-client-core-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/hadoop-auth-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/hadoop-common-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/lib/asm-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/avro-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/commons-cli-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/commons-codec-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/commons-collections-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/commons-configuration-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/commons-io-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/commons-lang-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/commons-logging-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/commons-compress-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/guava-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/htrace-core*.jar
/usr/hdp/2.5.0.0-1245/hadoop/client/jetty-*.jar
/usr/hdp/2.5.0.0-1245/hadoop/client/jackson-core-asl-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/jackson-mapper-asl-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/lib/jersey-core-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/lib/jersey-server-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/log4j-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/protobuf-java-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/slf4j-api-*[0-9].jar
/usr/hdp/2.5.0.0-1245/hadoop/client/gson-*[0-9].jar
# Hive Libraries
# hive/lib/antlr-runtime*.jar
# hive/lib/datanucleus-api-jdo*.jar
# hive/lib/datanucleus-core*.jar
# hive/lib/hive-exec*.jar
# hive/lib/hive-metastore*.jar
# hive/lib/jdo-api*.jar
# hive/lib/libfb303*.jar
# when running on OSx, 1.0.5 or higher version is required
# hive/lib/snappy-java*.jar
# HBase Libraries
# hbase/lib/hbase-client*.jar
# hbase/lib/hbase-common*.jar
# hbase/lib/hbase-protocol*.jar
# hbase/lib/htrace-core*.jar
# hbase/lib/netty*.jar
# hbase/lib/zookeeper*.jar
# hbase/lib/metrics-core*.jar
EOF

echo "Make PXF Done!"
45 changes: 45 additions & 0 deletions contrib/hawq-docker/centos7-docker/hawq-test/start-hawq.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
#!/bin/bash

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# (Re)initialise and start the HAWQ cluster from the namenode container.
# Expects HAWQ_HOME and NAMENODE in the environment (exported by the
# invoking `make start-hawq` target).
source ${HAWQ_HOME}/greenplum_path.sh

export BASEDIR=/data
export GPHOME=${HAWQ_HOME}
export HAWQSITE_CONF=${HAWQ_HOME}/etc/hawq-site.xml
export HOME=/home/gpadmin
# NOTE(review): hard-coded host assumes the default CLUSTER_ID (centos7).
export HOSTNAME=centos7-namenode
export JAVA_HOME=/etc/alternatives/java_sdk
# No trailing ':' — an empty entry would add the cwd to the search path.
export LD_LIBRARY_PATH=${HAWQ_HOME}/lib
export LIBHDFS3_CONF=${HAWQ_HOME}/etc/hdfs-client.xml
export LIBYARN_CONF=${HAWQ_HOME}/etc/yarn-client.xml
export NAMENODE=${NAMENODE}
export OPENSSL_CONF=${HAWQ_HOME}/etc/openssl.cnf
export PATH=${HAWQ_HOME}/bin:/usr/lib64/qt-3.3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
export PWD=/data
export PYTHONPATH=${HAWQ_HOME}/lib/python
export USER=gpadmin

# Tear down any previous cluster state; all three steps are best-effort and
# expected to no-op on a freshly built container.
hawq stop cluster -a || true
# -f: do not report an error when /hawq_default does not exist yet.
hdfs dfs -rm -r -f /hawq_default
rm -rf /home/gpadmin/hawq-data-directory/masterdd

echo "Starting HAWQ Cluster"
hawq init cluster -a
echo "Starting HAWQ Cluster Done!"

# Report the cluster status as the script's final (visible) result.
hawq state
Loading

0 comments on commit fda0229

Please sign in to comment.