Add support to build hadoop-3.0.1 related packages

Change-Id: I2ba39c3d84bdd0b07e9fbe58ef0c8bc655c303ac
Zhuang Changkun 2018-05-18 19:17:42 +08:00
parent af6cac8e36
commit c118e53a8e
6 changed files with 154 additions and 4 deletions

Binary file not shown.


@@ -29,6 +29,8 @@
     <packaging>jar</packaging>
     <properties>
+        <targetJavaVersion>1.6</targetJavaVersion>
+        <sourceJavaVersion>1.6</sourceJavaVersion>
         <file.encoding>UTF-8</file.encoding>
         <downloadSources>true</downloadSources>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@@ -66,6 +68,15 @@
                 <hadoop.version>2.4.1</hadoop.version>
             </properties>
         </profile>
+        <profile>
+            <id>hadoop3</id>
+            <properties>
+                <hadoop.artifactid>hadoop-common</hadoop.artifactid>
+                <hadoop.version>3.0.1</hadoop.version>
+                <targetJavaVersion>1.8</targetJavaVersion>
+                <sourceJavaVersion>1.8</sourceJavaVersion>
+            </properties>
+        </profile>
     </profiles>
     <build>
@@ -99,8 +110,8 @@
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-compiler-plugin</artifactId>
                 <configuration>
-                    <source>1.6</source>
-                    <target>1.6</target>
+                    <source>${sourceJavaVersion}</source>
+                    <target>${targetJavaVersion}</target>
                 </configuration>
             </plugin>
             <plugin>


@@ -6,14 +6,14 @@ export BRANCH=$1
 export BRANCH=${BRANCH:-master}
 
 echo "Building all needed hadoop-openstack versions"
-hadoop_versions=(2.2.0 2.3.0 2.5.0 2.6.0 2.7.1 2.7.5 2.8.2)
+hadoop_versions=(2.2.0 2.3.0 2.5.0 2.6.0 2.7.1 2.7.5 2.8.2 3.0.1)
 for ver in ${hadoop_versions[*]}
 do
   ./tools/build-hadoop-openstack.sh $BRANCH ${ver}
 done
 
 echo "Building all oozie versions"
-hadoop_oozie_versions=(2.7.1 2.7.5 2.8.2)
+hadoop_oozie_versions=(2.7.1 2.7.5 2.8.2 3.0.1)
 for ver in ${hadoop_oozie_versions[*]}
 do
   ./tools/build-oozie.sh ${ver}


@@ -18,6 +18,9 @@ case "${HADOOP_VERSION}" in
     "2.2.0" | "2.3.0" | "2.5.0" | "2.6.0" | "2.7.1" | "2.7.5" | "2.8.2")
         EXTRA_ARGS="-P hadoop2"
         ;;
+    "3.0.1")
+        EXTRA_ARGS="-P hadoop3"
+        ;;
 esac
 
 echo "Install required packages"


@@ -28,6 +28,12 @@ case "${PLUGIN_VERSION}" in
         OOZIE_VERSION="4.3.0"
         HADOOP_VERSION="2.8.2"
         BUILD_ARGS="-Puber -P hadoop-2"
+        ;;
+    "3.0.1")
+        OOZIE_VERSION="5.0.0"
+        HADOOP_VERSION="3.0.1"
+        BUILD_ARGS="-Puber "
 esac
 
 echo "Install required packages"
@@ -61,6 +67,10 @@ if [ "${OOZIE_VERSION}" = "4.3.0" ]; then
         --subnode "/N:project/N:dependencies/N:dependency[last()]" -t elem -n scope -v compile pom.xml.tmp > pom.xml
     popd
 fi
+if [ "${OOZIE_VERSION}" = "5.0.0" ]; then
+    # see https://issues.apache.org/jira/browse/OOZIE-3219
+    patch -p0 < ./../tools/oozie_core.patch
+fi
 
 ./bin/mkdistro.sh assembly:single ${BUILD_ARGS} -Dhadoop.version=${HADOOP_VERSION} -DjavaVersion=1.8 -DtargetJavaVersion=1.8 -DskipTests
 mkdir -p ./../dist/oozie/

tools/oozie_core.patch (new file, 126 lines)

@@ -0,0 +1,126 @@
diff --git core/src/main/java/org/apache/oozie/util/db/FailingConnectionWrapper.java core/src/main/java/org/apache/oozie/util/db/FailingConnectionWrapper.java
index 3b87cc9e760424758fa62436c87e218e3693349a..9e603ed03df516c8b08df9c7359a1aca57f4b796 100644
--- core/src/main/java/org/apache/oozie/util/db/FailingConnectionWrapper.java
+++ core/src/main/java/org/apache/oozie/util/db/FailingConnectionWrapper.java
@@ -20,8 +20,8 @@ package org.apache.oozie.util.db;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
+import com.google.common.base.Strings;
import com.google.common.collect.Sets;
-import org.apache.directory.api.util.Strings;
import org.apache.oozie.util.XLog;
import javax.annotation.Nullable;
@@ -354,7 +354,7 @@ public class FailingConnectionWrapper implements Connection {
@Override
public boolean apply(@Nullable String input) {
- Preconditions.checkArgument(Strings.isNotEmpty(input));
+ Preconditions.checkArgument(!Strings.isNullOrEmpty(input));
boolean isDmlStatement = false;
for (final String dmlPrefix : DML_PREFIXES) {
diff --git examples/pom.xml examples/pom.xml
index 2bfc7c8b3fc3de4876c62d70f25bed35b886ece8..c13febc81cf5b26b4af90885787113e247ae7cb8 100644
--- examples/pom.xml
+++ examples/pom.xml
@@ -38,6 +38,12 @@
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <scope>provided</scope>
+ </dependency>
+
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
diff --git sharelib/pig/pom.xml sharelib/pig/pom.xml
index 5084b61affb58412730d176399582f1672765ab3..6619a0a82a218eff6aea1b30829bc0f5827790e6 100644
--- sharelib/pig/pom.xml
+++ sharelib/pig/pom.xml
@@ -107,6 +107,11 @@
<artifactId>hadoop-client</artifactId>
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <scope>provided</scope>
+ </dependency>
<dependency>
<groupId>dk.brics.automaton</groupId>
<artifactId>automaton</artifactId>
diff --git sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
index 2f3cfbe4dfa2c2015686d2f2ca09ec4de544f423..5af39cda52b403d8989fdd5d4f2524395a55580a 100644
--- sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
+++ sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
@@ -19,9 +19,9 @@
package org.apache.oozie.action.hadoop;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.commons.lang.StringUtils;
-import org.apache.directory.api.util.Strings;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
@@ -424,7 +424,7 @@ class SparkArgsExtractor {
private void addUserDefined(final String userList, final Map<String, URI> urisMap) {
if (userList != null) {
for (final String file : userList.split(OPT_VALUE_SEPARATOR)) {
- if (!Strings.isEmpty(file)) {
+ if (!Strings.isNullOrEmpty(file)) {
final Path p = new Path(file);
urisMap.put(p.getName(), p.toUri());
}
diff --git tools/src/main/java/org/apache/oozie/tools/OozieDBImportCLI.java tools/src/main/java/org/apache/oozie/tools/OozieDBImportCLI.java
index 852940909619612d2d88d96fc54d9a95cebeffaa..f21fd3bde8cfdf2e5ed6f39f3533030d7a97cdbf 100644
--- tools/src/main/java/org/apache/oozie/tools/OozieDBImportCLI.java
+++ tools/src/main/java/org/apache/oozie/tools/OozieDBImportCLI.java
@@ -19,6 +19,7 @@
package org.apache.oozie.tools;
import com.google.common.base.Charsets;
+import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.gson.Gson;
@@ -26,7 +27,6 @@ import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.directory.api.util.Strings;
import org.apache.oozie.BundleActionBean;
import org.apache.oozie.BundleJobBean;
import org.apache.oozie.CoordinatorActionBean;
@@ -166,7 +166,7 @@ public class OozieDBImportCLI {
}
private static void setImportBatchSize() {
- if (Strings.isNotEmpty(System.getProperty(OOZIE_DB_IMPORT_BATCH_SIZE_KEY))) {
+ if (!Strings.isNullOrEmpty(System.getProperty(OOZIE_DB_IMPORT_BATCH_SIZE_KEY))) {
try {
IMPORT_BATCH_SIZE = Integer.parseInt(System.getProperty(OOZIE_DB_IMPORT_BATCH_SIZE_KEY));
}
diff --git webapp/pom.xml webapp/pom.xml
index 797996912b6e6381b261a69f8eb1e012fe488fdf..67526d953cdb29af5e8e736046ed5b1514d41ecb 100644
--- webapp/pom.xml
+++ webapp/pom.xml
@@ -148,6 +148,12 @@
<artifactId>hadoop-client</artifactId>
<scope>provided</scope>
</dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <scope>provided</scope>
+ </dependency>
</dependencies>
<build>