
Add support to build hadoop-3.0.1 related package

Change-Id: I2ba39c3d84bdd0b07e9fbe58ef0c8bc655c303ac
Author: Zhuang Changkun
Commit: c118e53a8e

common-artifacts/hadoop-native-libs-3.0.1.tar.gz  (binary)


hadoop-swiftfs/pom.xml  (+13 -2)

@@ -29,6 +29,8 @@
     <packaging>jar</packaging>
 
   <properties>
+    <targetJavaVersion>1.6</targetJavaVersion>
+    <sourceJavaVersion>1.6</sourceJavaVersion>
     <file.encoding>UTF-8</file.encoding>
     <downloadSources>true</downloadSources>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@@ -66,6 +68,15 @@
         <hadoop.version>2.4.1</hadoop.version>
       </properties>
     </profile>
+    <profile>
+      <id>hadoop3</id>
+      <properties>
+        <hadoop.artifactid>hadoop-common</hadoop.artifactid>
+        <hadoop.version>3.0.1</hadoop.version>
+        <targetJavaVersion>1.8</targetJavaVersion>
+        <sourceJavaVersion>1.8</sourceJavaVersion>
+      </properties>
+    </profile>
   </profiles>
 
   <build>
@@ -99,8 +110,8 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>${sourceJavaVersion}</source>
+          <target>${targetJavaVersion}</target>
        </configuration>
      </plugin>
      <plugin>
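
Note: the compiler source/target now default to 1.6 and are raised to 1.8 when the new hadoop3 profile is active. A rough sketch of building the swiftfs jar against Hadoop 3.0.1 by hand, using only the profile and property names from the hunks above (the exact invocation used by the build scripts is not shown in this hunk):

    cd hadoop-swiftfs
    mvn clean package -P hadoop3 -DskipTests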

tools/build-all-artifacts.sh  (+2 -2)

@@ -6,14 +6,14 @@ export BRANCH=$1
 export BRANCH=${BRANCH:-master}
 
 echo "Building all needed hadoop-openstack versions"
-hadoop_versions=(2.2.0 2.3.0 2.5.0 2.6.0 2.7.1 2.7.5 2.8.2)
+hadoop_versions=(2.2.0 2.3.0 2.5.0 2.6.0 2.7.1 2.7.5 2.8.2 3.0.1)
 for ver in ${hadoop_versions[*]}
 do
     ./tools/build-hadoop-openstack.sh $BRANCH ${ver}
 done
 
 echo "Building all oozie versions"
-hadoop_oozie_versions=(2.7.1 2.7.5 2.8.2)
+hadoop_oozie_versions=(2.7.1 2.7.5 2.8.2 3.0.1)
 for ver in ${hadoop_oozie_versions[*]}
 do
     ./tools/build-oozie.sh ${ver}
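
Note: 3.0.1 is simply appended to both version arrays, so the existing loops pick it up. A sketch of exercising only the new version through the per-version scripts instead of the full loop (arguments follow the calls visible in the hunk above; run from the repository root):

    ./tools/build-hadoop-openstack.sh master 3.0.1
    ./tools/build-oozie.sh 3.0.1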

tools/build-hadoop-openstack.sh  (+3 -0)

@@ -18,6 +18,9 @@ case "${HADOOP_VERSION}" in
     "2.2.0" | "2.3.0" | "2.5.0" | "2.6.0" | "2.7.1" | "2.7.5" | "2.8.2")
         EXTRA_ARGS="-P hadoop2"
     ;;
+    "3.0.1")
+        EXTRA_ARGS="-P hadoop3"
+    ;;
 esac
 
 echo "Install required packages"

tools/build-oozie.sh  (+10 -0)

@@ -28,6 +28,12 @@ case "${PLUGIN_VERSION}" in
         OOZIE_VERSION="4.3.0"
         HADOOP_VERSION="2.8.2"
         BUILD_ARGS="-Puber -P hadoop-2"
+    ;;
+    "3.0.1")
+        OOZIE_VERSION="5.0.0"
+        HADOOP_VERSION="3.0.1"
+        BUILD_ARGS="-Puber "
+
 esac
 
 echo "Install required packages"
@@ -61,6 +67,10 @@ if [ "${OOZIE_VERSION}" = "4.3.0" ]; then
         --subnode "/N:project/N:dependencies/N:dependency[last()]" -t elem -n scope -v compile pom.xml.tmp > pom.xml
     popd
 fi
+if [ "${OOZIE_VERSION}" = "5.0.0" ]; then
+    # see https://issues.apache.org/jira/browse/OOZIE-3219
+    patch -p0 < ./../tools/oozie_core.patch
+fi
 
 ./bin/mkdistro.sh assembly:single ${BUILD_ARGS} -Dhadoop.version=${HADOOP_VERSION} -DjavaVersion=1.8 -DtargetJavaVersion=1.8 -DskipTests
 mkdir -p ./../dist/oozie/
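
Note: for PLUGIN_VERSION=3.0.1 the case block selects Oozie 5.0.0, applies oozie_core.patch, and the mkdistro.sh line already present in the script expands roughly as follows (variable substitution only, using the values set in this hunk; nothing else is assumed):

    ./bin/mkdistro.sh assembly:single -Puber -Dhadoop.version=3.0.1 \
        -DjavaVersion=1.8 -DtargetJavaVersion=1.8 -DskipTests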

tools/oozie_core.patch  (+126 -0)

@@ -0,0 +1,126 @@
+diff --git core/src/main/java/org/apache/oozie/util/db/FailingConnectionWrapper.java core/src/main/java/org/apache/oozie/util/db/FailingConnectionWrapper.java
+index 3b87cc9e760424758fa62436c87e218e3693349a..9e603ed03df516c8b08df9c7359a1aca57f4b796 100644
+--- core/src/main/java/org/apache/oozie/util/db/FailingConnectionWrapper.java
++++ core/src/main/java/org/apache/oozie/util/db/FailingConnectionWrapper.java
+@@ -20,8 +20,8 @@ package org.apache.oozie.util.db;
+ 
+ import com.google.common.base.Preconditions;
+ import com.google.common.base.Predicate;
++import com.google.common.base.Strings;
+ import com.google.common.collect.Sets;
+-import org.apache.directory.api.util.Strings;
+ import org.apache.oozie.util.XLog;
+ 
+ import javax.annotation.Nullable;
+@@ -354,7 +354,7 @@ public class FailingConnectionWrapper implements Connection {
+ 
+         @Override
+         public boolean apply(@Nullable String input) {
+-            Preconditions.checkArgument(Strings.isNotEmpty(input));
++            Preconditions.checkArgument(!Strings.isNullOrEmpty(input));
+ 
+             boolean isDmlStatement = false;
+             for (final String dmlPrefix : DML_PREFIXES) {
+diff --git examples/pom.xml examples/pom.xml
+index 2bfc7c8b3fc3de4876c62d70f25bed35b886ece8..c13febc81cf5b26b4af90885787113e247ae7cb8 100644
+--- examples/pom.xml
++++ examples/pom.xml
+@@ -38,6 +38,12 @@
+             <scope>provided</scope>
+         </dependency>
+ 
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-common</artifactId>
++            <scope>provided</scope>
++        </dependency>
++
+         <dependency>
+             <groupId>org.apache.hadoop</groupId>
+             <artifactId>hadoop-minicluster</artifactId>
+diff --git sharelib/pig/pom.xml sharelib/pig/pom.xml
+index 5084b61affb58412730d176399582f1672765ab3..6619a0a82a218eff6aea1b30829bc0f5827790e6 100644
+--- sharelib/pig/pom.xml
++++ sharelib/pig/pom.xml
+@@ -107,6 +107,11 @@
+             <artifactId>hadoop-client</artifactId>
+             <scope>provided</scope>
+         </dependency>
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-common</artifactId>
++            <scope>provided</scope>
++        </dependency>
+         <dependency>
+            <groupId>dk.brics.automaton</groupId>
+             <artifactId>automaton</artifactId>
+diff --git sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
+index 2f3cfbe4dfa2c2015686d2f2ca09ec4de544f423..5af39cda52b403d8989fdd5d4f2524395a55580a 100644
+--- sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
++++ sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
+@@ -19,9 +19,9 @@
+ package org.apache.oozie.action.hadoop;
+ 
+ import com.google.common.annotations.VisibleForTesting;
++import com.google.common.base.Strings;
+ import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+ import org.apache.commons.lang.StringUtils;
+-import org.apache.directory.api.util.Strings;
+ import org.apache.hadoop.conf.Configuration;
+ import org.apache.hadoop.filecache.DistributedCache;
+ import org.apache.hadoop.fs.Path;
+@@ -424,7 +424,7 @@ class SparkArgsExtractor {
+     private void addUserDefined(final String userList, final Map<String, URI> urisMap) {
+         if (userList != null) {
+             for (final String file : userList.split(OPT_VALUE_SEPARATOR)) {
+-                if (!Strings.isEmpty(file)) {
++                if (!Strings.isNullOrEmpty(file)) {
+                     final Path p = new Path(file);
+                     urisMap.put(p.getName(), p.toUri());
+                 }
+diff --git tools/src/main/java/org/apache/oozie/tools/OozieDBImportCLI.java tools/src/main/java/org/apache/oozie/tools/OozieDBImportCLI.java
+index 852940909619612d2d88d96fc54d9a95cebeffaa..f21fd3bde8cfdf2e5ed6f39f3533030d7a97cdbf 100644
+--- tools/src/main/java/org/apache/oozie/tools/OozieDBImportCLI.java
++++ tools/src/main/java/org/apache/oozie/tools/OozieDBImportCLI.java
+@@ -19,6 +19,7 @@
+ package org.apache.oozie.tools;
+ 
+ import com.google.common.base.Charsets;
++import com.google.common.base.Strings;
+ import com.google.common.collect.Lists;
+ import com.google.common.collect.Sets;
+ import com.google.gson.Gson;
+@@ -26,7 +27,6 @@ import com.google.gson.Gson;
+ import com.google.gson.JsonSyntaxException;
+ import org.apache.commons.cli.Options;
+ import org.apache.commons.cli.ParseException;
+-import org.apache.directory.api.util.Strings;
+ import org.apache.oozie.BundleActionBean;
+ import org.apache.oozie.BundleJobBean;
+ import org.apache.oozie.CoordinatorActionBean;
+@@ -166,7 +166,7 @@ public class OozieDBImportCLI {
+     }
+ 
+     private static void setImportBatchSize() {
+-        if (Strings.isNotEmpty(System.getProperty(OOZIE_DB_IMPORT_BATCH_SIZE_KEY))) {
++        if (!Strings.isNullOrEmpty(System.getProperty(OOZIE_DB_IMPORT_BATCH_SIZE_KEY))) {
+             try {
+                 IMPORT_BATCH_SIZE = Integer.parseInt(System.getProperty(OOZIE_DB_IMPORT_BATCH_SIZE_KEY));
+             }
+diff --git webapp/pom.xml webapp/pom.xml
+index 797996912b6e6381b261a69f8eb1e012fe488fdf..67526d953cdb29af5e8e736046ed5b1514d41ecb 100644
+--- webapp/pom.xml
++++ webapp/pom.xml
+@@ -148,6 +148,12 @@
+             <artifactId>hadoop-client</artifactId>
+             <scope>provided</scope>
+         </dependency>
++
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-common</artifactId>
++            <scope>provided</scope>
++        </dependency>
+     </dependencies>
+ 
+     <build>
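
Note: the patch swaps org.apache.directory.api.util.Strings for Guava's com.google.common.base.Strings and adds hadoop-common as a provided dependency in several Oozie modules (see the OOZIE-3219 link referenced in build-oozie.sh above). A rough sketch of checking that it still applies cleanly against an unpacked Oozie 5.0.0 source tree before running the real build (the directory layout is illustrative, matching the -p0 usage in the script):

    cd oozie-5.0.0
    patch -p0 --dry-run < ../tools/oozie_core.patch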
