This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch branch-0.9
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/branch-0.9 by this push:
     new 20f2796  [ZEPPELIN-5093]. yarn interpreter mode is broken for flink interpreter
20f2796 is described below

commit 20f279607649902117a263957004d2ad64a09141
Author: Jeff Zhang <zjf...@apache.org>
AuthorDate: Mon Oct 12 15:59:40 2020 +0800

    [ZEPPELIN-5093]. yarn interpreter mode is broken for flink interpreter
    
    ### What is this PR for?
    
    There are two issues in yarn interpreter mode for the flink interpreter:
    1. Failure to submit the yarn app when hive is enabled
    2. Unable to create HiveCatalog in hadoop3
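
    Both are addressed in the diff below: the local HIVE_CONF_DIR is packaged
    into a zip and shipped to the YARN container as an ARCHIVE local resource,
    and HADOOP_MAPRED_HOME is set so hadoop3 containers can resolve the
    MapReduce jars. A condensed sketch of the two changes, reusing the names
    from the patch (not a drop-in replacement for it):

        // 1. Zip HIVE_CONF_DIR and register it as an ARCHIVE resource named "hive_conf"
        File hiveConfZipFile = createHiveConfZip(new File(hiveConfDir));
        srcPath = localFs.makeQualified(new Path(hiveConfZipFile.toURI()));
        destPath = copyFileToRemote(stagingDir, srcPath, (short) 1);
        addResource(fs, destPath, localResources, LocalResourceType.ARCHIVE, "hive_conf");

        // 2. Point HADOOP_MAPRED_HOME at HADOOP_HOME so hadoop3 finds the MapReduce jars
        this.envs.put("HADOOP_MAPRED_HOME", "${HADOOP_HOME}");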
    
    ### What type of PR is it?
    [Bug Fix]
    
    ### Todos
    * [ ] - Task
    
    ### What is the Jira issue?
    * https://issues.apache.org/jira/browse/ZEPPELIN-5093
    
    ### How should this be tested?
    * CI pass: https://travis-ci.org/github/zjffdu/zeppelin/builds/734950621
    
    ### Screenshots (if appropriate)
    
    ### Questions:
    * Do the license files need to be updated? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Jeff Zhang <zjf...@apache.org>
    
    Closes #3940 from zjffdu/ZEPPELIN-5093 and squashes the following commits:
    
    847da2174 [Jeff Zhang] [ZEPPELIN-5093]. yarn interpreter mode is broken for flink interpreter
    
    (cherry picked from commit c62eb87b54844af33babc57d0919b5630a913e16)
    Signed-off-by: Jeff Zhang <zjf...@apache.org>
---
 .../launcher/YarnRemoteInterpreterProcess.java     | 23 +++++++++++++++++++++-
 1 file changed, 22 insertions(+), 1 deletion(-)

diff --git a/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnRemoteInterpreterProcess.java b/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnRemoteInterpreterProcess.java
index 7411686..f7037e4 100644
--- a/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnRemoteInterpreterProcess.java
+++ b/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnRemoteInterpreterProcess.java
@@ -260,7 +260,8 @@ public class YarnRemoteInterpreterProcess extends RemoteInterpreterProcess {
 
       String hiveConfDir = launchContext.getProperties().getProperty("HIVE_CONF_DIR");
       if (!org.apache.commons.lang3.StringUtils.isBlank(hiveConfDir)) {
-        srcPath = localFs.makeQualified(new Path(new File(hiveConfDir).toURI()));
+        File hiveConfZipFile = createHiveConfZip(new File(hiveConfDir));
+        srcPath = localFs.makeQualified(new Path(hiveConfZipFile.toURI()));
         destPath = copyFileToRemote(stagingDir, srcPath, (short) 1);
         addResource(fs, destPath, localResources, LocalResourceType.ARCHIVE, "hive_conf");
       }
@@ -332,6 +333,9 @@ public class YarnRemoteInterpreterProcess extends RemoteInterpreterProcess {
       }
       envs.put(ApplicationConstants.Environment.CLASSPATH.name(), newValue);
     }
+    // set HADOOP_MAPRED_HOME explicitly, otherwise it won't work for hadoop3
+    // see https://stackoverflow.com/questions/50719585/unable-to-run-mapreduce-wordcount
+    this.envs.put("HADOOP_MAPRED_HOME", "${HADOOP_HOME}");
   }
 
   private String[] getYarnAppClasspath() {
@@ -490,6 +494,23 @@ public class YarnRemoteInterpreterProcess extends RemoteInterpreterProcess {
     return flinkArchive;
   }
 
+  private File createHiveConfZip(File hiveConfDir) throws IOException {
+    File hiveConfArchive = File.createTempFile("hive_conf", ".zip", Files.createTempDir());
+    ZipOutputStream hiveConfZipStream = new ZipOutputStream(new FileOutputStream(hiveConfArchive));
+    hiveConfZipStream.setLevel(0);
+
+    if (!hiveConfDir.exists()) {
+      throw new IOException("HIVE_CONF_DIR " + hiveConfDir.getAbsolutePath() + " doesn't exist");
+    }
+    for (File file : hiveConfDir.listFiles()) {
+      addFileToZipStream(hiveConfZipStream, file, null);
+    }
+
+    hiveConfZipStream.flush();
+    hiveConfZipStream.close();
+    return hiveConfArchive;
+  }
+
   private Path copyFileToRemote(
           Path destDir,
           Path srcPath,

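Note: createHiveConfZip() calls an addFileToZipStream() helper that already
exists in YarnRemoteInterpreterProcess (it is also used when packaging the
flink archive) and whose body is not part of this diff. For readers without
the full source at hand, here is a minimal sketch of what such a helper might
look like, assuming the third argument is an optional parent prefix for the
zip entry name; the actual implementation in the class may differ:

  // Sketch only. Requires: java.io.*, java.util.zip.ZipEntry, java.util.zip.ZipOutputStream
  private void addFileToZipStream(ZipOutputStream zipStream, File file, String parent)
      throws IOException {
    String entryName = (parent == null) ? file.getName() : parent + "/" + file.getName();
    if (file.isDirectory()) {
      // Recurse so nested conf directories are preserved inside the zip.
      for (File child : file.listFiles()) {
        addFileToZipStream(zipStream, child, entryName);
      }
      return;
    }
    zipStream.putNextEntry(new ZipEntry(entryName));
    try (FileInputStream in = new FileInputStream(file)) {
      byte[] buffer = new byte[4096];
      int bytesRead;
      while ((bytesRead = in.read(buffer)) != -1) {
        zipStream.write(buffer, 0, bytesRead);
      }
    }
    zipStream.closeEntry();
  }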