This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f82ed5e8 [MINOR][TEST] Remove out-dated hive version in run-tests.py
f82ed5e8 is described below

commit f82ed5e8e0d2081db9894ae0ca737eddc4b0c390
Author: Yuming Wang <yumw...@ebay.com>
AuthorDate: Wed Apr 24 21:22:15 2019 -0700

    [MINOR][TEST] Remove out-dated hive version in run-tests.py
    
    ## What changes were proposed in this pull request?
    
    ```
    ========================================================================
    Building Spark
    ========================================================================
    [info] Building Spark (w/Hive 1.2.1) using SBT with these arguments:  -Phadoop-3.2 -Pkubernetes -Phive-thriftserver -Pkinesis-asl -Pyarn -Pspark-ganglia-lgpl -Phive -Pmesos test:package streaming-kinesis-asl-assembly/assembly
    ```
    
    `(w/Hive 1.2.1)` is incorrect when testing with hadoop-3.2; it should be `(w/Hive 2.3.4)`.
    This PR removes `(w/Hive 1.2.1)` from run-tests.py.
    
    ## How was this patch tested?
    
    N/A
    
    Closes #24451 from wangyum/run-tests-invalid-info.
    
    Authored-by: Yuming Wang <yumw...@ebay.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 dev/run-tests.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/dev/run-tests.py b/dev/run-tests.py
index 70dcb4a..5211aea 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -297,8 +297,7 @@ def build_spark_maven(hadoop_version):
     mvn_goals = ["clean", "package", "-DskipTests"]
     profiles_and_goals = build_profiles + mvn_goals
 
-    print("[info] Building Spark (w/Hive 1.2.1) using Maven with these 
arguments: ",
-          " ".join(profiles_and_goals))
+    print("[info] Building Spark using Maven with these arguments: ", " 
".join(profiles_and_goals))
 
     exec_maven(profiles_and_goals)
 
@@ -310,8 +309,7 @@ def build_spark_sbt(hadoop_version):
                  "streaming-kinesis-asl-assembly/assembly"]
     profiles_and_goals = build_profiles + sbt_goals
 
-    print("[info] Building Spark (w/Hive 1.2.1) using SBT with these 
arguments: ",
-          " ".join(profiles_and_goals))
+    print("[info] Building Spark using SBT with these arguments: ", " 
".join(profiles_and_goals))
 
     exec_sbt(profiles_and_goals)
 
@@ -323,7 +321,7 @@ def build_spark_unidoc_sbt(hadoop_version):
     sbt_goals = ["unidoc"]
     profiles_and_goals = build_profiles + sbt_goals
 
-    print("[info] Building Spark unidoc (w/Hive 1.2.1) using SBT with these 
arguments: ",
+    print("[info] Building Spark unidoc using SBT with these arguments: ",
           " ".join(profiles_and_goals))
 
     exec_sbt(profiles_and_goals)
@@ -334,7 +332,7 @@ def build_spark_assembly_sbt(hadoop_version, checkstyle=False):
     build_profiles = get_hadoop_profiles(hadoop_version) + modules.root.build_profile_flags
     sbt_goals = ["assembly/package"]
     profiles_and_goals = build_profiles + sbt_goals
-    print("[info] Building Spark assembly (w/Hive 1.2.1) using SBT with these 
arguments: ",
+    print("[info] Building Spark assembly using SBT with these arguments: ",
           " ".join(profiles_and_goals))
     exec_sbt(profiles_and_goals)
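
For context, the removed `(w/Hive 1.2.1)` label was wrong because the Hive version actually built follows the selected Hadoop profile rather than being fixed. The patch above simply drops the label; a rough alternative sketch (not part of this commit; the `hive_version_for` helper, the `report_build` function, and the profile-to-version mapping below are hypothetical illustrations only) would be to derive the label from the profile:

```
# Sketch only: shows why a hard-coded "(w/Hive 1.2.1)" label is misleading.
# The mapping and helpers here are hypothetical, not Spark's run-tests.py code.
HIVE_VERSION_BY_HADOOP_PROFILE = {
    "hadoop2.7": "1.2.1",   # assumed: default profile builds against Hive 1.2.1
    "hadoop3.2": "2.3.4",   # assumed: hadoop-3.2 profile builds against Hive 2.3.4
}


def hive_version_for(hadoop_version):
    """Return the Hive version paired with the given Hadoop profile (sketch)."""
    return HIVE_VERSION_BY_HADOOP_PROFILE.get(hadoop_version, "unknown")


def report_build(hadoop_version, profiles_and_goals):
    # Derive the label from the profile instead of baking in a fixed string.
    print("[info] Building Spark (w/Hive %s) using SBT with these arguments: "
          % hive_version_for(hadoop_version),
          " ".join(profiles_and_goals))


report_build("hadoop3.2", ["-Phadoop-3.2", "-Phive", "test:package"])
```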
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
