This is an automated email from the ASF dual-hosted git repository.

ctubbsii pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/accumulo.git


The following commit(s) were added to refs/heads/main by this push:
     new 6f01f81  Remove load-jars-hdfs from bin/accumulo-util (#2491)
6f01f81 is described below

commit 6f01f81326cd00ed71b51c4fd98158939ef4d709
Author: Christopher Tubbs <ctubb...@apache.org>
AuthorDate: Mon Feb 14 17:36:05 2022 -0500

    Remove load-jars-hdfs from bin/accumulo-util (#2491)
    
    Remove the jar loading utility, which is broken, from the accumulo-util
    script. This utility was intended as a convenience to help users copy
    jars to HDFS to be used with the VFS classloader. However, that utility
    is in the process of being separated from Accumulo's core, and so this tool
    makes a lot of assumptions about the user's classloader configuration.
    Furthermore, it's not really necessary for Accumulo to maintain a helper
    tool for copying files for the user, as they can copy files on their
    own, according to their specific needs.
    
    This fixes #2440
---
 assemble/bin/accumulo-util | 58 ----------------------------------------------
 1 file changed, 58 deletions(-)

diff --git a/assemble/bin/accumulo-util b/assemble/bin/accumulo-util
index e5364ab..723c690 100755
--- a/assemble/bin/accumulo-util
+++ b/assemble/bin/accumulo-util
@@ -26,7 +26,6 @@ Commands:
   build-native        Builds Accumulo native libraries
   dump-zoo            Dumps data in ZooKeeper
   gen-monitor-cert    Generates Accumulo monitor certificate
-  load-jars-hdfs      Loads Accumulo jars in lib/ to HDFS for VFS classloader
   
 EOF
   exit 1
@@ -130,60 +129,6 @@ function gen_monitor_cert() {
   echo
 }
 
-function load_jars_hdfs() {
-
-  if [[ -x "$HADOOP_HOME/bin/hadoop" ]]; then
-    HADOOP="$HADOOP_HOME/bin/hadoop"
-  else
-    HADOOP=$(which hadoop)
-  fi
-  if [[ ! -x "$HADOOP" ]]; then
-    echo "Could not find 'hadoop' command. Please set hadoop on your PATH or 
set HADOOP_HOME"
-    exit 1
-  fi
-
-  # Find the system context directory in HDFS
-  SYSTEM_CONTEXT_HDFS_DIR=$(grep "general.vfs.classpaths" 
"$conf/accumulo.properties" | cut -d '=' -f 2)
-
-  if [[ -z "$SYSTEM_CONTEXT_HDFS_DIR" ]]; then
-    echo "Your accumulo.properties file is not set up for the HDFS 
Classloader. Please add the following to your accumulo.properties file where 
##CLASSPATH## is one of the following formats:"
-    echo "A single directory: hdfs://host:port/directory/"
-    echo "A single directory with a regex: hdfs://host:port/directory/.*.jar"
-    echo "Multiple directories: 
hdfs://host:port/directory/.*.jar,hdfs://host:port/directory2/"
-    echo ""
-    echo "general.vfs.classpaths=##CLASSPATH##"
-    exit 1
-  fi
-
-  # Create the system context directy in HDFS if it does not exist
-  if ! "$HADOOP" fs -ls "$SYSTEM_CONTEXT_HDFS_DIR" > /dev/null; then
-    if ! "$HADOOP" fs -mkdir "$SYSTEM_CONTEXT_HDFS_DIR" > /dev/null; then
-      echo "Unable to create classpath directory at $SYSTEM_CONTEXT_HDFS_DIR"
-      exit 1
-    fi
-  fi
-
-  # Replicate to all tservers to avoid network contention on startup
-  TSERVERS=${conf}/tservers
-  NUM_TSERVERS=$(grep -E -c -v '(^#|^\s*$)' "$TSERVERS")
-
-  #let each datanode service around 50 clients
-  REP=$(( NUM_TSERVERS / 50 ))
-  (( REP < 3 )) && REP=3
-
-  # Copy all jars in lib to the system context directory
-  "$HADOOP" fs -moveFromLocal "$lib"/*.jar "$SYSTEM_CONTEXT_HDFS_DIR"  > 
/dev/null
-  "$HADOOP" fs -setrep -R $REP "$SYSTEM_CONTEXT_HDFS_DIR"  > /dev/null
-
-  # We need some of the jars in lib, copy them back out and remove them from 
the system context dir
-  "$HADOOP" fs -copyToLocal "$SYSTEM_CONTEXT_HDFS_DIR/commons-vfs2.jar" 
"$lib/."  > /dev/null
-  "$HADOOP" fs -rm "$SYSTEM_CONTEXT_HDFS_DIR/commons-vfs2.jar"  > /dev/null
-  "$HADOOP" fs -copyToLocal "$SYSTEM_CONTEXT_HDFS_DIR/accumulo-start.jar" 
"$lib/."  > /dev/null
-  "$HADOOP" fs -rm "$SYSTEM_CONTEXT_HDFS_DIR/accumulo-start.jar"  > /dev/null
-  "$HADOOP" fs -copyToLocal "$SYSTEM_CONTEXT_HDFS_DIR/slf4j*.jar" "$lib/."  > 
/dev/null
-  "$HADOOP" fs -rm "$SYSTEM_CONTEXT_HDFS_DIR/slf4j*.jar"  > /dev/null
-}
-
 function main() {
   SOURCE="${BASH_SOURCE[0]}"
   while [ -h "${SOURCE}" ]; do
@@ -206,9 +151,6 @@ function main() {
     gen-monitor-cert)
       gen_monitor_cert
       ;;
-    load-jars-hdfs)
-      load_jars_hdfs
-      ;;
     *)
       echo -e "'$1' is an invalid <command>\\n"
       print_usage 1>&2

Reply via email to