Repository: zeppelin
Updated Branches:
  refs/heads/master d5b46987f -> b62e2e01b
[ZEPPELIN-2341] Remove -Psparkr build profile

### What is this PR for?
Currently, users who build Zeppelin from source need to include `-Psparkr` to use `%r` with the embedded local Spark. Having to write this build profile on every build is inconvenient, so this PR removes `-Psparkr` and makes the R-related libraries download automatically when Zeppelin is built, as was done in #2213.

### What type of PR is it?
Improvement

### Todos
* [x] - Remove the remaining `-Psparkr` build profile references in `dev/create_release.sh`, `dev/publish_release.sh`, and `docs/install/build.md` after #2213 is merged

### What is the Jira issue?
[ZEPPELIN-2341](https://issues.apache.org/jira/browse/ZEPPELIN-2341)

### How should this be tested?
1. Apply this patch
2. Build the source with the command below
```
mvn clean package -DskipTests -pl 'zeppelin-interpreter, zeppelin-zengine, zeppelin-server, zeppelin-display, spark, spark-dependencies'
```
After this step, there will be an `R` dir under `ZEPPELIN_HOME/interpreter/spark` (see the verification sketch after these steps). Before this PR, only the `dep` dir and `zeppelin-spark_2.10-0.8.0-SNAPSHOT.jar` were generated without the `-Psparkr` build profile.
3. Restart Zeppelin. To make sure, run the R tutorial note under the `Zeppelin Tutorial` folder. It should run successfully without any errors.
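A minimal sanity check for step 2, assuming `ZEPPELIN_HOME` is the root of the build tree (variable name as used in the step above):

```bash
# The SparkR runtime should now sit next to the interpreter jar after a plain build
ls "${ZEPPELIN_HOME}/interpreter/spark"      # expect: R, dep, zeppelin-spark_*.jar
ls "${ZEPPELIN_HOME}/interpreter/spark/R"    # R library files downloaded during the build
```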
### Screenshots (if appropriate)
If we build without `-Psparkr`:
- before: R-related properties are not activated by default in the Spark interpreter
- after: R-related properties are activated by default

### Questions:
* Do the license files need to be updated? no
* Are there breaking changes for older versions? no
* Does this need documentation? no

Author: AhyoungRyu <fbdkdu...@hanmail.net>
Author: Ahyoung Ryu <ahyoung...@apache.org>

Closes #2215 from AhyoungRyu/ZEPPELIN-2341/includeSparkRByDefault and squashes the following commits:

8db18cc [AhyoungRyu] Remove the rest of '-Psparkr' in docs & sh files
f891fd4 [Ahyoung Ryu] Merge branch 'master' into ZEPPELIN-2341/includeSparkRByDefault
445be3e [AhyoungRyu] Add SPARKR env to check each test case need to download r dep or not
67af02a [AhyoungRyu] Remove -PsparkR in travis file
a00466c [AhyoungRyu] Remove sparkr build profile in pom files

Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/b62e2e01
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/b62e2e01
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/b62e2e01

Branch: refs/heads/master
Commit: b62e2e01bbdca87ab68a68ffae5e228217da5679
Parents: d5b4698
Author: AhyoungRyu <fbdkdu...@hanmail.net>
Authored: Tue Apr 4 15:33:39 2017 +0900
Committer: ahyoungryu <ahyoung...@apache.org>
Committed: Fri Apr 7 00:55:26 2017 +0900

----------------------------------------------------------------------
 .travis.yml                                       |   8 +-
 dev/create_release.sh                             |   4 +-
 dev/publish_release.sh                            |   2 +-
 docs/install/build.md                             |  12 +--
 docs/install/virtual_machine.md                   |   2 +-
 scripts/vagrant/zeppelin-dev/README.md            |   2 +-
 scripts/vagrant/zeppelin-dev/show-instructions.sh |   2 +-
 spark-dependencies/pom.xml                        |  96 ++++++++--------
 spark/pom.xml                                     | 107 ++++++++-----------
 testing/install_external_dependencies.sh          |   4 +-
 10 files changed, 106 insertions(+), 133 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index ea42117..f77906f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -59,19 +59,19 @@ matrix:

     # Test spark module for 2.1.0 with scala 2.11, livy
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.11" SPARK_VER="2.1.0" HADOOP_VER="2.6" PROFILE="-Pspark-2.1 -Phadoop-2.6 -Psparkr -Pscala-2.11" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,livy" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.livy.* -DfailIfNoTests=false"
+      env: SCALA_VER="2.11" SPARK_VER="2.1.0" HADOOP_VER="2.6" PROFILE="-Pspark-2.1 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,livy" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.livy.* -DfailIfNoTests=false"

     # Test spark module for 2.0.2 with scala 2.11
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.11" SPARK_VER="2.0.2" HADOOP_VER="2.6" PROFILE="-Pspark-2.0 -Phadoop-2.6 -Psparkr -Pscala-2.11" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: SCALA_VER="2.11" SPARK_VER="2.0.2" HADOOP_VER="2.6" PROFILE="-Pspark-2.0 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"

     # Test spark module for 1.6.3 with scala 2.10
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Psparkr -Pscala-2.10" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Pscala-2.10" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.spark.* -DfailIfNoTests=false"

     # Test spark module for 1.6.3 with scala 2.11
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Psparkr -Pscala-2.11" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
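For reference, the first CI row above can be reproduced locally, roughly, by exporting the same variables by hand (a sketch assembled from the `env` values in this diff; `install_external_dependencies.sh` is the script changed at the bottom of this commit):

```bash
# Mimic the Travis job: fetch the R packages first, then run the same Maven build
export SPARKR="true"
./testing/install_external_dependencies.sh   # installs R deps because SPARKR=true
mvn package -DskipTests -DskipRat -Pspark-2.1 -Phadoop-2.6 -Pscala-2.11 \
    -pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,livy
```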
     # Test python/pyspark with python 2
     - jdk: "oraclejdk7"


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/dev/create_release.sh
----------------------------------------------------------------------
diff --git a/dev/create_release.sh b/dev/create_release.sh
index 34e6c88..33ee84f 100755
--- a/dev/create_release.sh
+++ b/dev/create_release.sh
@@ -106,8 +106,8 @@ function make_binary_release() {

 git_clone
 make_source_package
-make_binary_release all "-Pspark-2.1 -Phadoop-2.6 -Pyarn -Psparkr -Pscala-${SCALA_VERSION}"
-make_binary_release netinst "-Pspark-2.1 -Phadoop-2.6 -Pyarn -Psparkr -Pscala-${SCALA_VERSION} -pl zeppelin-interpreter,zeppelin-zengine,:zeppelin-display_${SCALA_VERSION},:zeppelin-spark-dependencies_${SCALA_VERSION},:zeppelin-spark_${SCALA_VERSION},zeppelin-web,zeppelin-server,zeppelin-distribution -am"
+make_binary_release all "-Pspark-2.1 -Phadoop-2.6 -Pyarn -Pscala-${SCALA_VERSION}"
+make_binary_release netinst "-Pspark-2.1 -Phadoop-2.6 -Pyarn -Pscala-${SCALA_VERSION} -pl zeppelin-interpreter,zeppelin-zengine,:zeppelin-display_${SCALA_VERSION},:zeppelin-spark-dependencies_${SCALA_VERSION},:zeppelin-spark_${SCALA_VERSION},zeppelin-web,zeppelin-server,zeppelin-distribution -am"

 # remove non release files and dirs
 rm -rf "${WORKING_DIR}/zeppelin"


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/dev/publish_release.sh
----------------------------------------------------------------------
diff --git a/dev/publish_release.sh b/dev/publish_release.sh
index 6c0bc9d..76eac4b 100755
--- a/dev/publish_release.sh
+++ b/dev/publish_release.sh
@@ -46,7 +46,7 @@ if [[ $RELEASE_VERSION == *"SNAPSHOT"* ]]; then
   DO_SNAPSHOT="yes"
 fi

-PUBLISH_PROFILES="-Ppublish-distr -Pspark-2.1 -Phadoop-2.6 -Pyarn -Psparkr -Pr"
+PUBLISH_PROFILES="-Ppublish-distr -Pspark-2.1 -Phadoop-2.6 -Pyarn -Pr"
 PROJECT_OPTIONS="-pl !zeppelin-distribution"
 NEXUS_STAGING="https://repository.apache.org/service/local/staging"
 NEXUS_PROFILE="153446d1ac37c4"


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/docs/install/build.md
----------------------------------------------------------------------
diff --git a/docs/install/build.md b/docs/install/build.md
index 8bae369..5543f0b 100644
--- a/docs/install/build.md
+++ b/docs/install/build.md
@@ -69,7 +69,7 @@ If you're unsure about the options, use the same commands that creates official

 # update all pom.xml to use scala 2.11
 ./dev/change_scala_version.sh 2.11
 # build zeppelin with all interpreters and include latest version of Apache spark support for local mode.
-mvn clean package -DskipTests -Pspark-2.0 -Phadoop-2.4 -Pyarn -Psparkr -Pr -Pscala-2.11
+mvn clean package -DskipTests -Pspark-2.0 -Phadoop-2.4 -Pyarn -Pr -Pscala-2.11
 ```

 ####3. Done
@@ -149,10 +149,6 @@ enable YARN support for local mode

 enable [R](https://www.r-project.org/) support with [SparkR](https://spark.apache.org/docs/latest/sparkr.html) integration.

-##### `-Psparkr` (optional)
-
-another [R](https://www.r-project.org/) support with [SparkR](https://spark.apache.org/docs/latest/sparkr.html) integration as well as local mode support.
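One way to double-check that the release scripts are fully cleaned up is a quick grep over the paths this commit touches (a sketch; any hit would be a leftover reference):

```bash
# Expect no output: all -Psparkr occurrences should be gone from these paths
grep -rn "Psparkr" .travis.yml dev docs scripts testing || echo "no leftover -Psparkr references"
```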
-
 ##### `-Pvendor-repo` (optional)

 enable 3rd party vendor repository (cloudera)
@@ -184,14 +180,14 @@ Here are some examples with several options:

 ```bash
 # build with spark-2.1, scala-2.11
 ./dev/change_scala_version.sh 2.11
-mvn clean package -Pspark-2.1 -Phadoop-2.4 -Pyarn -Psparkr -Pscala-2.11 -DskipTests
+mvn clean package -Pspark-2.1 -Phadoop-2.4 -Pyarn -Pscala-2.11 -DskipTests

 # build with spark-2.0, scala-2.11
 ./dev/change_scala_version.sh 2.11
-mvn clean package -Pspark-2.0 -Phadoop-2.4 -Pyarn -Psparkr -Pscala-2.11 -DskipTests
+mvn clean package -Pspark-2.0 -Phadoop-2.4 -Pyarn -Pscala-2.11 -DskipTests

 # build with spark-1.6, scala-2.10
-mvn clean package -Pspark-1.6 -Phadoop-2.4 -Pyarn -Psparkr -DskipTests
+mvn clean package -Pspark-1.6 -Phadoop-2.4 -Pyarn -DskipTests

 # spark-cassandra integration
 mvn clean package -Pcassandra-spark-1.5 -Dhadoop.version=2.6.0 -Phadoop-2.6 -DskipTests -DskipTests


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/docs/install/virtual_machine.md
----------------------------------------------------------------------
diff --git a/docs/install/virtual_machine.md b/docs/install/virtual_machine.md
index d0973d6..d726004 100644
--- a/docs/install/virtual_machine.md
+++ b/docs/install/virtual_machine.md
@@ -110,7 +110,7 @@ This assumes you've already cloned the project either on the host machine in the

 ```
 cd /zeppelin
-mvn clean package -Pspark-1.6 -Phadoop-2.4 -Psparkr -DskipTests
+mvn clean package -Pspark-1.6 -Phadoop-2.4 -DskipTests
 ./bin/zeppelin-daemon.sh start
 ```


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/scripts/vagrant/zeppelin-dev/README.md
----------------------------------------------------------------------
diff --git a/scripts/vagrant/zeppelin-dev/README.md b/scripts/vagrant/zeppelin-dev/README.md
index 8ebde4b..60bb09c 100644
--- a/scripts/vagrant/zeppelin-dev/README.md
+++ b/scripts/vagrant/zeppelin-dev/README.md
@@ -87,7 +87,7 @@ This assumes you've already cloned the project either on the host machine in the

 ```
 cd /zeppelin
-mvn clean package -Pspark-1.6 -Phadoop-2.4 -Psparkr -DskipTests
+mvn clean package -Pspark-1.6 -Phadoop-2.4 -DskipTests
 ./bin/zeppelin-daemon.sh start
 ```


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/scripts/vagrant/zeppelin-dev/show-instructions.sh
----------------------------------------------------------------------
diff --git a/scripts/vagrant/zeppelin-dev/show-instructions.sh b/scripts/vagrant/zeppelin-dev/show-instructions.sh
index bb9abee..8e896a2 100644
--- a/scripts/vagrant/zeppelin-dev/show-instructions.sh
+++ b/scripts/vagrant/zeppelin-dev/show-instructions.sh
@@ -34,7 +34,7 @@ echo 'mvn clean package -DskipTests'
 echo
 echo '# or for a specific Spark/Hadoop build with additional options such as python and R support'
 echo
-echo 'mvn clean package -Pspark-1.6 -Phadoop-2.4 -Psparkr -DskipTests'
+echo 'mvn clean package -Pspark-1.6 -Phadoop-2.4 -DskipTests'
 echo './bin/zeppelin-daemon.sh start'
 echo
 echo 'On your host machine browse to http://localhost:8080/'
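Per the updated docs, a typical from-source build with SparkR support now needs no R-related profile at all; the commands are exactly the ones shown above:

```bash
# Spark 2.1 / Scala 2.11 build; R support for the local Spark is bundled automatically
./dev/change_scala_version.sh 2.11
mvn clean package -Pspark-2.1 -Phadoop-2.4 -Pyarn -Pscala-2.11 -DskipTests
```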
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/spark-dependencies/pom.xml
----------------------------------------------------------------------
diff --git a/spark-dependencies/pom.xml b/spark-dependencies/pom.xml
index 3592509..b6a74dd 100644
--- a/spark-dependencies/pom.xml
+++ b/spark-dependencies/pom.xml
@@ -827,57 +827,6 @@
       </dependencies>
     </profile>

-    <profile>
-      <id>sparkr</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>com.googlecode.maven-download-plugin</groupId>
-            <artifactId>download-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>download-sparkr-files</id>
-                <phase>validate</phase>
-                <goals>
-                  <goal>wget</goal>
-                </goals>
-                <configuration>
-                  <readTimeOut>60000</readTimeOut>
-                  <retries>5</retries>
-                  <url>${spark.bin.download.url}</url>
-                  <unpack>true</unpack>
-                  <outputDirectory>${project.build.directory}</outputDirectory>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <artifactId>maven-resources-plugin</artifactId>
-            <version>2.7</version>
-            <executions>
-              <execution>
-                <id>copy-sparkr-files</id>
-                <phase>generate-resources</phase>
-                <goals>
-                  <goal>copy-resources</goal>
-                </goals>
-                <configuration>
-                  <outputDirectory>${project.build.directory}/../../interpreter/spark/R/lib</outputDirectory>
-                  <resources>
-                    <resource>
-                      <directory>
-                        ${project.build.directory}/spark-${spark.version}-bin-without-hadoop/R/lib
-                      </directory>
-                    </resource>
-                  </resources>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
   </profiles>

   <build>
@@ -1039,6 +988,51 @@
         </execution>
       </executions>
     </plugin>
+
+      <!-- include sparkr by default -->
+      <plugin>
+        <groupId>com.googlecode.maven-download-plugin</groupId>
+        <artifactId>download-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>download-sparkr-files</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>wget</goal>
+            </goals>
+            <configuration>
+              <readTimeOut>60000</readTimeOut>
+              <retries>5</retries>
+              <url>${spark.bin.download.url}</url>
+              <unpack>true</unpack>
+              <outputDirectory>${project.build.directory}</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-resources-plugin</artifactId>
+        <version>2.7</version>
+        <executions>
+          <execution>
+            <id>copy-sparkr-files</id>
+            <phase>generate-resources</phase>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.directory}/../../interpreter/spark/R/lib</outputDirectory>
+              <resources>
+                <resource>
+                  <directory>
+                    ${project.build.directory}/spark-${spark.version}-bin-without-hadoop/R/lib
+                  </directory>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/spark/pom.xml
----------------------------------------------------------------------
diff --git a/spark/pom.xml b/spark/pom.xml
index c494111..ade4081 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -320,6 +320,19 @@
   </dependencies>

   <build>
+    <!-- sparkr resources -->
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <excludes>
+          <exclude>interpreter-setting.json</exclude>
+        </excludes>
+      </resource>
+      <resource>
+        <directory>src/main/sparkr-resources</directory>
+      </resource>
+    </resources>
+
     <plugins>
       <plugin>
         <artifactId>maven-enforcer-plugin</artifactId>
@@ -440,18 +453,42 @@
         </executions>
       </plugin>

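Since the download now runs in the default build, builds on slow or firewalled networks may want to redirect it. Overriding `${spark.bin.download.url}` on the command line should work the usual Maven way, though this is an untested sketch and the mirror URL below is only an example:

```bash
# Fetch the Spark binary (for its R/lib payload) from an explicit mirror
mvn clean package -DskipTests -pl spark-dependencies -am \
    -Dspark.bin.download.url=https://archive.apache.org/dist/spark/spark-2.1.0/spark-2.1.0-bin-without-hadoop.tgz
```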
-      <!-- exclude sparkr by default. sparkr is enabled by profile 'sparkr' -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
         <configuration>
-          <excludes>
-            <exclude>**/SparkRInterpreter.java</exclude>
+          <excludes combine.self="override"></excludes>
+          <testExcludes combine.self="override">
+            <testExclude>${pyspark.test.exclude}</testExclude>
+          </testExcludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.scala-tools</groupId>
+        <artifactId>maven-scala-plugin</artifactId>
+        <configuration>
+          <excludes combine.self="override">
           </excludes>
-          <testExcludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <excludes combine.self="override">
+            <exclude>${pyspark.test.exclude}</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+
+      <!-- include sparkr by default -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <excludes combine.self="override"></excludes>
+          <testExcludes combine.self="override">
             <testExclude>${pyspark.test.exclude}</testExclude>
-            <testExclude>**/SparkRInterpreterTest.java</testExclude>
-            <testExclude>**/ZeppelinRTest.java</testExclude>
           </testExcludes>
         </configuration>
       </plugin>
@@ -459,9 +496,7 @@
         <groupId>org.scala-tools</groupId>
         <artifactId>maven-scala-plugin</artifactId>
         <configuration>
-          <excludes>
-            <exclude>**/ZeppelinR.scala</exclude>
-            <exclude>**/SparkRBackend.scala</exclude>
+          <excludes combine.self="override">
           </excludes>
         </configuration>
       </plugin>
@@ -469,12 +504,8 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
-          <includes>
-            <include>${pyspark.test.include}</include>
-          </includes>
-          <excludes>
+          <excludes combine.self="override">
             <exclude>${pyspark.test.exclude}</exclude>
-            <exclude>**/SparkRInterpreterTest.java</exclude>
           </excludes>
         </configuration>
       </plugin>
@@ -609,53 +640,5 @@
       <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
     </properties>
   </profile>
-
-  <!-- include sparkr in the build -->
-  <profile>
-    <id>sparkr</id>
-    <build>
-      <resources>
-        <resource>
-          <directory>src/main/resources</directory>
-          <excludes>
-            <exclude>interpreter-setting.json</exclude>
-          </excludes>
-        </resource>
-        <resource>
-          <directory>src/main/sparkr-resources</directory>
-        </resource>
-      </resources>
-
-      <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-compiler-plugin</artifactId>
-          <configuration>
-            <excludes combine.self="override"></excludes>
-            <testExcludes combine.self="override">
-              <testExclude>${pyspark.test.exclude}</testExclude>
-            </testExcludes>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.scala-tools</groupId>
-          <artifactId>maven-scala-plugin</artifactId>
-          <configuration>
-            <excludes combine.self="override">
-            </excludes>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-surefire-plugin</artifactId>
-          <configuration>
-            <excludes combine.self="override">
-              <exclude>${pyspark.test.exclude}</exclude>
-            </excludes>
-          </configuration>
-        </plugin>
-      </plugins>
-    </build>
-  </profile>
 </profiles>
 </project>


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b62e2e01/testing/install_external_dependencies.sh
----------------------------------------------------------------------
diff --git a/testing/install_external_dependencies.sh b/testing/install_external_dependencies.sh
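With the exclusions removed, `SparkRInterpreter`, `ZeppelinR`, and `SparkRBackend` compile in every build. A rough way to verify (a sketch; the jar name matches the snapshot artifact mentioned in the test steps above, so adjust the version for your checkout):

```bash
# SparkR classes should now appear in the interpreter jar of a plain build
mvn clean package -DskipTests -pl spark -am
jar tf spark/target/zeppelin-spark_2.10-0.8.0-SNAPSHOT.jar | grep -i sparkr
```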
index 963a876..e88f63b 100755
--- a/testing/install_external_dependencies.sh
+++ b/testing/install_external_dependencies.sh
@@ -20,8 +20,8 @@
 set -ev
 touch ~/.environ

-# Install R dependencies if R profiles are used
-if [[ ${PROFILE/"-Pr "} != $PROFILE ]] || [[ ${PROFILE/"-Psparkr "} != $PROFILE ]] ; then
+# Install R dependencies if SPARKR is true
+if [[ "${SPARKR}" = "true" ]] ; then
   echo "R_LIBS=~/R" > ~/.Renviron
   echo "export R_LIBS=~/R" >> ~/.environ
   source ~/.environ
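Under the new scheme a test environment opts into the R setup with an environment variable instead of a Maven profile, e.g.:

```bash
# Only jobs that exercise %r need the R packages
SPARKR="true" ./testing/install_external_dependencies.sh   # installs the R deps
./testing/install_external_dependencies.sh                 # SPARKR unset: skips the R setup
```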