Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/manual/userimpersonation.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/manual/userimpersonation.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/manual/userimpersonation.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/manual/userimpersonation.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/pleasecontribute.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/pleasecontribute.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/pleasecontribute.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/pleasecontribute.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/explorezeppelinui.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/explorezeppelinui.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/explorezeppelinui.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/explorezeppelinui.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/install_with_flink_and_spark_cluster.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/install_with_flink_and_spark_cluster.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/install_with_flink_and_spark_cluster.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/install_with_flink_and_spark_cluster.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/tutorial.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/tutorial.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/tutorial.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/quickstart/tutorial.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-configuration.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-configuration.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-configuration.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-configuration.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-credential.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-credential.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-credential.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-credential.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-helium.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-helium.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-helium.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-helium.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-interpreter.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-interpreter.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-interpreter.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-interpreter.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-notebook.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-notebook.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-notebook.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-notebook.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-notebookRepo.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-notebookRepo.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-notebookRepo.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/rest-api/rest-notebookRepo.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/rss.xml
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/rss.xml?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/rss.xml (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/rss.xml Tue Apr  4 06:06:16 2017
@@ -5,8 +5,8 @@
         <description>Apache Zeppelin - The Apache Software Foundation</description>
         <link>http://zeppelin.apache.org</link>
         <link>http://zeppelin.apache.org</link>
-        <lastBuildDate>2017-04-01T18:59:50+09:00</lastBuildDate>
-        <pubDate>2017-04-01T18:59:50+09:00</pubDate>
+        <lastBuildDate>2017-04-04T15:02:14+09:00</lastBuildDate>
+        <pubDate>2017-04-04T15:02:14+09:00</pubDate>
         <ttl>1800</ttl>
 
 

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/screenshots.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/screenshots.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/screenshots.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/screenshots.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/search.html
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/search.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/search.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/search.html Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/search_data.json
URL: http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/search_data.json?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/search_data.json (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/search_data.json Tue Apr  4 06:06:16 2017
@@ -279,6 +279,17 @@
     
   
 
+    "/interpreter/groovy.html": {
+      "title": "Apache Groovy Interpreter for Apache Zeppelin",
+      "content"  : "&lt;!--Licensed under the Apache License, Version 2.0 (the 
&quot;License&quot;);you may not use this file except in compliance with the 
License.You may obtain a copy of the License 
athttp://www.apache.org/licenses/LICENSE-2.0Unless required by applicable law 
or agreed to in writing, softwaredistributed under the License is distributed 
on an &quot;AS IS&quot; BASIS,WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied.See the License for the specific language governing 
permissions andlimitations under the License.--&gt;Groovy Interpreter for 
Apache ZeppelinSamples%groovy//get a parameter defined as 
z.angularBind(&amp;#39;ngSearchParam&amp;#39;, value, 
&amp;#39;paragraph_id&amp;#39;)//g is a context object for groovy to avoid mix 
with z objectdef param = g.angular(&amp;#39;ngSearchParam&amp;#39;)//send 
request https://www.googleapis.com/customsearch/v1?q=ngSearchParam_valuedef r = 
HTTP.get(  //assume you defined the groovy interpreter property  //
    
`search_baseurl`=&amp;#39;https://www.googleapis.com/customsearch/v1&amp;#39;  
//in groovy object o.getProperty(&amp;#39;A&amp;#39;) == o.&amp;#39;A&amp;#39; 
== o.A == o[&amp;#39;A&amp;#39;]  url : g.search_baseurl,  query: [ q: param ], 
 headers: [    &amp;#39;Accept&amp;#39;:&amp;#39;application/json&amp;#39;,    
//&amp;#39;Authorization:&amp;#39; : 
g.getProperty(&amp;#39;search_auth&amp;#39;),  ] )//check response codeif( 
r.response.code==200 ) {  g.html().with{     //g.html() renders %angular to 
output and returns groovy.xml.MarkupBuilder    h2(&amp;quot;the response 
${r.response.code}&amp;quot;)    span( r.response.body )    
h2(&amp;quot;headers&amp;quot;)    pre( 
r.response.headers.join(&amp;#39;n&amp;#39;) )  }} else {  //just to show that 
it&amp;#39;s possible to use println with multiline groovy string to render 
output  println(&amp;quot;&amp;quot;&amp;quot;%angular    
&amp;lt;script&amp;gt; alert (&amp;quot;code=${r.response.code} n 
msg=${r.response.message}&amp;quot;) 
 &amp;lt;/script&amp;gt;  &amp;quot;&amp;quot;&amp;quot;)}%groovy//renders a 
table with headers a, b, c  and two rowsg.table(  [    
[&amp;#39;a&amp;#39;,&amp;#39;b&amp;#39;,&amp;#39;c&amp;#39;],    
[&amp;#39;a1&amp;#39;,&amp;#39;b1&amp;#39;,&amp;#39;c1&amp;#39;],    
[&amp;#39;a2&amp;#39;,&amp;#39;b2&amp;#39;,&amp;#39;c2&amp;#39;],  ])the g 
objectg.angular(String name)Returns angular object by name. Look up notebook 
scope first and then global scope.g.angularBind(String name, Object 
value)Assign a new value into angular object namejava.util.Properties 
g.getProperties()returns all properties defined for this interpreterString 
g.getProperty(&amp;#39;PROPERTY_NAME&amp;#39;) groovy 
g.PROPERTY_NAMEg.&amp;#39;PROPERTY_NAME&amp;#39;g[&amp;#39;PROPERTY_NAME&amp;#39;]g.getProperties().getProperty(&amp;#39;PROPERTY_NAME&amp;#39;)All
 above the accessor to named property defined in groovy interpreter.In this 
case with name PROPERTY_NAMEgroovy.xml.MarkupBuilder g.html()Starts or 
continues rendering of %angular to output and returns groovy.xml.MarkupBuilderMarkupBuilder is 
usefull to generate html (xml)void g.table(obj)starts or continues rendering 
table rows.obj:  List(rows) of List(columns) where first line is a header ",
+      "url": " /interpreter/groovy.html",
+      "group": "interpreter",
+      "excerpt": "Apache Groovy is a powerful, optionally typed and dynamic 
language, with static-typing and static compilation capabilities, for the Java 
platform aimed at improving developer productivity thanks to a concise, 
familiar and easy to learn syntax."
+    }
+    ,
+    
+  
+
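
The Groovy sample inside the entry above is HTML-escaped and has had its line breaks stripped by the indexer; de-escaped, it reads roughly as follows. search_baseurl and search_auth are interpreter properties the sample itself assumes, and the join/alert separators, flattened to a bare "n" above, are restored here as '\n':

    %groovy
    // get a parameter defined as z.angularBind('ngSearchParam', value, 'paragraph_id')
    // g is a context object for groovy, to avoid mixing with the z object
    def param = g.angular('ngSearchParam')
    // send request https://www.googleapis.com/customsearch/v1?q=<ngSearchParam value>
    def r = HTTP.get(
      // assumes you defined the groovy interpreter property
      //   search_baseurl = 'https://www.googleapis.com/customsearch/v1'
      // in groovy, o.getProperty('A') == o.'A' == o.A == o['A']
      url: g.search_baseurl,
      query: [q: param],
      headers: [
        'Accept': 'application/json',
        // 'Authorization': g.getProperty('search_auth'),
      ]
    )
    // check response code
    if (r.response.code == 200) {
      g.html().with {
        // g.html() renders %angular to output and returns groovy.xml.MarkupBuilder
        h2("the response ${r.response.code}")
        span(r.response.body)
        h2("headers")
        pre(r.response.headers.join('\n'))
      }
    } else {
      // println with a multiline groovy string can also render output
      println("""%angular
        <script> alert("code=${r.response.code} \n msg=${r.response.message}") </script>
      """)
    }

    %groovy
    // renders a table with headers a, b, c and two rows
    g.table([
      ['a', 'b', 'c'],
      ['a1', 'b1', 'c1'],
      ['a2', 'b2', 'c2'],
    ])

The g object used throughout is the per-interpreter context accessor the entry documents (g.angular, g.getProperty, g.html(), g.table()).
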
     "/interpreter/hbase.html": {
       "title": "HBase Shell Interpreter for Apache Zeppelin",
       "content"  : "&lt;!--Licensed under the Apache License, Version 2.0 (the 
&quot;License&quot;);you may not use this file except in compliance with the 
License.You may obtain a copy of the License 
athttp://www.apache.org/licenses/LICENSE-2.0Unless required by applicable law 
or agreed to in writing, softwaredistributed under the License is distributed 
on an &quot;AS IS&quot; BASIS,WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied.See the License for the specific language governing 
permissions andlimitations under the License.--&gt;HBase Shell Interpreter for 
Apache ZeppelinOverviewHBase Shell is a JRuby IRB client for Apache HBase. This 
interpreter provides all capabilities of Apache HBase shell within Apache 
Zeppelin. The interpreter assumes that Apache HBase client software has been 
installed and it can connect to the Apache HBase cluster from the machine on 
where Apache Zeppelin is installed.To get start with HBase, please see HBase 
Quickstart.HBase release supportedBy default, Zeppelin is built against HBase 1.0.x releases. To 
work with HBase 1.1.x releases, use the following build command:# HBase 
1.1.4mvn clean package -DskipTests -Phadoop-2.6 -Dhadoop.version=2.6.0 -P 
build-distr -Dhbase.hbase.version=1.1.4 -Dhbase.hadoop.version=2.6.0To work 
with HBase 1.2.0+, use the following build command:# HBase 1.2.0mvn clean 
package -DskipTests -Phadoop-2.6 -Dhadoop.version=2.6.0 -P build-distr 
-Dhbase.hbase.version=1.2.0 -Dhbase.hadoop.version=2.6.0Configuration      
Property    Default    Description        hbase.home    /usr/lib/hbase    
Installation directory of HBase, defaults to HBASE_HOME in environment        
hbase.ruby.sources    lib/ruby    Path to Ruby scripts relative to 
&#39;hbase.home&#39;        zeppelin.hbase.test.mode    false    Disable checks 
for unit and manual tests  If you want to connect to HBase running on a 
cluster, you&amp;#39;ll need to follow the next step.Export HBASE_HOMEIn 
conf/zeppelin-env.sh, export HBASE_HOME environment variable with your HBase installation path. This ensures 
hbase-site.xml can be loaded.for exampleexport HBASE_HOME=/usr/lib/hbaseor, 
when running with CDHexport 
HBASE_HOME=&amp;quot;/opt/cloudera/parcels/CDH/lib/hbase&amp;quot;You can 
optionally export HBASE_CONF_DIR instead of HBASE_HOME should you have custom 
HBase configurations.Enabling the HBase Shell InterpreterIn a notebook, to 
enable the HBase Shell interpreter, click the Gear icon and select HBase 
Shell.Using the HBase Shell InterpreterIn a paragraph, use %hbase to select the 
HBase Shell interpreter and then input all commands. To get the list of 
available commands, use help.%hbasehelpFor example, to create a 
table%hbasecreate &amp;#39;test&amp;#39;, &amp;#39;cf&amp;#39;And then to put 
data into that table%hbaseput &amp;#39;test&amp;#39;, &amp;#39;row1&amp;#39;, 
&amp;#39;cf:a&amp;#39;, &amp;#39;value1&amp;#39;For more information on all 
commands available, refer to HBase shell commands.",
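
De-escaped, the build and shell commands quoted in the hbase entry above read as follows; the Maven flags and versions are reproduced verbatim from the entry, not independently verified:

    # HBase 1.1.4
    mvn clean package -DskipTests -Phadoop-2.6 -Dhadoop.version=2.6.0 -P build-distr -Dhbase.hbase.version=1.1.4 -Dhbase.hadoop.version=2.6.0

    # HBase 1.2.0
    mvn clean package -DskipTests -Phadoop-2.6 -Dhadoop.version=2.6.0 -P build-distr -Dhbase.hbase.version=1.2.0 -Dhbase.hadoop.version=2.6.0

    # conf/zeppelin-env.sh
    export HBASE_HOME=/usr/lib/hbase

    %hbase
    help

    %hbase
    create 'test', 'cf'

    %hbase
    put 'test', 'row1', 'cf:a', 'value1'
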
@@ -358,7 +369,7 @@
 
     "/interpreter/livy.html": {
       "title": "Livy Interpreter for Apache Zeppelin",
-      "content"  : "&lt;!--Licensed under the Apache License, Version 2.0 (the 
&quot;License&quot;);you may not use this file except in compliance with the 
License.You may obtain a copy of the License 
athttp://www.apache.org/licenses/LICENSE-2.0Unless required by applicable law 
or agreed to in writing, softwaredistributed under the License is distributed 
on an &quot;AS IS&quot; BASIS,WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied.See the License for the specific language governing 
permissions andlimitations under the License.--&gt;Livy Interpreter for Apache 
ZeppelinOverviewLivy is an open source REST interface for interacting with 
Spark from anywhere. It supports executing snippets of code or programs in a 
Spark context that runs locally or in YARN.Interactive Scala, Python and R 
shellsBatch submissions in Scala, Java, PythonMulti users can share the same 
server (impersonation support)Can be used for submitting jobs from anywhere 
with RESTDoes not require any code change to your programsRequirementsAdditional requirements for the 
Livy interpreter are:Spark 1.3 or above.Livy server.ConfigurationWe added some 
common configurations for spark, and you can set any configuration you want.You 
can find all Spark configurations in here.And instead of starting property with 
spark. it should be replaced with livy.spark..Example: spark.driver.memory to 
livy.spark.driver.memory      Property    Default    Description        
zeppelin.livy.url    http://localhost:8998    URL where livy server is running  
      zeppelin.livy.spark.maxResult    1000    Max number of Spark SQL result 
to display.        zeppelin.livy.session.create_timeout    120    Timeout in 
seconds for session creation        zeppelin.livy.displayAppInfo    false    
Whether to display app info        zeppelin.livy.pull_status.interval.millis    
1000    The interval for checking paragraph execution status        
livy.spark.driver.cores        Driver cores. ex) 1, 2.          livy.spark.driver.memory        Driver memory. ex) 512m, 32g.          
livy.spark.executor.instances        Executor instances. ex) 1, 4.          
livy.spark.executor.cores        Num cores per executor. ex) 1, 4.        
livy.spark.executor.memory        Executor memory per worker instance. ex) 
512m, 32g.        livy.spark.dynamicAllocation.enabled        Use dynamic 
resource allocation. ex) True, False.        
livy.spark.dynamicAllocation.cachedExecutorIdleTimeout        Remove an 
executor which has cached data blocks.        
livy.spark.dynamicAllocation.minExecutors        Lower bound for the number of 
executors.        livy.spark.dynamicAllocation.initialExecutors        Initial 
number of executors to run.        livy.spark.dynamicAllocation.maxExecutors    
    Upper bound for the number of executors.            
livy.spark.jars.packages            Adding extra libraries to livy interpreter  
        zeppelin.livy.ssl.trustStore        client trustStore file. Used when 
livy ssl is enabled  
       zeppelin.livy.ssl.trustStorePassword        password for trustStore 
file. Used when livy ssl is enabled    We remove livy.spark.master in 
zeppelin-0.7. Because we sugguest user to use livy 0.3 in zeppelin-0.7. And 
livy 0.3 don&amp;#39;t allow to specify livy.spark.master, it enfornce 
yarn-cluster mode.Adding External librariesYou can load dynamic library to livy 
interpreter by set livy.spark.jars.packages property to comma-separated list of 
maven coordinates of jars to include on the driver and executor classpaths. The 
format for the coordinates should be groupId:artifactId:version. Example      
Property    Example    Description          livy.spark.jars.packages      
io.spray:spray-json_2.10:1.3.1      Adding extra libraries to livy interpreter  
      How to useBasically, you can 
usespark%livy.sparksc.versionpyspark%livy.pysparkprint 
&amp;quot;1&amp;quot;sparkR%livy.sparkrhello &amp;lt;- function( name ) {    
sprintf( &amp;quot;Hello, %s&amp;quot;, name );}hello(&amp;quot;livy&amp;quot;)ImpersonationWhen Zeppelin server is running with authentication 
enabled, then this interpreter utilizes Livy’s user impersonation feature 
i.e. sends extra parameter for creating and running a session 
(&amp;quot;proxyUser&amp;quot;: &amp;quot;${loggedInUser}&amp;quot;). This is 
particularly useful when multi users are sharing a Notebook server.Apply 
Zeppelin Dynamic FormsYou can leverage Zeppelin Dynamic Form. You can use both 
the text input and select form parameterization features.%livy.pysparkprint 
&amp;quot;${group_by=product_id,product_id|product_name|customer_id|store_id}&amp;quot;FAQLivy
 debugging: If you see any of these in error consoleConnect to livyhost:8998 
[livyhost/127.0.0.1, livyhost/0:0:0:0:0:0:0:1] failed: Connection refusedLooks 
like the livy server is not up yet or the config is wrongException: Session not 
found, Livy server would have restarted, or lost session.The session would have 
timed out, you may need to restart the interpreter.Blacklisted 
 configuration values in session config: spark.masterEdit 
conf/spark-blacklist.conf file in livy server and comment out #spark.master 
line.If you choose to work on livy in apps/spark/java directory in 
https://github.com/cloudera/hue,copy spark-user-configurable-options.template 
to spark-user-configurable-options.conf file in livy server and comment out 
#spark.master. ",
+      "content"  : "&lt;!--Licensed under the Apache License, Version 2.0 (the 
&quot;License&quot;);you may not use this file except in compliance with the 
License.You may obtain a copy of the License 
athttp://www.apache.org/licenses/LICENSE-2.0Unless required by applicable law 
or agreed to in writing, softwaredistributed under the License is distributed 
on an &quot;AS IS&quot; BASIS,WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied.See the License for the specific language governing 
permissions andlimitations under the License.--&gt;Livy Interpreter for Apache 
ZeppelinOverviewLivy is an open source REST interface for interacting with 
Spark from anywhere. It supports executing snippets of code or programs in a 
Spark context that runs locally or in YARN.Interactive Scala, Python and R 
shellsBatch submissions in Scala, Java, PythonMulti users can share the same 
server (impersonation support)Can be used for submitting jobs from anywhere 
with RESTDoes not require any code change to your programsRequirementsAdditional requirements for the 
Livy interpreter are:Spark 1.3 or above.Livy server.ConfigurationWe added some 
common configurations for spark, and you can set any configuration you want.You 
can find all Spark configurations in here.And instead of starting property with 
spark. it should be replaced with livy.spark..Example: spark.driver.memory to 
livy.spark.driver.memory      Property    Default    Description        
zeppelin.livy.url    http://localhost:8998    URL where livy server is running  
      zeppelin.livy.spark.sql.maxResult    1000    Max number of Spark SQL 
result to display.        zeppelin.livy.spark.sql.field.truncate    true    
Whether to truncate field values longer than 20 characters or not        
zeppelin.livy.session.create_timeout    120    Timeout in seconds for session 
creation        zeppelin.livy.displayAppInfo    false    Whether to display app 
info        zeppelin.livy.pull_status.interval.millis    1000    The interval for checking paragraph execution status        livy.spark.driver.cores  
      Driver cores. ex) 1, 2.          livy.spark.driver.memory        Driver 
memory. ex) 512m, 32g.          livy.spark.executor.instances        Executor 
instances. ex) 1, 4.          livy.spark.executor.cores        Num cores per 
executor. ex) 1, 4.        livy.spark.executor.memory        Executor memory 
per worker instance. ex) 512m, 32g.        livy.spark.dynamicAllocation.enabled 
       Use dynamic resource allocation. ex) True, False.        
livy.spark.dynamicAllocation.cachedExecutorIdleTimeout        Remove an 
executor which has cached data blocks.        
livy.spark.dynamicAllocation.minExecutors        Lower bound for the number of 
executors.        livy.spark.dynamicAllocation.initialExecutors        Initial 
number of executors to run.        livy.spark.dynamicAllocation.maxExecutors    
    Upper bound for the number of executors.            
livy.spark.jars.packages            Adding extra libraries to livy interpreter          zeppelin.livy.ssl.trustStore        client 
trustStore file. Used when livy ssl is enabled        
zeppelin.livy.ssl.trustStorePassword        password for trustStore file. Used 
when livy ssl is enabled    We remove livy.spark.master in zeppelin-0.7. 
Because we sugguest user to use livy 0.3 in zeppelin-0.7. And livy 0.3 
don&amp;#39;t allow to specify livy.spark.master, it enfornce yarn-cluster 
mode.Adding External librariesYou can load dynamic library to livy interpreter 
by set livy.spark.jars.packages property to comma-separated list of maven 
coordinates of jars to include on the driver and executor classpaths. The 
format for the coordinates should be groupId:artifactId:version. Example      
Property    Example    Description          livy.spark.jars.packages      
io.spray:spray-json_2.10:1.3.1      Adding extra libraries to livy interpreter  
      How to useBasically, you can 
usespark%livy.sparksc.versionpyspark%livy.pysparkprint &amp;quot;1&amp;quot;sparkR%livy.sparkrhello &amp;lt;- function( name ) {    sprintf( 
&amp;quot;Hello, %s&amp;quot;, name 
);}hello(&amp;quot;livy&amp;quot;)ImpersonationWhen Zeppelin server is running 
with authentication enabled, then this interpreter utilizes Livy’s user 
impersonation feature i.e. sends extra parameter for creating and running a 
session (&amp;quot;proxyUser&amp;quot;: &amp;quot;${loggedInUser}&amp;quot;). 
This is particularly useful when multi users are sharing a Notebook 
server.Apply Zeppelin Dynamic FormsYou can leverage Zeppelin Dynamic Form. You 
can use both the text input and select form parameterization 
features.%livy.pysparkprint 
&amp;quot;${group_by=product_id,product_id|product_name|customer_id|store_id}&amp;quot;FAQLivy
 debugging: If you see any of these in error consoleConnect to livyhost:8998 
[livyhost/127.0.0.1, livyhost/0:0:0:0:0:0:0:1] failed: Connection refusedLooks 
like the livy server is not up yet or the config is wrongException: Session not 
found, Livy server would have restarted, or lost session.The session would have timed out, you 
may need to restart the interpreter.Blacklisted configuration values in session 
config: spark.masterEdit conf/spark-blacklist.conf file in livy server and 
comment out #spark.master line.If you choose to work on livy in apps/spark/java 
directory in https://github.com/cloudera/hue,copy 
spark-user-configurable-options.template to 
spark-user-configurable-options.conf file in livy server and comment out 
#spark.master. ",
       "url": " /interpreter/livy.html",
       "group": "interpreter",
       "excerpt": "Livy is an open source REST interface for interacting with 
Spark from anywhere. It supports executing snippets of code or programs in a 
Spark context that runs locally or in YARN."
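
The usage snippets quoted in the livy entry, de-escaped, with paragraph bodies exactly as given there (the %livy.sparkr body is R, as in the entry):

    %livy.spark
    sc.version

    %livy.pyspark
    print "1"

    %livy.sparkr
    hello <- function( name ) {
        sprintf( "Hello, %s", name )
    }
    hello("livy")

    %livy.pyspark
    print "${group_by=product_id,product_id|product_name|customer_id|store_id}"
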
@@ -468,7 +479,7 @@
 
     "/interpreter/spark.html": {
       "title": "Apache Spark Interpreter for Apache Zeppelin",
-      "content"  : "&lt;!--Licensed under the Apache License, Version 2.0 (the 
&quot;License&quot;);you may not use this file except in compliance with the 
License.You may obtain a copy of the License 
athttp://www.apache.org/licenses/LICENSE-2.0Unless required by applicable law 
or agreed to in writing, softwaredistributed under the License is distributed 
on an &quot;AS IS&quot; BASIS,WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied.See the License for the specific language governing 
permissions andlimitations under the License.--&gt;Spark Interpreter for Apache 
ZeppelinOverviewApache Spark is a fast and general-purpose cluster computing 
system.It provides high-level APIs in Java, Scala, Python and R, and an 
optimized engine that supports general execution graphs.Apache Spark is 
supported in Zeppelin with Spark interpreter group which consists of below five 
interpreters.      Name    Class    Description        %spark    
SparkInterpreter    Creates a SparkContext and provides a Scala environment        %spark.pyspark    
PySparkInterpreter    Provides a Python environment        %spark.r    
SparkRInterpreter    Provides an R environment with SparkR support        
%spark.sql    SparkSQLInterpreter    Provides a SQL environment        
%spark.dep    DepInterpreter    Dependency loader  ConfigurationThe Spark 
interpreter can be configured with properties provided by Zeppelin.You can also 
set other Spark properties which are not listed in the table. For a list of 
additional properties, refer to Spark Available Properties.      Property    
Default    Description        args        Spark commandline args      master    
local[*]    Spark master uri.  ex) spark://masterhost:7077      spark.app.name  
  Zeppelin    The name of spark application.        spark.cores.max        
Total number of cores to use.  Empty value uses all available core.        
spark.executor.memory     1g    Executor memory per worker instance.  ex) 512m, 
32g        zeppelin.dep.additionalRemoteRepository    spark-packages,  
http://dl.bintray.com/spark-packages/maven,  false;    A list of 
id,remote-repository-URL,is-snapshot;  for each remote repository.        
zeppelin.dep.localrepo    local-repo    Local repository for dependency loader  
      PYSPARKPYTHON    python    Python binary executable to use for PySpark in 
both driver and workers (default is python).            Property 
spark.pyspark.python take precedence if it is set        PYSPARKDRIVERPYTHON    
python    Python binary executable to use for PySpark in driver only (default 
is PYSPARKPYTHON).            Property spark.pyspark.driver.python take 
precedence if it is set        zeppelin.spark.concurrentSQL    false    Execute 
multiple SQL concurrently if set true.        zeppelin.spark.maxResult    1000  
  Max number of Spark SQL result to display.        
zeppelin.spark.printREPLOutput    true    Print REPL output        
zeppelin.spark.useHiveContext    true    Use HiveContext instead of SQLContext if it is true.        zeppelin.spark.importImplicit    true    Import 
implicits, UDF collection, and sql if set true.  Without any configuration, 
Spark interpreter works out of box in local mode. But if you want to connect to 
your Spark cluster, you&amp;#39;ll need to follow below two simple steps.1. 
Export SPARK_HOMEIn conf/zeppelin-env.sh, export SPARK_HOME environment 
variable with your Spark installation path.For example,export 
SPARK_HOME=/usr/lib/sparkYou can optionally set more environment variables# set 
hadoop conf direxport HADOOP_CONF_DIR=/usr/lib/hadoop# set options to pass 
spark-submit commandexport SPARK_SUBMIT_OPTIONS=&amp;quot;--packages 
com.databricks:spark-csv_2.10:1.2.0&amp;quot;# extra classpath. e.g. set 
classpath for hive-site.xmlexport 
ZEPPELIN_INTP_CLASSPATH_OVERRIDES=/etc/hive/confFor Windows, ensure you have 
winutils.exe in %HADOOP_HOME%bin. Please see Problems running Hadoop on Windows 
for the details.2. Set master in Interpreter menuAfter start Zeppelin,
  go to Interpreter menu and edit master property in your Spark interpreter 
setting. The value may vary depending on your Spark cluster deployment type.For 
example,local[*] in local modespark://master:7077 in standalone 
clusteryarn-client in Yarn client modemesos://host:5050 in Mesos 
clusterThat&amp;#39;s it. Zeppelin will work with any version of Spark and any 
deployment type without rebuilding Zeppelin in this way. For the further 
information about Spark &amp;amp; Zeppelin version compatibility, please refer 
to &amp;quot;Available Interpreters&amp;quot; section in Zeppelin download 
page.Note that without exporting SPARK_HOME, it&amp;#39;s running in local mode 
with included version of Spark. The included version may vary depending on the 
build profile.SparkContext, SQLContext, SparkSession, 
ZeppelinContextSparkContext, SQLContext and ZeppelinContext are automatically 
created and exposed as variable names sc, sqlContext and z, respectively, in 
Scala, Python and R environments.Staring from 0.6.1 SparkSession is available as variable spark when you are using 
Spark 2.x.Note that Scala/Python/R environment shares the same SparkContext, 
SQLContext and ZeppelinContext instance. Dependency ManagementThere are two 
ways to load external libraries in Spark interpreter. First is using 
interpreter setting menu and second is loading Spark properties.1. Setting 
Dependencies via Interpreter SettingPlease see Dependency Management for the 
details.2. Loading Spark PropertiesOnce SPARK_HOME is set in 
conf/zeppelin-env.sh, Zeppelin uses spark-submit as spark interpreter runner. 
spark-submit supports two ways to load configurations. The first is command 
line options such as --master and Zeppelin can pass these options to 
spark-submit by exporting SPARK_SUBMIT_OPTIONS in conf/zeppelin-env.sh. Second 
is reading configuration options from SPARK_HOME/conf/spark-defaults.conf. 
Spark properties that user can set to distribute libraries are:      
spark-defaults.conf    SPARK_SUBMIT_OPTIONS    Description        spark.jars    --jars    Comma-separated list of 
local jars to include on the driver and executor classpaths.        
spark.jars.packages    --packages    Comma-separated list of maven coordinates 
of jars to include on the driver and executor classpaths. Will search the local 
maven repo, then maven central and any additional remote repositories given by 
--repositories. The format for the coordinates should be 
groupId:artifactId:version.        spark.files    --files    Comma-separated 
list of files to be placed in the working directory of each executor.  Here are 
few examples:SPARK_SUBMIT_OPTIONS in conf/zeppelin-env.shexport 
SPARK_SUBMIT_OPTIONS=&amp;quot;--packages com.databricks:spark-csv_2.10:1.2.0 
--jars /path/mylib1.jar,/path/mylib2.jar --files 
/path/mylib1.py,/path/mylib2.zip,/path/mylib3.egg&amp;quot;SPARK_HOME/conf/spark-defaults.confspark.jars
        /path/mylib1.jar,/path/mylib2.jarspark.jars.packages   
com.databricks:spark-csv_2.10:1.2.0spark.files       /path/mylib1.py,/path/mylib2.egg,/path/mylib3.zip3. Dynamic Dependency 
Loading via %spark.dep interpreterNote: %spark.dep interpreter loads libraries 
to %spark and %spark.pyspark but not to  %spark.sql interpreter. So we 
recommend you to use the first option instead.When your code requires external 
library, instead of doing download/copy/restart Zeppelin, you can easily do 
following jobs using %spark.dep interpreter.Load libraries recursively from 
maven repositoryLoad libraries from local filesystemAdd additional maven 
repositoryAutomatically add libraries to SparkCluster (You can turn off)Dep 
interpreter leverages Scala environment. So you can write any Scala code 
here.Note that %spark.dep interpreter should be used before %spark, 
%spark.pyspark, %spark.sql.Here&amp;#39;s usages.%spark.depz.reset() // clean 
up previously added artifact and repository// add maven 
repositoryz.addRepo(&amp;quot;RepoName&amp;quot;).url(&amp;quot;RepoURL&amp;quot;)//
 add maven snapshot repositoryz.addRepo(&amp;quot;RepoName&amp;quot;).url(&amp;quot;RepoURL&amp;quot;).snapshot()//
 add credentials for private maven 
repositoryz.addRepo(&amp;quot;RepoName&amp;quot;).url(&amp;quot;RepoURL&amp;quot;).username(&amp;quot;username&amp;quot;).password(&amp;quot;password&amp;quot;)//
 add artifact from filesystemz.load(&amp;quot;/path/to.jar&amp;quot;)// add 
artifact from maven repository, with no 
dependencyz.load(&amp;quot;groupId:artifactId:version&amp;quot;).excludeAll()// 
add artifact 
recursivelyz.load(&amp;quot;groupId:artifactId:version&amp;quot;)// add 
artifact recursively except comma separated GroupID:ArtifactId 
listz.load(&amp;quot;groupId:artifactId:version&amp;quot;).exclude(&amp;quot;groupId:artifactId,groupId:artifactId,
 ...&amp;quot;)// exclude with 
patternz.load(&amp;quot;groupId:artifactId:version&amp;quot;).exclude(*)z.load(&amp;quot;groupId:artifactId:version&amp;quot;).exclude(&amp;quot;groupId:artifactId:*&amp;quot;)z.load(&amp;quot;groupId:artifactId:version&amp;quot;).exclude(&amp;quot;groupId:*&amp;quot;)// local() skips adding artifact 
to spark clusters (skipping 
sc.addJar())z.load(&amp;quot;groupId:artifactId:version&amp;quot;).local()ZeppelinContextZeppelin
 automatically injects ZeppelinContext as variable z in your Scala/Python 
environment. ZeppelinContext provides some additional functions and 
utilities.Object ExchangeZeppelinContext extends map and it&amp;#39;s shared 
between Scala and Python environment.So you can put some objects from Scala and 
read it from Python, vice versa.  // Put object from scala%sparkval myObject = 
...z.put(&amp;quot;objName&amp;quot;, myObject)// Exchanging data 
framesmyScalaDataFrame = ...z.put(&amp;quot;myScalaDataFrame&amp;quot;, 
myScalaDataFrame)val myPythonDataFrame = 
z.get(&amp;quot;myPythonDataFrame&amp;quot;).asInstanceOf[DataFrame]    # Get 
object from python%spark.pysparkmyObject = z.get(&amp;quot;objName&amp;quot;)# 
Exchanging data framesmyPythonDataFrame = ...z.put(&amp;quot;myPythonDataFrame&amp;quot;, postsDf._jdf)myScalaDataFrame = 
DataFrame(z.get(&amp;quot;myScalaDataFrame&amp;quot;), sqlContext)  Form 
CreationZeppelinContext provides functions for creating forms.In Scala and 
Python environments, you can create forms programmatically.  %spark/* Create 
text input form */z.input(&amp;quot;formName&amp;quot;)/* Create text input 
form with default value */z.input(&amp;quot;formName&amp;quot;, 
&amp;quot;defaultValue&amp;quot;)/* Create select form 
*/z.select(&amp;quot;formName&amp;quot;, Seq((&amp;quot;option1&amp;quot;, 
&amp;quot;option1DisplayName&amp;quot;),                         
(&amp;quot;option2&amp;quot;, &amp;quot;option2DisplayName&amp;quot;)))/* 
Create select form with default value*/z.select(&amp;quot;formName&amp;quot;, 
&amp;quot;option1&amp;quot;, Seq((&amp;quot;option1&amp;quot;, 
&amp;quot;option1DisplayName&amp;quot;),                                    
(&amp;quot;option2&amp;quot;, &amp;quot;option2DisplayName&amp;quot;)))    
%spark.pyspark# Create text input formz.input(&amp;quot;formName&amp;quot;)# Create text input form with 
default valuez.input(&amp;quot;formName&amp;quot;, 
&amp;quot;defaultValue&amp;quot;)# Create select 
formz.select(&amp;quot;formName&amp;quot;, [(&amp;quot;option1&amp;quot;, 
&amp;quot;option1DisplayName&amp;quot;),                      
(&amp;quot;option2&amp;quot;, &amp;quot;option2DisplayName&amp;quot;)])# Create 
select form with default valuez.select(&amp;quot;formName&amp;quot;, 
[(&amp;quot;option1&amp;quot;, &amp;quot;option1DisplayName&amp;quot;),         
             (&amp;quot;option2&amp;quot;, 
&amp;quot;option2DisplayName&amp;quot;)], &amp;quot;option1&amp;quot;)  In sql 
environment, you can create form in simple template.%spark.sqlselect * from 
${table=defaultTableName} where text like &amp;#39;%${search}%&amp;#39;To learn 
more about dynamic form, checkout Dynamic Form.Matplotlib Integration 
(pyspark)Both the python and pyspark interpreters have built-in support for 
inline visualization using matplotlib, a popular plotting library for python. More details can be found in the 
python interpreter documentation, since matplotlib support is identical. More 
advanced interactive plotting can be done with pyspark through utilizing 
Zeppelin&amp;#39;s built-in Angular Display System, as shown below:Interpreter 
setting optionYou can choose one of shared, scoped and isolated options wheh 
you configure Spark interpreter. Spark interpreter creates separated Scala 
compiler per each notebook but share a single SparkContext in scoped mode 
(experimental). It creates separated SparkContext per each notebook in isolated 
mode.Setting up Zeppelin with KerberosLogical setup with Zeppelin, Kerberos Key 
Distribution Center (KDC), and Spark on YARN:Configuration SetupOn the server 
that Zeppelin is installed, install Kerberos client modules and configuration, 
krb5.conf.This is to make the server communicate with KDC.Set SPARK_HOME in 
[ZEPPELIN_HOME]/conf/zeppelin-env.sh to use spark-submit(Additionally, you might have to set export HADOOP_CONF_DIR=/etc/hadoop/conf)Add the two 
properties below to Spark configuration 
([SPARK_HOME]/conf/spark-defaults.conf):spark.yarn.principalspark.yarn.keytabNOTE:
 If you do not have permission to access for the above spark-defaults.conf 
file, optionally, you can add the above lines to the Spark Interpreter setting 
through the Interpreter tab in the Zeppelin UI.That&amp;#39;s it. Play with 
Zeppelin!",
+      "content"  : "&lt;!--Licensed under the Apache License, Version 2.0 (the 
&quot;License&quot;);you may not use this file except in compliance with the 
License.You may obtain a copy of the License 
athttp://www.apache.org/licenses/LICENSE-2.0Unless required by applicable law 
or agreed to in writing, softwaredistributed under the License is distributed 
on an &quot;AS IS&quot; BASIS,WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied.See the License for the specific language governing 
permissions andlimitations under the License.--&gt;Spark Interpreter for Apache 
ZeppelinOverviewApache Spark is a fast and general-purpose cluster computing 
system.It provides high-level APIs in Java, Scala, Python and R, and an 
optimized engine that supports general execution graphs.Apache Spark is 
supported in Zeppelin with Spark interpreter group which consists of below five 
interpreters.      Name    Class    Description        %spark    
SparkInterpreter    Creates a SparkContext and provides a Scala environment        %spark.pyspark    
PySparkInterpreter    Provides a Python environment        %spark.r    
SparkRInterpreter    Provides an R environment with SparkR support        
%spark.sql    SparkSQLInterpreter    Provides a SQL environment        
%spark.dep    DepInterpreter    Dependency loader  ConfigurationThe Spark 
interpreter can be configured with properties provided by Zeppelin.You can also 
set other Spark properties which are not listed in the table. For a list of 
additional properties, refer to Spark Available Properties.      Property    
Default    Description        args        Spark commandline args      master    
local[*]    Spark master uri.  ex) spark://masterhost:7077      spark.app.name  
  Zeppelin    The name of spark application.        spark.cores.max        
Total number of cores to use.  Empty value uses all available core.        
spark.executor.memory     1g    Executor memory per worker instance.  ex) 512m, 
32g        zeppelin.dep.additionalRemoteRepository    spark-packages,  
http://dl.bintray.com/spark-packages/maven,  false;    A list of 
id,remote-repository-URL,is-snapshot;  for each remote repository.        
zeppelin.dep.localrepo    local-repo    Local repository for dependency loader  
      PYSPARKPYTHON    python    Python binary executable to use for PySpark in 
both driver and workers (default is python).            Property 
spark.pyspark.python take precedence if it is set        PYSPARKDRIVERPYTHON    
python    Python binary executable to use for PySpark in driver only (default 
is PYSPARKPYTHON).            Property spark.pyspark.driver.python take 
precedence if it is set        zeppelin.spark.concurrentSQL    false    Execute 
multiple SQL concurrently if set true.        zeppelin.spark.maxResult    1000  
  Max number of Spark SQL result to display.        
zeppelin.spark.printREPLOutput    true    Print REPL output        
zeppelin.spark.useHiveContext    true    Use HiveContext instead of SQLContext if it is true.        zeppelin.spark.importImplicit    true    Import 
implicits, UDF collection, and sql if set true.        
zeppelin.spark.enableSupportedVersionCheck    true    Do not change - developer 
only setting, not for production use  Without any configuration, Spark 
interpreter works out of box in local mode. But if you want to connect to your 
Spark cluster, you&amp;#39;ll need to follow below two simple steps.1. Export 
SPARK_HOMEIn conf/zeppelin-env.sh, export SPARK_HOME environment variable with 
your Spark installation path.For example,export SPARK_HOME=/usr/lib/sparkYou 
can optionally set more environment variables# set hadoop conf direxport 
HADOOP_CONF_DIR=/usr/lib/hadoop# set options to pass spark-submit commandexport 
SPARK_SUBMIT_OPTIONS=&amp;quot;--packages 
com.databricks:spark-csv_2.10:1.2.0&amp;quot;# extra classpath. e.g. set 
classpath for hive-site.xmlexport 
ZEPPELIN_INTP_CLASSPATH_OVERRIDES=/etc/hive/confFor Windows, ensure you have 
winutils.exe in %HADOOP_HOME%bin. Please see Problems running Hadoop on Windows for the details.2. Set 
master in Interpreter menuAfter start Zeppelin, go to Interpreter menu and edit 
master property in your Spark interpreter setting. The value may vary depending 
on your Spark cluster deployment type.For example,local[*] in local 
modespark://master:7077 in standalone clusteryarn-client in Yarn client 
modemesos://host:5050 in Mesos clusterThat&amp;#39;s it. Zeppelin will work 
with any version of Spark and any deployment type without rebuilding Zeppelin 
in this way. For the further information about Spark &amp;amp; Zeppelin version 
compatibility, please refer to &amp;quot;Available Interpreters&amp;quot; 
section in Zeppelin download page.Note that without exporting SPARK_HOME, 
it&amp;#39;s running in local mode with included version of Spark. The included 
version may vary depending on the build profile.SparkContext, SQLContext, 
SparkSession, ZeppelinContextSparkContext, SQLContext and ZeppelinContext are 
automatically created and exposed as variable names sc, sqlContext and z, 
respectively, in Scala, Python and R environments.Staring from 0.6.1 
SparkSession is available as variable spark when you are using Spark 2.x.Note 
that Scala/Python/R environment shares the same SparkContext, SQLContext and 
ZeppelinContext instance. Dependency ManagementThere are two ways to load 
external libraries in Spark interpreter. First is using interpreter setting 
menu and second is loading Spark properties.1. Setting Dependencies via 
Interpreter SettingPlease see Dependency Management for the details.2. Loading 
Spark PropertiesOnce SPARK_HOME is set in conf/zeppelin-env.sh, Zeppelin uses 
spark-submit as spark interpreter runner. spark-submit supports two ways to 
load configurations. The first is command line options such as --master and 
Zeppelin can pass these options to spark-submit by exporting 
SPARK_SUBMIT_OPTIONS in conf/zeppelin-env.sh. Second is reading configuration 
options from SPARK_HOME/conf/spark-defaults.conf. Spark properties that user can set to distribute libraries are: 
     spark-defaults.conf    SPARK_SUBMIT_OPTIONS    Description        
spark.jars    --jars    Comma-separated list of local jars to include on the 
driver and executor classpaths.        spark.jars.packages    --packages    
Comma-separated list of maven coordinates of jars to include on the driver and 
executor classpaths. Will search the local maven repo, then maven central and 
any additional remote repositories given by --repositories. The format for the 
coordinates should be groupId:artifactId:version.        spark.files    --files 
   Comma-separated list of files to be placed in the working directory of each 
executor.  Here are few examples:SPARK_SUBMIT_OPTIONS in 
conf/zeppelin-env.shexport SPARK_SUBMIT_OPTIONS=&amp;quot;--packages 
com.databricks:spark-csv_2.10:1.2.0 --jars /path/mylib1.jar,/path/mylib2.jar 
--files 
/path/mylib1.py,/path/mylib2.zip,/path/mylib3.egg&amp;quot;SPARK_HOME/conf/spark-defaults.confspark.jars        /path/mylib1.jar,/path/mylib2.jarspark.jars.packages   
com.databricks:spark-csv_2.10:1.2.0spark.files       
/path/mylib1.py,/path/mylib2.egg,/path/mylib3.zip3. Dynamic Dependency Loading 
3. Dynamic Dependency Loading via %spark.dep Interpreter

Note: the %spark.dep interpreter loads libraries into %spark and %spark.pyspark, but not into the %spark.sql interpreter, so we recommend using the first option instead.

When your code requires an external library, the %spark.dep interpreter lets you avoid the download/copy/restart-Zeppelin cycle. It can:

- Load libraries recursively from a Maven repository
- Load libraries from the local filesystem
- Add additional Maven repositories
- Automatically add libraries to the Spark cluster (this can be turned off)

The dep interpreter leverages the Scala environment, so you can write any Scala code here. Note that the %spark.dep interpreter should be used before %spark, %spark.pyspark, and %spark.sql.

Here are its usages:

    %spark.dep
    z.reset() // clean up previously added artifacts and repositories

    // add maven repository
    z.addRepo("RepoName").url("RepoURL")

    // add maven snapshot repository
    z.addRepo("RepoName").url("RepoURL").snapshot()

    // add credentials for private maven repository
    z.addRepo("RepoName").url("RepoURL").username("username").password("password")

    // add artifact from filesystem
    z.load("/path/to.jar")

    // add artifact from maven repository, with no dependencies
    z.load("groupId:artifactId:version").excludeAll()

    // add artifact recursively
    z.load("groupId:artifactId:version")

    // add artifact recursively, except for a comma-separated groupId:artifactId list
    z.load("groupId:artifactId:version").exclude("groupId:artifactId,groupId:artifactId, ...")

    // exclude with pattern
    z.load("groupId:artifactId:version").exclude(*)
    z.load("groupId:artifactId:version").exclude("groupId:artifactId:*")
    z.load("groupId:artifactId:version").exclude("groupId:*")

    // local() skips adding the artifact to the spark cluster (skips sc.addJar())
    z.load("groupId:artifactId:version").local()
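As a concrete illustration, here is a minimal sketch that loads the spark-csv package shown earlier and then uses it from %spark. It assumes a Spark 1.x sqlContext, and the CSV path is hypothetical:

    %spark.dep
    z.reset()
    // fetch spark-csv and its transitive dependencies from Maven Central
    z.load("com.databricks:spark-csv_2.10:1.2.0")

    %spark
    // read a CSV file with the freshly loaded data source (path is an assumption)
    val df = sqlContext.read.format("com.databricks.spark.csv")
      .option("header", "true")
      .load("/path/to/data.csv")
    df.printSchema()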
ZeppelinContext

Zeppelin automatically injects ZeppelinContext as the variable z in your Scala and Python environments. ZeppelinContext provides some additional functions and utilities.

Object Exchange

ZeppelinContext extends a map that is shared between the Scala and Python environments, so you can put an object in Scala and read it from Python, and vice versa.

    // Put object from scala
    %spark
    val myObject = ...
    z.put("objName", myObject)

    // Exchanging data frames
    val myScalaDataFrame = ...
    z.put("myScalaDataFrame", myScalaDataFrame)

    val myPythonDataFrame = z.get("myPythonDataFrame").asInstanceOf[DataFrame]

    # Get object from python
    %spark.pyspark
    from pyspark.sql import DataFrame  # needed to wrap the exchanged Java DataFrame

    myObject = z.get("objName")

    # Exchanging data frames: put the underlying Java DataFrame
    myPythonDataFrame = ...
    z.put("myPythonDataFrame", myPythonDataFrame._jdf)

    myScalaDataFrame = DataFrame(z.get("myScalaDataFrame"), sqlContext)
Form Creation

ZeppelinContext provides functions for creating forms. In the Scala and Python environments, you can create forms programmatically:

    %spark
    /* Create text input form */
    z.input("formName")

    /* Create text input form with default value */
    z.input("formName", "defaultValue")

    /* Create select form */
    z.select("formName", Seq(("option1", "option1DisplayName"),
                             ("option2", "option2DisplayName")))

    /* Create select form with default value */
    z.select("formName", "option1", Seq(("option1", "option1DisplayName"),
                                        ("option2", "option2DisplayName")))

    %spark.pyspark
    # Create text input form
    z.input("formName")

    # Create text input form with default value
    z.input("formName", "defaultValue")

    # Create select form
    z.select("formName", [("option1", "option1DisplayName"),
                          ("option2", "option2DisplayName")])

    # Create select form with default value
    z.select("formName", [("option1", "option1DisplayName"),
                          ("option2", "option2DisplayName")], "option1")

In the SQL environment, you can create a form with a simple template:

    %spark.sql
    select * from ${table=defaultTableName} where text like '%${search}%'

To learn more about dynamic forms, check out Dynamic Form.
Matplotlib Integration (pyspark)

Both the python and pyspark interpreters have built-in support for inline visualization using matplotlib, a popular plotting library for Python. More details can be found in the python interpreter documentation, since the matplotlib support is identical. More advanced interactive plotting can be done with pyspark through Zeppelin's built-in Angular Display System.
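For the basic inline case, here is a minimal pyspark sketch. It assumes matplotlib is installed on the interpreter host; depending on your Zeppelin version, you may need z.show(plt) or a non-interactive backend to render the figure:

    %spark.pyspark
    import matplotlib.pyplot as plt

    plt.figure()
    plt.plot([1, 2, 3, 4], [1, 4, 9, 16])  # simple series rendered in the paragraph output
    plt.title("inline matplotlib example")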
Interpreter Setting Option

You can choose one of the shared, scoped, and isolated options when you configure the Spark interpreter. In scoped mode (experimental), the Spark interpreter creates a separate Scala compiler per notebook but shares a single SparkContext. In isolated mode, it creates a separate SparkContext per notebook.
Setting up Zeppelin with Kerberos

The logical setup involves Zeppelin, a Kerberos Key Distribution Center (KDC), and Spark on YARN.

Configuration Setup

1. On the server where Zeppelin is installed, install the Kerberos client modules and configuration (krb5.conf). This is required so the server can communicate with the KDC.
2. Set SPARK_HOME in [ZEPPELIN_HOME]/conf/zeppelin-env.sh so that spark-submit is used. (Additionally, you might have to set export HADOOP_CONF_DIR=/etc/hadoop/conf.)
3. Add the two properties below to the Spark configuration ([SPARK_HOME]/conf/spark-defaults.conf):

    spark.yarn.principal
    spark.yarn.keytab
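For illustration, a hedged spark-defaults.conf sketch; the principal and keytab path are hypothetical and must match your own KDC and keytab layout:

    # both values below are assumptions for illustration only
    spark.yarn.principal  zeppelin@EXAMPLE.COM
    spark.yarn.keytab     /etc/security/keytabs/zeppelin.keytab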
NOTE: If you do not have permission to access the spark-defaults.conf file above, you can optionally add these properties to the Spark interpreter setting through the Interpreter tab in the Zeppelin UI instead.

That's it. Play with Zeppelin!",
       "url": " /interpreter/spark.html",
       "group": "interpreter",
       "excerpt": "Apache Spark is a fast and general-purpose cluster computing 
system. It provides high-level APIs in Java, Scala, Python and R, and an 
optimized engine that supports general execution engine."

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/security/authentication.html
URL: 
http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/security/authentication.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/security/authentication.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/security/authentication.html Tue Apr  4 
06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: 
zeppelin/site/docs/0.8.0-SNAPSHOT/security/datasource_authorization.html
URL: 
http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/security/datasource_authorization.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/security/datasource_authorization.html 
(original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/security/datasource_authorization.html 
Tue Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/security/notebook_authorization.html
URL: 
http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/security/notebook_authorization.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/security/notebook_authorization.html 
(original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/security/notebook_authorization.html Tue 
Apr  4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/security/shiroauthentication.html
URL: 
http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/security/shiroauthentication.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/security/shiroauthentication.html 
(original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/security/shiroauthentication.html Tue Apr 
 4 06:06:16 2017
@@ -108,6 +108,7 @@
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/sitemap.txt
URL: 
http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/sitemap.txt?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/sitemap.txt (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/sitemap.txt Tue Apr  4 06:06:16 2017
@@ -27,6 +27,7 @@ http://zeppelin.apache.org/interpreter/c
 http://zeppelin.apache.org/interpreter/elasticsearch.html
 http://zeppelin.apache.org/interpreter/flink.html
 http://zeppelin.apache.org/interpreter/geode.html
+http://zeppelin.apache.org/interpreter/groovy.html
 http://zeppelin.apache.org/interpreter/hbase.html
 http://zeppelin.apache.org/interpreter/hdfs.html
 http://zeppelin.apache.org/interpreter/hive.html

Modified: zeppelin/site/docs/0.8.0-SNAPSHOT/storage/storage.html
URL: 
http://svn.apache.org/viewvc/zeppelin/site/docs/0.8.0-SNAPSHOT/storage/storage.html?rev=1790064&r1=1790063&r2=1790064&view=diff
==============================================================================
--- zeppelin/site/docs/0.8.0-SNAPSHOT/storage/storage.html (original)
+++ zeppelin/site/docs/0.8.0-SNAPSHOT/storage/storage.html Tue Apr  4 06:06:16 
2017
@@ -108,6 +108,7 @@
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/elasticsearch.html">Elasticsearch</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/flink.html">Flink</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/geode.html">Geode</a></li>
+                <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/groovy.html">Groovy</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hbase.html">HBase</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hdfs.html">HDFS</a></li>
                 <li><a 
href="/docs/0.8.0-SNAPSHOT/interpreter/hive.html">Hive</a></li>

