This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch branch-0.9
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/branch-0.9 by this push:
     new fc7213f  [hotfix] minor update of flink tutorial notes
fc7213f is described below

commit fc7213f4fa85640614396c0581e1417b9a215ade
Author: Jeff Zhang <zjf...@apache.org>
AuthorDate: Tue Jul 14 22:29:49 2020 +0800

    [hotfix] minor update of flink tutorial notes
---
 .../Flink Tutorial/1. Flink Basics_2F2YS7PCE.zpln  | 17 +++++++----
 ...al Steps for Building Flink Job_2F7SKEHPA.zpln} | 30 ++++++++++++++------
 .../3. Flink Job Control Tutorial_2F5RKHCDV.zpln   | 26 +++++++----------
 .../Flink Tutorial/4. Streaming ETL_2EYD56B9B.zpln | 20 +++++--------
 .../5. Streaming Data Analytics_2EYT7Q6R8.zpln     | 33 ++++------------------
 .../7. Batch Data Analytics_2EZ9G3JJU.zpln         | 12 +++++++-
 .../8. Logistic Regression (Alink)_2F4HJNWVN.zpln  |  8 ++++++
 7 files changed, 74 insertions(+), 72 deletions(-)

diff --git a/notebook/Flink Tutorial/1. Flink Basics_2F2YS7PCE.zpln b/notebook/Flink Tutorial/1. Flink Basics_2F2YS7PCE.zpln
index 2febf6f..dec2c63 100644
--- a/notebook/Flink Tutorial/1. Flink Basics_2F2YS7PCE.zpln    
+++ b/notebook/Flink Tutorial/1. Flink Basics_2F2YS7PCE.zpln    
@@ -2,9 +2,9 @@
   "paragraphs": [
     {
       "title": "Introduction",
-      "text": "%md\n\n# Introduction\n\n[Apache 
Flink](https://flink.apache.org/) is a framework and distributed processing 
engine for stateful computations over unbounded and bounded data streams. This 
is Flink tutorial for runninb classical wordcount in both batch and streaming 
mode. \n\nThere\u0027re 3 things you need to do before using flink in 
zeppelin.\n\n* Download [Flink 1.10](https://flink.apache.org/downloads.html) 
for scala 2.11 (Only scala-2.11 is supported, scala-2.12 is not [...]
+      "text": "%md\n\n# Introduction\n\n[Apache 
Flink](https://flink.apache.org/) is a framework and distributed processing 
engine for stateful computations over unbounded and bounded data streams. This 
is Flink tutorial for running classical wordcount in both batch and streaming 
mode. \n\nThere\u0027re 3 things you need to do before using flink in 
Zeppelin.\n\n* Download [Flink 1.10](https://flink.apache.org/downloads.html) 
for scala 2.11 (Only scala-2.11 is supported, scala-2.12 is not [...]
       "user": "anonymous",
-      "dateUpdated": "2020-05-11 14:22:22.877",
+      "dateUpdated": "2020-07-14 22:20:39.929",
       "config": {
         "colWidth": 12.0,
         "fontSize": 9.0,
@@ -30,17 +30,18 @@
         "msg": [
           {
             "type": "HTML",
-            "data": "\u003cdiv 
class\u003d\"markdown-body\"\u003e\n\u003ch1\u003eIntroduction\u003c/h1\u003e\n\u003cp\u003e\u003ca
 href\u003d\"https://flink.apache.org/\"\u003eApache Flink\u003c/a\u003e is a 
framework and distributed processing engine for stateful computations over 
unbounded and bounded data streams. This is Flink tutorial for runninb 
classical wordcount in both batch and streaming 
mode.\u003c/p\u003e\n\u003cp\u003eThere\u0026rsquo;re 3 things you need to do 
before using [...]
+            "data": "\u003cdiv 
class\u003d\"markdown-body\"\u003e\n\u003ch1\u003eIntroduction\u003c/h1\u003e\n\u003cp\u003e\u003ca
 href\u003d\"https://flink.apache.org/\"\u003eApache Flink\u003c/a\u003e is a 
framework and distributed processing engine for stateful computations over 
unbounded and bounded data streams. This is Flink tutorial for running 
classical wordcount in both batch and streaming 
mode.\u003c/p\u003e\n\u003cp\u003eThere\u0026rsquo;re 3 things you need to do 
before using [...]
           }
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580997898536_-1239502599",
       "id": "paragraph_1580997898536_-1239502599",
       "dateCreated": "2020-02-06 22:04:58.536",
-      "dateStarted": "2020-05-11 14:22:22.878",
-      "dateFinished": "2020-05-11 14:22:24.363",
+      "dateStarted": "2020-07-14 22:20:39.929",
+      "dateFinished": "2020-07-14 22:20:39.954",
       "status": "FINISHED"
     },
     {
@@ -76,6 +77,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580998080340_1531975932",
       "id": "paragraph_1580998080340_1531975932",
@@ -117,6 +119,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580998084555_-697674675",
       "id": "paragraph_1580998084555_-697674675",
@@ -162,6 +165,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580998407443_936860398",
       "id": "paragraph_1580998407443_936860398",
@@ -180,6 +184,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1582600214095_1825730071",
       "id": "paragraph_1582600214095_1825730071",
@@ -189,7 +194,7 @@
   ],
   "name": "1. Flink Basics",
   "id": "2F2YS7PCE",
-  "defaultInterpreterGroup": "spark",
+  "defaultInterpreterGroup": "flink",
   "version": "0.9.0-SNAPSHOT",
   "noteParams": {},
   "noteForms": {},
diff --git a/notebook/Flink Tutorial/2. 3 Essential Steps for Building Flink Job_2F7SKEHPA.zpln b/notebook/Flink Tutorial/2. Three Essential Steps for Building Flink Job_2F7SKEHPA.zpln
similarity index 90%
rename from notebook/Flink Tutorial/2. 3 Essential Steps for Building Flink Job_2F7SKEHPA.zpln
rename to notebook/Flink Tutorial/2. Three Essential Steps for Building Flink Job_2F7SKEHPA.zpln
index 03f8518..aec2347 100644
--- a/notebook/Flink Tutorial/2. 3 Essential Steps for Building Flink Job_2F7SKEHPA.zpln
+++ b/notebook/Flink Tutorial/2. Three Essential Steps for Building Flink Job_2F7SKEHPA.zpln
@@ -2,9 +2,9 @@
   "paragraphs": [
     {
       "title": "Introduction",
-      "text": "%md\n\n# Introduction\n\n\nTypically there\u0027re 3 essential 
steps for building one flink job. And each step has its favorite tools.\n\n* 
Define source/sink (SQL DDL)\n* Define data flow (Table Api / SQL)\n* Implement 
business logic (UDF)\n\nThis tutorial demonstrate how to use build one typical 
flink via these 3 steps and their favorite tools.\nIn this demo, we will do 
real time analysis of cdn access data. First we read cdn access log from kafka 
queue and do some proce [...]
+      "text": "%md\n\n# Introduction\n\n\nTypically there\u0027re 3 essential 
steps for building one flink job. And each step has its favorite tools.\n\n* 
Define source/sink (SQL DDL)\n* Define data flow (Table Api / SQL)\n* Implement 
business logic (UDF)\n\nThis tutorial demonstrates how to build one typical 
flinkjob  via these 3 steps and their favorite tools.\nIn this demo, we will do 
real time analysis of cdn access data. First we read cdn access log from kafka 
queue and do some proc [...]
       "user": "anonymous",
-      "dateUpdated": "2020-04-28 14:47:07.249",
+      "dateUpdated": "2020-07-14 22:22:38.673",
       "config": {
         "colWidth": 12.0,
         "fontSize": 9.0,
@@ -30,17 +30,18 @@
         "msg": [
           {
             "type": "HTML",
-            "data": "\u003cdiv 
class\u003d\"markdown-body\"\u003e\n\u003ch1\u003eIntroduction\u003c/h1\u003e\n\u003cp\u003eTypically
 there\u0026rsquo;re 3 essential steps for building one flink job. And each 
step has its favorite 
tools.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eDefine source/sink (SQL 
DDL)\u003c/li\u003e\n\u003cli\u003eDefine data flow (Table Api / 
SQL)\u003c/li\u003e\n\u003cli\u003eImplement business logic 
(UDF)\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eThis tuto [...]
+            "data": "\u003cdiv 
class\u003d\"markdown-body\"\u003e\n\u003ch1\u003eIntroduction\u003c/h1\u003e\n\u003cp\u003eTypically
 there\u0026rsquo;re 3 essential steps for building one flink job. And each 
step has its favorite 
tools.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eDefine source/sink (SQL 
DDL)\u003c/li\u003e\n\u003cli\u003eDefine data flow (Table Api / 
SQL)\u003c/li\u003e\n\u003cli\u003eImplement business logic 
(UDF)\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eThis tuto [...]
           }
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1587965294481_785664297",
       "id": "paragraph_1587965294481_785664297",
       "dateCreated": "2020-04-27 13:28:14.481",
-      "dateStarted": "2020-04-28 14:47:07.249",
-      "dateFinished": "2020-04-28 14:47:07.271",
+      "dateStarted": "2020-07-14 22:22:38.673",
+      "dateFinished": "2020-07-14 22:22:38.688",
       "status": "FINISHED"
     },
     {
@@ -71,6 +72,7 @@
         "msg": []
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1585734329697_1695781588",
       "id": "paragraph_1585734329697_1695781588",
@@ -112,6 +114,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1585733282496_767011327",
       "id": "paragraph_1585733282496_767011327",
@@ -153,6 +156,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1585733896337_1928136072",
       "id": "paragraph_1585733896337_1928136072",
@@ -189,6 +193,7 @@
         "msg": []
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1585733368214_-94290606",
       "id": "paragraph_1585733368214_-94290606",
@@ -258,6 +263,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1586844130766_-1152098073",
       "id": "paragraph_1586844130766_-1152098073",
@@ -347,6 +353,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1586617588031_-638632283",
       "id": "paragraph_1586617588031_-638632283",
@@ -435,6 +442,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1585796091843_-1858464529",
       "id": "paragraph_1585796091843_-1858464529",
@@ -469,6 +477,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1585757391555_145331506",
       "id": "paragraph_1585757391555_145331506",
@@ -480,7 +489,7 @@
     {
       "text": "%md\n\n# Query sink table via jdbc interpreter\n\nYou can also 
query the sink table (mysql) directly via jdbc interpreter. Here I will create 
a jdbc interpreter named `mysql` and use it to query the sink table. Regarding 
how to connect mysql in Zeppelin, refer this 
[link](http://zeppelin.apache.org/docs/0.9.0-preview1/interpreter/jdbc.html#mysql)",
       "user": "anonymous",
-      "dateUpdated": "2020-04-29 12:02:12.920",
+      "dateUpdated": "2020-07-14 22:23:28.657",
       "config": {
         "colWidth": 12.0,
         "fontSize": 9.0,
@@ -510,12 +519,13 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1587976725546_2073084823",
       "id": "paragraph_1587976725546_2073084823",
       "dateCreated": "2020-04-27 16:38:45.548",
-      "dateStarted": "2020-04-29 12:02:12.923",
-      "dateFinished": "2020-04-29 12:02:12.939",
+      "dateStarted": "2020-07-14 22:23:28.657",
+      "dateFinished": "2020-07-14 22:23:28.670",
       "status": "FINISHED"
     },
     {
@@ -607,6 +617,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1586931452339_-1281904044",
       "id": "paragraph_1586931452339_-1281904044",
@@ -637,6 +648,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1587115507009_250592635",
       "id": "paragraph_1587115507009_250592635",
@@ -644,7 +656,7 @@
       "status": "READY"
     }
   ],
-  "name": "2. 3 Essential Steps for Building Flink Job",
+  "name": "2. Three Essential Steps for Building Flink Job",
   "id": "2F7SKEHPA",
   "defaultInterpreterGroup": "flink",
   "version": "0.9.0-SNAPSHOT",
diff --git a/notebook/Flink Tutorial/3. Flink Job Control Tutorial_2F5RKHCDV.zpln b/notebook/Flink Tutorial/3. Flink Job Control Tutorial_2F5RKHCDV.zpln
index ae80c38..9a70026 100644
--- a/notebook/Flink Tutorial/3. Flink Job Control Tutorial_2F5RKHCDV.zpln      
+++ b/notebook/Flink Tutorial/3. Flink Job Control Tutorial_2F5RKHCDV.zpln      
@@ -4,7 +4,7 @@
       "title": "Introduction",
       "text": "%md\n\n# Introduction\n\nThis tutorial is to demonstrate how to 
do job control in flink (job submission/cancel/resume).\n2 steps:\n1. Create 
custom data stream and register it as flink table. The custom data stream is a 
simulated web access logs. \n2. Query this flink table (pv for each page type), 
you can cancel it and then resume it again w/o savepoint.\n",
       "user": "anonymous",
-      "dateUpdated": "2020-04-29 15:23:45.078",
+      "dateUpdated": "2020-07-14 22:24:11.629",
       "config": {
         "colWidth": 12.0,
         "fontSize": 9.0,
@@ -35,12 +35,13 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1587964310955_443124874",
       "id": "paragraph_1587964310955_443124874",
       "dateCreated": "2020-04-27 13:11:50.955",
-      "dateStarted": "2020-04-29 15:23:45.080",
-      "dateFinished": "2020-04-29 15:23:45.094",
+      "dateStarted": "2020-07-14 22:24:11.630",
+      "dateFinished": "2020-07-14 22:24:11.642",
       "status": "FINISHED"
     },
     {
@@ -76,6 +77,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1586733774605_1418179269",
       "id": "paragraph_1586733774605_1418179269",
@@ -159,20 +161,8 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "ERROR",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "url\tc\nhome\t19\nproduct\t55\nsearch\t38\n"
-          },
-          {
-            "type": "TEXT",
-            "data": "Fail to run sql command: select url, count(1) as c from 
log group by url\njava.io.IOException: Fail to run stream sql job\n\tat 
org.apache.zeppelin.flink.sql.AbstractStreamSqlJob.run(AbstractStreamSqlJob.java:166)\n\tat
 
org.apache.zeppelin.flink.sql.AbstractStreamSqlJob.run(AbstractStreamSqlJob.java:101)\n\tat
 
org.apache.zeppelin.flink.FlinkStreamSqlInterpreter.callInnerSelect(FlinkStreamSqlInterpreter.java:90)\n\tat
 org.apache.zeppelin.flink.FlinkSqlInterrpeter.call [...]
-          }
-        ]
-      },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1586847370895_154139610",
       "id": "paragraph_1586847370895_154139610",
@@ -249,6 +239,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1586733780533_1100270999",
       "id": "paragraph_1586733780533_1100270999",
@@ -323,6 +314,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1586733868269_783581378",
       "id": "paragraph_1586733868269_783581378",
@@ -396,6 +388,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1586754954622_-1794803125",
       "id": "paragraph_1586754954622_-1794803125",
@@ -414,6 +407,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1587207857968_1997116221",
       "id": "paragraph_1587207857968_1997116221",
diff --git a/notebook/Flink Tutorial/4. Streaming ETL_2EYD56B9B.zpln b/notebook/Flink Tutorial/4. Streaming ETL_2EYD56B9B.zpln
index f18d0d1..11501b4 100644
--- a/notebook/Flink Tutorial/4. Streaming ETL_2EYD56B9B.zpln   
+++ b/notebook/Flink Tutorial/4. Streaming ETL_2EYD56B9B.zpln   
@@ -37,6 +37,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1579054287919_-61477360",
       "id": "paragraph_1579054287919_-61477360",
@@ -73,6 +74,7 @@
         "msg": []
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1587959422055_1513725291",
       "id": "paragraph_1587959422055_1513725291",
@@ -116,6 +118,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578044987529_1240899810",
       "id": "paragraph_1578044987529_1240899810",
@@ -159,6 +162,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905686087_1273839451",
       "id": "paragraph_1578905686087_1273839451",
@@ -193,20 +197,8 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "ERROR",
-        "msg": [
-          {
-            "type": "ANGULAR",
-            "data": "\u003ch1\u003eDuration: {{duration}} seconds\n"
-          },
-          {
-            "type": "TEXT",
-            "data": "Fail to run sql command: insert into sink_kafka select 
status, direction, cast(event_ts/1000000000 as timestamp(3)) from source_kafka 
where status \u003c\u003e \u0027foo\u0027\njava.io.IOException: 
java.util.concurrent.ExecutionException: 
org.apache.flink.client.program.ProgramInvocationException: Job failed (JobID: 
9d350f962fffb020222af2bba3388912)\n\tat 
org.apache.zeppelin.flink.FlinkSqlInterrpeter.callInsertInto(FlinkSqlInterrpeter.java:526)\n\tat
 org.apache.zeppe [...]
-          }
-        ]
-      },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905715189_33634195",
       "id": "paragraph_1578905715189_33634195",
@@ -320,6 +312,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1579058345516_-1005807622",
       "id": "paragraph_1579058345516_-1005807622",
@@ -350,6 +343,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1579058056677_-1981512536",
       "id": "paragraph_1579058056677_-1981512536",
diff --git a/notebook/Flink Tutorial/5. Streaming Data Analytics_2EYT7Q6R8.zpln b/notebook/Flink Tutorial/5. Streaming Data Analytics_2EYT7Q6R8.zpln
index e14406c..5fcbaf9 100644
--- a/notebook/Flink Tutorial/5. Streaming Data Analytics_2EYT7Q6R8.zpln        
+++ b/notebook/Flink Tutorial/5. Streaming Data Analytics_2EYT7Q6R8.zpln        
@@ -37,6 +37,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1579054784565_2122156822",
       "id": "paragraph_1579054784565_2122156822",
@@ -74,20 +75,8 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "ERROR",
-        "msg": [
-          {
-            "type": "ANGULAR",
-            "data": "\u003ch1\u003e{{value_1}}\u003c/h1\u003e until 
\u003ch2\u003e{{value_0}}\u003c/h2\u003e\n"
-          },
-          {
-            "type": "TEXT",
-            "data": "Fail to run sql command: select max(event_ts), count(1) 
from sink_kafka\njava.io.IOException: Fail to run stream sql job\n\tat 
org.apache.zeppelin.flink.sql.AbstractStreamSqlJob.run(AbstractStreamSqlJob.java:166)\n\tat
 
org.apache.zeppelin.flink.sql.AbstractStreamSqlJob.run(AbstractStreamSqlJob.java:101)\n\tat
 
org.apache.zeppelin.flink.FlinkStreamSqlInterpreter.callInnerSelect(FlinkStreamSqlInterpreter.java:74)\n\tat
 org.apache.zeppelin.flink.FlinkSqlInterrpeter.callS [...]
-          }
-        ]
-      },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578909960516_-1812187661",
       "id": "paragraph_1578909960516_-1812187661",
@@ -159,20 +148,8 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "ERROR",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "status\tpv\nbar\t48\nbaz\t28\n"
-          },
-          {
-            "type": "TEXT",
-            "data": "Fail to run sql command: select status, count(1) as pv 
from sink_kafka group by status\njava.io.IOException: Fail to run stream sql 
job\n\tat 
org.apache.zeppelin.flink.sql.AbstractStreamSqlJob.run(AbstractStreamSqlJob.java:166)\n\tat
 
org.apache.zeppelin.flink.sql.AbstractStreamSqlJob.run(AbstractStreamSqlJob.java:101)\n\tat
 
org.apache.zeppelin.flink.FlinkStreamSqlInterpreter.callInnerSelect(FlinkStreamSqlInterpreter.java:90)\n\tat
 org.apache.zeppelin.flink.FlinkSqlIn [...]
-          }
-        ]
-      },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578910004762_-286113604",
       "id": "paragraph_1578910004762_-286113604",
@@ -264,6 +241,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578910016872_1942851900",
       "id": "paragraph_1578910016872_1942851900",
@@ -282,6 +260,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578921455738_-1465781668",
       "id": "paragraph_1578921455738_-1465781668",
@@ -291,7 +270,7 @@
   ],
   "name": "5. Streaming Data Analytics",
   "id": "2EYT7Q6R8",
-  "defaultInterpreterGroup": "spark",
+  "defaultInterpreterGroup": "flink",
   "version": "0.9.0-SNAPSHOT",
   "noteParams": {},
   "noteForms": {},
diff --git a/notebook/Flink Tutorial/7. Batch Data Analytics_2EZ9G3JJU.zpln b/notebook/Flink Tutorial/7. Batch Data Analytics_2EZ9G3JJU.zpln
index c75f400..f66031a 100644
--- a/notebook/Flink Tutorial/7. Batch Data Analytics_2EZ9G3JJU.zpln    
+++ b/notebook/Flink Tutorial/7. Batch Data Analytics_2EZ9G3JJU.zpln    
@@ -37,6 +37,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1579053946947_-1754951794",
       "id": "paragraph_1579053946947_-1754951794",
@@ -131,6 +132,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578043926895_1558885985",
       "id": "paragraph_1578043926895_1558885985",
@@ -234,6 +236,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578904010994_-1300955750",
       "id": "paragraph_1578904010994_-1300955750",
@@ -348,6 +351,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578904047823_-1212655985",
       "id": "paragraph_1578904047823_-1212655985",
@@ -391,6 +395,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905208609_-1175158458",
       "id": "paragraph_1578905208609_-1175158458",
@@ -429,6 +434,7 @@
         "msg": []
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905223714_1228156151",
       "id": "paragraph_1578905223714_1228156151",
@@ -478,6 +484,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580872625746_1221957461",
       "id": "paragraph_1580872625746_1221957461",
@@ -572,6 +579,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905241045_-678553675",
       "id": "paragraph_1578905241045_-678553675",
@@ -679,6 +687,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905276271_782994324",
       "id": "paragraph_1578905276271_782994324",
@@ -697,6 +706,7 @@
         "forms": {}
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580890230317_-355710383",
       "id": "paragraph_1580890230317_-355710383",
@@ -704,7 +714,7 @@
       "status": "READY"
     }
   ],
-  "name": "7. Batch Exploratory Data Analytics",
+  "name": "7. Batch Data Analytics",
   "id": "2EZ9G3JJU",
   "defaultInterpreterGroup": "flink",
   "version": "0.9.0-SNAPSHOT",
diff --git a/notebook/Flink Tutorial/8. Logistic Regression (Alink)_2F4HJNWVN.zpln b/notebook/Flink Tutorial/8. Logistic Regression (Alink)_2F4HJNWVN.zpln
index fe0540e..097ddf8 100644
--- a/notebook/Flink Tutorial/8. Logistic Regression (Alink)_2F4HJNWVN.zpln     
+++ b/notebook/Flink Tutorial/8. Logistic Regression (Alink)_2F4HJNWVN.zpln     
@@ -33,6 +33,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1588147625869_1181490991",
       "id": "paragraph_1588147625869_1181490991",
@@ -73,6 +74,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1583768872979_-705704388",
       "id": "20200309-234752_541772059",
@@ -108,6 +110,7 @@
         "msg": []
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1583768872982_-1596320538",
       "id": "20200309-234752_30368548",
@@ -143,6 +146,7 @@
         "msg": []
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1583768872983_-260771927",
       "id": "20200309-234752_1624274051",
@@ -178,6 +182,7 @@
         "msg": []
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1583768872983_-2044786839",
       "id": "20200309-234752_452787710",
@@ -218,6 +223,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1583768872984_389357281",
       "id": "20200309-234752_912989550",
@@ -306,6 +312,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1583768872984_-1459808170",
       "id": "20200309-234752_1664292334",
@@ -346,6 +353,7 @@
         ]
       },
       "apps": [],
+      "runtimeInfos": {},
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1583768872984_-575920971",
       "id": "20200309-234752_517801851",
