Repository: spark
Updated Branches:
  refs/heads/branch-1.5 321cb99ca -> 16414dae0


[SPARK-9812] [STREAMING] Fix Python 3 compatibility issue in PySpark Streaming 
and some docs

This PR includes the following fixes:
1. Use `range` instead of `xrange` in `queue_stream.py` to support Python 3.
2. Fix the issue that `utf8_decoder` will return `bytes` rather than `str` when 
receiving an empty `bytes` in Python 3.
3. Fix the commands in docs so that the user can copy them directly to the 
command line. The previous commands were broken in the middle of a path, so when 
copied to the command line, the path would be split into two parts by the extra 
spaces, forcing the user to fix it manually.

Author: zsxwing <[email protected]>

Closes #8315 from zsxwing/SPARK-9812.

(cherry picked from commit 1f29d502e7ecd6faa185d70dc714f9ea3922fb6d)
Signed-off-by: Tathagata Das <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/16414dae
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/16414dae
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/16414dae

Branch: refs/heads/branch-1.5
Commit: 16414dae03b427506b2a1ebb7d405e6fa3bdad17
Parents: 321cb99
Author: zsxwing <[email protected]>
Authored: Wed Aug 19 18:36:01 2015 -0700
Committer: Tathagata Das <[email protected]>
Committed: Wed Aug 19 18:36:10 2015 -0700

----------------------------------------------------------------------
 examples/src/main/python/streaming/direct_kafka_wordcount.py | 6 +++---
 examples/src/main/python/streaming/flume_wordcount.py        | 5 +++--
 examples/src/main/python/streaming/kafka_wordcount.py        | 5 +++--
 examples/src/main/python/streaming/mqtt_wordcount.py         | 5 +++--
 examples/src/main/python/streaming/queue_stream.py           | 4 ++--
 python/pyspark/streaming/flume.py                            | 4 +++-
 python/pyspark/streaming/kafka.py                            | 4 +++-
 python/pyspark/streaming/kinesis.py                          | 4 +++-
 8 files changed, 23 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/16414dae/examples/src/main/python/streaming/direct_kafka_wordcount.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/direct_kafka_wordcount.py 
b/examples/src/main/python/streaming/direct_kafka_wordcount.py
index 6ef188a..ea20678 100644
--- a/examples/src/main/python/streaming/direct_kafka_wordcount.py
+++ b/examples/src/main/python/streaming/direct_kafka_wordcount.py
@@ -23,8 +23,8 @@
  http://kafka.apache.org/documentation.html#quickstart
 
  and then run the example
-    `$ bin/spark-submit --jars external/kafka-assembly/target/scala-*/\
-      spark-streaming-kafka-assembly-*.jar \
+    `$ bin/spark-submit --jars \
+      
external/kafka-assembly/target/scala-*/spark-streaming-kafka-assembly-*.jar \
       examples/src/main/python/streaming/direct_kafka_wordcount.py \
       localhost:9092 test`
 """
@@ -37,7 +37,7 @@ from pyspark.streaming.kafka import KafkaUtils
 
 if __name__ == "__main__":
     if len(sys.argv) != 3:
-        print >> sys.stderr, "Usage: direct_kafka_wordcount.py <broker_list> 
<topic>"
+        print("Usage: direct_kafka_wordcount.py <broker_list> <topic>", 
file=sys.stderr)
         exit(-1)
 
     sc = SparkContext(appName="PythonStreamingDirectKafkaWordCount")

http://git-wip-us.apache.org/repos/asf/spark/blob/16414dae/examples/src/main/python/streaming/flume_wordcount.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/flume_wordcount.py 
b/examples/src/main/python/streaming/flume_wordcount.py
index 091b64d..d75bc6d 100644
--- a/examples/src/main/python/streaming/flume_wordcount.py
+++ b/examples/src/main/python/streaming/flume_wordcount.py
@@ -23,8 +23,9 @@
  https://flume.apache.org/documentation.html
 
  and then run the example
-    `$ bin/spark-submit --jars external/flume-assembly/target/scala-*/\
-      spark-streaming-flume-assembly-*.jar 
examples/src/main/python/streaming/flume_wordcount.py \
+    `$ bin/spark-submit --jars \
+      
external/flume-assembly/target/scala-*/spark-streaming-flume-assembly-*.jar \
+      examples/src/main/python/streaming/flume_wordcount.py \
       localhost 12345
 """
 from __future__ import print_function

http://git-wip-us.apache.org/repos/asf/spark/blob/16414dae/examples/src/main/python/streaming/kafka_wordcount.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/kafka_wordcount.py 
b/examples/src/main/python/streaming/kafka_wordcount.py
index b178e78..8d697f6 100644
--- a/examples/src/main/python/streaming/kafka_wordcount.py
+++ b/examples/src/main/python/streaming/kafka_wordcount.py
@@ -23,8 +23,9 @@
  http://kafka.apache.org/documentation.html#quickstart
 
  and then run the example
-    `$ bin/spark-submit --jars external/kafka-assembly/target/scala-*/\
-      spark-streaming-kafka-assembly-*.jar 
examples/src/main/python/streaming/kafka_wordcount.py \
+    `$ bin/spark-submit --jars \
+      
external/kafka-assembly/target/scala-*/spark-streaming-kafka-assembly-*.jar \
+      examples/src/main/python/streaming/kafka_wordcount.py \
       localhost:2181 test`
 """
 from __future__ import print_function

http://git-wip-us.apache.org/repos/asf/spark/blob/16414dae/examples/src/main/python/streaming/mqtt_wordcount.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/mqtt_wordcount.py 
b/examples/src/main/python/streaming/mqtt_wordcount.py
index 617ce5e..abf9c0e 100644
--- a/examples/src/main/python/streaming/mqtt_wordcount.py
+++ b/examples/src/main/python/streaming/mqtt_wordcount.py
@@ -26,8 +26,9 @@
  http://www.eclipse.org/paho/#getting-started
 
  and then run the example
-    `$ bin/spark-submit --jars external/mqtt-assembly/target/scala-*/\
-      spark-streaming-mqtt-assembly-*.jar 
examples/src/main/python/streaming/mqtt_wordcount.py \
+    `$ bin/spark-submit --jars \
+      
external/mqtt-assembly/target/scala-*/spark-streaming-mqtt-assembly-*.jar \
+      examples/src/main/python/streaming/mqtt_wordcount.py \
       tcp://localhost:1883 foo`
 """
 

http://git-wip-us.apache.org/repos/asf/spark/blob/16414dae/examples/src/main/python/streaming/queue_stream.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/queue_stream.py 
b/examples/src/main/python/streaming/queue_stream.py
index dcd6a0f..b380890 100644
--- a/examples/src/main/python/streaming/queue_stream.py
+++ b/examples/src/main/python/streaming/queue_stream.py
@@ -36,8 +36,8 @@ if __name__ == "__main__":
     # Create the queue through which RDDs can be pushed to
     # a QueueInputDStream
     rddQueue = []
-    for i in xrange(5):
-        rddQueue += [ssc.sparkContext.parallelize([j for j in xrange(1, 
1001)], 10)]
+    for i in range(5):
+        rddQueue += [ssc.sparkContext.parallelize([j for j in range(1, 1001)], 
10)]
 
     # Create the QueueInputDStream and use it do some processing
     inputStream = ssc.queueStream(rddQueue)

http://git-wip-us.apache.org/repos/asf/spark/blob/16414dae/python/pyspark/streaming/flume.py
----------------------------------------------------------------------
diff --git a/python/pyspark/streaming/flume.py 
b/python/pyspark/streaming/flume.py
index cbb573f..c0cdc50 100644
--- a/python/pyspark/streaming/flume.py
+++ b/python/pyspark/streaming/flume.py
@@ -31,7 +31,9 @@ __all__ = ['FlumeUtils', 'utf8_decoder']
 
 def utf8_decoder(s):
     """ Decode the unicode as UTF-8 """
-    return s and s.decode('utf-8')
+    if s is None:
+        return None
+    return s.decode('utf-8')
 
 
 class FlumeUtils(object):

http://git-wip-us.apache.org/repos/asf/spark/blob/16414dae/python/pyspark/streaming/kafka.py
----------------------------------------------------------------------
diff --git a/python/pyspark/streaming/kafka.py 
b/python/pyspark/streaming/kafka.py
index dc5b7fd..8a814c6 100644
--- a/python/pyspark/streaming/kafka.py
+++ b/python/pyspark/streaming/kafka.py
@@ -29,7 +29,9 @@ __all__ = ['Broker', 'KafkaUtils', 'OffsetRange', 
'TopicAndPartition', 'utf8_dec
 
 def utf8_decoder(s):
     """ Decode the unicode as UTF-8 """
-    return s and s.decode('utf-8')
+    if s is None:
+        return None
+    return s.decode('utf-8')
 
 
 class KafkaUtils(object):

http://git-wip-us.apache.org/repos/asf/spark/blob/16414dae/python/pyspark/streaming/kinesis.py
----------------------------------------------------------------------
diff --git a/python/pyspark/streaming/kinesis.py 
b/python/pyspark/streaming/kinesis.py
index bcfe270..34be588 100644
--- a/python/pyspark/streaming/kinesis.py
+++ b/python/pyspark/streaming/kinesis.py
@@ -26,7 +26,9 @@ __all__ = ['KinesisUtils', 'InitialPositionInStream', 
'utf8_decoder']
 
 def utf8_decoder(s):
     """ Decode the unicode as UTF-8 """
-    return s and s.decode('utf-8')
+    if s is None:
+        return None
+    return s.decode('utf-8')
 
 
 class KinesisUtils(object):


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to