Repository: spark
Updated Branches:
  refs/heads/branch-1.0 5ef94ebd1 -> 46825cd1c
SPARK-1917: fix PySpark import of scipy.special functions

https://issues.apache.org/jira/browse/SPARK-1917

Author: Uri Laserson <[email protected]>

Closes #866 from laserson/SPARK-1917 and squashes the following commits:

d947e8c [Uri Laserson] Added test for scipy.special importing
1798bbd [Uri Laserson] SPARK-1917: fix PySpark import of scipy.special

(cherry picked from commit 5e98967b612ccf026cb1cc5ff3ac8bf72d7e836e)
Signed-off-by: Matei Zaharia <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/46825cd1
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/46825cd1
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/46825cd1

Branch: refs/heads/branch-1.0
Commit: 46825cd1c4126c52c632a7900bacc0ad0b254d95
Parents: 5ef94eb
Author: Uri Laserson <[email protected]>
Authored: Sat May 31 14:59:09 2014 -0700
Committer: Matei Zaharia <[email protected]>
Committed: Sat May 31 14:59:16 2014 -0700

----------------------------------------------------------------------
 python/pyspark/cloudpickle.py |  2 +-
 python/pyspark/tests.py       | 24 ++++++++++++++++++++++++
 2 files changed, 25 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/46825cd1/python/pyspark/cloudpickle.py
----------------------------------------------------------------------
diff --git a/python/pyspark/cloudpickle.py b/python/pyspark/cloudpickle.py
index 6a7c23a..eb5dbb8 100644
--- a/python/pyspark/cloudpickle.py
+++ b/python/pyspark/cloudpickle.py
@@ -933,7 +933,7 @@ def _change_cell_value(cell, newval):
 Note: These can never be renamed due to client compatibility issues"""
 
 def _getobject(modname, attribute):
-    mod = __import__(modname)
+    mod = __import__(modname, fromlist=[attribute])
     return mod.__dict__[attribute]
 
 def _generateImage(size, mode, str_rep):


http://git-wip-us.apache.org/repos/asf/spark/blob/46825cd1/python/pyspark/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py
index 64f2eeb..ed90915 100644
--- a/python/pyspark/tests.py
+++ b/python/pyspark/tests.py
@@ -35,6 +35,14 @@ from pyspark.context import SparkContext
 from pyspark.files import SparkFiles
 from pyspark.serializers import read_int
 
+_have_scipy = False
+try:
+    import scipy.sparse
+    _have_scipy = True
+except:
+    # No SciPy, but that's okay, we'll skip those tests
+    pass
+
 SPARK_HOME = os.environ["SPARK_HOME"]
 
 
@@ -359,5 +367,21 @@ class TestSparkSubmit(unittest.TestCase):
         self.assertIn("[2, 4, 6]", out)
 
 
[email protected](not _have_scipy, "SciPy not installed")
+class SciPyTests(PySparkTestCase):
+    """General PySpark tests that depend on scipy """
+
+    def test_serialize(self):
+        from scipy.special import gammaln
+        x = range(1, 5)
+        expected = map(gammaln, x)
+        observed = self.sc.parallelize(x).map(gammaln).collect()
+        self.assertEqual(expected, observed)
+
+
 if __name__ == "__main__":
+    if not _have_scipy:
+        print "NOTE: Skipping SciPy tests as it does not seem to be installed"
     unittest.main()
+    if not _have_scipy:
+        print "NOTE: SciPy tests were skipped as it does not seem to be installed"
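
----------------------------------------------------------------------

For context (not part of the commit itself): the one-line cloudpickle change
works because __import__("scipy.special") imports the submodule but returns
the top-level "scipy" package, whose __dict__ has no "gammaln", so the lookup
raises KeyError; passing a non-empty fromlist makes __import__ return the
"scipy.special" submodule itself. Below is a minimal sketch of that behavior,
assuming SciPy is installed; _getobject_old and _getobject_new are
illustrative names, not functions in the codebase.

    def _getobject_old(modname, attribute):
        # Pre-fix behavior: __import__("scipy.special") returns the top-level
        # "scipy" package, so looking up "gammaln" in its __dict__ fails.
        mod = __import__(modname)
        return mod.__dict__[attribute]

    def _getobject_new(modname, attribute):
        # Post-fix behavior: a non-empty fromlist makes __import__ return the
        # "scipy.special" submodule, so the attribute lookup succeeds.
        mod = __import__(modname, fromlist=[attribute])
        return mod.__dict__[attribute]

    print(_getobject_new("scipy.special", "gammaln"))  # <ufunc 'gammaln'>
    print(_getobject_old("scipy.special", "gammaln"))  # raises KeyError: 'gammaln'

cloudpickle uses this lookup to rebuild ufuncs such as scipy.special.gammaln
by reference on the workers, which is the path exercised by the new
SciPyTests.test_serialize case.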
