#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import sys
if sys.version >= '3':
    long = int
    unicode = str

import py4j.protocol
from py4j.protocol import Py4JJavaError
from py4j.java_gateway import JavaObject
from py4j.java_collections import JavaArray, JavaList

from pyspark import RDD, SparkContext
from pyspark.serializers import PickleSerializer, AutoBatchedSerializer
from pyspark.sql import DataFrame, SQLContext

# Hack to support float('inf') and float('nan') in Py4J: map Python's
# string forms onto the equivalents that Java's Double parser understands.
_old_smart_decode = py4j.protocol.smart_decode

_float_str_mapping = {
    'nan': 'NaN',
    'inf': 'Infinity',
    '-inf': '-Infinity',
}


def _new_smart_decode(obj):
    if isinstance(obj, float):
        s = str(obj)
        return _float_str_mapping.get(s, s)
    return _old_smart_decode(obj)

py4j.protocol.smart_decode = _new_smart_decode
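
# Illustration (a sketch of the patched behaviour, not executed at import
# time): special float values now decode to strings that Java can parse
# back into Double values, while ordinary floats are unaffected:
#
#   _new_smart_decode(float('inf'))   # 'Infinity'
#   _new_smart_decode(float('-inf'))  # '-Infinity'
#   _new_smart_decode(float('nan'))   # 'NaN'
#   _new_smart_decode(1.5)            # '1.5'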


_picklable_classes = [
    'LinkedList',
    'SparseVector',
    'DenseVector',
    'DenseMatrix',
    'Rating',
    'LabeledPoint',
]


# this will call the MLlib version of pythonToJava()
def _to_java_object_rdd(rdd):
    """ Return a JavaRDD of Object by unpickling

    It will convert each Python object into a Java object by Pyrolite,
    whether the RDD is serialized in batch or not.
    """
    rdd = rdd._reserialize(AutoBatchedSerializer(PickleSerializer()))
    return rdd.ctx._jvm.org.apache.spark.mllib.api.python.SerDe.pythonToJava(rdd._jrdd, True)
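
# Usage sketch (illustrative; assumes a running SparkContext):
#
#   sc = SparkContext.getOrCreate()
#   jrdd = _to_java_object_rdd(sc.parallelize([1, 2, 3]))
#   jrdd.count()  # 3 -- the elements now live as Java objects on the JVM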


def _py2java(sc, obj):
    """ Convert a Python object into a Java object """
    if isinstance(obj, RDD):
        obj = _to_java_object_rdd(obj)
    elif isinstance(obj, DataFrame):
        obj = obj._jdf
    elif isinstance(obj, SparkContext):
        obj = obj._jsc
    elif isinstance(obj, list):
        obj = [_py2java(sc, x) for x in obj]
    elif isinstance(obj, JavaObject):
        pass  # already a Java object
    elif isinstance(obj, (int, long, float, bool, bytes, unicode)):
        pass  # primitives are handled by Py4J directly
    else:
        # fall back to pickling on the Python side and unpickling in the JVM
        data = bytearray(PickleSerializer().dumps(obj))
        obj = sc._jvm.org.apache.spark.mllib.api.python.SerDe.loads(data)
    return obj
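
# Conversion sketch (illustrative; assumes a running SparkContext).
# Primitives and JavaObjects pass through, lists are converted element by
# element, and anything unrecognised is pickled and rebuilt in the JVM:
#
#   sc = SparkContext.getOrCreate()
#   _py2java(sc, 42)                       # returned as-is
#   _py2java(sc, sc.parallelize([1, 2]))   # becomes a JavaRDD of Object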


def _java2py(sc, r, encoding="bytes"):
    """ Convert a Java object into a Python object """
    if isinstance(r, JavaObject):
        clsName = r.getClass().getSimpleName()
        # convert RDD into JavaRDD
        if clsName != 'JavaRDD' and clsName.endswith("RDD"):
            r = r.toJavaRDD()
            clsName = 'JavaRDD'

        if clsName == 'JavaRDD':
            jrdd = sc._jvm.org.apache.spark.mllib.api.python.SerDe.javaToPython(r)
            return RDD(jrdd, sc)

        if clsName == 'Dataset':
            return DataFrame(r, SQLContext.getOrCreate(sc))

        if clsName in _picklable_classes:
            r = sc._jvm.org.apache.spark.mllib.api.python.SerDe.dumps(r)
        elif isinstance(r, (JavaArray, JavaList)):
            try:
                r = sc._jvm.org.apache.spark.mllib.api.python.SerDe.dumps(r)
            except Py4JJavaError:
                pass  # not picklable

    if isinstance(r, (bytearray, bytes)):
        r = PickleSerializer().loads(bytes(r), encoding=encoding)
    return r
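
# Decoding sketch (illustrative; `java_result` is a hypothetical
# placeholder for any value returned through Py4J): a JVM-side *RDD comes
# back as a pyspark RDD, a Dataset as a DataFrame, and pickled byte arrays
# are unpickled; anything else is returned unchanged:
#
#   py_result = _java2py(sc, java_result)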


def callJavaFunc(sc, func, *args):
    """ Call a Java function, converting the arguments and the result """
    args = [_py2java(sc, a) for a in args]
    return _java2py(sc, func(*args))
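
# Example (illustrative; `java_func` is a hypothetical placeholder for any
# Py4J-bound callable, e.g. a method of a JavaObject):
#
#   sc = SparkContext.getOrCreate()
#   result = callJavaFunc(sc, java_func, arg1, arg2)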


def callMLlibFunc(name, *args):
    """ Call an API method on the JVM-side PythonMLLibAPI """
    sc = SparkContext.getOrCreate()
    api = getattr(sc._jvm.PythonMLLibAPI(), name)
    return callJavaFunc(sc, api, *args)
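
# Example (illustrative; patterned on how the pyspark.mllib wrappers use
# this helper -- the name passed must be a method of the JVM-side
# PythonMLLibAPI, and `_convert_to_vector` would come from
# pyspark.mllib.linalg):
#
#   cStats = callMLlibFunc("colStats", rdd.map(_convert_to_vector))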


class JavaModelWrapper(object):
    """
    Wrapper for a model object that lives in the JVM
    """
    def __init__(self, java_model):
        self._sc = SparkContext.getOrCreate()
        self._java_model = java_model

    def __del__(self):
        # detach so the JVM-side model can be garbage collected
        self._sc._gateway.detach(self._java_model)

    def call(self, name, *a):
        """Call a method of the underlying Java model"""
        return callJavaFunc(self._sc, getattr(self._java_model, name), *a)
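
# Usage sketch (illustrative): concrete models subclass this wrapper and
# forward work to the JVM object. `MyModel` is a hypothetical example;
# `_convert_to_vector` would come from pyspark.mllib.linalg:
#
#   class MyModel(JavaModelWrapper):
#       def predict(self, x):
#           return self.call("predict", _convert_to_vector(x))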


def inherit_doc(cls):
    """
    A decorator that makes a class inherit documentation from its parents.
    """
    for name, func in vars(cls).items():
        # only inherit docstring for public functions
        if name.startswith("_"):
            continue
        if not func.__doc__:
            for parent in cls.__bases__:
                parent_func = getattr(parent, name, None)
                if parent_func and getattr(parent_func, "__doc__", None):
                    func.__doc__ = parent_func.__doc__
                    break
    return cls
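
# Example (illustrative): a public method that lacks a docstring picks up
# the docstring of the same-named method on a parent class:
#
#   class Base(object):
#       def fit(self):
#           """Fit the model."""
#
#   @inherit_doc
#   class Child(Base):
#       def fit(self):
#           pass
#
#   Child.fit.__doc__  # 'Fit the model.'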