"""Linear-model regression learners wrapping scikit-learn estimators."""

import numpy as np

import sklearn.linear_model as skl_linear_model
import sklearn.preprocessing as skl_preprocessing

from Orange.data import Variable, ContinuousVariable
from Orange.preprocess import Normalize
from Orange.preprocess.score import LearnerScorer
from Orange.regression import Learner, Model, SklLearner, SklModel


__all__ = ["LinearRegressionLearner", "RidgeRegressionLearner",
           "LassoRegressionLearner", "SGDRegressionLearner",
           "ElasticNetLearner", "ElasticNetCVLearner",
           "PolynomialLearner"]


class _FeatureScorerMixin(LearnerScorer):
    """Score features by the magnitude of their fitted coefficients."""
    feature_type = Variable
    class_type = ContinuousVariable

    def score(self, data):
        # Normalize first so coefficient magnitudes are comparable
        # across features with different scales.
        data = Normalize()(data)
        model = self(data)
        return np.abs(model.coefficients), model.domain.attributes


class LinearRegressionLearner(SklLearner, _FeatureScorerMixin):
    """Ordinary least-squares regression (wraps sklearn's LinearRegression)."""
    __wraps__ = skl_linear_model.LinearRegression

    # Arguments are needed for signatures, pylint: disable=unused-argument
    def __init__(self, preprocessors=None, fit_intercept=True):
        super().__init__(preprocessors=preprocessors)
        # vars() captures the keyword arguments; SklLearner forwards the
        # applicable ones to the wrapped sklearn estimator's constructor.
        self.params = vars()

    def fit(self, X, Y, W=None):
        model = super().fit(X, Y, W)
        # Re-wrap so callers get `intercept`/`coefficients` properties.
        return LinearModel(model.skl_model)


class RidgeRegressionLearner(LinearRegressionLearner):
    """L2-regularized linear regression (wraps sklearn's Ridge)."""
    __wraps__ = skl_linear_model.Ridge

    # Arguments are needed for signatures, pylint: disable=unused-argument
    def __init__(self, alpha=1.0, fit_intercept=True,
                 normalize=False, copy_X=True, max_iter=None,
                 tol=0.001, solver='auto', preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class LassoRegressionLearner(LinearRegressionLearner):
    """L1-regularized linear regression (wraps sklearn's Lasso)."""
    __wraps__ = skl_linear_model.Lasso

    # Arguments are needed for signatures, pylint: disable=unused-argument
    def __init__(self, alpha=1.0, fit_intercept=True, normalize=False,
                 precompute=False, copy_X=True, max_iter=1000,
                 tol=0.0001, warm_start=False, positive=False,
                 preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class ElasticNetLearner(LinearRegressionLearner):
    """Combined L1/L2-regularized regression (wraps sklearn's ElasticNet)."""
    __wraps__ = skl_linear_model.ElasticNet

    # Arguments are needed for signatures, pylint: disable=unused-argument
    def __init__(self, alpha=1.0, l1_ratio=0.5, fit_intercept=True,
                 normalize=False, precompute=False, max_iter=1000,
                 copy_X=True, tol=0.0001, warm_start=False, positive=False,
                 preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class ElasticNetCVLearner(LinearRegressionLearner):
    """ElasticNet with built-in cross-validated choice of regularization."""
    __wraps__ = skl_linear_model.ElasticNetCV

    # Arguments are needed for signatures, pylint: disable=unused-argument
    def __init__(self, l1_ratio=0.5, eps=0.001, n_alphas=100, alphas=None,
                 fit_intercept=True, normalize=False, precompute='auto',
                 max_iter=1000, tol=0.0001, cv=5, copy_X=True,
                 verbose=0, n_jobs=1, positive=False, preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class SGDRegressionLearner(LinearRegressionLearner):
    """Linear regression fitted by stochastic gradient descent."""
    __wraps__ = skl_linear_model.SGDRegressor
    # SGD is scale-sensitive, so normalization is always applied.
    preprocessors = SklLearner.preprocessors + [Normalize()]

    # Arguments are needed for signatures, pylint: disable=unused-argument
    def __init__(self, loss='squared_loss', penalty='l2', alpha=0.0001,
                 l1_ratio=0.15, fit_intercept=True, max_iter=5, tol=1e-3,
                 shuffle=True, epsilon=0.1, n_jobs=1, random_state=None,
                 learning_rate='invscaling', eta0=0.01, power_t=0.25,
                 class_weight=None, warm_start=False, average=False,
                 preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class PolynomialLearner(Learner):
    """Generate polynomial features and learn a prediction model

    Parameters
    ----------
    learner : LearnerRegression
        learner to be fitted on the transformed features
    degree : int
        degree of used polynomial
    preprocessors : List[Preprocessor]
        preprocessors to be applied on the data before learning
    """
    name = 'poly learner'
    preprocessors = SklLearner.preprocessors

    def __init__(self, learner=LinearRegressionLearner(), degree=2,
                 preprocessors=None, include_bias=True):
        super().__init__(preprocessors=preprocessors)
        self.degree = degree
        self.learner = learner
        self.include_bias = include_bias

    def fit(self, X, Y, W=None):
        """Expand X with polynomial features, then fit the wrapped learner.

        Returns a PolynomialModel holding both the fitted model and the
        fitted feature transformer, so prediction applies the identical
        expansion.
        """
        polyfeatures = skl_preprocessing.PolynomialFeatures(
            self.degree, include_bias=self.include_bias)
        X = polyfeatures.fit_transform(X)
        clf = self.learner
        if W is None or not self.supports_weights:
            model = clf.fit(X, Y, None)
        else:
            # Orange learners take weights positionally as W; passing
            # sklearn's `sample_weight` keyword here raises TypeError.
            model = clf.fit(X, Y, W)
        return PolynomialModel(model, polyfeatures)


class LinearModel(SklModel):
    @property
    def intercept(self):
        return self.skl_model.intercept_

    @property
    def coefficients(self):
        return self.skl_model.coef_

    def __str__(self):
        return 'LinearModel {}'.format(self.skl_model)


class PolynomialModel(Model):
    """Model that expands inputs with the training-time polynomial features."""

    def __init__(self, model, polyfeatures):
        super().__init__()
        self.model = model
        self.polyfeatures = polyfeatures

    def predict(self, X):
        # Use transform, not fit_transform: the expansion was fitted during
        # training and must not be re-fitted on prediction data.
        X = self.polyfeatures.transform(X)
        return self.model.predict(X)

    def __str__(self):
        return 'PolynomialModel {}'.format(self.model)


PolynomialLearner.__returns__ = PolynomialModel