000a22e7ead9a184a325b3913e814f1f80636a6f,mlxtend/regressor/stacking_cv_regression.py,StackingCVRegressor,fit,#StackingCVRegressor#,119

Before Change


        
        if self.refit:
            self.regr_ = [clone(clf) for clf in self.regressors]
            self.meta_regr_ = clone(self.meta_regressor)
        else:
            self.regr_ = self.regressors
            self.meta_regr_ = self.meta_regressor

        kfold = check_cv(self.cv, y)
        if isinstance(self.cv, int):
            // Override shuffle parameter in case of self-generated
            // cross-validation strategy
            kfold.shuffle = self.shuffle

        meta_features = np.zeros((X.shape[0], len(self.regressors)))

        //
        // The outer loop iterates over the base-regressors. Each regressor
        // is trained cv times and makes predictions, after which we train
        // the meta-regressor on their combined results.
        //
        for i, regr in enumerate(self.regressors):
            //
            // In the inner loop, each model is trained cv times on the
            // training-part of this fold of data; and the holdout-part of data
            // is used for predictions. This is repeated cv times, so in
            // the end we have predictions for each data point.
            //
            // Advantage of this complex approach is that data points we're
            // predicting have not been trained on by the algorithm, so it's
            // less susceptible to overfitting.
            //
            for train_idx, holdout_idx in kfold.split(X, y, groups):
                instance = clone(regr)
                instance.fit(X[train_idx], y[train_idx])
                y_pred = instance.predict(X[holdout_idx])
                meta_features[holdout_idx, i] = y_pred

        // save meta-features for training data
        if self.store_train_meta_features:
            self.train_meta_features_ = meta_features

        // Train meta-model on the out-of-fold predictions
        if not self.use_features_in_secondary:
            self.meta_regr_.fit(meta_features, y)
        elif sparse.issparse(X):
            self.meta_regr_.fit(sparse.hstack((X, meta_features)), y)
        else:
            self.meta_regr_.fit(np.hstack((X, meta_features)), y)

After Change


            //
            for train_idx, holdout_idx in kfold.split(X, y, groups):
                instance = clone(regr)
                if sample_weight is None:
                    instance.fit(X[train_idx], y[train_idx])
                else:
                    instance.fit(X[train_idx], y[train_idx],
                                 sample_weight=sample_weight[train_idx])
                y_pred = instance.predict(X[holdout_idx])
                meta_features[holdout_idx, i] = y_pred

        // save meta-features for training data
Italian Trulli
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 11

Instances


Project Name: rasbt/mlxtend
Commit Name: 000a22e7ead9a184a325b3913e814f1f80636a6f
Time: 2018-09-23
Author: kmori05@gmail.com
File Name: mlxtend/regressor/stacking_cv_regression.py
Class Name: StackingCVRegressor
Method Name: fit


Project Name: rasbt/mlxtend
Commit Name: 000a22e7ead9a184a325b3913e814f1f80636a6f
Time: 2018-09-23
Author: kmori05@gmail.com
File Name: mlxtend/regressor/stacking_cv_regression.py
Class Name: StackingCVRegressor
Method Name: fit


Project Name: rasbt/mlxtend
Commit Name: 4d09f8fd2b7db876f34f208f7fc8131e0a91ccb0
Time: 2018-09-23
Author: kmori05@gmail.com
File Name: mlxtend/classifier/stacking_classification.py
Class Name: StackingClassifier
Method Name: fit


Project Name: rasbt/mlxtend
Commit Name: b21be33c042c1a9fd415468653941e093c4a6e05
Time: 2018-09-23
Author: kmori05@gmail.com
File Name: mlxtend/classifier/stacking_cv_classification.py
Class Name: StackingCVClassifier
Method Name: fit