Every line of the 'feature importance sklearn' code snippets below is scanned for vulnerabilities by our machine learning engine, which combs millions of open-source libraries to help ensure your Python code is secure.
def feature_importances(clf, X, y):
    """Return per-feature importance scores for ``clf`` on ``(X, y)``.

    Uses the estimator's own ``feature_importances_`` attribute when the
    (already fitted) estimator provides one; otherwise falls back to
    univariate ANOVA F-scores via ``SelectKBest(f_classif, k='all')``.

    Parameters
    ----------
    clf : fitted estimator (e.g. a tree ensemble) — may or may not
        expose ``feature_importances_``.
    X, y : training data passed to the fallback scorer only.

    Returns
    -------
    Array of one score per feature.
    """
    try:
        # Tree/ensemble estimators expose this attribute after fitting.
        clfimp = clf.feature_importances_
    except AttributeError:
        # Was a bare `except:`, which swallowed every error (including
        # KeyboardInterrupt); only a missing attribute should trigger
        # the fallback.  Import lazily so sklearn is needed only here.
        from sklearn.feature_selection import SelectKBest, f_classif

        # Score each feature independently with the ANOVA F-test.
        sk = SelectKBest(f_classif, k='all')
        sk_fit = sk.fit(X, y)
        clfimp = sk_fit.scores_

    return clfimp
def get_features_importance(estimator):
    """Return the fitted coefficients of *estimator*.

    Reads ``estimator.coef_`` — so the estimator must already be
    fitted — echoes the value to stdout, and returns it unchanged.
    """
    coefficients = estimator.coef_
    print(coefficients)
    return coefficients
def feature_importance(self, df, model, convert=False):
    """Fit *model* on *df* and plot its feature importances as a bar chart.

    Parameters
    ----------
    df : dataset split into features/target via ``self.split_x_y``.
    model : ensemble estimator — must expose ``feature_importances_``
        and ``estimators_`` after fitting (e.g. a random forest).
    convert : when True, one-hot encode the categorical columns of X
        before fitting.

    Side effects: fits ``model`` in place, prints a header line, and
    displays a matplotlib figure.  Returns None.
    """
    X, y = self.split_x_y(df)

    if convert:
        # NOTE(review): assumes self.categoricals(X) returns the set of
        # columns to encode — confirm against the class definition.
        X = self.one_hot_encode(X, self.categoricals(X))
    model.fit(X, y)
    importances = model.feature_importances_
    # Spread of each feature's importance across the ensemble's
    # individual trees; used as error bars (xerr) below.
    std = np.std([tree.feature_importances_ for tree in model.estimators_], axis=0)
    # Ascending sort, so the most important feature ends up at the top
    # of the horizontal bar chart.
    indices = np.argsort(importances)

    print("Feature ranking:")
    plt.figure(figsize=(16, 14))
    plt.title("Feature importances")
    plt.barh(
        range(X.shape[1]),
        importances[indices],
        color="r",
        xerr=std[indices],
        align="center",
    )
    # Label each bar with its column name, in the sorted order.
    # list(X) presumably yields column names — i.e. X is a DataFrame.
    plt.yticks(range(X.shape[1]), [list(X)[i] for i in indices])
    plt.ylim([-1, X.shape[1]])
    plt.show()