5 examples of 'logistic regression sklearn' in Python

Every line of these 'logistic regression sklearn' code snippets is scanned for vulnerabilities by our machine learning engine, which combs millions of open source libraries to help keep your Python code secure.

import numpy
import scipy.special

def logistic_regression(w, x):
    """Logistic regression classifier model.

    w: Weights w. (n_features,) NumPy array
    x: Data point x_i. (n_features,) NumPy array
    -> float in [0, 1]
    """
    # expit is the logistic sigmoid: 1 / (1 + exp(-z))
    return scipy.special.expit(numpy.dot(x, w.T))
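
A minimal usage sketch (the weights and data point below are made-up values for illustration):

import numpy

w = numpy.array([0.5, -0.25, 0.1])  # example weight vector, 3 features
x = numpy.array([1.0, 2.0, 3.0])    # one data point with 3 features
p = logistic_regression(w, x)       # probability of the positive class
print(p)                            # a float in [0, 1]
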
def Logistic_Regression(X, Y, alpha, theta, num_iters):
    m = len(Y)
    for x in range(num_iters):  # Python 3: range replaces Python 2's xrange
        new_theta = Gradient_Descent(X, Y, theta, m, alpha)
        theta = new_theta
        if x % 100 == 0:
            # report progress every 100 iterations
            Cost_Function(X, Y, theta, m)
            print('theta ', theta)
            print('cost is ', Cost_Function(X, Y, theta, m))
    Declare_Winner(theta)
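
This snippet leans on helpers (Gradient_Descent, Cost_Function, Declare_Winner) defined elsewhere in its source file. As a rough sketch of what the first two might look like for binary logistic regression (these implementations are assumptions, not the original code):

import numpy as np

def Sigmoid(z):
    # hypothetical helper: the logistic sigmoid
    return 1.0 / (1.0 + np.exp(-z))

def Cost_Function(X, Y, theta, m):
    # assumed binary cross-entropy cost averaged over m examples
    h = Sigmoid(X @ theta)
    return -np.mean(Y * np.log(h) + (1 - Y) * np.log(1 - h))

def Gradient_Descent(X, Y, theta, m, alpha):
    # assumed single batch gradient-descent step with learning rate alpha
    h = Sigmoid(X @ theta)
    return theta - (alpha / m) * (X.T @ (h - Y))
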
import numpy as np
from scipy.special import softmax  # stands in for the snippet's own softmax helper

def logistic_regression_3(X, y, max_iter: int = 100, learning_rate: float = 0.1):
    """Multinomial logistic regression trained by mini-batch gradient descent.

    y must be one-hot encoded, shape (n_samples, n_classes).
    """
    W = np.zeros((np.size(X, 1), np.size(y, 1)))
    for _ in range(max_iter):
        # reshuffle the data, then take the first 10 rows as a mini-batch
        index = np.random.permutation(len(y))
        X = X[index]
        y = y[index]
        W_prev = np.copy(W)
        y_pred = softmax(X[:10] @ W, axis=1)  # row-wise class probabilities
        grad = X[:10].T @ (y_pred - y[:10])
        W -= learning_rate * grad
        if np.allclose(W, W_prev):  # stop once the weights converge
            break
    return W
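
A small usage sketch with synthetic data; labels are one-hot encoded, since the function expects y with shape (n_samples, n_classes):

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 4))           # 100 samples, 4 features
labels = rng.integers(0, 3, size=100)   # 3 classes
y = np.eye(3)[labels]                   # one-hot encode to (100, 3)
W = logistic_regression_3(X, y, max_iter=200, learning_rate=0.05)
pred = np.argmax(X @ W, axis=1)         # predicted class per sample
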
# Excerpt: a method of sklearn.linear_model.LogisticRegression. In the
# scikit-learn source, `softmax` comes from sklearn.utils.extmath and
# `check_is_fitted` from sklearn.utils.validation.
def predict_proba(self, X):
    """
    Probability estimates.

    The returned estimates for all classes are ordered by the
    label of classes.

    For a multi_class problem, if multi_class is set to be "multinomial"
    the softmax function is used to find the predicted probability of
    each class.
    Else use a one-vs-rest approach, i.e., calculate the probability
    of each class assuming it to be positive using the logistic function,
    and normalize these values across all the classes.

    Parameters
    ----------
    X : array-like of shape (n_samples, n_features)
        Vector to be scored, where `n_samples` is the number of samples and
        `n_features` is the number of features.

    Returns
    -------
    T : array-like of shape (n_samples, n_classes)
        Returns the probability of the sample for each class in the model,
        where classes are ordered as they are in ``self.classes_``.
    """
    check_is_fitted(self)

    ovr = (self.multi_class in ["ovr", "warn"] or
           (self.multi_class == 'auto' and (self.classes_.size <= 2 or
                                            self.solver == 'liblinear')))
    if ovr:
        return super()._predict_proba_lr(X)
    else:
        decision = self.decision_function(X)
        if decision.ndim == 1:
            # Workaround for multi_class="multinomial" and binary outcomes
            # which requires softmax prediction with only a 1D decision.
            decision_2d = np.c_[-decision, decision]
        else:
            decision_2d = decision
        return softmax(decision_2d, copy=False)
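
In practice this method is reached through scikit-learn's public estimator API rather than called directly; a minimal sketch with synthetic data:

import numpy as np
from sklearn.linear_model import LogisticRegression

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 4))
y = rng.integers(0, 2, size=100)

clf = LogisticRegression().fit(X, y)
proba = clf.predict_proba(X)  # shape (100, 2), columns ordered by clf.classes_
print(proba[:3])
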
def _regress(self, x, y, alpha):
    """Run one ridge regression and count the call."""
    kw = self.ridge_kw or {}
    # sklearn.linear_model.ridge_regression solves the L2-penalized
    # least-squares problem and returns the coefficient array
    coef = ridge_regression(x, y, alpha, **kw)
    self.iters += 1
    return coef
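
This helper wraps scikit-learn's ridge_regression function, which can also be called on its own; a minimal sketch with synthetic data:

import numpy as np
from sklearn.linear_model import ridge_regression

rng = np.random.default_rng(0)
X = rng.normal(size=(50, 3))
true_coef = np.array([1.5, -2.0, 0.5])
y = X @ true_coef + 0.1 * rng.normal(size=50)

coef = ridge_regression(X, y, alpha=1.0)  # alpha is the L2 penalty strength
print(coef)  # approximately recovers true_coef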
