# 10 examples of 'linear regression in machine learning' in Python

Every line of the 'linear regression in machine learning' code snippets below is scanned for vulnerabilities by our machine learning engine, which combs millions of open-source libraries to help ensure your Python code is secure.

## All examples are scanned by Snyk Code

By copying the Snyk Code Snippets you agree to the applicable terms of service.
def Logistic_Regression(X, Y, alpha, theta, num_iters):
    """Run gradient descent for logistic regression.

    X: feature matrix; Y: labels; alpha: learning rate;
    theta: initial parameter vector; num_iters: number of iterations.
    Relies on the external helpers Gradient_Descent, Cost_Function and
    Declare_Winner defined elsewhere in the original project.
    """
    m = len(Y)
    # Fixed: original used Python 2 `xrange` and `print` statements,
    # which are syntax errors under Python 3.
    for i in range(num_iters):
        # One full gradient-descent update of theta.
        theta = Gradient_Descent(X, Y, theta, m, alpha)
        # Report progress every 100 iterations.
        if i % 100 == 0:
            # NOTE(review): this bare call discards its return value; kept
            # because Cost_Function may print/log as a side effect — confirm.
            Cost_Function(X, Y, theta, m)
            print('theta ', theta)
            print('cost is ', Cost_Function(X, Y, theta, m))
    Declare_Winner(theta)
def Linear_regression():
    """Fit y = m*x + b to data.csv by gradient descent and plot the fit.

    Relies on the external helpers compute_error, optimizer and
    plot_data defined elsewhere in the original project.
    """
    # Load training data: rows of comma-separated (x, y) pairs.
    data = np.loadtxt('data.csv', delimiter=',')

    # Hyperparameters: step size for the gradient update, initial
    # intercept (b) and slope (m), and number of iterations.
    learning_rate = 0.001
    initial_b = 0.0
    initial_m = 0.0
    num_iter = 1000

    # Report the starting parameters and their error.
    # Fixed: original used Python 2 `print` statements and a typo'd
    # message ("intial_m").
    print('initial variables:\n initial_b = {0}\n initial_m = {1}\n'
          ' error of begin = {2} \n'
          .format(initial_b, initial_m,
                  compute_error(initial_b, initial_m, data)))

    # Optimize b and m.
    [b, m] = optimizer(data, initial_b, initial_m, learning_rate, num_iter)

    # Report the fitted parameters and final error.
    # Fixed: original message was typo'd ("parmaters") and passed an
    # unused num_iter as the first format argument.
    print('final formula parameters:\n b = {0}\n m = {1}\n'
          ' error of end = {2} \n'
          .format(b, m, compute_error(b, m, data)))

    # Plot the fitted line against the data.
    plot_data(data, b, m)
def logistic_regression(w, x):
    """Logistic regression classifier model.

    w: Weights w. (n_features,) NumPy array
    x: Data point x_i. (n_features,) NumPy array
    -> float in [0, 1]
    """
    # Linear score w . x, squashed through the logistic sigmoid.
    score = numpy.dot(x, w.T)
    return scipy.special.expit(score)
def logistic_regression(opytimizer):
    """Train a single-layer softmax classifier and return 1 - accuracy.

    opytimizer[0][0] supplies the learning rate and opytimizer[1][0]
    the SGD momentum (order matters because of their bounds). Uses the
    globals X_train/Y_train/X_val/Y_val and the helpers fit/predict
    defined elsewhere in the original project.
    """
    # Fixed architecture: one bias-free linear layer (64 -> 10).
    n_features = 64
    n_classes = 10
    model = torch.nn.Sequential()
    model.add_module("linear", torch.nn.Linear(
        n_features, n_classes, bias=False))

    # Training configuration.
    batch_size = 100
    epochs = 100

    # Hyperparameters gathered from Opytimizer in a fixed order.
    learning_rate = opytimizer[0][0]
    momentum = opytimizer[1][0]

    loss = torch.nn.CrossEntropyLoss(reduction='mean')
    opt = optim.SGD(model.parameters(), lr=learning_rate, momentum=momentum)

    for _ in range(epochs):
        # Accumulated loss over this epoch's mini-batches.
        cost = 0.0
        n_batches = len(X_train) // batch_size
        for b in range(n_batches):
            lo = b * batch_size
            hi = lo + batch_size
            # Cost is the loss accumulated from fitting this batch.
            cost += fit(model, loss, opt, X_train[lo:hi], Y_train[lo:hi])

    # Evaluate on the held-out set; the optimizer minimizes 1 - accuracy.
    preds = predict(model, X_val)
    return 1 - np.mean(preds == Y_val)
def logistic_regression_3(X, y, max_iter: int = 100,
                          learning_rate: float = 0.1,
                          batch_size: int = 10):
    """Fit multinomial logistic regression by mini-batch SGD.

    X: (N, n_features) design matrix.
    y: (N, n_classes) target matrix (presumably one-hot — TODO confirm).
    max_iter: maximum number of update steps.
    learning_rate: SGD step size.
    batch_size: samples per mini-batch (generalized from the original
        hard-coded 10; default preserves the old behavior).
    Returns the (n_features, n_classes) weight matrix W.
    Depends on an external `softmax` helper.
    """
    W = np.zeros((np.size(X, 1), np.size(y, 1)))
    for _ in range(max_iter):
        # Reshuffle the whole data set, then update on the first
        # `batch_size` rows (a random mini-batch).
        N = len(y)
        index = np.random.permutation(N)
        X = X[index]
        y = y[index]
        W_prev = np.copy(W)
        # Original sliced X[0:10][:]; the trailing [:] was a no-op.
        batch_X = X[:batch_size]
        batch_y = y[:batch_size]
        y_pred = softmax(batch_X @ W)
        grad = batch_X.T @ (y_pred - batch_y)
        W -= learning_rate * grad
        # Stop early once the weights no longer change meaningfully.
        if np.allclose(W, W_prev):
            break
    return W
def test(self):
    """Print hypothesis output vs. expected answer for each test case.

    Fixed: the original used a Python 2 `print` statement, a syntax
    error under Python 3.
    """
    for t in self.test_cases:
        # Features are every column but the last; the expected answer
        # sits at index self.__MAX_FEATURE_CNT.
        h = self.__hypothesis(t[0:-1])
        print("H = %lf, ANS = %d" % (h, t[self.__MAX_FEATURE_CNT]))
def linear_regression(feat1, feat2):
    """Simulate a noisy linear response.

    Draws from a Gaussian centered on 2*feat1 + feat2 + 5 with
    standard deviation 3.
    """
    mean = 2 * feat1 + feat2 + 5
    return random.gauss(mean, 3)
def test_predict_2(self):
    """predict() with all-zero theta must score ~0 for a single sample."""
    X = np.array([[3.5]])
    m, n = X.shape
    # Prepend an intercept column of ones to the design matrix.
    bias = np.ones((m, 1), dtype=np.int64)
    X = np.append(bias, X, axis=1)
    # Zero parameters (intercept included).
    theta = np.zeros((n + 1, 1), dtype=np.int64)

    assert_allclose([[0]],
                    predict(X, theta),
                    rtol=0, atol=0.001)
def linear_regression(x, y):
    """Grid-wise linear regression of a field against a time series.

    Input
    - x: 1d timeseries (time)
    - y: time-varying 2d field (time, lat, lon), a cdms2 variable
    Output
    - slope: 2d spatial map of the regression slope at each grid point
    - intercept: 2d spatial map of the regression intercept
    """
    # Keep the spatial axes so they can be reattached to the results.
    lat = y.getLatitude()
    lon = y.getLongitude()
    nlat = y.shape[1]
    nlon = y.shape[2]
    # Collapse (time, lat, lon) to (time, lat*lon) so polyfit runs one
    # regression per grid cell in a single call.
    flat = y.reshape(y.shape[0], nlat * nlon)
    # Degree-1 fit: row 0 holds slopes, row 1 intercepts.
    coeffs = np.polyfit(x, flat, 1)
    slope = MV2.array(coeffs[0].reshape(nlat, nlon))
    intercept = MV2.array(coeffs[1].reshape(nlat, nlon))
    # Restore lat/lon coordinates and the original mask on both maps.
    for field in (slope, intercept):
        field.setAxis(0, lat)
        field.setAxis(1, lon)
        field.mask = y.mask
    return slope, intercept
def run_logistic_regression(df):
    """Fit a logistic regression of conversion on cumulative pageviews.

    df must contain 'pageviews_cumsum' and 'is_conversion' columns.
    Prints the statsmodels fit summary and returns the results object.
    """
    # Design matrix: cumulative pageviews plus an intercept column.
    predictors = sm.add_constant(df['pageviews_cumsum'])
    target = df['is_conversion']
    logistic_regression_results = sm.Logit(target, predictors).fit()
    print(logistic_regression_results.summary())
    return logistic_regression_results