10 examples of 'linear regression in machine learning' in Python

Every line of these 'linear regression in machine learning' code snippets is scanned for vulnerabilities by our machine learning engine, which combs millions of open source libraries to help keep your Python code secure.

def Logistic_Regression(X, Y, alpha, theta, num_iters):
    # Run gradient descent for num_iters iterations,
    # logging the parameters and cost every 100 steps.
    # Gradient_Descent, Cost_Function, and Declare_Winner are defined elsewhere.
    m = len(Y)
    for x in range(num_iters):
        theta = Gradient_Descent(X, Y, theta, m, alpha)
        if x % 100 == 0:
            print('theta ', theta)
            print('cost is ', Cost_Function(X, Y, theta, m))
    Declare_Winner(theta)
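Gradient_Descent and Cost_Function come from elsewhere in the original script. A minimal sketch of what they might look like for binary logistic regression, assuming X is an (m, n) NumPy array and Y holds 0/1 labels (these are reconstructions for illustration, not the original helpers):

import numpy as np
from scipy.special import expit  # numerically stable sigmoid

def Cost_Function(X, Y, theta, m):
    # Mean cross-entropy loss over the m training examples
    h = expit(np.dot(X, theta))
    return -np.mean(Y * np.log(h) + (1 - Y) * np.log(1 - h))

def Gradient_Descent(X, Y, theta, m, alpha):
    # One full-batch gradient descent step on the cross-entropy loss
    h = expit(np.dot(X, theta))
    grad = np.dot(X.T, h - Y) / m
    return theta - alpha * grad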
import numpy as np


def Linear_regression():
    # Load the training data (CSV rows of x, y pairs)
    data = np.loadtxt('data.csv', delimiter=',')

    # Define hyperparameters for the model y = m*x + b:
    # learning_rate controls the gradient update step size,
    # num_iter is the number of gradient descent iterations.
    learning_rate = 0.001
    initial_b = 0.0
    initial_m = 0.0
    num_iter = 1000

    # Report the initial parameters and error before training
    print('initial variables:\n initial_b = {0}\n initial_m = {1}\n initial error = {2}\n'
          .format(initial_b, initial_m, compute_error(initial_b, initial_m, data)))

    # Optimize b and m with gradient descent
    [b, m] = optimizer(data, initial_b, initial_m, learning_rate, num_iter)

    # Report the final parameters and error after training
    print('final formula parameters:\n b = {0}\n m = {1}\n final error = {2}\n'
          .format(b, m, compute_error(b, m, data)))

    # Plot the fitted line against the data
    plot_data(data, b, m)
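compute_error, optimizer, and plot_data are helpers from the original file. Minimal sketches of the first two, assuming data is an (N, 2) array of (x, y) rows (hypothetical reconstructions):

import numpy as np

def compute_error(b, m, data):
    # Mean squared error of the line y = m*x + b over all points
    x, y = data[:, 0], data[:, 1]
    return np.mean((y - (m * x + b)) ** 2)

def optimizer(data, b, m, learning_rate, num_iter):
    # Full-batch gradient descent on the mean squared error
    x, y = data[:, 0], data[:, 1]
    n = float(len(data))
    for _ in range(num_iter):
        residual = y - (m * x + b)
        b_grad = -2.0 * np.sum(residual) / n
        m_grad = -2.0 * np.sum(x * residual) / n
        b -= learning_rate * b_grad
        m -= learning_rate * m_grad
    return [b, m]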
import numpy
import scipy.special


def logistic_regression(w, x):
    """Logistic regression classifier model.

    w: Weights w. (n_features,) NumPy array
    x: Data point x_i. (n_features,) NumPy array
    -> float in [0, 1]
    """
    # Sigmoid of the dot product between weights and features
    return scipy.special.expit(numpy.dot(x, w.T))
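A usage sketch with made-up data, calling the model on a random weight vector and feature vector:

rng = numpy.random.default_rng(0)
w = rng.normal(size=5)   # weight vector
x = rng.normal(size=5)   # one data point
print(logistic_regression(w, x))  # a float in [0, 1]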
import numpy as np
import torch
from torch import optim


def logistic_regression(opytimizer):
    # Instantiate the model
    model = torch.nn.Sequential()

    # Some model parameters
    n_features = 64
    n_classes = 10

    # Add a single linear layer (multinomial logistic regression)
    model.add_module("linear", torch.nn.Linear(
        n_features, n_classes, bias=False))

    # Input variables
    batch_size = 100
    epochs = 100

    # Gather parameters from Opytimizer
    # Pay close attention to their order, which must match the declared bounds
    learning_rate = opytimizer[0][0]
    momentum = opytimizer[1][0]

    # Declare the loss function
    loss = torch.nn.CrossEntropyLoss(reduction='mean')

    # Declare the optimization algorithm
    opt = optim.SGD(model.parameters(), lr=learning_rate, momentum=momentum)

    # Perform the training loop
    for _ in range(epochs):
        # Initial cost of 0.0
        cost = 0.0

        # Calculate the number of batches
        num_batches = len(X_train) // batch_size

        # For every batch
        for k in range(num_batches):
            # Start and end indices of each batch
            start, end = k * batch_size, (k + 1) * batch_size

            # Cost is the loss accumulated while fitting the model
            cost += fit(model, loss, opt,
                        X_train[start:end], Y_train[start:end])

    # Predict samples from the evaluation set
    preds = predict(model, X_val)

    # Calculate accuracy
    acc = np.mean(preds == Y_val)

    # Return the error so the optimizer can minimize it
    return 1 - acc
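The fit and predict helpers, along with X_train/Y_train/X_val/Y_val, come from elsewhere in the original example. A plausible sketch of the two helpers, assuming the data are torch tensors on the CPU (hypothetical, for illustration):

import torch

def fit(model, loss, opt, x, y):
    # One optimization step; returns the batch loss as a float
    opt.zero_grad()
    output = loss(model(x), y)
    output.backward()
    opt.step()
    return output.item()

def predict(model, x):
    # Class with the highest score for each sample
    with torch.no_grad():
        return model(x).argmax(dim=1).numpy()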
import numpy as np


def logistic_regression_3(X, y, max_iter: int = 100, learning_rate: float = 0.1):
    # Multinomial logistic regression trained with mini-batch gradient descent;
    # softmax is assumed to be defined elsewhere (see the sketch below).
    W = np.zeros((np.size(X, 1), np.size(y, 1)))
    for _ in range(max_iter):
        # Shuffle the data and take the first 10 rows as a mini-batch
        N = len(y)
        index = np.random.permutation(N)
        X = X[index]
        y = y[index]
        W_prev = np.copy(W)
        y_pred = softmax(X[:10] @ W)
        # Gradient of the cross-entropy loss for the mini-batch
        grad = X[:10].T @ (y_pred - y[:10])
        W -= learning_rate * grad
        # Stop early once the weights converge
        if np.allclose(W, W_prev):
            break
    return W
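The softmax helper is assumed by the snippet; a standard numerically stable version might look like this (an assumption, not the original author's code):

import numpy as np

def softmax(z):
    # Row-wise softmax, shifted by the row max for numerical stability
    z = z - z.max(axis=1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=1, keepdims=True)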
def test(self):
    # Print the model's hypothesis next to the true answer for each test case
    for t in self.test_cases:
        h = self.__hypothesis(t[0:-1])
        print("H = %f, ANS = %d" % (h, t[self.__MAX_FEATURE_CNT]))
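The private __hypothesis method is defined elsewhere in the original class; for a logistic model it would typically be the sigmoid of the weighted feature sum. A hypothetical sketch, assuming the class stores its weights in self.weights and math is imported:

def __hypothesis(self, features):
    # Sigmoid of the dot product between learned weights and features
    z = sum(w * f for w, f in zip(self.weights, features))
    return 1.0 / (1.0 + math.exp(-z))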
import random

def linear_regression(feat1, feat2):
    # Noisy linear target: 2*feat1 + feat2 + 5 with Gaussian noise (sigma=3)
    return random.gauss(2 * feat1 + feat2 + 5, 3)
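This generator is handy for producing synthetic training data; the true coefficients can then be recovered with an ordinary least-squares fit (usage sketch, the surrounding names are invented here):

import numpy as np

# Generate 1000 synthetic samples from the noisy linear model
X = np.random.uniform(-10, 10, size=(1000, 2))
y = np.array([linear_regression(f1, f2) for f1, f2 in X])

# Least-squares fit should roughly recover the coefficients [2, 1] and intercept 5
A = np.column_stack([X, np.ones(len(X))])
coef, *_ = np.linalg.lstsq(A, y, rcond=None)
print(coef)  # approximately [2.0, 1.0, 5.0]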
def test_predict_2(self):
    # A single sample with one feature, plus an intercept column of ones
    X = np.array([[3.5]])
    m, n = X.shape
    intercept = np.ones((m, 1), dtype=np.int64)
    X = np.append(intercept, X, axis=1)
    # Zero weights should predict 0 for any input
    theta = np.zeros((n + 1, 1), dtype=np.int64)

    assert_allclose([[0]],
                    predict(X, theta),
                    rtol=0, atol=0.001)
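The predict function under test is not shown. For linear regression with a design matrix that already includes the intercept column, it is typically just the matrix product, which is consistent with this test expecting 0 for zero weights (an assumed sketch):

def predict(X, theta):
    # Hypothesis h(x) = X @ theta; zero weights therefore predict 0
    return X @ theta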
import MV2
import numpy as np


def linear_regression(x, y):
    """
    NOTE: Perform linear regression
    Input
    - x: 1d timeseries (time)
    - y: time-varying 2d field (time, lat, lon)
    Output
    - slope: 2d array, spatial map, linear regression slope on each grid
    - intercept: 2d array, spatial map, linear regression intercept on each grid
    """
    # Get the original global dimensions
    lat = y.getLatitude()
    lon = y.getLongitude()
    # Convert 3d (time, lat, lon) to 2d (time, lat*lon) so polyfit can be applied
    im = y.shape[2]
    jm = y.shape[1]
    y_2d = y.reshape(y.shape[0], jm * im)
    # Linear regression
    slope_1d, intercept_1d = np.polyfit(x, y_2d, 1)
    # Convert the NumPy arrays back to cdms2 variables
    slope = MV2.array(slope_1d.reshape(jm, im))
    intercept = MV2.array(intercept_1d.reshape(jm, im))
    # Set lat/lon coordinates
    slope.setAxis(0, lat)
    slope.setAxis(1, lon)
    slope.mask = y.mask
    intercept.setAxis(0, lat)
    intercept.setAxis(1, lon)
    intercept.mask = y.mask
    # Return the result
    return slope, intercept
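The core trick here is that np.polyfit accepts a 2-D y and fits one line per column in a single call. A pure-NumPy illustration on a tiny 2x3 grid, without the cdms2/MV2 metadata handling (standalone sketch with made-up data):

import numpy as np

t = np.arange(10.0)                                # time axis
field = 0.5 * t[:, None] + np.random.randn(10, 6)  # (time, lat*lon) = (10, 2*3)
slope, intercept = np.polyfit(t, field, 1)         # one fit per column
print(slope.reshape(2, 3))                         # per-cell slopes, near 0.5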
import statsmodels.api as sm


def run_logistic_regression(df):
    # Regress the conversion indicator on cumulative pageviews
    X = df['pageviews_cumsum']
    X = sm.add_constant(X)
    y = df['is_conversion']
    logit = sm.Logit(y, X)
    logistic_regression_results = logit.fit()
    print(logistic_regression_results.summary())
    return logistic_regression_results
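A usage sketch with a tiny made-up DataFrame (the column names are taken from the function; the data itself is invented for illustration):

import pandas as pd

df = pd.DataFrame({
    'pageviews_cumsum': [1, 2, 3, 5, 8, 13, 21, 34],
    'is_conversion':    [0, 0, 0, 0, 1, 0, 1, 1],
})
results = run_logistic_regression(df)
print(results.params)  # intercept and pageviews_cumsum coefficient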
