
Commit 251d744

revert to unaliased imports
1 parent 12a8c5b commit 251d744

1 file changed: +27 −27 lines changed


notebooks_en/4_Polynomial_Regression.ipynb

@@ -39,9 +39,9 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "from matplotlib import pyplot as plt\n",
+ "from matplotlib import pyplot\n",
  "from autograd import grad\n",
- "import autograd.numpy as np"
+ "from autograd import numpy"
  ]
  },
  {
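For reference, the reverted import cell, written as plain Python rather than notebook JSON, now reads as follows. (autograd ships its NumPy wrapper as the submodule autograd.numpy, so `from autograd import numpy` binds that wrapped module to the plain name `numpy`.)

    # Unaliased imports: the notebook now spells out pyplot and numpy in full
    # instead of the plt / np aliases used before this commit.
    from matplotlib import pyplot   # plotting, called as pyplot.scatter(...), pyplot.plot(...)
    from autograd import grad       # turns a Python loss function into its gradient function
    from autograd import numpy      # autograd's wrapped NumPy; use it for all math that grad() must trace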
@@ -63,10 +63,10 @@
  }
  ],
  "source": [
- "np.random.seed(0) # fix seed for reproducibility\n",
- "x = np.linspace(-3, 3, 20)\n",
- "y = x**4 + x**3 - 4*x**2 + 8*np.random.normal(size=len(x))\n",
- "plt.scatter(x, y);"
+ "numpy.random.seed(0) # fix seed for reproducibility\n",
+ "x = numpy.linspace(-3, 3, 20)\n",
+ "y = x**4 + x**3 - 4*x**2 + 8*numpy.random.normal(size=len(x))\n",
+ "pyplot.scatter(x, y);"
  ]
  },
  {
@@ -111,7 +111,7 @@
  "def polynomial_features(x, degree):\n",
  " \"\"\" Generate polynomial features for x.\"\"\"\n",
  " \n",
- " X = np.empty((len(x), degree+1))\n",
+ " X = numpy.empty((len(x), degree+1))\n",
  " for i in range(degree+1):\n",
  " X[:,i] = x**i\n",
  " return X\n",
@@ -192,7 +192,7 @@
  " Returns:\n",
  " 1D array of predicted values\n",
  " '''\n",
- " return np.dot(X, params)\n",
+ " return numpy.dot(X, params)\n",
  "\n",
  "def mse_loss(params, model, X, y):\n",
  " '''\n",
@@ -206,7 +206,7 @@
  " float, mean squared error\n",
  " '''\n",
  " y_pred = model(params, X)\n",
- " return np.mean( np.sum((y-y_pred)**2) )\n",
+ " return numpy.mean( numpy.sum((y-y_pred)**2) )\n",
  "\n",
  "gradient = grad(mse_loss)"
  ]
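Pieced together from the last two hunks, the model/loss pair and its autograd gradient look roughly like this. It is a sketch: the loss is written here as a plain mean of squared residuals, and grad differentiates with respect to the first argument, params.

    def linear_regression(params, X):
        """Predict y as X @ params, a linear combination of the feature columns."""
        return numpy.dot(X, params)

    def mse_loss(params, model, X, y):
        """Mean squared error between the observations y and the model's predictions."""
        y_pred = model(params, X)
        return numpy.mean((y - y_pred) ** 2)

    # grad() returns a new function that evaluates d(loss)/d(params),
    # params being positional argument 0 of mse_loss.
    gradient = grad(mse_loss)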
@@ -265,13 +265,13 @@
  "source": [
  "max_iter = 3000\n",
  "alpha = 0.01\n",
- "params = np.zeros(X_scaled.shape[1])\n",
- "descent = np.ones(X_scaled.shape[1])\n",
+ "params = numpy.zeros(X_scaled.shape[1])\n",
+ "descent = numpy.ones(X_scaled.shape[1])\n",
  "i = 0\n",
  "\n",
  "from sklearn.metrics import mean_absolute_error\n",
  "\n",
- "while np.linalg.norm(descent) > 0.01 and i < max_iter:\n",
+ "while numpy.linalg.norm(descent) > 0.01 and i < max_iter:\n",
  " descent = gradient(params, linear_regression, X_scaled, y)\n",
  " params = params - descent * alpha\n",
  " loss = mse_loss(params, linear_regression, X_scaled, y)\n",
@@ -341,13 +341,13 @@
  }
  ],
  "source": [
- "xgrid = np.linspace(x.min(), x.max(), 30)\n",
+ "xgrid = numpy.linspace(x.min(), x.max(), 30)\n",
  "Xgrid_poly_feat = polynomial_features(xgrid, degree)\n",
  "Xgrid_scaled = min_max_scaler.transform(Xgrid_poly_feat)\n",
  "Xgrid_scaled[:,0] = 1 \n",
- "plt.scatter(x, y, c='r', label='true')\n",
- "plt.plot(xgrid, Xgrid_scaled@params, label='predicted')\n",
- "plt.legend();"
+ "pyplot.scatter(x, y, c='r', label='true')\n",
+ "pyplot.plot(xgrid, Xgrid_scaled@params, label='predicted')\n",
+ "pyplot.legend();"
  ]
  },
  {
@@ -489,7 +489,7 @@
  " float, regularized mean squared error\n",
  " '''\n",
  " y_pred = model(params, X)\n",
- " return np.mean( np.sum((y-y_pred)**2) ) + _lambda * np.sum( params[1:]**2 )\n",
+ " return numpy.mean( numpy.sum((y-y_pred)**2) ) + _lambda * numpy.sum( params[1:]**2 )\n",
  "\n",
  "gradient = grad(regularized_loss) "
  ]
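The changed line adds an L2 (ridge) penalty on every weight except the intercept term params[0]. Sketched as plain Python, again with the data-fit term written as a plain mean of squared residuals; the function signature and the regularization strength _lambda are assumed to match mse_loss and an earlier notebook cell, since neither appears in this hunk:

    def regularized_loss(params, model, X, y):
        """Mean squared error plus an L2 penalty on all weights except the bias params[0]."""
        y_pred = model(params, X)
        return numpy.mean((y - y_pred) ** 2) + _lambda * numpy.sum(params[1:] ** 2)

    # Rebuild the gradient so the descent loop now follows the regularized objective.
    gradient = grad(regularized_loss)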
@@ -529,13 +529,13 @@
  "source": [
  "max_iter = 3000\n",
  "alpha = 0.01\n",
- "params = np.zeros(X_scaled.shape[1])\n",
- "descent = np.ones(X_scaled.shape[1])\n",
+ "params = numpy.zeros(X_scaled.shape[1])\n",
+ "descent = numpy.ones(X_scaled.shape[1])\n",
  "i = 0\n",
  "\n",
  "from sklearn.metrics import mean_absolute_error\n",
  "\n",
- "while np.linalg.norm(descent) > 0.01 and i < max_iter:\n",
+ "while numpy.linalg.norm(descent) > 0.01 and i < max_iter:\n",
  " descent = gradient(params, linear_regression, X_scaled, y)\n",
  " params = params - descent * alpha\n",
  " loss = mse_loss(params, linear_regression, X_scaled, y)\n",
@@ -586,10 +586,10 @@
  "print(\"weights with regularization\")\n",
  "print(params)\n",
  "\n",
- "plt.scatter(x, y, c='r', label='true')\n",
- "plt.plot(xgrid, Xgrid_scaled@no_regularization_params, label='w/o regularization')\n",
- "plt.plot(xgrid, Xgrid_scaled@params, label='with regularization')\n",
- "plt.legend();"
+ "pyplot.scatter(x, y, c='r', label='true')\n",
+ "pyplot.plot(xgrid, Xgrid_scaled@no_regularization_params, label='w/o regularization')\n",
+ "pyplot.plot(xgrid, Xgrid_scaled@params, label='with regularization')\n",
+ "pyplot.legend();"
  ]
  },
  {
@@ -676,9 +676,9 @@
  "print(model.coef_)\n",
  "print(model.intercept_)\n",
  "\n",
- "plt.scatter(x, y, c='r', label='true')\n",
- "plt.plot(xgrid, y_pred_sklearn, label='sklearn ridge regression')\n",
- "plt.legend();\n"
+ "pyplot.scatter(x, y, c='r', label='true')\n",
+ "pyplot.plot(xgrid, y_pred_sklearn, label='sklearn ridge regression')\n",
+ "pyplot.legend();\n"
  ]
  },
  {
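The scikit-learn fit itself lies outside this hunk; only the plotting of y_pred_sklearn and the printout of model.coef_ / model.intercept_ are shown. A hedged sketch of what the comparison cell likely involves; the Ridge alpha value and the exact features passed to fit/predict are assumptions, not taken from the diff:

    from sklearn.linear_model import Ridge

    model = Ridge(alpha=1.0)              # alpha is illustrative; the notebook's value is not in this hunk
    model.fit(X_scaled[:, 1:], y)         # assumed: drop the constant column, Ridge fits its own intercept
    y_pred_sklearn = model.predict(Xgrid_scaled[:, 1:])

    print(model.coef_)
    print(model.intercept_)

    pyplot.scatter(x, y, c='r', label='true')
    pyplot.plot(xgrid, y_pred_sklearn, label='sklearn ridge regression')
    pyplot.legend();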
