In linear two-class classification we tune the parameters $\mathbf{w}$ of our `model` so that the corresponding linear decision boundary

\begin{equation} \text{model}\left(\mathbf{x},\mathbf{w}\right) = \mathbf{x}^T \mathbf{w} = 0 \end{equation}

separates the two classes as well as is possible using a linear model. We do so by minimizing an appropriate two-class classification cost, like the softmax, whose minimum provides us with the weights that serve our ideal as best as possible.
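Once trained, such a model assigns labels via the sign of its output. Here is a minimal sketch of this rule, assuming NumPy and purely illustrative weights and points (not the notebook's data).

import numpy as np

# a minimal sketch of label prediction with a trained linear model
# (weights and points below are illustrative, not from this notebook)
w = np.array([[-1.0],[2.0],[0.5]])           # bias w_0 plus one weight per input dimension
x = np.array([[0.3, -1.2, 2.0],
              [1.1,  0.4, -0.7]])            # three 2-d points stored columnwise
o = np.ones((1,x.shape[1]))                  # pad each point with a 1 for the bias
x_padded = np.vstack((o,x))
predicted_labels = np.sign(np.dot(x_padded.T,w))  # which side of the boundary each point lies on
print(predicted_labels.flatten())            # entries in {-1, +1}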
To perform nonlinear two-class classification we simply swap out this linear model for a nonlinear one of the form

\begin{equation} \text{model}\left(\mathbf{x},\Theta\right) = w_0 + f\left(\mathbf{x}\right)\,w_1 \end{equation}

where here $f$ is some parameterized or unparameterized nonlinear function or feature transformation. More generally still, we can employ a model built from $B$ such transformations

\begin{equation} \text{model}\left(\mathbf{x},\Theta\right) = w_0 + f_1\left(\mathbf{x}\right)\,w_1 + f_2\left(\mathbf{x}\right)\,w_2 + \cdots + f_B\left(\mathbf{x}\right)\,w_B \end{equation}

where $f_1,\,f_2,\,...,\,f_B$ are nonlinear parameterized or unparameterized feature transformations, and $w_0$ through $w_B$, along with any weights internal to the transformations themselves (collected in $\Theta$), are tuned by minimizing a classification cost.
For example, minimizing the softmax cost over our `model` looks, in `Python`, like the following.

# the softmax cost, employing our model function
def softmax(w):
    cost = np.sum(np.log(1 + np.exp(-y*model(x,w))))
    return cost/float(len(y))
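The hidden cells below tune these weights with a pre-built runner object exposing `weight_history` and `cost_history`; a minimal gradient descent loop of the same flavor, assuming the `autograd` library for automatic differentiation, might look like this.

from autograd import grad

# a minimal gradient descent sketch (assumes autograd's wrapped NumPy is in
# use, so that the np operations in the cost are differentiable)
def gradient_descent(cost,w,alpha = 0.1,max_its = 1000):
    gradient = grad(cost)              # automatically-computed gradient of the cost
    weight_history = [w]               # container for weights at each step
    cost_history = [cost(w)]           # container for corresponding cost values
    for k in range(max_its):
        w = w - alpha*gradient(w)      # take a standard gradient descent step
        weight_history.append(w)
        cost_history.append(cost(w))
    return weight_history,cost_history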
Notice that the cost function itself never changes; only the `model` it employs does - linear or nonlinear - so we can push the feature transformation to the back of the `model` function too, since the cost will look essentially the same throughout all of our examples. Our `model` will always look like the following.

# an implementation of our model employing a nonlinear feature transformation
def model(x,w):
    # feature transformation (for a parameterized transformation its internal
    # weights are stored in w[0]; an unparameterized version is called as
    # feature_transforms(x) alone)
    f = feature_transforms(x,w[0])
    # tack a 1 onto the top of each transformed point all at once
    o = np.ones((1,np.shape(f)[1]))
    f = np.vstack((o,f))
    # compute linear combination of transformed features and return
    a = np.dot(f.T,w[1])
    return a
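As a quick illustration of the weight container this `model` expects - `w[0]` holding any weights internal to the transformation, `w[1]` the linear combination weights - here is a hypothetical shape check (the stand-in transformation and all sizes are illustrative only).

# a hypothetical stand-in transformation with internal weights, used only to
# illustrate the container format w = [internal weights, combination weights]
def feature_transforms(x,w_internal):
    o = np.ones((1,np.shape(x)[1]))                  # pad inputs with a 1
    return np.tanh(np.dot(np.vstack((o,x)).T,w_internal)).T

P,B = 4,3                              # 4 two-dimensional points, 3 feature units
x_demo = np.random.randn(2,P)          # points stored columnwise
w = [np.random.randn(3,B),             # internal weights of the transformation
     np.random.randn(B + 1,1)]         # linear combination weights (bias included)
print(model(x_demo,w).shape)           # -> (4, 1): one model value per point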
## This code cell will not be shown in the HTML version of this notebook
# load data
csvname = sup_datapath + '2d_classification_data_v1.csv'
data = np.loadtxt(csvname,delimiter = ',')
x = data[:,:-1].T
y = data[:,-1:]
# plot dataset
demo = classif_plotter.Visualizer(data)
demo.plot_data()
To start, we fit a linear decision boundary to this dataset; our `feature_transforms` is then trivial (the `model` reduces to our typical linear model).

# the trivial linear feature transformation
def feature_transforms(x):
    return x
## This code cell will not be shown in the HTML version of this notebook
# pluck out best weights - those that provided lowest cost,
# and plot resulting fit
ind = np.argmin(run.cost_history)
w_best = run.weight_history[ind]
demo.plot_fit(w_best,run.model,normalizer = run.normalizer);
## This code cell will not be shown in the HTML version of this notebook
# load data
csvname = datapath + 'signed_projectile.csv'
data = np.loadtxt(csvname,delimiter = ',')
x = data[:,:-1].T
y = data[:,-1:]
# plot dataset
demo = classif_plotter.Visualizer(data)
demo.plot_data()
This dataset is separable by a quadratic boundary, so our `model` employs a quadratic feature transformation, with the following `Python` implementation.

# our quadratic feature transformation
def feature_transforms(x):
    # calculate feature transform: x and x squared
    f = np.array([(x.flatten()**d) for d in range(1,3)])
    return f
## This code cell will not be shown in the HTML version of this notebook
# plot data and fit in original and feature transformed space
ind = np.argmin(run.cost_history)
w_best = run.weight_history[ind]
demo.plot_fit_and_feature_space(w_best,run.model,run.feature_transforms,normalizer = run.normalizer,view = [25,35])
## This code cell will not be shown in the HTML version of this notebook
# create instance of nonlinear classification demo, used below and in the next examples
demo = nonlib.nonlinear_classification_visualizer.Visualizer(datapath + 'ellipse_2class_data.csv')
x = demo.x.T
y = demo.y[:,np.newaxis]
# plot dataset
demo.plot_data();
This dataset calls for an elliptical decision boundary, which we can recover using a `model` of the form

\begin{equation} \text{model}\left(\mathbf{x},\mathbf{w}\right) = w_0 + x_1^2\,w_1 + x_2^2\,w_2 \end{equation}

since in the feature space defined by $\left(x_1^2,\,x_2^2\right)$ such an ellipse becomes a line. In `Python` our `feature_transforms` function is

# an elliptical feature transformation
def feature_transforms(x):
    # calculate feature transform: square each input coordinate
    f = x**2
    return f
## This code cell will not be shown in the HTML version of this notebook
# illustrate results
ind = np.argmin(run.cost_history)
w_best = run.weight_history[ind]
demo.static_N2_img(w_best,run,view1 = [20,45],view2 = [20,30])
## This code cell will not be shown in the HTML version of this notebook
# create instance of nonlinear classification demo, used below and in the next examples
demo = nonlib.nonlinear_classification_visualizer.Visualizer(datapath + 'diagonal_stripes.csv')
x = demo.x.T
y = demo.y[:,np.newaxis]
# plot dataset
demo.plot_data();
The striped structure of this dataset suggests a periodic, sinusoidal feature transformation with internal weights of its own - in `Python` our `feature_transforms` function is

# our nonlinearity, known as a feature transformation
def feature_transforms(x,w):
    # tack a 1 onto the top of each input point all at once
    o = np.ones((1,np.shape(x)[1]))
    x = np.vstack((o,x))
    # calculate sinusoidal feature transform
    f = np.sin(np.dot((x).T,w)).T
    return f
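As a sanity check on the shapes involved, one might run something like the following (all sizes here are illustrative, not from the notebook).

# hypothetical shape check: P = 4 two-dimensional points and B = 3 sinusoidal
# feature units
P,B = 4,3
x_sample = np.random.randn(2,P)        # points stored columnwise
w_internal = np.random.randn(3,B)      # one row per bias-padded input dimension
print(feature_transforms(x_sample,w_internal).shape)   # -> (3, 4), i.e. (B, P)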
## This code cell will not be shown in the HTML version of this notebook
# illustrate results
ind = np.argmin(run.cost_history)
w_best = run.weight_history[ind]
demo.static_N2_simple(w_best,run,view = [30,155])
## This code cell will not be shown in the HTML version of this notebook
# create instance of nonlinear classification demo, used below and in the next examples
demo = nonlib.nonlinear_classification_visualizer.Visualizer(datapath + '3_layercake_data.csv')
x = demo.x.T
y = demo.y[:,np.newaxis]
# plot dataset
demo.plot_data();
For this dataset we employ a full degree-$D$ polynomial feature transformation, consisting of every term of the form

\begin{equation} f\left(\mathbf{x}\right) = x_1^i\,x_2^j \end{equation}

where $i + j \leq D$. In `Python` we can implement this in a `feature_transforms` function as follows.
# a degree-D polynomial feature transformation (D is set outside this function)
def feature_transforms(x):
    # calculate all polynomial terms x_1^i * x_2^j with i + j <= D, skipping
    # the constant term (the model tacks on the bias itself)
    f = []
    for i in range(0,D+1):
        for j in range(0,D+1-i):
            if i > 0 or j > 0:
                term = (x[0,:]**i)*((x[1,:])**j)
                f.append(term)
    return np.array(f)
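A quick sanity check of this transformation, with an illustrative choice of $D$ and sample points (not the notebook's data):

# illustrative check: with D = 2 each point is mapped to the five monomials
# x1^i * x2^j with 0 < i + j <= 2
D = 2
x_sample = np.array([[1.0, 2.0],
                     [3.0, 4.0]])           # two 2-d points stored columnwise
print(feature_transforms(x_sample).shape)   # -> (5, 2): five features per point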
## This code cell will not be shown in the HTML version of this notebook
# draw resulting nonlinear boundaries for each classification problem, as well as the
# entire multiclass boundary
run = nonlib.basic_runner.Setup(x,y,feature_transforms,'multiclass_counter',normalize = 'standard')
w_best = combined_weights[-1]
demo.show_individual_classifiers(run,w_best)
## This code cell will not be shown in the HTML version of this notebook
# plot result of nonlinear multiclass classification
w_best = combined_weights[-1]
demo.multiclass_plot(run,w_best)