Question

Implement the PLA algorithm in Python. Make sure that you generate linearly separable data with at least 20 points and two features; the points do not have to be evenly divided between the positive and negative classes. Run and test your code, and turn in your code along with your data.

Solution Preview

This material may consist of step-by-step explanations on how to solve a problem or examples of proper writing, including the use of citations, references, bibliographies, and formatting. This material is made available for the sole purpose of studying and learning - misuse is strictly forbidden.

from copy import deepcopy
import matplotlib,sys
from matplotlib import pyplot as plt
import numpy as np
import pdb
import random

def generate_data(radius1, radius2, center1, center2, nPoints):
    """Draw nPoints random 2-D samples from two disks, one per class.

    Each point is sampled uniformly in angle and radius from a disk: class 0
    from the disk (center1, radius1), class 1 from (center2, radius2).  The
    class is chosen by a fair coin flip, so the split need not be even.

    Returns a column-oriented matrix of four parallel lists:
    [bias (always 1.0), x-coordinate, y-coordinate, class label (0 or 1)].
    """
    matrix = [[], [], [], []]
    for _ in range(nPoints):
        # Coin flip picks the class; the random-call order matches sampling
        # radius first, then angle, for each point.
        if random.random() >= 0.5:
            center, radius, label = center1, radius1, 0
        else:
            center, radius, label = center2, radius2, 1
        rad = random.random() * radius
        th = random.random() * 2 * np.pi
        px = center[0] + rad * np.cos(th)
        py = center[1] + rad * np.sin(th)
        matrix[0].append(1.0)  # homogeneous/bias coordinate
        matrix[1].append(px)
        matrix[2].append(py)
        matrix[3].append(label)
    return matrix

def predict(inputs, weights):
    """Perceptron activation: 1.0 when the weighted sum w.x >= 0, else 0.0."""
    threshold_met = np.dot(inputs, weights) >= 0
    # Multiplying the boolean by 1.0 yields the same numeric scalar type
    # the caller expects.
    return threshold_met * 1.0

def plot(matrix,weights=None,title="Data scatter plot and decision boundary"):
if len(matrix[0])==4: # 2D input (i1,i2)
fig,ax = plt.subplots()
ax.set_title(title)
ax.set_xlabel("Feature 1")
ax.set_ylabel("Feature 2")
axis_limit_expansion_factor = 0.2
x1_min = np.amin(matrix[:,1]) - axis_limit_expansion_factor*(np.amax(matrix[:,1])-np.amin(matrix[:,1]))
x1_max = np.amax(matrix[:,1]) + axis_limit_expansion_factor*(np.amax(matrix[:,1])-np.amin(matrix[:,1]))
x2_min = np.amin(matrix[:,2]) - axis_limit_expansion_factor*(np.amax(matrix[:,2])-np.amin(matrix[:,2]))
x2_max = np.amax(matrix[:,2]) + axis_limit_expansion_factor*(np.amax(matrix[:,2])-np.amin(matrix[:,2]))
xs,ys=np.meshgrid(np.linspace(x1_min,x1_max,1000),np.linspace(x2_min,x2_max,1000))
zs=np.zeros_like(xs)
for i in np.arange(xs.shape[0]):
for j in np.arange(xs.shape[1]):
zs[i,j]=predict([1.0,xs[i][j],ys[i][j]],weights)
cp = plt.contourf(xs,ys,zs,levels=[-1,0,1],colors=('g','w'),alpha=0.2)
plt.colorbar(cp)

one_data=[[],[]]
minusone_data=[[],[]]

for i in range(len(matrix)):
feature_x1 = matrix[i][1]
feature_x2 = matrix[i][2]
output_y = matrix[i][3]
if output_y == 1:
one_data[0].append(feature_x1)
one_data[1].append(feature_x2)
else:
minusone_data[0].append(feature_x1)
minusone_data[1].append(feature_x2)...
PLA Algorithm - Python
    $40.00 for this solution

    PayPal, G Pay, ApplePay, Amazon Pay, and all major credit cards accepted.

    Find A Tutor

    View available Python Programming Tutors

    Get College Homework Help.

    Are you sure you don't want to upload any files?

    Fast tutor response requires as much info as possible.

    Decision:
    Upload a file
    Continue without uploading

    SUBMIT YOUR HOMEWORK
    We couldn't find that subject.
    Please select the best match from the list below.

    We'll send you an email right away. If it's not in your inbox, check your spam folder.

    • 1
    • 2
    • 3
    Live Chats