I would appreciate it if someone could give me some help with Python code to do the
ID: 3804593 • Letter: I
Question
I would appreciate it if someone could give me some help with Python code to do the following:
Re-run an algorithm by using only the 4th dimension of xi. Set b = 5 and sigma_squared = 2.
Show a scatter plot of the data (x[4] versus y for each point).
Also, plot as a solid line the predictive mean of the Gaussian process at each point in the training set.
data:
Xtrain data - 350 rows, 7 Columns
Xtestdata - 42 Rows, 7 Columns
Ytrain data - 350 rows, 1 Column
Ytestdata - 42 Rows, 1 Column
b = [5,7,9,11,13,15]
sigma squared = [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]
Algo:
def KN(b, X=None):
    """Build the n x n Gaussian (RBF) kernel matrix over the training inputs.

    K[i, j] = exp(-||x_i - x_j||^2 / b)

    Parameters
    ----------
    b : float
        Kernel bandwidth parameter; larger b gives a smoother kernel.
    X : ndarray of shape (n, d), optional
        Input points. Defaults to the module-level Xtraindata, so the
        original call form KN(b) still works unchanged.

    Returns
    -------
    ndarray of shape (n, n)
        Symmetric kernel matrix.
    """
    if X is None:
        X = Xtraindata
    rows = X.shape[0]
    # Size the matrix from the data instead of the hard-coded (350, 350)
    # so the function works for any training-set size.
    matrix = numpy.zeros((rows, rows))
    for i in range(rows):
        for j in range(rows):
            matrix[i][j] = numpy.exp((-1 / b) * (norm(X[i] - X[j]) ** 2))
    return matrix
# calculating K(X, Dn)
def K(xTestEntry, b, X=None):
    """Build the 1 x n cross-kernel row vector K(x*, Dn) between one test
    point and every training input.

    K[0, i] = exp(-||x_i - x*||^2 / b)

    Parameters
    ----------
    xTestEntry : ndarray of shape (d,)
        The test point x*.
    b : float
        Kernel bandwidth parameter (same as in KN).
    X : ndarray of shape (n, d), optional
        Training inputs. Defaults to the module-level Xtraindata, so the
        original call form K(xTestEntry, b) still works unchanged.

    Returns
    -------
    ndarray of shape (1, n)
    """
    if X is None:
        X = Xtraindata
    rows = len(X)
    matrix = numpy.zeros((1, rows))
    for i in range(rows):
        matrix[0][i] = numpy.exp((-1 / b) * (norm(X[i] - xTestEntry) ** 2))
    return matrix
# Looping KN over b and sigma squared: grid-search the kernel bandwidth b
# and noise variance sigma, scoring each (b, sigma) pair by test-set RMSE.
n = len(Xtraindata)
for b in [5, 7, 9, 11, 13, 15]:
    # KN depends only on b, so compute it once per b instead of once
    # per (b, sigma) pair.
    kn = KN(b)
    for sigma in [.1, .2, .3, .4, .5, .6, .7, .8, .9, 1]:
        # (sigma*I + KN)^-1 is the same for every test point; invert it
        # once per (b, sigma) instead of once per test entry.
        idMat = numpy.identity(n) * sigma
        mid = numpy.linalg.inv(idMat + kn)
        ux = []
        for xTestEntry in Xtestdata:
            kxdn = K(xTestEntry, b)
            # GP predictive mean: K(x*, Dn) (sigma*I + KN)^-1 y
            x1 = numpy.dot(kxdn, mid)
            ypredict = numpy.dot(x1, Ytraindata)
            ux.append(ypredict[0])
        rms = sqrt(mean_squared_error(Ytestdata, ux))
        print(b, sigma, rms)
Explanation / Answer
data:
Xtrain data - 350 rows, 7 Columns
Xtestdata - 42 Rows, 7 Columns
Ytrain data - 350 rows, 1 Column
Ytestdata - 42 Rows, 1 Column
b = [5,7,9,11,13,15]
sigma squared = [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]
Algo:
def KN(b):
rows = Xtraindata.shape[0]
#print (rows)
matrix = numpy.zeros((350,350))
#print(matrix.shape)
for i in range(rows):
for j in range(rows):
val = numpy.exp((-1/b)*(norm(Xtraindata[i]-Xtraindata[j])**2))
matrix[i][j]=val
return matrix
#calculating K(X, Dn)
def K(xTestEntry,b):
rows=len(Xtraindata)
#print (rows)
matrix = numpy.zeros((1,rows))
for i in range(rows):
val = numpy.exp((-1/b)*(norm(Xtraindata[i]-xTestEntry)**2))
matrix[0][i]=val
return matrix
# Looping KN over b and sigma squared.
n = len(Xtraindata)
for b in [5,7,9,11,13,15]:
for sigma in [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]:
kn=KN(b)
ux=[]
for xTestEntry in Xtestdata:
kxdn =K(xTestEntry,b)
idMat = numpy.identity(n)*sigma
mid = numpy.linalg.inv(idMat + kn)
x1 = numpy.dot(kxdn,mid)
ypredict = numpy.dot(x1,Ytraindata)
ux.append(ypredict[0])
rms=sqrt(mean_squared_error(Ytestdata, ux))
print(b,sigma,rms)
data:
Xtrain data - 350 rows, 7 Columns
Xtestdata - 42 Rows, 7 Columns
Ytrain data - 350 rows, 1 Column
Ytestdata - 42 Rows, 1 Column
b = [5,7,9,11,13,15]
sigma squared = [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]
Algo:
def KN(b):
rows = Xtraindata.shape[0]
#print (rows)
matrix = numpy.zeros((350,350))
#print(matrix.shape)
for i in range(rows):
for j in range(rows):
val = numpy.exp((-1/b)*(norm(Xtraindata[i]-Xtraindata[j])**2))
matrix[i][j]=val
return matrix
#calculating K(X, Dn)
def K(xTestEntry,b):
rows=len(Xtraindata)
#print (rows)
matrix = numpy.zeros((1,rows))
for i in range(rows):
val = numpy.exp((-1/b)*(norm(Xtraindata[i]-xTestEntry)**2))
matrix[0][i]=val
return matrix
# Looping KN over b and sigma squared.
n = len(Xtraindata)
for b in [5,7,9,11,13,15]:
for sigma in [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]:
kn=KN(b)
ux=[]
for xTestEntry in Xtestdata:
kxdn =K(xTestEntry,b)
idMat = numpy.identity(n)*sigma
mid = numpy.linalg.inv(idMat + kn)
x1 = numpy.dot(kxdn,mid)
ypredict = numpy.dot(x1,Ytraindata)
ux.append(ypredict[0])
rms=sqrt(mean_squared_error(Ytestdata, ux))
print(b,sigma,rms)
data:
Xtrain data - 350 rows, 7 Columns
Xtestdata - 42 Rows, 7 Columns
Ytrain data - 350 rows, 1 Column
Ytestdata - 42 Rows, 1 Column
b = [5,7,9,11,13,15]
sigma squared = [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]
Algo:
def KN(b):
rows = Xtraindata.shape[0]
#print (rows)
matrix = numpy.zeros((350,350))
#print(matrix.shape)
for i in range(rows):
for j in range(rows):
val = numpy.exp((-1/b)*(norm(Xtraindata[i]-Xtraindata[j])**2))
matrix[i][j]=val
return matrix
#calculating K(X, Dn)
def K(xTestEntry,b):
rows=len(Xtraindata)
#print (rows)
matrix = numpy.zeros((1,rows))
for i in range(rows):
val = numpy.exp((-1/b)*(norm(Xtraindata[i]-xTestEntry)**2))
matrix[0][i]=val
return matrix
# Looping KN over b and sigma squared.
n = len(Xtraindata)
for b in [5,7,9,11,13,15]:
for sigma in [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]:
kn=KN(b)
ux=[]
for xTestEntry in Xtestdata:
kxdn =K(xTestEntry,b)
idMat = numpy.identity(n)*sigma
mid = numpy.linalg.inv(idMat + kn)
x1 = numpy.dot(kxdn,mid)
ypredict = numpy.dot(x1,Ytraindata)
ux.append(ypredict[0])
rms=sqrt(mean_squared_error(Ytestdata, ux))
print(b,sigma,rms)
data:
Xtrain data - 350 rows, 7 Columns
Xtestdata - 42 Rows, 7 Columns
Ytrain data - 350 rows, 1 Column
Ytestdata - 42 Rows, 1 Column
b = [5,7,9,11,13,15]
sigma squared = [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]
Algo:
def KN(b):
rows = Xtraindata.shape[0]
#print (rows)
matrix = numpy.zeros((350,350))
#print(matrix.shape)
for i in range(rows):
for j in range(rows):
val = numpy.exp((-1/b)*(norm(Xtraindata[i]-Xtraindata[j])**2))
matrix[i][j]=val
return matrix
#calculating K(X, Dn)
def K(xTestEntry,b):
rows=len(Xtraindata)
#print (rows)
matrix = numpy.zeros((1,rows))
for i in range(rows):
val = numpy.exp((-1/b)*(norm(Xtraindata[i]-xTestEntry)**2))
matrix[0][i]=val
return matrix
# Looping KN over b and sigma squared.
n = len(Xtraindata)
for b in [5,7,9,11,13,15]:
for sigma in [.1,.2,.3,.4,.5,.6,.7,.8,.9,1]:
kn=KN(b)
ux=[]
for xTestEntry in Xtestdata:
kxdn =K(xTestEntry,b)
idMat = numpy.identity(n)*sigma
mid = numpy.linalg.inv(idMat + kn)
x1 = numpy.dot(kxdn,mid)
ypredict = numpy.dot(x1,Ytraindata)
ux.append(ypredict[0])
rms=sqrt(mean_squared_error(Ytestdata, ux))
print(b,sigma,rms)