```python
import numpy as np
import scipy.optimize as so

def linearModelLossRSS(b, X, y):
    # Make predictions
    predY = linearModelPredict(b, X)

    # Compute residuals
    res = y - predY

    # Compute the residual sum of squares
    residual_sum_of_squares = np.sum(res**2)

    # Compute the gradient of the loss
    gradient = -2 * np.dot(res, X)

    return (residual_sum_of_squares, gradient)
```
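Assuming `linearModelPredict(b, X)` returns the linear predictions $\hat{y} = Xb$ (the helper is defined elsewhere), the gradient returned above follows directly from differentiating the residual sum of squares with respect to $b$:

$$
\operatorname{RSS}(b) = (y - Xb)^\top (y - Xb)
\qquad\Longrightarrow\qquad
\nabla_b \operatorname{RSS}(b) = -2\, X^\top (y - Xb)
$$

which is exactly the vector computed by `-2 * np.dot(res, X)`.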
```python
def linearModelFit(X, y, lossfcn):
    nrows, ncols = X.shape
    betas = np.zeros(ncols)

    # Optimize the loss (jac=True because lossfcn returns both the loss and its gradient)
    RES = so.minimize(
        lossfcn,
        betas,
        args=(X, y),
        jac=True,
        # hess could also be supplied; for RSS it is 2 * X.T @ X, not the scalar 2
    )

    # Obtain estimates from the optimizer
    estimated_betas = RES.x

    # Compute goodness of fit
    res = y - np.mean(y)
    TSS = np.sum(res**2)
    RSS, deriv = linearModelLossRSS(estimated_betas, X, y)  # squared L2 loss and RSS are the same thing
    R2 = 1 - RSS / TSS

    return (estimated_betas, R2)
```
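A minimal usage sketch, assuming the two functions above (and their imports) have been run. `linearModelPredict` is not shown in these notes, so a plain matrix-vector product is used as a stand-in, and the data is synthetic, purely for illustration:

```python
# Hypothetical stand-in for the helper assumed by linearModelLossRSS
def linearModelPredict(b, X):
    return X @ b

# Synthetic data: intercept column + one feature, true coefficients [2, 3]
rng = np.random.default_rng(0)
X = np.column_stack([np.ones(50), rng.normal(size=50)])
y = X @ np.array([2.0, 3.0]) + rng.normal(scale=0.5, size=50)

betas, R2 = linearModelFit(X, y, linearModelLossRSS)
print(betas, R2)  # coefficients near [2, 3], R2 close to 1
```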
```python
from scipy.optimize import minimize
import numpy as np

# x = np.array([139, ...])
# y = np.array([151, ...])

# Define the model
def f(x, a, b):
    return a * x + b

# The objective function to minimize (least-squares regression)
def obj(x, y, a, b):
    return np.sum((y - f(x, a, b)) ** 2)

# Define the bounds: -infty < a < infty, b <= 0
bounds = [(None, None), (None, 0)]

res = minimize(
    lambda coeffs: obj(x, y, *coeffs),
    x0=np.zeros(2),
    bounds=bounds,
)

# res.x contains your coefficients
```
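As a quick sanity check, the same call can be repeated on made-up data (the arrays below are purely illustrative, standing in for the elided `x` and `y`, and assume the definitions from the snippet above):

```python
# Illustrative data: true slope 2, true intercept -1 (which satisfies b <= 0)
x = np.linspace(0, 10, 20)
y = 2 * x - 1

res = minimize(lambda coeffs: obj(x, y, *coeffs), x0=np.zeros(2), bounds=bounds)
print(res.x)  # expected to be close to [2, -1]
```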