Using SGD without using sklearn (LogLoss increasing with every epoch)
Problem: def train(X_train,y_train,X_test,y_test,epochs,alpha,eta0): w,b = initialize_weights(X_train[0]) loss_test=[] N=len(X_train) for i in range(0,epochs): print(i) for j in range(N-1): grad_dw=gradient_dw(X_train[j],y_train[j],w,b,alpha,N) grad_db=gradient_db(X_train[j],y_train[j],w,b) w=np.array(w)+(alpha*(np.array(grad_dw))) b=b+(alpha*(grad_db)) predict2 = [] for m in range(len(y_test)): z=np.dot(w[0],X_test[m])+b if sigmoid(z) == 0: # sigmoid(w,x,b) returns 1/(1+exp(-(dot(x,w)+b))) predict2