Hello! I wanted to ask whether I missed something in my training function for binary-class logistic regression.
Python:
import numpy as np

def logistic_regression_train(data, labels, max_iters=10, lr=0.001,
                              print_period=1000, plot_period=1000):
    # Initialize the weights randomly according to a Gaussian distribution
    weights = np.random.normal(0., 0.1, [data.shape[1],])
    for it in range(max_iters):
        # Compute the gradient and do a gradient step
        gradient = gradient_cross_entropy(data, labels, weights)
        weights = weights - lr * gradient
        # If we reach 100% accuracy, we can stop training immediately
        predictions = logistic_regression_predict(data, weights)  # recompute with the new weights
        if accuracy_fn(predictions, labels) == 100:
            break
        # logging
        if print_period and it % print_period == 0:
            print('loss at iteration', it, ":", cross_entropy(data, labels, weights))
    return weights
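
For context, here is a minimal sketch of the helper functions the training loop relies on. The names gradient_cross_entropy, cross_entropy, logistic_regression_predict, and accuracy_fn come from the code above, but their bodies here are my assumptions (sigmoid predictions, mean binary cross-entropy, accuracy in percent), not necessarily the exact implementations used.
Python:
import numpy as np

def sigmoid(t):
    # Logistic function
    return 1. / (1. + np.exp(-t))

def logistic_regression_predict(data, weights):
    # Predict class 1 if the estimated probability is >= 0.5, else class 0
    return (sigmoid(data @ weights) >= 0.5).astype(int)

def cross_entropy(data, labels, weights):
    # Mean binary cross-entropy loss
    probs = sigmoid(data @ weights)
    return -np.mean(labels * np.log(probs) + (1 - labels) * np.log(1 - probs))

def gradient_cross_entropy(data, labels, weights):
    # Gradient of the mean binary cross-entropy w.r.t. the weights
    return data.T @ (sigmoid(data @ weights) - labels) / len(labels)

def accuracy_fn(predictions, labels):
    # Accuracy in percent
    return 100. * np.mean(predictions == labels)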