Régression logistique

python
def sigmoid(z):
    """Numerically stable logistic function 1 / (1 + e^(-z)).

    Works element-wise on scalars and NumPy arrays.  The naive form
    ``1 / (1 + np.exp(-z))`` overflows (RuntimeWarning) for large negative
    z; rewriting it as e^min(z,0) / (1 + e^-|z|) keeps every exponent <= 0.
    Both branches are algebraically identical to the naive formula
    (multiply numerator and denominator by e^z when z < 0).
    """
    return np.exp(np.minimum(z, 0)) / (1 + np.exp(-np.abs(z)))

def gradientDescent(X, y, theta, alpha, epochs):
    """Batch gradient descent for logistic regression.

    Parameters
    ----------
    X : array of shape (m, n) — design matrix.
    y : array of shape (m,) — binary labels (0/1).
    theta : array of shape (n,) — initial weights (not modified).
    alpha : float — learning rate.
    epochs : int — number of full-batch update steps.

    Returns
    -------
    array of shape (n,) — the fitted weights, as a new array.
    """
    m = len(y)
    # Work on a float copy: the original `theta -= ...` updated the
    # caller's array in place and raised on integer-dtype inputs.
    theta = np.array(theta, dtype=float)

    for _ in range(epochs):
        # Predicted probabilities: sigmoid of the linear score X @ theta.
        h = 1.0 / (1.0 + np.exp(-X.dot(theta)))

        # Gradient of the mean log-loss w.r.t. theta.
        gradient = X.T.dot(h - y) / m
        theta -= alpha * gradient

    return theta

# Fit
# NOTE(review): X, Y and the initial `theta` are not defined in this cell —
# presumably created in an earlier notebook cell; confirm before running.
theta = gradientDescent(X, Y, theta=theta, alpha=0.1, epochs=10000)

# Predict
# NOTE(review): b0, b1, b2 and the sample `x` also come from elsewhere;
# this computes the linear score (intercept + weighted features) for one sample.
z  = b0 + b1 * x[0] + b2 * x[1]
# Logistic transform of the score -> probability of class 1.
p  = 1 / (1 + np.exp(-z))

print('Probabilité de badass:', p)
# Threshold the probability at 0.5 to get the hard 0/1 class label.
print('Prediction:', (1 if p > 0.5 else 0))
# Same model fitted with scikit-learn, for comparison.
from sklearn.linear_model import LogisticRegression

# Fit
# C is the INVERSE regularization strength; C=1e20 effectively disables
# regularization so the coefficients can match the unregularized fit above.
# NOTE(review): X and Y are defined in an earlier notebook cell.
model = LogisticRegression(C=1e20, solver='liblinear', random_state=0)
model.fit(X, Y)

# Predict
# predict_proba returns one row [P(class 0), P(class 1)] per sample;
# [0][1] picks P(class 1) for the single sample `x` (defined elsewhere).
print('Probabilité de badass:', model.predict_proba([x])[0][1])
print('Prediction:', model.predict([x])[0])
# Same model fitted with statsmodels, for comparison.
import statsmodels.api as sm

# Fit
# statsmodels does not add an intercept automatically: prepend a column
# of ones to the design matrix.
# NOTE(review): X and Y are defined in an earlier notebook cell.
Xb       = sm.add_constant(X)
Logistic = sm.Logit(Y, Xb)
result   = Logistic.fit()
# NOTE(review): the bare expressions below only display in a notebook cell;
# in a plain script their return values are silently discarded.
result.summary()

# Predict
# Prepend the constant 1 to the sample so it matches the columns of Xb.
xb = np.concatenate([[1], x])
result.predict(xb)

Logistic Regression.ipynb