import numpy as np
import pandas as pd
from scipy.special import expit
from scipy.stats import norm
| |
|
| | |
def logit(x):
    """Logistic link: return (p, dp) with p = sigmoid(x) and dp = p*(1-p).

    Uses scipy.special.expit, which is numerically stable for large |x|;
    the naive 1/(1 + np.exp(-x)) overflows in exp() for very negative x
    (RuntimeWarning, inf intermediate) before collapsing to 0.
    """
    p = expit(x)
    dp = p * (1 - p)  # derivative of the logistic CDF
    return p, dp
| |
|
def probit(x):
    """Standard-normal link: return (Phi(x), phi(x)).

    The CDF gives the response probability and the PDF is its derivative,
    used for gradients and partial effects.
    """
    return norm.cdf(x), norm.pdf(x)
| |
|
def bhood(func, param, y, z):
    """Log-likelihood, score and expected Hessian of a binary-choice model.

    Parameters
    ----------
    func : callable returning (p, dp) for an index xb (logit or probit link).
    param : coefficient vector, shape (k,).
    y : 0/1 response, shape (n,).
    z : design matrix including the constant, shape (n, k).

    Returns
    -------
    (lhood, grad, hess)

    The GLM weight w = dp / (p*(1-p)) equals 1 for the logit link (where
    dp = p*(1-p)), so logit results are byte-for-byte the old ones; for
    probit it yields the correct score Z'[w*(y-p)] and Fisher information
    -Z' diag(w*dp) Z.  The original grad = Z'(y-p), hess = -Z' diag(dp) Z
    was only valid for logit and gave wrong probit standard errors.
    """
    xb = np.dot(z, param)
    p, dp = func(xb)
    p = np.asarray(p).flatten()
    dp = np.asarray(dp).flatten()
    # Clip away exact 0/1 so the log-likelihood stays finite.
    p = np.clip(p, 1e-8, 1 - 1e-8)
    lhood = np.sum(y * np.log(p) + (1 - y) * np.log(1 - p))
    w = dp / (p * (1 - p))  # == 1 for logit; pdf/(Phi*(1-Phi)) for probit
    grad = np.dot(z.T, w * (y - p))
    hess = -np.dot(z.T * (w * dp), z)
    return lhood, grad, hess
| |
|
| | |
class BinaryLogit:
    """Binary-response model (logit or probit link) fit by Newton-Raphson.

    After fit(), exposes coefficients, standard errors, t-stats, p-values,
    average partial effects (APE) and partial effects at the average (PEA),
    plus sklearn-style ``intercept_`` / ``coef_`` attributes.
    """

    def __init__(self, mode="logit", tol=1e-4, max_iter=100):
        # mode: "logit" or "probit" link function.
        # tol: convergence tolerance on the max absolute score element.
        # max_iter: cap on Newton steps.  Guards against the infinite loop
        #           an unbounded `while` allows under perfect separation,
        #           where the MLE does not exist and the score never
        #           shrinks below tol.
        self.mode = mode
        self.tol = tol
        self.max_iter = max_iter
        self.coef = None        # full parameter vector incl. constant
        self.se = None          # standard errors
        self.t = None           # t statistics
        self.APE = None         # average partial effects
        self.PEA = None         # partial effects at the average
        self.p = None           # two-sided p-values
        self.likelihood = None  # log-likelihood at the optimum
        self.n = None           # observations
        self.k = None           # regressors excluding the constant

    def _get_func(self):
        """Return the link function (p, dp) matching self.mode."""
        if self.mode == "logit":
            return logit
        elif self.mode == "probit":
            return probit
        else:
            raise NotImplementedError(f"Unknown mode: {self.mode}")

    def _get_z_y(self, x, y=None):
        """Build design matrix z = [1 | x]; also return flattened y if given.

        Side effects: sets self.n, self.k, self.x_names (and self.y_name
        when y is supplied).
        """
        if isinstance(x, pd.DataFrame):
            x_array = x.values
            x_names = ["const"] + x.columns.tolist()
        else:
            x_array = np.asarray(x)
            x_names = ["const"] + [f"x{i}" for i in range(x_array.shape[1])]

        self.n, self.k = x_array.shape
        const = np.ones((self.n, 1))
        z = np.hstack([const, x_array])
        self.x_names = x_names

        if y is None:
            return z

        if isinstance(y, (pd.Series, pd.DataFrame)):
            y_array = np.asarray(y).flatten()
            # A DataFrame has no .name attribute, so it falls back to "y".
            self.y_name = y.name if hasattr(y, 'name') else "y"
        else:
            y_array = np.asarray(y).flatten()
            self.y_name = "y"

        return z, y_array

    def fit(self, x, y):
        """Fit by Newton-Raphson.

        Raises RuntimeError if the score has not converged below tol
        within max_iter iterations (e.g. under perfect separation).
        Returns self.
        """
        z, y = self._get_z_y(x, y)
        func = self._get_func()

        param = np.zeros(z.shape[1])
        lhood, grad, hess = bhood(func, param, y, z)

        for _ in range(self.max_iter):
            if np.max(np.abs(grad)) <= self.tol:
                break
            # solve(hess, grad) replaces inv(hess) @ grad: same Newton
            # step, cheaper and numerically more stable.
            param = param - np.linalg.solve(hess, grad)
            lhood, grad, hess = bhood(func, param, y, z)
        else:
            raise RuntimeError(
                f"Newton-Raphson did not converge within {self.max_iter} iterations"
            )

        self.coef = param
        self.intercept_ = param[0]
        self.coef_ = param[1:]
        self.likelihood = lhood
        # Standard errors from the inverse negative Hessian at the optimum.
        self.se = np.sqrt(np.diag(np.linalg.inv(-hess)))
        self.t = self.coef / self.se
        self.p = (1 - norm.cdf(np.abs(self.t))) * 2

        # Average partial effect: sample mean of dp, scaled by each coef.
        u = func(np.dot(z, param.reshape(-1, 1)))
        self.APE = np.mean(u[1]) * param
        # Partial effect evaluated at the average regressor values.
        u = func(np.dot(np.mean(z, axis=0), param))
        self.PEA = u[1] * param
        return self

    def predict(self, x):
        """Return predicted probabilities P(y=1 | x) for new regressors."""
        if isinstance(x, pd.DataFrame):
            x_array = x.values
        else:
            x_array = np.asarray(x)

        if x_array.shape[1] != self.k:
            raise ValueError(f"Expected {self.k} features, got {x_array.shape[1]}")

        const = np.ones((x_array.shape[0], 1))
        z = np.hstack([const, x_array])
        func = self._get_func()
        return func(np.dot(z, self.coef))[0]

    def summary(self):
        """Coefficient table (coef, se, t, p, PEA, APE) as a DataFrame."""
        col_names = ["coef", "se", "t", "p", "PEA", "APE"]
        df = pd.DataFrame(np.c_[self.coef, self.se, self.t, self.p, self.PEA, self.APE],
                          index=self.x_names,
                          columns=col_names)
        return df