jijzepttools.blackbox_optimization.sparse_bayesian_linear_reg#

Classes#

Trace

SparseBayesianLinearRegression([random_seed])

Functions#

efficient_multivariate_normal(phi, d, alpha)

sample_from_inv_gamma(shape, scale)

sample_theta(x, y, lamb2, tau2, sigma2)

sample_sigma2(x, y, theta, lamb2, tau2)

sample_lamb2(theta, sigma2, tau2, nu)

sample_tau2(theta, sigma2, lamb2, xi)

sample_nu(lamb2)

sample_xi(tau2)

horseshoe_gibbs_sampling(x, y, theta, sigma2, lamb2, ...)

remove_duplicates_and_average(x, y[, logger])

get_nonzero_column_indices(x[, logger])

Module Contents#

class Trace#
coef: numpy.ndarray#
bias: numpy.ndarray#
lambda2: numpy.ndarray#
sigma2: float#
tau2: float#
nu: numpy.ndarray#
xi: float#
property num_samples#
class SparseBayesianLinearRegression(random_seed: int | None = None)#
rs#
trace: list[Trace] = []#
fit(x: numpy.ndarray, y: numpy.ndarray, draws: int = 10, tune: int = 100)#
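A minimal usage sketch assembled from the signatures above; the import path follows this page's module name, and the way `fit` populates `trace` is an assumption rather than documented behaviour:

```python
import numpy as np

# Import path assumed from the module name of this page.
from jijzepttools.blackbox_optimization.sparse_bayesian_linear_reg import (
    SparseBayesianLinearRegression,
)

rng = np.random.default_rng(0)
x = rng.normal(size=(50, 10))                      # design matrix, few relevant columns
true_coef = np.array([3.0, -2.0] + [0.0] * 8)
y = x @ true_coef + 0.1 * rng.normal(size=50)

model = SparseBayesianLinearRegression(random_seed=0)
model.fit(x, y, draws=10, tune=100)                # tune: burn-in sweeps, draws: kept samples (assumed)

# Assumption: each call to fit appends a Trace with the sampled coefficients.
last_trace = model.trace[-1]
print(last_trace.coef.shape, last_trace.num_samples)
```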
efficient_multivariate_normal(phi: numpy.ndarray, d: numpy.ndarray, alpha: numpy.ndarray)#
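The name and arguments suggest the fast sampler for Gaussian scale-mixture posteriors of the form N((ΦᵀΦ + D⁻¹)⁻¹Φᵀα, (ΦᵀΦ + D⁻¹)⁻¹), often attributed to Bhattacharya et al. (2016). That correspondence is an assumption; the sketch below (with a hypothetical name) shows that algorithm, not necessarily this function's exact implementation:

```python
import numpy as np

def efficient_mvn_sketch(phi: np.ndarray, d: np.ndarray, alpha: np.ndarray,
                         rng: np.random.Generator) -> np.ndarray:
    """Hypothetical sketch: draw theta ~ N(A^{-1} Phi^T alpha, A^{-1}),
    with A = Phi^T Phi + diag(d)^{-1}, in O(n^2 p) rather than O(p^3)."""
    n, p = phi.shape
    u = rng.normal(size=p) * np.sqrt(d)            # u ~ N(0, diag(d))
    delta = rng.normal(size=n)                     # delta ~ N(0, I_n)
    v = phi @ u + delta
    m = phi @ (d[:, None] * phi.T) + np.eye(n)     # Phi diag(d) Phi^T + I_n
    w = np.linalg.solve(m, alpha - v)
    return u + d * (phi.T @ w)
```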
sample_from_inv_gamma(shape, scale)#
sample_theta(x, y, lamb2, tau2, sigma2)#
sample_sigma2(x, y, theta, lamb2, tau2)#
sample_lamb2(theta, sigma2, tau2, nu)#
sample_tau2(theta, sigma2, lamb2, xi)#
sample_nu(lamb2)#
sample_xi(tau2)#
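The sampler names (lamb2, tau2, nu, xi, and the inverse-gamma draws) match the auxiliary-variable formulation of the horseshoe prior. Assuming that formulation, and ignoring the separate bias term stored in Trace, the hierarchy being sampled would be:

$$
\begin{aligned}
y &\sim \mathcal{N}(X\theta,\ \sigma^2 I),\\
\theta_j \mid \lambda_j^2, \tau^2, \sigma^2 &\sim \mathcal{N}(0,\ \lambda_j^2 \tau^2 \sigma^2),\\
\lambda_j^2 \mid \nu_j &\sim \mathrm{Inv\text{-}Gamma}\!\left(\tfrac12,\ \tfrac{1}{\nu_j}\right), \qquad
\tau^2 \mid \xi \sim \mathrm{Inv\text{-}Gamma}\!\left(\tfrac12,\ \tfrac{1}{\xi}\right),\\
\nu_j &\sim \mathrm{Inv\text{-}Gamma}\!\left(\tfrac12,\ 1\right), \qquad
\xi \sim \mathrm{Inv\text{-}Gamma}\!\left(\tfrac12,\ 1\right).
\end{aligned}
$$

Under this hierarchy every full conditional is either Gaussian or inverse-gamma, which would explain why `efficient_multivariate_normal` and `sample_from_inv_gamma` are the only primitive draws listed.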
horseshoe_gibbs_sampling(x, y, theta, sigma2, lamb2, tau2, nu, xi, max_iter)#
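Given the per-parameter samplers listed above, `horseshoe_gibbs_sampling` presumably cycles through them for `max_iter` sweeps. A hypothetical sketch of such a loop, using the module's own conditionals (the update order and return value are assumptions):

```python
from jijzepttools.blackbox_optimization.sparse_bayesian_linear_reg import (
    sample_theta, sample_sigma2, sample_lamb2, sample_tau2, sample_nu, sample_xi,
)

def gibbs_sweep_sketch(x, y, theta, sigma2, lamb2, tau2, nu, xi, max_iter):
    """Hypothetical Gibbs loop built from the module's conditional samplers."""
    for _ in range(max_iter):
        theta = sample_theta(x, y, lamb2, tau2, sigma2)
        sigma2 = sample_sigma2(x, y, theta, lamb2, tau2)
        lamb2 = sample_lamb2(theta, sigma2, tau2, nu)
        tau2 = sample_tau2(theta, sigma2, lamb2, xi)
        nu = sample_nu(lamb2)
        xi = sample_xi(tau2)
    return theta, sigma2, lamb2, tau2, nu, xi
```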
remove_duplicates_and_average(x, y, logger=None)#
get_nonzero_column_indices(x, logger=None)#
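The last two entries are preprocessing utilities. From their names, `remove_duplicates_and_average` presumably collapses duplicate rows of `x` and averages the corresponding `y` values, and `get_nonzero_column_indices` presumably returns the indices of columns of `x` that are not identically zero. A NumPy sketch under those assumptions, with hypothetical names:

```python
import numpy as np

def remove_duplicates_and_average_sketch(x: np.ndarray, y: np.ndarray):
    """Assumed behaviour: one row per unique x, with y averaged over its duplicates."""
    unique_rows, inverse = np.unique(x, axis=0, return_inverse=True)
    averaged_y = np.array([y[inverse == i].mean() for i in range(len(unique_rows))])
    return unique_rows, averaged_y

def get_nonzero_column_indices_sketch(x: np.ndarray) -> np.ndarray:
    """Assumed behaviour: indices of columns containing at least one nonzero entry."""
    return np.flatnonzero(np.any(x != 0, axis=0))
```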