diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7c629ca..792ab35 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
+## [1.1.5] - 2018-07-21
+### Added
+- setting parameters for different models
+
+## [1.1.4] - 2018-07-21
+### Added
+- sample method for LinearRegression model
+
 ## [1.1.3] - 2018-05-25
 ### Fixed
 - HLR fit method sets shared vars if no minibatch_size given
diff --git a/docs/api/modules.rst b/docs/api/modules.rst
index 7dc61fa..11f78b6 100644
--- a/docs/api/modules.rst
+++ b/docs/api/modules.rst
@@ -1,7 +1,7 @@
-api
+pymc3_models
 ============
 
 .. toctree::
    :maxdepth: 4
 
-   pymc3_models.models
+   pymc3_models
diff --git a/docs/api/pymc3_models.models.rst b/docs/api/pymc3_models.models.rst
index 6ff7b45..a5856e7 100644
--- a/docs/api/pymc3_models.models.rst
+++ b/docs/api/pymc3_models.models.rst
@@ -1,16 +1,19 @@
-models
-=============================
+pymc3\_models.models package
+============================
 
-pymc3\_models\.models\.HierarchicalLogisticRegression module
-------------------------------------------------------------
+Submodules
+----------
+
+pymc3\_models.models.HierarchicalLogisticRegression module
+----------------------------------------------------------
 
 .. automodule:: pymc3_models.models.HierarchicalLogisticRegression
     :members:
     :undoc-members:
     :show-inheritance:
 
-pymc3\_models\.models\.LinearRegression module
-----------------------------------------------
+pymc3\_models.models.LinearRegression module
+--------------------------------------------
 
 .. automodule:: pymc3_models.models.LinearRegression
     :members:
diff --git a/docs/api/pymc3_models.rst b/docs/api/pymc3_models.rst
new file mode 100644
index 0000000..ad7c902
--- /dev/null
+++ b/docs/api/pymc3_models.rst
@@ -0,0 +1,29 @@
+pymc3\_models package
+=====================
+
+Subpackages
+-----------
+
+.. toctree::
+
+    pymc3_models.models
+
+Submodules
+----------
+
+pymc3\_models.exc module
+------------------------
+
+.. automodule:: pymc3_models.exc
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: pymc3_models
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/pymc3_models/models/HierarchicalLogisticRegression.py b/pymc3_models/models/HierarchicalLogisticRegression.py
index aa63592..981724f 100644
--- a/pymc3_models/models/HierarchicalLogisticRegression.py
+++ b/pymc3_models/models/HierarchicalLogisticRegression.py
@@ -13,10 +13,23 @@ class HierarchicalLogisticRegression(BayesianModel):
     Custom Hierachical Logistic Regression built using PyMC3.
     """
 
-    def __init__(self):
+    def __init__(self,
+                 mu_alpha_mu = 0.0,
+                 mu_alpha_sd = 100.0,
+                 sigma_alpha_sd = 100.0,
+                 mu_beta_mu = 0.0,
+                 mu_beta_sd = 100.0,
+                 sigma_beta_sd = 100.0):
         super(HierarchicalLogisticRegression, self).__init__()
         self.num_cats = None
 
+        self.mu_alpha_mu = mu_alpha_mu
+        self.mu_alpha_sd = mu_alpha_sd
+        self.sigma_alpha_sd = sigma_alpha_sd
+        self.mu_beta_mu = mu_beta_mu
+        self.mu_beta_sd = mu_beta_sd
+        self.sigma_beta_sd = sigma_beta_sd
+
     def create_model(self):
         """
         Creates and returns the PyMC3 model.
@@ -42,11 +55,11 @@ def create_model(self):
         model = pm.Model()
 
         with model:
-            mu_alpha = pm.Normal('mu_alpha', mu=0, sd=100)
-            sigma_alpha = pm.HalfNormal('sigma_alpha', sd=100)
+            mu_alpha = pm.Normal('mu_alpha', mu=self.mu_alpha_mu, sd=self.mu_alpha_sd)
+            sigma_alpha = pm.HalfNormal('sigma_alpha', sd=self.sigma_alpha_sd)
 
-            mu_beta = pm.Normal('mu_beta', mu=0, sd=100)
-            sigma_beta = pm.HalfNormal('sigma_beta', sd=100)
+            mu_beta = pm.Normal('mu_beta', mu=self.mu_beta_mu, sd=self.mu_beta_sd)
+            sigma_beta = pm.HalfNormal('sigma_beta', sd=self.sigma_beta_sd)
 
             alpha = pm.Normal('alpha', mu=mu_alpha, sd=sigma_alpha, shape=(self.num_cats,))
             betas = pm.Normal('beta', mu=mu_beta, sd=sigma_beta, shape=(self.num_cats, self.num_pred))
diff --git a/pymc3_models/models/LinearRegression.py b/pymc3_models/models/LinearRegression.py
index 86636b9..1f3d018 100644
--- a/pymc3_models/models/LinearRegression.py
+++ b/pymc3_models/models/LinearRegression.py
@@ -13,9 +13,20 @@ class LinearRegression(BayesianModel):
     Linear Regression built using PyMC3.
     """
 
-    def __init__(self):
+    def __init__(self,
+                 alpha_mu = 0,
+                 alpha_sd = 100,
+                 beta_mu = 0,
+                 beta_sd = 100,
+                 tau = 1):
         super(LinearRegression, self).__init__()
 
+        self.alpha_mu = alpha_mu
+        self.alpha_sd = alpha_sd
+        self.beta_mu = beta_mu
+        self.beta_sd = beta_sd
+        self.tau = tau
+
     def create_model(self):
         """
         Creates and returns the PyMC3 model.
@@ -38,10 +49,10 @@ def create_model(self):
         model = pm.Model()
 
         with model:
-            alpha = pm.Normal('alpha', mu=0, sd=100, shape=(1))
-            betas = pm.Normal('betas', mu=0, sd=100, shape=(1, self.num_pred))
+            alpha = pm.Normal('alpha', mu=self.alpha_mu, sd=self.alpha_sd, shape=(1))
+            betas = pm.Normal('betas', mu=self.beta_mu, sd=self.beta_sd, shape=(1, self.num_pred))
 
-            s = pm.HalfNormal('s', tau=1)
+            s = pm.HalfNormal('s', tau=self.tau)
 
             mean = alpha + T.sum(betas * model_input, 1)
 
@@ -93,17 +104,17 @@ def fit(self, X, y, inference_type='advi', minibatch_size=None, inference_args=N
 
         return self
 
-    def predict(self, X, return_std=False):
+
+    def sample(self, X, samples=2000):
         """
-        Predicts values of new data with a trained Linear Regression model
+        Samples from the posterior predictive distribution for new data
 
         Parameters
         ----------
         X : numpy array, shape [n_samples, n_features]
 
-        return_std : Boolean flag of whether to return standard deviations with mean values. Defaults to False.
+        samples : number of posterior predictive draws to make for each point. Defaults to 2000.
         """
-
         if self.trace is None:
             raise PyMC3ModelsError('Run fit on the model before predict.')
 
@@ -116,6 +127,23 @@ def predict(self, X, return_std=False):
 
         ppc = pm.sample_ppc(self.trace, model=self.cached_model, samples=2000)
 
+        return ppc
+
+    def predict(self, X, return_std=False, samples=2000):
+        """
+        Predicts values of new data with a trained Linear Regression model
+
+        Parameters
+        ----------
+        X : numpy array, shape [n_samples, n_features]
+
+        return_std : Boolean flag of whether to return standard deviations with mean values. Defaults to False.
+
+        samples : number of draws to make for each input. Defaults to 2000.
+        """
+
+        ppc = self.sample(X, samples)
+
         if return_std:
             return ppc['y'].mean(axis=0), ppc['y'].std(axis=0)
         else:
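For reference, a minimal usage sketch of the prior hyperparameters added in 1.1.5. This is not part of the diff: the top-level import path and the example values are assumptions; only the parameter names come from the constructor signatures above.

```python
# Hypothetical example of the new prior settings; values are illustrative.
from pymc3_models import HierarchicalLogisticRegression, LinearRegression

# Linear regression with tighter Normal priors on the intercept (alpha) and
# coefficients (betas) than the defaults (mu=0, sd=100), plus a custom tau
# for the HalfNormal noise term s.
lr = LinearRegression(alpha_mu=0.0, alpha_sd=10.0, beta_mu=0.0, beta_sd=10.0, tau=1.0)

# Hierarchical logistic regression with custom hyperpriors on the group-level
# mu_alpha/sigma_alpha and mu_beta/sigma_beta distributions.
hlr = HierarchicalLogisticRegression(
    mu_alpha_mu=0.0, mu_alpha_sd=10.0, sigma_alpha_sd=10.0,
    mu_beta_mu=0.0, mu_beta_sd=10.0, sigma_beta_sd=10.0,
)
```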
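Likewise, a hedged sketch of the sample/predict split added to LinearRegression in 1.1.4; the synthetic data and the fit call are assumptions. Note that in the hunk above pm.sample_ppc is still called with the literal samples=2000, so the samples argument may not change the number of draws in this revision.

```python
# Hypothetical end-to-end example of sample() vs. predict(); data is synthetic.
import numpy as np

from pymc3_models import LinearRegression

X_train = np.random.randn(200, 2)
y_train = 0.5 + X_train.dot(np.array([1.0, -2.0])) + np.random.randn(200)
X_new = np.random.randn(10, 2)

lr = LinearRegression()
lr.fit(X_train, y_train)    # ADVI inference by default

# sample() exposes the raw posterior predictive draws as a dict keyed by 'y',
# with one row per draw and one column per input point...
ppc = lr.sample(X_new)

# ...while predict() summarizes those draws, as before.
y_mean, y_std = lr.predict(X_new, return_std=True)
```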