2 changes: 1 addition & 1 deletion csrank/choicefunction/baseline.py
@@ -18,7 +18,7 @@ def __init__(self, **kwargs):
"""

def fit(self, X, Y, **kwd):
pass
self._pre_fit()

def _predict_scores_fixed(self, X, Y, **kwargs):
return np.zeros_like(Y) + Y.mean()
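This one-line change replaces the empty fit() body with a call to self._pre_fit(), the pre-fit hook that every learner now invokes before training. A minimal sketch of the pattern, with a hypothetical Learner base class standing in for the real one (its _pre_fit() body is not part of this diff):

import numpy as np


class Learner:
    def _pre_fit(self):
        # Base hook: subclasses extend this to (re)build anything derived from
        # hyperparameters, such as seeded RNGs, optimizers, or layers.
        pass

    def fit(self, X, Y, **kwd):
        raise NotImplementedError


class BaselineChooser(Learner):
    """Scores every object with the mean of Y, so there is nothing to learn."""

    def fit(self, X, Y, **kwd):
        self._pre_fit()  # still run the shared initialization

    def _predict_scores_fixed(self, X, Y, **kwargs):
        return np.zeros_like(Y) + Y.mean()

Even a learner with no trainable parameters goes through the hook, so anything called after fit() sees the same initialized state as in every other estimator.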
1 change: 1 addition & 0 deletions csrank/choicefunction/cmpnet_choice.py
@@ -154,6 +154,7 @@ def fit(
**kwd :
Keyword arguments for the fit function
"""
self._pre_fit()
if tune_size > 0:
X_train, X_val, Y_train, Y_val = train_test_split(
X, Y, test_size=tune_size, random_state=self.random_state
1 change: 1 addition & 0 deletions csrank/choicefunction/fate_choice.py
@@ -163,6 +163,7 @@ def fit(
documentation of :func:`~csrank.core.FATENetwork.fit` for more
information.
"""
self._pre_fit()
if tune_size > 0:
X_train, X_val, Y_train, Y_val = train_test_split(
X, Y, test_size=tune_size, random_state=self.random_state
1 change: 1 addition & 0 deletions csrank/choicefunction/fatelinear_choice.py
@@ -73,6 +73,7 @@ def fit(
verbose=0,
**kwd,
):
self._pre_fit()
if tune_size > 0:
X_train, X_val, Y_train, Y_val = train_test_split(
X, Y, test_size=tune_size, random_state=self.random_state
1 change: 1 addition & 0 deletions csrank/choicefunction/feta_choice.py
@@ -286,6 +286,7 @@ def fit(
**kwd :
Keyword arguments for the fit function
"""
self._pre_fit()
if tune_size > 0:
X_train, X_val, Y_train, Y_val = train_test_split(
X, Y, test_size=tune_size, random_state=self.random_state
1 change: 1 addition & 0 deletions csrank/choicefunction/fetalinear_choice.py
@@ -71,6 +71,7 @@ def fit(
verbose=0,
**kwd,
):
self._pre_fit()
if tune_size > 0:
X_train, X_val, Y_train, Y_val = train_test_split(
X, Y, test_size=tune_size, random_state=self.random_state
6 changes: 5 additions & 1 deletion csrank/choicefunction/generalized_linear_model.py
@@ -158,6 +158,10 @@ def construct_model(self, X, Y):
BinaryCrossEntropyLikelihood("yl", p=self.p_, observed=self.Yt_)
logger.info("Model construction completed")

def _pre_fit(self):
super()._pre_fit()
self.random_state_ = check_random_state(self.random_state)

def fit(
self,
X,
@@ -217,7 +221,7 @@ def fit(
**kwargs :
Keyword arguments for the fit function
"""
self.random_state_ = check_random_state(self.random_state)
self._pre_fit()
if tune_size > 0:
X_train, X_val, Y_train, Y_val = train_test_split(
X, Y, test_size=tune_size, random_state=self.random_state_
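Here the RNG seeding moves out of fit() and into the new _pre_fit() override, so random_state_ (the trailing underscore marks state created during fitting, following the scikit-learn convention) is rebuilt from the random_state hyperparameter on every fit() call. A small sketch of that handling under assumed names (GLMLikeLearner is hypothetical; check_random_state and train_test_split are the scikit-learn helpers):

from sklearn.model_selection import train_test_split
from sklearn.utils import check_random_state


class GLMLikeLearner:
    def __init__(self, random_state=None):
        self.random_state = random_state  # hyperparameter, never overwritten

    def _pre_fit(self):
        # fitted attribute: a concrete RandomState derived from the hyperparameter
        self.random_state_ = check_random_state(self.random_state)

    def fit(self, X, Y, tune_size=0.1, **kwargs):
        self._pre_fit()
        if tune_size > 0:
            X_train, X_val, Y_train, Y_val = train_test_split(
                X, Y, test_size=tune_size, random_state=self.random_state_
            )
        # ... construct and train the model on the training split ...
        return self

Because the seeding happens at the start of fit(), refitting the same estimator reproduces the same stream, and cloning it (which only copies __init__ parameters) keeps working.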
1 change: 1 addition & 0 deletions csrank/choicefunction/pairwise_choice.py
@@ -96,6 +96,7 @@ def fit(self, X, Y, tune_size=0.1, thin_thresholds=1, verbose=0, **kwd):
Keyword arguments for the fit function

"""
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
if tune_size > 0:
X_train, X_val, Y_train, Y_val = train_test_split(
1 change: 1 addition & 0 deletions csrank/choicefunction/ranknet_choice.py
@@ -141,6 +141,7 @@ def fit(
**kwd :
Keyword arguments for the fit function
"""
self._pre_fit()
if tune_size > 0:
X_train, X_val, Y_train, Y_val = train_test_split(
X, Y, test_size=tune_size, random_state=self.random_state
11 changes: 7 additions & 4 deletions csrank/core/cmpnet_core.py
@@ -95,8 +95,6 @@ def construct_model(self):
model: keras :class:`Model`
Neural network to learn the CmpNet utility score
"""
self._initialize_optimizer()
self._initialize_regularizer()
x1x2 = concatenate([self.x1, self.x2])
x2x1 = concatenate([self.x2, self.x1])
logger.debug("Creating the model")
@@ -116,6 +114,12 @@ def construct_model(self):
)
return model

def _pre_fit(self):
super()._pre_fit()
self.random_state_ = check_random_state(self.random_state)
self._initialize_optimizer()
self._initialize_regularizer()

def fit(
self, X, Y, epochs=10, callbacks=None, validation_split=0.1, verbose=0, **kwd
):
@@ -151,8 +155,7 @@ def fit(
**kwd :
Keyword arguments for the fit function
"""
self.random_state_ = check_random_state(self.random_state)
self._initialize_regularizer()
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
x1, x2, y_double = self._convert_instances_(X, Y)

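In CmpNetCore the optimizer and regularizer setup leaves construct_model() and lands in _pre_fit(), so building the Keras model no longer has hidden side effects and the extra _initialize_regularizer() call in fit() disappears. A sketch of the resulting call order, assuming placeholder _initialize_* bodies (the real ones configure Keras objects):

from sklearn.utils import check_random_state


class CmpNetLikeCore:
    def __init__(self, random_state=None, optimizer="adam", kernel_regularizer=None):
        self.random_state = random_state
        self.optimizer = optimizer
        self.kernel_regularizer = kernel_regularizer

    def _initialize_optimizer(self):
        self.optimizer_ = self.optimizer  # placeholder for the real optimizer setup

    def _initialize_regularizer(self):
        self.kernel_regularizer_ = self.kernel_regularizer  # placeholder as well

    def _pre_fit(self):
        # everything fit() and construct_model() rely on is built exactly once here
        self.random_state_ = check_random_state(self.random_state)
        self._initialize_optimizer()
        self._initialize_regularizer()

    def construct_model(self):
        # assembles the network from the already-initialized derived state
        return {"optimizer": self.optimizer_, "regularizer": self.kernel_regularizer_}

    def fit(self, X, Y, **kwd):
        self._pre_fit()
        self.model_ = self.construct_model()
        return self

Keeping construct_model() free of side effects means fit() is the single place where derived state is created, which is what the scikit-learn estimator contract expects.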
6 changes: 5 additions & 1 deletion csrank/core/fate_linear.py
@@ -86,10 +86,14 @@ def step_decay(self, epoch):
self.loss_
)

def _pre_fit(self):
super()._pre_fit()
self.random_state_ = check_random_state(self.random_state)

def fit(
self, X, Y, epochs=10, callbacks=None, validation_split=0.1, verbose=0, **kwd
):
self.random_state_ = check_random_state(self.random_state)
self._pre_fit()
# Global Variables Initializer
n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
self._construct_model_(self.n_objects_fit_)
42 changes: 24 additions & 18 deletions csrank/core/fate_network.py
@@ -70,9 +70,6 @@ def __init__(
self.kernel_regularizer = kernel_regularizer
self.batch_size = batch_size
self.optimizer = optimizer
self._initialize_optimizer()
self._initialize_regularizer()
self._construct_layers()
self._store_kwargs(
kwargs, {"optimizer__", "kernel_regularizer__", "hidden_dense_layer__"}
)
@@ -148,6 +145,12 @@ def join_input_layers(self, input_layer, *layers, n_layers, n_objects):

return scores

def _pre_fit(self):
super()._pre_fit()
self._initialize_optimizer()
self._initialize_regularizer()
self._construct_layers()


class FATENetwork(FATENetworkCore):
def __init__(self, n_hidden_set_layers=1, n_hidden_set_units=1, **kwargs):
@@ -168,12 +171,6 @@ def __init__(self, n_hidden_set_layers=1, n_hidden_set_units=1, **kwargs):

self.n_hidden_set_layers = n_hidden_set_layers
self.n_hidden_set_units = n_hidden_set_units
self.set_layer = None
self._create_set_layers(
activation=self.activation,
kernel_initializer=self.kernel_initializer,
kernel_regularizer=self.kernel_regularizer_,
)

def _create_set_layers(self, **kwargs):
"""
@@ -186,11 +183,11 @@ def _create_set_layers(self, **kwargs):
)
)
if self.n_hidden_set_layers >= 1:
self.set_layer = DeepSet(
self.set_layer_ = DeepSet(
units=self.n_hidden_set_units, layers=self.n_hidden_set_layers, **kwargs
)
else:
self.set_layer = None
self.set_layer_ = None

@staticmethod
def _bucket_frequencies(X, min_bucket_size=32):
@@ -308,6 +305,7 @@ def _fit(
**kwargs :
Keyword arguments for the fit function
"""
self._pre_fit()
if optimizer is not None:
self.optimizer = optimizer
if isinstance(X, dict):
@@ -422,7 +420,7 @@ def construct_model(self, n_features, n_objects):

"""
input_layer = Input(shape=(n_objects, n_features), name="input_node")
set_repr = self.set_layer(input_layer)
set_repr = self.set_layer_(input_layer)
scores = self.join_input_layers(
input_layer,
set_repr,
@@ -438,6 +436,17 @@ def construct_model(self, n_features, n_objects):
)
return model

def _pre_fit(self):
super()._pre_fit()
self.random_state_ = check_random_state(self.random_state)
self._initialize_optimizer()
self._initialize_regularizer()
self._create_set_layers(
activation=self.activation,
kernel_initializer=self.kernel_initializer,
kernel_regularizer=self.kernel_regularizer_,
)

def fit(
self,
X,
@@ -494,10 +503,7 @@ def fit(
**kwargs :
Keyword arguments for the fit function
"""
self.random_state_ = check_random_state(self.random_state)
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
self._initialize_optimizer()
self._initialize_regularizer()
self._fit(
X=X,
Y=Y,
@@ -596,9 +602,9 @@ def _get_context_representation(self, X, kwargs):
shape=(n_objects, self.n_object_features_fit_), name="input_node"
)
if self.n_hidden_set_layers >= 1:
self.set_layer(input_layer_scorer)
fr = self.set_layer.cached_models[n_objects].predict(X, **kwargs)
del self.set_layer.cached_models[n_objects]
self.set_layer_(input_layer_scorer)
fr = self.set_layer_.cached_models[n_objects].predict(X, **kwargs)
del self.set_layer_.cached_models[n_objects]
X_n = np.empty(
(fr.shape[0], n_objects, fr.shape[1] + self.n_object_features_fit_),
dtype="float",
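The FATE network changes pull _initialize_optimizer(), _initialize_regularizer(), _construct_layers() and _create_set_layers() out of the constructors and into _pre_fit(), and rename set_layer to set_layer_ to mark it as fit-time state. The practical payoff is that hyperparameters changed after construction, for example through set_params(), are honoured on the next fit(). A hypothetical sketch of that behaviour (FATELikeNetwork and the tuple standing in for csrank's DeepSet layer are assumptions):

class FATELikeNetwork:
    def __init__(self, n_hidden_set_layers=1, n_hidden_set_units=1):
        self.n_hidden_set_layers = n_hidden_set_layers
        self.n_hidden_set_units = n_hidden_set_units
        # no layers are built here any more

    def set_params(self, **params):
        for key, value in params.items():
            setattr(self, key, value)
        return self

    def _create_set_layers(self):
        # stand-in for the DeepSet construction done in the real class
        self.set_layer_ = ("DeepSet", self.n_hidden_set_layers, self.n_hidden_set_units)

    def _pre_fit(self):
        self._create_set_layers()

    def fit(self, X, Y):
        self._pre_fit()
        return self


model = FATELikeNetwork().set_params(n_hidden_set_units=16)
model.fit(X=None, Y=None)
assert model.set_layer_ == ("DeepSet", 1, 16)  # the late change took effect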
6 changes: 5 additions & 1 deletion csrank/core/feta_linear.py
@@ -136,6 +136,10 @@ def step_decay(self, epoch):
self.loss
)

def _pre_fit(self):
super()._pre_fit()
self.random_state_ = check_random_state(self.random_state)

def fit(
self, X, Y, epochs=10, callbacks=None, validation_split=0.1, verbose=0, **kwd
):
@@ -155,7 +159,7 @@ def fit(
predict the target variables and adjust its parameters by gradient
descent `epochs` times.
"""
self.random_state_ = check_random_state(self.random_state)
self._pre_fit()
# Global Variables Initializer
n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
self._construct_model_(self.n_objects_fit_)
10 changes: 7 additions & 3 deletions csrank/core/feta_network.py
@@ -264,6 +264,12 @@ def create_input_lambda(i):
)
return model

def _pre_fit(self):
super()._pre_fit()
self._initialize_optimizer()
self._initialize_regularizer()
self.random_state_ = check_random_state(self.random_state)

def fit(
self, X, Y, epochs=10, callbacks=None, validation_split=0.1, verbose=0, **kwd
):
@@ -290,13 +296,11 @@ def fit(
**kwd :
Keyword arguments for the fit function
"""
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
self._initialize_optimizer()
self._initialize_regularizer()
self._construct_layers()

logger.debug("Enter fit function...")
self.random_state_ = check_random_state(self.random_state)

X, Y = self.sub_sampling(X, Y)
self.model_ = self.construct_model()
6 changes: 5 additions & 1 deletion csrank/core/pairwise_svm.py
@@ -54,6 +54,10 @@ def __init__(
self.random_state = random_state
self.fit_intercept = fit_intercept

def _pre_fit(self):
super()._pre_fit()
self.random_state_ = check_random_state(self.random_state)

def fit(self, X, Y, **kwargs):
"""
Fit a generic preference learning model on a provided set of queries.
@@ -69,7 +73,7 @@ def fit(self, X, Y, **kwargs):
Keyword arguments for the fit function

"""
self.random_state_ = check_random_state(self.random_state)
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
x_train, y_single = self._convert_instances_(X, Y)
if self.use_logistic_regression:
10 changes: 7 additions & 3 deletions csrank/core/ranknet_core.py
@@ -107,6 +107,12 @@ def construct_model(self):
def _convert_instances_(self, X, Y):
raise NotImplementedError

def _pre_fit(self):
super()._pre_fit()
self.random_state_ = check_random_state(self.random_state)
self._initialize_optimizer()
self._initialize_regularizer()

def fit(
self, X, Y, epochs=10, callbacks=None, validation_split=0.1, verbose=0, **kwd
):
@@ -139,15 +145,13 @@ def fit(
**kwd :
Keyword arguments for the fit function
"""
self.random_state_ = check_random_state(self.random_state)
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
X1, X2, Y_single = self._convert_instances_(X, Y)

logger.debug("Instances created {}".format(X1.shape[0]))
logger.debug("Creating the model")

self._initialize_optimizer()
self._initialize_regularizer()
self._construct_layers()

# Model with input as two objects and output as probability of x1>x2
6 changes: 5 additions & 1 deletion csrank/discretechoice/baseline.py
@@ -18,9 +18,13 @@ def __init__(self, random_state=None, **kwargs):

self.random_state = random_state

def fit(self, X, Y, **kwd):
def _pre_fit(self):
super()._pre_fit()
self.random_state_ = check_random_state(self.random_state)

def fit(self, X, Y, **kwd):
self._pre_fit()

def _predict_scores_fixed(self, X, **kwargs):
n_instances, n_objects, n_features = X.shape
return self.random_state_.rand(n_instances, n_objects)
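For the random discrete-choice baseline, fit() now does nothing beyond seeding the RNG through _pre_fit(), and prediction consumes random_state_ directly. A short usage sketch with a hypothetical RandomScorer class:

import numpy as np
from sklearn.utils import check_random_state


class RandomScorer:
    def __init__(self, random_state=None):
        self.random_state = random_state

    def _pre_fit(self):
        self.random_state_ = check_random_state(self.random_state)

    def fit(self, X, Y, **kwd):
        self._pre_fit()

    def _predict_scores_fixed(self, X, **kwargs):
        n_instances, n_objects, _n_features = X.shape
        return self.random_state_.rand(n_instances, n_objects)


scorer = RandomScorer(random_state=0)
scorer.fit(np.zeros((2, 3, 4)), None)
print(scorer._predict_scores_fixed(np.zeros((2, 3, 4))))  # reproducible random scores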
1 change: 1 addition & 0 deletions csrank/discretechoice/generalized_nested_logit.py
@@ -328,6 +328,7 @@ def fit(
**kwargs :
Keyword arguments for the fit function of :meth:`pymc3.fit` or :meth:`pymc3.sample`
"""
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
if self.n_nests is None:
# TODO this looks like a bug to me, but it was already done this way
1 change: 1 addition & 0 deletions csrank/discretechoice/mixed_logit_model.py
@@ -214,6 +214,7 @@ def fit(
**kwargs :
Keyword arguments for the fit function of :meth:`pymc3.fit` or :meth:`pymc3.sample`
"""
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
self.construct_model(X, Y)
fit_pymc3_model(self, sampler, draws, tune, vi_params, **kwargs)
1 change: 1 addition & 0 deletions csrank/discretechoice/model_selector.py
@@ -58,6 +58,7 @@ def __init__(
self.models = dict()

def fit(self, X, Y):
self._pre_fit()
model_args = dict()
for param_key in self.parameter_keys:
model_args[param_key] = self.uniform_prior
1 change: 1 addition & 0 deletions csrank/discretechoice/multinomial_logit_model.py
@@ -213,6 +213,7 @@ def fit(
**kwargs :
Keyword arguments for the fit function of :meth:`pymc3.fit` or :meth:`pymc3.sample`
"""
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
self.construct_model(X, Y)
fit_pymc3_model(self, sampler, draws, tune, vi_params, **kwargs)
1 change: 1 addition & 0 deletions csrank/discretechoice/nested_logit_model.py
@@ -385,6 +385,7 @@ def fit(
**kwargs :
Keyword arguments for the fit function of :meth:`pymc3.fit` or :meth:`pymc3.sample`
"""
self._pre_fit()
_n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
if self.n_nests is None:
self.n_nests = int(self.n_objects_fit_ / 2)