Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
108 changes: 76 additions & 32 deletions aisp/csa/_ai_recognition_sys.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,42 +212,16 @@ def fit(
)

x_class = X[sample_index[_class_]]
# Calculating the similarity threshold between antigens

self._cells_affinity_threshold(x_class)
sufficiently_similar = (
self.affinity_threshold * self.affinity_threshold_scalar
)
# Initialize memory cells for a class.
sufficiently_similar = self.affinity_threshold * self.affinity_threshold_scalar

pool_c: list[BCell] = self._init_memory_c(x_class)

for ai in x_class:
# Calculating the stimulation of memory cells with aᵢ and selecting the largest
# stimulation from the memory set.
c_match = pool_c[0]
match_stimulation = -1.0
for cell in pool_c:
stimulation = self._affinity(cell.vector, ai)
if stimulation > match_stimulation:
match_stimulation = stimulation
c_match = cell

arb_list: list[_ARB] = [
_ARB(vector=c_match.vector, stimulation=match_stimulation)
]

set_clones: npt.NDArray = c_match.hyper_clonal_mutate(
int(self.rate_hypermutation * self.rate_clonal * match_stimulation),
self._feature_type,
)

for clone in set_clones:
arb_list.append(
_ARB(
vector=clone,
stimulation=self._affinity(clone, ai),
)
)
c_match, match_stimulation = self._select_best_matching_cell(ai, pool_c)

arb_list = self._generate_arb_list(ai, c_match, match_stimulation)
c_candidate = self._refinement_arb(ai, match_stimulation, arb_list)

if c_candidate.stimulation > match_stimulation:
Expand Down Expand Up @@ -315,6 +289,77 @@ def predict(self, X: Union[npt.NDArray, list]) -> npt.NDArray:
X, self.k, self._all_class_cell_vectors, self._affinity
)

def _select_best_matching_cell(
self,
ai: npt.NDArray,
pool_c: list[BCell]
) -> tuple[BCell, float]:
"""Select the BCell with the highest affinity with antigen.

Parameters
----------
ai : npt.NDArray
The current antigen.
pool_c : list[BCell]
Pool of memory B-Cells belonging to same class.

Returns
-------
tuple[BCell, float]
A tuple containing the best B cell and their affinity.
"""
c_match = pool_c[0]
match_stimulation = -1.0
for cell in pool_c:
stimulation = self._affinity(cell.vector, ai)
if stimulation > match_stimulation:
match_stimulation = stimulation
c_match = cell

return c_match, match_stimulation

def _generate_arb_list(
    self,
    ai: npt.NDArray,
    c_match: BCell,
    match_stimulation: float
) -> list[_ARB]:
    """Build the ARB pool seeded from the best-affinity B cell.

    The pool always contains an ARB for ``c_match`` itself. When the clone
    budget (``rate_hypermutation * rate_clonal * match_stimulation``,
    truncated to int) is positive, hypermutated clones of ``c_match`` are
    appended, each scored by its affinity with the antigen.

    Parameters
    ----------
    ai : npt.NDArray
        The current antigen.
    c_match : BCell
        The best B-Cell
    match_stimulation : float
        The corresponding stimulation (affinity) value

    Returns
    -------
    list[_ARB]
        ARB set.
    """
    # Seed the pool with the matched cell before any cloning.
    arbs: list[_ARB] = [_ARB(vector=c_match.vector, stimulation=match_stimulation)]

    clone_budget = int(self.rate_hypermutation * self.rate_clonal * match_stimulation)
    if clone_budget > 0:
        # Each clone is re-scored against the antigen individually.
        for mutated in c_match.hyper_clonal_mutate(clone_budget, self._feature_type):
            arbs.append(_ARB(vector=mutated, stimulation=self._affinity(mutated, ai)))

    return arbs

def _refinement_arb(
self,
ai: npt.NDArray,
Expand Down Expand Up @@ -372,7 +417,6 @@ def _refinement_arb(
if iters == self.max_iters or avg_stimulation > self.affinity_threshold:
break

# pick a random cell for mutations.
random_index = random.randint(0, len(arb_list) - 1)
clone_arb = arb_list[random_index].hyper_clonal_mutate(
int(self.rate_clonal * c_match_stimulation), self._feature_type
Expand Down
3 changes: 2 additions & 1 deletion aisp/csa/_clonalg.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,8 @@ class Clonalg(BaseOptimizer):
Maximum number of possible clones of a cell. This value is multiplied by
cell_affinity to determine the number of clones.
rate_hypermutation : float, default=1.0
Rate of mutated clones, used as a scalar factor.
Hypermutation rate controls the intensity of mutations during clonal expansion. Higher
values decrease mutation intensity, while lower values increase it.
n_diversity_injection : int, default=5
Number of new random memory cells injected to maintain diversity.
selection_size : int, default=5
Expand Down
4 changes: 0 additions & 4 deletions aisp/nsa/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ def check_detector_bnsa_validity(
return False

for i in range(x_class.shape[0]):
# Calculate the normalized Hamming Distance
if hamming(x_class[i], vector_x) <= aff_thresh:
return False
return True
Expand Down Expand Up @@ -77,9 +76,7 @@ def bnsa_class_prediction(
total_distance = 0.0
class_found = True

# Calculates the Hamming distance between the row and all detectors.
for detector_index in range(n_detectors):
# Calculates the normalized Hamming distance between the sample and the detector
distance = hamming(features, class_detectors[class_index][detector_index])

# If the distance is less than or equal to the threshold, the detector recognizes
Expand All @@ -89,7 +86,6 @@ def bnsa_class_prediction(
break
total_distance += distance

# if the sample is self for the class
if class_found:
avg_distance = total_distance / n_detectors
# Choose the class with the largest average distance.
Expand Down
24 changes: 7 additions & 17 deletions aisp/nsa/_binary_negative_selection.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,16 +168,12 @@ def fit(
check_shape_match(X, y)
check_binary_array(X)

# Converts the entire array X to boolean
X = X.astype(np.bool_)
self._n_features = X.shape[1]
# Identifying the possible classes within the output array `y`.
self.classes = np.unique(y)
# Dictionary that will store detectors with classes as keys.

list_detectors_by_class: dict = {}
# Separates the classes for training.
sample_index: dict = self._slice_index_list_by_class(y)
# Progress bar for generating all detectors.

progress = tqdm(
total=int(self.N * (len(self.classes))),
Expand All @@ -187,18 +183,17 @@ def fit(
)

for _class_ in self.classes:
# Initializes the empty set that will contain the valid detectors.
valid_detectors_set: list = []
discard_count: int = 0
# Updating the progress bar with the current class the algorithm is processing.
progress.set_description_str(
f"Generating the detectors for the {_class_} class:"
)
x_class = X[sample_index[_class_]]
while len(valid_detectors_set) < self.N:
# Generates a candidate detector vector randomly with values 0 and 1.
vector_x = np.random.randint(0, 2, size=(self._n_features,)).astype(np.bool_)
# If the detector is valid, add it to the list of valid detectors.
vector_x = np.random.randint(0, 2, size=(self._n_features,)).astype(
np.bool_
)

if check_detector_bnsa_validity(x_class, vector_x, self.aff_thresh):
discard_count = 0
valid_detectors_set.append(vector_x)
Expand All @@ -208,16 +203,13 @@ def fit(
if discard_count == self.max_discards:
raise MaxDiscardsReachedError(_class_)

# Add detectors to the dictionary with classes as keys.
list_detectors_by_class[_class_] = np.array(valid_detectors_set)

# Notify the completion of detector generation for the classes.
progress.set_description(
f"\033[92m✔ Non-self detectors for classes ({', '.join(map(str, self.classes))}) "
f"successfully generated\033[0m"
)
progress.close()
# Saves the found detectors in the attribute for the class detectors.
self._detectors = list_detectors_by_class
self._detectors_stack = np.array(
[np.stack(self._detectors[class_name]) for class_name in self.classes]
Expand Down Expand Up @@ -261,16 +253,14 @@ def predict(self, X: Union[npt.NDArray, list]) -> npt.NDArray:
check_feature_dimension(X, self._n_features)
check_binary_array(X)

# Converts the entire array X to boolean.
if X.dtype != bool:
X = X.astype(bool)

# Initializes an empty array that will store the predictions.
c = []
# For each sample row in X.

for line in X:
class_found: bool = True
# Class prediction based on detectors

class_index = bnsa_class_prediction(
line, self._detectors_stack, self.aff_thresh
)
Expand Down
Loading