83 changes: 83 additions & 0 deletions README.md
@@ -143,3 +143,86 @@ For detailed API documentation, please refer to the [API Documentation](API_DOCU

This project is licensed under the **APACHE License** - see the [LICENSE](LICENSE) file for details.

## deeptune: A Python Package for Model Fine-Tuning and Training

`deeptune` is a Python package designed to help you fine-tune and train Siamese-architecture models. It provides different backend options and loss functions such as triplet loss and ArcFace loss.

### Features

- **Model Fine-Tuning:** Fine-tune pre-trained models with ease.
- **Training:** Train models with different backend options and loss functions (a minimal loss sketch follows this list).
- **Evaluation:** Evaluate models using various metrics.
- **CLI Support:** Interact with the package through the command line.
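
The loss functions shipped with `deeptune` (triplet, contrastive, ArcFace) are standard Keras `Loss` subclasses, so they plug straight into `model.compile()`. Below is a minimal sketch, assuming you already have a Keras model that outputs the anchor, positive, and negative embeddings stacked along axis 1 (that is how `TripletLoss` slices `y_pred`); the embedding model itself is hypothetical and not part of this package:

```python
from deeptune.training.losses import TripletLoss

# TripletLoss expects y_pred of shape (batch, 3, embedding_dim):
# y_pred[:, 0] = anchor, y_pred[:, 1] = positive, y_pred[:, 2] = negative.
triplet_loss = TripletLoss(margin=0.5)

# `embedding_model` is a placeholder for your own triplet-output Keras model.
# embedding_model.compile(optimizer="adam", loss=triplet_loss)
```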

### Installation

To install the package, clone the repository and install the required packages:

```bash
git clone https://github.com/Devasy23/FaceRec.git
cd FaceRec/deeptune
pip install -r requirements.txt
```
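
As a quick sanity check, you can ask the CLI for its help text; this assumes you invoke it from the repository root (the directory that contains `deeptune/`), and it confirms that the package and its dependencies import correctly:

```bash
python -m deeptune.cli.cli --help
```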

### Usage

#### Command-Line Interface (CLI)

The package provides a CLI for interacting with the fine-tuning, training, and evaluation functionality.

To evaluate a model, use the following command:

```bash
python -m deeptune.cli.cli evaluate_model <model_path> <dataset_path>
```

Replace `<model_path>` with the path to your model file and `<dataset_path>` with the path to your dataset.
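
For example, with a hypothetical fine-tuned model file and validation folder (both paths are illustrative and not shipped with the repository):

```bash
python -m deeptune.cli.cli evaluate_model models/facenet_finetuned.h5 data/val_faces
```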

#### Example

Here is an example of how to use the package in your Python code:

```python
from deeptune.evaluation.eval_mark_I import (
    load_and_preprocess_image,
    generate_embeddings,
    calculate_intra_cluster_distances,
)
from keras.models import load_model
import numpy as np

# Load the pre-trained model
model_path = "path_to_your_model.h5"
model = load_model(model_path)

# Path to the dataset
dataset_path = "path_to_your_dataset"

# Generate embeddings
embeddings, labels = generate_embeddings(model, dataset_path)

# Calculate intra-cluster distances
intra_distances = calculate_intra_cluster_distances(embeddings, labels)

# Output the results
print(f"Intra-Cluster Distances: {intra_distances}")
print(f"Mean Distance: {np.mean(intra_distances)}")
```
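
A lower mean intra-cluster distance means the embeddings of each identity sit closer together. For a rough separability check you can also compare it against the average distance between different identities. The helper below is not part of `deeptune`; it is a small NumPy sketch that assumes `embeddings` is an `(N, D)` array and `labels` is a length-`N` array with at least two distinct identities, as produced above:

```python
import numpy as np


def mean_inter_cluster_distance(embeddings, labels):
    """Average Euclidean distance between embeddings of different labels."""
    embeddings = np.asarray(embeddings)
    labels = np.asarray(labels)
    # Pairwise distance matrix (O(N^2) memory, fine for small evaluation sets)
    dists = np.linalg.norm(embeddings[:, None, :] - embeddings[None, :, :], axis=-1)
    # Keep only pairs whose labels differ
    mask = labels[:, None] != labels[None, :]
    return dists[mask].mean()


print(f"Mean Inter-Cluster Distance: {mean_inter_cluster_distance(embeddings, labels):.4f}")
```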

### Project Structure

- `deeptune/`: Main package directory (import paths mirror this layout; see the example after this list).
  - `__init__.py`: Makes `deeptune` a Python package.
  - `data/`: Sub-package for data-related functionalities.
  - `models/`: Sub-package for model-related functionalities.
  - `training/`: Sub-package for training-related functionalities.
  - `evaluation/`: Sub-package for evaluation-related functionalities.
  - `utils/`: Sub-package for utility functions.
  - `cli/`: Sub-package for CLI-related functionalities.
  - `config.py`: Configuration settings (secret key, upload paths, allowed image extensions).
  - `requirements.txt`: Lists the dependencies for the package.
  - `cli/cli.py`: CLI script to interact with the package.
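
The layout maps directly onto import paths. For example, the components introduced in this PR can be imported as:

```python
from deeptune.evaluation.eval_mark_I import generate_embeddings
from deeptune.training.data_generators import TripletGenerator
from deeptune.training.losses import ArcFaceLoss, ContrastiveLoss, TripletLoss
```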

### License

This project is licensed under the **APACHE License** - see the [LICENSE](LICENSE) file for details.
1 change: 1 addition & 0 deletions deeptune/__init__.py
@@ -0,0 +1 @@
# deeptune package initialization
1 change: 1 addition & 0 deletions deeptune/cli/__init__.py
@@ -0,0 +1 @@
# This file makes `cli` a sub-package
32 changes: 32 additions & 0 deletions deeptune/cli/cli.py
@@ -0,0 +1,32 @@
import os

import click
import numpy as np
from keras.models import load_model

from deeptune.evaluation.eval_mark_I import (calculate_intra_cluster_distances,
                                             generate_embeddings,
                                             load_and_preprocess_image)


@click.group()
def cli():
    pass


@click.command()
@click.argument("model_path")
@click.argument("dataset_path")
def evaluate_model(model_path, dataset_path):
    """Evaluate the model with the given dataset."""
    model = load_model(model_path)
    embeddings, labels = generate_embeddings(model, dataset_path)
    intra_distances = calculate_intra_cluster_distances(embeddings, labels)
    print(f"Intra-Cluster Distances: {intra_distances}")
    print(f"Mean Distance: {np.mean(intra_distances)}")


cli.add_command(evaluate_model)

if __name__ == "__main__":
    cli()
13 changes: 13 additions & 0 deletions deeptune/config.py
@@ -0,0 +1,13 @@
from __future__ import annotations

import os

basedir = os.path.abspath(os.path.dirname(__file__))


class Config:
    SECRET_KEY = os.environ.get("SECRET_KEY")
    DEBUG = True
    upload_image_path = os.path.join(basedir, "static/Images/uploads")
    ALLOWED_EXTENSIONS = ["jpg", "jpeg", "png", "jfif"]
    image_data_file = os.path.join(basedir, "static/Images/image_data.json")
1 change: 1 addition & 0 deletions deeptune/data/__init__.py
@@ -0,0 +1 @@
# deeptune data sub-package initialization
1 change: 1 addition & 0 deletions deeptune/evaluation/__init__.py
@@ -0,0 +1 @@
# deeptune evaluation sub-package initialization
File renamed without changes.
1 change: 1 addition & 0 deletions deeptune/models/__init__.py
@@ -0,0 +1 @@
# deeptune models sub-package initialization
17 changes: 17 additions & 0 deletions deeptune/requirements.txt
@@ -0,0 +1,17 @@
deepface==0.0.92
fastapi==0.115.0
keras==2.15.0
matplotlib>=3.8.2
numpy==1.26.0
Pillow==10.4.0
pydantic==2.9.2
pymongo==4.6.3
python-dotenv==1.0.1
tensorflow==2.15.0
uvicorn==0.31.0
pytest==7.4.4
httpx
sphinx
sphinx-rtd-theme
myst-parser
mypy-extensions
1 change: 1 addition & 0 deletions deeptune/training/__init__.py
@@ -0,0 +1 @@
# deeptune training sub-package initialization
48 changes: 48 additions & 0 deletions deeptune/training/data_generators.py
@@ -0,0 +1,48 @@
import os
import random

import numpy as np
from keras.preprocessing import image


class TripletGenerator:
    def __init__(self, dataset_path, batch_size=32, target_size=(160, 160)):
        self.dataset_path = dataset_path
        self.batch_size = batch_size
        self.target_size = target_size
        # One sub-directory per identity/class
        self.classes = os.listdir(dataset_path)
        self.class_indices = {cls: i for i, cls in enumerate(self.classes)}
        # Map each class to the list of its image file paths
        self.image_paths = {
            cls: [
                os.path.join(dataset_path, cls, img)
                for img in os.listdir(os.path.join(dataset_path, cls))
            ]
            for cls in self.classes
        }

    def __len__(self):
        return len(self.classes)

    def __getitem__(self, idx):
        # Build one triplet: anchor and positive from the same class,
        # negative from a different class.
        cls = self.classes[idx]
        positive_images = random.sample(self.image_paths[cls], 2)
        negative_cls = random.choice([c for c in self.classes if c != cls])
        negative_image = random.choice(self.image_paths[negative_cls])

        anchor = self.load_and_preprocess_image(positive_images[0])
        positive = self.load_and_preprocess_image(positive_images[1])
        negative = self.load_and_preprocess_image(negative_image)

        # Dummy labels (TripletLoss in this package ignores y_true)
        return np.array([anchor, positive, negative]), np.array([1, 0])

    def load_and_preprocess_image(self, img_path):
        img = image.load_img(img_path, target_size=self.target_size)
        img_array = image.img_to_array(img)
        img_array = np.expand_dims(img_array, axis=0)
        # Scale pixel values to [0, 1]
        img_array /= 255.0
        return img_array

    def generate(self):
        # Endless generator cycling through one triplet per class
        while True:
            for i in range(len(self)):
                yield self[i]
47 changes: 47 additions & 0 deletions deeptune/training/losses.py
@@ -0,0 +1,47 @@
import numpy as np
import tensorflow as tf
import tensorflow.keras.backend as K
from tensorflow.keras.losses import Loss


class TripletLoss(Loss):
    def __init__(self, margin=1.0, **kwargs):
        super().__init__(**kwargs)
        self.margin = margin

    def call(self, y_true, y_pred):
        anchor, positive, negative = y_pred[:, 0], y_pred[:, 1], y_pred[:, 2]
        pos_dist = K.sum(K.square(anchor - positive), axis=-1)
        neg_dist = K.sum(K.square(anchor - negative), axis=-1)
        loss = K.maximum(pos_dist - neg_dist + self.margin, 0.0)
        return K.mean(loss)


class ContrastiveLoss(Loss):
    def __init__(self, margin=1.0, **kwargs):
        super().__init__(**kwargs)
        self.margin = margin

    def call(self, y_true, y_pred):
        y_true = K.cast(y_true, y_pred.dtype)
        pos_dist = K.sum(K.square(y_pred[:, 0] - y_pred[:, 1]), axis=-1)
        neg_dist = K.sum(K.square(y_pred[:, 0] - y_pred[:, 2]), axis=-1)
        loss = y_true * pos_dist + (1 - y_true) * K.maximum(self.margin - neg_dist, 0.0)
        return K.mean(loss)


class ArcFaceLoss(Loss):
    def __init__(self, scale=64.0, margin=0.5, **kwargs):
        super().__init__(**kwargs)
        self.scale = scale
        self.margin = margin

    def call(self, y_true, y_pred):
        y_true = K.cast(y_true, y_pred.dtype)
        cosine = K.clip(y_pred, -1.0, 1.0)
        theta = tf.acos(cosine)
        target_logits = tf.cos(theta + self.margin)
        logits = y_true * target_logits + (1 - y_true) * cosine
        logits *= self.scale
        return tf.nn.softmax_cross_entropy_with_logits(labels=y_true, logits=logits)
1 change: 1 addition & 0 deletions deeptune/utils/__init__.py
@@ -0,0 +1 @@
# deeptune utils sub-package initialization