Skip to content
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
281 changes: 272 additions & 9 deletions algorithmia.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,14 @@
import matplotlib.pyplot as plt, mpld3
from matplotlib import colors
import matplotlib.patches as mpatches
from typing import Dict, List, Optional, Tuple
from dataclasses import dataclass
from enum import Enum
import logging

emot_list= list()
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def get_emotion():
print("Getting emotion...")
Expand Down Expand Up @@ -49,14 +55,271 @@ def get_playlist():
elif((current == "Neutral") | (current == "Disgust") | (current == "Surprise")):
cluster_def = [[3, 2], [4, 5], [2, 7], [1, 5]]
else:
cluster_def = [[2, 10], [4, 5], [1, 6]]

playlist = list()
for sets in cluster_def:
for i in range(sets[1]):
ss = random.randint(songlist[sets[0]][0], songlist[sets[0]][1]);
playlist.append(str(ss).zfill(3)+".mp3_"+songnames[ss]);
return playlist
dominant_emotion = max(emotion_scores.items(), key=lambda x: x[1])[0]
confidence_score = emotion_scores[dominant_emotion]

# Get color code
color_mapping = _get_emotion_color_mapping()
color_code = color_mapping.get(dominant_emotion, 11) # Default to neutral color

# Store in history
global emot_list
emot_list.append(color_code)
logger.info(f"Detected emotion: {dominant_emotion} (confidence: {confidence_score:.2f})")
logger.debug(f"Emotion history: {emot_list}")

# Map to enum
try:
emotion_enum = EmotionType(dominant_emotion)
except ValueError:
emotion_enum = EmotionType.NEUTRAL

return EmotionResult(
emotion=emotion_enum,
confidence=confidence_score,
all_emotions=emotion_scores,
color_code=color_code
)

def get_emotion(image_path: str = "snapshots/pic.png") -> str:
    """
    Detect emotion from facial expression in image.

    This function uses deep learning to analyze facial expressions and
    identify the dominant emotion. Results are stored in global history
    for trend analysis.

    Args:
        image_path: Path to image file containing face (default: snapshots/pic.png)

    Returns:
        String name of detected emotion (e.g., 'Happy', 'Sad', 'Neutral').
        On any detection failure (missing file, API error, parse error) the
        error is logged and 'Neutral' is returned — this function never raises.
    """
    logger.info(f"Getting emotion from: {image_path}")

    try:
        # Call the detection API, then reduce the raw scores to an EmotionResult
        api_response = _call_emotion_api(image_path)
        emotion_result = _parse_emotion_results(api_response)
        return emotion_result.emotion.value

    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error
        logger.exception(f"Error in emotion detection: {str(e)}")
        # Fall back to neutral so callers always get a usable emotion name
        return EmotionType.NEUTRAL.value

def get_emotion_detailed(image_path: str = "snapshots/pic.png") -> EmotionResult:
    """
    Run emotion detection and return the full result object.

    Unlike get_emotion(), this exposes every confidence score and the
    color code, not just the winning emotion's name.

    Args:
        image_path: Path to the image to analyze

    Returns:
        EmotionResult carrying the dominant emotion, its confidence,
        all per-emotion scores, and the mapped color code
    """
    return _parse_emotion_results(_call_emotion_api(image_path))

@dataclass
class MusicCluster:
    """Configuration for music cluster selection"""
    # ID of the song cluster (key into PlaylistGenerator.SONG_CLUSTERS)
    cluster_id: int
    # Number of songs to draw from this cluster when building a playlist
    count: int
    # Human-readable mood label, used only for logging/debugging
    mood_category: str

class PlaylistGenerator:
    """
    Advanced playlist generation based on detected emotions.

    This class handles the complex logic of mapping emotions to music clusters
    and generating personalized playlists.
    """

    # Song database configuration: cluster id -> inclusive (first, last) song id
    SONG_CLUSTERS = {
        1: (1, 170),
        2: (171, 334),
        3: (335, 549),
        4: (550, 740),
        5: (741, 903)
    }

    # Emotion to cluster mapping with weights
    EMOTION_CLUSTER_MAPPING = {
        "Angry": [
            MusicCluster(5, 2, "intense"),
            MusicCluster(3, 7, "energetic"),
            MusicCluster(2, 12, "powerful")
        ],
        "Fear": [
            MusicCluster(5, 2, "intense"),
            MusicCluster(3, 7, "dark"),
            MusicCluster(2, 12, "atmospheric")
        ],
        "Sad": [
            MusicCluster(3, 4, "melancholy"),
            MusicCluster(4, 4, "reflective"),
            MusicCluster(2, 13, "soothing")
        ],
        "Neutral": [
            MusicCluster(3, 2, "balanced"),
            MusicCluster(4, 5, "moderate"),
            MusicCluster(2, 7, "ambient"),
            MusicCluster(1, 5, "light")
        ],
        "Disgust": [
            MusicCluster(3, 2, "edgy"),
            MusicCluster(4, 5, "alternative"),
            MusicCluster(2, 7, "experimental"),
            MusicCluster(1, 5, "unusual")
        ],
        "Surprise": [
            MusicCluster(3, 2, "dynamic"),
            MusicCluster(4, 5, "upbeat"),
            MusicCluster(2, 7, "varied"),
            MusicCluster(1, 5, "exciting")
        ],
        "Happy": [
            MusicCluster(2, 10, "joyful"),
            MusicCluster(4, 5, "uplifting"),
            MusicCluster(1, 6, "cheerful")
        ]
    }

    def __init__(self, song_database_path: str = "test.txt"):
        """
        Initialize playlist generator.

        Args:
            song_database_path: Path to pickled song database
        """
        self.song_database_path = song_database_path
        # Lazily-loaded cache of the song-name mapping (None until first use)
        self._song_names: Optional[Dict[int, str]] = None

    def _load_song_database(self) -> Dict[int, str]:
        """
        Load song names from database file (cached after first call).

        Returns:
            Dictionary mapping song IDs to names

        Raises:
            FileNotFoundError: If database file doesn't exist
        """
        if self._song_names is None:
            try:
                # SECURITY: pickle.load can execute arbitrary code embedded in
                # the file — only load databases from trusted sources.
                with open(self.song_database_path, "rb") as fp:
                    self._song_names = pickle.load(fp, encoding='latin1')
                # Lazy %-style args avoid formatting when INFO is disabled
                logger.info("Loaded %d songs from database", len(self._song_names))
            except FileNotFoundError:
                logger.error("Song database not found: %s", self.song_database_path)
                raise

        return self._song_names

    def _select_song_from_cluster(self, cluster_id: int) -> Tuple[int, str]:
        """
        Select a random song from specified cluster.

        Args:
            cluster_id: ID of the music cluster

        Returns:
            Tuple of (song_id, formatted_song_name)
        """
        song_names = self._load_song_database()
        low, high = self.SONG_CLUSTERS[cluster_id]

        # randint is inclusive on both ends, matching the stored ranges
        song_id = random.randint(low, high)
        formatted_name = f"{song_id:03d}.mp3_{song_names[song_id]}"

        return song_id, formatted_name

    def generate_playlist(self, emotion: str, shuffle: bool = True) -> List[str]:
        """
        Generate a playlist based on detected emotion.

        Args:
            emotion: Detected emotion string
            shuffle: Whether to shuffle the final playlist

        Returns:
            List of song filenames (format: "###.mp3_songname")

        Note:
            Unrecognized emotions do not raise — they fall back to the
            "Neutral" cluster configuration with a warning.
        """
        logger.info("Generating playlist for emotion: %s", emotion)

        # Fall back to Neutral for unknown emotions rather than failing
        if emotion not in self.EMOTION_CLUSTER_MAPPING:
            logger.warning("Unknown emotion: %s, using Neutral", emotion)
            emotion = "Neutral"

        cluster_config = self.EMOTION_CLUSTER_MAPPING[emotion]

        # Draw the configured number of songs from each cluster
        playlist: List[str] = []
        for music_cluster in cluster_config:
            logger.debug(
                "Adding %d songs from cluster %d (%s)",
                music_cluster.count, music_cluster.cluster_id, music_cluster.mood_category
            )

            for _ in range(music_cluster.count):
                _, formatted_song = self._select_song_from_cluster(music_cluster.cluster_id)
                playlist.append(formatted_song)

        # Optional shuffle
        if shuffle:
            random.shuffle(playlist)

        logger.info("Generated playlist with %d songs", len(playlist))
        return playlist

def get_playlist(shuffle: bool = True) -> List[str]:
    """
    Generate a music playlist based on current detected emotion.

    This function detects the user's emotion from a facial image and creates
    a personalized playlist that matches their emotional state. Uses sophisticated
    clustering algorithms to map emotions to music categories.

    Args:
        shuffle: Whether to shuffle the playlist (default: True)

    Returns:
        List of song filenames (format: "###.mp3_songname")

    Raises:
        RuntimeError: If emotion detection or playlist generation fails.
            Any underlying error (e.g. a missing image or song database)
            is wrapped and chained as the cause.

    Example:
        >>> playlist = get_playlist()
        >>> print(f"Generated {len(playlist)} songs")
        Generated 21 songs
    """
    try:
        # Detect current emotion
        current_emotion = get_emotion()

        # Generate playlist
        generator = PlaylistGenerator()
        return generator.generate_playlist(current_emotion, shuffle=shuffle)

    except Exception as e:
        # logger.exception captures the traceback; `from e` preserves the
        # original exception as __cause__ so the root cause stays visible
        logger.exception("Playlist generation failed")
        raise RuntimeError(f"Failed to generate playlist: {str(e)}") from e

def get_emotion_grid():
data = np.full((5,10), 81)
Expand Down