-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathrealtime_monitoring.py
More file actions
119 lines (92 loc) · 3.57 KB
/
realtime_monitoring.py
File metadata and controls
119 lines (92 loc) · 3.57 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
import joblib
import numpy as np
import pandas as pd
import time
# --- Configuration ---
MODEL_PATH = 'pv_fault_model.joblib'  # trained classifier artifact (written by the training script)
SCALER_PATH = 'pv_scaler.joblib'  # fitted feature scaler artifact (written by the training script)
# Feature column names the scaler/model were fitted with: one current
# sample per point of the 50-point I-V sweep (I_V1 .. I_V50).
FEATURE_NAMES = [f'I_V{i + 1}' for i in range(50)]
def load_system():
    """Load the trained fault model and feature scaler from disk.

    Returns:
        tuple: ``(model, scaler)`` as persisted by the training script.

    Raises:
        SystemExit: if either artifact file is missing.
    """
    try:
        model = joblib.load(MODEL_PATH)
        scaler = joblib.load(SCALER_PATH)
    except FileNotFoundError:
        print("❌ Error: Files not found. Run training script first.")
        # `exit()` is injected by the `site` module for interactive use and
        # is not guaranteed to exist (e.g. under `python -S`).  Raising
        # SystemExit with a non-zero code is the portable equivalent and
        # lets wrappers detect the failure.
        raise SystemExit(1)
    print("✅ System loaded successfully.")
    return model, scaler
def get_complex_reading():
"""
Generates realistic, messy, and sometimes 'ambiguous' data.
"""
# 1. Base Shapes (The 'Perfect' scenarios)
# Normal: Rising from 0 to 7
curve_normal = np.linspace(0, 7, 50)
# Fault: Falling from 3.15 to 0 (based on your dataset)
part1 = np.full(20, 3.15)
part2 = np.linspace(3.15, 0, 30)
curve_fault = np.concatenate([part1, part2])
# 2. Decide Scenario
# 70% Normal, 20% Clear Fault, 10% "Ambiguous/Transition"
scenario_roll = np.random.rand()
if scenario_roll < 0.70:
# --- NORMAL ---
# Add random variation to the slope (sometimes steep, sometimes flat)
slope_var = np.random.uniform(0.8, 1.2)
base = curve_normal * slope_var
true_label = "NORMAL"
elif scenario_roll < 0.90:
# --- CLEAR FAULT ---
# Add variation to the drop depth
depth_var = np.random.uniform(0.8, 1.2)
base = curve_fault * depth_var
true_label = "FAULT"
else:
# --- AMBIGUOUS (The "Fishy" Fix) ---
# Mix the Normal and Fault curves together!
# This simulates a fault that is just starting or clearing up.
# mix_ratio 0.5 means it looks 50% like a fault, 50% like normal.
mix_ratio = np.random.uniform(0.3, 0.7)
base = (mix_ratio * curve_fault) + ((1 - mix_ratio) * curve_normal)
true_label = "AMBIGUOUS"
# 3. Add Realistic Noise
# Sometimes the sensor is clean (0.05), sometimes very noisy (0.5)
noise_level = np.random.uniform(0.05, 0.5)
noise = np.random.normal(0, noise_level, 50)
reading = base + noise
return reading, true_label
def monitor_system():
    """Run the continuous monitoring loop until interrupted with Ctrl+C."""
    model, scaler = load_system()
    print("\n--- STARTING REAL-TIME MONITORING (ENHANCED) ---")
    print("Press Ctrl+C to stop.\n")
    try:
        while True:
            # Acquire one simulated sweep along with its ground truth.
            sample, truth = get_complex_reading()

            # Wrap in a DataFrame so the scaler sees the same feature
            # names it was fitted with, then scale and classify.
            features = pd.DataFrame([sample], columns=FEATURE_NAMES)
            class_probs = model.predict_proba(scaler.transform(features))[0]

            # Predicted class = highest-probability entry; report its
            # probability as a percentage confidence.
            winner = np.argmax(class_probs)
            confidence = class_probs[winner] * 100

            # Index 1 is treated as the fault class, 0 as normal —
            # TODO confirm against the training script's label encoding.
            if winner == 1:
                status, marker = "FAULT DETECTED!", "🔴"
            else:
                status, marker = "System Normal", "🟢"

            # Flag predictions the model itself is unsure about.
            shown_conf = f"{confidence:.1f}%"
            if confidence < 80:
                shown_conf = f"⚠️ {shown_conf} (UNCERTAIN)"

            stamp = time.strftime("%H:%M:%S")
            print(f"[{stamp}] {marker} {status} | Conf: {shown_conf} | Truth: {truth}")
            time.sleep(2)
    except KeyboardInterrupt:
        print("\nMonitoring stopped.")
# Entry point: start the monitoring loop only when run as a script,
# not when imported as a module.
if __name__ == "__main__":
    monitor_system()