From 30ce3333cfe9260415014abb3e65f3d2defb0391 Mon Sep 17 00:00:00 2001
From: gonzayb
Date: Tue, 1 Jul 2025 21:57:10 -0300
Subject: [PATCH] Implemented elevator data service and tests for ML

---
 app/__pycache__/elevator_api.cpython-310.pyc | Bin 0 -> 11575 bytes
 app/elevator_api.py                          | 347 +++++++++++++++++
 elevator_data.db                             | Bin 0 -> 36864 bytes
 .../test.cpython-310-pytest-8.4.1.pyc        | Bin 0 -> 20942 bytes
 test/test.py                                 | 348 ++++++++++++++++++
 5 files changed, 695 insertions(+)
 create mode 100644 app/__pycache__/elevator_api.cpython-310.pyc
 create mode 100644 app/elevator_api.py
 create mode 100644 elevator_data.db
 create mode 100644 test/__pycache__/test.cpython-310-pytest-8.4.1.pyc
 create mode 100644 test/test.py

diff --git a/app/__pycache__/elevator_api.cpython-310.pyc b/app/__pycache__/elevator_api.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..26d9b83177bce292b382a96d8d5425756458715b
GIT binary patch
literal 11575
[base85 binary data omitted]
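The opening of app/elevator_api.py (imports, Flask setup, and the schema-creation half of ElevatorDataService) is not legible in this copy of the patch. Judging from the calls that survive below and from the schema the tests recreate, it presumably looks roughly like the following sketch; the DATABASE constant, the init_database method name, and the exact table definitions are assumptions rather than recovered code.

# Presumed opening of app/elevator_api.py (assumed, not part of the recovered patch)
from flask import Flask, request, jsonify
import sqlite3
from datetime import datetime, timedelta
from typing import Dict, List

app = Flask(__name__)
DATABASE = 'elevator_data.db'  # assumed name, matching the committed DB file

class ElevatorDataService:
    def __init__(self, db_path: str = DATABASE):
        self.db_path = db_path
        self.init_database()  # assumed: creates tables/view if missing

    def get_connection(self):
        # Rows are later read by column name (elevator['min_floor'], dict(row)),
        # so the row factory is almost certainly sqlite3.Row.
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        return conn

    def init_database(self):
        # Presumably mirrors the schema the tests create: buildings, elevators,
        # demand_events, elevator_states, plus the ml_training_data view whose
        # tail survives in the diff below.
        ...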
diff --git a/app/elevator_api.py b/app/elevator_api.py
new file mode 100644
--- /dev/null
+++ b/app/elevator_api.py
@@ -0,0 +1,347 @@
[... opening lines of app/elevator_api.py missing; the diff resumes inside the CREATE VIEW ml_training_data statement ...]
+                AND de.request_time > es.timestamp
+                AND de.id = (
+                    SELECT MIN(de3.id)
+                    FROM demand_events de3
+                    WHERE de3.elevator_id = es.elevator_id
+                    AND de3.request_time > es.timestamp);
+        """)
+
+    #Add test data for immediate testing
+    def seed_test_data(self):
+        conn = self.get_connection()
+        cursor = conn.cursor()
+
+        #Check whether data has already been seeded
+        cursor.execute("SELECT COUNT(*) FROM buildings")
+        #If already seeded, return early
+        if cursor.fetchone()[0] > 0:
+            conn.close()
+            return
+        #Add a test building and elevator
+        cursor.execute("""
+            INSERT INTO buildings (id, name, total_floors) VALUES (1, 'Yambay Tower', 10)
+        """)
+
+        cursor.execute("""
+            INSERT INTO elevators (id, building_id, name, min_floor, max_floor) VALUES (1, 1, 'Benitez Building', 1, 10)
+        """)
+
+        conn.commit()
+        conn.close()
+        print("Test data seeded: Building 1 with Elevator 1 (floors 1-10)")
+
+    #Define peak hours based on business rules
+    def is_peak_hour(self, hour: int, day_of_week: int) -> bool:
+        #Weekday morning (7-9) and evening (5-7 pm) rush, per local business hours
+        if day_of_week in [0, 1, 2, 3, 4]:  #Monday [0] to Friday [4]
+            return hour in [7, 8, 17, 18]
+        #Weekend lunch
+        elif day_of_week in [5, 6]:  #Saturday [5] and Sunday [6]
+            return hour in [12, 13]  #lunch
+        return False
+
+    #Save a demand event when someone calls the elevator
+    def record_demand(self, elevator_id: int, requested_floor: int, request_time: datetime = None) -> Dict:
+        if request_time is None:
+            request_time = datetime.now()
+        #weekday(): Monday is 0
+        day_of_week = request_time.weekday()
+        hour_of_day = request_time.hour
+        is_peak = self.is_peak_hour(hour_of_day, day_of_week)
+        conn = self.get_connection()
+        cursor = conn.cursor()
+
+        cursor.execute("""
+            INSERT INTO demand_events
+            (elevator_id, requested_floor, request_time, day_of_week, hour_of_day, is_peak_hour) VALUES (?, ?, ?, ?, ?, ?)
+ """, (elevator_id, requested_floor, request_time, day_of_week, hour_of_day, is_peak)) + demand_id = cursor.lastrowid + conn.commit() + conn.close() + + return {'demand_id': demand_id, + 'elevator_id': elevator_id, + 'requested_floor': requested_floor, + 'is_peak_hour': is_peak, + 'timestamp': request_time.isoformat() + } + #Saves elevator state changes when it moves or rests + def record_elevator_state(self, elevator_id: int, floor: int, state: str, passenger_count: int = 0, previous_floor: int = None, timestamp: datetime = None) -> Dict: + if timestamp is None: + timestamp = datetime.now()#ojo + + conn = self.get_connection() + cursor = conn.cursor() + + #Validate state transitions + if state not in ['resting', 'moving', 'occupied']: + raise ValueError(f"Invalid state: {state}") + + #Validate elevator bounds + cursor.execute("SELECT min_floor, max_floor FROM elevators WHERE id = ?", (elevator_id,)) + elevator = cursor.fetchone() + if not elevator: + raise ValueError(f"Elevator {elevator_id} not found") + + if floor < elevator['min_floor'] or floor > elevator['max_floor']: + raise ValueError(f"Floor {floor} out of bounds for elevator {elevator_id}") + + cursor.execute(""" + INSERT INTO elevator_states (elevator_id, floor, state, passenger_count, timestamp, previous_floor) VALUES (?, ?, ?, ?, ?, ?) + """, (elevator_id, floor, state, passenger_count, timestamp, previous_floor)) + + state_id = cursor.lastrowid + conn.commit() + conn.close() + + return { + 'state_id': state_id, + 'elevator_id': elevator_id, + 'floor': floor, + 'state': state, + 'timestamp': timestamp.isoformat()} + +#Gets ML data + def get_ml_training_data(self, elevator_id: int = None, start_date: datetime = None, end_date: datetime = None) -> List[Dict]: + conn = self.get_connection() + cursor = conn.cursor() + + query = "SELECT * FROM ml_training_data WHERE 1=1" + params = [] + if elevator_id: + query += " AND elevator_id = ?" + params.append(elevator_id) + if start_date: + query += " AND rest_start_time >= ?" + params.append(start_date.strftime('%Y-%m-%d %H:%M:%S')) + + if end_date: + query += " AND rest_start_time <= ?" + params.append(end_date.strftime('%Y-%m-%d %H:%M:%S')) + + + cursor.execute(query, params) + rows = cursor.fetchall() + conn.close() + return [dict(row) for row in rows] + + def get_demand_analytics(self, elevator_id: int, days: int = 7) -> Dict: + conn = self.get_connection() + cursor = conn.cursor() + + start_date = datetime.now() - timedelta(days=days) + + #floor popularity + cursor.execute(""" + SELECT requested_floor, COUNT(*) as demand_count + FROM demand_events + WHERE elevator_id = ? + AND request_time >= ? + GROUP BY requested_floor + ORDER BY demand_count DESC + """, (elevator_id, start_date)) + + floor_popularity = [dict(row) for row in cursor.fetchall()] + + + cursor.execute(""" + SELECT + is_peak_hour, + AVG(CAST(hour_of_day AS FLOAT)) as avg_hour, + COUNT(*) as total_demands + FROM demand_events + WHERE elevator_id = ? + AND request_time >= ? 
+            GROUP BY is_peak_hour
+        """, (elevator_id, start_date))
+
+        peak_analysis = [dict(row) for row in cursor.fetchall()]
+        conn.close()
+
+        return {'elevator_id': elevator_id,
+                'analysis_period_days': days,
+                'floor_popularity': floor_popularity,
+                'peak_hour_analysis': peak_analysis}
+
+
+service = ElevatorDataService(DATABASE)
+
+#HTTP endpoints
+#Record a demand event
+@app.route('/elevators/<int:elevator_id>/demand', methods=['POST'])
+def record_demand(elevator_id):
+    data = request.get_json()
+    if 'requested_floor' not in data:
+        return jsonify({'error': 'requested_floor is required'}), 400
+
+    try:
+        request_time = None
+        if 'request_time' in data:
+            request_time = datetime.fromisoformat(data['request_time'])
+        result = service.record_demand(
+            elevator_id=elevator_id,
+            requested_floor=data['requested_floor'],
+            request_time=request_time
+        )
+        return jsonify(result), 201
+
+    except Exception as e:
+        return jsonify({'error': str(e)}), 400
+
+#Record an elevator state change
+@app.route('/elevators/<int:elevator_id>/state', methods=['POST'])
+def record_state(elevator_id):
+    """Record elevator state change"""
+    data = request.get_json()
+
+    required_fields = ['floor', 'state']
+    for field in required_fields:
+        if field not in data:
+            return jsonify({'error': f'{field} is required'}), 400
+
+    try:
+        result = service.record_elevator_state(
+            elevator_id=elevator_id,
+            floor=data['floor'],
+            state=data['state'],
+            passenger_count=data.get('passenger_count', 0),
+            previous_floor=data.get('previous_floor')
+        )
+        return jsonify(result), 201
+
+    except Exception as e:
+        return jsonify({'error': str(e)}), 400
+
+#Return ML training data
+@app.route('/training-data', methods=['GET'])
+def get_training_data():
+    elevator_id = request.args.get('elevator_id', type=int)
+
+    start_date = request.args.get('start_date')
+    end_date = request.args.get('end_date')
+    try:
+        start = datetime.fromisoformat(start_date) if start_date else None
+        end = datetime.fromisoformat(end_date) if end_date else None
+
+        data = service.get_ml_training_data(elevator_id, start, end)
+
+        return jsonify({'count': len(data), 'data': data})
+
+    except Exception as e:
+        return jsonify({'error': str(e)}), 400
+
+#Return demand analytics
+@app.route('/elevators/<int:elevator_id>/analytics', methods=['GET'])
+def get_analytics(elevator_id):
+    days = request.args.get('days', default=7, type=int)
+    try:
+        analytics = service.get_demand_analytics(elevator_id, days)
+        return jsonify(analytics)
+    except Exception as e:
+        return jsonify({'error': str(e)}), 400
+
+#Health check
+@app.route('/health', methods=['GET'])
+def health_check():
+    return jsonify({'status': 'healthy', 'timestamp': datetime.now().isoformat()})
+
+if __name__ == '__main__':
+    app.run(debug=True, port=2025)
\ No newline at end of file
diff --git a/elevator_data.db b/elevator_data.db
new file mode 100644
index 0000000000000000000000000000000000000000..c81e7789f895f002f6665e0a49b7efae6f978898
GIT binary patch
literal 36864
[base85 binary data omitted]
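For reference, the endpoints above can be exercised with a small client along these lines. The base URL and port come from the __main__ block; the requests dependency and the example payload values are assumptions used only for illustration.

# Illustrative client sketch; assumes the service is running locally on port 2025
# and that the third-party `requests` package is installed.
import requests

BASE = "http://localhost:2025"

# Someone on floor 5 calls elevator 1.
print(requests.post(f"{BASE}/elevators/1/demand", json={"requested_floor": 5}).json())

# The elevator later comes to rest on floor 3.
print(requests.post(f"{BASE}/elevators/1/state",
                    json={"floor": 3, "state": "resting", "previous_floor": 5}).json())

# Pull the rows the ML pipeline would train on.
print(requests.get(f"{BASE}/training-data", params={"elevator_id": 1}).json())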
diff --git a/test/__pycache__/test.cpython-310-pytest-8.4.1.pyc b/test/__pycache__/test.cpython-310-pytest-8.4.1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5a4d3fd741569fcf720418c37b3d9f62993b33e5
GIT binary patch
literal 20942
[base85 binary data omitted]
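The ml_training_data view is the hand-off point to the ML side: each row pairs a resting position with the first demand that followed it. A naive baseline can be built straight from those rows as a sanity check; the sketch below uses only the standard library, and the helper name is my own choice rather than anything defined in the patch.

# Baseline sketch over ml_training_data rows (illustrative only).
import sqlite3
from collections import Counter, defaultdict

def build_baseline(db_path="elevator_data.db"):
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    rows = conn.execute(
        "SELECT current_resting_floor, is_peak_hour, next_demand_floor FROM ml_training_data"
    ).fetchall()
    conn.close()

    counts = defaultdict(Counter)
    for r in rows:
        counts[(r["current_resting_floor"], r["is_peak_hour"])][r["next_demand_floor"]] += 1

    # Map each (resting floor, peak flag) to the floor demanded most often afterwards.
    return {key: c.most_common(1)[0][0] for key, c in counts.items()}

# Usage: baseline = build_baseline(); baseline.get((3, 1)) -> most likely next floor.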
z`GI$&t_P$pwWqG1d`Ifqfw~G>0(BMa+&17>D{m*T>V~lDiuRCQycAg(wujNz-;?^^ zV-I5n-^(wYLG8iLszQpNM~bwSzoo#s@jBue#<8*v5P~7`(}(ML5MB~681s_?25tgt2kbAi*H5|@0oWbjcJZl+YnHcyxv3ZoD9Eyr5f^-u*b^5{vBO`Y@31h4~f|m&dv?!b!^}8o|Aj^)wJc`VE#Upy4rA z9k2!L7X%T+?~hX}p_XBE@y0CkALz+?(162EsIWsJL%?L{XKtj+LyPu(J>^IXh>5&Z{v8<^*NL&I)tNTdCEwK<+laBQHeW z66<1g}GF@7~n_= z>8NQ?e{#~@0=vAWN0%X&F>D51R^^%;4uy=46Haci1q24`+4_eDHdOs)e^Nke5IWzLRDPf2T|0KgryUJhULcF?RxaFap{Di zSqq^w?}XC#i7_FsZPT`U94u|QwFS8=p)|ms(Dwa781(_JJt}Cfz9!TzO4|j#BdP+8 zV@H@CRHlnL2)dxF9l_1X0Aay}1smtO7!!>JymiI8Na003HW?5F8Epmw3=VyG!GMkx zF|IJU8;pQ*Mwy43%}%O?T?H2%K*ZBWN6^8inIH@ijB9)+#)W0Zy5uTQpvTPcfJXTqutLm-DXL2jBz|zk)XpA9|^qOOYaH2D_PI4B$p~q*QEk_Na zDf(A5=j#`otSpLJqmFgv88Yxq9s`3c!{^f1( zeeQb4RNHmmBZ6bz`6F9qlaAstKM#_I;fc&t_JVH0Ctx8`QEbQn`gCh*T>*ntY1do; z3)Rx0y_3`QREDjBtOt^V+2bDSXWTb_jZh|99U#@0l;e|ZKuUvoe`S|+5sjGwrAczUbZWXb`@>5kNZ?? z0l^*b1f0Xd5_1|Xt#(ed%)I5+3OzLh>IkRO+86;REaM55=+*SydtZ~Rp)H;@mz;I> z&C$IRdk^fMnA|;iV0Plr6ZDa5xJ(GJ}DJ>f@ zP+JI&l#8yy8_G25!LPE`()8}$F*jgRP^#Lkrm5eK2Gl;+PZW+0y+lGf|l!0&OCaB%@BJY z!Clp7L=A`uy~=w5Hwms`QHvhI_k_^vF2n$?1iDapNP0WG03x8Aq(`sxUh2?m3aQ}B z6rh|Ay=Fki(FK5z6&$93MG%K6KuJPHR{&=~udgC+6p9+@74XNSSJvt(V*qHVwcyb! zm9P%Iy0s3VSD~We8H3=~_;m#J&ti;R@UhBwGWTC*>?;hoNS7bz*$XYz>4AHBscz_+ z6W8feAp0Azm-u_3-o_=AlAhg>a^CnYmi$!)n>Ai=^%}p;Kvv&v#(I|9dj_nKu8I2} zk;9~DE<|5fb6jHMn^|Jr0@hz-U%&uvAS@_Ck~fLm*Ao|KJZ8C`xY&>HuDEy{v3EvX z6orY?gx+Q1pZ40tQTkkRVEBi%$K8RIt?P)Zo93}8YRY+B7)R24F1%UP zL{FHE77LNV>t4I=J!*P$@V4;1$fK?PNPGj|Qx+Vod$#uPIiyorGu7Z!~L@q(m9YR|K)oJ%bwdO8M z54pl{Hioj}kaG8MOoU)}%(!;4Iw?a;^ba6*;qU&Ie`7~Lxh;NaGq}uP7Xt&q@{T>7 z-G$wRHy`Riy-Hm>Jh}GQOt&G)!Y7=jt$~P4gwCrhLe%L`Fm{~*P0z0W&(^@0B|10h z+KIws|F(o2t11t>8aWl?w}s7Y7C!R@oTqzsVOu|(~oTQuO+Cb{L$a0=z@H_({rW1_KFraP(O!$Y4 zzYR>-v0c3#1?C*v&_f!-0xR9_RcaUY+NJ+SDltA}l-CuK0=&>39xOV8*=EW)$ z=NNpRL5;yLGmuut82cK7-)Ha!gFj;M4F=tINla`ZU%-cN;ga1i*VdGXXOu)9R|;1) zqvWy%Q?aVhUldPAaG8%H7;JB%?F06rz~OJ$jZ)4q`+5v4;&{{2FK|C1WH^eWyfiM> zXnqpGt27r4IL^IX=GrBt5)f?eK=+F{(!G#|9hq)n`?1bJpSR%m!?nvR@b&5L*Ho_LAyfdiEvo46gp$v5gLOImNIeRO!tO|z?n91#K}L}x&9$w=D3T);7slxnCdIu zKiE6jf9g(hcFtGzN!&=7i?AL|~|W(jvQjGZZg9PexG;NJCWtDR%} z8xMZay?0ZmYuS;&SEV1n-E2sVpY?-(|};e={n{0Eau z=U^{S;|_GQSvOmERFGA?Niat19?52Vx@Tzplf}r|=dm-m#%|`jIGW8`-@W76ULON& zVco6-7{R(}f}}q<(Y-sa&8ENQk!?w|{;_R--ptW$rn@J{w^`68k8m^TT^{2OWC8OU zbd;MZHglYt={9?$n`!^Q$GVyKZjW{|&j2>W*PXvUxia` zWEf-_(;+>qL-dWz>himlZSk5#H{h6fTi}qj=WUVb zIFaJO8C^)B&l<-NgK^8?wEK03Op*zi&C7I_L^;U&P|4d8Ec(T!*o zj*uh(#9+1&Pk8Ka+#rPYgb%@Er>g z3w#za@wLbD#jgb*js@wRy2{C^tIoDW9$yP~a91qrnLY9|yYELRwz|Bh zpMeYYtXJMlIkmn;hJ7@?&ET)X4V5N~aQPE_coUb9pXcfGY%Xc=J)Mpg@$)2=%Cfrq zO8EgN)j7H?=PjM>756yLxO+AflZ2mQc?HH49NhL_@w}S6bD53^Ly?lSt+-N>l1aS1 zocvO=Zn3}P+|W0~nDiyKBx=IL^iP>i)<&_{9YzGIxZ{Xtl9-F64>CATjNM>&_$w;# ylFyQm?TQrA}2>pVY-U&pp5)nVNc<3syd>??))EgZTPJK literal 0 HcmV?d00001 diff --git a/test/test.py b/test/test.py new file mode 100644 index 0000000..71e33b2 --- /dev/null +++ b/test/test.py @@ -0,0 +1,348 @@ +import sqlite3 +import tempfile +import pytest + +import os +from datetime import datetime, timedelta +from unittest.mock import patch + +import json + +import sys +sys.path.append('.') +from app.elevator_api import ElevatorDataService, app +#Creates a temp DB +class TestElevatorDataService: + @pytest.fixture + def service(self): + db_fd, db_path = tempfile.mkstemp()# + os.close(db_fd) + + # Create test schema + conn = sqlite3.connect(db_path) + conn.executescript(""" + CREATE TABLE elevators ( + id INTEGER PRIMARY KEY, + building_id INTEGER NOT NULL, + name VARCHAR(50) NOT NULL, + max_capacity INTEGER NOT NULL DEFAULT 10, + min_floor 
+                max_floor INTEGER NOT NULL,
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            );
+
+            CREATE TABLE demand_events (
+                id INTEGER PRIMARY KEY,
+                elevator_id INTEGER NOT NULL,
+                requested_floor INTEGER NOT NULL,
+                request_time TIMESTAMP NOT NULL,
+                day_of_week INTEGER NOT NULL,
+                hour_of_day INTEGER NOT NULL,
+                is_peak_hour BOOLEAN NOT NULL DEFAULT FALSE,
+                weather_condition VARCHAR(20),
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            );
+
+            CREATE TABLE elevator_states (
+                id INTEGER PRIMARY KEY,
+                elevator_id INTEGER NOT NULL,
+                floor INTEGER NOT NULL,
+                state VARCHAR(20) NOT NULL,
+                passenger_count INTEGER DEFAULT 0,
+                timestamp TIMESTAMP NOT NULL,
+                previous_floor INTEGER
+            );
+
+            CREATE VIEW ml_training_data AS
+            SELECT
+                es.elevator_id,
+                es.floor as current_resting_floor,
+                es.timestamp as rest_start_time,
+                de.requested_floor as next_demand_floor,
+                de.request_time as next_demand_time,
+                (julianday(de.request_time) - julianday(es.timestamp)) * 24 * 60 as minutes_until_demand,
+                de.day_of_week,
+                de.hour_of_day,
+                de.is_peak_hour,
+                ABS(de.requested_floor - es.floor) as distance_to_demand,
+                10 as max_floor,
+                1 as min_floor
+            FROM elevator_states es
+            JOIN demand_events de ON de.elevator_id = es.elevator_id
+            WHERE es.state = 'resting'
+            AND de.request_time > es.timestamp
+            AND de.id = (
+                SELECT MIN(de3.id)
+                FROM demand_events de3
+                WHERE de3.elevator_id = es.elevator_id
+                AND de3.request_time > es.timestamp
+            );
+
+            -- Test data
+            INSERT INTO elevators (id, building_id, name, min_floor, max_floor)
+            VALUES (1, 1, 'Main Elevator', 1, 10);
+        """)
+        conn.commit()
+        conn.close()
+
+        service = ElevatorDataService(db_path)
+        yield service
+
+        os.unlink(db_path)
+
+    #Tests that peak-hour detection works
+    def test_peak_hour_detection(self, service):
+        #Weekday peak
+        assert service.is_peak_hour(8, 1) == True  #Tuesday 8 am
+        assert service.is_peak_hour(17, 4) == True  #Friday 5 pm
+        #Not peak
+        assert service.is_peak_hour(10, 2) == False  #Wednesday 10 am
+        assert service.is_peak_hour(20, 3) == False  #Thursday 8 pm
+        #Weekend peak hour
+        assert service.is_peak_hour(12, 6) == True  #Sunday lunch
+        assert service.is_peak_hour(8, 6) == False  #Sunday morning is not peak
+
+    #Test demand recording
+    def test_record_demand_success(self, service):
+        test_time = datetime(2025, 1, 15, 8, 30)  #Wednesday 8:30 am
+
+        result = service.record_demand(elevator_id=1, requested_floor=5, request_time=test_time)
+        assert result['elevator_id'] == 1
+        assert result['requested_floor'] == 5
+        assert result['is_peak_hour'] == True  #Weekday morning rush, so peak
+        assert 'demand_id' in result
+
+    #State recording success
+    def test_record_elevator_state_success(self, service):
+        result = service.record_elevator_state(elevator_id=1, floor=3, state='resting', passenger_count=0, previous_floor=2)
+        assert result['elevator_id'] == 1
+        assert result['floor'] == 3
+        assert result['state'] == 'resting'
+        assert 'state_id' in result
+
+    #Validation tests for states
+    def test_elevator_state_validation(self, service):
+        #Not a valid state
+        with pytest.raises(ValueError, match="Invalid state"):
+            service.record_elevator_state(1, 3, 'invalid_state')
+
+        #Above the maximum floor
+        with pytest.raises(ValueError, match="out of bounds"):
+            service.record_elevator_state(1, 15, 'resting')  #max 10
+        #Below the minimum floor
+        with pytest.raises(ValueError, match="out of bounds"):
+            service.record_elevator_state(1, 0, 'resting')  #min 1
+
+    #Test the ML training-data format
+    def test_ml_training_data_format(self, service):
+        #Elevator rests on floor 3, then demand comes for floor 7
+        rest_time = datetime(2025, 1, 15, 8, 0)
+        demand_time = datetime(2025, 1, 15, 8, 5)  #5 min later
+
+        #Save the resting state
+        service.record_elevator_state(1, 3, 'resting', timestamp=rest_time)
+        #Demand 5 minutes later
+        service.record_demand(1, 7, demand_time)
+
+        #Get the training data
+        training_data = service.get_ml_training_data(elevator_id=1)
+
+        assert len(training_data) == 1  # Should have one record
+        record = training_data[0]
+
+        # Check key ML features
+        assert record['current_resting_floor'] == 3
+        assert record['next_demand_floor'] == 7
+        assert record['distance_to_demand'] == 4  #3 to 7 is 4 floors
+        assert record['is_peak_hour'] == 1  #True
+        assert abs(record['minutes_until_demand'] - 5) < 1  #about 5 minutes until demand
+
+    def test_demand_analytics(self, service):
+        floors = [1, 2, 2, 3, 3, 3]  #Floor 3 most popular (3 times)
+        for floor in floors:
+            service.record_demand(1, floor)  #Record every floor for elevator 1
+
+        analytics = service.get_demand_analytics(1, days=1)
+
+        assert analytics['elevator_id'] == 1
+        assert len(analytics['floor_popularity']) == 3
+        #Most popular floor should be first
+        most_popular = analytics['floor_popularity'][0]
+        assert most_popular['requested_floor'] == 3
+        assert most_popular['demand_count'] == 3
+
+    #Tests for filtering data by date
+    def test_data_filtering_by_date(self, service):
+        old_time = datetime(2025, 1, 1, 10, 0)
+        recent_time = datetime(2025, 1, 15, 10, 0)
+        #Old
+        service.record_elevator_state(1, 2, 'resting', timestamp=old_time)
+        service.record_demand(1, 5, old_time + timedelta(seconds=1))
+        #Recent
+        service.record_elevator_state(1, 4, 'resting', timestamp=recent_time)
+        service.record_demand(1, 8, recent_time + timedelta(seconds=1))
+        #Filter for recent data only
+        start_date = datetime(2025, 1, 10)
+        training_data = service.get_ml_training_data(elevator_id=1, start_date=start_date)
+
+        #Should only get the recent record
+        assert len(training_data) == 1
+        assert training_data[0]['next_demand_floor'] == 8
+
+
+class TestAPIEndpoints:
+    @pytest.fixture
+    def client(self):  #Creates a Flask test client
+        app.config['TESTING'] = True
+        with app.test_client() as client:
+            yield client
+
+    @patch('app.elevator_api.service')
+    def test_record_demand_endpoint(self, mock_service, client):
+        mock_service.record_demand.return_value = {'demand_id': 1, 'elevator_id': 1,
+            'requested_floor': 5, 'is_peak_hour': True,
+            'timestamp': '2025-01-15T08:30:00'}
+
+        response = client.post('/elevators/1/demand',
+            json={'requested_floor': 5})
+
+        assert response.status_code == 201
+        data = json.loads(response.data)
+        assert data['requested_floor'] == 5
+        assert data['elevator_id'] == 1
+
+    @patch('app.elevator_api.service')
+    def test_record_demand_validation(self, mock_service, client):
+        #Missing required field should return an error
+        response = client.post('/elevators/1/demand', json={})
+
+        assert response.status_code == 400
+        data = json.loads(response.data)
+        assert 'requested_floor is required' in data['error']
+
+    @patch('app.elevator_api.service')
+    def test_record_state_endpoint(self, mock_service, client):
+        #State recording
+        mock_service.record_elevator_state.return_value = {'state_id': 1, 'elevator_id': 1, 'floor': 3,
+            'state': 'resting', 'timestamp': '2025-01-15T08:30:00'}
+
+        response = client.post('/elevators/1/state', json={'floor': 3, 'state': 'resting'})
+
+        assert response.status_code == 201
+        data = json.loads(response.data)
+        assert data['floor'] == 3
+        assert data['state'] == 'resting'
+
+    #Test the ML training data endpoint
+    @patch('app.elevator_api.service')
+    def test_training_data_endpoint(self, mock_service, client):
+        mock_service.get_ml_training_data.return_value = [{'current_resting_floor': 3,
+            'next_demand_floor': 7, 'distance_to_demand': 4, 'is_peak_hour': 1, 'minutes_until_demand': 5.2}]
+        response = client.get('/training-data?elevator_id=1')
+
+        assert response.status_code == 200
+        data = json.loads(response.data)
+        assert data['count'] == 1
+        assert len(data['data']) == 1
+        assert data['data'][0]['distance_to_demand'] == 4
+
+    #Health check endpoint
+    def test_health_check(self, client):
+        response = client.get('/health')
+        assert response.status_code == 200
+        data = json.loads(response.data)
+        assert data['status'] == 'healthy'
+        assert 'timestamp' in data
+
+#Tests data integrity
+class TestDataIntegrity:
+    @pytest.fixture
+    def service(self):
+        db_fd, db_path = tempfile.mkstemp()
+        os.close(db_fd)
+        try:
+            conn = sqlite3.connect(db_path)
+            #Create a test schema with minimal data
+            conn.executescript("""
+                CREATE TABLE elevators (
+                    id INTEGER PRIMARY KEY,
+                    building_id INTEGER NOT NULL,
+                    name VARCHAR(50) NOT NULL,
+                    min_floor INTEGER NOT NULL DEFAULT 1,
+                    max_floor INTEGER NOT NULL
+                );
+
+                CREATE TABLE demand_events (
+                    id INTEGER PRIMARY KEY,
+                    elevator_id INTEGER NOT NULL,
+                    requested_floor INTEGER NOT NULL,
+                    request_time TIMESTAMP NOT NULL,
+                    day_of_week INTEGER NOT NULL,
+                    hour_of_day INTEGER NOT NULL,
+                    is_peak_hour BOOLEAN NOT NULL DEFAULT FALSE
+                );
+
+                CREATE TABLE elevator_states (
+                    id INTEGER PRIMARY KEY,
+                    elevator_id INTEGER NOT NULL,
+                    floor INTEGER NOT NULL,
+                    state VARCHAR(20) NOT NULL,
+                    passenger_count INTEGER DEFAULT 0,
+                    timestamp TIMESTAMP NOT NULL,
+                    previous_floor INTEGER
+                );
+                CREATE VIEW ml_training_data AS
+                SELECT
+                    es.elevator_id,
+                    es.floor as current_resting_floor,
+                    es.timestamp as rest_start_time,
+                    de.requested_floor as next_demand_floor,
+                    de.request_time as next_demand_time,
+                    (julianday(de.request_time) - julianday(es.timestamp)) * 24 * 60 as minutes_until_demand,
+                    de.day_of_week,
+                    de.hour_of_day,
+                    de.is_peak_hour,
+                    ABS(de.requested_floor - es.floor) as distance_to_demand,
+                    10 as max_floor,
+                    1 as min_floor
+                FROM elevator_states es
+                JOIN demand_events de ON de.elevator_id = es.elevator_id
+                WHERE es.state = 'resting'
+                AND de.request_time > es.timestamp
+                AND de.id = (
+                    SELECT MIN(de3.id)
+                    FROM demand_events de3
+                    WHERE de3.elevator_id = es.elevator_id
+                    AND de3.request_time > es.timestamp
+                );
+
+                INSERT INTO elevators VALUES (1, 1, 'Test', 1, 10);
+            """)
+            conn.commit()
+            conn.close()
+
+            service = ElevatorDataService(db_path)
+            yield service
+        finally:
+            #Clean up the temp DB
+            if os.path.exists(db_path):
+                os.unlink(db_path)
+
+    #Concurrent demand recording test
+    def test_concurrent_demand_recording(self, service):
+        demands = []
+        for i in range(10):
+            result = service.record_demand(1, i % 10 + 1)
+            demands.append(result['demand_id'])
+        assert len(set(demands)) == 10  #unique ids
+
+    #Record a sequence of states
+    def test_state_transition_logging(self, service):
+        states = [(1, 'resting'), (3, 'moving'), (5, 'occupied'), (5, 'resting')]
+
+        state_ids = []
+        for floor, state in states:
+            #Record each state
+            result = service.record_elevator_state(1, floor, state)
+            state_ids.append(result['state_id'])
+
+        # Check that all states were recorded
+        conn = service.get_connection()
+        cursor = conn.cursor()
+        cursor.execute("SELECT COUNT(*) FROM elevator_states WHERE elevator_id = 1")
+        count = cursor.fetchone()[0]
+        conn.close()
+
+        assert count == 4
+
+if __name__ == '__main__':
+    pytest.main(['-v', __file__])  #-v for verbose output
\ No newline at end of file
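As a usage note beyond the patch itself: while the model is still being trained, the analytics payload already supports a simple resting-floor policy. The helper below is a sketch over the dict returned by get_demand_analytics (or the /elevators/<id>/analytics endpoint); the function name and the fallback floor are assumptions.

# Illustrative policy sketch; `analytics` has the shape returned by get_demand_analytics,
# whose floor_popularity list is ordered by demand_count DESC.
def choose_resting_floor(analytics: dict, default_floor: int = 1) -> int:
    popularity = analytics.get('floor_popularity', [])
    if not popularity:
        return default_floor  # no demand history yet
    # The first entry is the most frequently requested floor in the window.
    return popularity[0]['requested_floor']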