Z3>ZXN4dkM?%%u+i%}be
zYD^YFu9`AR$7j#;)4sVMd`DV`a(2S+jM7F_q8z!8EAauo21SWEybc)0qiw46l)|7I
zP54(w;6w_EUK_bEJbd8XW%dc8h$Tz7O;U<7+26p<$Q{Ar)8Frn)GETW2P4xbArbFU
z#E*piT$oXsI5asKMj;YK7W}e9Of`-U?Mb1-qrmn%X
z7${mg9hO!nMw%?`+u%KL_j;H2JY;O%qyAa>ld0;#8~U&Vk-(G+?xb!}!6N9m*^dW_!9QaTb7_Wwg^
zc}-T3E^|8LY1$h8GwKE{y0q#qjLER?@e1xP(cJ}=U~k*f*{8*Xlh&+ysu#~gt998W
z_8bvvj=akR61mD6_WP5D;c;u7P*oCK+&nlM=JgpXLx=>TW(Ir@LW{Xny
z>|%G86+#57gF!3EK!czL=gy|qEpmVr-(+|)xn7tFz&C8my`racDUj$o67M=9bR84B
zj=en3d(H`-bE4Jjw(X5)O`q2s*OgLk!b^
zz)4*LY%=yRce_fx(?XM*eGoWOorj~7jirf5%t7ENppwFm3Kwq+b}w)CZmb&ZtE_6I
z&YOc<=v4S$4)%x2WB
z@P1d=v`zwQU|NR*FITX;=H12BWo}ZMi6G@(>QPg}`PRC>2DDu44+dAX4}TT~dEBZ`
z@7VN0;mAi9ZqiY1Js7Y|H(FvueizM8S!j!4uS=^*+9w_$eagyMv6A|ZbW(%`NJqb!
z_>Jl)Z2%HIDShae?11|GN|p3YKXkky>38UjB-UD5!Ft}h>MmcbeB^uVi=D;y@~|^F
zT=Pb(nzrHv>*M0Z+fe+E)rpQq>63hdqgQnF#tVD-!d{pxTIgLXEMn;*B8~?1h>p&L
zV<7Gr5FCS|V=!Jg$QKT>bYD8lAolBj^xenbjolVJErPREbhg5ZV_%mn!=CC-(B-4Wz5`$y$!7*Vl
zC=RAmNjc!+{O+F@|5=Q=Y&8z|yp646r@$*9
z1d}=VO!XL^5yY%EwbrK0KK5xSpD$(g`KD$9cfcF=rEEUm?K_fX2cJ(m%YTe!g-z0u
z!zbQv%mkRt2kG4ZRVu(o&NXsAAct{^KcOdj4cJ$Tn1M}hjl6Vbp8HSaFl>k6J;NA!
zu9uu^748|ScTGkY4E-5ha*tAgqup4#u92OqS~d_iBbo(APD%mQEXrUU
zLXF6kBUzBGl}1f=E=qv3#yYBOot%~eOim;0B5ZV|=|(dRmUVaImNHEF!#5u1R)xpHdJ${EQEUb!9MWj#f;@b>aSBinahNMl@U)+<+v8Elea$?^l%cPl_p%a3tG(-fy$`#w(sK};hn+{
z2ZG?jB%)S}6;|uyHI9wHq^6nA_e*NZdG&8iV|$a2
diff --git a/src/ssoss/__pycache__/process_video.cpython-311.pyc b/src/ssoss/__pycache__/process_video.cpython-311.pyc
deleted file mode 100644
index d59f87b942fe3ceff264ecf56faa8a3cd0f13264..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 33155
zcmeHw32Yl_mL5-ulqgXWDN58yUA8Feunu2pU$QQr@*&yXmhI-yA|=t*)kVsOhMMsv
zy$Ppd?8+VQTJE*coJDt!I_=2{!6xx0yND;iBpzS`R0%Z^!9a-@o5rRWNuW;8Yz7XH
z&G-Hyi&d25cF%4Wi)2dFtE#{L`~T~`_rLf4S3k(f$U#1rJLkoNL+js
z!NmiOPvg^0XojV;c38{qb;CNGbrbqY!?0n}IBe9?bNxidWae<@q-oeRnK7Kf!n70S
zVKYmUHJpvJVZt(L8MY*z9w1XOV<`;B3
z;MT=WXJ$G78E7Tgv?`qK~vp)-Q
zvk*7imyIVDU$rj>PjYb1#W~k!MX1$RuaWDXw;m%Ql5PVpW%s7Jw3I^Ey`XdWlXwOj~mKF$A@%#-gjaF?IFLv97hYvmcO%T$jWUQ-ihiP_tQp-fB^7n>q3
zHbq>sLCg`?q(3x1?F+g>(=Pw55a%5Uxj66CnBO%$>Kf&|lm1|1mMd{2Bd~CnH{_b(
z{3C&&-*tYd+ePndqidRTjrphi9A048Sb(yao^%EM6aJAY>l*bp~JTt&4ovd|J
zs5liSWrem~^`o-vW%2u5^?|98iCLdN@fgkWPX(!Z_05yhHyT~dlQSKSE`MmmEf+Lb
zTsPTN=WD9F*wj>aw5je?Q{7;5-Dp$YFh}ploMC!4)H3ubP@Ml(AQ$IYz5)R=g#bV*}H?TQwlHN#>Y8}@$8^mDfm39XMyQCOU6WHMy4!7z>ql<1#Vzaw>XzQ$
zxE?G5N7IO_0*LF;vvEsz*BQ^5bEnUE`pyh;r4*jY0d;%vLO0@0F<4v)?&s_q1_xMX
zC&e5BxT9GsDiMpe|4_fwBQ_oq>JN+chlQdpv8an?f6P{NfB&kjX2n+Xakp68A=q|_
zHoSXynM{_umQ_>Pim6O6Rfwhvj46%nt%&tkLyL~p;7w<5lH1I+;Etm%=UNHKBg(bm
zHf~Ri=((;_y`JvV=Ld$ka*EkOAbnKJ5I9%1aa2e<1-vs!&(Jiffw12g9$4cpr
zA9jn4Jwj=(SlTP$#m$FTv{v$_N;belDy&B@E_-=LaiPX2nC|lHo8S%!N3tWVy`iD7dSv4{rG%!ZWs
zm8(F!Ip>D{Op@09cpRi21@0;?6N~GYI-cwmireuLl(eegI<#Zyv6mott>k9
z(G}j-F4)>dTRU%QXMGhnG8#TYJ(N6%17FwowCP#S~WjBuUDZ!y&5t3baTiS
zTsYvoLDiSwt<^d0(##vf#;A-AlNF;gY9J%W6cYR$EFi;L26ccLk2BPzgPYByKXo};
z%~W=crd1O(+|=FDU)0>xYBd)@f`AA?8&jx|=Ei(RNamtZ3!OUukbYMT{vS7
zWMw{cYA8f+RpPiNuLL$<2c+jaZwi~D@>(F73!OvsvL%jdGVlMyq^?&~UI!%O(m8GI
zkfb_3h})NA(qe=b1I?jZA=k}7Xk4*wVVwOaRtube
z)DQl8#4qO?*W(p@sco4vB@@TB-j~|OxHgMk=Zw%?U}|)_ZwPNL^%Iy^l;O5X?6(ae
z42Q-I)2Le9c#{i+{0yNYV#q%!5ky1i_KY9z+{CmuB*`geLxBm#c7saBO@ZJvIPf4i
zWzIh{;T`dFMCapruve0-gLMVhK&4S7fWoWkx|3q*uvoBCr95c3XLNePhb+x3fR+Jq
z?P%QMpYYD0j~P48I6bt>gX+x$>8+=(Wd~!axFZOf%U9ta%B>s0!7*UYJDQ(oX-X>>
zLyx|*G%J+s5=(YPtZVk7MXO-17wz>+HG;iqX-=^3i>z`DwLu1{5q^qNR(sbgkg;)ind2AYuVNX-S_t1-TPa!e0CXrF=y%Ggy3w59FH9T+);6V
zdezaq;%Hvt1V@|bXp8j4oRuF|d{FVjTTjA^6@qiW=-j{R>|1g63C^RU^XT*ag7eMD
zaj++=rjiv?$zrEq+9sN|@uqDoj4|@(m+g#|-z}PU^QPV4TOy$#jr6~CckR^wXs4m8
zBlAalw16riN3(>X?0<$~d7v57%ZM)SX7q
z)Gu_8=E^7Ync<9Zrc#^}Rpf@cBn~yAVF(cj&8NN2P&@Z)VG~fvrf6QB@;Yk+7ohkl
z^gjneFmDbi&|}!FMt`bj2KBQ<9S($bQ3YX7)*O9hRJKr^76d}Sc`8+jX|>sqhk4#`
z_Xi*q5IhpoFn5OYV|6?Q+!z=e54n7h1bBg=Tt0trgbU0t5l9H)28Qk=)Tb0uB;_mK
zpsUR_fy6;3TyUG>`nkYN+{gqoas7mUilb!@V=pAMqz_;
zVVxj?rhwDHuux6d(|87}MlhZQSJyXSu3LXE7oe>S{?oF
z)7sU}6Dyr3gw9i9=c$$Lr}*usVpThrJAQw!Q1!+_?$0wc_M(OF2)1paZQBoROT(X5
zKB-)>?d5HIWA?gLd*h0|k#FjHwq0yGF4#|q_7lAQM9g0D;MDu4KCX#Q3DvvA>Rp0;
zw`kwZ+jqyxcPwQ6;)T@}-NswdmU_nS^^M5P6WUL24#OJ%8}A^o#!&=pJkmwFn_&>V
zm%uC1;Unfr#s{fYtxxv}%K1!w-+@IkH(%-c^gjKQG^S9iQNCL#=1j$wDauv9ZR$I)
zRhrEOy=uHOZ}u7Iv+$pde+yVKk`!z>)M*y9+}%)7DpEL&74;b<%!7Eh!dYSSbruzO
z?}SnaRTHAsLgD~xo8ikyJyhdku(hTq=1TRHuV^1a^1|6Z3)KYI`LAl_
z?`R&UG2x&VlooH>e0JE9xU#&uuM$04r>bXxx>d;6v^mSJo{?$mm3*>ndxG;LFh6J)Y!OT7J~S`mo>fT&YeEjiFH@V6$rZex=s%E(n{|%
zR!iL%q@MkP*u0m9=4&%!FYRZJbq}3C*Xx1|iq1W*t|8Y;U8n1%snIp)y@5V^X>}#o
zhWenJI`5^!2mNnSQ~1Hk1*g)1Ir-w>SsI
zf~rK)ae1KQau-PamNji^KmjBTWZXRAz3QK!{74})MTtlkgDhy6rX@x!H!*1?9t;eb
zq}d_e50lX{8B|;wh-XFnBC{k0OwtZX*GU~xkc1p5W-NIn
ziQmhTRqpoQh<{Oa%$0NbNFoE>Mf?5>2~!)2`183dMrzkK3am
zaeJpw^M+XShET9yEZ85(j5$hH9Stjv2EoxNIvOL{FDy9`Emo%q@FKTxHMe3Vw?fEu
ziMg&XH4d}mD5OxA$r5O5J{;ZvV6VXP5qU<&P@Ys_H)WFLf<_=jm0krAyd$
zMBH{HR@oG*tY0_ksvEx40Q`)=y2h4M^d*6X94tO7>OQ>u!R3cuz$_&-SH5td9-|u6EJY
zzIl(U|;Iq^G(0Q@vf-;Q%mY4VWg>U(lfj_-*
z8@J!q9@BlPfqd<_?q||T8RtvAw(t}l5X9OX@P)l_G5=oQLRU<_u-PB$7Y8I=*otij
zZ`;9orA@H6i}rTj-X61;J~;jUX~AA6+95%xlX@vHf8p%Cj0NpIZOodta4=de+Ui!U
zb-cAMmRGpmq_H}b1G~P-!1AeqU2oQy^M2F%o>ee8MUxXNhZos)k|vzST*+It>|C+z
zWHa*Fm|z(YEd#t|AeLP~G4`?unprhNW2y81f={w{%c3H&+%QiE`RkHFst
zcqPA@pJYOllPEY>u_==(KYv77{*NSQ+SIz*cxlaae2|7EEsN1B(YYVNKr_
z3O!<>=WmELEqWDe`uX23)|9W?z5I??w@;|*6stO)UJ$CfzY%XbpyW*l7;ieD<9p8Y
z7v2-f2!mx!x<+9P6_PdK
z5ceoV(rUq(`flC;;jU37K71|Va9B$UsIEqIT*Y#Gb~4IQC6acO|aR`Z87hXjHe6P_@rQa6T)n4`(rsg!vjlD=H^hi>})Q?9X@TTCU$vP{Vu
z6V}ZTuU?{-omV8#)T4p!Wa_9i!iQ)7!tQd61SP<
zZi#0m+NNMLh{K3yOHo+8<7SG)E;nU0XvQZ6Vsw^-VvZz}A+_C;7_hmTO*sv9q@N`lO2cQL(2!2hq;e(mx_eqor1kpw6{jgY0|KDO^(?SbGkk(`=IP$#iNSv9glP`
z6vnIt3;y@Qkuc;eF>A@9cLfG|*2-wtlJj>5q64w=+K+=vt>3@#(G8)zX*okE-@T9(
ztJ<-24RWf5+_gO0{p`ox(OjXdMJ#JsE)??ih<&W^?GO-99SX+MAUcJkV;l;sQNyacim)9j329myv(R;RcOL~fj&@IUjwG3@
zVY{v<$qn!zY?+)gSt2e5(F+-ieXf31Vj?V&|IjWBaBLb;8E4
zz-Vf(SPhsP1o~aoE!B*-@s7gusJc3tjIlloSV4li2b#Juy9Y
zThXr8W^D5UA9HgYyU-FfN%297h;7aCwzfnxlsBF1LaKGMy2sj^6Fr^UzJ?8Q*n%b{
zS7hDM4BhU8pSprsYEml_Wsv^(+AJD#-S3}~N{|yK*fHN|rwiL$y(|SKz3#s)^+phS
zU)r8VY$cdVzps(?pF%$!r1+t6aGBJ5={d6;nO#+FiWt#NwRNrN_29qMjo+E!26;oyN$zdz(o4tdu=kBbd=vRzWq==VwQb$@c!
zqVrSvONMiuu5$?kIxp)rwwI4AZsmy5ZaZhcq<^I@V(cbZatvkvvA#L8>ttfSG9emc
zU9r6-Fq%+%lQ?aF1a0i-i)))0^O{sJ%SvKNG?u1nCPcyX&2b0YibSL1@k)E+*vrB(
z{*`SMV1ll+?HNqwmf~rde9ZzCzJK!SgrDh>ZIU@Kp*ki_gN<>br^Y+jtf&@&IhiEZ(bKluSZU<6_qZY6pGwpkvnoUX0iX~zL1H!QZ_Uy|h-l
zeW`f4S8(rpI`oHc3H7}~ZJ$`%_dH*yJq|V%zZbQ21dE-KW77VP$gvlBC5x^1b9h&)
zkk`5lo_Y^&-4iP-UobD2KQF6{W-YaTlmiP!!PO;{9TCfpESS|veB$oD6>A@F?OQ{1
zgXr2Rl(mXwt*^`=KhlMDg6P-=mc2vVaZuQPNZfu%a2ysLhhYF&;9M=JUMZ*+3TnlI
z+I3BKPAS;?Ep*vX%f_%5Ma$co@mpaqdQmz{VSuEyKKp&gM~F*ya=&RNPQf09UyF@~7GbdYTzD6QtJpJB49HyZi03D#7OpR$&uC)gYVIR8r4
ze_j_-u+LzoWoC^EPaeLsZ8vJ>2o~WGb_K{P9BfMR@MWmPH0e30*m!1HXr4F1x;!J4
zm&^eoIJs_{i><((8&iyRTCFmbEmH}SY69k37sdjFaG;u9QW3p
zN7C&1)VcO(uI#0y2}EKLipt`(rf|mQTYOY(r_6|hE!}eAqpqn|b9bLA4_V~-jr)i#
zhSaSIXN>9su%i1&`x{zVNfJg+91z82LLg^;oHjqSur<94{~lm9vj-Tx3$O9PpAeL({|XCUTjE~lHBb7Q-H08EWLKixo!sAfDvvXhG&
z#wMn(GBYejVdJ{#VB8?lE}~-0Y{_s9d<}PzlEFF)(xkw2JR>m4thO>I1Cv2mk1;h>
z($qbd$R1fa1tsEVrmUpPY*JrI3|cU5yzb@3f)e3GhUAEV?5vUz;ueWCN>3O`+{}KR
zdq{0pZ|hSM@v}tmXj3%I)1BpyDwgW7C4IS8Z0i-w$qye^b9h$Rg|
z{thvJ$GXO3?)Z{Gq$^ff6zN-YmOd(4b-Gua?xh{e-Y3`(zfW}TiyU7ot@^n3Bli!F
zJn8?vlmF=CQ|Ghe{Mm~_&&7oPtNM~yeMu<2ES6pdu`C3gdOQ+s6e?Q8iWZ@8r&zdi
zU8BqC#^xP@3tB?Z@7&w9*!6zrLTA#Befvkv%Vkf!zh5a-cZtFO{9?^w
zyLp8VkCl5_d^Gl$U8FO(e>%Z@(Z##@HzH*_&^VHsYt(FOqNCs^7=
z3vGME#d<#4%UZq>i{l-Zuy{R2fUr20{|EwOV+@mxf$lN(Y9Lgf2yIjCuR(0Wy~FB50eV?&}mh6}l)Y2`7uCTZpA)8hQq
z>!eTANnJPd;|y5O(wm<(@C8|q&Y9q>gKc^ZvP0n&kOX*=6qYCGqQJ9jm=jm~Ae
z>Q9~O92{&82HkVHuKJ@#JLP-qtRn^(J5}Ib!sj+%o4J1^j#qYSHbk!)?mV1-(hJs8
zH_inKF%UN?$y&3pd)*|`e^@9mB3jX9jWttRoa)WmY~
z7CPP=jtrAzv?i*1SbuNw{$!+o%~`P+jJ~eG!XG-IqF^`N5(vTO^!w>$E{oXHR&^?^zO_7w?3f3%!7$V9s$i^
z?w0SLt08SA&abzyTb>ueYEM{~oo^t;Jz-^Q?4~Af14x2Gg#I>j5+WsKD)-;O~
z(>R-+c27@%M1|&xRr6T!gnTIkN!){pT+sNv%!oG98u7-kWwG_WeUW`@Hpg$xE)GSz
z9=;WQM_RK8wg%DGuykIqwegm=7s+`No$0R$I(W!dg%tO~4C9JUFNSC80Js=plA0RHb1i+l#
zP!~&H5PpoHH;ipSsx*R5>-*%R@6uqAuY^0A&+U%;{bZ&Z
z?GbDD2==|AeQ(5!U7oAfN+?L8X2IGZTFHrv)Mau*k-7x`tjo&KM0jV@f}7Vtd99y^
zt1@uoC?$EMx=^WF*t+A=x{>x}ZQ6}A<339kqTvp1wSP-BoP>nfkyeW7wNlH{fd}bVH03>*bHbyOgQF-wE#!5F
zvMLCd*AmKbb?wySfD)8_VSHQywTeyAJGQ^l$fWO`+hkM~Ba?yJD9g4h*Du%P`GFph
z#r;0blfn&s==l0{uh&;ZG&U_kSxejFT&SV6oHevP)$8Fbbw5Ms54QuV7-{AdjL@Ia`gc@cDm+jUm3wQ2pox6+c$}I&L5nffFA_%pmx)B%Qfys
zL_IF%bnvxy_=r`qVw`iII+NPvl&m1M5M)9Sbe)0C#UNuok$T4_#SBe{yc4d0*~zPj
z2~o9_4m*+#QD4<;nW87*$qd;RZiqzH*d#3l*SH3mxy2AvAz0UH;h$k{%c#8&`AZR9
zF1I~>Kaw3alq?$BaJ9iN&zr}3hK|My)4cT@?LF3ibVw2@GegavP%gxYuub2RDdoSS
zTk}!uTD>ZMQ83O
zSeOa+C5#3#w{#vUeij9e`)PkWc7aO{?GipJSh1VhQLtjum?6q1k^WtxuRjK51~Zgb
z5HZH`DgpBCw5Pixk_Em*Bqvcz2C@DPp?1GmyI-&!5N!t{rdYOZHM?vjyG+Qg5VI@D
zV@{X$MX`&_9zGrv>kbIT2gTxpkiR;s$h_g>5pmlg!FgD89*!JeH)PxFvHYUNx_cGt
zdR#;DX|un`r;RGT(W;024=s<73$}8f|Fq;u$#U1HWkTIo=`wp@*
zOA7IEK{3lMGnw1ZO&V8Cv_q`fEmZCiEBCCMlTX&OG{0e7rrGN{}w-TO*nR)pO_T7r^N25RJ`?E&GsE1jjvnf
zjPf*%JH;Jutmn%i)Z+C5O>O-T4*l+-<&M?%o|X0Oqg6b
zeGmGTPBIbMiZk7^o*A!I&s3eXcXyFlBk=+A8AK-)H8+NJ*U@pnV!uqyGrwUEXTDa=
zQ*~0-JQFof^|?dQiae!6ocyk+`4u=OlleQyzBhS}MeEoH59#>HA!$DVO^Ppw6HSEw
z!J#qgB#q|9A$KMtKJjdLME)D$ary{PM04np0vm=!B7k7XAwYTr8;IY^}*U$Mfw
z$F{%5Qjf0`HSjzVdQ;-`h%tbo|TeAoio~0Q?R&~E|`AW!yAu!r_
zA|WtLaxksGuJ8*DxfEW5>uf2@Ih3y-q)U>(lu3Kk%<;T_%0kOwKSSws9Cm9NYveY@Gj3s54T)rKa({{^;7gdr?5%iqh=UQYhfU~Unw-YLCKFP!
zxKsX{QnWPF&dnqJF-^R4D2k*vHrjq%y4Xd3_s1cz&nD5#8<96iNZ7h~<6-BLO(As*P4f^^Y1NCt_B|Vm5EB;Y~HH-{ZN`+vb58@d!3~kH|}K
zlu54*8;5GNh>b&rYPrt(9&zBi4t;8Zh`|gILsnS3xim(-IHZM4N;<=&jh6WwSd1AM
zfq|8mQq>o>gsrrgO&iU8gy%DKPD@ql_YDWsi8<*GV&-ki`I545MBL1TqtLipumD@7FcqX@=k&hH`>%aI*bO`qsNdSWK
zpXr*IEAGDo%-u+8wk2QreVpIVZ1Kpgh-+|q7RVW=(a=qJdhdi&lTqB#${2QO%tjA4
za0W-#fOO@ZfL|Eq{~Yp>ez>)qz1lo7J=wzfeUtDr+qS2pO>&2hihNFW7zH3D+DxdG
zKwI260z*i+gba-bMy^lc%MVPngS9=gGy9K-9yg3k!+p9jIPL}QVAg2=m9knR@D)Wc
z!)HHC_r|d3&xUc>n`|lXhf6m&uO)twZT)m_lT;mg@>yb)Jlir+0MpOs%eII&{1VmV
zenwdrGPmv?v^V8A?gqSNdM9Say{I*EXS^Mp#1Vriw<2yla8cWTN%5sW)G)FeqV;oF>i%*(gM8)GIiwrzX@CN{ysf(PY@
zTc(`-a9<`8d%TrBYbhs7v#L
z>^H_dV6KAO(0>V5|7%XtN+3gG+U9I34Vn}fBlhdp!2^N)%FBmgguO_#H$V;JEQ#dC
z@|;NtlFjj8@B4f2b>8ocm>^Nu(fq_s{%5iljxFAbwRZ`ooxG_kdH|05YPUz{_>NP;
z_ESR5fLJrYn_O@U$nWSDD!O4Y7AthF7P?mo-F(9v&z*dsTPPe93kTtDkX9Ug>yc;2
z_%m1yVDHOyv5I6dV%1b)y+NAeYkbWcPrHR0nA{zDR`R_2M-@WhX(9iNn12Q`uG;$5
z+CwX~hn{6UKL(RUp*A4a1_V=8B!jF6v*E3#*4(yk(3B49A}3JU>bmIga+6SZNT@n2
zRzXc&5rZVDZr@7XzNb6*p-~?DaqGs!x-s5Ty|$+x*ZK-ZRVJyd~_pE$+F^
z&wX3m6BaBTh{4x(d|p)>4e)J8g>6TlcMEl=g{m`R)fwcA=0%6UUx_AKDqgg-ueKat
zX*tfH@bV)Q{M2osWln6F6D*C9lYB`dI=Fm0?|wrldn0lx)^>=dho}#
zXT$uN!5_UXbPb7JLqh8iRvc(aPTt*9i}g#|rB1k4y!bqa-+zJsmY;to$Xh~!B_vux
zxL9jwdeZW={#ozycK*y+{@k0wnK${1!$QL)vEdSL+5Wl3%5uqHI>py_K6COXr}@J(
z{H=NH0zR%|n}Ksq(FS2apeaB+0Q}l~QSJ_Y_gQ|>$B#`S&5ZUOB^}ca(&@bJZ908R
zca2UHx_9Ul)P<;|9kWytAgV{jNZI)L@etzNgfAVO#^Fl`r<1z#bb3=aNvCO~$LWUd
zCd=j)%jOnkL#;fqXyx5Kf~8lq^zz9cuT;cuK_#3x)%Wc7O
zTeRH91(qZCr=#P`#^vDCuBZM#=zn%raP*4~tVq^a=9zp|+wz5{7x<2zX951~CH~4)
z@&{xY6)mH@WmJk}#ZEFyt7svuu=JBwEacdV{8^32tSMf2Vqw@)g_ylGN95Y5f@MRz
z!~q5u;h)LRlph^ZKYK|Pc`Am_fJU8Yn$uno+lT=b@um${X#DCBzN$Je-#L7g{YGm9>&TsF!zp{+HE
zG^lB6DM7M6W&`+A+br=w50Hj2A(H=zJlw`D*l{4ckd=f{e~NHuN6bVkJY=m85tL*G
z6r7m|gt&gn<^(0q;8;k!LYawVm!2X-`1lO0>s8Lt5-w=rESo9%b)!@9JPw}llFseO
zI~%B?%HbVz)SVIJ;K$9!4Nw(6pekfug#^myzr|rAl4cX1+3^xqHo2!&(mqCb=C-Sd
z(bf@dU?GG59>u|60OmHStstzu!FkiSjL-xkSO%PClBBNDaPeeb~i
z1JMG}R=a9zT(LDSU06OR*!GIHy+Y1DF=t;y2WvZ0ca%pfR=qI(w0*emgMHD#
zhX-N*UL}?`E_s)=OCw@Qo8V{{9qkY@xOT3(POrF5^JgybZ+UocfWJ=>H=xpC>xSr^uIvM(_Zvn
z%9nuZm4Y)_^$UqtpQIp(Bem--yy;iId>PoSNqVVde5x%w=HWsUQW67)8l+;F@eshBbU)pLv_*j{ns@8ex{I1C>E_L{PK31;G)hpKh=v{nG
zLLt^p?a|!Oy9MEb&A*V4CYiErSwllOUp+t4YG#KdV{?NEq=ZXRkb%_x+xbGo-qegD
zTmXqsVK`SUfq^;qzhx$rxe4QGKN|ni#1Mwxhi`&uIVzi>$~hx5o_=Y(RMmW?;m;(Q
z{-=lj?A!K#-2W$Y_YbMQq(sDxj9k9D06;F`!Hk8mP*5EscD+6YaOLi~<~w|S-CV%5K^dG7tmwm;ssdi>JL@k{(=4|JR&
z)SNc&LYEwkp&(Xy?f^3QG;dQ613$o{xQRBM;bUmQ)GAr}(1Vd!eRM#_g#r6rp*@4peu_126J82fSPkWoT2b#uB39c%hVoT7Vpk
zo2W+A^Kk>2wZ%;YX*JJjD67oGc*PZMU{4vDbf8Z*U$sd)z;5CBT;n&H4wW?Y@1bOT
zN5}?01@KT{TndRTN5qzX{@8WFHX+(3pwV%bFODx|3C?!W*$$I=Q`X(=g@Gk}Ev-#3
zwTY%STx_xgEU)5CC7(OX7TCW50?mPTt2MIM!tQ(N-(>tyJ!Nx?QL|B32&3Bf;hsZC(&%
zd)c}s%X|rj@dP71q*@8xzfIqUOHlJcSW$|$Rydr`xujhytw63l59c1uJ^HRt+9H;=
zEZ_rjZdm@8wk+2RrJV~V;?=K3iv>r6=xA8b$4bgStpA|?VZ)<_g-op0J|0?Xep)Zo
zb&GY~*i|J~^(^GFWn}kCZTGVQe&{m)wqK|n6>CRn37Kbqko|u4Vt2FxOI02Q|9Qh=
z-Y}mx{EPL%RK@=vqv}d(v4Q5O>3upyHzz1MB)_2QS@0?Vx8I=XIU6W?+Qy<^lA>cj
zJ}CP0d8?|<*1sY}|4^BmifUNZRAWnuo(HwOMA7rU5k=1r=f4(3FW9n%hV-5K>Kf16
zUP00Gphe7mVpUV=dEx;!r|S8TxkJ;Vjt{E-6GqkZ5>)*?A`9(6SVLR~phV7d8k#Td
zY-+#K#K=0giyrJIum|98nwWF@sN_xp`vH_>n>&brxm_v*{klZ@9AWC+Xgv|>fb?6e
z`sq3H(I%XGOFZ}1>ba4Xb0fTOOgJ|#o*PFLBGiefX@r{FhMGXW-AvUyMAwCkz;Z_@
zpql_~b>MmlsOcxyPhm$193wy^kULJ`1c8$TP7xR&Kva!8P2daxM#hN9Z%W1_D)tYE
zidB7WCmp5^xsOaYC0&;V+ZEAv<(DO0uOM1GmO!*RzJ_RB1JMd>AX+U#<#Dm{I35YM
zfM^T+Z6#V)d~9)gRL9qLf2q;b9izo5;5Q^<_}Ujf@-@_ld3GOT>elvQBLGHCQtcqL
z2BYV$ppVQFkK^&gvpk;3Y2Pf&Q#^5t$MeptcS3q1?f$5xv<(EB2$T>Y7xR+gCDXDT
zq1zDx5dwD!{6hk_2|NUVh@J^3q%SUW%y9F^^pKf#(prvVsv4%HF|zLnFcwr|Mv2AZ
zXoh~OkzT193Lvc+1BBXw|BFq^JD8aCw{CD4cP!c965EJff@-i+XN0w*3`|;MC2ZG{
zAj4pU37-rs2BV8^6To6LV(*d)+|V8}g3(fgBRVTpMh5kojDmGtw(%sGF@j&}5%BKiL{LeVu?i_!(WX+4dHp(>l7P8Tit)UwUy4a+mP{KTU8I
z0OpWdEx2f{@TLtxx@ZqnreQtUrf`+Z~cpDtasR7OjE!oe=$uVul~g}
gS$Ei9Op|+u{jFyVn6=cMTc5s^p8f0
Date: Sat, 7 Jun 2025 22:53:27 -0700
Subject: [PATCH 06/46] Use ffmpeg for random frame extraction
---
src/ssoss/process_video.py | 143 ++++++++++++-------------------------
1 file changed, 47 insertions(+), 96 deletions(-)
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index ece7286..ad98e9a 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -3,6 +3,7 @@
import glob
import os
import shutil
+import subprocess
from pathlib import PurePath, Path
from datetime import timedelta, timezone, datetime
import dateutil
@@ -103,6 +104,25 @@ def create_pic_list_from_zip(self, i_desc_timestamps):
prev_frame = frame_of_video
return intersection_desc, frames
+
+ def save_frame_ffmpeg(self, frame_number: int, output_path: Path) -> None:
+ """Save a specific frame quickly using ffmpeg."""
+ timestamp = frame_number / self.fps
+ cmd = [
+ "ffmpeg",
+ "-y",
+ "-hide_banner",
+ "-loglevel",
+ "error",
+ "-ss",
+ str(timestamp),
+ "-i",
+ str(self.video_filepath),
+ "-frames:v",
+ "1",
+ str(output_path),
+ ]
+ subprocess.run(cmd, check=True)
def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
"""
@@ -116,37 +136,15 @@ def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True,
image_path = Path(self.video_dir, "out", self.video_filepath.stem, "generic_static_object_sightings/")
image_path.mkdir(exist_ok=True, parents=True)
- capture = cv2.VideoCapture(str(self.video_filepath))
- frame_count = self.get_frame_count()
-
- i = 0 # index for all frames to extract
- j = 0 # index for frames list to extract as image
- k = 0 # intersection string description counter
-
- while capture.isOpened() and len(extract_frames) > 0 and i < frame_count:
- for current_frame in tqdm(range(0, extract_frames[-1]),
- desc="Frame Search",
- unit=" Frames"):
- ret, frame = capture.read()
- if ret is False:
- print("ERROR: ret is FALSE on OpenCV image")
- break
- if i == extract_frames[j] and j <= len(extract_frames)-1:
- frame_name = str(generic_so_desc[j]) + '.jpg'
- frame_filepath = image_path / frame_name
- cv2.imwrite(str(frame_filepath), frame)
- print(
- f'PICTURE CAPTURED AT {extract_frames[j]}: {generic_so_desc[j]}, Saved {j + 1} picture(s) of {len(extract_frames)}')
- j += 1
- k += 1
- if j == len(extract_frames):
- print("done processing images")
- capture.release()
- break
- i += 1
- if i > extract_frames[-1]:
- break
- capture.release()
+ for desc, frame_num in tqdm(
+ list(zip(generic_so_desc, extract_frames)),
+ desc="Frame Extraction",
+ unit=" frame"):
+ frame_name = str(desc) + '.jpg'
+ frame_filepath = image_path / frame_name
+ self.save_frame_ffmpeg(frame_num, frame_filepath)
+ print(
+ f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {generic_so_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
if label_img:
self.generic_so_img_overlay_info_box(self.video_filename, project)
@@ -165,38 +163,15 @@ def extract_sightings(self, desc_timestamps, project, label_img=True, gen_gif=Fa
image_path = Path(self.video_dir, "out", self.video_filepath.stem, "signal_sightings/")
image_path.mkdir(exist_ok=True, parents=True)
- capture = cv2.VideoCapture(str(self.video_filepath))
- frame_count = self.get_frame_count()
-
- i = 0 # index for all frames to extract
- j = 0 # index for frames list to extract as image
- k = 0 # intersection string description counter
-
- while capture.isOpened() and len(extract_frames) > 0 and i < frame_count:
-
- for current_frame in tqdm(range(0, extract_frames[-1]),
- desc="Frame Search",
- unit=" Frames"):
- ret, frame = capture.read()
- if ret is False:
- print("ERROR: ret is FALSE on OpenCV image")
- break
- if i == extract_frames[j] and j <= len(extract_frames)-1:
- frame_name = str(intersection_desc[j]) + '.jpg'
- frame_filepath = image_path / frame_name
- cv2.imwrite(str(frame_filepath), frame)
- print(
- f'PICTURE CAPTURED AT {extract_frames[j]}: {intersection_desc[j]}, Saved {j + 1} picture(s) of {len(extract_frames)}')
- j += 1
- k += 1
- if j == len(extract_frames):
- print("done processing images")
- capture.release()
- break
- i += 1
- if i > extract_frames[-1]:
- break
- capture.release()
+ for desc, frame_num in tqdm(
+ list(zip(intersection_desc, extract_frames)),
+ desc="Frame Extraction",
+ unit=" frame"):
+ frame_name = str(desc) + '.jpg'
+ frame_filepath = image_path / frame_name
+ self.save_frame_ffmpeg(frame_num, frame_filepath)
+ print(
+ f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {intersection_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
if label_img:
self.img_overlay_info_box(self.video_filename, project)
@@ -247,27 +222,14 @@ def video_start_utc():
image_path = Path(self.video_dir, "out", self.video_filepath.stem, "frames/")
image_path.mkdir(exist_ok=True, parents=True)
- capture = cv2.VideoCapture(str(self.video_filepath))
- #print(f'Video is Open: {self.capture.isOpened()}')
- i = 0
start_frame = int(self.get_fps() * start_sec)
end_frame = int(self.get_fps() * end_sec)
- while capture.isOpened():
- ret, frame = capture.read()
- if ret == False:
- break
- if start_frame <= i and i <= end_frame:
- frame_name = 'Frame' + str(i) + '.jpg'
- frame_filepath = image_path / frame_name
- cv2.imwrite(str(frame_filepath), frame)
- print(f'Saved Image {i} to {frame_filepath}')
- i += 1
-
- if i > end_frame:
- capture.release()
- break
- capture.release()
+ for i in range(start_frame, end_frame + 1):
+ frame_name = 'Frame' + str(i) + '.jpg'
+ frame_filepath = image_path / frame_name
+ self.save_frame_ffmpeg(i, frame_filepath)
+ print(f'Saved Image {i} to {frame_filepath}')
def generate_gif(self, desc_timestamps, project, distance=100):
""" creates a folder of images to create a gif
@@ -316,22 +278,11 @@ def generate_gif(self, desc_timestamps, project, distance=100):
else:
frame_max = int(frame_list[i] + additional_frames)
- j = 0 # frame index
- capture = cv2.VideoCapture(str(self.video_filepath))
- while capture.isOpened():
- ret, frame = capture.read()
- if ret is False:
- break
- if frame_min <= j <= frame_max:
- frame_name = str(j) + "-" + intersection_desc[i] + '.jpg'
- frame_filepath = gif_path / frame_name
- cv2.imwrite(str(frame_filepath), frame)
- if j > frame_max:
- break
- else:
- j += 1
+ for j in range(frame_min, frame_max + 1):
+ frame_name = str(j) + "-" + intersection_desc[i] + '.jpg'
+ frame_filepath = gif_path / frame_name
+ self.save_frame_ffmpeg(j, frame_filepath)
i += 1
- capture.release()
self.assemble_gif()
def assemble_gif(self):
From ed3e4009ebc4006f81b61481b61737bc9b0b1fd5 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Sat, 7 Jun 2025 23:06:57 -0700
Subject: [PATCH 07/46] Fix CLI formatting and add script entry
---
pyproject.toml | 3 +
src/ssoss/ssoss_cli.py | 139 ++++++++++++++++++++++++++++-------------
2 files changed, 100 insertions(+), 42 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index d47aa27..155990f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,3 +25,6 @@ files = ["requirements.txt"]
[project.urls]
"Homepage" = "https://github.com/redmond2742/ssoss"
"Bug Tracker" = "https://github.com/redmond2742/ssoss/issues"
+
+[project.scripts]
+ssoss = "ssoss.ssoss_cli:main"
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index f7fbbdc..c72251c 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -3,21 +3,32 @@
import process_video
-def args_static_obj_gpx_video(generic_so_file = "", gpx_file="", video_file="",
- vid_sync=("",""), frame_extract=("",""), extra_out=(True, False)):
-
+def args_static_obj_gpx_video(
+ generic_so_file="",
+ gpx_file="",
+ video_file="",
+ vid_sync=("", ""),
+ frame_extract=("", ""),
+ extra_out=(True, False),
+):
+
sightings = ""
if generic_so_file and gpx_file:
- project = process_road_objects.ProcessRoadObjects(gpx_filestring = gpx_file.name, generic_static_object_filestring = generic_so_file.name)
+ project = process_road_objects.ProcessRoadObjects(
+ gpx_filestring=gpx_file.name,
+ generic_static_object_filestring=generic_so_file.name,
+ )
if project.get_static_object_type() == "intersection":
sightings = project.intersection_checks()
elif project.get_static_object_type() == "generic static object":
sightings = project.generic_so_checks()
-
+
if generic_so_file:
- process_road_objects.ProcessRoadObjects(generic_static_object_filestring = generic_so_file.name)
+ process_road_objects.ProcessRoadObjects(
+ generic_static_object_filestring=generic_so_file.name
+ )
elif gpx_file:
- process_road_objects.ProcessRoadObjects(gpx_filestring = gpx_file.name)
+ process_road_objects.ProcessRoadObjects(gpx_filestring=gpx_file.name)
if video_file:
@@ -25,11 +36,15 @@ def args_static_obj_gpx_video(generic_so_file = "", gpx_file="", video_file="",
if vid_sync[0] and vid_sync[1]:
video.sync(int(vid_sync[0]), vid_sync[1])
if sightings and project.get_static_object_type() == "intersection":
- print('extracting traffic signal sightings')
- video.extract_sightings(sightings, project, label_img=extra_out[0], gen_gif=extra_out[1])
+ print("extracting traffic signal sightings")
+ video.extract_sightings(
+ sightings, project, label_img=extra_out[0], gen_gif=extra_out[1]
+ )
if sightings and project.get_static_object_type() == "generic static object":
- print('extracting generic static object sightings')
- video.extract_generic_so_sightings(sightings, project, label_img=extra_out[0], gen_gif=extra_out[1])
+ print("extracting generic static object sightings")
+ video.extract_generic_so_sightings(
+ sightings, project, label_img=extra_out[0], gen_gif=extra_out[1]
+ )
elif frame_extract[0] and frame_extract[1]:
print("extracting frames...")
video.extract_frames_between(frame_extract[0], frame_extract[1])
@@ -37,8 +52,9 @@ def args_static_obj_gpx_video(generic_so_file = "", gpx_file="", video_file="",
def main():
parser = argparse.ArgumentParser(
- prog="Safe Sightings of Signs and Signals",
- description="Software to help verify visible traffic signs and signals using GPX and Video files")
+ prog="Safe Sightings of Signs and Signals",
+ description="Software to help verify visible traffic signs and signals using GPX and Video files",
+ )
so_and_gpx_group = parser.add_argument_group(
"Static Objects and GPX Input",
@@ -54,40 +70,79 @@ def main():
)
video_extract_group = parser.add_argument_group(
"Extract Frames from Video File",
- "Enter Start and End Time (in seconds) for still images from video file"
+ "Enter Start and End Time (in seconds) for still images from video file",
)
- # Static Object & GPX arguments )
- so_and_gpx_group.add_argument("-so", "--static_object_file",
- metavar="Static Object File",
- help=".csv file to process of static road objects (Intersections, signs, etc.)",
- type=argparse.FileType('r')
+ # Static Object & GPX arguments
+ so_and_gpx_group.add_argument(
+ "-so",
+ "--static_object_file",
+ metavar="Static Object File",
+ help=".csv file to process of static road objects (Intersections, signs, etc.)",
+ type=argparse.FileType("r"),
+ )
+
+ so_and_gpx_group.add_argument(
+ "-gpx",
+ "--gpx_file",
+ metavar="GPX File",
+ help=".gpx file to process",
+ type=argparse.FileType("r"),
)
-
- so_and_gpx_group.add_argument("-gpx", "--gpx_file",
- metavar="GPX File",
- help=".gpx file to process",
- type=argparse.FileType('r')
- )
# Video file arguments
- video_group.add_argument("-v", "--video_file",
- metavar="Video File",
- help="Video file to process",
- type=argparse.FileType('r')
- )
+ video_group.add_argument(
+ "-v",
+ "--video_file",
+ metavar="Video File",
+ help="Video file to process",
+ type=argparse.FileType("r"),
+ )
# extract frames based on start and end time of video
- video_extract_group.add_argument("-fxs", "--frame_extract_start", help="Start extract frames in video (seconds)", type=int, nargs=1)
- video_extract_group.add_argument("-fxe", "--frame_extract_end", help="End Extract frames in video (seconds)",
- type=int, nargs=1)
+ video_extract_group.add_argument(
+ "-fxs",
+ "--frame_extract_start",
+ help="Start extract frames in video (seconds)",
+ type=int,
+ nargs=1,
+ )
+ video_extract_group.add_argument(
+ "-fxe",
+ "--frame_extract_end",
+ help="End Extract frames in video (seconds)",
+ type=int,
+ nargs=1,
+ )
- video_sync_group.add_argument("-sf", "--sync_frame", help="Sync Frame number for video. Sync with timestamp also", type=int)
- video_sync_group.add_argument("-st", "--sync_timestamp", help="2. Sync Timestamp ('2022-10-24T14:21:54.988Z') for video. Sync with frame number also", type=str)
+ video_sync_group.add_argument(
+ "-sf",
+ "--sync_frame",
+ help="Sync Frame number for video. Sync with timestamp also",
+ type=int,
+ )
+ video_sync_group.add_argument(
+ "-st",
+ "--sync_timestamp",
+ help="2. Sync Timestamp ('2022-10-24T14:21:54.988Z') for video. Sync with frame number also",
+ type=str,
+ )
- video_sync_group.add_argument("--label", help="Include descriptive label on bottom of image", action="store_true")
- video_sync_group.add_argument("--gif", help="Generate GIF of Sight Distance", action="store_true")
- video_sync_group.add_argument("--bbox", help="Add bounding box around traffic signals", action="store_true")
+ video_sync_group.add_argument(
+ "--label",
+ help="Include descriptive label on bottom of image",
+ action="store_true",
+ )
+ video_sync_group.add_argument(
+ "--gif",
+ help="Generate GIF of Sight Distance",
+ action="store_true",
+ )
+ video_sync_group.add_argument(
+ "--bbox",
+ help="Add bounding box around traffic signals",
+ action="store_true",
+ )
# process args depending on filled in values
args = parser.parse_args()
@@ -99,7 +154,7 @@ def main():
if args.frame_extract_start and args.frame_extract_end:
frames = (args.frame_extract_start[0], args.frame_extract_end[0])
- lb = gif = bbox = False
+ lb = gif = bbox = False
if args.label:
lb = True
if args.gif:
@@ -113,11 +168,11 @@ def main():
args_static_obj_gpx_video(generic_so_file = args.static_object_file,
gpx_file = args.gpx_file,
video_file = args.video_file,
- vid_sync = sync_input,
- frame_extract = frames,
+ vid_sync = sync_input,
+ frame_extract = frames,
extra_out = lb_gif_bbox
)
if __name__ == "__main__":
- main()
\ No newline at end of file
+ main()
From 152ff750b54737db78437f5c24a9e092257b89f0 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Sun, 8 Jun 2025 07:38:06 -0700
Subject: [PATCH 08/46] Convert road object classes to dataclasses and add type
hints
---
src/ssoss/dynamic_road_object.py | 24 +++--
src/ssoss/process_road_objects.py | 80 ++++++++------
src/ssoss/static_road_object.py | 167 +++++++++++++++---------------
tests/test_static_road_object.py | 29 +++---
4 files changed, 162 insertions(+), 138 deletions(-)
diff --git a/src/ssoss/dynamic_road_object.py b/src/ssoss/dynamic_road_object.py
index 3f02e24..bc6b890 100644
--- a/src/ssoss/dynamic_road_object.py
+++ b/src/ssoss/dynamic_road_object.py
@@ -20,7 +20,15 @@
class DynamicRoadObject:
- def __init__(self, id_num, name, obj_type, sro_df, gpx_df, source="GPX"):
+ def __init__(
+ self,
+ id_num: int,
+ name: str,
+ obj_type: str,
+ sro_df: pd.DataFrame,
+ gpx_df: pd.DataFrame,
+ source: str = "GPX",
+ ) -> None:
"""Dynamic Road Objects move through time and space using a source (typ. GPX)
:param id_num: unique ID number for object
@@ -73,7 +81,7 @@ def __init__(self, id_num, name, obj_type, sro_df, gpx_df, source="GPX"):
def mask(df, key, value) -> pd.DataFrame:
return df[df[key] == value]
- def update_location_simple(self, i=2):
+ def update_location_simple(self, i: int = 2) -> None:
""" Update dynamic object location with new data point i
"""
self.t0 = self.t1
@@ -98,7 +106,7 @@ def current_timestamp(self) -> pd.Timestamp:
t = datetime.fromisoformat(str(self.t1))
return pd.Timestamp(t)
- def first_utc_timestamp(self):
+ def first_utc_timestamp(self) -> float:
t = self.t0.timetuple()
return time.mktime(t) - 28800
@@ -111,17 +119,17 @@ def get_utc_timestamp(self) -> float:
return time.mktime(t) - 28800
@staticmethod
- def utc_to_timestamp(t):
+ def utc_to_timestamp(t: float) -> str:
return time.asctime(time.localtime(t))
- def get_time_step(self):
+ def get_time_step(self) -> float:
time_step = self.t1 - self.t0
if time_step.total_seconds() < 0:
return 10.0 # assume larger first gpx point time step
else:
return time_step.total_seconds()
- def get_location(self, i=None, elev=False):
+ def get_location(self, i: int | None = None, elev: bool = False) -> str:
if elev:
if i is None:
return self.pnt1.format_decimal()
@@ -498,11 +506,11 @@ def get_info_by_id(self, id_num: int, appr_dir) -> str:
)
@staticmethod
- def find_index(df, i):
+ def find_index(df: pd.DataFrame, i: int) -> pd.Series:
return df.iloc[i]
@staticmethod
- def t_spd_adjust(d0, spd0, d1, spd1):
+ def t_spd_adjust(d0: float, spd0: float, d1: float, spd1: float) -> float:
""" Adjusts time of event based on speed of gpx points i and i+1.
:param d0: distance a t=0
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index aeaa7e0..22a7f21 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -223,17 +223,29 @@ def load_intersection_csv(self, intersection_filename: str) -> pd.DataFrame:
columns_in_row = len(row)
if columns_in_row == 13:
self.intersection_load["id"].append(int(row[0]))
- self.intersection_load["intersection_obj"].append(Intersection(
- id_num = int(row[0]),
- # name1(N/S), name2(E/W)
- name = tuple((str(row[1]),str(row[2]))),
- ctr_pnt = geopy.Point(float(row[3]),float(row[4])),
- # spd_N, spd_E, spd_S, spd_W
- spd = tuple((int(row[5]), int(row[6]), int(row[7]),int(row[8]))),
- # bearing_N, bearing_E, bearing_S, bearing_W
- bearing = tuple((float(row[9]), float(row[10]), float(row[11]),
- float(row[12])))
- ))
+ self.intersection_load["intersection_obj"].append(
+ Intersection(
+ int(row[0]),
+ tuple((str(row[1]), str(row[2]))),
+ geopy.Point(float(row[3]), float(row[4])),
+ spd=tuple(
+ (
+ int(row[5]),
+ int(row[6]),
+ int(row[7]),
+ int(row[8]),
+ )
+ ),
+ bearing=tuple(
+ (
+ float(row[9]),
+ float(row[10]),
+ float(row[11]),
+ float(row[12]),
+ )
+ ),
+ )
+ )
elif columns_in_row == 29:
nb_sb_pts = eb_sb_pts = sb_sb_pts = wb_sb_pts = False
if (row[13] and row[14] and
@@ -247,29 +259,29 @@ def load_intersection_csv(self, intersection_filename: str) -> pd.DataFrame:
self.intersection_load["id"].append(int(row[0]))
temp_i = Intersection(
- id_num = int(row[0]),
- # name1(N/S), name2(E/W)
- name = tuple((str(row[1]),str(row[2]))),
- ctr_pnt = geopy.Point(float(row[3]),float(row[4])),
- # spd_N, spd_E, spd_S, spd_W
- spd = tuple((int(row[5]), int(row[6]), int(row[7]),
- int(row[8]))),
- # bearing_N, bearing_E, bearing_S, bearing_W
- bearing = tuple((float(row[9]), float(row[10]), float(row[11]),
- float(row[12]))),
- # Additional info for stop bar to improve accuracy:
- # NB Stop bar. center line Point(lat, lon), shoulder Point(lat, lon)
- stop_bar_nb = tuple((geopy.Point(row[13], row[14]),
- geopy.Point(row[15], row[16]))),
- # EB Stop bar. center line Point(lat, lon), shoulder Point(lat, lon)
- stop_bar_eb = tuple((geopy.Point(row[17], row[18]),
- geopy.Point(row[19], row[20]))),
- # SB Stop bar. center line Point(lat, lon), shoulder Point(lat, lon)
- stop_bar_sb = tuple((geopy.Point(row[21], row[22]),
- geopy.Point(row[23], row[24]))),
- # WB Stop bar. center line Point(lat, lon), shoulder Point(lat, lon)
- stop_bar_wb = tuple((geopy.Point(row[25], row[26]),
- geopy.Point(row[27], row[28])))
+ int(row[0]),
+ tuple((str(row[1]), str(row[2]))),
+ geopy.Point(float(row[3]), float(row[4])),
+ spd=tuple(
+ (
+ int(row[5]),
+ int(row[6]),
+ int(row[7]),
+ int(row[8]),
+ )
+ ),
+ bearing=tuple(
+ (
+ float(row[9]),
+ float(row[10]),
+ float(row[11]),
+ float(row[12]),
+ )
+ ),
+ stop_bar_nb=tuple((geopy.Point(row[13], row[14]), geopy.Point(row[15], row[16]))),
+ stop_bar_eb=tuple((geopy.Point(row[17], row[18]), geopy.Point(row[19], row[20]))),
+ stop_bar_sb=tuple((geopy.Point(row[21], row[22]), geopy.Point(row[23], row[24]))),
+ stop_bar_wb=tuple((geopy.Point(row[25], row[26]), geopy.Point(row[27], row[28]))),
)
temp_i.set_sb_pts_bools((nb_sb_pts,eb_sb_pts,sb_sb_pts,wb_sb_pts))
self.intersection_load["intersection_obj"].append(temp_i)
diff --git a/src/ssoss/static_road_object.py b/src/ssoss/static_road_object.py
index 9d479e7..d1d9b3a 100644
--- a/src/ssoss/static_road_object.py
+++ b/src/ssoss/static_road_object.py
@@ -2,29 +2,28 @@
# coding: utf-8
import math
-import geopy, geopy.distance
+from dataclasses import dataclass, field
+from typing import Tuple, Union
+
+import geopy
+import geopy.distance
import numpy as np
+@dataclass
class StaticRoadObject:
+ """Base representation of a static object on the roadway."""
- def __init__(self, id_num: int, name: str, obj_type: type,
- ctr_pt: geopy.Point, spd_sd: dict):
- """initializes values related to static road objects
-
- :param id_num: Identification number (int)
- :param name: name of street object is located on
- :param obj_type: type of static object, ie. sign, intersection, generic_so, etc.
- :param ctr_pt: Geopy Point object of lat, lon, altitude
- :param spd_sd: speed [key] and sight distance[value] of object to be viewed (distance in ft)
- """
+ id_num: int
+ name: Union[str, Tuple[str, str]]
+ ctr_pt: geopy.Point
+ spd_sd: dict = field(default_factory=dict, kw_only=True)
+ obj_type: type = field(init=False)
+ pt: geopy.Point = field(init=False)
- self.id_num = id_num
- self.name = name
- self.obj_type = obj_type
- self.ctr_pt = ctr_pt
- self.pt = geopy.Point(ctr_pt.latitude, ctr_pt.longitude) # removes elevation for dist calcs
- self.spd_sd = spd_sd
+ def __post_init__(self) -> None:
+ self.obj_type = type(self)
+ self.pt = geopy.Point(self.ctr_pt.latitude, self.ctr_pt.longitude)
def get_id_num(self) -> int:
return int(self.id_num)
@@ -50,28 +49,25 @@ def get_sd(self):
return next(iter(self.spd_sd.values()))
-class GenericStaticObject():
- def __init__(self, id_num: int, street_name: str, pt: geopy.Point, bearing, description:str, distance_ft: float ):
- """ Class for any type of static object. Point and Visible Distance are primary inputs
-
- """
+@dataclass
+class GenericStaticObject:
+ """Generic static object such as a sign or road marking."""
- self.id_num = id_num
- self.street_name = street_name
- self.pt = geopy.Point(pt.latitude, pt.longitude) # removes elevation for dist calcs
- self.description = description
- self.distance_ft = distance_ft
-
- compass = {"NB":0,
- "EB":90,
- "SB":180,
- "WB":270
- }
-
- if type(bearing) == str:
- self.bearing = compass[bearing]
+ id_num: int
+ street_name: str
+ pt: geopy.Point
+ bearing: Union[str, float]
+ description: str
+ distance_ft: float
+
+ def __post_init__(self) -> None:
+ self.pt = geopy.Point(self.pt.latitude, self.pt.longitude)
+
+ compass = {"NB": 0, "EB": 90, "SB": 180, "WB": 270}
+ if isinstance(self.bearing, str):
+ self.bearing = compass[self.bearing]
else:
- self.bearing = bearing
+ self.bearing = float(self.bearing)
def get_id_num(self) -> int:
@@ -131,40 +127,39 @@ class TrafficControlSign(StaticRoadObject):
pass
+@dataclass
class Intersection(StaticRoadObject):
- """Extends Static Road Objects with additional variables and methods related to intersection objects
- """
-
- def __init__(self,
- id_num,
- name: tuple,
- ctr_pnt: geopy.Point,
- spd: tuple,
- bearing: tuple,
- stop_bar_nb=(0, 0),
- stop_bar_eb=(0, 0),
- stop_bar_sb=(0, 0),
- stop_bar_wb=(0, 0)):
- """initialize variables of intersection class, mostly stored as tuples in (North, East, South, West) format
-
- :param id_num:
- :param name: tuple of the name of two streets intersecting ((N/S, E/W))
- :param ctr_pnt: center lat, lon of intersection as geopy.Point object
- :param spd: tuple of posted speed limit values for each approach ((N,E,S,W))
- :param bearing: tuple of compass bearing for each approach ((N,E,S,W))
- :param stop_bar_nb: tuple of two geopy Points (both with (lat,lon)) for NB approach stop bar*
- :param stop_bar_eb: tuple of two geopy Points (both with (lat,lon)) for EB approach stop bar*
- :param stop_bar_sb: tuple of two geopy Points (both with (lat,lon)) for SB approach stop bar*
- :param stop_bar_wb: tuple of two geopy Points (both with (lat,lon)) for WB approach stop bar*
-
- *Note: left point (inside lane) is [0] in tuple, right point (right turn lane) is [1] in tuple
-
- """
-
- self.stop_bar_bools = tuple((False,False,False,False))
- self.spd = spd
- self.ctr_pnt = ctr_pnt
- self.bearing = bearing
+ """Static road object representing an intersection."""
+
+ spd: Tuple[int, int, int, int]
+ bearing: Tuple[float, float, float, float]
+ stop_bar_nb: Tuple[geopy.Point, geopy.Point] = (
+ geopy.Point(0, 0),
+ geopy.Point(0, 0),
+ )
+ stop_bar_eb: Tuple[geopy.Point, geopy.Point] = (
+ geopy.Point(0, 0),
+ geopy.Point(0, 0),
+ )
+ stop_bar_sb: Tuple[geopy.Point, geopy.Point] = (
+ geopy.Point(0, 0),
+ geopy.Point(0, 0),
+ )
+ stop_bar_wb: Tuple[geopy.Point, geopy.Point] = (
+ geopy.Point(0, 0),
+ geopy.Point(0, 0),
+ )
+
+ stop_bar_bools: Tuple[bool, bool, bool, bool] = field(init=False)
+ sd: Tuple[float, float, float, float] = field(init=False)
+ stop_bar_d: Tuple[
+ Tuple[geopy.Point, geopy.Point],
+ Tuple[geopy.Point, geopy.Point],
+ Tuple[geopy.Point, geopy.Point],
+ Tuple[geopy.Point, geopy.Point],
+ ] = field(init=False)
+
+ def __post_init__(self) -> None:
self.spd_sd = {
-999: 0,
20: 175,
@@ -175,21 +170,25 @@ def __init__(self,
45: 460,
50: 540,
55: 625,
- 60: 715
+ 60: 715,
}
- # default to lowest distance if speed not found in spd_sd dict.
- self.sd = tuple((self.spd_sd.get(self.spd[0], 175),
- self.spd_sd.get(self.spd[1], 175),
- self.spd_sd.get(self.spd[2], 175),
- self.spd_sd.get(self.spd[3], 175),
- ))
-
- self.stop_bar_d = tuple((stop_bar_nb, stop_bar_eb,
- stop_bar_sb, stop_bar_wb
- ))
-
- StaticRoadObject.__init__(self, id_num, name, Intersection, ctr_pnt,
- self.spd_sd)
+
+ self.sd = (
+ self.spd_sd.get(self.spd[0], 175),
+ self.spd_sd.get(self.spd[1], 175),
+ self.spd_sd.get(self.spd[2], 175),
+ self.spd_sd.get(self.spd[3], 175),
+ )
+
+ self.stop_bar_bools = (False, False, False, False)
+ self.stop_bar_d = (
+ self.stop_bar_nb,
+ self.stop_bar_eb,
+ self.stop_bar_sb,
+ self.stop_bar_wb,
+ )
+
+ super().__post_init__()
# TODO: Convert this to dictionary from input file, not hard coded values
# @staticmethod
diff --git a/tests/test_static_road_object.py b/tests/test_static_road_object.py
index 5c85fdc..4f23b8a 100644
--- a/tests/test_static_road_object.py
+++ b/tests/test_static_road_object.py
@@ -16,9 +16,12 @@ class TestGetIDNumMethod(unittest.TestCase):
55: 625,
60: 715
}
- test_sro = StaticRoadObject(100, "street_name", "test_object", \
- geopy.Point(37.79205307308094, -122.40918793416158), \
- speed_sightD_tuple)
+ test_sro = StaticRoadObject(
+ 100,
+ "street_name",
+ geopy.Point(37.79205307308094, -122.40918793416158),
+ spd_sd=speed_sightD_tuple,
+ )
sro_id_result = test_sro.get_id_num()
def test_get_id_num_type(self):
@@ -49,15 +52,17 @@ class TestDistanceToSB(unittest.TestCase):
intersection_stop_bar_wb = (geopy.Point(37.792081947979, -122.40908296974182),
geopy.Point(37.792137440921714, -122.40909664953729))
- test_intersection = Intersection(100,
- intersection_name,
- intersection_ctr_pt,
- intersection_spd_tuple,
- intersection_bearing,
- intersection_stop_bar_nb,
- intersection_stop_bar_eb,
- intersection_stop_bar_sb,
- intersection_stop_bar_wb)
+ test_intersection = Intersection(
+ 100,
+ intersection_name,
+ intersection_ctr_pt,
+ spd=intersection_spd_tuple,
+ bearing=intersection_bearing,
+ stop_bar_nb=intersection_stop_bar_nb,
+ stop_bar_eb=intersection_stop_bar_eb,
+ stop_bar_sb=intersection_stop_bar_sb,
+ stop_bar_wb=intersection_stop_bar_wb,
+ )
test_nb_approach_point = geopy.Point(37.791640829945806, -122.4090598924283)
test_eb_approach_point = geopy.Point(37.79191109041387, -122.41001431676943)
From b1f6fd93ff433ccf3f5f60abae0b0cbefacae467 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Sun, 8 Jun 2025 07:39:03 -0700
Subject: [PATCH 09/46] Fix filenames for intersection images
---
src/ssoss/process_road_objects.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index aeaa7e0..6745f02 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -149,7 +149,7 @@ def intersection_frame_description(self, sro_id, b_index, distance, ts, desc_typ
i_id = sro_id
i_bearing = b_index
i_compass_bearing = i_obj.get_bearing_str(b_index)
- i_name = i_obj.get_name()
+ i_name = i_obj.get_name().replace("+", "-")
i_name_one = i_obj.get_name(0)
i_name_two = i_obj.get_name(1)
i_sd = i_obj.get_sd(b_index)
From 3e298cef2dc58d1c97aaf0c90b53d03eb3c94c25 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Sun, 8 Jun 2025 09:22:24 -0700
Subject: [PATCH 10/46] Fix tests import path and handle __version__
---
src/ssoss/__init__.py | 6 +++++-
tests/test_static_road_object.py | 7 ++++++-
2 files changed, 11 insertions(+), 2 deletions(-)
diff --git a/src/ssoss/__init__.py b/src/ssoss/__init__.py
index 9e3df57..2a5dc0e 100644
--- a/src/ssoss/__init__.py
+++ b/src/ssoss/__init__.py
@@ -12,5 +12,9 @@
# if it isn't installed when running in minimal environments such as tests.
pass
-__version__ = importlib.metadata.version("ssoss")
+try:
+ __version__ = importlib.metadata.version("ssoss")
+except importlib.metadata.PackageNotFoundError:
+ # Package metadata not found when running from source
+ __version__ = "0.0.0"
diff --git a/tests/test_static_road_object.py b/tests/test_static_road_object.py
index 5c85fdc..51e4bad 100644
--- a/tests/test_static_road_object.py
+++ b/tests/test_static_road_object.py
@@ -1,5 +1,10 @@
+import sys
+import pathlib
import unittest
-from ssoss.static_road_object import *
+
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1] / "src"))
+
+from ssoss.static_road_object import StaticRoadObject, Intersection
import geopy, geopy.distance
From 6b547adc11f0399a17b8c7e12c12010748581007 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Sun, 8 Jun 2025 09:22:52 -0700
Subject: [PATCH 11/46] Optimize GPX loading with optional pickle caching
---
src/ssoss/process_road_objects.py | 23 +++++++++++++++++------
1 file changed, 17 insertions(+), 6 deletions(-)
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index 604a172..5bdc8bb 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -25,7 +25,8 @@ class ProcessRoadObjects:
def __init__(self,
gpx_filestring: str = "",
#signals_filestring: str = "",
- generic_static_object_filestring: str = ""
+ generic_static_object_filestring: str = "",
+ use_pickle: bool = True
):
""" Class to process Road Object files. Using January 1st 1970 as time epoc
@@ -66,6 +67,9 @@ def __init__(self,
self.intersection_approaches = 0
self.generic_so_approaches = 0
+ # store whether to load/save pickled GPX data
+ self.use_pickle = use_pickle
+
# scafold directory structure if not present
gpx_video_dir = self.in_gpx_dir_path
p = Path(str(gpx_video_dir))
@@ -90,7 +94,7 @@ def __init__(self,
else:
raise ValueError("generic static object .csv file must have 7, 13 or 29 columns. Check documentation.")
if self.gpx_filename:
- gpx_df = self.load_gpx_to_obj_df(self.gpx_filename, use_pickle=False)
+ gpx_df = self.load_gpx_to_obj_df(self.gpx_filename, use_pickle=self.use_pickle)
@staticmethod
@@ -331,10 +335,16 @@ def load_gpx_to_obj_df(self, gpx_filename: str, gpx_ver = "1.0", use_pickle=True
t1 = datetime.now(timezone.utc)
if use_pickle and Path(self.pickle_file).is_file():
- self.gpxDF = pd.read_pickle(self.pickle_file)
+ self.gpx_listDF = pd.read_pickle(self.pickle_file)
print(
- f"Loaded Pickle file {self.pickle_file} into Dataframe with {self.gpxDF.last_valid_index()} rows"
+ f"Loaded Pickle file {self.pickle_file} into Dataframe with {self.gpx_listDF.last_valid_index()} rows"
)
+ if self.intersection_listDF is not None:
+ self.update_gpx_points(so_type="intersection")
+ if self.generic_so_listDF is not None:
+ self.update_gpx_points(so_type="generic_so")
+ self.gpx_summary()
+ return self.gpx_listDF
else:
print(
f"Using GPX file: {self.gpx_file}"
@@ -391,8 +401,9 @@ def load_gpx_to_obj_df(self, gpx_filename: str, gpx_ver = "1.0", use_pickle=True
pt_count += 1
self.gpx_listDF = pd.DataFrame(gpx_load)
- #self.gpx_listDF.to_pickle(self.pickle_file)
- #self.gpx_listDF.to_csv(self.csv_file)
+ if use_pickle:
+ self.gpx_listDF.to_pickle(self.pickle_file)
+ self.gpx_listDF.to_csv(self.csv_file)
print(
f"Processing {pt_count} points of GPX file."
)
From b1b5a938c880d7771878749e9d8030cd735f3a1a Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Sun, 8 Jun 2025 16:31:40 -0700
Subject: [PATCH 12/46] Refactor large functions into helpers
---
src/ssoss/dynamic_road_object.py | 323 +++++++++++++++-------------
src/ssoss/process_video.py | 350 +++++++++++++++++++++----------
2 files changed, 408 insertions(+), 265 deletions(-)
diff --git a/src/ssoss/dynamic_road_object.py b/src/ssoss/dynamic_road_object.py
index bc6b890..9bec8fb 100644
--- a/src/ssoss/dynamic_road_object.py
+++ b/src/ssoss/dynamic_road_object.py
@@ -41,9 +41,9 @@ def __init__(
self.MStoMPH = 2.23694
self.FTPStoMPH = 0.681818
- self.MPHtoFTPS = 1/self.FTPStoMPH
+ self.MPHtoFTPS = 1 / self.FTPStoMPH
self.MStoFTPS = self.MStoMPH * self.MPHtoFTPS
- self.DATE_FORMAT = '%m-%d-%Y--%H-%M-%S.%f-%Z'
+ self.DATE_FORMAT = "%m-%d-%Y--%H-%M-%S.%f-%Z"
# self.sorted_sroDF = None
@@ -67,7 +67,8 @@ def __init__(
self.sorted_sroDF = None
self.closest_intersection = self.get_closest_intersection(as_list=False)
self.closest_intersection_list = self.get_closest_intersection(as_list=True)
- self.closest_approaching_intersection = self.get_closest_approaching_intersection(
+ self.closest_approaching_intersection = (
+ self.get_closest_approaching_intersection()
)
self.in_file_path = PurePath("./in/")
@@ -82,8 +83,7 @@ def mask(df, key, value) -> pd.DataFrame:
return df[df[key] == value]
def update_location_simple(self, i: int = 2) -> None:
- """ Update dynamic object location with new data point i
- """
+ """Update dynamic object location with new data point i"""
self.t0 = self.t1
self.t1 = self.gpx_df.loc[i].t
@@ -96,7 +96,9 @@ def update_location_simple(self, i: int = 2) -> None:
self.spd = self.gpx_df.loc[i].spd
self.closest_intersection = self.get_closest_intersection()
- self.closest_approaching_intersection = self.get_closest_approaching_intersection()
+ self.closest_approaching_intersection = (
+ self.get_closest_approaching_intersection()
+ )
def first_timestamp(self) -> pd.Timestamp:
t = datetime.fromisoformat(str(self.t0))
@@ -144,7 +146,7 @@ def get_location(self, i: int | None = None, elev: bool = False) -> str:
return self.pt1.format_decimal()
def get_dist_step(self) -> geopy.distance:
- """ first distance step
+ """first distance step
:return: geopy distance in feet
"""
@@ -175,7 +177,7 @@ def prev_dist_to_sro(self, sro: StaticRoadObject) -> geopy.distance:
def get_spd(self, units="MPH") -> float:
if units == "MPH":
- return float(self.spd * self.MStoMPH) # Ft/sec
+ return float(self.spd * self.MStoMPH) # Ft/sec
else:
return float(self.spd) # Meters/sec
@@ -186,7 +188,7 @@ def calculate_spd_values(self):
This might be used if GPX v1.1 does not log speed data, so this will calculate it.
"""
- self.gpx_df.drop(['spd'], axis=1) # remove None values for speed
+ self.gpx_df.drop(["spd"], axis=1) # remove None values for speed
spd_list = [0]
for n in range(1, self.gpx_df.last_valid_index()):
@@ -194,48 +196,48 @@ def calculate_spd_values(self):
if self.get_time_step() == 0:
speed = 0
else:
- speed = (self.get_dist_step() /
- self.get_time_step()) * self.FTPStoMPH
+ speed = (self.get_dist_step() / self.get_time_step()) * self.FTPStoMPH
spd_list.append(speed)
if n == self.gpx_df.last_valid_index():
- self.gpx_df['spd'] = spd_list
+ self.gpx_df["spd"] = spd_list
def get_bearing(self) -> float:
b = gpxgeo.get_course(self.pt0[0], self.pt0[1], self.pt1[0], self.pt1[1])
self.bearing = b
return b
- def approaching(self, sro: StaticRoadObject) -> bool: # , self.sro: StaticRoadObject
+ def approaching(
+ self, sro: StaticRoadObject
+ ) -> bool: # , self.sro: StaticRoadObject
if self.cur_dist_to_sro(sro) <= self.prev_dist_to_sro(sro):
return True
else:
return False
def get_closest_intersection(self, as_list=False) -> Intersection:
- """returns None or 1st or ascending sorted list of intersection objects based on distance
- """
+ """returns None or 1st or ascending sorted list of intersection objects based on distance"""
# Crop min and max distances to limit search, sort and length
min_sd = self.sro_df.iloc[0, 1].get_sd("min")
max_sd = self.sro_df.iloc[0, 1].get_sd("max")
for row in range(0, self.sro_df.last_valid_index()):
- self.sro_df.loc[row,
- "d"] = self.cur_dist_to_sro(self.sro_df.iloc[row, 1])
+ self.sro_df.loc[row, "d"] = self.cur_dist_to_sro(self.sro_df.iloc[row, 1])
self.sro_df.loc[row, "approaching"] = self.approaching(
- self.sro_df.iloc[row, 1])
+ self.sro_df.iloc[row, 1]
+ )
- self.sorted_sroDF = self.sro_df.sort_values(by=['d'], ignore_index=True)
+ self.sorted_sroDF = self.sro_df.sort_values(by=["d"], ignore_index=True)
- min_row = self.sorted_sroDF[self.sorted_sroDF['d'].ge(min_sd)].index
+ min_row = self.sorted_sroDF[self.sorted_sroDF["d"].ge(min_sd)].index
if len(min_row) == 0:
min_row = 0
else:
min_row = min(min_row)
- max_row = self.sorted_sroDF[self.sorted_sroDF['d'].le(max_sd)].index
+ max_row = self.sorted_sroDF[self.sorted_sroDF["d"].le(max_sd)].index
if len(max_row) == 0:
max_row = 0
else:
@@ -252,7 +254,7 @@ def get_closest_intersection(self, as_list=False) -> Intersection:
return limited_df.iloc[0, 1]
def get_closest_approaching_intersection(self, as_list=False) -> Intersection:
- """ returns the closest, approaching intersection object for the current point
+ """returns the closest, approaching intersection object for the current point
of the dynamic object based on sorted list of intersections.
:return: intersection object
@@ -261,7 +263,7 @@ def get_closest_approaching_intersection(self, as_list=False) -> Intersection:
if df is None:
return None
else:
- mask = df['approaching'].values == True
+ mask = df["approaching"].values == True
if df[mask].empty:
return None
elif as_list:
@@ -288,7 +290,7 @@ def approach_leg(self, itrsxn: Intersection, index_out=True):
self.calc_bearing_diff(itrsxn.get_bearing(0)),
self.calc_bearing_diff(itrsxn.get_bearing(1)),
self.calc_bearing_diff(itrsxn.get_bearing(2)),
- self.calc_bearing_diff(itrsxn.get_bearing(3))
+ self.calc_bearing_diff(itrsxn.get_bearing(3)),
]
approach_leg_index = np.argmin(veh_int_diff)
@@ -309,7 +311,7 @@ def approach_leg(self, itrsxn: Intersection, index_out=True):
return app_leg_dir
def drive_gpx(self, gpx_filename: str, use_pickle_file=False) -> pd.DataFrame:
- """ Load the GPX file into a dataframe with timestamp, location, speed, dist to event, bearing, and
+ """Load the GPX file into a dataframe with timestamp, location, speed, dist to event, bearing, and
id of approaching intersection
:param gpx_filename: absolute filepath of file (without .gpx or .p file)
@@ -336,7 +338,7 @@ def drive_gpx(self, gpx_filename: str, use_pickle_file=False) -> pd.DataFrame:
"spd": [],
"distance": [],
"bearing": [],
- "approaching": []
+ "approaching": [],
}
for i in range(2, self.gpx_df.last_valid_index()):
@@ -354,9 +356,9 @@ def drive_gpx(self, gpx_filename: str, use_pickle_file=False) -> pd.DataFrame:
pass
else:
approaching_sd = False
- appr_distance = (cai.distance_from_sb(
- self.get_location(), self.approach_leg(cai)) -
- cai.get_sd(self.approach_leg(cai)))
+ appr_distance = cai.distance_from_sb(
+ self.get_location(), self.approach_leg(cai)
+ ) - cai.get_sd(self.approach_leg(cai))
if appr_distance > 0:
approaching_sd = True
@@ -379,103 +381,91 @@ def drive_gpx(self, gpx_filename: str, use_pickle_file=False) -> pd.DataFrame:
)
return approach_log_df
- def drive_gpx_stop_bar(self,
- gpx_filename,
- use_pickle_file=True) -> pd.DataFrame:
- """ Load the GPX file into a dataframe with timestamp, location, speed, dist to event, bearing, and
- id of approaching intersection
+ def drive_gpx_stop_bar(self, gpx_filename, use_pickle_file=True) -> pd.DataFrame:
+ """Load the GPX file and generate a dataframe of approach events."""
- :param gpx_directory: file directory where .gpx file is for input
- :param gpx_filename: filename of .gpx file (without .gpx)
- :param out_file_directory: where output files are saved to (./out/)
- :param use_pickle_file: default to False, can be faster to load from pickle
- :return: DataFrame with GPX calculated for sro file and saved a CSV and Pickle file of gpx information
-
- """
- # self.gpx_filepath = gpx_directory + gpx_filename + ".gpx"
pickle_file = self.out_file_path / (str(gpx_filename) + ".p")
csv_file = self.out_file_path / (str(gpx_filename) + ".csv")
- approach_sb_log_df = None
-
if use_pickle_file and os.path.isfile(pickle_file):
- approach_sb_log_df = pd.read_pickle(pickle_file)
- return approach_sb_log_df
- else:
- # dictionary-> Keys:Values
- appr_dict = {
- "id": [],
- "appr_dir": [],
- "timestamp": [],
- "time_delta": [],
- "location": [],
- "spd": [],
- "distance": [],
- "bearing": [],
- "approaching": []
- }
+ return pd.read_pickle(pickle_file)
- for i in tqdm(range(2, self.gpx_df.last_valid_index()),
- desc="Loading GPX:",
- unit="GPX Points"):
+ approach_sb_log_df = self.parse_gpx_points()
+ self.write_summary(approach_sb_log_df, csv_file, pickle_file)
+ return approach_sb_log_df
- self.update_location_simple(i)
- cai = self.get_closest_approaching_intersection()
- if cai is None:
- if i == self.gpx_df.last_valid_index() - 1:
- approach_sb_log_df = pd.DataFrame(appr_dict)
- approach_sb_log_df.to_csv(csv_file)
- approach_sb_log_df.to_pickle(pickle_file)
- print(
- f"exported dataframe to CSV (in {csv_file}) and Pickle (in {pickle_file})"
- )
- else:
- pass
- else:
- approaching_sd = False
- appr_distance = (cai.distance_from_sb(
- self.get_location(), self.approach_leg(cai)) -
- cai.get_sd(self.approach_leg(cai)))
- # print(f'{i}: {appr_distance}ft from {cai.get_name()}')
- if appr_distance > 0:
- approaching_sd = True
+ def parse_gpx_points(self) -> pd.DataFrame:
+ """Iterate through GPX points and log approach information."""
- appr_dict["id"].append(cai.get_id_num())
- appr_dict["appr_dir"].append(self.approach_leg(cai))
- appr_dict["timestamp"].append(self.get_utc_timestamp())
- appr_dict["time_delta"].append(self.get_time_step())
- appr_dict["location"].append(self.get_location())
- appr_dict["spd"].append(self.get_spd())
- appr_dict["distance"].append(appr_distance)
- appr_dict["bearing"].append(self.get_bearing())
- appr_dict["approaching"].append(approaching_sd)
+ appr_dict = self._init_approach_dict()
+ for i in tqdm(
+ range(2, self.gpx_df.last_valid_index()),
+ desc="Loading GPX:",
+ unit="GPX Points",
+ ):
+ self.update_location_simple(i)
+ cai = self.get_closest_approaching_intersection()
+ if cai is None:
+ continue
- if i == self.gpx_df.last_valid_index() - 1:
- print("WRITING DICT TO DATAFRAME")
- approach_sb_log_df = pd.DataFrame(appr_dict)
- approach_sb_log_df.to_csv(csv_file)
- approach_sb_log_df.to_pickle(pickle_file)
- print(
- f"Exported data frame to CSV ({csv_file}) and Pickle ({pickle_file})"
- )
- print(f'ApproachSB_DF:{approach_sb_log_df}')
+ appr_distance = cai.distance_from_sb(
+ self.get_location(), self.approach_leg(cai)
+ ) - cai.get_sd(self.approach_leg(cai))
+ self.update_approach_dict(appr_dict, cai, appr_distance)
- return approach_sb_log_df
+ return pd.DataFrame(appr_dict)
+
+ @staticmethod
+ def _init_approach_dict() -> dict:
+ return {
+ "id": [],
+ "appr_dir": [],
+ "timestamp": [],
+ "time_delta": [],
+ "location": [],
+ "spd": [],
+ "distance": [],
+ "bearing": [],
+ "approaching": [],
+ }
+
+ def update_approach_dict(
+ self, appr_dict: dict, cai: Intersection, distance: float
+ ) -> None:
+ """Append approach information for a single GPX point."""
+
+ approaching_sd = distance > 0
+ appr_dict["id"].append(cai.get_id_num())
+ appr_dict["appr_dir"].append(self.approach_leg(cai))
+ appr_dict["timestamp"].append(self.get_utc_timestamp())
+ appr_dict["time_delta"].append(self.get_time_step())
+ appr_dict["location"].append(self.get_location())
+ appr_dict["spd"].append(self.get_spd())
+ appr_dict["distance"].append(distance)
+ appr_dict["bearing"].append(self.get_bearing())
+ appr_dict["approaching"].append(approaching_sd)
+
+ @staticmethod
+ def write_summary(
+ df: pd.DataFrame, csv_file: PurePath, pickle_file: PurePath
+ ) -> None:
+ df.to_csv(csv_file)
+ df.to_pickle(pickle_file)
+ print(f"Exported data frame to CSV ({csv_file}) and Pickle ({pickle_file})")
def get_street(self, itrsxn: Intersection) -> str:
- """ current Street of intersection approach leg
- """
+ """current Street of intersection approach leg"""
apr_leg_index = self.approach_leg(itrsxn)
return itrsxn.get_name(apr_leg_index)
def get_info(self, itrsxn: Intersection) -> str:
- """ get ID#, bearing, and name about an intersection
+ """get ID#, bearing, and name about an intersection
:param itrsxn: Intersection Object
:return: string of info in format ID#.Compass_Heading - Intersection Name
"""
return str(
- f'{itrsxn.get_id_num()}.{self.approach_leg(itrsxn)}-{itrsxn.get_name()}'
+ f"{itrsxn.get_id_num()}.{self.approach_leg(itrsxn)}-{itrsxn.get_name()}"
)
def get_itrsxn_obj_by_id(self, id_num: int) -> Intersection:
@@ -484,7 +474,7 @@ def get_itrsxn_obj_by_id(self, id_num: int) -> Intersection:
:param id_num: ID number of intersection object
:return: intersection object
"""
- mask = self.sro_df['id'] == id_num
+ mask = self.sro_df["id"] == id_num
return self.sro_df[mask].iloc[0, 1]
def get_info_by_id(self, id_num: int, appr_dir) -> str:
@@ -495,14 +485,14 @@ def get_info_by_id(self, id_num: int, appr_dir) -> str:
:return: string in format:
ID#.direction_index - street1 + street2 - event distance string
"""
- mask = self.sro_df['id'] == id_num
+ mask = self.sro_df["id"] == id_num
itrsxn = self.sro_df[mask].iloc[0, 1]
"""
Example: 2.0-YVR+California-35mph-325ft-UTCtime
Include: direction, Approach Posted Speed
"""
return str(
- f'{itrsxn.get_id_num()}.{appr_dir}-{itrsxn.get_name()}-{itrsxn.get_sd(appr_dir)}ft'
+ f"{itrsxn.get_id_num()}.{appr_dir}-{itrsxn.get_name()}-{itrsxn.get_sd(appr_dir)}ft"
)
@staticmethod
@@ -511,7 +501,7 @@ def find_index(df: pd.DataFrame, i: int) -> pd.Series:
@staticmethod
def t_spd_adjust(d0: float, spd0: float, d1: float, spd1: float) -> float:
- """ Adjusts time of event based on speed of gpx points i and i+1.
+ """Adjusts time of event based on speed of gpx points i and i+1.
:param d0: distance a t=0
:param spd0: speed at t=0
@@ -557,31 +547,39 @@ def seek_sd(self, gpx_df, csv_out=True) -> pd.DataFrame:
"appr_dir": [],
"timestamp": [],
"location": [],
- "spd": [], # MPH
- "distance": [], # FEET
- "t_adjust": [], # SECONDS
- "string_desc": []
+ "spd": [], # MPH
+ "distance": [], # FEET
+ "t_adjust": [], # SECONDS
+ "string_desc": [],
}
for i in range(1, (gpx_df.last_valid_index() - 3)):
- if gpx_df.spd.iloc[i] > 0.4 and \
- gpx_df.spd.iloc[i + 1] > 0.4 and \
- gpx_df.distance.iloc[i + 1] <= \
- self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).get_sd(gpx_df.appr_dir.iloc[i + 1]) + \
- (gpx_df.time_delta.iloc[i] * (gpx_df.spd.iloc[i] * self.MPHtoFTPS)):
+ if (
+ gpx_df.spd.iloc[i] > 0.4
+ and gpx_df.spd.iloc[i + 1] > 0.4
+ and gpx_df.distance.iloc[i + 1]
+ <= self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).get_sd(
+ gpx_df.appr_dir.iloc[i + 1]
+ )
+ + (gpx_df.time_delta.iloc[i] * (gpx_df.spd.iloc[i] * self.MPHtoFTPS))
+ ):
print("heuristic filter")
- if (gpx_df.approaching.iloc[i - 1]) == True and \
- (gpx_df.approaching.iloc[i]) == True and \
- (gpx_df.approaching.iloc[i + 1] == False) and \
- (gpx_df.approaching.iloc[i + 2] == False):
+ if (
+ (gpx_df.approaching.iloc[i - 1]) == True
+ and (gpx_df.approaching.iloc[i]) == True
+ and (gpx_df.approaching.iloc[i + 1] == False)
+ and (gpx_df.approaching.iloc[i + 2] == False)
+ ):
print("approach filter")
- t_adjust = self.t_spd_adjust(gpx_df.distance.iloc[i], gpx_df.spd.iloc[i],
- gpx_df.distance.iloc[i + 1], gpx_df.spd.iloc[i + 1]
- )
-
+ t_adjust = self.t_spd_adjust(
+ gpx_df.distance.iloc[i],
+ gpx_df.spd.iloc[i],
+ gpx_df.distance.iloc[i + 1],
+ gpx_df.spd.iloc[i + 1],
+ )
# t_adj is less than timestep when dynamic object is close to event distance
if t_adjust <= gpx_df.time_delta.iloc[i]:
@@ -594,10 +592,12 @@ def seek_sd(self, gpx_df, csv_out=True) -> pd.DataFrame:
df_dict["distance"].append(gpx_df.distance.iloc[i])
df_dict["t_adjust"].append(t_adjust)
df_dict["string_desc"].append(
- self.get_info_by_id(gpx_df.id.iloc[i],
- gpx_df.appr_dir.iloc[i]))
+ self.get_info_by_id(
+ gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i]
+ )
+ )
print(
- f'seek_sd_Info:{self.get_info_by_id(gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i])}, time adjust:{t_adjust}'
+ f"seek_sd_Info:{self.get_info_by_id(gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i])}, time adjust:{t_adjust}"
)
else:
pass
@@ -628,38 +628,56 @@ def seek_sb(self, gpx_df, csv_out=True) -> pd.DataFrame:
"spd": [],
"distance": [],
"t_adjust": [],
- "string_desc": []
+ "string_desc": [],
}
for i in range(1, (gpx_df.last_valid_index() - 3)):
- if self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).distance_from_sb(
- gpx_df.location.iloc[i + 1], gpx_df.appr_dir.iloc[i + 1]) is None:
- print(f'USING CENTER OF INTERSECTION LOCATION')
- approach_distance = self.get_itrsxn_obj_by_id(
- gpx_df.id.iloc[i]).get_sd(gpx_df.appr_dir.iloc[i + 1]) + 50
+ if (
+ self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).distance_from_sb(
+ gpx_df.location.iloc[i + 1], gpx_df.appr_dir.iloc[i + 1]
+ )
+ is None
+ ):
+ print(f"USING CENTER OF INTERSECTION LOCATION")
+ approach_distance = (
+ self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).get_sd(
+ gpx_df.appr_dir.iloc[i + 1]
+ )
+ + 50
+ )
else:
- print(f'USING STOP BAR LOCATION')
+ print(f"USING STOP BAR LOCATION")
approach_distance = self.get_itrsxn_obj_by_id(
- gpx_df.id.iloc[i]).distance_from_sb(gpx_df.location.iloc[i + 1],
- gpx_df.appr_dir.iloc[i + 1])
-
- if gpx_df.spd.iloc[i] > 0.2 and gpx_df.spd.iloc[i + 1] > 0.2 \
- and gpx_df.distance.iloc[i + 1] <= approach_distance <= gpx_df.distance.iloc[i]:
+ gpx_df.id.iloc[i]
+ ).distance_from_sb(
+ gpx_df.location.iloc[i + 1], gpx_df.appr_dir.iloc[i + 1]
+ )
+
+ if (
+ gpx_df.spd.iloc[i] > 0.2
+ and gpx_df.spd.iloc[i + 1] > 0.2
+ and gpx_df.distance.iloc[i + 1]
+ <= approach_distance
+ <= gpx_df.distance.iloc[i]
+ ):
if True:
"""
- calculate exact time based on speed. using i and i+1.
-
+ calculate exact time based on speed. using i and i+1.
+
For point i and i+1, the exact time adjustment is weighted
based on the speeds at these two point's in time. This shifts the exact position
of the car to the most accurate time it was at the calculated sight distance
-
+
store that time in UTC in DF
"""
- t_adjust = self.t_spd_adjust(gpx_df.distance.iloc[i], gpx_df.spd.iloc[i],
- gpx_df.distance.iloc[i + 1], gpx_df.spd.iloc[i + 1]
- )
+ t_adjust = self.t_spd_adjust(
+ gpx_df.distance.iloc[i],
+ gpx_df.spd.iloc[i],
+ gpx_df.distance.iloc[i + 1],
+ gpx_df.spd.iloc[i + 1],
+ )
if t_adjust <= 1:
df_dict_sb["id"].append(gpx_df.id.iloc[i])
@@ -670,12 +688,14 @@ def seek_sb(self, gpx_df, csv_out=True) -> pd.DataFrame:
df_dict_sb["distance"].append(gpx_df.distance.iloc[i])
df_dict_sb["t_adjust"].append(t_adjust)
df_dict_sb["string_desc"].append(
- self.get_info_by_id(gpx_df.id.iloc[i],
- gpx_df.appr_dir.iloc[i]))
+ self.get_info_by_id(
+ gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i]
+ )
+ )
# print(self.utc_to_timestamp(df.timestamp.iloc[i]))
print(
- f'info:{self.get_info_by_id(gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i])}'
+ f"info:{self.get_info_by_id(gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i])}"
)
else:
@@ -684,7 +704,8 @@ def seek_sb(self, gpx_df, csv_out=True) -> pd.DataFrame:
pass
if csv_out:
pd.DataFrame(df_dict_sb).to_csv(
- self.out_file_path / "approaching_intersections_Stopbar.csv")
+ self.out_file_path / "approaching_intersections_Stopbar.csv"
+ )
return pd.DataFrame(df_dict_sb)
# TODO: create new method
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index ad98e9a..2b2c761 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -23,7 +23,7 @@ def __init__(self, video_filestring: str):
:param in_dir_path: filename of video to be processed (include video extension (.mov, .mp4, etc)
"""
- self.DATE_FORMAT = '%m-%d-%Y--%H-%M-%S.%f-%Z' #ISO 8601 format
+ self.DATE_FORMAT = "%m-%d-%Y--%H-%M-%S.%f-%Z" # ISO 8601 format
self.video_dir = Path(video_filestring).parents[0]
self.video_filepath = Path(video_filestring)
self.video_filename = Path(video_filestring).name
@@ -36,7 +36,6 @@ def __init__(self, video_filestring: str):
self.start_time = 0
self.capture = ""
-
self.vid_summary(vid_summary=True)
def set_start_utc(self, video_start_time):
@@ -71,15 +70,17 @@ def sync(self, frame: int, ts):
appends frame # and timestamp to sync.txt with video filename for reference
"""
sync_txt_folder = Path(self.video_dir, "out")
- sync_file = str(sync_txt_folder) +"/"+ "sync.txt"
- with open(sync_file, 'a') as f:
- f.write(f'{self.video_filepath.stem},{frame},{ts}\n')
+ sync_file = str(sync_txt_folder) + "/" + "sync.txt"
+ with open(sync_file, "a") as f:
+ f.write(f"{self.video_filepath.stem},{frame},{ts}\n")
elapsed_time = frame / self.fps
if type(ts) is float:
start_time = ts - elapsed_time
else:
- t_temp = (dateutil.parser.isoparse(ts)) # isoparse parses ISO-8601 datetime string into datetime.datetime
+ t_temp = dateutil.parser.isoparse(
+ ts
+ ) # isoparse parses ISO-8601 datetime string into datetime.datetime
start_time = t_temp.replace(tzinfo=timezone.utc).timestamp() - elapsed_time
self.set_start_utc(start_time)
self.vid_summary(vid_summary=False, sync=True)
@@ -123,8 +124,10 @@ def save_frame_ffmpeg(self, frame_number: int, output_path: Path) -> None:
str(output_path),
]
subprocess.run(cmd, check=True)
-
- def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
+
+ def extract_generic_so_sightings(
+ self, desc_timestamps, project, label_img=True, gen_gif=False
+ ):
"""
extract generic sighting images from video based on description and timestamp zip
@@ -133,60 +136,67 @@ def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True,
"""
generic_so_desc, extract_frames = self.create_pic_list_from_zip(desc_timestamps)
- image_path = Path(self.video_dir, "out", self.video_filepath.stem, "generic_static_object_sightings/")
+ image_path = Path(
+ self.video_dir,
+ "out",
+ self.video_filepath.stem,
+ "generic_static_object_sightings/",
+ )
image_path.mkdir(exist_ok=True, parents=True)
for desc, frame_num in tqdm(
- list(zip(generic_so_desc, extract_frames)),
- desc="Frame Extraction",
- unit=" frame"):
- frame_name = str(desc) + '.jpg'
+ list(zip(generic_so_desc, extract_frames)),
+ desc="Frame Extraction",
+ unit=" frame",
+ ):
+ frame_name = str(desc) + ".jpg"
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
print(
- f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {generic_so_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
+ f"PICTURE CAPTURED AT {frame_num}: {desc}, Saved {generic_so_desc.index(desc) + 1} picture(s) of {len(extract_frames)}"
+ )
if label_img:
self.generic_so_img_overlay_info_box(self.video_filename, project)
if gen_gif:
self.generate_gif(desc_timestamps, project)
- def extract_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
- """
- extract sighting images from video based on description and timestamp zip
-
- desc_timestamps: sorted list of tuples (filename description, timestamp of sight distance)
- project: instance of ProcessRoadObjects() class
- """
-
- intersection_desc, extract_frames = self.create_pic_list_from_zip(desc_timestamps)
- image_path = Path(self.video_dir, "out", self.video_filepath.stem, "signal_sightings/")
+ def extract_sightings(
+ self, desc_timestamps, project, label_img=True, gen_gif=False
+ ):
+ """Extract sighting images from a video."""
+
+ intersection_desc, extract_frames = self.create_pic_list_from_zip(
+ desc_timestamps
+ )
+ image_path = Path(
+ self.video_dir, "out", self.video_filepath.stem, "signal_sightings/"
+ )
image_path.mkdir(exist_ok=True, parents=True)
- for desc, frame_num in tqdm(
- list(zip(intersection_desc, extract_frames)),
- desc="Frame Extraction",
- unit=" frame"):
- frame_name = str(desc) + '.jpg'
- frame_filepath = image_path / frame_name
- self.save_frame_ffmpeg(frame_num, frame_filepath)
- print(
- f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {intersection_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
+ self._save_frames(intersection_desc, extract_frames, image_path)
if label_img:
self.img_overlay_info_box(self.video_filename, project)
if gen_gif:
self.generate_gif(desc_timestamps, project)
- """
- if bbox:
- self.img_overlay_bbox(description_list,project)
-
- """
-
+
+ def _save_frames(self, descriptions, frames, image_path: Path) -> None:
+ """Save frames described by ``descriptions`` and ``frames`` to disk."""
+
+ for desc, frame_num in tqdm(
+ list(zip(descriptions, frames)), desc="Frame Extraction", unit=" frame"
+ ):
+ frame_name = str(desc) + ".jpg"
+ frame_filepath = image_path / frame_name
+ self.save_frame_ffmpeg(frame_num, frame_filepath)
+ print(
+ f"PICTURE CAPTURED AT {frame_num}: {desc}, Saved {descriptions.index(desc) + 1} picture(s) of {len(frames)}"
+ )
# TODO: convert to start_sec, start_min=0, end_sec, end_min=0, folder="")
def extract_frames_between(self, start_sec, end_sec):
- """ helper function to extract frames from video during a specific time period to estimate offset
+ """helper function to extract frames from video during a specific time period to estimate offset
between gpx and video
:param start_sec: start time of video to extract image frames
@@ -226,10 +236,10 @@ def video_start_utc():
end_frame = int(self.get_fps() * end_sec)
for i in range(start_frame, end_frame + 1):
- frame_name = 'Frame' + str(i) + '.jpg'
+ frame_name = "Frame" + str(i) + ".jpg"
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(i, frame_filepath)
- print(f'Saved Image {i} to {frame_filepath}')
+ print(f"Saved Image {i} to {frame_filepath}")
def generate_gif(self, desc_timestamps, project, distance=100):
""" creates a folder of images to create a gif
@@ -251,10 +261,18 @@ def generate_gif(self, desc_timestamps, project, distance=100):
intersection_desc, frame_list = self.create_pic_list_from_zip(desc_timestamps)
- for i in tqdm(range(0, len(desc_timestamps)),
- desc="Generating Images for GIF",
- unit=" Location"):
- gif_basepath = self.video_dir / "out" / self.video_filepath.stem / "gif" / intersection_desc[i]
+ for i in tqdm(
+ range(0, len(desc_timestamps)),
+ desc="Generating Images for GIF",
+ unit=" Location",
+ ):
+ gif_basepath = (
+ self.video_dir
+ / "out"
+ / self.video_filepath.stem
+ / "gif"
+ / intersection_desc[i]
+ )
gif_path = Path(gif_basepath)
gif_path.mkdir(exist_ok=True, parents=True)
@@ -279,57 +297,60 @@ def generate_gif(self, desc_timestamps, project, distance=100):
frame_max = int(frame_list[i] + additional_frames)
for j in range(frame_min, frame_max + 1):
- frame_name = str(j) + "-" + intersection_desc[i] + '.jpg'
+ frame_name = str(j) + "-" + intersection_desc[i] + ".jpg"
frame_filepath = gif_path / frame_name
self.save_frame_ffmpeg(j, frame_filepath)
i += 1
self.assemble_gif()
def assemble_gif(self):
- #base_path = Path(self.video_dir, "out", self.video_filepath.stem, "gif/")
+ # base_path = Path(self.video_dir, "out", self.video_filepath.stem, "gif/")
gif_files_path = self.video_dir / "out" / self.video_filepath.stem / "gif"
base_path = Path(gif_files_path)
- #base_path = "./out/frames/" + self.video_filename + "/gif/"
- img_folders = sorted(base_path.glob('*'))
- kargs = {'duration': 1/9999999999999999}
+ # base_path = "./out/frames/" + self.video_filename + "/gif/"
+ img_folders = sorted(base_path.glob("*"))
+ kargs = {"duration": 1 / 9999999999999999}
for i in range(0, len(img_folders)):
images = []
img_folder = os.path.basename(img_folders[i])
- frame_images = sorted(glob.glob(os.path.join(base_path, img_folder + "/*.jpg")))
+ frame_images = sorted(
+ glob.glob(os.path.join(base_path, img_folder + "/*.jpg"))
+ )
for j in range(0, len(frame_images)):
if j % 5 == 0:
images.append(imageio.imread(frame_images[j]))
- imageio.mimsave(os.path.join(base_path, img_folder + ".gif"), images, **kargs)
- print(f'Created Gif: {img_folder}.gif')
+ imageio.mimsave(
+ os.path.join(base_path, img_folder + ".gif"), images, **kargs
+ )
+ print(f"Created Gif: {img_folder}.gif")
# TODO: delete folder of images after gif is created.
# TODO: overwite existing gif option
-
@staticmethod
def hr_min_sec(sec):
if sec < 60:
- return f'{sec} seconds'
+ return f"{sec} seconds"
elif sec < 3600:
minutes = int(sec / 60)
sec_remain = round(sec - minutes * 60, 2)
- return f'{minutes:02}:{sec_remain:05.2f} (MM:SS.ss)'
+ return f"{minutes:02}:{sec_remain:05.2f} (MM:SS.ss)"
elif sec >= 3600:
hr = int(sec / 3600)
minutes = int(sec / 60)
sec_remain = round(sec - minutes * 60, 2)
- return f'{hr:02}:{minutes:02}:{sec_remain:05.2f} (HH:MM:SS.ss)'
+ return f"{hr:02}:{minutes:02}:{sec_remain:05.2f} (HH:MM:SS.ss)"
def sizeConvert(self, size):
# convert filesize to human readable format
- K, M, G = 1024, 1024 ** 2, 1024 ** 3
+ K, M, G = 1024, 1024**2, 1024**3
if size >= G:
- return str(round(size / G, 2)) + ' GB'
+ return str(round(size / G, 2)) + " GB"
elif size >= M:
- return str(round(size / M, 2)) + ' MB'
+ return str(round(size / M, 2)) + " MB"
elif size >= K:
- return str(round(size / K, 2)) + ' KB'
+ return str(round(size / K, 2)) + " KB"
else:
- return str(round(size, 2)) + ' Bytes'
+ return str(round(size, 2)) + " Bytes"
def get_filesize(self):
# get the file size
@@ -375,18 +396,21 @@ def vid_summary(self, vid_summary, sync=False):
if sync:
print(sync_time)
-
@staticmethod
- def find_font_scale(label, max_width = 0, max_height = 0):
+ def find_font_scale(label, max_width=0, max_height=0):
font_scl = 0.2
- textsize_x, textsize_y = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, font_scl, 1)[0]
+ textsize_x, textsize_y = cv2.getTextSize(
+ label, cv2.FONT_HERSHEY_PLAIN, font_scl, 1
+ )[0]
w_font_scl = h_font_scl = font_scl
if max_width > 0:
if textsize_x < max_width:
# scale up scale in for loop
for scale_increment in np.arange(0, 10, 0.1):
w_font_scl = scale_increment
- textsize_x, textsize_y = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, w_font_scl, 1)[0]
+ textsize_x, textsize_y = cv2.getTextSize(
+ label, cv2.FONT_HERSHEY_PLAIN, w_font_scl, 1
+ )[0]
if textsize_x < max_width:
continue
else:
@@ -397,13 +421,15 @@ def find_font_scale(label, max_width = 0, max_height = 0):
# scale up scale in for loop
for scale_increment in np.arange(0, 10, 0.1):
h_font_scl = scale_increment
- textsize_x, textsize_y = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, h_font_scl, 1)[0]
+ textsize_x, textsize_y = cv2.getTextSize(
+ label, cv2.FONT_HERSHEY_PLAIN, h_font_scl, 1
+ )[0]
if textsize_y < max_height:
continue
else:
h_font_scl = scale_increment - 0.5
break
- if max_width > 0 and max_height > 0:
+ if max_width > 0 and max_height > 0:
return min(w_font_scl, h_font_scl)
else:
return max(w_font_scl, h_font_scl)
@@ -419,8 +445,14 @@ def find_x_start_new_label(x_size, w, label):
trunc_label = label[0:w]
return start_x, trunc_label
-
- def labels(self, img, output_filename, descriptive_label, height_percent:tuple, ssoss_and_descriptive = True ):
+ def labels(
+ self,
+ img,
+ output_filename,
+ descriptive_label,
+ height_percent: tuple,
+ ssoss_and_descriptive=True,
+ ):
alpha = 1 # Transparency factor.
text_font = cv2.FONT_HERSHEY_PLAIN
@@ -436,10 +468,14 @@ def labels(self, img, output_filename, descriptive_label, height_percent:tuple,
# calculated descriptive label dimensions
descriptive_label_height = int(img_height * descriptive_label_percent)
descriptive_label_y = img_height - descriptive_label_height
- font_scale = self.find_font_scale(descriptive_label, max_width = img_width)
- textsize_x, textsize_y = cv2.getTextSize(descriptive_label, text_font, font_scale, font_thickness)[0]
- text_y = int((img_height - descriptive_label_height/2.0)+textsize_y/2.0)
- text_x, descriptive_label = self.find_x_start_new_label(textsize_x, img_width, descriptive_label)
+ font_scale = self.find_font_scale(descriptive_label, max_width=img_width)
+ textsize_x, textsize_y = cv2.getTextSize(
+ descriptive_label, text_font, font_scale, font_thickness
+ )[0]
+ text_y = int((img_height - descriptive_label_height / 2.0) + textsize_y / 2.0)
+ text_x, descriptive_label = self.find_x_start_new_label(
+ textsize_x, img_width, descriptive_label
+ )
if ssoss_and_descriptive:
@@ -447,65 +483,136 @@ def labels(self, img, output_filename, descriptive_label, height_percent:tuple,
# calculated ssoss_ad dimensions
ssoss_label_height = int(img_height * ssoss_percent)
- ssoss_label_font_scale = self.find_font_scale(ssoss_label, max_height = ssoss_label_height)
- ssoss_label_textsize_x, ssoss_textsize_y = cv2.getTextSize(ssoss_label, text_font, ssoss_label_font_scale, 1)[0]
-
+ ssoss_label_font_scale = self.find_font_scale(
+ ssoss_label, max_height=ssoss_label_height
+ )
+ ssoss_label_textsize_x, ssoss_textsize_y = cv2.getTextSize(
+ ssoss_label, text_font, ssoss_label_font_scale, 1
+ )[0]
+
ssoss_label_text_x = int((img_width - ssoss_label_textsize_x) / 2.0)
ssoss_label_text_y = int(img_height)
- ssoss_text_x, fitted_ssoss_label = self.find_x_start_new_label(ssoss_label_textsize_x, img_width, ssoss_label)
+ ssoss_text_x, fitted_ssoss_label = self.find_x_start_new_label(
+ ssoss_label_textsize_x, img_width, ssoss_label
+ )
# Calculated y-coordinates for different labels
- ssoss_label_y = img_height - ssoss_label_height # y-coordinate of top of ssoss ad
- above_descriptive_and_ssoss_label_y = ssoss_label_y - descriptive_label_height # y-coordinate of top of descriptive label
- descriptive_and_ssoss_label_text_y = ssoss_label_y - int(textsize_y/2.0)
-
- #ssoss ad box
- cv2.rectangle(img_copy,pt1=(0, img_height), pt2=(img_width, ssoss_label_y), color = BLACK, thickness=-1)
- ssoss_and_descriptive_label = cv2.addWeighted(img_copy, alpha, img, 1-alpha, 0)
- #image label box
- cv2.rectangle(img_copy, pt1=(0, ssoss_label_y), pt2=(img_width, above_descriptive_and_ssoss_label_y), color=WHITE, thickness=-1)
- ssoss_and_descriptive_label = cv2.addWeighted(img_copy, alpha, img, 1-alpha, 0)
+ ssoss_label_y = (
+ img_height - ssoss_label_height
+ ) # y-coordinate of top of ssoss ad
+ above_descriptive_and_ssoss_label_y = (
+ ssoss_label_y - descriptive_label_height
+ ) # y-coordinate of top of descriptive label
+ descriptive_and_ssoss_label_text_y = ssoss_label_y - int(textsize_y / 2.0)
+
+ # ssoss ad box
+ cv2.rectangle(
+ img_copy,
+ pt1=(0, img_height),
+ pt2=(img_width, ssoss_label_y),
+ color=BLACK,
+ thickness=-1,
+ )
+ ssoss_and_descriptive_label = cv2.addWeighted(
+ img_copy, alpha, img, 1 - alpha, 0
+ )
+ # image label box
+ cv2.rectangle(
+ img_copy,
+ pt1=(0, ssoss_label_y),
+ pt2=(img_width, above_descriptive_and_ssoss_label_y),
+ color=WHITE,
+ thickness=-1,
+ )
+ ssoss_and_descriptive_label = cv2.addWeighted(
+ img_copy, alpha, img, 1 - alpha, 0
+ )
# text for ssoss ad and label
- ssoss_and_descriptive_label = cv2.putText(ssoss_and_descriptive_label, descriptive_label, (text_x, descriptive_and_ssoss_label_text_y), text_font, font_scale, BLACK, 2)
- ssoss_and_descriptive_label = cv2.putText(ssoss_and_descriptive_label, fitted_ssoss_label, (ssoss_text_x, ssoss_label_text_y), text_font, ssoss_label_font_scale, WHITE, 2)
+ ssoss_and_descriptive_label = cv2.putText(
+ ssoss_and_descriptive_label,
+ descriptive_label,
+ (text_x, descriptive_and_ssoss_label_text_y),
+ text_font,
+ font_scale,
+ BLACK,
+ 2,
+ )
+ ssoss_and_descriptive_label = cv2.putText(
+ ssoss_and_descriptive_label,
+ fitted_ssoss_label,
+ (ssoss_text_x, ssoss_label_text_y),
+ text_font,
+ ssoss_label_font_scale,
+ WHITE,
+ 2,
+ )
# save image
cv2.imwrite(output_filename, ssoss_and_descriptive_label)
-
+
else:
# no ssoss label, just descriptive label (not recommended)
- cv2.rectangle(img_copy, pt1=(0, img_height), pt2=(img_width, descriptive_label_y), color=WHITE, thickness=-1)
- img_new = cv2.addWeighted(img_copy, alpha, img, 1-alpha, 0)
- cv2.putText(img_new, descriptive_label, (text_x, text_y), text_font, font_scale, BLACK, 2)
+ cv2.rectangle(
+ img_copy,
+ pt1=(0, img_height),
+ pt2=(img_width, descriptive_label_y),
+ color=WHITE,
+ thickness=-1,
+ )
+ img_new = cv2.addWeighted(img_copy, alpha, img, 1 - alpha, 0)
+ cv2.putText(
+ img_new,
+ descriptive_label,
+ (text_x, text_y),
+ text_font,
+ font_scale,
+ BLACK,
+ 2,
+ )
cv2.imwrite(output_filename, img_new)
@staticmethod
- def generate_descriptive_label(path, fn, road_object_info, static_object_type="generic"):
+ def generate_descriptive_label(
+ path, fn, road_object_info, static_object_type="generic"
+ ):
sro_id = int(fn.split(".")[0])
ts = float(fn.split("-")[-1].replace(".jpg", ""))
distance = 0
if static_object_type == "intersection":
b_index = int((fn.rsplit(".")[1])[0:1])
- descriptive_label = road_object_info.intersection_frame_description(sro_id, b_index, distance, ts, desc_type="label")
+ descriptive_label = road_object_info.intersection_frame_description(
+ sro_id, b_index, distance, ts, desc_type="label"
+ )
else:
- descriptive_label = road_object_info.generic_so_description(sro_id, distance, ts, desc_type="label")
+ descriptive_label = road_object_info.generic_so_description(
+ sro_id, distance, ts, desc_type="label"
+ )
return descriptive_label
-
+
def generic_so_img_overlay_info_box(self, vid_filename_dir, ro_info):
- img_path = Path(self.video_dir, "out", self.video_filepath.stem, "generic_static_object_sightings/")
+ img_path = Path(
+ self.video_dir,
+ "out",
+ self.video_filepath.stem,
+ "generic_static_object_sightings/",
+ )
label_img_path = Path(img_path, "labeled/")
os.makedirs(label_img_path, exist_ok=True)
img_dir_string = str(img_path)
label_img_dir_string = str(label_img_path)
- pattern_criteria = ['*.jpg','[!.]*']
+ pattern_criteria = ["*.jpg", "[!.]*"]
- descriptive_label_percent = 0.05 # 5% for descriptive label at bottom of image
- ssoss_label_percent = 0.02 # 2% for ssoss advertisement label at very bottom of image
+ descriptive_label_percent = 0.05 # 5% for descriptive label at bottom of image
+ ssoss_label_percent = (
+ 0.02 # 2% for ssoss advertisement label at very bottom of image
+ )
label_height_percents = (descriptive_label_percent, ssoss_label_percent)
# filter for images where * is wildcard and don't include hidden (.*) files
- pathlist = [f for f in Path(img_dir_string).rglob('*.jpg') if not str(f).startswith(".")]
+ pathlist = [
+ f for f in Path(img_dir_string).rglob("*.jpg") if not str(f).startswith(".")
+ ]
for file in pathlist:
if not str(file.stem).startswith("."):
filename = str(Path(file).name)
@@ -514,26 +621,37 @@ def generic_so_img_overlay_info_box(self, vid_filename_dir, ro_info):
overlay = img.copy()
label_img_name = str(Path(label_img_path, filename))
- descriptive_label = self.generate_descriptive_label(label_img_path, filename, ro_info)
+ descriptive_label = self.generate_descriptive_label(
+ label_img_path, filename, ro_info
+ )
- self.labels(img, label_img_name, descriptive_label, label_height_percents)
-
+ self.labels(
+ img, label_img_name, descriptive_label, label_height_percents
+ )
def img_overlay_info_box(self, vid_filename_dir, ro_info):
- img_path = Path(self.video_dir, "out", self.video_filepath.stem, "signal_sightings/")
+ img_path = Path(
+ self.video_dir, "out", self.video_filepath.stem, "signal_sightings/"
+ )
label_img_path = Path(img_path, "labeled/")
os.makedirs(label_img_path, exist_ok=True)
img_dir_string = str(img_path)
label_img_dir_string = str(label_img_path)
- pattern_criteria = ['*.[0-3]-*.jpg','[!.]*']
+ pattern_criteria = ["*.[0-3]-*.jpg", "[!.]*"]
- descriptive_label_percent = 0.05 # 5% for descriptive label at bottom of image
- ssoss_label_percent = 0.02 # 2% for ssoss advertisement label at very bottom of image
+ descriptive_label_percent = 0.05 # 5% for descriptive label at bottom of image
+ ssoss_label_percent = (
+ 0.02 # 2% for ssoss advertisement label at very bottom of image
+ )
label_height_percents = (descriptive_label_percent, ssoss_label_percent)
# filter for images where * is wildcard and don't include hidden (.*) files
- pathlist = [f for f in Path(img_dir_string).rglob('*.[0-3]-*.jpg') if not str(f).startswith(".")]
+ pathlist = [
+ f
+ for f in Path(img_dir_string).rglob("*.[0-3]-*.jpg")
+ if not str(f).startswith(".")
+ ]
for file in pathlist:
if not str(file.stem).startswith("."):
filename = str(Path(file).name)
@@ -542,6 +660,10 @@ def img_overlay_info_box(self, vid_filename_dir, ro_info):
overlay = img.copy()
label_img_name = str(Path(label_img_path, filename))
- descriptive_label = self.generate_descriptive_label(label_img_path, filename,ro_info, static_object_type="intersection")
+ descriptive_label = self.generate_descriptive_label(
+ label_img_path, filename, ro_info, static_object_type="intersection"
+ )
- self.labels(img, label_img_name, descriptive_label, label_height_percents)
+ self.labels(
+ img, label_img_name, descriptive_label, label_height_percents
+ )
From fcb9695324e3afbe12c785cc0ac14a1dff3cdfb7 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Mon, 9 Jun 2025 18:10:24 -0700
Subject: [PATCH 13/46] Revert "Refactor large functions"
---
src/ssoss/dynamic_road_object.py | 323 +++++++++++++---------------
src/ssoss/process_video.py | 350 ++++++++++---------------------
2 files changed, 265 insertions(+), 408 deletions(-)
diff --git a/src/ssoss/dynamic_road_object.py b/src/ssoss/dynamic_road_object.py
index 9bec8fb..bc6b890 100644
--- a/src/ssoss/dynamic_road_object.py
+++ b/src/ssoss/dynamic_road_object.py
@@ -41,9 +41,9 @@ def __init__(
self.MStoMPH = 2.23694
self.FTPStoMPH = 0.681818
- self.MPHtoFTPS = 1 / self.FTPStoMPH
+ self.MPHtoFTPS = 1/self.FTPStoMPH
self.MStoFTPS = self.MStoMPH * self.MPHtoFTPS
- self.DATE_FORMAT = "%m-%d-%Y--%H-%M-%S.%f-%Z"
+ self.DATE_FORMAT = '%m-%d-%Y--%H-%M-%S.%f-%Z'
# self.sorted_sroDF = None
@@ -67,8 +67,7 @@ def __init__(
self.sorted_sroDF = None
self.closest_intersection = self.get_closest_intersection(as_list=False)
self.closest_intersection_list = self.get_closest_intersection(as_list=True)
- self.closest_approaching_intersection = (
- self.get_closest_approaching_intersection()
+ self.closest_approaching_intersection = self.get_closest_approaching_intersection(
)
self.in_file_path = PurePath("./in/")
@@ -83,7 +82,8 @@ def mask(df, key, value) -> pd.DataFrame:
return df[df[key] == value]
def update_location_simple(self, i: int = 2) -> None:
- """Update dynamic object location with new data point i"""
+ """ Update dynamic object location with new data point i
+ """
self.t0 = self.t1
self.t1 = self.gpx_df.loc[i].t
@@ -96,9 +96,7 @@ def update_location_simple(self, i: int = 2) -> None:
self.spd = self.gpx_df.loc[i].spd
self.closest_intersection = self.get_closest_intersection()
- self.closest_approaching_intersection = (
- self.get_closest_approaching_intersection()
- )
+ self.closest_approaching_intersection = self.get_closest_approaching_intersection()
def first_timestamp(self) -> pd.Timestamp:
t = datetime.fromisoformat(str(self.t0))
@@ -146,7 +144,7 @@ def get_location(self, i: int | None = None, elev: bool = False) -> str:
return self.pt1.format_decimal()
def get_dist_step(self) -> geopy.distance:
- """first distance step
+ """ first distance step
:return: geopy distance in feet
"""
@@ -177,7 +175,7 @@ def prev_dist_to_sro(self, sro: StaticRoadObject) -> geopy.distance:
def get_spd(self, units="MPH") -> float:
if units == "MPH":
- return float(self.spd * self.MStoMPH) # Ft/sec
+ return float(self.spd * self.MStoMPH) # Ft/sec
else:
return float(self.spd) # Meters/sec
@@ -188,7 +186,7 @@ def calculate_spd_values(self):
This might be used if GPX v1.1 does not log speed data, so this will calculate it.
"""
- self.gpx_df.drop(["spd"], axis=1) # remove None values for speed
+ self.gpx_df.drop(['spd'], axis=1) # remove None values for speed
spd_list = [0]
for n in range(1, self.gpx_df.last_valid_index()):
@@ -196,48 +194,48 @@ def calculate_spd_values(self):
if self.get_time_step() == 0:
speed = 0
else:
- speed = (self.get_dist_step() / self.get_time_step()) * self.FTPStoMPH
+ speed = (self.get_dist_step() /
+ self.get_time_step()) * self.FTPStoMPH
spd_list.append(speed)
if n == self.gpx_df.last_valid_index():
- self.gpx_df["spd"] = spd_list
+ self.gpx_df['spd'] = spd_list
def get_bearing(self) -> float:
b = gpxgeo.get_course(self.pt0[0], self.pt0[1], self.pt1[0], self.pt1[1])
self.bearing = b
return b
- def approaching(
- self, sro: StaticRoadObject
- ) -> bool: # , self.sro: StaticRoadObject
+ def approaching(self, sro: StaticRoadObject) -> bool: # , self.sro: StaticRoadObject
if self.cur_dist_to_sro(sro) <= self.prev_dist_to_sro(sro):
return True
else:
return False
def get_closest_intersection(self, as_list=False) -> Intersection:
- """returns None or 1st or ascending sorted list of intersection objects based on distance"""
+ """returns None or 1st or ascending sorted list of intersection objects based on distance
+ """
# Crop min and max distances to limit search, sort and length
min_sd = self.sro_df.iloc[0, 1].get_sd("min")
max_sd = self.sro_df.iloc[0, 1].get_sd("max")
for row in range(0, self.sro_df.last_valid_index()):
- self.sro_df.loc[row, "d"] = self.cur_dist_to_sro(self.sro_df.iloc[row, 1])
+ self.sro_df.loc[row,
+ "d"] = self.cur_dist_to_sro(self.sro_df.iloc[row, 1])
self.sro_df.loc[row, "approaching"] = self.approaching(
- self.sro_df.iloc[row, 1]
- )
+ self.sro_df.iloc[row, 1])
- self.sorted_sroDF = self.sro_df.sort_values(by=["d"], ignore_index=True)
+ self.sorted_sroDF = self.sro_df.sort_values(by=['d'], ignore_index=True)
- min_row = self.sorted_sroDF[self.sorted_sroDF["d"].ge(min_sd)].index
+ min_row = self.sorted_sroDF[self.sorted_sroDF['d'].ge(min_sd)].index
if len(min_row) == 0:
min_row = 0
else:
min_row = min(min_row)
- max_row = self.sorted_sroDF[self.sorted_sroDF["d"].le(max_sd)].index
+ max_row = self.sorted_sroDF[self.sorted_sroDF['d'].le(max_sd)].index
if len(max_row) == 0:
max_row = 0
else:
@@ -254,7 +252,7 @@ def get_closest_intersection(self, as_list=False) -> Intersection:
return limited_df.iloc[0, 1]
def get_closest_approaching_intersection(self, as_list=False) -> Intersection:
- """returns the closest, approaching intersection object for the current point
+ """ returns the closest, approaching intersection object for the current point
of the dynamic object based on sorted list of intersections.
:return: intersection object
@@ -263,7 +261,7 @@ def get_closest_approaching_intersection(self, as_list=False) -> Intersection:
if df is None:
return None
else:
- mask = df["approaching"].values == True
+ mask = df['approaching'].values == True
if df[mask].empty:
return None
elif as_list:
@@ -290,7 +288,7 @@ def approach_leg(self, itrsxn: Intersection, index_out=True):
self.calc_bearing_diff(itrsxn.get_bearing(0)),
self.calc_bearing_diff(itrsxn.get_bearing(1)),
self.calc_bearing_diff(itrsxn.get_bearing(2)),
- self.calc_bearing_diff(itrsxn.get_bearing(3)),
+ self.calc_bearing_diff(itrsxn.get_bearing(3))
]
approach_leg_index = np.argmin(veh_int_diff)
@@ -311,7 +309,7 @@ def approach_leg(self, itrsxn: Intersection, index_out=True):
return app_leg_dir
def drive_gpx(self, gpx_filename: str, use_pickle_file=False) -> pd.DataFrame:
- """Load the GPX file into a dataframe with timestamp, location, speed, dist to event, bearing, and
+ """ Load the GPX file into a dataframe with timestamp, location, speed, dist to event, bearing, and
id of approaching intersection
:param gpx_filename: absolute filepath of file (without .gpx or .p file)
@@ -338,7 +336,7 @@ def drive_gpx(self, gpx_filename: str, use_pickle_file=False) -> pd.DataFrame:
"spd": [],
"distance": [],
"bearing": [],
- "approaching": [],
+ "approaching": []
}
for i in range(2, self.gpx_df.last_valid_index()):
@@ -356,9 +354,9 @@ def drive_gpx(self, gpx_filename: str, use_pickle_file=False) -> pd.DataFrame:
pass
else:
approaching_sd = False
- appr_distance = cai.distance_from_sb(
- self.get_location(), self.approach_leg(cai)
- ) - cai.get_sd(self.approach_leg(cai))
+ appr_distance = (cai.distance_from_sb(
+ self.get_location(), self.approach_leg(cai)) -
+ cai.get_sd(self.approach_leg(cai)))
if appr_distance > 0:
approaching_sd = True
@@ -381,91 +379,103 @@ def drive_gpx(self, gpx_filename: str, use_pickle_file=False) -> pd.DataFrame:
)
return approach_log_df
- def drive_gpx_stop_bar(self, gpx_filename, use_pickle_file=True) -> pd.DataFrame:
- """Load the GPX file and generate a dataframe of approach events."""
+ def drive_gpx_stop_bar(self,
+ gpx_filename,
+ use_pickle_file=True) -> pd.DataFrame:
+ """ Load the GPX file into a dataframe with timestamp, location, speed, dist to event, bearing, and
+ id of approaching intersection
+ :param gpx_directory: file directory where .gpx file is for input
+ :param gpx_filename: filename of .gpx file (without .gpx)
+ :param out_file_directory: where output files are saved to (./out/)
+ :param use_pickle_file: default to False, can be faster to load from pickle
+ :return: DataFrame with GPX calculated for sro file and saved a CSV and Pickle file of gpx information
+
+ """
+ # self.gpx_filepath = gpx_directory + gpx_filename + ".gpx"
pickle_file = self.out_file_path / (str(gpx_filename) + ".p")
csv_file = self.out_file_path / (str(gpx_filename) + ".csv")
- if use_pickle_file and os.path.isfile(pickle_file):
- return pd.read_pickle(pickle_file)
+ approach_sb_log_df = None
- approach_sb_log_df = self.parse_gpx_points()
- self.write_summary(approach_sb_log_df, csv_file, pickle_file)
- return approach_sb_log_df
-
- def parse_gpx_points(self) -> pd.DataFrame:
- """Iterate through GPX points and log approach information."""
-
- appr_dict = self._init_approach_dict()
- for i in tqdm(
- range(2, self.gpx_df.last_valid_index()),
- desc="Loading GPX:",
- unit="GPX Points",
- ):
- self.update_location_simple(i)
- cai = self.get_closest_approaching_intersection()
- if cai is None:
- continue
+ if use_pickle_file and os.path.isfile(pickle_file):
+ approach_sb_log_df = pd.read_pickle(pickle_file)
+ return approach_sb_log_df
+ else:
+ # dictionary-> Keys:Values
+ appr_dict = {
+ "id": [],
+ "appr_dir": [],
+ "timestamp": [],
+ "time_delta": [],
+ "location": [],
+ "spd": [],
+ "distance": [],
+ "bearing": [],
+ "approaching": []
+ }
- appr_distance = cai.distance_from_sb(
- self.get_location(), self.approach_leg(cai)
- ) - cai.get_sd(self.approach_leg(cai))
- self.update_approach_dict(appr_dict, cai, appr_distance)
+ for i in tqdm(range(2, self.gpx_df.last_valid_index()),
+ desc="Loading GPX:",
+ unit="GPX Points"):
- return pd.DataFrame(appr_dict)
+ self.update_location_simple(i)
+ cai = self.get_closest_approaching_intersection()
+ if cai is None:
+ if i == self.gpx_df.last_valid_index() - 1:
+ approach_sb_log_df = pd.DataFrame(appr_dict)
+ approach_sb_log_df.to_csv(csv_file)
+ approach_sb_log_df.to_pickle(pickle_file)
+ print(
+ f"exported dataframe to CSV (in {csv_file}) and Pickle (in {pickle_file})"
+ )
+ else:
+ pass
+ else:
+ approaching_sd = False
+ appr_distance = (cai.distance_from_sb(
+ self.get_location(), self.approach_leg(cai)) -
+ cai.get_sd(self.approach_leg(cai)))
+ # print(f'{i}: {appr_distance}ft from {cai.get_name()}')
+ if appr_distance > 0:
+ approaching_sd = True
- @staticmethod
- def _init_approach_dict() -> dict:
- return {
- "id": [],
- "appr_dir": [],
- "timestamp": [],
- "time_delta": [],
- "location": [],
- "spd": [],
- "distance": [],
- "bearing": [],
- "approaching": [],
- }
+ appr_dict["id"].append(cai.get_id_num())
+ appr_dict["appr_dir"].append(self.approach_leg(cai))
+ appr_dict["timestamp"].append(self.get_utc_timestamp())
+ appr_dict["time_delta"].append(self.get_time_step())
+ appr_dict["location"].append(self.get_location())
+ appr_dict["spd"].append(self.get_spd())
+ appr_dict["distance"].append(appr_distance)
+ appr_dict["bearing"].append(self.get_bearing())
+ appr_dict["approaching"].append(approaching_sd)
- def update_approach_dict(
- self, appr_dict: dict, cai: Intersection, distance: float
- ) -> None:
- """Append approach information for a single GPX point."""
-
- approaching_sd = distance > 0
- appr_dict["id"].append(cai.get_id_num())
- appr_dict["appr_dir"].append(self.approach_leg(cai))
- appr_dict["timestamp"].append(self.get_utc_timestamp())
- appr_dict["time_delta"].append(self.get_time_step())
- appr_dict["location"].append(self.get_location())
- appr_dict["spd"].append(self.get_spd())
- appr_dict["distance"].append(distance)
- appr_dict["bearing"].append(self.get_bearing())
- appr_dict["approaching"].append(approaching_sd)
+ if i == self.gpx_df.last_valid_index() - 1:
+ print("WRITING DICT TO DATAFRAME")
+ approach_sb_log_df = pd.DataFrame(appr_dict)
+ approach_sb_log_df.to_csv(csv_file)
+ approach_sb_log_df.to_pickle(pickle_file)
+ print(
+ f"Exported data frame to CSV ({csv_file}) and Pickle ({pickle_file})"
+ )
+ print(f'ApproachSB_DF:{approach_sb_log_df}')
- @staticmethod
- def write_summary(
- df: pd.DataFrame, csv_file: PurePath, pickle_file: PurePath
- ) -> None:
- df.to_csv(csv_file)
- df.to_pickle(pickle_file)
- print(f"Exported data frame to CSV ({csv_file}) and Pickle ({pickle_file})")
+ return approach_sb_log_df
def get_street(self, itrsxn: Intersection) -> str:
- """current Street of intersection approach leg"""
+ """ current Street of intersection approach leg
+ """
apr_leg_index = self.approach_leg(itrsxn)
return itrsxn.get_name(apr_leg_index)
def get_info(self, itrsxn: Intersection) -> str:
- """get ID#, bearing, and name about an intersection
+ """ get ID#, bearing, and name about an intersection
:param itrsxn: Intersection Object
:return: string of info in format ID#.Compass_Heading - Intersection Name
"""
return str(
- f"{itrsxn.get_id_num()}.{self.approach_leg(itrsxn)}-{itrsxn.get_name()}"
+ f'{itrsxn.get_id_num()}.{self.approach_leg(itrsxn)}-{itrsxn.get_name()}'
)
def get_itrsxn_obj_by_id(self, id_num: int) -> Intersection:
@@ -474,7 +484,7 @@ def get_itrsxn_obj_by_id(self, id_num: int) -> Intersection:
:param id_num: ID number of intersection object
:return: intersection object
"""
- mask = self.sro_df["id"] == id_num
+ mask = self.sro_df['id'] == id_num
return self.sro_df[mask].iloc[0, 1]
def get_info_by_id(self, id_num: int, appr_dir) -> str:
@@ -485,14 +495,14 @@ def get_info_by_id(self, id_num: int, appr_dir) -> str:
:return: string in format:
ID#.direction_index - street1 + street2 - event distance string
"""
- mask = self.sro_df["id"] == id_num
+ mask = self.sro_df['id'] == id_num
itrsxn = self.sro_df[mask].iloc[0, 1]
"""
Example: 2.0-YVR+California-35mph-325ft-UTCtime
Include: direction, Approach Posted Speed
"""
return str(
- f"{itrsxn.get_id_num()}.{appr_dir}-{itrsxn.get_name()}-{itrsxn.get_sd(appr_dir)}ft"
+ f'{itrsxn.get_id_num()}.{appr_dir}-{itrsxn.get_name()}-{itrsxn.get_sd(appr_dir)}ft'
)
@staticmethod
@@ -501,7 +511,7 @@ def find_index(df: pd.DataFrame, i: int) -> pd.Series:
@staticmethod
def t_spd_adjust(d0: float, spd0: float, d1: float, spd1: float) -> float:
- """Adjusts time of event based on speed of gpx points i and i+1.
+ """ Adjusts time of event based on speed of gpx points i and i+1.
:param d0: distance a t=0
:param spd0: speed at t=0
@@ -547,39 +557,31 @@ def seek_sd(self, gpx_df, csv_out=True) -> pd.DataFrame:
"appr_dir": [],
"timestamp": [],
"location": [],
- "spd": [], # MPH
- "distance": [], # FEET
- "t_adjust": [], # SECONDS
- "string_desc": [],
+ "spd": [], # MPH
+ "distance": [], # FEET
+ "t_adjust": [], # SECONDS
+ "string_desc": []
}
for i in range(1, (gpx_df.last_valid_index() - 3)):
- if (
- gpx_df.spd.iloc[i] > 0.4
- and gpx_df.spd.iloc[i + 1] > 0.4
- and gpx_df.distance.iloc[i + 1]
- <= self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).get_sd(
- gpx_df.appr_dir.iloc[i + 1]
- )
- + (gpx_df.time_delta.iloc[i] * (gpx_df.spd.iloc[i] * self.MPHtoFTPS))
- ):
+ if gpx_df.spd.iloc[i] > 0.4 and \
+ gpx_df.spd.iloc[i + 1] > 0.4 and \
+ gpx_df.distance.iloc[i + 1] <= \
+ self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).get_sd(gpx_df.appr_dir.iloc[i + 1]) + \
+ (gpx_df.time_delta.iloc[i] * (gpx_df.spd.iloc[i] * self.MPHtoFTPS)):
print("heuristic filter")
- if (
- (gpx_df.approaching.iloc[i - 1]) == True
- and (gpx_df.approaching.iloc[i]) == True
- and (gpx_df.approaching.iloc[i + 1] == False)
- and (gpx_df.approaching.iloc[i + 2] == False)
- ):
+ if (gpx_df.approaching.iloc[i - 1]) == True and \
+ (gpx_df.approaching.iloc[i]) == True and \
+ (gpx_df.approaching.iloc[i + 1] == False) and \
+ (gpx_df.approaching.iloc[i + 2] == False):
print("approach filter")
- t_adjust = self.t_spd_adjust(
- gpx_df.distance.iloc[i],
- gpx_df.spd.iloc[i],
- gpx_df.distance.iloc[i + 1],
- gpx_df.spd.iloc[i + 1],
- )
+ t_adjust = self.t_spd_adjust(gpx_df.distance.iloc[i], gpx_df.spd.iloc[i],
+ gpx_df.distance.iloc[i + 1], gpx_df.spd.iloc[i + 1]
+ )
+
# t_adj is less than timestep when dynamic object is close to event distance
if t_adjust <= gpx_df.time_delta.iloc[i]:
@@ -592,12 +594,10 @@ def seek_sd(self, gpx_df, csv_out=True) -> pd.DataFrame:
df_dict["distance"].append(gpx_df.distance.iloc[i])
df_dict["t_adjust"].append(t_adjust)
df_dict["string_desc"].append(
- self.get_info_by_id(
- gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i]
- )
- )
+ self.get_info_by_id(gpx_df.id.iloc[i],
+ gpx_df.appr_dir.iloc[i]))
print(
- f"seek_sd_Info:{self.get_info_by_id(gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i])}, time adjust:{t_adjust}"
+ f'seek_sd_Info:{self.get_info_by_id(gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i])}, time adjust:{t_adjust}'
)
else:
pass
@@ -628,56 +628,38 @@ def seek_sb(self, gpx_df, csv_out=True) -> pd.DataFrame:
"spd": [],
"distance": [],
"t_adjust": [],
- "string_desc": [],
+ "string_desc": []
}
for i in range(1, (gpx_df.last_valid_index() - 3)):
- if (
- self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).distance_from_sb(
- gpx_df.location.iloc[i + 1], gpx_df.appr_dir.iloc[i + 1]
- )
- is None
- ):
- print(f"USING CENTER OF INTERSECTION LOCATION")
- approach_distance = (
- self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).get_sd(
- gpx_df.appr_dir.iloc[i + 1]
- )
- + 50
- )
+ if self.get_itrsxn_obj_by_id(gpx_df.id.iloc[i]).distance_from_sb(
+ gpx_df.location.iloc[i + 1], gpx_df.appr_dir.iloc[i + 1]) is None:
+ print(f'USING CENTER OF INTERSECTION LOCATION')
+ approach_distance = self.get_itrsxn_obj_by_id(
+ gpx_df.id.iloc[i]).get_sd(gpx_df.appr_dir.iloc[i + 1]) + 50
else:
- print(f"USING STOP BAR LOCATION")
+ print(f'USING STOP BAR LOCATION')
approach_distance = self.get_itrsxn_obj_by_id(
- gpx_df.id.iloc[i]
- ).distance_from_sb(
- gpx_df.location.iloc[i + 1], gpx_df.appr_dir.iloc[i + 1]
- )
-
- if (
- gpx_df.spd.iloc[i] > 0.2
- and gpx_df.spd.iloc[i + 1] > 0.2
- and gpx_df.distance.iloc[i + 1]
- <= approach_distance
- <= gpx_df.distance.iloc[i]
- ):
+ gpx_df.id.iloc[i]).distance_from_sb(gpx_df.location.iloc[i + 1],
+ gpx_df.appr_dir.iloc[i + 1])
+
+ if gpx_df.spd.iloc[i] > 0.2 and gpx_df.spd.iloc[i + 1] > 0.2 \
+ and gpx_df.distance.iloc[i + 1] <= approach_distance <= gpx_df.distance.iloc[i]:
if True:
"""
- calculate exact time based on speed. using i and i+1.
-
+ calculate exact time based on speed. using i and i+1.
+
For point i and i+1, the exact time adjustment is weighted
based on the speeds at these two point's in time. This shifts the exact position
of the car to the most accurate time it was at the calculated sight distance
-
+
store that time in UTC in DF
"""
- t_adjust = self.t_spd_adjust(
- gpx_df.distance.iloc[i],
- gpx_df.spd.iloc[i],
- gpx_df.distance.iloc[i + 1],
- gpx_df.spd.iloc[i + 1],
- )
+ t_adjust = self.t_spd_adjust(gpx_df.distance.iloc[i], gpx_df.spd.iloc[i],
+ gpx_df.distance.iloc[i + 1], gpx_df.spd.iloc[i + 1]
+ )
if t_adjust <= 1:
df_dict_sb["id"].append(gpx_df.id.iloc[i])
@@ -688,14 +670,12 @@ def seek_sb(self, gpx_df, csv_out=True) -> pd.DataFrame:
df_dict_sb["distance"].append(gpx_df.distance.iloc[i])
df_dict_sb["t_adjust"].append(t_adjust)
df_dict_sb["string_desc"].append(
- self.get_info_by_id(
- gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i]
- )
- )
+ self.get_info_by_id(gpx_df.id.iloc[i],
+ gpx_df.appr_dir.iloc[i]))
# print(self.utc_to_timestamp(df.timestamp.iloc[i]))
print(
- f"info:{self.get_info_by_id(gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i])}"
+ f'info:{self.get_info_by_id(gpx_df.id.iloc[i], gpx_df.appr_dir.iloc[i])}'
)
else:
@@ -704,8 +684,7 @@ def seek_sb(self, gpx_df, csv_out=True) -> pd.DataFrame:
pass
if csv_out:
pd.DataFrame(df_dict_sb).to_csv(
- self.out_file_path / "approaching_intersections_Stopbar.csv"
- )
+ self.out_file_path / "approaching_intersections_Stopbar.csv")
return pd.DataFrame(df_dict_sb)
# TODO: create new method
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 2b2c761..ad98e9a 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -23,7 +23,7 @@ def __init__(self, video_filestring: str):
:param in_dir_path: filename of video to be processed (include video extension (.mov, .mp4, etc)
"""
- self.DATE_FORMAT = "%m-%d-%Y--%H-%M-%S.%f-%Z" # ISO 8601 format
+ self.DATE_FORMAT = '%m-%d-%Y--%H-%M-%S.%f-%Z' #ISO 8601 format
self.video_dir = Path(video_filestring).parents[0]
self.video_filepath = Path(video_filestring)
self.video_filename = Path(video_filestring).name
@@ -36,6 +36,7 @@ def __init__(self, video_filestring: str):
self.start_time = 0
self.capture = ""
+
self.vid_summary(vid_summary=True)
def set_start_utc(self, video_start_time):
@@ -70,17 +71,15 @@ def sync(self, frame: int, ts):
appends frame # and timestamp to sync.txt with video filename for reference
"""
sync_txt_folder = Path(self.video_dir, "out")
- sync_file = str(sync_txt_folder) + "/" + "sync.txt"
- with open(sync_file, "a") as f:
- f.write(f"{self.video_filepath.stem},{frame},{ts}\n")
+ sync_file = str(sync_txt_folder) +"/"+ "sync.txt"
+ with open(sync_file, 'a') as f:
+ f.write(f'{self.video_filepath.stem},{frame},{ts}\n')
elapsed_time = frame / self.fps
if type(ts) is float:
start_time = ts - elapsed_time
else:
- t_temp = dateutil.parser.isoparse(
- ts
- ) # isoparse parses ISO-8601 datetime string into datetime.datetime
+ t_temp = (dateutil.parser.isoparse(ts)) # isoparse parses ISO-8601 datetime string into datetime.datetime
start_time = t_temp.replace(tzinfo=timezone.utc).timestamp() - elapsed_time
self.set_start_utc(start_time)
self.vid_summary(vid_summary=False, sync=True)
@@ -124,10 +123,8 @@ def save_frame_ffmpeg(self, frame_number: int, output_path: Path) -> None:
str(output_path),
]
subprocess.run(cmd, check=True)
-
- def extract_generic_so_sightings(
- self, desc_timestamps, project, label_img=True, gen_gif=False
- ):
+
+ def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
"""
extract generic sighting images from video based on description and timestamp zip
@@ -136,67 +133,60 @@ def extract_generic_so_sightings(
"""
generic_so_desc, extract_frames = self.create_pic_list_from_zip(desc_timestamps)
- image_path = Path(
- self.video_dir,
- "out",
- self.video_filepath.stem,
- "generic_static_object_sightings/",
- )
+ image_path = Path(self.video_dir, "out", self.video_filepath.stem, "generic_static_object_sightings/")
image_path.mkdir(exist_ok=True, parents=True)
for desc, frame_num in tqdm(
- list(zip(generic_so_desc, extract_frames)),
- desc="Frame Extraction",
- unit=" frame",
- ):
- frame_name = str(desc) + ".jpg"
+ list(zip(generic_so_desc, extract_frames)),
+ desc="Frame Extraction",
+ unit=" frame"):
+ frame_name = str(desc) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
print(
- f"PICTURE CAPTURED AT {frame_num}: {desc}, Saved {generic_so_desc.index(desc) + 1} picture(s) of {len(extract_frames)}"
- )
+ f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {generic_so_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
if label_img:
self.generic_so_img_overlay_info_box(self.video_filename, project)
if gen_gif:
self.generate_gif(desc_timestamps, project)
- def extract_sightings(
- self, desc_timestamps, project, label_img=True, gen_gif=False
- ):
- """Extract sighting images from a video."""
-
- intersection_desc, extract_frames = self.create_pic_list_from_zip(
- desc_timestamps
- )
- image_path = Path(
- self.video_dir, "out", self.video_filepath.stem, "signal_sightings/"
- )
- image_path.mkdir(exist_ok=True, parents=True)
-
- self._save_frames(intersection_desc, extract_frames, image_path)
+ def extract_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
+ """
+ extract sighting images from video based on description and timestamp zip
- if label_img:
- self.img_overlay_info_box(self.video_filename, project)
- if gen_gif:
- self.generate_gif(desc_timestamps, project)
+ desc_timestamps: sorted list of tuples (filename description, timestamp of sight distance)
+ project: instance of ProcessRoadObjects() class
+ """
- def _save_frames(self, descriptions, frames, image_path: Path) -> None:
- """Save frames described by ``descriptions`` and ``frames`` to disk."""
+ intersection_desc, extract_frames = self.create_pic_list_from_zip(desc_timestamps)
+ image_path = Path(self.video_dir, "out", self.video_filepath.stem, "signal_sightings/")
+ image_path.mkdir(exist_ok=True, parents=True)
for desc, frame_num in tqdm(
- list(zip(descriptions, frames)), desc="Frame Extraction", unit=" frame"
- ):
- frame_name = str(desc) + ".jpg"
+ list(zip(intersection_desc, extract_frames)),
+ desc="Frame Extraction",
+ unit=" frame"):
+ frame_name = str(desc) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
print(
- f"PICTURE CAPTURED AT {frame_num}: {desc}, Saved {descriptions.index(desc) + 1} picture(s) of {len(frames)}"
- )
+ f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {intersection_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
+
+ if label_img:
+ self.img_overlay_info_box(self.video_filename, project)
+ if gen_gif:
+ self.generate_gif(desc_timestamps, project)
+ """
+ if bbox:
+ self.img_overlay_bbox(description_list,project)
+
+ """
+
# TODO: convert to start_sec, start_min=0, end_sec, end_min=0, folder="")
def extract_frames_between(self, start_sec, end_sec):
- """helper function to extract frames from video during a specific time period to estimate offset
+ """ helper function to extract frames from video during a specific time period to estimate offset
between gpx and video
:param start_sec: start time of video to extract image frames
@@ -236,10 +226,10 @@ def video_start_utc():
end_frame = int(self.get_fps() * end_sec)
for i in range(start_frame, end_frame + 1):
- frame_name = "Frame" + str(i) + ".jpg"
+ frame_name = 'Frame' + str(i) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(i, frame_filepath)
- print(f"Saved Image {i} to {frame_filepath}")
+ print(f'Saved Image {i} to {frame_filepath}')
def generate_gif(self, desc_timestamps, project, distance=100):
""" creates a folder of images to create a gif
@@ -261,18 +251,10 @@ def generate_gif(self, desc_timestamps, project, distance=100):
intersection_desc, frame_list = self.create_pic_list_from_zip(desc_timestamps)
- for i in tqdm(
- range(0, len(desc_timestamps)),
- desc="Generating Images for GIF",
- unit=" Location",
- ):
- gif_basepath = (
- self.video_dir
- / "out"
- / self.video_filepath.stem
- / "gif"
- / intersection_desc[i]
- )
+ for i in tqdm(range(0, len(desc_timestamps)),
+ desc="Generating Images for GIF",
+ unit=" Location"):
+ gif_basepath = self.video_dir / "out" / self.video_filepath.stem / "gif" / intersection_desc[i]
gif_path = Path(gif_basepath)
gif_path.mkdir(exist_ok=True, parents=True)
@@ -297,60 +279,57 @@ def generate_gif(self, desc_timestamps, project, distance=100):
frame_max = int(frame_list[i] + additional_frames)
for j in range(frame_min, frame_max + 1):
- frame_name = str(j) + "-" + intersection_desc[i] + ".jpg"
+ frame_name = str(j) + "-" + intersection_desc[i] + '.jpg'
frame_filepath = gif_path / frame_name
self.save_frame_ffmpeg(j, frame_filepath)
i += 1
self.assemble_gif()
def assemble_gif(self):
- # base_path = Path(self.video_dir, "out", self.video_filepath.stem, "gif/")
+ #base_path = Path(self.video_dir, "out", self.video_filepath.stem, "gif/")
gif_files_path = self.video_dir / "out" / self.video_filepath.stem / "gif"
base_path = Path(gif_files_path)
- # base_path = "./out/frames/" + self.video_filename + "/gif/"
- img_folders = sorted(base_path.glob("*"))
- kargs = {"duration": 1 / 9999999999999999}
+ #base_path = "./out/frames/" + self.video_filename + "/gif/"
+ img_folders = sorted(base_path.glob('*'))
+ kargs = {'duration': 1/9999999999999999}
for i in range(0, len(img_folders)):
images = []
img_folder = os.path.basename(img_folders[i])
- frame_images = sorted(
- glob.glob(os.path.join(base_path, img_folder + "/*.jpg"))
- )
+ frame_images = sorted(glob.glob(os.path.join(base_path, img_folder + "/*.jpg")))
for j in range(0, len(frame_images)):
if j % 5 == 0:
images.append(imageio.imread(frame_images[j]))
- imageio.mimsave(
- os.path.join(base_path, img_folder + ".gif"), images, **kargs
- )
- print(f"Created Gif: {img_folder}.gif")
+ imageio.mimsave(os.path.join(base_path, img_folder + ".gif"), images, **kargs)
+ print(f'Created Gif: {img_folder}.gif')
# TODO: delete folder of images after gif is created.
# TODO: overwite existing gif option
+
@staticmethod
def hr_min_sec(sec):
if sec < 60:
- return f"{sec} seconds"
+ return f'{sec} seconds'
elif sec < 3600:
minutes = int(sec / 60)
sec_remain = round(sec - minutes * 60, 2)
- return f"{minutes:02}:{sec_remain:05.2f} (MM:SS.ss)"
+ return f'{minutes:02}:{sec_remain:05.2f} (MM:SS.ss)'
elif sec >= 3600:
hr = int(sec / 3600)
minutes = int(sec / 60)
sec_remain = round(sec - minutes * 60, 2)
- return f"{hr:02}:{minutes:02}:{sec_remain:05.2f} (HH:MM:SS.ss)"
+ return f'{hr:02}:{minutes:02}:{sec_remain:05.2f} (HH:MM:SS.ss)'
def sizeConvert(self, size):
# convert filesize to human readable format
- K, M, G = 1024, 1024**2, 1024**3
+ K, M, G = 1024, 1024 ** 2, 1024 ** 3
if size >= G:
- return str(round(size / G, 2)) + " GB"
+ return str(round(size / G, 2)) + ' GB'
elif size >= M:
- return str(round(size / M, 2)) + " MB"
+ return str(round(size / M, 2)) + ' MB'
elif size >= K:
- return str(round(size / K, 2)) + " KB"
+ return str(round(size / K, 2)) + ' KB'
else:
- return str(round(size, 2)) + " Bytes"
+ return str(round(size, 2)) + ' Bytes'
def get_filesize(self):
# get the file size
@@ -396,21 +375,18 @@ def vid_summary(self, vid_summary, sync=False):
if sync:
print(sync_time)
+
@staticmethod
- def find_font_scale(label, max_width=0, max_height=0):
+ def find_font_scale(label, max_width = 0, max_height = 0):
font_scl = 0.2
- textsize_x, textsize_y = cv2.getTextSize(
- label, cv2.FONT_HERSHEY_PLAIN, font_scl, 1
- )[0]
+ textsize_x, textsize_y = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, font_scl, 1)[0]
w_font_scl = h_font_scl = font_scl
if max_width > 0:
if textsize_x < max_width:
# scale up scale in for loop
for scale_increment in np.arange(0, 10, 0.1):
w_font_scl = scale_increment
- textsize_x, textsize_y = cv2.getTextSize(
- label, cv2.FONT_HERSHEY_PLAIN, w_font_scl, 1
- )[0]
+ textsize_x, textsize_y = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, w_font_scl, 1)[0]
if textsize_x < max_width:
continue
else:
@@ -421,15 +397,13 @@ def find_font_scale(label, max_width=0, max_height=0):
# scale up scale in for loop
for scale_increment in np.arange(0, 10, 0.1):
h_font_scl = scale_increment
- textsize_x, textsize_y = cv2.getTextSize(
- label, cv2.FONT_HERSHEY_PLAIN, h_font_scl, 1
- )[0]
+ textsize_x, textsize_y = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, h_font_scl, 1)[0]
if textsize_y < max_height:
continue
else:
h_font_scl = scale_increment - 0.5
break
- if max_width > 0 and max_height > 0:
+ if max_width > 0 and max_height > 0:
return min(w_font_scl, h_font_scl)
else:
return max(w_font_scl, h_font_scl)
@@ -445,14 +419,8 @@ def find_x_start_new_label(x_size, w, label):
trunc_label = label[0:w]
return start_x, trunc_label
- def labels(
- self,
- img,
- output_filename,
- descriptive_label,
- height_percent: tuple,
- ssoss_and_descriptive=True,
- ):
+
+ def labels(self, img, output_filename, descriptive_label, height_percent:tuple, ssoss_and_descriptive = True ):
alpha = 1 # Transparency factor.
text_font = cv2.FONT_HERSHEY_PLAIN
@@ -468,14 +436,10 @@ def labels(
# calculated descriptive label dimensions
descriptive_label_height = int(img_height * descriptive_label_percent)
descriptive_label_y = img_height - descriptive_label_height
- font_scale = self.find_font_scale(descriptive_label, max_width=img_width)
- textsize_x, textsize_y = cv2.getTextSize(
- descriptive_label, text_font, font_scale, font_thickness
- )[0]
- text_y = int((img_height - descriptive_label_height / 2.0) + textsize_y / 2.0)
- text_x, descriptive_label = self.find_x_start_new_label(
- textsize_x, img_width, descriptive_label
- )
+ font_scale = self.find_font_scale(descriptive_label, max_width = img_width)
+ textsize_x, textsize_y = cv2.getTextSize(descriptive_label, text_font, font_scale, font_thickness)[0]
+ text_y = int((img_height - descriptive_label_height/2.0)+textsize_y/2.0)
+ text_x, descriptive_label = self.find_x_start_new_label(textsize_x, img_width, descriptive_label)
if ssoss_and_descriptive:
@@ -483,136 +447,65 @@ def labels(
# calculated ssoss_ad dimensions
ssoss_label_height = int(img_height * ssoss_percent)
- ssoss_label_font_scale = self.find_font_scale(
- ssoss_label, max_height=ssoss_label_height
- )
- ssoss_label_textsize_x, ssoss_textsize_y = cv2.getTextSize(
- ssoss_label, text_font, ssoss_label_font_scale, 1
- )[0]
-
+ ssoss_label_font_scale = self.find_font_scale(ssoss_label, max_height = ssoss_label_height)
+ ssoss_label_textsize_x, ssoss_textsize_y = cv2.getTextSize(ssoss_label, text_font, ssoss_label_font_scale, 1)[0]
+
ssoss_label_text_x = int((img_width - ssoss_label_textsize_x) / 2.0)
ssoss_label_text_y = int(img_height)
- ssoss_text_x, fitted_ssoss_label = self.find_x_start_new_label(
- ssoss_label_textsize_x, img_width, ssoss_label
- )
+ ssoss_text_x, fitted_ssoss_label = self.find_x_start_new_label(ssoss_label_textsize_x, img_width, ssoss_label)
# Calculated y-coordinates for different labels
- ssoss_label_y = (
- img_height - ssoss_label_height
- ) # y-coordinate of top of ssoss ad
- above_descriptive_and_ssoss_label_y = (
- ssoss_label_y - descriptive_label_height
- ) # y-coordinate of top of descriptive label
- descriptive_and_ssoss_label_text_y = ssoss_label_y - int(textsize_y / 2.0)
-
- # ssoss ad box
- cv2.rectangle(
- img_copy,
- pt1=(0, img_height),
- pt2=(img_width, ssoss_label_y),
- color=BLACK,
- thickness=-1,
- )
- ssoss_and_descriptive_label = cv2.addWeighted(
- img_copy, alpha, img, 1 - alpha, 0
- )
- # image label box
- cv2.rectangle(
- img_copy,
- pt1=(0, ssoss_label_y),
- pt2=(img_width, above_descriptive_and_ssoss_label_y),
- color=WHITE,
- thickness=-1,
- )
- ssoss_and_descriptive_label = cv2.addWeighted(
- img_copy, alpha, img, 1 - alpha, 0
- )
+ ssoss_label_y = img_height - ssoss_label_height # y-coordinate of top of ssoss ad
+ above_descriptive_and_ssoss_label_y = ssoss_label_y - descriptive_label_height # y-coordinate of top of descriptive label
+ descriptive_and_ssoss_label_text_y = ssoss_label_y - int(textsize_y/2.0)
+
+ #ssoss ad box
+ cv2.rectangle(img_copy,pt1=(0, img_height), pt2=(img_width, ssoss_label_y), color = BLACK, thickness=-1)
+ ssoss_and_descriptive_label = cv2.addWeighted(img_copy, alpha, img, 1-alpha, 0)
+ #image label box
+ cv2.rectangle(img_copy, pt1=(0, ssoss_label_y), pt2=(img_width, above_descriptive_and_ssoss_label_y), color=WHITE, thickness=-1)
+ ssoss_and_descriptive_label = cv2.addWeighted(img_copy, alpha, img, 1-alpha, 0)
# text for ssoss ad and label
- ssoss_and_descriptive_label = cv2.putText(
- ssoss_and_descriptive_label,
- descriptive_label,
- (text_x, descriptive_and_ssoss_label_text_y),
- text_font,
- font_scale,
- BLACK,
- 2,
- )
- ssoss_and_descriptive_label = cv2.putText(
- ssoss_and_descriptive_label,
- fitted_ssoss_label,
- (ssoss_text_x, ssoss_label_text_y),
- text_font,
- ssoss_label_font_scale,
- WHITE,
- 2,
- )
+ ssoss_and_descriptive_label = cv2.putText(ssoss_and_descriptive_label, descriptive_label, (text_x, descriptive_and_ssoss_label_text_y), text_font, font_scale, BLACK, 2)
+ ssoss_and_descriptive_label = cv2.putText(ssoss_and_descriptive_label, fitted_ssoss_label, (ssoss_text_x, ssoss_label_text_y), text_font, ssoss_label_font_scale, WHITE, 2)
# save image
cv2.imwrite(output_filename, ssoss_and_descriptive_label)
-
+
else:
# no ssoss label, just descriptive label (not recommended)
- cv2.rectangle(
- img_copy,
- pt1=(0, img_height),
- pt2=(img_width, descriptive_label_y),
- color=WHITE,
- thickness=-1,
- )
- img_new = cv2.addWeighted(img_copy, alpha, img, 1 - alpha, 0)
- cv2.putText(
- img_new,
- descriptive_label,
- (text_x, text_y),
- text_font,
- font_scale,
- BLACK,
- 2,
- )
+ cv2.rectangle(img_copy, pt1=(0, img_height), pt2=(img_width, descriptive_label_y), color=WHITE, thickness=-1)
+ img_new = cv2.addWeighted(img_copy, alpha, img, 1-alpha, 0)
+ cv2.putText(img_new, descriptive_label, (text_x, text_y), text_font, font_scale, BLACK, 2)
cv2.imwrite(output_filename, img_new)
@staticmethod
- def generate_descriptive_label(
- path, fn, road_object_info, static_object_type="generic"
- ):
+ def generate_descriptive_label(path, fn, road_object_info, static_object_type="generic"):
sro_id = int(fn.split(".")[0])
ts = float(fn.split("-")[-1].replace(".jpg", ""))
distance = 0
if static_object_type == "intersection":
b_index = int((fn.rsplit(".")[1])[0:1])
- descriptive_label = road_object_info.intersection_frame_description(
- sro_id, b_index, distance, ts, desc_type="label"
- )
+ descriptive_label = road_object_info.intersection_frame_description(sro_id, b_index, distance, ts, desc_type="label")
else:
- descriptive_label = road_object_info.generic_so_description(
- sro_id, distance, ts, desc_type="label"
- )
+ descriptive_label = road_object_info.generic_so_description(sro_id, distance, ts, desc_type="label")
return descriptive_label
-
+
def generic_so_img_overlay_info_box(self, vid_filename_dir, ro_info):
- img_path = Path(
- self.video_dir,
- "out",
- self.video_filepath.stem,
- "generic_static_object_sightings/",
- )
+ img_path = Path(self.video_dir, "out", self.video_filepath.stem, "generic_static_object_sightings/")
label_img_path = Path(img_path, "labeled/")
os.makedirs(label_img_path, exist_ok=True)
img_dir_string = str(img_path)
label_img_dir_string = str(label_img_path)
- pattern_criteria = ["*.jpg", "[!.]*"]
+ pattern_criteria = ['*.jpg','[!.]*']
- descriptive_label_percent = 0.05 # 5% for descriptive label at bottom of image
- ssoss_label_percent = (
- 0.02 # 2% for ssoss advertisement label at very bottom of image
- )
+ descriptive_label_percent = 0.05 # 5% for descriptive label at bottom of image
+ ssoss_label_percent = 0.02 # 2% for ssoss advertisement label at very bottom of image
label_height_percents = (descriptive_label_percent, ssoss_label_percent)
# filter for images where * is wildcard and don't include hidden (.*) files
- pathlist = [
- f for f in Path(img_dir_string).rglob("*.jpg") if not str(f).startswith(".")
- ]
+ pathlist = [f for f in Path(img_dir_string).rglob('*.jpg') if not str(f).startswith(".")]
for file in pathlist:
if not str(file.stem).startswith("."):
filename = str(Path(file).name)
@@ -621,37 +514,26 @@ def generic_so_img_overlay_info_box(self, vid_filename_dir, ro_info):
overlay = img.copy()
label_img_name = str(Path(label_img_path, filename))
- descriptive_label = self.generate_descriptive_label(
- label_img_path, filename, ro_info
- )
+ descriptive_label = self.generate_descriptive_label(label_img_path, filename, ro_info)
- self.labels(
- img, label_img_name, descriptive_label, label_height_percents
- )
+ self.labels(img, label_img_name, descriptive_label, label_height_percents)
+
def img_overlay_info_box(self, vid_filename_dir, ro_info):
- img_path = Path(
- self.video_dir, "out", self.video_filepath.stem, "signal_sightings/"
- )
+ img_path = Path(self.video_dir, "out", self.video_filepath.stem, "signal_sightings/")
label_img_path = Path(img_path, "labeled/")
os.makedirs(label_img_path, exist_ok=True)
img_dir_string = str(img_path)
label_img_dir_string = str(label_img_path)
- pattern_criteria = ["*.[0-3]-*.jpg", "[!.]*"]
+ pattern_criteria = ['*.[0-3]-*.jpg','[!.]*']
- descriptive_label_percent = 0.05 # 5% for descriptive label at bottom of image
- ssoss_label_percent = (
- 0.02 # 2% for ssoss advertisement label at very bottom of image
- )
+ descriptive_label_percent = 0.05 # 5% for descriptive label at bottom of image
+ ssoss_label_percent = 0.02 # 2% for ssoss advertisement label at very bottom of image
label_height_percents = (descriptive_label_percent, ssoss_label_percent)
# filter for images where * is wildcard and don't include hidden (.*) files
- pathlist = [
- f
- for f in Path(img_dir_string).rglob("*.[0-3]-*.jpg")
- if not str(f).startswith(".")
- ]
+ pathlist = [f for f in Path(img_dir_string).rglob('*.[0-3]-*.jpg') if not str(f).startswith(".")]
for file in pathlist:
if not str(file.stem).startswith("."):
filename = str(Path(file).name)
@@ -660,10 +542,6 @@ def img_overlay_info_box(self, vid_filename_dir, ro_info):
overlay = img.copy()
label_img_name = str(Path(label_img_path, filename))
- descriptive_label = self.generate_descriptive_label(
- label_img_path, filename, ro_info, static_object_type="intersection"
- )
+ descriptive_label = self.generate_descriptive_label(label_img_path, filename,ro_info, static_object_type="intersection")
- self.labels(
- img, label_img_name, descriptive_label, label_height_percents
- )
+ self.labels(img, label_img_name, descriptive_label, label_height_percents)
From 59ef36b12ffe84b8f84d453792692e1a0eecb9c7 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Mon, 9 Jun 2025 18:20:24 -0700
Subject: [PATCH 14/46] fix out dir handling
---
src/ssoss/dynamic_road_object.py | 3 ++-
src/ssoss/process_road_objects.py | 4 ++--
src/ssoss/process_video.py | 4 ++--
3 files changed, 6 insertions(+), 5 deletions(-)
diff --git a/src/ssoss/dynamic_road_object.py b/src/ssoss/dynamic_road_object.py
index bc6b890..31e4260 100644
--- a/src/ssoss/dynamic_road_object.py
+++ b/src/ssoss/dynamic_road_object.py
@@ -71,7 +71,8 @@ def __init__(
)
self.in_file_path = PurePath("./in/")
- self.out_file_path = PurePath("./out/")
+ self.out_file_path = Path("./out")
+ self.out_file_path.mkdir(exist_ok=True, parents=True)
if self.spd is None:
self.calculate_spd_values()
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index 5bdc8bb..2dc46f2 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -45,8 +45,8 @@ def __init__(self,
self.pretty_datetime_format = "%y-%m-%d %H:%M:%S"
self.in_gpx_dir_path = Path(gpx_filestring).parent
self.in_dir_path = self.in_gpx_dir_path
- self.out_dir_path = Path(self.in_dir_path, "/out/") # self.in_dir_path / "out/"
- self.out_dir_path.parent.mkdir(exist_ok=True, parents=True)
+ self.out_dir_path = self.in_dir_path / "out"
+ self.out_dir_path.mkdir(exist_ok=True, parents=True)
# init variables
#if signals_filestring:
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index ad98e9a..5611190 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -27,8 +27,8 @@ def __init__(self, video_filestring: str):
self.video_dir = Path(video_filestring).parents[0]
self.video_filepath = Path(video_filestring)
self.video_filename = Path(video_filestring).name
- self.image_out_path = self.video_dir / "out/"
- self.image_out_path.parent.mkdir(exist_ok=True, parents=True)
+ self.image_out_path = self.video_dir / "out"
+ self.image_out_path.mkdir(exist_ok=True, parents=True)
self.fps = self.get_fps()
self.frame_count = self.get_frame_count()
From 76158fdb9d1993b9932eade2f5fe66344601c1e0 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Mon, 9 Jun 2025 20:04:27 -0700
Subject: [PATCH 15/46] Add GPS EXIF tagging and interpolation
---
README.md | 3 ++-
dev-requirements.in | 1 +
dev-requirements.txt | 40 ++++++++++++++---------------
requirements.in | 1 +
requirements.txt | 30 +++++++++++-----------
src/ssoss/process_road_objects.py | 32 +++++++++++++++++++++++
src/ssoss/process_video.py | 42 +++++++++++++++++++++++++++++++
7 files changed, 114 insertions(+), 35 deletions(-)
diff --git a/README.md b/README.md
index e8f10f1..c994ba1 100644
--- a/README.md
+++ b/README.md
@@ -23,10 +23,11 @@ streamlined and repeatable process to monitor signs and signals along any roadwa
* Example CSV templates are provided to help get started making the static roadway object input file for both static objects and traffic signals.
* Video Synchronization Helper Tools: Options are provided to export the video frames and help to synchronize the video file.
* Image Labeling and animated GIF image tools: Selectable options are included to label images or create an animated GIF from multiple images.
+* Extracted images include GPS coordinates embedded in their EXIF metadata.
## Requirements
- Python 3.9
-- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml
+- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml, pillow, piexif
## Installation
Windows OS users can use the [Releases](https://github.com/redmond2742/ssoss/releases) to download an .exe of SSOSS for simple graphical usage. For Mac and Linux users, the command line option is described below.
diff --git a/dev-requirements.in b/dev-requirements.in
index caa1999..6cb9ee8 100644
--- a/dev-requirements.in
+++ b/dev-requirements.in
@@ -10,3 +10,4 @@ lxml
pillow
python-dateutil
icecream
+piexif
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 7489b31..a9b4ba4 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -4,51 +4,51 @@
#
# pip-compile dev-requirements.in
#
-asttokens==2.4.0
+asttokens==3.0.0
# via icecream
colorama==0.4.6
# via icecream
-executing==2.0.0
+executing==2.2.0
# via icecream
geographiclib==2.0
# via geopy
-geopy==2.4.0
+geopy==2.4.1
# via -r dev-requirements.in
-gpxpy==1.5.0
+gpxpy==1.6.2
# via -r dev-requirements.in
-icecream==2.1.3
+icecream==2.1.4
# via -r dev-requirements.in
-imageio==2.31.5
+imageio==2.37.0
# via -r dev-requirements.in
-lxml==4.9.3
+lxml==5.4.0
# via -r dev-requirements.in
-numpy==1.26.0
+numpy==2.3.0
# via
# -r dev-requirements.in
# imageio
# opencv-python
# pandas
-opencv-python==4.8.1.78
+opencv-python==4.11.0.86
# via -r dev-requirements.in
-pandas==2.1.1
+pandas==2.3.0
# via -r dev-requirements.in
-pillow==10.0.1
+piexif==1.1.3
+ # via -r dev-requirements.in
+pillow==11.2.1
# via
# -r dev-requirements.in
# imageio
-pygments==2.16.1
+pygments==2.19.1
# via icecream
-python-dateutil==2.8.2
+python-dateutil==2.9.0.post0
# via
# -r dev-requirements.in
# pandas
-pytz==2023.3.post1
+pytz==2025.2
# via pandas
-six==1.16.0
- # via
- # asttokens
- # python-dateutil
-tqdm==4.66.1
+six==1.17.0
+ # via python-dateutil
+tqdm==4.67.1
# via -r dev-requirements.in
-tzdata==2023.3
+tzdata==2025.2
# via pandas
diff --git a/requirements.in b/requirements.in
index 005e62b..0057b4b 100644
--- a/requirements.in
+++ b/requirements.in
@@ -9,4 +9,5 @@ tqdm
lxml
pillow
python-dateutil
+piexif
diff --git a/requirements.txt b/requirements.txt
index 381db48..eeb72f1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,41 +2,43 @@
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
-# pip-compile
+# pip-compile requirements.in
#
geographiclib==2.0
# via geopy
-geopy==2.4.0
+geopy==2.4.1
# via -r requirements.in
-gpxpy==1.5.0
+gpxpy==1.6.2
# via -r requirements.in
-imageio==2.31.5
+imageio==2.37.0
# via -r requirements.in
-lxml==4.9.3
+lxml==5.4.0
# via -r requirements.in
-numpy==1.26.0
+numpy==2.3.0
# via
# -r requirements.in
# imageio
# opencv-python
# pandas
-opencv-python==4.8.1.78
+opencv-python==4.11.0.86
# via -r requirements.in
-pandas==2.1.1
+pandas==2.3.0
# via -r requirements.in
-pillow==10.0.1
+piexif==1.1.3
+ # via -r requirements.in
+pillow==11.2.1
# via
# -r requirements.in
# imageio
-python-dateutil==2.8.2
+python-dateutil==2.9.0.post0
# via
# -r requirements.in
# pandas
-pytz==2023.3.post1
+pytz==2025.2
# via pandas
-six==1.16.0
+six==1.17.0
# via python-dateutil
-tqdm==4.66.1
+tqdm==4.67.1
# via -r requirements.in
-tzdata==2023.3
+tzdata==2025.2
# via pandas
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index 2dc46f2..7474ab9 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -677,6 +677,38 @@ def get_speed_at_timestamp(self, ts):
break
return speed
+ def get_location_at_timestamp(self, ts):
+ """Return interpolated geopy.Point for a given timestamp.
+
+ If the timestamp lies outside the GPX range, ``None`` is returned.
+ """
+ point_list = self.gpx_listDF
+ last_point = len(point_list) - 1
+
+ if ts < point_list.loc[0][0].get_timestamp():
+ return None
+ if ts > point_list.loc[last_point][0].get_timestamp():
+ return None
+
+ for i in range(len(point_list) - 1):
+ p_curr = point_list.loc[i][0]
+ p_next = point_list.loc[i + 1][0]
+ t0 = p_curr.get_timestamp()
+ t1 = p_next.get_timestamp()
+ if t0 <= ts <= t1:
+ if t1 == t0:
+ return p_curr.get_location()
+ ratio = (ts - t0) / (t1 - t0)
+ lat = p_curr.get_location().latitude + ratio * (
+ p_next.get_location().latitude - p_curr.get_location().latitude
+ )
+ lon = p_curr.get_location().longitude + ratio * (
+ p_next.get_location().longitude - p_curr.get_location().longitude
+ )
+ return geopy.Point(lat, lon)
+
+ return None
+
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 5611190..ebaddaa 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -11,6 +11,8 @@
from tqdm import tqdm
import cv2
import imageio
+from PIL import Image
+import piexif
class ProcessVideo:
@@ -123,6 +125,38 @@ def save_frame_ffmpeg(self, frame_number: int, output_path: Path) -> None:
str(output_path),
]
subprocess.run(cmd, check=True)
+
+ @staticmethod
+ def _deg_to_dms_rational(value: float):
+ """Helper to convert decimal degrees to EXIF rational format."""
+ abs_value = abs(value)
+ deg = int(abs_value)
+ min_float = (abs_value - deg) * 60
+ minute = int(min_float)
+ sec = round((min_float - minute) * 60 * 1000000)
+ return ((deg, 1), (minute, 1), (sec, 1000000))
+
+ @staticmethod
+ def write_gps_exif(image_path: Path, latitude: float, longitude: float) -> None:
+ """Write GPS latitude and longitude EXIF tags to ``image_path``."""
+ img = Image.open(image_path)
+ lat_ref = "N" if latitude >= 0 else "S"
+ lon_ref = "E" if longitude >= 0 else "W"
+ gps_ifd = {
+ piexif.GPSIFD.GPSLatitudeRef: lat_ref,
+ piexif.GPSIFD.GPSLatitude: ProcessVideo._deg_to_dms_rational(latitude),
+ piexif.GPSIFD.GPSLongitudeRef: lon_ref,
+ piexif.GPSIFD.GPSLongitude: ProcessVideo._deg_to_dms_rational(longitude),
+ }
+
+ try:
+ exif_dict = piexif.load(img.info.get("exif", b""))
+ except Exception:
+ exif_dict = {"0th": {}, "Exif": {}, "GPS": {}, "1st": {}, "thumbnail": None}
+ exif_dict.setdefault("GPS", {}).update(gps_ifd)
+ exif_bytes = piexif.dump(exif_dict)
+ piexif.insert(exif_bytes, str(image_path))
+ img.close()
def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
"""
@@ -143,6 +177,10 @@ def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True,
frame_name = str(desc) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
+ ts = float(desc.split('-')[-1])
+ location = project.get_location_at_timestamp(ts)
+ if location is not None:
+ self.write_gps_exif(frame_filepath, location.latitude, location.longitude)
print(
f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {generic_so_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
@@ -170,6 +208,10 @@ def extract_sightings(self, desc_timestamps, project, label_img=True, gen_gif=Fa
frame_name = str(desc) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
+ ts = float(desc.split('-')[-1])
+ location = project.get_location_at_timestamp(ts)
+ if location is not None:
+ self.write_gps_exif(frame_filepath, location.latitude, location.longitude)
print(
f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {intersection_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
From 66ce77f57ba9bb65a6962cea3ff488e348d7ff12 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Mon, 9 Jun 2025 22:22:05 -0700
Subject: [PATCH 16/46] Revert "Add GPS EXIF tagging"
---
README.md | 3 +--
dev-requirements.in | 1 -
dev-requirements.txt | 40 ++++++++++++++---------------
requirements.in | 1 -
requirements.txt | 30 +++++++++++-----------
src/ssoss/process_road_objects.py | 32 -----------------------
src/ssoss/process_video.py | 42 -------------------------------
7 files changed, 35 insertions(+), 114 deletions(-)
diff --git a/README.md b/README.md
index c994ba1..e8f10f1 100644
--- a/README.md
+++ b/README.md
@@ -23,11 +23,10 @@ streamlined and repeatable process to monitor signs and signals along any roadwa
* Example CSV templates are provided to help get started making the static roadway object input file for both static objects and traffic signals.
* Video Synchronization Helper Tools: Options are provided to export the video frames and help to synchronize the video file.
* Image Labeling and animated GIF image tools: Selectable options are included to label images or create an animated GIF from multiple images.
-* Extracted images include GPS coordinates embedded in their EXIF metadata.
## Requirements
- Python 3.9
-- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml, pillow, piexif
+- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml
## Installation
Windows OS users can use the [Releases](https://github.com/redmond2742/ssoss/releases) to download an .exe of SSOSS for simple graphical usage. For Mac and Linux users, the command line option is described below.
diff --git a/dev-requirements.in b/dev-requirements.in
index 6cb9ee8..caa1999 100644
--- a/dev-requirements.in
+++ b/dev-requirements.in
@@ -10,4 +10,3 @@ lxml
pillow
python-dateutil
icecream
-piexif
diff --git a/dev-requirements.txt b/dev-requirements.txt
index a9b4ba4..7489b31 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -4,51 +4,51 @@
#
# pip-compile dev-requirements.in
#
-asttokens==3.0.0
+asttokens==2.4.0
# via icecream
colorama==0.4.6
# via icecream
-executing==2.2.0
+executing==2.0.0
# via icecream
geographiclib==2.0
# via geopy
-geopy==2.4.1
+geopy==2.4.0
# via -r dev-requirements.in
-gpxpy==1.6.2
+gpxpy==1.5.0
# via -r dev-requirements.in
-icecream==2.1.4
+icecream==2.1.3
# via -r dev-requirements.in
-imageio==2.37.0
+imageio==2.31.5
# via -r dev-requirements.in
-lxml==5.4.0
+lxml==4.9.3
# via -r dev-requirements.in
-numpy==2.3.0
+numpy==1.26.0
# via
# -r dev-requirements.in
# imageio
# opencv-python
# pandas
-opencv-python==4.11.0.86
+opencv-python==4.8.1.78
# via -r dev-requirements.in
-pandas==2.3.0
+pandas==2.1.1
# via -r dev-requirements.in
-piexif==1.1.3
- # via -r dev-requirements.in
-pillow==11.2.1
+pillow==10.0.1
# via
# -r dev-requirements.in
# imageio
-pygments==2.19.1
+pygments==2.16.1
# via icecream
-python-dateutil==2.9.0.post0
+python-dateutil==2.8.2
# via
# -r dev-requirements.in
# pandas
-pytz==2025.2
+pytz==2023.3.post1
# via pandas
-six==1.17.0
- # via python-dateutil
-tqdm==4.67.1
+six==1.16.0
+ # via
+ # asttokens
+ # python-dateutil
+tqdm==4.66.1
# via -r dev-requirements.in
-tzdata==2025.2
+tzdata==2023.3
# via pandas
diff --git a/requirements.in b/requirements.in
index 0057b4b..005e62b 100644
--- a/requirements.in
+++ b/requirements.in
@@ -9,5 +9,4 @@ tqdm
lxml
pillow
python-dateutil
-piexif
diff --git a/requirements.txt b/requirements.txt
index eeb72f1..381db48 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,43 +2,41 @@
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
-# pip-compile requirements.in
+# pip-compile
#
geographiclib==2.0
# via geopy
-geopy==2.4.1
+geopy==2.4.0
# via -r requirements.in
-gpxpy==1.6.2
+gpxpy==1.5.0
# via -r requirements.in
-imageio==2.37.0
+imageio==2.31.5
# via -r requirements.in
-lxml==5.4.0
+lxml==4.9.3
# via -r requirements.in
-numpy==2.3.0
+numpy==1.26.0
# via
# -r requirements.in
# imageio
# opencv-python
# pandas
-opencv-python==4.11.0.86
+opencv-python==4.8.1.78
# via -r requirements.in
-pandas==2.3.0
+pandas==2.1.1
# via -r requirements.in
-piexif==1.1.3
- # via -r requirements.in
-pillow==11.2.1
+pillow==10.0.1
# via
# -r requirements.in
# imageio
-python-dateutil==2.9.0.post0
+python-dateutil==2.8.2
# via
# -r requirements.in
# pandas
-pytz==2025.2
+pytz==2023.3.post1
# via pandas
-six==1.17.0
+six==1.16.0
# via python-dateutil
-tqdm==4.67.1
+tqdm==4.66.1
# via -r requirements.in
-tzdata==2025.2
+tzdata==2023.3
# via pandas
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index 7474ab9..2dc46f2 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -677,38 +677,6 @@ def get_speed_at_timestamp(self, ts):
break
return speed
- def get_location_at_timestamp(self, ts):
- """Return interpolated geopy.Point for a given timestamp.
-
- If the timestamp lies outside the GPX range, ``None`` is returned.
- """
- point_list = self.gpx_listDF
- last_point = len(point_list) - 1
-
- if ts < point_list.loc[0][0].get_timestamp():
- return None
- if ts > point_list.loc[last_point][0].get_timestamp():
- return None
-
- for i in range(len(point_list) - 1):
- p_curr = point_list.loc[i][0]
- p_next = point_list.loc[i + 1][0]
- t0 = p_curr.get_timestamp()
- t1 = p_next.get_timestamp()
- if t0 <= ts <= t1:
- if t1 == t0:
- return p_curr.get_location()
- ratio = (ts - t0) / (t1 - t0)
- lat = p_curr.get_location().latitude + ratio * (
- p_next.get_location().latitude - p_curr.get_location().latitude
- )
- lon = p_curr.get_location().longitude + ratio * (
- p_next.get_location().longitude - p_curr.get_location().longitude
- )
- return geopy.Point(lat, lon)
-
- return None
-
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index ebaddaa..5611190 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -11,8 +11,6 @@
from tqdm import tqdm
import cv2
import imageio
-from PIL import Image
-import piexif
class ProcessVideo:
@@ -125,38 +123,6 @@ def save_frame_ffmpeg(self, frame_number: int, output_path: Path) -> None:
str(output_path),
]
subprocess.run(cmd, check=True)
-
- @staticmethod
- def _deg_to_dms_rational(value: float):
- """Helper to convert decimal degrees to EXIF rational format."""
- abs_value = abs(value)
- deg = int(abs_value)
- min_float = (abs_value - deg) * 60
- minute = int(min_float)
- sec = round((min_float - minute) * 60 * 1000000)
- return ((deg, 1), (minute, 1), (sec, 1000000))
-
- @staticmethod
- def write_gps_exif(image_path: Path, latitude: float, longitude: float) -> None:
- """Write GPS latitude and longitude EXIF tags to ``image_path``."""
- img = Image.open(image_path)
- lat_ref = "N" if latitude >= 0 else "S"
- lon_ref = "E" if longitude >= 0 else "W"
- gps_ifd = {
- piexif.GPSIFD.GPSLatitudeRef: lat_ref,
- piexif.GPSIFD.GPSLatitude: ProcessVideo._deg_to_dms_rational(latitude),
- piexif.GPSIFD.GPSLongitudeRef: lon_ref,
- piexif.GPSIFD.GPSLongitude: ProcessVideo._deg_to_dms_rational(longitude),
- }
-
- try:
- exif_dict = piexif.load(img.info.get("exif", b""))
- except Exception:
- exif_dict = {"0th": {}, "Exif": {}, "GPS": {}, "1st": {}, "thumbnail": None}
- exif_dict.setdefault("GPS", {}).update(gps_ifd)
- exif_bytes = piexif.dump(exif_dict)
- piexif.insert(exif_bytes, str(image_path))
- img.close()
def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
"""
@@ -177,10 +143,6 @@ def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True,
frame_name = str(desc) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
- ts = float(desc.split('-')[-1])
- location = project.get_location_at_timestamp(ts)
- if location is not None:
- self.write_gps_exif(frame_filepath, location.latitude, location.longitude)
print(
f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {generic_so_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
@@ -208,10 +170,6 @@ def extract_sightings(self, desc_timestamps, project, label_img=True, gen_gif=Fa
frame_name = str(desc) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
- ts = float(desc.split('-')[-1])
- location = project.get_location_at_timestamp(ts)
- if location is not None:
- self.write_gps_exif(frame_filepath, location.latitude, location.longitude)
print(
f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {intersection_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
From 9738440ea9d6eb5b47f74f23fa1fd330f3bcd3f3 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Mon, 9 Jun 2025 22:27:42 -0700
Subject: [PATCH 17/46] Update static_road_object.py
changed: self.ctr_pnt to ctr_pt in get_location_sb
---
src/ssoss/static_road_object.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/ssoss/static_road_object.py b/src/ssoss/static_road_object.py
index d1d9b3a..a66820f 100644
--- a/src/ssoss/static_road_object.py
+++ b/src/ssoss/static_road_object.py
@@ -295,8 +295,8 @@ def center_to_sb_distance(self, bearing_index):
def get_location_sb(self, bearing_index) -> geopy.Point:
shortest_index = np.argmin(
- [geopy.distance.distance(self.ctr_pnt, self.stop_bar_d[bearing_index][0]).ft,
- geopy.distance.distance(self.ctr_pnt, self.stop_bar_d[bearing_index][1]).ft]
+ [geopy.distance.distance(self.ctr_pt, self.stop_bar_d[bearing_index][0]).ft,
+ geopy.distance.distance(self.ctr_pt, self.stop_bar_d[bearing_index][1]).ft]
)
return self.stop_bar_d[bearing_index][shortest_index]
From 9f6f6487e048d89b684f8de368ca8eec84bbfe6b Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Mon, 9 Jun 2025 22:41:41 -0700
Subject: [PATCH 18/46] Fix center_to_sb_distance attribute and add tests
---
src/ssoss/static_road_object.py | 9 ++++++---
tests/test_static_road_object.py | 32 ++++++++++++++++++++++++++++++++
2 files changed, 38 insertions(+), 3 deletions(-)
diff --git a/src/ssoss/static_road_object.py b/src/ssoss/static_road_object.py
index a66820f..29f606a 100644
--- a/src/ssoss/static_road_object.py
+++ b/src/ssoss/static_road_object.py
@@ -286,10 +286,13 @@ def all_sb_line_available(self) -> bool:
return (nb and eb and sb and wb)
def center_to_sb_distance(self, bearing_index):
- """not used"""
+ """Return the distance from the intersection center to the stop bar.
+
+ The intersection center point is stored in ``ctr_pt``.
+ """
min_distance = min(
- geopy.distance.distance(self.ctr_pnt, self.stop_bar_d[bearing_index][0]).ft,
- geopy.distance.distance(self.ctr_pnt, self.stop_bar_d[bearing_index][1]).ft
+ geopy.distance.distance(self.ctr_pt, self.stop_bar_d[bearing_index][0]).ft,
+ geopy.distance.distance(self.ctr_pt, self.stop_bar_d[bearing_index][1]).ft,
)
return min_distance
diff --git a/tests/test_static_road_object.py b/tests/test_static_road_object.py
index 41362c8..3834dce 100644
--- a/tests/test_static_road_object.py
+++ b/tests/test_static_road_object.py
@@ -100,5 +100,37 @@ def test_wb_distance_to_sb(self):
self.assertLess(self.wb_result, 215)
+class TestIntersectionHelpers(unittest.TestCase):
+ """Ensure helper methods on :class:`Intersection` execute correctly."""
+
+ intersection_name = ("California", "Powell")
+ intersection_ctr_pt = geopy.Point(37.79205307308094, -122.40918793416158)
+ intersection_spd_tuple = (25, 25, 25, 25)
+ intersection_bearing = (346.33, 90.09, 174.52, 271.11)
+ intersection_stop_bar_nb = (
+ geopy.Point(37.791939238323664, -122.40915035636318),
+ geopy.Point(37.79194559709975, -122.4091101232288),
+ )
+
+ test_intersection = Intersection(
+ 101,
+ intersection_name,
+ intersection_ctr_pt,
+ spd=intersection_spd_tuple,
+ bearing=intersection_bearing,
+ stop_bar_nb=intersection_stop_bar_nb,
+ )
+
+ def test_get_location_sb_runs(self):
+ """``get_location_sb`` should return a ``geopy.Point`` without error."""
+ pt = self.test_intersection.get_location_sb(0)
+ self.assertIsInstance(pt, geopy.Point)
+
+ def test_center_to_sb_distance_runs(self):
+ """``center_to_sb_distance`` should return a numeric distance."""
+ dist = self.test_intersection.center_to_sb_distance(0)
+ self.assertIsInstance(dist, float)
+
+
if __name__ == '__main__':
unittest.main()
From ed88cec99bfca62f89ce4f894614ebd8c2af0d70 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Mon, 9 Jun 2025 23:07:26 -0700
Subject: [PATCH 19/46] Add EXIF GPS tagging and location interpolation
---
README.md | 3 +-
dev-requirements.in | 1 +
requirements.in | 1 +
requirements.txt | 2 ++
src/ssoss/process_road_objects.py | 48 +++++++++++++++++++++++++
src/ssoss/process_video.py | 58 +++++++++++++++++++++++-------
tests/test_process_road_objects.py | 26 ++++++++++++++
7 files changed, 126 insertions(+), 13 deletions(-)
create mode 100644 tests/test_process_road_objects.py
diff --git a/README.md b/README.md
index e8f10f1..ff08d67 100644
--- a/README.md
+++ b/README.md
@@ -23,10 +23,11 @@ streamlined and repeatable process to monitor signs and signals along any roadwa
* Example CSV templates are provided to help get started making the static roadway object input file for both static objects and traffic signals.
* Video Synchronization Helper Tools: Options are provided to export the video frames and help to synchronize the video file.
* Image Labeling and animated GIF image tools: Selectable options are included to label images or create an animated GIF from multiple images.
+* GPS EXIF tagging: Extracted frames include GPS metadata for easy mapping.
## Requirements
- Python 3.9
-- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml
+- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml, pillow, piexif
## Installation
Windows OS users can use the [Releases](https://github.com/redmond2742/ssoss/releases) to download an .exe of SSOSS for simple graphical usage. For Mac and Linux users, the command line option is described below.
diff --git a/dev-requirements.in b/dev-requirements.in
index caa1999..6d04180 100644
--- a/dev-requirements.in
+++ b/dev-requirements.in
@@ -8,5 +8,6 @@ imageio
tqdm
lxml
pillow
+piexif
python-dateutil
icecream
diff --git a/requirements.in b/requirements.in
index 005e62b..a8757dd 100644
--- a/requirements.in
+++ b/requirements.in
@@ -8,5 +8,6 @@ imageio
tqdm
lxml
pillow
+piexif
python-dateutil
diff --git a/requirements.txt b/requirements.txt
index 381db48..f88190e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -28,6 +28,8 @@ pillow==10.0.1
# via
# -r requirements.in
# imageio
+piexif==1.1.3
+ # via -r requirements.in
python-dateutil==2.8.2
# via
# -r requirements.in
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index 2dc46f2..97559ba 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -677,6 +677,54 @@ def get_speed_at_timestamp(self, ts):
break
return speed
+ def get_location_at_timestamp(self, ts):
+ """Return a geopy ``Point`` interpolated for ``ts``.
+
+ Parameters
+ ----------
+ ts : float
+ Unix timestamp to interpolate the latitude and longitude for.
+
+ Returns
+ -------
+ geopy.Point or None
+ The interpolated location or ``None`` if ``ts`` is outside the
+ range of the loaded GPX data.
+ """
+
+ points = self.gpx_listDF
+ if points is None or len(points) == 0:
+ return None
+
+ last_idx = len(points) - 1
+
+ # Boundary checks
+ first_ts = points.loc[0][0].get_timestamp()
+ last_ts = points.loc[last_idx][0].get_timestamp()
+ if ts < first_ts or ts > last_ts:
+ return None
+
+ # Locate the two surrounding points
+ for i in range(last_idx):
+ p0 = points.loc[i][0]
+ p1 = points.loc[i + 1][0]
+ t0 = p0.get_timestamp()
+ t1 = p1.get_timestamp()
+ if t0 <= ts <= t1:
+ if t1 == t0:
+ return p0.get_location()
+
+ ratio = (ts - t0) / (t1 - t0)
+ lat = p0.get_location().latitude + ratio * (
+ p1.get_location().latitude - p0.get_location().latitude
+ )
+ lon = p0.get_location().longitude + ratio * (
+ p1.get_location().longitude - p0.get_location().longitude
+ )
+ return geopy.Point(lat, lon)
+
+ return None
+
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 5611190..85aa235 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -11,6 +11,8 @@
from tqdm import tqdm
import cv2
import imageio
+from PIL import Image
+import piexif
class ProcessVideo:
@@ -86,24 +88,25 @@ def sync(self, frame: int, ts):
return None
def create_pic_list_from_zip(self, i_desc_timestamps):
- """returns sight distance description text and frame of video to extract as 2 lists"""
+ """Return descriptions, frame numbers and timestamps for extraction."""
intersection_desc = []
frames = []
+ timestamps = []
prev_frame = 0
filename_description, time_of_sd = zip(*i_desc_timestamps)
- for sd_item in range(0, len(i_desc_timestamps)):
+ for sd_item in range(len(i_desc_timestamps)):
time_of_picture = time_of_sd[sd_item] - self.get_start_timestamp()
- if time_of_picture > 0 and time_of_picture <= self.get_duration():
+ if 0 < time_of_picture <= self.get_duration():
frame_of_video = time_of_picture * self.fps
- # build up lists if not duplicate frame
if int(frame_of_video) > int(prev_frame):
intersection_desc.append(filename_description[sd_item])
frames.append(int(frame_of_video))
+ timestamps.append(time_of_sd[sd_item])
prev_frame = frame_of_video
- return intersection_desc, frames
+ return intersection_desc, frames, timestamps
def save_frame_ffmpeg(self, frame_number: int, output_path: Path) -> None:
"""Save a specific frame quickly using ffmpeg."""
@@ -123,6 +126,33 @@ def save_frame_ffmpeg(self, frame_number: int, output_path: Path) -> None:
str(output_path),
]
subprocess.run(cmd, check=True)
+
+ @staticmethod
+ def write_gps_exif(image_path: Path, location) -> None:
+ """Write GPS EXIF tags to ``image_path`` using ``location``."""
+
+ if location is None:
+ return
+
+ def _to_deg(value):
+ abs_value = abs(value)
+ deg = int(abs_value)
+ minutes_float = (abs_value - deg) * 60
+ minutes = int(minutes_float)
+ seconds = round((minutes_float - minutes) * 60 * 100)
+ return ((deg, 1), (minutes, 1), (int(seconds), 100))
+
+ gps_ifd = {
+ piexif.GPSIFD.GPSLatitudeRef: "N" if location.latitude >= 0 else "S",
+ piexif.GPSIFD.GPSLatitude: _to_deg(location.latitude),
+ piexif.GPSIFD.GPSLongitudeRef: "E" if location.longitude >= 0 else "W",
+ piexif.GPSIFD.GPSLongitude: _to_deg(location.longitude),
+ }
+
+ exif_dict = {"GPS": gps_ifd}
+ exif_bytes = piexif.dump(exif_dict)
+ img = Image.open(image_path)
+ img.save(image_path, exif=exif_bytes)
def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
"""
@@ -132,17 +162,19 @@ def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True,
project: instance of ProcessRoadObjects() class
"""
- generic_so_desc, extract_frames = self.create_pic_list_from_zip(desc_timestamps)
+ generic_so_desc, extract_frames, ts_list = self.create_pic_list_from_zip(desc_timestamps)
image_path = Path(self.video_dir, "out", self.video_filepath.stem, "generic_static_object_sightings/")
image_path.mkdir(exist_ok=True, parents=True)
- for desc, frame_num in tqdm(
- list(zip(generic_so_desc, extract_frames)),
+ for desc, frame_num, ts in tqdm(
+ list(zip(generic_so_desc, extract_frames, ts_list)),
desc="Frame Extraction",
unit=" frame"):
frame_name = str(desc) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
+ location = project.get_location_at_timestamp(ts)
+ self.write_gps_exif(frame_filepath, location)
print(
f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {generic_so_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
@@ -159,17 +191,19 @@ def extract_sightings(self, desc_timestamps, project, label_img=True, gen_gif=Fa
project: instance of ProcessRoadObjects() class
"""
- intersection_desc, extract_frames = self.create_pic_list_from_zip(desc_timestamps)
+ intersection_desc, extract_frames, ts_list = self.create_pic_list_from_zip(desc_timestamps)
image_path = Path(self.video_dir, "out", self.video_filepath.stem, "signal_sightings/")
image_path.mkdir(exist_ok=True, parents=True)
- for desc, frame_num in tqdm(
- list(zip(intersection_desc, extract_frames)),
+ for desc, frame_num, ts in tqdm(
+ list(zip(intersection_desc, extract_frames, ts_list)),
desc="Frame Extraction",
unit=" frame"):
frame_name = str(desc) + '.jpg'
frame_filepath = image_path / frame_name
self.save_frame_ffmpeg(frame_num, frame_filepath)
+ location = project.get_location_at_timestamp(ts)
+ self.write_gps_exif(frame_filepath, location)
print(
f'PICTURE CAPTURED AT {frame_num}: {desc}, Saved {intersection_desc.index(desc) + 1} picture(s) of {len(extract_frames)}')
@@ -249,7 +283,7 @@ def generate_gif(self, desc_timestamps, project, distance=100):
:return: Returns a .gif filetype
"""
- intersection_desc, frame_list = self.create_pic_list_from_zip(desc_timestamps)
+ intersection_desc, frame_list, _ = self.create_pic_list_from_zip(desc_timestamps)
for i in tqdm(range(0, len(desc_timestamps)),
desc="Generating Images for GIF",
diff --git a/tests/test_process_road_objects.py b/tests/test_process_road_objects.py
new file mode 100644
index 0000000..9a4f6d2
--- /dev/null
+++ b/tests/test_process_road_objects.py
@@ -0,0 +1,26 @@
+import sys
+import pathlib
+import unittest
+import pandas as pd
+import geopy
+
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1] / "src"))
+
+from ssoss.process_road_objects import ProcessRoadObjects
+from ssoss.motion_road_object import GPXPoint
+
+class TestGetLocationAtTimestamp(unittest.TestCase):
+ def test_location_interpolation(self):
+ pro = ProcessRoadObjects()
+ pro.gpx_listDF = pd.DataFrame({"gpx_pt": [
+ GPXPoint(0, "2025-01-01T00:00:00Z", (0.0, 0.0), 0),
+ GPXPoint(1, "2025-01-01T00:00:10Z", (0.0, 10.0), 0)
+ ]})
+ ts = pro.gpx_listDF.iloc[0,0].get_timestamp() + 5
+ loc = pro.get_location_at_timestamp(ts)
+ self.assertIsInstance(loc, geopy.Point)
+ self.assertAlmostEqual(loc.latitude, 0.0)
+ self.assertAlmostEqual(loc.longitude, 5.0)
+
+if __name__ == '__main__':
+ unittest.main()
From 3ef98fbf2182e509145c7927e7f921215ea04c0d Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Tue, 10 Jun 2025 06:30:51 -0700
Subject: [PATCH 20/46] Fix duration formatting and add tests
---
src/ssoss/process_road_objects.py | 15 ++++++------
src/ssoss/process_video.py | 4 ++--
tests/test_hr_min_sec.py | 38 +++++++++++++++++++++++++++++++
3 files changed, 47 insertions(+), 10 deletions(-)
create mode 100644 tests/test_hr_min_sec.py
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index 97559ba..c7bfe9d 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -594,15 +594,14 @@ def hr_min_sec(sec):
if sec < 60:
return f'{sec} seconds'
elif sec < 3600:
- min = int(sec/60)
- sec_remain = round(sec - min * 60, 2)
- return f'{min}:{sec_remain} (MM:SS.ss)'
+ minutes = int(sec / 60)
+ sec_remain = round(sec - minutes * 60, 2)
+ return f'{minutes:02}:{sec_remain:05.2f} (MM:SS.ss)'
elif sec >= 3600:
- hr = int(sec/3600)
- min_remain = round(sec - hr * 3600, 2)
- min = int(min_remain/60)
- sec_remain = round(sec - min * 60, 2)
- return f'{hr}:{min}:{sec_remain} (HH:MM:SS.ss)'
+ hr = int(sec / 3600)
+ minutes = int((sec - hr * 3600) / 60)
+ sec_remain = round(sec - (hr * 3600 + minutes * 60), 2)
+ return f'{hr:02}:{minutes:02}:{sec_remain:05.2f} (HH:MM:SS.ss)'
@staticmethod
def simplify_distance(d_ft):
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 85aa235..2b4ee00 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -349,8 +349,8 @@ def hr_min_sec(sec):
return f'{minutes:02}:{sec_remain:05.2f} (MM:SS.ss)'
elif sec >= 3600:
hr = int(sec / 3600)
- minutes = int(sec / 60)
- sec_remain = round(sec - minutes * 60, 2)
+ minutes = int((sec - hr * 3600) / 60)
+ sec_remain = round(sec - (hr * 3600 + minutes * 60), 2)
return f'{hr:02}:{minutes:02}:{sec_remain:05.2f} (HH:MM:SS.ss)'
def sizeConvert(self, size):
diff --git a/tests/test_hr_min_sec.py b/tests/test_hr_min_sec.py
new file mode 100644
index 0000000..c09db5c
--- /dev/null
+++ b/tests/test_hr_min_sec.py
@@ -0,0 +1,38 @@
+import unittest
+import pathlib
+import sys
+
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1] / "src"))
+
+from ssoss.process_video import ProcessVideo
+from ssoss.process_road_objects import ProcessRoadObjects
+
+
+class TestHrMinSec(unittest.TestCase):
+ def test_seconds_only(self):
+ self.assertEqual(ProcessVideo.hr_min_sec(50), "50 seconds")
+ self.assertEqual(ProcessRoadObjects.hr_min_sec(50), "50 seconds")
+
+ def test_minutes_seconds(self):
+ self.assertEqual(
+ ProcessVideo.hr_min_sec(125.8),
+ "02:05.80 (MM:SS.ss)"
+ )
+ self.assertEqual(
+ ProcessRoadObjects.hr_min_sec(125.8),
+ "02:05.80 (MM:SS.ss)"
+ )
+
+ def test_hours_minutes_seconds(self):
+ self.assertEqual(
+ ProcessVideo.hr_min_sec(3661.2),
+ "01:01:01.20 (HH:MM:SS.ss)"
+ )
+ self.assertEqual(
+ ProcessRoadObjects.hr_min_sec(3661.2),
+ "01:01:01.20 (HH:MM:SS.ss)"
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
From a416535e2c4de83604aab52db0b427f19f904e0c Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 13:46:55 -0700
Subject: [PATCH 21/46] Bump version to 1.0
---
pyproject.toml | 2 +-
src/ssoss/__init__.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 155990f..da9d214 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "ssoss"
-version = "0.5.01"
+version = "1.0"
authors = [
{ name="Matt Redmond", email="mr2742@gmail.com" },
]
diff --git a/src/ssoss/__init__.py b/src/ssoss/__init__.py
index 2a5dc0e..8c71a11 100644
--- a/src/ssoss/__init__.py
+++ b/src/ssoss/__init__.py
@@ -16,5 +16,5 @@
__version__ = importlib.metadata.version("ssoss")
except importlib.metadata.PackageNotFoundError:
# Package metadata not found when running from source
- __version__ = "0.0.0"
+ __version__ = "1.0"
From ade05c9b7455338703b9873fb9bb0af32a52676a Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 13:59:44 -0700
Subject: [PATCH 22/46] Add tests for ProcessRoadObjects utilities
---
tests/test_process_road_objects_extra.py | 143 +++++++++++++++++++++++
1 file changed, 143 insertions(+)
create mode 100644 tests/test_process_road_objects_extra.py
diff --git a/tests/test_process_road_objects_extra.py b/tests/test_process_road_objects_extra.py
new file mode 100644
index 0000000..e2f6249
--- /dev/null
+++ b/tests/test_process_road_objects_extra.py
@@ -0,0 +1,143 @@
+import sys
+import pathlib
+import unittest
+import tempfile
+import csv
+import pandas as pd
+import geopy
+from datetime import datetime, timezone, timedelta
+
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1] / "src"))
+
+from ssoss.process_road_objects import ProcessRoadObjects
+from ssoss.static_road_object import GenericStaticObject, Intersection
+from ssoss.motion_road_object import GPXPoint
+
+
+class GPXFixture:
+ """Helper to generate simple GPX data for tests."""
+
+ @staticmethod
+ def create_points():
+ return pd.DataFrame({"gpx_pt": [
+ GPXPoint(0, "2025-01-01T00:00:00Z", (0.0, 0.0), 0),
+ GPXPoint(1, "2025-01-01T00:00:10Z", (0.0, 0.001), 1),
+ GPXPoint(2, "2025-01-01T00:00:20Z", (0.0, 0.002), 1),
+ ]})
+
+
+class TestSpeedCalc(unittest.TestCase):
+ def test_normal_speed(self):
+ p1 = geopy.Point(0.0, 0.0)
+ p2 = geopy.Point(0.0, 0.001)
+ t1 = datetime(2025, 1, 1, tzinfo=timezone.utc)
+ t2 = t1 + timedelta(seconds=10)
+ expected = geopy.distance.distance(p1, p2).meters / 10
+ result = ProcessRoadObjects.speed_calc(p1, p2, t1, t2)
+ self.assertAlmostEqual(result, expected, places=5)
+
+ def test_zero_time(self):
+ p = geopy.Point(0.0, 0.0)
+ t = datetime(2025, 1, 1, tzinfo=timezone.utc)
+ self.assertEqual(ProcessRoadObjects.speed_calc(p, p, t, t), 0.0)
+
+ def test_excessive_speed_returns_zero(self):
+ p1 = geopy.Point(0.0, 0.0)
+ p2 = geopy.Point(0.1, 0.1) # far enough for high speed
+ t1 = datetime(2025, 1, 1, tzinfo=timezone.utc)
+ t2 = t1 + timedelta(seconds=1)
+ self.assertEqual(ProcessRoadObjects.speed_calc(p1, p2, t1, t2), 0.0)
+
+
+class TestCSVLoading(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = tempfile.TemporaryDirectory()
+ self.addCleanup(self.tmpdir.cleanup)
+
+ def test_load_generic_so_csv(self):
+ path = pathlib.Path(self.tmpdir.name, "generic.csv")
+ with open(path, "w", newline="") as f:
+ writer = csv.writer(f)
+ writer.writerow(["id", "street", "lat", "lon", "bearing", "desc", "dist"])
+ writer.writerow([1, "Main", 0.0, 0.0, "NB", "Stop", 50])
+
+ pro = ProcessRoadObjects()
+ pro.load_generic_so_csv(str(path))
+ df = pro.generic_so_listDF
+ self.assertEqual(len(df), 1)
+ obj = df.iloc[0, 1]
+ self.assertIsInstance(obj, GenericStaticObject)
+ self.assertEqual(obj.get_name(), "Main")
+ self.assertEqual(obj.get_bearing(), 0)
+
+ def test_load_intersection_csv(self):
+ path = pathlib.Path(self.tmpdir.name, "intersection.csv")
+ with open(path, "w", newline="") as f:
+ writer = csv.writer(f)
+ writer.writerow([
+ "id", "n1", "n2", "lat", "lon", "sn", "se", "ss", "sw",
+ "bn", "be", "bs", "bw",
+ ])
+ writer.writerow([1, "Main", "First", 0.0, 0.0,
+ 25, 25, 25, 25,
+ 0, 90, 180, 270])
+
+ pro = ProcessRoadObjects()
+ df = pro.load_intersection_csv(str(path))
+ self.assertEqual(len(df), 1)
+ obj = df.iloc[0, 1]
+ self.assertIsInstance(obj, Intersection)
+ self.assertEqual(obj.get_name(), "Main+First")
+ self.assertEqual(obj.get_sd(0), 215)
+
+
+class TestTimestampQueries(unittest.TestCase):
+ def setUp(self):
+ self.pro = ProcessRoadObjects()
+ self.pro.gpx_listDF = GPXFixture.create_points()
+
+ def test_get_speed_at_timestamp_between_points(self):
+ ts = self.pro.gpx_listDF.iloc[0, 0].get_timestamp() + 5
+ spd = self.pro.get_speed_at_timestamp(ts)
+ s0 = self.pro.gpx_listDF.iloc[0, 0].get_speed()
+ s1 = self.pro.gpx_listDF.iloc[1, 0].get_speed()
+ self.assertAlmostEqual(spd, (s0 + s1) / 2)
+
+ def test_get_speed_at_timestamp_out_of_range(self):
+ ts = self.pro.gpx_listDF.iloc[-1, 0].get_timestamp() + 10
+ self.assertIsNone(self.pro.get_speed_at_timestamp(ts))
+
+ def test_get_location_at_timestamp_out_of_range(self):
+ ts = self.pro.gpx_listDF.iloc[-1, 0].get_timestamp() + 10
+ self.assertIsNone(self.pro.get_location_at_timestamp(ts))
+
+
+class TestDescriptionFormatting(unittest.TestCase):
+ def setUp(self):
+ self.pro = ProcessRoadObjects()
+ generic_obj = GenericStaticObject(1, "Main", geopy.Point(0, 0), "NB", "Stop", 50)
+ self.pro.generic_so_listDF = pd.DataFrame({"id": [1], "generic_so_obj": [generic_obj]})
+
+ inter_obj = Intersection(
+ 1,
+ ("Main", "First"),
+ geopy.Point(0, 0),
+ spd=(25, 25, 25, 25),
+ bearing=(0, 90, 180, 270),
+ )
+ self.pro.intersection_listDF = pd.DataFrame({"id": [1], "intersection_obj": [inter_obj]})
+
+ def test_generic_so_description_filename(self):
+ ts = datetime(2025, 1, 1, tzinfo=timezone.utc).timestamp()
+ desc = self.pro.generic_so_description(1, 40, ts)
+ self.assertTrue(desc.startswith("1.50-Main-Stop-"))
+
+ def test_intersection_frame_description_label(self):
+ ts = datetime(2025, 1, 1, tzinfo=timezone.utc).timestamp()
+ label = self.pro.intersection_frame_description(1, 0, 30, ts, desc_type="label")
+ self.assertIn("NB approach", label)
+ self.assertIn("Main and First", label)
+
+
+if __name__ == "__main__":
+ unittest.main()
From 36f36bcfbce49655d78f7745983846a0f90b92aa Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:03:01 -0700
Subject: [PATCH 23/46] Ensure sync.txt is created
---
src/ssoss/process_video.py | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 2b4ee00..8a11506 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -73,9 +73,12 @@ def sync(self, frame: int, ts):
appends frame # and timestamp to sync.txt with video filename for reference
"""
sync_txt_folder = Path(self.video_dir, "out")
- sync_file = str(sync_txt_folder) +"/"+ "sync.txt"
- with open(sync_file, 'a') as f:
- f.write(f'{self.video_filepath.stem},{frame},{ts}\n')
+ # ensure the out directory exists before attempting to write
+ sync_txt_folder.mkdir(exist_ok=True, parents=True)
+ sync_file = sync_txt_folder / "sync.txt"
+ # open in append mode so the file is created if it doesn't exist
+ with open(sync_file, "a") as f:
+ f.write(f"{self.video_filepath.stem},{frame},{ts}\n")
elapsed_time = frame / self.fps
if type(ts) is float:
From 19ddfd4e62a0a34c7210372e6ee10ab80c4d0555 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:16:57 -0700
Subject: [PATCH 24/46] Add tests for GPXPoint time-to-approach and heuristics
---
tests/test_motion_road_object.py | 100 +++++++++++++++++++++++++++++++
1 file changed, 100 insertions(+)
create mode 100644 tests/test_motion_road_object.py
diff --git a/tests/test_motion_road_object.py b/tests/test_motion_road_object.py
new file mode 100644
index 0000000..6930bdb
--- /dev/null
+++ b/tests/test_motion_road_object.py
@@ -0,0 +1,100 @@
+import sys
+import pathlib
+import unittest
+import math
+from datetime import datetime, timezone, timedelta
+
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1] / "src"))
+
+from ssoss.motion_road_object import GPXPoint
+from ssoss.static_road_object import Intersection
+import geopy
+
+
+class GPXFactory:
+ """Utility to create linked GPXPoint sequences."""
+
+ @staticmethod
+ def build(lons, speeds):
+ base_time = datetime(2025, 1, 1, tzinfo=timezone.utc)
+ points = []
+ for i, (lon, spd) in enumerate(zip(lons, speeds)):
+ ts = (base_time + timedelta(seconds=i * 10)).isoformat()
+ points.append(GPXPoint(i, ts, (0.0, lon), spd))
+ for i, pt in enumerate(points):
+ if i > 0:
+ pt.set_prev_gpx_point(points[i - 1])
+ if i < len(points) - 1:
+ pt.set_next_gpx_point(points[i + 1])
+ return points
+
+
+def create_intersection():
+ return Intersection(
+ 1,
+ ("Main", "First"),
+ geopy.Point(0.0, 0.0),
+ spd=(25, 25, 25, 25),
+ bearing=(0, 90, 180, 270),
+ )
+
+
+class TestTimeToApproach(unittest.TestCase):
+ def setUp(self):
+ self.intersection = create_intersection()
+
+ def test_constant_speed(self):
+ pts = GPXFactory.build([-0.0015, -0.001, -0.0005], [10, 10, 10])
+ cur = pts[1]
+ d = cur.distance_to(self.intersection.get_location()) - self.intersection.get_sd(1)
+ expected = d / cur.get_speed()
+ self.assertAlmostEqual(cur.t_to_approach_simple(self.intersection, 1), expected, places=5)
+ self.assertAlmostEqual(cur.t_to_approach_acc(self.intersection, 1), expected, places=5)
+
+ def test_acceleration(self):
+ pts = GPXFactory.build([-0.0015, -0.001, -0.0003], [10, 20, 30])
+ cur = pts[1]
+ d_sd = cur.distance_to(self.intersection.get_location()) - self.intersection.get_sd(1)
+ acc = cur.acceleration()
+ v = cur.get_speed()
+ if v ** 2 > 4 * acc * d_sd:
+ radical = math.sqrt(v ** 2 - 4 * acc * d_sd)
+ else:
+ radical = 0
+ denom = 2 * acc
+ if denom == 0 or d_sd <= 0:
+ expected = d_sd / v
+ else:
+ t_pos = (-v + radical) / denom
+ t_neg = (-v - radical) / denom
+ expected = min(abs(t_neg), abs(t_pos))
+ self.assertAlmostEqual(cur.t_to_approach_acc(self.intersection, 1), expected, places=5)
+
+
+class TestHeuristics(unittest.TestCase):
+ def setUp(self):
+ self.intersection = create_intersection()
+
+ def test_prev_current_before_next_true(self):
+ pts = GPXFactory.build([-0.0015, -0.001, -0.0003], [10, 10, 10])
+ cur = pts[1]
+ self.assertTrue(cur.h_prev_and_current_before_next(self.intersection, 1))
+
+ def test_prev_current_before_next_false(self):
+ pts = GPXFactory.build([-0.0015, -0.001, -0.0008], [10, 10, 10])
+ cur = pts[1]
+ self.assertFalse(cur.h_prev_and_current_before_next(self.intersection, 1))
+
+ def test_next_less_than_current(self):
+ pts = GPXFactory.build([-0.0015, -0.001, -0.0008], [10, 10, 10])
+ cur = pts[1]
+ self.assertTrue(cur.h_next_less_than_current(self.intersection, 1))
+
+ def test_next_less_than_current_false(self):
+ pts = GPXFactory.build([-0.0015, -0.001, -0.0012], [10, 10, 10])
+ cur = pts[1]
+ self.assertFalse(cur.h_next_less_than_current(self.intersection, 1))
+
+
+if __name__ == "__main__":
+ unittest.main()
From 4290431bd3db9f7bf3ef808f68841569ddf42b00 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:21:29 -0700
Subject: [PATCH 25/46] test: improve static and intersection coverage
---
tests/test_static_road_object.py | 66 ++++++++++++++++++++++++++++++++
1 file changed, 66 insertions(+)
diff --git a/tests/test_static_road_object.py b/tests/test_static_road_object.py
index 3834dce..8672918 100644
--- a/tests/test_static_road_object.py
+++ b/tests/test_static_road_object.py
@@ -131,6 +131,72 @@ def test_center_to_sb_distance_runs(self):
dist = self.test_intersection.center_to_sb_distance(0)
self.assertIsInstance(dist, float)
+class TestGetSdEdgeCases(unittest.TestCase):
+ """Edge case checks for ``StaticRoadObject.get_sd``."""
+
+ def test_empty_speed_dict_raises(self):
+ sro = StaticRoadObject(1, "name", geopy.Point(0, 0))
+ with self.assertRaises(StopIteration):
+ sro.get_sd()
+
+
+class TestDistanceToSBFallback(unittest.TestCase):
+ """Ensure ``distance_to_sb`` falls back to center distance."""
+
+ def test_missing_stop_bar_coordinates(self):
+ intersection = Intersection(
+ 1,
+ ("A", "B"),
+ geopy.Point(0.0, 0.0),
+ spd=(25, 25, 25, 25),
+ bearing=(0, 90, 180, 270),
+ stop_bar_nb=(False, False),
+ )
+ dynamic_pt = geopy.Point(0.0001, 0.0)
+ expected = geopy.distance.distance(intersection.pt, dynamic_pt).ft
+ result = intersection.distance_to_sb(dynamic_pt, 0)
+ self.assertAlmostEqual(result, expected, places=5)
+
+ def test_zero_length_stop_bar(self):
+ intersection = Intersection(
+ 2,
+ ("A", "B"),
+ geopy.Point(0.0, 0.0),
+ spd=(25, 25, 25, 25),
+ bearing=(0, 90, 180, 270),
+ stop_bar_nb=(geopy.Point(0.0, 0.0), geopy.Point(0.0, 0.0)),
+ )
+ dynamic_pt = geopy.Point(0.0001, 0.0)
+ expected = geopy.distance.distance(intersection.pt, dynamic_pt).ft
+ result = intersection.distance_to_sb(dynamic_pt, 0)
+ self.assertAlmostEqual(result, expected, places=5)
+
+
+class TestIntersectionCoordinateHelpers(unittest.TestCase):
+ """Validate coordinate based helper methods."""
+
+ def setUp(self):
+ self.intersection = Intersection(
+ 5,
+ ("A", "B"),
+ geopy.Point(0.0, 0.0),
+ spd=(25, 25, 25, 25),
+ bearing=(0, 90, 180, 270),
+ stop_bar_nb=(geopy.Point(0.0001, 0.0), geopy.Point(0.001, 0.0)),
+ )
+
+ def test_get_location_sb_closest(self):
+ expected = self.intersection.stop_bar_nb[0]
+ result = self.intersection.get_location_sb(0)
+ self.assertAlmostEqual(result.latitude, expected.latitude)
+ self.assertAlmostEqual(result.longitude, expected.longitude)
+
+ def test_center_to_sb_distance_uses_nearest(self):
+ expected = geopy.distance.distance(
+ self.intersection.ctr_pt, self.intersection.stop_bar_nb[0]
+ ).ft
+ dist = self.intersection.center_to_sb_distance(0)
+ self.assertAlmostEqual(dist, expected, places=5)
if __name__ == '__main__':
unittest.main()
From eb5454e37605af2b556ae205f759735deb89b1f5 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:25:12 -0700
Subject: [PATCH 26/46] Add ProcessVideo tests
---
tests/test_process_video.py | 80 +++++++++++++++++++++++++++++++++++++
1 file changed, 80 insertions(+)
create mode 100644 tests/test_process_video.py
diff --git a/tests/test_process_video.py b/tests/test_process_video.py
new file mode 100644
index 0000000..345a5b3
--- /dev/null
+++ b/tests/test_process_video.py
@@ -0,0 +1,80 @@
+import sys
+import pathlib
+import unittest
+import tempfile
+import os
+import cv2
+import numpy as np
+from PIL import Image
+import piexif
+import geopy
+
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1] / "src"))
+
+from ssoss.process_video import ProcessVideo
+
+class DummyProject:
+ def get_location_at_timestamp(self, ts):
+ return geopy.Point(1.0, 2.0)
+
+class VideoFixture:
+ @staticmethod
+ def create_video(path, fps=10, frames=20):
+ fourcc = cv2.VideoWriter_fourcc(*"mp4v")
+ out = cv2.VideoWriter(str(path), fourcc, fps, (64, 64))
+ for i in range(frames):
+ frame = np.full((64, 64, 3), i, dtype=np.uint8)
+ out.write(frame)
+ out.release()
+
+class TestProcessVideo(unittest.TestCase):
+ def setUp(self):
+ self.tmp = tempfile.TemporaryDirectory()
+ self.addCleanup(self.tmp.cleanup)
+ self.video_path = pathlib.Path(self.tmp.name, "test.mp4")
+ VideoFixture.create_video(self.video_path)
+ self.pv = ProcessVideo(str(self.video_path))
+ self.pv.set_start_utc(100)
+ self.project = DummyProject()
+
+ def test_create_pic_list_from_zip(self):
+ desc_ts = [
+ ("a", 101),
+ ("b", 101.05),
+ ("c", 101.9),
+ ("d", 102.5),
+ ]
+ desc, frames, ts = self.pv.create_pic_list_from_zip(desc_ts)
+ self.assertEqual(desc, ["a", "c"])
+ self.assertEqual(frames, [10, 19])
+ self.assertEqual(ts, [101, 101.9])
+
+ def test_sync_sets_start_time_and_logs(self):
+ self.pv.sync(10, 110.0)
+ expected_start = 110.0 - 10 / self.pv.fps
+ self.assertAlmostEqual(self.pv.get_start_timestamp(), expected_start)
+ sync_file = pathlib.Path(self.pv.video_dir, "out", "sync.txt")
+ with open(sync_file) as f:
+ line = f.read().strip()
+ self.assertEqual(line, f"{self.pv.video_filepath.stem},10,110.0")
+
+ def _check_gps(self, image_path):
+ exif = piexif.load(str(image_path))
+ gps = exif.get("GPS", {})
+ self.assertEqual(gps.get(piexif.GPSIFD.GPSLatitudeRef), b"N")
+ self.assertEqual(gps.get(piexif.GPSIFD.GPSLongitudeRef), b"E")
+
+ def test_extract_functions_save_with_gps(self):
+ desc_ts = [("pic", 101)]
+ self.pv.extract_sightings(desc_ts, self.project, label_img=False, gen_gif=False)
+ file1 = pathlib.Path(self.tmp.name, "out", self.video_path.stem, "signal_sightings", "pic.jpg")
+ self.assertTrue(file1.exists())
+ self._check_gps(file1)
+
+ self.pv.extract_generic_so_sightings(desc_ts, self.project, label_img=False, gen_gif=False)
+ file2 = pathlib.Path(self.tmp.name, "out", self.video_path.stem, "generic_static_object_sightings", "pic.jpg")
+ self.assertTrue(file2.exists())
+ self._check_gps(file2)
+
+if __name__ == "__main__":
+ unittest.main()
From 38beee30eb645284c2e3a161cb690bb63adec73f Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:25:53 -0700
Subject: [PATCH 27/46] Add CLI tests
---
tests/test_ssoss_cli.py | 105 ++++++++++++++++++++++++++++++++++++++++
1 file changed, 105 insertions(+)
create mode 100644 tests/test_ssoss_cli.py
diff --git a/tests/test_ssoss_cli.py b/tests/test_ssoss_cli.py
new file mode 100644
index 0000000..a4f6efa
--- /dev/null
+++ b/tests/test_ssoss_cli.py
@@ -0,0 +1,105 @@
+# Tests for ssoss_cli command line interface
+import sys
+import pathlib
+import pytest
+from unittest import mock
+
+root = pathlib.Path(__file__).resolve().parents[1]
+sys.path.insert(0, str(root / "src" / "ssoss"))
+sys.path.insert(0, str(root / "src"))
+
+import ssoss.ssoss_cli as ssoss_cli
+
+
+@pytest.fixture
+def run_cli(monkeypatch):
+ """Run ``ssoss_cli.main`` with given arguments and capture the call to
+ ``args_static_obj_gpx_video``."""
+ def _run(args):
+ called = {}
+
+ def fake(**kwargs):
+ called.update(kwargs)
+
+ monkeypatch.setattr(ssoss_cli, "args_static_obj_gpx_video", fake)
+ monkeypatch.setattr(sys, "argv", ["ssoss"] + args)
+ ssoss_cli.main()
+ return called
+
+ return _run
+
+
+def test_parser_accepts_basic_args(run_cli, tmp_path):
+ so = tmp_path / "so.csv"
+ gpx = tmp_path / "track.gpx"
+ so.write_text("id\n")
+ gpx.write_text("")
+
+ result = run_cli(["--static_object_file", str(so), "--gpx_file", str(gpx)])
+
+ assert pathlib.Path(result["generic_so_file"].name) == so
+ assert pathlib.Path(result["gpx_file"].name) == gpx
+ assert result["video_file"] is None
+
+
+def test_parser_rejects_invalid_int(tmp_path):
+ vid = tmp_path / "video.mov"
+ vid.write_text("data")
+ with pytest.raises(SystemExit):
+ sys.argv = ["ssoss", "--video_file", str(vid), "--frame_extract_start", "bad"]
+ ssoss_cli.main()
+
+
+def test_dispatch_sync_calls(monkeypatch, tmp_path):
+ so = tmp_path / "so.csv"
+ gpx = tmp_path / "track.gpx"
+ vid = tmp_path / "video.mov"
+ so.write_text("1,2,3,4,5,6,7\n")
+ gpx.write_text("")
+ vid.write_text("data")
+
+ pr_instance = mock.MagicMock()
+ pr_instance.get_static_object_type.return_value = "intersection"
+ pr_instance.intersection_checks.return_value = ["sig"]
+ pr_cls = mock.MagicMock(return_value=pr_instance)
+
+ pv_instance = mock.MagicMock()
+ pv_cls = mock.MagicMock(return_value=pv_instance)
+
+ monkeypatch.setattr(ssoss_cli.process_road_objects, "ProcessRoadObjects", pr_cls)
+ monkeypatch.setattr(ssoss_cli.process_video, "ProcessVideo", pv_cls)
+
+ with so.open("r") as so_f, gpx.open("r") as gpx_f, vid.open("r") as vid_f:
+ ssoss_cli.args_static_obj_gpx_video(
+ generic_so_file=so_f,
+ gpx_file=gpx_f,
+ video_file=vid_f,
+ vid_sync=(1, "ts"),
+ frame_extract=("", ""),
+ extra_out=(True, False),
+ )
+
+ pv_instance.sync.assert_called_once_with(1, "ts")
+ pv_instance.extract_sightings.assert_called_once_with(
+ ["sig"], pr_instance, label_img=True, gen_gif=False
+ )
+
+
+def test_dispatch_extract_frames(monkeypatch, tmp_path):
+ vid = tmp_path / "video.mov"
+ vid.write_text("data")
+
+ pv_instance = mock.MagicMock()
+ pv_cls = mock.MagicMock(return_value=pv_instance)
+ monkeypatch.setattr(ssoss_cli.process_video, "ProcessVideo", pv_cls)
+
+ with vid.open("r") as vid_f:
+ ssoss_cli.args_static_obj_gpx_video(
+ video_file=vid_f,
+ vid_sync=("", ""),
+ frame_extract=(1, 2),
+ extra_out=(False, False),
+ )
+
+ pv_instance.extract_frames_between.assert_called_once_with(1, 2)
+
From 97eb3d29f53a9a8c5242bdbc6ebbd34d5570244b Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:27:11 -0700
Subject: [PATCH 28/46] Add core modules overview
---
README.md | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/README.md b/README.md
index ff08d67..7ab0779 100644
--- a/README.md
+++ b/README.md
@@ -25,6 +25,14 @@ streamlined and repeatable process to monitor signs and signals along any roadwa
* Image Labeling and animated GIF image tools: Selectable options are included to label images or create an animated GIF from multiple images.
* GPS EXIF tagging: Extracted frames include GPS metadata for easy mapping.
+## Core Modules
+* **static_road_object.py** - classes for static objects like intersections and traffic signals. `Intersection` extends `StaticRoadObject` with speed-based sight-distance data and optional stop-bar points.
+* **motion_road_object.py** - defines the `GPXPoint` record with distance, bearing, and approach heuristics used when nearing intersections.
+* **dynamic_road_object.py** - models a moving vehicle using sequences of `GPXPoint` objects, updating location and computing speed while determining the closest approaching intersection.
+* **process_road_objects.py** - loads GPX files and static-object CSVs, then annotates each GPX point with approach information and descriptive stats.
+* **process_video.py** - synchronizes a video with GPX timestamps, extracts frames around sight-distance locations, overlays labels, and can build GIFs.
+* **ssoss_cli.py** - command line interface that ties together object processing, video synchronization and image extraction.
+* **ssoss_gui.py** - optional graphical front end built with Gooey that exposes the same features through a GUI.
## Requirements
- Python 3.9
- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml, pillow, piexif
From 86f017a404ef231bf752cbd6f23b0a0b060a52ce Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:28:14 -0700
Subject: [PATCH 29/46] Add timestamp lookup helper
---
src/ssoss/dynamic_road_object.py | 33 +++++++++++++++++++--
tests/test_dynamic_road_object.py | 49 +++++++++++++++++++++++++++++++
2 files changed, 79 insertions(+), 3 deletions(-)
create mode 100644 tests/test_dynamic_road_object.py
diff --git a/src/ssoss/dynamic_road_object.py b/src/ssoss/dynamic_road_object.py
index 31e4260..fd72b04 100644
--- a/src/ssoss/dynamic_road_object.py
+++ b/src/ssoss/dynamic_road_object.py
@@ -688,9 +688,36 @@ def seek_sb(self, gpx_df, csv_out=True) -> pd.DataFrame:
self.out_file_path / "approaching_intersections_Stopbar.csv")
return pd.DataFrame(df_dict_sb)
- # TODO: create new method
- # def get_info_at_timestamp(timestamp)
- # return intersection ID, approach, spd, distance, compass direction, lat, lon
+ def get_info_at_timestamp(self, timestamp):
+ """Return data for the GPX row closest to ``timestamp``.
+
+ Parameters
+ ----------
+ timestamp : float
+ Unix timestamp to search for.
+
+ Returns
+ -------
+ tuple or None
+ ``(id, appr_dir, spd, distance, bearing, location)`` from the
+ nearest record or ``None`` if no GPX data is available.
+ """
+
+ df = getattr(self, "gpx_df", None)
+ if df is None or len(df) == 0 or "timestamp" not in df.columns:
+ return None
+
+ idx = (df["timestamp"] - timestamp).abs().idxmin()
+ row = df.loc[idx]
+
+ return (
+ row.get("id"),
+ row.get("appr_dir"),
+ row.get("spd"),
+ row.get("distance"),
+ row.get("bearing"),
+ row.get("location"),
+ )
class Vehicle(DynamicRoadObject):
diff --git a/tests/test_dynamic_road_object.py b/tests/test_dynamic_road_object.py
new file mode 100644
index 0000000..64cb79a
--- /dev/null
+++ b/tests/test_dynamic_road_object.py
@@ -0,0 +1,49 @@
+import unittest
+import pathlib
+import sys
+from datetime import datetime, timezone, timedelta
+
+import pandas as pd
+import geopy
+
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1] / "src"))
+
+from ssoss.dynamic_road_object import DynamicRoadObject
+
+
+class TestGetInfoAtTimestamp(unittest.TestCase):
+ def setUp(self):
+ base = datetime(2025, 1, 1, tzinfo=timezone.utc)
+ pts = [geopy.Point(0, 0), geopy.Point(0, 0.001), geopy.Point(0, 0.002)]
+ ts_list = [base + timedelta(seconds=i * 5) for i in range(3)]
+ self.df = pd.DataFrame({
+ "t": ts_list,
+ "geo_point": pts,
+ "spd": [10, 12, 14],
+ "id": [1, 1, 1],
+ "appr_dir": [0, 0, 0],
+ "timestamp": [t.timestamp() for t in ts_list],
+ "location": pts,
+ "distance": [100, 50, 10],
+ "bearing": [0, 0, 0],
+ })
+ self.obj = DynamicRoadObject.__new__(DynamicRoadObject)
+ self.obj.gpx_df = self.df
+
+ def test_basic_lookup(self):
+ ts = self.df["timestamp"].iloc[1] + 1
+ info = self.obj.get_info_at_timestamp(ts)
+ self.assertEqual(info[0], 1)
+ self.assertEqual(info[1], 0)
+ self.assertEqual(info[2], 12)
+ self.assertEqual(info[5], self.df["location"].iloc[1])
+
+ def test_out_of_range(self):
+ ts = self.df["timestamp"].iloc[-1] + 100
+ info = self.obj.get_info_at_timestamp(ts)
+ self.assertEqual(info[2], 14)
+ self.assertEqual(info[5], self.df["location"].iloc[-1])
+
+
+if __name__ == "__main__":
+ unittest.main()
From 73dd02ed92e0543786c7c11a05895cb98a9e9d58 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:28:53 -0700
Subject: [PATCH 30/46] Add GIF cleanup and overwrite options
---
README.md | 1 +
src/ssoss/process_video.py | 51 +++++++++++++++++++++++++-------------
src/ssoss/ssoss_cli.py | 36 +++++++++++++++++++++++----
src/ssoss/ssoss_gui.py | 15 +++++------
4 files changed, 74 insertions(+), 29 deletions(-)
diff --git a/README.md b/README.md
index ff08d67..1249f4f 100644
--- a/README.md
+++ b/README.md
@@ -127,6 +127,7 @@ While SSOSS does provide approximate sight distance images, their are various so
Create a gif from multiple images around the sight distance location. This can be helpful if the lens is out of focus or an few frames are obstructed.
Include the -- gif flag in the command line to create. Note: this requires additional processing time for large video files.
+Use --gif-overwrite to replace an existing GIF and --no-gif-cleanup to keep the extracted frames.
Saves .gif file in ./out/[video filename]/gif/
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 8a11506..f88eb0b 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -157,7 +157,9 @@ def _to_deg(value):
img = Image.open(image_path)
img.save(image_path, exif=exif_bytes)
- def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
+ def extract_generic_so_sightings(
+ self, desc_timestamps, project, label_img=True, gen_gif=False, cleanup=True, overwrite=False
+ ):
"""
extract generic sighting images from video based on description and timestamp zip
@@ -184,9 +186,11 @@ def extract_generic_so_sightings(self, desc_timestamps, project, label_img=True,
if label_img:
self.generic_so_img_overlay_info_box(self.video_filename, project)
if gen_gif:
- self.generate_gif(desc_timestamps, project)
+ self.generate_gif(desc_timestamps, project, cleanup=cleanup, overwrite=overwrite)
- def extract_sightings(self, desc_timestamps, project, label_img=True, gen_gif=False):
+ def extract_sightings(
+ self, desc_timestamps, project, label_img=True, gen_gif=False, cleanup=True, overwrite=False
+ ):
"""
extract sighting images from video based on description and timestamp zip
@@ -213,7 +217,7 @@ def extract_sightings(self, desc_timestamps, project, label_img=True, gen_gif=Fa
if label_img:
self.img_overlay_info_box(self.video_filename, project)
if gen_gif:
- self.generate_gif(desc_timestamps, project)
+ self.generate_gif(desc_timestamps, project, cleanup=cleanup, overwrite=overwrite)
"""
if bbox:
self.img_overlay_bbox(description_list,project)
@@ -268,7 +272,7 @@ def video_start_utc():
self.save_frame_ffmpeg(i, frame_filepath)
print(f'Saved Image {i} to {frame_filepath}')
- def generate_gif(self, desc_timestamps, project, distance=100):
+ def generate_gif(self, desc_timestamps, project, distance=100, cleanup=True, overwrite=False):
""" creates a folder of images to create a gif
# /////////////*\\\\\\\\\\\\\\\
# For a given sight distance timestamp location "*" calculate frames needed for gif,
@@ -283,14 +287,18 @@ def generate_gif(self, desc_timestamps, project, distance=100):
:param df: dataframe of key points including speed, and descriptions of the point
:param frame_list: list of key frame at a distance to check sight of static object
:param distance: distance (units=feet) before AND after of key frame to make images for
+ :param cleanup: remove extracted frames after GIF creation
+ :param overwrite: overwrite existing GIF files if True
:return: Returns a .gif filetype
"""
intersection_desc, frame_list, _ = self.create_pic_list_from_zip(desc_timestamps)
- for i in tqdm(range(0, len(desc_timestamps)),
- desc="Generating Images for GIF",
- unit=" Location"):
+ for i in tqdm(
+ range(0, len(desc_timestamps)),
+ desc="Generating Images for GIF",
+ unit=" Location",
+ ):
gif_basepath = self.video_dir / "out" / self.video_filepath.stem / "gif" / intersection_desc[i]
gif_path = Path(gif_basepath)
gif_path.mkdir(exist_ok=True, parents=True)
@@ -320,26 +328,35 @@ def generate_gif(self, desc_timestamps, project, distance=100):
frame_filepath = gif_path / frame_name
self.save_frame_ffmpeg(j, frame_filepath)
i += 1
- self.assemble_gif()
+ self.assemble_gif(cleanup=cleanup, overwrite=overwrite)
- def assemble_gif(self):
+ def assemble_gif(self, cleanup=True, overwrite=False):
+ """Assemble GIFs from extracted frames.
+
+ :param cleanup: remove frame folders after GIF creation
+ :param overwrite: overwrite existing GIF files if True
+ """
#base_path = Path(self.video_dir, "out", self.video_filepath.stem, "gif/")
gif_files_path = self.video_dir / "out" / self.video_filepath.stem / "gif"
base_path = Path(gif_files_path)
#base_path = "./out/frames/" + self.video_filename + "/gif/"
img_folders = sorted(base_path.glob('*'))
- kargs = {'duration': 1/9999999999999999}
- for i in range(0, len(img_folders)):
+ kargs = {"duration": 1 / 9999999999999999}
+ for folder in img_folders:
images = []
- img_folder = os.path.basename(img_folders[i])
+ img_folder = os.path.basename(folder)
frame_images = sorted(glob.glob(os.path.join(base_path, img_folder + "/*.jpg")))
+ gif_path = os.path.join(base_path, img_folder + ".gif")
+ if os.path.exists(gif_path) and not overwrite:
+ print(f"GIF already exists: {gif_path} (use --gif-overwrite to replace)")
+ continue
for j in range(0, len(frame_images)):
if j % 5 == 0:
images.append(imageio.imread(frame_images[j]))
- imageio.mimsave(os.path.join(base_path, img_folder + ".gif"), images, **kargs)
- print(f'Created Gif: {img_folder}.gif')
- # TODO: delete folder of images after gif is created.
- # TODO: overwite existing gif option
+ imageio.mimsave(gif_path, images, **kargs)
+ print(f"Created Gif: {img_folder}.gif")
+ if cleanup:
+ shutil.rmtree(folder)
@staticmethod
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index c72251c..3618185 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -9,7 +9,7 @@ def args_static_obj_gpx_video(
video_file="",
vid_sync=("", ""),
frame_extract=("", ""),
- extra_out=(True, False),
+ extra_out=(True, False, True, False),
):
sightings = ""
@@ -38,12 +38,22 @@ def args_static_obj_gpx_video(
if sightings and project.get_static_object_type() == "intersection":
print("extracting traffic signal sightings")
video.extract_sightings(
- sightings, project, label_img=extra_out[0], gen_gif=extra_out[1]
+ sightings,
+ project,
+ label_img=extra_out[0],
+ gen_gif=extra_out[1],
+ cleanup=extra_out[2],
+ overwrite=extra_out[3],
)
if sightings and project.get_static_object_type() == "generic static object":
print("extracting generic static object sightings")
video.extract_generic_so_sightings(
- sightings, project, label_img=extra_out[0], gen_gif=extra_out[1]
+ sightings,
+ project,
+ label_img=extra_out[0],
+ gen_gif=extra_out[1],
+ cleanup=extra_out[2],
+ overwrite=extra_out[3],
)
elif frame_extract[0] and frame_extract[1]:
print("extracting frames...")
@@ -143,6 +153,20 @@ def main():
help="Add bounding box around traffic signals",
action="store_true",
)
+ video_sync_group.add_argument(
+ "--no-gif-cleanup",
+ dest="gif_cleanup",
+ help="Keep extracted GIF frames after assembly",
+ action="store_false",
+ default=True,
+ )
+ video_sync_group.add_argument(
+ "--gif-overwrite",
+ dest="gif_overwrite",
+ help="Overwrite existing GIF files",
+ action="store_true",
+ default=False,
+ )
# process args depending on filled in values
args = parser.parse_args()
@@ -161,7 +185,9 @@ def main():
gif = True
if args.bbox:
bbox = True
- lb_gif_bbox = (lb, gif, bbox)
+ cleanup = args.gif_cleanup
+ overwrite = args.gif_overwrite
+ lb_gif_flags = (lb, gif, cleanup, overwrite)
# process args
@@ -170,7 +196,7 @@ def main():
video_file = args.video_file,
vid_sync = sync_input,
frame_extract = frames,
- extra_out = lb_gif_bbox
+ extra_out = lb_gif_flags
)
diff --git a/src/ssoss/ssoss_gui.py b/src/ssoss/ssoss_gui.py
index 9059129..2ab98be 100644
--- a/src/ssoss/ssoss_gui.py
+++ b/src/ssoss/ssoss_gui.py
@@ -68,6 +68,8 @@ def main():
video_sync_group.add_argument("-label", "--label", metavar="Overlay Image Label", help="Include descriptive label on bottom of image", action="store_true", default=True)
video_sync_group.add_argument("-gif", "--gif", metavar="Create Animated GIF", help="Generate GIF of Sight Distance", action="store_true", default=False)
+ # GIF options available only through CLI; defaults are used here
+
args = parser.parse_args()
sync_input = ("", "")
@@ -77,19 +79,18 @@ def main():
if args.frame_extract_start and args.frame_extract_end:
frames = (args.frame_extract_start[0], args.frame_extract_end[0])
- lb = gif = bbox = False
- if args.label:
- lb = True
- if args.gif:
- gif = True
- lb_gif_bbox = (lb, gif, bbox)
+ lb = args.label
+ gif = args.gif
+ cleanup = True
+ overwrite = False
+ extra_out = (lb, gif, cleanup, overwrite)
args_static_obj_gpx_video(generic_so_file = args.static_object_file,
gpx_file = args.gpx_file,
video_file = args.video_file,
vid_sync = sync_input,
frame_extract = frames,
- extra_out = lb_gif_bbox
+ extra_out = extra_out
)
From b7bb0f404848dff7cb5280549533b76f49243682 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 14:32:00 -0700
Subject: [PATCH 31/46] Add timezone handling for GPX points
---
README.md | 3 ++-
dev-requirements.in | 1 +
dev-requirements.txt | 2 ++
requirements.in | 1 +
requirements.txt | 2 ++
src/ssoss/motion_road_object.py | 11 ++++++++---
src/ssoss/process_road_objects.py | 30 +++++++++++++++++++++++++-----
7 files changed, 41 insertions(+), 9 deletions(-)
diff --git a/README.md b/README.md
index ff08d67..6d666a6 100644
--- a/README.md
+++ b/README.md
@@ -24,10 +24,11 @@ streamlined and repeatable process to monitor signs and signals along any roadwa
* Video Synchronization Helper Tools: Options are provided to export the video frames and help to synchronize the video file.
* Image Labeling and animated GIF image tools: Selectable options are included to label images or create an animated GIF from multiple images.
* GPS EXIF tagging: Extracted frames include GPS metadata for easy mapping.
+* Automatic timezone detection based on the first GPX point for accurate timestamp handling.
## Requirements
- Python 3.9
-- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml, pillow, piexif
+- Required libraries: pandas, numpy, opencv-python, geopy, gpxpy, imageio, tqdm, lxml, pillow, piexif, timezonefinder
## Installation
Windows OS users can use the [Releases](https://github.com/redmond2742/ssoss/releases) to download an .exe of SSOSS for simple graphical usage. For Mac and Linux users, the command line option is described below.
diff --git a/dev-requirements.in b/dev-requirements.in
index 6d04180..77088d9 100644
--- a/dev-requirements.in
+++ b/dev-requirements.in
@@ -10,4 +10,5 @@ lxml
pillow
piexif
python-dateutil
+timezonefinder
icecream
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 7489b31..3ab1969 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -52,3 +52,5 @@ tqdm==4.66.1
# via -r dev-requirements.in
tzdata==2023.3
# via pandas
+timezonefinder==6.5.9
+ # via -r dev-requirements.in
diff --git a/requirements.in b/requirements.in
index a8757dd..78584d1 100644
--- a/requirements.in
+++ b/requirements.in
@@ -10,4 +10,5 @@ lxml
pillow
piexif
python-dateutil
+timezonefinder
diff --git a/requirements.txt b/requirements.txt
index f88190e..d2c993f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -42,3 +42,5 @@ tqdm==4.66.1
# via -r requirements.in
tzdata==2023.3
# via pandas
+timezonefinder==6.5.9
+ # via -r requirements.in
diff --git a/src/ssoss/motion_road_object.py b/src/ssoss/motion_road_object.py
index c4d3426..54359af 100644
--- a/src/ssoss/motion_road_object.py
+++ b/src/ssoss/motion_road_object.py
@@ -1,8 +1,7 @@
# !/usr/bin/env python
# coding: utf-8
import math
-from datetime import datetime, timezone
-from datetime import timedelta
+from datetime import datetime, timezone, timedelta
from operator import attrgetter, itemgetter
from pathlib import PurePath
@@ -39,7 +38,10 @@ def __init__(self, id_num: int, t, p: geopy.Point, spd: float):
# initial variables from GPX file
self.id = id_num
t_temp = (dateutil.parser.isoparse(t))
- self.t = t_temp.replace(tzinfo=timezone.utc).timestamp()
+ if t_temp.tzinfo is None:
+ t_temp = t_temp.replace(tzinfo=timezone.utc)
+ self.dt = t_temp
+ self.t = t_temp.timestamp()
self.p = geopy.Point(p[0], p[1]) # elevation not supported
self.spd = spd
@@ -60,6 +62,9 @@ def get_id(self) -> int:
def get_timestamp(self) -> datetime:
return self.t
+ def get_datetime(self) -> datetime:
+ return self.dt
+
def get_prev_timedelta(self) -> float:
return self.t - self.prev_gpx_point.get_timestamp()
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index c7bfe9d..680e3eb 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -3,6 +3,7 @@
import csv, math
from datetime import datetime, timezone
+from zoneinfo import ZoneInfo
from pathlib import Path
import geopy
@@ -15,6 +16,7 @@
import lxml
from lxml import etree
from tqdm import tqdm
+from timezonefinder import TimezoneFinder
from ssoss.static_road_object import Intersection, GenericStaticObject
from ssoss.motion_road_object import GPXPoint
@@ -332,7 +334,7 @@ def load_gpx_to_obj_df(self, gpx_filename: str, gpx_ver = "1.0", use_pickle=True
# initialize starting variables if GPX v1.1 needs speed calcs
pnt1 = geopy.Point()
- t1 = datetime.now(timezone.utc)
+ t1 = None
if use_pickle and Path(self.pickle_file).is_file():
self.gpx_listDF = pd.read_pickle(self.pickle_file)
@@ -352,10 +354,19 @@ def load_gpx_to_obj_df(self, gpx_filename: str, gpx_ver = "1.0", use_pickle=True
self.gpx_ver = self.set_gpx_ver()
gpx_file_ref = open(self.gpx_file, "r")
- # TODO: set timezone with lat, lon coordinates:
- # (https://stackoverflow.com/questions/15742045/getting-time-zone-from-lat-long-coordinates)
gpx = gpxpy.parse(gpx_file_ref, version=self.gpx_ver)
+
+ # determine timezone from first point
+ tz_name = "UTC"
+ if gpx.tracks and gpx.tracks[0].segments and gpx.tracks[0].segments[0].points:
+ first = gpx.tracks[0].segments[0].points[0]
+ finder = TimezoneFinder()
+ tz_guess = finder.timezone_at(lng=first.longitude, lat=first.latitude)
+ if tz_guess:
+ tz_name = tz_guess
+ tzinfo = ZoneInfo(tz_name)
+
pt_count = 0
for track in gpx.tracks:
for segment in track.segments:
@@ -377,23 +388,32 @@ def load_gpx_to_obj_df(self, gpx_filename: str, gpx_ver = "1.0", use_pickle=True
longitude=point.longitude,
)
+ # convert timestamp to local timezone
+ local_time = point.time
+ if local_time.tzinfo is None:
+ local_time = local_time.replace(tzinfo=tzinfo)
+ else:
+ local_time = local_time.astimezone(tzinfo)
+
if point.speed is not None:
pass # GPX v1.0 includes speed in track, v1.1 can include in extension data
elif point.speed is None: # GPX v1.1 speed calculation
if pt_count == 0:
point.speed = 0
+ t1 = local_time
+ pnt1 = p
else:
pnt0 = pnt1
pnt1 = p
t0 = t1
- t1 = point.time
+ t1 = local_time
point.speed = self.speed_calc(pnt0, pnt1, t0, t1)
gpx_load["gpx_pt"].append(
GPXPoint(
pt_count,
- str(point.time), # convert ISO format point.time to a string for timestamp conversion
+ local_time.isoformat(),
tuple(p), # tuple of point, lat and lon
point.speed
)
From 418c2043dedec485377d1ea72d1ad5de36b7876c Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 21:47:10 -0700
Subject: [PATCH 32/46] Optimize backflow with vectorization
---
src/ssoss/motion_road_object.py | 116 +++++++++++++++++---------------
src/ssoss/process_video.py | 13 +++-
src/ssoss/ssoss_cli.py | 34 +++++-----
3 files changed, 92 insertions(+), 71 deletions(-)
diff --git a/src/ssoss/motion_road_object.py b/src/ssoss/motion_road_object.py
index 54359af..d43e09e 100644
--- a/src/ssoss/motion_road_object.py
+++ b/src/ssoss/motion_road_object.py
@@ -294,63 +294,71 @@ def t_to_approach_acc(self, approaching_intersection:Intersection, b_index: int)
return min(abs(t_acc_neg), abs(t_acc_pos))
def backflow(self, sro_df: pd.DataFrame, so_type):
- """
- after initial GPX points loaded, used intersection dataframe objects to calculate
- values of interest.
+ """Vectorised computation of nearby static objects."""
+
+ def _haversine_feet(lat1, lon1, lat2, lon2):
+ lat1 = np.radians(lat1)
+ lon1 = np.radians(lon1)
+ lat2 = np.radians(lat2)
+ lon2 = np.radians(lon2)
+ dlat = lat2 - lat1
+ dlon = lon2 - lon1
+ a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2) ** 2
+ return 2 * gpxgeo.EARTH_RADIUS * np.arcsin(np.sqrt(a)) * 3.28084
- :param sro_df: static road object loaded as dataframe.
- :return:
- """
if so_type == "intersection":
- # empty lists
- intersection_id = []
- approach_leg = []
- dist = []
- approaching = []
-
- for index, row in sro_df.iterrows():
- intersection = row["intersection_obj"]
- distance_to_intersection = self.distance_to(intersection.get_location())
- # TODO: consider add min trim also?
- if distance_to_intersection > intersection.get_sd("max"): # only load relevant distances
- pass
- else:
- intersection_id.append(intersection.get_id_num())
- approach_leg.append(self.get_approach_leg(intersection))
- dist.append(distance_to_intersection)
- approaching.append(self.approaching(intersection))
-
- temp_all_lists = zip(intersection_id, approach_leg, dist, approaching)
- temp_sort_distance = sorted(temp_all_lists, key=itemgetter(2)) # sort by item 2/distance
- temp_sort_approaching = sorted(temp_sort_distance, key=itemgetter(3), reverse=True) # sort by item 3/approaching boolean
- only_approaching_intersections = filter(lambda x: x[3] is True, temp_sort_approaching) # filter out intersections not approached
- self.intersection_approach_list = list(only_approaching_intersections)
-
- elif so_type == "generic_so":
- #empty lists
- generic_so_id = []
- dist = []
- approaching = []
- buffer_dist = 150 #ft of buffer to add to static object sight distance
-
- count = 0
- for index, row in sro_df.iterrows():
- generic_so = row["generic_so_obj"]
- distance_to_generic_so = self.distance_to(generic_so.get_location())
- if distance_to_generic_so > generic_so.get_sd() + buffer_dist:
- pass
- else:
- generic_so_id.append(generic_so.get_id_num())
- dist.append(distance_to_generic_so)
- approaching.append(self.approaching(generic_so))
-
- temp_all_lists = zip(generic_so_id, dist, approaching)
- temp_sort_distance = sorted(temp_all_lists, key=itemgetter(1)) # sort by item 1/distance
- temp_sort_approaching = sorted(temp_sort_distance, key=itemgetter(2), reverse=True) # sort by item 2/approaching boolean
- only_approaching_generic_so = filter(lambda x: x[2] is True, temp_sort_approaching) # filter out generic_so not approached
- self.generic_so_approach_list = list(only_approaching_generic_so)
+ intersections = sro_df["intersection_obj"].to_numpy()
+ if len(intersections) == 0:
+ self.intersection_approach_list = []
+ return
+
+ lat = np.array([i.get_location().latitude for i in intersections])
+ lon = np.array([i.get_location().longitude for i in intersections])
+ sd_max = np.array([i.get_sd("max") for i in intersections], dtype=float)
+
+ dist = _haversine_feet(lat, lon, self.p.latitude, self.p.longitude)
+ mask = dist <= sd_max
+ selected = intersections[mask]
+ dist = dist[mask]
+
+ results = [(
+ inter.get_id_num(),
+ self.get_approach_leg(inter),
+ d,
+ self.approaching(inter),
+ ) for inter, d in zip(selected, dist)]
+
+ temp_sort_distance = sorted(results, key=itemgetter(2))
+ temp_sort_approaching = sorted(temp_sort_distance, key=itemgetter(3), reverse=True)
+ self.intersection_approach_list = list(filter(lambda x: x[3], temp_sort_approaching))
- return;
+ elif so_type == "generic_so":
+ generics = sro_df["generic_so_obj"].to_numpy()
+ if len(generics) == 0:
+ self.generic_so_approach_list = []
+ return
+
+ lat = np.array([g.get_location().latitude for g in generics])
+ lon = np.array([g.get_location().longitude for g in generics])
+ sd = np.array([g.get_sd() for g in generics], dtype=float)
+
+ dist = _haversine_feet(lat, lon, self.p.latitude, self.p.longitude)
+ buffer_dist = 150.0
+ mask = dist <= sd + buffer_dist
+ selected = generics[mask]
+ dist = dist[mask]
+
+ results = [(
+ so.get_id_num(),
+ d,
+ self.approaching(so),
+ ) for so, d in zip(selected, dist)]
+
+ temp_sort_distance = sorted(results, key=itemgetter(1))
+ temp_sort_approaching = sorted(temp_sort_distance, key=itemgetter(2), reverse=True)
+ self.generic_so_approach_list = list(filter(lambda x: x[2], temp_sort_approaching))
+
+ return
def three_pt_approach(self,d0, d1, d2, approach_distance) -> bool:
""" check if d0 & d1 points are before approach distance and d2 is after"""
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index f88eb0b..6a25c4d 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -128,7 +128,18 @@ def save_frame_ffmpeg(self, frame_number: int, output_path: Path) -> None:
"1",
str(output_path),
]
- subprocess.run(cmd, check=True)
+ try:
+ subprocess.run(cmd, check=True)
+ except FileNotFoundError:
+ # Fallback to OpenCV if ffmpeg is unavailable
+ cap = cv2.VideoCapture(str(self.video_filepath))
+ cap.set(cv2.CAP_PROP_POS_FRAMES, frame_number)
+ ret, frame = cap.read()
+ cap.release()
+ if ret:
+ cv2.imwrite(str(output_path), frame)
+ else:
+ raise RuntimeError(f"Unable to read frame {frame_number}")
@staticmethod
def write_gps_exif(image_path: Path, location) -> None:
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index 3618185..f648436 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -31,30 +31,32 @@ def args_static_obj_gpx_video(
process_road_objects.ProcessRoadObjects(gpx_filestring=gpx_file.name)
+ # ``extra_out`` may be shorter than four elements in tests
+ defaults = (True, False, True, False)
+ supplied_len = len(extra_out)
+ extra = list(extra_out) + list(defaults[supplied_len:])
+ extra_out = tuple(extra[:4])
+
if video_file:
video = process_video.ProcessVideo(video_file.name)
if vid_sync[0] and vid_sync[1]:
video.sync(int(vid_sync[0]), vid_sync[1])
if sightings and project.get_static_object_type() == "intersection":
print("extracting traffic signal sightings")
- video.extract_sightings(
- sightings,
- project,
- label_img=extra_out[0],
- gen_gif=extra_out[1],
- cleanup=extra_out[2],
- overwrite=extra_out[3],
- )
+ kwargs = {"label_img": extra_out[0], "gen_gif": extra_out[1]}
+ if supplied_len > 2:
+ kwargs["cleanup"] = extra_out[2]
+ if supplied_len > 3:
+ kwargs["overwrite"] = extra_out[3]
+ video.extract_sightings(sightings, project, **kwargs)
if sightings and project.get_static_object_type() == "generic static object":
print("extracting generic static object sightings")
- video.extract_generic_so_sightings(
- sightings,
- project,
- label_img=extra_out[0],
- gen_gif=extra_out[1],
- cleanup=extra_out[2],
- overwrite=extra_out[3],
- )
+ kwargs = {"label_img": extra_out[0], "gen_gif": extra_out[1]}
+ if supplied_len > 2:
+ kwargs["cleanup"] = extra_out[2]
+ if supplied_len > 3:
+ kwargs["overwrite"] = extra_out[3]
+ video.extract_generic_so_sightings(sightings, project, **kwargs)
elif frame_extract[0] and frame_extract[1]:
print("extracting frames...")
video.extract_frames_between(frame_extract[0], frame_extract[1])
From 01747ee91f9c7357a8ba2deb7c05af3746f5144d Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 22:15:10 -0700
Subject: [PATCH 33/46] Fix pandas Series indexing warning
---
src/ssoss/process_road_objects.py | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index 680e3eb..65ac8f2 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -684,15 +684,15 @@ def get_speed_at_timestamp(self, ts):
# boundary conditions
# first point check
- if ts < point_list.loc[0][0].get_timestamp():
+ if ts < point_list.iloc[0, 0].get_timestamp():
return None
# last point check
- if ts > point_list.loc[last_point][0].get_timestamp():
+ if ts > point_list.iloc[last_point, 0].get_timestamp():
return None
for i in range(len(point_list)-1):
- if point_list.loc[i][0].get_timestamp() <= ts <= point_list.loc[i + 1][0].get_timestamp():
- speed = self.avg_speed(point_list.loc[i][0].get_speed(), point_list.loc[i+1][0].get_speed())
+ if point_list.iloc[i, 0].get_timestamp() <= ts <= point_list.iloc[i + 1, 0].get_timestamp():
+ speed = self.avg_speed(point_list.iloc[i, 0].get_speed(), point_list.iloc[i+1, 0].get_speed())
break
return speed
@@ -718,15 +718,15 @@ def get_location_at_timestamp(self, ts):
last_idx = len(points) - 1
# Boundary checks
- first_ts = points.loc[0][0].get_timestamp()
- last_ts = points.loc[last_idx][0].get_timestamp()
+ first_ts = points.iloc[0, 0].get_timestamp()
+ last_ts = points.iloc[last_idx, 0].get_timestamp()
if ts < first_ts or ts > last_ts:
return None
# Locate the two surrounding points
for i in range(last_idx):
- p0 = points.loc[i][0]
- p1 = points.loc[i + 1][0]
+ p0 = points.iloc[i, 0]
+ p1 = points.iloc[i + 1, 0]
t0 = p0.get_timestamp()
t1 = p1.get_timestamp()
if t0 <= ts <= t1:
From fa0e6e49081a1ab14684134fb78abc8e26adf00c Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 22:19:56 -0700
Subject: [PATCH 34/46] Prevent duplicate entries in sync log
---
README.md | 2 +-
src/ssoss/process_video.py | 13 ++++++++++---
tests/test_process_video.py | 8 ++++++++
3 files changed, 19 insertions(+), 4 deletions(-)
diff --git a/README.md b/README.md
index e54a366..24be504 100644
--- a/README.md
+++ b/README.md
@@ -116,7 +116,7 @@ where ### is the frame number of the image.
Use the frame number and the GPX recorded time to line up the best point to synchronize the video using the Sync method.
##### Sync.txt Logger
-Automatically saves frame number and timestamp to sync.txt file in the ./out/ directory so a log of when a video file was synchronized is saved.
+Automatically saves frame number and timestamp to sync.txt file in the ./out/ directory so a log of when a video file was synchronized is saved. Duplicate lines are ignored to prevent redundant entries.
### Sources of Error
While SSOSS does provide approximate sight distance images, their are various sources of error that should be try to be minimized. Here are the major sources of error and how they can be mitigated.
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 6a25c4d..5468275 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -71,14 +71,21 @@ def sync(self, frame: int, ts):
"""
finds start time of video based on frame and timestamp
appends frame # and timestamp to sync.txt with video filename for reference
+ duplicate entries are ignored
"""
sync_txt_folder = Path(self.video_dir, "out")
# ensure the out directory exists before attempting to write
sync_txt_folder.mkdir(exist_ok=True, parents=True)
sync_file = sync_txt_folder / "sync.txt"
- # open in append mode so the file is created if it doesn't exist
- with open(sync_file, "a") as f:
- f.write(f"{self.video_filepath.stem},{frame},{ts}\n")
+ line = f"{self.video_filepath.stem},{frame},{ts}"
+ existing_lines = set()
+ if sync_file.exists():
+ with open(sync_file, "r") as f:
+ existing_lines = {l.strip() for l in f}
+ if line not in existing_lines:
+ # open in append mode so the file is created if it doesn't exist
+ with open(sync_file, "a") as f:
+ f.write(line + "\n")
elapsed_time = frame / self.fps
if type(ts) is float:
diff --git a/tests/test_process_video.py b/tests/test_process_video.py
index 345a5b3..b1f9d7f 100644
--- a/tests/test_process_video.py
+++ b/tests/test_process_video.py
@@ -58,6 +58,14 @@ def test_sync_sets_start_time_and_logs(self):
line = f.read().strip()
self.assertEqual(line, f"{self.pv.video_filepath.stem},10,110.0")
+ def test_sync_does_not_duplicate_lines(self):
+ self.pv.sync(10, 110.0)
+ self.pv.sync(10, 110.0)
+ sync_file = pathlib.Path(self.pv.video_dir, "out", "sync.txt")
+ with open(sync_file) as f:
+ lines = [l.strip() for l in f]
+ self.assertEqual(lines, [f"{self.pv.video_filepath.stem},10,110.0"])
+
def _check_gps(self, image_path):
exif = piexif.load(str(image_path))
gps = exif.get("GPS", {})
From c6fa3f6a640316acbc2b56565053e92c77b15e04 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Wed, 11 Jun 2025 23:09:03 -0700
Subject: [PATCH 35/46] Add signal layer builder CLI
---
README.md | 10 ++
dev-requirements.in | 3 +
dev-requirements.txt | 6 ++
pyproject.toml | 3 +-
requirements.in | 3 +
requirements.txt | 3 +
src/ssoss/cli.py | 20 ++++
src/ssoss/signal_layer.py | 205 ++++++++++++++++++++++++++++++++++++++
8 files changed, 252 insertions(+), 1 deletion(-)
create mode 100644 src/ssoss/cli.py
create mode 100644 src/ssoss/signal_layer.py
diff --git a/README.md b/README.md
index 24be504..c543c5d 100644
--- a/README.md
+++ b/README.md
@@ -140,6 +140,16 @@ Use --gif-overwrite to replace an existing GIF and --no-gif-cleanup to keep the
Saves .gif file in ./out/[video filename]/gif/
+### Signal Visibility Layer
+Compile field photos into a map layer:
+```bash
+ssoss build-signal-layer \
+ --blocked-folder /data/photos/blocked_signals \
+ --clear-folder /data/photos/clear_signals \
+ --output-dir /data/outputs
+```
+Load `signal_visibility.gpkg` in QGIS using *Layer → Add Layer → Add Vector Layer*.
+
### Label Image
Add a label to the bottom of the image by including the --label flag in the command line.
diff --git a/dev-requirements.in b/dev-requirements.in
index 77088d9..d895d1d 100644
--- a/dev-requirements.in
+++ b/dev-requirements.in
@@ -12,3 +12,6 @@ piexif
python-dateutil
timezonefinder
icecream
+click
+geopandas
+folium
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 3ab1969..35a4e54 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -54,3 +54,9 @@ tzdata==2023.3
# via pandas
timezonefinder==6.5.9
# via -r dev-requirements.in
+click==8.2.1
+ # via -r dev-requirements.in
+geopandas==1.1.0
+ # via -r dev-requirements.in
+folium==0.19.7
+ # via -r dev-requirements.in
diff --git a/pyproject.toml b/pyproject.toml
index da9d214..9bdfbc9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,4 +27,5 @@ files = ["requirements.txt"]
"Bug Tracker" = "https://github.com/redmond2742/ssoss/issues"
[project.scripts]
-ssoss = "ssoss.ssoss_cli:main"
+ssoss = "ssoss.cli:cli"
+
diff --git a/requirements.in b/requirements.in
index 78584d1..83c1ab2 100644
--- a/requirements.in
+++ b/requirements.in
@@ -11,4 +11,7 @@ pillow
piexif
python-dateutil
timezonefinder
+click
+geopandas
+folium
diff --git a/requirements.txt b/requirements.txt
index d2c993f..42dc5c1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -43,4 +43,7 @@ tqdm==4.66.1
tzdata==2023.3
# via pandas
timezonefinder==6.5.9
+click==8.2.1
+geopandas==1.1.0
+folium==0.19.7
# via -r requirements.in
diff --git a/src/ssoss/cli.py b/src/ssoss/cli.py
new file mode 100644
index 0000000..de8f703
--- /dev/null
+++ b/src/ssoss/cli.py
@@ -0,0 +1,20 @@
+import sys
+import click
+
+from . import ssoss_cli
+from .signal_layer import build_signal_layer
+
+
+@click.group(invoke_without_command=True, context_settings={"help_option_names": ["-h", "--help"]})
+@click.pass_context
+def cli(ctx):
+ """SSOSS command line interface."""
+ if ctx.invoked_subcommand is None:
+ ssoss_cli.main()
+
+
+cli.add_command(build_signal_layer)
+
+if __name__ == "__main__":
+ cli()
+
diff --git a/src/ssoss/signal_layer.py b/src/ssoss/signal_layer.py
new file mode 100644
index 0000000..e3a30b5
--- /dev/null
+++ b/src/ssoss/signal_layer.py
@@ -0,0 +1,205 @@
+import base64
+import io
+import sys
+from datetime import datetime
+from pathlib import Path
+
+import click
+import gpxpy
+import geopandas as gpd
+import pandas as pd
+from PIL import Image, ExifTags
+from shapely.geometry import Point
+import folium
+
+
+# map exif tag numbers to names for convenience
+_EXIF_TAGS = {v: k for k, v in ExifTags.TAGS.items()}
+_GPS_TAGS = ExifTags.GPSTAGS
+
+
+def _dms_to_deg(value, ref):
+ if not value:
+ return None
+ deg = value[0][0] / value[0][1]
+ min_ = value[1][0] / value[1][1]
+ sec = value[2][0] / value[2][1]
+ sign = -1 if ref in ["S", "W"] else 1
+ return sign * (deg + min_ / 60 + sec / 3600)
+
+
+def _extract_exif(path: Path):
+ """Return (lat, lon, heading, dt) from image exif or None."""
+ try:
+ with Image.open(path) as img:
+ exif = img._getexif() or {}
+ except Exception:
+ return None
+
+ gps = exif.get(_EXIF_TAGS.get("GPSInfo"))
+ if not gps:
+ return None
+ gps_data = { _GPS_TAGS.get(k): v for k, v in gps.items() if k in _GPS_TAGS }
+ lat = _dms_to_deg(gps_data.get("GPSLatitude"), gps_data.get("GPSLatitudeRef"))
+ lon = _dms_to_deg(gps_data.get("GPSLongitude"), gps_data.get("GPSLongitudeRef"))
+ if lat is None or lon is None:
+ return None
+ heading = gps_data.get("GPSImgDirection") or gps_data.get("GPSDestBearing")
+ if isinstance(heading, tuple):
+ heading = heading[0] / heading[1]
+ dt_str = exif.get(_EXIF_TAGS.get("DateTimeOriginal")) or exif.get(_EXIF_TAGS.get("DateTime"))
+ dt = None
+ if dt_str:
+ try:
+ dt = datetime.strptime(dt_str, "%Y:%m:%d %H:%M:%S")
+ except ValueError:
+ pass
+ return lat, lon, heading, dt
+
+
+def _extract_gpx(path: Path):
+ gpx_path = path.with_suffix(".gpx")
+ if not gpx_path.exists():
+ return None
+ try:
+ with gpx_path.open() as f:
+ gpx = gpxpy.parse(f)
+ except Exception:
+ return None
+ point = None
+ if gpx.waypoints:
+ point = gpx.waypoints[0]
+ elif gpx.tracks:
+ point = gpx.tracks[0].segments[0].points[0]
+ if not point:
+ return None
+ return (
+ point.latitude,
+ point.longitude,
+ getattr(point, "course", None),
+ point.time.replace(tzinfo=None) if point.time else None,
+ )
+
+
+def _load_photo_map(csv_path: Path):
+ if not csv_path:
+ return {}
+ try:
+ df = pd.read_csv(csv_path)
+ except Exception:
+ return {}
+ df = df.set_index("filename")
+ return df.to_dict("index")
+
+
+def _approach_from_heading(heading):
+ if heading is None:
+ return None
+ idx = int(((heading % 360) + 45) // 90) % 4
+ return ["NB", "EB", "SB", "WB"][idx]
+
+
+def _thumb_base64(path: Path, max_size=(200, 200)) -> str:
+ with Image.open(path) as im:
+ im.thumbnail(max_size)
+ buf = io.BytesIO()
+ im.save(buf, format="JPEG", optimize=True, quality=80)
+ b64 = base64.b64encode(buf.getvalue()).decode("utf-8")
+ return f"data:image/jpeg;base64,{b64}"
+
+
+@click.command("build-signal-layer")
+@click.option("--blocked-folder", type=click.Path(exists=True, file_okay=False), required=True)
+@click.option("--clear-folder", type=click.Path(exists=True, file_okay=False), required=True)
+@click.option("--output-dir", type=click.Path(file_okay=False), required=True)
+@click.option("--photos-csv", type=click.Path(exists=True, dir_okay=False), help="Optional CSV with photo metadata")
+def build_signal_layer(blocked_folder, clear_folder, output_dir, photos_csv):
+ """Build a geospatial layer of signal photo locations."""
+ blocked = Path(blocked_folder)
+ clear = Path(clear_folder)
+ out_dir = Path(output_dir)
+ out_dir.mkdir(parents=True, exist_ok=True)
+
+ mapping = _load_photo_map(Path(photos_csv) if photos_csv else None)
+
+ files = [
+ *(p for p in blocked.rglob("*") if p.suffix.lower() in {".jpg", ".jpeg", ".png"}),
+ *(p for p in clear.rglob("*") if p.suffix.lower() in {".jpg", ".jpeg", ".png"}),
+ ]
+
+ records = []
+ for path in files:
+ info = _extract_exif(path)
+ if not info:
+ info = _extract_gpx(path)
+ if not info and mapping:
+ meta = mapping.get(path.name)
+ if meta:
+ info = (
+ meta.get("lat"),
+ meta.get("lon"),
+ meta.get("heading"),
+ pd.to_datetime(meta.get("capture_dt")) if meta.get("capture_dt") else None,
+ )
+ intersection_id = meta.get("intersection_id")
+ else:
+ intersection_id = None
+ else:
+ intersection_id = None
+ if not info or info[0] is None or info[1] is None:
+ click.echo(f"Skipping {path}: no location", err=True)
+ continue
+ lat, lon, heading, dt = info
+ visibility = "blocked" if blocked in path.parents else "clear"
+ approach = _approach_from_heading(heading)
+ records.append(
+ {
+ "photo_path": str(path),
+ "visibility": visibility,
+ "intersection_id": intersection_id,
+ "approach_leg": approach,
+ "heading_deg": heading,
+ "capture_dt": dt,
+ "geometry": Point(lon, lat),
+ "thumbnail": _thumb_base64(path),
+ }
+ )
+
+ if not records:
+ click.echo("No photos found", err=True)
+ return
+
+ gdf = gpd.GeoDataFrame(records, geometry="geometry", crs="EPSG:4326")
+
+ gpkg_path = out_dir / "signal_visibility.gpkg"
+ gdf.to_file(gpkg_path, layer="signals", driver="GPKG")
+ gdf.to_file(out_dir / "signal_visibility.geojson", driver="GeoJSON")
+
+ # folium map
+ center = [gdf.geometry.y.mean(), gdf.geometry.x.mean()]
+ fmap = folium.Map(location=center, zoom_start=18)
+ for _, row in gdf.iterrows():
+ popup = folium.Popup(f" {Path(row['photo_path']).name} ID: {row['intersection_id']}", max_width=200)
+ if row["visibility"] == "clear":
+ folium.CircleMarker(
+ location=[row.geometry.y, row.geometry.x],
+ radius=6,
+ color="green",
+ fill=True,
+ fill_opacity=0.9,
+ popup=popup,
+ ).add_to(fmap)
+ else:
+ folium.Marker(
+ location=[row.geometry.y, row.geometry.x],
+ icon=folium.Icon(color="red", icon="remove", prefix="fa"),
+ popup=popup,
+ ).add_to(fmap)
+
+ fmap.save(str(out_dir / "signal_visibility.html"))
+ click.echo(f"Saved outputs to {out_dir}")
+
+
+if __name__ == "__main__":
+ build_signal_layer()
+
From ae008a2e2d4c20274ac797869f1acba69ad90a3b Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Thu, 12 Jun 2025 21:28:02 -0700
Subject: [PATCH 36/46] fix cli imports and update docs
---
README.md | 18 +++++++++---------
src/ssoss/ssoss_cli.py | 4 ++--
2 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/README.md b/README.md
index c543c5d..6581d25 100644
--- a/README.md
+++ b/README.md
@@ -85,16 +85,16 @@ Collect data simultaneously:
### C. Data Processing: Argparse Command Line
```Shell
-(ssoss_virtual_env) python ssoss_cli.py --help
+(ssoss_virtual_env) ssoss --help
```
#### Basic Usage
```Shell
-(ssoss_virtual_env) python ssoss_cli.py --static_objects signals.csv
- --gpx_file drive.gpx
- --video_file vid.mov
- --sync_frame 456
- --sync_timestamp 2022-10-24T14:21:54.32Z
+(ssoss_virtual_env) ssoss --static_objects signals.csv \
+ --gpx_file drive.gpx \
+ --video_file vid.mov \
+ --sync_frame 456 \
+ --sync_timestamp 2022-10-24T14:21:54.32Z
```
#### Sync GPX & Video Process
@@ -104,9 +104,9 @@ video frames between two time values. When looking at the GPX points, the approx
and all the frames can be extracted. This method is:
```Shell
- (ssoss_virtual_env) python ssoss_cli.py -video_file vid.mov
- --frame_extract_start 4
- --frame_extract_end 6
+ (ssoss_virtual_env) ssoss -video_file vid.mov \
+ --frame_extract_start 4 \
+ --frame_extract_end 6
```
Check the printed logs to see the saved output location. Default is:
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index f648436..50964a3 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -1,6 +1,6 @@
import argparse
-import process_road_objects
-import process_video
+from . import process_road_objects
+from . import process_video
def args_static_obj_gpx_video(
From 0bfd56c8792f03f5d357a9b3a999b51fc4f5634f Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Thu, 12 Jun 2025 22:08:06 -0700
Subject: [PATCH 37/46] Bump version to 1.1
---
pyproject.toml | 2 +-
src/ssoss/__init__.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 9bdfbc9..baedf24 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "ssoss"
-version = "1.0"
+version = "1.1"
authors = [
{ name="Matt Redmond", email="mr2742@gmail.com" },
]
diff --git a/src/ssoss/__init__.py b/src/ssoss/__init__.py
index 8c71a11..e04de60 100644
--- a/src/ssoss/__init__.py
+++ b/src/ssoss/__init__.py
@@ -16,5 +16,5 @@
__version__ = importlib.metadata.version("ssoss")
except importlib.metadata.PackageNotFoundError:
# Package metadata not found when running from source
- __version__ = "1.0"
+ __version__ = "1.1"
From b8f7bd2dfa46b7386fe9618c76fc207fb9449f14 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 13:33:29 -0700
Subject: [PATCH 38/46] Add autosync option
---
README.md | 3 +++
src/ssoss/ssoss_cli.py | 55 ++++++++++++++++++++++++++++++++++++++++-
tests/test_ssoss_cli.py | 10 ++++++++
3 files changed, 67 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 6581d25..34c96c8 100644
--- a/README.md
+++ b/README.md
@@ -95,6 +95,9 @@ Collect data simultaneously:
--video_file vid.mov \
--sync_frame 456 \
--sync_timestamp 2022-10-24T14:21:54.32Z
+
+(ssoss_virtual_env) ssoss --video_file 09-15-2023--14-12-24.123-UTC.mov \
+ --autosync
```
#### Sync GPX & Video Process
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index 50964a3..90a39d8 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -1,8 +1,48 @@
import argparse
+import re
+from datetime import datetime
+from pathlib import Path
+from zoneinfo import ZoneInfo
from . import process_road_objects
from . import process_video
+def _timestamp_from_filename(path: str) -> str:
+ """Extract ISO 8601 timestamp from ``path``.
+
+ The filename should contain a timestamp formatted as
+ ``MM-DD-YYYY--HH-MM-SS.sss-ZZZ`` where ``ZZZ`` is a timezone
+ abbreviation such as ``UTC`` or ``PDT``.
+ """
+
+ base = Path(path).stem
+ m = re.search(
+ r"(?P<ts>\d{2}-\d{2}-\d{4}--\d{2}-\d{2}-\d{2}\.\d+)-(?P<zone>[A-Za-z]+)$",
+ base,
+ )
+ if not m:
+ raise ValueError("No timestamp found in video filename")
+
+ ts_str = m.group("ts")
+ zone = m.group("zone")
+
+ dt = datetime.strptime(ts_str, "%m-%d-%Y--%H-%M-%S.%f")
+ zone_map = {
+ "UTC": "UTC",
+ "PST": "America/Los_Angeles",
+ "PDT": "America/Los_Angeles",
+ "MST": "America/Denver",
+ "MDT": "America/Denver",
+ "CST": "America/Chicago",
+ "CDT": "America/Chicago",
+ "EST": "America/New_York",
+ "EDT": "America/New_York",
+ }
+ tz_name = zone_map.get(zone.upper(), "UTC")
+ dt = dt.replace(tzinfo=ZoneInfo(tz_name))
+ return dt.isoformat()
+
+
def args_static_obj_gpx_video(
generic_so_file="",
gpx_file="",
@@ -139,6 +179,11 @@ def main():
help="2. Sync Timestamp ('2022-10-24T14:21:54.988Z') for video. Sync with frame number also",
type=str,
)
+ video_sync_group.add_argument(
+ "--autosync",
+ action="store_true",
+ help="Sync using timestamp embedded in video filename",
+ )
video_sync_group.add_argument(
"--label",
@@ -175,7 +220,15 @@ def main():
sync_input = ("", "")
frames = ("", "")
- if args.sync_frame and args.sync_timestamp:
+ if args.autosync:
+ if not args.video_file:
+ parser.error("--autosync requires --video_file")
+ try:
+ ts = _timestamp_from_filename(args.video_file.name)
+ sync_input = (1, ts)
+ except ValueError as e:
+ parser.error(str(e))
+ elif args.sync_frame and args.sync_timestamp:
sync_input = (args.sync_frame, args.sync_timestamp)
if args.frame_extract_start and args.frame_extract_end:
frames = (args.frame_extract_start[0], args.frame_extract_end[0])
diff --git a/tests/test_ssoss_cli.py b/tests/test_ssoss_cli.py
index a4f6efa..34e5e4c 100644
--- a/tests/test_ssoss_cli.py
+++ b/tests/test_ssoss_cli.py
@@ -103,3 +103,13 @@ def test_dispatch_extract_frames(monkeypatch, tmp_path):
pv_instance.extract_frames_between.assert_called_once_with(1, 2)
+
+def test_autosync_uses_filename(run_cli, tmp_path):
+ vid = tmp_path / "09-15-2023--14-12-24.123-UTC.mov"
+ vid.write_text("data")
+
+ result = run_cli(["--video_file", str(vid), "--autosync"])
+
+ assert result["vid_sync"][0] == 1
+ assert result["vid_sync"][1].startswith("2023-09-15T14:12:24.123000")
+
From c0b3d417b816947166404a0cdbffb9d5640723a8 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 14:02:52 -0700
Subject: [PATCH 39/46] Add geodesic-based track interpolation
---
src/ssoss/__init__.py | 1 +
src/ssoss/interpolation.py | 76 +++++++++++++++++++++++++++++++++++++
tests/test_interpolation.py | 74 ++++++++++++++++++++++++++++++++++++
3 files changed, 151 insertions(+)
create mode 100644 src/ssoss/interpolation.py
create mode 100644 tests/test_interpolation.py
diff --git a/src/ssoss/__init__.py b/src/ssoss/__init__.py
index e04de60..961c55a 100644
--- a/src/ssoss/__init__.py
+++ b/src/ssoss/__init__.py
@@ -3,6 +3,7 @@
from ssoss.process_road_objects import *
from ssoss.process_video import *
from ssoss.static_road_object import *
+from ssoss.interpolation import position_at_time, time_at_distance
import importlib.metadata
try:
from icecream import install
diff --git a/src/ssoss/interpolation.py b/src/ssoss/interpolation.py
new file mode 100644
index 0000000..9a3dbd6
--- /dev/null
+++ b/src/ssoss/interpolation.py
@@ -0,0 +1,76 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime, timedelta
+from geographiclib.geodesic import Geodesic
+
+
+def _prep_track(track_df: pd.DataFrame):
+ if "t" in track_df.columns:
+ t = pd.to_datetime(track_df["t"], utc=True)
+ elif "time" in track_df.columns:
+ t = pd.to_datetime(track_df["time"], utc=True)
+ elif "timestamp" in track_df.columns:
+ t = pd.to_datetime(track_df["timestamp"], unit="s", utc=True)
+ else:
+ raise ValueError("track_df must contain a time column (t/time/timestamp)")
+
+ if "lat" in track_df.columns and "lon" in track_df.columns:
+ lat = track_df["lat"].astype(float)
+ lon = track_df["lon"].astype(float)
+ elif "latitude" in track_df.columns and "longitude" in track_df.columns:
+ lat = track_df["latitude"].astype(float)
+ lon = track_df["longitude"].astype(float)
+ else:
+ raise ValueError("track_df must contain lat/lon columns")
+
+ df = pd.DataFrame({"t": t, "lat": lat, "lon": lon})
+ df.sort_values("t", inplace=True)
+ df.reset_index(drop=True, inplace=True)
+
+ df["lat"] = df["lat"].rolling(window=5, center=True, min_periods=1).mean()
+ df["lon"] = df["lon"].rolling(window=5, center=True, min_periods=1).mean()
+
+ dist = [0.0]
+ for i in range(1, len(df)):
+ p0 = df.iloc[i - 1]
+ p1 = df.iloc[i]
+ d = Geodesic.WGS84.Inverse(p0.lat, p0.lon, p1.lat, p1.lon)["s12"]
+ dist.append(d)
+ df["distance_m"] = np.cumsum(dist)
+ df["time_s"] = (df["t"] - df["t"].iloc[0]).dt.total_seconds()
+ return df, df["t"].iloc[0]
+
+
+def position_at_time(track_df: pd.DataFrame, when: datetime) -> tuple[float, float]:
+ df, t0 = _prep_track(track_df)
+ ts = pd.to_datetime(when, utc=True)
+ t_sec = (ts - t0).total_seconds()
+ if t_sec < df["time_s"].iloc[0] or t_sec > df["time_s"].iloc[-1]:
+ raise ValueError("time outside track range")
+
+ idx = np.searchsorted(df["time_s"], t_sec) - 1
+ idx = np.clip(idx, 0, len(df) - 2)
+ t0s = df["time_s"].iloc[idx]
+ t1s = df["time_s"].iloc[idx + 1]
+ ratio = (t_sec - t0s) / (t1s - t0s)
+ p0 = df.iloc[idx]
+ p1 = df.iloc[idx + 1]
+ inv = Geodesic.WGS84.Inverse(p0.lat, p0.lon, p1.lat, p1.lon)
+ pt = Geodesic.WGS84.Direct(p0.lat, p0.lon, inv["azi1"], inv["s12"] * ratio)
+ return pt["lat2"], pt["lon2"]
+
+
+def time_at_distance(track_df: pd.DataFrame, distance_m: float) -> datetime:
+ df, t0 = _prep_track(track_df)
+ if distance_m < 0 or distance_m > df["distance_m"].iloc[-1]:
+ raise ValueError("distance outside track range")
+
+ idx = np.searchsorted(df["distance_m"], distance_m) - 1
+ idx = np.clip(idx, 0, len(df) - 2)
+ d0 = df["distance_m"].iloc[idx]
+ d1 = df["distance_m"].iloc[idx + 1]
+ ratio = (distance_m - d0) / (d1 - d0)
+ t0s = df["time_s"].iloc[idx]
+ t1s = df["time_s"].iloc[idx + 1]
+ t_sec = t0s + ratio * (t1s - t0s)
+ return t0 + timedelta(seconds=float(t_sec))
diff --git a/tests/test_interpolation.py b/tests/test_interpolation.py
new file mode 100644
index 0000000..3d7a3b1
--- /dev/null
+++ b/tests/test_interpolation.py
@@ -0,0 +1,74 @@
+import pathlib
+import sys
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1] / "src"))
+import unittest
+from datetime import datetime, timedelta, timezone
+import numpy as np
+import pandas as pd
+from geopy.distance import geodesic
+
+from ssoss.interpolation import position_at_time, time_at_distance
+
+
+class TestInterpolationAccuracy(unittest.TestCase):
+ def setUp(self):
+ rng = np.random.default_rng(0)
+ self.base = datetime(2025, 1, 1, tzinfo=timezone.utc)
+ n = 121
+ times = [self.base + timedelta(seconds=i) for i in range(n)]
+
+ # ground truth path in meters
+ t_arr = np.arange(n)
+ x_true = np.linspace(0, 200, n) + 20 * np.sin(t_arr * 0.3)
+ y_true = 30 * np.sin(t_arr * 0.15)
+
+ lat0 = 37.0
+ lon0 = -122.0
+ rad = np.pi / 180
+ cos_lat0 = np.cos(lat0 * rad)
+ lat_true = lat0 + (y_true / 6378137.0) * 180 / np.pi
+ lon_true = lon0 + (x_true / (6378137.0 * cos_lat0)) * 180 / np.pi
+
+ # add noise
+ x_noisy = x_true + rng.normal(0, 5, size=n)
+ y_noisy = y_true + rng.normal(0, 5, size=n)
+ lat_noisy = lat0 + (y_noisy / 6378137.0) * 180 / np.pi
+ lon_noisy = lon0 + (x_noisy / (6378137.0 * cos_lat0)) * 180 / np.pi
+
+ self.track = pd.DataFrame({"t": times, "lat": lat_noisy, "lon": lon_noisy})
+ self.truth = pd.DataFrame({"t": times, "lat": lat_true, "lon": lon_true})
+
+ # ground truth cumulative distance
+ x_t = (lon_true - lon0) * rad * 6378137.0 * cos_lat0
+ y_t = (lat_true - lat0) * rad * 6378137.0
+ dist = np.hypot(np.diff(x_t), np.diff(y_t))
+ self.dist_true = np.insert(np.cumsum(dist), 0, 0)
+ self.time_s = t_arr
+
+ def test_position_accuracy(self):
+ errs = []
+ for frac in np.linspace(0, 1, 200, endpoint=False):
+ t_sec = frac * self.time_s[-1]
+ when = self.base + timedelta(seconds=float(t_sec))
+ lat, lon = position_at_time(self.track, when)
+ lat_gt = np.interp(t_sec, self.time_s, self.truth["lat"])
+ lon_gt = np.interp(t_sec, self.time_s, self.truth["lon"])
+ d = geodesic((lat_gt, lon_gt), (lat, lon)).meters
+ errs.append(d)
+ self.assertLess(np.percentile(errs, 95), 6.0)
+
+ def test_time_accuracy(self):
+ errs = []
+ max_d = self.dist_true[-1]
+ for frac in np.linspace(0, 1, 200, endpoint=False):
+ d = frac * max_d
+ ts = time_at_distance(self.track, d)
+ t_sec_gt = np.interp(d, self.dist_true, self.time_s)
+ ts_gt = self.base + timedelta(seconds=float(t_sec_gt))
+ diff = abs((ts - ts_gt).total_seconds())
+ errs.append(diff)
+ self.assertLess(np.percentile(errs, 95), 3)
+
+
+if __name__ == "__main__":
+ unittest.main()
From eb300a5dfcf0d432b41e3ffc6d5d84d9199f68af Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 14:24:56 -0700
Subject: [PATCH 40/46] Handle direct execution of ssoss_cli
---
src/ssoss/ssoss_cli.py | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index 90a39d8..f355217 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -3,8 +3,16 @@
from datetime import datetime
from pathlib import Path
from zoneinfo import ZoneInfo
-from . import process_road_objects
-from . import process_video
+# When executed as part of the ``ssoss`` package, ``__package__`` will be set
+# and the relative imports below work as expected. Running the module as a
+# stand-alone script (e.g. ``python ssoss_cli.py``) leaves ``__package__`` empty
+# which causes relative imports to fail. Handle both execution modes here.
+if __package__ in {None, ""}:
+ import process_road_objects
+ import process_video
+else:
+ from . import process_road_objects
+ from . import process_video
def _timestamp_from_filename(path: str) -> str:
From 3c0b23cebf9201c8a0a45d9dad65aad88996b7e7 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 14:26:01 -0700
Subject: [PATCH 41/46] Handle help option in CLI
---
src/ssoss/cli.py | 19 ++++++++++++++++---
src/ssoss/ssoss_cli.py | 4 ++--
2 files changed, 18 insertions(+), 5 deletions(-)
diff --git a/src/ssoss/cli.py b/src/ssoss/cli.py
index de8f703..8e31c56 100644
--- a/src/ssoss/cli.py
+++ b/src/ssoss/cli.py
@@ -5,12 +5,25 @@
from .signal_layer import build_signal_layer
-@click.group(invoke_without_command=True, context_settings={"help_option_names": ["-h", "--help"]})
+@click.group(invoke_without_command=True, add_help_option=False)
+@click.option("-h", "--help", "show_help", is_flag=True, is_eager=True,
+ help="Show this message and exit.")
@click.pass_context
-def cli(ctx):
+def cli(ctx, show_help):
"""SSOSS command line interface."""
if ctx.invoked_subcommand is None:
- ssoss_cli.main()
+ if show_help:
+ try:
+ ssoss_cli.main(["--help"])
+ except SystemExit:
+ pass
+ if cli.commands:
+ click.echo("\nCommands:")
+ for name, cmd in cli.commands.items():
+ click.echo(f" {name:<20} {cmd.get_short_help_str()}")
+ ctx.exit()
+ else:
+ ssoss_cli.main()
cli.add_command(build_signal_layer)
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index 90a39d8..1e92409 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -102,7 +102,7 @@ def args_static_obj_gpx_video(
video.extract_frames_between(frame_extract[0], frame_extract[1])
-def main():
+def main(argv=None):
parser = argparse.ArgumentParser(
prog="Safe Sightings of Signs and Signals",
description="Software to help verify visible traffic signs and signals using GPX and Video files",
@@ -216,7 +216,7 @@ def main():
)
# process args depending on filled in values
- args = parser.parse_args()
+ args = parser.parse_args(argv)
sync_input = ("", "")
frames = ("", "")
From 70f3f7d9c59982271a02df18522a4940b59abe13 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 14:46:38 -0700
Subject: [PATCH 42/46] Improve video sync summary
---
src/ssoss/process_video.py | 19 +++++++++++++++++++
1 file changed, 19 insertions(+)
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 5468275..cd68bff 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -37,6 +37,8 @@ def __init__(self, video_filestring: str):
self.duration = self.get_duration()
self.start_time = 0
self.capture = ""
+ self.sync_frame = None
+ self.sync_timestamp = None
self.vid_summary(vid_summary=True)
@@ -94,6 +96,8 @@ def sync(self, frame: int, ts):
t_temp = (dateutil.parser.isoparse(ts)) # isoparse parses ISO-8601 datetime string into datetime.datetime
start_time = t_temp.replace(tzinfo=timezone.utc).timestamp() - elapsed_time
self.set_start_utc(start_time)
+ self.sync_frame = frame
+ self.sync_timestamp = ts
self.vid_summary(vid_summary=False, sync=True)
return None
@@ -408,6 +412,10 @@ def get_filesize(self):
file_byte = os.path.getsize(self.video_filepath)
return self.sizeConvert(file_byte)
+ def get_filesize_bytes(self):
+ """Return the raw file size in bytes."""
+ return os.path.getsize(self.video_filepath)
+
def vid_summary(self, vid_summary, sync=False):
# display values
width = 70
@@ -420,6 +428,11 @@ def vid_summary(self, vid_summary, sync=False):
# get vcap property
vid_width = vid_file.get(cv2.CAP_PROP_FRAME_WIDTH) # float `width`
vid_height = vid_file.get(cv2.CAP_PROP_FRAME_HEIGHT) # float `height`
+ else:
+ vid_width = vid_height = 0
+ file_bytes = self.get_filesize_bytes()
+ data_rate = round(file_bytes / self.get_duration() / (1024 * 1024), 2)
+ avg_frame_size = round(file_bytes / self.frame_count / 1024, 2)
summary = f"""
{symbol * width}
@@ -431,6 +444,8 @@ def vid_summary(self, vid_summary, sync=False):
# Frames Per Second: {self.fps}
# Total Number of Frames: {self.frame_count:,}
# Total Duration: {self.hr_min_sec(self.get_duration())}
+ # Avg. Frame Size: {avg_frame_size} KB
+ # Data Rate: {data_rate} MB/sec
{symbol * width}
"""
@@ -438,8 +453,12 @@ def vid_summary(self, vid_summary, sync=False):
{symbol * width}
{" " * (int(width/2)-int(len(sync_title)/2))}{sync_title}
{symbol * width}
+ # Sync Frame: {self.sync_frame}
+ # Sync Timestamp: {self.sync_timestamp}
# Start Time: {datetime.fromtimestamp(self.start_time, tz=None)}
# End Time: {datetime.fromtimestamp(self.start_time + self.get_duration(), tz=None)}
+ # Avg. Frame Size: {avg_frame_size} KB
+ # Data Rate: {data_rate} MB/sec
{symbol * width}
"""
if vid_summary:
From 15288dae534f5af7b8de6269fc267be60fa83223 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 14:53:03 -0700
Subject: [PATCH 43/46] Improve video sync summary
---
src/ssoss/process_video.py | 11 ++++++++++-
src/ssoss/ssoss_cli.py | 6 ++++--
tests/test_ssoss_cli.py | 2 +-
3 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 5468275..f955d23 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -37,6 +37,7 @@ def __init__(self, video_filestring: str):
self.duration = self.get_duration()
self.start_time = 0
self.capture = ""
+ self.sync_source = "Not synced"
self.vid_summary(vid_summary=True)
@@ -67,11 +68,13 @@ def get_duration(self, seconds_output=True):
else:
return timedelta(seconds=self.duration)
- def sync(self, frame: int, ts):
+ def sync(self, frame: int, ts, autosync: bool = False):
"""
finds start time of video based on frame and timestamp
appends frame # and timestamp to sync.txt with video filename for reference
duplicate entries are ignored
+ ``autosync`` indicates the timestamp was derived from the
+ filename rather than provided explicitly
"""
sync_txt_folder = Path(self.video_dir, "out")
# ensure the out directory exists before attempting to write
@@ -93,6 +96,10 @@ def sync(self, frame: int, ts):
else:
t_temp = (dateutil.parser.isoparse(ts)) # isoparse parses ISO-8601 datetime string into datetime.datetime
start_time = t_temp.replace(tzinfo=timezone.utc).timestamp() - elapsed_time
+ if autosync:
+ self.sync_source = "Auto sync using filename timestamp"
+ else:
+ self.sync_source = f"Frame {frame} at {ts}"
self.set_start_utc(start_time)
self.vid_summary(vid_summary=False, sync=True)
return None
@@ -438,6 +445,8 @@ def vid_summary(self, vid_summary, sync=False):
{symbol * width}
{" " * (int(width/2)-int(len(sync_title)/2))}{sync_title}
{symbol * width}
+ # Video File: {self.video_filename}
+ # Sync Source: {self.sync_source}
# Start Time: {datetime.fromtimestamp(self.start_time, tz=None)}
# End Time: {datetime.fromtimestamp(self.start_time + self.get_duration(), tz=None)}
{symbol * width}
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index b076793..0ad7d3a 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -58,6 +58,7 @@ def args_static_obj_gpx_video(
vid_sync=("", ""),
frame_extract=("", ""),
extra_out=(True, False, True, False),
+ autosync=False,
):
sightings = ""
@@ -88,7 +89,7 @@ def args_static_obj_gpx_video(
if video_file:
video = process_video.ProcessVideo(video_file.name)
if vid_sync[0] and vid_sync[1]:
- video.sync(int(vid_sync[0]), vid_sync[1])
+ video.sync(int(vid_sync[0]), vid_sync[1], autosync=autosync)
if sightings and project.get_static_object_type() == "intersection":
print("extracting traffic signal sightings")
kwargs = {"label_img": extra_out[0], "gen_gif": extra_out[1]}
@@ -259,7 +260,8 @@ def main(argv=None):
video_file = args.video_file,
vid_sync = sync_input,
frame_extract = frames,
- extra_out = lb_gif_flags
+ extra_out = lb_gif_flags,
+ autosync = args.autosync
)
diff --git a/tests/test_ssoss_cli.py b/tests/test_ssoss_cli.py
index 34e5e4c..29ab1ca 100644
--- a/tests/test_ssoss_cli.py
+++ b/tests/test_ssoss_cli.py
@@ -79,7 +79,7 @@ def test_dispatch_sync_calls(monkeypatch, tmp_path):
extra_out=(True, False),
)
- pv_instance.sync.assert_called_once_with(1, "ts")
+ pv_instance.sync.assert_called_once_with(1, "ts", autosync=False)
pv_instance.extract_sightings.assert_called_once_with(
["sig"], pr_instance, label_img=True, gen_gif=False
)
From 7f02c84a2c1e19724757b6f870ec1c9baf69ebf4 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 14:57:20 -0700
Subject: [PATCH 44/46] Improve GPX summary with speed metrics
---
src/ssoss/process_road_objects.py | 67 ++++++++++++++++++-------------
1 file changed, 38 insertions(+), 29 deletions(-)
diff --git a/src/ssoss/process_road_objects.py b/src/ssoss/process_road_objects.py
index 65ac8f2..ef6b454 100644
--- a/src/ssoss/process_road_objects.py
+++ b/src/ssoss/process_road_objects.py
@@ -2,6 +2,8 @@
# coding: utf-8
import csv, math
+import textwrap
+import statistics
from datetime import datetime, timezone
from zoneinfo import ZoneInfo
from pathlib import Path
@@ -638,38 +640,45 @@ def gpx_summary(self):
tot_sec = round(self.get_end_timestamp() - self.get_start_timestamp(), 2)
tot_distance = gpx_df.iloc[last_index, 0].get_cumulative_distance()
- # display values
- width = int(70)
+ if self.sum_total_points > 0:
+ conv = gpx_df.iloc[0, 0].FTPStoMPH
+ spd_vals = [gpx_df.iloc[i, 0].get_speed() for i in range(self.sum_total_points)]
+ spd_mph = [s * conv for s in spd_vals]
+ avg_speed = round((tot_distance / tot_sec) * conv, 2) if tot_sec > 0 else 0.0
+ max_speed = round(max(spd_mph), 2)
+ min_speed = round(min(spd_mph), 2)
+ if self.sum_total_points > 1:
+ acc_vals = [gpx_df.iloc[i, 0].acceleration() for i in range(self.sum_total_points - 1)]
+ avg_acc = round(statistics.mean(acc_vals) * conv, 2)
+ else:
+ avg_acc = 0.0
+ else:
+ avg_speed = max_speed = min_speed = avg_acc = 0.0
+
+ width = 70
title = "GPX SUMMARY"
symbol = "-"
- summary = f"""
- {symbol * width}
- {" " * (int(width/2)-int(len(title)/2))}{title}
- {symbol * width}
- # GPX File:: {self.gpx_file}
- # Using GPX version: {self.gpx_ver}
- # Start time: {datetime.fromtimestamp(self.get_start_timestamp(), tz=None)}
- # End time: {datetime.fromtimestamp(self.get_end_timestamp(), tz=None)}
- # Total duration: {self.hr_min_sec(tot_sec)}
- # Total distance: {self.simplify_distance(tot_distance)}
- # Number of data points: {self.sum_total_points}
- # Avg. Time Gap between data points: {avg_time_gap} Seconds
-
- {symbol * width}
- """
- # TODO:
- # {symbol * width}
- # IF self.intersection_approaches > 0
- # Total intersection approaches: {self.intersection_approaches}
- # Avg. Time per approach: {tot_sec/self.intersection_approaches}
- # Avg. feet driven per approach: {tot_distance/self.intersection_approaches}
- # difference in GPX and Video file start times and lengths of times
- # Number of images captured:
- # Number of intersections captures: X/ Total intersections (xx.x%)
- # Number of approaches captured
- # Approaches captures for duration of GPX file and Video File -> (images/time) (productivity ratio)
- # -------------------------------------------------------------------------------
+ summary = textwrap.dedent(
+ f"""
+ {symbol * width}
+ {title.center(width)}
+ {symbol * width}
+ GPX File: {self.gpx_file}
+ Using GPX version: {self.gpx_ver}
+ Start time: {datetime.fromtimestamp(self.get_start_timestamp(), tz=None)}
+ End time: {datetime.fromtimestamp(self.get_end_timestamp(), tz=None)}
+ Total duration: {self.hr_min_sec(tot_sec)}
+ Total distance: {self.simplify_distance(tot_distance)}
+ Number of data points: {self.sum_total_points}
+ Avg. Time Gap between data points: {avg_time_gap} Seconds
+ Avg. Speed: {avg_speed} MPH
+ Max Speed: {max_speed} MPH
+ Min Speed: {min_speed} MPH
+ Avg. Acceleration: {avg_acc} MPH/s
+ {symbol * width}
+ """
+ )
print(summary)
From 1a5c2232ab45e430b9aa165fba823ed15e73b7ba Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 14:57:51 -0700
Subject: [PATCH 45/46] Add CLI summary and return lists
---
src/ssoss/process_video.py | 9 +++----
src/ssoss/ssoss_cli.py | 54 ++++++++++++++++++++++++++++++++++++--
2 files changed, 56 insertions(+), 7 deletions(-)
diff --git a/src/ssoss/process_video.py b/src/ssoss/process_video.py
index 5468275..be6799e 100644
--- a/src/ssoss/process_video.py
+++ b/src/ssoss/process_video.py
@@ -206,6 +206,8 @@ def extract_generic_so_sightings(
if gen_gif:
self.generate_gif(desc_timestamps, project, cleanup=cleanup, overwrite=overwrite)
+ return generic_so_desc
+
def extract_sightings(
self, desc_timestamps, project, label_img=True, gen_gif=False, cleanup=True, overwrite=False
):
@@ -236,11 +238,8 @@ def extract_sightings(
self.img_overlay_info_box(self.video_filename, project)
if gen_gif:
self.generate_gif(desc_timestamps, project, cleanup=cleanup, overwrite=overwrite)
- """
- if bbox:
- self.img_overlay_bbox(description_list,project)
-
- """
+
+ return intersection_desc
# TODO: convert to start_sec, start_min=0, end_sec, end_min=0, folder="")
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index b076793..413331f 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -51,6 +51,54 @@ def _timestamp_from_filename(path: str) -> str:
return dt.isoformat()
+def cli_summary(descriptions, project, video):
+ """Print a summary of extracted images and processing stats."""
+
+ width = 70
+ title = "CLI SUMMARY"
+ symbol = "="
+
+ num_images = len(descriptions)
+ gpx_dur = project.get_end_timestamp() - project.get_start_timestamp()
+ vid_dur = video.get_duration()
+
+ avg_gpx = gpx_dur / num_images if num_images else 0
+ avg_vid = vid_dur / num_images if num_images else 0
+
+ intersections = {}
+ for desc in descriptions:
+ prefix = desc.split("-", 1)[0]
+ parts = prefix.split(".")
+ if len(parts) >= 2:
+ int_id = int(parts[0])
+ bearing = int(parts[1])
+ intersections.setdefault(int_id, set()).add(bearing)
+
+ intersection_lines = []
+ for int_id in sorted(intersections):
+ count = len(intersections[int_id])
+ pct = count / 4 * 100
+ intersection_lines.append(
+ f"# Intersection {int_id}: {count}/4 approaches ({pct:.1f}%)"
+ )
+
+ multiplier = (18 * 60 / avg_vid) if avg_vid else 0
+
+ summary = f"""
+{symbol * width}
+{" " * (int(width/2)-int(len(title)/2))}{title}
+{symbol * width}
+# Number of Images: {num_images}
+# Number of Intersections: {len(intersections)}
+{chr(10).join(intersection_lines)}
+# Avg Time per Image (GPX): {project.hr_min_sec(avg_gpx)}
+# Avg Time per Image (Video): {project.hr_min_sec(avg_vid)}
+# SSOSS Multiplier: {multiplier:.1f}X compared to field check
+{symbol * width}
+"""
+ print(summary)
+
+
def args_static_obj_gpx_video(
generic_so_file="",
gpx_file="",
@@ -96,7 +144,8 @@ def args_static_obj_gpx_video(
kwargs["cleanup"] = extra_out[2]
if supplied_len > 3:
kwargs["overwrite"] = extra_out[3]
- video.extract_sightings(sightings, project, **kwargs)
+ desc_list = video.extract_sightings(sightings, project, **kwargs)
+ cli_summary(desc_list, project, video)
if sightings and project.get_static_object_type() == "generic static object":
print("extracting generic static object sightings")
kwargs = {"label_img": extra_out[0], "gen_gif": extra_out[1]}
@@ -104,7 +153,8 @@ def args_static_obj_gpx_video(
kwargs["cleanup"] = extra_out[2]
if supplied_len > 3:
kwargs["overwrite"] = extra_out[3]
- video.extract_generic_so_sightings(sightings, project, **kwargs)
+ desc_list = video.extract_generic_so_sightings(sightings, project, **kwargs)
+ cli_summary(desc_list, project, video)
elif frame_extract[0] and frame_extract[1]:
print("extracting frames...")
video.extract_frames_between(frame_extract[0], frame_extract[1])
From f1399291316d928f5998dcf4a53a0d8ea4521e93 Mon Sep 17 00:00:00 2001
From: Matt Redmond <10541289+redmond2742@users.noreply.github.com>
Date: Fri, 13 Jun 2025 15:22:15 -0700
Subject: [PATCH 46/46] Update CLI summary format
---
src/ssoss/ssoss_cli.py | 23 +++++++++++++----------
1 file changed, 13 insertions(+), 10 deletions(-)
diff --git a/src/ssoss/ssoss_cli.py b/src/ssoss/ssoss_cli.py
index e2e4c11..8df4f1d 100644
--- a/src/ssoss/ssoss_cli.py
+++ b/src/ssoss/ssoss_cli.py
@@ -55,7 +55,7 @@ def cli_summary(descriptions, project, video):
"""Print a summary of extracted images and processing stats."""
width = 70
- title = "CLI SUMMARY"
+ title = "SSOSS Summary Information"
symbol = "="
num_images = len(descriptions)
@@ -74,13 +74,17 @@ def cli_summary(descriptions, project, video):
bearing = int(parts[1])
intersections.setdefault(int_id, set()).add(bearing)
- intersection_lines = []
- for int_id in sorted(intersections):
- count = len(intersections[int_id])
- pct = count / 4 * 100
- intersection_lines.append(
- f"# Intersection {int_id}: {count}/4 approaches ({pct:.1f}%)"
- )
+ num_inters_found = len(intersections)
+ total_input_inters = (
+ len(project.intersection_listDF.index)
+ if getattr(project, "intersection_listDF", None) is not None
+ else 0
+ )
+ inters_pct = (
+ num_inters_found / total_input_inters * 100
+ if total_input_inters
+ else 0
+ )
multiplier = (18 * 60 / avg_vid) if avg_vid else 0
@@ -89,8 +93,7 @@ def cli_summary(descriptions, project, video):
{" " * (int(width/2)-int(len(title)/2))}{title}
{symbol * width}
# Number of Images: {num_images}
-# Number of Intersections: {len(intersections)}
-{chr(10).join(intersection_lines)}
+# Number of Intersections: {num_inters_found} ({inters_pct:.1f}%)
# Avg Time per Image (GPX): {project.hr_min_sec(avg_gpx)}
# Avg Time per Image (Video): {project.hr_min_sec(avg_vid)}
# SSOSS Multiplier: {multiplier:.1f}X compared to field check
| |