#!/usr/bin/env python3

"""
  Mod updater script

  This script packs up the coop mod files, writes them to
  /opt/faf/data/content/legacy-featured-mod-files/.../, and updates the database.

  Code is mostly self-explanatory - ha, fat chance; it reads best from bottom to top.
  To adapt this duct-tape script for new mission voice overs, just change the files
  array at the very bottom.

  Environment variables required:
    PATCH_VERSION
    DATABASE_HOST
    DATABASE_NAME
    DATABASE_USERNAME
    DATABASE_PASSWORD

  Optional environment variables:
    DRY_RUN
    GIT_REPO_URL
    GIT_REF
    GIT_WORKDIR
"""
import glob
import hashlib
import json
import os
import re
import shutil
import subprocess
import sys
import tempfile
import urllib.error
import urllib.request
import zipfile

import mysql.connector

FIXED_ZIP_TIMESTAMP = (1980, 1, 1, 0, 0, 0)  # year, month, day, hour, min, sec


def get_db_connection():
    """Establish and return a MySQL connection using environment variables."""
    host = os.getenv("DATABASE_HOST", "localhost")
    db = os.getenv("DATABASE_NAME", "faf")
    user = os.getenv("DATABASE_USERNAME", "root")
    password = os.getenv("DATABASE_PASSWORD", "banana")

    return mysql.connector.connect(
        host=host,
        user=user,
        password=password,
        database=db,
    )


def read_db(conn, mod):
    """
      Read latest versions and md5's from db
      Returns dict {fileId: {version, name, md5}}
    """
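    # Illustrative shape of the returned mapping (the values below are made up, not real records):
    #   {1: {"version": 99, "name": "init_coop.v99.lua", "md5": "0f343b0931126a20f133d67c2b018a3b"}}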
    query = f"""
        SELECT uf.fileId, uf.version, uf.name, uf.md5
        FROM (
            SELECT fileId, MAX(version) AS version
            FROM updates_{mod}_files
            GROUP BY fileId
        ) AS maxthings
        INNER JOIN updates_{mod}_files AS uf
        ON maxthings.fileId = uf.fileId AND maxthings.version = uf.version;
    """

    with conn.cursor() as cursor:
        cursor.execute(query)

        oldfiles = {}
        for (fileId, version, name, md5) in cursor.fetchall():
            oldfiles[int(fileId)] = {
                "version": version,
                "name": name,
                "md5": md5,
            }

        return oldfiles


def update_db(conn, mod, fileId, version, name, md5, dryrun):
    """
    Delete and reinsert a file record in updates_{mod}_files
    """
    delete_query = f"DELETE FROM updates_{mod}_files WHERE fileId = %s AND version = %s"
    insert_query = f"""
        INSERT INTO updates_{mod}_files
        VALUES (%s, %s, %s, %s, 0)
    """

    print(f"Updating DB for {name}: fileId {fileId}, version {version}")

    if not dryrun:
        try:
            with conn.cursor() as cursor:
                cursor.execute(delete_query, (fileId, version))
                cursor.execute(insert_query, (fileId, version, name, md5))
                conn.commit()
        except mysql.connector.Error as err:
            print(f"MySQL error while updating {name}: {err}")
            conn.rollback()
            sys.exit(1)
    else:
        print(f"Dryrun: would update the DB record for {name}")

def calc_md5(fname):
    """Return the hex MD5 digest of a file, read in 4 KiB chunks."""
    hash_md5 = hashlib.md5()
    with open(fname, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()

def zipdir(path, ziph):
    """Add a directory tree (or a single file) to an open ZipFile with deterministic metadata."""
    if not os.path.exists(path):
        print(f"Warning: {path} does not exist, skipping")
        return

    if os.path.isdir(path):
        for root, dirs, files in os.walk(path):
            files.sort()  # deterministic order
            dirs.sort()   # deterministic order
            for file in files:
                full_path = os.path.join(root, file)
                # relpath against the parent so the top-level folder name (e.g. "lua/")
                # is preserved inside the archive
                arcname = os.path.relpath(full_path, start=os.path.join(path, ".."))
                info = zipfile.ZipInfo(arcname, FIXED_ZIP_TIMESTAMP)
                with open(full_path, "rb") as f:
                    data = f.read()
                ziph.writestr(info, data, compress_type=zipfile.ZIP_DEFLATED)
    else:
        # single file outside a directory
        arcname = os.path.basename(path)
        info = zipfile.ZipInfo(arcname, FIXED_ZIP_TIMESTAMP)
        with open(path, "rb") as f:
            data = f.read()
        ziph.writestr(info, data, compress_type=zipfile.ZIP_DEFLATED)
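
# Because zipdir adds entries in sorted order with FIXED_ZIP_TIMESTAMP instead of the files'
# real mtimes, zipping the same inputs twice should produce byte-identical archives, so the
# MD5 comparison in create_file only changes when the packed content changes.
# A minimal sanity check (hypothetical paths):
#
#   with zipfile.ZipFile("/tmp/a.zip", "w", zipfile.ZIP_DEFLATED) as zf:
#       zipdir("lua", zf)
#   with zipfile.ZipFile("/tmp/b.zip", "w", zipfile.ZIP_DEFLATED) as zf:
#       zipdir("lua", zf)
#   assert calc_md5("/tmp/a.zip") == calc_md5("/tmp/b.zip")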


def create_file(conn, mod, fileId, version, name, source, target_dir, old_md5, dryrun):
    """Pack or copy files, compare MD5, and update the DB if the content changed."""
    target_dir = os.path.join(target_dir, f"updates_{mod}_files")
    os.makedirs(target_dir, exist_ok=True)

    name = name.format(version)
    target_name = os.path.join(target_dir, name)

    print(f"Processing {name} (fileId {fileId})")

    if isinstance(source, list):
        print(f"Zipping {source} to {target_name}")
        fd, fname = tempfile.mkstemp("_" + name, "patcher_")
        os.close(fd)
        with zipfile.ZipFile(fname, "w", zipfile.ZIP_DEFLATED) as zf:
            for sm in source:
                zipdir(sm, zf)
        rename = True
        checksum = calc_md5(fname)
    else:
        rename = False
        fname = source
        if source is None:
            checksum = calc_md5(target_name) if os.path.exists(target_name) else None
        else:
            checksum = calc_md5(fname)

    if checksum is None:
        print(f"Skipping {name}: no source and no existing target file")
        return

    print(f"Compared checksums: old {old_md5}, new {checksum}")
    if old_md5 == checksum:
        print(f"No content change for {name}, nothing to do")
        return

    # Otherwise proceed with copy + DB update
    print(f"Detected content change for {name}: {old_md5} -> {checksum}")
    if fname is not None:
        print(f"Copying {fname} to {target_name}")
        if not dryrun:
            shutil.copy(fname, target_name)
            if rename:
                os.remove(fname)  # remove the temporary zip after it has been copied
    else:
        print("No source file, not moving")

    if os.path.exists(target_name):
        update_db(conn, mod, fileId, version, name, checksum, dryrun)
        if not dryrun:
            try:
                os.chmod(target_name, 0o664)
            except PermissionError:
                print(f"Warning: could not chmod {target_name}")
    else:
        print(f"Target file {target_name} does not exist, skipping DB update")


def do_files(conn, mod, version, files, target_dir, dryrun):
    """Process all files for the given mod/version."""
    current_files = read_db(conn, mod)
    for name, fileId, source in files:
        old_md5 = current_files.get(fileId, {}).get("md5")
        create_file(conn, mod, fileId, version, name, source, target_dir, old_md5, dryrun)


def prepare_repo():
    """Clone or update the fa-coop repository and check out the specified ref."""
    repo_url = os.getenv("GIT_REPO_URL", "https://github.com/FAForever/fa-coop.git")
    patch_version = os.getenv("PATCH_VERSION")
    git_ref = os.getenv("GIT_REF") or (f"v{patch_version}" if patch_version else None)
    workdir = os.getenv("GIT_WORKDIR", "/tmp/fa-coop")

    if not git_ref:
        print("Error: GIT_REF or PATCH_VERSION must be specified.")
        sys.exit(1)

    print(f"=== Preparing repository {repo_url} at ref {git_ref} in {workdir} ===")

    # Clone if not exists
    if not os.path.isdir(os.path.join(workdir, ".git")):
        print(f"Cloning repository into {workdir}")
        subprocess.check_call(["git", "clone", repo_url, workdir])
    else:
        print(f"Repository already exists in {workdir}, fetching updates")
        subprocess.check_call(["git", "-C", workdir, "fetch", "--all", "--tags"])

    # Checkout the desired ref
    print(f"Checking out {git_ref}")
    subprocess.check_call(["git", "-C", workdir, "fetch", "--tags"])
    subprocess.check_call(["git", "-C", workdir, "checkout", git_ref])

    print(f"=== Repository ready at {workdir} ===")
    return workdir


def download_vo_assets(version, target_dir):
    """
    Download VO .nx2 files from the latest GitHub release of fa-coop,
    rename them for the given patch version, and copy them to the target directory.
    """
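    # The release JSON is expected to look roughly like this (illustrative subset; the tag
    # and asset names are examples only):
    #   {
    #     "tag_name": "v100",
    #     "assets": [
    #       {"name": "A01_VO.nx2",
    #        "browser_download_url": "https://github.com/FAForever/fa-coop/releases/download/v100/A01_VO.nx2"},
    #       ...
    #     ]
    #   }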
    os.makedirs(target_dir, exist_ok=True)
    print(f"Fetching VO assets for patch version {version}")

    # 1. Get latest release JSON from GitHub
    api_url = "https://api.github.com/repos/FAForever/fa-coop/releases/latest"
    with urllib.request.urlopen(api_url) as response:
        release_info = json.load(response)

    # 2. Filter assets ending with .nx2
    nx2_urls = [
        asset["browser_download_url"]
        for asset in release_info.get("assets", [])
        if asset["browser_download_url"].endswith(".nx2")
    ]

    if not nx2_urls:
        print("No VO .nx2 assets found in the latest release.")
        return

    temp_dir = os.path.join("/tmp", f"vo_download_{version}")
    os.makedirs(temp_dir, exist_ok=True)

    # 3. Download each .nx2 file
    for url in nx2_urls:
        filename = os.path.basename(url)
        dest_path = os.path.join(temp_dir, filename)
        print(f"Downloading {url} to {dest_path}")
        urllib.request.urlretrieve(url, dest_path)

    # 4. Rename files to include patch version (e.g., A01_VO.v49.nx2)
    for filepath in glob.glob(os.path.join(temp_dir, "*.nx2")):
        base = os.path.basename(filepath)
        # Insert .vXX. before the extension
        new_name = re.sub(r"\.nx2$", f".v{version}.nx2", base)
        new_path = os.path.join(temp_dir, new_name)
        os.rename(filepath, new_path)

    # 5. Copy to target directory
    for filepath in glob.glob(os.path.join(temp_dir, "*.nx2")):
        target_path = os.path.join(target_dir, os.path.basename(filepath))
        print(f"Copying {filepath} to {target_path}")
        shutil.copy(filepath, target_path)
        # Set the same permissions as the other deployed files
        os.chmod(target_path, 0o664)
        try:
            shutil.chown(target_path, group="www-data")
        except Exception:
            print(f"Warning: could not chown {target_path}")

    print("VO assets processed successfully.")


def main():
    mod = "coop"
    dryrun = os.getenv("DRY_RUN", "false").lower() in ("1", "true", "yes")
    version = os.getenv("PATCH_VERSION")

    if version is None:
        print('Please pass the patch version in environment variable PATCH_VERSION')
        sys.exit(1)

    print(f"=== Starting mod updater for version {version} (dryrun={dryrun}) ===")

    # /updates_{mod}_files will be appended by create_file
    target_dir = '/tmp/legacy-featured-mod-files'

    # Prepare git repo
    repo_dir = prepare_repo()

    # Download VO assets
    vo_dir = os.path.join(target_dir, f"updates_{mod}_files")
    download_vo_assets(version, vo_dir)

    # target filename / fileId in the updates_{mod}_files table / source files with version placeholder
    # if source is a single string, the file is copied directly
    # if source is a list, the files are zipped
    # if source is None, the existing file in the target directory (e.g. a downloaded VO asset) is reused
    files = [
        ('init_coop.v{}.lua', 1, os.path.join(repo_dir, 'init_coop.lua')),
        ('lobby_coop_v{}.cop', 2, [
            os.path.join(repo_dir, 'lua'),
            os.path.join(repo_dir, 'mods'),
            os.path.join(repo_dir, 'units'),
            os.path.join(repo_dir, 'mod_info.lua'),
            os.path.join(repo_dir, 'readme.md'),
            os.path.join(repo_dir, 'changelog.md'),
        ]),
        ('A01_VO.v{}.nx2', 3, None),
        ('A02_VO.v{}.nx2', 4, None),
        ('A03_VO.v{}.nx2', 5, None),
        ('A04_VO.v{}.nx2', 6, None),
        ('A05_VO.v{}.nx2', 7, None),
        ('A06_VO.v{}.nx2', 8, None),
        ('C01_VO.v{}.nx2', 9, None),
        ('C02_VO.v{}.nx2', 10, None),
        ('C03_VO.v{}.nx2', 11, None),
        ('C04_VO.v{}.nx2', 12, None),
        ('C05_VO.v{}.nx2', 13, None),
        ('C06_VO.v{}.nx2', 14, None),
        ('E01_VO.v{}.nx2', 15, None),
        ('E02_VO.v{}.nx2', 16, None),
        ('E03_VO.v{}.nx2', 17, None),
        ('E04_VO.v{}.nx2', 18, None),
        ('E05_VO.v{}.nx2', 19, None),
        ('E06_VO.v{}.nx2', 20, None),
        ('Prothyon16_VO.v{}.nx2', 21, None),
        ('TCR_VO.v{}.nx2', 22, None),
        ('SCCA_Briefings.v{}.nx2', 23, None),
        ('SCCA_FMV.nx2.v{}.nx2', 24, None),
        ('FAF_Coop_Operation_Tight_Spot_VO.v{}.nx2', 25, None),
    ]
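
    # Example: to ship voice overs for a new mission, append an entry with the next free
    # fileId (the name and id below are hypothetical) and attach the matching .nx2 asset
    # to the fa-coop GitHub release so download_vo_assets picks it up:
    #     ('FAF_Coop_New_Mission_VO.v{}.nx2', 26, None),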

    conn = get_db_connection()
    try:
        do_files(conn, mod, version, files, target_dir, dryrun)
    finally:
        conn.close()

    print(f"=== Deployment finished for version {version} (dryrun={dryrun}) ===")


if __name__ == "__main__":
    main()