@@ -69,9 +69,12 @@ println("# Parsing arguments...")
     add_arg_group!(s, "Line list parameters", :argg_line_list_param)
     @add_arg_table! s begin
         "--line_list_filename"
-            help = "Line list file"
+            help = "Line list filename (input)"
             arg_type = String
             # default = joinpath(pkgdir(NeidSolarScripts),"data","solar_line_list_espresso.csv")
+        "--line_list_output_filename"
+            help = "Line list filename (output)"
+            arg_type = String
         "--recompute_line_weights"
             help = "Force recalculation of SNR-based line weight factors."
             action = :store_true
@@ -98,6 +101,9 @@ println("# Parsing arguments...")
             help = "Filename for anchor locations to use in computing continuum to normalize by."
             arg_type = String
             # default = "/home/eford/Code/RvSpectMLEcoSystem/NeidSolarScripts/data/neidMaster_HR_SmoothLampSED_20210101.fits"
+        "--anchors_filename_output"
+            help = "Filename to write anchor locations used in computing continuum to normalize by."
+            arg_type = String
         "--smoothing_half_width"
             help = "Half width of window to use for smoothing prior to finding local maxima."
             arg_type = Int64
@@ -191,7 +197,8 @@ println("# Parsing arguments...")
         "--max_num_spectra_clean"
             help = "Maximum number of spectra to include in clean spectrum."
             arg_type = Int
-            default = 65  # based on 60 minutes of integration time from 55s exposures
+            default = 130  # based on 120 minutes of integration time from 55s exposures
+            # default = 65  # based on 60 minutes of integration time from 55s exposures
     end

     return parse_args(s)
@@ -326,8 +333,8 @@ if verbose println("# Reading manifest of files to process.") end
 if args["target"] == "Sun" || args["target"] == "Solar"
     @assert hasproperty(df_files,:alt_sun)   # TODO: Compute if not available?
     @assert hasproperty(df_files,:Δfwhm²)    # TODO: Compute if not available?
-    if !hasproperty(df_files,:solar_hour_angle)
-        df_files[!,"solar_hour_angle"] = SolarRotation.get_solar_hour_angle(df_files.bjd, obs=:WIYN)
+    if !hasproperty(df_files,:hour_angle)
+        df_files[!,"hour_angle"] = SolarRotation.get_solar_hour_angle(df_files.bjd, obs=:WIYN)
     end
     # if eltype(df_files[!,:order_snrs]) == String  # TODO: Compute if not available?
 end
@@ -367,7 +374,7 @@ if verbose println("# Reading manifest of files to process.") end
     @filter( args["max_airmass"] == nothing || _.airmass <= args["max_airmass"] ) |>
     DataFrame
 df_files_use = df_files_use |>
-    @filter( args["max_solar_hour_angle"] == nothing || abs(_.solar_hour_angle) <= args["max_solar_hour_angle"] ) |>
+    @filter( args["max_solar_hour_angle"] == nothing || abs(_.hour_angle) <= args["max_solar_hour_angle"] ) |>
     DataFrame
 df_files_use = df_files_use |>
     @filter( args["start_time"] == nothing || Time(julian2datetime(_.bjd)) >= start_time ) |>
@@ -409,13 +416,15 @@ df_files_cleanest = df_files_use |>
     @filter( min_drp_minor_version <= Base.thisminor(VersionNumber(_.drpversion)) <= max_drp_minor_version ) |>
     # @filter( _.airmass <= 2.0 ) |>
     @filter( _.airmass <= args["max_airmass_clean"] ) |>
-    @filter( abs(_.solar_hour_angle) <= args["max_solar_hour_angle_clean"] ) |>
-    @orderby( abs(_.solar_hour_angle) ) |>
+    @filter( abs(_.hour_angle) <= args["max_solar_hour_angle_clean"] ) |>
+    @orderby( abs(_.hour_angle) ) |>
     @take( args["max_num_spectra_clean"] ) |>
     DataFrame
 println("# Found ", size(df_files_cleanest,1), " files considered clean.")
 # @assert size(df_files_cleanest,1) >= 1

+clean_obs_mask = map(fn->in(fn, df_files_cleanest.Filename), df_files_use.Filename)
+
 if verbose println(now()) end

 if @isdefined(sed_filename)
@@ -534,16 +543,20 @@ pipeline_plan = PipelinePlan()
     @assert isfile(args["anchors_filename"]) && filesize(args["anchors_filename"]) > 0
     anchors = load(args["anchors_filename"], "anchors")
 else
+    println("# Computing continuum model.")
     if @isdefined sed
         @warn("DRP v1.1 now provides the blaze in the L2 file.  This script has not been updated to use an explicit SED model.")
         (anchors, continuum, f_filtered) = Continuum.calc_continuum(spec.λ, mean_clean_flux_sed_normalized, mean_clean_var_sed_normalized; fwhm = args["fwhm_continuum"]*1000, ν = args["nu_continuum"],
                 stretch_factor = args["stretch_factor"], merging_threshold = args["merging_threshold"], smoothing_half_width = args["smoothing_half_width"], min_R_factor = args["min_rollingpin_r"],
                 orders_to_use = orders_to_use_for_continuum, verbose = false)
+        save(args["anchors_filename_output"], Dict("anchors" => anchors))
     else
         (anchors, continuum, f_filtered) = Continuum.calc_continuum(spec.λ, mean_clean_flux, mean_clean_var; fwhm = args["fwhm_continuum"]*1000, ν = args["nu_continuum"],
                 stretch_factor = args["stretch_factor"], merging_threshold = args["merging_threshold"], smoothing_half_width = args["smoothing_half_width"], min_R_factor = args["min_rollingpin_r"],
                 orders_to_use = orders_to_use_for_continuum, verbose = false)
+        save(args["anchors_filename_output"], Dict("anchors" => anchors))
     end
+    println("# Stored anchors used for continuum model.")
 end
 normalization_anchors_list = anchors

@@ -575,7 +588,9 @@ line_width = line_width_50_default
 max_mask_scale_factor = max(mask_scale_factor, max(lsf_width,
         line_width/sqrt(8*log(2)))/default_ccf_mask_v_width(NEID2D()))   # 4.0
 max_bc = RvSpectMLBase.max_bc
-# max_bc = RvSpectMLBase.max_bc_earth_rotation
+if args["target"] == "Sun" || args["target"] == "Solar"
+    max_bc = RvSpectMLBase.max_bc_solar
+end
 # max_orders = min_order(NEID2D()):max_order(NEID2D())
 # good_orders = orders_to_use_default(NEID2D())
 # orders_to_use = max_orders
@@ -589,11 +604,16 @@ line_width = line_width_50_default
 # orders_to_use = good_orders
 # order_list_timeseries = extract_orders(all_spectra, pipeline_plan, orders_to_use=orders_to_use, recalc=true)
     touch(line_list_filename)
-    # line_list_filename = "G2.espresso.mas"
-    linelist_for_ccf_fn_w_path = joinpath(pkgdir(EchelleCCFs),"data","masks",line_list_filename)
-    line_list_espresso = prepare_line_list(linelist_for_ccf_fn_w_path, all_spectra, pipeline_plan, v_center_to_avoid_tellurics=ccf_mid_velocity,
+    if (isfile(line_list_filename) && (filesize(line_list_filename) > 0))
+        line_list_input_filename = line_list_filename
+    else
+        line_list_input_filename = joinpath(pkgdir(EchelleCCFs),"data","masks","espresso+neid_mask_97_to_108.mas")
+    end
+    line_list_espresso = prepare_line_list(line_list_input_filename, all_spectra, pipeline_plan, v_center_to_avoid_tellurics=ccf_mid_velocity,
             Δv_to_avoid_tellurics = 2*max_bc + range_no_mask_change*line_width_50_default + max_mask_scale_factor*default_ccf_mask_v_width(NEID2D()), orders_to_use=#=orders_to_use=#56:108, recalc=true, verbose=true)
-    # CSV.write(line_list_filename, line_list_espresso)
+    if args["recompute_line_weights"] && !isnothing(args["line_list_output_filename"])
+        CSV.write(args["line_list_output_filename"], line_list_espresso)
+    end
 end
 # file_hashes[line_list_filename] = bytes2hex(sha256(line_list_filename))
 file_hashes[line_list_filename] = bytes2hex(open(md5, line_list_filename))
@@ -637,6 +657,7 @@ println("# Saving results to ", daily_ccf_filename, ".")
     f["Δfwhm"] = Δfwhm
     f["orders_to_use"] = orders_to_use
     f["manifest"] = df_files_use
+    f["clean_obs_mask"] = clean_obs_mask
     f["calc_order_ccf_args"] = args
     f["ccf_line_list"] = line_list_espresso
     if (size(df_files_cleanest,1) >= 1) && any(.!iszero.(mean_clean_flux))
0 commit comments