@@ -16,7 +16,7 @@ println("# Parsing arguments...")
16
16
add_arg_group! (s, " Files" , :argg_files )
17
17
@add_arg_table! s begin
18
18
" manifest"
19
- help = " Manifest file (CVS) containing FITS files to analyze.\n Expects columns named Filename, bjd, target, and continuum_filename."
19
+ help = " Manifest file (CVS) containing FITS files to analyze.\n Expects columns named Filename, bjd, target, and used for continuum continuum_filename."
20
20
arg_type = String
21
21
default = " manifest.csv"
22
22
# required = true
@@ -431,7 +431,7 @@ if @isdefined(sed_filename)
431
431
sed = Continuum. read_master_sed_neid (filename= sed_filename)
432
432
end
433
433
# Range of spectral orders over which the continuum model is fit.
# min_order(NEID2D()) is 12 for the current instrument definition.
min_order_for_continuum = min_order(NEID2D())  # 12
# Drop the final order: the current DRP returns a useless last order.
max_order_for_continuum = max_order(NEID2D()) - 1
orders_to_use_for_continuum = min_order_for_continuum:max_order_for_continuum

# Fresh pipeline plan for the analysis steps below.
pipeline_plan = PipelinePlan()
@@ -489,7 +489,8 @@ pipeline_plan = PipelinePlan()
489
489
if @isdefined sed
490
490
@warn (" DRP v1.1 now provides blaze in L2 file. This script has not been updated to use explicit an SED model." )
491
491
(f_norm, var_norm) = Continuum. normalize_by_sed (spec. λ,spec. flux,spec. var, sed; poly_order = args[" order_poly_continuum" ], half_width = args[" continuum_poly_half_width" ], quantile = args[" quantile_fit_continuum" ], orders_to_use= orders_to_use_for_continuum)
492
- if row. Filename ∈ df_files_cleanest. Filename
492
+ sed_norm_failed = any ([all (isnan .(view (f_norm,:,ord))) for ord in 1 : (size (f_norm,2 )- 1 )]) # check if any orders besides the last order are all nans
493
+ if row. Filename ∈ df_files_cleanest. Filename && ! sed_norm_failed
493
494
mean_clean_flux_sed_normalized .+ = f_norm # .*weight
494
495
mean_clean_var_sed_normalized .+ = var_norm # .*weight
495
496
global mean_clean_flux_sed_normalized_weight_sum += weight
@@ -512,17 +513,22 @@ pipeline_plan = PipelinePlan()
512
513
else
513
514
(anchors, continuum, f_filtered) = Continuum. calc_continuum (spec. λ, f_norm, var_norm; fwhm = args[" fwhm_continuum" ]* 1000 , ν = args[" nu_continuum" ],
514
515
stretch_factor = args[" stretch_factor" ], merging_threshold = args[" merging_threshold" ], smoothing_half_width = args[" smoothing_half_width" ], min_R_factor = args[" min_rollingpin_r" ],
515
- orders_to_use = orders_to_use_for_continuum, verbose = false )
516
+ orders_to_use = orders_to_use_for_continuum, verbose = true )
516
517
push! (normalization_anchors_list, anchors)
517
518
end
518
519
#=
519
520
continuum = load(row.continuum_filename, "continuum")
520
521
#file_hashes[row.continuum_filename] = bytes2hex(sha256(row.continuum_filename))
521
522
file_hashes[row.continuum_filename] = bytes2hex(open(md5,row.continuum_filename))
522
523
=#
523
- f_norm ./= continuum
524
- var_norm ./= continuum.^ 2
525
- continuum_norm_failed = any ([all (isnan .(all_spectra[i]. flux[:,i])) for i in 1 : size (all_spectra[i]. flux,2 )][1 : end - 1 ]) # check if any orders besides the last order are all nans
524
+ # f_norm ./= continuum
525
+ # var_norm ./= continuum.^2
526
+ for ord in orders_to_use_for_continuum
527
+ f_norm[:,ord] ./= view (continuum,:,ord)
528
+ var_norm[:,ord] ./= view (continuum,:,ord).^ 2
529
+ end
530
+ # continuum_norm_failed = any([all(isnan.(all_spectra[i].flux[:,i])) for i in 1:size(all_spectra[i].flux,2)][1:end-1]) #check if any orders besides the last order are all nans
531
+ continuum_norm_failed = any ([all (isnan .(view (f_norm,:,ord))) for ord in orders_to_use_for_continuum]) # check if any orders used for continuum are all nans
526
532
if row. Filename ∈ df_files_cleanest. Filename && ! continuum_norm_failed
527
533
mean_clean_flux_continuum_normalized .+ = f_norm # .*weight
528
534
mean_clean_var_continuum_normalized .+ = var_norm # .*weight
@@ -563,7 +569,7 @@ pipeline_plan = PipelinePlan()
563
569
else
564
570
(anchors, continuum, f_filtered) = Continuum. calc_continuum (spec. λ, mean_clean_flux, mean_clean_var; fwhm = args[" fwhm_continuum" ]* 1000 , ν = args[" nu_continuum" ],
565
571
stretch_factor = args[" stretch_factor" ], merging_threshold = args[" merging_threshold" ], smoothing_half_width = args[" smoothing_half_width" ], min_R_factor = args[" min_rollingpin_r" ],
566
- orders_to_use = orders_to_use_for_continuum, verbose = false )
572
+ orders_to_use = orders_to_use_for_continuum, verbose = true )
567
573
if ! isnothing (args[" anchors_filename_output" ])
568
574
println (" # Storing anchors used for continuum model in " ,args[" anchors_filename_output" ], " ." )
569
575
save (args[" anchors_filename_output" ], Dict (" anchors" => anchors) )
@@ -576,9 +582,11 @@ pipeline_plan = PipelinePlan()
576
582
for (i,row) in enumerate (eachrow (df_files_use))
577
583
(anchors, continuum, f_filtered) = Continuum. calc_continuum (all_spectra[i]. λ, all_spectra[i]. flux, all_spectra[i]. var,
578
584
anchors, smoothing_half_width = args[" smoothing_half_width" ], orders_to_use= orders_to_use_for_continuum)
579
- all_spectra[i]. flux ./= continuum
580
- all_spectra[i]. var ./= continuum.^ 2
581
- continuum_norm_failed = any ([all (isnan .(all_spectra[i]. flux[:,i])) for i in 1 : size (all_spectra[i]. flux,2 )][1 : end - 1 ]) # check if any orders besides the last order are all nans
585
+ for ord in orders_to_use_for_continuum
586
+ all_spectra[i]. flux[:,ord] ./= view (continuum,:,ord)
587
+ all_spectra[i]. var[:,ord] ./= view (continuum,:,ord).^ 2
588
+ end
589
+ continuum_norm_failed = any ([all (isnan .(view (all_spectra[i]. flux,:,ord))) for ord in orders_to_use_for_continuum]) # check if any orders used for continuum are all nans
582
590
if row. Filename ∈ df_files_cleanest. Filename && ! continuum_norm_failed
583
591
mean_clean_flux_continuum_normalized .+ = all_spectra[i]. flux # .*weight
584
592
mean_clean_var_continuum_normalized .+ = all_spectra[i]. var # .*weight
@@ -628,9 +636,13 @@ line_width = line_width_50_default
628
636
# outputs["line_list_espresso"] = line_list_espresso
629
637
630
638
if args["variable_mask_scale"]
    # Empirical ceiling on Δfwhm² for the current DRP; spectra above this would
    # require a negative broadening, which variable mask scaling cannot apply.
    # maxΔfwhm² = -0.569375  # previous, slightly stricter value
    maxΔfwhm² = -0.56930
    # NOTE(review): the old hard `@assert maximum(df_files_use.Δfwhm²) <= maxΔfwhm²`
    # made this warning branch unreachable (and aborted the run); warn and rely on
    # the clamp below instead, matching the tolerant intent of the clamp.
    if maximum(df_files_use.Δfwhm²) > maxΔfwhm²
        println("# Warning: dangerously large maximum(df_files_use.Δfwhm²) = ", maximum(df_files_use.Δfwhm²))
    end
    # How much to increase fwhm by to achieve uniform fwhm across spectra.
    # clamp floors small negative differences (spectra at/above the ceiling) at zero
    # so sqrt never sees a negative argument.
    Δfwhm = 1000.0 .* sqrt.(clamp.(maxΔfwhm² .- df_files_use.Δfwhm²[1:length(all_spectra)], 0.0, Inf))
else
    Δfwhm = zeros(0)  # empty vector => no per-spectrum broadening applied
end
0 commit comments