Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/Stingray.jl
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ export apply_gtis
export fill_bad_time_intervals!
export create_filtered_lightcurve
export check_gtis
export split_by_gtis, intersect_gtis
export split_by_gtis, intersect_gtis,get_gti_lengths
include("powerspectrum.jl")
export Powerspectrum
export AveragedPowerspectrum, AbstractPowerSpectrum
Expand Down
34 changes: 28 additions & 6 deletions src/crossspectrum.jl
Original file line number Diff line number Diff line change
Expand Up @@ -370,21 +370,43 @@ function CrossSpectrum(
freq = freq[fgt0]
end

# Process ONLY THE FIRST valid segment
# Process ONLY THE FIRST valid segment with improved length handling
cross_power = nothing
pds1_power = nothing
pds2_power = nothing
segment_photons1 = 0.0
segment_photons2 = 0.0

for i = 1:minseg
if length(lcs1[i].counts) != length(lcs2[i].counts)
lc1_seg = lcs1[i]
lc2_seg = lcs2[i]

# Handle different segment lengths by truncating to common length
min_len = min(length(lc1_seg.counts), length(lc2_seg.counts))

if min_len < 2
continue
end

# Use common length
counts1 = lc1_seg.counts[1:min_len]
counts2 = lc2_seg.counts[1:min_len]

# Ensure we have the expected number of bins or adjust
if length(counts1) != n_bin || length(counts2) != n_bin
# If segment is longer than expected, truncate
if length(counts1) >= n_bin && length(counts2) >= n_bin
counts1 = counts1[1:n_bin]
counts2 = counts2[1:n_bin]
else
# If too short, skip this segment
continue
end
end

# Calculate FFTs
ft1 = fft(Float64.(lcs1[i].counts))
ft2 = fft(Float64.(lcs2[i].counts))
ft1 = fft(Float64.(counts1))
ft2 = fft(Float64.(counts2))

# Calculate cross spectrum and power spectra
cross_power = ft1 .* conj.(ft2)
Expand All @@ -399,8 +421,8 @@ function CrossSpectrum(
end

# Store photon counts
segment_photons1 = sum(lcs1[i].counts)
segment_photons2 = sum(lcs2[i].counts)
segment_photons1 = sum(counts1)
segment_photons2 = sum(counts2)

# BREAK after first valid segment - this is the key difference!
break
Expand Down
117 changes: 88 additions & 29 deletions src/events.jl
Original file line number Diff line number Diff line change
Expand Up @@ -422,11 +422,16 @@ by applying the same filtering mask derived from the source column.
function filter_on!(f, src_col::AbstractVector, ev::EventList)
@assert size(src_col) == size(ev.times) "Source column size must match times size"

# Handle empty input - return immediately
if isempty(src_col) || isempty(ev.times)
return ev
end

# Modified from Base.filter! implementation for multiple arrays
# Use two pointers: i for reading, j for writing
j = firstindex(ev.times)

for i in eachindex(ev.times)
for i in eachindex(src_col) # Use src_col indices, not ev.times
predicate = f(src_col[i])::Bool

if predicate
Expand All @@ -446,18 +451,18 @@ function filter_on!(f, src_col::AbstractVector, ev::EventList)
end
end

# Resize all arrays to new length
if j <= lastindex(ev.times)
new_length = j - 1
resize!(ev.times, new_length)
# Calculate new length correctly
new_length = j - firstindex(ev.times)

# Resize all arrays to new length (handles 0 length correctly)
resize!(ev.times, new_length)

if !isnothing(ev.energies)
resize!(ev.energies, new_length)
end
if !isnothing(ev.energies)
resize!(ev.energies, new_length)
end

for (_, col) in ev.meta.extra_columns
resize!(col, new_length)
end
for (_, col) in ev.meta.extra_columns
resize!(col, new_length)
end

ev
Expand Down Expand Up @@ -1050,7 +1055,7 @@ function readevents(
gti_hdu_indices::Union{Vector{Int},Nothing} = nothing,
combine_gtis::Bool = true,
apply_gti_filter::Bool = false,
convert_to_mjd::Bool = false, # New parameter to control MJD conversion
convert_to_mjd::Bool = false,
kwargs...,
)::EventList{Vector{T},FITSMetadata{FITSIO.FITSHeader}}

Expand All @@ -1067,8 +1072,12 @@ function readevents(
# Read GTI first if requested
gti_data, gti_source = nothing, nothing
if load_gti
gti_data, gti_source =
read_gti_from_fits(path; gti_hdu_candidates, gti_hdu_indices, combine_gtis)
gti_data, gti_source = read_gti_from_fits(
path;
gti_hdu_candidates,
gti_hdu_indices,
combine_gtis
)

if !isnothing(gti_data)
@debug "Found GTI data" n_intervals = size(gti_data, 1) time_range =
Expand Down Expand Up @@ -1122,12 +1131,10 @@ function readevents(
# Read extra columns
extra_data = Dict{String,Vector}()
for col_name in extra_columns
actual_col_idx =
findfirst(col -> uppercase(col) == uppercase(col_name), all_cols)
actual_col_idx = findfirst(col -> uppercase(col) == uppercase(col_name), all_cols)
if !isnothing(actual_col_idx)
actual_col_name = all_cols[actual_col_idx]
extra_data[col_name] =
read(selected_hdu, actual_col_name, case_sensitive = false)
extra_data[col_name] = read(selected_hdu, actual_col_name, case_sensitive = false)
else
@warn "Column '$col_name' not found in FITS file"
end
Expand Down Expand Up @@ -1181,8 +1188,7 @@ function readevents(

corrected_time = time .+ effective_timezero .+ bin_center_correction
@debug "Applied bin-centering correction" time_zero = effective_timezero timedel =
time_del timepixr = effective_timepixr bin_correction =
bin_center_correction
time_del timepixr = effective_timepixr bin_correction = bin_center_correction
else
corrected_time = time .+ effective_timezero
@debug "Applied time zero correction" time_zero = effective_timezero
Expand All @@ -1191,13 +1197,51 @@ function readevents(
# Convert to MJD only if requested
if convert_to_mjd
time = convert(Vector{T}, sec_to_mjd(corrected_time, mjd_ref))

# Apply the same time corrections to GTI data
if !isnothing(gti_data)
# Apply time zero correction to GTI
gti_corrected = gti_data .+ effective_timezero

# Apply bin-centering correction to GTI if applicable
if !isnothing(time_del) && time_del > 0.0
effective_timepixr = isnothing(time_pixr) ? 0.0 : time_pixr
bin_center_correction = (0.5 - effective_timepixr) * time_del
gti_corrected = gti_corrected .+ bin_center_correction
end

# Convert GTI to MJD using the same reference
gti_data = sec_to_mjd.(gti_corrected, mjd_ref)
@debug "Applied same time corrections to GTI data"
end

@debug "Converted to MJD(TT)" mjd_ref = mjd_ref
else
time = convert(Vector{T}, corrected_time)

# Apply time corrections to GTI even when not converting to MJD
if !isnothing(gti_data)
# Apply time zero correction to GTI
gti_corrected = gti_data .+ effective_timezero

# Apply bin-centering correction to GTI if applicable
if !isnothing(time_del) && time_del > 0.0
effective_timepixr = isnothing(time_pixr) ? 0.0 : time_pixr
bin_center_correction = (0.5 - effective_timepixr) * time_del
gti_corrected = gti_corrected .+ bin_center_correction
end

gti_data = gti_corrected
@debug "Applied time corrections to GTI data"
end

@debug "Kept times in seconds (corrected)"
end

@debug "Final time range" time_range = (minimum(time), maximum(time))
if !isnothing(gti_data)
@debug "Final GTI range" gti_range = (minimum(gti_data), maximum(gti_data))
end
end

# Validate data consistency
Expand All @@ -1207,14 +1251,28 @@ function readevents(

# Apply GTI filtering if requested
if apply_gti_filter && !isnothing(gti_data)
gti_mask, _ = create_gti_mask(time, gti_data)

time = time[gti_mask]
if !isnothing(energy)
energy = energy[gti_mask]
end
for (col_name, col_data) in extra_data
extra_data[col_name] = col_data[gti_mask]
@debug "Applying GTI filter" n_events_before = length(time)

try
gti_mask, _ = create_gti_mask(time, gti_data)
n_kept = sum(gti_mask)

@debug "GTI filter results" events_kept = n_kept events_removed = (length(time) - n_kept)

if n_kept > 0
time = time[gti_mask]
if !isnothing(energy)
energy = energy[gti_mask]
end
for (col_name, col_data) in extra_data
extra_data[col_name] = col_data[gti_mask]
end
@debug "Successfully applied GTI filter"
else
@warn "GTI filtering removed all events - check time system consistency" event_range = extrema(time) gti_range = extrema(gti_data)
end
catch e
@warn "GTI filtering failed: $e - proceeding without GTI filter"
end
end

Expand All @@ -1231,12 +1289,13 @@ function readevents(
for (col_name, col_data) in extra_data
extra_data[col_name] = col_data[sort_indices]
end
@debug "Events sorted by time"
else
@assert false "Times are not sorted (pass `sort = true` to force sorting)"
end
end

# Create metadata with all the new timing information
# Create metadata with all timing information
meta = FITSMetadata(
path,
hdu,
Expand Down
1 change: 1 addition & 0 deletions src/gti.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1097,6 +1097,7 @@ function fill_bad_time_intervals!(

return el
end
"""
    get_gti_lengths(gti::AbstractMatrix{<:Real})

Return the duration of each good time interval (GTI) as a vector.

Each row of `gti` is an interval `[start stop]`; the returned vector holds
`stop - start` for every row, in row order.
"""
function get_gti_lengths(gti::AbstractMatrix{<:Real})
    return vec(diff(gti; dims = 2))
end
#todo
# create a function that fills the bad time intervals in a light curve
# in order to maintain optimal sampling for periodograms
Expand Down
59 changes: 44 additions & 15 deletions src/plotting/plots_recipes_crossspectrum.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
plot(cs)
plot(cs, plot_type=:amplitude, xscale=:log10)

# Phase analysis with errors
plot(cs, plot_type=:phase_lag, show_errors=true, freq_range=(0.1, 10))

# Multiple plots
Expand Down Expand Up @@ -163,6 +162,28 @@ plot(cs, plot_type=:coherence, show_noise_level=true)

return freq_plot, time_lag_plot

elseif plot_type == :raw_coherence
if is_averaged(cs)
title --> "Averaged Raw Coherence Function ($(cs.m) segments)"
else
title --> "Single Raw Coherence Function"
end
xlabel --> "Frequency (Hz)"
ylabel --> "Raw Coherence"
xaxis --> xscale
yaxis --> :identity
ylims --> (0, 1.1)

# Raw coherence calculation (biased)
raw_coherence_plot =
abs2.(cs.power[freq_mask]) ./ (cs.ps1[freq_mask] .* cs.ps2[freq_mask])

seriestype --> :line
color --> color
label --> "Raw Coherence"

return freq_plot, raw_coherence_plot

elseif plot_type == :coherence
if is_averaged(cs)
title --> "Averaged Coherence Function ($(cs.m) segments)"
Expand All @@ -174,14 +195,15 @@ plot(cs, plot_type=:coherence, show_noise_level=true)
xaxis --> xscale
yaxis --> :identity
ylims --> (0, 1.1)

coherence_plot =
abs2.(cs.power[freq_mask]) ./ (cs.ps1[freq_mask] .* cs.ps2[freq_mask])


# Use the bias-corrected coherence function
coherence_values = coherence(cs)
coherence_plot = coherence_values[freq_mask]

seriestype --> :line
color --> color
label --> "Coherence"

return freq_plot, coherence_plot

elseif plot_type == :snr
Expand All @@ -192,18 +214,18 @@ plot(cs, plot_type=:coherence, show_noise_level=true)
end
xlabel --> "Frequency (Hz)"
ylabel --> "S/N Ratio"
xaxis --> xscale
xaxis --> :log10
yaxis --> :identity

noise_level = theoretical_noise_level(cs)
snr_plot = abs.(cs.power[freq_mask]) ./ noise_level
# Use empirical noise level (consistent with signal_to_noise_ratio function)
snr_plot = signal_to_noise_ratio(cs) # This uses white_noise_level internally

seriestype --> :line
color --> color
label --> "S/N Ratio"

return freq_plot, snr_plot
color --> :purple
label --> "S/N Ratio (Empirical)"

return cs.freq, snr_plot

elseif plot_type == :cross_power_raw
title --> "Cross Power (Raw Complex Values)"
xlabel --> "Frequency (Hz)"
Expand Down Expand Up @@ -298,6 +320,7 @@ plot(cs, Val(:significant_detections), threshold=5.0)
cs::CrossSpectrum{T},
::Val{:significant_detections};
threshold = 3.0,
use_empirical_noise = true, # New parameter for consistency
) where {T}

if is_averaged(cs)
Expand All @@ -313,7 +336,13 @@ plot(cs, Val(:significant_detections), threshold=5.0)
grid --> true
legend --> :topright

noise_level = theoretical_noise_level(cs)
# Use consistent noise level estimation
noise_level = if use_empirical_noise
white_noise_level(cs) # Same as signal_to_noise_ratio() uses
else
theoretical_noise_level(cs) # Original behavior
end

snr_data = abs.(cs.power) ./ noise_level
significant_mask = snr_data .> threshold

Expand Down Expand Up @@ -343,7 +372,7 @@ plot(cs, Val(:significant_detections), threshold=5.0)
color --> :black
linestyle --> :dash
alpha --> 0.7
label --> "Noise Level"
label --> use_empirical_noise ? "Empirical Noise Level" : "Theoretical Noise Level"
Float64[], Float64[]
end

Expand Down
Loading