Merge pull request #233 from krishnatejavedula/logging_info
Change print() to logging.info()
israelmcmc authored Aug 13, 2024
2 parents d74b7cf + 08e56ce commit 607d417
Showing 4 changed files with 36 additions and 31 deletions.
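The diff below follows the usual module-level logger pattern: TSMap.py and fast_ts_fit.py gain their own logger, while the other two files call a logger that is not introduced in this diff and so presumably already exists there. A minimal sketch of the pattern and of how a caller might surface the new INFO messages (read_response is an illustrative stand-in, and the basicConfig call is an assumed usage example, not part of this commit):

    import logging

    # Module-level logger, the pattern added to cosipy/ts_map/TSMap.py and
    # cosipy/ts_map/fast_ts_fit.py in this commit
    logger = logging.getLogger(__name__)

    def read_response(norm="Linear"):
        # Illustrative stand-in: messages that previously went to stdout via
        # print() are now emitted through the module logger
        logger.info("normalisation is {0}".format(norm))

    if __name__ == "__main__":
        # Assumed usage, not part of this commit: INFO records only become
        # visible once the calling code (or notebook) configures logging
        logging.basicConfig(level=logging.INFO)
        read_response()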
32 changes: 16 additions & 16 deletions cosipy/response/FullDetectorResponse.py
@@ -198,7 +198,7 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal
try :
norm = str(line[1])
except :
- print(f"norm not found in the file ! We assume {norm}")
+ logger.info(f"norm not found in the file ! We assume {norm}")

if norm =="Linear" :
emin = int(line[2])
@@ -255,12 +255,12 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal
assert nevents_sim != 0,"number of simulated events is 0 !"


- print("normalisation is {0}".format(norm))
+ logger.info("normalisation is {0}".format(norm))
if sparse == None :
- print("Sparse paramater not found in the file : We assume this is a non sparse matrice !")
+ logger.info("Sparse paramater not found in the file : We assume this is a non sparse matrice !")
sparse = False
else :
- print("Sparse matrice ? {0}".format(sparse))
+ logger.info("Sparse matrice ? {0}".format(sparse))
edges = ()

for axis_edges, axis_type in zip(axes_edges, axes_types):
@@ -298,8 +298,8 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal

# If fast method fails, use long method, which should work in all cases.
except:
- print("Initial attempt failed.")
- print("Using long method...")
+ logger.info("Initial attempt failed.")
+ logger.info("Using long method...")
nlines = sum(1 for _ in gzip.open(filename,"rt"))

# Preallocate arrays
@@ -308,7 +308,7 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal

# Calculate the memory usage in Gigabytes
memory_size = ((nlines * data.itemsize)+(axes.ndim*nlines*coords.itemsize))/(1024*1024*1024)
- print(f"Estimated RAM you need to read the file : {memory_size} GB")
+ logger.info(f"Estimated RAM you need to read the file : {memory_size} GB")



@@ -320,7 +320,7 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal

# Calculate the memory usage in Gigabytes
memory_size = (nlines * data.itemsize)/(1024*1024*1024)
- print(f"Estimated RAM you need to read the file : {memory_size} GB")
+ logger.info(f"Estimated RAM you need to read the file : {memory_size} GB")


# Loop
@@ -381,7 +381,7 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal
if binLine :
#check we have same number of bin than values read
if len(line)!=nbins :
- print("nb of bin content read ({0}) != nb of bins {1}".format(len(line),nbins))
+ logger.info("nb of bin content read ({0}) != nb of bins {1}".format(len(line),nbins))
sys.exit()

for i in tqdm(range(nbins), desc="Processing", unit="bin"):
@@ -393,7 +393,7 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal

break

- print("response file read ! Now we create the histogram and weight in order to "+
+ logger.info("response file read ! Now we create the histogram and weight in order to "+
"get the effective area")
# create histpy histogram

@@ -424,11 +424,11 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal
gauss_int = 0.5 * (1 + erf( (edges[0]-Gauss_mean)/(4*np.sqrt(2)) ) ) + 0.5 * (1 + erf( (edges[1]-Gauss_mean)/(4*np.sqrt(2)) ) )

assert gauss_int == 1, "The gaussian spectrum is not fully contained in this single bin !"
- print("Only one bin so we will use the Mono normalisation")
+ logger.info("Only one bin so we will use the Mono normalisation")
norm ="Mono"

if Spectrumfile is not None and norm=="file":
- print("normalisation : spectrum file")
+ logger.info("normalisation : spectrum file")
# From spectrum file
spec = pd.read_csv(Spectrumfile, sep=" ")
spec = spec.iloc[:-1]
@@ -443,7 +443,7 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal
nperchannel_norm = hspec[:]

elif norm=="powerlaw":
- print("normalisation : powerlaw with index {0} with energy range [{1}-{2}]keV".format(alpha,emin,emax))
+ logger.info("normalisation : powerlaw with index {0} with energy range [{1}-{2}]keV".format(alpha,emin,emax))
# From powerlaw

e_lo = dr.axes['Ei'].lower_bounds
@@ -466,11 +466,11 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal
nperchannel_norm = (e_hi**a - e_lo**a) / (emax**a - emin**a)

elif norm =="Linear" :
- print("normalisation : linear with energy range [{0}-{1}]".format(emin,emax))
+ logger.info("normalisation : linear with energy range [{0}-{1}]".format(emin,emax))
nperchannel_norm = ewidth / (emax-emin)

elif norm=="Mono" :
- print("normalisation : mono")
+ logger.info("normalisation : mono")

nperchannel_norm = np.array([1.])

@@ -1163,7 +1163,7 @@ def command_dump():
apar.error(f"Argument '{option}' not valid for 'dump' command")

if args.output is None:
- print(result)
+ logger.info(result)
else:
logger.info(f"Saving result to {Path(args.output).resolve()}")
f = open(args.output, 'a')
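The logger called above is not defined in this diff, so FullDetectorResponse.py presumably already creates one. Assuming it follows the same logging.getLogger(__name__) pattern as the two ts_map modules below, the fairly chatty response read-out can be silenced per module from user code; an assumed usage sketch, not part of this commit:

    import logging

    # Illustrative: hide the INFO chatter from the response reader only,
    # while keeping its warnings and errors visible
    logging.getLogger("cosipy.response.FullDetectorResponse").setLevel(logging.WARNING)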
6 changes: 3 additions & 3 deletions cosipy/spacecraftfile/SpacecraftFile.py
@@ -956,7 +956,7 @@ def plot_rmf(self, file_name = None, save_name = None, dpi = 300):

# Read the MATRIX extension
matrix_ext = self.rmf['MATRIX']
- #print(repr(matrix_hdu.header[:60]))
+ #logger.info(repr(matrix_hdu.header[:60]))
energy_low = matrix_ext.data["ENERG_LO"] # energy bin lower edges for measured energies
energy_high = matrix_ext.data["ENERG_HI"] # energy bin higher edges for measured energies
data = matrix_ext.data
@@ -988,13 +988,13 @@ def plot_rmf(self, file_name = None, save_name = None, dpi = 300):
energy_all_edges = np.append(energy_low,energy_high[-1])
#bin_edges = np.array([incident_energy_bins,incident_energy_bins]) # doesn't work
bin_edges = np.vstack((energy_all_edges, energy_all_edges))
- #print(bin_edges)
+ #logger.info(bin_edges)

self.probability = []
for i in np.arange(10):
for j in np.arange(10):
self.probability.append(rmf_matrix[i][j])
- #print(type(probability))
+ #logger.info(type(probability))

plt.hist2d(x=x_center_coords,y=y_center_coords,weights=self.probability,bins=bin_edges, norm=LogNorm())
plt.xscale('log')
10 changes: 6 additions & 4 deletions cosipy/ts_map/TSMap.py
@@ -12,6 +12,8 @@

import astropy.io.fits as fits

+ import logging
+ logger = logging.getLogger(__name__)

class TSMap:

@@ -175,8 +177,8 @@ def ts_grid_data(self):
for j in range(self.log_like.axes['dec'].nbins):

# progress
- print(f"\rra = {i:2d}/{self.log_like.axes['ra'].nbins} ", end = "")
- print(f"dec = {j:2d}/{self.log_like.axes['dec'].nbins} ", end = "")
+ logger.info(f"\rra = {i:2d}/{self.log_like.axes['ra'].nbins} ", end = "")
+ logger.info(f"dec = {j:2d}/{self.log_like.axes['dec'].nbins} ", end = "")

# changing the position parameters
# converting rad to deg due to ra and dec in 3ML PointSource
@@ -237,10 +239,10 @@ def print_best_fit(self):
else:
self.best_ra = (self.ts.axes['ra'].centers[self.argmax[0]]) * (180/np.pi) # deg
self.best_dec = self.ts.axes['dec'].centers[self.argmax[1]] * (180/np.pi) # deg
- print(f"Best fit position: RA = {self.best_ra} deg, Dec = {self.best_dec} deg")
+ logger.info(f"Best fit position: RA = {self.best_ra} deg, Dec = {self.best_dec} deg")

# convert to significance based on Wilk's theorem
- print(f"Expected significance: {stats.norm.isf(stats.chi2.sf(self.ts_max, df = 2)):.1f} sigma")
+ logger.info(f"Expected significance: {stats.norm.isf(stats.chi2.sf(self.ts_max, df = 2)):.1f} sigma")

def save_ts(self, output_file_name):

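One caveat worth noting for the progress lines above: unlike print, Logger.info() has no end= keyword, and every call is emitted as its own record, so the carriage-return style overwrite does not carry over to logging. A hedged sketch of how the ra/dec progress message could be expressed as a single log record per grid step (report_progress and its parameters are illustrative only, not part of this commit):

    import logging

    logger = logging.getLogger(__name__)

    def report_progress(i, n_ra, j, n_dec):
        # Illustrative: one record per (ra, dec) grid step; no end="" or "\r"
        # tricks are needed, the two print() calls collapse into one message
        logger.info("ra = %2d/%d  dec = %2d/%d", i, n_ra, j, n_dec)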
19 changes: 11 additions & 8 deletions cosipy/ts_map/fast_ts_fit.py
@@ -19,6 +19,9 @@
import gc
import matplotlib.pyplot as plt

+ import logging
+ logger = logging.getLogger(__name__)

class FastTSMap():

def __init__(self, data, bkg_model, response_path, orientation = None, cds_frame = "local", scheme = "RING"):
@@ -236,22 +239,22 @@ def get_ei_cds_array(hypothesis_coord, energy_channel, response_path, spectrum,
quiet = True)
#time_coord_convert_end = time.time()
#time_coord_convert_used = time_coord_convert_end - time_coord_convert_start
- #print(f"The time used for coordinate conversion is {time_coord_convert_used}s.")
+ #logger.info(f"The time used for coordinate conversion is {time_coord_convert_used}s.")

#time_dwell_start = time.time()
# get the dwell time map: the map of the time spent on each pixel in the local frame
dwell_time_map = orientation.get_dwell_map(response = response_path)
#time_dwell_end = time.time()
#time_dwell_used = time_dwell_end - time_dwell_start
- #print(f"The time used for dwell time map is {time_dwell_used}s.")
+ #logger.info(f"The time used for dwell time map is {time_dwell_used}s.")

#time_psr_start = time.time()
# convolve the response with the dwell_time_map to get the point source response
with FullDetectorResponse.open(response_path) as response:
psr = response.get_point_source_response(dwell_time_map)
#time_psr_end = time.time()
#time_psr_used = time_psr_end - time_psr_start
- #print(f"The time used for psr is {time_psr_used}s.")
+ #logger.info(f"The time used for psr is {time_psr_used}s.")

elif cds_frame == "galactic":

@@ -272,7 +275,7 @@

#time_cds_end = time.time()
#time_cds_used = time_cds_end - time_cds_start
- #print(f"The time used for cds is {time_cds_used}s.")
+ #logger.info(f"The time used for cds is {time_cds_used}s.")

return ei_cds_array

@@ -391,10 +394,10 @@ def parallel_ts_fit(self, hypothesis_coords, energy_channel, spectrum, ts_scheme
# if you don't specify the number of cpu cores to use or the specified number of cpu cores is the same as the total number of cores you have
# it will use the [total_cores - 1] number of cores to run the parallel computation.
cores = total_cores - 1
- print(f"You have total {total_cores} CPU cores, using {cores} CPU cores for parallel computation.")
+ logger.info(f"You have total {total_cores} CPU cores, using {cores} CPU cores for parallel computation.")
else:
cores = cpu_cores
- print(f"You have total {total_cores} CPU cores, using {cores} CPU cores for parallel computation.")
+ logger.info(f"You have total {total_cores} CPU cores, using {cores} CPU cores for parallel computation.")

start = time.time()
multiprocessing.set_start_method(start_method, force = True)
@@ -410,7 +413,7 @@ def parallel_ts_fit(self, hypothesis_coords, energy_channel, spectrum, ts_scheme

elapsed_seconds = end - start
elapsed_minutes = elapsed_seconds/60
- print(f"The time used for the parallel TS map computation is {elapsed_minutes} minutes")
+ logger.info(f"The time used for the parallel TS map computation is {elapsed_minutes} minutes")

results = np.array(results) # turn to a numpy array
results = results[results[:, 0].argsort()] # arrange the order by the pixel numbering
@@ -536,6 +539,6 @@ def show_memory_info(hint):

info = p.memory_full_info()
memory = info.uss / 1024. / 1024
- print('{} memory used: {} MB'.format(hint, memory))
+ logger.info('{} memory used: {} MB'.format(hint, memory))
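The converted calls keep their original str.format and f-string formatting. logging also supports deferred %-style formatting, where interpolation happens only if the record is actually emitted; a small illustrative alternative for the call above (not what this commit does, placeholder values only):

    import logging

    logger = logging.getLogger(__name__)

    # Deferred %-style formatting: the arguments are interpolated by the
    # logging framework only if the record is emitted, instead of building
    # the string eagerly with str.format
    hint, memory = "initial", 42.0  # placeholder values for illustration
    logger.info('%s memory used: %s MB', hint, memory)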

