Skip to content

Commit

Permalink
Merge pull request #178 from krishnatejavedula/feature/logging
Browse files Browse the repository at this point in the history
Change print() to logging.info() #48
  • Loading branch information
ckarwin authored May 23, 2024
2 parents 7c660b5 + aa80592 commit 16c23c2
Show file tree
Hide file tree
Showing 5 changed files with 51 additions and 54 deletions.
24 changes: 10 additions & 14 deletions cosipy/data_io/BinnedData.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,8 @@ def get_binned_data(self, unbinned_data=None, output_name=None, \
but it does not explicitly return it.
"""

# Make print statement:
print("binning data...")
# Log message:
logger.info("binning data...")

# Option to read in unbinned data file:
if unbinned_data:
Expand All @@ -72,18 +72,14 @@ def get_binned_data(self, unbinned_data=None, output_name=None, \
num_bins = round(delta_t / self.time_bins)
new_bin_size = delta_t / num_bins
if self.time_bins != new_bin_size:
print()
print("Note: time bins must be equally spaced between min and max time.")
print("Using time bin size [s]: " + str(new_bin_size))
print()
logger.info("Note: time bins must be equally spaced between min and max time.")
logger.info("Using time bin size [s]: " + str(new_bin_size))
time_bin_edges = np.linspace(min_time,max_time,num_bins+1)

if type(self.time_bins).__name__ == 'list':
# Check that bins correspond to min and max time:
if (self.time_bins[0] > min_time) | (self.time_bins[-1] < max_time):
print()
print("ERROR: Time bins do not cover the full selected data range!")
print()
logger.error("ERROR: Time bins do not cover the full selected data range!")
sys.exit()
time_bin_edges = np.array(self.time_bins)

Expand Down Expand Up @@ -262,7 +258,7 @@ def plot_psichi_map(self):
Plot psichi healpix map.
"""

print("plotting psichi in Galactic coordinates...")
logger.info("plotting psichi in Galactic coordinates...")
plot, ax = self.binned_data.project('PsiChi').plot(ax_kw = {'coord':'G'})
ax.get_figure().set_figwidth(4)
ax.get_figure().set_figheight(3)
Expand Down Expand Up @@ -339,8 +335,8 @@ def get_raw_spectrum(self, binned_data=None, time_rate=False, output_name=None):
If True, calculates ct/keV/s. The default is ct/keV.
"""

# Make print statement:
print("getting raw spectrum...")
# Log message:
logger.info("getting raw spectrum...")

# Option to read in binned data from hdf5 file:
if binned_data:
Expand Down Expand Up @@ -384,8 +380,8 @@ def get_raw_lightcurve(self, binned_data=None, output_name=None):
Prefix of output files. Writes both pdf and dat file.
"""

# Make print statement:
print("getting raw lightcurve...")
# Log message:
logger.info("getting raw lightcurve...")

# Option to read in binned data from hdf5 file:
if binned_data:
Expand Down
11 changes: 6 additions & 5 deletions cosipy/data_io/ReadTraTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
import numpy as np
import sys
import pandas as pd
import logging
logger = logging.getLogger(__name__)

try:
# Load MEGAlib into ROOT
Expand Down Expand Up @@ -61,15 +63,14 @@ def read_tra_old(self,make_plots=True):
# tra file to use:
tra_file = self.data_file

# Make print statement:
print()
print("Read tra test...")
print()
# Log message:
logger.info("Read tra test...")


# Check if file exists:
Reader = M.MFileEventsTra()
if Reader.Open(M.MString(tra_file)) == False:
print("Unable to open file %s. Aborting!" %self.data_file)
logger.error("Unable to open file %s. Aborting!" %self.data_file)
sys.exit()

# Initialise empty lists:
Expand Down
45 changes: 21 additions & 24 deletions cosipy/data_io/UnBinnedData.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import time
logger = logging.getLogger(__name__)


class UnBinnedData(DataIO):
"""Handles unbinned data."""

Expand Down Expand Up @@ -128,7 +129,7 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,
if run_test == True:
c_E0 = 510.999

print("Preparing to read file...")
logger.info("Preparing to read file...")

# Open .tra.gz file:
if self.data_file.endswith(".gz"):
Expand All @@ -143,8 +144,8 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,

# If fast method fails, use long method, which should work in all cases.
except:
print("Initial attempt failed.")
print("Using long method...")
logger.info("Initial attempt failed.")
logger.info("Using long method...")
g = gzip.open(self.data_file,"rt")
num_lines = sum(1 for line in g)
g.close()
Expand All @@ -159,21 +160,19 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,
num_lines = float(proc.communicate()[0])

except:
print("Initial attempt failed.")
print("Using long method...")
logger.info("Initial attempt failed.")
logger.info("Using long method...")
g = open(self.data_file,"rt")
num_lines = sum(1 for line in g)
g.close()

else:
print()
print("ERROR: Input data file must have '.tra' or '.gz' extenstion.")
print()
logger.error("ERROR: Input data file must have '.tra' or '.gz' extenstion.")
sys.exit()


# Read tra file line by line:
print("Reading file...")
logger.info("Reading file...")
N_events = 0 # number of events
pbar = tqdm(total=num_lines) # start progress bar
for line in f:
Expand All @@ -196,7 +195,7 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,
if event_max != None:
if N_events >= event_max:
pbar.close()
print("Stopping here: only reading a subset of events")
logger.info("Stopping here: only reading a subset of events")
break

# Total photon energy and Compton angle:
Expand Down Expand Up @@ -258,14 +257,14 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,

# Close progress bar:
pbar.close()
print("Making COSI data set...")
print("total events to procecss: " + str(len(erg)))
logger.info("Making COSI data set...")
logger.info("total events to procecss: " + str(len(erg)))

# Clear unused memory:
gc.collect()

# Initialize arrays:
print("Initializing arrays...")
logger.info("Initializing arrays...")
erg = np.array(erg)
phi = np.array(phi)
tt = np.array(tt)
Expand All @@ -281,8 +280,8 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,
# Check if the input data has pointing information,
# if not, get it from the spacecraft file:
if (use_ori == False) & (len(lonZ)==0):
print("WARNING: No pointing information in input data.")
print("Getting pointing information from spacecraft file.")
logger.warning("WARNING: No pointing information in input data.")
logger.warning("Getting pointing information from spacecraft file.")
use_ori = True

# Option to get X and Z pointing information from orientation file:
Expand Down Expand Up @@ -346,7 +345,7 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,
self.chi_gal_test = chi_gal_rad - np.pi

# Make observation dictionary
print("Making dictionary...")
logger.info("Making dictionary...")
cosi_dataset = {'Energies':erg,
'TimeTags':tt,
'Xpointings (glon,glat)':np.array([lonX,latX]).T,
Expand All @@ -362,13 +361,13 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,

# Option to write unbinned data to file (either fits or hdf5):
if output_name != None:
print("Saving file...")
logger.info("Saving file...")
self.write_unbinned_output(output_name)

# Get processing time:
end_time = time.time()
processing_time = end_time - start_time
print("total processing time [s]: " + str(processing_time))
logger.info("total processing time [s]: " + str(processing_time))

return

Expand Down Expand Up @@ -592,7 +591,7 @@ def select_data(self, output_name=None, unbinned_data=None):
Only cuts in time are allowed for now.
"""

print("Making data selections...")
logger.info("Making data selections...")

# Option to read in unbinned data file:
if unbinned_data:
Expand All @@ -612,7 +611,7 @@ def select_data(self, output_name=None, unbinned_data=None):

# Write unbinned data to file (either fits or hdf5):
if output_name != None:
print("Saving file...")
logger.info("Saving file...")
self.write_unbinned_output(output_name)

return
Expand All @@ -633,9 +632,7 @@ def combine_unbinned_data(self, input_files, output_name=None):
counter = 0
for each in input_files:

print()
print("adding %s..." %each)
print()
logger.info("adding %s..." % each)

# Read dict from hdf5 or fits:
if self.unbinned_output == 'hdf5':
Expand All @@ -661,4 +658,4 @@ def combine_unbinned_data(self, input_files, output_name=None):
if output_name != None:
self.write_unbinned_output(output_name)

return
return
11 changes: 7 additions & 4 deletions cosipy/spacecraftfile/SpacecraftFile.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@

from .scatt_map import SpacecraftAttitudeMap

import logging
logger = logging.getLogger(__name__)

class SpacecraftFile():

def __init__(self, time, x_pointings = None, y_pointings = None, z_pointings = None, attitude = None,
Expand Down Expand Up @@ -337,7 +340,7 @@ def get_target_in_sc_frame(self, target_name, target_coord, attitude = None, qui

self.target_name = target_name
if quiet == False:
print("Now converting to the Spacecraft frame...")
logger.info("Now converting to the Spacecraft frame...")
self.src_path_cartesian = SkyCoord(np.dot(self.attitude.rot.inv().as_matrix(), target_coord.cartesian.xyz.value),
representation_type = 'cartesian',
frame = SpacecraftFrame())
Expand All @@ -348,7 +351,7 @@ def get_target_in_sc_frame(self, target_name, target_coord, attitude = None, qui
self.src_path_cartesian.y,
self.src_path_cartesian.z)
if quiet == False:
print(f"Conversion completed!")
logger.info(f"Conversion completed!")

# generate the numpy array of l and b to save to a npy file
l = np.array(self.src_path_spherical[2].deg) # note that index 0 is the quantity, 1 is latitude, and 2 is longitude, and they are in rad not deg
Expand Down Expand Up @@ -543,12 +546,12 @@ def get_psr_rsp(self, response = None, dwell_map = None, dts = None):
self.Em_hi = np.float32(self.Em_edges[1:])

# get the effective area and matrix
print("Getting the effective area ...")
logger.info("Getting the effective area ...")
self.areas = np.float32(np.array(self.psr.project('Ei').to_dense().contents))/self.dts.to_value(u.second).sum()
spectral_response = np.float32(np.array(self.psr.project(['Ei','Em']).to_dense().contents))
self.matrix = np.float32(np.zeros((self.Ei_lo.size,self.Em_lo.size))) # initialize the matrix

print("Getting the energy redistribution matrix ...")
logger.info("Getting the energy redistribution matrix ...")
for i in np.arange(self.Ei_lo.size):
new_raw = spectral_response[i,:]/spectral_response[i,:].sum()
self.matrix[i,:] = new_raw
Expand Down
14 changes: 7 additions & 7 deletions cosipy/threeml/COSILike.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,11 +114,11 @@ def __init__(self, name, dr, data, bkg, sc_orientation,
# consistent way for point srcs and extended srcs.
self.precomputed_psr_file = precomputed_psr_file
if self.precomputed_psr_file != None:
print("... loading the pre-computed image response ...")
logger.info("... loading the pre-computed image response ...")
self.image_response = DetectorResponse.open(self.precomputed_psr_file)
# in the near future, we will implement ExtendedSourceResponse class, which should be used here (HY).
# probably, it is better to move this loading part outside of this class. Then, we don't have to load the response every time we start the fitting (HY).
print("--> done")
logger.info("--> done")

def set_model(self, model):
"""
Expand Down Expand Up @@ -195,7 +195,7 @@ def set_model(self, model):

if self._psr is None or len(point_sources) != len(self._psr):

print("... Calculating point source responses ...")
logger.info("... Calculating point source responses ...")

self._psr = {}
self._source_location = {} # Should the position information be in the point source response? (HY)
Expand All @@ -214,15 +214,15 @@ def set_model(self, model):
else:
raise RuntimeError("Unknown coordinate system")

print(f"--> done (source name : {name})")
logger.info(f"--> done (source name : {name})")

print(f"--> all done")
logger.info(f"--> all done")

# check if the source location is updated or not
for name, source in point_sources.items():

if source.position.sky_coord != self._source_location[name]:
print(f"... Re-calculating the point source response of {name} ...")
logger.info(f"... Re-calculating the point source response of {name} ...")
coord = source.position.sky_coord

self._source_location[name] = copy.deepcopy(coord) # to avoid same memory issue
Expand All @@ -236,7 +236,7 @@ def set_model(self, model):
else:
raise RuntimeError("Unknown coordinate system")

print(f"--> done (source name : {name})")
logger.info(f"--> done (source name : {name})")

# Get expectation for point sources:
for name,source in point_sources.items():
Expand Down

0 comments on commit 16c23c2

Please sign in to comment.