From aa805928b8a1af5f7d8131ac4e084f53a209e047 Mon Sep 17 00:00:00 2001
From: Krishna Teja Vedula
Date: Thu, 16 May 2024 18:28:22 -0400
Subject: [PATCH] Change print() to logging.info() #48

- Replaced print statements with logging in various files to improve
  logging consistency, clarity, and debugging capabilities.
- Updated logging statements in the following files:
  - ./data_io/UnBinnedData.py
  - ./data_io/ReadTraTest.py
  - ./data_io/BinnedData.py
  - ./threeml/COSILike.py
  - ./spacecraftfile/SpacecraftFile.py
---
 cosipy/data_io/BinnedData.py            | 24 ++++++-------
 cosipy/data_io/ReadTraTest.py           | 11 +++---
 cosipy/data_io/UnBinnedData.py          | 45 ++++++++++++-------------
 cosipy/spacecraftfile/SpacecraftFile.py | 11 +++---
 cosipy/threeml/COSILike.py              | 14 ++++----
 5 files changed, 51 insertions(+), 54 deletions(-)
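Note for reviewers (this text sits between the "---" marker and the first
"diff --git" line, so it is not part of the commit message): because the
messages below now go through the standard logging module instead of
print(), they only appear once the calling application configures logging.
A minimal sketch of what a user script or notebook might do, assuming the
module-level logger = logging.getLogger(__name__) pattern used throughout
this patch (the per-module logger name below is illustrative, derived from
the module path):

    import logging

    # By default only WARNING and above are shown, so the logger.info()
    # calls introduced in this patch would otherwise stay silent.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(name)s %(levelname)s: %(message)s",
    )

    # Optional per-module control, e.g. quiet the unbinned reader only:
    logging.getLogger("cosipy.data_io.UnBinnedData").setLevel(logging.WARNING)
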
""" - # Make print statement: - print("getting raw lightcurve...") + # Log message: + logger.info("getting raw lightcurve...") # Option to read in binned data from hdf5 file: if binned_data: diff --git a/cosipy/data_io/ReadTraTest.py b/cosipy/data_io/ReadTraTest.py index 681049e1..9a030a8c 100644 --- a/cosipy/data_io/ReadTraTest.py +++ b/cosipy/data_io/ReadTraTest.py @@ -4,6 +4,8 @@ import numpy as np import sys import pandas as pd +import logging +logger = logging.getLogger(__name__) try: # Load MEGAlib into ROOT @@ -61,15 +63,14 @@ def read_tra_old(self,make_plots=True): # tra file to use: tra_file = self.data_file - # Make print statement: - print() - print("Read tra test...") - print() + # Log message: + logger.info("Read tra test...") + # Check if file exists: Reader = M.MFileEventsTra() if Reader.Open(M.MString(tra_file)) == False: - print("Unable to open file %s. Aborting!" %self.data_file) + logger.error("Unable to open file %s. Aborting!" %self.data_file) sys.exit() # Initialise empty lists: diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index 1a32ed46..1a0f29c0 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -23,6 +23,7 @@ import time logger = logging.getLogger(__name__) + class UnBinnedData(DataIO): """Handles unbinned data.""" @@ -128,7 +129,7 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False, if run_test == True: c_E0 = 510.999 - print("Preparing to read file...") + logger.info("Preparing to read file...") # Open .tra.gz file: if self.data_file.endswith(".gz"): @@ -143,8 +144,8 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False, # If fast method fails, use long method, which should work in all cases. except: - print("Initial attempt failed.") - print("Using long method...") + logger.info("Initial attempt failed.") + logger.info("Using long method...") g = gzip.open(self.data_file,"rt") num_lines = sum(1 for line in g) g.close() @@ -159,21 +160,19 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False, num_lines = float(proc.communicate()[0]) except: - print("Initial attempt failed.") - print("Using long method...") + logger.info("Initial attempt failed.") + logger.info("Using long method...") g = open(self.data_file,"rt") num_lines = sum(1 for line in g) g.close() else: - print() - print("ERROR: Input data file must have '.tra' or '.gz' extenstion.") - print() + logger.error("ERROR: Input data file must have '.tra' or '.gz' extenstion.") sys.exit() # Read tra file line by line: - print("Reading file...") + logger.info("Reading file...") N_events = 0 # number of events pbar = tqdm(total=num_lines) # start progress bar for line in f: @@ -196,7 +195,7 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False, if event_max != None: if N_events >= event_max: pbar.close() - print("Stopping here: only reading a subset of events") + logger.info("Stopping here: only reading a subset of events") break # Total photon energy and Compton angle: @@ -258,14 +257,14 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False, # Close progress bar: pbar.close() - print("Making COSI data set...") - print("total events to procecss: " + str(len(erg))) + logger.info("Making COSI data set...") + logger.info("total events to procecss: " + str(len(erg))) # Clear unused memory: gc.collect() # Initialize arrays: - print("Initializing arrays...") + logger.info("Initializing arrays...") erg = np.array(erg) phi = np.array(phi) tt = np.array(tt) @@ -281,8 +280,8 @@ 
@@ -281,8 +280,8 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,
         # Check if the input data has pointing information,
         # if not, get it from the spacecraft file:
         if (use_ori == False) & (len(lonZ)==0):
-            print("WARNING: No pointing information in input data.")
-            print("Getting pointing information from spacecraft file.")
+            logger.warning("WARNING: No pointing information in input data.")
+            logger.warning("Getting pointing information from spacecraft file.")
             use_ori = True
 
         # Option to get X and Z pointing information from orientation file:
@@ -346,7 +345,7 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,
             self.chi_gal_test = chi_gal_rad - np.pi
 
         # Make observation dictionary
-        print("Making dictionary...")
+        logger.info("Making dictionary...")
         cosi_dataset = {'Energies':erg,
                         'TimeTags':tt,
                         'Xpointings (glon,glat)':np.array([lonX,latX]).T,
@@ -362,13 +361,13 @@ def read_tra(self, output_name=None, run_test=False, use_ori=False,
 
         # Option to write unbinned data to file (either fits or hdf5):
         if output_name != None:
-            print("Saving file...")
+            logger.info("Saving file...")
             self.write_unbinned_output(output_name)
 
         # Get processing time:
         end_time = time.time()
         processing_time = end_time - start_time
-        print("total processing time [s]: " + str(processing_time))
+        logger.info("total processing time [s]: " + str(processing_time))
 
         return
 
@@ -592,7 +591,7 @@ def select_data(self, output_name=None, unbinned_data=None):
             Only cuts in time are allowed for now.
         """
 
-        print("Making data selections...")
+        logger.info("Making data selections...")
 
         # Option to read in unbinned data file:
         if unbinned_data:
@@ -612,7 +611,7 @@ def select_data(self, output_name=None, unbinned_data=None):
 
         # Write unbinned data to file (either fits or hdf5):
        if output_name != None:
-            print("Saving file...")
+            logger.info("Saving file...")
             self.write_unbinned_output(output_name)
 
         return
@@ -633,9 +632,7 @@ def combine_unbinned_data(self, input_files, output_name=None):
 
         counter = 0
         for each in input_files:
-            print()
-            print("adding %s..." %each)
-            print()
+            logger.info("adding %s..." %each)
 
             # Read dict from hdf5 or fits:
             if self.unbinned_output == 'hdf5':
@@ -661,4 +658,4 @@ def combine_unbinned_data(self, input_files, output_name=None):
 
         if output_name != None:
             self.write_unbinned_output(output_name)
-        return
+        return
\ No newline at end of file
diff --git a/cosipy/spacecraftfile/SpacecraftFile.py b/cosipy/spacecraftfile/SpacecraftFile.py
index 10c37bec..6ca325e1 100644
--- a/cosipy/spacecraftfile/SpacecraftFile.py
+++ b/cosipy/spacecraftfile/SpacecraftFile.py
@@ -14,6 +14,9 @@
 
 from .scatt_map import SpacecraftAttitudeMap
 
+import logging
+logger = logging.getLogger(__name__)
+
 class SpacecraftFile():
 
     def __init__(self, time, x_pointings = None, y_pointings = None, z_pointings = None, attitude = None,
@@ -337,7 +340,7 @@ def get_target_in_sc_frame(self, target_name, target_coord, attitude = None, qui
         self.target_name = target_name
 
         if quiet == False:
-            print("Now converting to the Spacecraft frame...")
+            logger.info("Now converting to the Spacecraft frame...")
 
         self.src_path_cartesian = SkyCoord(np.dot(self.attitude.rot.inv().as_matrix(), target_coord.cartesian.xyz.value),
                                            representation_type = 'cartesian', frame = SpacecraftFrame())
@@ -348,7 +351,7 @@ def get_target_in_sc_frame(self, target_name, target_coord, attitude = None, qui
                                                          self.src_path_cartesian.y,
                                                          self.src_path_cartesian.z)
         if quiet == False:
-            print(f"Conversion completed!")
+            logger.info(f"Conversion completed!")
 
         # generate the numpy array of l and b to save to a npy file
         l = np.array(self.src_path_spherical[2].deg) # note that 0 is Quanty, 1 is latitude and 2 is longitude and they are in rad not deg
@@ -543,12 +546,12 @@ def get_psr_rsp(self, response = None, dwell_map = None, dts = None):
         self.Em_hi = np.float32(self.Em_edges[1:])
 
         # get the effective area and matrix
-        print("Getting the effective area ...")
+        logger.info("Getting the effective area ...")
         self.areas = np.float32(np.array(self.psr.project('Ei').to_dense().contents))/self.dts.to_value(u.second).sum()
         spectral_response = np.float32(np.array(self.psr.project(['Ei','Em']).to_dense().contents))
 
         self.matrix = np.float32(np.zeros((self.Ei_lo.size,self.Em_lo.size))) # initate the matrix
-        print("Getting the energy redistribution matrix ...")
+        logger.info("Getting the energy redistribution matrix ...")
         for i in np.arange(self.Ei_lo.size):
             new_raw = spectral_response[i,:]/spectral_response[i,:].sum()
             self.matrix[i,:] = new_raw
diff --git a/cosipy/threeml/COSILike.py b/cosipy/threeml/COSILike.py
index 639de5df..fb013c2a 100644
--- a/cosipy/threeml/COSILike.py
+++ b/cosipy/threeml/COSILike.py
@@ -114,11 +114,11 @@ def __init__(self, name, dr, data, bkg, sc_orientation,
         # consistent way for point srcs and extended srcs.
         self.precomputed_psr_file = precomputed_psr_file
         if self.precomputed_psr_file != None:
-            print("... loading the pre-computed image response ...")
+            logger.info("... loading the pre-computed image response ...")
             self.image_response = DetectorResponse.open(self.precomputed_psr_file)
             # in the near future, we will implement ExtendedSourceResponse class, which should be used here (HY).
             # probably, it is better to move this loading part outside of this class. Then, we don't have to load the response everytime we start the fitting (HY).
-            print("--> done")
+            logger.info("--> done")
 
     def set_model(self, model):
         """
@@ -195,7 +195,7 @@ def set_model(self, model):
 
         if self._psr is None or len(point_sources) != len(self._psr):
-            print("... Calculating point source responses ...")
+            logger.info("... Calculating point source responses ...")
 
             self._psr = {}
             self._source_location = {} # Should the poition information be in the point source response? (HY)
 
@@ -214,15 +214,15 @@ def set_model(self, model):
                 else:
                     raise RuntimeError("Unknown coordinate system")
 
-                print(f"--> done (source name : {name})")
+                logger.info(f"--> done (source name : {name})")
 
-            print(f"--> all done")
+            logger.info(f"--> all done")
 
         # check if the source location is updated or not
         for name, source in point_sources.items():
 
             if source.position.sky_coord != self._source_location[name]:
-                print(f"... Re-calculating the point source response of {name} ...")
+                logger.info(f"... Re-calculating the point source response of {name} ...")
 
                 coord = source.position.sky_coord
                 self._source_location[name] = copy.deepcopy(coord) # to avoid same memory issue
@@ -236,7 +236,7 @@ def set_model(self, model):
                 else:
                     raise RuntimeError("Unknown coordinate system")
 
-                print(f"--> done (source name : {name})")
+                logger.info(f"--> done (source name : {name})")
 
         # Get expectation for point sources:
         for name,source in point_sources.items():
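
A possible follow-up (not part of this patch): with the messages routed
through logging, tests can assert on them with pytest's built-in caplog
fixture instead of capturing stdout. A rough sketch, in which the import
path, the YAML file, and the data file are placeholders:

    import logging

    def test_get_binned_data_logs_progress(caplog):
        from cosipy import BinnedData  # assumed import path; adjust to the actual package layout

        analysis = BinnedData("inputs.yaml")  # placeholder configuration file
        with caplog.at_level(logging.INFO):
            # get_binned_data() now emits "binning data..." via logger.info()
            analysis.get_binned_data(unbinned_data="unbinned_data.hdf5")  # placeholder data file
        assert "binning data..." in caplog.text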