diff --git a/NuRadioReco/detector/RNO_G/db_mongo_read.py b/NuRadioReco/detector/RNO_G/db_mongo_read.py index 792198159..907c77496 100644 --- a/NuRadioReco/detector/RNO_G/db_mongo_read.py +++ b/NuRadioReco/detector/RNO_G/db_mongo_read.py @@ -1,3 +1,18 @@ +""" +Interface to the MongoDB that contains RNO-G hardware and calibration information + +The :mod:`NuRadioReco.detector.RNO_G.db_mongo_read` module and the `Database` class herein mostly serve as the +backend of the `NuRadioReco.detector.RNO_G.rnog_detector.Detector` class. Most users +will want to use that class to obtain information about deployed RNO-G stations and hardware. +`NuRadioReco.detector.RNO_G.rnog_detector.Detector` class has an interface similar to that of +other detector descriptions in NuRadioMC, and is documented there. + +However, for some specific use cases (e.g. finding measurements for individual hardware components +that have not been deployed to the field), one can use the `Database` class directly, using the +`Database.get_component_data` method. + +""" + import six import os import urllib.parse @@ -720,7 +735,38 @@ def get_channel_signal_chain_measurement(self, station_id=None, channel_id=None, def get_component_data(self, component_type, component_id, supplementary_info, primary_time, verbose=True, sparameter='S21'): - """ returns the current primary measurement of the component, reads in the component collection""" + """ + returns the current primary measurement of the component, reads in the component collection + + Returns a single measurement (e.g. gain of an IGLU) + + Examples + -------- + + .. code-block:: + + import NuRadioReco.detector.RNO_G.db_mongo_read + import datetime + + db = NuRadioReco.detector.RNO_G.db_mongo_read.Database() + + # gives you the entry in the database + database_entry = db.get_component_data( + component_type='iglu_board', + component_id='C0069', + supplementary_info={}, # if you want a DRAB you have to specify the channel: {'channel_id':0} + verbose=True, + sparameter='S21', # you can also read the other S parameters + primary_time=datetime.datetime.now()) + + + # extract the gain + phase data + y_axis_units = database_entry['y-axis_units'] + frequencies = database_entry['frequencies'] + gain_data = database_entry['mag'] + phase_data = database_entry['phase'] + + """ # define a search filter search_filter = [{'$match': {'name': component_id}}, {'$unwind': '$measurements'}, {'$match': {}}] diff --git a/NuRadioReco/framework/base_station.py b/NuRadioReco/framework/base_station.py index 6bb95bfe3..cf3de9c81 100644 --- a/NuRadioReco/framework/base_station.py +++ b/NuRadioReco/framework/base_station.py @@ -101,7 +101,11 @@ def set_station_time(self, time, format=None): def get_station_time(self, format='isot'): """ - Returns a astropy.time.Time object + Returns the station time as an astropy.time.Time object + + The station time corresponds to the absolute time at which the event + starts, i.e. all times in Channel, Trigger and ElectricField objects + are measured relative to this time. 
        Parameters
        ----------
@@ -112,7 +116,7 @@ def get_station_time(self, format='isot'):

        Returns
        -------
-        _station_time: astropy.time.Time
+        station_time: astropy.time.Time
        """
        if self._station_time is None:
            return None
diff --git a/NuRadioReco/framework/trigger.py b/NuRadioReco/framework/trigger.py
index fb5b801fa..741ea1059 100644
--- a/NuRadioReco/framework/trigger.py
+++ b/NuRadioReco/framework/trigger.py
@@ -103,7 +103,19 @@ def set_trigger_time(self, time):

    def get_trigger_time(self):
        """
-        get the trigger time (absolute time with respect to the beginning of the event)
+        Get the trigger time.
+
+        Returns the trigger time, i.e. the first time in the event at which the trigger condition was fulfilled.
+        This is defined relative to the `station_time `.
+
+        Returns
+        -------
+        trigger_time : float
+            The trigger time
+
+        See Also
+        --------
+        get_trigger_times : function to return all times where the trigger condition was fulfilled
        """
        return self._trigger_time
@@ -119,7 +131,22 @@ def set_trigger_times(self, times):

    def get_trigger_times(self):
        """
-        get the trigger times (time with respect to beginning of trace)
+        Get the trigger times
+
+        For some triggers, in addition to the time of the first trigger,
+        all subsequent times at which the trigger condition was fulfilled are also
+        stored. For triggers that do not store this information, this method
+        is equivalent to `get_trigger_time` with the exception that it returns
+        an array (of shape (1,)) instead of a scalar.
+
+        Returns
+        -------
+        trigger_times : np.ndarray
+            Array of all times where the trigger condition was satisfied
+
+        See Also
+        --------
+        get_trigger_time : method to return the (first) trigger time
        """
        if self._trigger_times is None and not np.isnan(self._trigger_time):
            return np.array(self._trigger_time)
@@ -231,6 +258,12 @@ def __init__(self, name, threshold, channels=None, number_of_coincidences=1,
            default: 1
        channel_coincidence_window: float or None (default)
            the coincidence time between triggers of different channels
+        pre_trigger_times: float or dict of floats
+            the time before the trigger time that should be read out
+            if a dict is given, the keys are the channel_ids, and the value is the pre_trigger_time between the
+            start of the trace and the trigger time.
+            if only a float is given, the same pre_trigger_time is used for all channels
+
        """
        Trigger.__init__(self, name, channels, 'simple_threshold', pre_trigger_times=pre_trigger_times)
        self._threshold = threshold
@@ -269,6 +302,12 @@ def __init__(self, name, threshold_factor, power_mean, power_std,
        output_passband: (float, float) tuple
            Frequencies for a 6th-order Butterworth filter to be applied after
            the diode filtering.
+        pre_trigger_times: float or dict of floats
+            the time before the trigger time that should be read out
+            if a dict is given, the keys are the channel_ids, and the value is the pre_trigger_time between the
+            start of the trace and the trigger time.
+ if only a float is given, the same pre_trigger_time is used for all channels + """ Trigger.__init__(self, name, triggered_channels, 'envelope_phased', pre_trigger_times=pre_trigger_times) self._triggered_channels = triggered_channels @@ -318,6 +357,12 @@ def __init__(self, name, threshold, channels=None, secondary_channels=None, the size of the stride between calculating the phasing (units of ADC time ticks) maximum_amps: list of floats (length equal to that of `phasing_angles`) the maximum value of all the integration windows for each of the phased waveforms + pre_trigger_times: float or dict of floats + the time before the trigger time that should be read out + if a dict is given, the keys are the channel_ids, and the value is the pre_trigger_time between the + start of the trace and the trigger time. + if only a float is given, the same pre_trigger_time is used for all channels + """ Trigger.__init__(self, name, channels, 'simple_phased', pre_trigger_times=pre_trigger_times) self._primary_channels = channels @@ -357,6 +402,12 @@ def __init__(self, name, threshold_high, threshold_low, high_low_window, number_of_coincidences: int the number of channels that need to fulfill the trigger condition default: 1 + pre_trigger_times: float or dict of floats + the time before the trigger time that should be read out + if a dict is given, the keys are the channel_ids, and the value is the pre_trigger_time between the + start of the trace and the trigger time. + if only a float is given, the same pre_trigger_time is used for all channels + """ Trigger.__init__(self, name, channels, 'high_low', pre_trigger_times=pre_trigger_times) self._number_of_coincidences = number_of_coincidences @@ -387,6 +438,12 @@ def __init__(self, name, threshold, channel_coincidence_window, channels=None, n number_of_coincidences: int the number of channels that need to fulfill the trigger condition default: 1 + pre_trigger_times: float or dict of floats + the time before the trigger time that should be read out + if a dict is given, the keys are the channel_ids, and the value is the pre_trigger_time between the + start of the trace and the trigger time. + if only a float is given, the same pre_trigger_time is used for all channels + """ Trigger.__init__(self, name, channels, 'int_power', pre_trigger_times=pre_trigger_times) self._number_of_coincidences = number_of_coincidences @@ -422,6 +479,12 @@ def __init__(self, name, passband, order, threshold, number_of_coincidences=2, default: 1 channel_coincidence_window: float or None (default) the coincidence time between triggers of different channels + pre_trigger_times: float or dict of floats + the time before the trigger time that should be read out + if a dict is given, the keys are the channel_ids, and the value is the pre_trigger_time between the + start of the trace and the trigger time. 
+ if only a float is given, the same pre_trigger_time is used for all channels + """ Trigger.__init__(self, name, channels, 'envelope_trigger', pre_trigger_times=pre_trigger_times) self._passband = passband @@ -433,7 +496,8 @@ def __init__(self, name, passband, order, threshold, number_of_coincidences=2, class RNOGSurfaceTrigger(Trigger): from NuRadioReco.utilities import units def __init__(self, name, threshold, number_of_coincidences=1, - channel_coincidence_window=60*units.ns, channels=[13, 16, 19], temperature=250*units.kelvin, Vbias=2*units.volt, pre_trigger_times=55 * units.ns): + channel_coincidence_window=60*units.ns, channels=[13, 16, 19], + temperature=250*units.kelvin, Vbias=2*units.volt, pre_trigger_times=55 * units.ns): """ initialize trigger class @@ -455,6 +519,12 @@ def __init__(self, name, threshold, number_of_coincidences=1, temperature of the trigger board Vbias: float bias voltage on the trigger board + pre_trigger_times: float or dict of floats + the time before the trigger time that should be read out + if a dict is given, the keys are the channel_ids, and the value is the pre_trigger_time between the + start of the trace and the trigger time. + if only a float is given, the same pre_trigger_time is used for all channels + """ Trigger.__init__(self, name, channels, 'rnog_surface_trigger', pre_trigger_times=pre_trigger_times) self._threshold = threshold diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 65710d7b1..890c3b715 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -213,8 +213,11 @@ def __init__(self, run_table_path=None, load_run_table=True, log_level=logging.N self.logger.warn("No connect to RunTable database could be established. " "Runs can not be filtered.") except ImportError: - self.logger.warn("Import of run table failed. Runs can not be filtered.! \n" - "You can get the interface from GitHub: git@github.com:RNO-G/rnog-runtable.git") + self.logger.warn( + "import run_table failed. You can still use readRNOGData, but runs can not be filtered. " + "To install the run table, run\n\n" + "\tpip install git+ssh://git@github.com/RNO-G/rnog-runtable.git\n" + ) else: # some users may mistakenly try to pass the .root files to __init__ # we check for this and raise a (hopefully) helpful error message diff --git a/documentation/make_docs.py b/documentation/make_docs.py index 92be243e7..d6aa254b5 100644 --- a/documentation/make_docs.py +++ b/documentation/make_docs.py @@ -105,7 +105,7 @@ logger.info("excluding modules: {}".format(exclude_modules)) subprocess.run( [ - 'sphinx-apidoc', '-efMT', '--ext-autodoc', '--ext-intersphinx', + 'sphinx-apidoc', '-efMT', '-d', '1', '--ext-autodoc', '--ext-intersphinx', '--ext-coverage', '--ext-githubpages', '-o', output_folder, module_path, *exclude_modules ], stdout=pipe_stdout diff --git a/documentation/source/Introduction/pages/contributing.rst b/documentation/source/Introduction/pages/contributing.rst index 4c8bfc4a9..dfc522f9e 100644 --- a/documentation/source/Introduction/pages/contributing.rst +++ b/documentation/source/Introduction/pages/contributing.rst @@ -174,17 +174,17 @@ Dependencies are also maintained in ``pyproject.toml``. To update the dependenci under ``[tool.poetry.dependencies]``. Acceptable version specifications are ``"4.1.1"`` (4.1.1 only), ``">=4.1.1"`` (4.1.1 or greater), or ``"*"`` (any version). 
Please do not use poetry-specific version specifiers like ``^`` or ``~``. -* If you are adding an **optional** dependency, add your dependency under ``[tool.poetry.dev-dependencies]``. +* If you are adding an **optional** dependency, you can specify this by adding ``optional=true``. Additionally, please name the feature that requires this dependency, and add it under ``[tool.poetry.extras]``. E.g. in order to generate the documentation, we require ``Sphinx``, ``sphinx-rtd-theme`` and ``numpydoc`` to be installed. This is specified in ``pyproject.toml`` as follows: .. code-block:: - [tool.poetry.dev-dependencies] - Sphinx = "*" - sphinx-rtd-theme = "*" - numpydoc = "*" + [tool.poetry.dependencies] + Sphinx = {version = "*", optional = true} + sphinx-rtd-theme = {version = "*", optional = true} + numpydoc = {version = "*", optional = true} [tool.poetry.extras] documentation = ["Sphinx", "sphinx-rtd-theme", "numpydoc"] diff --git a/documentation/source/Introduction/pages/installation.rst b/documentation/source/Introduction/pages/installation.rst index 7356788e5..59da8a71a 100644 --- a/documentation/source/Introduction/pages/installation.rst +++ b/documentation/source/Introduction/pages/installation.rst @@ -16,7 +16,8 @@ using ``pip``: pip install NuRadioMC NuRadioMC/NuRadioReco will then be available from Python using ``import NuRadioMC`` and ``import NuRadioReco``, respectively. -The pip installation will also install all core dependencies. +The pip installation will install all core dependencies. Some :ref:`optional dependencies ` +can be installed by appending ``[option]``, i.e. ``pip install NuRadioMC[option]``. .. Important:: @@ -32,7 +33,7 @@ The pip installation will also install all core dependencies. instead, or install it manually (see below). Development version ---------------------------- +------------------- The most recent version of ``NuRadioMC`` is available on `github `__. It can be downloaded manually from the `repository website `__, or cloned using ``git`` @@ -80,7 +81,7 @@ To install all (optional and non-optional) dependencies available in pip at once .. code-block:: Bash - pip install numpy scipy matplotlib astropy tinydb tinydb-serialization aenum h5py mysql-python pymongo dash plotly toml peakutils + pip install numpy scipy matplotlib astropy tinydb tinydb-serialization aenum h5py mysql-connector-python pymongo dash plotly toml peakutils future radiotools filelock mattak pygdsm MCEq crflux Note that some optional dependencies are not pip-installable and need to be :ref:`installed manually ` @@ -136,30 +137,17 @@ Core Dependencies pip install aenum -Optional Dependencies -^^^^^^^^^^^^^^^^^^^^^ - -These packages are recommended to be able to use all of NuRadioMC/NuRadioReco's features: - - h5py to open HDF5 files: -.. code-block:: Bash - - pip install h5py - -- uproot to open RNO-G root files: - -.. code-block:: bash - - pip install uproot awkward + .. code-block:: Bash -- To access some detector databases: + pip install h5py -- For SQL datbases install `MySQL `_ and mysql-python: +- filelock: .. code-block:: Bash - pip install mysql-python + pip install filelock - For `MongoDB `_ databases install: @@ -174,24 +162,33 @@ These packages are recommended to be able to use all of NuRadioMC/NuRadioReco's pip install dash pip install plotly - If you want templates to show up in the Event Display, you need to set up an environment variable NURADIORECOTEMPLATES and have it point to the template directory. 
+Optional Dependencies
+^^^^^^^^^^^^^^^^^^^^^
-
-- The documentation is created using `Sphinx `_. We use the ``readthedocs`` theme, and the ``numpydoc`` format is used in our docstrings.
-  This dependency is needed only if you want to generate the documentation locally - the `online documentation `_ is generated by a Github action automatically.
-  Note that we use the `sphinx autodoc `_
-  feature, which tries to import all modules it documents. So if you are missing some optional dependencies, it will not generate correct documentation for all the code.
+These packages are recommended to be able to use all of NuRadioMC/NuRadioReco's features.
+They can be installed by adding ``[option]`` when installing NuRadioMC. Alternatively,
+use ``pip install nuradiomc[all]`` to install all optional dependencies.
-  .. code-block:: Bash
+- ``[RNO-G]``
-    pip install sphinx sphinx_rtd_theme numpydoc
+  `mattak `__ is required to open RNO-G root files:
-- Some debug plots need peakutils:
+  .. code-block:: bash
-  .. code-block:: Bash
+    pip install mattak
-    pip install peakutils
+- ``[rno-g-extras]``
-- Proposal to use :mod:`NuRadioMC.EvtGen.NuRadioProposal` module:
+  Optionally, to filter RNO-G data (during read in) the `RNO-G run table database <https://github.com/RNO-G/rnog-runtable>`__
+  can be used. Note that this requires membership of the RNO-G Github organisation (not public):
+
+  .. code-block:: bash
+
+    pip install git+ssh://git@github.com/RNO-G/rnog-runtable.git
+
+- ``[proposal]``
+
+  ``proposal`` is needed to use the :mod:`NuRadioMC.EvtGen.NuRadioProposal` module (simulating secondary particles):

 .. code-block:: bash

@@ -203,13 +200,46 @@ These packages are recommended to be able to use all of NuRadioMC/NuRadioReco's
   - if the linux kernel is too old (eg. on some computing clusters), refer to `this step-by-step guide `_

+- ``[galacticnoise]``
-- To use the channelGalacticNoiseAdder, you need the `PyGDSM `_ package.
+  To use the channelGalacticNoiseAdder, you need the `PyGDSM `_ package.

 .. code-block:: Bash

    pip install git+https://github.com/telegraphic/pygdsm

+- ``[muon-flux]``
+
+  Needed for some muon flux calculations
+
+  .. code-block:: bash
+
+    pip install MCEq crflux
+
+- ``[documentation]``
+
+  The documentation is created using `Sphinx `_. We use the ``readthedocs`` theme, and the ``numpydoc`` format is used in our docstrings.
+  This dependency is needed only if you want to generate the documentation locally - the `online documentation `_ is generated by a Github action automatically.
+  Note that we use the `sphinx autodoc `_
+  feature, which tries to import all modules it documents. So if you are missing some optional dependencies, it will not generate correct documentation for all the code.
+
+  .. code-block:: Bash
+
+    pip install sphinx sphinx_rtd_theme numpydoc
+
+- Some debug plots need peakutils:
+
+  .. code-block:: Bash
+
+    pip install peakutils
+
+- For SQL databases install `MySQL `_ and ``mysql-connector-python``:
+
+  .. code-block:: Bash
+
+    pip install mysql-connector-python
+
+
 Not pip-installable packages
 ____________________________
diff --git a/documentation/source/NuRadioMC/pages/HDF5_structure.rst b/documentation/source/NuRadioMC/pages/HDF5_structure.rst
index 479bb2ae3..25a3840e8 100644
--- a/documentation/source/NuRadioMC/pages/HDF5_structure.rst
+++ b/documentation/source/NuRadioMC/pages/HDF5_structure.rst
@@ -55,28 +55,29 @@ The top-level attributes can be accessed using ``f.attrs``. These contain:

 .. _hdf5-attrs-table:
 ..
csv-table:: HDF5 attributes - :header: "Key", "Description" - :widths: auto - :delim: | - - ``NuRadioMC_EvtGen_version`` ``NuRadioMC_EvtGen_version_hash`` ``NuRadioMC_version`` ``NuRadioMC_version_hash`` | Versions of the generator/framework as integer as hash - ``Emin`` ``Emax`` | Define energy range for neutrino energies - ``phimax`` ``phimin`` | Define azimuth range for incoming neutrino directions - ``thetamax`` ``thetamin`` | Define zenith range for incoming neutrino directions - ``flavors`` | A list of particle flavors that were simulated, using the PDG convention. - ``n_events`` | Total number of generated/simulated events(including those that did not trigger) - ``fiducial_xmax`` ``fiducial_xmin`` ``fiducial_ymax`` ``fiducial_ymin`` ``fiducial_zmax`` ``fiducial_zmin`` / ``fiducial_rmax`` ``fiducial_rmin`` ``fiducial_zmax`` ``fiducial_zmin`` | Specify the simulated qubic/cylindrical fiducial volume. An event has to produce an interaction within this volume. However, in case of a muon or tau CC interaction the first interaction can occur outside - ``rmax`` ``rmin`` ``zmax`` ``zmin`` / ``xmax`` ``xmin`` ``ymax`` ``ymin`` ``zmax`` ``zmin`` | Specify the qubic/cylindrical volume in which neutrino interactions are generated - ``volume`` | Volume of the above specified volume - ``area`` | Surface area of the above specified volume - ``start_event_id`` | ``event_id`` of the first event in the file - ``trigger_names`` | List of the names of the different triggers simulated - ``Tnoise`` | (explicit) noise temperature used in simulation - ``n_samples`` | Samples of the to-be generated antenna signals - ``config`` | The (yaml-style) config file used for the simulation - ``deposited`` | - ``detector`` | The (json-format) detector description used for the simulation - ``dt`` | The time resolution, i.e. the inverse of the sampling rate used for the simulation. This is not necessarily the same as the sampling rate of the simulated channels! + :header-rows: 1 + :widths: auto + :delim: | + + Key | Description + ``NuRadioMC_EvtGen_version`` ``NuRadioMC_EvtGen_version_hash`` ``NuRadioMC_version`` ``NuRadioMC_version_hash`` | Versions of the generator/framework as integer as hash + ``Emin`` ``Emax`` | Define energy range for neutrino energies + ``phimax`` ``phimin`` | Define azimuth range for incoming neutrino directions + ``thetamax`` ``thetamin`` | Define zenith range for incoming neutrino directions + ``flavors`` | A list of particle flavors that were simulated, using the PDG convention. + ``n_events`` | Total number of generated/simulated events(including those that did not trigger) + ``fiducial_xmax`` ``fiducial_xmin`` ``fiducial_ymax`` ``fiducial_ymin`` ``fiducial_zmax`` ``fiducial_zmin`` / ``fiducial_rmax`` ``fiducial_rmin`` ``fiducial_zmax`` ``fiducial_zmin`` | Specify the simulated qubic/cylindrical fiducial volume. An event has to produce an interaction within this volume. 
However, in case of a muon or tau CC interaction the first interaction can occur outside + ``rmax`` ``rmin`` ``zmax`` ``zmin`` / ``xmax`` ``xmin`` ``ymax`` ``ymin`` ``zmax`` ``zmin`` | Specify the qubic/cylindrical volume in which neutrino interactions are generated + ``volume`` | Volume of the above specified volume + ``area`` | Surface area of the above specified volume + ``start_event_id`` | ``event_id`` of the first event in the file + ``trigger_names`` | List of the names of the different triggers simulated + ``Tnoise`` | (explicit) noise temperature used in simulation + ``n_samples`` | Samples of the to-be generated antenna signals + ``config`` | The (yaml-style) config file used for the simulation + ``deposited`` | + ``detector`` | The (json-format) detector description used for the simulation + ``dt`` | The time resolution, i.e. the inverse of the sampling rate used for the simulation. This is not necessarily the same as the sampling rate of the simulated channels! The station-level attributes can be accessed using ``f[station_].attrs``. The first two attributes ``Vrms`` and ``bandwidth`` also exist on the top-level and refer to the corresponding to the first station/channel pair. @@ -84,13 +85,14 @@ The station-level attributes can be accessed using ``f[station_].att .. _hdf5-station-attrs-table: .. csv-table:: HDF5 station attributes - :header: "Key", "Description" - :widths: auto - :delim: | + :header-rows: 1 + :widths: auto + :delim: | - ``Vrms`` | RMS of the voltage used as thermal noise floor :math:`v_{n} = (k_{B} \, R \, T \, \Delta f) ^ {0.5}`. See the relevant section "Noise voltage and power" in this `wiki article `_ (last two equations). Determine from ``Tnoise`` and ``bandwidth`` (see below). - ``bandwidth`` | Bandwidth is above equation. Calculated as the integral over the simulated filter response (`filt`) squared: :math:`\Delta f = np.trapz(np.abs(filt) ** 2, ff)`. - ``antenna_positions`` | Relative position of all simulated antennas (channels) + Key | Description + ``Vrms`` | RMS of the voltage used as thermal noise floor :math:`v_{n} = (k_{B} \, R \, T \, \Delta f) ^ {0.5}`. See the relevant section "Noise voltage and power" in this `wiki article `_ (last two equations). Determine from ``Tnoise`` and ``bandwidth`` (see below). + ``bandwidth`` | Bandwidth is above equation. Calculated as the integral over the simulated filter response (`filt`) squared: :math:`\Delta f = np.trapz(np.abs(filt) ** 2, ff)`. + ``antenna_positions`` | Relative position of all simulated antennas (channels) HDF5 file contents __________________ @@ -100,27 +102,28 @@ is the number of showers (which may be larger than the number of events), and `` .. _hdf5-items-table: .. csv-table:: HDF5 items - :header: "Key", "Shape", "Description" - :widths: auto - :delim: | - - ``event_group_ids`` | (``n_showers``) | Specifies the event id to which the corresponding shower belongs (``n_events = len(unique(event_group_ids)))``) - ``xx`` ``yy`` ``zz`` | (``n_showers``) | Specifying coordinates of interaction vertices - ``vertex_times`` | (``n_showers``) | Time at the interaction vertex. The neutrino interaction (= first interaction) is defined as time 0 - ``azimuths`` ``zeniths`` | (``n_showers``) | Angle Specifying the neutrino incoming direction (``azimuths = 0`` points east) - ``energies`` | (``n_showers``) | Energy of the parent particle of a shower. 
This is typically the energy of the neutrino (for showers produced at the first interaction: all flavor NC, electron CC interactions) or the energy of a muon or tau lepton when those are producing secondary energy losses - ``shower_energies`` | (``n_showers``) | Energy of the shower which is used to determine the radio emission - ``flavors`` | (``n_showers``) | Same as above (the parent of an electromagnetic cascade in an electron CC interaction is the neutrino) - ``inelasticity`` | (``n_showers``) | Inelasticity of the first interaction - ``interaction_type`` | (``n_showers``) | Interaction type producing the shower (for the first interaction that can be "nc" or "cc") - ``multiple_triggers`` | (``n_showers``, ``n_triggers``) | Information which exact trigger fired each shower. The different triggers are specified in the attributes (``f.attrs["triggers"]``). The order of ``f.attrs["triggers"]`` matches that in ``multiple_triggers`` - ``triggered`` | (``n_showers``) | A boolean; ``True`` if any trigger fired for this shower, ``False`` otherwise - ``trigger_times`` | (``n_showers``, ``n_triggers``) | The trigger times (relative to the first interaction) at which each shower triggered. If there are multiple stations, this will be the earliest trigger time. - ``n_interaction`` | (``n_showers``) | Hierarchical counter for the number of showers per event (also accounts for showers which did not trigger and might not be saved) - ``shower_ids`` | (``n_showers``) | Hierarchical counter for the number of triggered showers - ``shower_realization_ARZ`` | (``n_showers``) | Which realization from the ARZ shower library was used for each shower (only if ARZ was used for signal generation). - ``shower_type`` | (``n_showers``) | Type of the shower (so far we only have "em" and "had") - ``weights`` | (``n_showers``) | Weight for the probability that the neutrino reached the interaction vertex taking into account the attenuation from the earth (Does not include interaction probability in the volume) + :header-rows: 1 + :widths: auto + :delim: | + + Key | Shape | Description + ``event_group_ids`` | (``n_showers``) | Specifies the event id to which the corresponding shower belongs (``n_events = len(unique(event_group_ids)))``) + ``xx`` ``yy`` ``zz`` | (``n_showers``) | Specifying coordinates of interaction vertices + ``vertex_times`` | (``n_showers``) | Time at the interaction vertex. The neutrino interaction (= first interaction) is defined as time 0 + ``azimuths`` ``zeniths`` | (``n_showers``) | Angle Specifying the neutrino incoming direction (``azimuths = 0`` points east) + ``energies`` | (``n_showers``) | Energy of the parent particle of a shower. This is typically the energy of the neutrino (for showers produced at the first interaction: all flavor NC, electron CC interactions) or the energy of a muon or tau lepton when those are producing secondary energy losses + ``shower_energies`` | (``n_showers``) | Energy of the shower which is used to determine the radio emission + ``flavors`` | (``n_showers``) | Same as above (the parent of an electromagnetic cascade in an electron CC interaction is the neutrino) + ``inelasticity`` | (``n_showers``) | Inelasticity of the first interaction + ``interaction_type`` | (``n_showers``) | Interaction type producing the shower (for the first interaction that can be "nc" or "cc") + ``multiple_triggers`` | (``n_showers``, ``n_triggers``) | Information which exact trigger fired each shower. The different triggers are specified in the attributes (``f.attrs["triggers"]``). 
The order of ``f.attrs["triggers"]`` matches that in ``multiple_triggers`` + ``triggered`` | (``n_showers``) | A boolean; ``True`` if any trigger fired for this shower, ``False`` otherwise + ``trigger_times`` | (``n_showers``, ``n_triggers``) | The trigger times (relative to the first interaction) at which each shower triggered. If there are multiple stations, this will be the earliest trigger time. + ``n_interaction`` | (``n_showers``) | Hierarchical counter for the number of showers per event (also accounts for showers which did not trigger and might not be saved) + ``shower_ids`` | (``n_showers``) | Hierarchical counter for the number of triggered showers + ``shower_realization_ARZ`` | (``n_showers``) | Which realization from the ARZ shower library was used for each shower (only if ARZ was used for signal generation). + ``shower_type`` | (``n_showers``) | Type of the shower (so far we only have "em" and "had") + ``weights`` | (``n_showers``) | Weight for the probability that the neutrino reached the interaction vertex taking into account the attenuation from the earth (Does not include interaction probability in the volume) Station data @@ -139,33 +142,34 @@ station triggered, with which amplitude, etc. The same approach works for ``show .. _hdf5-station-table: .. csv-table:: HDF5 station items - :header: "Key", "Shape", "Description" - :widths: auto - :delim: | - - ``event_group_ids`` | (``m_events``) | The event group ids of the triggered events in the selected station - ``event_group_id_per_shower`` | (``m_showers``) | The event group id of every shower that triggered the selected station - ``event_ids`` | (``m_events``) | The event ids of each event that triggered in that station for every event group id. These are unique only within each separate event group, and start from 0. - ``event_id_per_shower`` | (``m_showers``) | The event ids of each event that triggered in that station. This one is for every shower - ``shower_id`` | (``m_showers``) | The Shower ids of showers that triggered the selected station - ``max_amp_shower_and_ray`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | Maximum amplitude per shower, channel and ray tracing solution. - ``maximum_amplitudes`` | (``m_events``, ``n_channels``) | Maximum amplitude per event and channel - ``maximum_amplitudes_envelope`` | (``m_events``, ``n_channels``) | Maximum amplitude of the hilbert envelope for each event and channel - ``multiple_triggers`` | (``m_showers``, ``n_triggers``) | A boolean array that specifies if a shower contributed to an event that fulfills a certain trigger. The index of the trigger can be translated to the trigger name via the attribute ``trigger_names``. - ``multiple_triggers_per_event`` | (``m_events``, ``n_triggers``) | A boolean array that specifies if each event fulfilled a certain trigger. The index of the trigger can be translated to the trigger name via the attribute ``trigger_names``. - ``polarization`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``, ``3``) | 3D coordinates of the polarization vector at the antenna in cartesian coordinates. (The receive vector (which is opposite to the propagation direction) was used to rotate from spherical/on-sky coordinates to cartesian coordinates). The polarization vector does not include any propagation effects that could change the polarization, such as different reflectivities at the surface for the p and s polarization component. 
- ``ray_tracing_C0`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | One of two parameters specifying the **analytic** ray tracing solution. Can be used to retrieve the solutions without having to re-run the ray tracer. - ``ray_tracing_C1`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | One of two parameters specifying the **analytic** ray tracing solution. Can be used to retrieve the solutions without having to re-run the ray tracer. - ``ray_tracing_reflection`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The number of bottom reflections (This variable is only non-zero if a reflection layer was defined in the ice model and if 'propagation.n_reflections' was set to a value larger than 0 in the config.yaml file.) - ``ray_tracing_reflection_case`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | Only relevant for bottom reflections. 1: rays start upwards, 2: rays start downwards - ``ray_tracing_solution_type`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The type of the ray tracing solution. 0: direct, 1: refracted, 2: reflected (off the surface) (A refracted ray is defined as a ray that has a turning point, i.e. if it transitions from upward going to downward going; a reflected ray is defined if it has a surface reflection.) - ``focusing_factor`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The focusing factor calculated by the propagation module. - ``launch_vectors`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``, ``3``) | 3D (Cartesian) coordinates of the launch vector of each ray tracing solution, per shower and channel. - ``receive_vectors`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``, ``3``) | 3D (Cartesian) coordinates of the receive vector of each ray tracing solution, per shower and channel. - ``time_shower_and_ray`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The "signal time" per shower and raytracing solution. I.e., the time of the signal arriving at the DAQ including, e.g., cable delay, ... - ``travel_distances`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The distance travelled by each ray tracing solution to a specific channel - ``travel_times`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The time travelled by each ray tracing solution to a specific channel - ``triggered`` | (``m_showers``) | Whether each shower contributed to an event that satisfied any trigger condition - ``triggered_per_event`` | (``m_events``) | Whether each event fulfilled any trigger condition. - ``trigger_times`` | (``m_showers``, ``n_triggers``) | The trigger times for each shower and trigger. IMPORTANT: A shower can potentially generate multiple events. Then this field is ambiguous, as only a single trigger time per shower can be saved. In that case, the latest trigger time is saved into this field. - ``trigger_times_per_event`` | (``m_events``, ``n_triggers``) | The trigger times per event. + :header-rows: 1 + :widths: auto + :delim: | + + Key | Shape | Description + ``event_group_ids`` | (``m_events``) | The event group ids of the triggered events in the selected station + ``event_group_id_per_shower`` | (``m_showers``) | The event group id of every shower that triggered the selected station + ``event_ids`` | (``m_events``) | The event ids of each event that triggered in that station for every event group id. These are unique only within each separate event group, and start from 0. 
+ ``event_id_per_shower`` | (``m_showers``) | The event ids of each event that triggered in that station. This one is for every shower + ``shower_id`` | (``m_showers``) | The Shower ids of showers that triggered the selected station + ``max_amp_shower_and_ray`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | Maximum amplitude per shower, channel and ray tracing solution. + ``maximum_amplitudes`` | (``m_events``, ``n_channels``) | Maximum amplitude per event and channel + ``maximum_amplitudes_envelope`` | (``m_events``, ``n_channels``) | Maximum amplitude of the hilbert envelope for each event and channel + ``multiple_triggers`` | (``m_showers``, ``n_triggers``) | A boolean array that specifies if a shower contributed to an event that fulfills a certain trigger. The index of the trigger can be translated to the trigger name via the attribute ``trigger_names``. + ``multiple_triggers_per_event`` | (``m_events``, ``n_triggers``) | A boolean array that specifies if each event fulfilled a certain trigger. The index of the trigger can be translated to the trigger name via the attribute ``trigger_names``. + ``polarization`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``, ``3``) | 3D coordinates of the polarization vector at the antenna in cartesian coordinates. (The receive vector (which is opposite to the propagation direction) was used to rotate from spherical/on-sky coordinates to cartesian coordinates). The polarization vector does not include any propagation effects that could change the polarization, such as different reflectivities at the surface for the p and s polarization component. + ``ray_tracing_C0`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | One of two parameters specifying the **analytic** ray tracing solution. Can be used to retrieve the solutions without having to re-run the ray tracer. + ``ray_tracing_C1`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | One of two parameters specifying the **analytic** ray tracing solution. Can be used to retrieve the solutions without having to re-run the ray tracer. + ``ray_tracing_reflection`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The number of bottom reflections (This variable is only non-zero if a reflection layer was defined in the ice model and if 'propagation.n_reflections' was set to a value larger than 0 in the config.yaml file.) + ``ray_tracing_reflection_case`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | Only relevant for bottom reflections. 1: rays start upwards, 2: rays start downwards + ``ray_tracing_solution_type`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The type of the ray tracing solution. 0: direct, 1: refracted, 2: reflected (off the surface) (A refracted ray is defined as a ray that has a turning point, i.e. if it transitions from upward going to downward going; a reflected ray is defined if it has a surface reflection.) + ``focusing_factor`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The focusing factor calculated by the propagation module. + ``launch_vectors`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``, ``3``) | 3D (Cartesian) coordinates of the launch vector of each ray tracing solution, per shower and channel. + ``receive_vectors`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``, ``3``) | 3D (Cartesian) coordinates of the receive vector of each ray tracing solution, per shower and channel. 
+ ``time_shower_and_ray`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The "signal time" per shower and raytracing solution. I.e., the time of the signal arriving at the DAQ including, e.g., cable delay, ... + ``travel_distances`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The distance travelled by each ray tracing solution to a specific channel + ``travel_times`` | (``m_showers``, ``n_channels``, ``n_ray_tracing_solutions``) | The time travelled by each ray tracing solution to a specific channel + ``triggered`` | (``m_showers``) | Whether each shower contributed to an event that satisfied any trigger condition + ``triggered_per_event`` | (``m_events``) | Whether each event fulfilled any trigger condition. + ``trigger_times`` | (``m_showers``, ``n_triggers``) | The trigger times for each shower and trigger. IMPORTANT: A shower can potentially generate multiple events. Then this field is ambiguous, as only a single trigger time per shower can be saved. In that case, the latest trigger time is saved into this field. + ``trigger_times_per_event`` | (``m_events``, ``n_triggers``) | The trigger times per event. diff --git a/documentation/source/NuRadioMC/pages/code_documentation.rst b/documentation/source/NuRadioMC/pages/code_documentation.rst index da394bddb..67293042f 100644 --- a/documentation/source/NuRadioMC/pages/code_documentation.rst +++ b/documentation/source/NuRadioMC/pages/code_documentation.rst @@ -1,9 +1,9 @@ Code Documentation -=============================== +================== .. toctree:: - :maxdepth: 2 + :maxdepth: 1 ../apidoc/NuRadioMC.EvtGen ../apidoc/NuRadioMC.SignalGen diff --git a/documentation/source/NuRadioReco/pages/code_documentation.rst b/documentation/source/NuRadioReco/pages/code_documentation.rst index d7dc776b6..63640b113 100644 --- a/documentation/source/NuRadioReco/pages/code_documentation.rst +++ b/documentation/source/NuRadioReco/pages/code_documentation.rst @@ -1,8 +1,7 @@ Code Documentation -=============================== - +================== .. toctree:: - :maxdepth: 2 + :maxdepth: 1 ../apidoc/NuRadioReco.framework ../apidoc/NuRadioReco.modules diff --git a/documentation/source/NuRadioReco/pages/event_display.rst b/documentation/source/NuRadioReco/pages/event_display.rst index b7cce7c74..07f51e9ba 100644 --- a/documentation/source/NuRadioReco/pages/event_display.rst +++ b/documentation/source/NuRadioReco/pages/event_display.rst @@ -21,3 +21,6 @@ file you want to see. Alternatively, you can use the file ``NuRadioViewer`` in the ``eventbrowser`` directory. Add the directory to your system ``$PATH`` and you can open the eventbrowser by typing ``NuRadioViewer filename.nur``. + +If you want templates to show up in the Event Display, +you need to set up an environment variable ``NURADIORECOTEMPLATES`` and have it point to the template directory. diff --git a/documentation/source/NuRadioReco/pages/times.rst b/documentation/source/NuRadioReco/pages/times.rst index 348486ffb..ee77e031a 100644 --- a/documentation/source/NuRadioReco/pages/times.rst +++ b/documentation/source/NuRadioReco/pages/times.rst @@ -1,52 +1,95 @@ -overview of times +Overview of times ================= -Time delays are introduced by several hardware components. These time delays are often corrected for by folding/unfolding the complex transfer function (for an amp e.g. the measurement of the S12 parameter). The unfolding is typically done in the frequency domain where a convolution becomes a simple multiplication. 
As a consequence of typically short trace length (<~1000 samples) and because a Fourier transform implies implicitly a periodic signal, a pulse being at the beginning of the trace can end up being at the end of the trace. To avoid this behavior we use the following procedure:
+This page provides an overview of the different times defined in different places in NuRadioMC/NuRadioReco.
+Generally speaking, the global time of an `Event ` is stored as
+a :ref:`station or event time `. The times corresponding to the `voltage time trace `,
+`electric fields ` or `triggers `
+are then stored as floats relative to this global time inside each object. They can be obtained by the
+`get_times() ` method for trace-like objects (`ElectricField ` ,
+`Channel `), or by the `get_trigger_time `
+method for `Trigger ` objects.
-We smoothly filter the first 5% and last 5% of the trace using a Tukey window function. This is a function that goes smoothly from 0 to 1.
-To avoid rollover of the pulse, we add 128ns of zeros to the beginning and end of the trace. Steps 2) and 3) are performed by the channelStopFilter module
-Both electric fields and channels have a trace_start_time variable. The get_times() function will automatically add the trace_start_time to the time array.
+Time delays are introduced by several hardware components. These time delays are often accounted for by folding/unfolding the complex transfer function (for an amplifier e.g. the measurement of the S12 parameter).
+The unfolding is typically done in the frequency domain where a convolution becomes a simple multiplication.
+As a consequence of typically short trace length (<~1000 samples) and because a Fourier transform implies implicitly a periodic signal,
+a pulse being at the beginning of the trace can end up being at the end of the trace.
+This can be avoided by using the `NuRadioReco.modules.channelStopFilter` module, which appends zeros at either end of the trace
+and applies a Tukey window to taper the ends of the trace towards zero.
-ARIANNA specific details:
+.. Note::
+    For the **ARIANNA** experiment, the hardware produces an artifact (a glitch) at the STOP position (i.e. the physical beginning of the trace).
+    Because of the way the hardware works, the STOP position is not at the beginning of the trace but can be anywhere.
+    During read in of the snowshovel calibrated data files, the trace is rolled such that the physical beginning (the STOP position) is at sample zero of the trace.
+    This glitch is removed by the `channelStopFilter ` procedure described in the :ref:`module overview below ` .
+
+Station time (Event time)
 -------------------------
-Our hardware produces an artifact (a glitch) at the STOP position (i.e. the physical beginning of the trace). Because of the way the hardware works, the STOP position is not at the beginning of the trace but can be anywhere. During read in of the snowshovel calibrated data files, the trace is rolled such that the physical beginning (the STOP position) is at sample zero of the trace. This glitch is removed by the procedure described above.
+The global time at which the event takes place is stored as the `event time `
+in the `Event ` object.
+This time usually corresponds to the "vertex time" of the first interaction for simulations,
+and the time at which the data was recorded in the DAQ for data.
+It is stored as an `astropy.time.Time` object to enable sub-ns precision on the absolute time.
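+
+For illustration, the sketch below (the ``.nur`` file name is a placeholder, and a channel with id 0 is assumed to exist)
+shows how the station time and the times stored in channels, discussed in the following, can be accessed after reading
+an event back from disk:
+
+.. code-block:: Python
+
+    import astropy.time
+    import NuRadioReco.modules.io.eventReader
+    from NuRadioReco.utilities import units
+
+    event_reader = NuRadioReco.modules.io.eventReader.eventReader()
+    event_reader.begin('my_file.nur')  # placeholder file name
+
+    for event in event_reader.run():
+        for station in event.get_stations():
+            station_time = station.get_station_time()  # astropy.time.Time, the absolute reference
+
+            channel = station.get_channel(0)  # assumes a channel with id 0 exists
+            channel_times = channel.get_times()  # floats (internal units, i.e. ns), relative to station_time
+            trace_start_time = channel.get_trace_start_time()  # equal to channel_times[0]
+
+            # absolute time of the first sample, keeping sub-ns precision via astropy
+            t0 = station_time + astropy.time.TimeDelta(trace_start_time / units.s, format='sec')
+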
+In simulated data, the `event time ` +is generally the same as the `station_time ` stored +in the `Station ` object; however, because different stations may operate and trigger independently, +the station_times of different stations are not guaranteed to agree in data. +Times in `Channel `, `ElectricField ` and +`Trigger ` objects are all defined relative to the +`station_time ` of the Station they are stored in (see the description +of the :doc:`NuRadio data structure `). These times are stored as an array of floats. +For trace-like objects (`Channels ` and `ElectricField `), +the times can be obtained through the `get_times() ` method of these classes. +Additionally, the trace start time (the first value of `get_times() `) +is accessible as the `trace_start_time ` . -Station time ------------- -The trace_start_times are all given relative to the station_time of the station the E-field or channel belongs to. The station_time is stored in an astopy.time.Time object for sub nanosecond precision on absolute times. -The trace_start_time itself is stored as a float. For simulations, the trace_start_time is relative to the vertex time, i.e., the time of the particle interaction. -For data: TODO, describe how current RNO-G data is handled Trace start times in channels ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Specify when the trace starts relative to the station time. Effects that change the pulse time for all frequencies equally (for example cable delays) are most often taken into account by changing the trace_start_time. +Specify when the trace starts relative to the station time. Effects that change the pulse time for all frequencies equally (for example cable delays) are most often taken into account by changing the `trace_start_time `. Trace start times in E-fields ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Work similar to trace_start_time in channels, but with a caveat: Since EM-waves move, electric_field objects hold a position (relative to the station they are associated with). The E-field is therefore defined as the field an observer would measure at the given position. Note that this position does not necessarily have to coincide with the position of a channel the E-field is associated with. This is the case for (some) cosmic-ray simulations where the same E-field at the surface is used for all surface LPDAs. +Work similar to `trace_start_time ` in channels, but with a caveat: Since EM-waves move, electric_field objects hold a position (relative to the station they are associated with). The E-field is therefore defined as the field an observer would measure at the given position. Note that this position does not necessarily have to coincide with the position of a channel the E-field is associated with. This is the case for (some) cosmic-ray simulations where the same E-field at the surface is used for all surface LPDAs. + +Trigger times +^^^^^^^^^^^^^ +The `trigger_time `, +which is the time at which the trigger fired, is stored in the `Trigger ` +object (which can be obtained using `station.get_trigger() `). +This is the time at which the trigger condition was first fulfilled. +As for the trace_start_time, the trigger time is defined relative to the +`station_time ` . + + +Overview of modules that affect time +------------------------------------ +We list all relevant modules that are used for a MC simulation and reconstruction. For a pure data reconstruction, the first two modules are not used. 
-overview of modules that alter time -=================================== -We list all relevant modules that is used for a MC simulation and reconstruction. For a pure data reconstruction, the first few modules are just not used +* `NuRadioReco.modules.io.coreas`: CoREAS reader prepends n samples to the simulated trace. This is done so that the trace does not directly start with the pulse and to have a good frequency resolution. -* readCoREAS: CoREAS reader prepends n samples to the simulated trace. This is done so that the trace does not directly start with the pulse and to have a good frequency resolution. +* `NuRadioReco.modules.efieldToVoltageConverter`: the voltage traces are delayed compared to the electric field signal due to the geometric separation of the antennas and cable delays. This is accounted for by shifting the `trace_start_time `. -* efieldToVoltageConverter: the traces are rolled (rotated) according to the time delay due to the geometric separation of the antennas and cable delays. +* `NuRadioReco.modules.RNO_G.hardwareResponseIncorporator`, `NuRadioReco.modules.ARIANNA.hardwareResponseIncorporator`, `NuRadioReco.modules.ARA.hardwareResponseIncorporator`: + + If ``sim to data=True``: -* hardwareResponseIncorporator (sim to data): * the channel traces are folded with the amplifier response which also includes some time delay * note that the hardwareResponseIncorporator does not take cable delays into account, as this is done by the efieldToVoltageConverter -* triggerTimeAdjuster - * 'sim_to_data' mode: This modules cuts the trace to the correct length (as specified in the detector description) around the trigger time with a pre-trigger time as defined by the respective trigger module. In the case of multiple triggers it used the primary trigger. If no primary trigger is defined, it uses the trigger with the earliest trigger time. In the end, the trace_start_time is set to the trigger time. This is done because this reflects how raw experimental data looks like. - * 'data_to_sim' mode: The module determines the trigger that was used to cut the trace to its current length (the 'sim_to_data' step above in case of simulations) and adjusts the trace_start_time according to the different readout delays. The "primary trigger" defines the readout delays. After applying this module in the "data_to_sim" direction, the position in the trace that caused the trigger can be found via `trigger_time` - `trace_start_time`. - -* channelStopFilter: this module prepends and appends all channels with a fixed length (128ns by default). The 'prepend' time is subtracted from the station start time (because all channels get the same time delay) + If ``sim to data=False``: -* hardwareResponseIncorporator (data reconstruction): * unfolds amplifier -> also implies a time delay in the channel trace * cable delay is subtracted from the trace start time (due to the limited trace length, the trace is not rolled to account for cable delays) -* voltageToEfieldConverter: +* `NuRadioReco.modules.triggerTimeAdjuster` + * ``sim_to_data`` mode: This modules cuts the trace to the correct length (as specified in the detector description) around the trigger time with a pre-trigger time as defined by the respective trigger module. In the case of multiple triggers it used the primary trigger. If no primary trigger is defined, it uses the trigger with the earliest trigger time. In the end, the `trace_start_time ` is set to the trigger time. This is done because this reflects what raw experimental data looks like. 
+ * ``data_to_sim`` mode: The module determines the trigger that was used to cut the trace to its current length (the 'sim_to_data' step above in case of simulations) and adjusts the `trace_start_time ` according to the different readout delays. The "primary trigger" defines the readout delays. **After** applying this module in the "data_to_sim" direction, the position in the trace that caused the trigger can be found via the `trigger_time `. + +* `NuRadioReco.modules.channelStopFilter`: this module prepends and appends all channels with a fixed length (128ns by default). + The 'prepend' time is subtracted from the trace start time (because all channels get the same time delay). + It additionally applies a tukey window to taper off the start and end (by default, the first and last 5%) of the trace. + +* `NuRadioReco.modules.voltageToEfieldConverter`: * the traces from all used channels are cut to the overlapping region (including delays due to geometry and differences in delays due to different hardware components, e.g. cables of different length's) - * the E-field trace_start_time is set accordingly \ No newline at end of file + * the E-field `trace_start_time ` is set accordingly \ No newline at end of file diff --git a/install_dev.py b/install_dev.py index 820a5a73c..563646a29 100644 --- a/install_dev.py +++ b/install_dev.py @@ -160,6 +160,10 @@ def convert_poetry_to_pip(reqs): import toml toml_dict = toml.load(os.path.join(top_dir, 'pyproject.toml')) reqs = toml_dict['tool']['poetry']['dependencies'] + for req in list(reqs.keys()): # ignore optional requirements + if isinstance(reqs[req], dict): + if 'optional' in reqs[req]: + reqs.pop(req) reqs_pip = convert_poetry_to_pip(reqs) # install the requirements using pip @@ -179,10 +183,10 @@ def convert_poetry_to_pip(reqs): retcode |= subprocess.call([sys.executable, '-m', 'pip', 'install', 'toml'] + pip_install_as_user) # we need toml to read pyproject.toml import toml toml_dict = toml.load(os.path.join(top_dir, 'pyproject.toml')) - reqs = toml_dict['tool']['poetry']['dev-dependencies'] + reqs = toml_dict['tool']['poetry']['dependencies'] extras = toml_dict['tool']['poetry']['extras'] - header = "{:4s}|{:12s}|{:16s}|{}\n".format("id", "Install?", "extra", "modules") - str_format = "{:4s}|{:12s}|{:16s}|{}\n" + header = "{:4s}|{:10s}|{:16s}|{}\n".format("id", "Install?", "extra", "modules") + str_format = "{:4s}|{:10s}|{:16s}|{}\n" selected_for_install = [] header = str_format.format("id", "Install?", "feature", "modules") footer = ( diff --git a/pyproject.toml b/pyproject.toml index 107d2696c..99f1fa29e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,25 +42,24 @@ uproot = "*" importlib-metadata = {version = ">=4.8.1", python = "<3.8"} numba = "*" filelock = "*" - -[tool.poetry.dev-dependencies] -Sphinx = "*" -sphinx-rtd-theme = "*" -numpydoc = "*" -proposal = "7.6.2" -pygdsm = {git = "https://github.com/telegraphic/pygdsm"} -nifty5 = {git = "https://gitlab.mpcdf.mpg.de/ift/nifty.git", branch="NIFTy_5"} -pypocketfft = {git = "https://gitlab.mpcdf.mpg.de/mtr/pypocketfft"} -MCEq = "*" -crflux = "*" -pandas = "*" -mattak = {git = "https://github.com/RNO-G/mattak"} -runtable = {git = "ssh://git@github.com/RNO-G/rnog-runtable.git"} +Sphinx = {version = "*", optional = true} +sphinx-rtd-theme = {version = "*", optional = true} +numpydoc = {version = "*", optional = true} +proposal = {version = "7.6.2", optional = true} +pygdsm = {git = "https://github.com/telegraphic/pygdsm", optional = true} +nifty5 = {git = 
"https://gitlab.mpcdf.mpg.de/ift/nifty.git", branch="NIFTy_5", optional = true} +pypocketfft = {git = "https://gitlab.mpcdf.mpg.de/mtr/pypocketfft", optional = true} +MCEq = {version = "*", optional = true} +crflux = {version = "*", optional = true} +mattak = {git = "https://github.com/RNO-G/mattak", optional = true} +rnog-runtable = {git = "ssh://git@github.com/RNO-G/rnog-runtable.git", optional = true} [tool.poetry.extras] documentation = ["Sphinx", "sphinx-rtd-theme", "numpydoc"] proposal = ["proposal"] galacticnoise = ['pygdsm'] -ift_reco = ['nifty5', 'pypocketfft'] -muon_flux_calc = ['MCEq', 'crflux'] -RNO_G_DATA = ["mattak", "runtable", "pandas"] +ift-reco = ['nifty5', 'pypocketfft'] +muon-flux = ['MCEq', 'crflux'] +RNO-G = ["mattak"] +RNO-G-extras = ["rnog-runtable"] +ALL = ["Sphinx", "sphinx-rtd-theme", "numpydoc", "proposal", "pygdsm", "nifty5", "pypocketfft", "MCEq", "crflux", "mattak"]