Logging and BibTeX #32

Merged
merged 13 commits on Jan 28, 2024
6 changes: 3 additions & 3 deletions .zenodo.json
@@ -1,8 +1,8 @@
{
"description": "smooth inference for reinterpretation studies",
"license": "MIT",
"title": "SpeysideHEP/spey: v0.1.5",
"version": "v0.1.5",
"title": "SpeysideHEP/spey: v0.1.6",
"version": "v0.1.6",
"upload_type": "software",
"creators": [
{
@@ -29,7 +29,7 @@
},
{
"scheme": "url",
"identifier": "https://github.com/SpeysideHEP/spey/tree/v0.1.5",
"identifier": "https://github.com/SpeysideHEP/spey/tree/v0.1.6",
"relation": "isSupplementTo"
},
{
4 changes: 4 additions & 0 deletions docs/api.rst
@@ -16,15 +16,19 @@ Top-Level

version
about
check_updates
ExpectationType
AvailableBackends
get_backend
get_backend_metadata
get_backend_bibtex
cite
reset_backend_entries
statistical_model_wrapper
helper_functions.correlation_to_covariance
helper_functions.covariance_to_correlation
optimizer.core.fit
set_log_level

Main Classes
------------
9 changes: 9 additions & 0 deletions docs/releases/changelog-v0.1.md
@@ -28,6 +28,15 @@
* Update clarification on text-based keyword arguments.
([#30](https://github.com/SpeysideHEP/spey/pull/30))

* Added logging across the software and implemented tools to silence it.
([#32](https://github.com/SpeysideHEP/spey/pull/32))

* Spey will now automatically look for updates during initialization.
([#32](https://github.com/SpeysideHEP/spey/pull/32))

* Added utilities to retrieve BibTeX information for third-party plug-ins.
([#32](https://github.com/SpeysideHEP/spey/pull/32))

## Bug Fixes

* In accordance with the latest updates, ```UnCorrStatisticsCombiner``` has been updated with
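The three changelog entries above add user-facing switches. As a hedged sketch of how they could be used (the call signature and the environment variable are taken from the `src/spey/__init__.py` diff further down; treat this as an illustration, not official documentation):

```python
import os

# Assumption: setting the variable before the first `import spey` is what the
# guard at the bottom of src/spey/__init__.py checks; any value other than
# "OFF" (case-insensitive) keeps the automatic update check enabled.
os.environ["SPEY_CHECKUPDATE"] = "OFF"

import spey

# Log levels as documented in set_log_level: 0 error, 1 warning, 2 info, 3 debug.
spey.set_log_level(1)  # only warnings and errors from the "Spey" logger
```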
1 change: 1 addition & 0 deletions setup.py
@@ -12,6 +12,7 @@
"autograd==1.5",
"semantic_version~=2.10",
"tqdm>=4.64.0",
"requests>=2.31.0",
]

backend_plugins = [
103 changes: 101 additions & 2 deletions src/spey/__init__.py
@@ -1,4 +1,9 @@
from typing import Any, Callable, Dict, List, Optional, Text, Tuple, Union
import logging
import os
import re
import sys
import textwrap
from typing import Any, Callable, Dict, List, Optional, Text, Tuple, Union, Literal

import numpy as np
import pkg_resources
@@ -7,7 +12,9 @@
from spey.base import BackendBase, ConverterBase
from spey.combiner import UnCorrStatisticsCombiner
from spey.interface.statistical_model import StatisticalModel, statistical_model_wrapper
from spey.system import logger
from spey.system.exceptions import PluginError
from spey.system.webutils import get_bibtex, check_updates

from ._version import __version__
from .about import about
@@ -25,13 +32,43 @@
"BackendBase",
"ConverterBase",
"about",
"check_updates",
"get_backend_bibtex",
"cite",
"set_log_level",
]


def __dir__():
return __all__


logger.init(LoggerStream=sys.stdout)
log = logging.getLogger("Spey")
log.setLevel(logging.INFO)


def set_log_level(level: Literal[0, 1, 2, 3]) -> None:
"""
Set the log level for Spey.

Args:
level (``int``): log level

* 0: error
* 1: warning
* 2: info
* 3: debug
"""
log_dict = {
0: logging.ERROR,
1: logging.WARNING,
2: logging.INFO,
3: logging.DEBUG,
}
log.setLevel(log_dict[level])


def version() -> Text:
"""
Version of ``spey`` package
@@ -59,7 +96,7 @@ def reset_backend_entries() -> None:
_backend_entries = _get_backend_entrypoints()


def AvailableBackends() -> List[Text]:
def AvailableBackends() -> List[Text]: # pylint: disable=C0103
"""
Returns a list of available backends. The default backends are automatically installed
with ``spey`` package. To enable other backends, please see the relevant section
@@ -209,10 +246,72 @@ def get_backend_metadata(name: Text) -> Dict[Text, Any]:
"spey_requires": statistical_model.spey_requires,
"doi": list(getattr(statistical_model, "doi", [])),
"arXiv": list(getattr(statistical_model, "arXiv", [])),
"zenodo": list(getattr(statistical_model, "zenodo", [])),
}

raise PluginError(
f"The backend {name} is unavailable. Available backends are "
+ ", ".join(AvailableBackends())
+ "."
)


def get_backend_bibtex(name: Text) -> List[Text]:
"""
Retrieve the BibTeX entries for a backend plug-in, if available.

The BibTeX entries are retrieved from INSPIRE-HEP, doi.org and Zenodo.
If the arXiv number matches the DOI, the output will include two versions
of the same reference. If the backend does not declare an arXiv or DOI number,
an empty list is returned.

Args:
name (``Text``): backend identifier. This refers to the package that
prescribes the likelihood function.

Returns:
``List[Text]``:
BibTeX entries for the backend.
"""
# pylint: disable=import-outside-toplevel, W1203
txt = []
meta = get_backend_metadata(name)

for arxiv_id in meta.get("arXiv", []):
tmp = get_bibtex("inspire/arxiv", arxiv_id)
if tmp != "":
txt.append(textwrap.indent(tmp, " " * 4))
else:
log.debug(f"Can not find {arxiv_id} in Inspire")
for doi in meta.get("doi", []):
tmp = get_bibtex("inspire/doi", doi)
if tmp == "":
log.debug(f"Can not find {doi} in Inspire, looking at doi.org")
tmp = get_bibtex("doi", doi)
if tmp != "":
txt.append(tmp)
else:
log.debug(f"Can not find {doi} in doi.org")
else:
txt.append(textwrap.indent(tmp, " " * 4))
for zenodo_id in meta.get("zenodo", []):
tmp = get_bibtex("zenodo", zenodo_id)
if tmp != "":
txt.append(textwrap.indent(tmp, " " * 4))
else:
log.debug(f"{zenodo_id} is not a valid zenodo identifier")

return txt


def cite() -> Text:
"""Retrieve BibTeX information for Spey."""
arxiv = textwrap.indent(get_bibtex("inspire/arxiv", "2307.06996"), " " * 4)
zenodo = get_bibtex("zenodo", "10156353")
linker = re.search("@software{(.+?),\n", zenodo).group(1)
zenodo = textwrap.indent(zenodo.replace(linker, "spey_zenodo"), " " * 4)
return arxiv + "\n\n" + zenodo


if os.environ.get("SPEY_CHECKUPDATE", "ON").upper() != "OFF":
check_updates()
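A short usage sketch for the citation helpers added in this file. The return shapes follow the code above (``cite`` returns a single string, ``get_backend_bibtex`` a list); the backend identifier below is an illustrative assumption, any name listed by ``spey.AvailableBackends()`` would do:

```python
import spey

# BibTeX record for Spey itself: the INSPIRE arXiv entry followed by the
# Zenodo entry, concatenated into one string.
print(spey.cite())

# BibTeX entries for a plug-in, gathered from INSPIRE-HEP, doi.org and Zenodo.
# Returns an empty list if the backend declares no arXiv/DOI/Zenodo identifiers.
for entry in spey.get_backend_bibtex("default_pdf.uncorrelated_background"):
    print(entry)
```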
2 changes: 1 addition & 1 deletion src/spey/_version.py
@@ -1,3 +1,3 @@
"""Version number (major.minor.patch[-label])"""

__version__ = "0.1.5"
__version__ = "0.1.6"
10 changes: 2 additions & 8 deletions src/spey/about.py
@@ -8,7 +8,7 @@
import scipy
import semantic_version
import tqdm
from pkg_resources import iter_entry_points
from pkg_resources import get_distribution, iter_entry_points


def about() -> None:
@@ -21,13 +21,7 @@ def about() -> None:
)
print(f"Numpy version: {numpy.__version__}")
print(f"Scipy version: {scipy.__version__}")
print(
"Autograd version: %s"
% check_output([sys.executable, "-m", "pip", "show", "autograd"])
.decode()
.split("\n")[1]
.split(":")[1][1:]
)
print(f"Autograd version: {get_distribution('autograd').version}")
print(f"tqdm version: {tqdm.__version__}")
print(f"semantic_version version: {semantic_version.__version__}")

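The change above swaps a subprocess call to ``pip show`` for ``pkg_resources.get_distribution``. For reference only (not what the PR uses), the same lookup can be written with the standard-library ``importlib.metadata``, which avoids the now-deprecated ``pkg_resources``:

```python
from importlib.metadata import version  # standard library, Python 3.8+

# Equivalent to get_distribution("autograd").version, without pkg_resources.
print(f"Autograd version: {version('autograd')}")
```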
10 changes: 9 additions & 1 deletion src/spey/backends/default_pdf/__init__.py
@@ -1,5 +1,6 @@
"""Interface for default PDF sets"""

import logging
from typing import Any, Callable, Dict, List, Optional, Text, Tuple, Union

from autograd import grad, hessian, jacobian
@@ -17,6 +18,9 @@
from .uncertainty_synthesizer import signal_uncertainty_synthesizer

# pylint: disable=E1101,E1120
log = logging.getLogger("Spey")

# pylint: disable=W1203


class DefaultPDFBase(BackendBase):
@@ -92,7 +96,7 @@ def __init__(
self.signal_uncertainty_configuration = signal_uncertainty_synthesizer(
signal_yields=self.signal_yields,
**signal_uncertainty_configuration,
domain=slice(len(background_yields) + 1, None)
domain=slice(len(background_yields) + 1, None),
)

minimum_poi = -np.inf
@@ -101,6 +105,7 @@
self.background_yields[self.signal_yields > 0.0]
/ self.signal_yields[self.signal_yields > 0.0]
)
log.debug(f"Min POI set to : {minimum_poi}")

self._main_model = None
self._constraint_model = None
@@ -243,6 +248,7 @@ def get_objective_function(
self.background_yields if expected == ExpectationType.apriori else self.data
)
data = current_data if data is None else data
log.debug(f"Data: {data}")

def negative_loglikelihood(pars: np.ndarray) -> np.ndarray:
"""Compute twice negative log-likelihood"""
@@ -291,6 +297,7 @@ def get_logpdf_func(
self.background_yields if expected == ExpectationType.apriori else self.data
)
data = current_data if data is None else data
log.debug(f"Data: {data}")

return lambda pars: self.main_model.log_prob(
pars, data[: len(self.data)]
@@ -329,6 +336,7 @@ def get_hessian_logpdf_func(
self.background_yields if expected == ExpectationType.apriori else self.data
)
data = current_data if data is None else data
log.debug(f"Data: {data}")

def log_prob(pars: np.ndarray) -> np.ndarray:
"""Compute log-probability"""
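The debug statements added in this file go through the shared ``"Spey"`` logger, so they only become visible once the level is raised. A minimal sketch, assuming spey is importable:

```python
import logging

import spey

# Raise the shared "Spey" logger to debug so the new messages become visible.
spey.set_log_level(3)

# The backends attach to the same named logger, so messages such as
# "Min POI set to ..." and "Data: ..." from default_pdf are now emitted.
log = logging.getLogger("Spey")
log.debug("debug messages from the 'Spey' logger are now visible")
```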
20 changes: 13 additions & 7 deletions src/spey/backends/default_pdf/third_moment.py
@@ -1,12 +1,13 @@
"""Tools for computing third moment expansion"""
import logging
import warnings
from typing import Optional, Tuple, Union

import autograd.numpy as np
from scipy import integrate
from scipy.stats import norm

# pylint: disable=E1101,E1120
# pylint: disable=E1101,E1120,W1203
log = logging.getLogger("Spey")


def third_moment_expansion(
@@ -37,11 +38,11 @@ def third_moment_expansion(
"""
cov_diag = np.diag(covariance_matrix)

# ! Assertion error is removed, instead nan values will be converted to zero.
# assert np.all(8.0 * cov_diag**3 >= third_moment**2), (
# "Given covariance matrix and diagonal terms of the third moment does not "
# + "satisfy the condition: 8 * diag(cov)**3 >= third_moment**2."
# )
if not np.all(8.0 * cov_diag**3 >= third_moment**2):
log.warning(
r"Third moments does not satisfy the following condition: $8\Sigma_{ii}^3 \geq (m^{(3)}_i)^2$"
)
log.warning("The values that do not satisfy this condition will be set to zero.")

# arXiv:1809.05548 eq. 2.9
with warnings.catch_warnings(record=True) as w:
@@ -61,12 +62,15 @@
category=RuntimeWarning,
)
C = np.where(np.isnan(C), 0.0, C)
log.debug(f"C: {C}")

# arXiv:1809.05548 eq. 2.10
B = np.sqrt(cov_diag - 2 * C**2)
log.debug(f"B: {B}")

# arXiv:1809.05548 eq. 2.11
A = expectation_value - C
log.debug(f"A: {A}")

# arXiv:1809.05548 eq. 2.12
eps = 1e-5
@@ -88,6 +92,7 @@
if i != j:
corr[j, i] = corr[i, j]

log.debug(f"rho: {corr}")
return A, B, C, corr

return A, B, C
@@ -154,5 +159,6 @@ def compute_x3BifurgatedGaussian(x: float, upper: float, lower: float) -> float:

if return_integration_error:
return np.array(third_moment), np.array(error)
log.debug(f"Error: {error}")

return np.array(third_moment)
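The warning introduced above replaces the old hard assertion on the skewness condition from arXiv:1809.05548. A small, self-contained check of that condition; the covariance matrix and third-moment values are made up purely for illustration:

```python
import numpy as np

covariance_matrix = np.array([[4.0, 0.5], [0.5, 9.0]])
third_moment = np.array([1.0, 100.0])

cov_diag = np.diag(covariance_matrix)

# arXiv:1809.05548 requires 8 * Sigma_ii^3 >= (m3_i)^2 for the expansion;
# bins violating it now trigger the warning above and have C set to zero
# instead of raising an assertion error.
satisfied = 8.0 * cov_diag**3 >= third_moment**2
print(satisfied)  # [ True False] -> the second bin would be zeroed
```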
28 changes: 0 additions & 28 deletions src/spey/interface/functiontools.py

This file was deleted.
