diff --git a/.gitignore b/.gitignore
index e278507..1523dee 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,195 @@
 *token
-notebooks/.ipynb_checkpoints/
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+poetry.toml
+
+# ruff
+.ruff_cache/
+
+# LSP config files
+pyrightconfig.json
+
+### Vim ###
+# Swap
+[._]*.s[a-v][a-z]
+!*.svg # comment out if you don't need vector files
+[._]*.sw[a-p]
+[._]s[a-rt-v][a-z]
+[._]ss[a-gi-z]
+[._]sw[a-p]
+
+# Session
+Session.vim
+Sessionx.vim
+
+# Temporary
+.netrwhist
+*~
+# Auto-generated tag files
+tags
+# Persistent undo
+[._]*.un~
+
diff --git a/python/__init__.py b/python/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/commons/README.md b/python/commons/README.md
new file mode 100644
index 0000000..4ef9c9c
--- /dev/null
+++ b/python/commons/README.md
@@ -0,0 +1,91 @@
+# DasLogger
+
+DasLogger is a custom Python logger that supports both file and stream logging.
+It provides an easy-to-use interface for logging messages to a file and
+standard error.
+
+## Features
+
+- Logs messages to both a file and `stderr`
+- Customizable log levels
+- Custom exception logging
+- Automatic handling of uncaught exceptions
+- Child logger support
+
+## Installation
+
+TODO: Add installation instructions to README.md once this is built and
+published as a package.
+
+## Usage
+
+### Basic Usage
+
+```python
+from das_logging import log
+
+# Log an info level message
+log.info("This is an info message")
+
+# Log an exception with warning level
+log.das_exception(Exception("This is an error message"))
+```
+
+### Creating a Child Logger
+
+```python
+from das_logging import log
+
+# Create a child logger
+child_logger = log.getChild("child")
+
+# Log an info level message with the child logger
+child_logger.info("This is an info message from the child logger")
+```
+
+## Configuration
+
+The logger can be configured by modifying the default values in the module:
+
+- `DEFAULT_LOG_FILE`: The file where logs will be written (default: `/tmp/das.log`)
+- `DEFAULT_LOGGER_NAME`: The name of the logger (default: `das_logger`)
+- `DEFAULT_LOG_LEVEL`: The default log level (default: `logging.WARNING`)
+
+## Example
+
+Here's a complete example demonstrating the usage of DasLogger:
+
+```python
+import logging
+
+from das_logging import log
+
+# Log messages of various levels
+log.debug("This is a debug message")  # Not logged under the default WARNING level
+log.info("This is an info message")  # Also below the default WARNING level
+log.warning("This is a warning message")
+log.error("This is an error message")
+log.critical("This is a critical message")
+log.fatal("This is a fatal message")
+
+# Log an exception with traceback
+log.exception(Exception("This is an exception message"))
+
+# Log an exception with custom DAS formatting
+try:
+    raise ValueError("An example exception")
+except Exception as e:
+    log.das_exception(e)
+
+# Create and use a child logger
+child = log.getChild("child")
+child.setLevel(logging.DEBUG)
+child.debug("This is a debug message from the child logger")
+```
+
+## Exception Handling
+
+DasLogger automatically handles uncaught exceptions by logging them and then
+exiting the program with a status code of 1. This is achieved by setting
+`sys.excepthook` to the logger's custom exception handler.
+
+> Note: The automatic handling of uncaught exceptions will not work in IPython
+> shells (including Jupyter notebooks).
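+
+For illustration, here is a minimal, hypothetical script showing that behaviour
+(the `crash.py` name and the error message are made up for this sketch and are
+not part of the package):
+
+```python
+# crash.py (hypothetical example)
+from das_logging import log  # noqa: F401 - importing creates the logger and installs sys.excepthook
+
+raise RuntimeError("boom")  # uncaught on purpose
+```
+
+Running it writes an "An uncaught exception occurred" entry with the traceback
+to `/tmp/das.log` and `stderr` at ERROR level, and the process exits with a
+non-zero status code.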
diff --git a/python/commons/__init__.py b/python/commons/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/commons/das_logging.py b/python/commons/das_logging.py
new file mode 100644
index 0000000..66553d2
--- /dev/null
+++ b/python/commons/das_logging.py
@@ -0,0 +1,99 @@
+import logging
+import sys
+from types import TracebackType
+
+DEFAULT_LOG_FILE = "/tmp/das.log"
+DEFAULT_LOGGER_NAME = "das_logger"
+DEFAULT_LOG_LEVEL = logging.WARNING
+
+
+class DasLogger(logging.Logger):
+    """
+    Custom Logger class that supports file and stream logging.
+    Future work will include support for queueing.
+
+    Example:
+
+        from das_logging import log
+        log.info("test")  # outputs an info level log message to file /tmp/das.log and also to stderr
+
+        # if you want to have a child logger you can do so like this:
+
+        from das_logging import log
+        child = log.getChild("child")
+        child.info("test")
+    """
+
+    # Export constants from logging
+    DEBUG = logging.DEBUG
+    INFO = logging.INFO
+    WARNING = logging.WARNING
+    ERROR = logging.ERROR
+    CRITICAL = logging.CRITICAL
+    FATAL = logging.FATAL
+
+    def __init__(self, name: str, level: int = DEFAULT_LOG_LEVEL) -> None:
+        """
+        Args:
+            name: name of the logger
+            level: level of the logger
+        """
+        super().__init__(name, level)
+
+        self.formatter = logging.Formatter(
+            fmt="%(asctime)s %(levelname)s %(module)s:%(threadName)s %(message)s",
+            datefmt="%Y-%m-%d %H:%M:%S",
+        )
+        self._setup_stream_handler()
+        self._setup_file_handler()
+
+        sys.excepthook = self._uncaught_exc_handler
+
+    def _setup_stream_handler(self) -> None:
+        """
+        Initializes and sets up the stream handler.
+        """
+        stream_handler = logging.StreamHandler(sys.stderr)
+        stream_handler.setFormatter(self.formatter)
+        self.addHandler(stream_handler)
+
+    def _setup_file_handler(self) -> None:
+        """
+        Initializes and sets up the file handler.
+        """
+        file_handler = logging.FileHandler(DEFAULT_LOG_FILE)
+        file_handler.setFormatter(self.formatter)
+        self.addHandler(file_handler)
+
+    def das_exception(self, msg: str, *args, exc_info: bool = True, **kwargs) -> None:
+        """
+        Convenience method for logging a WARNING with exception information.
+
+        Args:
+            msg: message to log
+            *args: positional arguments
+            exc_info: include exception information, defaults to True
+            **kwargs: keyword arguments
+        """
+        self.warning(msg, *args, exc_info=exc_info, **kwargs)
+
+    def _uncaught_exc_handler(
+        self,
+        exc_type: type[BaseException],
+        exc_value: BaseException,
+        exc_traceback: TracebackType | None,
+    ):
+        if issubclass(exc_type, KeyboardInterrupt):
+            return sys.__excepthook__(exc_type, exc_value, exc_traceback)
+
+        self.error(
+            "An uncaught exception occurred: ",
+            exc_info=(exc_type, exc_value, exc_traceback)
+        )
+
+
+logging.setLoggerClass(DasLogger)
+log: DasLogger = logging.getLogger(DEFAULT_LOGGER_NAME)  # type: ignore
+
+if __name__ == "__main__":
+    log.warning("test")
diff --git a/python/commons/test_das_logging.py b/python/commons/test_das_logging.py
new file mode 100644
index 0000000..fa1d960
--- /dev/null
+++ b/python/commons/test_das_logging.py
@@ -0,0 +1,68 @@
+import logging
+import sys
+import unittest
+
+from python.commons.das_logging import (
+    DEFAULT_LOG_LEVEL,
+    DEFAULT_LOGGER_NAME,
+    DasLogger,
+    log,
+)
+
+assert isinstance(log, DasLogger)  # pyright
+
+
+class TestDasLogger(unittest.TestCase):
+    def test_singleton(self):
+        self.assertEqual(logging.getLoggerClass(), DasLogger)
+        new_logger = logging.getLogger(DEFAULT_LOGGER_NAME)
+        self.assertEqual(new_logger, log)
+
+    def test_das_logger(self):
+        self.assertEqual(DEFAULT_LOGGER_NAME, log.name)
+        self.assertEqual(DEFAULT_LOG_LEVEL, log.level)
+
+    def test_level_constants(self):
+        self.assertEqual(logging.DEBUG, log.DEBUG)
+        self.assertEqual(logging.INFO, log.INFO)
+        self.assertEqual(logging.WARNING, log.WARNING)
+        self.assertEqual(logging.ERROR, log.ERROR)
+        self.assertEqual(logging.CRITICAL, log.CRITICAL)
+        self.assertEqual(logging.FATAL, log.FATAL)
+
+    def test_log_logs(self):
+        with self.assertLogs(log, log.INFO) as cm:
+            log.info("test")
+        self.assertEqual(cm.output, ["INFO:das_logger:test"])
+
+    def test_exception(self):
+        """Test that log.exception logs and its level is set correctly"""
+        with self.assertLogs(log, level=log.WARNING) as cm:
+            try:
+                raise Exception("test")
+            except Exception as e:
+                log.exception(e)
+
+        self.assertTrue(cm.output[0].startswith("ERROR:das_logger"))
+
+    def test_das_exception(self):
+        """Test that das_exception logs and its level is set correctly"""
+        with self.assertLogs(log, level=log.WARNING) as cm:
+            try:
+                raise Exception("test")
+            except Exception as e:
+                log.das_exception(e)  # type: ignore
+
+        self.assertTrue(cm.output[0].startswith("WARNING:das_logger"))
+
+    def test_sys_excepthook(self):
+        """Test that sys.excepthook is set correctly"""
+
+        self.assertEqual(sys.excepthook, log._uncaught_exc_handler)
+
+    def test_uncaught_exc_handler(self):
+        """Test that the uncaught exception handler logs and its level is set correctly"""
+        with self.assertLogs(log, level=log.WARNING) as cm:
+            exc = Exception("test")
+            log._uncaught_exc_handler(type(exc), exc, None)
+        self.assertTrue(cm.output[0].startswith("ERROR:das_logger:An uncaught exception occurred"))
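+
+
+# Not part of the original change: an assumed convenience entry point so the
+# suite can also be run directly (e.g. `python -m python.commons.test_das_logging`
+# from the repository root), in addition to a test runner; it simply delegates
+# to unittest's own CLI.
+if __name__ == "__main__":
+    unittest.main()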