Skip to content

Commit

Permalink
Add logging
Browse files Browse the repository at this point in the history
  • Loading branch information
jlubken committed Jun 2, 2020
1 parent 44beefa commit 7dc7fc8
Show file tree
Hide file tree
Showing 3 changed files with 65 additions and 29 deletions.
30 changes: 26 additions & 4 deletions src/dsdk/mongo.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,13 @@ def open_database(
# Log-format template for insert_one: a JSON object with lazy %-style slots
# for "<database>.<collection>" and the inserted document id.
INSERT_ONE = '{"key": "mongo.insert_one", "collection": "%s.%s", "id": "%s"}'
Expand All @@ -207,7 +213,12 @@ def open_database(
def insert_one(collection: Collection, doc: Dict[str, Any]):
    """Insert one with retry.

    Inserts *doc* into *collection* and logs the qualified collection
    name plus the new document id, then returns the InsertOneResult.
    """
    outcome = collection.insert_one(doc)
    # Lazy %-args: the INSERT_ONE template is only formatted if INFO is enabled.
    logger.info(
        INSERT_ONE, collection.database.name, collection.name, outcome.inserted_id
    )
    return outcome


Expand Down Expand Up @@ -242,7 +253,13 @@ def insert_many(collection: Collection, docs: Sequence[Dict[str, Any]]):
# Log-format template for update_one: a JSON object with lazy %-style slots
# for "<database>.<collection>" and the affected document id.
UPDATE_ONE = '{"key": "mongo.update_one", "collection": "%s.%s", "id": "%s"}'
def update_one(
    collection: Collection, key: Dict[str, Any], doc: Dict[str, Any]
):
    """Update one with retry.

    Applies *doc* to the document matching *key* in *collection*, logs the
    qualified collection name and the upserted id (None for a plain update),
    and returns the UpdateResult.
    """
    result = collection.update_one(key, doc)
    logger.info(
        UPDATE_ONE,
        collection.database.name,
        collection.name,
        # BUG FIX: pymongo's UpdateResult has no `inserted_id` attribute —
        # accessing it raised AttributeError on every call. `upserted_id`
        # is the UpdateResult counterpart (None when nothing was upserted).
        result.upserted_id,
    )
    return result
47 changes: 35 additions & 12 deletions src/dsdk/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,17 +118,31 @@ def __init__(
# ... because self.pipeline is not optional
assert self.pipeline is not None

# Structured-log templates (JSON objects with lazy %-style slots) marking
# task and pipeline lifecycle events.
TASK_BEGIN = "".join(
    ("{", ", ".join(('"key": "task.begin"', '"task": "%s"')), "}")
)

TASK_END = "".join(
    ("{", ", ".join(('"key": "task.end"', '"task": "%s"')), "}")
)

# BUG FIX: this previously used the key "pipeline.begin"'s copy-paste twin
# "pipeline.end", making begin and end events indistinguishable in logs.
PIPELINE_BEGIN = "".join(
    ("{", ", ".join(('"key": "pipeline.begin"', '"pipeline": "%s"')), "}")
)

PIPELINE_END = "".join(
    ("{", ", ".join(('"key": "pipeline.end"', '"pipeline": "%s"')), "}")
)

def __call__(self) -> Batch:
    """Run the pipeline inside an open batch and return the batch.

    Emits structured begin/end log events for the pipeline as a whole
    and for each task, using the class-level log templates.
    """
    with self.open_batch() as batch:
        logger.info(self.PIPELINE_BEGIN, self.__class__.__name__)
        for task in self.pipeline:
            logger.info(self.TASK_BEGIN, task.__class__.__name__)
            task(batch, self)
            # FIX: removed leftover pre-refactor logging that re-logged the
            # whole pipeline on every task iteration via fragile string
            # munging (str(s).split(" ")[0][1:]); the structured TASK_END
            # event replaces it.
            logger.info(self.TASK_END, task.__class__.__name__)
        logger.info(self.PIPELINE_END, self.__class__.__name__)
        return batch

def inject_arguments( # pylint: disable=no-self-use,protected-access
Expand All @@ -137,7 +151,11 @@ def inject_arguments( # pylint: disable=no-self-use,protected-access
"""Inject arguments."""
parser._default_config_files = [
"/local/config.yaml",
"/local/config.yml",
"/local/.yml",
"/secrets/config.yaml",
"/secrets/config.yml",
"/secrets/.yml",
]
parser._ignore_unknown_config_file_keys = True
parser.add(
Expand All @@ -148,28 +166,33 @@ def inject_arguments( # pylint: disable=no-self-use,protected-access
env_var="CONFIG", # make ENV match default metavar
)

# Structured-log templates marking batch lifecycle: "on" carries the open
# timestamp, "end" the close timestamp (lazy %-style slots).
BATCH_OPEN = '{"key": "batch.open", "on": "%s"}'

BATCH_CLOSE = '{"key": "batch.close", "end": "%s"}'

@contextmanager
def open_batch(  # pylint: disable=no-self-use,unused-argument
    self, key: Any = None, model: Optional[Model] = None
) -> Generator[Batch, None, None]:
    """Open batch.

    Yields a Batch wrapping an Interval whose `on` timestamp is taken at
    entry and whose `end` timestamp is filled in at exit; both edges are
    logged as structured batch.open / batch.close events.
    """
    record = Interval(on=datetime.now(timezone.utc), end=None)
    logger.info(self.BATCH_OPEN, record.on)
    yield Batch(key, record)
    # NOTE(review): if the body raises, `end` is never set and batch.close
    # is never logged — consider try/finally if close events must be paired.
    record.end = datetime.now(timezone.utc)
    # FIX: dropped the redundant f-string logger.info('"action": ...') call;
    # the structured BATCH_CLOSE template (lazy %-args) replaces it.
    logger.info(self.BATCH_CLOSE, record.end)

def store_evidence(  # pylint: disable=no-self-use,unused-argument
    self, batch: Batch, *args, **kwargs
) -> None:
    """Store evidence.

    Consumes *args as a flat sequence of (key, df) pairs and records each
    pair in batch.evidence. Raises ValueError if an odd number of
    positional arguments is passed (a key without its df).
    """
    # TODO It isn't really evidence if it isn't written to the data store.
    while args:
        key, df, *args = args  # type: ignore
        batch.evidence[key] = df
    # FIX: dropped the leftover per-pair f-string logger.info call; logging
    # moved to the structured templates elsewhere in this module.


class Task: # pylint: disable=too-few-public-methods
Expand Down
17 changes: 4 additions & 13 deletions src/dsdk/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from functools import wraps
from json import dump as json_dump
from json import load as json_load
from logging import INFO, Formatter, LoggerAdapter, StreamHandler, getLogger
from logging import INFO, Formatter, StreamHandler, getLogger
from pickle import dump as pickle_dump
from pickle import load as pickle_load
from sys import stdout
Expand All @@ -27,21 +27,12 @@ def get_logger(name, level=INFO):
Use this function to hide the logger implementation/config for now.
Show that the conventions demonstrated here work for the applications.
"""
# TODO Pass calling function from parent application
defaults = {"callingfunc": ""}
formatter_string = " - ".join(
(
"%(asctime)-15s",
"%(name)s",
"%(levelname)s",
", ".join(
(
'{"callingfunc": "%(callingfunc)s"',
'"module": "%(module)s"',
'"function": "%(funcName)s"',
"%(message)s}",
)
),
"%(name)s.%(funcName)s",
"%(message)s",
)
)
handler = StreamHandler(stdout)
Expand All @@ -50,7 +41,7 @@ def get_logger(name, level=INFO):
result = getLogger(name)
result.propagate = False
result.addHandler(handler)
return LoggerAdapter(result, defaults)
return result


logger = get_logger(__name__)
Expand Down

0 comments on commit 7dc7fc8

Please sign in to comment.