Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor/split utils into submodules #628

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
137 changes: 4 additions & 133 deletions dwave/cloud/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,20 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import io
import os
import re
import sys
import json
import logging
import datetime
import importlib
import typing

from dwave.cloud.client import Client
from dwave.cloud.solver import Solver
from dwave.cloud.computation import Future
from dwave.cloud.utils import parse_loglevel
from dwave.cloud.utils.logging import add_loglevel, configure_logging_from_env

__all__ = ['Client', 'Solver', 'Future']

Expand All @@ -34,134 +28,11 @@
# Module-level logger for the `dwave.cloud` package. A NullHandler is
# installed so the library stays silent unless the application (or the
# env-based configuration below) attaches a real handler.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())

# add TRACE log level and Logger.trace() method
# (numeric value 5 sits below logging.DEBUG=10, so TRACE is the most verbose)
logging.TRACE = 5
logging.addLevelName(logging.TRACE, "TRACE")

def _trace(logger, message, *args, **kwargs):
    """Log ``message`` on ``logger`` at TRACE severity."""
    logger.log(logging.TRACE, message, *args, **kwargs)

# expose as a method on every logger instance: logger.trace(...)
logging.Logger.trace = _trace


class ISOFormatter(logging.Formatter):
    """Logging formatter that renders record timestamps in ISO-8601 format.

    Args:
        as_tz:
            Target timezone for timestamps, e.g. ``datetime.timezone.utc``.
            ``None`` (default) produces a naive local-time timestamp.
    """

    # target timezone; ``None`` yields a naive timestamp
    as_tz: typing.Optional[datetime.timezone] = None

    def __init__(self, *args, as_tz: typing.Optional[datetime.timezone] = None, **kwargs):
        self.as_tz = as_tz
        super().__init__(*args, **kwargs)

    def formatTime(self, record: logging.LogRecord, datefmt: typing.Optional[str] = None) -> str:
        # ``datefmt`` is accepted for interface compatibility with
        # ``logging.Formatter`` but the timestamp is always ISO-formatted.
        when = datetime.datetime.fromtimestamp(record.created, tz=self.as_tz)
        return when.isoformat()


class FilteredSecretsFormatter(logging.Formatter):
    """Logging formatter that redacts secrets (like Solver API tokens).

    For easier disambiguation, a secret/token is assumed to carry a short
    alphanumeric prefix and to comprise 40 or more hex digits; only the
    first and last three hex digits survive in the output.
    """

    # prefixed 160-bit+ hex tokens (sapi token format: `A{2,4}-X{40,}`)
    _SAPI_TOKEN_PATTERN = re.compile(
        r'\b([0-9A-Za-z]{2,4}-[0-9A-Fa-f]{3})([0-9A-Fa-f]{34,})([0-9A-Fa-f]{3})\b')
    # 128-bit+ hex tokens (`X{32,}`)
    _HEX_TOKEN_PATTERN = re.compile(
        r'\b([0-9A-Fa-f]{3})([0-9A-Fa-f]{26,})([0-9A-Fa-f]{3})\b')
    # 128-bit uuid tokens (`X{8}-X{4}-X{4}-X{4}-X{12}`)
    _UUID_TOKEN_PATTERN = re.compile(
        r'\b([0-9A-Fa-f]{3})([0-9A-Fa-f]{5}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{9})([0-9A-Fa-f]{3})\b')

    def format(self, record: logging.LogRecord) -> str:
        text = super().format(record)
        # collapse the middle of each matched token, keeping the three
        # leading and trailing hex digits for identification
        for pattern in (self._SAPI_TOKEN_PATTERN,
                        self._HEX_TOKEN_PATTERN,
                        self._UUID_TOKEN_PATTERN):
            text = pattern.sub(r'\1...\3', text)
        return text


class JSONFormatter(ISOFormatter):
    """Formatter that serializes the entire log record as a JSON object.

    Timestamp handling (ISO format, optional timezone) is inherited from
    :class:`ISOFormatter`.
    """

    def format(self, record: logging.LogRecord) -> str:
        # Called for its side effects only: ``Formatter.format`` populates
        # ``record.message`` (and ``record.asctime`` when the format string
        # uses it) on the record itself; the rendered string is discarded.
        super().format(record)
        # filter out message template and potentially unserializable args
        rec = record.__dict__.copy()
        del rec['args']
        del rec['msg']
        return json.dumps(rec)


def configure_logging(logger: typing.Optional[logging.Logger] = None,
                      *,
                      level: int = logging.WARNING,
                      filter_secrets: bool = True,
                      output_stream: typing.Optional[io.IOBase] = None,
                      in_utc: bool = False,
                      structured_output: bool = False,
                      handler_level: typing.Optional[int] = None,
                      additive: bool = False,
                      ) -> logging.Logger:
    """Configure cloud-client's `dwave.cloud` base logger.

    Logging output from the cloud-client is suppressed by default. This utility
    function can be used to quickly setup basic logging from the library.

    Args:
        logger: Logger to configure; defaults to this module's logger.
        level: Logger level.
        filter_secrets: Redact secrets (e.g. API tokens) from log output.
        output_stream: Stream the handler writes to; defaults to ``sys.stderr``.
        in_utc: Render timestamps in UTC instead of naive local time.
        structured_output: Emit JSON records instead of formatted text lines.
        handler_level: Level for the installed handler; defaults to ``level``.
        additive: Keep previously installed handlers instead of removing them.

    Returns:
        The configured logger.

    .. note::
        This function is currently intended for internal/private use only.

    .. versionadded:: 0.12.0
        Explicit optional logging configuration. Previously, logger was minimally
        configured by default.
    """

    if logger is None:
        logger = logging.getLogger(__name__)
    if output_stream is None:
        output_stream = sys.stderr
    if handler_level is None:
        handler_level = level

    # renamed from ``format`` to avoid shadowing the builtin
    formatter_kwargs = dict(
        fmt='%(asctime)s %(name)s %(levelname)s %(threadName)s [%(funcName)s] %(message)s',
        as_tz=datetime.timezone.utc if in_utc else None,
    )

    if structured_output:
        formatter_base = JSONFormatter
    else:
        formatter_base = ISOFormatter

    if filter_secrets:
        # secrets filter runs first in the MRO, then the base formatter
        class Formatter(FilteredSecretsFormatter, formatter_base):
            pass
    else:
        Formatter = formatter_base

    if not additive:
        # make sure handlers are not accumulated on repeated configuration
        while logger.handlers:
            logger.removeHandler(logger.handlers[-1])

    formatter = Formatter(**formatter_kwargs)
    handler = logging.StreamHandler(stream=output_stream)
    handler.setFormatter(formatter)
    handler.setLevel(handler_level)

    logger.addHandler(handler)
    logger.setLevel(level)

    return logger

# make sure TRACE level is available
add_loglevel('TRACE', 5)

# configure logger if DWAVE_LOG_LEVEL present in environment
def _apply_loglevel_from_env(logger):
if log_level := os.getenv('DWAVE_LOG_LEVEL', os.getenv('dwave_log_level')):
level = parse_loglevel(log_level)
log_format = os.getenv('DWAVE_LOG_FORMAT', os.getenv('dwave_log_format', ''))
structured = log_format.strip().lower() == 'json'
configure_logging(logger, level=level, structured_output=structured)

_apply_loglevel_from_env(logger)
configure_logging_from_env(logger)


# alias dwave.cloud.client.{qpu,sw,hybrid} as dwave.cloud.*
Expand Down
6 changes: 3 additions & 3 deletions dwave/cloud/api/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,10 @@
from werkzeug.http import parse_options_header, dump_options_header

from dwave.cloud.api import constants, exceptions
from dwave.cloud.config import load_config, validate_config_v1, update_config
from dwave.cloud.config import load_config, validate_config_v1
from dwave.cloud.config.models import ClientConfig
from dwave.cloud.utils import (
PretimedHTTPAdapter, BaseUrlSession, default_user_agent, is_caused_by)
from dwave.cloud.utils.exception import is_caused_by
from dwave.cloud.utils.http import PretimedHTTPAdapter, BaseUrlSession, default_user_agent

__all__ = ['DWaveAPIClient', 'SolverAPIClient', 'MetadataAPIClient', 'LeapAPIClient']

Expand Down
2 changes: 1 addition & 1 deletion dwave/cloud/api/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from pydantic.functional_validators import AfterValidator

from dwave.cloud.api import constants
from dwave.cloud.utils import coerce_numpy_to_python
from dwave.cloud.utils.coders import coerce_numpy_to_python


# coerce common numpy types to python types on validation (parsing)
Expand Down
2 changes: 1 addition & 1 deletion dwave/cloud/api/resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
DWaveAPIClient, SolverAPIClient, MetadataAPIClient, LeapAPIClient)
from dwave.cloud.api import constants, models
from dwave.cloud.config.models import ClientConfig
from dwave.cloud.utils import NumpyEncoder
from dwave.cloud.utils.coders import NumpyEncoder

__all__ = ['Solvers', 'Problems', 'Regions']

Expand Down
3 changes: 2 additions & 1 deletion dwave/cloud/auth/flows.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@
from dwave.cloud.auth.server import SingleRequestAppServer, RequestCaptureAndRedirectApp
from dwave.cloud.config.models import ClientConfig
from dwave.cloud.regions import resolve_endpoints
from dwave.cloud.utils import pretty_argvalues, default_user_agent
from dwave.cloud.utils.http import default_user_agent
from dwave.cloud.utils.logging import pretty_argvalues

__all__ = ['AuthFlow', 'LeapAuthFlow', 'OAuthError']

Expand Down
20 changes: 10 additions & 10 deletions dwave/cloud/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,14 +28,16 @@
import requests.exceptions

import dwave.cloud
from dwave.cloud import Client, configure_logging
from dwave.cloud import Client
from dwave.cloud import api
from dwave.cloud.solver import StructuredSolver, BaseUnstructuredSolver
from dwave.cloud.utils import (
default_text_input, generate_random_ising_problem,
datetime_to_timestamp, utcnow, strtrunc, CLIError, set_loglevel,
get_contrib_packages, user_agent, epochnow,
get_distribution, PackageNotFoundError, VersionNotFoundError)
from dwave.cloud.utils.cli import default_text_input, strtrunc, CLIError
from dwave.cloud.utils.dist import (
get_contrib_packages, get_distribution, PackageNotFoundError, VersionNotFoundError)
from dwave.cloud.utils.http import user_agent
from dwave.cloud.utils.logging import configure_logging
from dwave.cloud.utils.qubo import generate_random_ising_problem
from dwave.cloud.utils.time import datetime_to_timestamp, utcnow, epochnow
from dwave.cloud.coders import bqm_as_file
from dwave.cloud.package_info import __title__, __version__
from dwave.cloud.exceptions import (
Expand All @@ -55,13 +57,11 @@

def enable_logging(ctx, param, value):
    """Click eager-option callback: enable library logging.

    The log level is derived from the option's own name (``param.name``).
    Skipped during resilient parsing (e.g. shell completion).
    """
    if value and not ctx.resilient_parsing:
        # single configure call; the former configure_logging()+set_loglevel()
        # pair was stale diff residue and configured logging twice
        configure_logging(level=param.name)

def enable_loglevel(ctx, param, value):
    """Click eager-option callback: enable library logging at ``value`` level.

    Skipped during resilient parsing (e.g. shell completion).
    """
    if value and not ctx.resilient_parsing:
        # single configure call; the former configure_logging()+set_loglevel()
        # pair was stale diff residue and configured logging twice
        configure_logging(level=value)

def show_platform(ctx, param, value):
if value and not ctx.resilient_parsing:
Expand Down
9 changes: 5 additions & 4 deletions dwave/cloud/client/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
from functools import partial, wraps
from collections import namedtuple
from concurrent.futures import ThreadPoolExecutor
from typing import Dict, Optional, Union
from typing import Dict, Optional

import requests
import urllib3
Expand All @@ -72,9 +72,10 @@
from dwave.cloud.regions import get_regions, resolve_endpoints
from dwave.cloud.upload import ChunkedData
from dwave.cloud.events import dispatches_events
from dwave.cloud.utils import (
PretimedHTTPAdapter, BaseUrlSession, default_user_agent,
datetime_to_timestamp, utcnow, cached, retried, is_caused_by)
from dwave.cloud.utils.http import PretimedHTTPAdapter, BaseUrlSession, default_user_agent
from dwave.cloud.utils.time import datetime_to_timestamp, utcnow
from dwave.cloud.utils.decorators import cached, retried
from dwave.cloud.utils.exception import is_caused_by

__all__ = ['Client']

Expand Down
2 changes: 1 addition & 1 deletion dwave/cloud/coders.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
import base64
from typing import Callable

from dwave.cloud.utils import uniform_get, active_qubits
from dwave.cloud.utils.qubo import uniform_get, active_qubits

__all__ = [
'encode_problem_as_qp', 'decode_qp', 'decode_qp_numpy',
Expand Down
6 changes: 3 additions & 3 deletions dwave/cloud/computation.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@
from dateutil.parser import parse
from operator import itemgetter

from dwave.cloud.utils import (
utcnow, datetime_to_timestamp, aliasdict, deprecated)
from dwave.cloud.exceptions import InvalidAPIResponseError
from dwave.cloud.utils.decorators import aliasdict, deprecated
from dwave.cloud.utils.time import utcnow, datetime_to_timestamp

# Use numpy if available for fast decoding
try:
Expand Down Expand Up @@ -578,7 +578,7 @@ def result(self):

Instead of adding copies of ``solutions`` and ``num_occurrences``
keys (as ``samples`` and ``occurrences``), we alias them using
:class:`~dwave.cloud.utils.aliasdict`. Values are available under
:class:`~dwave.cloud.utils.decorators.aliasdict`. Values are available under
alias keys, but the keys themselves are not stored or visible.

Examples:
Expand Down
2 changes: 1 addition & 1 deletion dwave/cloud/regions.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
DEFAULT_REGION, DEFAULT_SOLVER_API_ENDPOINT, DEFAULT_LEAP_API_ENDPOINT,
DEFAULT_METADATA_API_ENDPOINT)
from dwave.cloud.config.models import ClientConfig, validate_config_v1
from dwave.cloud.utils import cached
from dwave.cloud.utils.decorators import cached

__all__ = ['get_regions']

Expand Down
36 changes: 30 additions & 6 deletions dwave/cloud/solver.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,14 +44,15 @@
from dwave.cloud.coders import (
encode_problem_as_qp, encode_problem_as_ref, decode_binary_ref,
decode_qp_numpy, decode_qp, decode_bq, bqm_as_file)
from dwave.cloud.utils import reformat_qubo_as_ising, NumpyEncoder
from dwave.cloud.computation import Future
from dwave.cloud.concurrency import Present
from dwave.cloud.events import dispatches_events
from dwave.cloud.utils.coders import NumpyEncoder
from dwave.cloud.utils.qubo import reformat_qubo_as_ising

# Use numpy if available for fast encoding/decoding
try:
import numpy as np
import numpy
_numpy = True
except ImportError:
_numpy = False
Expand Down Expand Up @@ -593,6 +594,30 @@ class DQMSolver(BaseUnstructuredSolver):
_handled_problem_types = {"dqm"}
_handled_encoding_formats = {"bq"}

@staticmethod
def _bqm_to_dqm(bqm):
    """Represent a :class:`dimod.BQM` as a :class:`dimod.DQM`.

    Each variable of the Ising (spin) view of ``bqm`` becomes a 2-case
    discrete variable, with linear and quadratic biases mapped onto the
    corresponding case combinations.

    Raises:
        RuntimeError: If the installed dimod lacks DiscreteQuadraticModel
            support.
    """
    try:
        from dimod import DiscreteQuadraticModel
    except ImportError: # pragma: no cover
        # fixed: missing space between the two concatenated sentences
        raise RuntimeError(
            "dimod package with support for DiscreteQuadraticModel required. "
            "Re-install the library with 'dqm' support.")

    dqm = DiscreteQuadraticModel()

    # work on the spin (Ising) view of the binary quadratic model
    ising = bqm.spin

    for v, bias in ising.linear.items():
        # two cases per variable: spin down (-1) and spin up (+1)
        dqm.add_variable(2, label=v)
        dqm.set_linear(v, [-bias, bias])

    for (u, v), bias in ising.quadratic.items():
        # interaction energy for each of the 2x2 case combinations
        biases = numpy.array([[bias, -bias], [-bias, bias]], dtype=numpy.float64)
        dqm.set_quadratic(u, v, biases)

    return dqm

def _encode_problem_for_upload(self, dqm):
try:
data = dqm.to_file()
Expand All @@ -607,10 +632,9 @@ def _encode_problem_for_upload(self, dqm):

def sample_bqm(self, bqm, label=None, **params):
    """Use for testing: sample a BQM by converting it to a DQM first.

    Args:
        bqm: Binary quadratic model to sample.
        label: Optional problem label.
        **params: Sampling parameters forwarded to :meth:`sample_dqm`.
    """
    # to sample BQM problems, we need to convert them to DQM
    # (fixed attribute name: the converter is defined as ``_bqm_to_dqm``;
    # the stale module-level ``bqm_to_dqm`` import was diff residue)
    dqm = self._bqm_to_dqm(bqm)

    # TODO: convert sampleset back
    return self.sample_dqm(dqm, label=label, **params)
Expand Down Expand Up @@ -1502,9 +1526,9 @@ def estimate_qpu_access_time(self,
q = readout_time_model_parameters[:n//2]
t = readout_time_model_parameters[n//2:]
if readout_time_model == 'pwl_log_log':
readout_time = pow(10, np.interp(np.emath.log10(num_qubits), q, t))
readout_time = pow(10, numpy.interp(numpy.emath.log10(num_qubits), q, t))
elif readout_time_model == 'pwl_linear':
readout_time = np.interp(num_qubits, q, t)
readout_time = numpy.interp(num_qubits, q, t)
else:
raise ValueError("``estimate_qpu_access_time`` does not support "
f"``readout_time_model`` value {readout_time_model} "
Expand Down
Loading