From f0989de58b62e2be8ba69be002cac1328e78bd9c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 May 2022 13:10:42 +0000 Subject: [PATCH 01/18] Bump h5py from 3.6.0 to 3.7.0 Bumps [h5py](https://github.com/h5py/h5py) from 3.6.0 to 3.7.0. - [Release notes](https://github.com/h5py/h5py/releases) - [Changelog](https://github.com/h5py/h5py/blob/master/docs/release_guide.rst) - [Commits](https://github.com/h5py/h5py/compare/3.6.0...3.7.0) --- updated-dependencies: - dependency-name: h5py dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 291e1107..1a829c00 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ pymatgen==2022.5.18 scikit-learn==1.1.0 -h5py==3.6.0 +h5py==3.7.0 joblib==1.1.0 tqdm==4.64.0 pymongo==4.1.1 \ No newline at end of file From ca6004b30ecac2296204d729309f9037144df81e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 24 May 2022 13:12:55 +0000 Subject: [PATCH 02/18] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1a829c00..9f432beb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,4 @@ scikit-learn==1.1.0 h5py==3.7.0 joblib==1.1.0 tqdm==4.64.0 -pymongo==4.1.1 \ No newline at end of file +pymongo==4.1.1 From 11428267a7df3d7437ee6c42c8f24fee05702b30 Mon Sep 17 00:00:00 2001 From: Shyue Ping Ong Date: Sat, 11 Jun 2022 16:06:36 -0700 Subject: [PATCH 03/18] Replace pylint, --- maml/__init__.py | 2 +- pylintrc | 593 ++++++++++++++++++++++++----------------------- setup.py | 2 +- 3 files changed, 311 insertions(+), 286 deletions(-) diff --git a/maml/__init__.py b/maml/__init__.py index d080408d..cc1fd681 100644 --- a/maml/__init__.py +++ b/maml/__init__.py @@ -2,6 +2,6 @@ maml - materials machine learning """ -__version__ = "2022.5.3" +__version__ = "2022.6.11" __import__("pkg_resources").declare_namespace(__name__) diff --git a/pylintrc b/pylintrc index f4f39d4d..932c509e 100644 --- a/pylintrc +++ b/pylintrc @@ -1,17 +1,62 @@ [MASTER] +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, checkers without error messages are disabled and for others, +# only the ERROR messages are displayed, and no reports are done by default. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= + # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. 
(This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) extension-pkg-whitelist= -# Add files or directories to the blacklist. They should be base names, not -# paths. -#ignore=chemenv +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold to be exceeded before program exits with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. +ignore=CVS,tests,chemenv,abinit,defects -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns=test_ +# Add files or directories matching the regex patterns to the ignore-list. The +# regex matches against paths and can be in Posix or Windows format. +ignore-paths= + +# Files or directories matching the regex patterns are skipped. The regex +# matches against base names, not paths. The default value ignores Emacs file +# locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). @@ -19,22 +64,26 @@ ignore-patterns=test_ # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use. -jobs=0 +jobs=1 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or # complex, nested conditions. limit-inference-results=100 -# List of plugins (as comma separated values of python modules names) to load, +# List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes -# Specify a configuration file. -#rcfile= +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.9 + +# Discover python modules and packages in the file system subtree. +recursive=no # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. @@ -44,33 +93,56 @@ suggestion-mode=yes # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. 
This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, +disable=raw-checker-failed, bad-inline-option, locally-disabled, file-ignored, @@ -78,94 +150,20 @@ disable=print-statement, useless-suppression, deprecated-pragma, use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape, C0103, + W, + R, + E1120, + E1123, C0201, - C0209, E0401, - R0902, - R0904, - R0911, - R0912, - R0913, - R0914, - R0915, - R0916, - R1702, - R0903, - W, - C1801, E0611, - E1121, - R0901, C0415, - C0330, + C0114, + C0115, + C0116, C0302, - E1136, - R0801, - C0303, - R0201, - E1101 + # C0209 # 
Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option @@ -174,47 +172,10 @@ disable=print-statement, enable=c-extension-no-member -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - [LOGGING] -# Format style used to check logging format string. `old` means using % -# formatting, while `new` is for `{}` formatting. +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. logging-format-style=old # Logging modules to check that the string format arguments are in logging @@ -227,18 +188,22 @@ logging-modules=logging # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package.. +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the 'python-enchant' package. spelling-dict= +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + # List of comma separated words that should not be checked. spelling-ignore-words= -# A path to a file that contains private dictionary; one word per line. +# A path to a file that contains the private dictionary; one word per line. spelling-private-dict-file= -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no @@ -249,6 +214,9 @@ notes=FIXME, XXX, TODO +# Regular expression of note tags to take in consideration. +notes-rgx= + [TYPECHECK] @@ -262,10 +230,6 @@ contextmanager-decorators=contextlib.contextmanager # expressions are accepted. generated-members= -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). 
-ignore-mixin-members=yes - # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. ignore-none=yes @@ -278,16 +242,16 @@ ignore-none=yes # the rest of the inferred objects. ignore-on-opaque-inference=yes +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. @@ -301,6 +265,38 @@ missing-member-hint-distance=1 # showing a hint for a missing member. missing-member-max-choices=1 +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + [VARIABLES] @@ -311,6 +307,9 @@ additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes +# List of names allowed to shadow builtins +allowed-redefined-builtins= + # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_, @@ -351,14 +350,7 @@ indent-string=' ' max-line-length=120 # Maximum number of lines in a module. -max-module-lines=5000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator +max-module-lines=1000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. @@ -369,34 +361,146 @@ single-line-class-stmt=no single-line-if-stmt=no +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. 
+deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=BaseException, + Exception + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + [SIMILARITIES] -# Ignore comments when computing similarities. +# Comments are removed from the similarity computation ignore-comments=yes -# Ignore docstrings when computing similarities. +# Docstrings are removed from the similarity computation ignore-docstrings=yes -# Ignore imports when computing similarities. +# Imports are removed from the similarity computation ignore-imports=yes +# Signatures are removed from the similarity computation +ignore-signatures=yes + # Minimum lines number of a similarity. min-similarity-lines=4 +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + [BASIC] # Naming style matching correct argument names. argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- -# naming-style. +# naming-style. 
If left empty, argument names will be checked with the set +# naming style. #argument-rgx= # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= @@ -408,24 +512,38 @@ bad-names=foo, tutu, tata +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. #class-attribute-rgx= +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- -# style. +# style. If left empty, class names will be checked with the set naming style. #class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming # style. #const-rgx= @@ -437,7 +555,8 @@ docstring-min-length=-1 function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- -# naming-style. +# naming-style. If left empty, function names will be checked with the set +# naming style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. @@ -448,6 +567,10 @@ good-names=i, Run, _ +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + # Include a hint for the correct naming format with invalid-name. include-naming-hint=no @@ -455,21 +578,22 @@ include-naming-hint=no inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- -# style. +# style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- -# style. +# style. If left empty, module names will be checked with the set naming style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when @@ -485,113 +609,14 @@ no-docstring-rgx=^_ # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + # Naming style matching correct variable names. 
variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- -# naming-style. +# naming-style. If left empty, variable names will be checked with the set +# naming style. #variable-rgx= - - -[STRING] - -# This flag controls whether the implicit-str-concat-in-sequence should -# generate a warning on implicit string concatenation in sequences defined over -# several lines. -check-str-concat-over-line-jumps=no - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement. -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". 
-overgeneral-exceptions=BaseException, - Exception diff --git a/setup.py b/setup.py index e0fa216c..611143de 100644 --- a/setup.py +++ b/setup.py @@ -86,7 +86,7 @@ setup( name="maml", packages=find_packages(), - version="2022.5.3", + version="2022.6.11", install_requires=["numpy", "scipy", "monty", "scikit-learn", "pandas", "pymatgen", "tqdm"], extras_requires={ "maml.apps.symbolic._selectors_cvxpy": ["cvxpy"], From 137df6854728d968f9e2988a749c68d899477e72 Mon Sep 17 00:00:00 2001 From: Shyue Ping Ong Date: Sat, 11 Jun 2022 16:51:03 -0700 Subject: [PATCH 04/18] pylint --- maml/apps/bowsr/acquisition.py | 5 +-- maml/apps/bowsr/model/cgcnn.py | 4 +- maml/apps/bowsr/model/dft.py | 2 +- maml/apps/bowsr/optimizer.py | 23 ++++------- maml/apps/bowsr/perturbation.py | 39 +++++++++---------- maml/apps/bowsr/target_space.py | 5 ++- maml/apps/pes/_gap.py | 14 +++---- maml/apps/pes/_lammps.py | 14 +++---- maml/apps/pes/_mtp.py | 10 ++--- maml/apps/pes/_nnp.py | 16 ++++---- maml/apps/pes/_snap.py | 2 +- maml/apps/symbolic/_selectors.py | 9 ++--- maml/apps/symbolic/_selectors_cvxpy.py | 1 - maml/base/_describer.py | 4 +- maml/base/_feature_batch.py | 4 +- maml/data/_url.py | 4 +- maml/describers/_composition.py | 14 +++---- maml/describers/_site.py | 22 +++++------ maml/describers/_structure.py | 2 +- .../matminer_wrapper/_matminer_wrapper.py | 4 +- maml/describers/megnet/_megnet.py | 2 +- maml/describers/rdf/_rdf.py | 3 +- maml/utils/_lammps.py | 8 ++-- maml/utils/_preprocessing.py | 2 +- maml/utils/_stats.py | 2 +- maml/utils/_tempfile.py | 4 +- 26 files changed, 100 insertions(+), 119 deletions(-) diff --git a/maml/apps/bowsr/acquisition.py b/maml/apps/bowsr/acquisition.py index c0f7b560..41d6e62e 100644 --- a/maml/apps/bowsr/acquisition.py +++ b/maml/apps/bowsr/acquisition.py @@ -31,7 +31,7 @@ def ensure_rng(seed: int = None) -> RandomState: Create a random number generator based on an optional seed. This can be an integer for a seeded rng or None for an unseeded rng. """ - return np.random.RandomState(seed=seed) + return np.random.RandomState(seed=seed) # pylint: disable=E1101 def predict_mean_std(x: Union[List, np.ndarray], gpr: GaussianProcessRegressor, noise: float) -> Tuple[Any, ...]: @@ -152,8 +152,7 @@ def __init__(self, acq_type: str, kappa: float, xi: float): if acq_type not in ["ucb", "ei", "poi", "gp-ucb"]: err_msg = ( - "The utility function {} has not been implemented, " - "please choose one of ucb, ei, or poi.".format(acq_type) + f"The utility function {acq_type} has not been implemented, " "please choose one of ucb, ei, or poi." ) raise NotImplementedError(err_msg) self.acq_type = acq_type diff --git a/maml/apps/bowsr/model/cgcnn.py b/maml/apps/bowsr/model/cgcnn.py index 12cc0cd7..0c0391a5 100644 --- a/maml/apps/bowsr/model/cgcnn.py +++ b/maml/apps/bowsr/model/cgcnn.py @@ -107,9 +107,9 @@ def _get_nbr_fea(self, all_nbrs: list, cif_id: int) -> Tuple[np.ndarray, ...]: for nbr in all_nbrs: if len(nbr) < self.max_num_nbr: warnings.warn( - "{} not find enough neighbors to build graph. " + f"{cif_id} not find enough neighbors to build graph. " "If it happens frequently, consider increase " - "radius.".format(cif_id) + "radius." 
) nbr_fea_idx.append(list(map(lambda x: x[2], nbr)) + [0] * (self.max_num_nbr - len(nbr))) nbr_fea.append(list(map(lambda x: x[1], nbr)) + [self.radius + 1.0] * (self.max_num_nbr - len(nbr))) diff --git a/maml/apps/bowsr/model/dft.py b/maml/apps/bowsr/model/dft.py index 85d31595..287c9f44 100644 --- a/maml/apps/bowsr/model/dft.py +++ b/maml/apps/bowsr/model/dft.py @@ -54,7 +54,7 @@ def predict_energy(self, structure: Structure): stdout, stderr = p_exe.communicate() rc = p_exe.returncode if rc != 0: - error_msg = "vasp exited with return code %d" % rc + error_msg = f"vasp exited with return code {rc}" msg = stderr.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] diff --git a/maml/apps/bowsr/optimizer.py b/maml/apps/bowsr/optimizer.py index cc3f01cc..af006b72 100644 --- a/maml/apps/bowsr/optimizer.py +++ b/maml/apps/bowsr/optimizer.py @@ -402,20 +402,11 @@ def gpr(self): def __repr__(self): return ( - "{}(relax_coords={}, relax_lattice={}, use_symmetry={}" - "\n\t\twyckoff_dims={}, abc_dim={}, " - "\n\t\tangles_dim={}, kernel={}, scaler={}, noisy={})".format( - self.__class__.__name__, - self.relax_coords, - self.relax_lattice, - self.use_symmetry, - self.wyckoff_dims, - self.abc_dim, - self.angles_dim, - repr(self.gpr.kernel), - self.scaler.__class__.__name__, - self.noisy, - ) + f"{self.__class__.__name__}(relax_coords={self.relax_coords}, relax_lattice={self.relax_lattice}, " + f"use_symmetry={self.use_symmetry}" + f"\n\t\twyckoff_dims={self.wyckoff_dims}, abc_dim={self.abc_dim}, " + f"\n\t\tangles_dim={self.angles_dim}, kernel={repr(self.gpr.kernel)}, " + f"scaler={self.scaler.__class__.__name__}, noisy={self.noisy})" ) def as_dict(self): @@ -522,7 +513,7 @@ def gpr_from_dict(gpr_d): model = getattr(energy_model, d["model"])() else: - raise AttributeError("model {} is not supported.".format(d["model"])) + raise AttributeError(f"model {d['model']} is not supported.") structure = Structure.from_dict(d["structure"]) use_symmetry = d["use_symmetry"] @@ -555,7 +546,7 @@ def gpr_from_dict(gpr_d): space_params = np.array(space_d["params"]) space_target = np.array(space_d["target"]) space_bounds = np.array(space_d["bounds"]) - space_random_state = np.random.RandomState() + space_random_state = np.random.RandomState() # pylint: disable=E1101 space_random_state.set_state(space_d["random_state"]) from maml.apps.bowsr import preprocessing diff --git a/maml/apps/bowsr/perturbation.py b/maml/apps/bowsr/perturbation.py index b0583635..65fe2122 100644 --- a/maml/apps/bowsr/perturbation.py +++ b/maml/apps/bowsr/perturbation.py @@ -28,12 +28,14 @@ small_addup = np.array([1e-4] * 3) -perturbation_mapping = lambda x, fixed_indices: np.array( - [ - 0 if i in fixed_indices else x[np.argwhere(np.arange(3)[~np.isin(range(3), fixed_indices)] == i)[0][0]] - for i in range(3) - ] -) + +def perturbation_mapping(x, fixed_indices): + return np.array( + [ + 0 if i in fixed_indices else x[np.argwhere(np.arange(3)[~np.isin(range(3), fixed_indices)] == i)[0][0]] + for i in range(3) + ] + ) class WyckoffPerturbation: @@ -143,16 +145,12 @@ def fit_site(self): def __repr__(self): if self._site is not None: - return "{}(spg_int_number={}, wyckoff_symbol={}) {} [{:.4f}, {:.4f}, {:.4f}]".format( - self.__class__.__name__, - self.int_symbol, - self.wyckoff_symbol, - self._site.species_string, - *self._site.frac_coords, + a, b, c = self._site.frac_coords + return ( + f"{self.__class__.__name__}(spg_int_number={self.int_symbol}, wyckoff_symbol={self.wyckoff_symbol})" + 
f" {self._site.species_string} [{a:.4f}, {b:.4f}, {c:.4f}]" ) - return "{}(spg_int_number={}, wyckoff_symbol={})".format( - self.__class__.__name__, self.int_symbol, self.wyckoff_symbol - ) + return f"{self.__class__.__name__}(spg_int_number={self.int_symbol}, wyckoff_symbol={self.wyckoff_symbol})" def crystal_system(int_number: int) -> str: @@ -379,12 +377,11 @@ def abc(self) -> List[float]: def __repr__(self): if self._lattice is not None: - return "{}(spg_int_number={}, crystal_system={})\n".format( - self.__class__.__name__, self.spg_int_symbol, self.crys_system - ) + repr(self.lattice) - return "{}(spg_int_number={}, crystal_system={})\n".format( - self.__class__.__name__, self.spg_int_symbol, self.crys_system - ) + return ( + f"{self.__class__.__name__}(spg_int_number={self.spg_int_symbol}, " + f"crystal_system={self.crys_system})\n" + repr(self.lattice) + ) + return f"{self.__class__.__name__}(spg_int_number={self.spg_int_symbol}, crystal_system={self.crys_system})\n" def get_standardized_structure(structure: Structure) -> Structure: diff --git a/maml/apps/bowsr/target_space.py b/maml/apps/bowsr/target_space.py index 0a9738a0..e82bc870 100644 --- a/maml/apps/bowsr/target_space.py +++ b/maml/apps/bowsr/target_space.py @@ -198,6 +198,7 @@ def set_empty(self) -> None: self._target = np.empty(shape=(0)) def __repr__(self): - return "{}(relax_coords={}, relax_lattice={}, dim={}, length={})".format( - self.__class__.__name__, self.relax_coords, self.relax_lattice, self.dim, len(self) + return ( + f"{self.__class__.__name__}(relax_coords={self.relax_coords}, relax_lattice={self.relax_lattice}," + f" dim={self.dim}, length={len(self)})" ) diff --git a/maml/apps/pes/_gap.py b/maml/apps/pes/_gap.py index 94e0e64d..75c643d0 100644 --- a/maml/apps/pes/_gap.py +++ b/maml/apps/pes/_gap.py @@ -87,12 +87,12 @@ def _line_up(structure, energy, forces, virial_stress): description = [] if "Energy" in inputs: - description.append("dft_energy={}".format(inputs["Energy"])) + description.append("dft_energy=" + str(inputs["Energy"])) if "Stress" in inputs: description.append("dft_virial={%s}" % "\t".join(list(map(lambda f: str(f), inputs["Stress"])))) if "SuperCell" in inputs: - SuperCell_str = list(map(lambda f: str(f), inputs["SuperCell"].matrix.ravel())) - description.append('Lattice="{}"'.format(" ".join(SuperCell_str))) + super_cell_str = list(map(lambda f: str(f), inputs["SuperCell"].matrix.ravel())) + description.append(f'Lattice="{" ".join(super_cell_str)}"') description.append("Properties=species:S:1:pos:R:3:Z:I:1:dft_force:R:3") lines.append(" ".join(description)) @@ -312,14 +312,14 @@ def train( param = kwargs.get(param_name) if kwargs.get(param_name) else soap_params.get(param_name) gap_command.append(param_name + "=" + f"{param}") gap_command.append("add_species=T") - exe_command.append("gap=" + "{" + "{}".format(" ".join(gap_command)) + "}") + exe_command.append("gap={" + " ".join(gap_command) + "}") for param_name in preprocess_params: param = kwargs.get(param_name) if kwargs.get(param_name) else soap_params.get(param_name) exe_command.append(param_name + "=" + f"{param}") default_sigma = [str(f) for f in default_sigma] - exe_command.append("default_sigma={%s}" % (" ".join(default_sigma))) + exe_command.append(f"default_sigma={{{' '.join(default_sigma)}}}") if use_energies: exe_command.append("energy_parameter_name=dft_energy") @@ -336,7 +336,7 @@ def train( stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg = "gap_fit exited with return code %d" % rc + error_msg = 
f"gap_fit exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -392,7 +392,7 @@ def write_param(self, xml_filename="gap.2020.01.xml"): tree.write(xml_filename) pair_coeff = self.pair_coeff.format( - xml_filename, '"Potential xml_label={}"'.format(self.param.get("potential_label")), " ".join(atomic_numbers) + xml_filename, f'"Potential xml_label={self.param.get("potential_label")}"', " ".join(atomic_numbers) ) ff_settings = [self.pair_style, pair_coeff] return ff_settings diff --git a/maml/apps/pes/_lammps.py b/maml/apps/pes/_lammps.py index 95d4e7f2..f69923d4 100644 --- a/maml/apps/pes/_lammps.py +++ b/maml/apps/pes/_lammps.py @@ -36,7 +36,7 @@ def get_default_lmp_exe(): for lmp_exe in ["lmp_serial", "lmp_mpi", "lmp_g++_serial", "lmp_g++_mpich", "lmp_intel_cpu_intelmpi"]: if which(lmp_exe) is not None: - logger.info("Setting Lammps executable to %s" % lmp_exe) + logger.info(f"Setting Lammps executable to {lmp_exe}") return lmp_exe return None @@ -83,7 +83,7 @@ def __init__(self, **kwargs): if lmp_exe is None: lmp_exe = get_default_lmp_exe() if not which(lmp_exe): - raise ValueError("lammps executable %s not found" % str(lmp_exe)) + raise ValueError(f"lammps executable {lmp_exe} not found") self.LMP_EXE = lmp_exe for i, j in kwargs.items(): if i not in self.allowed_kwargs: @@ -148,7 +148,7 @@ def calculate(self, structures): stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg = "LAMMPS exited with return code %d" % rc + error_msg = f"LAMMPS exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -578,7 +578,7 @@ def _setup(self): stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg = "LAMMPS exited with return code %d" % rc + error_msg = f"LAMMPS exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -603,7 +603,7 @@ def _setup(self): stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg = "LAMMPS exited with return code %d" % rc + error_msg = f"LAMMPS exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -663,7 +663,7 @@ def calculate(self): stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg = "LAMMPS exited with return code %d" % rc + error_msg = f"LAMMPS exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -796,7 +796,7 @@ def calculate(self): stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg = "LAMMPS exited with return code %d" % rc + error_msg = f"LAMMPS exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] diff --git a/maml/apps/pes/_mtp.py b/maml/apps/pes/_mtp.py index 0089b557..acc01fcf 100644 --- a/maml/apps/pes/_mtp.py +++ b/maml/apps/pes/_mtp.py @@ -90,11 +90,11 @@ def _line_up(self, structure, energy, forces, virial_stress): if "Size" in inputs: lines.append(" Size") - lines.append("{:>7d}".format(inputs["Size"])) + lines.append(f"{inputs['Size']:>7d}") if "SuperCell" in inputs: lines.append(" SuperCell") for vec in inputs["SuperCell"].matrix: - 
lines.append("{:>17.6f}{:>14.6f}{:>14.6f}".format(*vec)) + lines.append(f"{vec[0]:>17.6f}{vec[1]:>14.6f}{vec[2]:>14.6f}") if "AtomData" in inputs: format_str = "{:>14s}{:>5s}{:>15s}{:>14s}{:>14s}{:>13s}{:>13s}{:>13s}" format_float = "{:>14d}{:>5d}{:>15f}{:>14f}{:>14f}{:>13f}{:>13f}{:>13f}" @@ -105,7 +105,7 @@ def _line_up(self, structure, energy, forces, virial_stress): lines.append(format_float.format(i + 1, self.elements.index(str(site.specie)), *site.coords, *force)) if "Energy" in inputs: lines.append(" Energy") - lines.append("{:>24.12f}".format(inputs["Energy"])) + lines.append(f"{inputs['Energy']:>24.12f}") if "Stress" in inputs: if not hasattr(self, "version") or self.version == "mlip-2": format_str = "{:>16s}{:>12s}{:>12s}{:>12s}{:>12s}{:>12s}" @@ -632,7 +632,7 @@ def train( stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg = "MLP exited with return code %d" % rc + error_msg = f"MLP exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -726,7 +726,7 @@ def evaluate(self, test_structures, test_energies, test_forces, test_stresses=No stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg = "mlp exited with return code %d" % rc + error_msg = f"mlp exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] diff --git a/maml/apps/pes/_nnp.py b/maml/apps/pes/_nnp.py index dc4f0a99..d0f5cc0e 100644 --- a/maml/apps/pes/_nnp.py +++ b/maml/apps/pes/_nnp.py @@ -87,7 +87,7 @@ def _line_up(self, structure, energy, forces, virial_stress): if "SuperCell" in inputs: bohr_matrix = inputs["SuperCell"].matrix / self.bohr_to_angstrom for vec in bohr_matrix: - lines.append("lattice {:>15.6f}{:>15.6f}{:>15.6f}".format(*vec)) + lines.append(f"lattice {vec[0]:>15.6f}{vec[1]:>15.6f}{vec[2]:>15.6f}") if "AtomData" in inputs: format_float = "atom{:>16.9f}{:>16.9f}{:>16.9f}{:>4s}{:>15.9f}{:>15.9f}{:>15.9f}{:>15.9f}{:>15.9f}" for i, (site, force) in enumerate(zip(structure, forces)): @@ -733,7 +733,7 @@ def train(self, train_structures, train_energies, train_forces, train_stresses=N stdout, stderr = p_scaling.communicate() rc = p_scaling.returncode if rc != 0: - error_msg = "n2p2 exited with return code %d" % rc + error_msg = f"n2p2 exited with return code {rc}" msg = stderr.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -749,7 +749,7 @@ def train(self, train_structures, train_energies, train_forces, train_stresses=N rc = p_train.returncode if rc != 0: - error_msg = "n2p2 exited with return code %d" % rc + error_msg = f"n2p2 exited with return code {rc}" msg = stderr.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -773,8 +773,8 @@ def train(self, train_structures, train_energies, train_forces, train_stresses=N self.validation_forces_rmse = errors[1] for specie in self.elements: - weights_filename = "weights.{}.{}.out".format( - str(Element(specie).number).zfill(3), str(self.param["epochs"]).zfill(6) + weights_filename = ( + f"weights.{str(Element(specie).number).zfill(3)}.{str(self.param['epochs']).zfill(6)}.out" ) self.weights[specie] = [] self.bs[specie] = [] @@ -824,7 +824,7 @@ def evaluate(self, test_structures, test_energies, test_forces, test_stresses=No stdout, stderr = p_evaluation.communicate() rc = p_evaluation.returncode if rc != 0: - 
error_msg = "n2p2 exited with return code %d" % rc + error_msg = f"n2p2 exited with return code {rc}" msg = stderr.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] @@ -857,9 +857,7 @@ def from_config(input_filename, scaling_filename, weights_filenames): nnp.load_input(input_filename) nnp.load_scaler(scaling_filename) if len(nnp.elements) != len(weights_filenames): - raise ValueError( - "{} weights files should be given to " "{}".format(len(nnp.elements), " ".join(nnp.elements)) - ) + raise ValueError(f"{len(nnp.elements)} weights files should be given to " + " ".join(nnp.elements)) for weights_filename, specie in zip(weights_filenames, nnp.elements): nnp.load_weights(weights_filename, specie) nnp.fitted = True diff --git a/maml/apps/pes/_snap.py b/maml/apps/pes/_snap.py index 95106e14..9bef145d 100644 --- a/maml/apps/pes/_snap.py +++ b/maml/apps/pes/_snap.py @@ -155,7 +155,7 @@ def write_param(self): coeff_lines = [] coeff_lines.append(f"{ne} {nbc + 1}") for element, coeff in zip(self.elements, np.split(model.model.coef_, ne)): - coeff_lines.append("{} {} {}".format(element, profile[element]["r"], profile[element]["w"])) + coeff_lines.append(f"{element} {profile[element]['r']} {profile[element]['w']}") coeff_lines.extend([str(c) for c in coeff]) with open(coeff_file, "w") as f: f.write("\n".join(coeff_lines)) diff --git a/maml/apps/symbolic/_selectors.py b/maml/apps/symbolic/_selectors.py index faa8b6c4..479c27a3 100644 --- a/maml/apps/symbolic/_selectors.py +++ b/maml/apps/symbolic/_selectors.py @@ -14,7 +14,6 @@ from sklearn.metrics import get_scorer -# pylint: disable=R0201 class BaseSelector: """ Feature selector. This is meant to work on relatively smaller @@ -161,8 +160,8 @@ def _get_param_names(cls): "scikit-learn estimators should always " "specify their parameters in the signature" " of their __init__ (no varargs)." - " %s with constructor %s doesn't " - " follow this convention." % (cls, init_signature) + f" {cls} with constructor {init_signature} doesn't " + " follow this convention." ) return sorted(p.name for p in parameters) @@ -201,9 +200,9 @@ def set_params(self, **params): key, delim, sub_key = key.partition("__") if key not in valid_params: raise ValueError( - "Invalid parameter %s for selector %s. " + f"Invalid parameter {key} for selector {self}. " "Check the list of available parameters " - "with `estimator.get_params().keys()`." % (key, self) + "with `estimator.get_params().keys()`." 
) if delim: diff --git a/maml/apps/symbolic/_selectors_cvxpy.py b/maml/apps/symbolic/_selectors_cvxpy.py index 8957059c..def06a5b 100644 --- a/maml/apps/symbolic/_selectors_cvxpy.py +++ b/maml/apps/symbolic/_selectors_cvxpy.py @@ -19,7 +19,6 @@ Expression = "Expression" -# pylint: disable=R0201 class BaseSelectorCP(BaseSelector): """ Base selector using cvxpy (CP) diff --git a/maml/base/_describer.py b/maml/base/_describer.py index 68c4e2a0..34f1a517 100644 --- a/maml/base/_describer.py +++ b/maml/base/_describer.py @@ -55,11 +55,11 @@ def __init__(self, **kwargs): for k, v in kwargs.items(): if k not in allowed_kwargs: - raise TypeError("%s not allowed as kwargs" % (str(k))) + raise TypeError(f"{k} not allowed as kwargs") memory = kwargs.get("memory", None) if isinstance(memory, bool): memory = tempfile.mkdtemp() - logger.info("Created temporary directory %s" % memory) + logger.info(f"Created temporary directory {memory}") verbose = kwargs.get("verbose", False) n_jobs = kwargs.get("n_jobs", 0) diff --git a/maml/base/_feature_batch.py b/maml/base/_feature_batch.py index 18a13571..e418be86 100644 --- a/maml/base/_feature_batch.py +++ b/maml/base/_feature_batch.py @@ -85,8 +85,6 @@ def get_feature_batch(fb_name: Optional[Union[str, Callable]] = None) -> Callabl try: return AVAILABLE_FB_METHODS[fb_name] except KeyError: - raise KeyError( - "Feature batch method not supported!" "Available ones are %s" % str(AVAILABLE_FB_METHODS.keys()) - ) + raise KeyError("Feature batch method not supported! Available ones are " + str(AVAILABLE_FB_METHODS.keys())) else: return fb_name diff --git a/maml/data/_url.py b/maml/data/_url.py index a25c1b09..58f969d7 100644 --- a/maml/data/_url.py +++ b/maml/data/_url.py @@ -36,7 +36,7 @@ def get(self, url: str) -> pd.DataFrame: # type: ignore pd.DataFrame """ raw = requests.get(url).text - read_func = getattr(pd, "read_%s" % self.fmt) + read_func = getattr(pd, f"read_{self.fmt}") return read_func(StringIO(raw), **self.read_kwargs) @@ -55,5 +55,5 @@ def get(self, file_id: str) -> pd.DataFrame: # type: ignore data frame """ - url = "https://ndownloader.figshare.com/files/%s" % file_id + url = f"https://ndownloader.figshare.com/files/{file_id}" return super().get(url) diff --git a/maml/describers/_composition.py b/maml/describers/_composition.py index fe20a711..7faf1ea9 100644 --- a/maml/describers/_composition.py +++ b/maml/describers/_composition.py @@ -22,8 +22,8 @@ } for length in [2, 3, 4, 8, 16, 32]: - DATA_MAPPING["megnet_l%d" % length] = "data/elemental_embedding_1MEGNet_layer_length_%d.json" % length - DATA_MAPPING["megnet_ion_l%d" % length] = "data/ion_embedding_1MEGNet_layer_length_%d.json" % length + DATA_MAPPING[f"megnet_l{length}"] = f"data/elemental_embedding_1MEGNet_layer_length_{length}.json" + DATA_MAPPING[f"megnet_ion_l{length}"] = f"data/ion_embedding_1MEGNet_layer_length_{length}.json" try: @@ -100,7 +100,7 @@ def __init__( n_single_property = n_property[0] if property_names is None: - property_names = ["p%d" % i for i in range(n_single_property)] + property_names = [f"p{i}" for i in range(n_single_property)] if len(property_names) != n_single_property: raise ValueError("Property name length is not consistent") @@ -205,7 +205,7 @@ def from_data(cls, data_name: Union[List[str], str], stats: Optional[List[str]] """ if isinstance(data_name, str): if data_name not in ElementStats.AVAILABLE_DATA: - raise ValueError("data name not found in the list %s" % str(ElementStats.AVAILABLE_DATA)) + raise ValueError(f"Data name not found in the list 
{str(ElementStats.AVAILABLE_DATA)}") filename = os.path.join(CWD, DATA_MAPPING[data_name]) return cls.from_file(filename, stats=stats, **kwargs) @@ -231,7 +231,7 @@ def from_data(cls, data_name: Union[List[str], str], stats: Optional[List[str]] for k in common_keys: element_properties[k].extend(instance.element_properties[k]) - property_names.extend(["%d_%s" % (index, i) for i in instance.property_names]) + property_names.extend([f"{index}_{i}" for i in instance.property_names]) return cls(element_properties=element_properties, property_names=property_names, stats=stats, **kwargs) @@ -270,10 +270,10 @@ def _reduce_dimension( if reduction_algo == "pca": m = PCA(n_components=num_dim, **reduction_params) - property_names = ["pca_%d" % i for i in range(num_dim)] + property_names = [f"pca_{i}" for i in range(num_dim)] elif reduction_algo == "kpca": m = KernelPCA(n_components=num_dim, **reduction_params) - property_names = ["kpca_%d" % i for i in range(num_dim)] + property_names = [f"kpca_{i}" for i in range(num_dim)] else: raise ValueError("Reduction algorithm not available") diff --git a/maml/describers/_site.py b/maml/describers/_site.py index a40c08f6..e538d907 100644 --- a/maml/describers/_site.py +++ b/maml/describers/_site.py @@ -104,11 +104,11 @@ def transform_one(self, structure: Structure) -> pd.DataFrame: Args: structure (Structure): Pymatgen Structure object. """ - columns = list(map(lambda s: "-".join(["%d" % i for i in s]), self.subscripts)) + columns = list(map(lambda s: "-".join([str(i) for i in s]), self.subscripts)) if self.quadratic: columns += list( map( - lambda s: "-".join(["%d%d%d" % (i, j, k) for i, j, k in s]), + lambda s: "-".join([f"{i}{j}{k}" for i, j, k in s]), itertools.combinations_with_replacement(self.subscripts, 2), ) ) @@ -129,7 +129,7 @@ def process(output, combine): hstack_b.fillna(0, inplace=True) dbs = np.split(db, len(self.elements), axis=1) dbs = np.hstack([np.insert(d.reshape(-1, len(columns)), 0, 0, axis=1) for d in dbs]) - db_index = ["%d_%s" % (i, d) for i in df_b.index for d in "xyz"] + db_index = [f"{i}_{d}" for i in df_b.index for d in "xyz"] df_db = pd.DataFrame(dbs, index=db_index, columns=hstack_b.columns) if self.include_stress: vbs = np.split(vb.sum(axis=0), len(self.elements)) @@ -230,14 +230,14 @@ def transform_one(self, structure: Structure) -> pd.DataFrame: atomic_numbers = [str(element.number) for element in sorted(np.unique(structure.species))] n_Z = len(atomic_numbers) n_species = len(atomic_numbers) - Z = "{" + "{}".format(" ".join(atomic_numbers)) + "}" - species_Z = "{" + "{}".format(" ".join(atomic_numbers)) + "}" - descriptor_command.append("n_Z" + "=" + str(n_Z)) - descriptor_command.append("Z" + "=" + Z) - descriptor_command.append("n_species" + "=" + str(n_species)) - descriptor_command.append("species_Z" + "=" + species_Z) + Z = "{" + " ".join(atomic_numbers) + "}" + species_Z = "{" + " ".join(atomic_numbers) + "}" + descriptor_command.append(f"n_Z={n_Z}") + descriptor_command.append(f"Z={Z}") + descriptor_command.append(f"n_species={n_species}") + descriptor_command.append(f"species_Z={species_Z}") - exe_command.append("descriptor_str=" + "{" + "{}".format(" ".join(descriptor_command)) + "}") + exe_command.append("descriptor_str={" + " ".join(descriptor_command) + "}") with ScratchDir("."): _ = self.operator.write_cfgs(filename=atoms_filename, cfg_pool=pool_from([structure])) @@ -247,7 +247,7 @@ def transform_one(self, structure: Structure) -> pd.DataFrame: stdout = p.communicate()[0] rc = p.returncode if rc != 0: - error_msg 
= "quip/soap exited with return code %d" % rc + error_msg = f"quip/soap exited with return code {rc}" msg = stdout.decode("utf-8").split("\n")[:-1] try: error_line = [i for i, m in enumerate(msg) if m.startswith("ERROR")][0] diff --git a/maml/describers/_structure.py b/maml/describers/_structure.py index 3c1fe688..628f5412 100644 --- a/maml/describers/_structure.py +++ b/maml/describers/_structure.py @@ -285,7 +285,7 @@ def get_randomized_coulomb_mat(self, s: Union[Molecule, Structure]) -> pd.DataFr """ c = self.get_coulomb_mat(s) row_norms = np.linalg.norm(c, axis=1) - rng = np.random.RandomState(self.random_seed) + rng = np.random.RandomState(self.random_seed) # pylint: disable=E1101 e = rng.normal(size=row_norms.size) p = np.argsort(row_norms + e) c = c[p][:, p] diff --git a/maml/describers/matminer_wrapper/_matminer_wrapper.py b/maml/describers/matminer_wrapper/_matminer_wrapper.py index 50696754..86aec550 100644 --- a/maml/describers/matminer_wrapper/_matminer_wrapper.py +++ b/maml/describers/matminer_wrapper/_matminer_wrapper.py @@ -38,10 +38,10 @@ def constructor(self, *args, **kwargs): memory = kwargs.pop("memory", None) verbose = kwargs.pop("verbose", False) feature_batch = kwargs.pop("feature_concat", "pandas_concat") - wrapped_class.__init__(self, *args, **kwargs) + wrapped_class(*args, **kwargs) logger.info(f"Using matminer_wrapper {wrapped_class.__name__} class") base_kwargs = dict(n_jobs=n_jobs, memory=memory, verbose=verbose, feature_batch=feature_batch) - BaseDescriber.__init__(self, **base_kwargs) + BaseDescriber(**base_kwargs) @classmethod # type: ignore def _get_param_names(cls): # type: ignore diff --git a/maml/describers/megnet/_megnet.py b/maml/describers/megnet/_megnet.py index 654c3f48..f33ef0d6 100644 --- a/maml/describers/megnet/_megnet.py +++ b/maml/describers/megnet/_megnet.py @@ -176,7 +176,7 @@ def transform_one(self, obj: Union[Structure, Molecule]): column_names = [] final_features = [] for i, f in enumerate(features_transpose): - column_names.extend(["%d_%s" % (i, n) for n in self.full_stats]) + column_names.extend([f"{i}_{n}" for n in self.full_stats]) final_features.extend([func(f) for func in self.stats_func]) return pd.DataFrame([final_features], columns=column_names) diff --git a/maml/describers/rdf/_rdf.py b/maml/describers/rdf/_rdf.py index 614471f0..d386710d 100644 --- a/maml/describers/rdf/_rdf.py +++ b/maml/describers/rdf/_rdf.py @@ -60,8 +60,7 @@ def get_site_rdf(self, structure: Structure) -> Tuple[np.ndarray, List[Dict]]: temp_neighbors = neighbors["neighbors"] rdfs[i] = { - "%s:%s" - % (c_specie, specie): _dist_to_counts( + f"{c_specie}:{specie}": _dist_to_counts( temp_neighbors[specie], r_min=self.r_min, r_max=self.r_max, n_grid=self.n_grid ) / self.volumes diff --git a/maml/utils/_lammps.py b/maml/utils/_lammps.py index 3cd44324..b4772060 100644 --- a/maml/utils/_lammps.py +++ b/maml/utils/_lammps.py @@ -69,7 +69,7 @@ def check_structures_forces_stresses( new_stresses.append(stresses[i]) # type: ignore continue - logger.info("Structure index %d is rotated." 
% i) + logger.info(f"Structure index {i} is rotated.") new_latt_matrix, symmop, rot_matrix = get_lammps_lattice_and_rotation(s, (0, 0, 0)) coords = symmop.operate_multi(s.cart_coords) new_s = Structure( @@ -239,10 +239,10 @@ def write_data_from_structure( element_map = {i: j + 1 for j, i in enumerate(ff_elements)} # generate atom section - lines.append("%d atoms\n" % len(structure)) - lines.append("%d atom types\n" % n_types) + lines.append(f"{len(structure)} atoms\n") + lines.append(f"{n_types} atom types\n") - ph = "{:.%df}" % significant_figures + ph = f"{{:.{significant_figures}f}}" % significant_figures for bound, d in zip(bounds, "xyz"): line = " ".join([ph.format(i) for i in bound] + [f"{d}{i}" for i in ["lo", "hi"]]) diff --git a/maml/utils/_preprocessing.py b/maml/utils/_preprocessing.py index a80e5c23..150aa004 100644 --- a/maml/utils/_preprocessing.py +++ b/maml/utils/_preprocessing.py @@ -115,7 +115,7 @@ def from_training_data( return cls(mean, std, is_intensive) def __str__(self): - return "StandardScaler(mean=%.3f, std=%.3f, is_intensive=%d)" % (self.mean, self.std, self.is_intensive) + return f"StandardScaler(mean={self.mean:.3f}, std={self.std:.3f}, is_intensive={self.is_intensive})" def __repr__(self): return str(self) diff --git a/maml/utils/_stats.py b/maml/utils/_stats.py index 231c1d2d..766e07d8 100644 --- a/maml/utils/_stats.py +++ b/maml/utils/_stats.py @@ -405,7 +405,7 @@ def _moment_symbol_conversion(moment_symbol: str): if max_order is None: return [moment_symbol] if max_order > 0: - return ["moment:%d:None" % i for i in range(1, max_order + 1)] + return [f"moment:{i}:None" for i in range(1, max_order + 1)] return ["moment:0:None"] diff --git a/maml/utils/_tempfile.py b/maml/utils/_tempfile.py index cb6d88a9..ec6744c5 100644 --- a/maml/utils/_tempfile.py +++ b/maml/utils/_tempfile.py @@ -86,7 +86,7 @@ def __enter__(self): if self.start_copy: [copy_r(".", tempdir) for tempdir in tempdirs] if self.create_symbolic_link: - [os.symlink(tempdir, "%s_%d" % (MultiScratchDir.SCR_LINK, i)) for i, tempdir in enumerate(tempdirs)] + [os.symlink(tempdir, f"{MultiScratchDir.SCR_LINK}_{i}") for i, tempdir in enumerate(tempdirs)] return tempdirs def __exit__(self, exc_type: str, exc_val: str, exc_tb: str): @@ -130,4 +130,4 @@ def _copy_r_with_suffix(src: str, dst: str, suffix: Optional[Any] = None): elif not absdst.startswith(fpath): _copy_r_with_suffix(fpath, os.path.join(absdst, f), suffix=suffix) else: - warnings.warn("Cannot copy %s to itself" % fpath) + warnings.warn(f"Cannot copy {fpath} to itself") From fabb3920d8c03ec3725cdba8df4c677fa1920ed0 Mon Sep 17 00:00:00 2001 From: Shyue Ping Ong Date: Sat, 11 Jun 2022 18:41:47 -0700 Subject: [PATCH 05/18] pylint --- maml/apps/bowsr/perturbation.py | 10 ++++++++++ maml/describers/matminer_wrapper/_matminer_wrapper.py | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/maml/apps/bowsr/perturbation.py b/maml/apps/bowsr/perturbation.py index 65fe2122..09766f66 100644 --- a/maml/apps/bowsr/perturbation.py +++ b/maml/apps/bowsr/perturbation.py @@ -30,6 +30,16 @@ def perturbation_mapping(x, fixed_indices): + """ + Perturbation mapping. 
+ + Args: + x: + fixed_indices: + + Returns: + + """ return np.array( [ 0 if i in fixed_indices else x[np.argwhere(np.arange(3)[~np.isin(range(3), fixed_indices)] == i)[0][0]] diff --git a/maml/describers/matminer_wrapper/_matminer_wrapper.py b/maml/describers/matminer_wrapper/_matminer_wrapper.py index 86aec550..7afabfab 100644 --- a/maml/describers/matminer_wrapper/_matminer_wrapper.py +++ b/maml/describers/matminer_wrapper/_matminer_wrapper.py @@ -41,7 +41,7 @@ def constructor(self, *args, **kwargs): wrapped_class(*args, **kwargs) logger.info(f"Using matminer_wrapper {wrapped_class.__name__} class") base_kwargs = dict(n_jobs=n_jobs, memory=memory, verbose=verbose, feature_batch=feature_batch) - BaseDescriber(**base_kwargs) + BaseDescriber.__init__(self, **base_kwargs) @classmethod # type: ignore def _get_param_names(cls): # type: ignore From f39541d663334c287b49860eab7568e17374305b Mon Sep 17 00:00:00 2001 From: Shyue Ping Ong Date: Sat, 11 Jun 2022 19:00:34 -0700 Subject: [PATCH 06/18] pylint --- maml/utils/_lammps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/maml/utils/_lammps.py b/maml/utils/_lammps.py index b4772060..900976e5 100644 --- a/maml/utils/_lammps.py +++ b/maml/utils/_lammps.py @@ -242,7 +242,7 @@ def write_data_from_structure( lines.append(f"{len(structure)} atoms\n") lines.append(f"{n_types} atom types\n") - ph = f"{{:.{significant_figures}f}}" % significant_figures + ph = f"{{{significant_figures:.f}}}" for bound, d in zip(bounds, "xyz"): line = " ".join([ph.format(i) for i in bound] + [f"{d}{i}" for i in ["lo", "hi"]]) From 5bb3c3e29e4d6a640b1d919cb8037cca7c6c0aa1 Mon Sep 17 00:00:00 2001 From: Shyue Ping Ong Date: Sat, 11 Jun 2022 19:00:59 -0700 Subject: [PATCH 07/18] pylint --- maml/apps/bowsr/perturbation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/maml/apps/bowsr/perturbation.py b/maml/apps/bowsr/perturbation.py index 09766f66..a257ee16 100644 --- a/maml/apps/bowsr/perturbation.py +++ b/maml/apps/bowsr/perturbation.py @@ -32,7 +32,7 @@ def perturbation_mapping(x, fixed_indices): """ Perturbation mapping. - + Args: x: fixed_indices: From cfe011a3fc112f8d24cb0b282d87bfa9b6f0d100 Mon Sep 17 00:00:00 2001 From: Shyue Ping Ong Date: Sat, 11 Jun 2022 19:25:13 -0700 Subject: [PATCH 08/18] pylint --- maml/utils/_lammps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/maml/utils/_lammps.py b/maml/utils/_lammps.py index 900976e5..b5b017ac 100644 --- a/maml/utils/_lammps.py +++ b/maml/utils/_lammps.py @@ -242,7 +242,7 @@ def write_data_from_structure( lines.append(f"{len(structure)} atoms\n") lines.append(f"{n_types} atom types\n") - ph = f"{{{significant_figures:.f}}}" + ph = f"{{:.{significant_figures}f}}" for bound, d in zip(bounds, "xyz"): line = " ".join([ph.format(i) for i in bound] + [f"{d}{i}" for i in ["lo", "hi"]]) From 1187e0a925bafc486b3bc1554b69873d05f2b2ba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Jun 2022 13:12:02 +0000 Subject: [PATCH 09/18] Bump torch from 1.11.0 to 1.12.0 Bumps [torch](https://github.com/pytorch/pytorch) from 1.11.0 to 1.12.0. - [Release notes](https://github.com/pytorch/pytorch/releases) - [Changelog](https://github.com/pytorch/pytorch/blob/master/RELEASE.md) - [Commits](https://github.com/pytorch/pytorch/compare/v1.11.0...v1.12.0) --- updated-dependencies: - dependency-name: torch dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- requirements-dl.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dl.txt b/requirements-dl.txt index eb32499d..a4ce7431 100644 --- a/requirements-dl.txt +++ b/requirements-dl.txt @@ -1,2 +1,2 @@ tensorflow==2.9.0 -torch==1.11.0 +torch==1.12.0 From 9fa7d96a604fd548f02515a115ab536d7ad59d2d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 29 Jun 2022 13:12:28 +0000 Subject: [PATCH 10/18] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 291e1107..287057d3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,4 @@ scikit-learn==1.1.0 h5py==3.6.0 joblib==1.1.0 tqdm==4.64.0 -pymongo==4.1.1 \ No newline at end of file +pymongo==4.1.1 From 71b1a3ffc16bc0a6ea62fc78a8a35d495f8deb4a Mon Sep 17 00:00:00 2001 From: JiQi535 Date: Sat, 9 Jul 2022 16:28:14 -0700 Subject: [PATCH 11/18] remove magic number of 1.228445 in stress processing --- maml/apps/pes/_mtp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/maml/apps/pes/_mtp.py b/maml/apps/pes/_mtp.py index acc01fcf..1e8d33c6 100644 --- a/maml/apps/pes/_mtp.py +++ b/maml/apps/pes/_mtp.py @@ -114,7 +114,7 @@ def _line_up(self, structure, energy, forces, virial_stress): format_str = "{:>12s}{:>12s}{:>12s}{:>12s}{:>12s}{:>12s}" lines.append(format_str.format("Stress: xx", "yy", "zz", "yz", "xz", "xy")) format_float = "{:>12f}{:>12f}{:>12f}{:>12f}{:>12f}{:>12f}" - lines.append(format_float.format(*np.array(virial_stress) / 1.228445)) + lines.append(format_float.format(*np.array(virial_stress))) lines.append("END_CFG") From f4ec91ca7560aa269447cc2dabdb5ffda4c47a83 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 9 Jul 2022 23:38:32 +0000 Subject: [PATCH 12/18] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 291e1107..287057d3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,4 @@ scikit-learn==1.1.0 h5py==3.6.0 joblib==1.1.0 tqdm==4.64.0 -pymongo==4.1.1 \ No newline at end of file +pymongo==4.1.1 From 66ad2f05cdfbcea86b1dd9593581f3bdb5d362c4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Aug 2022 22:07:43 +0000 Subject: [PATCH 13/18] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v2.32.0 → v2.37.3](https://github.com/asottile/pyupgrade/compare/v2.32.0...v2.37.3) - [github.com/pre-commit/pre-commit-hooks: v4.2.0 → v4.3.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.2.0...v4.3.0) - [github.com/psf/black: 22.3.0 → 22.6.0](https://github.com/psf/black/compare/22.3.0...22.6.0) - [github.com/PyCQA/flake8: 4.0.1 → 5.0.2](https://github.com/PyCQA/flake8/compare/4.0.1...5.0.2) - [github.com/pre-commit/mirrors-mypy: v0.950 → v0.971](https://github.com/pre-commit/mirrors-mypy/compare/v0.950...v0.971) --- .pre-commit-config.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 
ba9ac723..a52c70f6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,13 +21,13 @@ repos: - --ignore-init-module-imports - repo: https://github.com/asottile/pyupgrade - rev: v2.32.0 + rev: v2.37.3 hooks: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.2.0 + rev: v4.3.0 hooks: - id: check-yaml exclude: pymatgen/analysis/vesta_cutoffs.yaml @@ -41,16 +41,16 @@ repos: args: ["--profile", "black"] - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 22.6.0 hooks: - id: black - repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 + rev: 5.0.2 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.950 + rev: v0.971 hooks: - id: mypy From ee4d915c80cb5e547fdc887e89ad3c34cc3b7746 Mon Sep 17 00:00:00 2001 From: Shyue Ping Ong Date: Mon, 1 Aug 2022 16:02:59 -0700 Subject: [PATCH 14/18] Update req. --- requirements.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 287057d3..0fead705 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ -pymatgen==2022.5.18 -scikit-learn==1.1.0 -h5py==3.6.0 +pymatgen==2022.5.26 +scikit-learn==1.1.1 +h5py==3.7.0 joblib==1.1.0 tqdm==4.64.0 -pymongo==4.1.1 +pymongo==4.1.1 \ No newline at end of file From 6b2a0110fce2a12634321f3a6b94d5b8a40ed5a9 Mon Sep 17 00:00:00 2001 From: Shyue Ping Ong Date: Mon, 1 Aug 2022 16:03:45 -0700 Subject: [PATCH 15/18] Update matminer. --- requirements-optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-optional.txt b/requirements-optional.txt index 3d75242b..a7b71a17 100644 --- a/requirements-optional.txt +++ b/requirements-optional.txt @@ -1,4 +1,4 @@ cvxpy==1.2.1 tftb==0.1.3 megnet==1.3.0 -matminer==0.7.6 +matminer==0.7.8 \ No newline at end of file From fd8724fb3715ebb5aff3ca1c297eeec534222a56 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Aug 2022 23:04:28 +0000 Subject: [PATCH 16/18] Bump pymongo from 4.1.1 to 4.2.0 Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.1.1 to 4.2.0. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.1.1...4.2.0) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0fead705..b0e376b7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,4 @@ scikit-learn==1.1.1 h5py==3.7.0 joblib==1.1.0 tqdm==4.64.0 -pymongo==4.1.1 \ No newline at end of file +pymongo==4.2.0 \ No newline at end of file From f71c289115d1bf6aa66224741ef036212c636338 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Aug 2022 23:04:48 +0000 Subject: [PATCH 17/18] Bump megnet from 1.3.0 to 1.3.1 Bumps [megnet](https://github.com/materialsvirtuallab/megnet) from 1.3.0 to 1.3.1. 
- [Release notes](https://github.com/materialsvirtuallab/megnet/releases) - [Changelog](https://github.com/materialsvirtuallab/megnet/blob/master/CHANGES.md) - [Commits](https://github.com/materialsvirtuallab/megnet/compare/v1.3.0...v1.3.1) --- updated-dependencies: - dependency-name: megnet dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-optional.txt b/requirements-optional.txt index a7b71a17..295604a7 100644 --- a/requirements-optional.txt +++ b/requirements-optional.txt @@ -1,4 +1,4 @@ cvxpy==1.2.1 tftb==0.1.3 -megnet==1.3.0 +megnet==1.3.1 matminer==0.7.8 \ No newline at end of file From 64827114b99785a0252767093c995a5d6bd33872 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Aug 2022 23:05:19 +0000 Subject: [PATCH 18/18] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- requirements-optional.txt | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-optional.txt b/requirements-optional.txt index a7b71a17..f6353a65 100644 --- a/requirements-optional.txt +++ b/requirements-optional.txt @@ -1,4 +1,4 @@ cvxpy==1.2.1 tftb==0.1.3 megnet==1.3.0 -matminer==0.7.8 \ No newline at end of file +matminer==0.7.8 diff --git a/requirements.txt b/requirements.txt index b0e376b7..9a57e3c5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,4 @@ scikit-learn==1.1.1 h5py==3.7.0 joblib==1.1.0 tqdm==4.64.0 -pymongo==4.2.0 \ No newline at end of file +pymongo==4.2.0
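
For reference, the placeholder rework in maml/utils/_lammps.py (write_data_from_structure) goes through two follow-up fixes in this series: the initial f-string conversion keeps a stray "% significant_figures" (a TypeError at runtime, since the converted template no longer contains a % specifier), PATCH 06/18 swaps it for f"{{{significant_figures:.f}}}" (which also fails at runtime because ".f" carries no precision digit), and PATCH 08/18 settles on escaping the literal braces. A minimal sketch of the before/after behaviour, assuming the same significant_figures variable used in that function (illustrative only, not part of the patch series):

    # Build a float-format template such as "{:.6f}" from an integer precision.
    significant_figures = 6

    # Old style: %-interpolation fills the precision into the template.
    ph_old = "{:.%df}" % significant_figures

    # New style (PATCH 08/18): doubled braces escape the literal {} inside the
    # f-string, so only {significant_figures} is interpolated.
    ph_new = f"{{:.{significant_figures}f}}"

    assert ph_old == ph_new == "{:.6f}"
    print(ph_new.format(3.14159265))  # -> 3.141593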