Skip to content

Commit

Permalink
✨ Add ids option for needimport (#1292)
Browse files Browse the repository at this point in the history
A more performant alternative to the `filter` option.

This PR also adds some other performance optimisations;
caching the schema load and not reading the import file twice (during validation)

Also, add to and improve the `needimport` tests
  • Loading branch information
chrisjsewell committed Sep 13, 2024
1 parent 44d7db9 commit 2ff14e7
Show file tree
Hide file tree
Showing 9 changed files with 2,503 additions and 98 deletions.
14 changes: 13 additions & 1 deletion docs/directives/needimport.rst
Original file line number Diff line number Diff line change
Expand Up @@ -56,13 +56,25 @@ In most cases this should be the latest available version.
tags
~~~~

You can attach tags to existing tags of imported needs using the ``:tags:`` option.
You can attach tags to existing tags of imported needs using the ``:tags:`` option
(as a comma-separated list).
This may be useful to easily mark imported needs and to create specialised filters for them.

ids
~~~

.. versionadded:: 3.1.0

You can use the ``:ids:`` option to import only the needs with the given ids
(as a comma-separated list).
This is useful if you want to import only a subset of the needs from the JSON file.

filter
~~~~~~

You can use the ``:filter:`` option to import only the needs which pass the filter criteria.
This is a string that is evaluated as a Python expression;
it is less performant than the ``:ids:`` option, but more flexible.

Please read :ref:`filter` for more information.

Expand Down
39 changes: 24 additions & 15 deletions sphinx_needs/directives/needimport.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from sphinx_needs.defaults import string_to_boolean
from sphinx_needs.filter_common import filter_single_need
from sphinx_needs.logging import log_warning
from sphinx_needs.needsfile import check_needs_file
from sphinx_needs.needsfile import SphinxNeedsFileException, check_needs_data
from sphinx_needs.utils import add_doc, import_prefix_link_edit, logger


Expand All @@ -37,6 +37,7 @@ class NeedimportDirective(SphinxDirective):
"version": directives.unchanged_required,
"hide": directives.flag,
"collapse": string_to_boolean,
"ids": directives.unchanged_required,
"filter": directives.unchanged_required,
"id_prefix": directives.unchanged_required,
"tags": directives.unchanged_required,
Expand All @@ -56,10 +57,6 @@ def run(self) -> Sequence[nodes.Node]:
filter_string = self.options.get("filter")
id_prefix = self.options.get("id_prefix", "")

tags = self.options.get("tags", [])
if len(tags) > 0:
tags = [tag.strip() for tag in re.split("[;,]", tags)]

need_import_path = self.arguments[0]

# check if given argument is downloadable needs.json path
Expand Down Expand Up @@ -115,21 +112,21 @@ def run(self) -> Sequence[nodes.Node]:
f"Could not load needs import file {correct_need_import_path}"
)

errors = check_needs_file(correct_need_import_path)
try:
with open(correct_need_import_path) as needs_file:
needs_import_list = json.load(needs_file)
except (OSError, json.JSONDecodeError) as e:
# TODO: Add exception handling
raise SphinxNeedsFileException(correct_need_import_path) from e

errors = check_needs_data(needs_import_list)
if errors.schema:
logger.info(
f"Schema validation errors detected in file {correct_need_import_path}:"
)
for error in errors.schema:
logger.info(f' {error.message} -> {".".join(error.path)}')

try:
with open(correct_need_import_path) as needs_file:
needs_import_list = json.load(needs_file)
except json.JSONDecodeError as e:
# TODO: Add exception handling
raise e

if version is None:
try:
version = needs_import_list["current_version"]
Expand All @@ -146,6 +143,13 @@ def run(self) -> Sequence[nodes.Node]:

needs_config = NeedsSphinxConfig(self.config)
data = needs_import_list["versions"][version]

if ids := self.options.get("ids"):
id_list = [i.strip() for i in ids.split(",") if i.strip()]
data["needs"] = {
key: data["needs"][key] for key in id_list if key in data["needs"]
}

# TODO this is not exactly NeedsInfoType, because the export removes/adds some keys
needs_list: dict[str, NeedsInfoType] = data["needs"]
if schema := data.get("needs_schema"):
Expand Down Expand Up @@ -184,8 +188,13 @@ def run(self) -> Sequence[nodes.Node]:
needs_list = needs_list_filtered

# tags update
for need in needs_list.values():
need["tags"] = need["tags"] + tags
if tags := [
tag.strip()
for tag in re.split("[;,]", self.options.get("tags", ""))
if tag.strip()
]:
for need in needs_list.values():
need["tags"] = need["tags"] + tags

import_prefix_link_edit(needs_list, id_prefix, needs_config.extra_links)

Expand Down
30 changes: 24 additions & 6 deletions sphinx_needs/needsfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import sys
from copy import deepcopy
from datetime import datetime
from functools import lru_cache
from typing import Any, Iterable

from jsonschema import Draft7Validator
Expand Down Expand Up @@ -242,21 +243,38 @@ def check_needs_file(path: str) -> Errors:
:param path: File path to a needs.json file
:return: Dict, with error reports
"""
schema_path = os.path.join(os.path.dirname(__file__), "needsfile.json")
with open(schema_path) as schema_file:
needs_schema = json.load(schema_file)

with open(path) as needs_file:
try:
needs_data = json.load(needs_file)
data = json.load(needs_file)
except json.JSONDecodeError as e:
raise SphinxNeedsFileException(
f'Problems loading json file "{path}". '
f"Maybe it is empty or has an invalid json format. Original exception: {e}"
)
return check_needs_data(data)


@lru_cache
def _load_schema() -> dict[str, Any]:
    """Read the bundled ``needsfile.json`` validation schema.

    The schema ships alongside this module and never changes at runtime,
    so the parsed result is cached (``lru_cache``) to avoid re-reading and
    re-parsing the file on every validation call.

    :return: The JSON schema as a dictionary.
    """
    schema_location = os.path.join(os.path.dirname(__file__), "needsfile.json")
    with open(schema_location) as handle:
        return json.load(handle)  # type: ignore[no-any-return]


def check_needs_data(data: Any) -> Errors:
"""
Checks a given json-file, if it passes our needs.json structure tests.
Current checks:
* Schema validation
:param data: Loaded needs.json file
:return: Dict, with error reports
"""
needs_schema = _load_schema()

validator = Draft7Validator(needs_schema)
schema_errors = list(validator.iter_errors(needs_data))
schema_errors = list(validator.iter_errors(data))

# In future there may be additional types of validations.
# So lets already use a class for all errors
Expand Down
Loading

0 comments on commit 2ff14e7

Please sign in to comment.