diff --git a/lambdas/service/app.py b/lambdas/service/app.py
index e08bd22b7..2296715ba 100644
--- a/lambdas/service/app.py
+++ b/lambdas/service/app.py
@@ -228,7 +228,7 @@
             # changes and reset the minor version to zero. Otherwise, increment only
             # the minor version for backwards compatible changes. A backwards
             # compatible change is one that does not require updates to clients.
-            'version': '4.1'
+            'version': '4.2'
         },
         'tags': [
             {
@@ -1402,6 +1402,10 @@ def manifest_route(*, fetch: bool, initiate: bool):
               manifest in [JSONL][5] format. Each line contains an
               unaltered metadata entity from the underlying repository.
 
+            - {ManifestFormat.verbatim_pfb.value} for a verbatim
+              manifest in the [PFB format][3]. This format is mainly
+              used for exporting data to Terra.
+
             [1]: https://bd2k.ini.usc.edu/tools/bdbag/
 
             [2]: https://software.broadinstitute.org/firecloud/documentation/article?id=10954
diff --git a/lambdas/service/openapi.json b/lambdas/service/openapi.json
index e826453cc..e1cdcf36a 100644
--- a/lambdas/service/openapi.json
+++ b/lambdas/service/openapi.json
@@ -3,7 +3,7 @@
     "info": {
         "title": "azul_service",
         "description": "\n# Overview\n\nAzul is a REST web service for querying metadata associated with\nboth experimental and analysis data from a data repository. In order\nto deliver response times that make it suitable for interactive use\ncases, the set of metadata properties that it exposes for sorting,\nfiltering, and aggregation is limited. Azul provides a uniform view\nof the metadata over a range of diverse schemas, effectively\nshielding clients from changes in the schemas as they occur over\ntime. It does so, however, at the expense of detail in the set of\nmetadata properties it exposes and in the accuracy with which it\naggregates them.\n\nAzul denormalizes and aggregates metadata into several different\nindices for selected entity types. Metadata entities can be queried\nusing the [Index](#operations-tag-Index) endpoints.\n\nA set of indices forms a catalog. There is a default catalog called\n`dcp2` which will be used unless a\ndifferent catalog name is specified using the `catalog` query\nparameter. Metadata from different catalogs is completely\nindependent: a response obtained by querying one catalog does not\nnecessarily correlate to a response obtained by querying another\none. Two catalogs can contain metadata from the same sources or\ndifferent sources. It is only guaranteed that the body of a\nresponse by any given endpoint adheres to one schema,\nindependently of which catalog was specified in the request.\n\nAzul provides the ability to download data and metadata via the\n[Manifests](#operations-tag-Manifests) endpoints. The\n`curl` format manifests can be used to\ndownload data files. Other formats provide various views of the\nmetadata. Manifests can be generated for a selection of files using\nfilters. These filters are interchangeable with the filters used by\nthe [Index](#operations-tag-Index) endpoints.\n\nAzul also provides a [summary](#operations-Index-get_index_summary)\nview of indexed data.\n\n## Data model\n\nAny index, when queried, returns a JSON array of hits. Each hit\nrepresents a metadata entity. Nested in each hit is a summary of the\nproperties of entities associated with the hit. An entity is\nassociated either by a direct edge in the original metadata graph,\nor indirectly as a series of edges. The nested properties are\ngrouped by the type of the associated entity. The properties of all\ndata files associated with a particular sample, for example, are\nlisted under `hits[*].files` in a `/index/samples` response. It is\nimportant to note that while each _hit_ represents a discrete\nentity, the properties nested within that hit are the result of an\naggregation over potentially many associated entities.\n\nTo illustrate this, consider a data file that is part of two\nprojects (a project is a group of related experiments, typically by\none laboratory, institution or consortium). Querying the `files`\nindex for this file yields a hit looking something like:\n\n```\n{\n \"projects\": [\n {\n \"projectTitle\": \"Project One\"\n \"laboratory\": ...,\n ...\n },\n {\n \"projectTitle\": \"Project Two\"\n \"laboratory\": ...,\n ...\n }\n ],\n \"files\": [\n {\n \"format\": \"pdf\",\n \"name\": \"Team description.pdf\",\n ...\n }\n ]\n}\n```\n\nThis example hit contains two kinds of nested entities (a hit in an\nactual Azul response will contain more): There are the two projects\nentities, and the file itself. These nested entities contain\nselected metadata properties extracted in a consistent way. This\nmakes filtering and sorting simple.\n\nAlso notice that there is only one file. When querying a particular\nindex, the corresponding entity will always be a singleton like\nthis.\n",
-        "version": "4.1"
+        "version": "4.2"
     },
     "tags": [
         {
@@ -9479,10 +9479,11 @@
                         "terra.bdbag",
                         "terra.pfb",
                         "curl",
-                        "verbatim.jsonl"
+                        "verbatim.jsonl",
+                        "verbatim.pfb"
                     ]
                 },
-                "description": "\nThe desired format of the output.\n\n- `compact` (the default) for a compact,\n tab-separated manifest\n\n- `terra.bdbag` for a manifest in the\n [BDBag format][1]. This provides a ZIP file containing two\n manifests: one for Participants (aka Donors) and one for\n Samples (aka Specimens). For more on the format of the\n manifests see [documentation here][2].\n\n- `terra.pfb` for a manifest in the [PFB\n format][3]. This format is mainly used for exporting data to\n Terra.\n\n- `curl` for a [curl configuration\n file][4] manifest. This manifest can be used with the curl\n program to download all the files listed in the manifest.\n\n- `verbatim.jsonl` for a verbatim\n manifest in [JSONL][5] format. Each line contains an\n unaltered metadata entity from the underlying repository.\n\n[1]: https://bd2k.ini.usc.edu/tools/bdbag/\n\n[2]: https://software.broadinstitute.org/firecloud/documentation/article?id=10954\n\n[3]: https://github.com/uc-cdis/pypfb\n\n[4]: https://curl.haxx.se/docs/manpage.html#-K\n\n[5]: https://jsonlines.org/\n"
+                "description": "\nThe desired format of the output.\n\n- `compact` (the default) for a compact,\n tab-separated manifest\n\n- `terra.bdbag` for a manifest in the\n [BDBag format][1]. This provides a ZIP file containing two\n manifests: one for Participants (aka Donors) and one for\n Samples (aka Specimens). For more on the format of the\n manifests see [documentation here][2].\n\n- `terra.pfb` for a manifest in the [PFB\n format][3]. This format is mainly used for exporting data to\n Terra.\n\n- `curl` for a [curl configuration\n file][4] manifest. This manifest can be used with the curl\n program to download all the files listed in the manifest.\n\n- `verbatim.jsonl` for a verbatim\n manifest in [JSONL][5] format. Each line contains an\n unaltered metadata entity from the underlying repository.\n\n- verbatim.pfb for a verbatim \n manifest in the [PFB format][3]. This format is mainly\n used for exporting data to Terra.\n\n[1]: https://bd2k.ini.usc.edu/tools/bdbag/\n\n[2]: https://software.broadinstitute.org/firecloud/documentation/article?id=10954\n\n[3]: https://github.com/uc-cdis/pypfb\n\n[4]: https://curl.haxx.se/docs/manpage.html#-K\n\n[5]: https://jsonlines.org/\n"
             }
         ],
         "responses": {
@@ -10887,10 +10888,11 @@
                         "terra.bdbag",
                         "terra.pfb",
                         "curl",
-                        "verbatim.jsonl"
+                        "verbatim.jsonl",
+                        "verbatim.pfb"
                     ]
                 },
-                "description": "\nThe desired format of the output.\n\n- `compact` (the default) for a compact,\n tab-separated manifest\n\n- `terra.bdbag` for a manifest in the\n [BDBag format][1]. This provides a ZIP file containing two\n manifests: one for Participants (aka Donors) and one for\n Samples (aka Specimens). For more on the format of the\n manifests see [documentation here][2].\n\n- `terra.pfb` for a manifest in the [PFB\n format][3]. This format is mainly used for exporting data to\n Terra.\n\n- `curl` for a [curl configuration\n file][4] manifest. This manifest can be used with the curl\n program to download all the files listed in the manifest.\n\n- `verbatim.jsonl` for a verbatim\n manifest in [JSONL][5] format. Each line contains an\n unaltered metadata entity from the underlying repository.\n\n[1]: https://bd2k.ini.usc.edu/tools/bdbag/\n\n[2]: https://software.broadinstitute.org/firecloud/documentation/article?id=10954\n\n[3]: https://github.com/uc-cdis/pypfb\n\n[4]: https://curl.haxx.se/docs/manpage.html#-K\n\n[5]: https://jsonlines.org/\n"
+                "description": "\nThe desired format of the output.\n\n- `compact` (the default) for a compact,\n tab-separated manifest\n\n- `terra.bdbag` for a manifest in the\n [BDBag format][1]. This provides a ZIP file containing two\n manifests: one for Participants (aka Donors) and one for\n Samples (aka Specimens). For more on the format of the\n manifests see [documentation here][2].\n\n- `terra.pfb` for a manifest in the [PFB\n format][3]. This format is mainly used for exporting data to\n Terra.\n\n- `curl` for a [curl configuration\n file][4] manifest. This manifest can be used with the curl\n program to download all the files listed in the manifest.\n\n- `verbatim.jsonl` for a verbatim\n manifest in [JSONL][5] format. Each line contains an\n unaltered metadata entity from the underlying repository.\n\n- verbatim.pfb for a verbatim \n manifest in the [PFB format][3]. This format is mainly\n used for exporting data to Terra.\n\n[1]: https://bd2k.ini.usc.edu/tools/bdbag/\n\n[2]: https://software.broadinstitute.org/firecloud/documentation/article?id=10954\n\n[3]: https://github.com/uc-cdis/pypfb\n\n[4]: https://curl.haxx.se/docs/manpage.html#-K\n\n[5]: https://jsonlines.org/\n"
             }
         ],
         "responses": {
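
Note: once deployed, a client selects the new format the same way as the existing ones, through the `format` query parameter of the manifest endpoints. A minimal sketch follows; the service URL is a placeholder, and the `/fetch/manifest/files` route, the GET verb, the `catalog` and `filters` parameters are assumptions based on the existing manifest endpoints rather than anything introduced by this diff:

    import requests

    azul = 'https://service.azul.example.org'  # placeholder deployment URL
    response = requests.get(f'{azul}/fetch/manifest/files',
                            params={'catalog': 'dcp2',
                                    'filters': '{}',
                                    'format': 'verbatim.pfb'})
    response.raise_for_status()
    # The fetch variant responds with JSON describing the asynchronous
    # manifest preparation; poll it until the manifest is ready to download.
    print(response.json())
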
diff --git a/src/azul/plugins/__init__.py b/src/azul/plugins/__init__.py
index 212f4d673..6bf9d6af1 100644
--- a/src/azul/plugins/__init__.py
+++ b/src/azul/plugins/__init__.py
@@ -134,6 +134,7 @@ class ManifestFormat(Enum):
     terra_pfb = 'terra.pfb'
     curl = 'curl'
     verbatim_jsonl = 'verbatim.jsonl'
+    verbatim_pfb = 'verbatim.pfb'
 
 
 T = TypeVar('T', bound='Plugin')
diff --git a/src/azul/plugins/metadata/anvil/__init__.py b/src/azul/plugins/metadata/anvil/__init__.py
index 6b9121402..d347459c6 100644
--- a/src/azul/plugins/metadata/anvil/__init__.py
+++ b/src/azul/plugins/metadata/anvil/__init__.py
@@ -70,7 +70,10 @@ def manifest_formats(self) -> Sequence[ManifestFormat]:
         return [
             ManifestFormat.compact,
             ManifestFormat.terra_pfb,
-            *iif(config.enable_replicas, [ManifestFormat.verbatim_jsonl])
+            *iif(config.enable_replicas, [
+                ManifestFormat.verbatim_jsonl,
+                ManifestFormat.verbatim_pfb
+            ])
         ]
 
     def transformer_types(self) -> Iterable[Type[BaseTransformer]]:
diff --git a/src/azul/plugins/metadata/hca/__init__.py b/src/azul/plugins/metadata/hca/__init__.py
index 6338314c7..09f08830c 100644
--- a/src/azul/plugins/metadata/hca/__init__.py
+++ b/src/azul/plugins/metadata/hca/__init__.py
@@ -10,6 +10,7 @@
 
 from azul import (
     config,
+    iif,
 )
 from azul.indexer.document import (
     Aggregate,
@@ -163,7 +164,10 @@ def manifest_formats(self) -> Sequence[ManifestFormat]:
         ManifestFormat.terra_bdbag,
         ManifestFormat.terra_pfb,
         ManifestFormat.curl,
-        *([ManifestFormat.verbatim_jsonl] if config.enable_replicas else [])
+        *iif(config.enable_replicas, [
+            ManifestFormat.verbatim_jsonl,
+            ManifestFormat.verbatim_pfb
+        ])
     ]
 
     @property
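
Note: both metadata plugins now gate the two verbatim formats on `config.enable_replicas` via `iif`. Judging by the conditional expression it replaces in the HCA plugin, `iif` selects its second argument when the condition holds and something empty otherwise, so the result can be splatted into the list. A stand-in sketch, purely for illustration; the real helper lives in the `azul` package and may have a different signature:

    def iif(condition, then, otherwise=()):
        # Illustrative stand-in: yield `then` when the condition holds and an
        # empty sequence otherwise, so the result can be safely splatted.
        return then if condition else otherwise

    enable_replicas = True  # stands in for config.enable_replicas
    formats = [
        'compact',
        'terra.pfb',
        *iif(enable_replicas, ['verbatim.jsonl', 'verbatim.pfb']),
    ]
    assert formats[-2:] == ['verbatim.jsonl', 'verbatim.pfb']
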
diff --git a/src/azul/service/avro_pfb.py b/src/azul/service/avro_pfb.py
index 0a94c9340..951f2d04e 100644
--- a/src/azul/service/avro_pfb.py
+++ b/src/azul/service/avro_pfb.py
@@ -13,6 +13,7 @@
     itemgetter,
 )
 from typing import (
+    AbstractSet,
     ClassVar,
     MutableSet,
     Self,
@@ -198,6 +199,13 @@ def for_transform(cls, name: str, object_: MutableJSON, schema: JSON) -> Self:
                              object_=object_,
                              schema=schema)
 
+    @classmethod
+    def for_replica(cls, object_: MutableJSON, schema: JSON) -> Self:
+        return cls.from_json(name=object_['replica_type'],
+                             ids=[object_['entity_id']],
+                             object_=object_,
+                             schema=schema)
+
     @classmethod
     def _add_missing_fields(cls, name: str, object_: MutableJSON, schema):
         """
@@ -303,6 +311,14 @@ def pfb_schema_from_field_types(field_types: FieldTypes) -> JSON:
     return _avro_pfb_schema(entity_schemas)
 
 
+def pfb_schema_from_replicas(replicas: Iterable[JSON]) -> tuple[AbstractSet[str], JSON]:
+    schemas_by_replica_type = _schemas_by_replica_type(replicas)
+    return (
+        schemas_by_replica_type.keys(),
+        _avro_pfb_schema(schemas_by_replica_type.values())
+    )
+
+
 def _avro_pfb_schema(azul_avro_schema: Iterable[JSON]) -> JSON:
     """
     The boilerplate Avro schema that comprises a PFB's schema is returned in
@@ -483,6 +499,13 @@ def _inject_reference_handover_values(entity: MutableJSON, doc: JSON):
 # that all of the primitive field types types are nullable
 # https://github.com/DataBiosphere/azul/issues/4094
 
+_json_to_pfb_types = {
+    bool: 'boolean',
+    float: 'double',
+    int: 'long',
+    str: 'string'
+}
+
 _nullable_to_pfb_types = {
     null_bool: ['null', 'boolean'],
     null_float: ['null', 'double'],
@@ -570,10 +593,7 @@ def _entity_schema_recursive(field_types: FieldTypes,
                 'type': 'array',
                 'items': {
                     'type': 'array',
-                    'items': {
-                        int: 'long',
-                        float: 'double'
-                    }[field_type.ends_type.native_type]
+                    'items': _json_to_pfb_types[field_type.ends_type.native_type]
                 }
             }
         }
@@ -612,3 +632,69 @@ def _entity_schema_recursive(field_types: FieldTypes,
         pass
     else:
         assert False, field_type
+
+
+def _schemas_by_replica_type(replicas: Iterable[JSON]) -> JSON:
+    schemas = {}
+    for replica in replicas:
+        replica_type = replica['replica_type']
+        replica_contents = replica['contents']
+        _update_replica_schema_recursive(schemas,
+                                         replica_type,
+                                         replica_type,
+                                         replica_contents)
+    return schemas
+
+
+def _update_replica_schema_recursive(schema, key, name, value):
+    try:
+        old_type = schema[key]
+    except KeyError:
+        schema[key] = _new_replica_schema(name, value)
+    else:
+        if value is None:
+            if old_type == 'null' or isinstance(old_type, list):
+                pass
+            else:
+                schema[key] = ['null', old_type]
+        elif old_type == 'null':
+            schema[key] = ['null', _new_replica_schema(name, value)]
+        elif isinstance(value, list):
+            for v in value:
+                _update_replica_schema_recursive(old_type, 'items', name, v)
+        elif isinstance(value, dict):
+            for k, v in value.items():
+                # This will fail if the set of keys is inconsistent
+                field = one(field for field in old_type['fields'] if field['name'] == k)
+                _update_replica_schema_recursive(field, 'type', k, v)
+        else:
+            new_type = _json_to_pfb_types[type(value)]
+            if isinstance(old_type, list):
+                old_type = old_type[1]
+            assert old_type == new_type, (old_type, value)
+
+
+def _new_replica_schema(name, value):
+    if value is None:
+        return 'null'
+    elif isinstance(value, list):
+        schema = {'type': 'array'}
+        # The `items` field will be absent from the schema if we never observe
+        # a nonempty array
+        for v in value:
+            _update_replica_schema_recursive(schema, 'items', name, v)
+        return schema
+    elif isinstance(value, dict):
+        return {
+            'name': name,
+            'type': 'record',
+            'fields': [
+                {
+                    'name': k,
+                    'type': _new_replica_schema(k, v)
+                }
+                for k, v in value.items()
+            ]
+        }
+    else:
+        return _json_to_pfb_types[type(value)]
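
Note: to make the schema inference performed by `_schemas_by_replica_type` concrete, here is a small worked example; the replica contents are invented for illustration. Two observations of the same replica type are merged into one record schema, and a field that is null in one replica but a string in the other becomes a nullable union:

    from azul.service.avro_pfb import _schemas_by_replica_type

    replicas = [
        {'replica_type': 'anvil_file',
         'contents': {'file_name': 'a.bam', 'size': 123, 'md5': None}},
        {'replica_type': 'anvil_file',
         'contents': {'file_name': 'b.bam', 'size': 456, 'md5': 'abc'}},
    ]
    schemas = _schemas_by_replica_type(replicas)
    assert schemas == {
        'anvil_file': {
            'name': 'anvil_file',
            'type': 'record',
            'fields': [
                {'name': 'file_name', 'type': 'string'},
                {'name': 'size', 'type': 'long'},
                {'name': 'md5', 'type': ['null', 'string']}
            ]
        }
    }
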
diff --git a/src/azul/service/manifest_service.py b/src/azul/service/manifest_service.py
index 6830ab878..3d01bfcbd 100644
--- a/src/azul/service/manifest_service.py
+++ b/src/azul/service/manifest_service.py
@@ -2063,7 +2063,7 @@ def _all_replicas(self) -> Iterable[JSON]:
                 replica_id = replica.meta.id
                 if replica_id not in emitted_replica_ids:
                     num_new_replicas += 1
-                    yield replica.contents.to_dict()
+                    yield replica.to_dict()
                     # Note that this will be zero for replicas that use implicit
                     # hubs, in which case there are actually many hubs
                     explicit_hub_count = len(replica.hub_ids)
@@ -2109,6 +2109,36 @@ def create_file(self) -> tuple[str, Optional[str]]:
         os.close(fd)
         with open(path, 'w') as f:
             for replica in self._all_replicas():
-                json.dump(replica, f)
+                json.dump(replica['contents'], f)
                 f.write('\n')
         return path, None
+
+
+class PFBVerbatimManifestGenerator(VerbatimManifestGenerator):
+
+    @property
+    def content_type(self) -> str:
+        return 'application/octet-stream'
+
+    @classmethod
+    def file_name_extension(cls):
+        return 'avro'
+
+    @classmethod
+    def format(cls) -> ManifestFormat:
+        return ManifestFormat.verbatim_pfb
+
+    def create_file(self) -> tuple[str, Optional[str]]:
+        replicas = list(self._all_replicas())
+        replica_types, pfb_schema = avro_pfb.pfb_schema_from_replicas(replicas)
+        pfb_metadata_entity = avro_pfb.pfb_metadata_entity(replica_types)
+
+        def pfb_entities():
+            yield pfb_metadata_entity
+            for replica in replicas:
+                yield avro_pfb.PFBEntity.for_replica(dict(replica), pfb_schema).to_json(())
+
+        fd, path = mkstemp(suffix=f'.{self.file_name_extension()}')
+        os.close(fd)
+        avro_pfb.write_pfb_entities(pfb_entities(), pfb_schema, path)
+        return path, None
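
Note: PFB files are Avro object containers under the hood, so the file returned by `PFBVerbatimManifestGenerator.create_file` can be sanity-checked with any Avro reader. A quick sketch using fastavro; the path is a stand-in for whatever `create_file` returned, and per `pfb_entities()` above the first record is the PFB metadata entity, followed by one record per replica:

    from fastavro import reader

    path = 'manifest.avro'  # stand-in for the path returned by create_file()
    with open(path, 'rb') as f:
        for record in reader(f):
            # Each PFB record wraps one entity; for replica records, `name`
            # is the replica type, per PFBEntity.for_replica above.
            print(record['name'])
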