diff --git a/.gitignore b/.gitignore index 920700f3..cb06762d 100644 --- a/.gitignore +++ b/.gitignore @@ -9,7 +9,7 @@ __pycache__/ # Distribution / packaging .Python env/ -build/ +build develop-eggs/ dist/ downloads/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e812677c..0adf7851 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,7 +33,7 @@ repos: - flake8-pep3101>=1.2.1 # language_version: python3.6 - id: pretty-format-json - exclude: inputs.json + exclude: "(inputs\\.json|syntax_error\\.json)" args: - --autofix - --indent=4 diff --git a/README.md b/README.md index 17997a64..1e98b531 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,16 @@ # AWS CloudFormation CLI -The CloudFormation CLI (cfn) allows you to author your own resource providers that can be used by CloudFormation. +The CloudFormation CLI (cfn) allows you to author your own resource providers and modules that can be used by CloudFormation. ## Usage ### Documentation -Primary documentation for the CloudFormation CLI can be found at the [AWS Documentation](https://docs.aws.amazon.com/cloudformation-cli/latest/userguide/resource-types.html) site. +Primary documentation for the CloudFormation CLI can be found at the [AWS Documentation](https://docs.aws.amazon.com/cloudformation-cli/latest/userguide/what-is-cloudformation-cli.html) site. ### Installation -This tool can be installed using [pip](https://pypi.org/project/pip/) from the Python Package Index (PyPI). It requires Python 3. The tool requires at least one language plugin. The language plugins are also available on PyPI and as such can be installed all at once: +This tool can be installed using [pip](https://pypi.org/project/pip/) from the Python Package Index (PyPI). It requires Python 3. For resource types, the tool requires at least one language plugin. Language plugins are not needed to create a module type. 
The language plugins are also available on PyPI and as such can be installed all at once: ```bash pip install cloudformation-cli cloudformation-cli-java-plugin cloudformation-cli-go-plugin cloudformation-cli-python-plugin ``` @@ -28,6 +28,7 @@ cfn init ### Command: generate To refresh auto-generated code, use the `generate` command. Usually, plugins try to integrate this command in the native build flow, so please consult a plugin's README to see if this is necessary. +In a module project, this will regenerate the module schema. ```bash cfn generate @@ -44,7 +45,7 @@ cfn submit --dry-run #prepares schema handler package without submitting for reg ### Command: test -To run the contract tests, use the `test` command. +To run the contract tests for a resource type, use the `test` command. ```bash cfn test @@ -56,7 +57,7 @@ cfn test --enforce-timeout 60 -- -k contract_delete_update # combine args ### Command: build-image -To build an image. This image provides a minimalistic execution environment for the handler that does not depend on AWS Lambda in anyway. This image can be used during cfn invoke and cfn test instead of using sam cli. +To build an image for a resource type. This image provides a minimalistic execution environment for the resource handler that does not depend on AWS Lambda in any way. This image can be used during cfn invoke and cfn test instead of using sam cli. ```bash cfn build-image @@ -82,7 +83,7 @@ pip install -e . -r requirements.txt pre-commit install ``` -You will also need to install a language plugin, such as [the Java language plugin](https://github.com/aws-cloudformation/cloudformation-cli-java-plugin), also via `pip install`. For example, assuming the plugin is checked out in the same parent directory as this repository: +If you're creating a resource type, you will also need to install a language plugin, such as [the Java language plugin](https://github.com/aws-cloudformation/cloudformation-cli-java-plugin), also via `pip install`. 
For example, assuming the plugin is checked out in the same parent directory as this repository: ```bash pip install -e ../cloudformation-cli-java-plugin diff --git a/fragments/sample.json b/fragments/sample.json new file mode 100644 index 00000000..76120c7d --- /dev/null +++ b/fragments/sample.json @@ -0,0 +1,24 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/src/rpdk/core/__init__.py b/src/rpdk/core/__init__.py index 38ce9d61..b3ed4eb5 100644 --- a/src/rpdk/core/__init__.py +++ b/src/rpdk/core/__init__.py @@ -1,5 +1,5 @@ import logging -__version__ = "0.1.14" +__version__ = "0.2.0" logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/src/rpdk/core/contract/resource_client.py b/src/rpdk/core/contract/resource_client.py index a892e732..fbd06f94 100644 --- a/src/rpdk/core/contract/resource_client.py +++ b/src/rpdk/core/contract/resource_client.py @@ -462,7 +462,7 @@ def _call(self, payload): LOG.debug("=== Handler execution logs ===") LOG.debug(result) # pylint: disable=W1401 - regex = "__CFN_RESOURCE_START_RESPONSE__([\s\S]*)__CFN_RESOURCE_END_RESPONSE__" # noqa: W605 # pylint: disable=C0301 + regex = "__CFN_RESOURCE_START_RESPONSE__([\s\S]*)__CFN_RESOURCE_END_RESPONSE__" # noqa: W605,B950 # pylint: disable=C0301 payload = json.loads(re.search(regex, result).group(1)) else: result = self._client.invoke( diff --git a/src/rpdk/core/data/examples/module/sample.json b/src/rpdk/core/data/examples/module/sample.json new file mode 100644 index 00000000..76120c7d --- /dev/null +++ 
b/src/rpdk/core/data/examples/module/sample.json @@ -0,0 +1,24 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/src/rpdk/core/data/schema/provider.definition.schema.modules.v1.json b/src/rpdk/core/data/schema/provider.definition.schema.modules.v1.json new file mode 100644 index 00000000..d38bd082 --- /dev/null +++ b/src/rpdk/core/data/schema/provider.definition.schema.modules.v1.json @@ -0,0 +1,397 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "$id": "https://schema.cloudformation.us-east-1.amazonaws.com/module.definition.schema.v1.json", + "title": "CloudFormation Module Provider Definition MetaSchema", + "description": "This schema validates a CloudFormation module provider definition.", + "definitions": { + "httpsUrl": { + "type": "string", + "pattern": "^https://[0-9a-zA-Z]([-.\\w]*[0-9a-zA-Z])(:[0-9]*)*([?/#].*)?$", + "maxLength": 4096 + }, + "handlerDefinition": { + "description": "Defines any execution operations which can be performed on this module provider", + "type": "object", + "properties": { + "permissions": { + "type": "array", + "items": { + "type": "string" + }, + "additionalItems": false + }, + "timeoutInMinutes": { + "description": "Defines the timeout for the entire operation to be interpreted by the invoker of the handler. 
The default is 120 (2 hours).", + "type": "integer", + "minimum": 2, + "maximum": 720, + "default": 120 + } + }, + "additionalProperties": false, + "required": [ + "permissions" + ] + }, + "jsonPointerArray": { + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "format": "json-pointer" + } + }, + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/properties" + } + }, + "validations": { + "dependencies": { + "enum": { + "$comment": "Enforce that properties are strongly typed when enum, or const is specified.", + "required": [ + "type" + ] + }, + "const": { + "required": [ + "type" + ] + }, + "properties": { + "$comment": "An object cannot have both defined and undefined properties; therefore, patternProperties is not allowed when properties is specified.", + "not": { + "required": [ + "patternProperties" + ] + } + } + } + }, + "replacementStrategy": { + "type": "string", + "description": "The valid replacement strategies are [create_then_delete] and [delete_then_create]. All other inputs are invalid.", + "default": [ + "create_then_delete" + ], + "enum": [ + "create_then_delete", + "delete_then_create" + ] + }, + "properties": { + "allOf": [ + { + "$ref": "#/definitions/validations" + }, + { + "$comment": "The following subset of draft-07 property references is supported for module definitions. 
Nested properties are disallowed and should be specified as a $ref to a definitions block.", + "type": "object", + "properties": { + "insertionOrder": { + "description": "When set to true, this flag indicates that the order of insertion of the array will be honored, and that changing the order of the array would indicate a diff", + "type": "boolean", + "default": true + }, + "$ref": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/$ref" + }, + "$comment": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/$comment" + }, + "title": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/title" + }, + "description": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/description" + }, + "examples": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/examples" + }, + "default": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/default" + }, + "multipleOf": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/maxLength" + }, + "minLength": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/minLength" + }, + "pattern": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/pattern" + }, + "items": { + "$comment": "Redefined as just a schema. 
A list of schemas is not allowed", + "$ref": "#/definitions/properties", + "default": {} + }, + "maxItems": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/maxItems" + }, + "minItems": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/minItems" + }, + "uniqueItems": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/uniqueItems" + }, + "contains": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/contains" + }, + "maxProperties": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/maxProperties" + }, + "minProperties": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/minProperties" + }, + "required": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/required" + }, + "properties": { + "type": "object", + "patternProperties": { + "^[A-Za-z0-9]{1,64}$": { + "$ref": "#/definitions/properties" + } + }, + "additionalProperties": false, + "minProperties": 1 + }, + "additionalProperties": { + "$comment": "All properties of a resource must be expressed in the schema - arbitrary inputs are not allowed", + "type": "boolean" + }, + "patternProperties": { + "$comment": "patternProperties allow providers to introduce a specification for key-value pairs, or Map inputs.", + "type": "object", + "propertyNames": { + "format": "regex" + } + }, + "dependencies": { + "$comment": "Redefined to capture our properties override.", + "type": "object", + "additionalProperties": { + "anyOf": [ + { + "$ref": "#/definitions/properties" + }, + { + "$ref": "https://json-schema.org/draft-07/schema#/definitions/stringArray" + } + ] + } + }, + "const": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/const" + }, + "enum": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/enum" + }, + "type": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/type" + }, + "format": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/format" + }, + "allOf": 
{ + "$ref": "#/definitions/schemaArray" + }, + "anyOf": { + "$ref": "#/definitions/schemaArray" + }, + "oneOf": { + "$ref": "#/definitions/schemaArray" + }, + "Resources": { + "description": "List of all the resources that module creates.", + "type": "object" + }, + "Parameters": { + "description": "List of input parameters", + "type": "object" + } + }, + "additionalProperties": false + } + ] + }, + "moduleLink": { + "type": "object", + "properties": { + "$comment": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/$comment" + }, + "templateUri": { + "type": "string", + "pattern": "^(/|https:)" + }, + "mappings": { + "type": "object", + "patternProperties": { + "^[A-Za-z0-9]{1,64}$": { + "type": "string", + "format": "json-pointer" + } + }, + "additionalProperties": false + } + }, + "required": [ + "templateUri", + "mappings" + ], + "additionalProperties": false + } + }, + "type": "object", + "patternProperties": { + "^\\$id$": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/$id" + } + }, + "properties": { + "$schema": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/$schema" + }, + "type": { + "$comment": "Type", + "type": "string", + "const": "MODULE" + }, + "typeName": { + "$comment": "Module Type Identifier", + "examples": [ + "Organization::Service::XYZ::MODULE", + "AWS::S3::SecureBucket::MODULE" + ], + "type": "string", + "pattern": "^[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::MODULE$" + }, + "$comment": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/$comment" + }, + "title": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/title" + }, + "description": { + "$comment": "A short description of the module provider. 
This will be shown in the AWS CloudFormation console.", + "$ref": "https://json-schema.org/draft-07/schema#/properties/description" + }, + "sourceUrl": { + "$comment": "The location of the source code for this module provider, to help interested parties submit issues or improvements.", + "examples": [ + "https://github.com/aws-cloudformation/aws-cloudformation-resource-providers-s3" + ], + "$ref": "#/definitions/httpsUrl" + }, + "documentationUrl": { + "$comment": "A page with supplemental documentation. The property documentation in schemas should be able to stand alone, but this is an opportunity for e.g. rich examples or more guided documents.", + "examples": [ + "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/CHAP_Using.html" + ], + "$ref": "#/definitions/httpsUrl" + }, + "additionalProperties": { + "$comment": "Does schema allow additional properties for a some types", + "type": "boolean" + }, + "properties": { + "type": "object", + "patternProperties": { + "^[A-Za-z0-9]{1,64}$": { + "$ref": "#/definitions/properties" + } + }, + "additionalProperties": true, + "minProperties": 1, + "required": ["Resources"] + }, + "definitions": { + "type": "object", + "patternProperties": { + "^[A-Za-z0-9]{1,64}$": { + "$ref": "#/definitions/properties" + } + }, + "additionalProperties": false + }, + "handlers": { + "description": "Defines the provisioning operations which can be performed on this type", + "type": "object", + "properties": { + "resolve": { + "$ref": "#/definitions/handlerDefinition" + } + }, + "additionalProperties": false + }, + "remote": { + "description": "Reserved for CloudFormation use. A namespace to inline remote schemas.", + "type": "object", + "patternProperties": { + "^schema[0-9]+$": { + "description": "Reserved for CloudFormation use. 
A inlined remote schema.", + "type": "object", + "properties": { + "$comment": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/$comment" + }, + "properties": { + "$ref": "#/properties/properties" + }, + "definitions": { + "$ref": "#/properties/definitions" + } + }, + "additionalProperties": true + } + }, + "additionalProperties": false + }, + "replacementStrategy": { + "$comment": "The order of replacement for an immutable module update.", + "$ref": "#/definitions/replacementStrategy" + }, + "required": { + "$ref": "https://json-schema.org/draft-07/schema#/properties/required" + }, + "allOf": { + "$ref": "#/definitions/schemaArray" + }, + "anyOf": { + "$ref": "#/definitions/schemaArray" + }, + "oneOf": { + "$ref": "#/definitions/schemaArray" + }, + "moduleLink": { + "description": "A template-able link to a resource instance. AWS-internal service links must be relative to the AWS console domain. External service links must be absolute, HTTPS URIs.", + "$ref": "#/definitions/moduleLink" + } + }, + "required": [ + "typeName", + "properties", + "description", + "additionalProperties" + ], + "additionalProperties": false +} diff --git a/src/rpdk/core/exceptions.py b/src/rpdk/core/exceptions.py index 8b115df3..3412b37c 100644 --- a/src/rpdk/core/exceptions.py +++ b/src/rpdk/core/exceptions.py @@ -14,6 +14,10 @@ class SpecValidationError(RPDKBaseException): pass +class FragmentValidationError(RPDKBaseException): + pass + + class WizardError(RPDKBaseException): pass @@ -52,3 +56,7 @@ class InvalidRequestError(ContractTestError): class ModelResolverError(RPDKBaseException): pass + + +class InvalidFragmentFileError(RPDKBaseException): + pass diff --git a/src/rpdk/core/fragment/generator.py b/src/rpdk/core/fragment/generator.py new file mode 100644 index 00000000..775d59ed --- /dev/null +++ b/src/rpdk/core/fragment/generator.py @@ -0,0 +1,347 @@ +""" +This class has two responsibilities: +1. 
generating a sample template fragment so the user has some initial +file fragment as an example. +The method "generate_sample_fragment" will be called as part of the init command. +2. generating schema for provided template fragments. +The method "generate_schema" will be called right before submission. +""" +import json +import logging +import os +from pathlib import Path + +import yaml + +from rpdk.core.data_loaders import resource_json +from rpdk.core.exceptions import FragmentValidationError + +LOG = logging.getLogger(__name__) +FRAGMENT_DIR = "fragments" +SAMPLE_FRAGMENT_OUTPUT = "sample.json" +SCHEMA_NAME = "schema.json" +SAMPLE_FRAGMENT = "../data/examples/module/sample.json" +ALLOWED_EXTENSIONS = {".json", ".yaml", ".yml"} +RESOURCE_LIMIT = 500 +OUTPUT_LIMIT = 200 +MAPPING_LIMIT = 200 +MAPPING_ATTRIBUTE_LIMIT = 200 +TEMPLATE_FILE_SIZE_IN_BYTES_LIMIT = 1500000 + + +class TemplateFragment: # pylint: disable=too-many-instance-attributes + def __init__(self, type_name, root=None): + self.root = Path(root) if root else Path.cwd() + self.fragment_dir = self.root / FRAGMENT_DIR + self.type_name = type_name + self.resource_limit = RESOURCE_LIMIT + self.output_limit = OUTPUT_LIMIT + self.mapping_limit = MAPPING_LIMIT + self.mapping_attribute_limit = MAPPING_ATTRIBUTE_LIMIT + self.template_file_size_in_bytes_limit = TEMPLATE_FILE_SIZE_IN_BYTES_LIMIT + + LOG.debug("Fragment directory: %s", self.fragment_dir) + + def generate_schema(self): + raw_fragments = self._read_raw_fragments() + + schema = {} + properties = {} + + schema["typeName"] = self.type_name + schema["description"] = "Schema for Module Fragment of type " + self.type_name + schema["properties"] = properties + schema["additionalProperties"] = True + + if "Parameters" in raw_fragments: + properties["Parameters"] = self.__build_parameters(raw_fragments) + properties["Resources"] = self.__build_resources(raw_fragments) + + self.__write_schema(schema) + + return schema + + def validate_fragments(self): + """ + 
This method makes sure that the fragments adhere + to the template fragment restrictions. + Note: Fn::ImportValue was checked when loading the fragments + since it can occur anywhere in the template. + """ + raw_fragments = self._read_raw_fragments() + self.__validate_file_size_limit() + self.__validate_resources(raw_fragments) + self.__validate_parameters(raw_fragments) + self.__validate_no_transforms_present(raw_fragments) + self.__validate_outputs(raw_fragments) + self.__validate_mappings(raw_fragments) + + def __validate_outputs(self, raw_fragments): + self.__validate_no_exports_present(raw_fragments) + self.__validate_output_limit(raw_fragments) + + @staticmethod + def __validate_no_exports_present(raw_fragments): + if "Outputs" in raw_fragments: + for _output_name, output in raw_fragments["Outputs"].items(): + if "Export" in output: + raise FragmentValidationError( + "Template fragment cannot contain any Export. " + "Found an Export statement in Output: " + _output_name + ) + + def __validate_output_limit(self, raw_fragments): + if "Outputs" in raw_fragments: + output_count = len(raw_fragments["Outputs"].items()) + if output_count > self.output_limit: + raise FragmentValidationError( + "The Module template fragment has " + + str(output_count) + + " outputs but must not exceed the limit of " + + str(self.output_limit) + + " outputs" + ) + + def __validate_resources(self, raw_fragments): + if "Resources" not in raw_fragments: + raise FragmentValidationError( + "A Module template fragment must have a Resources section" + ) + self.__validate_resource_limit(raw_fragments) + for _resource_name, resource in raw_fragments["Resources"].items(): + if "Type" in resource: + if resource["Type"] == "AWS::CloudFormation::Stack": + raise FragmentValidationError( + "Template fragment can't contain nested stack." + ) + if resource["Type"] == "AWS::CloudFormation::Macro": + raise FragmentValidationError( + "Template fragment can't contain any macro." 
+ ) + elif "Name" in resource: + if resource["Name"] == "AWS::Include": + raise FragmentValidationError( + "Template fragment can't use AWS::Include transform." + ) + raise FragmentValidationError( + "Resource '" + _resource_name + "' is invalid" + ) + else: + raise FragmentValidationError( + "Resource '" + _resource_name + "' has neither Type nor Name" + ) + + def __validate_resource_limit(self, raw_fragments): + resource_count = len(raw_fragments["Resources"].items()) + if resource_count > self.resource_limit: + raise FragmentValidationError( + "The Module template fragment has " + + str(resource_count) + + " resources but must not exceed the limit of " + + str(self.resource_limit) + + " resources" + ) + + @staticmethod + def __validate_parameters(raw_fragments): + if "Parameters" in raw_fragments: + for _parameter_name, parameter in raw_fragments["Parameters"].items(): + if "Type" not in parameter: + raise FragmentValidationError( + "Parameter '" + _parameter_name + "' must have a Type" + ) + + @staticmethod + def __validate_no_transforms_present(raw_fragments): + if "transform" in raw_fragments or "Transform" in raw_fragments: + raise FragmentValidationError( + "Template fragment can't contain transform section." + ) + if "Fn::Transform" in raw_fragments: + raise FragmentValidationError( + "Template fragment can't contain any transform." 
+ ) + + def __validate_mappings(self, raw_fragments): + self.__validate_mapping_limit(raw_fragments) + self.__validate_mapping_attribute_limit(raw_fragments) + + def __validate_mapping_limit(self, raw_fragments): + if "Mappings" in raw_fragments: + mapping_count = len(raw_fragments["Mappings"].items()) + if mapping_count > self.mapping_limit: + raise FragmentValidationError( + "The Module template fragment has " + + str(mapping_count) + + " mappings but must not exceed the limit of " + + str(self.mapping_limit) + + " mappings" + ) + + def __validate_mapping_attribute_limit(self, raw_fragments): + if "Mappings" in raw_fragments: + for _mapping_name, mapping in raw_fragments["Mappings"].items(): + attribute_count = len(mapping.items()) + if attribute_count > self.mapping_attribute_limit: + raise FragmentValidationError( + "The mapping " + + _mapping_name + + " has " + + str(attribute_count) + + " attributes but must not exceed the limit of " + + str(self.mapping_attribute_limit) + + " mapping attributes" + ) + + def __validate_file_size_limit(self): + total_size = self.__get_template_file_size_in_bytes() + if total_size > self.template_file_size_in_bytes_limit: + raise FragmentValidationError( + "The total file size of the template" + " fragments exceeds the CloudFormation Template size limit" + ) + + def __get_template_file_size_in_bytes(self): + return os.stat(self._get_fragment_file()).st_size + + @staticmethod + def __build_resources(raw_fragments): + raw_resources = {} + resources = {} + for resource in raw_fragments["Resources"]: + raw_resources[resource] = { + "type": raw_fragments["Resources"][resource]["Type"] + } + resources_properties = {} + for resource in raw_resources: + type_object = {"type": "object", "properties": {}} + type_object["properties"]["Type"] = { + "type": "string", + "const": raw_resources[resource]["type"], + } + type_object["properties"]["Properties"] = {"type": "object"} + resources_properties[resource] = type_object + resources["properties"] = 
resources_properties + resources["type"] = "object" + resources["additionalProperties"] = False + return resources + + @staticmethod + def __build_parameters(raw_fragments): + raw_parameters = {} + parameters = {} + for param in raw_fragments["Parameters"]: + param_type = raw_fragments["Parameters"][param]["Type"] + + description = raw_fragments["Parameters"][param].get("Description") + raw_parameters[param] = { + "type": param_type.lower(), + "description": description, + } + parameter_properties = {} + for raw_param in raw_parameters: + description = raw_parameters[raw_param]["description"] + type_name = "object" + properties = {"Type": {"type": "string"}} + required = ["Type"] + parameter_properties[raw_param] = { + "type": type_name, + "properties": properties, + "required": required, + } + if description is not None: + parameter_properties[raw_param]["description"] = description + properties["Description"] = {"type": "string"} + required.append("Description") + parameters["type"] = "object" + parameters["properties"] = parameter_properties + return parameters + + def __write_schema(self, schema): + def _write(f): + json.dump(schema, f, indent=4) + f.write("\n") + + self._overwrite(self.root / SCHEMA_NAME, _write) + + def generate_sample_fragment(self): + self._create_fragment_directory() + sample_json = self.__get_sample_fragment_json() + + def _write(f): + json.dump(sample_json, f, indent=4) + f.write("\n") + + self._overwrite(self.fragment_dir / SAMPLE_FRAGMENT_OUTPUT, _write) + + @staticmethod + def __get_sample_fragment_json(): + sample_json = resource_json(__name__, SAMPLE_FRAGMENT) + return sample_json + + def _create_fragment_directory(self): + if not os.path.exists(self.fragment_dir): + os.mkdir(self.fragment_dir) + print("Directory ", self.fragment_dir, " Created ") + else: + print("Directory ", self.fragment_dir, " already exists") + + def _read_raw_fragments(self): + return self._load_fragment(self._get_fragment_file()) + + def _load_fragment(self, 
fragment_file): + try: + with open(fragment_file, "r", encoding="utf-8") as f: + return yaml.safe_load( + self.__first_pass_syntax_check(self.__convert_function(f.read())) + ) + except (json.JSONDecodeError, yaml.parser.ParserError) as e: + raise FragmentValidationError( + "Fragment file '{}' is invalid: {}".format(fragment_file, str(e)) + ) from e + + def _get_fragment_file(self): + all_fragment_files = [] + for root, _directories, files in os.walk(self.fragment_dir): + for f in files: + ext = os.path.splitext(f)[-1].lower() + if ext in ALLOWED_EXTENSIONS: + all_fragment_files.append(os.path.join(root, f)) + if len(all_fragment_files) > 1: + raise FragmentValidationError( + "A Module can only consist of a " + "single template file, but there are " + + str(len(all_fragment_files)) + + ": " + + str(all_fragment_files) + ) + return all_fragment_files[0] + + @staticmethod + def _overwrite(path, contents): + LOG.debug("Overwriting '%s'", path) + with path.open("w", encoding="utf-8") as f: + if callable(contents): + contents(f) + else: + f.write(contents) + + @staticmethod + def __first_pass_syntax_check(template): + if "Fn::ImportValue" in template: + raise FragmentValidationError( + "Template fragment can't contain any Fn::ImportValue." + ) + return template + + @staticmethod + def __convert_function(template): + """ + When generating schema, we don't care about the actual reference. + So the following will only make a valid YAML file. + """ + return ( + template.replace("!Transform", "Fn::Transform") + .replace("!ImportValue", "Fn::ImportValue") + .replace("!", "") + ) diff --git a/src/rpdk/core/init.py b/src/rpdk/core/init.py index bb758977..7742df50 100644 --- a/src/rpdk/core/init.py +++ b/src/rpdk/core/init.py @@ -1,4 +1,5 @@ -"""This sub command generates IDE and build files for a given language. +"""This sub command generates IDE and build files for a resource, +or schema files for a module. 
""" import argparse import logging @@ -8,8 +9,11 @@ from colorama import Fore, Style from .exceptions import WizardAbortError, WizardValidationError +from .module.init_module import init_module from .plugin_registry import get_parsers, get_plugin_choices -from .project import Project +from .project import ARTIFACT_TYPE_MODULE, Project +from .resource.init_resource import init_resource +from .utils.init_utils import init_artifact_type, validate_yes LOG = logging.getLogger(__name__) @@ -36,8 +40,8 @@ def input_with_validation(prompt, validate, description=""): response = input() try: return validate(response) - except WizardValidationError as error: - print_error(error) + except WizardValidationError as e: + print_error(e) def validate_type_name(value): @@ -50,10 +54,6 @@ def validate_type_name(value): ) -def validate_yes(value): - return value.lower() in ("y", "yes") - - class ValidatePluginChoice: def __init__(self, choices): self.choices = tuple(choices) @@ -109,16 +109,6 @@ def check_for_existing_project(project): raise WizardAbortError() -def input_typename(): - type_name = input_with_validation( - "What's the name of your resource type?", - validate_type_name, - "\n(Organization::Service::Resource)", - ) - LOG.debug("Resource type identifier: %s", type_name) - return type_name - - def input_language(): # language/plugin if validate_plugin_choice.max < 1: @@ -143,30 +133,13 @@ def init(args): check_for_existing_project(project) - if args.type_name: - try: - type_name = validate_type_name(args.type_name) - except WizardValidationError as error: - print_error(error) - type_name = input_typename() - else: - type_name = input_typename() + artifact_type = init_artifact_type(args) - if "language" in vars(args): - language = args.language.lower() + if artifact_type == ARTIFACT_TYPE_MODULE: + init_module(args, project) + # artifact type can only be module or resource at this point else: - language = input_language() - - settings = { - arg: getattr(args, arg) - for arg 
in vars(args) - if not callable(getattr(args, arg)) - } - - project.init(type_name, language, settings) - - project.generate() - project.generate_docs() + init_resource(args, project) LOG.warning("Initialized a new project in %s", project.root.resolve()) @@ -204,5 +177,11 @@ def setup_subparser(subparsers, parents): parser.add_argument( "-t", "--type-name", - help="Select the name of the resource type.", + help="Select the name of the type.", + ) + + parser.add_argument( + "-a", + "--artifact-type", + help="Select the type of artifact (RESOURCE or MODULE)", ) diff --git a/src/rpdk/core/module/__init__.py b/src/rpdk/core/module/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/rpdk/core/module/init_module.py b/src/rpdk/core/module/init_module.py new file mode 100644 index 00000000..9316881f --- /dev/null +++ b/src/rpdk/core/module/init_module.py @@ -0,0 +1,48 @@ +import logging +import re + +from rpdk.core.exceptions import WizardValidationError +from rpdk.core.fragment.generator import TemplateFragment +from rpdk.core.utils.init_utils import input_with_validation, print_error + +LOG = logging.getLogger(__name__) + +# this regex has to be kept in sync with the one in the meta-schema. 
+MODULE_TYPE_NAME_REGEX = ( + r"^[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::MODULE$" +) + + +def init_module(args, project): + if args.type_name: + try: + type_name = validate_type_name(args.type_name) + except WizardValidationError as error: + print_error(error) + type_name = input_typename() + else: + type_name = input_typename() + + project.init_module(type_name) + template_fragment = TemplateFragment(type_name) + template_fragment.generate_sample_fragment() + + +def input_typename(): + type_name = input_with_validation( + "What's the name of your module type?", + validate_type_name, + "\n(::::::MODULE)", + ) + LOG.debug("Resource type identifier: %s", type_name) + return type_name + + +def validate_type_name(value): + match = re.match(MODULE_TYPE_NAME_REGEX, value) + if match: + return value + LOG.debug("'%s' did not match '%s'", value, MODULE_TYPE_NAME_REGEX) + raise WizardValidationError( + "Please enter a value matching '{}'".format(MODULE_TYPE_NAME_REGEX) + ) diff --git a/src/rpdk/core/project.py b/src/rpdk/core/project.py index 84f00aed..63a27fc2 100644 --- a/src/rpdk/core/project.py +++ b/src/rpdk/core/project.py @@ -12,12 +12,14 @@ from jsonschema import Draft7Validator from jsonschema.exceptions import ValidationError +from rpdk.core.fragment.generator import TemplateFragment from rpdk.core.jsonutils.flattener import JsonSchemaFlattener from .boto_helpers import create_sdk_session from .data_loaders import load_resource_spec, resource_json from .exceptions import ( DownstreamError, + FragmentValidationError, InternalError, InvalidProjectError, SpecValidationError, @@ -35,7 +37,12 @@ INPUTS_FOLDER = "inputs" EXAMPLE_INPUTS_FOLDER = "example_inputs" ROLE_TEMPLATE_FILENAME = "resource-role.yaml" -TYPE_NAME_REGEX = "^[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}$" +TYPE_NAME_RESOURCE_REGEX = "^[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}$" +TYPE_NAME_MODULE_REGEX = ( + 
"^[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::MODULE$" +) +ARTIFACT_TYPE_RESOURCE = "RESOURCE" +ARTIFACT_TYPE_MODULE = "MODULE" DEFAULT_ROLE_TIMEOUT_MINUTES = 120 # 2 hours # min and max are according to CreateRole API restrictions @@ -60,8 +67,9 @@ SETTINGS_VALIDATOR = Draft7Validator( { "properties": { + "artifact_type": {"type": "string"}, "language": {"type": "string"}, - "typeName": {"type": "string", "pattern": TYPE_NAME_REGEX}, + "typeName": {"type": "string", "pattern": TYPE_NAME_RESOURCE_REGEX}, "runtime": {"type": "string", "enum": list(LAMBDA_RUNTIMES)}, "entrypoint": {"type": ["string", "null"]}, "testEntrypoint": {"type": ["string", "null"]}, @@ -73,6 +81,18 @@ } ) +MODULE_SETTINGS_VALIDATOR = Draft7Validator( + { + "properties": { + "artifact_type": {"type": "string"}, + "typeName": {"type": "string", "pattern": TYPE_NAME_MODULE_REGEX}, + "settings": {"type": "object"}, + }, + "required": ["artifact_type", "typeName"], + "additionalProperties": False, + } +) + BASIC_TYPE_MAPPINGS = { "string": "String", @@ -94,12 +114,13 @@ def escape_markdown(string): return string -class Project: # pylint: disable=too-many-instance-attributes +class Project: # pylint: disable=too-many-instance-attributes,too-many-public-methods def __init__(self, overwrite_enabled=False, root=None): self.overwrite_enabled = overwrite_enabled self.root = Path(root) if root else Path.cwd() self.settings_path = self.root / SETTINGS_FILENAME self.type_info = None + self.artifact_type = None self.language = None self._plugin = None self.settings = None @@ -110,6 +131,7 @@ def __init__(self, overwrite_enabled=False, root=None): self.entrypoint = None self.test_entrypoint = None self.executable_entrypoint = None + self.fragment_dir = None self.env = Environment( trim_blocks=True, @@ -170,14 +192,35 @@ def load_settings(self): "Project file '{}' is invalid".format(self.settings_path), e ) + # backward compatible + if "artifact_type" not in raw_settings: + 
raw_settings["artifact_type"] = ARTIFACT_TYPE_RESOURCE + + if raw_settings["artifact_type"] == ARTIFACT_TYPE_RESOURCE: + self.validate_and_load_resource_settings(raw_settings) + else: + self.validate_and_load_module_settings(raw_settings) + + def validate_and_load_module_settings(self, raw_settings): try: - SETTINGS_VALIDATOR.validate(raw_settings) + MODULE_SETTINGS_VALIDATOR.validate(raw_settings) except ValidationError as e: self._raise_invalid_project( "Project file '{}' is invalid".format(self.settings_path), e ) + self.type_name = raw_settings["typeName"] + self.artifact_type = raw_settings["artifact_type"] + self.settings = raw_settings.get("settings", {}) + def validate_and_load_resource_settings(self, raw_settings): + try: + SETTINGS_VALIDATOR.validate(raw_settings) + except ValidationError as e: + self._raise_invalid_project( + "Project file '{}' is invalid".format(self.settings_path), e + ) self.type_name = raw_settings["typeName"] + self.artifact_type = raw_settings["artifact_type"] self.language = raw_settings["language"] self.runtime = raw_settings["runtime"] self.entrypoint = raw_settings["entrypoint"] @@ -225,7 +268,7 @@ def write_settings(self): ) raise InternalError("Internal error (Plugin returned invalid runtime)") - def _write(f): + def _write_resource_settings(f): executable_entrypoint_dict = ( {"executableEntrypoint": self.executable_entrypoint} if self.executable_entrypoint @@ -233,6 +276,7 @@ def _write(f): ) json.dump( { + "artifact_type": self.artifact_type, "typeName": self.type_name, "language": self.language, "runtime": self.runtime, @@ -246,19 +290,40 @@ def _write(f): ) f.write("\n") - self.overwrite(self.settings_path, _write) + def _write_module_settings(f): + json.dump( + { + "artifact_type": self.artifact_type, + "typeName": self.type_name, + "settings": self.settings, + }, + f, + indent=4, + ) + f.write("\n") + + if self.artifact_type == ARTIFACT_TYPE_RESOURCE: + self.overwrite(self.settings_path, _write_resource_settings) + 
else: + self.overwrite(self.settings_path, _write_module_settings) def init(self, type_name, language, settings=None): + self.artifact_type = ARTIFACT_TYPE_RESOURCE self.type_name = type_name self.language = language self._plugin = load_plugin(language) self.settings = settings or {} - self._write_example_schema() self._write_example_inputs() self._plugin.init(self) self.write_settings() + def init_module(self, type_name): + self.artifact_type = ARTIFACT_TYPE_MODULE + self.type_name = type_name + self.settings = {} + self.write_settings() + def load_schema(self): if not self.type_info: msg = "Internal error (Must load settings first)" @@ -291,6 +356,9 @@ def safewrite(self, path, contents): LOG.info("File already exists, not overwriting '%s'", path) def generate(self): + if self.artifact_type == ARTIFACT_TYPE_MODULE: + return # for Modules, the schema is already generated in cfn validate + # generate template for IAM role assumed by cloudformation # to provision resources if schema has handlers defined if "handlers" in self.schema: @@ -336,7 +404,7 @@ def generate(self): ) self.overwrite(path, contents) - return self._plugin.generate(self) + self._plugin.generate(self) def load(self): try: @@ -346,15 +414,34 @@ def load(self): "Project file not found. 
Have you run 'init'?", e ) - LOG.info("Validating your resource schema...") - try: - self.load_schema() - except FileNotFoundError as e: - self._raise_invalid_project("Resource schema not found.", e) - except SpecValidationError as e: - msg = "Resource schema is invalid: " + str(e) - self._raise_invalid_project(msg, e) + if self.artifact_type == ARTIFACT_TYPE_MODULE: + LOG.info("Validating your module fragments...") + template_fragment = TemplateFragment(self.type_name) + try: + self._validate_fragments(template_fragment) + except FragmentValidationError as e: + msg = "Invalid template fragment: " + str(e) + self._raise_invalid_project(msg, e) + self.schema = template_fragment.generate_schema() + self.fragment_dir = template_fragment.fragment_dir + else: + LOG.info("Validating your resource specification...") + try: + self.load_schema() + except FileNotFoundError as e: + self._raise_invalid_project("Resource specification not found.", e) + except SpecValidationError as e: + msg = "Resource specification is invalid: " + str(e) + self._raise_invalid_project(msg, e) + @staticmethod + def _validate_fragments(template_fragment): + template_fragment.validate_fragments() + + # flake8: noqa: C901 + # pylint: disable=too-many-locals + # pylint: disable=too-many-branches + # pylint: disable=too-many-public-methods def submit( self, dry_run, endpoint_url, region_name, role_arn, use_role, set_default ): # pylint: disable=too-many-arguments @@ -369,8 +456,26 @@ def submit( # the default compression is ZIP_STORED, which helps with the # file-size check on upload with zipfile.ZipFile(f, mode="w") as zip_file: - zip_file.write(self.schema_path, SCHEMA_UPLOAD_FILENAME) zip_file.write(self.settings_path, SETTINGS_FILENAME) + # Include all fragments in zip file + if self.artifact_type == ARTIFACT_TYPE_MODULE: + if not os.path.exists(self.root / SCHEMA_UPLOAD_FILENAME): + msg = "Module schema could not be found." 
+ raise InternalError(msg) + zip_file.write( + self.root / SCHEMA_UPLOAD_FILENAME, SCHEMA_UPLOAD_FILENAME + ) + for root, _dirs, files in os.walk(self.fragment_dir): + for file in files: + zip_file.write( + os.path.join(root, file), + arcname=os.path.join( + root.replace(str(self.fragment_dir), "fragments/"), + file, + ), + ) + else: + zip_file.write(self.schema_path, SCHEMA_UPLOAD_FILENAME) try: zip_file.write(self.overrides_path, OVERRIDES_FILENAME) LOG.debug("%s found. Writing to package.", OVERRIDES_FILENAME) @@ -378,14 +483,20 @@ def submit( LOG.debug( "%s not found. Not writing to package.", OVERRIDES_FILENAME ) - if os.path.isdir(self.inputs_path): - for filename in os.listdir(self.inputs_path): - absolute_path = self.inputs_path / filename - zip_file.write(absolute_path, INPUTS_FOLDER + "/" + filename) - LOG.debug("%s found. Writing to package.", filename) - else: - LOG.debug("%s not found. Not writing to package.", INPUTS_FOLDER) - self._plugin.package(self, zip_file) + + if self.artifact_type != ARTIFACT_TYPE_MODULE: + if os.path.isdir(self.inputs_path): + for filename in os.listdir(self.inputs_path): + absolute_path = self.inputs_path / filename + zip_file.write( + absolute_path, INPUTS_FOLDER + "/" + filename + ) + LOG.debug("%s found. Writing to package.", filename) + else: + LOG.debug( + "%s not found. 
Not writing to package.", INPUTS_FOLDER + ) + self._plugin.package(self, zip_file) if dry_run: LOG.error("Dry run complete: %s", path.resolve()) @@ -396,6 +507,9 @@ def submit( ) def generate_docs(self): + if self.artifact_type == ARTIFACT_TYPE_MODULE: + return + # generate the docs folder that contains documentation based on the schema docs_path = self.root / "docs" @@ -691,7 +805,7 @@ def _upload( log_delivery_role = uploader.get_log_delivery_role_arn() LOG.debug("Got Log Role: %s", log_delivery_role) kwargs = { - "Type": "RESOURCE", + "Type": self.artifact_type, "TypeName": self.type_name, "SchemaHandlerPackage": s3_url, "ClientRequestToken": str(uuid4()), diff --git a/src/rpdk/core/resource/__init__.py b/src/rpdk/core/resource/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/rpdk/core/resource/init_resource.py b/src/rpdk/core/resource/init_resource.py new file mode 100644 index 00000000..8e5b3328 --- /dev/null +++ b/src/rpdk/core/resource/init_resource.py @@ -0,0 +1,102 @@ +import logging +import re + +from rpdk.core.exceptions import WizardAbortError, WizardValidationError +from rpdk.core.plugin_registry import get_plugin_choices +from rpdk.core.utils.init_utils import input_with_validation, print_error + +LOG = logging.getLogger(__name__) +RESOURCE_TYPE_NAME_REGEX = r"^[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}$" + + +def init_resource(args, project): + if args.type_name: + try: + type_name = validate_type_name(args.type_name) + except WizardValidationError as error: + print_error(error) + type_name = input_typename() + else: + type_name = input_typename() + + if "language" in vars(args): + language = args.language.lower() + else: + language = input_language() + + settings = { + arg: getattr(args, arg) + for arg in vars(args) + if not callable(getattr(args, arg)) + } + + project.init(type_name, language, settings) + project.generate() + project.generate_docs() + + +def input_typename(): + type_name = 
input_with_validation( + "What's the name of your resource type?", + validate_type_name, + "\n(Organization::Service::Resource)", + ) + LOG.debug("Resource type identifier: %s", type_name) + return type_name + + +def input_language(): + # language/plugin + if validate_plugin_choice.max < 1: + LOG.critical("No language plugins found") + raise WizardAbortError() + + if validate_plugin_choice.max == 1: + language = validate_plugin_choice.choices[0] + LOG.warning("One language plugin found, defaulting to %s", language) + else: + language = input_with_validation( + validate_plugin_choice.message, validate_plugin_choice + ) + LOG.debug("Language plugin: %s", language) + return language + + +def validate_type_name(value): + match = re.match(RESOURCE_TYPE_NAME_REGEX, value) + if match: + return value + LOG.debug("'%s' did not match '%s'", value, RESOURCE_TYPE_NAME_REGEX) + raise WizardValidationError( + "Please enter a value matching '{}'".format(RESOURCE_TYPE_NAME_REGEX) + ) + + +class ValidatePluginChoice: + def __init__(self, choices): + self.choices = tuple(choices) + self.max = len(self.choices) + + pretty = "\n".join( + "[{}] {}".format(i, choice) for i, choice in enumerate(self.choices, 1) + ) + self.message = ( + "Select a language for code generation:\n" + + pretty + + "\n(enter an integer): " + ) + + def __call__(self, value): + try: + choice = int(value) + except ValueError as e: + raise WizardValidationError("Please enter an integer") from e + choice -= 1 + if choice < 0 or choice >= self.max: + raise WizardValidationError("Please select a choice") + return self.choices[choice] + + +validate_plugin_choice = ValidatePluginChoice( # pylint: disable=invalid-name + get_plugin_choices() +) diff --git a/src/rpdk/core/test.py b/src/rpdk/core/test.py index 2a131eb4..e34d7a89 100644 --- a/src/rpdk/core/test.py +++ b/src/rpdk/core/test.py @@ -23,7 +23,7 @@ from .contract.resource_client import ResourceClient from .data_loaders import copy_resource from .exceptions import 
SysExitRecommendedError -from .project import Project +from .project import ARTIFACT_TYPE_MODULE, Project LOG = logging.getLogger(__name__) @@ -179,6 +179,9 @@ def test(args): _validate_sam_args(args) project = Project() project.load() + if project.artifact_type == ARTIFACT_TYPE_MODULE: + LOG.warning("The test command is not supported in a module project") + return overrides = get_overrides( project.root, args.region, args.cloudformation_endpoint_url, args.role_arn diff --git a/src/rpdk/core/utils/__init__.py b/src/rpdk/core/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/rpdk/core/utils/init_utils.py b/src/rpdk/core/utils/init_utils.py new file mode 100644 index 00000000..bf7b7015 --- /dev/null +++ b/src/rpdk/core/utils/init_utils.py @@ -0,0 +1,70 @@ +import logging + +from colorama import Fore, Style + +from rpdk.core.exceptions import WizardValidationError +from rpdk.core.project import ARTIFACT_TYPE_MODULE, ARTIFACT_TYPE_RESOURCE + +LOG = logging.getLogger(__name__) + +INPUT_TYPES_STRING = "resource(r) or a module(m)" +VALID_RESOURCES_REPRESENTATION = {"r", "resource", "resources"} +VALID_MODULES_REPRESENTATION = {"m", "module", "modules"} + + +# NOTE this function is also in init, for compatibility with language plugins +def init_artifact_type(args=None): + if args and args.artifact_type: + try: + artifact_type = validate_artifact_type(args.artifact_type) + except WizardValidationError as error: + print_error(error) + artifact_type = input_with_validation( + "Do you want to develop a new {}?.".format(INPUT_TYPES_STRING), + validate_artifact_type, + ) + + else: + artifact_type = input_with_validation( + "Do you want to develop a new {}?.".format(INPUT_TYPES_STRING), + validate_artifact_type, + ) + + return artifact_type + + +def print_error(error): + print(Style.BRIGHT, Fore.RED, str(error), Style.RESET_ALL, sep="") + + +def input_with_validation(prompt, validate, description=""): + while True: + print( + Style.BRIGHT, + 
Fore.WHITE, + prompt, + Style.RESET_ALL, + description, + Style.RESET_ALL, + sep="", + ) + print(Fore.YELLOW, ">> ", Style.RESET_ALL, sep="", end="") + response = input() + try: + return validate(response) + except WizardValidationError as e: + print_error(e) + + +def validate_artifact_type(value): + if value.lower() in VALID_RESOURCES_REPRESENTATION: + return ARTIFACT_TYPE_RESOURCE + if value.lower() in VALID_MODULES_REPRESENTATION: + return ARTIFACT_TYPE_MODULE + raise WizardValidationError( + "Please enter a value matching {}".format(INPUT_TYPES_STRING) + ) + + +def validate_yes(value): + return value.lower() in ("y", "yes") diff --git a/tests/data/sample_fragments/aws-specific-parameter.json b/tests/data/sample_fragments/aws-specific-parameter.json new file mode 100644 index 00000000..9b33ecd3 --- /dev/null +++ b/tests/data/sample_fragments/aws-specific-parameter.json @@ -0,0 +1,20 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Parameters": { + "VpcId": { + "Description": "Id of the VPC", + "Type": "AWS::EC2::VPC::Id" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/tests/data/sample_fragments/ec2.yaml b/tests/data/sample_fragments/ec2.yaml new file mode 100644 index 00000000..312a4256 --- /dev/null +++ b/tests/data/sample_fragments/ec2.yaml @@ -0,0 +1,20 @@ +Parameters: + Volume: + Description: "The size of ebs block" + Type: String +Resources: + MyEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: "testkey" + BlockDeviceMappings: + - DeviceName: "/dev/sdm" + Ebs: + VolumeType: "io1" + Iops: "200" + DeleteOnTermination: "false" + VolumeSize: + Ref: Volume + - DeviceName: "/dev/sdk" + NoDevice: {} diff --git a/tests/data/sample_fragments/ec2_short.yaml b/tests/data/sample_fragments/ec2_short.yaml new file mode 100644 index 00000000..821e91e2 --- /dev/null +++ 
b/tests/data/sample_fragments/ec2_short.yaml @@ -0,0 +1,19 @@ +Parameters: + Volume: + Description: "The size of ebs block" + Type: String +Resources: + MyEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: "testkey" + BlockDeviceMappings: + - DeviceName: "/dev/sdm" + Ebs: + VolumeType: "io1" + Iops: "200" + DeleteOnTermination: "false" + VolumeSize: !Ref Volume + - DeviceName: "/dev/sdk" + NoDevice: {} diff --git a/tests/data/sample_fragments/exports.json b/tests/data/sample_fragments/exports.json new file mode 100644 index 00000000..72ef03dc --- /dev/null +++ b/tests/data/sample_fragments/exports.json @@ -0,0 +1,35 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + }, + "Outputs": { + "BucketName": { + "Description": "Name of the bucket", + "Value": { + "Ref": "S3Bucket" + }, + "Export": { + "Name": "S3BucketName" + } + } + } +} diff --git a/tests/data/sample_fragments/fragment_mapping_with_three_attributes.json b/tests/data/sample_fragments/fragment_mapping_with_three_attributes.json new file mode 100644 index 00000000..9ad409c7 --- /dev/null +++ b/tests/data/sample_fragments/fragment_mapping_with_three_attributes.json @@ -0,0 +1,21 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Wait1": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } + }, + "Mappings": { + "Mapping01": { + "Key01": { + "Name": "Value01" + }, + "Key02": { + "Name": "Value01" + }, + "Key03": { + "Name": "Value01" + } + } + } +} diff --git a/tests/data/sample_fragments/fragment_three_mappings.json 
b/tests/data/sample_fragments/fragment_three_mappings.json new file mode 100644 index 00000000..36008480 --- /dev/null +++ b/tests/data/sample_fragments/fragment_three_mappings.json @@ -0,0 +1,25 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Wait1": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } + }, + "Mappings": { + "Mapping01": { + "Key01": { + "Name": "Value01" + } + }, + "Mapping02": { + "Key01": { + "Name": "Value01" + } + }, + "Mapping03": { + "Key01": { + "Name": "Value01" + } + } + } +} diff --git a/tests/data/sample_fragments/fragment_three_outputs.json b/tests/data/sample_fragments/fragment_three_outputs.json new file mode 100644 index 00000000..e381eeb3 --- /dev/null +++ b/tests/data/sample_fragments/fragment_three_outputs.json @@ -0,0 +1,19 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Wait1": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } + }, + "Outputs": { + "Output1": { + "Value": "Value 1" + }, + "Output2": { + "Value": "Value 2" + }, + "Output3": { + "Value": "Value 3" + } + } +} diff --git a/tests/data/sample_fragments/fragment_three_resources.json b/tests/data/sample_fragments/fragment_three_resources.json new file mode 100644 index 00000000..4039812f --- /dev/null +++ b/tests/data/sample_fragments/fragment_three_resources.json @@ -0,0 +1,14 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Wait1": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + }, + "Wait2": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + }, + "Wait3": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } + } +} diff --git a/tests/data/sample_fragments/fragments/unrelated.txt b/tests/data/sample_fragments/fragments/unrelated.txt new file mode 100644 index 00000000..53348903 --- /dev/null +++ b/tests/data/sample_fragments/fragments/unrelated.txt @@ -0,0 +1 @@ +This is absolutely not a module fragment diff --git 
a/tests/data/sample_fragments/fragments/valid_fragment.json b/tests/data/sample_fragments/fragments/valid_fragment.json new file mode 100644 index 00000000..76120c7d --- /dev/null +++ b/tests/data/sample_fragments/fragments/valid_fragment.json @@ -0,0 +1,24 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/tests/data/sample_fragments/import_short.yaml b/tests/data/sample_fragments/import_short.yaml new file mode 100644 index 00000000..89cfeccd --- /dev/null +++ b/tests/data/sample_fragments/import_short.yaml @@ -0,0 +1,19 @@ +Parameters: + Volume: + Description: "The size of ebs block" + Type: String +Resources: + MyEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: "testkey" + BlockDeviceMappings: + - DeviceName: "/dev/sdm" + Ebs: + VolumeType: "io1" + Iops: "200" + DeleteOnTermination: !ImportValue deleteCondition + VolumeSize: !Ref Volume + - DeviceName: "/dev/sdk" + NoDevice: {} diff --git a/tests/data/sample_fragments/import_value.json b/tests/data/sample_fragments/import_value.json new file mode 100644 index 00000000..5a289a4b --- /dev/null +++ b/tests/data/sample_fragments/import_value.json @@ -0,0 +1,38 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. 
The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "WebServerInstance": { + "Type": "AWS::EC2::Instance", + "Properties": { + "InstanceType": "t2.micro", + "ImageId": "ami-a1b23456", + "NetworkInterfaces": [ + { + "GroupSet": [ + { + "Fn::ImportValue": { + "Fn::Sub": "${NetworkStackNameParameter}-SecurityGroupID" + } + } + ], + "AssociatePublicIpAddress": "true", + "DeviceIndex": "0", + "DeleteOnTermination": "true", + "SubnetId": { + "Fn::ImportValue": { + "Fn::Sub": "${NetworkStackNameParameter}-SubnetID" + } + } + } + ] + } + } + } +} diff --git a/tests/data/sample_fragments/include.json b/tests/data/sample_fragments/include.json new file mode 100644 index 00000000..746bdb7f --- /dev/null +++ b/tests/data/sample_fragments/include.json @@ -0,0 +1,18 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "Fn::Transform": { + "Name": "AWS::Include", + "Parameters": { + "Location": "s3://MyAmazonS3BucketName/MyFileName.json" + } + } + } +} diff --git a/tests/data/sample_fragments/invalid_transform.json b/tests/data/sample_fragments/invalid_transform.json new file mode 100644 index 00000000..080b5fe8 --- /dev/null +++ b/tests/data/sample_fragments/invalid_transform.json @@ -0,0 +1,15 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. 
The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "Fn::Transform": { + "Name": "AWS::UnknownTransform" + } + } +} diff --git a/tests/data/sample_fragments/macros.yaml b/tests/data/sample_fragments/macros.yaml new file mode 100644 index 00000000..f150d6bb --- /dev/null +++ b/tests/data/sample_fragments/macros.yaml @@ -0,0 +1,9 @@ +Resources: + CompanyDefaultsMacro: + Type: AWS::CloudFormation::Macro + Properties: + Name: CompanyDefaults + FunctionName: + Fn::GetAtt: + - CompanyDefaultsLambdaFunction + - Arn diff --git a/tests/data/sample_fragments/nested_stack.json b/tests/data/sample_fragments/nested_stack.json new file mode 100644 index 00000000..b98bcc84 --- /dev/null +++ b/tests/data/sample_fragments/nested_stack.json @@ -0,0 +1,17 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Some random IAM role", + "Resources": { + "NestedStack": { + "Type": "AWS::CloudFormation::Stack", + "DeletionPolicy": "Retain", + "Properties": { + "TemplateURL": "https://s3.amazonaws.com/cloudformation-templates-us-east-2/EC2ChooseAMI.template", + "Parameters": { + "InstanceType": "t1.micro", + "KeyName": "mykey" + } + } + } + } +} diff --git a/tests/data/sample_fragments/noresources.json b/tests/data/sample_fragments/noresources.json new file mode 100644 index 00000000..d6c652fb --- /dev/null +++ b/tests/data/sample_fragments/noresources.json @@ -0,0 +1,10 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. 
The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + } +} diff --git a/tests/data/sample_fragments/output.json b/tests/data/sample_fragments/output.json new file mode 100644 index 00000000..9c5d4c99 --- /dev/null +++ b/tests/data/sample_fragments/output.json @@ -0,0 +1,32 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + }, + "Outputs": { + "BucketName": { + "Description": "Name of the bucket", + "Value": { + "Ref": "S3Bucket" + } + } + } +} diff --git a/tests/data/sample_fragments/paramWithoutDescription.yaml b/tests/data/sample_fragments/paramWithoutDescription.yaml new file mode 100644 index 00000000..c8cf899d --- /dev/null +++ b/tests/data/sample_fragments/paramWithoutDescription.yaml @@ -0,0 +1,6 @@ +Parameters: + anInput: + Type: String +Resources: + WaitHandle: + Type: AWS::CloudFormation::WaitConditionHandle diff --git a/tests/data/sample_fragments/parameter_without_type.json b/tests/data/sample_fragments/parameter_without_type.json new file mode 100644 index 00000000..b354a19d --- /dev/null +++ b/tests/data/sample_fragments/parameter_without_type.json @@ -0,0 +1,23 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. 
The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/tests/data/sample_fragments/randomIAM.json b/tests/data/sample_fragments/randomIAM.json new file mode 100644 index 00000000..db0e3f5b --- /dev/null +++ b/tests/data/sample_fragments/randomIAM.json @@ -0,0 +1,27 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Some random IAM role", + "Resources": { + "InstanceRole": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": { + "Service": [ + "ec2.amazonaws.com" + ] + }, + "Action": [ + "sts:AssumeRole" + ] + } + ] + } + } + } + } +} diff --git a/tests/data/sample_fragments/resource_without_type_or_name.json b/tests/data/sample_fragments/resource_without_type_or_name.json new file mode 100644 index 00000000..4785eaa0 --- /dev/null +++ b/tests/data/sample_fragments/resource_without_type_or_name.json @@ -0,0 +1,13 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": {} + } +} diff --git a/tests/data/sample_fragments/sample.json b/tests/data/sample_fragments/sample.json new file mode 100644 index 00000000..76120c7d --- /dev/null +++ b/tests/data/sample_fragments/sample.json @@ -0,0 +1,24 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. 
The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/tests/data/sample_fragments/secureS3_resolved.json b/tests/data/sample_fragments/secureS3_resolved.json new file mode 100644 index 00000000..c21e252a --- /dev/null +++ b/tests/data/sample_fragments/secureS3_resolved.json @@ -0,0 +1,21 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketName": "someBucketName", + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/tests/data/sample_fragments/syntax_error.json b/tests/data/sample_fragments/syntax_error.json new file mode 100644 index 00000000..fb9b2656 --- /dev/null +++ b/tests/data/sample_fragments/syntax_error.json @@ -0,0 +1,24 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. 
The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": {, + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } +} +} diff --git a/tests/data/sample_fragments/template_with_empty_description.json b/tests/data/sample_fragments/template_with_empty_description.json new file mode 100644 index 00000000..5ce1181c --- /dev/null +++ b/tests/data/sample_fragments/template_with_empty_description.json @@ -0,0 +1,15 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket" + } + } +} diff --git a/tests/data/sample_fragments/template_without_description.json b/tests/data/sample_fragments/template_without_description.json new file mode 100644 index 00000000..3b56a712 --- /dev/null +++ b/tests/data/sample_fragments/template_without_description.json @@ -0,0 +1,14 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket" + } + } +} diff --git a/tests/data/sample_fragments/template_without_parameter_section.json b/tests/data/sample_fragments/template_without_parameter_section.json new file mode 100644 index 00000000..6b5278d8 --- /dev/null +++ b/tests/data/sample_fragments/template_without_parameter_section.json @@ -0,0 +1,15 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "An S3 Bucket", + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git 
a/tests/data/sample_fragments/test_multiple_files/fragments/sample.json b/tests/data/sample_fragments/test_multiple_files/fragments/sample.json new file mode 100644 index 00000000..2c62860d --- /dev/null +++ b/tests/data/sample_fragments/test_multiple_files/fragments/sample.json @@ -0,0 +1,8 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Wait1": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } + } +} diff --git a/tests/data/sample_fragments/test_multiple_files/fragments/sample2.json b/tests/data/sample_fragments/test_multiple_files/fragments/sample2.json new file mode 100644 index 00000000..017f43f1 --- /dev/null +++ b/tests/data/sample_fragments/test_multiple_files/fragments/sample2.json @@ -0,0 +1,8 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Wait2": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } + } +} diff --git a/tests/data/sample_fragments/top_level_include.json b/tests/data/sample_fragments/top_level_include.json new file mode 100644 index 00000000..1a5e8c82 --- /dev/null +++ b/tests/data/sample_fragments/top_level_include.json @@ -0,0 +1,30 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. 
The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Fn::Transform": { + "Name": "AWS::Include", + "Parameters": { + "Location": "s3://MyAmazonS3BucketName/MyFileName.json" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/tests/data/sample_fragments/transform.json b/tests/data/sample_fragments/transform.json new file mode 100644 index 00000000..6c8949d4 --- /dev/null +++ b/tests/data/sample_fragments/transform.json @@ -0,0 +1,32 @@ +{ + "Transform": [ + "AWS::Serverless" + ], + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "MyServerlessFunctionLogicalID": { + "Type": "AWS::Lambda::Function", + "Properties": { + "Handler": "index.handler", + "Code": { + "S3Bucket": "testBucket", + "S3Key": "mySourceCode.zip" + }, + "Role": { + "Fn::GetAtt": [ + "FunctionNameRole", + "Arn" + ] + }, + "Runtime": "nodejs8.10" + } + } + } +} diff --git a/tests/data/sample_fragments/transform_section.json b/tests/data/sample_fragments/transform_section.json new file mode 100644 index 00000000..4de044c0 --- /dev/null +++ b/tests/data/sample_fragments/transform_section.json @@ -0,0 +1,22 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Some random IAM role", + "Transform": [ + "MyMacro", + "AWS::Serverless" + ], + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "DeletionPolicy": "Retain", + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git 
a/tests/fragments/sample.json b/tests/fragments/sample.json new file mode 100644 index 00000000..76120c7d --- /dev/null +++ b/tests/fragments/sample.json @@ -0,0 +1,24 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "A secure S3 Bucket. The features are Versioning and DeletionPolicy.", + "Parameters": { + "BucketName": { + "Description": "Name for the bucket", + "Type": "String" + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "VersioningConfiguration": { + "Status": "Enabled" + } + } + } + } +} diff --git a/tests/fragments/schema.json b/tests/fragments/schema.json new file mode 100644 index 00000000..1599cc5a --- /dev/null +++ b/tests/fragments/schema.json @@ -0,0 +1,58 @@ +{ + "typeName": "AWS::ORG::MYTYPE::MODULE", + "description": "Some random IAM role", + "properties": { + "Parameters": { + "type": "object", + "properties": { + "BucketName": { + "description": "Name for the bucket", + "type": "object", + "properties": { + "Description": { + "type": "string" + }, + "Type": { + "type": "string" + } + }, + "required": [ + "Type", + "Description" + ] + } + } + }, + "Resources": { + "properties": { + "S3Bucket": { + "type": "object", + "properties": { + "Type": { + "type": "string", + "const": "AWS::S3::Bucket" + }, + "Properties": { + "$ref": "https://schema.cloudformation.us-east-1.amazonaws.com/aws_s3_bucket.json" + } + } + }, + "InstanceRole": { + "type": "object", + "properties": { + "Type": { + "type": "string", + "const": "AWS::IAM::Role" + }, + "Properties": { + "$ref": "https://schema.cloudformation.us-east-1.amazonaws.com/aws_iam_role.json" + } + } + } + }, + "type": "object", + "additionalProperties": false + } + }, + "additionalProperties": true +} diff --git a/tests/fragments/test_generator.py b/tests/fragments/test_generator.py new file mode 100644 index 00000000..469cb670 --- /dev/null +++ 
b/tests/fragments/test_generator.py @@ -0,0 +1,385 @@ +import os +from pathlib import Path +from unittest.mock import patch + +import pytest + +from rpdk.core.data_loaders import make_validator, resource_json +from rpdk.core.exceptions import FragmentValidationError +from rpdk.core.fragment.generator import TemplateFragment +from tests.utils import CONTENTS_UTF8 + +type_name = "AWS::ORG::MYTYPE::MODULE" +TIMEOUT_IN_SECONDS = 10 + +directory = os.path.dirname(__file__) + +test_root = "build" + + +@pytest.fixture +def template_fragment(): + return TemplateFragment(type_name, test_root) + + +def test_schema_generator(template_fragment): + fragment1 = os.path.join(directory, "../data/sample_fragments/sample.json") + merged_fragment = template_fragment._load_fragment(fragment1) + with patch.object( + template_fragment, "_read_raw_fragments", return_value=merged_fragment + ): + schema = template_fragment.generate_schema() + + assert os.path.exists(test_root + "/schema.json") + + assert len(schema) == 4 + assert len(schema["properties"]) == 2 + assert "Parameters" in schema["properties"] + assert "Resources" in schema["properties"] + assert "S3Bucket" in schema["properties"]["Resources"]["properties"] + assert ( + len(schema["properties"]["Resources"]["properties"]["S3Bucket"]["properties"]) + == 2 + ) + assert ( + schema["properties"]["Resources"]["properties"]["S3Bucket"]["properties"][ + "Properties" + ]["type"] + == "object" + ) + __validate_against_meta_schema(schema) + os.remove(test_root + "/schema.json") + + +def test_schema_generation_param_without_description(template_fragment): + schema = __generate_schema("paramWithoutDescription.yaml", template_fragment) + + assert len(schema) == 4 + assert len(schema["properties"]) == 2 + assert "Parameters" in schema["properties"] + assert ( + "Description" + not in schema["properties"]["Parameters"]["properties"]["anInput"]["required"] + ) + __validate_against_meta_schema(schema) + os.remove(test_root + "/schema.json") + + 
+def test_schema_generation_param_type_aws_specific(template_fragment): + schema = __generate_schema("aws-specific-parameter.json", template_fragment) + + assert len(schema) == 4 + assert len(schema["properties"]) == 2 + assert ( + schema["properties"]["Parameters"]["properties"]["VpcId"]["properties"]["Type"][ + "type" + ] + == "string" + ) + __validate_against_meta_schema(schema) + os.remove(test_root + "/schema.json") + + +def test_template_fragments_without_parameter_section(template_fragment): + schema = __generate_schema( + "template_without_parameter_section.json", template_fragment + ) + + assert len(schema) == 4 + assert schema["properties"] is not None + assert schema["properties"]["Resources"] is not None + __validate_against_meta_schema(schema) + os.remove(test_root + "/schema.json") + + +def test_template_fragments_without_parameter_section_is_valid(template_fragment): + __assert_validation_throws_no_error( + "template_without_parameter_section.json", template_fragment + ) + + +def test_template_fragments_without_description(template_fragment): + schema = __generate_schema("template_without_description.json", template_fragment) + + assert len(schema) == 4 + assert schema["properties"] is not None + assert schema["description"] is not None + assert schema["properties"].get("Resources") is not None + assert schema["properties"].get("Parameters") is not None + __validate_against_meta_schema(schema) + os.remove(test_root + "/schema.json") + + +def test_template_fragment_with_empty_description(template_fragment): + schema = __generate_schema( + "template_with_empty_description.json", template_fragment + ) + + assert len(schema) == 4 + assert schema["properties"] is not None + assert schema["description"] is not None + assert schema["properties"].get("Resources") is not None + assert schema["properties"].get("Parameters") is not None + __validate_against_meta_schema(schema) + os.remove(test_root + "/schema.json") + + +def 
__generate_schema(fragment_file_name, template_fragment): + if not os.path.exists(test_root): + os.mkdir(test_root) + fragment = os.path.join(directory, "../data/sample_fragments/" + fragment_file_name) + merged_fragment = template_fragment._load_fragment(fragment) + with patch.object( + template_fragment, "_read_raw_fragments", return_value=merged_fragment + ): + schema = template_fragment.generate_schema() + return schema + + +def test_resolved_generated_schema_is_valid_against_metaschema(template_fragment): + if not os.path.exists(test_root): + os.mkdir(test_root) + fragment1 = os.path.join( + directory, "../data/sample_fragments/secureS3_resolved.json" + ) + merged_fragment = template_fragment._load_fragment(fragment1) + with patch.object( + template_fragment, "_read_raw_fragments", return_value=merged_fragment + ): + schema = template_fragment.generate_schema() + + __validate_against_meta_schema(schema) + assert os.path.exists(test_root + "/schema.json") + os.remove(test_root + "/schema.json") + + +def __validate_against_meta_schema(schema): + __make_resource_validator().validate(schema) + + +def test_generate_sample_fragment(template_fragment): + if not os.path.exists(test_root): + os.mkdir(test_root) + sample_fragment_folder_path = test_root + "/fragments" + sample_fragment_path = sample_fragment_folder_path + "/sample.json" + if os.path.exists(sample_fragment_path): + os.remove(sample_fragment_path) + os.rmdir(sample_fragment_folder_path) + assert not os.path.exists(sample_fragment_path) + template_fragment.generate_sample_fragment() + assert os.path.exists(sample_fragment_path) + + +def test_fragments_are_loaded_yaml_short(template_fragment): + fragment = os.path.join(directory, "../data/sample_fragments/ec2_short.yaml") + merged_fragment = template_fragment._load_fragment(fragment) + assert len(merged_fragment) == 2 + assert len(merged_fragment["Resources"]) == 1 + assert "MyEC2Instance" in merged_fragment["Resources"] + + +def 
test_template_fragments_are_valid(template_fragment): + __assert_validation_throws_no_error("sample.json", template_fragment) + + +def test_template_fragments_import_value(template_fragment): + __assert_throws_validation_error( + "import_value.json", template_fragment, "can't contain any Fn::ImportValue" + ) + + +def test_template_fragments_import_value_short(template_fragment): + __assert_throws_validation_error( + "import_short.yaml", template_fragment, "can't contain any Fn::ImportValue" + ) + + +def test_template_fragments_include_resource_level(template_fragment): + __assert_throws_validation_error( + "include.json", template_fragment, "can't use AWS::Include" + ) + + +def test_template_fragments_include_top_level(template_fragment): + __assert_throws_validation_error( + "top_level_include.json", template_fragment, "can't contain any transform" + ) + + +def test_template_fragments_invalid_transform(template_fragment): + __assert_throws_validation_error( + "invalid_transform.json", + template_fragment, + "Resource 'Fn::Transform' is invalid", + ) + + +def test_template_fragments_resource_without_type(template_fragment): + __assert_throws_validation_error( + "resource_without_type_or_name.json", + template_fragment, + "has neither Type nor Name", + ) + + +def test_template_fragments_macros(template_fragment): + __assert_throws_validation_error( + "macros.yaml", template_fragment, "can't contain any macro" + ) + + +def test_template_fragments_nested_stack(template_fragment): + __assert_throws_validation_error( + "nested_stack.json", template_fragment, "can't contain nested stack" + ) + + +def test_template_fragments_parameter_without_type(template_fragment): + __assert_throws_validation_error( + "parameter_without_type.json", template_fragment, "must have a Type" + ) + + +def test_template_fragments_transform(template_fragment): + __assert_throws_validation_error( + "transform.json", template_fragment, "can't contain transform section" + ) + + +def 
test_template_fragments_transform_section(template_fragment): + __assert_throws_validation_error( + "transform_section.json", template_fragment, "can't contain transform section" + ) + + +def test_template_fragments_without_resources(template_fragment): + __assert_throws_validation_error( + "noresources.json", template_fragment, "must have a Resources section" + ) + + +def test_template_fragments_with_json_syntax_error(template_fragment): + __assert_throws_validation_error( + "syntax_error.json", template_fragment, "line 15, column 24" + ) + + +def test_template_fragments_exports(template_fragment): + __assert_throws_validation_error( + "exports.json", template_fragment, "cannot contain any Export" + ) + + +def test_template_fragments_output_without_export_is_valid(template_fragment): + __assert_validation_throws_no_error("output.json", template_fragment) + + +def test_template_exceeding_resource_limit(template_fragment): + template_fragment.resource_limit = 2 + __assert_throws_validation_error( + "fragment_three_resources.json", + template_fragment, + "has 3 resources but must not exceed the limit of 2", + ) + + +def test_template_exceeding_output_limit(template_fragment): + template_fragment.output_limit = 2 + __assert_throws_validation_error( + "fragment_three_outputs.json", + template_fragment, + "has 3 outputs but must not exceed the limit of 2", + ) + + +def test_template_exceeding_mapping_limit(template_fragment): + template_fragment.mapping_limit = 2 + __assert_throws_validation_error( + "fragment_three_mappings.json", + template_fragment, + "has 3 mappings but must not exceed the limit of 2", + ) + + +def test_template_exceeding_mapping_attribute_limit(template_fragment): + template_fragment.mapping_attribute_limit = 2 + __assert_throws_validation_error( + "fragment_mapping_with_three_attributes.json", + template_fragment, + "has 3 attributes but must not exceed the limit of 2", + ) + + +def test_template_mappings_dont_exceed_any_limit(template_fragment): 
+ __assert_validation_throws_no_error( + "fragment_mapping_with_three_attributes.json", template_fragment + ) + + +def test_template_exceeding_file_size_limit(template_fragment): + template_fragment.template_file_size_in_bytes_limit = 300 + __assert_throws_validation_error( + "sample.json", + template_fragment, + "exceeds the CloudFormation Template size limit", + ) + + +def test_template_folder_with_multiple_fragment_files(): + template_fragment = TemplateFragment( + type_name, + os.path.join(directory, "../data/sample_fragments/test_multiple_files"), + ) + with pytest.raises(FragmentValidationError) as validation_error: + template_fragment.validate_fragments() + assert "can only consist of a single template file" in str(validation_error.value) + + +def test_merge_fragments_ignores_unrelated_files(): + template_fragment = TemplateFragment( + type_name, os.path.join(directory, "../data/sample_fragments/") + ) + template_fragment.validate_fragments() + + +def __assert_validation_throws_no_error(template_file_name, template_fragment): + with patch.object( + template_fragment, + "_get_fragment_file", + return_value=os.path.join( + directory, "../data/sample_fragments/" + template_file_name + ), + ): + template_fragment.validate_fragments() + + +def __assert_throws_validation_error( + template_file_name, template_fragment, expected_error_message_fragment +): + with pytest.raises(FragmentValidationError) as validation_error: + with patch.object( + template_fragment, + "_get_fragment_file", + return_value=os.path.join( + directory, "../data/sample_fragments/" + template_file_name + ), + ): + template_fragment.validate_fragments() + assert expected_error_message_fragment in str(validation_error.value) + + +def test_overwrite_doesnt_exist(template_fragment, tmpdir): + path = Path(tmpdir.join("test")).resolve() + + template_fragment._overwrite(path, CONTENTS_UTF8) + + with path.open("r", encoding="utf-8") as f: + assert f.read() == CONTENTS_UTF8 + + +def 
__make_resource_validator(base_uri=None, timeout=TIMEOUT_IN_SECONDS): + schema = resource_json( + __name__, + "../../src/rpdk/core/data/schema/provider.definition.schema.modules.v1.json", + ) + return make_validator(schema, base_uri=base_uri) diff --git a/tests/module/test_init_module.py b/tests/module/test_init_module.py new file mode 100644 index 00000000..feb7c1da --- /dev/null +++ b/tests/module/test_init_module.py @@ -0,0 +1,32 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from rpdk.core.exceptions import WizardValidationError +from rpdk.core.module import init_module +from rpdk.core.module.init_module import validate_type_name +from rpdk.core.project import Project + + +def test_validate_type_name_invalid(): + with pytest.raises(WizardValidationError): + validate_type_name("AWS-Color-Red") + + +def test_validate_type_name_valid(): + assert validate_type_name("AWS::Color::Red::MODULE") == "AWS::Color::Red::MODULE" + + +def test_init_module_falls_back_to_user_input_if_arg_invalid(): + patch_validate = patch.object( + init_module, "validate_type_name", side_effect=WizardValidationError + ) + patch_input = patch.object( + init_module, "input_typename", return_value="Module::Mc::Modulson::MODULE" + ) + mock_project = MagicMock(spec=Project) + mock_args = MagicMock() + mock_args.type_name.return_value = "Not a valid type" + with patch_validate, patch_input: + init_module.init_module(mock_args, mock_project) + mock_project.init_module.assert_called_once_with("Module::Mc::Modulson::MODULE") diff --git a/tests/resource/test_init_resource.py b/tests/resource/test_init_resource.py new file mode 100644 index 00000000..274e42e5 --- /dev/null +++ b/tests/resource/test_init_resource.py @@ -0,0 +1,84 @@ +from unittest.mock import patch + +import pytest + +from rpdk.core.exceptions import WizardAbortError, WizardValidationError +from rpdk.core.resource.init_resource import ( + ValidatePluginChoice, + input_language, + input_typename, + validate_type_name, 
+) +from tests.test_init import PROMPT + + +def test_input_typename(): + type_name = "AWS::Color::Red" + patch_input = patch( + "rpdk.core.resource.init_resource.input_with_validation", return_value=type_name + ) + with patch_input as mock_input: + assert input_typename() == type_name + mock_input.assert_called_once() + + +def test_input_language_no_plugins(): + validator = ValidatePluginChoice([]) + with patch("rpdk.core.resource.init_resource.validate_plugin_choice", validator): + with pytest.raises(WizardAbortError): + input_language() + + +def test_input_language_one_plugin(): + validator = ValidatePluginChoice([PROMPT]) + with patch("rpdk.core.resource.init_resource.validate_plugin_choice", validator): + assert input_language() == PROMPT + + +def test_input_language_several_plugins(): + validator = ValidatePluginChoice(["1", PROMPT, "2"]) + patch_validator = patch( + "rpdk.core.resource.init_resource.validate_plugin_choice", validator + ) + patch_input = patch("rpdk.core.utils.init_utils.input", return_value="2") + with patch_validator, patch_input as mock_input: + assert input_language() == PROMPT + + mock_input.assert_called_once() + + +def test_validate_plugin_choice_not_an_int(): + validator = ValidatePluginChoice(["test"]) + with pytest.raises(WizardValidationError) as excinfo: + validator("a") + assert "integer" in str(excinfo.value) + + +def test_validate_plugin_choice_less_than_zero(): + validator = ValidatePluginChoice(["test"]) + with pytest.raises(WizardValidationError) as excinfo: + validator("-1") + assert "select" in str(excinfo.value) + + +def test_validate_plugin_choice_greater_than_choice(): + choices = range(3) + validator = ValidatePluginChoice(choices) + with pytest.raises(WizardValidationError) as excinfo: + validator(str(len(choices) + 1)) # index is 1 based for input + assert "select" in str(excinfo.value) + + +def test_validate_plugin_choice_valid(): + choices = ["1", PROMPT, "2"] + validator = ValidatePluginChoice(choices) + assert 
validator("2") == PROMPT + + +def test_validate_type_name_invalid(): + with pytest.raises(WizardValidationError): + validate_type_name("AWS-Color-Red") + + +def test_validate_type_name_valid(): + assert validate_type_name("AWS::Color::Red") == "AWS::Color::Red" diff --git a/tests/test_init.py b/tests/test_init.py index 9fbf2db8..a4f649f0 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -9,12 +9,13 @@ check_for_existing_project, ignore_abort, input_language, - input_typename, input_with_validation, validate_type_name, validate_yes, ) +from rpdk.core.module.init_module import input_typename as input_typename_module from rpdk.core.project import Project +from rpdk.core.resource.init_resource import input_typename as input_typename_resource from .utils import add_dummy_language_plugin, dummy_parser, get_args, get_mock_project @@ -22,19 +23,25 @@ ERROR = "TUJFEL" -def test_init_method_interactive(): +def test_init_resource_method_interactive(): type_name = object() language = object() mock_project, patch_project = get_mock_project() - patch_tn = patch("rpdk.core.init.input_typename", return_value=type_name) - patch_l = patch("rpdk.core.init.input_language", return_value=language) + patch_tn = patch( + "rpdk.core.resource.init_resource.input_typename", return_value=type_name + ) + patch_l = patch( + "rpdk.core.resource.init_resource.input_language", return_value=language + ) + patch_at = patch("rpdk.core.init.init_artifact_type", return_value="RESOURCE") - with patch_project, patch_tn as mock_tn, patch_l as mock_l: + with patch_project, patch_at as mock_t, patch_tn as mock_tn, patch_l as mock_l: main(args_in=["init"]) mock_tn.assert_called_once_with() mock_l.assert_called_once_with() + mock_t.assert_called_once() mock_project.load_settings.assert_called_once_with() mock_project.init.assert_called_once_with( @@ -46,23 +53,62 @@ def test_init_method_interactive(): "verbose": 0, "force": False, "type_name": None, + "artifact_type": None, }, ) 
mock_project.generate.assert_called_once_with() -def test_init_method_noninteractive(): - add_dummy_language_plugin() +def test_init_module_method_interactive(): + type_name = object() + language = object() + + mock_project, patch_project = get_mock_project() + + patch_tn = patch( + "rpdk.core.module.init_module.input_typename", return_value=type_name + ) + patch_l = patch( + "rpdk.core.resource.init_resource.input_language", return_value=language + ) + patch_at = patch("rpdk.core.init.init_artifact_type", return_value="MODULE") - args = get_args("dummy", "Test::Test::Test") + with patch_project, patch_tn as mock_tn, patch_l as mock_l, patch_at as mock_t: + main(args_in=["init"]) + + mock_tn.assert_called_once_with() + mock_l.assert_not_called() + mock_t.assert_called_once() + + mock_project.load_settings.assert_called_once_with() + mock_project.init_module.assert_called_once_with(type_name) + mock_project.generate.assert_not_called() + + +def test_init_resource_method_noninteractive(): + add_dummy_language_plugin() + artifact_type = "RESOURCE" + args = get_args("dummy", "Test::Test::Test", artifact_type) mock_project, patch_project = get_mock_project() patch_get_parser = patch( "rpdk.core.init.get_parsers", return_value={"dummy": dummy_parser} ) + # flake8: noqa: B950 + # pylint: disable=line-too-long with patch_project, patch_get_parser as mock_parser: - main(args_in=["init", "--type-name", args.type_name, args.language, "--dummy"]) + main( + args_in=[ + "init", + "--type-name", + args.type_name, + "--artifact-type", + args.artifact_type, + args.language, + "--dummy", + ] + ) mock_parser.assert_called_once() @@ -78,25 +124,42 @@ def test_init_method_noninteractive(): "type_name": args.type_name, "language": args.language, "dummy": True, + "artifact_type": artifact_type, }, ) mock_project.generate.assert_called_once_with() -def test_init_method_noninteractive_invalid_type_name(): +def test_init_resource_method_noninteractive_invalid_type_name(): 
add_dummy_language_plugin() type_name = object() + artifact_type = "RESOURCE" - args = get_args("dummy", "invalid_type_name") + args = get_args("dummy", "invalid_type_name", "RESOURCE") mock_project, patch_project = get_mock_project() - patch_tn = patch("rpdk.core.init.input_typename", return_value=type_name) + patch_tn = patch( + "rpdk.core.resource.init_resource.input_typename", return_value=type_name + ) + patch_t = patch("rpdk.core.init.init_artifact_type", return_value=artifact_type) patch_get_parser = patch( "rpdk.core.init.get_parsers", return_value={"dummy": dummy_parser} ) - with patch_project, patch_tn as mock_tn, patch_get_parser as mock_parser: - main(args_in=["init", "-t", args.type_name, args.language, "--dummy"]) + # flake8: noqa: B950 + # pylint: disable=line-too-long + with patch_project, patch_t, patch_tn as mock_tn, patch_get_parser as mock_parser: + main( + args_in=[ + "init", + "-t", + args.type_name, + "-a", + args.artifact_type, + args.language, + "--dummy", + ] + ) mock_tn.assert_called_once_with() mock_parser.assert_called_once() @@ -111,8 +174,9 @@ def test_init_method_noninteractive_invalid_type_name(): "verbose": 0, "force": False, "type_name": args.type_name, - "language": args.language, + "artifact_type": artifact_type, "dummy": True, + "language": args.language, }, ) mock_project.generate.assert_called_once_with() @@ -201,6 +265,38 @@ def test_validate_plugin_choice_valid(): assert validator("2") == PROMPT +def test_init_module_method_noninteractive(): + add_dummy_language_plugin() + artifact_type = "MODULE" + args = get_args("dummy", "Test::Test::Test::MODULE", artifact_type) + mock_project, patch_project = get_mock_project() + + patch_get_parser = patch( + "rpdk.core.init.get_parsers", return_value={"dummy": dummy_parser} + ) + + # flake8: noqa: B950 + # pylint: disable=line-too-long + with patch_project, patch_get_parser as mock_parser: + main( + args_in=[ + "init", + "--type-name", + args.type_name, + "--artifact-type", + 
args.artifact_type, + args.language, + "--dummy", + ] + ) + + mock_parser.assert_called_once() + + mock_project.load_settings.assert_called_once_with() + mock_project.init_module.assert_called_once_with(args.type_name) + mock_project.generate.assert_not_called() + + def test_check_for_existing_project_good_path(): project = Mock(spec=Project) project.load_settings.side_effect = FileNotFoundError @@ -231,7 +327,7 @@ def test_check_for_existing_project_bad_path_ask_yes(): project.type_name = "" patch_input = patch( - "rpdk.core.init.input_with_validation", autospec=True, return_value=True + "rpdk.core.init.input_with_validation", autospec=True, return_value="m" ) with patch_input as mock_input: check_for_existing_project(project) @@ -286,11 +382,19 @@ def test_ignore_abort_abort(): function.assert_called_once_with(sentinel) -def test_input_typename(): +def test_input_typename_resource(): type_name = "AWS::Color::Red" - patch_input = patch("rpdk.core.init.input", return_value=type_name) + patch_input = patch("rpdk.core.utils.init_utils.input", return_value=type_name) + with patch_input as mock_input: + assert input_typename_resource() == type_name + mock_input.assert_called_once() + + +def test_input_typename_module(): + type_name = "AWS::Color::Red::MODULE" + patch_input = patch("rpdk.core.utils.init_utils.input", return_value=type_name) with patch_input as mock_input: - assert input_typename() == type_name + assert input_typename_module() == type_name mock_input.assert_called_once() diff --git a/tests/test_project.py b/tests/test_project.py index c4f5029c..71ec0f9e 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -19,6 +19,7 @@ from rpdk.core.data_loaders import resource_json, resource_stream from rpdk.core.exceptions import ( DownstreamError, + FragmentValidationError, InternalError, InvalidProjectError, SpecValidationError, @@ -37,8 +38,11 @@ from .utils import CONTENTS_UTF8, UnclosingBytesIO +ARTIFACT_TYPE_RESOURCE = "RESOURCE" 
+ARTIFACT_TYPE_MODULE = "MODULE" LANGUAGE = "BQHDBC" TYPE_NAME = "AWS::Color::Red" +MODULE_TYPE_NAME = "AWS::Color::Red::MODULE" REGION = "us-east-1" ENDPOINT = "cloudformation.beta.com" RUNTIME = random.choice(list(LAMBDA_RUNTIMES)) @@ -106,10 +110,18 @@ def test_load_settings_invalid_settings(project): mock_open.assert_called_once_with("r", encoding="utf-8") -def test_load_settings_valid_json(project): +def test_load_settings_invalid_modules_settings(project): + with patch_settings(project, '{"artifact_type": "MODULE"}') as mock_open: + with pytest.raises(InvalidProjectError): + project.load_settings() + mock_open.assert_called_once_with("r", encoding="utf-8") + + +def test_load_settings_valid_json_for_resource(project): plugin = object() data = json.dumps( { + "artifact_type": "RESOURCE", "typeName": TYPE_NAME, "language": LANGUAGE, "runtime": RUNTIME, @@ -129,10 +141,71 @@ def test_load_settings_valid_json(project): assert project.type_info == ("AWS", "Color", "Red") assert project.type_name == TYPE_NAME assert project.language == LANGUAGE + assert project.artifact_type == ARTIFACT_TYPE_RESOURCE assert project._plugin is plugin assert project.settings == {} +def test_load_settings_valid_json_for_resource_backward_compatible(project): + plugin = object() + data = json.dumps( + { + "typeName": TYPE_NAME, + "language": LANGUAGE, + "runtime": RUNTIME, + "entrypoint": None, + "testEntrypoint": None, + } + ) + patch_load = patch( + "rpdk.core.project.load_plugin", autospec=True, return_value=plugin + ) + + with patch_settings(project, data) as mock_open, patch_load as mock_load: + project.load_settings() + + mock_open.assert_called_once_with("r", encoding="utf-8") + mock_load.assert_called_once_with(LANGUAGE) + assert project.type_info == ("AWS", "Color", "Red") + assert project.type_name == TYPE_NAME + assert project.language == LANGUAGE + assert project.artifact_type == ARTIFACT_TYPE_RESOURCE + assert project._plugin is plugin + assert project.settings == {} + + 
+def test_load_settings_valid_json_for_module(project): + plugin = object() + data = json.dumps( + { + "artifact_type": "MODULE", + "typeName": MODULE_TYPE_NAME, + } + ) + patch_load = patch( + "rpdk.core.project.load_plugin", autospec=True, return_value=plugin + ) + + with patch_settings(project, data) as mock_open, patch_load as mock_load: + project.load_settings() + + mock_open.assert_called_once_with("r", encoding="utf-8") + mock_load.assert_not_called() + assert project.type_info == ("AWS", "Color", "Red", "MODULE") + assert project.type_name == MODULE_TYPE_NAME + assert project.language is None + assert project.artifact_type == ARTIFACT_TYPE_MODULE + assert project._plugin is None + assert project.settings == {} + + +def test_generate_for_modules_succeeds(project): + project.type_info = ("AWS", "Color", "Red", "MODULE") + project.artifact_type = ARTIFACT_TYPE_MODULE + project.generate() + project.generate_docs() + + def test_load_schema_settings_not_loaded(project): with pytest.raises(InternalError): project.load_schema() @@ -477,7 +550,7 @@ def test_generate_handlers_role_session_timeout(project, tmpdir, schema, result) mock_plugin.generate.assert_called_once_with(project) -def test_init(project): +def test_init_resource(project): type_name = "AWS::Color::Red" mock_plugin = MagicMock(spec=["init"]) @@ -494,6 +567,7 @@ def test_init(project): assert project.type_info == ("AWS", "Color", "Red") assert project.type_name == type_name assert project.language == LANGUAGE + assert project.artifact_type == ARTIFACT_TYPE_RESOURCE assert project._plugin is mock_plugin assert project.settings == {} @@ -523,6 +597,36 @@ def test_init(project): assert f.read() == b"\n" +def test_init_module(project): + type_name = "AWS::Color::Red" + + mock_plugin = MagicMock(spec=["init"]) + patch_load_plugin = patch( + "rpdk.core.project.load_plugin", autospec=True, return_value=mock_plugin + ) + + with patch_load_plugin as mock_load_plugin: + project.init_module(type_name) + + 
mock_load_plugin.assert_not_called() + mock_plugin.init.assert_not_called() + + assert project.type_info == ("AWS", "Color", "Red") + assert project.type_name == type_name + assert project.language is None + assert project.artifact_type == ARTIFACT_TYPE_MODULE + assert project._plugin is None + assert project.settings == {} + + with project.settings_path.open("r", encoding="utf-8") as f: + assert json.load(f) + + # ends with newline + with project.settings_path.open("rb") as f: + f.seek(-1, os.SEEK_END) + assert f.read() == b"\n" + + def test_load_invalid_schema(project): patch_settings = patch.object(project, "load_settings") patch_schema = patch.object( @@ -539,6 +643,29 @@ def test_load_invalid_schema(project): assert "invalid" in str(excinfo.value) +def test_load_module_project_succeeds(project): + project.artifact_type = "MODULE" + project.type_name = "Unit::Test::Malik::MODULE" + patch_load_settings = patch.object( + project, "load_settings", return_value={"artifact_type": "MODULE"} + ) + with patch_load_settings: + project.load() + + +def test_load_module_project_with_invalid_fragments(project): + project.artifact_type = "MODULE" + project.type_name = "Unit::Test::Malik::MODULE" + patch_load_settings = patch.object( + project, "load_settings", return_value={"artifact_type": "MODULE"} + ) + patch_validate = patch.object( + project, "_validate_fragments", side_effect=FragmentValidationError + ) + with patch_load_settings, patch_validate, pytest.raises(InvalidProjectError): + project.load() + + def test_schema_not_found(project): patch_settings = patch.object(project, "load_settings") patch_schema = patch.object(project, "load_schema", side_effect=FileNotFoundError) @@ -593,6 +720,7 @@ def test_submit_dry_run(project): project.type_name = TYPE_NAME project.runtime = RUNTIME project.language = LANGUAGE + project.artifact_type = ARTIFACT_TYPE_RESOURCE zip_path = project.root / "test.zip" with project.schema_path.open("w", encoding="utf-8") as f: @@ -654,10 
+782,79 @@ def test_submit_dry_run(project): assert zip_file.testzip() is None +def test_submit_dry_run_modules(project): + project.type_name = MODULE_TYPE_NAME + project.runtime = RUNTIME + project.language = LANGUAGE + project.artifact_type = ARTIFACT_TYPE_MODULE + project.fragment_dir = project.root / "fragments" + zip_path = project.root / "test.zip" + schema_path = project.root / "schema.json" + fragment_path = project.root / "fragments" / "fragment.json" + + with project.schema_path.open("w", encoding="utf-8") as f: + f.write(CONTENTS_UTF8) + + with schema_path.open("w", encoding="utf-8") as f: + f.write(CONTENTS_UTF8) + + if not os.path.exists(project.root / "fragments"): + os.mkdir(project.root / "fragments") + + with fragment_path.open("w", encoding="utf-8") as f: + f.write(CONTENTS_UTF8) + + with project.overrides_path.open("w", encoding="utf-8") as f: + f.write(json.dumps(empty_override())) + + project.write_settings() + + patch_plugin = patch.object(project, "_plugin", spec=LanguagePlugin) + patch_upload = patch.object(project, "_upload", autospec=True) + patch_path = patch("rpdk.core.project.Path", return_value=zip_path) + patch_temp = patch("rpdk.core.project.TemporaryFile", autospec=True) + + # fmt: off + # these context managers can't be wrapped by black, but it removes the \ + with patch_plugin as mock_plugin, patch_path as mock_path, \ + patch_temp as mock_temp, patch_upload as mock_upload: + project.submit( + True, + endpoint_url=ENDPOINT, + region_name=REGION, + role_arn=None, + use_role=True, + set_default=False + ) + # fmt: on + + mock_temp.assert_not_called() + mock_path.assert_called_once_with("{}.zip".format(project.hypenated_name)) + mock_plugin.package.assert_not_called() + mock_upload.assert_not_called() + + fragment_file_name = "fragments/fragment.json" + + with zipfile.ZipFile(zip_path, mode="r") as zip_file: + assert set(zip_file.namelist()) == { + fragment_file_name, + SCHEMA_UPLOAD_FILENAME, + SETTINGS_FILENAME, + 
OVERRIDES_FILENAME, + } + schema_contents = zip_file.read(SCHEMA_UPLOAD_FILENAME).decode("utf-8") + assert schema_contents == CONTENTS_UTF8 + overrides = json.loads(zip_file.read(OVERRIDES_FILENAME).decode("utf-8")) + assert "CREATE" in overrides + # https://docs.python.org/3/library/zipfile.html#zipfile.ZipFile.testzip + assert zip_file.testzip() is None + + def test_submit_live_run(project): project.type_name = TYPE_NAME project.runtime = RUNTIME project.language = LANGUAGE + project.artifact_type = ARTIFACT_TYPE_RESOURCE with project.schema_path.open("w", encoding="utf-8") as f: f.write(CONTENTS_UTF8) @@ -705,8 +902,47 @@ def test_submit_live_run(project): temp_file._close() +def test_submit_live_run_for_module(project): + project.type_name = MODULE_TYPE_NAME + project.runtime = RUNTIME + project.language = LANGUAGE + project.artifact_type = ARTIFACT_TYPE_MODULE + + with project.schema_path.open("w", encoding="utf-8") as f: + f.write(CONTENTS_UTF8) + + project.write_settings() + + temp_file = UnclosingBytesIO() + + patch_plugin = patch.object(project, "_plugin", spec=LanguagePlugin) + patch_path = patch("rpdk.core.project.Path", autospec=True) + patch_temp = patch("rpdk.core.project.TemporaryFile", return_value=temp_file) + + # fmt: off + # these context managers can't be wrapped by black, but it removes the \ + with patch_plugin as mock_plugin, patch_path as mock_path, \ + patch_temp as mock_temp, \ + pytest.raises(InternalError): + project.submit( + False, + endpoint_url=ENDPOINT, + region_name=REGION, + role_arn=None, + use_role=True, + set_default=True + ) + # fmt: on + + mock_path.assert_not_called() + mock_temp.assert_called_once_with("w+b") + mock_plugin.package.assert_not_called() + temp_file._close() + + def test__upload_good_path_create_role_and_set_default(project): project.type_name = TYPE_NAME + project.artifact_type = ARTIFACT_TYPE_RESOURCE project.schema = {"handlers": {}} mock_cfn_client = MagicMock(spec=["register_type"]) @@ -765,6 +1001,7 @@ 
def test__upload_good_path_skip_role_creation( project, use_role, expected_additional_args ): project.type_name = TYPE_NAME + project.artifact_type = ARTIFACT_TYPE_RESOURCE project.schema = {"handlers": {}} mock_cfn_client = MagicMock(spec=["register_type"]) @@ -812,6 +1049,7 @@ def test__upload_good_path_skip_role_creation( def test__upload_clienterror(project): project.type_name = TYPE_NAME + project.artifact_type = ARTIFACT_TYPE_RESOURCE project.schema = {} mock_cfn_client = MagicMock(spec=["register_type"]) @@ -856,6 +1094,53 @@ def test__upload_clienterror(project): ) +def test__upload_clienterror_module(project): + project.type_name = MODULE_TYPE_NAME + project.artifact_type = ARTIFACT_TYPE_MODULE + project.schema = {} + + mock_cfn_client = MagicMock(spec=["register_type"]) + mock_cfn_client.register_type.side_effect = ClientError( + BLANK_CLIENT_ERROR, "RegisterType" + ) + fileobj = object() + + patch_sdk = patch("rpdk.core.project.create_sdk_session", autospec=True) + patch_uploader = patch.object(Uploader, "upload", return_value="url") + patch_role_arn = patch.object( + Uploader, "get_log_delivery_role_arn", return_value="some-log-role-arn" + ) + patch_uuid = patch("rpdk.core.project.uuid4", autospec=True, return_value="foo") + + with patch_sdk as mock_sdk, patch_uploader as mock_upload_method, patch_role_arn as mock_role_arn_method: # noqa: B950 as it conflicts with formatting rules # pylint: disable=C0301 + mock_session = mock_sdk.return_value + mock_session.client.side_effect = [mock_cfn_client, MagicMock()] + with patch_uuid as mock_uuid, pytest.raises(DownstreamError): + project._upload( + fileobj, + endpoint_url=None, + region_name=None, + role_arn=None, + use_role=False, + set_default=True, + ) + + mock_sdk.assert_called_once_with(None) + mock_upload_method.assert_called_once_with(project.hypenated_name, fileobj) + mock_role_arn_method.assert_called_once_with() + mock_uuid.assert_called_once_with() + 
mock_cfn_client.register_type.assert_called_once_with( + Type="MODULE", + TypeName=project.type_name, + SchemaHandlerPackage="url", + ClientRequestToken=mock_uuid.return_value, + LoggingConfig={ + "LogRoleArn": "some-log-role-arn", + "LogGroupName": "aws-color-red-module-logs", + }, + ) + + def test__wait_for_registration_set_default(project): mock_cfn_client = MagicMock( spec=["describe_type_registration", "set_type_default_version", "get_waiter"] @@ -1050,6 +1335,7 @@ def test_generate_image_build_config_plugin_not_supported(project): def test__write_settings_null_executable_entrypoint(project): project.type_name = TYPE_NAME + project.artifact_type = ARTIFACT_TYPE_RESOURCE project.runtime = RUNTIME project.language = LANGUAGE project.executable_entrypoint = None @@ -1062,6 +1348,7 @@ def test__write_settings_null_executable_entrypoint(project): def test__write_settings_nonnull_executable_entrypoint(project): project.type_name = TYPE_NAME + project.artifact_type = ARTIFACT_TYPE_RESOURCE project.runtime = RUNTIME project.language = LANGUAGE project.executable_entrypoint = "executable_entrypoint" diff --git a/tests/test_test.py b/tests/test_test.py index 1f21fd27..e09e4bdd 100644 --- a/tests/test_test.py +++ b/tests/test_test.py @@ -11,7 +11,7 @@ from rpdk.core.cli import EXIT_UNHANDLED_EXCEPTION, main from rpdk.core.contract.interface import Action from rpdk.core.exceptions import SysExitRecommendedError -from rpdk.core.project import Project +from rpdk.core.project import ARTIFACT_TYPE_MODULE, ARTIFACT_TYPE_RESOURCE, Project from rpdk.core.test import ( DEFAULT_ENDPOINT, DEFAULT_FUNCTION, @@ -103,6 +103,7 @@ def test_test_command_happy_path( mock_project.schema = SCHEMA mock_project.root = base mock_project.executable_entrypoint = None + mock_project.artifact_type = ARTIFACT_TYPE_RESOURCE patch_project = patch( "rpdk.core.test.Project", autospec=True, return_value=mock_project @@ -152,6 +153,7 @@ def test_test_command_return_code_on_error(): mock_project.root = 
None mock_project.schema = SCHEMA mock_project.executable_entrypoint = None + mock_project.artifact_type = ARTIFACT_TYPE_RESOURCE patch_project = patch( "rpdk.core.test.Project", autospec=True, return_value=mock_project ) @@ -165,6 +167,17 @@ def test_test_command_return_code_on_error(): assert excinfo.value.code != EXIT_UNHANDLED_EXCEPTION +def test_test_command_module_project_succeeds(): + mock_project = Mock(spec=Project) + + mock_project.artifact_type = ARTIFACT_TYPE_MODULE + patch_project = patch( + "rpdk.core.test.Project", autospec=True, return_value=mock_project + ) + with patch_project: + main(args_in=["test"]) + + def test_temporary_ini_file(): with temporary_ini_file() as path_str: assert isinstance(path_str, str) diff --git a/tests/utils.py b/tests/utils.py index 79d63d59..242384f1 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -95,16 +95,18 @@ def get_mock_project(): return (mock_project, patch_project) -def get_args(language=None, type_name=None): +def get_args(language=None, type_name=None, artifact_type=None): args = Mock( spec_set=[ "language", "type_name", + "artifact_type", ] ) args.language = language args.type_name = type_name + args.artifact_type = artifact_type return args diff --git a/tests/utils/test_init_utils.py b/tests/utils/test_init_utils.py new file mode 100644 index 00000000..5e71220f --- /dev/null +++ b/tests/utils/test_init_utils.py @@ -0,0 +1,92 @@ +from unittest.mock import Mock, patch + +import pytest + +from rpdk.core.exceptions import WizardValidationError +from rpdk.core.utils.init_utils import ( + init_artifact_type, + input_with_validation, + validate_artifact_type, + validate_yes, +) +from tests.test_init import ERROR, PROMPT + + +def test_input_artifact_type(): + artifact_type = "MODULE" + patch_input = patch("rpdk.core.utils.init_utils.input", return_value=artifact_type) + with patch_input as mock_input: + assert init_artifact_type() == artifact_type + mock_input.assert_called_once() + + +def 
test_input_modules_bad_arg_but_valid_input():
+    artifact_type = "MODULE"
+    patch_input = patch("rpdk.core.utils.init_utils.input", return_value=artifact_type)
+    mock_args = Mock()
+    mock_args.artifact_type = "Not a valid type"
+    with patch_input as mock_input:
+        assert init_artifact_type(mock_args) == artifact_type
+    mock_input.assert_called_once()
+
+
+def test_input_with_validation_valid_first_try(capsys):
+    sentinel1 = object()
+    sentinel2 = object()
+
+    validator = Mock(return_value=sentinel1)
+    with patch(
+        "rpdk.core.utils.init_utils.input", return_value=sentinel2
+    ) as mock_input:
+        ret = input_with_validation(PROMPT, validator)
+
+    mock_input.assert_called_once_with()
+    validator.assert_called_once_with(sentinel2)
+    assert ret is sentinel1
+
+    out, err = capsys.readouterr()
+    assert not err
+    assert PROMPT in out
+
+
+def test_input_with_validation_valid_second_try(capsys):
+    def mock_validator(value):
+        if value == ERROR:
+            raise WizardValidationError(ERROR)
+        return value
+
+    sentinel = object()
+
+    with patch(
+        "rpdk.core.utils.init_utils.input", side_effect=(ERROR, sentinel)
+    ) as mock_input:
+        ret = input_with_validation(PROMPT, mock_validator)
+
+    assert mock_input.call_count == 2
+    assert ret is sentinel
+
+    out, err = capsys.readouterr()
+    assert not err
+    assert ERROR in out
+
+
+def test_validate_artifact_type_valid():
+    assert validate_artifact_type("m") == "MODULE"
+    assert validate_artifact_type("module") == "MODULE"
+    assert validate_artifact_type("r") == "RESOURCE"
+    assert validate_artifact_type("resource") == "RESOURCE"
+
+
+def test_validate_artifact_type_invalid():
+    with pytest.raises(WizardValidationError):
+        validate_artifact_type("invalid_type")
+
+
+@pytest.mark.parametrize("value", ("y", "yes", "Y", "YES", "yEs", "YeS"))
+def test_validate_yes_yes(value):
+    assert validate_yes(value)
+
+
+@pytest.mark.parametrize("value", ("n", "N", "no", "NO", "yesn't"))
+def test_validate_yes_no(value):
+    assert not 
validate_yes(value)