From 0de645cf3d70337b2b85cb07298934001c9d5d0a Mon Sep 17 00:00:00 2001 From: Oleg Zasymenko Date: Thu, 13 May 2021 11:17:41 +0300 Subject: [PATCH] CORE-210 pyiso8583 support (#112) * CORE-210 iso8583 test * Iso parser running * removed odd imports from iso test * decode test working * a minor change * reverted an accident * mutalibility from thread * pyiso8583 test parser * a minor * renamed ISO decoder * tests renamed * set union * no 2 field * fixed by mahmoudimus * encode first attempt * fields encoding working * fixed bitmap processing * bitmap fix * a minor * encode/decode with test_payload test fix * test renaming * enc_field_len_str fix * test encoder bitmaps * spec star * added two tests of encoder * removed jpos support ( * an import fix * unhexlify * deleted odd file * no header in spec test * test_non_string_field_keys --- .../main/resources/vendor/ISO8583Decoder.star | 515 ++++ .../main/resources/vendor/ISO8583Encoder.star | 455 +++ .../main/resources/vendor/ISO8583Specs.star | 2289 ++++++++++++++ .../quick_tests/test_ISO8583Encode.star | 2632 +++++++++++++++++ .../resources/quick_tests/test_iso8583.star | 985 ++++++ 5 files changed, 6876 insertions(+) create mode 100644 larky/src/main/resources/vendor/ISO8583Decoder.star create mode 100644 larky/src/main/resources/vendor/ISO8583Encoder.star create mode 100755 larky/src/main/resources/vendor/ISO8583Specs.star create mode 100644 larky/src/test/resources/quick_tests/test_ISO8583Encode.star create mode 100644 larky/src/test/resources/quick_tests/test_iso8583.star diff --git a/larky/src/main/resources/vendor/ISO8583Decoder.star b/larky/src/main/resources/vendor/ISO8583Decoder.star new file mode 100644 index 000000000..4b630ee3d --- /dev/null +++ b/larky/src/main/resources/vendor/ISO8583Decoder.star @@ -0,0 +1,515 @@ +load("@stdlib//larky", larky="larky") +load("@stdlib//builtins","builtins") +load("@stdlib//sets", "sets") +load("@stdlib//types", types="types") +load("@stdlib//codecs", "codecs") 
+load("@stdlib//binascii", unhexlify="unhexlify", hexlify="hexlify") + + +def _decode(bytes, packager): + result = decode(bytes, packager) + # print(result) + return result + +Decoder = larky.struct( + decode=_decode, +) + + +# DecodedDict = Dict[str, str] +# EncodedDict = Dict[str, Dict[str, bytes]] +# SpecDict = Mapping[str, Mapping[str, Any]] +# def DecodeError(msg, +# s, +# doc_dec, +# doc_enc, +# pos, +# field, +# ): +# r"""Subclass of ValueError that describes ISO8583 decoding error. +# +# Attributes +# ---------- +# msg : str +# The unformatted error message +# s : bytes or bytearray +# The ISO8583 bytes instance being parsed +# doc_dec : dict +# Dict containing partially decoded ISO8583 data +# doc_enc : dict +# Dict containing partially encoded ISO8583 data +# pos : int +# The start index where ISO8583 bytes data failed parsing +# field : str +# The ISO8583 field where parsing failed +# """ +# +# def __init__( +# msg, +# s, +# doc_dec, +# doc_enc, +# pos, +# field, +# ): +# errmsg = msg+"field "+field+" pos"+ pos +# ValueError.__init__(self, errmsg) +# self.msg = msg +# self.s = s +# self.doc_dec = doc_dec +# self.doc_enc = doc_enc +# self.field = field +# self.pos = pos +# self = __init__(msg, s, doc_dec, doc_enc, pos, field) +# +# def __reduce__( +# ) : +# return ( +# self.__class__, +# (self.msg, self.s, self.doc_dec, self.doc_enc, self.pos, self.field), +# ) +# self.__reduce__ = __reduce__ +# return self + + +def decode( + s, spec +) : + r"""Deserialize a bytes or bytearray instance containing + ISO8583 data to a Python dict. + + Parameters + ---------- + s : bytes or bytearray + Encoded ISO8583 data + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. 
+ + Returns + ------- + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + + Raises + ------ + DecodeError + An error decoding ISO8583 bytearray + TypeError + `s` must be a bytes or bytearray instance + + Examples + -------- + >>> import pprint + >>> import iso8583 + >>> from iso8583.specs import default_ascii as spec + >>> s = b"02004010100000000000161234567890123456123456111" + >>> doc_dec, doc_enc = iso8583.decode(s, spec) + >>> pprint.pprint(doc_dec) + {'12': '123456', + '2': '1234567890123456', + '20': '111', + 'p': '4010100000000000', + 't': '0200'} + """ + + if not types.is_bytelike(s): + fail(" TypeError(\n f\"Encoded ISO8583 data must be bytes or bytearray, not {s.__class__.__name__}\"\n )") + + doc_dec= {} + doc_enc= {} + fields = sets.make() + idx = 0 + + idx = _decode_header(s, doc_dec, doc_enc, idx, spec) + idx = _decode_type(s, doc_dec, doc_enc, idx, spec) + idx, fields = _decode_bitmaps(s, doc_dec, doc_enc, idx, spec, fields) + + # `field_key` can be used to throw an exception after the loop. + # So, create it here in case the `fields` set is empty + # and never enters the loop to create the variable. + # Set `field_key` to the last mandatory one: primary bitmap. + field_key = "p" + + for field_key in [str(i) for i in sorted(sets.to_list(fields))]: + # Secondary bitmap is already decoded in _decode_bitmaps + if field_key == "1": + continue + idx = _decode_field(s, doc_dec, doc_enc, idx, field_key, spec) + + if idx != len(s): + fail(" DecodeError(\n \"Extra data after last field\", s, doc_dec, doc_enc, idx, field_key\n )") + + return doc_dec, doc_enc + + +# +# Private interface +# + + +def _decode_header( + s, + doc_dec, + doc_enc, + idx, + spec, +): + r"""Decode ISO8583 header data if present. 
+ + Parameters + ---------- + s : bytes or bytearray + Encoded ISO8583 data + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + idx : int + Current index in ISO8583 byte array + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + + Returns + ------- + int + Index in ISO8583 byte array where parsing of the header ended + + Raises + ------ + DecodeError + An error decoding ISO8583 bytearray. + """ + + # Header is not expected according to specifications + if spec["h"]["max_len"] <= 0: + return idx + + return _decode_field(s, doc_dec, doc_enc, idx, "h", spec) + + +def _decode_type( + s, + doc_dec, + doc_enc, + idx, + spec, +) : + r"""Decode ISO8583 message type. + + Parameters + ---------- + s : bytes or bytearray + Encoded ISO8583 data + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + idx : int + Current index in ISO8583 byte array + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + + Returns + ------- + int + Index in ISO8583 byte array where parsing of message type ended + + Raises + ------ + DecodeError + An error decoding ISO8583 bytearray. 
+ """ + + # Message type is a set length in ISO8583 + if spec["t"]["data_enc"] == "b": + f_len = 2 + else: + f_len = 4 + + doc_dec["t"] = "" + doc_enc["t"] = {"len": bytes(r"", encoding='utf-8'), "data": bytes(s[idx : idx + f_len])} + + if len(s[idx : idx + f_len]) != f_len: + fail(" DecodeError(\n f\"Field data is {len(s[idx:idx + f_len])} bytes, expecting {f_len}\",\n s,\n doc_dec,\n doc_enc,\n idx,\n \"t\",\n )") + + if spec["t"]["data_enc"] == "b": + doc_dec["t"] = hexlify(s[idx : idx + f_len]).upper() + else: + doc_dec["t"] = s[idx : idx + f_len].decode(spec["t"]["data_enc"]) + + + return idx + f_len + + +def _decode_bitmaps( + s, + doc_dec, + doc_enc, + idx, + spec, + fields, +) : + r"""Decode ISO8583 primary and secondary bitmaps. + + Parameters + ---------- + s : bytes or bytearray + Encoded ISO8583 data + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + idx : int + Current index in ISO8583 byte array + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + fields: set + Will not be mutated, but will be returned in a tuple + populated with enabled field numbers + + Returns + ------- + Tuple containing the following: + + int + Index in ISO8583 byte array where parsing of bitmaps ended + + fields: set + Will be populated with enabled field numbers + + Raises + ------ + DecodeError + An error decoding ISO8583 bytearray. 
+ """ + + # Primary bitmap is a set length in ISO8583 + if spec["p"]["data_enc"] == "b": + f_len = 8 + else: + f_len = 16 + + doc_dec["p"] = "" + doc_enc["p"] = {"len": bytes(r"", encoding='utf-8'), "data": bytes(s[idx : idx + f_len])} + + if len(s[idx : idx + f_len]) != f_len: + fail(" DecodeError(\n f\"Field data is {len(s[idx:idx + f_len])} bytes, expecting {f_len}\",\n s,\n doc_dec,\n doc_enc,\n idx,\n \"p\",\n )") + + if spec["p"]["data_enc"] == "b": + doc_dec["p"] = hexlify(s[idx : idx + f_len]).upper() + bm = s[idx : idx + f_len] + else: + doc_dec["p"] = s[idx : idx + f_len].decode(spec["p"]["data_enc"]) + bm = unhexlify(doc_dec["p"]) + + + fields = sets.union( + fields, + sets.make([ + byte_idx * 8 + bit + for bit in range(1, 9) + for byte_idx, byte in enumerate(bm) + if byte >> (8 - bit) & 1 + ]) + ) + + idx += f_len + + # No need to produce secondary bitmap if it's not required + # if 1 not in fields: + if not sets.contains(fields, 1): + return idx + + # Decode secondary bitmap + # Secondary bitmap is a set length in ISO8583 + if spec["1"]["data_enc"] == "b": + f_len = 8 + else: + f_len = 16 + + doc_dec["1"] = "" + doc_enc["1"] = {"len": bytes(r"", encoding='utf-8'), "data": bytes(s[idx : idx + f_len])} + + if len(s[idx : idx + f_len]) != f_len: + fail(" DecodeError(\n f\"Field data is {len(s[idx:idx + f_len])} bytes, expecting {f_len}\",\n s,\n doc_dec,\n doc_enc,\n idx,\n \"1\",\n )") + + if spec["1"]["data_enc"] == "b": + doc_dec["1"] = s[idx : idx + f_len].hex().upper() + bm = s[idx : idx + f_len] + else: + doc_dec["1"] = s[idx : idx + f_len].decode(spec["1"]["data_enc"]) + bm = unhexlify(doc_dec["1"]) + + fields = sets.union( + fields, + sets.make([ + 64 + byte_idx * 8 + bit + for bit in range(1, 9) + for byte_idx, byte in enumerate(bm) + if byte >> (8 - bit) & 1 + ]) + ) + + return idx + f_len, fields + + +def _decode_field( + s, + doc_dec, + doc_enc, + idx, + field_key, + spec, +): + r"""Decode ISO8583 individual fields. 
+ + Parameters + ---------- + s : bytes or bytearray + Encoded ISO8583 data + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + idx : int + Current index in ISO8583 byte array + field_key : str + Field ID to be decoded + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + + Returns + ------- + int + Index in ISO8583 byte array where parsing of the field ended + + Raises + ------ + DecodeError + An error decoding ISO8583 bytearray. + """ + len_type = spec[field_key]["len_type"] + + # Optional field added in v2.1. Prior specs do not have it. + len_count = spec[field_key].get("len_count", "bytes") + + doc_dec[field_key] = "" + doc_enc[field_key] = {"len": bytes(s[idx : idx + len_type]), "data": bytes(r"", encoding='utf-8')} + + if len(s[idx : idx + len_type]) != len_type: + fail(" DecodeError(\n f\"Field length is {len(s[idx:idx + len_type])} bytes wide, expecting {len_type}\",\n s,\n doc_dec,\n doc_enc,\n idx,\n field_key,\n )") + + # Parse field length if present. + # For fixed-length fields max_len is the length. 
+ if len_type == 0: + enc_field_len = spec[field_key]["max_len"] + # Variable field length + else: + # BCD length + if spec[field_key]["len_enc"] == "b": + enc_field_len = int(s[idx : idx + len_type].hex(), 10) + # Text length + else: + enc_field_len = int( + s[idx : idx + len_type].decode(spec[field_key]["len_enc"]), 10 + ) + + if enc_field_len > spec[field_key]["max_len"]: + fail(" DecodeError(\n f\"Field data is {enc_field_len} {len_count}, larger than maximum {spec[field_key]['max_len']}\",\n s,\n doc_dec,\n doc_enc,\n idx,\n field_key,\n )") + + idx += len_type + + # Do not parse zero-length field + if enc_field_len == 0: + return idx + + # Encoded field length can be in bytes or half bytes (nibbles) + # Convert nibbles to bytes if needed + if len_count == "nibbles": + if enc_field_len & 1: + byte_field_len = (enc_field_len + 1) // 2 + else: + byte_field_len = enc_field_len // 2 + else: + byte_field_len = enc_field_len + + # Parse field data + doc_enc[field_key]["data"] = bytes(s[idx : idx + byte_field_len]) + if len(doc_enc[field_key]["data"]) != byte_field_len: + if len_count == "nibbles": + actual_field_len = len(doc_enc[field_key]["data"]) * 2 + else: + actual_field_len = len(doc_enc[field_key]["data"]) + + fail(" DecodeError(\n f\"Field data is {actual_field_len} {len_count}, expecting {enc_field_len}\",\n s,\n doc_dec,\n doc_enc,\n idx,\n field_key,\n )") + + if spec[field_key]["data_enc"] == "b": + doc_dec[field_key] = doc_enc[field_key]["data"].hex().upper() + if len_count == "nibbles" and enc_field_len & 1: + doc_dec[field_key] = _remove_pad_field( + s, idx, doc_dec, doc_enc, field_key, spec, enc_field_len + ) + else: + doc_dec[field_key] = doc_enc[field_key]["data"].decode( + spec[field_key]["data_enc"] + ) + + return idx + byte_field_len + + +def _remove_pad_field( + s, + idx, + doc_dec, + doc_enc, + field_key, + spec, + enc_field_len, +): + r"""Remove left or right pad from a BCD or hex field. 
+ + Parameters + ---------- + s : bytes or bytearray + Encoded ISO8583 data + idx : int + Current index in ISO8583 byte array + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + field_key : str + Field ID to remove pad from + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + enc_field_len : int + Number of nibbles expected in the field + + Returns + ------- + str + Field data without pad + + Raises + ------ + DecodeError + An error decoding ISO8583 bytearray. + """ + pad = spec[field_key].get("left_pad", "")[:1] + if len(pad) > 0 and doc_dec[field_key][:1] == pad: + return doc_dec[field_key][1:] + + pad = spec[field_key].get("right_pad", "")[:1] + if len(pad) > 0 and doc_dec[field_key][-1:] == pad: + return doc_dec[field_key][:-1] + + fail(" DecodeError(\n f\"Field data is {len(doc_dec[field_key])} nibbles, expecting {enc_field_len}\",\n s,\n doc_dec,\n doc_enc,\n idx,\n field_key,\n )") + diff --git a/larky/src/main/resources/vendor/ISO8583Encoder.star b/larky/src/main/resources/vendor/ISO8583Encoder.star new file mode 100644 index 000000000..2d81eabae --- /dev/null +++ b/larky/src/main/resources/vendor/ISO8583Encoder.star @@ -0,0 +1,455 @@ +load("@stdlib//larky", larky="larky") +load("@stdlib//builtins","builtins") +load("@stdlib//sets", "sets") +load("@stdlib//types", types="types") +load("@stdlib//codecs", "codecs") +load("@stdlib//binascii", unhexlify="unhexlify", hexlify="hexlify") +load("@vendor//escapes", "escapes") + + +# Tests of 'bytes' (immutable byte strings). +b = builtins.b + +def _encode(bytes, packager): + result = encode(bytes, packager) + return result + +Encoder = larky.struct( + encode=_encode, +) + +# def EncodeError(msg: str, doc_dec: DecodedDict, doc_enc: EncodedDict, field: str +# ): +# r"""Subclass of ValueError that describes ISO8583 encoding error. 
+# +# Attributes +# ---------- +# msg : str +# The unformatted error message +# doc_dec : dict +# Dict containing decoded ISO8583 data being encoded +# doc_enc : dict +# Dict containing partially encoded ISO8583 data +# field : str +# The ISO8583 field where parsing failed +# """ +# +# def __init__( +# msg: str, doc_dec: DecodedDict, doc_enc: EncodedDict, field: str +# ): +# errmsg = f"{msg}: field {field}" +# ValueError.__init__(self, errmsg) +# self.msg = msg +# self.doc_dec = doc_dec +# self.doc_enc = doc_enc +# self.field = field +# self = __init__(msg, doc_dec, doc_enc, field) +# +# def __reduce__( +# ) -> Tuple[Type["EncodeError"], Tuple[str, DecodedDict, EncodedDict, str]]: +# return self.__class__, (self.msg, self.doc_dec, self.doc_enc, self.field) +# self.__reduce__ = __reduce__ +# return self + + +def encode(doc_dec, spec): + r"""Serialize Python dict containing ISO8583 data to a bytearray. + + Parameters + ---------- + doc_dec : dict + Dict containing decoded ISO8583 data + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + + Returns + ------- + s : bytearray + Encoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + + Raises + ------ + EncodeError + An error encoding ISO8583 bytearray + TypeError + `doc_dec` must be a dict instance + + Examples + -------- + >>> import iso8583 + >>> from iso8583.specs import default_ascii as spec + >>> doc_dec = { + ... 't': '0210', + ... '3': '111111', + ... 
'39': '05'} + >>> s, doc_enc = iso8583.encode(doc_dec, spec) + >>> s + bytearray(b'0210200000000200000011111105') + """ + + if not types.is_dict(doc_dec): + fail(" TypeError(\n f\"Decoded ISO8583 data must be dict, not {doc_dec.__class__.__name__}\"\n )") + + s = bytearray() + doc_enc = {} + fields = sets.make() + s += _encode_header(doc_dec, doc_enc, spec) + s += _encode_type(doc_dec, doc_enc, spec) + results, fields = _encode_bitmaps(doc_dec, doc_enc, spec, fields) + s += results + + for field_key in [str(i) for i in sorted(sets.to_list(fields))]: + # Secondary bitmap is already encoded in _encode_bitmaps + if field_key == "1": + continue + s += _encode_field(doc_dec, doc_enc, field_key, spec) + + return s, doc_enc + + +# +# Private interface +# + + +def _encode_header(doc_dec, doc_enc, spec): + r"""Encode ISO8583 header data if present from `d["h"]`. + + Parameters + ---------- + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + + Returns + ------- + bytes + Encoded ISO8583 header data + + Raises + ------ + EncodeError + An error encoding ISO8583 bytearray. + """ + + # Header is not expected according to specifications + if spec["h"]["max_len"] <= 0: + return bytes(r"", encoding='utf-8') + + # Header data is a required field. + if "h" not in doc_dec: + fail("EncodeError('Field data is required according to specifications: field h')") + + return _encode_field(doc_dec, doc_enc, "h", spec) + + +def _encode_type(doc_dec, doc_enc, spec): + r"""Encode ISO8583 message type from `d["t"]`. + + Parameters + ---------- + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. 
+ + Returns + ------- + bytes + Encoded ISO8583 message type data + + Raises + ------ + EncodeError + An error encoding ISO8583 bytearray. + """ + + # Message type is a required field. + if "t" not in doc_dec: + fail(" EncodeError(\"Field data is required\", doc_dec, doc_enc, \"t\")") + + # Message type is a set length in ISO8583 + if spec["t"]["data_enc"] == "b": + f_len = 2 + else: + f_len = 4 + + doc_enc["t"] = {"len": bytes(r"", encoding='utf-8'), "data": bytes(r"", encoding='utf-8')} + + # try: + if spec["t"]["data_enc"] == "b": + doc_enc["t"]["data"] = unhexlify(doc_dec["ะต"]) + else: + # doc_enc["t"]["data"] = doc_dec["t"].encode(encoding=spec["t"]["data_enc"]) + doc_enc["t"]["data"] = codecs.encode(doc_dec["t"], spec["t"]["data_enc"]) + # except Exception as e: + # raise EncodeError(f"Failed to encode ({e})", doc_dec, doc_enc, "t") from None + + if len(doc_enc["t"]["data"]) != f_len: + fail(" EncodeError(\n f\"Field data is {len(doc_enc['t']['data'])} bytes, expecting {f_len}\",\n doc_dec,\n doc_enc,\n \"t\",\n )") + + return doc_enc["t"]["data"] + + +def _encode_bitmaps( + doc_dec, doc_enc, spec, fields): + r"""Encode ISO8583 primary and secondary bitmap from dictionary keys. + + Parameters + ---------- + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + fields: set + Will be populated with enabled field numbers + + Returns + ------- + bytes + Encoded ISO8583 primary and/or secondary bitmaps data + + Raises + ------ + EncodeError + An error encoding ISO8583 bytearray. 
+ """ + + # Secondary bitmap will be calculated as needed + doc_dec.pop("1", None) + + # Primary and secondary bitmaps will be created from the keys + # try: + # fields.update([int(k) for k in doc_dec.keys() if k.isnumeric()]) + invalid_fields = [] + for k in doc_dec.keys(): + if not types.is_string(k): + invalid_fields.append(k) + if len(invalid_fields) != 0: + fail("Dictionary contains invalid fields {k}: field p".format(k=str(invalid_fields))) + + fields = sets.union( + fields, + sets.make([int(k) for k in doc_dec.keys() if k.isdigit()])) + # except AttributeError: + # raise EncodeError( + # f"Dictionary contains invalid fields {[k for k in doc_dec.keys() if not isinstance(k, str)]}", + # doc_dec, + # doc_enc, + # "p", + # ) from None + + # Bitmap must consist of 1-128 field range + # if not fields.issubset(range(1, 129)): + # fail(" EncodeError(\n f\"Dictionary contains fields outside of 1-128 range {sorted(fields.difference(range(1, 129)))}\",\n doc_dec,\n doc_enc,\n \"p\",\n )") + + # Add secondary bitmap if any 65-128 fields are present + # if not fields.isdisjoint(range(65, 129)): + if not sets.is_subset(sets.make(range(65, 129)), fields): + fields = sets.union(fields, sets.union(sets.make([1]))) + + # Turn on bitmap bits of associated fields. + # There is no need to sort this set because the code below will + # figure out appropriate byte/bit for each field. + s = bytearray(bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])) + for f in sets.to_list(fields): + # Fields start at 1. Make them zero-bound for easier conversion. + f -= 1 + + # Place this particular field in a byte where it belongs. + # E.g. field 8 belongs to byte 0, field 121 belongs to byte 15. + byte = f // 8 + + # Determine bit to enable. ISO8583 bitmaps are left-aligned. + # E.g. fields 1, 9, 17, etc. enable bit 7 in bytes 0, 1, 2, etc. 
+ bit = 7 - (f - byte * 8) + # s[byte] |= 1 << bit + new = s[byte] | (1 << bit) + s = s[0:byte] + bytes([new]) + s[(byte+1):] + + # Encode primary bitmap + doc_dec["p"] = hexlify(s[0:8]).upper() + doc_enc["p"] = {"len": bytes(r"", encoding='utf-8'), "data": bytes(r"", encoding='utf-8')} + + # try: + if spec["p"]["data_enc"] == "b": + doc_enc["p"]["data"] = s[0:8] + else: + # doc_enc["p"]["data"] = doc_dec["p"].encode(spec["p"]["data_enc"]) + doc_enc["p"]["data"] = bytes(doc_dec["p"], encoding=(spec["p"]["data_enc"])) + # except Exception as e: + # raise EncodeError(f"Failed to encode ({e})", doc_dec, doc_enc, "p") from None + + # No need to produce secondary bitmap if it's not required + if not sets.contains(fields, 1): + return doc_enc["p"]["data"], fields + + # Encode secondary bitmap + doc_dec["1"] = hexlify(s[8:16]).upper() + doc_enc["1"] = {"len": bytes(r"", encoding='utf-8'), "data": bytes(r"", encoding='utf-8')} + + # try: + if spec["1"]["data_enc"] == "b": + doc_enc["1"]["data"] = s[8:16] + else: + # doc_enc["1"]["data"] = doc_dec["1"].encode(spec["1"]["data_enc"]) + doc_enc["1"]["data"] = bytes(doc_dec["1"], encoding=(spec["1"]["data_enc"])) + # except Exception as e: + # raise EncodeError(f"Failed to encode ({e})", doc_dec, doc_enc, "1") from None + + return bytearray(doc_enc["p"]["data"]) + bytearray(doc_enc["1"]["data"]), fields + + +def _encode_field(doc_dec, doc_enc, field_key, spec): + r"""Encode ISO8583 individual field from `doc_dec[field_key]`. + + Parameters + ---------- + doc_dec : dict + Dict containing decoded ISO8583 data + doc_enc : dict + Dict containing encoded ISO8583 data + field_key : str + Field ID to be encoded + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. + + Returns + ------- + bytes + Encoded ISO8583 field data + + Raises + ------ + EncodeError + An error encoding ISO8583 bytearray. 
+ """ + + # Encode field data + doc_enc[field_key] = {"len": bytes(r"", encoding='utf-8'), "data": bytes(r"", encoding='utf-8')} + + # Optional field added in v2.1. Prior specs do not have it. + len_count = spec[field_key].get("len_count", "bytes") + + # try: + # Binary data: either hex or BCD + if spec[field_key]["data_enc"] == "b": + if len_count == "nibbles" and len(doc_dec[field_key]) & 1: + doc_enc[field_key]["data"] = codecs.encode( + _add_pad_field(doc_dec, field_key, spec) + ) + else: + doc_enc[field_key]["data"] = codecs.encode(doc_dec[field_key]) + + # Encoded field length can be in bytes or half bytes (nibbles) + if len_count == "nibbles": + enc_field_len = len(doc_dec[field_key]) + else: + enc_field_len = len(doc_enc[field_key]["data"]) + # Text data + else: + doc_enc[field_key]["data"] = codecs.encode(doc_dec[field_key], encoding = spec[field_key]["data_enc"]) + + + # Encoded field length can be in bytes or half bytes (nibbles) + if len_count == "nibbles": + enc_field_len = len(doc_enc[field_key]["data"]) * 2 + else: + enc_field_len = len(doc_enc[field_key]["data"]) + # except Exception as e: + # raise EncodeError( + # f"Failed to encode ({e})", doc_dec, doc_enc, field_key + # ) from None + + len_type = spec[field_key]["len_type"] + + # Handle fixed length field. No need to calculate length. + if len_type == 0: + if enc_field_len != spec[field_key]["max_len"]: + expecting = spec[field_key]["max_len"] + fail( + "EncodeError(Field data is {enc_field_len} {len_count} for field key {field_key}, expecting {expecting})" + .format(enc_field_len=enc_field_len, len_count=len_count, + expecting=expecting, field_key=field_key)) + + doc_enc[field_key]["len"] = bytes(r"", encoding='utf-8') + return doc_enc[field_key]["data"] + + # Continue with variable length field. 
+ + if enc_field_len > spec[field_key]["max_len"]: + fail(" EncodeError(\n f\"Field data is {enc_field_len} {len_count}, larger than maximum {spec[field_key]['max_len']}\",\n doc_dec,\n doc_enc,\n field_key,\n )") + + # Encode field length + # try: + if spec[field_key]["len_enc"] == "b": + # Odd field length type is not allowed for purpose of string + # to BCD translation. Double it, e.g.: + # BCD LVAR length \x09 must be string "09" + # BCD LLVAR length \x99 must be string "99" + # BCD LLLVAR length \x09\x99 must be string "0999" + # BCD LLLLVAR length \x99\x99 must be string "9999" + # TODO use unhexlify + doc_enc[field_key]["len"] = bytes.fromhex( + # "{:0{len_type}d}".format(enc_field_len, len_type=len_type * 2) + "{enc_field_len}".format(enc_field_len=enc_field_len) + ) + else: + # "{:0{len_type}d}".format(enc_field_len, len_type=len_type), + enc_field_len_str = "{enc_field_len}".format(enc_field_len=enc_field_len) + if len_type > len(str(enc_field_len)): + enc_field_len_str = "0{enc_field_len}".format(enc_field_len=enc_field_len_str) + doc_enc[field_key]["len"] = bytes( + enc_field_len_str, + spec[field_key]["len_enc"], + ) + # except Exception as e: + # raise EncodeError( + # f"Failed to encode length ({e})", doc_dec, doc_enc, field_key + # ) from None + + return bytearray(doc_enc[field_key]["len"]) + bytearray(doc_enc[field_key]["data"]) + + +def _add_pad_field(doc_dec, field_key, spec): + r"""Pad a BCD or hex field from the left or right. + + Parameters + ---------- + doc_dec : dict + Dict containing decoded ISO8583 data + field_key : str + Field ID to pad + spec : dict + A Python dict defining ISO8583 specification. + See :mod:`iso8583.specs` module for examples. 
+ + Returns + ------- + str + Padded field data + """ + pad= spec[field_key].get("left_pad", "")[:1] + if len(pad) > 0: + return pad + doc_dec[field_key] + + pad = spec[field_key].get("right_pad", "")[:1] + if len(pad) > 0: + return doc_dec[field_key] + pad + + return doc_dec[field_key] + diff --git a/larky/src/main/resources/vendor/ISO8583Specs.star b/larky/src/main/resources/vendor/ISO8583Specs.star new file mode 100755 index 000000000..c6f1a7ebf --- /dev/null +++ b/larky/src/main/resources/vendor/ISO8583Specs.star @@ -0,0 +1,2289 @@ +r"""An ISO8583 spec is a Python dictionary. It describes how each field +needs to be encoded and decoded. + +Fields +------ +Supported fields: + +- **h** - Message Header. If a specification does not have a header then + set **max_len** to ``0`` to disable it. +- **t** - Message Type Identifier +- **p** - Primary Bitmap +- **1** - Secondary Bitmap +- **2** .. **128** - Regular fields + +Mandatory Field Properties +-------------------------- +Each field defines these properties: + +- **data_enc** - field's data encoding type, where: + + - Use ``b`` for binary or Binary-Coded Decimal (BCD) data. For example, + ``ABCD`` hex string is encoded as ``\xAB\xCD`` 2-byte value. Or ``1234`` + numeric string is encoded as ``\x12\x34`` 2-byte BCD value. + - Otherwise, provide any valid Python encoding. For example, ``ascii`` or + ``latin-1`` for ASCII data and ``cp500`` or similar for EBCDIC data. + For a list of possible encodings, see Python standard encodings: + https://docs.python.org/3/library/codecs.html#standard-encodings + +- **len_enc** - field's length encoding type. Follows the same rules as **data_enc**. + Some fields, such as ICC data, could have binary data but ASCII length. + This parameter does not affect fixed-length fields. + +- **len_type** - field's length type: fixed, LVAR, LLVAR, etc. + Expressed as a number of bytes in field length. For example, + ASCII LLVAR length takes up 2 bytes (``b'00' - b'99'``). 
+ BCD LLVAR length can take up only 1 byte (``b'\x00' - b'\x99'``). + Therefore, **len_type** depends on the type of **len_enc** being used. + BCD **len_enc** can fit higher length ranges in fewer bytes. + + +--------------+---------------------------------------+ + | | **len_enc** | + | +----------------+----------------------+ + | **len_type** | ASCII / EBCDIC | Binary-Coded Decimal | + +--------------+----------------+----------------------+ + | Fixed | ``0`` | ``0`` | + +--------------+----------------+----------------------+ + | LVAR | ``1`` | ``1`` | + +--------------+----------------+----------------------+ + | LLVAR | ``2`` | ``1`` | + +--------------+----------------+----------------------+ + | LLLVAR | ``3`` | ``2`` | + +--------------+----------------+----------------------+ + | LLLLVAR | ``4`` | ``2`` | + +--------------+----------------+----------------------+ + +- **max_len** - field's maximum length in bytes or nibbles. For fixed fields + **max_len** defines the length of the field. + +- **desc** - field's description that's printed in a pretty format. + **desc** plays no role in encoding or decoding data. It's safe to omit it + from the specifications. However, if omitted :func:`iso8583.pp` may or may + not work as expected. + +Optional Field Properties +------------------------- +Each field may define these additional properties as needed: + +- **len_count** - specifies if field's length is measured + in bytes or nibbles (half bytes). This parameter affects **max_len**. + + - Use ``bytes`` (default) to measure field length in bytes. + - Use ``nibbles`` to measure field length if nibbles (half bytes). + +- **left_pad** - specifies pad character to be added/removed on the left side + of an odd binary or BCD field without this character being included into + field length. Valid pad character is ``0-9`` for BCD fields and ``0-F`` + for binary fields. 
+ + This option is used only when **data_enc** is set to ``b`` (binary/BCD) + and **len_count** is set to ``nibbles``. This option is meant for + specifications that require odd length binary or BCD data. + +- **right_pad** - same as **left_pad** but it pads on the right side. + Specify either **left_pad** or **right_pad**. If both are specified at + the same time then **left_pad** takes precedence. + +Sample Field Specifications +--------------------------- +Binary-coded decimal primary bitmap:: + + specification["p"] = { + "data_enc": "b", + "len_enc": "b", + "len_type": 0, + "max_len": 8, + "desc": "BCD bitmap, e.g. \x12\x34\x56\x78\x90\xAB\xCD\xEF" + } + +Hex string primary bitmap:: + + specification["p"] = { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 16, + "desc": "Hex string bitmap, e.g 1234567890ABCDEF" + } + +Field 2, a 10-byte BCD fixed length field:: + + specification["2"] = { + "data_enc": "b", + "len_enc": "b", + "len_type": 0, + "max_len": 10, + "desc": "BCD fixed field" + } + +Field 3, an ASCII LLVAR field of maximum 20 bytes:: + + specification["3"] = { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 20, + "desc": "ASCII LLVAR field" + } + +Field 4, an EBCDIC LLLVAR field of maximum 150 bytes:: + + specification["4"] = { + "data_enc": "cp500", + "len_enc": "cp500", + "len_type": 3, + "max_len": 150, + "desc": "EBCDIC LLLVAR field" + } + +Field 5, a BCD LLVAR field measured in nibbles and left-padded with 0. +The field is maximum 20 nibbles:: + + specification["5"] = { + "data_enc": "b", + "len_enc": "b", + "len_type": 1, + "len_count": "nibbles", + "left_pad": "0", + "max_len": 20, + "desc": "BCD LLVAR field measured in nibbles, e.g. 
\x03\x01\x11" + } + +Field 6, a 3-nibble BCD fixed field right-padded with 0:: + + specification["6"] = { + "data_enc": "b", + "len_enc": "b", + "len_type": 0, + "len_count": "nibbles", + "right_pad": "0", + "max_len": 3, + "desc": "BCD fixed field measured in nibbles, e.g. \x11\x10" + } + +Sample Message Specifications +----------------------------- + +ASCII/Binary +~~~~~~~~~~~~ + +Bitmaps, MACs, PIN, and ICC data are in binary:: + + default = { + "h": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 0, "desc": "Message Header"}, + "t": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Message Type"}, + "p": {"data_enc": "b", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Bitmap, Primary"}, + "1": {"data_enc": "b", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Bitmap, Secondary"}, + "2": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 19, "desc": "Primary Account Number (PAN)"}, + "3": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "Processing Code"}, + "4": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Amount, Transaction"}, + "5": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Amount, Settlement"}, + "6": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Amount, Cardholder Billing"}, + "7": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Transmission Date and Time"}, + "8": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Amount, Cardholder Billing Fee"}, + "9": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Conversion Rate, Settlement"}, + "10": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Conversion Rate, Cardholder Billing"}, + "11": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, 
"desc": "System Trace Audit Number"}, + "12": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "Time, Local Transaction"}, + "13": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Local Transaction"}, + "14": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Expiration"}, + "15": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Settlement"}, + "16": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Conversion"}, + "17": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Capture"}, + "18": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Merchant Type"}, + "19": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Acquiring Institution Country Code"}, + "20": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "PAN Country Code"}, + "21": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Forwarding Institution Country Code"}, + "22": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Point-of-Service Entry Mode"}, + "23": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "PAN Sequence Number"}, + "24": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Network International ID (NII)"}, + "25": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "Point-of-Service Condition Code"}, + "26": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "Point-of-Service Capture Code"}, + "27": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 1, "desc": "Authorizing ID Response Length"}, + "28": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 9, "desc": "Amount, Transaction Fee"}, + 
"29": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 9, "desc": "Amount, Settlement Fee"}, + "30": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 9, "desc": "Amount, Transaction Processing Fee"}, + "31": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 9, "desc": "Amount, Settlement Processing Fee"}, + "32": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 11, "desc": "Acquiring Institution ID Code"}, + "33": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 11, "desc": "Forwarding Institution ID Code"}, + "34": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 28, "desc": "Primary Account Number, Extended"}, + "35": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 37, "desc": "Track 2 Data"}, + "36": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 104, "desc": "Track 3 Data"}, + "37": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Retrieval Reference Number"}, + "38": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "Authorization ID Response"}, + "39": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "Response Code"}, + "40": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Service Restriction Code"}, + "41": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Card Acceptor Terminal ID"}, + "42": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 15, "desc": "Card Acceptor ID Code"}, + "43": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 40, "desc": "Card Acceptor Name/Location"}, + "44": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 25, "desc": "Additional Response Data"}, + "45": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 76, "desc": "Track 1 Data"}, + "46": {"data_enc": 
"ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Additional Data - ISO"}, + "47": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Additional Data - National"}, + "48": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Additional Data - Private"}, + "49": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Currency Code, Transaction"}, + "50": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Currency Code, Settlement"}, + "51": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Currency Code, Cardholder Billing"}, + "52": {"data_enc": "b", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "PIN"}, + "53": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Security-Related Control Information"}, + "54": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 240, "desc": "Additional Amounts"}, + "55": {"data_enc": "b", "len_enc": "ascii", "len_type": 3, "max_len": 255, "desc": "ICC data"}, + "56": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved ISO"}, + "57": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved National"}, + "58": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved National"}, + "59": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved National"}, + "60": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved National"}, + "61": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved Private"}, + "62": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved Private"}, + "63": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved 
Private"}, + "64": {"data_enc": "b", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "MAC"}, + "65": {"data_enc": "b", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Bitmap, Extended"}, + "66": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 1, "desc": "Settlement Code"}, + "67": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "Extended Payment Code"}, + "68": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Receiving Institution Country Code"}, + "69": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Settlement Institution Country Code"}, + "70": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Network Management Information Code"}, + "71": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Message Number"}, + "72": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Message Number, Last"}, + "73": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "Date, Action"}, + "74": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Credits, Number"}, + "75": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Credits, Reversal Number"}, + "76": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Debits, Number"}, + "77": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Debits, Reversal Number"}, + "78": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Transfer, Number"}, + "79": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Transfer, Reversal Number"}, + "80": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Inquiries, Number"}, + "81": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, 
"desc": "Authorizations, Number"}, + "82": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Credits, Processing Fee Amount"}, + "83": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Credits, Transaction Fee Amount"}, + "84": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Debits, Processing Fee Amount"}, + "85": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Debits, Transaction Fee Amount"}, + "86": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Credits, Amount"}, + "87": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Credits, Reversal Amount"}, + "88": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Debits, Amount"}, + "89": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Debits, Reversal Amount"}, + "90": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 42, "desc": "Original Data Elements"}, + "91": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 1, "desc": "File Update Code"}, + "92": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "File Security Code"}, + "93": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 5, "desc": "Response Indicator"}, + "94": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 7, "desc": "Service Indicator"}, + "95": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 42, "desc": "Replacement Amounts"}, + "96": {"data_enc": "b", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Message Security Code"}, + "97": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 17, "desc": "Amount, Net Settlement"}, + "98": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 25, "desc": "Payee"}, + "99": {"data_enc": "ascii", 
"len_enc": "ascii", "len_type": 2, "max_len": 11, "desc": "Settlement Institution ID Code"}, + "100": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 11, "desc": "Receiving Institution ID Code"}, + "101": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 17, "desc": "File Name"}, + "102": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 28, "desc": "Account ID 1"}, + "103": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 28, "desc": "Account ID 2"}, + "104": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 100, "desc": "Transaction Description"}, + "105": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "106": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "107": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "108": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "109": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "110": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "111": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "112": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "113": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "114": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "115": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "116": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, 
"max_len": 999, "desc": "Reserved for National Use"}, + "117": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "118": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "119": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "120": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "121": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "122": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "123": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "124": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "125": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "126": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "127": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "128": {"data_enc": "b", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "MAC"} + } + +ASCII +~~~~~ + +All fields are in ASCII:: + + default_ascii = { + "h": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 0, "desc": "Message Header"}, + "t": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Message Type"}, + "p": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Bitmap, Primary"}, + "1": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Bitmap, Secondary"}, + "2": {"data_enc": "ascii", "len_enc": 
"ascii", "len_type": 2, "max_len": 19, "desc": "Primary Account Number (PAN)"}, + "3": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "Processing Code"}, + "4": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Amount, Transaction"}, + "5": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Amount, Settlement"}, + "6": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Amount, Cardholder Billing"}, + "7": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Transmission Date and Time"}, + "8": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Amount, Cardholder Billing Fee"}, + "9": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Conversion Rate, Settlement"}, + "10": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Conversion Rate, Cardholder Billing"}, + "11": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "System Trace Audit Number"}, + "12": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "Time, Local Transaction"}, + "13": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Local Transaction"}, + "14": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Expiration"}, + "15": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Settlement"}, + "16": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Conversion"}, + "17": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Date, Capture"}, + "18": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Merchant Type"}, + "19": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Acquiring Institution 
Country Code"}, + "20": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "PAN Country Code"}, + "21": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Forwarding Institution Country Code"}, + "22": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Point-of-Service Entry Mode"}, + "23": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "PAN Sequence Number"}, + "24": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Network International ID (NII)"}, + "25": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "Point-of-Service Condition Code"}, + "26": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "Point-of-Service Capture Code"}, + "27": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 1, "desc": "Authorizing ID Response Length"}, + "28": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 9, "desc": "Amount, Transaction Fee"}, + "29": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 9, "desc": "Amount, Settlement Fee"}, + "30": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 9, "desc": "Amount, Transaction Processing Fee"}, + "31": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 9, "desc": "Amount, Settlement Processing Fee"}, + "32": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 11, "desc": "Acquiring Institution ID Code"}, + "33": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 11, "desc": "Forwarding Institution ID Code"}, + "34": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 28, "desc": "Primary Account Number, Extended"}, + "35": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 37, "desc": "Track 2 Data"}, + "36": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 
104, "desc": "Track 3 Data"}, + "37": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Retrieval Reference Number"}, + "38": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "Authorization ID Response"}, + "39": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "Response Code"}, + "40": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Service Restriction Code"}, + "41": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 8, "desc": "Card Acceptor Terminal ID"}, + "42": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 15, "desc": "Card Acceptor ID Code"}, + "43": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 40, "desc": "Card Acceptor Name/Location"}, + "44": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 25, "desc": "Additional Response Data"}, + "45": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 76, "desc": "Track 1 Data"}, + "46": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Additional Data - ISO"}, + "47": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Additional Data - National"}, + "48": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Additional Data - Private"}, + "49": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Currency Code, Transaction"}, + "50": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Currency Code, Settlement"}, + "51": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Currency Code, Cardholder Billing"}, + "52": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "PIN"}, + "53": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Security-Related Control Information"}, 
+ "54": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 240, "desc": "Additional Amounts"}, + "55": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 510, "desc": "ICC data"}, + "56": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved ISO"}, + "57": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved National"}, + "58": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved National"}, + "59": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved National"}, + "60": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved National"}, + "61": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved Private"}, + "62": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved Private"}, + "63": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved Private"}, + "64": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "MAC"}, + "65": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Bitmap, Extended"}, + "66": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 1, "desc": "Settlement Code"}, + "67": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "Extended Payment Code"}, + "68": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Receiving Institution Country Code"}, + "69": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Settlement Institution Country Code"}, + "70": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 3, "desc": "Network Management Information Code"}, + "71": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Message 
Number"}, + "72": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 4, "desc": "Message Number, Last"}, + "73": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 6, "desc": "Date, Action"}, + "74": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Credits, Number"}, + "75": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Credits, Reversal Number"}, + "76": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Debits, Number"}, + "77": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Debits, Reversal Number"}, + "78": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Transfer, Number"}, + "79": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Transfer, Reversal Number"}, + "80": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Inquiries, Number"}, + "81": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 10, "desc": "Authorizations, Number"}, + "82": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Credits, Processing Fee Amount"}, + "83": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Credits, Transaction Fee Amount"}, + "84": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Debits, Processing Fee Amount"}, + "85": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 12, "desc": "Debits, Transaction Fee Amount"}, + "86": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Credits, Amount"}, + "87": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Credits, Reversal Amount"}, + "88": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Debits, Amount"}, + "89": {"data_enc": "ascii", "len_enc": "ascii", 
"len_type": 0, "max_len": 16, "desc": "Debits, Reversal Amount"}, + "90": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 42, "desc": "Original Data Elements"}, + "91": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 1, "desc": "File Update Code"}, + "92": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 2, "desc": "File Security Code"}, + "93": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 5, "desc": "Response Indicator"}, + "94": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 7, "desc": "Service Indicator"}, + "95": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 42, "desc": "Replacement Amounts"}, + "96": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 16, "desc": "Message Security Code"}, + "97": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 17, "desc": "Amount, Net Settlement"}, + "98": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 0, "max_len": 25, "desc": "Payee"}, + "99": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 11, "desc": "Settlement Institution ID Code"}, + "100": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 11, "desc": "Receiving Institution ID Code"}, + "101": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 17, "desc": "File Name"}, + "102": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 28, "desc": "Account ID 1"}, + "103": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 2, "max_len": 28, "desc": "Account ID 2"}, + "104": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 100, "desc": "Transaction Description"}, + "105": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "106": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "107": {"data_enc": "ascii", 
"len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "108": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "109": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "110": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "111": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for ISO Use"}, + "112": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "113": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "114": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "115": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "116": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "117": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "118": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "119": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for National Use"}, + "120": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "121": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "122": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "123": {"data_enc": "ascii", "len_enc": "ascii", "len_type": 3, "max_len": 999, "desc": "Reserved for Private Use"}, + "124": 
# ISO8583 field specifications (port of pyiso8583's iso8583.specs).
#
# Each entry maps a field id ("h" = message header, "t" = message type,
# "p" = primary bitmap, "1".."128" = data elements) to:
#   data_enc - encoding of the field value: "ascii" or "b" (raw binary)
#   len_enc  - encoding of the variable-length prefix (always "ascii" here)
#   len_type - digits in the length prefix: 0 = fixed, 2 = LLVAR, 3 = LLLVAR
#   max_len  - maximum (or fixed) field length
#   desc     - human-readable field description
#
# Two variants are derived from one shared table:
#   default       - ASCII fields; bitmaps, MACs, PIN, and ICC data in binary
#   default_ascii - every field in ASCII; the binary fields' max_len doubles
#                   (8 -> 16, 255 -> 510; presumably carried as hex text)


def _entry(data_enc, len_type, max_len, desc):
    """Return one field-spec dict; len_enc is "ascii" for every field."""
    return {
        "data_enc": data_enc,
        "len_enc": "ascii",
        "len_type": len_type,
        "max_len": max_len,
        "desc": desc,
    }


# Fields whose data is binary in the `default` (ASCII/Binary) variant.
_BINARY_FIELDS = ("p", "1", "52", "55", "64", "65", "96", "128")

# (field id, len_type, max_len, desc) in ISO8583 field order; max_len holds
# the binary-variant value for the fields listed in _BINARY_FIELDS.
_FIELDS = [
    ("h", 0, 0, "Message Header"),
    ("t", 0, 4, "Message Type"),
    ("p", 0, 8, "Bitmap, Primary"),
    ("1", 0, 8, "Bitmap, Secondary"),
    ("2", 2, 19, "Primary Account Number (PAN)"),
    ("3", 0, 6, "Processing Code"),
    ("4", 0, 12, "Amount, Transaction"),
    ("5", 0, 12, "Amount, Settlement"),
    ("6", 0, 12, "Amount, Cardholder Billing"),
    ("7", 0, 10, "Transmission Date and Time"),
    ("8", 0, 8, "Amount, Cardholder Billing Fee"),
    ("9", 0, 8, "Conversion Rate, Settlement"),
    ("10", 0, 8, "Conversion Rate, Cardholder Billing"),
    ("11", 0, 6, "System Trace Audit Number"),
    ("12", 0, 6, "Time, Local Transaction"),
    ("13", 0, 4, "Date, Local Transaction"),
    ("14", 0, 4, "Date, Expiration"),
    ("15", 0, 4, "Date, Settlement"),
    ("16", 0, 4, "Date, Conversion"),
    ("17", 0, 4, "Date, Capture"),
    ("18", 0, 4, "Merchant Type"),
    ("19", 0, 3, "Acquiring Institution Country Code"),
    ("20", 0, 3, "PAN Country Code"),
    ("21", 0, 3, "Forwarding Institution Country Code"),
    ("22", 0, 3, "Point-of-Service Entry Mode"),
    ("23", 0, 3, "PAN Sequence Number"),
    ("24", 0, 3, "Network International ID (NII)"),
    ("25", 0, 2, "Point-of-Service Condition Code"),
    ("26", 0, 2, "Point-of-Service Capture Code"),
    ("27", 0, 1, "Authorizing ID Response Length"),
    ("28", 0, 9, "Amount, Transaction Fee"),
    ("29", 0, 9, "Amount, Settlement Fee"),
    ("30", 0, 9, "Amount, Transaction Processing Fee"),
    ("31", 0, 9, "Amount, Settlement Processing Fee"),
    ("32", 2, 11, "Acquiring Institution ID Code"),
    ("33", 2, 11, "Forwarding Institution ID Code"),
    ("34", 2, 28, "Primary Account Number, Extended"),
    ("35", 2, 37, "Track 2 Data"),
    ("36", 3, 104, "Track 3 Data"),
    ("37", 0, 12, "Retrieval Reference Number"),
    ("38", 0, 6, "Authorization ID Response"),
    ("39", 0, 2, "Response Code"),
    ("40", 0, 3, "Service Restriction Code"),
    ("41", 0, 8, "Card Acceptor Terminal ID"),
    ("42", 0, 15, "Card Acceptor ID Code"),
    ("43", 0, 40, "Card Acceptor Name/Location"),
    ("44", 2, 25, "Additional Response Data"),
    ("45", 2, 76, "Track 1 Data"),
    ("46", 3, 999, "Additional Data - ISO"),
    ("47", 3, 999, "Additional Data - National"),
    ("48", 3, 999, "Additional Data - Private"),
    ("49", 0, 3, "Currency Code, Transaction"),
    ("50", 0, 3, "Currency Code, Settlement"),
    ("51", 0, 3, "Currency Code, Cardholder Billing"),
    ("52", 0, 8, "PIN"),
    ("53", 0, 16, "Security-Related Control Information"),
    ("54", 3, 240, "Additional Amounts"),
    ("55", 3, 255, "ICC data"),
    ("56", 3, 999, "Reserved ISO"),
    ("57", 3, 999, "Reserved National"),
    ("58", 3, 999, "Reserved National"),
    ("59", 3, 999, "Reserved National"),
    ("60", 3, 999, "Reserved National"),
    ("61", 3, 999, "Reserved Private"),
    ("62", 3, 999, "Reserved Private"),
    ("63", 3, 999, "Reserved Private"),
    ("64", 0, 8, "MAC"),
    ("65", 0, 8, "Bitmap, Extended"),
    ("66", 0, 1, "Settlement Code"),
    ("67", 0, 2, "Extended Payment Code"),
    ("68", 0, 3, "Receiving Institution Country Code"),
    ("69", 0, 3, "Settlement Institution Country Code"),
    ("70", 0, 3, "Network Management Information Code"),
    ("71", 0, 4, "Message Number"),
    ("72", 0, 4, "Message Number, Last"),
    ("73", 0, 6, "Date, Action"),
    ("74", 0, 10, "Credits, Number"),
    ("75", 0, 10, "Credits, Reversal Number"),
    ("76", 0, 10, "Debits, Number"),
    ("77", 0, 10, "Debits, Reversal Number"),
    ("78", 0, 10, "Transfer, Number"),
    ("79", 0, 10, "Transfer, Reversal Number"),
    ("80", 0, 10, "Inquiries, Number"),
    ("81", 0, 10, "Authorizations, Number"),
    ("82", 0, 12, "Credits, Processing Fee Amount"),
    ("83", 0, 12, "Credits, Transaction Fee Amount"),
    ("84", 0, 12, "Debits, Processing Fee Amount"),
    ("85", 0, 12, "Debits, Transaction Fee Amount"),
    ("86", 0, 16, "Credits, Amount"),
    ("87", 0, 16, "Credits, Reversal Amount"),
    ("88", 0, 16, "Debits, Amount"),
    ("89", 0, 16, "Debits, Reversal Amount"),
    ("90", 0, 42, "Original Data Elements"),
    ("91", 0, 1, "File Update Code"),
    ("92", 0, 2, "File Security Code"),
    ("93", 0, 5, "Response Indicator"),
    ("94", 0, 7, "Service Indicator"),
    ("95", 0, 42, "Replacement Amounts"),
    ("96", 0, 8, "Message Security Code"),
    ("97", 0, 17, "Amount, Net Settlement"),
    ("98", 0, 25, "Payee"),
    ("99", 2, 11, "Settlement Institution ID Code"),
    ("100", 2, 11, "Receiving Institution ID Code"),
    ("101", 2, 17, "File Name"),
    ("102", 2, 28, "Account ID 1"),
    ("103", 2, 28, "Account ID 2"),
    ("104", 3, 100, "Transaction Description"),
    ("105", 3, 999, "Reserved for ISO Use"),
    ("106", 3, 999, "Reserved for ISO Use"),
    ("107", 3, 999, "Reserved for ISO Use"),
    ("108", 3, 999, "Reserved for ISO Use"),
    ("109", 3, 999, "Reserved for ISO Use"),
    ("110", 3, 999, "Reserved for ISO Use"),
    ("111", 3, 999, "Reserved for ISO Use"),
    ("112", 3, 999, "Reserved for National Use"),
    ("113", 3, 999, "Reserved for National Use"),
    ("114", 3, 999, "Reserved for National Use"),
    ("115", 3, 999, "Reserved for National Use"),
    ("116", 3, 999, "Reserved for National Use"),
    ("117", 3, 999, "Reserved for National Use"),
    ("118", 3, 999, "Reserved for National Use"),
    ("119", 3, 999, "Reserved for National Use"),
    ("120", 3, 999, "Reserved for Private Use"),
    ("121", 3, 999, "Reserved for Private Use"),
    ("122", 3, 999, "Reserved for Private Use"),
    ("123", 3, 999, "Reserved for Private Use"),
    ("124", 3, 999, "Reserved for Private Use"),
    ("125", 3, 999, "Reserved for Private Use"),
    ("126", 3, 999, "Reserved for Private Use"),
    ("127", 3, 999, "Reserved for Private Use"),
    ("128", 0, 8, "MAC"),
]


def _build(ascii_only):
    """Build a spec dict from _FIELDS, preserving field order.

    When ascii_only is true, binary fields are emitted as "ascii" with a
    doubled max_len; otherwise they are emitted as "b" unchanged.  (Loops
    must live inside a function: Starlark forbids top-level `for`.)
    """
    spec = {}
    for field, len_type, max_len, desc in _FIELDS:
        is_binary = field in _BINARY_FIELDS
        if is_binary and ascii_only:
            spec[field] = _entry("ascii", len_type, max_len * 2, desc)
        elif is_binary:
            spec[field] = _entry("b", len_type, max_len, desc)
        else:
            spec[field] = _entry("ascii", len_type, max_len, desc)
    return spec


# ASCII/Binary
# Bitmaps, MACs, PIN, and ICC data are in binary
default = _build(False)

# ASCII
# All fields are in ASCII
default_ascii = _build(True)
def test_EncodeError_exception():
    """Encoder.encode must raise EncodeError when a spec-required field is missing.

    The minimal spec declares a mandatory 6-character header ("h"); the
    input dictionary omits it, so encoding must fail on field "h".
    """
    minimal_spec = {
        "h": {"data_enc": "ascii", "len_type": 0, "max_len": 6},
        "t": {"data_enc": "ascii"},
        "p": {"data_enc": "ascii"},
        "1": {"len_type": 0, "max_len": 0},
    }
    asserts.assert_fails(
        lambda: Encoder.encode({"t": ""}, minimal_spec),
        ".*?Field data is required according to specifications: field h",
    )

# NOTE(review): the upstream pickle round-trip test for EncodeError is not
# ported — pickle is unsupported in Larky.


def test_non_string_field_keys():
    """Encoder.encode must reject input dictionaries with non-string field keys.

    Integer, float, and tuple keys are each reported in the error raised
    while building the bitmap (field "p").
    """
    field_spec = {
        "h": {"data_enc": "ascii", "len_type": 0, "max_len": 6},
        "t": {"data_enc": "ascii"},
        "p": {"data_enc": "b"},
        "2": {"len_type": 2, "max_len": 10, "data_enc": "ascii", "len_enc": "ascii"},
        "3": {"len_type": 2, "max_len": 10, "data_enc": "ascii", "len_enc": "ascii"},
    }

    # (payload, expected error pattern); the float-key case is asserted
    # twice upstream and is kept twice here for exact parity.
    cases = [
        (
            {"h": "header", "t": "0210", 2: "1122"},
            ".*?Dictionary contains invalid fields .2.: field p",
        ),
        (
            {"h": "header", "t": "0210", 2: "1122", 3: "3344"},
            ".*?Dictionary contains invalid fields .2, 3.: field p",
        ),
        (
            {"h": "header", "t": "0210", 2.5: "1122", 3.5: "3344"},
            ".*?Dictionary contains invalid fields .2.5, 3.5.: field p",
        ),
        (
            {"h": "header", "t": "0210", 2.5: "1122", 3.5: "3344"},
            ".*?Dictionary contains invalid fields .2.5, 3.5.: field p",
        ),
        (
            {"h": "header", "t": "0210", (1, 2): "1122", (3, 4): "3344"},
            ".*?Dictionary contains invalid fields ..1, 2., .3, 4..: field p",
        ),
    ]
    for payload, err_pattern in cases:
        # assert_fails invokes the lambda within this iteration, so the
        # loop-variable capture is evaluated before `payload` rebinds.
        asserts.assert_fails(lambda: Encoder.encode(payload, field_spec), err_pattern)

# NOTE(review): the commented-out ports of upstream pytest cases
# (test_input_type, test_header_no_key, test_header_ascii_absent/present,
# test_header_ebcdic_absent) were removed here; recover them from the
# upstream pyiso8583 test suite if/when they are ported to Larky.
0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_header_ebcdic_present(): +# """ +# EBCDIC header is required by spec and provided +# """ +# spec["h"]["data_enc"] = "cp500" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x88, 0x85, 0x81, 0x84, 0x85, 0x99, 0x30, 0x32, 0x30, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x88, 0x85, 0x81, 0x84, 0x85, 0x99]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_header_bdc_absent(): +# """ +# BDC header is not required by spec and not provided +# """ +# 
spec["h"]["data_enc"] = "b" +# spec["h"]["max_len"] = 0 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "", "t": "0200"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x30, 0x32, 0x30, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_header_bcd_present(): +# """ +# BCD header is required by spec and provided +# """ +# spec["h"]["data_enc"] = "b" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "A1A2A3A4A5A6", "t": "0200"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0x30, 0x32, 0x30, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6]) +# asserts.assert_that(doc_dec["h"] == "A1A2A3A4A5A6" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_header_not_required_provided(): +# """ +# String header is not required by spec but provided. +# No error. Header is not included in the message. +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["max_len"] = 0 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x30, 0x32, 0x30, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_header_negative_missing(): +# """ +# String header is required by spec but not provided. +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "", "t": "0200"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 0 bytes, expecting 6: field h" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_header_negative_partial(): +# """ +# String header is required by spec but partially provided. 
+# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "head", "t": "0200"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 4 bytes, expecting 6: field h" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_header_negative_incorrect_encoding(): +# """ +# String header is required by spec and provided. +# However, the spec encoding is not correct +# """ +# spec["h"]["data_enc"] = "invalid" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .unknown encoding: invalid.: field h", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_header_negative_incorrect_ascii_data(): +# """ +# ASCII header is required by spec and provided. +# However, the data is not ASCII +# CPython and PyPy throw differently worded exception +# CPython: 'ascii' codec can't encode characters in position 0-5: ordinal not in range(128) +# PyPy: 'ascii' codec can't encode character '\\xff' in position 0: ordinal not in range(128) +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = { +# "h": bytes([0xff, 0xff, 0xff, 0xff, 0xff, 0xff]).decode("latin-1"), +# "t": "0200", +# } +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .'ascii' codec can't encode character.*: ordinal not in range.128..: field h", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_header_negative_incorrect_bcd_data(): +# """ +# BCD header is required by spec and provided. 
+# However, the data is not hex +# """ +# spec["h"]["data_enc"] = "b" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .non-hexadecimal number found in fromhex.. arg at position 0.: field h", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_header_ascii_over_max(): +# """ +# ASCII variable header is required and over max provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_enc"] = "ascii" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# +# doc_dec = {"h": "header12", "t": "0210"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Field data is 8 bytes, larger than maximum 6: field h", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_header_ascii_present(): +# """ +# ASCII variable header is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_enc"] = "ascii" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x30, 0x36, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes([0x30, 0x36]) +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", 
encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_variable_header_ascii_present_zero_legnth(): +# """ +# ASCII zero-length variable header +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_enc"] = "ascii" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "", "t": "0210"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x30, 0x30, 0x30, 0x32, 0x31, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes([0x30, 0x30]) +# asserts.assert_that(doc_enc["h"]["data"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_dec["h"] == "" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_variable_header_ebcdic_over_max(): +# """ +# EBCDIC variable header is required and over max provided +# """ +# spec["h"]["data_enc"] = "cp500" +# spec["h"]["len_enc"] = "cp500" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# +# doc_dec = {"h": "header1", "t": "0210"} +# +# with pytest.raises( +# 
iso8583.EncodeError, +# match="Field data is 7 bytes, larger than maximum 6: field h", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_header_ebcdic_present(): +# """ +# EBCDIC variable header is required and provided +# """ +# spec["h"]["data_enc"] = "cp500" +# spec["h"]["len_enc"] = "cp500" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0xf0, 0xf6, 0x88, 0x85, 0x81, 0x84, 0x85, 0x99, 0x30, 0x32, 0x31, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes([0xf0, 0xf6]) +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x88, 0x85, 0x81, 0x84, 0x85, 0x99]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_variable_header_ebcdic_present_zero_legnth(): +# """ +# EBCDIC zero-length variable header +# """ +# spec["h"]["data_enc"] = "cp500" +# spec["h"]["len_enc"] = "cp500" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "", "t": "0210"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0xf0, 0xf0, 0x30, 0x32, 0x31, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) 
+# +# asserts.assert_that(doc_enc["h"]["len"] == bytes([0xf0, 0xf0]) +# asserts.assert_that(doc_enc["h"]["data"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_dec["h"] == "" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_variable_header_bdc_over_max(): +# """ +# BDC variable header is required and over max is provided +# """ +# spec["h"]["data_enc"] = "b" +# spec["h"]["len_enc"] = "b" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 2 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "abcdef", "t": "0210"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Field data is 3 bytes, larger than maximum 2: field h", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_header_bdc_odd(): +# """ +# BDC variable header is required and odd length is provided +# CPython and PyPy throw differently worded exception +# CPython: non-hexadecimal number found in fromhex() arg at position 5 +# PyPy: non-hexadecimal number found in fromhex() arg at position 4 +# """ +# spec["h"]["data_enc"] = "b" +# spec["h"]["len_enc"] = "b" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "abcde", "t": "0210"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .non-hexadecimal number found in fromhex.. 
arg at position 4|5.: field h", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_header_bdc_ascii_length(): +# """ +# BDC variable header +# The length is in ASCII. +# """ +# spec["h"]["data_enc"] = "b" +# spec["h"]["len_enc"] = "ascii" +# spec["h"]["len_type"] = 3 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "abcd", "t": "0210"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x30, 0x30, 0x32, 0xab, 0xcd, 0x30, 0x32, 0x31, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes([0x30, 0x30, 0x32]) +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0xab, 0xcd]) +# asserts.assert_that(doc_dec["h"] == "abcd" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_variable_header_bdc_ebcdic_length(): +# """ +# BDC variable header is required and provided +# The length is in EBCDIC. 
+# """ +# spec["h"]["data_enc"] = "b" +# spec["h"]["len_enc"] = "cp500" +# spec["h"]["len_type"] = 3 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "abcd", "t": "0210"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0xf0, 0xf0, 0xf2, 0xab, 0xcd, 0x30, 0x32, 0x31, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes([0xf0, 0xf0, 0xf2]) +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0xab, 0xcd]) +# asserts.assert_that(doc_dec["h"] == "abcd" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_variable_header_bcd_present(): +# """ +# BCD variable header is required and provided +# """ +# spec["h"]["data_enc"] = "b" +# spec["h"]["len_enc"] = "b" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "abcd", "t": "0210"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x00, 0x02, 0xab, 0xcd, 0x30, 0x32, 0x31, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes([0x00, 0x02]) +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0xab, 0xcd]) +# asserts.assert_that(doc_dec["h"] == "abcd" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# 
asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_variable_header_bcd_present_zero_length(): +# """ +# BCD zero-length variable header is required and provided +# """ +# spec["h"]["data_enc"] = "b" +# spec["h"]["len_enc"] = "b" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "", "t": "0210"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x00, 0x00, 0x30, 0x32, 0x31, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes([0x00, 0x00]) +# asserts.assert_that(doc_enc["h"]["data"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_dec["h"] == "" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_variable_header_incorrect_encoding(): +# """ +# variable header is required and provided. 
+# However, the spec encoding is not correct for length +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_enc"] = "invalid" +# spec["h"]["len_type"] = 2 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "abcd", "t": "0210"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode length .unknown encoding: invalid.: field h", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_no_key(): +# """ +# Message type is required and key is not provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["1"]["len_type"] = 0 +# spec["1"]["max_len"] = 0 +# +# doc_dec = {"h": "header", "2": ""} +# +# with pytest.raises(iso8583.EncodeError, match="Field data is required: field t"): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_ascii_absent(): +# """ +# ASCII message type is required and not provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": ""} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 0 bytes, expecting 4: field t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_ascii_partial(): +# """ +# ASCII message type is required and partial is provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "02"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 2 bytes, expecting 4: field t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_ascii_over_max(): +# """ +# ASCII message type is required and over max is provided +# """ +# spec["h"]["data_enc"] 
= "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "02101"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 5 bytes, expecting 4: field t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_ascii_incorrect_data(): +# """ +# ASCII message type is required and provided. +# However, the data is not ASCII +# CPython and PyPy throw differently worded exception +# CPython: 'ascii' codec can't encode characters in position 0-3: ordinal not in range(128) +# PyPy: 'ascii' codec can't encode character '\\xff' in position 0: ordinal not in range(128) +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = { +# "h": "header", +# "t": bytes([0xff, 0xff, 0xff, 0xff]).decode("latin-1"), +# } +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .'ascii' codec can't encode character.*: ordinal not in range.128..: field t", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_ascii_present(): +# """ +# ASCII message type is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x30, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# 
asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_type_ebcdic_absent(): +# """ +# EBCDIC message type is required and not provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "cp500" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": ""} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 0 bytes, expecting 4: field t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_ebcdic_partial(): +# """ +# EBCDIC message type is required and partial provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "cp500" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "02"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 2 bytes, expecting 4: field t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_ebcdic_over_max(): +# """ +# EBCDIC message type is required and over max provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "cp500" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "02101"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 5 bytes, expecting 4: field t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_ebcdic_present(): +# """ +# EBCDIC message type is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" 
+# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "cp500" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0xf0, 0xf2, 0xf0, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0xf0, 0xf2, 0xf0, 0xf0]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_type_bdc_absent(): +# """ +# BDC message type is required and not provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "b" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": ""} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 0 bytes, expecting 2: field t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_bdc_partial(): +# """ +# BDC message type is required and partial is provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "b" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "02"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 1 bytes, expecting 2: field 
t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_bdc_over_max(): +# """ +# BDC message type is required and over max is provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "b" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "021000"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 3 bytes, expecting 2: field t" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_bdc_odd(): +# """ +# BDC message type is required and odd length is provided +# CPython and PyPy throw differently worded exception +# CPython: non-hexadecimal number found in fromhex() arg at position 3 +# PyPy: non-hexadecimal number found in fromhex() arg at position 2 +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "b" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "021"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .non-hexadecimal number found in fromhex.. arg at position 2|3.: field t", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_bdc_non_hex(): +# """ +# BDC message type is required and provided +# However, the data is not hex +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "b" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "021x"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .non-hexadecimal number found in fromhex.. 
arg at position 3.: field t", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_type_bcd_present(): +# """ +# BCD message type is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "b" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x02, 0x00]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "0000000000000000" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p"]) +# +# +# def test_type_incorrect_encoding(): +# """ +# String message type is required and provided. +# However, the spec encoding is not correct +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "invalid" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .unknown encoding: invalid.: field t", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_bitmap_range(): +# """ +# ISO8583 bitmaps must be between 1 and 128. 
+# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0200"} +# +# doc_dec["0"] = "" +# with pytest.raises( +# iso8583.EncodeError, +# match="Dictionary contains fields outside of 1-128 range .0.: field p", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# del doc_dec["0"] +# doc_dec["129"] = "" +# with pytest.raises( +# iso8583.EncodeError, +# match="Dictionary contains fields outside of 1-128 range .129.: field p", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# for f in range(0, 130): +# doc_dec[str(f)] = "" +# with pytest.raises( +# iso8583.EncodeError, +# match="Dictionary contains fields outside of 1-128 range .0, 129.: field p", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# for f in range(0, 131): +# doc_dec[str(f)] = "" +# with pytest.raises( +# iso8583.EncodeError, +# match="Dictionary contains fields outside of 1-128 range .0, 129, 130.: field p", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_bitmap_remove_secondary(): +# """ +# If 65-128 fields are not in bitmap then remove field 1. 
+# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["data_enc"] = "ascii" +# spec["2"]["len_enc"] = "ascii" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 19 +# +# doc_dec = { +# "h": "header", +# "t": "0200", +# "1": "not needed", +# "2": "1234567890", +# } +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x30, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x31, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0x31, 0x30]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30]) +# asserts.assert_that(doc_dec["2"] == "1234567890" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_bitmap_add_secondary(): +# """ +# If one of 65-128 fields are in bitmap then add field 1. 
+# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["66"]["data_enc"] = "ascii" +# spec["66"]["len_enc"] = "ascii" +# spec["66"]["len_type"] = 2 +# spec["66"]["max_len"] = 19 +# +# doc_dec = { +# "h": "header", +# "t": "0200", +# "66": "1234567890", +# } +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(( +# s +# == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x30, 0x30, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x31, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30]) +# ) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0200" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "8000000000000000" +# +# asserts.assert_that(doc_enc["1"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["1"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["1"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["66"]["len"] == bytes([0x31, 0x30]) +# asserts.assert_that(doc_enc["66"]["data"] == bytes([0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30]) +# asserts.assert_that(doc_dec["66"] == "1234567890" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "1", "66"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "1", "66"]) +# 
#
# def test_primary_bitmap_incorrect_encoding():
#     """
#     Incorrect encoding specified for primary bitmap
#     """
#     spec["h"]["data_enc"] = "ascii"
#     spec["h"]["len_type"] = 0
#     spec["h"]["max_len"] = 6
#     spec["t"]["data_enc"] = "ascii"
#     spec["p"]["data_enc"] = "invalid"
#     spec["1"]["len_type"] = 0
#     spec["1"]["max_len"] = 0
#
#     doc_dec = {"h": "header", "t": "0210", "2": ""}
#
#     with pytest.raises(
#         iso8583.EncodeError,
#         match="Failed to encode .unknown encoding: invalid.: field p",
#     ):
#         iso8583.encode(doc_dec, spec=spec)
#
#
# def test_secondary_bitmap_incorrect_encoding():
#     """
#     Incorrect encoding specified for secondary bitmap
#     """
#     spec["h"]["data_enc"] = "ascii"
#     spec["h"]["len_type"] = 0
#     spec["h"]["max_len"] = 6
#     spec["t"]["data_enc"] = "ascii"
#     spec["p"]["data_enc"] = "ascii"
#     spec["1"]["len_type"] = 0
#     spec["1"]["max_len"] = 16
#     spec["1"]["data_enc"] = "invalid"
#
#     doc_dec = {"h": "header", "t": "0210", "65": ""}
#
#     with pytest.raises(
#         iso8583.EncodeError,
#         match="Failed to encode .unknown encoding: invalid.: field 1",
#     ):
#         iso8583.encode(doc_dec, spec=spec)
#
#
def test_bitmaps_ascii():
    """
    Encode a message where both bitmaps are ASCII-encoded.

    Field 105 is present, which forces the secondary bitmap (field "1")
    to be generated; both bitmaps are expected as 16 ASCII hex digits.
    """
    # Original upstream test mutated a shared spec; here the spec is
    # built inline so the test is self-contained:
    # spec["h"]["data_enc"] = "ascii"
    # spec["h"]["len_type"] = 0
    # spec["h"]["max_len"] = 6
    # spec["t"]["data_enc"] = "ascii"
    # spec["p"]["data_enc"] = "ascii"
    # spec["1"]["data_enc"] = "ascii"
    # spec["105"]["len_enc"] = "ascii"

    spec = {
        "h": {
            "data_enc": "ascii",
            "len_type": 0,
            "max_len": 6,
        },
        "t": {
            "data_enc": "ascii",
        },
        "p": {
            "data_enc": "ascii",
        },
        "1": {
            "data_enc": "ascii",
            "len_type": 0,
            "max_len": 0
        },
        "105": {
            "data_enc": "ascii",
            "len_type": 0,
            "max_len": 0
        },
    }

    doc_dec = {"h": "header", "t": "0210", "105": ""}

    s, doc_enc = Encoder.encode(doc_dec, spec)
    # Expected stream: "header" + "0210" + primary bitmap "8000000000000000"
    # + secondary bitmap "0000000000800000" + field 105 length "000".
    asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x38, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x38, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30]))
    #
    asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]))
    asserts.assert_that(doc_dec["h"] == "header")
    #
    asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]))
    asserts.assert_that(doc_dec["t"] == "0210")
    #
    asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["p"]["data"] == bytes([0x38, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30]))
    asserts.assert_that(doc_dec["p"] == "8000000000000000")
    #
    asserts.assert_that(doc_enc["1"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["1"]["data"] == bytes([0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x38, 0x30, 0x30, 0x30, 0x30, 0x30]))
    asserts.assert_that(doc_dec["1"] == "0000000000800000")
    #
    asserts.assert_that(doc_enc["105"]["len"] == bytes([0x30, 0x30, 0x30]))
    asserts.assert_that(doc_enc["105"]["data"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_dec["105"] == "")
    #
    asserts.assert_that(doc_enc.keys() == sets.make(["h", "t", "p", "1", "105"]))
    asserts.assert_that(doc_dec.keys() == sets.make(["h", "t", "p", "1", "105"]))


# NOTE(review): "ebcidic" is a misspelling of "ebcdic"; the name is kept so
# the test keeps its existing identifier in the test runner.
def test_bitmaps_ebcidic():
    """
    Encode a message where both bitmaps are EBCDIC (cp500) encoded.

    Field 105 is present, which forces the secondary bitmap (field "1")
    to be generated; both bitmaps are expected as 16 EBCDIC hex digits.
    """
    # spec["h"]["data_enc"] = "ascii"
    # spec["h"]["len_type"] = 0
    # spec["h"]["max_len"] = 6
    # spec["t"]["data_enc"] = "ascii"
    # spec["p"]["data_enc"] = "cp500"
    # spec["1"]["data_enc"] = "cp500"
    # spec["105"]["len_enc"] = "ascii"
    spec = {
        "h": {
            "data_enc": "ascii",
            "len_type": 0,
            "max_len": 6,
        },
        "t": {
            "data_enc": "ascii",
        },
        "p": {
            "data_enc": "cp500",
        },
        "1": {
            "data_enc": "cp500",
            "len_type": 0,
            "max_len": 0
        },
        "105": {
            "data_enc": "ascii",
            "len_type": 0,
            "max_len": 0
        },
    }
    doc_dec = {"h": "header", "t": "0210", "105": ""}

    s, doc_enc = Encoder.encode(doc_dec, spec)

    # Expected stream: "header" + "0210" + EBCDIC "8000000000000000"
    # + EBCDIC "0000000000800000" + ASCII field 105 length "000".
    asserts.assert_that(
        s
        == bytearray(bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0xf8, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0]))
        + bytearray(bytes([0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0]))
        + bytearray(bytes([0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf8, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0x30, 0x30, 0x30]))
    )

    asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]))
    asserts.assert_that(doc_dec["h"] == "header")

    asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]))
    asserts.assert_that(doc_dec["t"] == "0210")

    asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(
        doc_enc["p"]["data"]
        == bytes([0xf8, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0])
    )
    asserts.assert_that(doc_dec["p"] == "8000000000000000")

    asserts.assert_that(doc_enc["1"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(
        doc_enc["1"]["data"]
        == bytes([0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf8, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0])
    )
    asserts.assert_that(doc_dec["1"] == "0000000000800000")

    asserts.assert_that(doc_enc["105"]["len"] == bytes([0x30, 0x30, 0x30]))
    asserts.assert_that(doc_enc["105"]["data"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_dec["105"] == "")

    asserts.assert_that(doc_enc.keys() == sets.make(["h", "t", "p", "1", "105"]))
    asserts.assert_that(doc_dec.keys() == sets.make(["h", "t", "p", "1", "105"]))


def test_bitmaps_bcd():
    """
    Encode a message where both bitmaps are binary/BCD ("b") encoded.

    Field 105 is present, which forces the secondary bitmap (field "1")
    to be generated; both bitmaps are expected as 8 raw bytes each.
    """
    # spec["h"]["data_enc"] = "ascii"
    # spec["h"]["len_type"] = 0
    # spec["h"]["max_len"] = 6
    # spec["t"]["data_enc"] = "ascii"
    # spec["p"]["data_enc"] = "b"
    # spec["1"]["data_enc"] = "b"
    # spec["105"]["len_enc"] = "ascii"
    spec = {
        "h": {
            "data_enc": "ascii",
            "len_type": 0,
            "max_len": 6,
        },
        "t": {
            "data_enc": "ascii",
        },
        "p": {
            "data_enc": "b",
        },
        "1": {
            "data_enc": "b",
            "len_type": 0,
            "max_len": 0
        },
        "105": {
            "data_enc": "ascii",
            "len_type": 0,
            "max_len": 0
        },
    }

    doc_dec = {"h": "header", "t": "0210", "105": ""}

    s, doc_enc = Encoder.encode(doc_dec, spec)

    # Expected stream: "header" + "0210" + 8-byte primary bitmap
    # + 8-byte secondary bitmap + ASCII field 105 length "000".
    asserts.assert_that(
        s
        == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x30, 0x30, 0x30])
    )

    asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]))
    asserts.assert_that(doc_dec["h"] == "header")

    asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]))
    asserts.assert_that(doc_dec["t"] == "0210")

    asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["p"]["data"] == bytes([0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
    asserts.assert_that(doc_dec["p"] == "8000000000000000")

    asserts.assert_that(doc_enc["1"]["len"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_enc["1"]["data"] == bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00]))
    asserts.assert_that(doc_dec["1"] == "0000000000800000")

    asserts.assert_that(doc_enc["105"]["len"] == bytes([0x30, 0x30, 0x30]))
    asserts.assert_that(doc_enc["105"]["data"] == bytes(r"", encoding='utf-8'))
    asserts.assert_that(doc_dec["105"] == "")

    asserts.assert_that(doc_enc.keys() == sets.make(["h", "t", "p", "1", "105"]))
    asserts.assert_that(doc_dec.keys() == sets.make(["h", "t", "p", "1", "105"]))


# def test_primary_bitmap_ascii_upper_case():
#     """
#     This test makes sure that encoded primary bitmap is in upper case.
#     """
#     spec["h"]["len_type"] = 0
#     spec["h"]["max_len"] = 0
#     spec["t"]["data_enc"] = "ascii"
#     spec["p"]["data_enc"] = "ascii"
#
#     spec["5"]["len_type"] = 0
#     spec["5"]["max_len"] = 1
#     spec["5"]["data_enc"] = "ascii"
#     spec["7"]["len_type"] = 0
#     spec["7"]["max_len"] = 1
#     spec["7"]["data_enc"] = "ascii"
#
#     doc_dec = {"t": "0200", "5": "A", "7": "B"}
#     s, doc_enc = iso8583.encode(doc_dec, spec)
#     asserts.assert_that(s == bytes([0x30, 0x32, 0x30, 0x30, 0x30, 0x41, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x41, 0x42])
#     asserts.assert_that(doc_dec["p"] == "0A00000000000000"
#     asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30])
#     asserts.assert_that(doc_enc["p"]["data"] == bytes([0x30, 0x41, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30])
#     asserts.assert_that(doc_enc["5"]["data"] == bytes([0x41])
#     asserts.assert_that(doc_enc["7"]["data"] == bytes([0x42])
#
#
# def test_secondary_bitmap_ascii_upper_case():
#     """
#     This test makes sure that encoded secondary bitmap is in upper case.
+# """ +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 0 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["1"]["data_enc"] = "ascii" +# +# spec["69"]["len_type"] = 0 +# spec["69"]["max_len"] = 1 +# spec["69"]["data_enc"] = "ascii" +# spec["71"]["len_type"] = 0 +# spec["71"]["max_len"] = 1 +# spec["71"]["data_enc"] = "ascii" +# +# doc_dec = {"t": "0200", "69": "A", "71": "B"} +# s, doc_enc = iso8583.encode(doc_dec, spec) +# asserts.assert_that(s == bytes([0x30, 0x32, 0x30, 0x30, 0x38, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x41, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x41, 0x42]) +# asserts.assert_that(doc_dec["p"] == "8000000000000000" +# asserts.assert_that(doc_dec["1"] == "0A00000000000000" +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x30, 0x30]) +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x38, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30]) +# asserts.assert_that(doc_enc["1"]["data"] == bytes([0x30, 0x41, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30]) +# asserts.assert_that(doc_enc["69"]["data"] == bytes([0x41]) +# asserts.assert_that(doc_enc["71"]["data"] == bytes([0x42]) +# +# +# def test_fixed_field_ascii_absent(): +# """ +# ASCII fixed field is required and not provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 0 bytes, expecting 2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_ascii_partial(): +# """ +# ASCII fixed field is required and 
partially provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 1 bytes, expecting 2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_ascii_over_max(): +# """ +# ASCII fixed field is required and over max provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": "123"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 3 bytes, expecting 2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_ascii_incorrect_data(): +# """ +# ASCII fixed field is required and provided. 
+# However, the data is not ASCII +# CPython and PyPy throw differently worded exception +# CPython: 'ascii' codec can't encode characters in position 0-1: ordinal not in range(128) +# PyPy: 'ascii' codec can't encode character '\\xff' in position 0: ordinal not in range(128) +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "ascii" +# +# doc_dec = { +# "h": "header", +# "t": "0210", +# "2": bytes([0xff, 0xff]).decode("latin-1"), +# } +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .'ascii' codec can't encode character.*: ordinal not in range.128..: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_ascii_present(): +# """ +# ASCII fixed field is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": "22"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x32, 0x32]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# 
asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0x32, 0x32]) +# asserts.assert_that(doc_dec["2"] == "22" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_fixed_field_ascii_present_zero_legnth(): +# """ +# ASCII zero-length fixed field is required and provided +# This is pointless but should work. +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 0 +# spec["2"]["data_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["2"]["data"] == bytes(r"", encoding='utf-8') +# 
asserts.assert_that(doc_dec["2"] == "" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_fixed_field_ebcdic_absent(): +# """ +# EBCDIC fixed field is required and not provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 0 bytes, expecting 2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_ebcdic_partial(): +# """ +# EBCDIC fixed field is required and partially provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 1 bytes, expecting 2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_ebcdic_over_max(): +# """ +# EBCDIC fixed field is required and over max provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": "123"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 3 bytes, expecting 2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_ebcdic_present(): +# """ +# EBCDIC fixed field is required and provided +# 
""" +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": "22"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf2, 0xf2]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0xf2, 0xf2]) +# asserts.assert_that(doc_dec["2"] == "22" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_fixed_field_ebcdic_present_zero_legnth(): +# """ +# EBCDIC zero-length fixed field is required and provided +# This is pointless but should work. 
+# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 0 +# spec["2"]["data_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["2"]["data"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_dec["2"] == "" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_fixed_field_bdc_absent(): +# """ +# BDC fixed field is required and not provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 0 bytes, expecting 
2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_bdc_partial(): +# """ +# BDC fixed field is required and partial is provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": "12"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 1 bytes, expecting 2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_bdc_over_max(): +# """ +# BDC fixed field is required and over max is provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": "123456"} +# +# with pytest.raises( +# iso8583.EncodeError, match="Field data is 3 bytes, expecting 2: field 2" +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_bdc_odd(): +# """ +# BDC fixed field is required and odd length is provided +# CPython and PyPy throw differently worded exception +# CPython: non-hexadecimal number found in fromhex() arg at position 5 +# PyPy: non-hexadecimal number found in fromhex() arg at position 4 +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": "12345"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .non-hexadecimal number found in fromhex.. 
arg at position 4|5.: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_bdc_non_hex(): +# """ +# BDC fixed field is required and provided +# However, the data is not hex +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": "11xx"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .non-hexadecimal number found in fromhex.. arg at position 2.: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_fixed_field_bcd_present(): +# """ +# BCD fixed field is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1122"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# 
asserts.assert_that(doc_enc["2"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0x11]) +# asserts.assert_that(doc_dec["2"] == "1122" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_fixed_field_bcd_present_zero_length(): +# """ +# BCD zero-length fixed field is required and provided +# This is pointless but should work. +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 0 +# spec["2"]["data_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["2"]["data"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_dec["2"] == "" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def 
test_fixed_field_incorrect_encoding(): +# """ +# Fixed field is required and provided. +# However, the spec encoding is not correct +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 0 +# spec["2"]["max_len"] = 2 +# spec["2"]["data_enc"] = "invalid" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1122"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .unknown encoding: invalid.: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_field_ascii_over_max(): +# """ +# ASCII variable field is required and over max provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "ascii" +# spec["2"]["len_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": "12345678901"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Field data is 11 bytes, larger than maximum 10: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_field_ascii_present(): +# """ +# ASCII variable field is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "ascii" +# spec["2"]["len_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1122"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x34, 0x31, 0x31, 0x32, 0x32]) +# +# asserts.assert_that(doc_enc["h"]["len"] == 
bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0x30, 0x34]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0x31, 0x31, 0x32, 0x32]) +# asserts.assert_that(doc_dec["2"] == "1122" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_variable_field_ascii_present_zero_legnth(): +# """ +# ASCII zero-length variable field is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "ascii" +# spec["2"]["len_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x30]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 
0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0x30, 0x30]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_dec["2"] == "" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_variable_field_ebcdic_over_max(): +# """ +# EBCDIC variable field is required and over max provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "ascii" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "cp500" +# spec["2"]["len_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": "12345678901"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Field data is 11 bytes, larger than maximum 10: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_field_ebcdic_present(): +# """ +# EBCDIC variable field is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "cp500" +# spec["2"]["len_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1122"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0xf4, 0xf1, 0xf1, 0xf2, 0xf2]) +# +# asserts.assert_that(doc_enc["h"]["len"] 
== bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0xf0, 0xf4]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0xf1, 0xf1, 0xf2, 0xf2]) +# asserts.assert_that(doc_dec["2"] == "1122" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_variable_field_ebcdic_present_zero_legnth(): +# """ +# EBCDIC zero-length variable field is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "cp500" +# spec["2"]["len_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0xf0]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 
0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0xf0, 0xf0]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_dec["2"] == "" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_variable_field_bdc_over_max(): +# """ +# BDC variable field is required and over max is provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 5 +# spec["2"]["data_enc"] = "b" +# spec["2"]["len_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": "123456789012"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Field data is 6 bytes, larger than maximum 5: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_field_bdc_odd(): +# """ +# BDC variable field is required and odd length is provided +# CPython and PyPy throw differently worded exception +# CPython: non-hexadecimal number found in fromhex() arg at position 5 +# PyPy: non-hexadecimal number found in fromhex() arg at position 4 +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "b" +# spec["2"]["len_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": "12345"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode .non-hexadecimal number 
found in fromhex.. arg at position 4|5.: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) +# +# +# def test_variable_field_bdc_ascii_length(): +# """ +# BDC variable field is required and provided +# The length is in ASCII. +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 3 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "b" +# spec["2"]["len_enc"] = "ascii" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1122"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x30, 0x32, 0x11]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0x30, 0x30, 0x32]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0x11]) +# asserts.assert_that(doc_dec["2"] == "1122" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_variable_field_bdc_ebcdic_length(): +# """ +# BDC variable field is required and provided +# The length is in EBCDIC. 
+# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 3 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "b" +# spec["2"]["len_enc"] = "cp500" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1122"} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0xf0, 0xf2, 0x11]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0xf0, 0xf0, 0xf2]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0x11]) +# asserts.assert_that(doc_dec["2"] == "1122" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_variable_field_bcd_present(): +# """ +# BCD variable field is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "b" +# spec["2"]["len_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1122"} +# +# 
s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x11]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0x00, 0x02]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes([0x11]) +# asserts.assert_that(doc_dec["2"] == "1122" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_variable_field_bcd_present_zero_length(): +# """ +# BCD zero-length variable field is required and provided +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "b" +# spec["2"]["len_enc"] = "b" +# +# doc_dec = {"h": "header", "t": "0210", "2": ""} +# +# s, doc_enc = iso8583.encode(doc_dec, spec=spec) +# +# asserts.assert_that(s == bytes([0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x30, 0x32, 0x31, 0x30, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# +# asserts.assert_that(doc_enc["h"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["h"]["data"] == bytes([0x68, 
0x65, 0x61, 0x64, 0x65, 0x72]) +# asserts.assert_that(doc_dec["h"] == "header" +# +# asserts.assert_that(doc_enc["t"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["t"]["data"] == bytes([0x30, 0x32, 0x31, 0x30]) +# asserts.assert_that(doc_dec["t"] == "0210" +# +# asserts.assert_that(doc_enc["p"]["len"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_enc["p"]["data"] == bytes([0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) +# asserts.assert_that(doc_dec["p"] == "4000000000000000" +# +# asserts.assert_that(doc_enc["2"]["len"] == bytes([0x00, 0x00]) +# asserts.assert_that(doc_enc["2"]["data"] == bytes(r"", encoding='utf-8') +# asserts.assert_that(doc_dec["2"] == "" +# +# asserts.assert_that(doc_enc.keys() == set(["h", "t", "p", "2"]) +# asserts.assert_that(doc_dec.keys() == set(["h", "t", "p", "2"]) +# +# +# def test_variable_field_incorrect_encoding(): +# """ +# Variable field is required and provided. +# However, the spec encoding is not correct for length +# """ +# spec["h"]["data_enc"] = "ascii" +# spec["h"]["len_type"] = 0 +# spec["h"]["max_len"] = 6 +# spec["t"]["data_enc"] = "ascii" +# spec["p"]["data_enc"] = "b" +# spec["2"]["len_type"] = 2 +# spec["2"]["max_len"] = 10 +# spec["2"]["data_enc"] = "ascii" +# spec["2"]["len_enc"] = "invalid" +# +# doc_dec = {"h": "header", "t": "0210", "2": "1122"} +# +# with pytest.raises( +# iso8583.EncodeError, +# match="Failed to encode length .unknown encoding: invalid.: field 2", +# ): +# iso8583.encode(doc_dec, spec=spec) + + +def _testsuite(): + _suite = unittest.TestSuite() + _suite.addTest(unittest.FunctionTestCase(test_bitmaps_ascii)) + _suite.addTest(unittest.FunctionTestCase(test_bitmaps_ebcidic)) + _suite.addTest(unittest.FunctionTestCase(test_bitmaps_bcd)) + _suite.addTest(unittest.FunctionTestCase(test_EncodeError_exception)) + _suite.addTest(unittest.FunctionTestCase(test_non_string_field_keys)) + + return _suite + +_runner = unittest.TextTestRunner() 
+_runner.run(_testsuite()) diff --git a/larky/src/test/resources/quick_tests/test_iso8583.star b/larky/src/test/resources/quick_tests/test_iso8583.star new file mode 100644 index 000000000..419fa4d6c --- /dev/null +++ b/larky/src/test/resources/quick_tests/test_iso8583.star @@ -0,0 +1,985 @@ +load("@stdlib//unittest", "unittest") +load("@vendor//asserts", "asserts") +load("@vendor//ISO8583Decoder", Decoder="Decoder") +load("@vendor//ISO8583Encoder", Encoder="Encoder") +load("@stdlib//binascii", unhexlify="unhexlify", hexlify="hexlify") +load("@stdlib//builtins", "builtins") + + + +def MyTestCase_test_decode_encode(): + payload = unhexlify(bytes(hex_string_payload, encoding='utf-8')) + decoded, encoded1 = Decoder.decode(payload, test_payload_spec) + print('encoded1:' + str(encoded1)) + print('decoded:' + str(decoded)) + # token = vault.put(encoded) + # decoded['2'] = token + + asserts.assert_that('0100').is_equal_to(decoded['t']) + asserts.assert_that('FEFA448108E0E48A' == decoded['p']) + asserts.assert_that('100194868740300').is_equal_to(decoded['2']) + + encoded_raw, encoded2 = Encoder.encode(decoded, test_payload_spec) + asserts.assert_that('100194868740300' == encoded2['2']['data']) + asserts.assert_that(decoded['2'] == encoded2['p']) + asserts.assert_that(decoded['2'] == encoded2['2']['data']) + for field_key in decoded: + print(field_key+':'+str(decoded[field_key]) + ' | ' + str(encoded2[field_key]['data'])) + asserts.assert_that(decoded[field_key] == encoded2[field_key]['data']) + + print("encoded1:"+str(encoded1)) + print("encoded2:"+str(encoded2)) + print("payload :"+str(payload)) + print("encoded_:"+str(encoded_raw)) + + hex_string_payload_encoded_raw = str(hexlify(bytes(encoded_raw, encoding='utf-8'))) + print("hex_string_payload :" + hex_string_payload) + print("hex_string_payload_:" + hex_string_payload_encoded_raw) + asserts.assert_that(hex_string_payload).is_equal_to(hex_string_payload_encoded_raw) + + +hex_string_payload = 
'30313030fefa448108e0e48a0000000004020008313531303031393438363837343033303030303030303030303030303030303135303030303030303030303135303030303030303030303137303030333035313334393033363130303030303036313030303030303030303030313133343930333033303530333035353439393031323030303630313233343531303030303031303030303431323334353637383132333435363738393132333435364d5352204d45524348414e54202020202020202020202020204252555353454c5320202020202020555341383430383430383430303230303037323834304430303030303030303032303030303331363030313630303032303030303237313330303030303730202020202020202020202020202020202020202020202020202020202020202020203020202020202020202020202020202020202020202020204d4153544552434152442020313531303031393438363837343033303034333220202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020303030303030302020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020203030303030303030303030303030202020202020202020202020202020303030303030303030202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020203030302020202020303030202020202020202020202020202020202020202020202020202020202020202020203030303030302020202020202020202020202020203031373533373533363630303030203030303030303030303030303030303020202020202020202020202020206e7520202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020206e202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020302020202
0202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020203030202020202020202020202020202020' + +test_payload_spec = { + "h": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 0, + "desc": "Message Header", + }, + "t": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Message Type", + }, + "p": { + "data_enc": "b", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "Bitmap, Primary", + }, + "1": { + "data_enc": "b", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "Bitmap, Secondary", + }, + "2": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 19, + "desc": "Primary Account Number (PAN)", + }, + "3": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 6, + "desc": "Processing Code", + }, + "4": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 12, + "desc": "Amount, Transaction", + }, + "5": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 12, + "desc": "Amount, Settlement", + }, + "6": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 12, + "desc": "Amount, Cardholder Billing", + }, + "7": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Transmission Date and Time", + }, + "8": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "Amount, Cardholder Billing Fee", + }, + "9": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "Conversion Rate, Settlement", + }, + "10": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "Conversion 
Rate, Cardholder Billing", + }, + "11": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 6, + "desc": "System Trace Audit Number", + }, + "12": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 6, + "desc": "Time, Local Transaction", + }, + "13": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Date, Local Transaction", + }, + "14": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Date, Expiration", + }, + "15": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Date, Settlement", + }, + "16": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Date, Conversion", + }, + "17": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Date, Capture", + }, + "18": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Merchant Type", + }, + "19": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Acquiring Institution Country Code", + }, + "20": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "PAN Country Code", + }, + "21": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Forwarding Institution Country Code", + }, + "22": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Point-of-Service Entry Mode", + }, + "23": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "PAN Sequence Number", + }, + "24": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Network International ID (NII)", + }, + "25": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 2, + "desc": "Point-of-Service Condition Code", + }, + "26": { + "data_enc": 
"ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 2, + "desc": "Point-of-Service Capture Code", + }, + "27": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 1, + "desc": "Authorizing ID Response Length", + }, + "28": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 9, + "desc": "Amount, Transaction Fee", + }, + "29": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 9, + "desc": "Amount, Settlement Fee", + }, + "30": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 9, + "desc": "Amount, Transaction Processing Fee", + }, + "31": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 9, + "desc": "Amount, Settlement Processing Fee", + }, + "32": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 11, + "desc": "Acquiring Institution ID Code", + }, + "33": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 11, + "desc": "Forwarding Institution ID Code", + }, + "34": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 28, + "desc": "Primary Account Number, Extended", + }, + "35": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 37, + "desc": "Track 2 Data", + }, + "36": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 104, + "desc": "Track 3 Data", + }, + "37": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 12, + "desc": "Retrieval Reference Number", + }, + "38": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 6, + "desc": "Authorization ID Response", + }, + "39": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 2, + "desc": "Response Code", + }, + "40": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Service Restriction Code", + }, + "41": { + "data_enc": "ascii", + 
"len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "Card Acceptor Terminal ID", + }, + "42": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 15, + "desc": "Card Acceptor ID Code", + }, + "43": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 40, + "desc": "Card Acceptor Name/Location", + }, + "44": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 25, + "desc": "Additional Response Data", + }, + "45": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 76, + "desc": "Track 1 Data", + }, + "46": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Additional Data - ISO", + }, + "47": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Additional Data - National", + }, + "48": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Additional Data - Private", + }, + "49": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Currency Code, Transaction", + }, + "50": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Currency Code, Settlement", + }, + "51": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Currency Code, Cardholder Billing", + }, + "52": { + "data_enc": "b", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "PIN", + }, + "53": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 16, + "desc": "Security-Related Control Information", + }, + "54": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + # "max_len": 240, + "max_len": 840, + "desc": "Additional Amounts", + }, + "55": { + "data_enc": "b", + "len_enc": "ascii", + "len_type": 3, + "max_len": 255, + "desc": "ICC data", + }, + "56": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, 
+ "max_len": 999, + "desc": "Reserved ISO", + }, + "57": { + "data_enc": "ascii", + # "data_enc": "b", + "len_enc": "ascii", + # "len_type": 3, + "len_type": 0, + # "max_len": 999, + "max_len": 102, + "desc": "Reserved National", + }, + "58": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved National", + }, + "59": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved National", + }, + "60": { + "data_enc": "ascii", + "len_enc": "ascii", + # "len_type": 3, + "len_type": 0, + "max_len": 999, + "desc": "Reserved National", + }, + "61": { + "data_enc": "ascii", + "len_enc": "ascii", + # "len_type": 3, + "len_type": 0, + # "max_len": 999, + "max_len": 0, + "desc": "Reserved Private", + }, + "62": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved Private", + }, + "63": { + "data_enc": "ascii", + "len_enc": "ascii", + # "len_type": 3, + "len_type": 0, + # "max_len": 999, + "max_len": 0, + "desc": "Reserved Private", + }, + "64": { + "data_enc": "b", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "MAC", + }, + "65": { + "data_enc": "b", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "Bitmap, Extended", + }, + "66": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 1, + "desc": "Settlement Code", + }, + "67": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 2, + "desc": "Extended Payment Code", + }, + "68": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Receiving Institution Country Code", + }, + "69": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Settlement Institution Country Code", + }, + "70": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 3, + "desc": "Network Management Information Code", + }, + "71": { + 
"data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Message Number", + }, + "72": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 4, + "desc": "Message Number, Last", + }, + "73": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 6, + "desc": "Date, Action", + }, + "74": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Credits, Number", + }, + "75": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Credits, Reversal Number", + }, + "76": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Debits, Number", + }, + "77": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Debits, Reversal Number", + }, + "78": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Transfer, Number", + }, + "79": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Transfer, Reversal Number", + }, + "80": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Inquiries, Number", + }, + "81": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 10, + "desc": "Authorizations, Number", + }, + "82": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 12, + "desc": "Credits, Processing Fee Amount", + }, + "83": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 12, + "desc": "Credits, Transaction Fee Amount", + }, + "84": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 12, + "desc": "Debits, Processing Fee Amount", + }, + "85": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 12, + "desc": "Debits, Transaction Fee Amount", + }, + "86": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, 
+ "max_len": 16, + "desc": "Credits, Amount", + }, + "87": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 16, + "desc": "Credits, Reversal Amount", + }, + "88": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 16, + "desc": "Debits, Amount", + }, + "89": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 16, + "desc": "Debits, Reversal Amount", + }, + "90": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 42, + "desc": "Original Data Elements", + }, + "91": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 1, + "desc": "File Update Code", + }, + "92": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 2, + "desc": "File Security Code", + }, + "93": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 5, + "desc": "Response Indicator", + }, + "94": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 7, + "desc": "Service Indicator", + }, + "95": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 42, + "desc": "Replacement Amounts", + }, + "96": { + "data_enc": "b", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "Message Security Code", + }, + "97": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 17, + "desc": "Amount, Net Settlement", + }, + "98": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 0, + "max_len": 25, + "desc": "Payee", + }, + "99": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 11, + "desc": "Settlement Institution ID Code", + }, + "100": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 11, + "desc": "Receiving Institution ID Code", + }, + "101": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 17, + "desc": "File Name", + }, + "102": { + "data_enc": "ascii", + "len_enc": 
"ascii", + # "len_type": 2, + "len_type": 0, + # "max_len": 28, + "max_len": 0, + "desc": "Account ID 1", + }, + "103": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 2, + "max_len": 28, + "desc": "Account ID 2", + }, + "104": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 100, + "desc": "Transaction Description", + }, + "105": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for ISO Use", + }, + "106": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for ISO Use", + }, + "107": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for ISO Use", + }, + "108": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for ISO Use", + }, + "109": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for ISO Use", + }, + "110": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for ISO Use", + }, + "111": { + "data_enc": "ascii", + "len_enc": "ascii", + # "len_type": 3, + "len_type": 0, + "max_len": 0, + # "max_len": 999, + "desc": "Reserved for ISO Use", + }, + "112": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for National Use", + }, + "113": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for National Use", + }, + "114": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for National Use", + }, + "115": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for National Use", + }, + "116": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for National Use", + }, + "117": { + 
"data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for National Use", + }, + "118": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for National Use", + }, + "119": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for National Use", + }, + "120": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for Private Use", + }, + "121": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for Private Use", + }, + "122": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for Private Use", + }, + "123": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for Private Use", + }, + "124": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for Private Use", + }, + "125": { + "data_enc": "ascii", + "len_enc": "ascii", + # "len_type": 3, + "len_type": 0, + # "max_len": 999, + "max_len": 0, + "desc": "Reserved for Private Use", + }, + "126": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for Private Use", + }, + "127": { + "data_enc": "ascii", + "len_enc": "ascii", + "len_type": 3, + "max_len": 999, + "desc": "Reserved for Private Use", + }, + "128": { + "data_enc": "b", + "len_enc": "ascii", + "len_type": 0, + "max_len": 8, + "desc": "MAC", + }, +} + +def _testsuite(): + _suite = unittest.TestSuite() + _suite.addTest(unittest.FunctionTestCase(MyTestCase_test_decode_encode)) + return _suite + + +_runner = unittest.TextTestRunner() +_runner.run(_testsuite())