Skip to content

Commit

Permalink
Merge pull request #38 from Ableton/mkp-print-frozen-stats
Browse files Browse the repository at this point in the history
maxdiff: For frozen devices, print device statistics
  • Loading branch information
MattijsKneppers authored Dec 16, 2024
2 parents 170f7a7 + e4f3d74 commit 9e97cc4
Show file tree
Hide file tree
Showing 19 changed files with 1,044 additions and 92 deletions.
24 changes: 23 additions & 1 deletion maxdiff/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,34 @@ Removing the comments from the new MIDI Effect:
For `.amxd` files:
* The scripts will also tell you what device type it is.
* If a device is frozen, you will see an overview of the content frozen into the device. NOTE: We recommend never to commit frozen devices to a git repository, instead to include the dependencies as separate files.
* If a device is frozen, you will see an overview and statistics of the content frozen into the device. NOTE: We recommend never to commit frozen devices to a git repository, instead to include the dependencies as separate files.

### Why?

Readable diffs are very useful for patch code review, or for a sanity check before committing (did I really change nothing else except removing all my debug messages and prints?).

### Using the frozen device statistics

For frozen devices, `maxdiff` reports statistics. This feature can be compared to [`git diff --stat`](https://git-scm.com/docs/git-diff#Documentation/git-diff.txt---statltwidthgtltname-widthgtltcountgt) to indicate the amount of change in a commit.

Currently, it reports the number of object instances and connections of a device in two different ways:

* Total - Counting every abstraction instance - Indicates loading time
* This recursively counts the contents of all subpatchers and abstraction instances
* Unique - Counting abstractions once - Indicates maintainability
* This counts the contents once of every dependency frozen into the device.

Apart from that, it reports the number of times each dependency is used.

We typically don't commit frozen devices to a repo, so the typical way to use this feature is:

1. Commit the device and its dependencies in **un**frozen state.
2. Temporarily freeze the device.
3. Run `python3 ./maxdiff/amxd_textconv.py <path/to/device.amxd>` to get the statistics or view the file in your git client.
4. Discard the freezing.

The reason this only works with frozen devices is that frozen devices unambiguously capture all dependencies.

### What does not work

Typical things you can do with text-based code that will not work with Max patches or devices:
Expand Down
108 changes: 108 additions & 0 deletions maxdiff/freezing_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
import json
import datetime
from typing import Optional

# A parsed footer record: maps each 4-character field tag (e.g. "fnam", "sz32",
# "mdat") to its decoded value (string, integer, datetime, or None for unknown tags).
footer_entry = dict[str, str | int | datetime.datetime | None]
# A frozen-device entry enriched with its raw payload; expected keys include
# "file_name" (str), "description" (str), "type" (str) and "data" (bytes).
device_entry_with_data = dict[str, str | bytes]


def parse_footer(data: bytes) -> list[footer_entry]:
    """Parse the byte data of a frozen device footer.

    Walks consecutive 'dire' records and collects the parsed fields of each
    frozen dependency, preserving their order of appearance.
    """
    entries: list[footer_entry] = []
    remaining = data
    while remaining[:4].decode("ascii") == "dire":
        # Record size is a big-endian uint32 immediately after the 'dire' tag;
        # it counts the whole record, including this 8-byte header.
        record_size = int.from_bytes(remaining[4:8], byteorder="big")
        entries.append(get_fields(remaining[8 : 8 + record_size]))
        remaining = remaining[record_size:]
    return entries


def get_fields(data: bytes) -> footer_entry:
    """Parse the data for a frozen dependency and return a dict mapping each
    field's 4-character type tag to its decoded content."""
    parsed: footer_entry = {}
    # A well-formed field needs at least a 4-byte tag, a 4-byte size and payload.
    while len(data) >= 12:
        tag = data[:4].decode("ascii")
        # Field size is big-endian and counts the 8-byte header, so the
        # payload spans bytes 8..field_size of the current field.
        total_size = int.from_bytes(data[4:8], byteorder="big")
        parsed[tag] = parse_field_data(tag, data[8:total_size])
        data = data[total_size:]
    return parsed


def parse_field_data(
field_type: str, data: bytes
) -> Optional[str | int | datetime.datetime]:
"""Parses the data of a field. Depending on the field type, returns its data as the correct type"""
match field_type:
case "type":
return remove_trailing_zeros(data).decode("ascii")
case "fnam":
return remove_trailing_zeros(data).decode("ascii")
case "sz32":
return int.from_bytes(data, byteorder="big")
case "of32":
return int.from_bytes(data, byteorder="big")
case "vers":
return int.from_bytes(data, byteorder="big")
case "flag":
return int.from_bytes(data, byteorder="big")
case "mdat":
return get_hfs_date(data)
return None


def remove_trailing_zeros(data: bytes) -> bytes:
    """Strip the zero-byte padding from the end of a padded string buffer."""
    padding = b"\x00"
    return data.rstrip(padding)


def get_hfs_date(data: bytes) -> datetime.datetime:
"""Converts a byte sequence that represents a HFS+ date to a Python datetime object"""
seconds_offset_from_unix = 2082844800 # Mac HFS+ is time since 1 Jan 1904 while Unix time is since 1 Jan 1970
seconds_in_hfs_plus = int.from_bytes(data, byteorder="big")
return datetime.datetime.fromtimestamp(
seconds_in_hfs_plus - seconds_offset_from_unix, datetime.UTC
)


def get_patcher_dict(entry: "device_entry_with_data"):
    """Return the dict that represents the given patcher data.

    Expects *entry* to carry a "file_name" (str) key and a "data" (bytes)
    key holding UTF-8 JSON, optionally NUL-terminated. Returns the parsed
    "patcher" sub-dict on success; on any failure prints an error (where a
    name is available) and returns an empty dict.
    """
    patch_data = entry.get("data")
    if not isinstance(patch_data, bytes):
        return {}

    name = entry.get("file_name")
    if not isinstance(name, str):
        return {}

    # Frozen patcher payloads may carry a single trailing NUL terminator;
    # checking via endswith also handles empty payloads safely (the original
    # indexing raised IndexError on empty bytes).
    if patch_data.endswith(b"\x00"):
        patch_data = patch_data[:-1]

    try:
        device_data_text = patch_data.decode("utf-8")
    except UnicodeDecodeError as e:
        print(f"Error getting patch data as text for entry {name}: {e}")
        return {}

    try:
        patcher_dict = json.loads(device_data_text)
    except ValueError as e:
        print(f"Error parsing device patch data as json for entry {name}: {e}")
        return {}

    # Narrow exception types instead of a bare except: a missing key or a
    # non-dict JSON document both mean "not a patcher".
    try:
        return patcher_dict["patcher"]
    except (TypeError, KeyError):
        print(f"Content of entry {name} does not seem to be a patcher")
        return {}
135 changes: 66 additions & 69 deletions maxdiff/frozen_device_printer.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import datetime
from typing import Optional
from freezing_utils import *
from get_frozen_stats import get_frozen_stats, get_used_files


def print_frozen_device(data: bytes) -> str:
"""Parses a frozen device represented as bytes and returns a string representation of it."""
dependency_data_size = int.from_bytes(
footer_data_size = int.from_bytes(
data[8:16], byteorder="big"
) # data size int is 64 bit
footer_data = data[dependency_data_size:]
footer_data = data[footer_data_size:]

if footer_data[:4].decode("ascii") != "dlst":
return "Error parsing footer data; footer data does not start with 'dlst'"
Expand All @@ -16,80 +16,77 @@ def print_frozen_device(data: bytes) -> str:
return "Error parsing footer data; recorded size does not match actual size"

frozen_string = "Device is frozen\n----- Contents -----\n"
dependencies = parse_footer(footer_data[8:])
for dependency in dependencies:
frozen_string += dependency + "\n"

footer_entries = parse_footer(footer_data[8:])
device_entries = get_device_entries(data, footer_entries)
used_files = get_used_files(device_entries)

i = 0
for entry in device_entries:
description = entry["description"]
if isinstance(description, str):
file_name = str(entry["file_name"])
if i == 0:
frozen_string += f"{description} <= Device \n"
else:
if file_name in used_files:
frozen_string += f"{description}, {used_files[file_name]} instance{'s' if used_files[file_name] > 1 else ''}\n"
else:
frozen_string += f"{description}, NOT FOUND IN PATCH\n"
i += 1

[object_count_total, line_count_total, object_count_unique, line_count_unique] = (
get_frozen_stats(device_entries)
)

frozen_string += "\n"
frozen_string += (
"Total - Counting every abstraction instance - Indicates loading time\n"
)
frozen_string += f" Object instances: {object_count_total}\n"
frozen_string += f" Connections: {line_count_total}\n"
frozen_string += "Unique - Counting abstractions once - Indicates maintainability\n"
frozen_string += f" Object instances: {object_count_unique}\n"
frozen_string += f" Connections: {line_count_unique}\n"

return frozen_string


def parse_footer(data: bytes) -> list[str]:
"""Parses the footer byte data of a frozen device and returns an array of
string representations of the frozen dependencies."""
dependencies: list[str] = []
while data[:4].decode("ascii") == "dire":
size = int.from_bytes(data[4:8], byteorder="big")
fields = get_fields(data[8 : 8 + size])
if "fnam" in fields and "sz32" in fields and "mdat" in fields:
def get_device_entries(
data: bytes, footer_entries: list[footer_entry]
) -> list[device_entry_with_data]:
"""Converts footer entries to dict containing footer entry information and data."""
entries: list[device_entry_with_data] = []

for fields in footer_entries:
if (
"type" in fields
and "fnam" in fields
and "of32" in fields
and "sz32" in fields
and "mdat" in fields
):
type_field = fields["type"]
name_field = fields["fnam"]
offset_field = fields["of32"]
size_field = fields["sz32"]
date_field = fields["mdat"]
if not (
isinstance(name_field, str)
isinstance(type_field, str)
and isinstance(name_field, str)
and isinstance(offset_field, int)
and isinstance(size_field, int)
and isinstance(date_field, datetime.datetime)
):
raise Exception("Incorrect type for parsed footer fields")
dependencies.append(
f'{fields["fnam"]}: {fields["sz32"]} bytes, modified at {date_field.strftime("%Y/%m/%d %T")} UTC'
)
data = data[size:]
return dependencies


def get_fields(data: bytes) -> dict[str, str | int | datetime.datetime | None]:
    """Parse the data for a frozen dependency and return a dict of its
    fields keyed by their 4-character type tag."""
    fields: dict[str, str | int | datetime.datetime | None] = {}
    # Each field is at least 12 bytes: 4-byte tag + 4-byte size + payload.
    while len(data) >= 12:
        tag = data[:4].decode("ascii")
        # The size counts the 8-byte header, so the payload ends at `size`.
        size = int.from_bytes(data[4:8], byteorder="big")
        fields[tag] = parse_field_data(tag, data[8:size])
        data = data[size:]
    return fields


def parse_field_data(
    field_type: str, data: bytes
) -> Optional[str | int | datetime.datetime]:
    """Parse the data of a field. Depending on the field type, returns its data as the correct type"""
    if field_type in ("type", "fnam"):
        # String fields are stored zero-padded to their field size.
        return remove_trailing_zeros(data).decode("ascii")
    if field_type in ("sz32", "of32", "vers", "flag"):
        # Numeric fields are stored as big-endian integers.
        return int.from_bytes(data, byteorder="big")
    if field_type == "mdat":
        # Modification dates use the HFS+ epoch (seconds since 1904).
        return get_hfs_date(data)
    # Unknown field types carry no parsed value.
    return None


def remove_trailing_zeros(data: bytes) -> bytes:
    """Drop the NUL padding bytes from the tail of a padded string buffer."""
    end = len(data)
    while end > 0 and data[end - 1] == 0:
        end -= 1
    return data[:end]


def get_hfs_date(data: bytes) -> datetime.datetime:
"""Converts a byte sequence that represents a HFS+ date to a Python datetime object"""
seconds_offset_from_unix = 2082844800 # Mac HFS+ is time since 1 Jan 1904 while Unix time is since 1 Jan 1970
seconds_in_hfs_plus = int.from_bytes(data, byteorder="big")
return datetime.datetime.fromtimestamp(
seconds_in_hfs_plus - seconds_offset_from_unix, datetime.UTC
)
description = f'{name_field}: {size_field} bytes, modified at {date_field.strftime("%Y/%m/%d %T")} UTC'
entry_data = data[offset_field : offset_field + size_field]
entry: device_entry_with_data = {
"file_name": name_field,
"description": description,
"type": type_field,
"data": entry_data,
}
entries.append(entry)
return entries
Loading

0 comments on commit 9e97cc4

Please sign in to comment.