diff --git a/.env.sample b/.env.sample
new file mode 100644
index 00000000..3d766e76
--- /dev/null
+++ b/.env.sample
@@ -0,0 +1,4 @@
+DUNE_API_KEY=
+AWS_ROLE=
+AWS_BUCKET=
+VOLUME_PATH=
\ No newline at end of file
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 00000000..f4b7573d
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,2 @@
+[MASTER]
+disable=fixme,logging-fstring-interpolation
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000..168c9cb7
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,9 @@
+FROM python:3.10
+
+WORKDIR /app
+
+COPY requirements/* requirements/
+RUN pip install -r requirements/prod.txt
+COPY ./src ./src
+
+ENTRYPOINT ["python3", "-m", "src.main"]
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..099fc2f2
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,30 @@
+VENV = venv
+PYTHON = $(VENV)/bin/python3
+PIP = $(VENV)/bin/pip
+PROJECT_ROOT = src
+
+
+$(VENV)/bin/activate: requirements/dev.txt
+	python3 -m venv $(VENV)
+	$(PIP) install --upgrade pip
+	$(PIP) install -r requirements/dev.txt
+
+
+install: $(VENV)/bin/activate
+
+clean:
+	rm -rf __pycache__
+
+fmt:
+	black ./
+
+lint:
+	pylint ${PROJECT_ROOT}/
+
+types:
+	mypy ${PROJECT_ROOT}/ --strict
+
+check: fmt lint types
+
+test:
+	$(PYTHON) -m pytest tests
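Note: the variables in .env.sample are exactly what src/main.py reads at startup. A filled-in `.env` for local testing might look like the following sketch; every value below is a placeholder, and `AWS_ROLE` names the boto3 profile that `get_s3_client` hands to `boto3.Session`:

```shell
DUNE_API_KEY=dqk_0000000000000000      # placeholder, not a real key
AWS_ROLE=dune-sync-uploader            # placeholder boto3 profile name
AWS_BUCKET=example-dune-sync-bucket    # placeholder bucket name
VOLUME_PATH=data                       # mounted at /app/data when run via Docker
```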
diff --git a/README.md b/README.md
index bb2a004c..b2016009 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,30 @@
 # dune-sync
 Components for syncing off-chain data with Dune Community Sources
+
+
+# Local Development
+
+
+1. Clone the repo
+2. Use the Makefile commands:
+```shell
+make install
+```
+```shell
+make check # (runs black, pylint and mypy --strict)
+```
+```shell
+make test # Runs all tests
+```
+
+## Docker
+### Build
+```shell
+docker build -t local_dune_sync .
+```
+
+You must provide valid environment variables as specified in [.env.sample](.env.sample).
+### Run
+```shell
+docker run -v ${PWD}/data:/app/data --env-file .env local_dune_sync
+```
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000..16150b7e
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,2 @@
+[mypy-src.sync]
+implicit_reexport = True
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 00000000..dcf75255
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,18 @@
+# Prod
+dune-client==0.2.2
+psycopg2-binary==2.9.3
+python-dotenv>=0.20.0
+requests>=2.28.1
+pandas==1.5.0
+ndjson>=0.3.1
+py-multiformats-cid>=0.4.3
+boto3==1.26.12
+
+
+# Dev
+pandas-stubs==1.5.1.221024
+boto3-stubs==1.26.12
+black==22.6.0
+mypy==0.982
+pylint==2.14.4
+pytest==7.1.2
\ No newline at end of file
diff --git a/requirements/dev.txt b/requirements/dev.txt
new file mode 100644
index 00000000..0da4cba2
--- /dev/null
+++ b/requirements/dev.txt
@@ -0,0 +1,7 @@
+-r prod.txt
+pandas-stubs==1.5.1.221024
+boto3-stubs==1.26.12
+black==22.6.0
+mypy==0.982
+pylint==2.14.4
+pytest==7.1.2
\ No newline at end of file
diff --git a/requirements/prod.txt b/requirements/prod.txt
new file mode 100644
index 00000000..bb414268
--- /dev/null
+++ b/requirements/prod.txt
@@ -0,0 +1,8 @@
+dune-client==0.2.2
+psycopg2-binary==2.9.3
+python-dotenv>=0.20.0
+requests>=2.28.1
+pandas==1.5.0
+ndjson>=0.3.1
+py-multiformats-cid>=0.4.3
+boto3==1.26.12
\ No newline at end of file
diff --git a/seed_data.zip b/seed_data.zip
new file mode 100644
index 00000000..33d7e1f3
Binary files /dev/null and b/seed_data.zip differ
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/dune_queries.py b/src/dune_queries.py
new file mode 100644
index 00000000..a484beff
--- /dev/null
+++ b/src/dune_queries.py
@@ -0,0 +1,44 @@
+"""
+Localized account of all Queries related to this project's main functionality
+"""
+from __future__ import annotations
+
+from copy import copy
+from dataclasses import dataclass
+
+from dune_client.query import Query
+from dune_client.types import QueryParameter
+
+
+@dataclass
+class QueryData:
+    """Stores the name, source filename and Query object for each query used here."""
+
+    name: str
+    query: Query
+
+    def __init__(self, name: str, query_id: int, filename: str) -> None:
+        self.name = name
+        self.filepath = filename
+        self.query = Query(query_id, name)
+
+    def with_params(self, params: list[QueryParameter]) -> Query:
+        """
+        Copies the query and adds parameters to it, returning the copy.
+        """
+        # We currently default to the V1 Queries, soon to switch them out.
+        query_copy = copy(self.query)
+        query_copy.params = params
+        return query_copy
+
+
+QUERIES = {
+    "APP_HASHES": QueryData(
+        query_id=1610025, name="Unique App Hashes", filename="app_hashes.sql"
+    ),
+    "LATEST_APP_HASH_BLOCK": QueryData(
+        query_id=1615490,
+        name="Latest Possible App Hash Block",
+        filename="app_hash_latest_block.sql",
+    ),
+}
diff --git a/src/environment.py b/src/environment.py
new file mode 100644
index 00000000..22692adc
--- /dev/null
+++ b/src/environment.py
@@ -0,0 +1,7 @@
+"""
+Collection of runtime constants
+"""
+from pathlib import Path
+
+PROJECT_ROOT = Path(__file__).parent.parent
+OUT_DIR = PROJECT_ROOT / Path("data")
diff --git a/src/fetch/__init__.py b/src/fetch/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/fetch/dune.py b/src/fetch/dune.py
new file mode 100644
index 00000000..874d1ccb
--- /dev/null
+++ b/src/fetch/dune.py
@@ -0,0 +1,87 @@
+"""
+All Dune Query executions should be routed through this file.
+TODO - Move reusable components into dune-client:
+  https://github.com/cowprotocol/dune-bridge/issues/40
+"""
+import asyncio
+import logging
+import sys
+
+from requests import HTTPError
+
+from dune_client.client import DuneClient
+from dune_client.query import Query
+from dune_client.types import DuneRecord
+
+from src.dune_queries import QUERIES
+from src.models.block_range import BlockRange
+
+
+class DuneFetcher:
+    """
+    Class containing a DuneClient and a logger for convenient Dune fetching.
+    """
+
+    def __init__(
+        self,
+        api_key: str,
+    ) -> None:
+        """
+        Class constructor.
+        Builds a DuneClient from `api_key` along with a logger.
+        """
+        # It's a bit weird that the DuneClient also declares a log like this,
+        # but it also doesn't make sense to inherit that log. Not sure what's best practice here.
+        self.log = logging.getLogger(__name__)
+        logging.basicConfig(format="%(asctime)s %(levelname)s %(name)s %(message)s")
+        # TODO - use runtime parameter. https://github.com/cowprotocol/dune-bridge/issues/41
+        self.log.setLevel(logging.DEBUG)
+        self.dune = DuneClient(api_key)
+
+    async def fetch(self, query: Query) -> list[DuneRecord]:
+        """Async Dune fetcher with some exception handling."""
+        self.log.debug(f"Executing {query}")
+
+        try:
+            # Tried to use the AsyncDuneClient, without success:
+            # https://github.com/cowprotocol/dune-client/pull/31#issuecomment-1316045313
+            response = await asyncio.to_thread(
+                self.dune.refresh, query, ping_frequency=10
+            )
+            if response.state.is_complete():
+                response_rows = response.get_rows()
+                self.log.debug(
+                    f"Got {len(response_rows)} results for execution {response.execution_id}"
+                )
+                return response_rows
+
+            message = (
+                f"query execution {response.execution_id} incomplete {response.state}"
+            )
+            self.log.error(message)
+            raise RuntimeError(f"no results for {message}")
+        except HTTPError as err:
+            self.log.error(f"Got {err} - Exiting")
+            sys.exit(1)
+
+    async def latest_app_hash_block(self) -> int:
+        """
+        Fetches the latest indexed block of "GPv2Settlement_call_settle" from Dune.
+        The app hash fetcher uses this as the upper bound of the next block range.
+        """
+        return int(
+            # KeyError here means the query has been modified and the column no longer exists
+            # IndexError means no results were returned from the query (which is unlikely)
+            (await self.fetch(QUERIES["LATEST_APP_HASH_BLOCK"].query))[0][
+                "latest_block"
+            ]
+        )
+
+    async def get_app_hashes(self, block_range: BlockRange) -> list[DuneRecord]:
+        """
+        Executes the APP_HASHES query for the given `block_range` and returns the results.
+        """
+        app_hash_query = QUERIES["APP_HASHES"].with_params(
+            block_range.as_query_params()
+        )
+        return await self.fetch(app_hash_query)
diff --git a/src/fetch/ipfs.py b/src/fetch/ipfs.py
new file mode 100644
index 00000000..7553a6c6
--- /dev/null
+++ b/src/fetch/ipfs.py
@@ -0,0 +1,52 @@
+"""IPFS CID (de)serialization"""
+from __future__ import annotations
+
+from typing import Any, Optional
+
+import requests
+from multiformats_cid.cid import from_bytes  # type: ignore
+
+
+class Cid:
+    """Holds logic for constructing and converting various representations of an IPFS CID"""
+
+    def __init__(self, hex_str: str) -> None:
+        """Builds Object (bytes as base representation) from hex string."""
+        stripped_hex = hex_str.replace("0x", "")
+        # Anatomy of a CID: https://proto.school/anatomy-of-a-cid/04
+        # 0x01 = CIDv1, 0x70 = dag-pb codec, 0x12 = sha2-256, 0x20 = 32-byte digest length
+        prefix = bytearray([1, 112, 18, 32])
+        self.bytes = bytes(prefix + bytes.fromhex(stripped_hex))
+
+    @property
+    def hex(self) -> str:
+        """Returns hex representation"""
+        without_prefix = self.bytes[4:]
+        return "0x" + without_prefix.hex()
+
+    def __str__(self) -> str:
+        """Returns string representation"""
+        return str(from_bytes(self.bytes))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Cid):
+            return False
+        return self.bytes == other.bytes
+
+    def url(self) -> str:
+        """IPFS URL where content can be recovered"""
+        return f"https://gnosis.mypinata.cloud/ipfs/{self}"
+
+    def get_content(self, max_retries: int = 3) -> Optional[Any]:
+        """
+        Attempts to fetch content at cid with a timeout of 1 second.
+        Tries `max_retries` times and otherwise returns None.
+        """
+        attempts = 0
+        while attempts < max_retries:
+            try:
+                response = requests.get(self.url(), timeout=1)
+                return response.json()
+            except requests.exceptions.ReadTimeout:
+                attempts += 1
+        return None
diff --git a/src/main.py b/src/main.py
new file mode 100644
index 00000000..241b6c40
--- /dev/null
+++ b/src/main.py
@@ -0,0 +1,29 @@
+"""Main Entry point for app_hash sync"""
+import asyncio
+import logging.config
+import os
+from pathlib import Path
+
+from dotenv import load_dotenv
+
+from src.sync import sync_app_data
+from src.fetch.dune import DuneFetcher
+from src.sync.config import AppDataSyncConfig
+
+log = logging.getLogger(__name__)
+logging.basicConfig(format="%(asctime)s %(levelname)s %(name)s %(message)s")
+log.setLevel(logging.DEBUG)
+
+
+if __name__ == "__main__":
+    load_dotenv()
+    asyncio.run(
+        sync_app_data(
+            dune=DuneFetcher(os.environ["DUNE_API_KEY"]),
+            config=AppDataSyncConfig(
+                aws_role=os.environ["AWS_ROLE"],
+                aws_bucket=os.environ["AWS_BUCKET"],
+                volume_path=Path(os.environ["VOLUME_PATH"]).absolute(),
+            ),
+        )
+    )
diff --git a/src/models/__init__.py b/src/models/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/models/block_range.py b/src/models/block_range.py
new file mode 100644
index 00000000..f2893402
--- /dev/null
+++ b/src/models/block_range.py
@@ -0,0 +1,28 @@
+"""
+BlockRange Model is just a data class for left and right bounds
+"""
+from dataclasses import dataclass
+
+from dune_client.types import QueryParameter
+
+
+@dataclass
+class BlockRange:
+    """
+    Basic dataclass for an Ethereum block range with some Dune compatibility methods.
+    TODO (easy) - this data class could probably live in dune-client.
+      https://github.com/cowprotocol/dune-bridge/issues/40
+    """
+
+    block_from: int
+    block_to: int
+
+    def __str__(self) -> str:
+        return f"({self.block_from}, {self.block_to})"
+
+    def as_query_params(self) -> list[QueryParameter]:
+        """Returns self as Dune QueryParameters"""
+        return [
+            QueryParameter.number_type("BlockFrom", self.block_from),
+            QueryParameter.number_type("BlockTo", self.block_to),
+        ]
diff --git a/src/post/__init__.py b/src/post/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/post/aws.py b/src/post/aws.py
new file mode 100644
index 00000000..58804475
--- /dev/null
+++ b/src/post/aws.py
@@ -0,0 +1,58 @@
+"""Aws S3 Bucket functionality (namely upload_file)"""
+import logging
+
+import boto3
+from boto3.exceptions import S3UploadFailedError
+from boto3.s3.transfer import S3Transfer
+
+
+def upload_file(
+    s3_client: S3Transfer, file_name: str, bucket: str, object_key: str
+) -> bool:
+    """Upload a file to an S3 bucket
+
+    :param s3_client: S3Transfer object with `upload_file` method
+    :param file_name: File to upload. Should be a full path to file.
+    :param bucket: Bucket to upload to
+    :param object_key: S3 object key. For our purposes, this would
+        be f"{table_name}/cow_{latest_block_number}.json"
+    :return: True if file was uploaded, else False
+    """
+
+    try:
+        s3_client.upload_file(
+            filename=file_name,
+            bucket=bucket,
+            key=object_key,
+            extra_args={"ACL": "bucket-owner-full-control"},
+        )
+        logging.info(f"successfully uploaded {file_name} to {bucket}")
+        return True
+    except S3UploadFailedError as err:
+        logging.error(err)
+        return False
+
+
+def get_s3_client(profile: str) -> S3Transfer:
+    """Constructs a client session for S3 Bucket upload."""
+    # Profile-based credentials, as suggested here:
+    # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#aws-iam-identity-center
+    session = boto3.Session(profile_name=profile)
+    return S3Transfer(session.client("s3"))
+
+    # First attempt:
+    # return boto3.client(
+    #     "s3",
+    #     aws_access_key_id=os.environ["AWS_ACCESS_KEY"],
+    #     aws_secret_access_key=os.environ["AWS_SECRET_KEY"],
+    # )
+
+    # This is how flashbots instantiates a connection:
+    # https://github.com/flashbots/mev-inspect-py/blob/d917ae72ded847af9cbdda0e87a1f38f94f4cb55/mev_inspect/s3_export.py#L103-L111
+    # return boto3.client(
+    #     "s3",
+    #     endpoint_url=os.environ[AWS_ENDPOINT],
+    #     region_name=os.environ[AWS_REGION],
+    #     aws_access_key_id=os.environ.get(AWS_ACCESS_KEY),
+    #     aws_secret_access_key=os.environ.get(AWS_SECRET_KEY),
+    # )
diff --git a/src/sql/app_hash_latest_block.sql b/src/sql/app_hash_latest_block.sql
new file mode 100644
index 00000000..571fce55
--- /dev/null
+++ b/src/sql/app_hash_latest_block.sql
@@ -0,0 +1,4 @@
+-- https://dune.com/queries/1615490
+select
+    max(call_block_number) as latest_block
+from gnosis_protocol_v2_ethereum.GPv2Settlement_call_settle
\ No newline at end of file
diff --git a/src/sql/app_hashes.sql b/src/sql/app_hashes.sql
new file mode 100644
index 00000000..daef7452
--- /dev/null
+++ b/src/sql/app_hashes.sql
@@ -0,0 +1,21 @@
+-- App Hashes: https://dune.com/queries/1610025
+-- MIN(first_block_seen) = 12153263
+-- Nov 16, 2022: Query takes 4 seconds to run on the full block range
+with
+app_hashes as (
+    select
+        min(call_block_number) as first_seen_block,
+        get_json_object(trade, '$.appData') as app_hash
+    from gnosis_protocol_v2_ethereum.GPv2Settlement_call_settle
+        lateral view explode(trades) as trade
+    group by app_hash
+)
+select
+    app_hash,
+    first_seen_block
+from app_hashes
+where first_seen_block > '{{BlockFrom}}'
+and first_seen_block <= '{{BlockTo}}'
+
+-- For some additional stats
+-- on this data, see https://dune.com/queries/1608286
\ No newline at end of file
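Note: read together with the SQL above, the fetch path composes as follows. This is a minimal sketch, not part of the diff: the `demo` wrapper is hypothetical, while the hard-coded lower bound mirrors the default used in src/sync/app_data.py.

```python
import asyncio
import os

from src.fetch.dune import DuneFetcher
from src.models.block_range import BlockRange


async def demo() -> None:
    # BlockFrom is exclusive in app_hashes.sql, so start one block
    # before the first recorded app hash (12153263).
    fetcher = DuneFetcher(api_key=os.environ["DUNE_API_KEY"])
    block_range = BlockRange(
        block_from=12153262,
        block_to=await fetcher.latest_app_hash_block(),
    )
    records = await fetcher.get_app_hashes(block_range)
    print(f"fetched {len(records)} app hash records in {block_range}")


asyncio.run(demo())
```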
diff --git a/src/sync/__init__.py b/src/sync/__init__.py
new file mode 100644
index 00000000..37b723b3
--- /dev/null
+++ b/src/sync/__init__.py
@@ -0,0 +1,2 @@
+"""Re-exported sync methods."""
+from .app_data import sync_app_data
diff --git a/src/sync/app_data.py b/src/sync/app_data.py
new file mode 100644
index 00000000..69b73ea4
--- /dev/null
+++ b/src/sync/app_data.py
@@ -0,0 +1,223 @@
+"""Sync logic for the app_data (app hash) job"""
+import json
+import logging.config
+import os.path
+
+from dune_client.file.interface import FileIO
+from dune_client.types import DuneRecord
+
+from src.fetch.dune import DuneFetcher
+from src.fetch.ipfs import Cid
+from src.models.block_range import BlockRange
+from src.post.aws import upload_file, get_s3_client
+from src.sync.config import AppDataSyncConfig
+
+log = logging.getLogger(__name__)
+logging.basicConfig(format="%(asctime)s %(levelname)s %(name)s %(message)s")
+log.setLevel(logging.DEBUG)
+
+MAX_RETRIES = 3
+GIVE_UP_THRESHOLD = 10
+
+
+class RecordHandler:
+    """
+    This class is responsible for consuming new dune records and missing values from previous runs.
+    It attempts to fetch content for them and filters them into "found" and "not found" as necessary.
+    """
+
+    def __init__(
+        self,
+        new_rows: list[DuneRecord],
+        missing_values: list[DuneRecord],
+        block_range: BlockRange,
+        config: AppDataSyncConfig,
+    ):
+        self.config = config
+        self.block_range = block_range
+
+        self.found: list[dict[str, str]] = []
+        self.not_found: list[dict[str, str]] = []
+
+        self.new_rows = new_rows
+        self.missing_values = missing_values
+
+    def _handle_new_records(self, max_retries: int) -> None:
+        # Drain the dune_results into "found" and "not found" categories
+        while self.new_rows:
+            row = self.new_rows.pop()
+            app_hash = row["app_hash"]
+            cid = Cid(app_hash)
+            app_data = cid.get_content(max_retries)
+
+            # Here it would be nice if python were more like rust!
+            if app_data is not None:
+                # Row is modified and added to found items
+                log.debug(f"Found content for {app_hash} at CID {cid}")
+                row["content"] = app_data
+                self.found.append(row)
+            else:
+                # Unmodified row added to not_found items
+                log.debug(
+                    f"No content found for {app_hash} at CID {cid} after {max_retries} retries"
+                )
+                # Dune Records are string dicts.... :(
+                row["attempts"] = str(max_retries)
+                self.not_found.append(row)
+
+    def _handle_missing_records(self, max_retries: int) -> None:
+        while self.missing_values:
+            row = self.missing_values.pop()
+            app_hash = row["app_hash"]
+            cid = Cid(app_hash)
+            app_data = cid.get_content(max_retries)
+            attempts = int(row["attempts"]) + max_retries
+
+            if app_data is not None:
+                log.debug(
+                    f"Found previously missing content hash {row['app_hash']} at CID {cid}"
+                )
+                self.found.append(
+                    {
+                        "app_hash": app_hash,
+                        "first_seen_block": row["first_seen_block"],
+                        "content": app_data,
+                    }
+                )
+            elif attempts > GIVE_UP_THRESHOLD:
+                log.debug(
+                    f"No content found after {attempts} attempts for {app_hash}; assuming NULL."
+                )
+                self.found.append(
+                    {
+                        "app_hash": app_hash,
+                        "first_seen_block": row["first_seen_block"],
+                        "content": json.dumps({}),
+                    }
+                )
+            else:
+                log.debug(
+                    f"Still no content found for {app_hash} at CID {cid} after {attempts} attempts"
+                )
+                row.update({"attempts": str(attempts)})
+                self.not_found.append(row)
+
+    def fetch_content_and_filter(
+        self, max_retries: int
+    ) -> tuple[list[DuneRecord], list[DuneRecord]]:
+        """
+        Run loop fetching app_data for hashes,
+        separates into (found and not found), returning the pair.
+        """
+        self._handle_new_records(max_retries)
+        log.info(
+            f"Attempting to recover {len(self.missing_values)} missing records from previous run"
+        )
+        self._handle_missing_records(max_retries)
+        return self.found, self.not_found
+
+    def write_to_disk(self, file_manager: FileIO, filename: str) -> None:
+        """
+        Does all appropriate file writes for a single run of the app data sync job:
+        writes new records, missing records and the last sync block.
+        """
+        # Write the most recent data and also record the block_from,
+        # so that next run will know where to start
+        file_manager.write_ndjson(data=self.found, name=filename)
+        # When not_found is empty, we want to overwrite the file (hence skip_empty=False)
+        # This happens when all records in the file have attempts exceeding GIVE_UP_THRESHOLD
+        file_manager.write_ndjson(
+            self.not_found, self.config.missing_files_name, skip_empty=False
+        )
+        # Write last sync block only after the data has been written.
+        file_manager.write_csv(
+            data=[{self.config.sync_column: str(self.block_range.block_to)}],
+            name=self.config.sync_file,
+        )
+
+
+async def get_block_range(
+    file_manager: FileIO, dune: DuneFetcher, last_block_file: str, column: str
+) -> BlockRange:
+    """
+    Constructs a block range object.
+    block_from is read from the last sync block (via file_manager):
+     - uses a default start block if no file exists (should only ever happen once)
+     - raises RuntimeError if the column specified does not exist.
+    block_to is fetched from Dune as the last indexed block for "GPv2Settlement_call_settle".
+    """
+
+    block_from = 12153262  # One block before the first app hash; BlockFrom is exclusive
+    try:
+        block_from = int(file_manager.load_singleton(last_block_file, "csv")[column])
+    except FileNotFoundError:
+        log.warning(
+            f"block range file {last_block_file} not found, using default start block {block_from}"
+        )
+    except KeyError as err:
+        message = (
+            f"block range file {last_block_file} does not contain column header {column}, "
+            f"exiting to avoid duplication"
+        )
+        log.error(message)
+        raise RuntimeError(message) from err
+
+    return BlockRange(
+        # TODO - could be replaced by Dune Query on the app_data table (once available).
+        #  https://github.com/cowprotocol/dune-bridge/issues/42
+        block_from,
+        block_to=await dune.latest_app_hash_block(),
+    )
+
+
+def get_missing_data(file_manager: FileIO, missing_fname: str) -> list[DuneRecord]:
+    """
+    Loads missing records from file (aka previous run) if there are any.
+    Otherwise, assumes there are none.
+    """
+    try:
+        return file_manager.load_ndjson(missing_fname)
+    except FileNotFoundError:
+        return []
+
+
+async def sync_app_data(dune: DuneFetcher, config: AppDataSyncConfig) -> None:
+    """App Data Sync Logic"""
+    log.info(f"Using configuration {config}")
+    # TODO - assert legit configuration before proceeding!
+    table_name = config.table_name
+    file_manager = FileIO(config.volume_path / table_name)
+    block_range = await get_block_range(
+        file_manager,
+        dune,
+        last_block_file=config.sync_file,
+        column=config.sync_column,
+    )
+
+    data_handler = RecordHandler(
+        new_rows=await dune.get_app_hashes(block_range),
+        missing_values=get_missing_data(
+            file_manager, missing_fname=config.missing_files_name
+        ),
+        block_range=block_range,
+        config=config,
+    )
+    found, not_found = data_handler.fetch_content_and_filter(MAX_RETRIES)
+
+    content_filename = f"cow_{block_range.block_to}.json"
+    data_handler.write_to_disk(file_manager, filename=content_filename)
+
+    if len(found) > 0:
+        success = upload_file(
+            s3_client=get_s3_client(profile=config.aws_role),
+            file_name=os.path.join(file_manager.path, content_filename),
+            bucket=config.aws_bucket,
+            object_key=f"{table_name}/{content_filename}",
+        )
+        if success:
+            log.info(
+                f"App Data Sync for block range {block_range} complete: "
+                f"synced {len(found)} records with {len(not_found)} missing"
+            )
+    else:
+        log.info(f"No new App Data for block range {block_range}: no sync necessary")
diff --git a/src/sync/config.py b/src/sync/config.py
new file mode 100644
index 00000000..1a3b4989
--- /dev/null
+++ b/src/sync/config.py
@@ -0,0 +1,20 @@
+"""Configuration details for sync jobs"""
+from dataclasses import dataclass
+from pathlib import Path
+
+
+@dataclass
+class AppDataSyncConfig:
+    """
+    This data class contains all the credentials and volume paths
+    required to sync with both a persistent volume and Dune's S3 Buckets.
+    """
+
+    aws_role: str
+    aws_bucket: str
+    volume_path: Path
+    table_name: str = "app_data"
+    # File System
+    missing_files_name: str = "missing_app_hashes.json"
+    sync_file: str = "sync_block.csv"
+    sync_column: str = "last_synced_block"
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/test_ipfs.py b/tests/test_ipfs.py
new file mode 100644
index 00000000..46e38785
--- /dev/null
+++ b/tests/test_ipfs.py
@@ -0,0 +1,74 @@
+import unittest
+
+from src.fetch.ipfs import Cid
+
+
+class TestIPFS(unittest.TestCase):
+    def test_cid_parsing(self):
+        self.assertEqual(
+            "bafybeib5q5w6r7gxbfutjhes24y65mcif7ugm7hmub2vsk4hqueb2yylti",
+            str(
+                Cid(
+                    "0x3d876de8fcd70969349c92d731eeb0482fe8667ceca075592b8785081d630b9a"
+                )
+            ),
+        )
+        self.assertEqual(
+            "bafybeia747cvkwz7tqkp67da3ehrl4nfwena3jnr5cvainmcugzocbmnbq",
+            str(
+                Cid(
+                    "0x1FE7C5555B3F9C14FF7C60D90F15F1A5B11A0DA5B1E8AA043582A1B2E1058D0C"
+                )
+            ),
+        )
+
+    def test_cid_constructor(self):
+        # works with or without 0x prefix:
+        hex_str = "0x3d876de8fcd70969349c92d731eeb0482fe8667ceca075592b8785081d630b9a"
+        self.assertEqual(Cid(hex_str), Cid(hex_str[2:]))
+        self.assertEqual(hex_str, Cid(hex_str).hex)
+
+    def test_no_content(self):
+        null_cid = Cid(
+            "0000000000000000000000000000000000000000000000000000000000000000"
+        )
+
+        self.assertEqual(None, null_cid.get_content())
+
+    def test_get_content(self):
+        self.assertEqual(
+            {
+                "version": "0.1.0",
+                "appCode": "CowSwap",
+                "metadata": {
+                    "referrer": {
+                        "version": "0.1.0",
+                        "address": "0x424a46612794dbb8000194937834250Dc723fFa5",
+                    }
+                },
+            },
+            Cid(
+                "3d876de8fcd70969349c92d731eeb0482fe8667ceca075592b8785081d630b9a"
+            ).get_content(),
+        )
+
+        self.assertEqual(
+            {
+                "version": "1.0.0",
+                "appCode": "CowSwap",
+                "metadata": {
+                    "referrer": {
+                        "kind": "referrer",
+                        "referrer": "0x8c35B7eE520277D14af5F6098835A584C337311b",
+                        "version": "1.0.0",
+                    }
+                },
+            },
+            Cid(
+                "1FE7C5555B3F9C14FF7C60D90F15F1A5B11A0DA5B1E8AA043582A1B2E1058D0C"
+            ).get_content(),
+        )
+
+
+if __name__ == "__main__":
+    unittest.main()