Skip to content

Commit

Permalink
Merge pull request #527 from singnet/validate-org-metadata
Browse files Browse the repository at this point in the history
Added validation of the organization metadata file
  • Loading branch information
kiruxaspb authored Nov 13, 2024
2 parents 93f2865 + ec99dde commit dd2faaf
Show file tree
Hide file tree
Showing 6 changed files with 311 additions and 99 deletions.
1 change: 0 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ mnemonic==0.20
pycoin==0.92.20230326
pyyaml==6.0.1
ipfshttpclient==0.4.13.2
rfc3986==2.0.0
pymultihash==0.8.2
base58==2.1.1
argcomplete==3.1.2
Expand Down
9 changes: 6 additions & 3 deletions snet/cli/arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@
import os
import re
import sys
from email.policy import default
from random import choices

from snet.contracts import get_all_abi_contract_files, get_contract_def

Expand Down Expand Up @@ -255,7 +253,7 @@ def add_p_org_id(p):
def add_metadatafile_argument_for_org(p):
    """Attach the ``--metadata-file`` option used by organization commands.

    :param p: argparse parser (or subparser) to extend.

    Defaults to ``organization_metadata.json`` in the current directory.
    """
    # NOTE: this is organization (not service) metadata — the help text was
    # corrected accordingly in PR #527.
    p.add_argument("--metadata-file",
                   default="organization_metadata.json",
                   help="Organization metadata json file (default organization_metadata.json)")


def add_p_storage_param(_p):
Expand Down Expand Up @@ -435,6 +433,11 @@ def add_organization_options(parser):
add_p_org_id(p)
add_organization_arguments(p)

p = subparsers.add_parser("validate-metadata",
help="Validates if created metadata is consistent")
p.set_defaults(fn="metadata_validate")
add_metadatafile_argument_for_org(p)


def add_contract_function_options(parser, contract_name):
add_contract_identity_arguments(parser)
Expand Down
129 changes: 107 additions & 22 deletions snet/cli/commands/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,13 @@
import json
import secrets
import sys
from pathlib import Path
from textwrap import indent
from urllib.parse import urljoin

import ipfshttpclient
import jsonschema
from lighthouseweb3 import Lighthouse
import yaml
from rfc3986 import urlparse
import web3
from snet.contracts import get_contract_def

Expand Down Expand Up @@ -480,7 +480,7 @@ def initialize_metadata(self):

def print_metadata(self):
    """Fetch this organization's metadata from the registry and pretty-print it.

    Passes check_url=False so metadata containing malformed endpoint URLs can
    still be displayed instead of raising during parsing.
    """
    org_id = self.args.org_id
    org_metadata = self._get_organization_metadata_from_registry(org_id, False)
    self._printout(org_metadata.get_json_pretty())

def _get_organization_registration(self, org_id):
Expand All @@ -492,15 +492,15 @@ def _get_organization_registration(self, org_id):
self.args.org_id))
return {"orgMetadataURI": rez[2]}

def _get_organization_metadata_from_registry(self, org_id, check_url=True):
    """Load an organization's metadata via its on-chain registry record.

    Resolves the registered metadata URI to IPFS or Filecoin storage,
    downloads the JSON payload and parses it.

    :param org_id: organization identifier registered on chain.
    :param check_url: when True, endpoint URLs inside the metadata are
        validated during parsing and the first invalid one raises Exception.
    :return: OrganizationMetadata instance.
    """
    rez = self._get_organization_registration(org_id)
    storage_type, metadata_hash = bytesuri_to_hash(rez["orgMetadataURI"])
    if storage_type == "ipfs":
        metadata = get_from_ipfs_and_checkhash(self._get_ipfs_client(), metadata_hash)
    else:
        # Anything that is not IPFS is stored on Filecoin (via Lighthouse).
        metadata = get_file_from_filecoin(metadata_hash)
    metadata = metadata.decode("utf-8")
    return OrganizationMetadata.from_json(json.loads(metadata), check_url)

def _get_organization_by_id(self, org_id):
org_id_bytes32 = type_converter("bytes32")(org_id)
Expand Down Expand Up @@ -573,20 +573,107 @@ def info(self):
for idx, service in enumerate(serviceNames):
self._printout(" - {}".format(bytes32_to_str(service)))

def metadata_validate(self):
    """CLI handler for `organization validate-metadata`.

    Runs the full validation but prints the report instead of raising, so
    the user gets a readable summary of every problem found.
    """
    self._metadata_validate(as_exception=False)
def _metadata_validate(self, as_exception=True):
    """Validate the organization metadata file and report every problem.

    Runs JSON-schema validation first, then semantic checks (duplicate group
    names, endpoint URL syntax), and composes a single numbered report.

    :param as_exception: when True (publish flows) any problem raises
        Exception; when False (validate-metadata command) the report is
        printed instead.
    """
    validation_res = self._metadata_validate_with_schema()
    if validation_res["status"] == 2:
        # File missing/unreadable: nothing further can be checked.
        if as_exception:
            raise Exception(validation_res["msg"])
        else:
            self._printout(validation_res["msg"])
            sys.exit(1)
    with open(self.args.metadata_file, 'r') as f:
        # check_url=False: endpoints are validated in the loop below so that
        # ALL bad endpoints are collected, instead of from_json raising on
        # the first invalid one.
        org_metadata = OrganizationMetadata.from_json(json.load(f), False)

    occurred_errors = []
    unique_group_names = set([group.group_name for group in org_metadata.groups])
    if len(unique_group_names) != len(org_metadata.groups):
        occurred_errors.append("There should be no groups with duplicated names in the metadata file.")

    for group in org_metadata.groups:
        for i, endpoint in enumerate(group.payment.payment_channel_storage_client.endpoints):
            if not is_valid_url(endpoint):
                occurred_errors.append(f"Invalid endpoint `{endpoint}` at index {i} in group `{group.group_name}`.")

    # Schema errors already numbered 1..n_errors; continue numbering from there.
    existing_errors = validation_res.get("n_errors", 0)

    docs = "https://dev.singularitynet.io/docs/products/DecentralizedAIPlatform/CLI/Manual/Organization/"
    hint_message = f"\nVisit {docs} for more information."
    hint_message = f"\n{len(occurred_errors) + existing_errors} errors found." + hint_message + "\n"

    res_msg = ""
    for i in range(len(occurred_errors)):
        res_msg += str(existing_errors + i + 1) + ". " + occurred_errors[i] + "\n"

    if res_msg:
        if validation_res["status"] == 0:
            res_msg = "\nErrors found in the metadata file:\n" + res_msg
        else:
            # Prepend the schema report; was `res_msg += validation_res["msg"] + res_msg`,
            # which duplicated the semantic errors in the output.
            res_msg = validation_res["msg"] + res_msg
        res_msg += hint_message
    elif validation_res["status"] == 0:
        res_msg = validation_res["msg"]
    else:
        res_msg = validation_res["msg"] + hint_message

    if as_exception and not res_msg.startswith("Organization metadata is valid and ready to publish."):
        raise Exception(res_msg)
    elif not as_exception:
        self._printout(res_msg)

def _metadata_validate_with_schema(self):
    """Validate the metadata file against the bundled JSON schema.

    :return: dict with keys:
        status -- 0 = valid, 1 = schema violations found, 2 = file unreadable
        msg -- human-readable report
        n_errors -- (status 1 only) number of schema violations
    """
    current_path = Path(__file__).parent
    relative_path = '../resources/org_schema.json'
    path_to_schema = (current_path / relative_path).resolve()
    with open(path_to_schema, 'r') as f:
        schema = json.load(f)

    metadata_file = self.args.metadata_file
    try:
        with open(metadata_file, 'r') as f:
            metadata_dict = json.load(f)
    except Exception:
        # NOTE(review): this also swallows JSON parse errors, reporting them
        # as "file not found" — consider distinguishing the two cases.
        return {"status": 2, "msg": "Organization metadata json file not found, please check --metadata-file path"}

    validator = jsonschema.Draft7Validator(schema)
    occurred_errors = list(validator.iter_errors(metadata_dict))

    def get_path(err):
        # Renders the error location, e.g. `groups` -> `0` -> `payment`.
        return " -> ".join(f"`{el}`" for el in err.path)

    if len(occurred_errors) > 0:
        res_msg = "\nErrors found in the metadata file:\n"
        for i, e in enumerate(occurred_errors):
            res_msg += str(i + 1) + ". "
            if e.validator == 'additionalProperties':
                if len(e.path) != 0:
                    res_msg += f"{e.message} in {get_path(e)}."
                else:
                    res_msg += f"{e.message} in main object."
            elif e.validator in ['required', 'type', 'enum', 'pattern', 'minLength', 'minItems']:
                res_msg += f"{get_path(e)} - {e.message}"
                if e.validator == 'minItems':
                    res_msg += " (minimum 1 item required)"
            else:
                res_msg += e.message
            res_msg += "\n"

        return {"status": 1, "msg": res_msg, "n_errors": len(occurred_errors)}
    else:
        return {"status": 0, "msg": "Organization metadata is valid and ready to publish."}

def create(self):

self._metadata_validate()

metadata_file = self.args.metadata_file
with open(metadata_file, 'r') as f:
org_metadata = OrganizationMetadata.from_json(json.load(f))
org_metadata.check_remove_groups()

org_id = self.args.org_id
# validate the metadata before creating
org_metadata.validate()

# R Check if Organization already exists
found = self._get_organization_by_id(org_id)[0]
Expand Down Expand Up @@ -630,19 +717,17 @@ def delete(self):
raise

def update_metadata(self):
metadata_file = self.args.metadata_file

try:
with open(metadata_file, 'r') as f:
org_metadata = OrganizationMetadata.from_json(json.load(f))
except Exception as e:
print("Organization metadata JSON file not found. Please check --metadata-file path.")
raise e
self._metadata_validate()

metadata_file = self.args.metadata_file
with open(metadata_file, 'r') as f:
org_metadata = OrganizationMetadata.from_json(json.load(f))
org_metadata.check_remove_groups()

# Validate the metadata before updating
org_id = self.args.org_id
existing_registry_org_metadata = self._get_organization_metadata_from_registry(org_id)
org_metadata.validate(existing_registry_org_metadata)
org_metadata.check_remove_groups(existing_registry_org_metadata)

# Check if Organization already exists
found = self._get_organization_by_id(org_id)[0]
Expand Down
90 changes: 18 additions & 72 deletions snet/cli/metadata/organization.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,20 +30,15 @@ def add_payment_storage_client_details(self, connection_time_out, request_timeou
self.endpoints = endpoints

@classmethod
def from_json(cls, json_data: dict, check_url=True):
    """Build a PaymentStorageClient from its JSON dict.

    :param json_data: keys must match the constructor parameters.
    :param check_url: when True, every configured endpoint must be a valid
        URL and the first invalid one raises Exception; pass False to defer
        endpoint validation to the caller.
    """
    if check_url:
        endpoints = json_data["endpoints"]
        if endpoints:
            for endpoint in endpoints:
                if not is_valid_url(endpoint):
                    raise Exception("Invalid endpoint passed in json file")
    return cls(**json_data)

def validate(self):
if len(self.endpoints) < 1:
raise Exception(
"At least one endpoint is required for payment channel ")


class Payment(object):

def __init__(self, payment_address="", payment_expiration_threshold="", payment_channel_storage_type="",
Expand All @@ -54,25 +49,12 @@ def __init__(self, payment_address="", payment_expiration_threshold="", payment_
self.payment_channel_storage_client = payment_channel_storage_client

@classmethod
def from_json(cls, json_data: dict, check_url=True):
    """Build a Payment from its JSON dict.

    Endpoint URL validation is delegated to PaymentStorageClient.from_json
    via ``check_url``.
    """
    payment_channel_storage_client = PaymentStorageClient.from_json(
        json_data['payment_channel_storage_client'], check_url)
    return cls(json_data['payment_address'], json_data['payment_expiration_threshold'],
               json_data['payment_channel_storage_type'], payment_channel_storage_client)

def validate(self):
if self.payment_address is None:
raise Exception("Payment address cannot be null")
if self.payment_channel_storage_type is None:
raise Exception("Payment channel storage type cannot be null")
if self.payment_expiration_threshold is None:
raise Exception("Payment expiration threshold cannot be null")

if self.payment_channel_storage_client is None:
raise Exception("Payment channel storage client cannot be null")
else:
self.payment_channel_storage_client.validate()

def update_connection_timeout(self, connection_timeout):
self.payment_channel_storage_client.connection_timeout = connection_timeout

Expand All @@ -91,29 +73,17 @@ def __init__(self, group_name="", group_id="", payment=Payment()):
self.payment = payment

@classmethod
def from_json(cls, json_data: dict, check_url=True):
    """Build a Group from its JSON dict.

    The ``payment`` section is optional and defaults to an empty Payment();
    ``check_url`` is forwarded to Payment.from_json.
    """
    payment = Payment()
    if 'payment' in json_data:
        payment = Payment.from_json(json_data['payment'], check_url)
    return cls(json_data['group_name'], json_data['group_id'], payment)

def add_group_details(self, group_name, group_id, payment):
    """Populate this group's name, id and payment configuration in place."""
    self.group_name, self.group_id, self.payment = group_name, group_id, payment

def validate(self):
if self.group_name is None:
raise Exception("group name cannot be null")
if self.group_id is None:
raise Exception("group_id is cannot be null")

if self.payment is None:
raise Exception(
"payment details cannot be null for group_name %s", self.group_name)
else:
self.payment.validate()

def update_payment_expiration_threshold(self, payment_expiration_threshold):
    """Overwrite the expiration threshold on this group's payment config."""
    target = self.payment
    target.payment_expiration_threshold = payment_expiration_threshold

Expand Down Expand Up @@ -219,10 +189,10 @@ def save_pretty(self, file_name):
f.write(self.get_json_pretty())

@classmethod
def from_json(cls, json_data: dict):
def from_json(cls, json_data: dict, check_url=True):
groups = []
if 'groups' in json_data:
groups = list(map(Group.from_json, json_data["groups"]))
groups = list(map(lambda j_d: Group.from_json(j_d, check_url), json_data["groups"]))
if "contacts" not in json_data:
json_data["contacts"] = []
if "description" not in json_data:
Expand Down Expand Up @@ -252,46 +222,22 @@ def from_file(cls, filepath):
raise e

def is_removing_existing_group_from_org(self, current_group_name, existing_registry_metadata_group_names):
    """Raise if the new metadata drops any group already registered on chain.

    :param current_group_name: set of group names in the new metadata.
    :param existing_registry_metadata_group_names: set of group names
        currently recorded in the registry.
    :raises Exception: listing the groups that would be removed.
    """
    # Guard clause replaces the original `if len(...) == 0: pass / else:`
    # inversion; behavior is identical.
    removed_groups = existing_registry_metadata_group_names - current_group_name
    if removed_groups:
        raise Exception("Cannot remove existing group from organization as it might be attached"
                        " to services, groups you are removing are %s" % removed_groups)

def validate(self, existing_registry_metadata=None):

if self.org_id is None:
raise Exception("Org_id cannot be null")
if self.org_name is None:
raise Exception("Org_name cannot be null")
if self.org_type is None:
raise Exception("Org_type cannot be null")
if self.contacts is None:
raise Exception("contact_details can not be null")
if self.description is None:
raise Exception("description can not be null")
if self.groups:
unique_group_names = set()
for group in self.groups:
unique_group_names.add(group.group_name)

if len(unique_group_names) < len(self.groups):
raise Exception("Cannot create group with duplicate names")
if len(self.groups) < 1:
raise Exception(
"At least One group is required to create an organization")
else:
for group in self.groups:
group.validate()

def check_remove_groups(self, existing_registry_metadata=None):
    """Ensure no group present in the registry is being removed.

    :param existing_registry_metadata: OrganizationMetadata currently on
        chain, or None for the create flow (nothing registered yet).
        The default makes the no-argument call in ``create()`` valid —
        without it that call raises TypeError.
    :raises Exception: if any registered group is absent from self.groups.
    """
    unique_group_names = set([group.group_name for group in self.groups])
    existing_registry_metadata_group_names = set()

    if existing_registry_metadata:
        for group in existing_registry_metadata.groups:
            existing_registry_metadata_group_names.add(group.group_name)

    self.is_removing_existing_group_from_org(unique_group_names, existing_registry_metadata_group_names)

def get_payment_address_for_group(self, group_name):
for group in self.groups:
Expand Down
Loading

0 comments on commit dd2faaf

Please sign in to comment.