diff --git a/Products/Reportek/BdrAuthorizationMiddleware.py b/Products/Reportek/BdrAuthorizationMiddleware.py index 1595c951..32469d64 100644 --- a/Products/Reportek/BdrAuthorizationMiddleware.py +++ b/Products/Reportek/BdrAuthorizationMiddleware.py @@ -1,19 +1,16 @@ +import logging from time import time -from ZODB.PersistentMapping import PersistentMapping from OFS.SimpleItem import SimpleItem from plone.memoize import ram +from ZODB.PersistentMapping import PersistentMapping -import logging logger = logging.getLogger("Reportek") -__all__ = [ - 'BdrAuthorizationMiddleware' -] +__all__ = ["BdrAuthorizationMiddleware"] class BdrAuthorizationMiddleware(SimpleItem): - recheck_interval = 300 def __init__(self, url): @@ -23,40 +20,52 @@ def __init__(self, url): def setServiceRecheckInterval(self, seconds): self.recheck_interval = seconds - @ram.cache(lambda *args, **kwargs: args[2] + str(time() // kwargs['recheck_interval'])) # noqa - def getUserCollectionPaths(self, username, - recheck_interval=recheck_interval): + @ram.cache( + lambda *args, **kwargs: args[2] + + str(time() // kwargs["recheck_interval"]) + ) + def getUserCollectionPaths( + self, username, userdata=None, recheck_interval=recheck_interval + ): logger.debug( - "Get companies from middleware for ecas user: %s" % username) - accessiblePaths = self.FGASRegistryAPI.getCollectionPaths(username) + "Get companies from middleware for ecas user: %s" % username + ) + accessiblePaths = self.FGASRegistryAPI.getCollectionPaths( + username, userdata=userdata + ) return accessiblePaths - def authorizedUser(self, username, path): + def authorizedUser(self, username, path, userdata=None): if self.lockedCollection(path): logger.warning("This collection is locked down: %s!" % path) return False accessiblePaths = self.getUserCollectionPaths( - username, recheck_interval=self.recheck_interval) - if path in accessiblePaths.get('paths'): + username, userdata=userdata, recheck_interval=self.recheck_interval + ) + if path in accessiblePaths.get("paths"): return "RW" - if path in accessiblePaths.get('prev_paths'): + if path in accessiblePaths.get("prev_paths"): return "RO" def lockDownCollection(self, path, user): if path not in self.lockedDownCollections: self.lockedDownCollections[path] = None - self.lockedDownCollections[path] = {'state': 'locked', - 'ts': time(), - 'user': user} + self.lockedDownCollections[path] = { + "state": "locked", + "ts": time(), + "user": user, + } def unlockCollection(self, path, user): if path not in self.lockedDownCollections: # log unlock without lock self.lockedDownCollections[path] = None - self.lockedDownCollections[path] = {'state': 'unlocked', - 'ts': time(), - 'user': user} + self.lockedDownCollections[path] = { + "state": "unlocked", + "ts": time(), + "user": user, + } def lockedCollection(self, path): lockedItem = self.lockedDownCollections.get(path) - return lockedItem and lockedItem['state'] == 'locked' + return lockedItem and lockedItem["state"] == "locked" diff --git a/Products/Reportek/RegistryManagement.py b/Products/Reportek/RegistryManagement.py index bb3aa9cb..adba44b1 100644 --- a/Products/Reportek/RegistryManagement.py +++ b/Products/Reportek/RegistryManagement.py @@ -1,22 +1,22 @@ import logging from time import time -import Products import requests from AccessControl import ClassSecurityInfo from interfaces import IRegistryManagement from OFS.Folder import Folder from OFS.SimpleItem import SimpleItem from plone.memoize import ram -from Products.Reportek.constants import DF_URL_PREFIX from 
zope.interface import implementer +import Products +from Products.Reportek.constants import DF_URL_PREFIX + logger = logging.getLogger("Reportek") @implementer(IRegistryManagement) class RegistryManagement(Folder): - security = ClassSecurityInfo() def __init__(self, id, title): @@ -24,12 +24,11 @@ def __init__(self, id, title): self.title = title def all_meta_types(self): - types = ['Script (Python)', 'Folder', 'Page Template'] - return [t for t in Products.meta_types if t['name'] in types] + types = ["Script (Python)", "Folder", "Page Template"] + return [t for t in Products.meta_types if t["name"] in types] class BaseRegistryAPI(SimpleItem): - TIMEOUT = 120 def __init__(self, registry_name, url, token=None): @@ -41,28 +40,45 @@ def set_base_url(self, url, token=None): self.baseUrl = url self.token = token - def do_api_request(self, url, method='get', data=None, files=None, - cookies=None, headers=None, params=None, timeout=None, - raw=None): + def do_api_request( + self, + url, + method="get", + data=None, + files=None, + cookies=None, + headers=None, + params=None, + timeout=None, + raw=None, + ): api_req = requests.get - if method == 'post': + if method == "post": api_req = requests.post if not timeout: timeout = self.TIMEOUT try: - response = api_req(url, data=data, files=files, cookies=cookies, - headers=headers, params=params, verify=False, - timeout=timeout) + response = api_req( + url, + data=data, + files=files, + cookies=cookies, + headers=headers, + params=params, + verify=False, + timeout=timeout, + ) except Exception as e: logger.warning("Error contacting SatelliteRegistry (%s)" % str(e)) return None if response.status_code != requests.codes.ok: logger.warning( - '''Retrieved a %s status code when contacting ''' - '''SatelliteRegistry's url: %s ''' % (response.status_code, - url)) + """Retrieved a %s status code when contacting """ + """SatelliteRegistry's url: %s """ + % (response.status_code, url) + ) if raw: return response return None @@ -71,167 +87,212 @@ def do_api_request(self, url, method='get', data=None, files=None, class FGASRegistryAPI(BaseRegistryAPI): - DOMAIN_TO_OBLIGATION_FOLDER = { - 'FGAS': 'fgases', - 'ODS': 'ods' - } + DOMAIN_TO_OBLIGATION_FOLDER = {"FGAS": "fgases", "ODS": "ods"} # TODO: obtain those dynamically rather than hardcode them here DOMAIN_TO_OBLIGATION = { - 'FGAS': DF_URL_PREFIX + '713', - 'ODS': DF_URL_PREFIX + '213', + "FGAS": DF_URL_PREFIX + "713", + "ODS": DF_URL_PREFIX + "213", } COUNTRY_TO_FOLDER = { - 'uk': 'gb', - 'uk_gb': 'gb', - 'el': 'gr', - 'non_eu': 'non-eu' + "uk": "gb", + "uk_gb": "gb", + "el": "gr", + "non_eu": "non-eu", } - def get_registry_companies(self, domain='FGAS', detailed=False): - page = 'list-small' + def get_registry_companies(self, domain="FGAS", detailed=False): + page = "list-small" if detailed: - page = 'list' - url_prefix = '/'.join([self.baseUrl, 'undertaking', domain]) - url = '/'.join([url_prefix, page]) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + page = "list" + url_prefix = "/".join([self.baseUrl, "undertaking", domain]) + url = "/".join([url_prefix, page]) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() def get_stocks(self): - page = 'stocks' - url = '/'.join([self.baseUrl, page]) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + page = "stocks" + url = "/".join([self.baseUrl, page]) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if 
response: return response.json() def get_stocks_dummy(self): - page = 'stocks/import' - url = '/'.join([self.baseUrl, page]) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + page = "stocks/import" + url = "/".join([self.baseUrl, page]) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() - def get_company_details(self, company_id, domain='FGAS'): - url = '/'.join([self.baseUrl, 'undertaking', domain, company_id, - 'details']) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + def get_company_details(self, company_id, domain="FGAS"): + url = "/".join( + [self.baseUrl, "undertaking", domain, company_id, "details"] + ) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() - def get_company_details_short(self, company_id, domain='FGAS'): - url = '/'.join([self.baseUrl, 'undertaking', domain, company_id, - 'details-short']) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + def get_company_details_short(self, company_id, domain="FGAS"): + url = "/".join( + [self.baseUrl, "undertaking", domain, company_id, "details-short"] + ) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() - def get_company_licences(self, company_id, year, data, domain='FGAS'): + def get_company_licences(self, company_id, year, data, domain="FGAS"): if year: - url = '/'.join([self.baseUrl, 'undertaking', domain, company_id, - 'licences', year, 'aggregated']) + url = "/".join( + [ + self.baseUrl, + "undertaking", + domain, + company_id, + "licences", + year, + "aggregated", + ] + ) else: - url = '/'.join([self.baseUrl, 'undertaking', domain, company_id, - 'licences', 'aggregated']) - response = self.do_api_request(url, method='post', data=data, - headers={'Authorization': self.token}, - raw=True) + url = "/".join( + [ + self.baseUrl, + "undertaking", + domain, + company_id, + "licences", + "aggregated", + ] + ) + response = self.do_api_request( + url, + method="post", + data=data, + headers={"Authorization": self.token}, + raw=True, + ) return response def get_company_stocks(self, company_id): - url = '/'.join([self.baseUrl, 'stocks', company_id]) - response = self.do_api_request(url, method='post', - headers={'Authorization': self.token}, - raw=True) + url = "/".join([self.baseUrl, "stocks", company_id]) + response = self.do_api_request( + url, method="post", headers={"Authorization": self.token}, raw=True + ) return response - def get_company_paus(self, company_id, domain='ODS'): - url = '/'.join([self.baseUrl, 'undertaking', - domain, company_id, 'pau']) - response = self.do_api_request(url, method='post', - headers={'Authorization': self.token}, - raw=True) + def get_company_paus(self, company_id, domain="ODS"): + url = "/".join( + [self.baseUrl, "undertaking", domain, company_id, "pau"] + ) + response = self.do_api_request( + url, method="post", headers={"Authorization": self.token}, raw=True + ) return response def sync_company(self, company_id, domain): - d_map = { - "FGAS": "fgases", - "ODS": "ods" - } - url = '/'.join([self.baseUrl, 'sync', d_map.get(domain)]) - response = self.do_api_request(url, params={"id": company_id}, - headers={'Authorization': self.token}) + d_map = {"FGAS": "fgases", "ODS": "ods"} + url = "/".join([self.baseUrl, "sync", d_map.get(domain)]) + response = self.do_api_request( + url, + params={"id": 
company_id}, + headers={"Authorization": self.token}, + ) if response: return response.json() - def getCompanyDetailsById(self, companyId, domain='FGAS'): + def getCompanyDetailsById(self, companyId, domain="FGAS"): details = self.get_company_details(companyId, domain=domain) - keysToVerify = ['domain', 'address', 'company_id', 'collection_id'] + keysToVerify = ["domain", "address", "company_id", "collection_id"] if details: if reduce(lambda i, x: i and x in details, keysToVerify, True): - country_code = details.get('country_code') + country_code = details.get("country_code") # If we have a NONEU_TYPE or AMBIGUOUS_TYPE company with a # legal representative, use the country_code from the # legal representative - c_type = details.get('address', {}).get( - 'country', {}).get('type') - rep = details.get('representative') + c_type = ( + details.get("address", {}).get("country", {}).get("type") + ) + rep = details.get("representative") previous_paths = [] - for representative in details.get('represent_history', []): - address = representative.get('address') + for representative in details.get("represent_history", []): + address = representative.get("address") if address: - country = address.get('country') + country = address.get("country") if country: - rep_country_code = country.get('code') + rep_country_code = country.get("code") path = self.buildCollectionPath( - details['domain'], + details["domain"], rep_country_code, - str(details['company_id']), - details['collection_id'], + str(details["company_id"]), + details["collection_id"], ) if self.unrestrictedTraverse(path, None): previous_paths.append(path) - for c_hist in details.get('country_history', []): + for c_hist in details.get("country_history", []): path = self.buildCollectionPath( - details['domain'], - c_hist, - str(details['company_id']), - details['collection_id'], - ) + details["domain"], + c_hist, + str(details["company_id"]), + details["collection_id"], + ) if self.unrestrictedTraverse(path, None): previous_paths.append(path) - details['previous_paths'] = previous_paths - if c_type in ['NONEU_TYPE', 'AMBIGUOUS_TYPE'] and rep: - address = rep.get('address') + details["previous_paths"] = previous_paths + if c_type in ["NONEU_TYPE", "AMBIGUOUS_TYPE"] and rep: + address = rep.get("address") if address: - country = address.get('country') + country = address.get("country") if country: - country_code = country.get('code') + country_code = country.get("code") path = self.buildCollectionPath( - details['domain'], + details["domain"], country_code, - str(details['company_id']), - details['collection_id'] + str(details["company_id"]), + details["collection_id"], ) if path: - details['path'] = '/{}'.format(path) - details['licences_path'] =\ - '/{}/aggregated_licences_listing'.format(path) - details['stocks_path'] = '/{}/stock_listing'.format(path) - details['paus_path'] =\ - '/{}/process_agent_uses_listing'.format(path) + details["path"] = "/{}".format(path) + details["licences_path"] = ( + "/{}/aggregated_licences_listing".format(path) + ) + details["stocks_path"] = "/{}/stock_listing".format(path) + details["paus_path"] = ( + "/{}/process_agent_uses_listing".format(path) + ) return details - def getCollectionPaths(self, username): - url = self.baseUrl + '/user/' + username + '/companies' - response = self.do_api_request(url, - headers={'Authorization': self.token}) + def getCollectionPaths(self, username, userdata=None): + """Get all collection paths for a username or userdata. 
+ The username request is in the process of being deprecated. + The userdata request is the preferred method, which should match + either the ecas_id or the username (e-mail) in the ECR.""" + # /user/companies?username=&ecas_id= + url = "{}/user/companies".format(self.baseUrl) + if userdata and requests.head(url).status_code != 404: + q_params = { + "username": userdata.get("username"), + "ecas_id": userdata.get("ecas_id"), + } + response = self.do_api_request( + url, params=q_params, headers={"Authorization": self.token} + ) + else: + url = self.baseUrl + "/user/" + username + "/companies" + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) rep_paths = {} paths = [] prev_paths = [] @@ -239,273 +300,328 @@ def getCollectionPaths(self, username): def build_paths(c, c_code): path = None try: - path = self.buildCollectionPath(c['domain'], c_code, - str(c['company_id']), - c['collection_id']) + path = self.buildCollectionPath( + c["domain"], + c_code, + str(c["company_id"]), + c["collection_id"], + ) if not path: raise ValueError( - "Cannot form path with company data: %s" % str(c)) + "Cannot form path with company data: %s" % str(c) + ) except Exception as e: logger.warning( - '''Error in company data received from ''' - '''SatelliteRegistry: %s''' % repr(e)) + """Error in company data received from """ + """SatelliteRegistry: %s""" % repr(e) + ) return path if response: companies = response.json() for c in companies: - c_code = c.get('country') - if c.get('representative_country'): - c_code = c.get('representative_country') + c_code = c.get("country") + if c.get("representative_country"): + c_code = c.get("representative_country") path = build_paths(c, c_code) if path: paths.append(path) - for lr in c.get('represent_history', []): - lr_address = lr.get('address', {}) - lr_c = lr_address.get('country') + for lr in c.get("represent_history", []): + lr_address = lr.get("address", {}) + lr_c = lr_address.get("country") if lr_c: - lr_c_code = lr_c.get('code') + lr_c_code = lr_c.get("code") prev_path = build_paths(c, lr_c_code) if self.unrestrictedTraverse(prev_path, None): prev_paths.append(prev_path) - for c_hist in c.get('country_history', []): + for c_hist in c.get("country_history", []): path = build_paths(c, c_hist) if self.unrestrictedTraverse(path, None): prev_paths.append(path) - rep_paths['paths'] = paths - rep_paths['prev_paths'] = prev_paths + rep_paths["paths"] = paths + rep_paths["prev_paths"] = prev_paths return rep_paths - def existsCompany(self, params, domain='FGAS'): - url = '/'.join([self.baseUrl, 'undertaking', domain, 'filter']) - response = self.do_api_request(url, - params=params, - headers={'Authorization': self.token}) + def existsCompany(self, params, domain="FGAS"): + url = "/".join([self.baseUrl, "undertaking", domain, "filter"]) + response = self.do_api_request( + url, params=params, headers={"Authorization": self.token} + ) if response: return response.json() - def verifyCandidate(self, companyId, userId, candidateId=None, - domain='FGAS'): + def verifyCandidate( + self, companyId, userId, candidateId=None, domain="FGAS" + ): # use the right pattern for Api url - verify_endpoint = '/'.join(['candidate', domain, 'verify-none', - companyId]) + verify_endpoint = "/".join( + ["candidate", domain, "verify-none", companyId] + ) if candidateId: - verify_endpoint = '/'.join(['candidate', domain, 'verify', - companyId, candidateId]) + verify_endpoint = "/".join( + ["candidate", domain, "verify", companyId, candidateId] + ) - api_url = 
'/'.join([self.baseUrl, verify_endpoint]) + api_url = "/".join([self.baseUrl, verify_endpoint]) - response = self.do_api_request(api_url, - data={'user': userId}, - method="post", - headers={'Authorization': self.token}) + response = self.do_api_request( + api_url, + data={"user": userId}, + method="post", + headers={"Authorization": self.token}, + ) if response: if response.status_code == requests.codes.ok: data = response.json() - if 'verified' in data and data['verified']: + if "verified" in data and data["verified"]: return True return False - def getCandidates(self, domain='FGAS'): - url = '/'.join([self.baseUrl, 'candidate', domain, 'list']) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + def getCandidates(self, domain="FGAS"): + url = "/".join([self.baseUrl, "candidate", domain, "list"]) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() return [] - def getUsers(self, domain='FGAS'): - url = '/'.join([self.baseUrl, 'user', domain, 'list']) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + def getUsers(self, domain="FGAS"): + url = "/".join([self.baseUrl, "user", domain, "list"]) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() - def getMatchingLog(self, domain='FGAS'): - url = '/'.join([self.baseUrl, 'log', 'matching', domain]) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + def getMatchingLog(self, domain="FGAS"): + url = "/".join([self.baseUrl, "log", "matching", domain]) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() - def getDataSyncLog(self, domain='FGAS'): - url = self.baseUrl + '/log/sync' - url = '/'.join([self.baseUrl, 'log', 'sync', domain]) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + def getDataSyncLog(self, domain="FGAS"): + url = self.baseUrl + "/log/sync" + url = "/".join([self.baseUrl, "log", "sync", domain]) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() - def companyNewOldIdCheck(self, companyId='', oldCompanyId=''): - url = '/'.join([self.baseUrl, - 'candidate/verify-matching-ids/{0}/{1}'.format( - companyId, oldCompanyId)]) - response = self.do_api_request(url, - method="get", - headers={'Authorization': self.token}) + def companyNewOldIdCheck(self, companyId="", oldCompanyId=""): + url = "/".join( + [ + self.baseUrl, + "candidate/verify-matching-ids/{0}/{1}".format( + companyId, oldCompanyId + ), + ] + ) + response = self.do_api_request( + url, method="get", headers={"Authorization": self.token} + ) return response.json() - def unverifyCompany(self, companyId, userId, domain='FGAS'): + def unverifyCompany(self, companyId, userId, domain="FGAS"): co = self.get_company_details(companyId, domain) if co: - url = '/'.join([self.baseUrl, 'candidate', domain, - 'unverify/{0}/'.format(companyId)]) - data = {'user': userId} - response = self.do_api_request(url, - data=data, - method="post", - headers={ - 'Authorization': self.token}) + url = "/".join( + [ + self.baseUrl, + "candidate", + domain, + "unverify/{0}/".format(companyId), + ] + ) + data = {"user": userId} + response = self.do_api_request( + url, + data=data, + method="post", + headers={"Authorization": self.token}, + ) if response: unverifyResponse = response.json() if not unverifyResponse: return None # 
unverify succeeded; proceed with unLock an email alerts - path = self._unlockCompany(str(companyId), - co['oldcompany_account'], - co['country_code'], co['domain'], - userId) + path = self._unlockCompany( + str(companyId), + co["oldcompany_account"], + co["country_code"], + co["domain"], + userId, + ) email_sending_failed = False - url = self.baseUrl + '/alert_lockdown/unmatch' + url = self.baseUrl + "/alert_lockdown/unmatch" data = { - 'company_id': companyId, - 'user': userId, - 'oldcollection_path': path + "company_id": companyId, + "user": userId, + "oldcollection_path": path, } - response = self.do_api_request(url, - data=data, - method="post", - headers={ - 'Authorization': self.token}) + response = self.do_api_request( + url, + data=data, + method="post", + headers={"Authorization": self.token}, + ) if not response: email_sending_failed = True if email_sending_failed: logger.warning( - "Lockdown notification emails of %s not sent" % path) + "Lockdown notification emails of %s not sent" % path + ) return unverifyResponse - def updateCompanyStatus(self, company_id, status, domain='FGAS'): - url = '/'.join([self.baseUrl, 'undertaking', domain, - company_id, 'statusupdate']) - data = {'status': status} - response = self.do_api_request(url, data=data, - method="post", - headers={'Authorization': self.token}) + def updateCompanyStatus(self, company_id, status, domain="FGAS"): + url = "/".join( + [self.baseUrl, "undertaking", domain, company_id, "statusupdate"] + ) + data = {"status": status} + response = self.do_api_request( + url, + data=data, + method="post", + headers={"Authorization": self.token}, + ) return response - def getCompaniesExcelExport(self, domain='FGAS'): - url = '/'.join([self.baseUrl, 'export', 'undertaking', domain]) - response = self.do_api_request(url, - headers={'Authorization': self.token}) + def getCompaniesExcelExport(self, domain="FGAS"): + url = "/".join([self.baseUrl, "export", "undertaking", domain]) + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) return response - def getUsersExcelExport(self, domain='FGAS'): - url = '/'.join([self.baseUrl, 'export', 'user', 'list', domain]) - response = self.do_api_request(url, - headers={'Authorization': self.token}, - timeout=120) + def getUsersExcelExport(self, domain="FGAS"): + url = "/".join([self.baseUrl, "export", "user", "list", domain]) + response = self.do_api_request( + url, headers={"Authorization": self.token}, timeout=120 + ) return response def getSettings(self): - url = self.baseUrl + '/settings' - response = self.do_api_request(url, - headers={'Authorization': self.token}) + url = self.baseUrl + "/settings" + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() def getAllEmails(self): - url = self.baseUrl + '/mail/list' - response = self.do_api_request(url, - headers={'Authorization': self.token}) + url = self.baseUrl + "/mail/list" + response = self.do_api_request( + url, headers={"Authorization": self.token} + ) if response: return response.json() def addEmail(self, first_name, last_name, email): - url = self.baseUrl + '/mail/add' + url = self.baseUrl + "/mail/add" data = { - 'mail': email, - 'first_name': first_name, - 'last_name': last_name + "mail": email, + "first_name": first_name, + "last_name": last_name, } - response = self.do_api_request(url, data=data, method='post', - headers={'Authorization': self.token}) + response = self.do_api_request( + url, + data=data, + method="post", + 
headers={"Authorization": self.token}, + ) if response: return response.json() def delEmail(self, email): - url = self.baseUrl + '/mail/delete' - data = { - 'mail': email - } - response = self.do_api_request(url, data=data, method='post', - headers={'Authorization': self.token}) + url = self.baseUrl + "/mail/delete" + data = {"mail": email} + response = self.do_api_request( + url, + data=data, + method="post", + headers={"Authorization": self.token}, + ) if response: return response.json() - def lockDownCompany(self, company_id, old_collection_id, country_code, - domain, user): - path = self.buildCollectionPath(domain, country_code, - str(company_id), old_collection_id) + def lockDownCompany( + self, company_id, old_collection_id, country_code, domain, user + ): + path = self.buildCollectionPath( + domain, country_code, str(company_id), old_collection_id + ) bdrAuth = self.authMiddleware bdrAuth.lockDownCollection(path, user) email_sending_failed = False - url = '/'.join([self.baseUrl, 'alert_lockdown', 'wrong_match', domain]) - data = { - 'company_id': company_id, - 'user': user - } - response = self.do_api_request(url, data=data, - method='post', - headers={'Authorization': self.token}) + url = "/".join([self.baseUrl, "alert_lockdown", "wrong_match", domain]) + data = {"company_id": company_id, "user": user} + response = self.do_api_request( + url, + data=data, + method="post", + headers={"Authorization": self.token}, + ) if not response: email_sending_failed = True if email_sending_failed: logger.warning( - "Lockdown notification emails of %s not sent" % path) + "Lockdown notification emails of %s not sent" % path + ) - def _unlockCompany(self, company_id, old_collection_id, country_code, - domain, user): + def _unlockCompany( + self, company_id, old_collection_id, country_code, domain, user + ): path = self.buildCollectionPath( - domain, country_code, str(company_id), old_collection_id) + domain, country_code, str(company_id), old_collection_id + ) bdrAuth = self.authMiddleware bdrAuth.unlockCollection(path, user) return path - def unlockCompany(self, company_id, old_collection_id, country_code, - domain, user): + def unlockCompany( + self, company_id, old_collection_id, country_code, domain, user + ): path = self._unlockCompany( - company_id, old_collection_id, country_code, domain, user) + company_id, old_collection_id, country_code, domain, user + ) email_sending_failed = False - url = '/'.join([self.baseUrl, 'alert_lockdown', 'wrong_lockdown', - domain]) - data = { - 'company_id': company_id, - 'user': user - } - response = self.do_api_request(url, data=data, - method='post', - headers={'Authorization': self.token}) + url = "/".join( + [self.baseUrl, "alert_lockdown", "wrong_lockdown", domain] + ) + data = {"company_id": company_id, "user": user} + response = self.do_api_request( + url, + data=data, + method="post", + headers={"Authorization": self.token}, + ) if not response: email_sending_failed = True if email_sending_failed: logger.warning( - "Lockdown notification emails of %s not sent" % path) + "Lockdown notification emails of %s not sent" % path + ) - def lockedCompany(self, company_id, old_collection_id, country_code, - domain): + def lockedCompany( + self, company_id, old_collection_id, country_code, domain + ): path = self.buildCollectionPath( - domain, country_code, str(company_id), old_collection_id) + domain, country_code, str(company_id), old_collection_id + ) bdrAuth = self.authMiddleware return bdrAuth.lockedCollection(path) @@ -515,40 +631,46 @@ def 
getCountryFolder(cls, country_code): return cls.COUNTRY_TO_FOLDER.get(country_code, country_code) @classmethod - def buildCollectionPath(cls, domain, country_code, company_id, - old_collection_id=None): + def buildCollectionPath( + cls, domain, country_code, company_id, old_collection_id=None + ): obligation_folder = cls.DOMAIN_TO_OBLIGATION_FOLDER.get(domain) if not obligation_folder or not country_code: return None country_folder = cls.getCountryFolder(country_code) - collection_folder = (old_collection_id if old_collection_id - else company_id) + collection_folder = ( + old_collection_id if old_collection_id else company_id + ) - return '/'.join([str(obligation_folder), - str(country_folder), - str(collection_folder)]) + return "/".join( + [ + str(obligation_folder), + str(country_folder), + str(collection_folder), + ] + ) class BDRRegistryAPI(BaseRegistryAPI): - def get_registry_companies(self): - """ Get the list of companies from the Registry - """ - url = self.baseUrl + '/management/companies/export/json' + """Get the list of companies from the Registry""" + url = self.baseUrl + "/management/companies/export/json" response = self.do_api_request( - url, headers={'Authorization': self.bdr_registry_token}) + url, headers={"Authorization": self.bdr_registry_token} + ) if response: return response.json() @ram.cache(lambda *args, **kwargs: args[2] + str(time() // (60 * 60))) def get_company_details(self, company_id, domain=None): - """ Get company details from Registry - """ - url = self.baseUrl + \ - '/management/companies/account/{0}/{1}'.format(company_id, domain) + """Get company details from Registry""" + url = self.baseUrl + "/management/companies/account/{0}/{1}".format( + company_id, domain + ) response = self.do_api_request( - url, headers={'Authorization': self.bdr_registry_token}) + url, headers={"Authorization": self.bdr_registry_token} + ) if response: return response.json() @@ -556,29 +678,24 @@ def get_company_details(self, company_id, domain=None): @ram.cache(lambda *args, **kwargs: args[2] + str(time() // (10 * 60))) def get_user_details(self, username): """Get details for username""" - url = self.baseUrl + '/management/username/companies/' - params = { - "username": username - } + url = self.baseUrl + "/management/username/companies/" + params = {"username": username} response = self.do_api_request( url, params=params, - headers={ - 'Authorization': self.bdr_registry_token}) + headers={"Authorization": self.bdr_registry_token}, + ) if response and response.status_code == requests.codes.ok: return response.json() return [] def getCollectionPaths(self, username): - """ Get collections accessible by the user """ + """Get collections accessible by the user""" usr_details = self.get_user_details(username) - rep_paths = { - 'paths': [], - 'prev_paths': [] - } + rep_paths = {"paths": [], "prev_paths": []} if usr_details: for res in usr_details: - if res.get('has_reporting_folder'): - rep_paths['paths'].append(res.get('reporting_folder')) + if res.get("has_reporting_folder"): + rep_paths["paths"].append(res.get("reporting_folder")) return rep_paths diff --git a/Products/Reportek/ReportekEngine.py b/Products/Reportek/ReportekEngine.py index 8d1ad96b..20ba0327 100644 --- a/Products/Reportek/ReportekEngine.py +++ b/Products/Reportek/ReportekEngine.py @@ -24,54 +24,60 @@ import json import logging import os -import requests import tempfile -import xmlrpclib from copy import copy, deepcopy from operator import itemgetter -from StringIO import StringIO from time import strftime, time 
-from urlparse import urlparse -from zipfile import ZipFile, ZIP_DEFLATED +from zipfile import ZIP_DEFLATED, ZipFile # product imports import constants import Globals -import Products +import plone.protect.interfaces import RepUtils +import requests import transaction import xlwt +import xmlrpclib from AccessControl import ClassSecurityInfo, SpecialUsers, getSecurityManager from AccessControl.Permissions import view, view_management_screens -from AccessControl.SecurityManagement import (newSecurityManager, - setSecurityManager) -from config import REPORTEK_DEPLOYMENT, DEPLOYMENT_BDR, DEPLOYMENT_CDR +from AccessControl.SecurityManagement import ( + newSecurityManager, + setSecurityManager, +) +from config import DEPLOYMENT_BDR, DEPLOYMENT_CDR, REPORTEK_DEPLOYMENT from CountriesManager import CountriesManager from DataflowsManager import DataflowsManager from DateTime import DateTime from interfaces import IReportekEngine + # Zope imports from OFS.Folder import Folder from paginator import DiggPaginator, EmptyPage, InvalidPage from plone.memoize import ram -import plone.protect.interfaces -from zope.interface import alsoProvides -from Products.PageTemplates.PageTemplateFile import PageTemplateFile -from Products.Reportek.BdrAuthorizationMiddleware import \ - BdrAuthorizationMiddleware -from Products.Reportek.clamav import AVService -from Products.Reportek.constants import ECAS_ID, DEFAULT_CATALOG -from Products.Reportek.RepUtils import getToolByName -from Products.Reportek.ContentRegistryPingger import ContentRegistryPingger -from Products.Reportek.RegistryManagement import (BDRRegistryAPI, - FGASRegistryAPI) +from StringIO import StringIO from Toolz import Toolz +from urlparse import urlparse from ZODB.PersistentList import PersistentList from ZODB.PersistentMapping import PersistentMapping from zope.component import getUtility from zope.i18n.interfaces import II18nAware, INegotiator from zope.i18n.negotiator import normalize_lang -from zope.interface import implements +from zope.interface import alsoProvides, implements + +import Products +from Products.PageTemplates.PageTemplateFile import PageTemplateFile +from Products.Reportek.BdrAuthorizationMiddleware import ( + BdrAuthorizationMiddleware, +) +from Products.Reportek.clamav import AVService +from Products.Reportek.constants import DEFAULT_CATALOG, ECAS_ID +from Products.Reportek.ContentRegistryPingger import ContentRegistryPingger +from Products.Reportek.RegistryManagement import ( + BDRRegistryAPI, + FGASRegistryAPI, +) +from Products.Reportek.RepUtils import getToolByName __doc__ = """ Engine for the Reportek Product @@ -84,134 +90,115 @@ class ReportekEngine(Folder, Toolz, DataflowsManager, CountriesManager): - """ Stores generic attributes for Reportek """ + """Stores generic attributes for Reportek""" implements(IReportekEngine, II18nAware) - meta_type = 'Reportek Engine' - icon = 'misc_/Reportek/Converters' + meta_type = "Reportek Engine" + icon = "misc_/Reportek/Converters" security = ClassSecurityInfo() - manage_options = (( - {'label': 'View', 'action': 'index_html'}, - {'label': 'Properties', 'action': 'manage_properties'}, - {'label': 'UNS settings', 'action': 'uns_settings'}, - {'label': 'Migrations', 'action': 'migration_table'}, + manage_options = ( + {"label": "View", "action": "index_html"}, + {"label": "Properties", "action": "manage_properties"}, + {"label": "UNS settings", "action": "uns_settings"}, + {"label": "Migrations", "action": "migration_table"}, ) + Folder.manage_options[3:] - ) - _properties = ({'id': 
'title', - 'type': 'string', - 'mode': 'w', - 'label': 'Title'}, - {'id': 'webq_url', - 'type': 'string', - 'mode': 'w'}, - {'id': 'webq_envelope_menu', - 'type': 'string', - 'mode': 'w'}, - {'id': 'webq_before_edit_page', - 'type': 'string', - 'mode': 'w'}, - {'id': 'UNS_server', - 'type': 'string', - 'mode': 'w'}, - {'id': 'UNS_username', - 'type': 'string', - 'mode': 'w'}, - {'id': 'UNS_password', - 'type': 'string', - 'mode': 'w'}, - {'id': 'UNS_channel_id', - 'type': 'string', - 'mode': 'w'}, - {'id': 'UNS_notification_types', - 'type': 'lines', - 'mode': 'w'}, - {'id': 'QA_application', - 'type': 'string', - 'mode': 'w'}, - {'id': 'globally_restricted_site', - 'type': 'tokens', - 'mode': 'w'} - ) + _properties = ( + {"id": "title", "type": "string", "mode": "w", "label": "Title"}, + {"id": "webq_url", "type": "string", "mode": "w"}, + {"id": "webq_envelope_menu", "type": "string", "mode": "w"}, + {"id": "webq_before_edit_page", "type": "string", "mode": "w"}, + {"id": "UNS_server", "type": "string", "mode": "w"}, + {"id": "UNS_username", "type": "string", "mode": "w"}, + {"id": "UNS_password", "type": "string", "mode": "w"}, + {"id": "UNS_channel_id", "type": "string", "mode": "w"}, + {"id": "UNS_notification_types", "type": "lines", "mode": "w"}, + {"id": "QA_application", "type": "string", "mode": "w"}, + {"id": "globally_restricted_site", "type": "tokens", "mode": "w"}, + ) # The URL to the WebQ XML-RPC server # Empty means no WebQ available - webq_url = 'http://cdr.eionet.europa.eu/webq/RpcRouter' + webq_url = "http://cdr.eionet.europa.eu/webq/RpcRouter" # The URL to the WebQ page which constructs a menu for a specified envelope # called via HTTP GET - webq_envelope_menu = 'http://cdr.eionet.europa.eu/webq/WebQMenu' + webq_envelope_menu = "http://cdr.eionet.europa.eu/webq/WebQMenu" # The URL to the WebQ webpage, before the user starts to use the edit form. # The purpose is to ask the capabilities of the user webbrowser # and what language the form should be in. # called via HTTP GET - webq_before_edit_page = 'http://cdr.eionet.europa.eu/webq/WebQEdit' + webq_before_edit_page = "http://cdr.eionet.europa.eu/webq/WebQEdit" # The default QA application used for manual and automatic triggered # QA operation # If this is empty, this Reportek instance does not have a QA system # linked to it - QA_application = '' + QA_application = "" qa_httpres = False exp_httpres = False globally_restricted_site = False cr_rmq = False env_fwd_rmq = False - env_fwd_rmq_queue = 'fwd_envelopes' + env_fwd_rmq_queue = "fwd_envelopes" col_sync_rmq = False col_sync_rmq_pub = False col_role_sync_rmq = False if REPORTEK_DEPLOYMENT == DEPLOYMENT_CDR: - cr_api_url = 'http://cr.eionet.europa.eu/ping' + cr_api_url = "http://cr.eionet.europa.eu/ping" else: - cr_api_url = '' - auth_middleware_url = '' - bdr_registry_url = '' - bdr_registry_token = '' + cr_api_url = "" + auth_middleware_url = "" + bdr_registry_url = "" + bdr_registry_token = "" bdr_registry_obligations = [] er_fgas_obligations = [] er_ods_obligations = [] preliminary_obligations = [] - er_url = '' - er_token = '' + er_url = "" + er_token = "" auth_middleware_recheck_interval = 300 XLS_max_rows = 1000 - clamav_rest_host = '' - clamd_host = '' + clamav_rest_host = "" + clamd_host = "" clamd_port = 3310 clamd_timeout = None clam_max_file_size = None - dfm_type = 'dfm_xmlrpc' - cm_type = 'cm_xmlrpc' + dfm_type = "dfm_xmlrpc" + cm_type = "cm_xmlrpc" def all_meta_types(self, interfaces=None): """ - What can you put inside me? 
Checks if the legal products are - actually installed in Zope + What can you put inside me? Checks if the legal products are + actually installed in Zope """ - types = ['Script (Python)', 'DTML Method', - 'DTML Document', 'Page Template'] + types = [ + "Script (Python)", + "DTML Method", + "DTML Document", + "Page Template", + ] l_result = [] for l_type in Products.meta_types: - if l_type['name'] in types: + if l_type["name"] in types: l_result.append(l_type) return l_result - def __init__(self, webq_url=''): - """ constructor """ + def __init__(self, webq_url=""): + """constructor""" DataflowsManager.__init__(self) CountriesManager.__init__(self) self.id = constants.ENGINE_ID # UNS configuration parameters - self.UNS_server = '' - self.UNS_username = '' - self.UNS_password = '' - self.UNS_channel_id = '' - self.UNS_notification_types = ['Envelope release', 'Envelope revoke'] + self.UNS_server = "" + self.UNS_username = "" + self.UNS_password = "" + self.UNS_channel_id = "" + self.UNS_notification_types = ["Envelope release", "Envelope revoke"] - security.declareProtected(view_management_screens, 'index_html') - index_html = PageTemplateFile('zpt/engine_index', globals()) + security.declareProtected(view_management_screens, "index_html") + index_html = PageTemplateFile("zpt/engine_index", globals()) # Setters for the Dataflows and Country Manager properties @DataflowsManager.dfm_type.setter @@ -232,25 +219,25 @@ def dfm_rest_timeout(self, value): @DataflowsManager.dfm_title.setter def dfm_title(self, value): - xmlrpc_dataflow = getattr(self, 'xmlrpc_dataflow', None) + xmlrpc_dataflow = getattr(self, "xmlrpc_dataflow", None) if xmlrpc_dataflow: self.xmlrpc_dataflow.title = value @DataflowsManager.dfm_url.setter def dfm_url(self, value): - xmlrpc_dataflow = getattr(self, 'xmlrpc_dataflow', None) + xmlrpc_dataflow = getattr(self, "xmlrpc_dataflow", None) if xmlrpc_dataflow: self.xmlrpc_dataflow.url = value @DataflowsManager.dfm_method.setter def dfm_method(self, value): - xmlrpc_dataflow = getattr(self, 'xmlrpc_dataflow', None) + xmlrpc_dataflow = getattr(self, "xmlrpc_dataflow", None) if xmlrpc_dataflow: self.xmlrpc_dataflow.method_name = value @DataflowsManager.dfm_timeout.setter def dfm_timeout(self, value): - xmlrpc_dataflow = getattr(self, 'xmlrpc_dataflow', None) + xmlrpc_dataflow = getattr(self, "xmlrpc_dataflow", None) if xmlrpc_dataflow: self.xmlrpc_dataflow.timeout = float(value) @@ -268,43 +255,43 @@ def cm_rest_timeout(self, value): @CountriesManager.cm_title.setter def cm_title(self, value): - xmlrpc_localities = getattr(self, 'xmlrpc_localities', None) + xmlrpc_localities = getattr(self, "xmlrpc_localities", None) if xmlrpc_localities: self.xmlrpc_localities.title = value @CountriesManager.cm_url.setter def cm_url(self, value): - xmlrpc_localities = getattr(self, 'xmlrpc_localities', None) + xmlrpc_localities = getattr(self, "xmlrpc_localities", None) if xmlrpc_localities: self.xmlrpc_localities.url = value @CountriesManager.cm_method.setter def cm_method(self, value): - xmlrpc_localities = getattr(self, 'xmlrpc_localities', None) + xmlrpc_localities = getattr(self, "xmlrpc_localities", None) if xmlrpc_localities: self.xmlrpc_localities.method_name = value @CountriesManager.cm_timeout.setter def cm_timeout(self, value): - xmlrpc_localities = getattr(self, 'xmlrpc_localities', None) + xmlrpc_localities = getattr(self, "xmlrpc_localities", None) if xmlrpc_localities: self.xmlrpc_localities.timeout = float(value) # security stuff security = ClassSecurityInfo() - 
security.declarePublic('getDeploymentType') + security.declarePublic("getDeploymentType") def getDeploymentType(self): return Products.Reportek.REPORTEK_DEPLOYMENT - security.declareProtected('View', 'get_rod_obligations') + security.declareProtected("View", "get_rod_obligations") def get_rod_obligations(self): - """ Returns a sorted list of obligations from ROD - """ - obligations = [(o.get('uri'), o.get('TITLE')) - for o in self.dataflow_rod()] + """Returns a sorted list of obligations from ROD""" + obligations = [ + (o.get("uri"), o.get("TITLE")) for o in self.dataflow_rod() + ] data = [] if obligations: @@ -312,84 +299,106 @@ def get_rod_obligations(self): return data - _manage_properties = PageTemplateFile('zpt/engine/prop', globals()) + _manage_properties = PageTemplateFile("zpt/engine/prop", globals()) - security.declareProtected(view_management_screens, 'manage_properties') + security.declareProtected(view_management_screens, "manage_properties") def manage_properties(self): - """ Manage the edited values """ - if self.REQUEST['REQUEST_METHOD'] == 'GET': + """Manage the edited values""" + if self.REQUEST["REQUEST_METHOD"] == "GET": return self._manage_properties() - self.title = self.REQUEST.get('title', self.title) - self.webq_url = self.REQUEST.get('webq_url', self.webq_url) + self.title = self.REQUEST.get("title", self.title) + self.webq_url = self.REQUEST.get("webq_url", self.webq_url) self.webq_envelope_menu = self.REQUEST.get( - 'webq_envelope_menu', self.webq_envelope_menu) + "webq_envelope_menu", self.webq_envelope_menu + ) self.webq_before_edit_page = self.REQUEST.get( - 'webq_before_edit_page', self.webq_before_edit_page) + "webq_before_edit_page", self.webq_before_edit_page + ) self.QA_application = self.REQUEST.get( - 'QA_application', self.QA_application) - self.qa_httpres = bool(self.REQUEST.get('qa_httpres', False)) - self.exp_httpres = bool(self.REQUEST.get('exp_httpres', False)) + "QA_application", self.QA_application + ) + self.qa_httpres = bool(self.REQUEST.get("qa_httpres", False)) + self.exp_httpres = bool(self.REQUEST.get("exp_httpres", False)) self.globally_restricted_site = bool( - self.REQUEST.get('globally_restricted_site', - self.globally_restricted_site)) - self.dfm_url = self.REQUEST.get('dfm_url', self.dfm_url) - self.dfm_type = self.REQUEST.get('dfm_type', self.dfm_type) - self.dfm_method = self.REQUEST.get('dfm_method', self.dfm_method) + self.REQUEST.get( + "globally_restricted_site", self.globally_restricted_site + ) + ) + self.dfm_url = self.REQUEST.get("dfm_url", self.dfm_url) + self.dfm_type = self.REQUEST.get("dfm_type", self.dfm_type) + self.dfm_method = self.REQUEST.get("dfm_method", self.dfm_method) self.dfm_obl_url_prefix = self.REQUEST.get( - 'dfm_obl_url_prefix', self.dfm_obl_url_prefix) - self.dfm_timeout = self.REQUEST.get('dfm_timeout', self.dfm_timeout) - self.dfm_rest_url = self.REQUEST.get('dfm_rest_url', self.dfm_rest_url) + "dfm_obl_url_prefix", self.dfm_obl_url_prefix + ) + self.dfm_timeout = self.REQUEST.get("dfm_timeout", self.dfm_timeout) + self.dfm_rest_url = self.REQUEST.get("dfm_rest_url", self.dfm_rest_url) self.dfm_rest_timeout = self.REQUEST.get( - 'dfm_rest_timeout', self.dfm_rest_timeout) + "dfm_rest_timeout", self.dfm_rest_timeout + ) - self.cm_type = self.REQUEST.get('cm_type', self.cm_type) - self.cm_url = self.REQUEST.get('cm_url', self.cm_url) - self.cm_method = self.REQUEST.get('cm_method', self.cm_method) - self.cm_timeout = self.REQUEST.get('cm_timeout', self.cm_timeout) - self.cm_rest_url = 
self.REQUEST.get('cm_rest_url', self.cm_rest_url) + self.cm_type = self.REQUEST.get("cm_type", self.cm_type) + self.cm_url = self.REQUEST.get("cm_url", self.cm_url) + self.cm_method = self.REQUEST.get("cm_method", self.cm_method) + self.cm_timeout = self.REQUEST.get("cm_timeout", self.cm_timeout) + self.cm_rest_url = self.REQUEST.get("cm_rest_url", self.cm_rest_url) self.cm_rest_timeout = self.REQUEST.get( - 'cm_rest_timeout', self.cm_rest_timeout) - - self.cr_api_url = self.REQUEST.get('cr_api_url', self.cr_api_url) - self.cr_rmq = bool(self.REQUEST.get('cr_rmq', False)) - self.env_fwd_rmq = bool(self.REQUEST.get('env_fwd_rmq', False)) - self.env_fwd_rmq_queue = self.REQUEST.get('env_fwd_rmq_queue', - 'fwd_envelopes') - self.col_sync_rmq = bool(self.REQUEST.get('col_sync_rmq', False)) - self.col_sync_rmq_pub = bool(self.REQUEST.get('col_sync_rmq_pub', - False)) - self.col_role_sync_rmq = bool(self.REQUEST.get( - 'col_role_sync_rmq', False)) + "cm_rest_timeout", self.cm_rest_timeout + ) + + self.cr_api_url = self.REQUEST.get("cr_api_url", self.cr_api_url) + self.cr_rmq = bool(self.REQUEST.get("cr_rmq", False)) + self.env_fwd_rmq = bool(self.REQUEST.get("env_fwd_rmq", False)) + self.env_fwd_rmq_queue = self.REQUEST.get( + "env_fwd_rmq_queue", "fwd_envelopes" + ) + self.col_sync_rmq = bool(self.REQUEST.get("col_sync_rmq", False)) + self.col_sync_rmq_pub = bool( + self.REQUEST.get("col_sync_rmq_pub", False) + ) + self.col_role_sync_rmq = bool( + self.REQUEST.get("col_role_sync_rmq", False) + ) if self.cr_api_url: self.contentRegistryPingger.api_url = self.cr_api_url self.contentRegistryPingger.cr_rmq = self.cr_rmq self.auth_middleware_url = self.REQUEST.get( - 'auth_middleware_url', self.auth_middleware_url) + "auth_middleware_url", self.auth_middleware_url + ) if self.auth_middleware_url: - self.auth_middleware_recheck_interval = int(self.REQUEST.get( - 'auth_middleware_recheck_interval', - self.auth_middleware_recheck_interval)) + self.auth_middleware_recheck_interval = int( + self.REQUEST.get( + "auth_middleware_recheck_interval", + self.auth_middleware_recheck_interval, + ) + ) self.authMiddleware.setServiceRecheckInterval( - self.auth_middleware_recheck_interval) + self.auth_middleware_recheck_interval + ) self.bdr_registry_url = self.REQUEST.get( - 'bdr_registry_url', self.bdr_registry_url) + "bdr_registry_url", self.bdr_registry_url + ) self.bdr_registry_token = self.REQUEST.get( - 'bdr_registry_token', self.bdr_registry_token) + "bdr_registry_token", self.bdr_registry_token + ) self.bdr_registry_obligations = self.REQUEST.get( - 'bdr_registry_obligations', self.bdr_registry_obligations) - self.er_url = self.REQUEST.get('er_url', self.er_url) - self.er_token = self.REQUEST.get('er_token', self.er_token) + "bdr_registry_obligations", self.bdr_registry_obligations + ) + self.er_url = self.REQUEST.get("er_url", self.er_url) + self.er_token = self.REQUEST.get("er_token", self.er_token) self.er_fgas_obligations = self.REQUEST.get( - 'er_fgas_obligations', self.er_fgas_obligations) + "er_fgas_obligations", self.er_fgas_obligations + ) self.er_ods_obligations = self.REQUEST.get( - 'er_ods_obligations', self.er_ods_obligations) - xls_max_rows = self.REQUEST.get('xls_max_rows', self.XLS_max_rows) + "er_ods_obligations", self.er_ods_obligations + ) + xls_max_rows = self.REQUEST.get("xls_max_rows", self.XLS_max_rows) try: self.rdf_export_envs_age = int( - self.REQUEST.get('rdf_export_envs_age', None)) + self.REQUEST.get("rdf_export_envs_age", None) + ) except ValueError: self.rdf_export_envs_age 
= None @@ -400,31 +409,38 @@ def manage_properties(self): self.XLS_max_rows = xls_max_rows self.preliminary_obligations = self.REQUEST.get( - 'preliminary_obligations', self.preliminary_obligations) + "preliminary_obligations", self.preliminary_obligations + ) self.clamav_rest_host = self.REQUEST.get( - 'clamav_rest_host', self.clamav_rest_host) - self.clamd_host = self.REQUEST.get('clamd_host', self.clamd_host) + "clamav_rest_host", self.clamav_rest_host + ) + self.clamd_host = self.REQUEST.get("clamd_host", self.clamd_host) try: - self.clamd_port = int(self.REQUEST.get( - 'clamd_port', self.clamd_port)) + self.clamd_port = int( + self.REQUEST.get("clamd_port", self.clamd_port) + ) except ValueError: self.clamd_port = 3310 try: - self.clamd_timeout = float(self.REQUEST.get( - 'clamd_timeout', self.clamd_timeout)) + self.clamd_timeout = float( + self.REQUEST.get("clamd_timeout", self.clamd_timeout) + ) except (ValueError, TypeError): self.clamd_timeout = None try: - self.clam_max_file_size = int(self.REQUEST.get( - 'clam_max_file_size', self.clam_max_file_size)) + self.clam_max_file_size = int( + self.REQUEST.get("clam_max_file_size", self.clam_max_file_size) + ) except (ValueError, TypeError): self.clam_max_file_size = None - self._AVService = AVService(self.clamav_rest_host, - self.clamd_host, - self.clamd_port, - self.clamd_timeout, - self.clam_max_file_size) + self._AVService = AVService( + self.clamav_rest_host, + self.clamd_host, + self.clamd_port, + self.clamd_timeout, + self.clam_max_file_size, + ) if REPORTEK_DEPLOYMENT == DEPLOYMENT_BDR: self.BDRRegistryAPI.set_base_url(self.bdr_registry_url) self.FGASRegistryAPI.set_base_url(self.er_url, self.er_token) @@ -432,63 +448,70 @@ def manage_properties(self): # don't send the completed from back, the values set on self # must be used return self._manage_properties( - manage_tabs_message="Properties changed") + manage_tabs_message="Properties changed" + ) def canPingCR(self, envelope): """Check if a pingger is or can be created""" - if getSecurityManager().checkPermission('Release Envelopes', envelope): + if getSecurityManager().checkPermission("Release Envelopes", envelope): return bool(self.contentRegistryPingger) @property def contentRegistryPingger(self): if not self.cr_api_url and not self.cr_rmq: return None - pingger = getattr(self, '_contentRegistryPingger', None) + pingger = getattr(self, "_contentRegistryPingger", None) if pingger: return pingger else: self._contentRegistryPingger = ContentRegistryPingger( - self.cr_api_url, self.cr_rmq) + self.cr_api_url, self.cr_rmq + ) return self._contentRegistryPingger @property def authMiddleware(self): if not self.auth_middleware_url: return None - api = getattr(self, '_authMiddleware', None) + api = getattr(self, "_authMiddleware", None) if api: return api else: self._authMiddleware = BdrAuthorizationMiddleware( - self.auth_middleware_url) + self.auth_middleware_url + ) return self._authMiddleware @property def FGASRegistryAPI(self): - if not getattr(self, '_FGASRegistryAPI', None): + if not getattr(self, "_FGASRegistryAPI", None): self._FGASRegistryAPI = FGASRegistryAPI( - 'FGAS Registry', self.er_url, self.er_token) + "FGAS Registry", self.er_url, self.er_token + ) return self._FGASRegistryAPI @property def BDRRegistryAPI(self): - if not getattr(self, '_BDRRegistryAPI', None): + if not getattr(self, "_BDRRegistryAPI", None): self._BDRRegistryAPI = BDRRegistryAPI( - 'BDR Registry', self.bdr_registry_url) + "BDR Registry", self.bdr_registry_url + ) return self._BDRRegistryAPI @property 
def AVService(self): - if not getattr(self, '_AVService', None): - self._AVService = AVService(self.clamav_rest_host, - self.clamd_host, - self.clamd_port, - self.clamd_timeout) + if not getattr(self, "_AVService", None): + self._AVService = AVService( + self.clamav_rest_host, + self.clamd_host, + self.clamd_port, + self.clamd_timeout, + ) return self._AVService @property def cols_sync_history(self): - return getattr(self, '_cols_sync_history', None) + return getattr(self, "_cols_sync_history", None) def get_col_sync_history(self, path=None): hist = self.cols_sync_history @@ -504,48 +527,52 @@ def get_col_sync_history(self, path=None): return result def add_new_col_sync(self, path, m_time): - if getattr(self, 'col_sync_rmq', False): + if getattr(self, "col_sync_rmq", False): self.cols_sync_history[path] = { - 'modified': m_time, - 'ack': PersistentList() + "modified": m_time, + "ack": PersistentList(), } self._p_changed = True def set_depl_col_synced(self, depl, path, m_time): - if getattr(self, 'col_sync_rmq', False): - if self.cols_sync_history[path].get('modified') != m_time: + if getattr(self, "col_sync_rmq", False): + if self.cols_sync_history[path].get("modified") != m_time: self.add_new_col_sync(path, m_time) - if depl not in self.cols_sync_history[path]['ack']: - self.cols_sync_history[path]['ack'].append(depl) + if depl not in self.cols_sync_history[path]["ack"]: + self.cols_sync_history[path]["ack"].append(depl) self._p_changed = True def get_registry(self, collection): - registry = '' + registry = "" obl = collection.dataflow_uris if obl: if obl[0] in self.bdr_registry_obligations: - registry = 'BDRRegistryAPI' + registry = "BDRRegistryAPI" else: for obl in self.dataflow_uris: - if (obl in self.er_ods_obligations - or obl in self.er_fgas_obligations): + if ( + obl in self.er_ods_obligations + or obl in self.er_fgas_obligations + ): registry = "FGASRegistryAPI" break - if (not getattr(collection, '_company_id', None) - and getattr(collection, 'old_company_id', None)): - registry = 'BDRRegistryAPI' + if not getattr(collection, "_company_id", None) and getattr( + collection, "old_company_id", None + ): + registry = "BDRRegistryAPI" return getattr(self, registry, None) - security.declarePublic('getPartsOfYear') + security.declarePublic("getPartsOfYear") def getPartsOfYear(self): """ """ - return [''] + self.partofyear_table + return [""] + self.partofyear_table - security.declareProtected(view_management_screens, - 'update_company_collection') + security.declareProtected( + view_management_screens, "update_company_collection" + ) - security.declareProtected(view_management_screens, 'change_ownership') + security.declareProtected(view_management_screens, "change_ownership") def change_ownership(self, obj, newuser, deluser): """ """ @@ -558,31 +585,51 @@ def change_ownership(self, obj, newuser, deluser): obj.changeOwnership(wrapped_user) # change ownership obj.manage_delLocalRoles(owners) # delete the old owner # set local role to the new user - obj.manage_setLocalRoles(wrapped_user.getId(), ['Owner', ]) + obj.manage_setLocalRoles( + wrapped_user.getId(), + [ + "Owner", + ], + ) - def create_subcollection(self, parent_coll, title, dataflow_uris, - company_id=None, allow_envelopes=1, - allow_collections=1, suffix=None, - descr='', locality='', partofyear='', - year='', endyear=''): + def create_subcollection( + self, + parent_coll, + title, + dataflow_uris, + company_id=None, + allow_envelopes=1, + allow_collections=1, + suffix=None, + descr="", + locality="", + partofyear="", + 
year="", + endyear="", + ): """Create subcollection in parent_coll""" - for coll in parent_coll.objectValues('Report Collection'): + for coll in parent_coll.objectValues("Report Collection"): if coll.title == title: return coll - p_allow_c = getattr(parent_coll, 'allow_collections', 0) + p_allow_c = getattr(parent_coll, "allow_collections", 0) if not p_allow_c: parent_coll.allow_collections = 1 - sc_id = RepUtils.generate_id('col') + sc_id = RepUtils.generate_id("col") if suffix: - sc_id = ''.join([sc_id, suffix]) - parent_coll.manage_addCollection(dataflow_uris=dataflow_uris, - country=parent_coll.country, - id=sc_id, title=title, - allow_collections=allow_collections, - allow_envelopes=allow_envelopes, - descr=descr, locality=locality, - partofyear=partofyear, - year=year, endyear=endyear) + sc_id = "".join([sc_id, suffix]) + parent_coll.manage_addCollection( + dataflow_uris=dataflow_uris, + country=parent_coll.country, + id=sc_id, + title=title, + allow_collections=allow_collections, + allow_envelopes=allow_envelopes, + descr=descr, + locality=locality, + partofyear=partofyear, + year=year, + endyear=endyear, + ) sc = getattr(parent_coll, sc_id) if company_id: sc = getattr(parent_coll, sc_id) @@ -593,44 +640,64 @@ def create_subcollection(self, parent_coll, title, dataflow_uris, parent_coll.reindexObject() return sc - def create_fgas_collections(self, ctx, country_uri, company_id, name, - old_company_id=None): + def create_fgas_collections( + self, ctx, country_uri, company_id, name, old_company_id=None + ): # Hardcoded obligations should be fixed and retrieved automatically - parent_coll_df = ['http://rod.eionet.europa.eu/obligations/713'] - eq_imports_df = ['http://rod.eionet.europa.eu/obligations/765'] - bulk_imports_df = ['http://rod.eionet.europa.eu/obligations/764'] + parent_coll_df = ["http://rod.eionet.europa.eu/obligations/713"] + eq_imports_df = ["http://rod.eionet.europa.eu/obligations/765"] + bulk_imports_df = ["http://rod.eionet.europa.eu/obligations/764"] if old_company_id: main_env_id = old_company_id else: main_env_id = company_id - ctx.manage_addCollection(dataflow_uris=parent_coll_df, - country=country_uri, - id=main_env_id, - title=name, - allow_collections=1, allow_envelopes=1, - descr='', locality='', partofyear='', year='', - endyear='', old_company_id=old_company_id) + ctx.manage_addCollection( + dataflow_uris=parent_coll_df, + country=country_uri, + id=main_env_id, + title=name, + allow_collections=1, + allow_envelopes=1, + descr="", + locality="", + partofyear="", + year="", + endyear="", + old_company_id=old_company_id, + ) coll = getattr(ctx, main_env_id) coll.company_id = company_id - ei_id = ''.join([RepUtils.generate_id('col'), 'ei']) + ei_id = "".join([RepUtils.generate_id("col"), "ei"]) coll.manage_addCollection( dataflow_uris=eq_imports_df, country=country_uri, id=ei_id, - title='Upload of verification documents (equipment importers)', - allow_collections=0, allow_envelopes=1, - descr='', locality='', partofyear='', year='', endyear='', - old_company_id=old_company_id) - bi_id = ''.join([RepUtils.generate_id('col'), 'bi']) + title="Upload of verification documents (equipment importers)", + allow_collections=0, + allow_envelopes=1, + descr="", + locality="", + partofyear="", + year="", + endyear="", + old_company_id=old_company_id, + ) + bi_id = "".join([RepUtils.generate_id("col"), "bi"]) coll.manage_addCollection( dataflow_uris=bulk_imports_df, country=country_uri, id=bi_id, - title='''Upload of verification documents''' - ''' (HFC producers and bulk 
importers)''',
-            allow_collections=0, allow_envelopes=1,
-            descr='', locality='', partofyear='', year='', endyear='',
-            old_company_id=old_company_id)
+            title="""Upload of verification documents"""
+            """ (HFC producers and bulk importers)""",
+            allow_collections=0,
+            allow_envelopes=1,
+            descr="",
+            locality="",
+            partofyear="",
+            year="",
+            endyear="",
+            old_company_id=old_company_id,
+        )
         ei = getattr(coll, ei_id)
         ei.company_id = company_id
         ei.reindexObject()
@@ -640,59 +707,63 @@ def create_fgas_collections(self, ctx, country_uri, company_id, name,
         coll.allow_collections = 0
         coll.reindexObject()

-    def update_company_collection(self, company_id, domain, country,
-                                  name, old_collection_id=None):
+    def update_company_collection(
+        self, company_id, domain, country, name, old_collection_id=None
+    ):
         """Update information on an existing old-type collection
-        (say, 'fgas30001') mainly setting it's `company_id`
-        (the id internal to Fgas Portal for instance)
-        If no `old_collection_id` is provided then a new collection will be
-        created with id=company_id=provided `company_id`.
-        If `old_collection_id` is provided, the the collection must exist
-        in the expected path deducted from the
-        domain/country/old_collection_id. It's company_id will be updated.
-        If `old_collection_id` does not exist at the expected location
-        nothing will happen."""
+        (say, 'fgas30001') mainly setting its `company_id`
+        (the id internal to Fgas Portal for instance)
+        If no `old_collection_id` is provided then a new collection will be
+        created with id=company_id=provided `company_id`.
+        If `old_collection_id` is provided, then the collection must exist
+        in the expected path deduced from the
+        domain/country/old_collection_id. Its company_id will be updated.
+        If `old_collection_id` does not exist at the expected location
+        nothing will happen."""
         # Disable CSRF protection
-        if 'IDisableCSRFProtection' in dir(plone.protect.interfaces):
-            alsoProvides(self.REQUEST,
-                         plone.protect.interfaces.IDisableCSRFProtection)
+        if "IDisableCSRFProtection" in dir(plone.protect.interfaces):
+            alsoProvides(
+                self.REQUEST, plone.protect.interfaces.IDisableCSRFProtection
+            )
         if REPORTEK_DEPLOYMENT == DEPLOYMENT_BDR:
-            self.REQUEST.RESPONSE.setHeader('Content-Type', 'application/json')
-            resp = {'status': 'fail',
-                    'message': ''}
+            self.REQUEST.RESPONSE.setHeader("Content-Type", "application/json")
+            resp = {"status": "fail", "message": ""}
             coll_path = self.FGASRegistryAPI.buildCollectionPath(
-                domain, country, company_id, old_collection_id)
+                domain, country, company_id, old_collection_id
+            )
             if not coll_path:
                 msg = (
-                    '''Cannot form path to collection, with details domain: '''
-                    ''' %s, country: %s, company_id: %s, old_collection_id: '''
-                    ''' %s.''' % (domain, country, company_id,
-                                  old_collection_id))
+                    """Cannot form path to collection, with details domain: """
+                    """ %s, country: %s, company_id: %s, old_collection_id: """
+                    """ %s."""
+                    % (domain, country, company_id, old_collection_id)
+                )
                 logger.warning(msg)
                 # return failure (404) to the service calling us
                 self.REQUEST.RESPONSE.setStatus(404)
-                resp['message'] = msg
+                resp["message"] = msg
                 return json.dumps(resp)
-            path_parts = coll_path.split('/')
+            path_parts = coll_path.split("/")
            obligation_id = path_parts[0]
            country_id = path_parts[1]
            coll_id = path_parts[2]
            try:
-                country_folder_path = '/'.join([obligation_id, country_id])
+                country_folder_path = "/".join([obligation_id, country_id])
                 country_folder = self.restrictedTraverse(country_folder_path)
             except KeyError:
                 msg = (
-                    '''Cannot update collection %s. Path to collection '''
-                    '''does not exist''' % coll_path)
+                    """Cannot update collection %s. Path to collection """
+                    """does not exist""" % coll_path
+                )
                 logger.warning(msg)
                 # return 404
                 self.REQUEST.RESPONSE.setStatus(404)
-                resp['message'] = msg
+                resp["message"] = msg
                 return json.dumps(resp)

             coll_id = company_id
@@ -702,145 +773,155 @@ def update_company_collection(self, company_id, domain, country,
             coll = getattr(country_folder, coll_id, None)
             if not coll:
                 try:
-                    country_uri = getattr(country_folder, 'country', '')
-                    if domain == 'FGAS':
+                    country_uri = getattr(country_folder, "country", "")
+                    if domain == "FGAS":
                         self.create_fgas_collections(
                             country_folder, country_uri, company_id, name,
-                            old_company_id=old_collection_id)
+                            old_company_id=old_collection_id,
+                        )
                     else:
                         country_folder.manage_addCollection(
                             dataflow_uris=[
                                 self.FGASRegistryAPI.DOMAIN_TO_OBLIGATION[
-                                    domain]],
+                                    domain
+                                ]
+                            ],
                             country=country_uri,
                             id=company_id,
                             title=name,
-                            allow_collections=0, allow_envelopes=1,
-                            descr='', locality='', partofyear='', year='',
-                            endyear='', old_company_id=old_collection_id)
+                            allow_collections=0,
+                            allow_envelopes=1,
+                            descr="",
+                            locality="",
+                            partofyear="",
+                            year="",
+                            endyear="",
+                            old_company_id=old_collection_id,
+                        )
                     coll = getattr(country_folder, coll_id)
                 except Exception as e:
                     msg = "Cannot create collection %s. " % coll_path
                     logger.warning(msg + str(e))
                     # return failure (404) to the service calling us
                     self.REQUEST.RESPONSE.setStatus(404)
-                    resp['message'] = msg
+                    resp["message"] = msg
                     return json.dumps(resp)
             coll.company_id = company_id
             if old_collection_id:
                 coll.old_company_id = old_collection_id
             coll.reindexObject()
-            resp['status'] = 'success'
-            resp['message'] = ('Collection %s updated/created succesfully'
-                               % coll_path)
+            resp["status"] = "success"
+            resp["message"] = (
+                "Collection %s updated/created successfully" % coll_path
+            )
             return json.dumps(resp)
         else:
             pass

     if REPORTEK_DEPLOYMENT == DEPLOYMENT_BDR:
-        security.declareProtected('Audit Envelopes', 'globalworklist')
-        security.declareProtected('Audit Envelopes', 'searchfeedbacks')
-        security.declareProtected('Audit Envelopes', 'resultsfeedbacks')
-        security.declareProtected('Audit Envelopes', 'recent')
-        security.declareProtected('Audit Envelopes', 'recent_released')
-        security.declareProtected('Audit Envelopes', 'searchxml')
-        security.declareProtected('Audit Envelopes', 'resultsxml')
-        security.declareProtected('Audit Envelopes', 'search_dataflow')
-        security.declareProtected('Audit Envelopes', 'search_dataflow_url')
-        security.declareProtected('Audit Envelopes', 'lookup_last_delivery')
-        security.declareProtected('Audit Envelopes', 'getEnvelopesInfo')
-        security.declareProtected('Audit Envelopes', 'getSearchResults')
-        security.declareProtected('Audit Envelopes', 'getUniqueValuesFor')
+        security.declareProtected("Audit Envelopes", "globalworklist")
+        security.declareProtected("Audit Envelopes", "searchfeedbacks")
+        security.declareProtected("Audit Envelopes", "resultsfeedbacks")
+        security.declareProtected("Audit Envelopes", "recent")
+        security.declareProtected("Audit Envelopes", "recent_released")
+        security.declareProtected("Audit Envelopes", "searchxml")
+        security.declareProtected("Audit Envelopes", "resultsxml")
+        security.declareProtected("Audit Envelopes", "search_dataflow")
+        security.declareProtected("Audit Envelopes", "search_dataflow_url")
+        security.declareProtected("Audit Envelopes", "lookup_last_delivery")
+        security.declareProtected("Audit Envelopes",
"getEnvelopesInfo") + security.declareProtected("Audit Envelopes", "getSearchResults") + security.declareProtected("Audit Envelopes", "getUniqueValuesFor") else: - security.declareProtected('View', 'globalworklist') - security.declareProtected('View', 'searchfeedbacks') - security.declareProtected('View', 'resultsfeedbacks') - security.declareProtected('View', 'recent') - security.declareProtected('View', 'recent_released') - security.declareProtected('View', 'searchxml') - security.declareProtected('View', 'resultsxml') - security.declareProtected('View', 'search_dataflow') - security.declareProtected('View', 'search_dataflow_url') - security.declareProtected('View', 'lookup_last_delivery') - security.declareProtected('View', 'getEnvelopesInfo') - security.declareProtected('View', 'getSearchResults') - security.declareProtected('View', 'getUniqueValuesFor') - - globalworklist = PageTemplateFile('zpt/engineGlobalWorklist', globals()) - searchfeedbacks = PageTemplateFile('zpt/engineSearchFeedbacks', globals()) + security.declareProtected("View", "globalworklist") + security.declareProtected("View", "searchfeedbacks") + security.declareProtected("View", "resultsfeedbacks") + security.declareProtected("View", "recent") + security.declareProtected("View", "recent_released") + security.declareProtected("View", "searchxml") + security.declareProtected("View", "resultsxml") + security.declareProtected("View", "search_dataflow") + security.declareProtected("View", "search_dataflow_url") + security.declareProtected("View", "lookup_last_delivery") + security.declareProtected("View", "getEnvelopesInfo") + security.declareProtected("View", "getSearchResults") + security.declareProtected("View", "getUniqueValuesFor") + + globalworklist = PageTemplateFile("zpt/engineGlobalWorklist", globals()) + searchfeedbacks = PageTemplateFile("zpt/engineSearchFeedbacks", globals()) resultsfeedbacks = PageTemplateFile( - 'zpt/engineResultsFeedbacks', globals()) - recent = PageTemplateFile('zpt/engineRecentUploads', globals()) - recent_released = PageTemplateFile('zpt/engineRecentReleased', globals()) - searchxml = PageTemplateFile('zpt/engineSearchXml', globals()) - resultsxml = PageTemplateFile('zpt/engineResultsXml', globals()) + "zpt/engineResultsFeedbacks", globals() + ) + recent = PageTemplateFile("zpt/engineRecentUploads", globals()) + recent_released = PageTemplateFile("zpt/engineRecentReleased", globals()) + searchxml = PageTemplateFile("zpt/engineSearchXml", globals()) + resultsxml = PageTemplateFile("zpt/engineResultsXml", globals()) - security.declarePublic('languages_box') - languages_box = PageTemplateFile('zpt/languages_box', globals()) + security.declarePublic("languages_box") + languages_box = PageTemplateFile("zpt/languages_box", globals()) - _searchdataflow = PageTemplateFile('zpt/searchdataflow', globals()) + _searchdataflow = PageTemplateFile("zpt/searchdataflow", globals()) - search_dataflow = PageTemplateFile('zpt/search_dataflow', globals()) + search_dataflow = PageTemplateFile("zpt/search_dataflow", globals()) def get_query_args(self): - """ Make a Catalog query object from the form in the REQUEST - """ + """Make a Catalog query object from the form in the REQUEST""" catalog_args = { - 'meta_type': ['Report Envelope', 'Repository Referral'] + "meta_type": ["Report Envelope", "Repository Referral"] } - status = self.REQUEST.get('release_status') - if status == 'anystatus': - catalog_args.pop('released', None) - elif status == 'released': - catalog_args['released'] = 1 - elif status == 
'notreleased': - catalog_args['released'] = 0 + status = self.REQUEST.get("release_status") + if status == "anystatus": + catalog_args.pop("released", None) + elif status == "released": + catalog_args["released"] = 1 + elif status == "notreleased": + catalog_args["released"] = 0 else: return - if self.REQUEST.get('query_start'): - catalog_args['start'] = self.REQUEST['query_start'] - if self.REQUEST.get('sort_on'): - catalog_args['sort_on'] = self.REQUEST['sort_on'] - if self.REQUEST.get('sort_order'): - catalog_args['sort_order'] = self.REQUEST['sort_order'] - if self.REQUEST.get('dataflow_uris'): - catalog_args['dataflow_uris'] = self.REQUEST['dataflow_uris'] - if self.REQUEST.get('country'): - catalog_args['country'] = self.REQUEST['country'] - if self.REQUEST.get('years'): - catalog_args['years'] = self.REQUEST['years'] - if self.REQUEST.get('partofyear'): - catalog_args['partofyear'] = self.REQUEST['partofyear'] - - reportingdate_start = self.REQUEST.get('reportingdate_start') - reportingdate_end = self.REQUEST.get('reportingdate_end') + if self.REQUEST.get("query_start"): + catalog_args["start"] = self.REQUEST["query_start"] + if self.REQUEST.get("sort_on"): + catalog_args["sort_on"] = self.REQUEST["sort_on"] + if self.REQUEST.get("sort_order"): + catalog_args["sort_order"] = self.REQUEST["sort_order"] + if self.REQUEST.get("dataflow_uris"): + catalog_args["dataflow_uris"] = self.REQUEST["dataflow_uris"] + if self.REQUEST.get("country"): + catalog_args["country"] = self.REQUEST["country"] + if self.REQUEST.get("years"): + catalog_args["years"] = self.REQUEST["years"] + if self.REQUEST.get("partofyear"): + catalog_args["partofyear"] = self.REQUEST["partofyear"] + + reportingdate_start = self.REQUEST.get("reportingdate_start") + reportingdate_end = self.REQUEST.get("reportingdate_end") dateRangeQuery = {} if reportingdate_start and reportingdate_end: - dateRangeQuery['range'] = 'min:max' - dateRangeQuery['query'] = [reportingdate_start, reportingdate_end] + dateRangeQuery["range"] = "min:max" + dateRangeQuery["query"] = [reportingdate_start, reportingdate_end] elif reportingdate_start: - dateRangeQuery['range'] = 'min' - dateRangeQuery['query'] = reportingdate_start + dateRangeQuery["range"] = "min" + dateRangeQuery["query"] = reportingdate_start elif reportingdate_end: - dateRangeQuery['range'] = 'max' - dateRangeQuery['query'] = reportingdate_end + dateRangeQuery["range"] = "max" + dateRangeQuery["query"] = reportingdate_end if dateRangeQuery: - catalog_args['reportingdate'] = dateRangeQuery + catalog_args["reportingdate"] = dateRangeQuery return catalog_args - security.declareProtected('View', 'searchdataflow') + security.declareProtected("View", "searchdataflow") def searchdataflow(self, batch_size=None): """Search the ZCatalog for Report Envelopes, - show results and keep displaying the form """ + show results and keep displaying the form""" # show the initial default populated - if 'sort_on' not in self.REQUEST: + if "sort_on" not in self.REQUEST: return self._searchdataflow() catalog_args = self.get_query_args() @@ -851,17 +932,17 @@ def searchdataflow(self, batch_size=None): envelopeObjects = [] for eBrain in envelopes: o = eBrain.getObject() - if getSecurityManager().checkPermission('View', o): + if getSecurityManager().checkPermission("View", o): envelopeObjects.append(o) - return self._searchdataflow(results=envelopeObjects, - **self.REQUEST.form) + return self._searchdataflow( + results=envelopeObjects, **self.REQUEST.form + ) - security.declareProtected('View', 
'get_df_objects') + security.declareProtected("View", "get_df_objects") def get_df_objects(self, catalog_args): - """ Query the catalog with the provided catalog_args - """ + """Query the catalog with the provided catalog_args""" envelopeObjects = [] if catalog_args: catalog = getToolByName(self, DEFAULT_CATALOG, None) @@ -869,84 +950,94 @@ def get_df_objects(self, catalog_args): for eBrain in envelopes: obj = eBrain.getObject() - if getSecurityManager().checkPermission('View', obj): + if getSecurityManager().checkPermission("View", obj): files = [] - for fileObj in obj.objectValues('Report Document'): - files.append({ - "filename": fileObj.id, - "title": str(fileObj.absolute_url_path()), - "url": (str(fileObj.absolute_url_path()) - + "/manage_document") - }) + for fileObj in obj.objectValues("Report Document"): + files.append( + { + "filename": fileObj.id, + "title": str(fileObj.absolute_url_path()), + "url": ( + str(fileObj.absolute_url_path()) + + "/manage_document" + ), + } + ) accepted = True - for fileObj in obj.objectValues('Report Feedback'): - if (fileObj.title in ( + for fileObj in obj.objectValues("Report Feedback"): + if fileObj.title in ( "Data delivery was not acceptable", - "Non-acceptance of F-gas report")): + "Non-acceptance of F-gas report", + ): accepted = False obligations = [] for uri in obj.dataflow_uris: - obligations.append(self.dataflow_lookup(uri)['TITLE']) + obligations.append(self.dataflow_lookup(uri)["TITLE"]) - countryName = '' - if obj.meta_type == 'Report Envelope': + countryName = "" + if obj.meta_type == "Report Envelope": countryName = obj.getCountryName() else: try: - countryName = obj.localities_dict()[ - obj.country]['name'] + countryName = obj.localities_dict()[obj.country][ + "name" + ] except KeyError: countryName = "Unknown" reported = obj.bobobase_modification_time() - if obj.meta_type == 'Report Envelope': + if obj.meta_type == "Report Envelope": reported = obj.reportingdate - company_id = '-' - if (hasattr(obj.aq_parent, 'company_id')): + company_id = "-" + if hasattr(obj.aq_parent, "company_id"): company_id = obj.aq_parent.company_id - envelopeObjects.append({ - 'company_id': company_id, - 'released': obj.released, - 'path': obj.absolute_url_path(), - 'country': countryName, - 'company': obj.aq_parent.title, - 'userid': obj.aq_parent.id, - 'title': obj.title, - 'id': obj.id, - 'years': {"start": obj.year, "end": obj.endyear}, - 'end_year': obj.endyear, - 'reportingdate': reported.strftime('%Y-%m-%d'), - 'files': files, - 'obligation': (obligations[0] if obligations - else "Unknown"), - 'accepted': accepted - }) + envelopeObjects.append( + { + "company_id": company_id, + "released": obj.released, + "path": obj.absolute_url_path(), + "country": countryName, + "company": obj.aq_parent.title, + "userid": obj.aq_parent.id, + "title": obj.title, + "id": obj.id, + "years": {"start": obj.year, "end": obj.endyear}, + "end_year": obj.endyear, + "reportingdate": reported.strftime("%Y-%m-%d"), + "files": files, + "obligation": ( + obligations[0] if obligations else "Unknown" + ), + "accepted": accepted, + } + ) return envelopeObjects def search_dataflow_url(self): """Search the ZCatalog for Report Envelopes, - show results and keep displaying the form """ + show results and keep displaying the form""" catalog_args = self.get_query_args() - if self.REQUEST.get('countries'): - isos = self.REQUEST.get('countries') - countries = filter(lambda c: c.get( - 'iso') in isos, self.localities_rod()) - catalog_args['country'] = [country['uri'] for country in 
countries] - if self.REQUEST.get('obligations'): - obligations = self.REQUEST.get('obligations') - df_dict = {o['PK_RA_ID']: o['uri'] for o in self.dataflow_rod()} + if self.REQUEST.get("countries"): + isos = self.REQUEST.get("countries") + countries = filter( + lambda c: c.get("iso") in isos, self.localities_rod() + ) + catalog_args["country"] = [country["uri"] for country in countries] + if self.REQUEST.get("obligations"): + obligations = self.REQUEST.get("obligations") + df_dict = {o["PK_RA_ID"]: o["uri"] for o in self.dataflow_rod()} if not isinstance(obligations, list): obligations = [obligations] df_uris = [df_dict[obl] for obl in obligations] - catalog_args['dataflow_uris'] = df_uris + catalog_args["dataflow_uris"] = df_uris return json.dumps(self.get_df_objects(catalog_args)) @@ -955,65 +1046,80 @@ def assign_roles(self, user, role, local_roles, doc): doc.manage_setLocalRoles(user, local_roles) def remove_roles(self, user, role, local_roles, doc): - doc.manage_delLocalRoles(userids=[user, ]) + doc.manage_delLocalRoles( + userids=[ + user, + ] + ) if role in local_roles: local_roles.remove(role) if local_roles: doc.manage_setLocalRoles(user, local_roles) - security.declareProtected(view_management_screens, 'show_local_roles') + security.declareProtected(view_management_screens, "show_local_roles") def show_local_roles(self, userid=None, REQUEST=None): - """ REST API to get local roles for user id """ + """REST API to get local roles for user id""" def filter_objects(root): - accepted_meta_types = ['Report Collection', 'Report Envelope'] + accepted_meta_types = ["Report Collection", "Report Envelope"] for node in root.objectValues(): if node.meta_type in accepted_meta_types: yield node - if node.meta_type == 'Report Collection': + if node.meta_type == "Report Collection": for subnode in filter_objects(node): if subnode.meta_type in accepted_meta_types: yield subnode - userid = userid or REQUEST.get('userid') + userid = userid or REQUEST.get("userid") if userid: from collections import defaultdict + resp = defaultdict(list) for country in self.getCountriesList(): for obj in filter_objects(country): role = obj.get_local_roles_for_userid(userid) if role: - resp[userid].append({ - 'roles': role, - 'ob_url': obj.absolute_url(), - 'ob_title': obj.title, - 'extra': {'country': country.title} - }) + resp[userid].append( + { + "roles": role, + "ob_url": obj.absolute_url(), + "ob_title": obj.title, + "extra": {"country": country.title}, + } + ) try: import json except ImportError: return resp - REQUEST.RESPONSE.setHeader('Content-Type', 'application/json') + REQUEST.RESPONSE.setHeader("Content-Type", "application/json") return json.dumps(resp, indent=4) return None @staticmethod def clean_pattern(pattern): pattern = pattern.strip() - pattern = pattern.replace('\\', '/') - dirs = [item for item in pattern.split('/') if item] - pattern = '/'.join(dirs) + pattern = pattern.replace("\\", "/") + dirs = [item for item in pattern.split("/") if item] + pattern = "/".join(dirs) return pattern - def response_messages(self, crole, users, ccountries, dataflow_uris, - fail_pattern, success_pattern, modifier): + def response_messages( + self, + crole, + users, + ccountries, + dataflow_uris, + fail_pattern, + success_pattern, + modifier, + ): messages = [] for country in ccountries: query = { - 'dataflow_uris': dataflow_uris, - 'meta_type': 'Report Collection', - 'country': country + "dataflow_uris": dataflow_uris, + "meta_type": "Report Collection", + "country": country, } catalog = getToolByName(self, 
DEFAULT_CATALOG, None) @@ -1021,57 +1127,59 @@ def response_messages(self, crole, users, ccountries, dataflow_uris, if not brains: message = fail_pattern % ( crole, - ', '.join(users), - self.localities_dict().get( - country, {'name': 'Unknown'})['name'] + ", ".join(users), + self.localities_dict().get(country, {"name": "Unknown"})[ + "name" + ], ) - messages.append({ - 'status': 'fail', - 'message': message - }) + messages.append({"status": "fail", "message": message}) break collections = [] for brain in brains: doc = brain.getObject() for user in users: local_roles = [ - role for role in doc.get_local_roles_for_userid(user) - if role != 'Client' + role + for role in doc.get_local_roles_for_userid(user) + if role != "Client" ] modifier(user, crole, local_roles, doc) - collections.append('
  • %s
  • ' % doc.absolute_url()) + collections.append("
  • %s
  • " % doc.absolute_url()) message = success_pattern % ( - crole, ', '.join(users), ''.join(collections)) - messages.append({ - 'status': 'success', - 'message': message - }) + crole, + ", ".join(users), + "".join(collections), + ) + messages.append({"status": "success", "message": message}) return messages - security.declareProtected('View', 'getCountriesList') + security.declareProtected("View", "getCountriesList") def getCountriesList(self): """ """ - l_countries = self.getParentNode().objectValues('Report Collection') - return RepUtils.utSortByAttr(l_countries, 'title') + l_countries = self.getParentNode().objectValues("Report Collection") + return RepUtils.utSortByAttr(l_countries, "title") - security.declarePrivate('sitemap_filter') + security.declarePrivate("sitemap_filter") def sitemap_filter(self, objs): - return [x for x in objs - if x.meta_type in ['Report Collection', - 'Report Envelope', - 'Repository Referral']] + return [ + x + for x in objs + if x.meta_type + in ["Report Collection", "Report Envelope", "Repository Referral"] + ] - security.declareProtected('View', 'getSitemap') + security.declareProtected("View", "getSitemap") - def getSitemap(self, tree_root=None, tree_pre='tree'): + def getSitemap(self, tree_root=None, tree_pre="tree"): from ZTUtils import SimpleTreeMaker + if tree_root is None: tree_root = self.getPhysicalRoot() tm = SimpleTreeMaker(tree_pre) tm.setChildAccess(filter=self.sitemap_filter) - tm.setSkip('') + tm.setSkip("") try: tree, rows = tm.cookieTree(tree_root) except ValueError: @@ -1079,33 +1187,33 @@ def getSitemap(self, tree_root=None, tree_pre='tree'): tree_root.REQUEST.form.clear() tree, rows = tm.cookieTree(tree_root) rows.pop(0) - return {'root': tree, 'rows': rows} + return {"root": tree, "rows": rows} - security.declareProtected('View', 'sitemap') - sitemap = PageTemplateFile('zpt/engine/sitemap', globals()) + security.declareProtected("View", "sitemap") + sitemap = PageTemplateFile("zpt/engine/sitemap", globals()) - security.declarePublic('getWebQURL') + security.declarePublic("getWebQURL") def getWebQURL(self): - """ return '' if there's no WebQuestionnaire attached to this - application + """return '' if there's no WebQuestionnaire attached to this + application """ return self.webq_url def getNotCompletedWorkitems(self, sortby, how, REQUEST=None): - """ Loops for all the workitems that are in the 'active','inactive', - 'fallout' status and returns their list + """Loops for all the workitems that are in the 'active','inactive', + 'fallout' status and returns their list """ catalog = getToolByName(self, DEFAULT_CATALOG, None) query = { - 'meta_type': 'Workitem', - 'status': ['active', 'inactive', 'fallout'], - 'sort_on': sortby + "meta_type": "Workitem", + "status": ["active", "inactive", "fallout"], + "sort_on": sortby, } - if how == 'desc': - query['sort_order'] = 'reverse' + if how == "desc": + query["sort_order"] = "reverse" workitems = catalog.searchResults(**query) @@ -1114,10 +1222,11 @@ def getNotCompletedWorkitems(self, sortby, how, REQUEST=None): else: paginator = DiggPaginator( - workitems, 20, body=5, padding=2, orphans=5) + workitems, 20, body=5, padding=2, orphans=5 + ) try: - page = int(REQUEST.get('page', '1')) + page = int(REQUEST.get("page", "1")) except ValueError: page = 1 @@ -1126,12 +1235,13 @@ def getNotCompletedWorkitems(self, sortby, how, REQUEST=None): except (EmptyPage, InvalidPage): workitems = paginator.page(paginator.num_pages) - workitems.object_list = [ob.getObject() - for ob in workitems.object_list] + 
workitems.object_list = [ + ob.getObject() for ob in workitems.object_list + ] return workitems def zipEnvelopes(self, envelopes=[], REQUEST=None, RESPONSE=None): - """ Zip several envelopes together with the metadata """ + """Zip several envelopes together with the metadata""" import zip_content envelopes = RepUtils.utConvertToList(envelopes) @@ -1153,50 +1263,60 @@ def zipEnvelopes(self, envelopes=[], REQUEST=None, RESPONSE=None): if env_ob.released: env_name = env_ob.title - for doc in env_ob.objectValues('Report Document'): + for doc in env_ob.objectValues("Report Document"): if getSecurityManager().checkPermission(view, doc): with doc.data_file.open() as doc_file: tmp_copy = RepUtils.temporary_named_copy(doc_file) with tmp_copy: - outzd.write(tmp_copy.name, - "%s/%s" % (env_name, str(doc.getId()))) + outzd.write( + tmp_copy.name, + "%s/%s" % (env_name, str(doc.getId())), + ) # write metadata.txt metadata_file = RepUtils.TmpFile( - zip_content.get_metadata_content(env_ob)) - outzd.write(str(metadata_file), - "%s/%s" % (env_name, 'metadata.txt')) + zip_content.get_metadata_content(env_ob) + ) + outzd.write( + str(metadata_file), "%s/%s" % (env_name, "metadata.txt") + ) # write README.txt readme_file = RepUtils.TmpFile( - zip_content.get_readme_content(env_ob)) - outzd.write(str(readme_file), - "%s/%s" % (env_name, 'README.txt')) + zip_content.get_readme_content(env_ob) + ) + outzd.write( + str(readme_file), "%s/%s" % (env_name, "README.txt") + ) # write history.txt history_file = RepUtils.TmpFile( - zip_content.get_history_content(env_ob)) - outzd.write(str(history_file), - "%s/%s" % (env_name, 'history.txt')) + zip_content.get_history_content(env_ob) + ) + outzd.write( + str(history_file), "%s/%s" % (env_name, "history.txt") + ) outzd.close() stat = os.stat(tmpfile) - RESPONSE.setHeader('Content-Type', 'application/x-zip') - RESPONSE.setHeader('Content-Disposition', - 'attachment; filename="%s.zip"' % 'envelopes') - RESPONSE.setHeader('Content-Length', stat[6]) + RESPONSE.setHeader("Content-Type", "application/x-zip") + RESPONSE.setHeader( + "Content-Disposition", + 'attachment; filename="%s.zip"' % "envelopes", + ) + RESPONSE.setHeader("Content-Length", stat[6]) RepUtils.copy_file(tmpfile, RESPONSE) os.unlink(tmpfile) - return '' + return "" def __getObjects(self, p_brains): - """ Get objects from catalog identifiers. - Skip over the ones that don't really exist. + """Get objects from catalog identifiers. + Skip over the ones that don't really exist. 
""" l_catalog = getattr(self, constants.DEFAULT_CATALOG) - records = map(getattr, p_brains, ('data_record_id_',)*len(p_brains)) + records = map(getattr, p_brains, ("data_record_id_",) * len(p_brains)) objects = [] for record in records: try: @@ -1207,14 +1327,11 @@ def __getObjects(self, p_brains): def get_apps_wks(self, apps): catalog = self.unrestrictedTraverse(constants.DEFAULT_CATALOG) - return catalog(meta_type='Workitem', - status='active', - activity_id=apps) + return catalog(meta_type="Workitem", status="active", activity_id=apps) def get_running_envelopes(self): catalog = self.unrestrictedTraverse(constants.DEFAULT_CATALOG) - return catalog(meta_type='Report Envelope', - wf_status='forward') + return catalog(meta_type="Report Envelope", wf_status="forward") def get_forward_envelopes(self): """Return a JSON array with envelopes that are in forward wf_status.""" @@ -1226,15 +1343,15 @@ def get_forward_envelopes(self): return self.jsonify(result) def runAutomaticApplications(self, p_applications, REQUEST=None): - """ Searches for the active workitems of activities that need - triggering on regular basis and calls triggerApplication for them - Example of activity that needs further triggering: AutomaticQA + """Searches for the active workitems of activities that need + triggering on regular basis and calls triggerApplication for them + Example of activity that needs further triggering: AutomaticQA - Note: Since this method is called using a HTTP get, - the p_applications parameter cannot be a list, but a string. - To include more than one applications, separate them by || + Note: Since this method is called using a HTTP get, + the p_applications parameter cannot be a list, but a string. + To include more than one applications, separate them by || """ - apps = p_applications.split('||') + apps = p_applications.split("||") result = [] brains = self.get_apps_wks(apps) @@ -1246,21 +1363,23 @@ def runAutomaticApplications(self, p_applications, REQUEST=None): if not REQUEST: transaction.commit() except Exception as e: - msg = 'Error while triggering application for: '\ - '{} - ({})'.format(brain.getURL(), str(e)) + msg = ( + "Error while triggering application for: " + "{} - ({})".format(brain.getURL(), str(e)) + ) logger.error(msg) return result def getWkAppsActive(self, p_applications, REQUEST=None): - """ Searches for the active workitems of activities that need - triggering on regular basis. + """Searches for the active workitems of activities that need + triggering on regular basis. - Note: Since this method is called using a HTTP get, - the p_applications parameter cannot be a list, but a string. - To include more than one applications, separate them by || + Note: Since this method is called using a HTTP get, + the p_applications parameter cannot be a list, but a string. 
+ To include more than one applications, separate them by || """ - apps = p_applications.split('||') + apps = p_applications.split("||") result = [] brains = self.get_apps_wks(apps) @@ -1270,50 +1389,53 @@ def getWkAppsActive(self, p_applications, REQUEST=None): return self.jsonify(result) def _xmlrpc_search_delivery(self, dataflow_uris, country): - """ Looks for Report Envelopes with the given attributes - """ + """Looks for Report Envelopes with the given attributes""" l_catalog = getattr(self, constants.DEFAULT_CATALOG) l_result = [] - l_deliveries = l_catalog(meta_type='Report Envelope', country=country) + l_deliveries = l_catalog(meta_type="Report Envelope", country=country) for l_delivery in l_deliveries: l_obj = l_delivery.getObject() if RepUtils.utIsSubsetOf(l_obj.dataflow_uris, dataflow_uris): l_result.append(l_obj) return l_result - def lookup_last_delivery(self, dataflow_uris, country, - reporting_period=''): - """ Find the newest delivery with the same location and dataflows, - but is older than the reporting_period in the argument - If the reporting_period is not provided, finds them all until today + def lookup_last_delivery( + self, dataflow_uris, country, reporting_period="" + ): + """Find the newest delivery with the same location and dataflows, + but is older than the reporting_period in the argument + If the reporting_period is not provided, finds them all until today """ if reporting_period: l_reporting_period = DateTime(reporting_period) else: l_reporting_period = DateTime() l_deliveries = self._xmlrpc_search_delivery( - dataflow_uris=dataflow_uris, country=country) + dataflow_uris=dataflow_uris, country=country + ) if l_deliveries: # order all envelopes by start data in reverse and # filter only the ones that have the start date previous than # l_reporting_period - return RepUtils.utSortByMethod(l_deliveries, 'getStartDate', - l_reporting_period, 1) + return RepUtils.utSortByMethod( + l_deliveries, "getStartDate", l_reporting_period, 1 + ) return [] - security.declareProtected(view_management_screens, 'harvestXforms') + security.declareProtected(view_management_screens, "harvestXforms") def harvestXforms(self): - """ Deprecated - use (with care) load_from_dd() on each - DataflowMappingsRecord object for similar result + """Deprecated - use (with care) load_from_dd() on each + DataflowMappingsRecord object for similar result - calls getXforms from the WebQ, and updates the - DataflowMappingRecord table for the haswebform attribute - To be called on regular basis by a cron job + calls getXforms from the WebQ, and updates the + DataflowMappingRecord table for the haswebform attribute + To be called on regular basis by a cron job """ raise DeprecationWarning( - '''DataflowMappingRecord objects have been deprecated,''' - ''' use DataflowMappingsRecord objects instead''') + """DataflowMappingRecord objects have been deprecated,""" + """ use DataflowMappingsRecord objects instead""" + ) ########################################################################### # @@ -1322,39 +1444,49 @@ def harvestXforms(self): ########################################################################### def getEnvelopesInfo(self, obligation): - """ Returns a list with all information about envelopes for a certain - obligation, including the XML files inside + """Returns a list with all information about envelopes for a certain + obligation, including the XML files inside """ reslist = [] catalog = getToolByName(self, DEFAULT_CATALOG, None) - l_params = {'meta_type': 'Report Envelope', - 
'dataflow_uris': obligation, 'released': 1} + l_params = { + "meta_type": "Report Envelope", + "dataflow_uris": obligation, + "released": 1, + } for obj in self.__getObjects(catalog.searchResults(**l_params)): - res = {'url': obj.absolute_url(0), - 'title': obj.title, - 'description': obj.descr, - 'dataflow_uris': obj.dataflow_uris, - 'country': obj.country, - 'country_name': obj.getCountryName(), - 'country_code': obj.getCountryCode(), - 'locality': obj.locality, - 'released': obj.reportingdate.HTML4(), - 'startyear': obj.year, - 'endyear': obj.endyear, - 'partofyear': obj.partofyear, - } + res = { + "url": obj.absolute_url(0), + "title": obj.title, + "description": obj.descr, + "dataflow_uris": obj.dataflow_uris, + "country": obj.country, + "country_name": obj.getCountryName(), + "country_code": obj.getCountryCode(), + "locality": obj.locality, + "released": obj.reportingdate.HTML4(), + "startyear": obj.year, + "endyear": obj.endyear, + "partofyear": obj.partofyear, + } filelist = [] - for file in obj.objectValues('Report Document'): - if file.content_type == 'text/xml': - if file.acquiredRolesAreUsedBy('View'): + for file in obj.objectValues("Report Document"): + if file.content_type == "text/xml": + if file.acquiredRolesAreUsedBy("View"): restricted = 0 else: restricted = 1 filelist.append( - [file.id, file.content_type, file.xml_schema_location, - file.title, restricted]) - res['files'] = filelist + [ + file.id, + file.content_type, + file.xml_schema_location, + file.title, + restricted, + ] + ) + res["files"] = filelist reslist.append(res) return reslist @@ -1364,25 +1496,28 @@ def getEnvelopesInfo(self, obligation): # ########################################################################### - security.declareProtected('View', 'subscriptions_html') + security.declareProtected("View", "subscriptions_html") subscriptions_html = PageTemplateFile( - 'zpt/engine/subscriptions', globals()) + "zpt/engine/subscriptions", globals() + ) - security.declareProtected('View', 'uns_settings') - uns_settings = PageTemplateFile('zpt/engine/unsinterface', globals()) + security.declareProtected("View", "uns_settings") + uns_settings = PageTemplateFile("zpt/engine/unsinterface", globals()) _migration_table = PageTemplateFile( - 'zpt/engine/migration_table', globals()) + "zpt/engine/migration_table", globals() + ) - security.declareProtected('View management screens', 'migration_table') + security.declareProtected("View management screens", "migration_table") def migration_table(self): """List all migrations applied to this deployment and their details""" - do_update = self.REQUEST.form.get('update') + do_update = self.REQUEST.form.get("update") if do_update: upd_module = importlib.import_module( - '.'.join(['Products.Reportek.updates', do_update])) - app = self.unrestrictedTraverse('/') + ".".join(["Products.Reportek.updates", do_update]) + ) + app = self.unrestrictedTraverse("/") upd_module.update(app) migs = getattr(self, constants.MIGRATION_ID) @@ -1392,93 +1527,122 @@ def migration_table(self): for migrationOb in migs: migrationItem = { - 'name': migrationOb.name, - 'version': migrationOb.version, - 'first': migrationOb.toDate(migrationOb.first_ts), - 'current': migrationOb.toDate(migrationOb.current_ts) + "name": migrationOb.name, + "version": migrationOb.version, + "first": migrationOb.toDate(migrationOb.first_ts), + "current": migrationOb.toDate(migrationOb.current_ts), } done_rows.append(migrationItem) upd_path = os.path.dirname(Products.Reportek.updates.__file__) - upd_files = 
[f.split('.py')[0] for f in os.listdir(upd_path) - if os.path.isfile('/'.join([upd_path, f])) and - f.startswith('u') and len(f.split('.py')) > 1] - applied = [upd.get('name') for upd in done_rows] + upd_files = [ + f.split(".py")[0] + for f in os.listdir(upd_path) + if os.path.isfile("/".join([upd_path, f])) + and f.startswith("u") + and len(f.split(".py")) > 1 + ] + applied = [upd.get("name") for upd in done_rows] for f in set(upd_files): applicable = False if f not in applied: upd_module = importlib.import_module( - '.'.join(['Products.Reportek.updates', f])) + ".".join(["Products.Reportek.updates", f]) + ) if REPORTEK_DEPLOYMENT in upd_module.APPLIES_TO: applicable = True - todo_rows.append({ - 'name': f, - 'applicable': applicable, - 'version': upd_module.VERSION, - }) - todo_rows = sorted(todo_rows, key=lambda o: o.get('version'), - reverse=True) - return self._migration_table(todo_migrationRows=todo_rows, - done_migrationRows=done_rows) + todo_rows.append( + { + "name": f, + "applicable": applicable, + "version": upd_module.VERSION, + } + ) + todo_rows = sorted( + todo_rows, key=lambda o: o.get("version"), reverse=True + ) + return self._migration_table( + todo_migrationRows=todo_rows, done_migrationRows=done_rows + ) security.declareProtected( - 'View management screens', 'manage_editUNSInterface') + "View management screens", "manage_editUNSInterface" + ) - def manage_editUNSInterface(self, UNS_server, UNS_username, UNS_password, - UNS_password_confirmation, UNS_channel_id, - UNS_notification_types, REQUEST=None): - """ Edit the UNS related properties """ + def manage_editUNSInterface( + self, + UNS_server, + UNS_username, + UNS_password, + UNS_password_confirmation, + UNS_channel_id, + UNS_notification_types, + REQUEST=None, + ): + """Edit the UNS related properties""" if UNS_password != UNS_password_confirmation: if REQUEST is not None: REQUEST.RESPONSE.redirect( - '''uns_settings?manage_tabs_message=''' - '''Password and confirmation do not match!''') + """uns_settings?manage_tabs_message=""" + """Password and confirmation do not match!""" + ) return 0 self.UNS_server = UNS_server self.UNS_username = UNS_username self.UNS_password = UNS_password self.UNS_channel_id = UNS_channel_id self.UNS_notification_types = [ - x for x in UNS_notification_types if x != ''] + x for x in UNS_notification_types if x != "" + ] if REQUEST is not None: REQUEST.RESPONSE.redirect( - 'uns_settings?manage_tabs_message=Saved changes') + "uns_settings?manage_tabs_message=Saved changes" + ) return 1 def uns_notifications_enabled(self): - return bool(os.environ.get('UNS_NOTIFICATIONS', 'off') == 'on') + return bool(os.environ.get("UNS_NOTIFICATIONS", "off") == "on") def rmq_connector_enabled(self): - return bool(os.environ.get('RABBITMQ_ENABLED', 'off') == 'on') + return bool(os.environ.get("RABBITMQ_ENABLED", "off") == "on") - security.declarePrivate('get_uns_xmlrpc_server') + security.declarePrivate("get_uns_xmlrpc_server") def get_uns_xmlrpc_server(self): if self.uns_notifications_enabled(): - url = self.UNS_server + '/rpcrouter' + url = self.UNS_server + "/rpcrouter" if self.UNS_username: - frag = '%s:%s@' % (self.UNS_username, self.UNS_password) - url = url.replace('http://', 'http://' + frag) - url = url.replace('https://', 'https://' + frag) + frag = "%s:%s@" % (self.UNS_username, self.UNS_password) + url = url.replace("http://", "http://" + frag) + url = url.replace("https://", "https://" + frag) return xmlrpclib.Server(url) def get_ecas_userid(self, username): - ecas_path = '/acl_users/' + 
ECAS_ID + ecas_path = "/acl_users/" + ECAS_ID try: ecas = self.unrestrictedTraverse(ecas_path, None) if ecas: return ecas.getEcasUserId(username) except Exception: - logger.info('Unable to get ecas info') + logger.info("Unable to get ecas info") + + def get_ecas_user(self, username): + ecas_path = "/acl_users/" + ECAS_ID + try: + ecas = self.unrestrictedTraverse(ecas_path, None) + if ecas: + return ecas.getEcasUser(username) + except Exception: + logger.info("Unable to get ecas info") - security.declareProtected('View', 'canUserSubscribeToUNS') + security.declareProtected("View", "canUserSubscribeToUNS") - def canUserSubscribeToUNS(self, user_id='', REQUEST=None): - """ Indicates if the user given as parameter or authenticated - is allowed to subscribe to UNS for this channel + def canUserSubscribeToUNS(self, user_id="", REQUEST=None): + """Indicates if the user given as parameter or authenticated + is allowed to subscribe to UNS for this channel """ if not user_id: - user_id = self.REQUEST['AUTHENTICATED_USER'].getUserName() - if user_id == 'Anonymous User': + user_id = self.REQUEST["AUTHENTICATED_USER"].getUserName() + if user_id == "Anonymous User": return 0 # TODO: cache results for a few minutes try: @@ -1487,84 +1651,114 @@ def canUserSubscribeToUNS(self, user_id='', REQUEST=None): if REPORTEK_DEPLOYMENT == DEPLOYMENT_BDR: if self.get_ecas_userid(user_id): user_id = self.get_ecas_userid(user_id) - l_ret = l_server.UNSService.canSubscribe(self.UNS_channel_id, - user_id) + l_ret = l_server.UNSService.canSubscribe( + self.UNS_channel_id, user_id + ) return l_ret except Exception as e: - logger.warning("Unable to contact UNS to check if {0} can " - "subscribe: {1}".format(user_id, e)) + logger.warning( + "Unable to contact UNS to check if {0} can " + "subscribe: {1}".format(user_id, e) + ) return 0 - security.declareProtected('View', 'subscribeToUNS') - - def subscribeToUNS(self, filter_country='', dataflow_uris=[], - filter_event_types=[], REQUEST=None): - """ Creates new or updates existing subscription to the specified - If there is a request, returns a message, otherwise, returns - (1, '') for success - (0, error_description) for failure + security.declareProtected("View", "subscribeToUNS") + + def subscribeToUNS( + self, + filter_country="", + dataflow_uris=[], + filter_event_types=[], + REQUEST=None, + ): + """Creates new or updates existing subscription to the specified + If there is a request, returns a message, otherwise, returns + (1, '') for success + (0, error_description) for failure """ l_filters = [] - if dataflow_uris not in [[], ['']]: + if dataflow_uris not in [[], [""]]: for df_uri in dataflow_uris: - df_title = self.dataflow_lookup(df_uri)['TITLE'] + df_title = self.dataflow_lookup(df_uri)["TITLE"] if filter_country: l_filters.append( - {'''http://rod.eionet.europa.eu/schema.rdf''' - '''#obligation''': df_title, - '''http://rod.eionet.europa.eu/schema.rdf''' - '''#locality''': filter_country}) + { + """http://rod.eionet.europa.eu/schema.rdf""" + """#obligation""": df_title, + """http://rod.eionet.europa.eu/schema.rdf""" + """#locality""": filter_country, + } + ) else: l_filters.append( - {'''http://rod.eionet.europa.eu/schema.rdf''' - '''#obligation''': df_title}) + { + """http://rod.eionet.europa.eu/schema.rdf""" + """#obligation""": df_title + } + ) elif filter_country: l_filters.append( - {'''http://rod.eionet.europa.eu/schema.rdf''' - '''#locality''': filter_country}) + { + """http://rod.eionet.europa.eu/schema.rdf""" + """#locality""": filter_country + } + ) 
l_filters_final = [] if l_filters != []: for l_filter_event_type in filter_event_types: for l_filter in l_filters: l_tmp = copy(l_filter) - l_tmp['''http://rod.eionet.europa.eu/schema.rdf''' - '''#event_type'''] = l_filter_event_type + l_tmp[ + """http://rod.eionet.europa.eu/schema.rdf""" + """#event_type""" + ] = l_filter_event_type l_filters_final.append(l_tmp) else: for l_filter_event_type in filter_event_types: l_filters_final.append( - {'''http://rod.eionet.europa.eu/schema.rdf''' - '''#event_type''': l_filter_event_type}) + { + """http://rod.eionet.europa.eu/schema.rdf""" + """#event_type""": l_filter_event_type + } + ) try: l_server = self.get_uns_xmlrpc_server() if l_server is not None: - user_id = self.REQUEST['AUTHENTICATED_USER'].getUserName() + user_id = self.REQUEST["AUTHENTICATED_USER"].getUserName() if REPORTEK_DEPLOYMENT == DEPLOYMENT_BDR: if self.get_ecas_userid(user_id): user_id = self.get_ecas_userid(user_id) l_server.UNSService.makeSubscription( - self.UNS_channel_id, user_id, l_filters_final) + self.UNS_channel_id, user_id, l_filters_final + ) if REQUEST is not None: REQUEST.RESPONSE.redirect( - '''subscriptions_html?info_title=Information''' - '''&info_msg=Subscription made successfully''') - return (1, '') + """subscriptions_html?info_title=Information""" + """&info_msg=Subscription made successfully""" + ) + return (1, "") except Exception as err: if REQUEST is not None: REQUEST.RESPONSE.redirect( - '''subscriptions_html?info_title=Error&info_msg=Your ''' - '''subscription could not be made because of the ''' - '''following error: %s''' % str(err)) + """subscriptions_html?info_title=Error&info_msg=Your """ + """subscription could not be made because of the """ + """following error: %s""" % str(err) + ) return (0, str(err)) - security.declareProtected('View', 'sendNotificationToUNS') - - def sendNotificationToUNS(self, envelope, notification_type, - notification_label, actor='system', - description=''): - """ Sends events data to the specified UNS's push channel """ + security.declareProtected("View", "sendNotificationToUNS") + + def sendNotificationToUNS( + self, + envelope, + notification_type, + notification_label, + actor="system", + description="", + ): + """Sends events data to the specified UNS's push channel""" res = 0 try: l_server = self.get_uns_xmlrpc_server() @@ -1575,50 +1769,91 @@ def sendNotificationToUNS(self, envelope, notification_type, l_id = "%s/events#ts%s" % (envelope.absolute_url(), l_time) l_res = [] l_res.append( - [l_id, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type', - 'http://rod.eionet.europa.eu/schema.rdf#Workflowevent']) + [ + l_id, + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type", + "http://rod.eionet.europa.eu/schema.rdf#Workflowevent", + ] + ) l_res.append( - [l_id, 'http://purl.org/dc/elements/1.1/title', - notification_label]) + [ + l_id, + "http://purl.org/dc/elements/1.1/title", + notification_label, + ] + ) if description: l_res.append( - [l_id, 'http://purl.org/dc/elements/1.1/description', - description]) + [ + l_id, + "http://purl.org/dc/elements/1.1/description", + description, + ] + ) l_res.append( - [l_id, 'http://purl.org/dc/elements/1.1/identifier', - envelope.absolute_url()]) + [ + l_id, + "http://purl.org/dc/elements/1.1/identifier", + envelope.absolute_url(), + ] + ) l_res.append( - [l_id, 'http://purl.org/dc/elements/1.1/date', - strftime('%Y-%b-%d %H:%M:%S')]) + [ + l_id, + "http://purl.org/dc/elements/1.1/date", + strftime("%Y-%b-%d %H:%M:%S"), + ] + ) # l_res.append( # [l_id, 
'http://rod.eionet.europa.eu/schema.rdf#label', # notification_label]) - l_dataflows = [self.dataflow_lookup( - x)['TITLE'] for x in envelope.dataflow_uris] + l_dataflows = [ + self.dataflow_lookup(x)["TITLE"] + for x in envelope.dataflow_uris + ] for l_dataflow in l_dataflows: l_res.append( - [l_id, - 'http://rod.eionet.europa.eu/schema.rdf#obligation', - str(l_dataflow)]) + [ + l_id, + "http://rod.eionet.europa.eu/schema.rdf#obligation", # noqa + str(l_dataflow), + ] + ) l_res.append( - [l_id, 'http://rod.eionet.europa.eu/schema.rdf#locality', - str(envelope.getCountryName())]) + [ + l_id, + "http://rod.eionet.europa.eu/schema.rdf#locality", + str(envelope.getCountryName()), + ] + ) l_res.append( - [l_id, 'http://rod.eionet.europa.eu/schema.rdf#actor', - actor]) + [ + l_id, + "http://rod.eionet.europa.eu/schema.rdf#actor", + actor, + ] + ) l_res.append( - [l_id, 'http://rod.eionet.europa.eu/schema.rdf#event_type', - notification_type]) + [ + l_id, + "http://rod.eionet.europa.eu/schema.rdf#event_type", + notification_type, + ] + ) l_server.UNSService.sendNotification( - self.UNS_channel_id, l_res) + self.UNS_channel_id, l_res + ) res = 1 except Exception as e: - logger.warning("Unable to send UNS notification for: {0}: {1}" - .format(envelope.absolute_url(), e)) + logger.warning( + "Unable to send UNS notification for: {0}: {1}".format( + envelope.absolute_url(), e + ) + ) res = 0 return res - security.declarePrivate('uns_subscribe_actors') + security.declarePrivate("uns_subscribe_actors") def uns_subscribe_actors(self, actors, filters): l_server = self.get_uns_xmlrpc_server() @@ -1628,12 +1863,16 @@ def uns_subscribe_actors(self, actors, filters): if REPORTEK_DEPLOYMENT == DEPLOYMENT_BDR: if self.get_ecas_userid(act): act = self.get_ecas_userid(act) - l_server.UNSService.makeSubscription(self.UNS_channel_id, - act, filters) + l_server.UNSService.makeSubscription( + self.UNS_channel_id, act, filters + ) return 1 except Exception as e: - logger.warning("Unable to subscribe actors: {0} to UNS: {1}" - .format(actors, e)) + logger.warning( + "Unable to subscribe actors: {0} to UNS: {1}".format( + actors, e + ) + ) return 0 ########################################################################### @@ -1642,27 +1881,26 @@ def uns_subscribe_actors(self, actors, filters): # ########################################################################### - def getListAsDict(self, ob_list=[], key=''): - """ Get a list of dictionaries and return a tuple of (key, value) - """ - groups = list(set([x.get(key, '') for x in ob_list])) + def getListAsDict(self, ob_list=[], key=""): + """Get a list of dictionaries and return a tuple of (key, value)""" + groups = list(set([x.get(key, "") for x in ob_list])) groups.sort() for group in groups: - yield group, [x for x in ob_list if x.get(key, '') == group] + yield group, [x for x in ob_list if x.get(key, "") == group] - security.declareProtected('View', 'messageDialog') + security.declareProtected("View", "messageDialog") - def messageDialog(self, message='', action='', REQUEST=None): - """ displays a message dialog """ + def messageDialog(self, message="", action="", REQUEST=None): + """displays a message dialog""" return self.message_dialog(message=message, action=action) - message_dialog = PageTemplateFile('zpt/message_dialog', globals()) + message_dialog = PageTemplateFile("zpt/message_dialog", globals()) - security.declarePublic('getEnvelopeByURL') + security.declarePublic("getEnvelopeByURL") def getEnvelopeByURL(self, file_url, REQUEST=None): - """ return 
the URL of an envelope based on a contained object URL """ - REQUEST.RESPONSE.setHeader('Content-type', 'text/xml') + """return the URL of an envelope based on a contained object URL""" + REQUEST.RESPONSE.setHeader("Content-type", "text/xml") parsde_url = urlparse(file_url) file_obj = self.unrestrictedTraverse(parsde_url[2], None) return """ @@ -1670,14 +1908,15 @@ def getEnvelopeByURL(self, file_url, REQUEST=None): """ % file_obj.getMySelf().absolute_url(1) security.declareProtected( - 'View management screens', 'manage_raise_exception') + "View management screens", "manage_raise_exception" + ) def manage_raise_exception(self): - """ Generate exception to check that it's handled properly """ - raise ValueError('hello world') + """Generate exception to check that it's handled properly""" + raise ValueError("hello world") def getSearchResults(self, **kwargs): - [kwargs.pop(el) for el in kwargs.keys() if kwargs[el] in [None, '']] + [kwargs.pop(el) for el in kwargs.keys() if kwargs[el] in [None, ""]] catalog = getToolByName(self, DEFAULT_CATALOG, None) return catalog.searchResults(**kwargs) @@ -1685,16 +1924,16 @@ def getUniqueValuesFor(self, value): catalog = getToolByName(self, DEFAULT_CATALOG, None) return catalog.uniqueValuesFor(value) - security.declarePublic('getAvailableLanguages') + security.declarePublic("getAvailableLanguages") def getAvailableLanguages(self): """Get avalilable languages as their .mo files are found in - locales/ folders map them to their localized name. + locales/ folders map them to their localized name. """ negociator = getUtility(INegotiator) return negociator.getAvailableLanguages() - security.declarePublic('getSelectedLanguage') + security.declarePublic("getSelectedLanguage") def getSelectedLanguage(self): """Get selected language for this requester. The lang is selected by @@ -1708,141 +1947,175 @@ def setCookieLanguage(self): """Sets the language of the site by cookie. 
negotiator will read this pref from cookie on every request """ - new_lang = self.REQUEST.get('chlang') + new_lang = self.REQUEST.get("chlang") if new_lang: new_lang = normalize_lang(new_lang) self.REQUEST.RESPONSE.setCookie( - 'reportek_language', new_lang, path='/') - self.REQUEST.RESPONSE.redirect(self.REQUEST['HTTP_REFERER']) + "reportek_language", new_lang, path="/" + ) + self.REQUEST.RESPONSE.redirect(self.REQUEST["HTTP_REFERER"]) if REPORTEK_DEPLOYMENT == DEPLOYMENT_BDR: # make it accessible from browser - security.declarePublic('getUserCollections') + security.declarePublic("getUserCollections") def getUserCollections(self): - if not getattr(self, 'REQUEST', None): + if not getattr(self, "REQUEST", None): return [] + # Disable CSRF protection + if "IDisableCSRFProtection" in dir(plone.protect.interfaces): + alsoProvides( + self.REQUEST, + plone.protect.interfaces.IDisableCSRFProtection, + ) collections = {} - username = self.REQUEST['AUTHENTICATED_USER'].getUserName() - ecas = (self.unrestrictedTraverse('/acl_users/' - + constants.ECAS_ID)) + username = self.REQUEST["AUTHENTICATED_USER"].getUserName() + ecas = self.unrestrictedTraverse("/acl_users/" + constants.ECAS_ID) ecas_user_id = ecas.getEcasUserId(username) # these are disjunct, so it is safe to add them all together # normally only one of the lists will have results, but they could # be all empty too - middleware_collections = { - 'rw': [], - 'ro': [] - } + middleware_collections = {"rw": [], "ro": []} def get_colls(paths): """Paths is a dictionary {'paths': [], 'prev_paths': []}""" acc_paths = list( - set(paths.get('paths') + paths.get('prev_paths'))) - colls = { - 'rw': [], - 'ro': [] - } + set(paths.get("paths") + paths.get("prev_paths")) + ) + colls = {"rw": [], "ro": []} for colPath in acc_paths: - path = str(colPath) if colPath.startswith( - '/') else '/{}'.format(str(colPath)) + path = ( + str(colPath) + if colPath.startswith("/") + else "/{}".format(str(colPath)) + ) try: - if colPath in paths.get('paths'): - colls['rw'].append(self.unrestrictedTraverse(path)) + if colPath in paths.get("paths"): + colls["rw"].append(self.unrestrictedTraverse(path)) else: - colls['ro'].append(self.unrestrictedTraverse(path)) + colls["ro"].append(self.unrestrictedTraverse(path)) except Exception: logger.warning("Cannot traverse path: %s" % (path)) return colls logger.debug( - '''Attempt to interrogate middleware for authorizations for''' - ''' user:id %s:%s''' % (username, ecas_user_id)) + """Attempt to interrogate middleware for authorizations for""" + """ user:id %s:%s""" % (username, ecas_user_id) + ) if ecas_user_id: + # If the ecas_user_id is not the username, we need to include + # them both in the call to ecr + userdata = {"username": username, "ecas_id": ecas_user_id} user_paths = self.authMiddleware.getUserCollectionPaths( ecas_user_id, - recheck_interval=self.authMiddleware.recheck_interval) + userdata=userdata, + recheck_interval=self.authMiddleware.recheck_interval, + ) colls = get_colls(user_paths) - middleware_collections['rw'] += [ - col for col in colls.get('rw') - if col not in middleware_collections['rw']] - middleware_collections['ro'] += [ - col for col in colls.get('ro') - if col not in middleware_collections['ro']] + middleware_collections["rw"] += [ + col + for col in colls.get("rw") + if col not in middleware_collections["rw"] + ] + middleware_collections["ro"] += [ + col + for col in colls.get("ro") + if col not in middleware_collections["ro"] + ] catalog = getToolByName(self, DEFAULT_CATALOG, None) - 
-            middleware_collections['rw'] += [
-                br.getObject() for br in catalog.searchResults(
-                    **{'meta_type': 'Report Collection',
-                       'id': username})
-                if not br.getObject() in middleware_collections['rw']]
+            middleware_collections["rw"] += [
+                br.getObject()
+                for br in catalog.searchResults(
+                    **{"meta_type": "Report Collection", "id": username}
+                )
+                if not br.getObject() in middleware_collections["rw"]
+            ]
             # check BDR registry
             user_paths = self.BDRRegistryAPI.getCollectionPaths(username)
             colls = get_colls(user_paths)
-            middleware_collections['rw'] += [
-                col for col in colls.get('rw')
-                if col not in middleware_collections['rw']]
-            middleware_collections['ro'] += [
-                col for col in colls.get('ro')
-                if col not in middleware_collections['ro']]
-
-            collections['Reporter'] = middleware_collections
-            local_roles = ['Auditor', 'ClientFG',
-                           'ClientODS', 'ClientCARS', 'ClientHDV']
+            middleware_collections["rw"] += [
+                col
+                for col in colls.get("rw")
+                if col not in middleware_collections["rw"]
+            ]
+            middleware_collections["ro"] += [
+                col
+                for col in colls.get("ro")
+                if col not in middleware_collections["ro"]
+            ]
+
+            collections["Reporter"] = middleware_collections
+            local_roles = [
+                "Auditor",
+                "ClientFG",
+                "ClientODS",
+                "ClientCARS",
+                "ClientHDV",
+            ]
             local_r_col = catalog.searchResults(
-                **{'meta_type': 'Report Collection',
-                   'local_unique_roles': local_roles})
+                **{
+                    "meta_type": "Report Collection",
+                    "local_unique_roles": local_roles,
+                }
+            )

-            auditor = [br.getObject() for br in local_r_col
-                       if 'Auditor' in br.local_defined_roles.get(username, [])
-                       and len(br.getPath().split('/')) == 3]
+            auditor = [
+                br.getObject()
+                for br in local_r_col
+                if "Auditor" in br.local_defined_roles.get(username, [])
+                and len(br.getPath().split("/")) == 3
+            ]

             def is_client(l_roles):
-                c_roles = ['ClientFG', 'ClientODS', 'ClientCARS', 'ClientHDV']
+                c_roles = ["ClientFG", "ClientODS", "ClientCARS", "ClientHDV"]
                 return [role for role in l_roles if role in c_roles]

-            client = [br.getObject() for br in local_r_col
-                      if is_client(br.local_defined_roles.get(username, []))
-                      and len(br.getPath().split('/')) == 3]
+            client = [
+                br.getObject()
+                for br in local_r_col
+                if is_client(br.local_defined_roles.get(username, []))
+                and len(br.getPath().split("/")) == 3
+            ]

-            collections['Auditor'] = auditor
-            collections['Client'] = client
+            collections["Auditor"] = auditor
+            collections["Client"] = client

             return collections
     else:
-        security.declarePublic('getUserCollections')
+        security.declarePublic("getUserCollections")

         def getUserCollections(self):
-            raise RuntimeError('Method not allowed on this distribution.')
+            raise RuntimeError("Method not allowed on this distribution.")

-    security.declareProtected('View', 'xls_export')
+    security.declareProtected("View", "xls_export")

     def xls_export(self, envelopes=None, catalog_args=None):
-        """ XLS Export for catalog results
-        """
+        """XLS Export for catalog results"""
         env_objs = []
         catalog = getToolByName(self, DEFAULT_CATALOG, None)
         if envelopes:
             envelopes = RepUtils.utConvertToList(envelopes)
-            env_objs = [self.unrestrictedTraverse(
-                env, None) for env in envelopes]
+            env_objs = [
+                self.unrestrictedTraverse(env, None) for env in envelopes
+            ]
         else:
             if not catalog_args:
                 catalog_args = self.get_query_args()
-            if self.REQUEST.get('sort_on'):
-                catalog_args['sort_on'] = self.REQUEST['sort_on']
-            if self.REQUEST.get('sort_order'):
-                catalog_args['sort_order'] = self.REQUEST['sort_order']
+            if self.REQUEST.get("sort_on"):
+                catalog_args["sort_on"] = self.REQUEST["sort_on"]
self.REQUEST["sort_on"] + if self.REQUEST.get("sort_order"): + catalog_args["sort_order"] = self.REQUEST["sort_order"] if catalog_args: brains = catalog.searchResults(**catalog_args) if brains: env_objs = [brain.getObject() for brain in brains] wb = xlwt.Workbook() - sheet = wb.add_sheet('Results') + sheet = wb.add_sheet("Results") header = dict(RepUtils.write_xls_header(sheet)) idx = 1 # Start from row 1 @@ -1856,47 +2129,46 @@ def xls_export(self, envelopes=None, catalog_args=None): if idx > self.XLS_max_rows: break - return self.download_xls(wb, 'searchresults.xls') + return self.download_xls(wb, "searchresults.xls") - security.declareProtected('View', 'download_xls') + security.declareProtected("View", "download_xls") def download_xls(self, wb, filename): - """ Return an .xls file - """ + """Return an .xls file""" xls = StringIO() wb.save(xls) xls.seek(0) self.REQUEST.response.setHeader( - 'Content-type', 'application/vnd.ms-excel; charset=utf-8' + "Content-type", "application/vnd.ms-excel; charset=utf-8" ) self.REQUEST.response.setHeader( - 'Content-Disposition', 'attachment; filename={0}'.format(filename) + "Content-Disposition", "attachment; filename={0}".format(filename) ) return xls.read() - security.declareProtected('View management screens', 'zip_cache_cleanup') + security.declareProtected("View management screens", "zip_cache_cleanup") def zip_cache_cleanup(self, days=7): """Deletes files older than days.""" return RepUtils.cleanup_zip_cache(days=days) - security.declareProtected('View', 'jsonify') + security.declareProtected("View", "jsonify") def jsonify(self, value, ensure_ascii=False): """Return the value as JSON""" - self.REQUEST.RESPONSE.setHeader('Content-Type', 'application/json') + self.REQUEST.RESPONSE.setHeader("Content-Type", "application/json") return json.dumps(value, indent=4, ensure_ascii=ensure_ascii) - security.declareProtected('View management screens', 'get_left_menu_tmpl') + security.declareProtected("View management screens", "get_left_menu_tmpl") def get_left_menu_tmpl(self, username=None): """Get the left menu template for username.""" sm = getSecurityManager() nobody = SpecialUsers.nobody - left_hand_tmpl = self.unrestrictedTraverse('/left_menu_buttons') - root = self.unrestrictedTraverse('/') + left_hand_tmpl = self.unrestrictedTraverse("/left_menu_buttons") + root = self.unrestrictedTraverse("/") tmp_user = None try: try: @@ -1920,34 +2192,33 @@ def get_left_menu_tmpl(self, username=None): @ram.cache(lambda *args: time() // (60 * 60 * 12)) # 12 hours def get_main_cols(self): root = self.getPhysicalRoot() - collections = root.objectValues('Report Collection') + collections = root.objectValues("Report Collection") collections.sort(key=lambda x: x.title_or_id().lower()) return collections def find_col_with_different_id(self, parent, metadata): """Find collection that may have a different id in the parent coll""" - relevant = [ - 'country', - 'dataflow_uris', - 'locality', - 'parent' - ] + relevant = ["country", "dataflow_uris", "locality", "parent"] result = None # Verify that we have metadata and that we have dataflow_uris in it if metadata: - for coll in parent.objectValues('Report Collection'): - if coll.title == metadata.get('title'): + for coll in parent.objectValues("Report Collection"): + if coll.title == metadata.get("title"): result = coll break # Run check only if the collection id is auto-generated - if (coll.id.startswith('col') and len(coll.id) == 9 - and metadata.get('dataflow_uris')): + if ( + coll.id.startswith("col") + and len(coll.id) 
+                    and metadata.get("dataflow_uris")
+                ):
                     differs = []
                     for attr in relevant:
-                        if attr == 'parent':
-                            coll_attr = '/'.join(
-                                coll.getParentNode().getPhysicalPath())
+                        if attr == "parent":
+                            coll_attr = "/".join(
+                                coll.getParentNode().getPhysicalPath()
+                            )
                         else:
                             coll_attr = getattr(coll, attr, None)
                         if coll_attr != metadata.get(attr, None):
@@ -1959,39 +2230,41 @@ def find_col_with_different_id(self, parent, metadata):

         return result

-    security.declareProtected('View management screens', 'sync_collection')
+    security.declareProtected("View management screens", "sync_collection")

     def sync_collection(self):
         """Sync local collection from remote collection"""
-        collection = self.REQUEST.form.get('collection')
+        collection = self.REQUEST.form.get("collection")
         auth = (
-            os.environ.get('COLLECTION_SYNC_USER', ''),
-            os.environ.get('COLLECTION_SYNC_PASS', '')
+            os.environ.get("COLLECTION_SYNC_USER", ""),
+            os.environ.get("COLLECTION_SYNC_PASS", ""),
         )
         results = []
-        remote_depl = '://'.join([
-            collection.split('://')[0],
-            collection.split('://')[-1].split('/')[0]
-        ])
-        collections = collection.split('://')[-1].split('/')[1:]
-        collection = ''
+        remote_depl = "://".join(
+            [
+                collection.split("://")[0],
+                collection.split("://")[-1].split("/")[0],
+            ]
+        )
+        collections = collection.split("://")[-1].split("/")[1:]
+        collection = ""
         for coll in collections:
             if coll:
                 metadata = None
-                collection = '/'.join([collection, coll])
+                collection = "/".join([collection, coll])
                 result = {
-                    'action': None,
+                    "action": None,
                 }
                 try:
-                    url = '/'.join([remote_depl, collection, 'metadata'])
+                    url = "/".join([remote_depl, collection, "metadata"])
                     headers = {"Accept": "application/json"}
-                    depl = self.REQUEST.SERVER_URL.split(
-                        '://')[-1].split('.')[0]
-                    data = {
-                        'deployment': depl
-                    }
-                    res = requests.post(url, auth=auth, headers=headers,
-                                        data=data)
+                    depl = self.REQUEST.SERVER_URL.split("://")[-1].split(".")[
+                        0
+                    ]
+                    data = {"deployment": depl}
+                    res = requests.post(
+                        url, auth=auth, headers=headers, data=data
+                    )
                     if res.ok:
                         metadata = RepUtils.encode_dict(res.json())
                     elif res.status_code == 404:
@@ -1999,36 +2272,44 @@ def sync_collection(self):
                         # deleted, but we're not deleting it from the clients
                         pass
                     else:
-                        msg = ('[SYNC] Unable to retrieve remote collection '
-                               'metadata: {}'.format(res.status_code))
+                        msg = (
+                            "[SYNC] Unable to retrieve remote collection "
+                            "metadata: {}".format(res.status_code)
+                        )
                         logger.error(msg)
-                        return self.jsonify({'error': msg})
+                        return self.jsonify({"error": msg})
                 except Exception as e:
                     msg = (
-                        '[SYNC] Unable to retrieve remote collection '
-                        'metadata: {}'.format(str(e)))
+                        "[SYNC] Unable to retrieve remote collection "
+                        "metadata: {}".format(str(e))
+                    )
                     logger.error(msg)
-                    return self.jsonify({'error': msg})
+                    return self.jsonify({"error": msg})
                 if metadata:
                     local_c = self.unrestrictedTraverse(collection, None)
                     if not local_c:
-                        pcpath = '/'.join(collection.split('/')[:-1])
+                        pcpath = "/".join(collection.split("/")[:-1])
                         local_diff_id = None
                         if pcpath:
                             parent = self.unrestrictedTraverse(pcpath)
                             local_diff_id = self.find_col_with_different_id(
-                                parent,
-                                metadata)
+                                parent, metadata
+                            )
                         else:
                             continue
                     col_args = deepcopy(metadata)
-                    for arg in ['local_roles', 'modification_time',
-                                'parent', 'restricted']:
+                    for arg in [
+                        "local_roles",
+                        "modification_time",
+                        "parent",
+                        "restricted",
+                    ]:
                         del col_args[arg]
                     if not local_c:
                         if not local_diff_id:
-                            allow_colls = getattr(parent, 'allow_collections',
-                                                  None)
+                            allow_colls = getattr(
+                                parent, "allow_collections", None
+                            )
"allow_collections", None + ) chg_allow_colls = False if not allow_colls: parent.allow_collections = 1 @@ -2039,55 +2320,67 @@ def sync_collection(self): local_c = self.unrestrictedTraverse(collection) logger.info( "[SYNC] Created collection: {}".format( - local_c.absolute_url())) - result['action'] = 'created' + local_c.absolute_url() + ) + ) + result["action"] = "created" else: # Check if can_move_released prevents renaming - can_move = getattr(parent, - 'can_move_released', False) + can_move = getattr( + parent, "can_move_released", False + ) chg_can_move = False if not can_move: chg_can_move = True parent.can_move_released = True # Rename existing collection with different id parent.manage_renameObject( - local_diff_id.id, metadata.get('id')) + local_diff_id.id, metadata.get("id") + ) local_c = self.unrestrictedTraverse(collection) if chg_can_move: del parent.can_move_released logger.info( "[SYNC] Renamed collection: {}".format( - local_c.absolute_url())) - result['action'] = 'modified' + local_c.absolute_url() + ) + ) + result["action"] = "modified" logger.info( "[SYNC] Created collection: {}".format( - local_c.absolute_url())) + local_c.absolute_url() + ) + ) - result['collection'] = local_c.absolute_url() - result['metadata'] = col_args + result["collection"] = local_c.absolute_url() + result["metadata"] = col_args else: changed = False for key in col_args: - if key == 'allow_referrals': - if local_c.are_referrals_allowed() != col_args[ - key]: + if key == "allow_referrals": + if ( + local_c.are_referrals_allowed() + != col_args[key] + ): changed = True break elif getattr(local_c, key) != col_args[key]: changed = True break if changed: - del col_args['id'] - result['action'] = 'modified' - result['collection'] = local_c.absolute_url() - result['metadata'] = col_args + del col_args["id"] + result["action"] = "modified" + result["collection"] = local_c.absolute_url() + result["metadata"] = col_args local_c.manage_editCollection(**col_args) logger.info( "[SYNC] Updated collection: {} - {}".format( - local_c.absolute_url(), col_args)) + local_c.absolute_url(), col_args + ) + ) roles_info = {} # Sync roles - for roleinfo in metadata.get('local_roles'): + for roleinfo in metadata.get("local_roles"): entity, roles = roleinfo cur_roles = local_c.get_local_roles_for_userid(entity) if tuple(roles) != cur_roles: @@ -2101,26 +2394,26 @@ def sync_collection(self): local_c.manage_setLocalRoles(entity, roles) logger.info( "[SYNC] Setting roles: {} for {}".format( - roles, entity)) + roles, entity + ) + ) if roles_info: - result['action'] = 'modified' - result['collection'] = local_c.absolute_url() - result['roles'] = roles_info + result["action"] = "modified" + result["collection"] = local_c.absolute_url() + result["roles"] = roles_info local_c.reindexObject() results.append(result) return self.jsonify(results) def init_cols_sync(self): - query = { - 'meta_type': 'Report Collection' - } + query = {"meta_type": "Report Collection"} catalog = getToolByName(self, DEFAULT_CATALOG, None) brains = catalog.searchResults(**query) data = PersistentMapping() for brain in brains: data[brain.getPath()] = { - 'modified': brain.bobobase_modification_time.HTML4(), - 'ack': PersistentList() + "modified": brain.bobobase_modification_time.HTML4(), + "ack": PersistentList(), } return data diff --git a/Products/Reportek/ReportekPropertiedUser.py b/Products/Reportek/ReportekPropertiedUser.py index 0b1eedf1..c492e4d1 100644 --- a/Products/Reportek/ReportekPropertiedUser.py +++ 
diff --git a/Products/Reportek/ReportekPropertiedUser.py b/Products/Reportek/ReportekPropertiedUser.py
index 0b1eedf1..c492e4d1 100644
--- a/Products/Reportek/ReportekPropertiedUser.py
+++ b/Products/Reportek/ReportekPropertiedUser.py
@@ -1,17 +1,16 @@
+import logging
+
 from Products.PluggableAuthService.PropertiedUser import PropertiedUser
-from Products.Reportek.constants import ENGINE_ID, ECAS_ID
-from Products.Reportek.config import REPORTEK_DEPLOYMENT
 from Products.Reportek.config import DEPLOYMENT_CDR as CDR
+from Products.Reportek.config import REPORTEK_DEPLOYMENT
+from Products.Reportek.constants import ECAS_ID, ENGINE_ID

-import logging
 logger = logging.getLogger("Reportek")


 class ReportekPropertiedUser(PropertiedUser):
-
     def getGroups(self):
-        """ Also return LDAP groups if applicable
-        """
+        """Also return LDAP groups if applicable"""
         local_groups = super(ReportekPropertiedUser, self).getGroups()
         if REPORTEK_DEPLOYMENT == CDR:
             return list(set(local_groups) | set(self.get_ldap_groups()))
@@ -23,38 +22,39 @@ def get_roles_for_user_in_context(self, obj, user_id):
         # when creating objects in a parent, we don't have an absolute_url yet
         # look for the parent
-        if not hasattr(obj, 'absolute_url'):
+        if not hasattr(obj, "absolute_url"):
             # when accessing ZMI specific objects we don't have im_self either
             # if the classical authorization failed then fail this one too
             # because this is not a FGAS Portal case
-            if not hasattr(obj, 'im_self'):
+            if not hasattr(obj, "im_self"):
                 return []
             obj = obj.im_self
-        current_path_parts = obj.absolute_url(1).split('/')
+        current_path_parts = obj.absolute_url(1).split("/")
         if len(current_path_parts) < 3:
             return []
-        col_path = '/'.join(current_path_parts[:3])
+        col_path = "/".join(current_path_parts[:3])
         m_auth = self.get_middleware_authorization(user_id, col_path)
         if m_auth:
-            return {'RW': ['Owner'], 'RO': ['Reader']}.get(m_auth)
+            return {"RW": ["Owner"], "RO": ["Reader"]}.get(m_auth)
         ldap_roles = self.get_ldap_role_in_context(obj, user_id)
         if ldap_roles:
             return ldap_roles

         return []

     def getRolesInContext(self, object):
-        """ Return the roles in the context
-        """
-        basic_roles = super(ReportekPropertiedUser,
-                            self).getRolesInContext(object)
+        """Return the roles in the context"""
+        basic_roles = super(ReportekPropertiedUser, self).getRolesInContext(
+            object
+        )
         user_id = self.getId()
         middleware_roles = self.get_roles_for_user_in_context(object, user_id)
         return list(set(basic_roles) | set(middleware_roles))

     def allowed(self, object, object_roles=None):
         basic = super(ReportekPropertiedUser, self).allowed(
-            object, object_roles)
+            object, object_roles
+        )
         if basic:
             return 1
@@ -67,24 +67,22 @@ def allowed(self, object, object_roles=None):

         return None

     def get_ldap_groups(self):
-        """ Return the user's ldap groups
-        """
-        acl_users = getattr(self, 'acl_users')
-        ldapplugins = acl_users.objectIds('LDAP Multi Plugin')
+        """Return the user's ldap groups"""
+        acl_users = getattr(self, "acl_users")
+        ldapplugins = acl_users.objectIds("LDAP Multi Plugin")

         for plugin_id in ldapplugins:
             plugin = acl_users[plugin_id]
-            ldapfolder = getattr(plugin, 'acl_users')
+            ldapfolder = getattr(plugin, "acl_users")
             if ldapfolder:
                 user = ldapfolder.getUserById(self.getId())
-                return getattr(user, '_ldap_groups', [])
+                return getattr(user, "_ldap_groups", [])

         return []

     def get_ldap_role_in_context(self, obj, user_id):
-        """ Return the LDAP Group's role in context
-        """
+        """Return the LDAP Group's role in context"""
         ldap_groups = self.get_ldap_groups()
         roles = []
         if ldap_groups:
@@ -96,18 +94,23 @@ def get_ldap_role_in_context(self, obj, user_id):
         return roles

     def get_middleware_authorization(self, user_id, base_path):
-        engine = self.unrestrictedTraverse('/' + ENGINE_ID)
+        engine = self.unrestrictedTraverse("/" + ENGINE_ID)
         authMiddleware = engine.authMiddleware
-        ecas = self.unrestrictedTraverse('/acl_users/' + ECAS_ID, None)
+        ecas = self.unrestrictedTraverse("/acl_users/" + ECAS_ID, None)
         if ecas:
             ecas_user_id = ecas.getEcasUserId(user_id)
-            logger.debug((
-                "Attempt to interrogate middleware for "
-                "authorizations for user:id %s:%s") % (
-                user_id,
-                ecas_user_id))
+            logger.debug(
+                (
+                    "Attempt to interrogate middleware for "
+                    "authorizations for user:id %s:%s"
+                )
+                % (user_id, ecas_user_id)
+            )
             if not ecas_user_id:
                 return False
             if authMiddleware:
+                # authorizedUser takes the extra ECAS details as "userdata"
+                user = {"username": user_id, "ecas_id": ecas_user_id}
-                return authMiddleware.authorizedUser(ecas_user_id, base_path)
+                return authMiddleware.authorizedUser(
+                    ecas_user_id, base_path, userdata=user
+                )
         return False
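For orientation, get_roles_for_user_in_context above boils down to two small steps: truncate the object's relative URL to its first three segments (the top-level collection that the middleware knows about) and translate the middleware verdict into Zope roles, RW becoming Owner and RO becoming Reader. A hedged standalone sketch of just that mapping (the helper names here are illustrative, not Reportek API):

# Standalone sketch (not Reportek code) of the path truncation and the
# verdict-to-role mapping used by get_roles_for_user_in_context().
def collection_path(relative_url):
    """Keep only the first three path segments, e.g. 'fgases/gb/company-1'."""
    parts = relative_url.split("/")
    return "/".join(parts[:3]) if len(parts) >= 3 else None


def roles_from_verdict(verdict):
    """Translate the middleware answer ('RW', 'RO' or None) into local roles."""
    return {"RW": ["Owner"], "RO": ["Reader"]}.get(verdict, [])


if __name__ == "__main__":
    print(collection_path("fgases/gb/company-1/envelope-2/file.xml"))
    # -> fgases/gb/company-1
    print(roles_from_verdict("RW"))   # -> ['Owner']
    print(roles_from_verdict(None))   # -> []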
- """ +def addReportekUserFactoryPlugin(dispatcher, id, title="", RESPONSE=None): + """Add a Local Role Plugin to 'dispatcher'.""" plugin = ReportekUserFactoryPlugin(id, title) dispatcher._setObject(id, plugin) if RESPONSE is not None: - RESPONSE.redirect('%s/manage_main?manage_tabs_message=%s' % - (dispatcher.absolute_url(), - 'ReportekUserFactory+added.')) + RESPONSE.redirect( + "%s/manage_main?manage_tabs_message=%s" + % (dispatcher.absolute_url(), "ReportekUserFactory+added.") + ) InitializeClass(ReportekUserFactoryPlugin) diff --git a/Products/Reportek/zpt/engineMacros.zpt b/Products/Reportek/zpt/engineMacros.zpt index 8a54dacb..4cfa04c3 100644 --- a/Products/Reportek/zpt/engineMacros.zpt +++ b/Products/Reportek/zpt/engineMacros.zpt @@ -1,100 +1,122 @@ - + + + + - - + - - - - - - + + + - - - - - + + - (Previous n results) - - - + )" + > + (Previous n results) + + - - - - - - - - - - - if result has getObject, then it is a brain, fetch the object - else it probably is an already fetched object - return function - that returns the initial object back - - - - +
diff --git a/Products/Reportek/zpt/engineMacros.zpt b/Products/Reportek/zpt/engineMacros.zpt
index 8a54dacb..4cfa04c3 100644
--- a/Products/Reportek/zpt/engineMacros.zpt
+++ b/Products/Reportek/zpt/engineMacros.zpt
[The two hunks of this template (@@ -1,100 +1,122 @@ and @@ -188,34 +216,190 @@) lost their markup during extraction and cannot be reconstructed here. What survives of the first hunk is the reindented results-listing macros, with their "(Previous n results)" / "(Next n results)" links and the note that a catalog brain is resolved via getObject while an already fetched object is returned unchanged. The second, much larger hunk adds the BDR task panel with the messages "Click on the link below to access your company registry page", "Click on the link below to start the reporting", "Click on the link below to view your archived reporting folders", "Click on the link below to inspect deliveries" and "It appears you have no reporting tasks.", followed by advice to data reporters registered in the European registry who still see the empty panel: retry the login in a few minutes, possibly in another browser or after a forced refresh (Ctrl+Shift+R), or contact the BDR helpdesk.]