diff --git a/.github/workflows/conda-package-build.yml b/.github/workflows/conda-package-build.yml index f66b513..80d4d19 100644 --- a/.github/workflows/conda-package-build.yml +++ b/.github/workflows/conda-package-build.yml @@ -10,9 +10,9 @@ on: branches: - '**' - jobs: build: uses: openalea/github-action-conda-build/.github/workflows/conda-package-build.yml@main secrets: anaconda_token: ${{ secrets.ANACONDA_TOKEN }} + diff --git a/conda/meta.yaml b/conda/meta.yaml index 01610a3..5dbda22 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -23,7 +23,8 @@ requirements: - bs4 - pygments - colorlog - - requests + - requests + - pyyaml - requests_cache - jsf diff --git a/example/quick_start.ipynb b/example/quick_start.ipynb index ae65dcb..f339d71 100644 --- a/example/quick_start.ipynb +++ b/example/quick_start.ipynb @@ -31,7 +31,7 @@ } ], "source": [ - "from agroservices.ipm.ipm import IPM\n", + "from openalea.agroservices.ipm.ipm import IPM\n", "ipm = IPM() \n", "sources = ipm.get_weatherdatasource()\n", "sources.keys()" diff --git a/example/weather_ipm_interface.ipynb b/example/weather_ipm_interface.ipynb index 131b0e7..ec2e59b 100644 --- a/example/weather_ipm_interface.ipynb +++ b/example/weather_ipm_interface.ipynb @@ -13,8 +13,8 @@ "metadata": {}, "outputs": [], "source": [ - "from agroservices.ipm.ipm import IPM\n", - "from agroservices.ipm.datadir import datadir\n", + "from openalea.agroservices.ipm.ipm import IPM\n", + "from openalea.agroservices.ipm.datadir import datadir\n", "ipm = IPM()" ] }, diff --git a/setup.py b/setup.py index 2837f43..565443d 100644 --- a/setup.py +++ b/setup.py @@ -22,12 +22,13 @@ pkg_root_dir = 'src' packages = find_namespace_packages(where='src', include=['openalea.*']) -name = "agroservices" + +name = "openalea.agroservices" _version = {} -with open("src/agroservices/version.py") as fp: +with open("src/openalea/agroservices/version.py") as fp: exec(fp.read(), _version) -version = _version['version'] +version = _version['__version__'] description = '' long_description = ''' @@ -41,6 +42,7 @@ url = 'https://github.com/H2020-IPM-openalea/agroservices' license = "CeCILL-C" + setup( name=name, version=version, diff --git a/src/agroservices/version.py b/src/agroservices/version.py deleted file mode 100644 index 20ab5a9..0000000 --- a/src/agroservices/version.py +++ /dev/null @@ -1 +0,0 @@ -version = '1.0.1' diff --git a/src/openalea/agroservices/__init__.py b/src/openalea/agroservices/__init__.py new file mode 100644 index 0000000..dedd5b2 --- /dev/null +++ b/src/openalea/agroservices/__init__.py @@ -0,0 +1,15 @@ +# -*- python -*- +# +# Copyright INRIA - CIRAD - INRA +# +# Distributed under the Cecill-C License. +# See accompanying file LICENSE.txt or copy at +# http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.html +# +# ============================================================================== + +from .version import __version__ +__version__ = __version__ + +from . 
import ipm +from .ipm import * diff --git a/src/agroservices/credentials.py b/src/openalea/agroservices/credentials.py similarity index 83% rename from src/agroservices/credentials.py rename to src/openalea/agroservices/credentials.py index 5c98b08..69f480d 100644 --- a/src/agroservices/credentials.py +++ b/src/openalea/agroservices/credentials.py @@ -1,5 +1,5 @@ import os -import json +import ujson credential_dir = os.path.dirname(__file__) + '/credentials/' @@ -9,5 +9,5 @@ def get_credentials(agroservice='ipm'): res = {} if os.path.exists(jsonfile): with open(jsonfile) as json_file: - res = json.load(json_file) + res = ujson.load(json_file) return res \ No newline at end of file diff --git a/src/agroservices/credentials/my_agroservice.json b/src/openalea/agroservices/credentials/my_agroservice.json similarity index 100% rename from src/agroservices/credentials/my_agroservice.json rename to src/openalea/agroservices/credentials/my_agroservice.json diff --git a/src/agroservices/extern/__init__.py b/src/openalea/agroservices/extern/__init__.py similarity index 100% rename from src/agroservices/extern/__init__.py rename to src/openalea/agroservices/extern/__init__.py diff --git a/src/agroservices/extern/easydev/__init__.py b/src/openalea/agroservices/extern/easydev/__init__.py similarity index 100% rename from src/agroservices/extern/easydev/__init__.py rename to src/openalea/agroservices/extern/easydev/__init__.py diff --git a/src/agroservices/extern/easydev/config_tools.py b/src/openalea/agroservices/extern/easydev/config_tools.py similarity index 87% rename from src/agroservices/extern/easydev/config_tools.py rename to src/openalea/agroservices/extern/easydev/config_tools.py index bf494ab..9c9db06 100644 --- a/src/agroservices/extern/easydev/config_tools.py +++ b/src/openalea/agroservices/extern/easydev/config_tools.py @@ -20,13 +20,13 @@ except ImportError: from configparser import ConfigParser - import os import appdirs -__all__ = ["CustomConfig", "DynamicConfigParser", "ConfigExample", +__all__ = ["CustomConfig", "DynamicConfigParser", "ConfigExample", "load_configfile"] + # 53, 59, 64-65, 181, 260, 262, 267-270, 288-290, 329, 332-337, 340-341, 359-360, 386-388, 400-401, 406-426, 431-435 @@ -36,28 +36,30 @@ class _DictSection(object): Reference: https://gist.github.com/dangoakachan/3855920 """ + def __init__(self, config, section): object.__setattr__(self, '_config', config) object.__setattr__(self, '_section', section) def __getattr__(self, attr): return self.get(attr, None) + __getitem__ = __getattr__ - def get(self, attr, default = None): + def get(self, attr, default=None): if attr in self: return self._config.get(self._section, attr) - else: #pragma: no cover + else: # pragma: no cover return default def __setattr__(self, attr, value): if attr.startswith('_'): object.__setattr__(self, attr, value) - else: #pragma: no cover + else: # pragma: no cover self.__setitem__(attr, value) def __setitem__(self, attr, value): - if self._section not in self._config: #pragma: no cover + if self._section not in self._config: # pragma: no cover self._config.add_section(self._section) self._config.set(self._section, attr, str(value)) @@ -75,12 +77,12 @@ def __contains__(self, attr): return config.has_section(section) and config.has_option(section, attr) -class ConfigExample(object): +class ConfigExample: """Create a simple example of ConfigParser instance to play with :: - >>> from easydev.pipeline.config import ConfigExample + >>> from openalea.agroservices.extern.easydev.config_tools import 
ConfigExample >>> c = ConfigExample().config # The ConfigParser instance >>> assert 'General' in c.sections() >>> assert 'GA' in c.sections() @@ -108,6 +110,7 @@ class ConfigExample(object): [] """ + def __init__(self): self.config = ConfigParser() self.config.add_section('General') @@ -124,7 +127,7 @@ class DynamicConfigParser(ConfigParser, object): .. code-block:: python - >>> from easydev.config_tools import ConfigExample + >>> from openalea.agroservices.extern.easydev.config_tools import ConfigExample >>> standard_config_file = ConfigExample().config >>> c = DynamicConfigParser(standard_config_file) >>> @@ -163,6 +166,7 @@ class DynamicConfigParser(ConfigParser, object): """ + def __init__(self, config_or_filename=None, *args, **kargs): object.__setattr__(self, '_filename', config_or_filename) @@ -174,14 +178,15 @@ def __init__(self, config_or_filename=None, *args, **kargs): self.read(self._filename) elif isinstance(config_or_filename, ConfigParser): self._replace_config(config_or_filename) - elif config_or_filename == None: + elif config_or_filename is None: pass else: - raise TypeError("config_or_filename must be a valid filename or valid ConfigParser instance") + raise TypeError( + "config_or_filename must be a valid filename or valid ConfigParser instance") def read(self, filename): """Load a new config from a filename (remove all previous sections)""" - if os.path.isfile(filename)==False: + if not os.path.isfile(filename): raise IOError("filename {0} not found".format(filename)) config = ConfigParser() @@ -224,7 +229,6 @@ def get_options(self, section): def section2dict(self, section): """utility that extract options of a ConfigParser section into a dictionary - :param ConfigParser config: a ConfigParser instance :param str section: the section to extract :returns: a dictionary where key/value contains all the @@ -257,7 +261,7 @@ def section2dict(self, section): .. 
note:: an integer is cast into an int """ options = {} - for option in self.options(section): # pragma no cover + for option in self.options(section): # pragma no cover data = self.get(section, option, raw=True) if data.lower() in ['true', 'yes']: options[option] = True @@ -266,12 +270,12 @@ def section2dict(self, section): elif data in ['None', None, 'none', '']: options[option] = None else: - try: # numbers + try: # numbers try: options[option] = self.getint(section, option) except: options[option] = self.getfloat(section, option) - except: #string + except: # string options[option] = self.get(section, option, raw=True) return options @@ -287,14 +291,13 @@ def save(self, filename): """ try: - if os.path.exists(filename) == True: + if os.path.exists(filename): print("Warning: over-writing %s " % filename) - fp = open(filename,'w') - except Exception as err: #pragma: no cover + fp = open(filename, 'w') + except Exception as err: # pragma: no cover print(err) raise Exception('filename could not be opened') - self.write(fp) fp.close() @@ -308,7 +311,7 @@ def add_option(self, section, option, value=None): >>> c.add_option("general", "verbose", True) """ assert section in self.sections(), "unknown section" - #TODO I had to cast to str with DictSection + # TODO I had to cast to str with DictSection self.set(section, option, value=str(value)) def __str__(self): @@ -316,14 +319,15 @@ def __str__(self): for section in self.sections(): str_ += '[' + section + ']\n' for option in self.options(section): - data = self.get(section, option, raw=True) - str_ += option + ' = ' + str(data)+'\n' + data = self.get(section, option, raw=True) + str_ += option + ' = ' + str(data) + '\n' str_ += '\n\n' return str_ def __getattr__(self, key): return _DictSection(self, key) + __getitem__ = __getattr__ def __setattr__(self, attr, value): @@ -343,6 +347,7 @@ def __setitem__(self, attr, value): def __delattr__(self, attr): if attr in self: self.remove_section(attr) + def __contains__(self, attr): return self.has_section(attr) @@ -356,16 +361,17 @@ def __eq__(self, data): for option in self.options(section): try: - if str(self.get(section, option,raw=True)) != \ - str(data.get(section,option, raw=True)): - print("option %s in section %s differ" % (option, section)) + if str(self.get(section, option, raw=True)) != \ + str(data.get(section, option, raw=True)): + print("option %s in section %s differ" % ( + option, section)) return False - except: # pragma: no cover + except: # pragma: no cover return False return True -class CustomConfig(object): +class CustomConfig: """Base class to manipulate a config directory""" def __init__(self, name, verbose=False): @@ -379,38 +385,41 @@ def init(self): def _get_config_dir(self): sdir = self.appdirs.user_config_dir return self._get_and_create(sdir) + user_config_dir = property(_get_config_dir, - doc="return directory of this configuration file") + doc="return directory of this configuration file") + def _get_and_create(self, sdir): if not os.path.exists(sdir): print("Creating directory %s " % sdir) try: self._mkdirs(sdir) - except Exception: #pragma: no cover + except Exception: # pragma: no cover print("Could not create the path %s " % sdir) return None return sdir - def _mkdirs(self, newdir, mode=0o777): + @staticmethod + def _mkdirs(newdir, mode=0o777): """See :func:`easydev.tools.mkdirs`""" - from easydev.tools import mkdirs + from openalea.agroservices.extern.easydev.tools import mkdirs mkdirs(newdir, mode) def remove(self): try: sdir = self.appdirs.user_config_dir 
os.rmdir(sdir) - except Exception as err: #pragma: no cover + except Exception as err: # pragma: no cover raise Exception(err) -def _load_configfile(configpath): #pragma: no cover +def _load_configfile(configpath): # pragma: no cover "Tries to load a JSON or YAML file into a dict." try: with open(configpath) as f: try: - import json - return json.load(f) + import ujson + return ujson.load(f) except ValueError: f.seek(0) # try again try: @@ -426,7 +435,7 @@ def _load_configfile(configpath): #pragma: no cover "In case of YAML, make sure to not mix " "whitespace and tab indentation.") except Exception as err: - raise(err) + raise err def load_configfile(configpath): @@ -434,6 +443,5 @@ def load_configfile(configpath): config = _load_configfile(configpath) if not isinstance(config, dict): raise IOError("Config file must be given as JSON or YAML " - "with keys at top level.") + "with keys at top level.") return config - diff --git a/src/agroservices/extern/easydev/logging_tools.py b/src/openalea/agroservices/extern/easydev/logging_tools.py similarity index 99% rename from src/agroservices/extern/easydev/logging_tools.py rename to src/openalea/agroservices/extern/easydev/logging_tools.py index 42fb010..edcb63a 100644 --- a/src/agroservices/extern/easydev/logging_tools.py +++ b/src/openalea/agroservices/extern/easydev/logging_tools.py @@ -28,7 +28,7 @@ } -class Logging(object): +class Logging: """logging utility. :: >>> l = Logging("root", "INFO") diff --git a/src/agroservices/extern/easydev/tools.py b/src/openalea/agroservices/extern/easydev/tools.py similarity index 79% rename from src/agroservices/extern/easydev/tools.py rename to src/openalea/agroservices/extern/easydev/tools.py index f7b222d..b473b73 100644 --- a/src/agroservices/extern/easydev/tools.py +++ b/src/openalea/agroservices/extern/easydev/tools.py @@ -17,16 +17,16 @@ ############################################################################## """toolkit to ease development""" import subprocess -import json +import ujson import os import sys __all__ = ["shellcmd", "swapdict", "check_param_in_list", - "check_range", "precision", "AttrDict", "DevTools", "execute", - "touch", "mkdirs"] + "check_range", "precision", "AttrDict", "DevTools", "execute", + "touch", "mkdirs"] -def precision(data, digit=2): +def precision(data, digit=2): """Round values in a list keeping only N digits precision :: @@ -37,7 +37,7 @@ def precision(data, digit=2): 2100 """ - data = int(data*pow(10, digit)) + data = int(data * pow(10, digit)) data /= pow(10., digit) return data @@ -52,15 +52,17 @@ def check_range(value, a, b, strict=False): .. doctest:: - >>> from easydev.tools import check_range + >>> from openalea.agroservices.extern.easydev.tools import check_range >>> check_range(1,0, 2) """ if strict is True: if value <= a: - raise ValueError(" {} must be greater (or equal) than {}".format(value, a)) + raise ValueError( + " {} must be greater (or equal) than {}".format(value, a)) if value >= b: - raise ValueError(" {} must be less (or less) than {}".format(value, b)) + raise ValueError( + " {} must be less (or less) than {}".format(value, b)) elif strict is False: if value < a: raise ValueError(" {} must be greater than {}".format(value, a)) @@ -88,8 +90,9 @@ def check_param_in_list(param, valid_values, name=None): check_param_in_list(mode, ["on", "off"]) """ if isinstance(valid_values, list) is False: - - raise TypeError("the valid_values second argument must be a list of valid values. 
{0} was provided.".format(valid_values)) + raise TypeError( + "the valid_values second argument must be a list of valid values. {0} was provided.".format( + valid_values)) if param not in valid_values: if name: @@ -115,7 +118,7 @@ def shellcmd(cmd, show=False, verbose=False, ignore_errors=False): print(cmd) try: ret = subprocess.Popen([cmd], stdout=subprocess.PIPE, - stderr=subprocess.PIPE, shell=True) + stderr=subprocess.PIPE, shell=True) output = ret.stdout.read().strip() error = ret.stderr.read().strip() @@ -133,7 +136,8 @@ def shellcmd(cmd, show=False, verbose=False, ignore_errors=False): return output except Exception as err: - raise Exception("Error:: Command (%s) failed. Error message is %s" % (cmd, err)) + raise Exception( + "Error:: Command (%s) failed. Error message is %s" % (cmd, err)) def execute(cmd, showcmd=True, verbose=True): @@ -180,7 +184,8 @@ def swapdict(dic, check_ambiguity=True): """ # this version is more elegant but slightly slower : return {v:k for k,v in dic.items()} if check_ambiguity: - assert len(set(dic.keys())) == len(set(dic.values())), "values is not a set. ambiguities for keys." + assert len(set(dic.keys())) == len( + set(dic.values())), "values is not a set. ambiguities for keys." return dict(zip(dic.values(), dic.keys())) @@ -211,6 +216,7 @@ def mkdirs(newdir, mode=0o777): if err.errno != errno.EEXIST or not os.path.isdir(newdir): raise + class AttrDict(dict): """dictionary-like object that exposes its keys as attributes. @@ -231,7 +237,7 @@ class AttrDict(dict): .. doctest:: - >>> from easydev import AttrDict + >>> from openalea.agroservices.extern.easydev.tools import AttrDict >>> a = AttrDict(**{'value': 1}) >>> a.value 1 @@ -260,6 +266,7 @@ class AttrDict(dict): then *a* is indeed a dictionary. """ + def __init__(self, **kwargs): dict.__init__(self, kwargs) self.__dict__ = self @@ -273,7 +280,7 @@ def update(self, content): # accepts dict and attrdict classes try: from collections import OrderedDict - except: + except ImportError: OrderedDict = AttrDict if content.__class__ not in [dict, OrderedDict, AttrDict]: @@ -290,24 +297,26 @@ def from_json(self, filename): """ does not remove existing keys put replace them if already present """ - res = json.load(open(filename, "r")) - for k,v in res.items(): + res = ujson.load(open(filename, "r")) + for k, v in res.items(): self[k] = v def to_json(self, filename=None): - import json + import ujson if filename is not None: with open(filename, "w") as fout: - json.dump(self, fout) + ujson.dump(self, fout) else: - return json.dumps(self) + return ujson.dumps(self) -class DevTools(object): +class DevTools: """Aggregate of easydev.tools functions. 
""" - def check_range(self, value, a, b): + + @staticmethod + def check_range(value, a, b): """wrapper around :func:`easydev.check_range`""" check_range(value, a, b, strict=False) @@ -317,52 +326,57 @@ def check_param_in_list(self, param, valid_values): for name in param: check_param_in_list(name, list(valid_values)) - def swapdict(self, d): + @staticmethod + def swapdict(d): """wrapper around :func:`easydev.swapdict`""" return swapdict(d) - def to_list(self, query): + @staticmethod + def to_list(query): """Cast to a list if possible 'a' ->['a'] 1 -> [1] """ - from easydev import codecs + from openalea.agroservices.extern.easydev import codecs return codecs.to_list(query) - def list2string(self, query, sep=",", space=False): + @staticmethod + def list2string(query, sep=",", space=False): """ see :func:`easydev.tools.list2string` """ - from easydev import codecs + from openalea.agroservices.extern.easydev import codecs return codecs.list2string(query, sep=sep, space=space) - def to_json(self, dictionary): + @staticmethod + def to_json(dictionary): """Transform a dictionary to a json object""" - return json.dumps(dictionary) + return ujson.dumps(dictionary) - def mkdir(self, dirname): - """Create a directory if it does not exists; pass without error otherwise""" + @staticmethod + def mkdir(dirname): + """Create a directory if it does not exist; pass without error otherwise""" try: os.mkdir(dirname) except OSError: - pass # exists already + pass # exists already except Exception as err: - raise(err) + raise err - def shellcmd(self, cmd, show=False, verbose=False, ignore_errors=False): + @staticmethod + def shellcmd(cmd, show=False, verbose=False, ignore_errors=False): """See :func:`shellcmd`""" - return shellcmd(cmd, show=show, verbose=verbose, ignore_errors=ignore_errors) + return shellcmd(cmd, show=show, verbose=verbose, + ignore_errors=ignore_errors) - def check_exists(self, filename): - """Raise error message if the file does not exists""" + @staticmethod + def check_exists(filename): + """Raise error message if the file does not exist""" if os.path.exists(filename) is False: raise ValueError("This file %s does not exists" % filename) - def mkdirs(self, dirname, mode=0o777): + @staticmethod + def mkdirs(dirname, mode=0o777): mkdirs(dirname, mode=mode) - - - - diff --git a/src/agroservices/extern/xmltools.py b/src/openalea/agroservices/extern/xmltools.py similarity index 100% rename from src/agroservices/extern/xmltools.py rename to src/openalea/agroservices/extern/xmltools.py diff --git a/src/agroservices/ipm/__init__.py b/src/openalea/agroservices/ipm/__init__.py similarity index 100% rename from src/agroservices/ipm/__init__.py rename to src/openalea/agroservices/ipm/__init__.py diff --git a/src/agroservices/ipm/data/GeoJson.json b/src/openalea/agroservices/ipm/data/GeoJson.json similarity index 100% rename from src/agroservices/ipm/data/GeoJson.json rename to src/openalea/agroservices/ipm/data/GeoJson.json diff --git a/src/agroservices/ipm/data/IPM Decisions Weather API tests.postman_collection.json b/src/openalea/agroservices/ipm/data/IPM Decisions Weather API tests.postman_collection.json similarity index 100% rename from src/agroservices/ipm/data/IPM Decisions Weather API tests.postman_collection.json rename to src/openalea/agroservices/ipm/data/IPM Decisions Weather API tests.postman_collection.json diff --git a/src/agroservices/ipm/data/countries.json b/src/openalea/agroservices/ipm/data/countries.json similarity index 100% rename from src/agroservices/ipm/data/countries.json 
rename to src/openalea/agroservices/ipm/data/countries.json diff --git a/src/agroservices/ipm/data/dss_meta_data.json b/src/openalea/agroservices/ipm/data/dss_meta_data.json similarity index 100% rename from src/agroservices/ipm/data/dss_meta_data.json rename to src/openalea/agroservices/ipm/data/dss_meta_data.json diff --git a/src/agroservices/ipm/data/model_input_psilarobse.json b/src/openalea/agroservices/ipm/data/model_input_psilarobse.json similarity index 100% rename from src/agroservices/ipm/data/model_input_psilarobse.json rename to src/openalea/agroservices/ipm/data/model_input_psilarobse.json diff --git a/src/agroservices/ipm/data/model_input_psilartemp.json b/src/openalea/agroservices/ipm/data/model_input_psilartemp.json similarity index 100% rename from src/agroservices/ipm/data/model_input_psilartemp.json rename to src/openalea/agroservices/ipm/data/model_input_psilartemp.json diff --git a/src/agroservices/ipm/data/modeloutput.json b/src/openalea/agroservices/ipm/data/modeloutput.json similarity index 100% rename from src/agroservices/ipm/data/modeloutput.json rename to src/openalea/agroservices/ipm/data/modeloutput.json diff --git a/src/agroservices/ipm/data/schema_fieldobservation.json b/src/openalea/agroservices/ipm/data/schema_fieldobservation.json similarity index 100% rename from src/agroservices/ipm/data/schema_fieldobservation.json rename to src/openalea/agroservices/ipm/data/schema_fieldobservation.json diff --git a/src/agroservices/ipm/data/schema_geojson.json b/src/openalea/agroservices/ipm/data/schema_geojson.json similarity index 100% rename from src/agroservices/ipm/data/schema_geojson.json rename to src/openalea/agroservices/ipm/data/schema_geojson.json diff --git a/src/agroservices/ipm/data/schema_weatherdata.json b/src/openalea/agroservices/ipm/data/schema_weatherdata.json similarity index 100% rename from src/agroservices/ipm/data/schema_weatherdata.json rename to src/openalea/agroservices/ipm/data/schema_weatherdata.json diff --git a/src/agroservices/ipm/data/test_yaml_validate.yaml b/src/openalea/agroservices/ipm/data/test_yaml_validate.yaml similarity index 100% rename from src/agroservices/ipm/data/test_yaml_validate.yaml rename to src/openalea/agroservices/ipm/data/test_yaml_validate.yaml diff --git a/src/agroservices/ipm/data/weather_data.json b/src/openalea/agroservices/ipm/data/weather_data.json similarity index 100% rename from src/agroservices/ipm/data/weather_data.json rename to src/openalea/agroservices/ipm/data/weather_data.json diff --git a/src/agroservices/ipm/datadir.py b/src/openalea/agroservices/ipm/datadir.py similarity index 77% rename from src/agroservices/ipm/datadir.py rename to src/openalea/agroservices/ipm/datadir.py index 0a2f606..3dd9d22 100644 --- a/src/agroservices/ipm/datadir.py +++ b/src/openalea/agroservices/ipm/datadir.py @@ -1,15 +1,19 @@ import os -import json +import ujson datadir = os.path.dirname(__file__) + '/data/' + def postman_tests(): """Undeclared options of weatheradapter services""" - with open(datadir + 'IPM Decisions Weather API tests.postman_collection.json') as jsonfile: - postman = json.load(jsonfile) + with open( + datadir + 'IPM Decisions Weather API tests.postman_collection.json') as jsonfile: + postman = ujson.load(jsonfile) adapters = {it['name']: it for it in - [item for item in postman['item'] if item['name'] == 'WeatherAdapterService'][0]['item'] + [item for item in postman['item'] if + item['name'] == 'WeatherAdapterService'][0]['item'] } + def _read(test): d = dict() d['name'] = test['name'] @@ 
-17,6 +21,7 @@ def _read(test): d['endpoint'] = '/'.join(url['host'] + url['path']) d['call'] = {it['key']: it['value'] for it in url['query']} return d + mapping = {'FruitWeb/Davis': 'info.fruitweb', 'MeteoBot': 'com.meteobot', 'Metos (FieldClimate)': 'net.ipmdecisions.metos', @@ -26,7 +31,8 @@ def _read(test): 'FMI (Finnish Meteorological Service) forecasts': 'fi.fmi.forecast.location', 'DMI (Danish Meteorological Service) PointWeb GRID': 'dk.dmi.pointweather', 'SLU Lantmet (Sweden) GRID': 'se.slu.lantmet'} - return {mapping[k] : _read(v) for k,v in adapters.items()} + return {mapping[k]: _read(v) for k, v in adapters.items()} + def country_mapping(): """mapping of alpha3 to alpha 2 country codes @@ -34,5 +40,5 @@ def country_mapping(): {c.alpha_3: c.alpha_2 for c in pycountry.countries} """ with open(datadir + 'countries.json') as input: - mapping = json.load(input) + mapping = ujson.load(input) return mapping diff --git a/src/agroservices/ipm/fakers.py b/src/openalea/agroservices/ipm/fakers.py similarity index 98% rename from src/agroservices/ipm/fakers.py rename to src/openalea/agroservices/ipm/fakers.py index 8148951..4567ab4 100644 --- a/src/agroservices/ipm/fakers.py +++ b/src/openalea/agroservices/ipm/fakers.py @@ -2,11 +2,11 @@ import datetime import random -import json +import ujson from copy import deepcopy from faker import Faker from jsf import JSF -from agroservices.ipm.datadir import country_mapping +from openalea.agroservices.ipm.datadir import country_mapping Geojson_point = """{{ "type": "FeatureCollection", @@ -244,7 +244,7 @@ def model_field_observations(model, quantifications, latitude=None, longitude=No length = len(quantifications) latitude = random.uniform(0, 90) if latitude is None else latitude longitude = random.uniform(0, 180) if longitude is None else longitude - location = json.loads(Geojson_point.format(longitude=longitude, latitude=latitude)) + location = ujson.loads(Geojson_point.format(longitude=longitude, latitude=latitude)) if time is None: start = datetime.datetime.today().astimezone() time = [(start + datetime.timedelta(days=i)).isoformat() for i in range(length)] diff --git a/src/agroservices/ipm/fixes.py b/src/openalea/agroservices/ipm/fixes.py similarity index 100% rename from src/agroservices/ipm/fixes.py rename to src/openalea/agroservices/ipm/fixes.py diff --git a/src/agroservices/ipm/ipm.py b/src/openalea/agroservices/ipm/ipm.py similarity index 94% rename from src/agroservices/ipm/ipm.py rename to src/openalea/agroservices/ipm/ipm.py index 7dfb3b1..8d9b11a 100644 --- a/src/agroservices/ipm/ipm.py +++ b/src/openalea/agroservices/ipm/ipm.py @@ -10,14 +10,16 @@ ################## Interface Python IPM using Bioservice ######################################################## -import json from pathlib import Path from typing import Union -import agroservices.ipm.fakers as fakers -import agroservices.ipm.fixes as fixes -from agroservices.ipm.datadir import datadir -from agroservices.services import REST +import ujson +import yaml + +import openalea.agroservices.ipm.fakers as fakers +import openalea.agroservices.ipm.fixes as fixes +from openalea.agroservices.ipm.datadir import datadir +from openalea.agroservices.services import REST __all__ = ["IPM"] @@ -25,7 +27,7 @@ def load_model(dssid, model): model = fixes.fix_prior_load_model(dssid, model) if 'input_schema' in model['execution']: - model['execution']['input_schema'] = json.loads( + model['execution']['input_schema'] = ujson.loads( model['execution']['input_schema']) model = 
fixes.fix_load_model(dssid, model) return model @@ -42,7 +44,7 @@ class IPM(REST): Interface to the IPM https://ipmdecisions.nibio.no/ .. doctest:: - >>> from agroservices.ipm.ipm import IPM + >>> from openalea.agroservices.ipm.ipm import IPM >>> ipm = IPM() WeatherMetaDataService @@ -102,7 +104,7 @@ def __init__(self, name='IPM', url="https://platform.ipmdecisions.net", url=url, *args, **kwargs) - self.callback = callback # use in all methods) + self.callback = callback # use in all methods ########################## MetaDataService ########################################## @@ -143,12 +145,11 @@ def get_qc(self) -> list: # schema weather data def get_schema_weatherdata(self) -> dict: - """Get a schema that describes the IPM Decision platform's format for exchange of weather data + """Get a schema that describes the IPM Decision platform's format for + exchange of weather data - Returns - ------- - dict - the schema that describes the IPM Decision platform's format for exchange of weather data + Returns ------- dict the schema that describes the IPM Decision + platform's format for exchange of weather data """ res = self.http_get( "api/wx/rest/schema/weatherdata", @@ -175,12 +176,12 @@ def post_schema_weatherdata_validate(self, jsonfile: Union[ if the data is valid or not """ with open(jsonfile) as json_file: - data = json.load(json_file) + data = ujson.load(json_file) res = self.http_post( "api/wx/rest/schema/weatherdata/validate", frmt='json', - data=json.dumps(data), + data=ujson.dumps(data), headers={"Content-Type": "application/json"} ) return res @@ -222,7 +223,7 @@ def get_weatheradapter(self, source: dict, params: dict = None, if not source['authentication_type'] == 'CREDENTIALS': res = self.http_get(endpoint, params=params, frmt='json') else: - params['credentials'] = json.dumps(credentials) + params['credentials'] = ujson.dumps(credentials) res = self.http_post(endpoint, data=params, frmt='json') return res @@ -260,7 +261,7 @@ def get_weatherdatasource(self, source_id=None, access_type=None, for r in res: if 'geoJSON' in r['spatial']: if r['spatial']['geoJSON'] is not None: - r['spatial']['geoJSON'] = json.loads( + r['spatial']['geoJSON'] = ujson.loads( r['spatial']['geoJSON']) sources = {item['id']: item for item in res} @@ -307,12 +308,12 @@ def post_weatherdatasource_location( ) with open(geoJsonfile) as json_file: - data = json.load(json_file) + data = ujson.load(json_file) res = self.http_post( "api/wx/rest/weatherdatasource/location", frmt='json', - data=json.dumps(data), + data=ujson.dumps(data), params=params, headers={"Content-Type": "application/json"} @@ -428,7 +429,7 @@ def get_dss(self, execution_type=None) -> dict: def post_dss_location( self, geoJsonfile: Union[str, Path] = "GeoJson.json") -> list: - """Search for DSS models that have been validated for the specific location. The location can by any valid Geometry, such as Point or Polygon. Example geoJson input + """Search for DSS models that have been validated for the specific location. The location can be any valid Geometry, such as Point or Polygon. 
Example geoJson input Parameters ---------- @@ -441,12 +442,12 @@ def post_dss_location( A list of all the matching DSS models """ with open(geoJsonfile) as json_file: - data = json.load(json_file) + data = ujson.load(json_file) res = self.http_post( "api/dss/rest/dss/location", frmt='json', - data=json.dumps(data), + data=ujson.dumps(data), headers={"Content-Type": "application/json"} ) return res @@ -464,7 +465,7 @@ def get_dssId( Returns ------- dict - informations about a specific DSS + information about a specific DSS """ res = self.http_get( "api/dss/rest/dss/{}".format(DSSId), @@ -486,7 +487,7 @@ def get_cropCode( Returns ------- dict - all informations about DSS corresponding of cropCode + all information about DSS corresponding of cropCode """ res = self.http_get( "api/dss/rest/dss/crop/{}".format(cropCode), @@ -676,12 +677,12 @@ def post_schema_modeloutput_validate( if the data is valid or not """ with open(jsonfile) as json_file: - data = json.load(json_file) + data = ujson.load(json_file) res = self.http_post( "api/dss/rest/schema/modeloutput/validate", frmt='json', - data=json.dumps(data), + data=ujson.dumps(data), headers={"Content-Type": "application/json"} ) @@ -718,14 +719,14 @@ def post_schema_dss_yaml_validate( def write_weatherdata_schema(self): schema = self.get_schema_weatherdata() - json_object = json.dumps(schema, indent=4) + json_object = ujson.dumps(schema, indent=4) with open(datadir + "schema_weatherdata.json", "w") as outfile: outfile.write(json_object) def write_fieldobservation_schema(self): schema = self.get_schema_fieldobservation() - json_object = json.dumps(schema, indent=4) + json_object = ujson.dumps(schema, indent=4) with open(datadir + "schema_fieldobservation.json", "w") as outfile: outfile.write(json_object) @@ -764,7 +765,7 @@ def run_model( res = self.http_post( endpoint, frmt='json', - data=json.dumps(input_data), + data=ujson.dumps(input_data), headers={"Content-Type": "application/json"}, timeout=timeout ) @@ -772,7 +773,7 @@ def run_model( res = self.http_post( endpoint, frmt='json', - data=json.dumps(input_data), + data=ujson.dumps(input_data), headers={"Content-Type": "application/json"} ) diff --git a/src/agroservices/phis/__init__.py b/src/openalea/agroservices/phis/__init__.py similarity index 100% rename from src/agroservices/phis/__init__.py rename to src/openalea/agroservices/phis/__init__.py diff --git a/src/agroservices/phis/phis.py b/src/openalea/agroservices/phis/phis.py similarity index 90% rename from src/agroservices/phis/phis.py rename to src/openalea/agroservices/phis/phis.py index 4c9fe8d..42c58b3 100644 --- a/src/agroservices/phis/phis.py +++ b/src/openalea/agroservices/phis/phis.py @@ -8,12 +8,11 @@ """ Web service to GET and POST data to phis v1 """ # ============================================================================== -import urllib from urllib.parse import quote import requests import six -from agroservices.services import REST +from openalea.agroservices.services import REST # ============================================================================== @@ -30,7 +29,7 @@ def __init__(self, name='Phis', url=url, *args, **kwargs) - self.callback = callback # use in all methods) + self.callback = callback # use in all methods def post_json(self, web_service, json_txt, timeout=10., overwriting=False, **kwargs): @@ -79,7 +78,7 @@ def get_all_data(self, web_service, timeout=10., **kwargs): :param web_service: (str) name of web service requested :param timeout: (float) timeout for connexion in seconds - :param 
kwargs: (str) arguments relative to web service (see http://147.100.202.17/m3p/api-docs/) + :param kwargs: (str) arguments relative to web service (see http://147.100.202.17/m3p/wapi-docs/) :return: (list of dict) data relative to web service and parameters """ @@ -92,16 +91,23 @@ def get_all_data(self, web_service, timeout=10., **kwargs): kwargs['pageSize'] = 50000 else: kwargs['pageSize'] = 10 + if kwargs['sessionId'] is not None: + headers = {'Authorization': f'{kwargs["sessionId"]}'} + else: + headers = None while total_pages > current_page: kwargs['page'] = current_page response = requests.request(method='GET', url=self.url + web_service, + headers=headers, params=kwargs, timeout=timeout) if response.status_code == 200: values.extend(response.json()) elif response.status_code == 500: - raise Exception("Server error") + print() + raise Exception( + "Server error " + response.json()["result"]["message"]) else: raise Exception( response.json()["result"]["message"]) @@ -143,7 +149,7 @@ def ws_projects(self, session_id, project_name=''): :return: (list of dict) projects list (one value only in list if project_name specified) """ - return self.get_all_data('projects/' + project_name, + return self.get_all_data('core/projects/?name=' + project_name, sessionId=session_id) def ws_germplasms(self, session_id, experiment_uri=None, species_uri=None, @@ -163,12 +169,20 @@ def ws_germplasms(self, session_id, experiment_uri=None, species_uri=None, raise Exception( "You must specify one of experiment_uri, species_uri or germplasms_uri") if isinstance(germplasm_uri, six.string_types): - return self.get_all_data('germplasms/' + quote( - germplasm_uri), sessionId=session_id) + return self.get_all_data('core/germplasm?uri=' + quote( + germplasm_uri), sessionId=session_id) else: if isinstance(experiment_uri, list): experiment_uri = ','.join(experiment_uri) - return self.get_all_data('germplasms', + + query = "core/germplasm" + if germplasm_uri is not None: + query += "?uri=" + quote(germplasm_uri) + if species_uri is not None: + query += "?species=" + quote(species_uri) + if experiment_uri is not None: + query += "?experiment=" + quote(experiment_uri) + return self.get_all_data(query, sessionId=session_id, experimentURI=experiment_uri, speciesURI=species_uri, @@ -227,7 +241,7 @@ def ws_variables(self, session_id, experiment_uri, category='environment', :return: (list of dict) available variables for an experiment """ - return self.get_all_data('variables/category/' + category, + return self.get_all_data('core/variables' + category, sessionId=session_id, experimentURI=experiment_uri, imageryProvider=provider) @@ -240,17 +254,18 @@ def ws_experiments(self, session_id, project_name=None, season=None, :param session_id: (str) token got from ws_token() :param project_name: (str) specify a project name to get specifics experiments information - :param season: (int or str) find experiments by season (eg. 2012, 2013...) + :param season: (int or str) find experiments by season (e.g. 2012, 2013...) 
:param experiment_uri: (str) specify an experiment URI to get detailed information :return: (list of dict) experiments information """ if project_name is None and season is None and experiment_uri is None: raise Exception( - "You must specify one parameter of project_name, season or experiment_uri") + "You must specify one parameter of project_name, season or " + "experiment_uri") if isinstance(experiment_uri, six.string_types): return self.get_all_data('core/experiments/' + quote( - experiment_uri) + '/details', + experiment_uri) + '/details', sessionId=session_id) else: return self.get_all_data('core/experiments', @@ -348,7 +363,7 @@ def ws_plant_moves(self, session_id, experiment_uri, plant_uri, (list of dict) plant moves data """ return self.get_all_data('plants/' + quote( - plant_uri) + '/moves', + plant_uri) + '/moves', timeout=20., sessionId=session_id, experimentURI=experiment_uri, @@ -393,7 +408,7 @@ def ws_images_analysis(self, session_id, experiment_uri, date=None, :param session_id: (str) token got from ws_token() :param experiment_uri: (str) an experiment URI - :param date: (str) retrieve phenotypes data from images which have been took at a specific day . Format :yyyy-MM-dd + :param date: (str) retrieve phenotypes data from images which have been taken at a specific day . Format :yyyy-MM-dd :param provider: (str) origin of the data :param label_view: (str) label view, something like side0, side30, ..., side330, top0 :param variables_name: (str or list of str) name of one or several weighing variables @@ -419,3 +434,22 @@ def ws_images_analysis(self, session_id, experiment_uri, date=None, date=date, provider=provider, labelView=label_view, variablesName=variables_name) + + def ws_species(self, session_id, name=None, uri=None): + """ Get images analysis data for a specific experiment + See http://147.100.202.17/m3p/api-docs/ for exact documentation + + :param session_id: (str) token got from ws_token() + :param name: (str) the common name of the plant + :param uri: (str) plant URI to get only values specified plant + :return: + (list of dict) images analysis data for a specific experiment + """ + if name is not None: + return self.get_all_data('core/species', + sessionId=session_id, + name=name) + elif uri is not None: + return self.get_all_data('core/species', + sessionId=session_id, + uri=uri) diff --git a/src/agroservices/services.py b/src/openalea/agroservices/services.py similarity index 83% rename from src/agroservices/services.py rename to src/openalea/agroservices/services.py index ef9a98c..9548626 100644 --- a/src/agroservices/services.py +++ b/src/openalea/agroservices/services.py @@ -15,7 +15,7 @@ # documentation: http://packages.python.org/bioservices # ############################################################################## -#$Id$ +# $Id$ """Modules with common tools to access web resources""" from __future__ import print_function from __future__ import division @@ -27,18 +27,21 @@ import traceback from .settings import AgroServicesConfig -from agroservices.extern.xmltools import easyXML +from openalea.agroservices.extern.xmltools import easyXML # fixing compatiblity python 2 and 3 related to merging or urllib and urllib2 in python 3 try: - #python 3 + # python 3 from urllib.request import urlopen from urllib.parse import urlparse, urlencode from urllib.error import HTTPError from urllib.request import Request except: from urllib import urlencode - from urllib2 import urlopen, Request, HTTPError + from urllib2 import urlopen, Request, HTTPError + +from 
openalea.agroservices.extern.easydev.logging_tools import Logging +from openalea.agroservices.extern.easydev.tools import DevTools # fixing compatibility issue of input/raw_input if 'raw_input' in __builtins__: input = raw_input @@ -48,10 +51,6 @@ sys.path = [x for x in sys.path if 'suds-' not in x] -from agroservices.extern.easydev.logging_tools import Logging -from agroservices.extern.easydev.tools import DevTools - - __all__ = ["Service", "WSDLService", "AgroServicesError", "REST"] @@ -64,7 +63,7 @@ def __str__(self): return repr(self.value) -class Service(object): +class Service: """Base class for WSDL and REST classes .. seealso:: :class:`REST`, :class:`WSDLService` @@ -78,14 +77,14 @@ class Service(object): 404: 'Not found. The resource you requests does not exist', 405: 'Method not allowed', 406: "Not Acceptable. Usually headers issue", - 410: 'Gone. The resource you requested was removed.', + 410: 'Gone. The resource you requested was removed.', 415: "Unsupported Media Type", 500: 'Internal server error. Most likely a temporary problem', 503: 'Service not available. The server is being updated, try again later' - } + } def __init__(self, name, url=None, verbose=True, requests_per_sec=10, - url_defined_later=False): + url_defined_later=False): """.. rubric:: Constructor :param str name: a name for this service @@ -95,7 +94,7 @@ def __init__(self, name, url=None, verbose=True, requests_per_sec=10, :param requests_per_sec: maximum number of requests per seconds are restricted to 3. You can change that value. If you reach the limit, an error is raise. The reason for this limitation is - that some services (e.g.., NCBI) may black list you IP. + that some services (e.g.., NCBI) may blacklist you IP. If you need or can do more (e.g., ChEMBL does not seem to have restrictions), change the value. You can also have several instance but again, if you send too many requests at the same, your future @@ -112,7 +111,7 @@ def __init__(self, name, url=None, verbose=True, requests_per_sec=10, The attribute :attr:`~Service.debugLevel` can be used to set the behaviour of the logging messages. If the argument verbose is True, the debugLebel - is set to INFO. If verbose if False, the debugLevel is set to WARNING. + is set to INFO. If verbose is False, the debugLevel is set to WARNING. However, you can use the :attr:`debugLevel` attribute to change it to one of DEBUG, INFO, WARNING, ERROR, CRITICAL. debugLevel=WARNING means that only WARNING, ERROR and CRITICAL messages are shown. @@ -129,13 +128,14 @@ def __init__(self, name, url=None, verbose=True, requests_per_sec=10, urlopen(self.url, timeout=5) except Exception as err: if url_defined_later is False: - self.logging.warning("The URL (%s) provided cannot be reached." % self.url) + self.logging.warning( + "The URL (%s) provided cannot be reached." % self.url) self._easyXMLConversion = True # used by HGNC where some XML contains non-utf-8 characters !! # should be able to fix it with requests once HGNC works again - #self._fixing_unicode = False - #self._fixing_encoding = "utf-8" + # self._fixing_unicode = False + # self._fixing_encoding = "utf-8" self.devtools = DevTools() self.settings = AgroServicesConfig() @@ -146,7 +146,7 @@ def _calls(self): time_lapse = 1. 
/ self.requests_per_sec current_time = time.time() dt = current_time - self._last_call - + if self._last_call == 0: self._last_call = current_time return @@ -156,17 +156,17 @@ def _calls(self): return else: time.sleep(time_lapse - dt) - - def _get_caching(self): return self.settings.params['cache.on'][0] + def _set_caching(self, caching): self.devtools.check_param_in_list(caching, [True, False]) self.settings.params['cache.on'][0] = caching # reset the session, which will be automatically created if we # access to the session attribute self._session = None + CACHING = property(_get_caching, _set_caching) def _get_url(self): @@ -177,6 +177,7 @@ def _set_url(self, url): if url is not None: url = url.rstrip("/") self._url = url + url = property(_get_url, _set_url, doc="URL of this service") def _get_easyXMLConversion(self): @@ -186,12 +187,13 @@ def _set_easyXMLConversion(self, value): if isinstance(value, bool) is False: raise TypeError("value must be a boolean value (True/False)") self._easyXMLConversion = value + easyXMLConversion = property(_get_easyXMLConversion, - _set_easyXMLConversion, - doc="""If True, xml output from a request are converted to easyXML object (Default behaviour).""") + _set_easyXMLConversion, + doc="""If True, xml output from a request are converted to easyXML object (Default behaviour).""") def easyXML(self, res): - """Use this method to convert a XML document into an + """Use this method to convert an XML document into an :class:`~agroservices.xmltools.easyXML` object The easyXML object provides utilities to ease access to the XML @@ -201,7 +203,7 @@ def easyXML(self, res): .. doctest:: - >>> from agroservices import * + >>> from openalea.agroservices import * >>> doc = " 1 2 " >>> s = Service("name") >>> res = s.easyXML(doc) @@ -216,7 +218,8 @@ def __str__(self): txt = "This is an instance of %s service" % self.name return txt - def pubmed(self, Id): + @staticmethod + def pubmed(Id): """Open a pubmed Id into a browser tab :param Id: a valid pubmed Id in string or integer format. 
@@ -229,17 +232,19 @@ def pubmed(self, Id): import webbrowser webbrowser.open(url + str(Id)) - def on_web(self, url): + @staticmethod + def on_web(url): """Open a URL into a browser""" import webbrowser webbrowser.open(url) - def save_str_to_image(self, data, filename): + @staticmethod + def save_str_to_image(data, filename): """Save string object into a file converting into binary""" - with open(filename,'wb') as f: + with open(filename, 'wb') as f: import binascii try: - #python3 + # python3 newres = binascii.a2b_base64(bytes(data, "utf-8")) except: newres = binascii.a2b_base64(data) @@ -284,8 +289,9 @@ def __init__(self, name, url, verbose=True, cache=False): # reference to the service self.serv = self.suds.service self._update_settings() - except Exception : - self.logging.error("Could not connect to the service %s " % self.url) + except Exception: + self.logging.error( + "Could not connect to the service %s " % self.url) raise Exception def _update_settings(self): @@ -298,15 +304,17 @@ def wsdl_methods_info(self): print('%s(%s) ' % ( method.name, ', '.join('type:%s: %s - element %s' % - (part.type, part.name, part.element) for part in - method.soap.input.body.parts))) + (part.type, part.name, part.element) for part in + method.soap.input.body.parts))) except: print(method) + def _get_methods(self): return [x.name for x in self.suds.wsdl.services[0].ports[0].methods.values()] + wsdl_methods = property(_get_methods, - doc="returns methods available in the WSDL service") + doc="returns methods available in the WSDL service") def wsdl_create_factory(self, name, **kargs): params = self.suds.factory.create(name) @@ -319,7 +327,7 @@ def wsdl_create_factory(self, name, **kargs): import agroservices params.tool = "BioServices, " + agroservices.__version__ - for k,v in kargs.items(): + for k, v in kargs.items(): from suds import sudsobject keys = sudsobject.asdict(params).keys() if k in keys: @@ -331,19 +339,22 @@ def wsdl_create_factory(self, name, **kargs): def _get_timeout(self): return self.suds.options.timeout + def _set_timeout(self, value): self.suds.set_options(timeout=value) self.settings.TIMEOUT = value + TIMEOUT = property(_get_timeout, _set_timeout) class RESTbase(Service): _service = "REST" + def __init__(self, name, url=None, verbose=True, requests_per_sec=3, url_defined_later=False): super(RESTbase, self).__init__(name, url, verbose=verbose, - requests_per_sec=requests_per_sec, - url_defined_later=url_defined_later) + requests_per_sec=requests_per_sec, + url_defined_later=url_defined_later) self.logging.info("Initialising %s service (REST)" % self.name) self.last_response = None @@ -361,11 +372,12 @@ def http_delete(self): raise NotImplementedError - -import requests # replacement for urllib2 (2-3 times faster) +import requests # replacement for urllib2 (2-3 times faster) from requests.models import Response -import requests_cache # use caching wihh requests -#import grequests # use asynchronous requests with gevent +import requests_cache # use caching with requests + + +# import grequests # use asynchronous requests with gevent # Note that grequests should be imported after requests_cache. 
Otherwise, # one should use a session instance when calling grequests.get, which we do # here below @@ -379,7 +391,7 @@ class REST(RESTbase): Get one value:: - >>> from agroservices import REST + >>> from openalea.agroservices.services import REST >>> s = REST("test", "https://www.ebi.ac.uk/chemblws") >>> res = s.get_one("targets/CHEMBL2476.json", "json") >>> res['organism'] @@ -394,7 +406,7 @@ class REST(RESTbase): >>> # requests will be stored in a local sqlite database >>> s.get_one("targets/CHEMBL2476") >>> # Disconnect your wiki and any network connections. - >>> # Without caching you cannot fetch any requests but with + >>> # Without caching, you cannot fetch any requests but with >>> # the CACHING on, you can retrieve previous requests: >>> s.get_one("targets/CHEMBL2476") @@ -452,13 +464,15 @@ class REST(RESTbase): 'xml': 'application/xml', 'yaml': 'text/x-yaml' } - #special_characters = ['/', '#', '+'] + + # special_characters = ['/', '#', '+'] def __init__(self, name, url=None, verbose=True, cache=False, - requests_per_sec=3, proxies=[], cert=None, url_defined_later=False): + requests_per_sec=3, proxies=[], cert=None, + url_defined_later=False): super(REST, self).__init__(name, url, verbose=verbose, - requests_per_sec=requests_per_sec, - url_defined_later=url_defined_later) + requests_per_sec=requests_per_sec, + url_defined_later=url_defined_later) self.proxies = proxies self.cert = cert @@ -470,7 +484,7 @@ def __init__(self, name, url=None, verbose=True, cache=False, self.settings.params['cache.on'][0] = cache if self.CACHING: - #import requests_cache + # import requests_cache self.logging.info("Using local cache %s" % self.CACHE_NAME) requests_cache.install_cache(self.CACHE_NAME) @@ -485,7 +499,8 @@ def delete_cache(self): else: self.logging.info("Reply 'y' to delete the file") - def clear_cache(self): + @staticmethod + def clear_cache(): from requests_cache import clear clear() @@ -510,6 +525,7 @@ def _get_session(self): else: self._session = self._create_session() return self._session + session = property(_get_session) def _create_session(self): @@ -519,8 +535,9 @@ def _create_session(self): """ self.logging.debug("Creating session (uncached version)") self._session = requests.Session() - adapter = requests.adapters.HTTPAdapter(max_retries=self.settings.MAX_RETRIES) - #, pool_block=True does not work with asynchronous requests + adapter = requests.adapters.HTTPAdapter( + max_retries=self.settings.MAX_RETRIES) + # , pool_block=True does not work with asynchronous requests self._session.mount('http://', adapter) self._session.mount('https://', adapter) return self._session @@ -529,16 +546,19 @@ def _create_cache_session(self): """Creates a cached session using requests_cache package""" self.logging.debug("Creating session (cache version)") if not self._session: - #import requests_cache + # import requests_cache self.logging.debug("No cached session created yet. 
Creating one") self._session = requests_cache.CachedSession(self.CACHE_NAME, - backend='sqlite', fast_save=self.settings.FAST_SAVE) + backend='sqlite', + fast_save=self.settings.FAST_SAVE) return self._session def _get_timeout(self): return self.settings.TIMEOUT + def _set_timeout(self, value): self.settings.TIMEOUT = value + TIMEOUT = property(_get_timeout, _set_timeout) def _process_get_request(self, url, session, frmt, data=None, **kwargs): @@ -567,7 +587,8 @@ def _interpret_returned_request(self, res, frmt): # finally return res.content - def _apply(self, iterable, fn, *args, **kwargs): + @staticmethod + def _apply(iterable, fn, *args, **kwargs): return [fn(x, *args, **kwargs) for x in iterable if x is not None] def _get_async(self, keys, frmt='json', params={}): @@ -578,10 +599,12 @@ def _get_async(self, keys, frmt='json', params={}): # build the requests urls = self._get_all_urls(keys, frmt) self.logging.debug("grequests.get processing") - rs = (grequests.get(url, session=session, params=params) for key,url in zip(keys, urls)) + rs = (grequests.get(url, session=session, params=params) for + key, url in zip(keys, urls)) # execute them self.logging.debug("grequests.map call") - ret = grequests.map(rs, size=min(self.settings.CONCURRENT, len(keys))) + ret = grequests.map(rs, + size=min(self.settings.CONCURRENT, len(keys))) self.last_response = ret self.logging.debug("grequests.map call done") return ret @@ -607,19 +630,21 @@ def http_get(self, query, frmt='json', params={}, **kargs): * if list is larger than ASYNC_THRESHOLD, use asynchronous call. """ - if isinstance(query, list) and len(query) > self.settings.ASYNC_THRESHOLD: + if isinstance(query, list) and len( + query) > self.settings.ASYNC_THRESHOLD: self.logging.debug("Running async call for a list") return self.get_async(query, frmt, params=params, **kargs) - if isinstance(query, list) and len(query) <= self.settings.ASYNC_THRESHOLD: + if isinstance(query, list) and len( + query) <= self.settings.ASYNC_THRESHOLD: self.logging.debug("Running sync call for a list") - return [self.get_one(key, frmt, params=params, **kargs) for key in query] - #return self.get_sync(query, frmt) + return [self.get_one(key, frmt, params=params, **kargs) for key in + query] + # return self.get_sync(query, frmt) # OTHERWISE self.logging.debug("Running http_get (single call mode)") - #return self.get_one(**{'frmt': frmt, 'query': query, 'params':params}) - + # return self.get_one(**{'frmt': frmt, 'query': query, 'params':params}) # if user provide a content, let us use it, otherwise, it will be the # same as the frmt provided @@ -629,15 +654,14 @@ def http_get(self, query, frmt='json', params={}, **kargs): # agroservices and the content defined here above headers = kargs.get("headers") if headers is None: - headers = {} - headers['User-Agent'] = self.getUserAgent() + headers = {'User-Agent': self.getUserAgent()} if content is None: headers['Accept'] = self.content_types[frmt] else: headers['Accept'] = content kargs.update({"headers": headers}) - return self.get_one(query, frmt=frmt, params=params, **kargs) + return self.get_one(query, frmt=frmt, params=params, **kargs) def get_one(self, query=None, frmt='json', params={}, **kargs): """ @@ -647,9 +671,9 @@ def get_one(self, query=None, frmt='json', params={}, **kargs): self._calls() url = self._build_url(query) - if url.count('//') >1: + if url.count('//') > 1: self.logging.warning("URL of the services contains a double //." 
+ - "Check your URL and remove trailing /") + "Check your URL and remove trailing /") self.logging.debug(url) try: kargs['params'] = params @@ -661,7 +685,7 @@ def get_one(self, query=None, frmt='json', params={}, **kargs): if hasattr(self, 'authentication'): kargs['auth'] = self.authentication - #res = self.session.get(url, **{'timeout':self.TIMEOUT, 'params':params}) + # res = self.session.get(url, **{'timeout':self.TIMEOUT, 'params':params}) res = self.session.get(url, **kargs) self.last_response = res @@ -675,36 +699,35 @@ def get_one(self, query=None, frmt='json', params={}, **kargs): except Exception as err: self.logging.critical(err) self.logging.critical("""Query unsuccesful. Maybe too slow response. - Consider increasing it with settings.TIMEOUT attribute {}""".format(self.settings.TIMEOUT)) + Consider increasing it with settings.TIMEOUT attribute {}""".format( + self.settings.TIMEOUT)) def http_post(self, query, params=None, data=None, - frmt='xml', headers=None, files=None, content=None, **kargs): + frmt='xml', headers=None, files=None, content=None, **kargs): # query and frmt are agroservices parameters. Others are post parameters # NOTE in requests.get you can use params parameter # BUT in post, you use data # only single post implemented for now unlike get that can be asynchronous # or list of queries - # if user provide a header, we use it otherwise, we use the header from # agroservices and the content defined here above if headers is None: - headers = {} - headers['User-Agent'] = self.getUserAgent() + headers = {'User-Agent': self.getUserAgent()} if content is None: headers['Accept'] = self.content_types[frmt] else: headers['Accept'] = content self.logging.debug("Running http_post (single call mode)") - kargs.update({'query':query}) - kargs.update({'headers':headers}) - kargs.update({'files':files}) + kargs.update({'query': query}) + kargs.update({'headers': headers}) + kargs.update({'files': files}) kargs['proxies'] = self.proxies kargs['cert'] = self.cert - kargs.update({'params':params}) - kargs.update({'data':data}) - kargs.update({'frmt':frmt}) + kargs.update({'params': params}) + kargs.update({'data': data}) + kargs.update({'frmt': frmt}) return self.post_one(**kargs) def post_one(self, query=None, frmt='json', **kargs): @@ -719,25 +742,27 @@ def post_one(self, query=None, frmt='json', **kargs): try: return res.decode() except: - self.logging.debug("BioServices:: Could not decode the response") + self.logging.debug( + "BioServices:: Could not decode the response") return res except Exception as err: traceback.print_exc() return None - def getUserAgent(self): - #self.logging.info('getUserAgent: Begin') + @staticmethod + def getUserAgent(): + # self.logging.info('getUserAgent: Begin') urllib_agent = 'Python-requests/%s' % requests.__version__ - #clientRevision = '' - from agroservices import version + # clientRevision = '' + from openalea.agroservices import version clientVersion = version user_agent = 'AgroServices/%s (agroservices.%s; Python %s; %s) %s' % ( clientVersion, os.path.basename(__file__), platform.python_version(), platform.system(), urllib_agent ) - #self.logging.info('getUserAgent: user_agent: ' + user_agent) - #self.logging.info('getUserAgent: End') + # self.logging.info('getUserAgent: user_agent: ' + user_agent) + # self.logging.info('getUserAgent: End') return user_agent def get_headers(self, content='default'): @@ -746,12 +771,11 @@ def get_headers(self, content='default'): so that it has the same behaviour as urllib2 (Sept 2014) """ - headers = {} - 
headers['User-Agent'] = self.getUserAgent() - headers['Accept'] = self.content_types[content] - headers['Content-Type'] = self.content_types[content] - #"application/json;odata=verbose" required in reactome - #headers['Content-Type'] = "application/json;odata=verbose" required in reactome + headers = {'User-Agent': self.getUserAgent(), + 'Accept': self.content_types[content], + 'Content-Type': self.content_types[content]} + # "application/json;odata=verbose" required in reactome + # headers['Content-Type'] = "application/json;odata=verbose" required in reactome return headers def debug_message(self): @@ -760,12 +784,11 @@ def debug_message(self): print(self.last_response.status_code) def http_delete(self, query, params=None, - frmt='xml', headers=None, **kargs): + frmt='xml', headers=None, **kargs): kargs.update({'query': query}) kargs.update({'params': params}) kargs.update({'frmt': frmt}) - return self.delete_one(**kargs) def delete_one(self, query, frmt='json', **kargs): diff --git a/src/agroservices/settings.py b/src/openalea/agroservices/settings.py similarity index 76% rename from src/agroservices/settings.py rename to src/openalea/agroservices/settings.py index ceb4707..d88f554 100644 --- a/src/agroservices/settings.py +++ b/src/openalea/agroservices/settings.py @@ -4,8 +4,10 @@ @author: cokelaer """ +import errno import os -from agroservices.extern.easydev.config_tools import DynamicConfigParser +from openalea.agroservices.extern.easydev.config_tools import \ + DynamicConfigParser import copy import shutil @@ -33,26 +35,29 @@ def underline(text, symbol="="): return text + "\n" + length * symbol - -#TODO Move some contents to easydev.config_tools +# TODO Move some contents to easydev.config_tools # first item if the value # second item if a type or TUPLE of types possible # third item is documentation defaultParams = { - 'user.email': ["unknown", (str), "email addresss that may be used in some utilities (e.g. EUtils)"], - 'general.timeout': [30, (int,float), ""], + 'user.email': ["unknown", str, + "email addresss that may be used in some utilities (e.g. 
EUtils)"], + 'general.timeout': [30, (int, float), ""], 'general.max_retries': [3, int, ''], 'general.async_concurrent': [50, int, ''], - 'general.async_threshold': [10, int, 'when to switch to asynchronous requests'], - 'cache.tag_suffix': ["_agroservices_database",str, 'suffix to append for cache databases'], + 'general.async_threshold': [10, int, + 'when to switch to asynchronous requests'], + 'cache.tag_suffix': ["_agroservices_database", str, + 'suffix to append for cache databases'], 'cache.on': [False, bool, 'CACHING on/off'], 'cache.fast': [True, bool, "FAST_SAVE option"], - 'chemspider.token': [None, (str, type(None)), 'token see http://www.chemspider.com'], + 'chemspider.token': [None, (str, type(None)), + 'token see http://www.chemspider.com'], } -class ConfigReadOnly(object): +class ConfigReadOnly: """A generic Config file handler Uses appdirs from ypi to handle the XDG protocol @@ -66,6 +71,7 @@ class ConfigReadOnly(object): config file is possible at any time (meth:`read_`) """ + def __init__(self, name=None, default_params={}): """name is going to be the generic name of the config folder @@ -87,7 +93,7 @@ def __init__(self, name=None, default_params={}): self.config_parser = DynamicConfigParser() # Now, create the missing directories if needed - self.init() # and read the user config file updating params if needed + self.init() # and read the user config file updating params if needed def read_user_config_file_and_update_params(self): """Read the configuration file and update parameters @@ -115,7 +121,8 @@ def read_user_config_file_and_update_params(self): msg = "Welcome to %s" % self.name.capitalize() print(underline(msg)) print("It looks like you do not have a configuration file.") - print("We are creating one with default values in %s ." % self.user_config_file_path) + print( + "We are creating one with default values in %s ." % self.user_config_file_path) print("Done") self.create_default_config_file() @@ -132,10 +139,14 @@ def read_user_config_file_and_update_params(self): if isinstance(value, cast) is True: self.params[newkey][0] = value else: - print("Warning:: found an incorrect type while parsing {} file. In section '{}', the option '{}' should be a {}. Found value {}. Trying a cast...".format(self.user_config_file_path, section, key, cast, value)) + print( + "Warning:: found an incorrect type while parsing {} file. In section '{}', the option '{}' should be a {}. Found value {}. Trying a cast...".format( + self.user_config_file_path, section, key, cast, + value)) self.params[newkey][0] = cast(value) else: - print("Warning:: found invalid option or section in %s (ignored):" % self.user_config_file_path) + print( + "Warning:: found invalid option or section in %s (ignored):" % self.user_config_file_path) print(" %s %s" % (section, option)) def _get_home(self): @@ -156,9 +167,11 @@ def _get_home(self): if homedir is not None and os.path.isdir(homedir): return homedir return None + home = property(_get_home) - def _mkdirs(self, newdir, mode=0o777): + @staticmethod + def _mkdirs(newdir, mode=0o777): """from matplotlib mkdirs make directory *newdir* recursively, and set *mode*. 
Equivalent to :: @@ -192,24 +205,28 @@ def _get_and_create(self, sdir): def _get_config_dir(self): sdir = self.appdirs.user_config_dir return self._get_and_create(sdir) + user_config_dir = property(_get_config_dir, - doc="return directory of this configuration file") + doc="return directory of this configuration file") def _get_cache_dir(self): sdir = self.appdirs.user_cache_dir return self._get_and_create(sdir) + user_cache_dir = property(_get_cache_dir, - doc="return directory of the cache") + doc="return directory of the cache") def _get_config_file_path(self): - return self.user_config_dir + os.sep +self.config_file + return self.user_config_dir + os.sep + self.config_file + user_config_file_path = property(_get_config_file_path, - doc="return configuration filename (with fullpath)") + doc="return configuration filename (with fullpath)") def _get_config_file(self): return self.name + ".cfg" + config_file = property(_get_config_file, - doc="config filename (without path)") + doc="config filename (without path)") def init(self): """Reads the user_config_file and update params. @@ -220,11 +237,13 @@ def init(self): try: _ = self.user_config_dir except: - print("Could not retrieve or create the config file and/or directory in %s" % self.name) + print( + "Could not retrieve or create the config file and/or directory in %s" % self.name) try: _ = self.user_cache_dir except: - print("Could not retrieve or create the cache file and/or directory in %s" % self.name) + print( + "Could not retrieve or create the cache file and/or directory in %s" % self.name) self.read_user_config_file_and_update_params() def create_default_config_file(self, force=False): @@ -235,26 +254,30 @@ def create_default_config_file(self, force=False): # we need to copy the file into a backup file filename = self.user_config_file_path + '.bk' if os.path.exists(filename) and force is False: - print("""Trying to save the current config file {} into a backup file {}\n but it exists already. Please remove the backup file first or set the 'force' parameter to True""".format(self.user_config_file_path, filename)) + print( + """Trying to save the current config file {} into a backup file {}\n but it exists already. Please remove the backup file first or set the 'force' parameter to True""".format( + self.user_config_file_path, filename)) return else: shutil.copy(self.user_config_file_path, filename) # Now, we can rewrite the configuration file - sections = sorted(set([x.split(".")[0] for x in self._default_params.keys()])) + sections = sorted( + set([x.split(".")[0] for x in self._default_params.keys()])) if 'general' in sections: - sections = ["general"] + [x for x in sections if x!="general"] + sections = ["general"] + [x for x in sections if x != "general"] - fh = open(self.user_config_file_path, "w") # open and delete content + fh = open(self.user_config_file_path, "w") # open and delete content for section in sections: - fh.write("[" + section +"]\n") - options = [x.split(".")[1] for x in self._default_params.keys() if x.startswith(section+".")] + fh.write("[" + section + "]\n") + options = [x.split(".")[1] for x in self._default_params.keys() if + x.startswith(section + ".")] for option in options: key = section + '.' + option value = self._default_params[key] try: - fh.write("# {}\n{} = {}\n".format(value[2], - option, value[0])) + fh.write("# {}\n{} = {}\n".format(value[2], + option, value[0])) except: print('Could not write this value/option. 
skipped') print(value, option) @@ -274,30 +297,39 @@ def __init__(self): # some aliases def _get_caching(self): return self.params['cache.on'][0] + def _set_caching(self, value): self.params['cache.on'][0] = value + CACHING = property(_get_caching) def _get_fast_save(self): return self.params['cache.fast'][0] + FAST_SAVE = property(_get_fast_save) def _get_async_concurrent(self): return self.params['general.async_concurrent'][0] + CONCURRENT = property(_get_async_concurrent) def _get_async_threshold(self): return self.params['general.async_threshold'][0] + ASYNC_THRESHOLD = property(_get_async_threshold) def _get_timeout(self): return self.params['general.timeout'][0] + def _set_timeout(self, timeout): self.params['general.timeout'][0] = timeout + TIMEOUT = property(_get_timeout, _set_timeout) def _get_max_retries(self): return self.params['general.max_retries'][0] + def _set_max_retries(self, max_retries): self.params['general.max_retries'][0] = max_retries + MAX_RETRIES = property(_get_max_retries, _set_max_retries) diff --git a/src/openalea/agroservices/version.py b/src/openalea/agroservices/version.py new file mode 100644 index 0000000..e05fc45 --- /dev/null +++ b/src/openalea/agroservices/version.py @@ -0,0 +1,18 @@ +""" +Maintain version for this package. +Do not edit this file, use 'version' section of config. +""" +# {# pkglts, version +# -*- coding: utf-8 -*- + +MAJOR = 1 +"""(int) Version major component.""" + +MINOR = 1 +"""(int) Version minor component.""" + +POST = 4 +"""(int) Version post or bugfix component.""" + +__version__ = ".".join([str(s) for s in (MAJOR, MINOR, POST)]) +# #} \ No newline at end of file diff --git a/test/test_IPM.py b/test/test_IPM.py index 10586c4..1447941 100644 --- a/test/test_IPM.py +++ b/test/test_IPM.py @@ -1,10 +1,19 @@ -import json +import ujson from urllib.request import urlopen -from agroservices.ipm.ipm import IPM -from agroservices.ipm.datadir import datadir -import agroservices.ipm.fakers as fakers -def test_url(): +import pytest + +from openalea.agroservices.ipm.ipm import IPM +from openalea.agroservices.ipm.datadir import datadir +import openalea.agroservices.ipm.fakers as fakers + + +@pytest.fixture +def ipm(): + return IPM() + + +def var_test_url(): ipm = IPM() assert ipm.url is not None try: @@ -14,77 +23,85 @@ def test_url(): else: assert True + ipm_ok = False -if test_url(): +if var_test_url(): ipm = IPM() ipm_ok = True -def keys_exists(dict_, keys, test = all): + +def keys_exists(dict_, keys, test=all): return test(key in dict_ for key in keys) - -################# MetaDataService ################################# +################# MetaDataService ################################# -def test_get_parameter(): +def test_get_parameter(ipm): if ipm_ok: res = ipm.get_parameter() assert type(res) is list - assert keys_exists(res[0],('id','name','description','unit')) + assert keys_exists(res[0], ('id', 'name', 'description', 'unit')) -def test_get_qc(): + +def test_get_qc(ipm): if ipm_ok: res = ipm.get_qc() assert type(res) is list - assert keys_exists(res[0],('id','name','description')) + assert keys_exists(res[0], ('id', 'name', 'description')) + -def test_get_schema_weatherdata(): +def test_get_schema_weatherdata(ipm): if ipm_ok: res = ipm.get_schema_weatherdata() assert type(res) is dict -def test_post_schema_weatherdata_validate(): + +def test_post_schema_weatherdata_validate(ipm): if ipm_ok: - res = ipm.post_schema_weatherdata_validate(jsonfile=datadir + 'weather_data.json') + res = ipm.post_schema_weatherdata_validate( + 
jsonfile=datadir + 'weather_data.json') assert type(res) is dict - assert res["isValid"]==True + assert res["isValid"] == True + -def test_get_schema_fieldobservation(): - ipm=IPM() +def test_get_schema_fieldobservation(ipm): res = ipm.get_schema_fieldobservation() assert type(res) is dict - -def test_get_schema_modeloutput(): - ipm=IPM() + +def test_get_schema_modeloutput(ipm): res = ipm.get_schema_modeloutput() - assert res is not None + assert res is not None assert type(res) is dict -def test_post_schema_modeloutput_validate(): - ipm=IPM() - res = ipm.post_schema_modeloutput_validate(jsonfile=datadir + 'modeloutput.json') + +def test_post_schema_modeloutput_validate(ipm): + res = ipm.post_schema_modeloutput_validate( + jsonfile=datadir + 'modeloutput.json') assert type(res) is dict - assert res['isValid']==True + assert res['isValid'] == True + ######################### WeatherAdaptaterService ####################### -def test_get_weatheradapter(): +def test_get_weatheradapter(ipm): """Canonical test described in doc https://github.com/H2020-IPM-Decisions/WeatherService/blob/develop/docs/weather_service.md""" - ipm = IPM() params = dict(weatherStationId=5, - parameters='1002,2001,3002,3101', - interval=3600, - timeStart='2020-05-01T00:00:00+02:00', - timeEnd= '2020-05-02T00:00:00+02:00') + parameters='1002,2001,3002,3101', + interval=3600, + timeStart='2020-05-01T00:00:00+02:00', + timeEnd='2020-05-02T00:00:00+02:00') source = ipm.get_weatherdatasource('no.nibio.lmt') res = ipm.get_weatheradapter(source, params) assert type(res) is dict - assert all(key in res for key in ('timeStart', 'timeEnd', 'interval', 'weatherParameters', 'locationWeatherData')) - assert all(var in res['weatherParameters'] for var in [1002, 2001, 3002, 3101]) + assert all(key in res for key in ( + 'timeStart', 'timeEnd', 'interval', 'weatherParameters', + 'locationWeatherData')) + assert all( + var in res['weatherParameters'] for var in [1002, 2001, 3002, 3101]) assert res['timeStart'] == '2020-04-30T22:00:00Z' assert res['timeEnd'] == '2020-05-01T22:00:00Z' assert res['locationWeatherData'][0]['length'] == 25 @@ -93,99 +110,115 @@ def test_get_weatheradapter(): params = fakers.weather_adapter_params(source, station_id=5) res = ipm.get_weatheradapter(source, params) assert type(res) is dict - assert all(key in res for key in ('timeStart', 'timeEnd', 'interval', 'weatherParameters', 'locationWeatherData')) - - + assert all(key in res for key in ( + 'timeStart', 'timeEnd', 'interval', 'weatherParameters', + 'locationWeatherData')) #################### WeatherDataService ######################################### -def test_get_weatherdatasource(): - ipm=IPM() +def test_get_weatherdatasource(ipm): res = ipm.get_weatherdatasource() assert type(res) is dict - assert keys_exists(res[next(iter(res))],('name','description','public_URL','endpoint','needs_data_control','access_type','temporal','parameters','spatial')) + assert keys_exists(res[next(iter(res))], ( + 'name', 'description', 'public_URL', 'endpoint', 'needs_data_control', + 'access_type', 'temporal', 'parameters', 'spatial')) + -def test_get_weatherdatasource_location_point(): - ipm=IPM() - res = ipm.get_weatherdatasource_location_point(latitude=59.678835236960765,longitude=12.01629638671875, tolerance=0) +def test_get_weatherdatasource_location_point(ipm): + res = ipm.get_weatherdatasource_location_point(latitude=59.678835236960765, + longitude=12.01629638671875, + tolerance=0) assert type(res) is list - assert keys_exists(res[0],('id', 'name', 'description', 
'public_URL', 'endpoint', 'authentication_type', 'needs_data_control', 'access_type', 'priority', 'temporal', 'parameters', 'spatial', 'organization', 'active')) + assert keys_exists(res[0], ( + 'id', 'name', 'description', 'public_URL', 'endpoint', + 'authentication_type', 'needs_data_control', 'access_type', 'priority', + 'temporal', 'parameters', 'spatial', 'organization', 'active')) -def test_post_weatherdatasource_location(): - ipm=IPM() - res = ipm.post_weatherdatasource_location( + +def test_post_weatherdatasource_location(ipm): + res = ipm.post_weatherdatasource_location( tolerance=0, geoJsonfile=datadir + "GeoJson.json" - ) + ) assert type(res) is list - assert keys_exists(res[0].keys(),('id', 'name', 'description', 'public_URL', 'endpoint', 'authentication_type', 'needs_data_control', 'access_type', 'priority', 'temporal', 'parameters', 'spatial', 'organization', 'active') - ) - + assert keys_exists(res[0].keys(), ( + 'id', 'name', 'description', 'public_URL', 'endpoint', + 'authentication_type', 'needs_data_control', 'access_type', 'priority', + 'temporal', 'parameters', 'spatial', 'organization', 'active') + ) #################### DSSService #################################################### -def test_get_crop(): - ipm=IPM() +def test_get_crop(ipm): res = ipm.get_crop() assert type(res) is list -def test_get_cropCode(): - ipm=IPM() + +def test_get_cropCode(ipm): res = ipm.get_cropCode(cropCode="DAUCS") assert type(res) is dict item = next(iter(res.values())) - assert keys_exists(item,('models','id','version','name','url','languages','organization')) + assert keys_exists(item, ( + 'models', 'id', 'version', 'name', 'url', 'languages', 'organization')) item = next(iter(item['models'].values())) assert 'DAUCS' in item['crops'] -def test_get_dss(): - ipm=IPM() + +def test_get_dss(ipm): res = ipm.get_dss() assert type(res) is dict item = next(iter(res.values())) - assert keys_exists(item,('models','id','version','name','url','languages','organization')) + assert keys_exists(item, ( + 'models', 'id', 'version', 'name', 'url', 'languages', 'organization')) -def test_get_dssId(): - ipm=IPM() +def test_get_dssId(ipm): res = ipm.get_dssId(DSSId='no.nibio.vips') assert type(res) is dict - assert keys_exists(res.keys(),('models','id','version','name','url','languages','organization')) - assert res['id']=='no.nibio.vips' + assert keys_exists(res.keys(), ( + 'models', 'id', 'version', 'name', 'url', 'languages', 'organization')) + assert res['id'] == 'no.nibio.vips' + -def test_get_model(): - ipm=IPM() - res = ipm.get_model(DSSId='no.nibio.vips',ModelId='PSILARTEMP') +def test_get_model(ipm): + res = ipm.get_model(DSSId='no.nibio.vips', ModelId='PSILARTEMP') assert type(res) is dict - assert keys_exists(res.keys(),('name', 'id', 'version', 'type_of_decision', 'type_of_output', 'description_URL', 'description', 'citation', 'keywords', 'pests', 'crops', 'authors', 'execution', 'input', 'valid_spatial', 'output')) - assert res['id']== 'PSILARTEMP' + assert keys_exists(res.keys(), ( + 'name', 'id', 'version', 'type_of_decision', 'type_of_output', + 'description_URL', 'description', 'citation', 'keywords', 'pests', + 'crops', + 'authors', 'execution', 'input', 'valid_spatial', 'output')) + assert res['id'] == 'PSILARTEMP' -def test_get_pest(): - ipm=IPM() + +def test_get_pest(ipm): res = ipm.get_pest() assert type(res) is list -def test_get_pestCode(): - ipm=IPM() + +def test_get_pestCode(ipm): res = ipm.get_pestCode(pestCode='PSILRO') assert type(res) is dict item = next(iter(res.values())) - 
assert keys_exists(item,('models','id','version','name','url','languages','organization')) + assert keys_exists(item, ( + 'models', 'id', 'version', 'name', 'url', 'languages', 'organization')) item = next(iter(item['models'].values())) assert item['pests'] == ['PSILRO'] -def test_get_dss_location(): - ipm=IPM() - res = ipm.get_dss_location_point(latitude=59.67883523696076, longitude=12.01629638671875) + +def test_get_dss_location(ipm): + res = ipm.get_dss_location_point(latitude=59.67883523696076, + longitude=12.01629638671875) assert type(res) is dict item = next(iter(res.values())) - assert keys_exists(item,('models','id','version','name','url','languages','organization')) + assert keys_exists(item, ( + 'models', 'id', 'version', 'name', 'url', 'languages', 'organization')) -def test_post_dss_location(): - ipm = IPM() - res= ipm.post_dss_location(geoJsonfile=datadir + "GeoJson.json") + +def test_post_dss_location(ipm): + res = ipm.post_dss_location(geoJsonfile=datadir + "GeoJson.json") assert type(res) is list assert keys_exists(res[0].keys(), ( 'models', @@ -195,15 +228,15 @@ def test_post_dss_location(): 'url', 'languages', 'organization') - ) + ) -def test_run_model(): - ipm = IPM() - model = ipm.get_model(DSSId='no.nibio.vips',ModelId='PSILARTEMP') + +def test_run_model(ipm): + model = ipm.get_model(DSSId='no.nibio.vips', ModelId='PSILARTEMP') # run with predifined model input: path = datadir + 'model_input_psilartemp.json' with open(path) as json_file: - model_input = json.load(json_file) + model_input = ujson.load(json_file) res = ipm.run_model(model, model_input) assert isinstance(res, dict) assert 'locationResult' in res @@ -212,4 +245,3 @@ def test_run_model(): res = ipm.run_model(model, input_data) assert isinstance(res, dict) assert 'locationResult' in res - diff --git a/test/test_dss.py b/test/test_dss.py index a1e080a..19b5d36 100644 --- a/test/test_dss.py +++ b/test/test_dss.py @@ -1,12 +1,12 @@ # run pytest -rA --tb=no to see which service passes -import json +import ujson import pytest -from agroservices.ipm.datadir import datadir -from agroservices.ipm.ipm import IPM -import agroservices.ipm.fakers as ipm_fakers -from agroservices.services import Service +from openalea.agroservices.ipm.datadir import datadir +from openalea.agroservices.ipm.ipm import IPM +import openalea.agroservices.ipm.fakers as ipm_fakers +from openalea.agroservices.services import Service ipm = IPM() @@ -99,7 +99,7 @@ def test_run_model_field(): model = ipm.get_model(DSSId='no.nibio.vips', ModelId='PSILAROBSE') path = datadir + 'model_input_psilarobse.json' with open(path) as json_file: - model_input = json.load(json_file) + model_input = ujson.load(json_file) res = ipm.run_model(model, model_input) assert isinstance(res, dict) assert 'locationResult' in res diff --git a/test/test_dss_input_data_fakers.py b/test/test_dss_input_data_fakers.py index c4d4856..0ca2b5a 100644 --- a/test/test_dss_input_data_fakers.py +++ b/test/test_dss_input_data_fakers.py @@ -1,6 +1,6 @@ import pytest -from agroservices.ipm.ipm import IPM -from agroservices.ipm import fakers as ipm_fakers +from openalea.agroservices.ipm.ipm import IPM +from openalea.agroservices.ipm import fakers as ipm_fakers ipm = IPM() onthefly = ipm.get_dss('ONTHEFLY') diff --git a/test/test_phis.py b/test/test_phis.py index 38e314b..319b8a7 100644 --- a/test/test_phis.py +++ b/test/test_phis.py @@ -1,9 +1,14 @@ +import pytest import requests -from agroservices.phis.phis import Phis +from openalea.agroservices.phis.phis import Phis -def test_url(): 
- phis = Phis() +@pytest.fixture +def phis(): + return Phis() + + +def test_url(phis): assert phis.url is not None try: requests.get(phis.url) @@ -13,8 +18,7 @@ def test_url(): assert True -def test_token(): - phis = Phis() +def test_token(phis): json = '{ \ "identifier": "phenoarch@lepse.inra.fr",\ "password": "phenoarch"\ @@ -22,11 +26,43 @@ def test_token(): response, _ = phis.post_json('security/authenticate', json) token = response.json()['result']['token'] + print(token) assert len(token) > 1 -def test_ws_experiments(): - phis = Phis() +def test_ws_project(phis): + json = '{ \ + "identifier": "phenoarch@lepse.inra.fr",\ + "password": "phenoarch"\ + }' + + response, _ = phis.post_json('security/authenticate', json) + token = response.json()['result']['token'] + data = phis.ws_projects(session_id=token, project_name='EPPN2020') + print(data) + data = phis.ws_projects(session_id=token, project_name='G2WAS') + print(data) + data = phis.ws_projects(session_id=token, project_name='EXPOSE') + print(data) + + +def test_ws_germplasms(phis): + json = '{ \ + "identifier": "phenoarch@lepse.inra.fr",\ + "password": "phenoarch"\ + }' + + response, _ = phis.post_json('security/authenticate', json) + token = response.json()['result']['token'] + data = phis.ws_germplasms(session_id=token, + germplasm_uri="http://phenome.inrae.fr/m3p/id/germplasm/accesion.2369_udel") + print(data) + data = phis.ws_germplasms(session_id=token, + species_uri="http://aims.fao.org/aos/agrovoc/c_8504") + print(data) + + +def test_ws_species(phis): json = '{ \ "identifier": "phenoarch@lepse.inra.fr",\ "password": "phenoarch"\ @@ -34,6 +70,7 @@ def test_ws_experiments(): response, _ = phis.post_json('security/authenticate', json) token = response.json()['result']['token'] - data = phis.ws_experiments(experiment_uri='m3p:id/experiment/g2was2022', - session_id=token) - print(data) \ No newline at end of file + data_name = phis.ws_species(session_id=token, name="sorghum") + data_uri = phis.ws_species(session_id=token, + uri="http://aims.fao.org/aos/agrovoc/c_7247") + assert data_uri == data_name diff --git a/test/test_weatherdata.py b/test/test_weatherdata.py index 272c811..b53f825 100644 --- a/test/test_weatherdata.py +++ b/test/test_weatherdata.py @@ -1,8 +1,8 @@ # run pytest with -rP option to see which service passes import pytest -from agroservices.ipm.ipm import IPM -from agroservices.ipm.fakers import weather_adapter_params -from agroservices.credentials import get_credentials +from openalea.agroservices.ipm.ipm import IPM +from openalea.agroservices.ipm.fakers import weather_adapter_params +from openalea.agroservices.credentials import get_credentials def keys_exists(dict_, keys, test=all):
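# ----------------------------------------------------------------------------
# Illustrative usage sketch (editor's addition, not part of the patch above):
# the updated tests exercise the renamed namespace package. Assuming the
# public IPM weather service is reachable, the new import paths and the
# fakers helper shown in test_IPM.py are used roughly like this; the source
# id 'no.nibio.lmt' and station_id=5 mirror the values in that test.
from openalea.agroservices.ipm.ipm import IPM
import openalea.agroservices.ipm.fakers as fakers

ipm = IPM()
# look up one registered weather data source by its id
source = ipm.get_weatherdatasource('no.nibio.lmt')
# build a plausible parameter set for that source
params = fakers.weather_adapter_params(source, station_id=5)
# fetch the data; the response is a dict carrying 'timeStart', 'timeEnd',
# 'interval', 'weatherParameters' and 'locationWeatherData', as asserted
# in test_get_weatheradapter
res = ipm.get_weatheradapter(source, params)
print(sorted(res.keys()))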