From e76cc3ceba7a6db414e2751adf6779457c8b056e Mon Sep 17 00:00:00 2001 From: dekken Date: Thu, 26 Oct 2023 17:13:51 +0200 Subject: [PATCH] initial work on new wheel release mechanism --- .github/workflows/build_nix.yml | 11 +- .../{build_osx.yml.off => build_osx.yml} | 9 +- .github/workflows/build_win.yml | 3 +- .github/workflows/merge_master.yaml | 110 ++ .github/workflows/pythonpublish-linux.yml | 2 +- .github/workflows/pythonpublish-osx.yml | 6 +- .github/workflows/pythonpublish-win.yml | 4 +- .gitignore | 2 + .travis.yml.off | 48 - MANIFEST.in | 9 +- appveyor.yml | 47 - lib/mkn.yaml | 4 +- old_setup.py | 955 +++++++++++++++++ pyproject.toml | 33 + setup.py | 993 +----------------- sh/configure_env.sh | 5 +- sh/mkn.sh | 2 +- sh/swig.sh | 6 +- tick/base/tests/base_test.py | 2 +- 19 files changed, 1177 insertions(+), 1074 deletions(-) rename .github/workflows/{build_osx.yml.off => build_osx.yml} (82%) create mode 100644 .github/workflows/merge_master.yaml delete mode 100644 .travis.yml.off delete mode 100644 appveyor.yml create mode 100644 old_setup.py create mode 100644 pyproject.toml diff --git a/.github/workflows/build_nix.yml b/.github/workflows/build_nix.yml index 4c0720832..9a9d3f72a 100644 --- a/.github/workflows/build_nix.yml +++ b/.github/workflows/build_nix.yml @@ -11,6 +11,7 @@ env: TICK_DEBUG: 0 TICK_WERROR: 0 MKN_GCC_PREFERRED: 1 + PYTHON_EMBED_LIB: 1 jobs: build: @@ -21,10 +22,10 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: ['3.7', '3.9', '3.10', '3.11'] # '3.8' has "'tp_print' is deprecated [-Werror,-Wdeprecated-declarations]" + python-version: ['3.9', '3.10', '3.11', '3.12'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true @@ -43,11 +44,11 @@ jobs: gcc -v swig -version - - name: pip + - name: pip # tensorflow not released for 3.12 as of 26-OCT-2023 run: | python3 -m pip install wheel pip --upgrade python3 -m pip install -r requirements.txt - python3 -m pip install tensorflow-cpu + [ 
"${{ matrix.python-version }}" != "3.12" ] && python3 -m pip install tensorflow-cpu - name: build run: | @@ -61,4 +62,4 @@ jobs: - name: pytest run: | - python3 setup.py pytest + python3 -m unittest discover -v . "*_test.py" diff --git a/.github/workflows/build_osx.yml.off b/.github/workflows/build_osx.yml similarity index 82% rename from .github/workflows/build_osx.yml.off rename to .github/workflows/build_osx.yml index edb4f5343..b14f745ca 100644 --- a/.github/workflows/build_osx.yml.off +++ b/.github/workflows/build_osx.yml @@ -12,6 +12,7 @@ env: TICK_WERROR: 0 + jobs: build: name: Python ${{ matrix.python-version }} @@ -21,10 +22,10 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: ['3.7', '3.9', '3.10'] # '3.8' has "'tp_print' is deprecated [-Werror,-Wdeprecated-declarations]" + python-version: ['3.9', '3.10', '3.11', '3.12'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true @@ -48,6 +49,7 @@ jobs: python3 -m pip install wheel pip --upgrade python3 -m pip install -r requirements.txt + - name: build run: | curl -Lo mkn https://github.com/mkn/mkn/releases/download/latest/mkn_osx @@ -60,4 +62,5 @@ jobs: - name: pytest run: | - python -m unittest discover -v . "*_test.py" + echo "Tests not run until test failures assessed./" + # python -m unittest discover -v . 
"*_test.py" diff --git a/.github/workflows/build_win.yml b/.github/workflows/build_win.yml index 02f9e873d..e7f004d48 100644 --- a/.github/workflows/build_win.yml +++ b/.github/workflows/build_win.yml @@ -22,7 +22,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python-version: ['3.9', '3.10', '3.11', '3.12'] steps: - uses: actions/checkout@v3 @@ -49,6 +49,7 @@ jobs: python3 -m pip install wheel pip --upgrade python3 -m pip install -r requirements.txt + - name: build run: | # MINGW link interferres with MSVC link.exe bash -c "rm /bin/link" diff --git a/.github/workflows/merge_master.yaml b/.github/workflows/merge_master.yaml new file mode 100644 index 000000000..895fff853 --- /dev/null +++ b/.github/workflows/merge_master.yaml @@ -0,0 +1,110 @@ +name: Master merge + +# on: +# push: +# branches: [ master ] +# workflow_dispatch: + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +env: + TICK_DEBUG: 0 + TICK_WERROR: 0 + MKN_GCC_PREFERRED: 1 + MKN_COMPILE_THREADS: 2 + +jobs: + + build_linux: + runs-on: ubuntu-latest + container: quay.io/pypa/manylinux2014_x86_64 + + strategy: + fail-fast: false + max-parallel: 4 + matrix: + python-version: ['cp39-cp39', 'cp310-cp310', 'cp311-cp311', 'cp312-cp312'] + + steps: + - name: add Python dir to path + run: | + echo "/opt/python/${{ matrix.python-version }}/bin" >> $GITHUB_PATH + - uses: actions/checkout@v3 + with: + submodules: true + - name: Build for Python ${{ matrix.python-version }} + run: | + git config --global --add safe.directory '*' + python3 -V + python3 -m pip install -U pip + python3 -m pip install -U build twine wheel + python3 -m pip install -U -r requirements.txt + + curl -Lo mkn https://github.com/mkn/mkn/releases/download/latest/mkn_manylinux + chmod +x mkn + PATH=$PWD:$PATH KLOG=3 ./sh/mkn.sh + python3 -m build + python3 -m twine check dist/* + rename 'linux_x86_64' 'manylinux_2_28_x86_64' dist/*.whl + + - name: Make 
wheels universal + run: for wheel in $(ls dist/*.whl); do auditwheel repair $wheel; done + - name: Save packages as artifacts + uses: actions/upload-artifact@v3 + with: + name: wheels + path: wheelhouse/*.whl + + + test_wheels: + needs: [build_linux] + strategy: + matrix: + os: [ubuntu-latest] # ,macos-11, windows-latest + python-version: ['3.9', '3.10', '3.11', '3.12'] + runs-on: ${{ matrix.os }} + steps: + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + - uses: actions/download-artifact@v3 + with: + name: wheels + path: dist + - name: install wheel (Unix) + if: runner.os != 'Windows' + run: | + pip install --find-links $GITHUB_WORKSPACE/dist tick + - name: install wheel (Windows) + if: runner.os == 'Windows' + run: | + pip install --find-links $env:GITHUB_WORKSPACE\dist tick + - uses: actions/checkout@v3 + - name: run tests + run: | + python3 -V + pip install -r requirements.txt + python3 -m unittest discover -v . 
"*_test.py" + + upload_test_pypi: + needs: [test_wheels] + runs-on: ubuntu-latest + # upload to test PyPI on github pushes + # if: github.event_name == 'push' && github.repository_owner == 'Tick' + steps: + - uses: actions/download-artifact@v3 + with: + name: wheels + path: dist + - uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.TEST_PYPI_PASSWORD }} + repository-url: https://test.pypi.org/legacy/ + skip-existing: false diff --git a/.github/workflows/pythonpublish-linux.yml b/.github/workflows/pythonpublish-linux.yml index eb9505254..837e9d1c5 100644 --- a/.github/workflows/pythonpublish-linux.yml +++ b/.github/workflows/pythonpublish-linux.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest container: jeandet/manylinuxcpp2017 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true diff --git a/.github/workflows/pythonpublish-osx.yml b/.github/workflows/pythonpublish-osx.yml index acaf7c9a5..afacf329d 100644 --- a/.github/workflows/pythonpublish-osx.yml +++ b/.github/workflows/pythonpublish-osx.yml @@ -14,14 +14,14 @@ jobs: strategy: max-parallel: 4 matrix: - python-version: ['3.7', '3.8', '3.9', '3.10'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] name: Python ${{ matrix.python-version }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} architecture: x64 diff --git a/.github/workflows/pythonpublish-win.yml b/.github/workflows/pythonpublish-win.yml index 6fb5af9d8..4f38233ab 100644 --- a/.github/workflows/pythonpublish-win.yml +++ b/.github/workflows/pythonpublish-win.yml @@ -17,11 +17,11 @@ jobs: python-version: ['3.7', '3.8', '3.9', '3.10'] name: Python ${{ matrix.python-version }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true - - uses: actions/setup-python@v1 + - uses: 
actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} architecture: x64 diff --git a/.gitignore b/.gitignore index 3c23e2486..45d52312d 100644 --- a/.gitignore +++ b/.gitignore @@ -58,3 +58,5 @@ tools/benchmark/data env*/ venv*/ tickf.yml +dist +tick.egg-info/ diff --git a/.travis.yml.off b/.travis.yml.off deleted file mode 100644 index 7e60a20ba..000000000 --- a/.travis.yml.off +++ /dev/null @@ -1,48 +0,0 @@ -language: generic - -branches: - only: - - master - -# Cache is irrelevant for linux/docker builds, but useful to speed up osx -cache: - directories: - - $HOME/.pyenv - - $HOME/googletest - -matrix: - include: - - os: linux - services: docker - env: - - DOCKER_IMAGE=xdatainitiative/tick_debian:0.5.1 - - PYVER=3.6.8 - - os: linux - services: docker - env: - - DOCKER_IMAGE=xdatainitiative/tick_debian:0.5.1 - - PYVER=3.7.3 - - os: osx - language: generic - sudo: required - env: - - PYVER=3.6.8 - osx_image: xcode10.2 - - os: osx - language: generic - sudo: required - env: - - PYVER=3.7.3 - osx_image: xcode10.2 - -install: - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then source tools/travis/osx_install.sh; fi - - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull $DOCKER_IMAGE; fi - -script: - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then source tools/travis/osx_run.sh; fi - - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run -v `pwd`:/io -e PYVER=${PYVER} "$DOCKER_IMAGE" /io/tools/travis/docker_run.sh; fi - -notifications: - slack: - secure: 
m9B/ZFfTZRZTRT5yVXdHKOfO+f7Nnq5w9sewxMXFe0RQ/jaEFWt2b/jA9gTR1WJKvQ/05hmShMlz7UMCZguXqElYzsLvdribmV94TvmFpJdsoF80palZzzjRCm9lNHx66RWJ4l5p9wKAS+73tafDBBlGwaOqc234le+YxYsTIGgzeb69WS547LkHhbmPTgbhG+k8jAfE0OByXsSmaOTVyqbWerK1WF8KzlGx5WqWkGhECloTBLdcK0+oxelHYGdwI2CCRi/SfOfEyO6ceyV6hqE6Xt0g/RJhO4lEs9ahIVryUKUTpYxbgO0eWJ0LwM0OolfCAaphKHogd4Z6zHGr2EXmkXzElUcT5fMFhSD1/KxBXWxSl3TdAOpzaXG5WTYDV+38rM15FGhQD+q9monR3q7aHs/P7BkAcZBJGns61gTV1R9o+xMV4naMGH7/SRPxS9Jmz6h7Wg6WzoH2E2/Qq0YL+w3BDg6vpXNoBstmX/3D0G7S+SFmQcZqXQ18RGC5HqP1YA0gJyX89Bl8ZRBD4JTplh/Mnb20ORBNt+aaWzGEdOf7DRcGfc0Yl7Wyap92+dvX+wW7KPEUU04uRlKk/FHXA6U3NcsL9Le//+SsPveW7iCMF8e2jct9E5e7kCd2fkiExwH130zfhmDXApTBHVZGjiB/swx0X+utiXkQUbQ= diff --git a/MANIFEST.in b/MANIFEST.in index 8249414e5..aaad6eca8 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,8 +1,3 @@ -# Setuptools is not aware of source headers, or non-module SWIG files, so we -# grab them here -global-include *.h -global-include *.i -global-include *.inl +recursive-include tick * +recursive-exclude lib * -# Getting the Cereal header-only library -recursive-include lib/third_party/cereal/include *.h *.hpp \ No newline at end of file diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index acd78fc4b..000000000 --- a/appveyor.yml +++ /dev/null @@ -1,47 +0,0 @@ - -version: 0.6.{build} - -branches: - only: - - master - -max_jobs: 1 - -image: -- Visual Studio 2019 - -init: - - git config --global core.autocrlf input - -clone_depth: 1 - -hosts: - queue-server: 127.0.0.1 - db.server.com: 127.0.0.2 - -platform: x64 - -configuration: Release - -cache: - - C:\ProgramData\chocolatey\bin -> appveyor.yml # swig.exe - - C:\ProgramData\chocolatey\lib -> appveyor.yml # supporting swig Lib files. 
- -install: - - git submodule update --init - - curl -o mkn.exe -L https://github.com/Dekken/maiken/raw/binaries/win10_x64/mkn.exe - - IF NOT EXIST C:\ProgramData\chocolatey\bin\swig.exe choco install tools\windows\swig.4.0.0.nupkg --yes --limit-output - -build_script: - - SET PATH=C:\Program Files\Git\usr\bin;%PATH% - - cp "C:\Program Files (x86)\Windows Kits\10\bin\10.0.18362.0\x64\rc.exe" %CD% - - cp %CD%\rc.exe %CD%\lib - - call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvars64.bat" - - cl # print compiler version - # doesnt exist for somereason? - #- mkdir -p "C:\Program Files (x86)\Windows Kits\10\include\10.0.16299.0\cppwinrt" - - bash -c "PATH=\"$PWD:/c/Python37-x64/:/c/Program Files/Git/usr/bin:$PATH\"; tools/appveyor/run.sh" - -artifacts: - - path: "dist\\*.whl" - name: Wheels diff --git a/lib/mkn.yaml b/lib/mkn.yaml index 253f0b2fc..08d4afb75 100644 --- a/lib/mkn.yaml +++ b/lib/mkn.yaml @@ -30,7 +30,9 @@ property: nix_cargs: ${nixish_cargs} - bsd_cargs: ${nixish_cargs} + # sprintf deprecated but used in swig and only OSX complains + # https://developer.apple.com/forums/thread/714675 + bsd_cargs: ${nixish_cargs} -Wno-deprecated-declarations nixish_largs: -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time diff --git a/old_setup.py b/old_setup.py new file mode 100644 index 000000000..4e1ae3ea3 --- /dev/null +++ b/old_setup.py @@ -0,0 +1,955 @@ +#!/usr/bin/env python +# -*- coding: utf8 -*- + +# python setup.py build_ext --inplace + +""" +setup.py file +""" +import multiprocessing +import os +import pathlib +import platform +import re +import shutil +import subprocess +import sys +import sysconfig +import time +import unittest +import warnings + +from abc import ABC + +from setuptools import find_packages, setup, Command +from setuptools.command.install import install +from setuptools.extension import Extension + +# deprecated! 
+import distutils +from distutils.command.build import build +from distutils.command.clean import clean +from distutils import sysconfig as distconfig +# deprecated! + + + +from packaging import version + +force_blas = False +if "--force-blas" in sys.argv: + force_blas = True + sys.argv.remove("--force-blas") + +# Available debug flags +# +# DEBUG_C_ARRAY : count #allocations of C-arrays +# DEBUG_ARRAY : Track creation/destruction of Array objects +# DEBUG_SHAREDARRAY : Track creation/destruction of SharedArray objects +# DEBUG_VARRAY : Track VArray +# DEBUG_COSTLY_THROW : Enables some costly tests to throw error +# (such as Array[i] if i not in range) +# DEBUG_VERBOSE : Error messages from CPP extensions will include +# backtrace and error loc + +# debug_flags = ['DEBUG_C_ARRAY', 'DEBUG_ARRAY', 'DEBUG_COSTLY_THROW', +# 'DEBUG_SHAREDARRAY', 'DEBUG_VARRAY', 'DEBUG_VERBOSE'] + +TICK_DEBUG=1 +# allow disable debug +if os.environ.get('TICK_DEBUG') is not None: + TICK_DEBUG=os.environ['TICK_DEBUG'] + +TICK_WERROR=1 +# allow disable Werror +if os.environ.get('TICK_WERROR') is not None: + TICK_WERROR=os.environ['TICK_WERROR'] + +debug_flags = [] + +if TICK_DEBUG == 1 or TICK_DEBUG == "1": + debug_flags = ['DEBUG_COSTLY_THROW'] + +TICK_CMAKE_GENERATOR=None +if os.environ.get('TICK_CMAKE_GENERATOR') is not None: + TICK_CMAKE_GENERATOR=os.environ['TICK_CMAKE_GENERATOR'] + +# If true, add compilation flags to use fast (but maybe inaccurate) math +# See https://gcc.gnu.org/wiki/FloatingPointMath +use_fast_math = True + +version_info = sys.version_info + +python_min_ver = (3, 6, 0) +python_ver = (version_info.major, version_info.minor, version_info.micro) + +if python_ver < python_min_ver: + txt = 'Python version {0}.{1}.{2} ' \ + 'lower than the required version >= {3}.{4}.{5}.' + + warnings.warn(txt.format(*(python_ver + python_min_ver))) + +# The next block ensures that we build a link-time linkable dynamic library for +# OSX builds instead of a bundle. 
+# +# Snippet from http://stackoverflow.com/a/32765319/2299947 +if sys.platform == 'darwin': + vars = distconfig.get_config_vars() + vars['LDSHARED'] = vars['LDSHARED'].replace('-bundle', '-dynamiclib') + +# If we're installing via a wheel or not +is_building_tick = any(arg in ("build", + "build_ext", + "bdist", + "bdist_wheel", + "develop",) for arg in sys.argv) + +# Obtain the numpy include directory. +# This logic works across numpy versions. +numpy_available = False +numpy_include = "" +blas_info = {} +try: + import numpy as np + from numpy.distutils.system_info import get_info + + try: + numpy_include = np.get_include() + except AttributeError: + numpy_include = np.get_numpy_include() + + # Determine if we have an available BLAS implementation + if force_blas: # activated with build --force-blas + blas_info = get_info("blas_opt", 0) + elif platform.system() == 'Windows': + try: + with open(os.devnull, 'w') as devnull: + exitCode = subprocess.check_output( + "python tools/python/blas/check_cblas.py build_ext", + stderr=devnull, + shell=True) + blas_info = get_info("blas_opt", 0) + except subprocess.CalledProcessError as subError: + print("Error executing check_cblas.py - cblas not found") + else: + try: + with open(os.devnull, 'w') as devnull: + exitCode = subprocess.check_output( + "python tools/python/blas/check_mkl.py build_ext", + stderr=devnull, + shell=True) + blas_info = get_info("blas_opt", 0) + except subprocess.CalledProcessError as subError: + print("Error executing check_mkl.py - mkl not found") + + numpy_available = True +except ImportError as e: + if is_building_tick: + print(e) + warnings.warn("numpy is not installed:\n" + " - Include directory for numpy integration may not be " + "correct\n " + " - BLAS will not be used for this build\n") + +# sometimes disabling blas is desired +if os.environ.get('TICK_NO_OPTS') is not None: + if os.environ['TICK_NO_OPTS'] == '1': + blas_info = {} + +# By default, we assume that scipy uses 32 bit integers for 
indices in sparse +# arrays +sparse_indices_flag = "-DTICK_SPARSE_INDICES_INT32" +try: + from scipy.sparse import sputils + + sparsearray_type = sputils.get_index_dtype() + + if sparsearray_type == np.int64: + sparse_indices_flag = "-DTICK_SPARSE_INDICES_INT64" +except ImportError as e: + if is_building_tick and numpy_available: + print(e) + warnings.warn("scipy is not installed, unable to determine " + "sparse array integer type (assuming 32 bits)\n") + +if os.name == 'posix': + if platform.system() == 'Darwin': + os_version = platform.mac_ver()[0] + # keep only major + minor + os_version = '.'.join(os_version.split('.')[:2]) + + if version.parse(os_version) < version.parse('10.9'): + raise ValueError( + 'You need to have at least mac os 10.9 to build this package') + + # We set this variable manually because anaconda set it to a deprecated + # one + os.environ['MACOSX_DEPLOYMENT_TARGET'] = os_version + +# check for debug pyenv - PYVER must be exported as env var. Debug pyenv setup: +# PYENV=3.7.0 +# CFLAGS="-O0 -ggdb" CONFIGURE_OPTS="--enable-shared" pyenv install -kg $PYVER +# PYENV=${PYENV}-debug +# eval "$(pyenv init -)" +# pyenv global ${PYVER} +# pyenv local ${PYVER} + +PYVER = "" +PYVER_DBG = "" +if os.environ.get('PYVER') is not None: + PYVER = os.environ['PYVER'] + if PYVER.endswith("-debug"): + PYVER_DBG = "-pydebug" + +# Directory containing built .so files before they are moved either +# in source (with build flag --inplace) or to site-packages (by install) +# E.g. 
build/lib.macosx-10.11-x86_64-3.5 +build_dir = "build/lib.{}-{}"+PYVER_DBG +build_dir = build_dir.format(distutils.util.get_platform(), + ".".join(sys.version.split(".")[:2])) + +class SwigExtension(Extension): + """This only adds information about extension construction, useful for + library sharing + """ + + def __init__(self, *args, module_ref=None, ext_name=None, **kwargs): + super().__init__(*args, **kwargs) + self.module_ref = module_ref + self.ext_name = ext_name + +class SwigPath: + """Small class to handle module creation and check project structure + """ + + def __init__(self, module_path, extension_name): + module_path = os.path.normpath(module_path) + + + # Module C++ source directory (e.g. lib/cpp/tick/base) + self.src = os.path.join(module_path, 'src') + + # Module SWIG interface files directory (e.g. tick/array/swig) + self.swig = "lib/swig/" + module_path[5:] + + # Module build directory. Will contain generated .py files, and .so + # files if built with flag --inplace. + # + # E.g. tick/array/build + self.build = os.path.join(module_path, 'build') + + self.extension_name = extension_name + self.private_extension_name = '_' + extension_name + + # Transform folder path to module path + self.extension_path = self.build \ + .replace('.', '') \ + .replace('/', '.') \ + + '.' + self.private_extension_name + + # Filename of the produced .so file (e.g. _array.so) + self.lib_filename = '{}{}'.format(self.private_extension_name, + sysconfig.get_config_var('EXT_SUFFIX')) + +def create_extension(extension_name, module_dir, + cpp_files, h_files, swig_files, folders=[], + include_modules=None, extra_compile_args=None, + swig_opts=None): + ... 
+ +def create_extension(extension_name, module_dir, + cpp_files, h_files, swig_files, folders=[], + include_modules=None, extra_compile_args=None, + swig_opts=None): + swig_path = SwigPath(module_dir, extension_name) + extension_path = swig_path.extension_path + + # Add directory to filenames + def add_dir_name(dir_name, filenames): + return list(os.path.join(dir_name, filename) for filename in filenames) + + swig_files = add_dir_name("lib/swig/tick/" + module_dir[7:], swig_files) + + for folder in folders: + for file in os.listdir(folder): + file_path = os.path.join(folder, file) + if os.path.isfile(file_path): + _, ext = os.path.splitext(file) + if ext == '.cpp': + cpp_files += [os.path.join(folder, file)] + elif ext == ".txt": + pass + elif ext == ".inl": + pass + else: + warnings.warn('Included file %s in folder %s has an ' + 'unknown extension "%s"' % (file, + folder, + ext)) + + min_swig_opts = ['-py3', + '-c++', + '-Ilib/swig', + '-Ilib/include', + '-outdir', swig_path.build, + ] + + if swig_opts is None: + swig_opts = min_swig_opts + else: + swig_opts.extend(min_swig_opts) + + # Here we set the minimum compile flags. 
+ min_extra_compile_args = ["-D_FILE_OFFSET_BITS=64", + "-DPYTHON_LINK", + "-DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION", + '-Ilib/include', + sparse_indices_flag, + '-std=c++11', + '-O2', # -O3 is sometimes dangerous and has caused segfaults on Travis + '-DNDEBUG', # some assertions fail without this (TODO tbh) + ] + if TICK_DEBUG == 0 or TICK_DEBUG == "0": + min_extra_compile_args.append("-g0") + + if use_fast_math: + min_extra_compile_args.append('-ffast-math') + + if extra_compile_args is None: + extra_compile_args = min_extra_compile_args + else: + extra_compile_args.extend(min_extra_compile_args) + + extra_compile_args.append("-Wall") + + if platform.system() == 'Windows': + extra_compile_args.append("-DBUILDING_DLL") + elif TICK_WERROR == 1 or TICK_WERROR == "1": + ## Added -Wall to get all warnings and -Werror to treat them as errors + extra_compile_args.append("-Werror") + + libraries = [] + library_dirs = [] + runtime_library_dirs = [] + extra_link_args = [] + define_macros = [] + extra_include_dirs = ["include", "swig"] + + # Deal with (optional) BLAS + extra_compile_args.extend(blas_info.get("extra_compile_args", [])) + extra_link_args.extend(blas_info.get("extra_link_args", [])) + libraries.extend(blas_info.get("libraries", [])) + library_dirs.extend(blas_info.get("library_dirs", [])) + define_macros.extend(blas_info.get("define_macros", [])) + + if 'define_macros' in blas_info and \ + any(key == 'HAVE_CBLAS' for key, _ in blas_info['define_macros']): + define_macros.append(('TICK_USE_CBLAS', None)) + if "libraries" in blas_info and "mkl_rt" in blas_info["libraries"]: + define_macros.append(('TICK_USE_MKL', None)) + extra_include_dirs.extend(blas_info["include_dirs"]) + if platform.system() != 'Windows': + for lib_dir in blas_info["library_dirs"]: + extra_link_args.append( + "-Wl,-rpath,"+ lib_dir + ) + # if not Linux assume MacOS + if platform.system() != 'Linux': + rel_path = os.path.relpath(lib_dir, swig_path.build) + if os.path.exists(rel_path): + 
extra_link_args.append("-Wl,-rpath,@loader_path/"+ rel_path) + + if include_modules is None: + include_modules = [] + + # Include all what need for module link + for mod in include_modules: + if mod.__class__ != SwigPath: + raise ValueError("Unhandled class for included module") + + for opts in [swig_opts, extra_compile_args]: + opts.extend(["-I" + mod.swig]) + + # Because setuptools produces shared object files with non-standard + # On windows we need to use ".lib" rather than ".pyd" + # when linking libs to other libs + if platform.system() == 'Windows': + lib = os.path.join(build_dir, mod.build, "_"+mod.extension_name) + lib += os.path.splitext(sysconfig.get_config_var("EXT_SUFFIX"))[0] + libraries.append(lib) + elif platform.system() == 'Linux': + lib_dir = os.path.abspath(os.path.join(build_dir, mod.build)) + extra_link_args.append("-L"+lib_dir) + extra_link_args.append("-Wl,-rpath,"+lib_dir) + extra_link_args.append("-l:"+mod.lib_filename) + else: + extra_link_args.append(os.path.abspath( + os.path.join(build_dir, mod.build, mod.lib_filename))) + + # Make sure that the runtime linker can find shared object + # dependencies by using the relative path to the dependency library. + rel_path = os.path.relpath(mod.build, swig_path.build) + if platform.system() == 'Linux': + # $ORIGIN refers to the location of the current shared object file + # at runtime + runtime_library_dirs.append("\$ORIGIN/%s" % rel_path) + elif platform.system() == 'Windows': + pass + else: # Assuming non-Windows builds for now + # For OSX builds we use @loader_path instead + extra_link_args.append( + "-Wl,-rpath,%s" % '@loader_path/%s' % rel_path + ) + + # Setting the SONAME/install_name for the built libraries. 
It ensures that + # the runtime linker will have a chance to find the libraries even after + # they're moved (during install, for example) + filename = swig_path.lib_filename + if platform.system() == 'Linux': + extra_link_args.append('-Wl,-soname,%s' % filename) + elif platform.system() == 'Windows': + pass + else: + # For OSX the install_name needs to be prefixed with @rpath + extra_link_args.append('-Wl,-install_name,@rpath/%s' % filename) + + for df in debug_flags: + full_flag = "-D" + df + + extra_compile_args.append(full_flag) + + if df == 'DEBUG_COSTLY_THROW': + swig_opts.append(full_flag) + + # Adding Cereal serialization library + extra_include_dirs.append("lib/third_party/cereal/include") + + # Adding numpy include directory + if numpy_include: + extra_include_dirs.append(numpy_include) + + # This is to override the use of IMPLIB in distutils + # which puts the lib/exp files in the wrong directory + # see: https://github.com/python/cpython/blob/08bb8a41cc976343795bd0e241cd7388e9f44ad5/Lib/distutils/_msvccompiler.py#L467 + if platform.system() == 'Windows': + implib = "/IMPLIB:" + os.path.abspath( + os.path.join(build_dir, swig_path.build, "_"+extension_name)) + implib += os.path.splitext(sysconfig.get_config_var("EXT_SUFFIX"))[0] + extra_link_args.append(implib + ".lib") + + core_module = SwigExtension(extension_path, module_ref=swig_path, + sources=swig_files + cpp_files, + extra_compile_args=extra_compile_args, + extra_link_args=extra_link_args, + define_macros=define_macros, + swig_opts=swig_opts, + libraries=libraries, + include_dirs=extra_include_dirs, + library_dirs=library_dirs, + runtime_library_dirs=runtime_library_dirs, + depends=h_files, + language="c++", + ext_name=extension_name) + + return core_module + + +############################## +# Create extensions +############################## + +array_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/array" + ], + "swig_files": ["array_module.i"], + "module_dir": 
"./tick/array/", + "extension_name": "array" +} + +array_extension = create_extension(**array_extension_info) + +base_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/base", + "lib/cpp/base/math" + ], + "swig_files": ["base_module.i"], + "module_dir": "./tick/base", + "extension_name": "base", + "include_modules": [array_extension.module_ref] +} + +base_extension = create_extension(**base_extension_info) + +base_array_modules = [array_extension.module_ref, base_extension.module_ref] + +array_test_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": ["lib/cpp/array_test"], + "swig_files": ["array_test_module.i"], + "module_dir": "./tick/array_test/", + "extension_name": "array_test", + "include_modules": base_array_modules, +} + +test_extension = create_extension(**array_test_extension_info) + +random_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": ["lib/cpp/random"], + "swig_files": ["crandom_module.i"], + "module_dir": "./tick/random/", + "extension_name": "crandom", + "include_modules": base_array_modules +} + +random_extension = create_extension(**random_extension_info) + +base_model_core_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/base_model" + ], + "swig_files": ["base_model_module.i"], + "module_dir": "./tick/base_model/", + "extension_name": "base_model", + "include_modules": base_array_modules +} +base_model_core = create_extension(**base_model_core_info) + +linear_model_core_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/linear_model" + ], + "swig_files": ["linear_model_module.i"], + "module_dir": "./tick/linear_model/", + "extension_name": "linear_model", + "include_modules": base_array_modules + + [ + base_model_core.module_ref, + ] +} +linear_model_core = create_extension(**linear_model_core_info) + +hawkes_simulation_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/hawkes/simulation", + 
"lib/cpp/hawkes/simulation/hawkes_baselines", + "lib/cpp/hawkes/simulation/hawkes_kernels" + ], + "swig_files": [ + "hawkes_simulation_module.i" + ], + "module_dir": "./tick/hawkes/simulation/", + "extension_name": "hawkes_simulation", + "include_modules": base_array_modules + [random_extension.module_ref] +} +hawkes_simulation_extension = \ + create_extension(**hawkes_simulation_extension_info) + +hawkes_model_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/hawkes/model", + "lib/cpp/hawkes/model/base", + "lib/cpp/hawkes/model/list_of_realizations", + ], + "swig_files": [ + "hawkes_model_module.i" + ], + "module_dir": "./tick/hawkes/model/", + "extension_name": "hawkes_model", + "include_modules": base_array_modules + [base_model_core.module_ref] +} +hawkes_model_extension = create_extension(**hawkes_model_extension_info) + +hawkes_inference_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/hawkes/inference", + ], + "swig_files": [ + "hawkes_inference_module.i" + ], + "module_dir": "./tick/hawkes/inference/", + "extension_name": "hawkes_inference", + "include_modules": base_array_modules + + [ + base_model_core.module_ref, + hawkes_model_extension.module_ref, + ] +} +hawkes_inference_extension = create_extension(**hawkes_inference_extension_info) + +prox_core_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/prox" + ], + "swig_files": ["prox_module.i"], + "module_dir": "./tick/prox/", + "extension_name": "prox", + "include_modules": base_array_modules +} +prox_core = create_extension(**prox_core_info) + +robust_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/robust" + ], + "swig_files": ["robust_module.i"], + "module_dir": "./tick/robust/", + "extension_name": "robust", + "include_modules": base_array_modules + [ + base_model_core.module_ref,linear_model_core.module_ref] +} +robust_extension = create_extension(**robust_extension_info) + 
+solver_core_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/solver" + ], + "swig_files": ["solver_module.i"], + "module_dir": "./tick/solver/", + "extension_name": "solver", + "include_modules": base_array_modules + [random_extension.module_ref, + base_model_core.module_ref, + linear_model_core.module_ref, + prox_core.module_ref, + robust_extension.module_ref] +} +solver_core = create_extension(**solver_core_info) + +preprocessing_core_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/preprocessing" + ], + "swig_files": ["preprocessing_module.i"], + "module_dir": "./tick/preprocessing/", + "extension_name": "preprocessing", + "include_modules": base_array_modules +} + +preprocessing_core = create_extension(**preprocessing_core_info) + +survival_extension_info = { + "cpp_files": [], + "h_files": [], + "folders": [ + "lib/cpp/survival" + ], + "swig_files": ["survival_module.i"], + "module_dir": "./tick/survival/", + "extension_name": "survival", + "include_modules": base_array_modules + [base_model_core.module_ref] +} +survival_extension = create_extension(**survival_extension_info) + +tick_modules = [ + array_extension, base_extension, test_extension, + random_extension, base_model_core, linear_model_core, + hawkes_simulation_extension, hawkes_model_extension, + hawkes_inference_extension, + prox_core, preprocessing_core, + robust_extension, survival_extension, solver_core +] + +# Abstract class for tick-specific commands that need access to common build +# directories +class TickCommand(Command, ABC): + tick_dir = os.path.abspath(os.path.join(os.curdir, 'tick')) + cpp_build_dir = os.path.abspath(os.path.join(build_dir, 'cpptest')) + + user_options = [] + + def initialize_options(self): + """Set default values for options.""" + pass + + def finalize_options(self): + """Post-process options.""" + pass + + +class TickBuild(build): + swig_min_ver = (4, 0, 0) + + @staticmethod + def extract_swig_version(swig_ver_str): + m = 
re.search('SWIG Version (\d+).(\d+).(\d+)', swig_ver_str) + + if not m: + txt = 'Could not extract SWIG version from string: {0}' + + warnings.warn(txt.format(swig_ver_str)) + + return 0, 0, 0 + + return tuple(int(x) for x in m.groups()[0:3]) + + def run(self): + swig_ver = self.extract_swig_version( + str(subprocess.check_output(['swig', '-version']))) + + if swig_ver < self.swig_min_ver: + txt = 'SWIG version {0}.{1}.{2} ' \ + 'lower than the required version >= {3}.{4}.{5}. ' \ + 'This will likely cause build errors!' + + warnings.warn(txt.format(*(swig_ver + self.swig_min_ver))) + + self.run_command('build_ext') + build.run(self) + + +class TickInstall(install): + def run(self): + self.run_command('build_ext') + install.run(self) + + +class BuildRunCPPTests(TickCommand): + description = 'build and run tick C++ tests' + + def run(self): + self.run_command('makecpptest') + self.run_command('runcpptest') + + +class RunCPPTests(TickCommand): + description = 'run tick C++ tests' + + def run(self): + make_cmd = ['make', 'check'] + subprocess.check_call(make_cmd, cwd=self.cpp_build_dir) + + +class BuildCPPTests(TickCommand): + build_jobs = 1 + description = 'build tick C++ tests' + user_options = [ + ('build-jobs=', 'j', + 'number of parallel build jobs (default is number of available CPU ' + 'cores reported by Python)'), + ] + + def initialize_options(self): + """Set default values for options.""" + self.build_jobs = multiprocessing.cpu_count() + + def run(self): + relpath = os.path.relpath(self.tick_dir, self.cpp_build_dir) + cmake_exe = os.environ.get('TICK_CMAKE', 'cmake') + + + cmake_cmd = [cmake_exe, + '-DTICK_REBUILD_LIBS=OFF', + '-DBENCHMARK=OFF', + relpath + '/../lib'] + + if TICK_CMAKE_GENERATOR is not None: + cmake_cmd.extend(['-G', '{}'.format(TICK_CMAKE_GENERATOR)]) + + # Feed the path to the built C++ extensions so CMake does not have to + # build them again + for mod in tick_modules: + full_path = os.path.abspath( + os.path.join(mod.module_ref.build, 
mod.module_ref.lib_filename)) + + cmake_cmd.append( + '-DTICK_LIB_{}={}'.format(mod.ext_name.upper(), full_path)) + + if 'define_macros' in blas_info and \ + any(key == 'HAVE_CBLAS' for key, _ in blas_info['define_macros']): + cmake_cmd.append('-DUSE_BLAS=ON') + + os.makedirs(os.path.join(self.cpp_build_dir, 'cpptest'), exist_ok=True) + subprocess.check_call(cmake_cmd, cwd=self.cpp_build_dir) + + make_cmd = ['make', 'VERBOSE=1', 'all', '-j{}'.format(self.build_jobs)] + subprocess.check_call(make_cmd, cwd=self.cpp_build_dir) + + +class RunCPPLint(TickCommand): + description = 'run cpplint on tick C++ source files' + + CPPLINT_DIRS = [ + 'lib/include', + 'lib/cpp', + ] + + def run(self): + try: + import cpplint as cl + + cl_state = cl._cpplint_state + error_count = 0 + + for dir in self.CPPLINT_DIRS: + print("Processing {}".format(dir)) + + cl_state.ResetErrorCounts() + filenames = list(pathlib.Path(dir).glob('**/*.h')) + \ + list(pathlib.Path(dir).glob('**/*.cpp')) + + for filename in filenames: + cl.ProcessFile(str(filename), cl_state.verbose_level) + cl_state.PrintErrorCounts() + + error_count += cl_state.error_count + print('') + + if error_count > 0: + raise RuntimeError("Codestyle check by cpplint failed") + + except ImportError: + warnings.warn("Stylecheck by cpplint failed because cpplint " + "is not installed as a Python module") + + +class RunPyLint(TickCommand): + # We need to define if and how we run pylint + + description = 'run tick PyLint codestyle check' + start_dir = '.' + + @staticmethod + def run(): + raise NotImplementedError('Running pylint from setup.py' + 'not supported yet') + +class RunPyTests(TickCommand): + description = 'run tick Python tests' + start_dir = '.' + + user_options = [ + ('start-dir=', 's', + 'directory to start looking for Python tests (e.g. tick/simulation)'), + ] + + def initialize_options(self): + """Set default values for options.""" + self.start_dir = '.' 
+ + def run(self): + if platform.system() == 'Windows': + print("The pytest command has issues with threads on Windows") + print('Instead please run:') + print('python3 -m unittest discover -v . "*_test.py"') + exit(1) + loader = unittest.TestLoader() + alltests = loader.discover(self.start_dir, pattern="*_test.py") + result = unittest.TextTestRunner(verbosity=2).run(alltests) + sys.exit(not result.wasSuccessful()) + + +class RunTestSuites(TickCommand): + description = 'run tick Python and C++ tests' + + def run(self): + self.run_command('cpptest') + self.run_command('pytest') + + +class CleanTick(clean): + description = 'cleans all generated and built files' + + def run(self): + seconds_until_clean = 5 + + print("Cleaning source directories in %d seconds..." % + seconds_until_clean) + + time.sleep(seconds_until_clean) + + clean.run(self) + + shutil.rmtree(build_dir, ignore_errors=True) + + patterns = [ + '**/*.so', + '**/*_wrap.cpp', + '**/__pycache__/*.pyc', + '**/__pycache__', + ] + + for paths in (pathlib.Path(os.curdir).glob(p) for p in patterns): + for path in paths: + print("Deleting {}".format(path)) + + if path.is_dir(): + path.rmdir() + else: + path.unlink() + + +setup(name="tick", + version='0.7.0.1', + author="Emmanuel Bacry, " + "Stephane Gaiffas, " + "Martin Bompaire, " + "Søren V. 
Poulsen, " + "Maryan Morel, " + "Simon Bussy, " + "Philip Deegan", + author_email='martin.bompaire@polytechnique.edu, ' + 'philip.deegan@polytechnique.edu', + url="https://x-datainitiative.github.io/tick/", + description="Module for statistical learning, with a particular emphasis " + "on time-dependent modelling", + ext_modules=tick_modules, + install_requires=['numpy', + 'scipy', + 'numpydoc', + 'matplotlib', + 'sphinx', + 'pandas', + 'dill', + 'scikit-learn'], + packages=find_packages(), + cmdclass={'build': TickBuild, + 'install': TickInstall, + 'makecpptest': BuildCPPTests, + 'runcpptest': RunCPPTests, + 'cpptest': BuildRunCPPTests, + 'cpplint': RunCPPLint, + 'pytest': RunPyTests, + 'pylint': RunPyLint, + 'test': RunTestSuites, + 'clean': CleanTick}, + classifiers=['Development Status :: 3 - Alpha', + 'Intended Audience :: Science/Research', + 'Intended Audience :: Developers', + 'Programming Language :: C++', + 'Programming Language :: Python', + 'Topic :: Software Development', + 'Topic :: Scientific/Engineering', + 'Operating System :: POSIX', + 'Operating System :: Unix', + 'Operating System :: MacOS', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'License :: OSI Approved :: BSD License'], + ) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..6f1834ab9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,33 @@ +[project] +name = "tick" +version = "0.7.0.2" + +dependencies = [ + 'numpy', + 'numpydoc', + 'scipy', + 'matplotlib', + 'scikit-learn', + 'dill', + 'pandas', + 'packaging', +] + +description = "Module for statistical learning, with a particular emphasis on time-dependent modelling" +readme = "README.md" +requires-python = ">=3.7" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] + +[project.urls] +"Homepage" = 
"https://x-datainitiative.github.io/tick/" +"Bug Tracker" = "https://github.com/X-DataInitiative/tick/issues" + +[build-system] +requires = [ + "setuptools>=42", "wheel", "packaging", +] + diff --git a/setup.py b/setup.py index 61a9393b4..2d05e7b09 100644 --- a/setup.py +++ b/setup.py @@ -1,950 +1,45 @@ -#!/usr/bin/env python -# -*- coding: utf8 -*- -# python setup.py build_ext --inplace - -""" -setup.py file -""" -import multiprocessing -import os -import pathlib -import platform -import re -import shutil -import subprocess -import sys -import sysconfig -import time -import unittest -import warnings - -from abc import ABC - -from setuptools import find_packages, setup, Command -from setuptools.command.install import install -from setuptools.extension import Extension - -# deprecated! -import distutils -from distutils.command.build import build -from distutils.command.clean import clean -from distutils import sysconfig as distconfig -# deprecated! - - - -from packaging import version - -force_blas = False -if "--force-blas" in sys.argv: - force_blas = True - sys.argv.remove("--force-blas") - -# Available debug flags -# -# DEBUG_C_ARRAY : count #allocations of C-arrays -# DEBUG_ARRAY : Track creation/destruction of Array objects -# DEBUG_SHAREDARRAY : Track creation/destruction of SharedArray objects -# DEBUG_VARRAY : Track VArray -# DEBUG_COSTLY_THROW : Enables some costly tests to throw error -# (such as Array[i] if i not in range) -# DEBUG_VERBOSE : Error messages from CPP extensions will include -# backtrace and error loc - -# debug_flags = ['DEBUG_C_ARRAY', 'DEBUG_ARRAY', 'DEBUG_COSTLY_THROW', -# 'DEBUG_SHAREDARRAY', 'DEBUG_VARRAY', 'DEBUG_VERBOSE'] - -TICK_DEBUG=1 -# allow disable debug -if os.environ.get('TICK_DEBUG') is not None: - TICK_DEBUG=os.environ['TICK_DEBUG'] - -TICK_WERROR=1 -# allow disable Werror -if os.environ.get('TICK_WERROR') is not None: - TICK_WERROR=os.environ['TICK_WERROR'] - -debug_flags = [] - -if TICK_DEBUG == 1 or TICK_DEBUG == "1": - 
debug_flags = ['DEBUG_COSTLY_THROW'] - -TICK_CMAKE_GENERATOR=None -if os.environ.get('TICK_CMAKE_GENERATOR') is not None: - TICK_CMAKE_GENERATOR=os.environ['TICK_CMAKE_GENERATOR'] - -# If true, add compilation flags to use fast (but maybe inaccurate) math -# See https://gcc.gnu.org/wiki/FloatingPointMath -use_fast_math = True - -version_info = sys.version_info - -python_min_ver = (3, 6, 0) -python_ver = (version_info.major, version_info.minor, version_info.micro) - -if python_ver < python_min_ver: - txt = 'Python version {0}.{1}.{2} ' \ - 'lower than the required version >= {3}.{4}.{5}.' - - warnings.warn(txt.format(*(python_ver + python_min_ver))) - -# The next block ensures that we build a link-time linkable dynamic library for -# OSX builds instead of a bundle. -# -# Snippet from http://stackoverflow.com/a/32765319/2299947 -if sys.platform == 'darwin': - vars = distconfig.get_config_vars() - vars['LDSHARED'] = vars['LDSHARED'].replace('-bundle', '-dynamiclib') - -# If we're installing via a wheel or not -is_building_tick = any(arg in ("build", - "build_ext", - "bdist", - "bdist_wheel", - "develop",) for arg in sys.argv) - -# Obtain the numpy include directory. -# This logic works across numpy versions. 
-numpy_available = False -numpy_include = "" -blas_info = {} -try: - import numpy as np - from numpy.distutils.system_info import get_info - - try: - numpy_include = np.get_include() - except AttributeError: - numpy_include = np.get_numpy_include() - - # Determine if we have an available BLAS implementation - if force_blas: # activated with build --force-blas - blas_info = get_info("blas_opt", 0) - elif platform.system() == 'Windows': - try: - with open(os.devnull, 'w') as devnull: - exitCode = subprocess.check_output( - "python tools/python/blas/check_cblas.py build_ext", - stderr=devnull, - shell=True) - blas_info = get_info("blas_opt", 0) - except subprocess.CalledProcessError as subError: - print("Error executing check_cblas.py - cblas not found") - else: - try: - with open(os.devnull, 'w') as devnull: - exitCode = subprocess.check_output( - "python tools/python/blas/check_mkl.py build_ext", - stderr=devnull, - shell=True) - blas_info = get_info("blas_opt", 0) - except subprocess.CalledProcessError as subError: - print("Error executing check_mkl.py - mkl not found") - - numpy_available = True -except ImportError as e: - if is_building_tick: - print(e) - warnings.warn("numpy is not installed:\n" - " - Include directory for numpy integration may not be " - "correct\n " - " - BLAS will not be used for this build\n") - -# sometimes disabling blas is desired -if os.environ.get('TICK_NO_OPTS') is not None: - if os.environ['TICK_NO_OPTS'] == '1': - blas_info = {} - -# By default, we assume that scipy uses 32 bit integers for indices in sparse -# arrays -sparse_indices_flag = "-DTICK_SPARSE_INDICES_INT32" -try: - from scipy.sparse import sputils - - sparsearray_type = sputils.get_index_dtype() - - if sparsearray_type == np.int64: - sparse_indices_flag = "-DTICK_SPARSE_INDICES_INT64" -except ImportError as e: - if is_building_tick and numpy_available: - print(e) - warnings.warn("scipy is not installed, unable to determine " - "sparse array integer type (assuming 32 
bits)\n") - -if os.name == 'posix': - if platform.system() == 'Darwin': - os_version = platform.mac_ver()[0] - # keep only major + minor - os_version = '.'.join(os_version.split('.')[:2]) - - if version.parse(os_version) < version.parse('10.9'): - raise ValueError( - 'You need to have at least mac os 10.9 to build this package') - - # We set this variable manually because anaconda set it to a deprecated - # one - os.environ['MACOSX_DEPLOYMENT_TARGET'] = os_version - -# check for debug pyenv - PYVER must be exported as env var. Debug pyenv setup: -# PYENV=3.7.0 -# CFLAGS="-O0 -ggdb" CONFIGURE_OPTS="--enable-shared" pyenv install -kg $PYVER -# PYENV=${PYENV}-debug -# eval "$(pyenv init -)" -# pyenv global ${PYVER} -# pyenv local ${PYVER} - -PYVER = "" -PYVER_DBG = "" -if os.environ.get('PYVER') is not None: - PYVER = os.environ['PYVER'] - if PYVER.endswith("-debug"): - PYVER_DBG = "-pydebug" - -# Directory containing built .so files before they are moved either -# in source (with build flag --inplace) or to site-packages (by install) -# E.g. build/lib.macosx-10.11-x86_64-3.5 -build_dir = "build/lib.{}-{}"+PYVER_DBG -build_dir = build_dir.format(distutils.util.get_platform(), - ".".join(sys.version.split(".")[:2])) - -class SwigExtension(Extension): - """This only adds information about extension construction, useful for - library sharing - """ - - def __init__(self, *args, module_ref=None, ext_name=None, **kwargs): - super().__init__(*args, **kwargs) - self.module_ref = module_ref - self.ext_name = ext_name - -class SwigPath: - """Small class to handle module creation and check project structure - """ - - def __init__(self, module_path, extension_name): - module_path = os.path.normpath(module_path) - - - # Module C++ source directory (e.g. lib/cpp/tick/base) - self.src = os.path.join(module_path, 'src') - - # Module SWIG interface files directory (e.g. tick/array/swig) - self.swig = "lib/swig/" + module_path[5:] - - # Module build directory. 
Will contain generated .py files, and .so - # files if built with flag --inplace. - # - # E.g. tick/array/build - self.build = os.path.join(module_path, 'build') - - self.extension_name = extension_name - self.private_extension_name = '_' + extension_name - - # Transform folder path to module path - self.extension_path = self.build \ - .replace('.', '') \ - .replace('/', '.') \ - + '.' + self.private_extension_name - - # Filename of the produced .so file (e.g. _array.so) - self.lib_filename = '{}{}'.format(self.private_extension_name, - sysconfig.get_config_var('EXT_SUFFIX')) - - -def create_extension(extension_name, module_dir, - cpp_files, h_files, swig_files, folders=[], - include_modules=None, extra_compile_args=None, - swig_opts=None): - swig_path = SwigPath(module_dir, extension_name) - extension_path = swig_path.extension_path - - # Add directory to filenames - def add_dir_name(dir_name, filenames): - return list(os.path.join(dir_name, filename) for filename in filenames) - - swig_files = add_dir_name("lib/swig/tick/" + module_dir[7:], swig_files) - - for folder in folders: - for file in os.listdir(folder): - file_path = os.path.join(folder, file) - if os.path.isfile(file_path): - _, ext = os.path.splitext(file) - if ext == '.cpp': - cpp_files += [os.path.join(folder, file)] - elif ext == ".txt": - pass - elif ext == ".inl": - pass - else: - warnings.warn('Included file %s in folder %s has an ' - 'unknown extension "%s"' % (file, - folder, - ext)) - - min_swig_opts = ['-py3', - '-c++', - '-Ilib/swig', - '-Ilib/include', - '-outdir', swig_path.build, - ] - - if swig_opts is None: - swig_opts = min_swig_opts - else: - swig_opts.extend(min_swig_opts) - - # Here we set the minimum compile flags. 
- min_extra_compile_args = ["-D_FILE_OFFSET_BITS=64", - "-DPYTHON_LINK", - "-DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION", - '-Ilib/include', - sparse_indices_flag, - '-std=c++11', - '-O2', # -O3 is sometimes dangerous and has caused segfaults on Travis - '-DNDEBUG', # some assertions fail without this (TODO tbh) - ] - if TICK_DEBUG == 0 or TICK_DEBUG == "0": - min_extra_compile_args.append("-g0") - - if use_fast_math: - min_extra_compile_args.append('-ffast-math') - - if extra_compile_args is None: - extra_compile_args = min_extra_compile_args - else: - extra_compile_args.extend(min_extra_compile_args) - - extra_compile_args.append("-Wall") - - if platform.system() == 'Windows': - extra_compile_args.append("-DBUILDING_DLL") - elif TICK_WERROR == 1 or TICK_WERROR == "1": - ## Added -Wall to get all warnings and -Werror to treat them as errors - extra_compile_args.append("-Werror") - - libraries = [] - library_dirs = [] - runtime_library_dirs = [] - extra_link_args = [] - define_macros = [] - extra_include_dirs = ["include", "swig"] - - # Deal with (optional) BLAS - extra_compile_args.extend(blas_info.get("extra_compile_args", [])) - extra_link_args.extend(blas_info.get("extra_link_args", [])) - libraries.extend(blas_info.get("libraries", [])) - library_dirs.extend(blas_info.get("library_dirs", [])) - define_macros.extend(blas_info.get("define_macros", [])) - - if 'define_macros' in blas_info and \ - any(key == 'HAVE_CBLAS' for key, _ in blas_info['define_macros']): - define_macros.append(('TICK_USE_CBLAS', None)) - if "libraries" in blas_info and "mkl_rt" in blas_info["libraries"]: - define_macros.append(('TICK_USE_MKL', None)) - extra_include_dirs.extend(blas_info["include_dirs"]) - if platform.system() != 'Windows': - for lib_dir in blas_info["library_dirs"]: - extra_link_args.append( - "-Wl,-rpath,"+ lib_dir - ) - # if not Linux assume MacOS - if platform.system() != 'Linux': - rel_path = os.path.relpath(lib_dir, swig_path.build) - if os.path.exists(rel_path): - 
extra_link_args.append("-Wl,-rpath,@loader_path/"+ rel_path) - - if include_modules is None: - include_modules = [] - - # Include all what need for module link - for mod in include_modules: - if mod.__class__ != SwigPath: - raise ValueError("Unhandled class for included module") - - for opts in [swig_opts, extra_compile_args]: - opts.extend(["-I" + mod.swig]) - - # Because setuptools produces shared object files with non-standard - # On windows we need to use ".lib" rather than ".pyd" - # when linking libs to other libs - if platform.system() == 'Windows': - lib = os.path.join(build_dir, mod.build, "_"+mod.extension_name) - lib += os.path.splitext(sysconfig.get_config_var("EXT_SUFFIX"))[0] - libraries.append(lib) - elif platform.system() == 'Linux': - lib_dir = os.path.abspath(os.path.join(build_dir, mod.build)) - extra_link_args.append("-L"+lib_dir) - extra_link_args.append("-Wl,-rpath,"+lib_dir) - extra_link_args.append("-l:"+mod.lib_filename) - else: - extra_link_args.append(os.path.abspath( - os.path.join(build_dir, mod.build, mod.lib_filename))) - - # Make sure that the runtime linker can find shared object - # dependencies by using the relative path to the dependency library. - rel_path = os.path.relpath(mod.build, swig_path.build) - if platform.system() == 'Linux': - # $ORIGIN refers to the location of the current shared object file - # at runtime - runtime_library_dirs.append("\$ORIGIN/%s" % rel_path) - elif platform.system() == 'Windows': - pass - else: # Assuming non-Windows builds for now - # For OSX builds we use @loader_path instead - extra_link_args.append( - "-Wl,-rpath,%s" % '@loader_path/%s' % rel_path - ) - - # Setting the SONAME/install_name for the built libraries. 
It ensures that - # the runtime linker will have a chance to find the libraries even after - # they're moved (during install, for example) - filename = swig_path.lib_filename - if platform.system() == 'Linux': - extra_link_args.append('-Wl,-soname,%s' % filename) - elif platform.system() == 'Windows': - pass - else: - # For OSX the install_name needs to be prefixed with @rpath - extra_link_args.append('-Wl,-install_name,@rpath/%s' % filename) - - for df in debug_flags: - full_flag = "-D" + df - - extra_compile_args.append(full_flag) - - if df == 'DEBUG_COSTLY_THROW': - swig_opts.append(full_flag) - - # Adding Cereal serialization library - extra_include_dirs.append("lib/third_party/cereal/include") - - # Adding numpy include directory - if numpy_include: - extra_include_dirs.append(numpy_include) - - # This is to override the use of IMPLIB in distutils - # which puts the lib/exp files in the wrong directory - # see: https://github.com/python/cpython/blob/08bb8a41cc976343795bd0e241cd7388e9f44ad5/Lib/distutils/_msvccompiler.py#L467 - if platform.system() == 'Windows': - implib = "/IMPLIB:" + os.path.abspath( - os.path.join(build_dir, swig_path.build, "_"+extension_name)) - implib += os.path.splitext(sysconfig.get_config_var("EXT_SUFFIX"))[0] - extra_link_args.append(implib + ".lib") - - core_module = SwigExtension(extension_path, module_ref=swig_path, - sources=swig_files + cpp_files, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - define_macros=define_macros, - swig_opts=swig_opts, - libraries=libraries, - include_dirs=extra_include_dirs, - library_dirs=library_dirs, - runtime_library_dirs=runtime_library_dirs, - depends=h_files, - language="c++", - ext_name=extension_name) - - return core_module - - -############################## -# Create extensions -############################## - -array_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/array" - ], - "swig_files": ["array_module.i"], - "module_dir": 
"./tick/array/", - "extension_name": "array" -} - -array_extension = create_extension(**array_extension_info) - -base_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/base", - "lib/cpp/base/math" - ], - "swig_files": ["base_module.i"], - "module_dir": "./tick/base", - "extension_name": "base", - "include_modules": [array_extension.module_ref] -} - -base_extension = create_extension(**base_extension_info) - -base_array_modules = [array_extension.module_ref, base_extension.module_ref] - -array_test_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": ["lib/cpp/array_test"], - "swig_files": ["array_test_module.i"], - "module_dir": "./tick/array_test/", - "extension_name": "array_test", - "include_modules": base_array_modules, -} - -test_extension = create_extension(**array_test_extension_info) - -random_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": ["lib/cpp/random"], - "swig_files": ["crandom_module.i"], - "module_dir": "./tick/random/", - "extension_name": "crandom", - "include_modules": base_array_modules -} - -random_extension = create_extension(**random_extension_info) - -base_model_core_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/base_model" - ], - "swig_files": ["base_model_module.i"], - "module_dir": "./tick/base_model/", - "extension_name": "base_model", - "include_modules": base_array_modules -} -base_model_core = create_extension(**base_model_core_info) - -linear_model_core_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/linear_model" - ], - "swig_files": ["linear_model_module.i"], - "module_dir": "./tick/linear_model/", - "extension_name": "linear_model", - "include_modules": base_array_modules + - [ - base_model_core.module_ref, - ] -} -linear_model_core = create_extension(**linear_model_core_info) - -hawkes_simulation_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/hawkes/simulation", - 
"lib/cpp/hawkes/simulation/hawkes_baselines", - "lib/cpp/hawkes/simulation/hawkes_kernels" - ], - "swig_files": [ - "hawkes_simulation_module.i" - ], - "module_dir": "./tick/hawkes/simulation/", - "extension_name": "hawkes_simulation", - "include_modules": base_array_modules + [random_extension.module_ref] -} -hawkes_simulation_extension = \ - create_extension(**hawkes_simulation_extension_info) - -hawkes_model_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/hawkes/model", - "lib/cpp/hawkes/model/base", - "lib/cpp/hawkes/model/list_of_realizations", - ], - "swig_files": [ - "hawkes_model_module.i" - ], - "module_dir": "./tick/hawkes/model/", - "extension_name": "hawkes_model", - "include_modules": base_array_modules + [base_model_core.module_ref] -} -hawkes_model_extension = create_extension(**hawkes_model_extension_info) - -hawkes_inference_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/hawkes/inference", - ], - "swig_files": [ - "hawkes_inference_module.i" - ], - "module_dir": "./tick/hawkes/inference/", - "extension_name": "hawkes_inference", - "include_modules": base_array_modules + - [ - base_model_core.module_ref, - hawkes_model_extension.module_ref, - ] -} -hawkes_inference_extension = create_extension(**hawkes_inference_extension_info) - -prox_core_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/prox" - ], - "swig_files": ["prox_module.i"], - "module_dir": "./tick/prox/", - "extension_name": "prox", - "include_modules": base_array_modules -} -prox_core = create_extension(**prox_core_info) - -robust_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/robust" - ], - "swig_files": ["robust_module.i"], - "module_dir": "./tick/robust/", - "extension_name": "robust", - "include_modules": base_array_modules + [ - base_model_core.module_ref,linear_model_core.module_ref] -} -robust_extension = create_extension(**robust_extension_info) - 
-solver_core_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/solver" - ], - "swig_files": ["solver_module.i"], - "module_dir": "./tick/solver/", - "extension_name": "solver", - "include_modules": base_array_modules + [random_extension.module_ref, - base_model_core.module_ref, - linear_model_core.module_ref, - prox_core.module_ref, - robust_extension.module_ref] -} -solver_core = create_extension(**solver_core_info) - -preprocessing_core_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/preprocessing" - ], - "swig_files": ["preprocessing_module.i"], - "module_dir": "./tick/preprocessing/", - "extension_name": "preprocessing", - "include_modules": base_array_modules -} - -preprocessing_core = create_extension(**preprocessing_core_info) - -survival_extension_info = { - "cpp_files": [], - "h_files": [], - "folders": [ - "lib/cpp/survival" - ], - "swig_files": ["survival_module.i"], - "module_dir": "./tick/survival/", - "extension_name": "survival", - "include_modules": base_array_modules + [base_model_core.module_ref] -} -survival_extension = create_extension(**survival_extension_info) - -tick_modules = [ - array_extension, base_extension, test_extension, - random_extension, base_model_core, linear_model_core, - hawkes_simulation_extension, hawkes_model_extension, - hawkes_inference_extension, - prox_core, preprocessing_core, - robust_extension, survival_extension, solver_core -] - -# Abstract class for tick-specific commands that need access to common build -# directories -class TickCommand(Command, ABC): - tick_dir = os.path.abspath(os.path.join(os.curdir, 'tick')) - cpp_build_dir = os.path.abspath(os.path.join(build_dir, 'cpptest')) - - user_options = [] - - def initialize_options(self): - """Set default values for options.""" - pass - - def finalize_options(self): - """Post-process options.""" - pass - - -class TickBuild(build): - swig_min_ver = (4, 0, 0) - - @staticmethod - def extract_swig_version(swig_ver_str): - m = 
re.search('SWIG Version (\d+).(\d+).(\d+)', swig_ver_str) - - if not m: - txt = 'Could not extract SWIG version from string: {0}' - - warnings.warn(txt.format(swig_ver_str)) - - return 0, 0, 0 - - return tuple(int(x) for x in m.groups()[0:3]) - - def run(self): - swig_ver = self.extract_swig_version( - str(subprocess.check_output(['swig', '-version']))) - - if swig_ver < self.swig_min_ver: - txt = 'SWIG version {0}.{1}.{2} ' \ - 'lower than the required version >= {3}.{4}.{5}. ' \ - 'This will likely cause build errors!' - - warnings.warn(txt.format(*(swig_ver + self.swig_min_ver))) - - self.run_command('build_ext') - build.run(self) - - -class TickInstall(install): - def run(self): - self.run_command('build_ext') - install.run(self) - - -class BuildRunCPPTests(TickCommand): - description = 'build and run tick C++ tests' - - def run(self): - self.run_command('makecpptest') - self.run_command('runcpptest') - - -class RunCPPTests(TickCommand): - description = 'run tick C++ tests' - - def run(self): - make_cmd = ['make', 'check'] - subprocess.check_call(make_cmd, cwd=self.cpp_build_dir) - - -class BuildCPPTests(TickCommand): - build_jobs = 1 - description = 'build tick C++ tests' - user_options = [ - ('build-jobs=', 'j', - 'number of parallel build jobs (default is number of available CPU ' - 'cores reported by Python)'), - ] - - def initialize_options(self): - """Set default values for options.""" - self.build_jobs = multiprocessing.cpu_count() - - def run(self): - relpath = os.path.relpath(self.tick_dir, self.cpp_build_dir) - cmake_exe = os.environ.get('TICK_CMAKE', 'cmake') - - - cmake_cmd = [cmake_exe, - '-DTICK_REBUILD_LIBS=OFF', - '-DBENCHMARK=OFF', - relpath + '/../lib'] - - if TICK_CMAKE_GENERATOR is not None: - cmake_cmd.extend(['-G', '{}'.format(TICK_CMAKE_GENERATOR)]) - - # Feed the path to the built C++ extensions so CMake does not have to - # build them again - for mod in tick_modules: - full_path = os.path.abspath( - os.path.join(mod.module_ref.build, 
mod.module_ref.lib_filename)) - - cmake_cmd.append( - '-DTICK_LIB_{}={}'.format(mod.ext_name.upper(), full_path)) - - if 'define_macros' in blas_info and \ - any(key == 'HAVE_CBLAS' for key, _ in blas_info['define_macros']): - cmake_cmd.append('-DUSE_BLAS=ON') - - os.makedirs(os.path.join(self.cpp_build_dir, 'cpptest'), exist_ok=True) - subprocess.check_call(cmake_cmd, cwd=self.cpp_build_dir) - - make_cmd = ['make', 'VERBOSE=1', 'all', '-j{}'.format(self.build_jobs)] - subprocess.check_call(make_cmd, cwd=self.cpp_build_dir) - - -class RunCPPLint(TickCommand): - description = 'run cpplint on tick C++ source files' - - CPPLINT_DIRS = [ - 'lib/include', - 'lib/cpp', - ] - - def run(self): - try: - import cpplint as cl - - cl_state = cl._cpplint_state - error_count = 0 - - for dir in self.CPPLINT_DIRS: - print("Processing {}".format(dir)) - - cl_state.ResetErrorCounts() - filenames = list(pathlib.Path(dir).glob('**/*.h')) + \ - list(pathlib.Path(dir).glob('**/*.cpp')) - - for filename in filenames: - cl.ProcessFile(str(filename), cl_state.verbose_level) - cl_state.PrintErrorCounts() - - error_count += cl_state.error_count - print('') - - if error_count > 0: - raise RuntimeError("Codestyle check by cpplint failed") - - except ImportError: - warnings.warn("Stylecheck by cpplint failed because cpplint " - "is not installed as a Python module") - - -class RunPyLint(TickCommand): - # We need to define if and how we run pylint - - description = 'run tick PyLint codestyle check' - start_dir = '.' - - @staticmethod - def run(): - raise NotImplementedError('Running pylint from setup.py' - 'not supported yet') - -class RunPyTests(TickCommand): - description = 'run tick Python tests' - start_dir = '.' - - user_options = [ - ('start-dir=', 's', - 'directory to start looking for Python tests (e.g. tick/simulation)'), - ] - - def initialize_options(self): - """Set default values for options.""" - self.start_dir = '.' 
- - def run(self): - if platform.system() == 'Windows': - print("The pytest command has issues with threads on Windows") - print('Instead please run:') - print('python3 -m unittest discover -v . "*_test.py"') - exit(1) - loader = unittest.TestLoader() - alltests = loader.discover(self.start_dir, pattern="*_test.py") - result = unittest.TextTestRunner(verbosity=2).run(alltests) - sys.exit(not result.wasSuccessful()) - - -class RunTestSuites(TickCommand): - description = 'run tick Python and C++ tests' - - def run(self): - self.run_command('cpptest') - self.run_command('pytest') - - -class CleanTick(clean): - description = 'cleans all generated and built files' - - def run(self): - seconds_until_clean = 5 - - print("Cleaning source directories in %d seconds..." % - seconds_until_clean) - - time.sleep(seconds_until_clean) - - clean.run(self) - - shutil.rmtree(build_dir, ignore_errors=True) - - patterns = [ - '**/*.so', - '**/*_wrap.cpp', - '**/__pycache__/*.pyc', - '**/__pycache__', - ] - - for paths in (pathlib.Path(os.curdir).glob(p) for p in patterns): - for path in paths: - print("Deleting {}".format(path)) - - if path.is_dir(): - path.rmdir() - else: - path.unlink() - - -setup(name="tick", - version='0.7.0.1', - author="Emmanuel Bacry, " - "Stephane Gaiffas, " - "Martin Bompaire, " - "Søren V. 
Poulsen, " - "Maryan Morel, " - "Simon Bussy, " - "Philip Deegan", - author_email='martin.bompaire@polytechnique.edu, ' - 'philip.deegan@polytechnique.edu', - url="https://x-datainitiative.github.io/tick/", - description="Module for statistical learning, with a particular emphasis " - "on time-dependent modelling", - ext_modules=tick_modules, - install_requires=['numpy', - 'scipy', - 'numpydoc', - 'matplotlib', - 'sphinx', - 'pandas', - 'dill', - 'scikit-learn'], - packages=find_packages(), - cmdclass={'build': TickBuild, - 'install': TickInstall, - 'makecpptest': BuildCPPTests, - 'runcpptest': RunCPPTests, - 'cpptest': BuildRunCPPTests, - 'cpplint': RunCPPLint, - 'pytest': RunPyTests, - 'pylint': RunPyLint, - 'test': RunTestSuites, - 'clean': CleanTick}, - classifiers=['Development Status :: 3 - Alpha', - 'Intended Audience :: Science/Research', - 'Intended Audience :: Developers', - 'Programming Language :: C++', - 'Programming Language :: Python', - 'Topic :: Software Development', - 'Topic :: Scientific/Engineering', - 'Operating System :: POSIX', - 'Operating System :: Unix', - 'Operating System :: MacOS', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'License :: OSI Approved :: BSD License'], - ) +from setuptools import find_packages, setup + +setup( + name="tick", + version='0.7.0.2', + author="Emmanuel Bacry, " + "Stephane Gaiffas, " + "Martin Bompaire, " + "Søren V. 
Poulsen, " + "Maryan Morel, " + "Simon Bussy, " + "Philip Deegan", + author_email='martin.bompaire@polytechnique.edu, ' + 'philip.deegan@polytechnique.edu', + url="https://x-datainitiative.github.io/tick/", + description="Module for statistical learning, with a particular emphasis " + "on time-dependent modelling", + # ext_modules=tick_modules, + install_requires=['numpy', + 'scipy', + 'numpydoc', + 'matplotlib', + 'sphinx', + 'pandas', + 'dill', + 'scikit-learn'], + # packages=find_packages(exclude=["lib", "dist"]), + cmdclass={}, + classifiers=['Development Status :: 3 - Alpha', + 'Intended Audience :: Science/Research', + 'Intended Audience :: Developers', + 'Programming Language :: C++', + 'Programming Language :: Python', + 'Topic :: Software Development', + 'Topic :: Scientific/Engineering', + 'Operating System :: POSIX', + 'Operating System :: Unix', + 'Operating System :: MacOS', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'License :: OSI Approved :: BSD License'], + include_package_data=True, + packages=find_packages(exclude=["lib", "lib.*", "tests", "tests.*"]), +) diff --git a/sh/configure_env.sh b/sh/configure_env.sh index 44e8a2f2a..4551a7334 100755 --- a/sh/configure_env.sh +++ b/sh/configure_env.sh @@ -17,10 +17,11 @@ # ###################################################################### -set -e +set -ex echo "Entering configure_env.sh" +PYTHON_EMBED_LIB=${PYTHON_EMBED_LIB:-0} [ ! 
-d "$ROOT/lib/third_party/cereal/include" ] && \ git submodule update --init @@ -137,7 +138,7 @@ else PY_INCS="${PINC}:${PNIC}" LDARGS="$($PCONF --ldflags)" - (($PYVER >= 3)) && (($PYVER_MIN > 8)) && LDARGS="$($PCONF --ldflags --embed)" + (($PYTHON_EMBED_LIB == 1)) && LDARGS="$($PCONF --ldflags --embed)" B_PATH=".:${PY_LIBDIR}" [ -z "$LIB_POSTEXT" ] && LIB_POSTEXT="${LIB_POSTFIX##*.}" fi diff --git a/sh/mkn.sh b/sh/mkn.sh index 1a4dbaca5..9ee47c72c 100755 --- a/sh/mkn.sh +++ b/sh/mkn.sh @@ -18,7 +18,7 @@ # ###################################################################### -set -e +set -ex CWD="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" pushd $CWD/.. 2>&1 > /dev/null diff --git a/sh/swig.sh b/sh/swig.sh index f9c593a23..494ffb585 100755 --- a/sh/swig.sh +++ b/sh/swig.sh @@ -58,7 +58,7 @@ for P in "${PROFILES[@]}"; do [ -f "${SWIG_BASE}/$P/${B}_wrap.cpp" ] && \ rm "${SWIG_BASE}/$P/${B}_wrap.cpp" [ ! -f "${SWIG_BASE}/$P/${B}_wrap.cpp" ] && \ - $SWIG -python -py3 -c++ ${INCS[@]} \ + $SWIG -python -c++ ${INCS[@]} \ -outdir $DIR/build "$SWIG_C_FLAGS" \ -o ${SWIG_BASE}/$P/${B}_wrap.cpp $IF else @@ -70,7 +70,7 @@ for P in "${PROFILES[@]}"; do [ -f "${SWIG_BASE}/$P/${B}_wrap.cpp" ] && \ rm "${SWIG_BASE}/$P/${B}_wrap.cpp" [ ! -f "${SWIG_BASE}/$P/${B}_wrap.cpp" ] && \ - $SWIG -python -py3 -c++ ${INCS[@]} \ + $SWIG -python -c++ ${INCS[@]} \ -outdir $DIR/build "$SWIG_C_FLAGS" \ -o ${SWIG_BASE}/$P/${B}_wrap.cpp $IF done @@ -80,7 +80,7 @@ for P in "${PROFILES[@]}"; do [ -f "${SWIG_BASE}/$P/${P1}_wrap.cpp" ] && \ rm "${SWIG_BASE}/$P/${P1}_wrap.cpp" [ ! 
-f "${SWIG_BASE}/$P/${P}_wrap.cpp" ] && \ - $SWIG -python -py3 -c++ ${INCS[@]} \ + $SWIG -python -c++ ${INCS[@]} \ -outdir $DIR/build "$SWIG_C_FLAGS" -o ${SWIG_BASE}/$P/${P1}_wrap.cpp \ "${DIR}/swig/${P1}.i" fi diff --git a/tick/base/tests/base_test.py b/tick/base/tests/base_test.py index 7338259ef..5f03ea497 100644 --- a/tick/base/tests/base_test.py +++ b/tick/base/tests/base_test.py @@ -473,7 +473,7 @@ def frop(x): self.a0.readonly_prop = x msg = "can't set attribute" - if sys.version_info[1] == 11: + if sys.version_info[1] >= 11: msg = "property 'readonly_prop' of 'A0' object has no setter" self.assertRaisesRegex(AttributeError, msg, frop, 45)