Script 'mail_helper' called by obssrc

Hello community,

here is the log from the commit of package python-autoray for openSUSE:Factory checked in at 2021-02-19 23:45:06
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-autoray (Old)
 and      /work/SRC/openSUSE:Factory/.python-autoray.new.28504 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-autoray" Fri Feb 19 23:45:06 2021 rev:2 rq:873666 version:0.2.5 Changes: -------- --- /work/SRC/openSUSE:Factory/python-autoray/python-autoray.changes 2020-08-19 18:59:53.067943722 +0200 +++ /work/SRC/openSUSE:Factory/.python-autoray.new.28504/python-autoray.changes 2021-02-19 23:45:46.747393616 +0100 @@ -1,0 +2,7 @@ +Fri Feb 19 06:18:20 UTC 2021 - andy great <[email protected]> + +- Update to version 0.2.5. + * No changelog given. +- Skip python36 because numpy no longer support it. + +------------------------------------------------------------------- Old: ---- autoray-0.2.3.tar.gz New: ---- autoray-0.2.5.tar.gz ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Other differences: ------------------ ++++++ python-autoray.spec ++++++ --- /var/tmp/diff_new_pack.c1OjZr/_old 2021-02-19 23:45:47.251394111 +0100 +++ /var/tmp/diff_new_pack.c1OjZr/_new 2021-02-19 23:45:47.255394115 +0100 @@ -1,7 +1,7 @@ # # spec file for package python-autoray # -# Copyright (c) 2020 SUSE LLC +# Copyright (c) 2021 SUSE LLC # # All modifications and additions to the file contributed by third parties # remain the property of their copyright owners, unless otherwise agreed @@ -18,20 +18,25 @@ %define packagename autoray %define skip_python2 1 +%define skip_python36 1 %{?!python_module:%define python_module() python-%{**} python3-%{**}} Name: python-autoray -Version: 0.2.3 +Version: 0.2.5 Release: 0 Summary: A lightweight python automatic-array library License: Apache-2.0 URL: https://github.com/jcmgray/autoray Source: https://github.com/jcmgray/autoray/archive/%{version}.tar.gz#/autoray-%{version}.tar.gz +BuildRequires: %{python_module dask-array} BuildRequires: %{python_module numpy} BuildRequires: %{python_module pytest} +BuildRequires: %{python_module scipy} BuildRequires: %{python_module setuptools} BuildRequires: fdupes BuildRequires: python-rpm-macros +Requires: python-dask-array Requires: python-numpy +Requires: python-scipy BuildArch: noarch %python_subpackages @@ -55,6 +60,7 @@ %files %{python_files} %doc README.rst %license LICENSE -%{python_sitelib}/* +%{python_sitelib}/%{packagename}-*.egg-info +%{python_sitelib}/%{packagename} %changelog ++++++ autoray-0.2.3.tar.gz -> autoray-0.2.5.tar.gz ++++++ diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/.travis.yml new/autoray-0.2.5/.travis.yml --- old/autoray-0.2.3/.travis.yml 2020-05-22 22:19:36.000000000 +0200 +++ new/autoray-0.2.5/.travis.yml 1970-01-01 01:00:00.000000000 +0100 @@ -1,22 +0,0 @@ -dist: xenial -language: python -python: - - 3.5 - - 3.6 - - 3.7 - - 3.8 - -before_install: - - travis_wait bash deps/travis-install-conda.sh - - export PATH="$HOME/conda/bin:$PATH" - - source activate "test-environment-${TRAVIS_PYTHON_VERSION}" - -install: - - pip install . - -script: - - pytest - -after_success: - - coveralls - - codecov diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/README.rst new/autoray-0.2.5/README.rst --- old/autoray-0.2.3/README.rst 2020-05-22 22:19:36.000000000 +0200 +++ new/autoray-0.2.5/README.rst 2021-01-27 03:56:07.000000000 +0100 @@ -15,9 +15,9 @@ * `pytorch <https://pytorch.org/>`_ * ... and indeed **any** library that provides a numpy-*ish* api. -.. image:: https://travis-ci.org/jcmgray/autoray.svg?branch=master - :target: https://travis-ci.org/jcmgray/autoray - :alt: Travis-CI +.. 
++++++ autoray-0.2.3.tar.gz -> autoray-0.2.5.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/.travis.yml new/autoray-0.2.5/.travis.yml
--- old/autoray-0.2.3/.travis.yml       2020-05-22 22:19:36.000000000 +0200
+++ new/autoray-0.2.5/.travis.yml       1970-01-01 01:00:00.000000000 +0100
@@ -1,22 +0,0 @@
-dist: xenial
-language: python
-python:
-  - 3.5
-  - 3.6
-  - 3.7
-  - 3.8
-
-before_install:
-  - travis_wait bash deps/travis-install-conda.sh
-  - export PATH="$HOME/conda/bin:$PATH"
-  - source activate "test-environment-${TRAVIS_PYTHON_VERSION}"
-
-install:
-  - pip install .
-
-script:
-  - pytest
-
-after_success:
-  - coveralls
-  - codecov
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/README.rst new/autoray-0.2.5/README.rst
--- old/autoray-0.2.3/README.rst        2020-05-22 22:19:36.000000000 +0200
+++ new/autoray-0.2.5/README.rst        2021-01-27 03:56:07.000000000 +0100
@@ -15,9 +15,9 @@
 * `pytorch <https://pytorch.org/>`_
 * ... and indeed **any** library that provides a numpy-*ish* api.
 
-.. image:: https://travis-ci.org/jcmgray/autoray.svg?branch=master
-    :target: https://travis-ci.org/jcmgray/autoray
-    :alt: Travis-CI
+.. image:: https://dev.azure.com/autoray-org/autoray/_apis/build/status/jcmgray.autoray?branchName=master
+    :target: https://dev.azure.com/autoray-org/autoray/_build/latest?definitionId=1&branchName=master
+    :alt: Azure Pipelines
 .. image:: https://codecov.io/gh/jcmgray/autoray/branch/master/graph/badge.svg
     :target: https://codecov.io/gh/jcmgray/autoray
     :alt: Code Coverage
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/autoray/_version.py new/autoray-0.2.5/autoray/_version.py
--- old/autoray-0.2.3/autoray/_version.py       2020-05-22 22:19:36.000000000 +0200
+++ new/autoray-0.2.5/autoray/_version.py       2021-01-27 03:56:07.000000000 +0100
@@ -23,9 +23,9 @@
     # setup.py/versioneer.py will grep for the variable names, so they must
     # each be defined on a line of their own. _version.py will just call
     # get_keywords().
-    git_refnames = " (HEAD -> master, tag: 0.2.3)"
-    git_full = "19ff65d02b645f5710fbdecfd259a9d3a0f35d29"
-    git_date = "2020-05-22 13:19:36 -0700"
+    git_refnames = " (tag: 0.2.5)"
+    git_full = "1a7c9a3cbef271c828ed4527b89894c090673909"
+    git_date = "2021-01-26 18:56:07 -0800"
     keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
     return keywords
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/autoray/autoray.py new/autoray-0.2.5/autoray/autoray.py
--- old/autoray-0.2.3/autoray/autoray.py        2020-05-22 22:19:36.000000000 +0200
+++ new/autoray-0.2.5/autoray/autoray.py        2021-01-27 03:56:07.000000000 +0100
@@ -19,6 +19,7 @@
 import importlib
 import functools
+import itertools
 from collections import OrderedDict
 
 import numpy as _numpy
@@ -83,14 +84,25 @@
     return get_lib_fn(backend, fn)(*args, **kwargs)
 
 
+@functools.lru_cache(128)
+def _infer_class_backend_cached(T):
+    if issubclass(T, _numpy.ndarray):
+        return 'numpy'
+
+    lib = T.__module__.split('.')[0]
+
+    # check if lib should mapped entirely to another lib
+    backend = _BACKEND_ALIASES.get(lib, lib)
+
+    return backend
+
+
 def infer_backend(array):
     """Get the name of the library that defined the class of ``array`` -
     unless ``array`` is directly a subclass of ``numpy.ndarray``, in which
     case assume ``numpy`` is the desired backend.
     """
-    if isinstance(array, _numpy.ndarray):
-        return 'numpy'
-    return array.__class__.__module__.split('.')[0]
+    return _infer_class_backend_cached(array.__class__)
 
 
 def get_lib_fn(backend, fn):
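The hunk above replaces the per-call isinstance check with a lookup that is cached per array class; a minimal sketch of the observable behaviour (illustrative, not part of the diff):

    # sketch: the backend name is resolved once per class and then served from
    # functools.lru_cache on subsequent calls
    import numpy as np
    import autoray as ar

    x = np.ones((2, 3))
    assert ar.infer_backend(x) == 'numpy'
    assert ar.infer_backend(x) == 'numpy'   # second call hits the cache
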
""" + dtype = to_backend_dtype(dtype_name, like=x) try: - return x.astype(dtype_name, **kwargs) + return x.astype(dtype, **kwargs) except AttributeError: - return do('astype', x, dtype_name, **kwargs) + return do('astype', x, dtype, **kwargs) def to_numpy(x): @@ -453,8 +466,11 @@ # storage & backend specific functions # # --------------------------------------------------------------------------- # +# lookup for mapping entire lib to another +_BACKEND_ALIASES = {} + # global (non function specific) aliases -_MODULE_ALIASES = {'decimal': 'math', 'builtins': 'numpy'} +_MODULE_ALIASES = {} # lookup for when functions are elsewhere than the expected location _SUBMODULE_ALIASES = {} @@ -480,7 +496,7 @@ # ---------------------------------- numpy ---------------------------------- # def numpy_to_numpy(x): - return do('array', x) + return do('array', x, like='numpy') _FUNCS['numpy', 'to_numpy'] = numpy_to_numpy @@ -549,14 +565,16 @@ return x.__array__() -_FUNCS['jax', 'to_numpy'] = jax_to_numpy -_FUNCS['jax', 'random.seed'] = jax_random_seed -_FUNCS['jax', 'random.uniform'] = jax_random_uniform -_FUNCS['jax', 'random.normal'] = jax_random_normal +_BACKEND_ALIASES['jaxlib'] = 'jax' _MODULE_ALIASES['jax'] = 'jax.numpy' _SUBMODULE_ALIASES['jax', 'complex'] = 'jax.lax' +_SUBMODULE_ALIASES['jax', 'linalg.expm'] = 'jax.scipy.linalg' _CUSTOM_WRAPPERS['jax', 'linalg.qr'] = qr_allow_fat _CUSTOM_WRAPPERS['jax', 'linalg.svd'] = svd_not_full_matrices_wrapper +_FUNCS['jax', 'to_numpy'] = jax_to_numpy +_FUNCS['jax', 'random.seed'] = jax_random_seed +_FUNCS['jax', 'random.uniform'] = jax_random_uniform +_FUNCS['jax', 'random.normal'] = jax_random_normal # -------------------------------- autograd --------------------------------- # @@ -574,6 +592,7 @@ _FUNCS['dask', 'to_numpy'] = dask_to_numpy _FUNCS['dask', 'complex'] = complex_add_re_im +_FUNC_ALIASES['dask', 'abs'] = 'absolute' _MODULE_ALIASES['dask'] = 'dask.array' _CUSTOM_WRAPPERS['dask', 'linalg.svd'] = svd_manual_full_matrices_kwarg @@ -581,7 +600,7 @@ # ---------------------------------- mars ----------------------------------- # def mars_to_numpy(x): - return x.execute() + return x.to_numpy() _FUNCS['mars', 'to_numpy'] = mars_to_numpy @@ -612,12 +631,108 @@ _SUBMODULE_ALIASES['ctf', 'linalg.qr'] = 'ctf' +# ------------------------------- sparse------------------------------------- # + +def sparse_array(x): + return do('COO.from_numpy', x, like='sparse') + + +def sparse_to_numpy(x): + return x.todense() + + +def sparse_complex(x, y): + return x + 1j * y + + +def sparse_transpose(x, axes=None): + return x.transpose(axes) + + +def sparse_sum(x, axis=None, keepdims=False, dtype=None, out=None): + return x.sum(axis=axis, keepdims=keepdims, dtype=dtype, out=out) + + +def sparse_prod(x, axis=None, keepdims=False, dtype=None, out=None): + return x.prod(axis=axis, keepdims=keepdims, dtype=dtype, out=out) + + +def sparse_conj(x): + return x.conj() + + +def sparse_real(x): + return x.real + + +def sparse_imag(x): + return x.imag + + +def sparse_count_nonzero(x): + return x.nnz + + +def sparse_random_uniform(low=0.0, high=1.0, size=None, **kwargs): + + def rvs(nnz): + return do('random.uniform', low, high, (nnz,), like='numpy') + + return do('random', size, data_rvs=rvs, **kwargs, like='sparse') + + +def sparse_random_normal(loc=0.0, scale=1.0, size=None, **kwargs): + + def rvs(nnz): + return do('random.normal', loc, scale, (nnz,), like='numpy') + + return do('random', size, data_rvs=rvs, **kwargs, like='sparse') + + +_FUNCS['sparse', 'array'] = sparse_array 
@@ -453,8 +466,11 @@
 #                     storage & backend specific functions                    #
 # --------------------------------------------------------------------------- #
 
+# lookup for mapping entire lib to another
+_BACKEND_ALIASES = {}
+
 # global (non function specific) aliases
-_MODULE_ALIASES = {'decimal': 'math', 'builtins': 'numpy'}
+_MODULE_ALIASES = {}
 
 # lookup for when functions are elsewhere than the expected location
 _SUBMODULE_ALIASES = {}
@@ -480,7 +496,7 @@
 
 # ---------------------------------- numpy ---------------------------------- #
 def numpy_to_numpy(x):
-    return do('array', x)
+    return do('array', x, like='numpy')
 
 
 _FUNCS['numpy', 'to_numpy'] = numpy_to_numpy
@@ -549,14 +565,16 @@
     return x.__array__()
 
 
-_FUNCS['jax', 'to_numpy'] = jax_to_numpy
-_FUNCS['jax', 'random.seed'] = jax_random_seed
-_FUNCS['jax', 'random.uniform'] = jax_random_uniform
-_FUNCS['jax', 'random.normal'] = jax_random_normal
+_BACKEND_ALIASES['jaxlib'] = 'jax'
 _MODULE_ALIASES['jax'] = 'jax.numpy'
 _SUBMODULE_ALIASES['jax', 'complex'] = 'jax.lax'
+_SUBMODULE_ALIASES['jax', 'linalg.expm'] = 'jax.scipy.linalg'
 _CUSTOM_WRAPPERS['jax', 'linalg.qr'] = qr_allow_fat
 _CUSTOM_WRAPPERS['jax', 'linalg.svd'] = svd_not_full_matrices_wrapper
+_FUNCS['jax', 'to_numpy'] = jax_to_numpy
+_FUNCS['jax', 'random.seed'] = jax_random_seed
+_FUNCS['jax', 'random.uniform'] = jax_random_uniform
+_FUNCS['jax', 'random.normal'] = jax_random_normal
 
 
 # -------------------------------- autograd --------------------------------- #
@@ -574,6 +592,7 @@
 
 _FUNCS['dask', 'to_numpy'] = dask_to_numpy
 _FUNCS['dask', 'complex'] = complex_add_re_im
+_FUNC_ALIASES['dask', 'abs'] = 'absolute'
 _MODULE_ALIASES['dask'] = 'dask.array'
 _CUSTOM_WRAPPERS['dask', 'linalg.svd'] = svd_manual_full_matrices_kwarg
 
@@ -581,7 +600,7 @@
 
 # ---------------------------------- mars ----------------------------------- #
 def mars_to_numpy(x):
-    return x.execute()
+    return x.to_numpy()
 
 
 _FUNCS['mars', 'to_numpy'] = mars_to_numpy
@@ -612,12 +631,108 @@
 _SUBMODULE_ALIASES['ctf', 'linalg.qr'] = 'ctf'
 
 
+# ------------------------------- sparse------------------------------------- #
+
+def sparse_array(x):
+    return do('COO.from_numpy', x, like='sparse')
+
+
+def sparse_to_numpy(x):
+    return x.todense()
+
+
+def sparse_complex(x, y):
+    return x + 1j * y
+
+
+def sparse_transpose(x, axes=None):
+    return x.transpose(axes)
+
+
+def sparse_sum(x, axis=None, keepdims=False, dtype=None, out=None):
+    return x.sum(axis=axis, keepdims=keepdims, dtype=dtype, out=out)
+
+
+def sparse_prod(x, axis=None, keepdims=False, dtype=None, out=None):
+    return x.prod(axis=axis, keepdims=keepdims, dtype=dtype, out=out)
+
+
+def sparse_conj(x):
+    return x.conj()
+
+
+def sparse_real(x):
+    return x.real
+
+
+def sparse_imag(x):
+    return x.imag
+
+
+def sparse_count_nonzero(x):
+    return x.nnz
+
+
+def sparse_random_uniform(low=0.0, high=1.0, size=None, **kwargs):
+
+    def rvs(nnz):
+        return do('random.uniform', low, high, (nnz,), like='numpy')
+
+    return do('random', size, data_rvs=rvs, **kwargs, like='sparse')
+
+
+def sparse_random_normal(loc=0.0, scale=1.0, size=None, **kwargs):
+
+    def rvs(nnz):
+        return do('random.normal', loc, scale, (nnz,), like='numpy')
+
+    return do('random', size, data_rvs=rvs, **kwargs, like='sparse')
+
+
+_FUNCS['sparse', 'array'] = sparse_array
+_FUNCS['sparse', 'to_numpy'] = sparse_to_numpy
+_FUNCS['sparse', 'transpose'] = sparse_transpose
+_FUNCS['sparse', 'sum'] = sparse_sum
+_FUNCS['sparse', 'prod'] = sparse_prod
+_FUNCS['sparse', 'conj'] = sparse_conj
+_FUNCS['sparse', 'real'] = sparse_real
+_FUNCS['sparse', 'imag'] = sparse_imag
+_FUNCS['sparse', 'complex'] = sparse_complex
+_FUNCS['sparse', 'count_nonzero'] = sparse_count_nonzero
+_FUNCS['sparse', 'random.uniform'] = sparse_random_uniform
+_FUNCS['sparse', 'random.normal'] = sparse_random_normal
+
+# sparse uses numpys __array_func__ interface
+for f in ('log', 'log2', 'log10', 'exp', 'sqrt', 'sign',
+          'sin', 'cos', 'tan', 'arcsin', 'arccos', 'arctan',
+          'sinh', 'cosh', 'tanh', 'arcsinh', 'arccosh', 'arctanh'):
+    _SUBMODULE_ALIASES['sparse', f] = 'numpy'
+
+
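The block above registers pydata/sparse as a backend. A usage sketch mirroring how ``gen_rand`` in the test suite builds sparse operands (illustrative example; assumes the ``sparse`` package is installed):

    # sketch: random COO array via the sparse backend registered above
    import autoray as ar

    s = ar.do('random.uniform', size=(10, 10), like='sparse',
              density=0.5, format='coo', fill_value=0)
    assert ar.infer_backend(s) == 'sparse'
    assert ar.to_numpy(s).shape == (10, 10)
    nnz = ar.do('count_nonzero', s)          # dispatches to sparse_count_nonzero
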
 # ------------------------------- tensorflow -------------------------------- #
 def tensorflow_to_numpy(x):
     return x.numpy()
 
 
+def tensorflow_pad_wrap(tf_pad):
+
+    def numpy_like(array, pad_width, mode='constant', constant_values=0):
+        if mode != 'constant':
+            raise NotImplementedError
+
+        try:
+            if len(pad_width) == 1:
+                pad_width = pad_width * len(array.shape)
+        except TypeError:
+            pad_width = ((pad_width, pad_width),) * len(array.shape)
+
+        return tf_pad(array, pad_width, mode='CONSTANT',
+                      constant_values=constant_values)
+
+    return numpy_like
+
+
 _FUNCS['tensorflow', 'to_numpy'] = tensorflow_to_numpy
 _SUBMODULE_ALIASES['tensorflow', 'log'] = 'tensorflow.math'
@@ -650,6 +765,7 @@
 _CUSTOM_WRAPPERS['tensorflow', 'linalg.qr'] = qr_allow_fat
 _CUSTOM_WRAPPERS['tensorflow', 'tril'] = tril_to_band_part
 _CUSTOM_WRAPPERS['tensorflow', 'triu'] = triu_to_band_part
+_CUSTOM_WRAPPERS['tensorflow', 'pad'] = tensorflow_pad_wrap
 _CUSTOM_WRAPPERS['tensorflow', 'random.uniform'] = make_translator([
     ('low', ('minval', 0.0)),
     ('high', ('maxval', 1.0)),
@@ -669,6 +785,10 @@
 
 
 # ---------------------------------- torch ---------------------------------- #
+def torch_to_numpy(x):
+    return x.detach().cpu().numpy()
+
+
 def torch_transpose(x, axes=None):
     if axes is None:
         axes = reversed(range(0, x.ndimension()))
@@ -676,44 +796,85 @@
 
 
 def torch_count_nonzero(x):
-    return do('sum', x != 0, like=x)
+    return do('sum', x != 0, like='torch')
 
 
 def torch_astype(x, dtype):
     return x.to(dtype=to_backend_dtype(dtype, like=x))
 
 
+@functools.lru_cache(32)
+def _torch_get_dtype_name(dtype):
+    return str(dtype).split('.')[-1]
+
+
 def torch_get_dtype_name(x):
-    return str(x.dtype).split('.')[-1]
+    return _torch_get_dtype_name(x.dtype)
 
 
-def torch_to_numpy(x):
-    return x.detach().cpu().numpy()
+def torch_real(x):
+    # torch doesn't support calling real on real arrays
+    try:
+        if x.is_complex():
+            return x.real
+    except AttributeError:
+        pass
+    return x
 
 
-def torch_linalg_solve(a, b):
-    return do('solve', b, a)[0]
+def torch_imag(x):
+    # torch doesn't support calling imag on real arrays
+    try:
+        if x.is_complex():
+            return x.imag
+    except AttributeError:
+        pass
+    return do('zeros_like', x, like='torch')
 
 
-def torch_linalg_lstsq(a, b):
-    return do('lstsq', b, a)[0]
+def torch_linalg_solve(a, b):
+    return do('solve', b, a, like='torch')[0]
 
 
 def torch_linalg_eigh(x):
-    return tuple(do('symeig', x, eigenvectors=True))
+    return tuple(do('symeig', x, eigenvectors=True, like='torch'))
 
 
 def torch_linalg_eigvalsh(x):
-    return do('symeig', x, eigenvectors=False)[0]
+    return do('symeig', x, eigenvectors=False, like='torch')[0]
+
+
+def torch_pad(array, pad_width, mode='constant', constant_values=0):
+    if mode != 'constant':
+        raise NotImplementedError
+
+    try:
+        # numpy takes pads like ((0, 0), (1, 1), ... (n-1, n-1))
+        # torch takes pads like (n-1, n-1, n-2, n-2, n-3, n-3, ...)
+        pad = tuple(itertools.chain.from_iterable(pad_width))[::-1]
+
+        # a single tuple was specified ((a, b),) - use for all axes
+        if len(pad) == 2:
+            pad = pad * array.ndimension()
+
+    except TypeError:
+        # assume int
+        pad = (pad_width,) * 2 * array.ndimension()
+
+    return do('nn.functional.pad', array, pad=pad,
+              mode=mode, value=constant_values, like='torch')
 
 
+_FUNCS['torch', 'pad'] = torch_pad
+_FUNCS['torch', 'real'] = torch_real
+_FUNCS['torch', 'imag'] = torch_imag
 _FUNCS['torch', 'astype'] = torch_astype
 _FUNCS['torch', 'to_numpy'] = torch_to_numpy
+_FUNCS['torch', 'complex'] = complex_add_re_im
 _FUNCS['torch', 'transpose'] = torch_transpose
 _FUNCS['torch', 'count_nonzero'] = torch_count_nonzero
 _FUNCS['torch', 'get_dtype_name'] = torch_get_dtype_name
 _FUNCS['torch', 'linalg.solve'] = torch_linalg_solve
-_FUNCS['torch', 'linalg.lstsq'] = torch_linalg_lstsq
 _FUNCS['torch', 'linalg.eigh'] = torch_linalg_eigh
 _FUNCS['torch', 'linalg.eigvalsh'] = torch_linalg_eigvalsh
@@ -723,10 +884,11 @@
 _FUNC_ALIASES['torch', 'concatenate'] = 'cat'
 _FUNC_ALIASES['torch', 'random.normal'] = 'randn'
 _FUNC_ALIASES['torch', 'random.uniform'] = 'rand'
+_FUNC_ALIASES['torch', 'linalg.expm'] = 'matrix_exp'
 
 _SUBMODULE_ALIASES['torch', 'linalg.qr'] = 'torch'
 _SUBMODULE_ALIASES['torch', 'linalg.svd'] = 'torch'
-_SUBMODULE_ALIASES['torch', 'linalg.norm'] = 'torch'
+_SUBMODULE_ALIASES['torch', 'linalg.expm'] = 'torch'
 _SUBMODULE_ALIASES['torch', 'random.normal'] = 'torch'
 _SUBMODULE_ALIASES['torch', 'random.uniform'] = 'torch'
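Together with ``tensorflow_pad_wrap`` earlier, the new ``torch_pad`` lets the numpy-style ``pad`` signature be used unchanged across backends. A sketch (illustrative example; assumes pytorch is installed):

    # sketch: numpy-style pad_width is converted to torch's flat pad tuple by torch_pad
    import autoray as ar

    a = ar.do('ones', shape=(2, 3), like='torch')
    b = ar.do('pad', a, ((1, 1), (2, 2)))    # same semantics as numpy.pad
    assert tuple(b.shape) == (4, 7)
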
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/azure-pipelines.yml new/autoray-0.2.5/azure-pipelines.yml
--- old/autoray-0.2.3/azure-pipelines.yml       1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/azure-pipelines.yml       2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,40 @@
+variables:
+  which_tests: 'all'
+
+jobs:
+
+- job: Linux
+  strategy:
+    matrix:
+      py36:
+        conda_env: py36
+      py37:
+        conda_env: py37
+      py38:
+        conda_env: py38
+  pool:
+    vmImage: 'ubuntu-latest'
+  steps:
+    - template: ci/azure/unit-tests.yml
+
+
+- job: Windows
+  strategy:
+    matrix:
+      py38:
+        conda_env: py-win
+  pool:
+    vmImage: 'windows-latest'
+  steps:
+    - template: ci/azure/unit-tests.yml
+
+
+- job: MaxOSX
+  strategy:
+    matrix:
+      py38:
+        conda_env: py-mac
+  pool:
+    vmImage: 'macOS-latest'
+  steps:
+    - template: ci/azure/unit-tests.yml
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/ci/azure/add-conda-to-path.yml new/autoray-0.2.5/ci/azure/add-conda-to-path.yml
--- old/autoray-0.2.3/ci/azure/add-conda-to-path.yml    1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/ci/azure/add-conda-to-path.yml    2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,18 @@
+# https://docs.microsoft.com/en-us/azure/devops/pipelines/languages/anaconda
+steps:
+
+- bash: |
+    echo "##vso[task.prependpath]$CONDA/bin"
+  displayName: Add conda to PATH (Linux)
+  condition: eq(variables['Agent.OS'], 'Linux')
+
+- bash: |
+    echo "##vso[task.prependpath]$CONDA/bin"
+    sudo chown -R $USER $CONDA
+  displayName: Add conda to PATH (OS X)
+  condition: eq(variables['Agent.OS'], 'Darwin')
+
+- powershell: |
+    Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
+  displayName: Add conda to PATH (Windows)
+  condition: eq(variables['Agent.OS'], 'Windows_NT')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/ci/azure/install.yml new/autoray-0.2.5/ci/azure/install.yml
--- old/autoray-0.2.3/ci/azure/install.yml      1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/ci/azure/install.yml      2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,22 @@
+parameters:
+  env_file: ci/requirements/$CONDA_ENV.yml
+
+steps:
+
+- template: add-conda-to-path.yml
+
+- bash: |
+    conda update -y conda
+    conda env create -n autoray-tests --file ${{ parameters.env_file }}
+  displayName: Install conda dependencies
+
+- bash: |
+    source activate autoray-tests
+    python -m pip install --no-deps -e .
+  displayName: Install autoray
+
+- bash: |
+    source activate autoray-tests
+    conda info -a
+    conda list
+  displayName: Version info
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/ci/azure/unit-tests.yml new/autoray-0.2.5/ci/azure/unit-tests.yml
--- old/autoray-0.2.3/ci/azure/unit-tests.yml   1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/ci/azure/unit-tests.yml   2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,21 @@
+steps:
+
+- template: install.yml
+
+- bash: |
+    source activate autoray-tests
+    python -c "import autoray"
+  displayName: "Import autoray"
+
+- bash: |
+    source activate autoray-tests
+    pytest \
+      --cov=autoray tests/ \
+      --cov-report=xml \
+      tests
+  condition: eq(variables['which_tests'], 'all')
+  displayName: "Run all tests"
+
+- bash: |
+    bash <(curl -s https://codecov.io/bash)
+  displayName: "Upload coverage to codecov.io"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/ci/requirements/py-mac.yml new/autoray-0.2.5/ci/requirements/py-mac.yml
--- old/autoray-0.2.3/ci/requirements/py-mac.yml        1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/ci/requirements/py-mac.yml        2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,20 @@
+channels:
+  - pytorch
+  - defaults
+  - conda-forge
+dependencies:
+  - numpy
+  - dask
+  - pytorch>=1.4
+  - cpuonly
+  - sparse
+  - pytest
+  - pytest-cov
+  - coverage
+  - coveralls
+  - codecov
+  - pip
+  - pip:
+    - pymars
+    - jax
+    - jaxlib
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/ci/requirements/py-win.yml new/autoray-0.2.5/ci/requirements/py-win.yml
--- old/autoray-0.2.3/ci/requirements/py-win.yml        1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/ci/requirements/py-win.yml        2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,19 @@
+channels:
+  - pytorch
+  - defaults
+  - conda-forge
+dependencies:
+  - numpy
+  - tensorflow>=2
+  - dask
+  - pytorch>=1.4
+  - cpuonly
+  - sparse
+  - pytest
+  - pytest-cov
+  - coverage
+  - coveralls
+  - codecov
+  - pip
+  - pip:
+    - pymars
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/ci/requirements/py36.yml new/autoray-0.2.5/ci/requirements/py36.yml
--- old/autoray-0.2.3/ci/requirements/py36.yml  1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/ci/requirements/py36.yml  2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,22 @@
+channels:
+  - pytorch
+  - defaults
+  - conda-forge
+dependencies:
+  - python=3.6
+  - numpy
+  - tensorflow>=2
+  - dask
+  - pytorch>=1.4
+  - cpuonly
+  - sparse
+  - pytest
+  - pytest-cov
+  - coverage
+  - coveralls
+  - codecov
+  - pip
+  - pip:
+    - pymars
+    - jax
+    - jaxlib
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/ci/requirements/py37.yml new/autoray-0.2.5/ci/requirements/py37.yml
--- old/autoray-0.2.3/ci/requirements/py37.yml  1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/ci/requirements/py37.yml  2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,22 @@
+channels:
+  - pytorch
+  - defaults
+  - conda-forge
+dependencies:
+  - python=3.7
+  - numpy
+  - tensorflow>=2
+  - dask
+  - pytorch>=1.4
+  - cpuonly
+  - sparse
+  - pytest
+  - pytest-cov
+  - coverage
+  - coveralls
+  - codecov
+  - pip
+  - pip:
+    - pymars
+    - jax
+    - jaxlib
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/ci/requirements/py38.yml new/autoray-0.2.5/ci/requirements/py38.yml
--- old/autoray-0.2.3/ci/requirements/py38.yml  1970-01-01 01:00:00.000000000 +0100
+++ new/autoray-0.2.5/ci/requirements/py38.yml  2021-01-27 03:56:07.000000000 +0100
@@ -0,0 +1,22 @@
+channels:
+  - pytorch
+  - defaults
+  - conda-forge
+dependencies:
+  - python=3.8
+  - numpy
+  - tensorflow>=2
+  - dask
+  - pytorch>=1.4
+  - cpuonly
+  - sparse
+  - pytest
+  - pytest-cov
+  - coverage
+  - coveralls
+  - codecov
+  - pip
+  - pip:
+    - pymars
+    - jax
+    - jaxlib
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/deps/requirements-py3.yml new/autoray-0.2.5/deps/requirements-py3.yml
--- old/autoray-0.2.3/deps/requirements-py3.yml 2020-05-22 22:19:36.000000000 +0200
+++ new/autoray-0.2.5/deps/requirements-py3.yml 1970-01-01 01:00:00.000000000 +0100
@@ -1,20 +0,0 @@
-channels:
-  - pytorch
-  - defaults
-  - conda-forge
-dependencies:
-  - numpy
-  - tensorflow>=2
-  - dask
-  - pytorch>=1.4
-  - cpuonly
-  - pytest
-  - pytest-cov
-  - coverage
-  - coveralls
-  - codecov
-  - pip
-  - pip:
-    - pymars
-    - jax
-    - jaxlib
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/deps/travis-install-conda.sh new/autoray-0.2.5/deps/travis-install-conda.sh
--- old/autoray-0.2.3/deps/travis-install-conda.sh      2020-05-22 22:19:36.000000000 +0200
+++ new/autoray-0.2.5/deps/travis-install-conda.sh      1970-01-01 01:00:00.000000000 +0100
@@ -1,17 +0,0 @@
-#!/bin/sh
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-ENV="test-environment-${TRAVIS_PYTHON_VERSION}"
-
-wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
-bash miniconda.sh -b -p $HOME/conda
-export PATH="$HOME/conda/bin:$PATH"
-hash -r
-conda config --set always_yes yes --set changeps1 no
-conda update -q conda
-conda info -a
-conda env create \
-    --name $ENV \
-    python=$TRAVIS_PYTHON_VERSION \
-    --file $DIR/requirements-py3.yml
-source activate $ENV
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/autoray-0.2.3/tests/test_autoray.py new/autoray-0.2.5/tests/test_autoray.py
--- old/autoray-0.2.3/tests/test_autoray.py     2020-05-22 22:19:36.000000000 +0200
+++ new/autoray-0.2.5/tests/test_autoray.py     2021-01-27 03:56:07.000000000 +0100
@@ -7,19 +7,33 @@
 # find backends to tests
 BACKENDS = ['numpy']
-for lib in ['cupy', 'dask', 'tensorflow', 'torch', 'mars', 'jax']:
+for lib in ['cupy', 'dask', 'tensorflow', 'torch', 'mars', 'jax', 'sparse']:
     if importlib.util.find_spec(lib):
-        BACKENDS.append(lib)
+        BACKENDS.append(pytest.param(lib))
 
         if lib == 'jax':
+            import os
             from jax.config import config
             config.update("jax_enable_x64", True)
+            os.environ['XLA_PYTHON_CLIENT_ALLOCATOR'] = 'platform'
+
+    else:
+        BACKENDS.append(pytest.param(
+            lib,
+            marks=pytest.mark.skipif(True, reason=f"No {lib}.")
+        ))
 
 
 JAX_RANDOM_KEY = None
 
 
 def gen_rand(shape, backend, dtype='float64'):
+
+    if 'complex' in dtype:
+        re = gen_rand(shape, backend)
+        im = gen_rand(shape, backend)
+        return ar.astype(ar.do('complex', re, im), dtype)
+
     if backend == 'jax':
         from jax import random as jrandom
@@ -31,7 +45,13 @@
 
         return jrandom.uniform(subkey, shape=shape, dtype=dtype)
 
-    x = ar.do('random.uniform', size=shape, like=backend)
+    elif backend == 'sparse':
+        x = ar.do('random.uniform', size=shape, like=backend,
+                  density=0.5, format='coo', fill_value=0)
+
+    else:
+        x = ar.do('random.uniform', size=shape, like=backend)
+
     x = ar.astype(x, ar.to_backend_dtype(dtype, backend))
     assert ar.get_dtype_name(x) == dtype
     return x
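``gen_rand`` above now also builds complex test arrays through the public API; the same recipe works directly, for example on the numpy backend (illustrative sketch, not part of the diff):

    # sketch of the gen_rand recipe for complex dtypes
    import autoray as ar

    re = ar.do('random.uniform', size=(3, 4), like='numpy')
    im = ar.do('random.uniform', size=(3, 4), like='numpy')
    z = ar.astype(ar.do('complex', re, im), 'complex128')
    assert ar.get_dtype_name(z) == 'complex128'
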
@@ -42,6 +62,8 @@
 def test_basic(backend, fn):
     x = gen_rand((2, 3, 4), backend)
     y = ar.do(fn, x)
+    if (backend == 'sparse') and (fn is 'sum'):
+        pytest.xfail("Sparse 'sum' outputs dense.")
     assert ar.infer_backend(x) == ar.infer_backend(y) == backend
 
 
@@ -54,6 +76,9 @@
     (ar.reshape, [(5, 3)]),
 ])
 def test_attribute_prefs(backend, fn, args):
+    if (backend is 'torch') and fn in (ar.real, ar.imag):
+        pytest.xfail("Pytorch doesn't support complex numbers yet...")
+
     x = gen_rand((3, 5), backend)
     y = fn(x, *args)
     assert ar.infer_backend(x) == ar.infer_backend(y) == backend
@@ -77,6 +102,8 @@
 
 @pytest.mark.parametrize('backend', BACKENDS)
 def test_mgs(backend):
+    if backend == 'sparse':
+        pytest.xfail("Sparse doesn't support linear algebra yet...")
     x = gen_rand((3, 5), backend)
     Ux = modified_gram_schmidt(x)
     y = ar.do('sum', Ux @ ar.dag(Ux))
@@ -103,6 +130,8 @@
 
 @pytest.mark.parametrize('backend', BACKENDS)
 def test_mgs_np_mimic(backend):
+    if backend == 'sparse':
+        pytest.xfail("Sparse doesn't support linear algebra yet...")
     x = gen_rand((3, 5), backend)
     Ux = modified_gram_schmidt_np_mimic(x)
     y = ar.do('sum', Ux @ ar.dag(Ux))
@@ -111,6 +140,8 @@
 
 @pytest.mark.parametrize('backend', BACKENDS)
 def test_linalg_svd_square(backend):
+    if backend == 'sparse':
+        pytest.xfail("Sparse doesn't support linear algebra yet...")
     x = gen_rand((5, 4), backend)
     U, s, V = ar.do('linalg.svd', x)
     assert (
@@ -129,6 +160,9 @@
 def test_translator_random_uniform(backend):
     from autoray import numpy as anp
 
+    if backend == 'sparse':
+        pytest.xfail("Sparse will have zeros")
+
     x = anp.random.uniform(low=-10, size=(4, 5), like=backend)
     assert (ar.to_numpy(x) > -10).all()
     assert (ar.to_numpy(x) < 1.0).all()
@@ -141,8 +175,13 @@
 def test_translator_random_normal(backend):
     from autoray import numpy as anp
-    x = anp.random.normal(100.0, 0.1, size=(4, 5), like=backend)
+
+    if backend == 'sparse':
+        assert (x.data > 90.0).all()
+        assert (x.data < 110.0).all()
+        return
+
     assert (ar.to_numpy(x) > 90.0).all()
     assert (ar.to_numpy(x) < 110.0).all()
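These translator tests go through the numpy-mimicking ``autoray.numpy`` module, which maps numpy-style keyword arguments onto each backend's own signature (for tensorflow, ``low``/``high`` become ``minval``/``maxval`` via ``make_translator`` above). A sketch on the numpy backend (illustrative, not part of the diff):

    # sketch: same call shape as the tests, numpy backend only
    from autoray import numpy as anp

    x = anp.random.uniform(low=-10, size=(4, 5), like='numpy')
    assert x.shape == (4, 5)
    assert (x > -10).all() and (x < 1.0).all()
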
@@ -164,12 +203,18 @@
     xl = ar.do('tril', x)
     xln = ar.to_numpy(xl)
     assert xln[0, 1] == 0.0
-    assert (xln > 0.0).sum() == 10
+    if backend != 'sparse':
+        # this won't work for sparse because density < 1
+        assert (xln > 0.0).sum() == 10
     xl = ar.do('tril', x, k=1)
     xln = ar.to_numpy(xl)
-    assert xln[0, 1] != 0.0
+    if backend != 'sparse':
+        # this won't work for sparse because density < 1
+        assert xln[0, 1] != 0.0
     assert xln[0, 2] == 0.0
-    assert (xln > 0.0).sum() == 13
+    if backend != 'sparse':
+        # this won't work for sparse because density < 1
+        assert (xln > 0.0).sum() == 13
 
     if backend == 'tensorflow':
         with pytest.raises(ValueError):
@@ -182,12 +227,18 @@
     xl = ar.do('triu', x)
     xln = ar.to_numpy(xl)
     assert xln[1, 0] == 0.0
-    assert (xln > 0.0).sum() == 10
+    if backend != 'sparse':
+        # this won't work for sparse because density < 1
+        assert (xln > 0.0).sum() == 10
     xl = ar.do('triu', x, k=-1)
     xln = ar.to_numpy(xl)
-    assert xln[1, 0] != 0.0
+    if backend != 'sparse':
+        # this won't work for sparse because density < 1
+        assert xln[1, 0] != 0.0
     assert xln[2, 0] == 0.0
-    assert (xln > 0.0).sum() == 13
+    if backend != 'sparse':
+        # this won't work for sparse because density < 1
+        assert (xln > 0.0).sum() == 13
 
     if backend == 'tensorflow':
         with pytest.raises(ValueError):
@@ -197,6 +248,8 @@
 @pytest.mark.parametrize('backend', BACKENDS)
 @pytest.mark.parametrize('shape', [(4, 3), (4, 4), (3, 4)])
 def test_qr_thin_square_fat(backend, shape):
+    if backend == 'sparse':
+        pytest.xfail("Sparse doesn't support linear algebra yet...")
     x = gen_rand(shape, backend)
     Q, R = ar.do('linalg.qr', x)
     xn, Qn, Rn = map(ar.to_numpy, (x, Q, R))
@@ -204,21 +257,21 @@
 
 @pytest.mark.parametrize('backend', BACKENDS)
-def test_count_nonzero(backend):
+@pytest.mark.parametrize('array_dtype', ['int', 'float', 'bool'])
+def test_count_nonzero(backend, array_dtype):
+
     if backend == 'mars':
         import mars
         if mars._version.version_info < (0, 4, 0, ''):
             pytest.xfail('mars count_nonzero bug fixed in version 0.4.')
 
-    x = ar.do('array', [0, 1, 2, 0, 3], like=backend)
-    nz = ar.do('count_nonzero', x)
-    assert ar.to_numpy(nz) == 3
-
-    x = ar.do('array', [0., 1., 2., 0., 3.], like=backend)
-    nz = ar.do('count_nonzero', x)
-    assert ar.to_numpy(nz) == 3
 
-    x = ar.do('array', [False, True, True, False, True], like=backend)
+    if array_dtype == 'int':
+        x = ar.do('array', [0, 1, 2, 0, 3], like=backend)
+    elif array_dtype == 'float':
+        x = ar.do('array', [0., 1., 2., 0., 3.], like=backend)
+    elif array_dtype == 'bool':
+        x = ar.do('array', [False, True, True, False, True], like=backend)
     nz = ar.do('count_nonzero', x)
     assert ar.to_numpy(nz) == 3
 
@@ -252,12 +305,15 @@
 def test_complex_creation(backend, real_dtype):
     if backend == 'torch':
         pytest.xfail("Pytorch doesn't support complex numbers yet...")
+    if (backend == 'sparse') and (real_dtype == 'float32'):
+        pytest.xfail("Bug in sparse where single precision isn't maintained "
+                     "after scalar multiplication.")
 
     x = ar.do(
         'complex',
-        ar.astype(ar.do('random.normal', size=(3, 4),
+        ar.astype(ar.do('random.uniform', size=(3, 4),
                         like=backend), real_dtype),
-        ar.astype(ar.do('random.normal', size=(3, 4),
+        ar.astype(ar.do('random.uniform', size=(3, 4),
                         like=backend), real_dtype)
     )
     assert ar.get_dtype_name(x) == {'float32': 'complex64',
@@ -265,6 +321,91 @@
 
 
 @pytest.mark.parametrize('backend', BACKENDS)
+@pytest.mark.parametrize('dtype_in,dtype_out', [
+    ('float32', 'float32'),
+    ('float64', 'float64'),
+    ('complex64', 'float32'),
+    ('complex128', 'float64'),
+])
+def test_real_imag(backend, dtype_in, dtype_out):
+    x = gen_rand((3, 4), backend, dtype_in)
+
+    re = ar.do('real', x)
+    im = ar.do('imag', x)
+
+    assert ar.infer_backend(re) == backend
+    assert ar.infer_backend(im) == backend
+
+    assert ar.get_dtype_name(re) == dtype_out
+    assert ar.get_dtype_name(im) == dtype_out
+
+    assert ar.do('allclose', ar.to_numpy(x).real, ar.to_numpy(re))
+    assert ar.do('allclose', ar.to_numpy(x).imag, ar.to_numpy(im))
+
+
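``test_real_imag`` above exercises the new per-backend ``real``/``imag`` handlers; on the torch backend a real-valued tensor takes the fallback paths of ``torch_real``/``torch_imag``. A sketch (illustrative example; assumes pytorch is installed):

    # sketch: real is a no-op and imag falls back to zeros_like for real tensors
    import autoray as ar

    t = ar.do('random.uniform', size=(3, 4), like='torch')
    r = ar.do('real', t)
    z = ar.do('imag', t)
    assert ar.infer_backend(r) == 'torch'
    assert (ar.to_numpy(z) == 0.0).all()
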
+@pytest.mark.parametrize('backend', BACKENDS)
+@pytest.mark.parametrize('dtype', [
+    'float32', 'float64',
+    'complex64', 'complex128',
+])
+def test_linalg_solve(backend, dtype):
+    if backend == 'sparse':
+        pytest.xfail("Sparse doesn't support linear algebra yet...")
+
+    A = gen_rand((4, 4), backend, dtype)
+    b = gen_rand((4, 1), backend, dtype)
+    x = ar.do('linalg.solve', A, b)
+    assert ar.do('allclose', ar.to_numpy(A @ x), ar.to_numpy(b), rtol=1e-4)
+
+
+@pytest.mark.parametrize('backend', BACKENDS)
+@pytest.mark.parametrize('dtype', [
+    'float32', 'float64',
+    'complex64', 'complex128',
+])
+def test_linalg_eigh(backend, dtype):
+    if backend == 'sparse':
+        pytest.xfail("sparse doesn't support linalg.eigh yet.")
+    if backend == 'dask':
+        pytest.xfail("dask doesn't support linalg.eigh yet.")
+    if backend == 'mars':
+        pytest.xfail("mars doesn't support linalg.eigh yet.")
+    if (backend == 'torch') and ('complex' in dtype):
+        pytest.xfail("Pytorch doesn't fully support complex yet.")
+
+    A = gen_rand((4, 4), backend, dtype)
+    A = A + ar.dag(A)
+    el, ev = ar.do('linalg.eigh', A)
+    B = (ev * ar.reshape(el, (1, -1))) @ ar.dag(ev)
+    assert ar.do('allclose', ar.to_numpy(A), ar.to_numpy(B), rtol=1e-3)
+
+
+@pytest.mark.parametrize('backend', BACKENDS)
+def test_pad(backend):
+    if backend == 'sparse':
+        pytest.xfail("sparse doesn't support linalg.eigh yet.")
+    if backend == 'mars':
+        pytest.xfail("mars doesn't support linalg.eigh yet.")
+
+    A = gen_rand((3, 4, 5), backend)
+
+    for pad_width, new_shape in [
+        # same pad before and after for every axis
+        (2, (7, 8, 9)),
+        # same pad for every axis
+        (((1, 2),), (6, 7, 8)),
+        # different pad for every axis
+        (((4, 3), (2, 4), (3, 2)), (10, 10, 10))
+    ]:
+        B = ar.do('pad', A, pad_width)
+        assert B.shape == new_shape
+        assert (
+            ar.to_numpy(ar.do("sum", A)) ==
+            pytest.approx(ar.to_numpy(ar.do("sum", B)))
+        )
+
+
+@pytest.mark.parametrize('backend', BACKENDS)
 def test_register_function(backend):
     x = ar.do('ones', shape=(2, 3), like=backend)
