I am done

2024-10-30 22:14:35 +01:00
parent 720dc28c09
commit 40e2a747cf
36901 changed files with 5011519 additions and 0 deletions

View File

@@ -0,0 +1,30 @@
"""This module contains some general purpose utilities that are used across
SymPy.
"""
from .iterables import (flatten, group, take, subsets,
variations, numbered_symbols, cartes, capture, dict_merge,
prefixes, postfixes, sift, topological_sort, unflatten,
has_dups, has_variety, reshape, rotations)
from .misc import filldedent
from .lambdify import lambdify
from .decorator import threaded, xthreaded, public, memoize_property
from .timeutils import timed
__all__ = [
'flatten', 'group', 'take', 'subsets', 'variations', 'numbered_symbols',
'cartes', 'capture', 'dict_merge', 'prefixes', 'postfixes', 'sift',
'topological_sort', 'unflatten', 'has_dups', 'has_variety', 'reshape',
'rotations',
'filldedent',
'lambdify',
'threaded', 'xthreaded', 'public', 'memoize_property',
'timed',
]

View File

@@ -0,0 +1,22 @@
""" This sub-module is private, i.e. external code should not depend on it.
These functions are used by tests run as part of continuous integration.
Once the implementation is mature (it should support the major
platforms: Windows, OS X & Linux) it may become an official API which
downstream libraries may rely upon. Until then the API may break
without prior notice.
TODO:
- (optionally) clean up after tempfile.mkdtemp()
- cross-platform testing
- caching of compiler choice and intermediate files
"""
from .compilation import compile_link_import_strings, compile_run_strings
from .availability import has_fortran, has_c, has_cxx
__all__ = [
'compile_link_import_strings', 'compile_run_strings',
'has_fortran', 'has_c', 'has_cxx',
]
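A minimal usage sketch of this API (assuming a C compiler is available on the PATH), mirroring how ``has_c`` in availability.py below exercises it:

from sympy.utilities._compilation import compile_run_strings, has_c

if has_c():
    # Build and run a hello-world program in a temporary directory, then clean up.
    (stdout, stderr), info = compile_run_strings(
        [('hello.c', '#include <stdio.h>\nint main(){puts("hello"); return 0;}\n')],
        clean=True,
    )
    assert 'hello' in stdout and info['exit_status'] == 0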

View File

@@ -0,0 +1,77 @@
import os
from .compilation import compile_run_strings
from .util import CompilerNotFoundError
def has_fortran():
if not hasattr(has_fortran, 'result'):
try:
(stdout, stderr), info = compile_run_strings(
[('main.f90', (
'program foo\n'
'print *, "hello world"\n'
'end program'
))], clean=True
)
except CompilerNotFoundError:
has_fortran.result = False
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise
else:
if info['exit_status'] != os.EX_OK or 'hello world' not in stdout:
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise ValueError("Failed to compile test program:\n%s\n%s\n" % (stdout, stderr))
has_fortran.result = False
else:
has_fortran.result = True
return has_fortran.result
def has_c():
if not hasattr(has_c, 'result'):
try:
(stdout, stderr), info = compile_run_strings(
[('main.c', (
'#include <stdio.h>\n'
'int main(){\n'
'printf("hello world\\n");\n'
'return 0;\n'
'}'
))], clean=True
)
except CompilerNotFoundError:
has_c.result = False
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise
else:
if info['exit_status'] != os.EX_OK or 'hello world' not in stdout:
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise ValueError("Failed to compile test program:\n%s\n%s\n" % (stdout, stderr))
has_c.result = False
else:
has_c.result = True
return has_c.result
def has_cxx():
if not hasattr(has_cxx, 'result'):
try:
(stdout, stderr), info = compile_run_strings(
[('main.cxx', (
'#include <iostream>\n'
'int main(){\n'
'std::cout << "hello world" << std::endl;\n'
'}'
))], clean=True
)
except CompilerNotFoundError:
has_cxx.result = False
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise
else:
if info['exit_status'] != os.EX_OK or 'hello world' not in stdout:
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise ValueError("Failed to compile test program:\n%s\n%s\n" % (stdout, stderr))
has_cxx.result = False
else:
has_cxx.result = True
return has_cxx.result
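Each of these checks memoizes its outcome as an attribute on the function object itself, so only the first call actually compiles a test program. A small sketch of how a caller might use and, if needed, reset that cache (the attribute name is taken from the code above):

from sympy.utilities._compilation.availability import has_fortran

if not has_fortran():        # first call builds and runs a tiny test program
    print('skipping Fortran-dependent work')

has_fortran()                # later calls just return has_fortran.result

# Force a fresh probe, e.g. after installing a compiler in the same session:
if hasattr(has_fortran, 'result'):
    del has_fortran.result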

View File

@@ -0,0 +1,657 @@
import glob
import os
import shutil
import subprocess
import sys
import tempfile
import warnings
from sysconfig import get_config_var, get_config_vars, get_path
from .runners import (
CCompilerRunner,
CppCompilerRunner,
FortranCompilerRunner
)
from .util import (
get_abspath, make_dirs, copy, Glob, ArbitraryDepthGlob,
glob_at_depth, import_module_from_file, pyx_is_cplus,
sha256_of_string, sha256_of_file, CompileError
)
if os.name == 'posix':
objext = '.o'
elif os.name == 'nt':
objext = '.obj'
else:
warnings.warn("Unknown os.name: {}".format(os.name))
objext = '.o'
def compile_sources(files, Runner=None, destdir=None, cwd=None, keep_dir_struct=False,
per_file_kwargs=None, **kwargs):
""" Compile source code files to object files.
Parameters
==========
files : iterable of str
Paths to source files; if ``cwd`` is given, the paths are taken as relative.
Runner: CompilerRunner subclass (optional)
Could be e.g. ``FortranCompilerRunner``. Will be inferred from filename
extensions if missing.
destdir: str
Output directory; if ``cwd`` is given, the path is taken as relative.
cwd: str
Working directory. Specify to have the compiler run in another directory;
also used as the root of relative paths.
keep_dir_struct: bool
Reproduce directory structure in `destdir`. default: ``False``
per_file_kwargs: dict
Dict mapping instances in ``files`` to keyword arguments.
\\*\\*kwargs: dict
Default keyword arguments to pass to ``Runner``.
Returns
=======
List of strings (paths of object files).
"""
_per_file_kwargs = {}
if per_file_kwargs is not None:
for k, v in per_file_kwargs.items():
if isinstance(k, Glob):
for path in glob.glob(k.pathname):
_per_file_kwargs[path] = v
elif isinstance(k, ArbitraryDepthGlob):
for path in glob_at_depth(k.filename, cwd):
_per_file_kwargs[path] = v
else:
_per_file_kwargs[k] = v
# Set up destination directory
destdir = destdir or '.'
if not os.path.isdir(destdir):
if os.path.exists(destdir):
raise OSError("{} is not a directory".format(destdir))
else:
make_dirs(destdir)
if cwd is None:
cwd = '.'
for f in files:
copy(f, destdir, only_update=True, dest_is_dir=True)
# Compile files and return list of paths to the objects
dstpaths = []
for f in files:
if keep_dir_struct:
name, ext = os.path.splitext(f)
else:
name, ext = os.path.splitext(os.path.basename(f))
file_kwargs = kwargs.copy()
file_kwargs.update(_per_file_kwargs.get(f, {}))
dstpaths.append(src2obj(f, Runner, cwd=cwd, **file_kwargs))
return dstpaths
def get_mixed_fort_c_linker(vendor=None, cplus=False, cwd=None):
vendor = vendor or os.environ.get('SYMPY_COMPILER_VENDOR', 'gnu')
if vendor.lower() == 'intel':
if cplus:
return (FortranCompilerRunner,
{'flags': ['-nofor_main', '-cxxlib']}, vendor)
else:
return (FortranCompilerRunner,
{'flags': ['-nofor_main']}, vendor)
elif vendor.lower() in ('gnu', 'llvm'):
if cplus:
return (CppCompilerRunner,
{'lib_options': ['fortran']}, vendor)
else:
return (FortranCompilerRunner,
{}, vendor)
else:
raise ValueError("No vendor found.")
def link(obj_files, out_file=None, shared=False, Runner=None,
cwd=None, cplus=False, fort=False, extra_objs=None, **kwargs):
""" Link object files.
Parameters
==========
obj_files: iterable of str
Paths to object files.
out_file: str (optional)
Path to executable/shared library, if ``None`` it will be
deduced from the last item in obj_files.
shared: bool
Generate a shared library?
Runner: CompilerRunner subclass (optional)
If not given the ``cplus`` and ``fort`` flags will be inspected
(fallback is the C compiler).
cwd: str
Path to the root of relative paths and working directory for compiler.
cplus: bool
C++ objects? default: ``False``.
fort: bool
Fortran objects? default: ``False``.
extra_objs: list
List of paths to extra object files / static libraries.
\\*\\*kwargs: dict
Keyword arguments passed to ``Runner``.
Returns
=======
The absolute path to the generated shared object / executable.
"""
if out_file is None:
out_file, ext = os.path.splitext(os.path.basename(obj_files[-1]))
if shared:
out_file += get_config_var('EXT_SUFFIX')
if not Runner:
if fort:
Runner, extra_kwargs, vendor = \
get_mixed_fort_c_linker(
vendor=kwargs.get('vendor', None),
cplus=cplus,
cwd=cwd,
)
for k, v in extra_kwargs.items():
if k in kwargs:
kwargs[k].extend(v)
else:
kwargs[k] = v
else:
if cplus:
Runner = CppCompilerRunner
else:
Runner = CCompilerRunner
flags = kwargs.pop('flags', [])
if shared:
if '-shared' not in flags:
flags.append('-shared')
run_linker = kwargs.pop('run_linker', True)
if not run_linker:
raise ValueError("run_linker was set to False (nonsensical).")
out_file = get_abspath(out_file, cwd=cwd)
runner = Runner(obj_files+(extra_objs or []), out_file, flags, cwd=cwd, **kwargs)
runner.run()
return out_file
def link_py_so(obj_files, so_file=None, cwd=None, libraries=None,
cplus=False, fort=False, extra_objs=None, **kwargs):
""" Link Python extension module (shared object) for importing
Parameters
==========
obj_files: iterable of str
Paths to object files to be linked.
so_file: str
Name (path) of shared object file to create. If not specified it will
have the basename of the last object file in ``obj_files`` but with the
extension '.so' (Unix).
cwd: path string
Root of relative paths and working directory of linker.
libraries: iterable of strings
Libraries to link against, e.g. ['m'].
cplus: bool
Any C++ objects? default: ``False``.
fort: bool
Any Fortran objects? default: ``False``.
extra_objs: list
List of paths of extra object files / static libraries to link against.
\*\*kwargs: dict
Keyword arguments passed to ``link(...)``.
Returns
=======
Absolute path to the generated shared object.
"""
libraries = libraries or []
include_dirs = kwargs.pop('include_dirs', [])
library_dirs = kwargs.pop('library_dirs', [])
# Add Python include and library directories
# PY_LDFLAGS is not available on all Python implementations
# (e.g. PyPy), so it's LDFLAGS we need to use
if sys.platform == "win32":
warnings.warn("Windows not yet supported.")
elif sys.platform == 'darwin':
cfgDict = get_config_vars()
kwargs['linkline'] = kwargs.get('linkline', []) + [cfgDict['LDFLAGS']]
library_dirs += [cfgDict['LIBDIR']]
# On macOS, the linker needs frameworks passed explicitly
# e.g. "-framework CoreFoundation"
is_framework = False
for opt in cfgDict['LIBS'].split():
if is_framework:
kwargs['linkline'] = kwargs.get('linkline', []) + ['-framework', opt]
is_framework = False
elif opt.startswith('-l'):
libraries.append(opt[2:])
elif opt.startswith('-framework'):
is_framework = True
# The python library is not included in LIBS
libfile = cfgDict['LIBRARY']
libname = ".".join(libfile.split('.')[:-1])[3:]
libraries.append(libname)
elif sys.platform[:3] == 'aix':
# Don't use the default code below
pass
else:
if get_config_var('Py_ENABLE_SHARED'):
cfgDict = get_config_vars()
kwargs['linkline'] = kwargs.get('linkline', []) + [cfgDict['LDFLAGS']]
library_dirs += [cfgDict['LIBDIR']]
for opt in cfgDict['BLDLIBRARY'].split():
if opt.startswith('-l'):
libraries += [opt[2:]]
else:
pass
flags = kwargs.pop('flags', [])
needed_flags = ('-pthread',)
for flag in needed_flags:
if flag not in flags:
flags.append(flag)
return link(obj_files, shared=True, flags=flags, cwd=cwd, cplus=cplus, fort=fort,
include_dirs=include_dirs, libraries=libraries,
library_dirs=library_dirs, extra_objs=extra_objs, **kwargs)
def simple_cythonize(src, destdir=None, cwd=None, **cy_kwargs):
""" Generates a C file from a Cython source file.
Parameters
==========
src: str
Path to Cython source.
destdir: str (optional)
Path to output directory (default: '.').
cwd: path string (optional)
Root of relative paths (default: '.').
**cy_kwargs:
Second argument passed to cy_compile. Generates a .cpp file if ``cplus=True`` in ``cy_kwargs``,
else a .c file.
"""
from Cython.Compiler.Main import (
default_options, CompilationOptions
)
from Cython.Compiler.Main import compile as cy_compile
assert src.lower().endswith('.pyx') or src.lower().endswith('.py')
cwd = cwd or '.'
destdir = destdir or '.'
ext = '.cpp' if cy_kwargs.get('cplus', False) else '.c'
c_name = os.path.splitext(os.path.basename(src))[0] + ext
dstfile = os.path.join(destdir, c_name)
if cwd:
ori_dir = os.getcwd()
else:
ori_dir = '.'
os.chdir(cwd)
try:
cy_options = CompilationOptions(default_options)
cy_options.__dict__.update(cy_kwargs)
# Set language_level if not set by cy_kwargs
# as not setting it is deprecated
if 'language_level' not in cy_kwargs:
cy_options.__dict__['language_level'] = 3
cy_result = cy_compile([src], cy_options)
if cy_result.num_errors > 0:
raise ValueError("Cython compilation failed.")
# Move generated C file to destination
# On macOS, the generated C file is in the same directory as the source,
# but /var is a symlink to /private/var, so we need to use realpath
if os.path.realpath(os.path.dirname(src)) != os.path.realpath(destdir):
if os.path.exists(dstfile):
os.unlink(dstfile)
shutil.move(os.path.join(os.path.dirname(src), c_name), destdir)
finally:
os.chdir(ori_dir)
return dstfile
extension_mapping = {
'.c': (CCompilerRunner, None),
'.cpp': (CppCompilerRunner, None),
'.cxx': (CppCompilerRunner, None),
'.f': (FortranCompilerRunner, None),
'.for': (FortranCompilerRunner, None),
'.ftn': (FortranCompilerRunner, None),
'.f90': (FortranCompilerRunner, None), # ifort only knows about .f90
'.f95': (FortranCompilerRunner, 'f95'),
'.f03': (FortranCompilerRunner, 'f2003'),
'.f08': (FortranCompilerRunner, 'f2008'),
}
def src2obj(srcpath, Runner=None, objpath=None, cwd=None, inc_py=False, **kwargs):
""" Compiles a source code file to an object file.
Files ending with '.pyx' are assumed to be Cython files and
are dispatched to pyx2obj.
Parameters
==========
srcpath: str
Path to source file.
Runner: CompilerRunner subclass (optional)
If ``None``: deduced from extension of srcpath.
objpath : str (optional)
Path to generated object. If ``None``: deduced from ``srcpath``.
cwd: str (optional)
Working directory and root of relative paths. If ``None``: current dir.
inc_py: bool
Add Python include path to kwarg "include_dirs". Default: False
\\*\\*kwargs: dict
keyword arguments passed to Runner or pyx2obj
"""
name, ext = os.path.splitext(os.path.basename(srcpath))
if objpath is None:
if os.path.isabs(srcpath):
objpath = '.'
else:
objpath = os.path.dirname(srcpath)
objpath = objpath or '.' # avoid objpath == ''
if os.path.isdir(objpath):
objpath = os.path.join(objpath, name + objext)
include_dirs = kwargs.pop('include_dirs', [])
if inc_py:
py_inc_dir = get_path('include')
if py_inc_dir not in include_dirs:
include_dirs.append(py_inc_dir)
if ext.lower() == '.pyx':
return pyx2obj(srcpath, objpath=objpath, include_dirs=include_dirs, cwd=cwd,
**kwargs)
if Runner is None:
Runner, std = extension_mapping[ext.lower()]
if 'std' not in kwargs:
kwargs['std'] = std
flags = kwargs.pop('flags', [])
needed_flags = ('-fPIC',)
for flag in needed_flags:
if flag not in flags:
flags.append(flag)
# src2obj implies not running the linker...
run_linker = kwargs.pop('run_linker', False)
if run_linker:
raise CompileError("src2obj called with run_linker=True")
runner = Runner([srcpath], objpath, include_dirs=include_dirs,
run_linker=run_linker, cwd=cwd, flags=flags, **kwargs)
runner.run()
return objpath
def pyx2obj(pyxpath, objpath=None, destdir=None, cwd=None,
include_dirs=None, cy_kwargs=None, cplus=None, **kwargs):
"""
Convenience function for compiling a Cython source file to an object file.
If ``cwd`` is specified, ``pyxpath`` and ``objpath`` are taken to be relative.
If ``only_update`` is set to ``True`` the modification time is checked
and compilation is only run if the source is newer than the
destination.
Parameters
==========
pyxpath: str
Path to Cython source file.
objpath: str (optional)
Path to object file to generate.
destdir: str (optional)
Directory to put generated C file. When ``None``: directory of ``objpath``.
cwd: str (optional)
Working directory and root of relative paths.
include_dirs: iterable of path strings (optional)
Passed onto src2obj and via cy_kwargs['include_path']
to simple_cythonize.
cy_kwargs: dict (optional)
Keyword arguments passed onto `simple_cythonize`
cplus: bool (optional)
Indicate whether C++ is used. default: auto-detect using ``.util.pyx_is_cplus``.
compile_kwargs: dict
keyword arguments passed onto src2obj
Returns
=======
Absolute path of generated object file.
"""
assert pyxpath.endswith('.pyx')
cwd = cwd or '.'
objpath = objpath or '.'
destdir = destdir or os.path.dirname(objpath)
abs_objpath = get_abspath(objpath, cwd=cwd)
if os.path.isdir(abs_objpath):
pyx_fname = os.path.basename(pyxpath)
name, ext = os.path.splitext(pyx_fname)
objpath = os.path.join(objpath, name + objext)
cy_kwargs = cy_kwargs or {}
cy_kwargs['output_dir'] = cwd
if cplus is None:
cplus = pyx_is_cplus(pyxpath)
cy_kwargs['cplus'] = cplus
interm_c_file = simple_cythonize(pyxpath, destdir=destdir, cwd=cwd, **cy_kwargs)
include_dirs = include_dirs or []
flags = kwargs.pop('flags', [])
needed_flags = ('-fwrapv', '-pthread', '-fPIC')
for flag in needed_flags:
if flag not in flags:
flags.append(flag)
options = kwargs.pop('options', [])
if kwargs.pop('strict_aliasing', False):
raise CompileError("Cython requires strict aliasing to be disabled.")
# Let's be explicit about standard
if cplus:
std = kwargs.pop('std', 'c++98')
else:
std = kwargs.pop('std', 'c99')
return src2obj(interm_c_file, objpath=objpath, cwd=cwd,
include_dirs=include_dirs, flags=flags, std=std,
options=options, inc_py=True, strict_aliasing=False,
**kwargs)
def _any_X(srcs, cls):
for src in srcs:
name, ext = os.path.splitext(src)
key = ext.lower()
if key in extension_mapping:
if extension_mapping[key][0] == cls:
return True
return False
def any_fortran_src(srcs):
return _any_X(srcs, FortranCompilerRunner)
def any_cplus_src(srcs):
return _any_X(srcs, CppCompilerRunner)
def compile_link_import_py_ext(sources, extname=None, build_dir='.', compile_kwargs=None,
link_kwargs=None, extra_objs=None):
""" Compiles sources to a shared object (Python extension) and imports it
Sources in ``sources`` which is imported. If shared object is newer than the sources, they
are not recompiled but instead it is imported.
Parameters
==========
sources : list of strings
List of paths to sources.
extname : string
Name of extension (default: ``None``).
If ``None``: taken from the last file in ``sources`` without extension.
build_dir: str
Path to directory in which objects files etc. are generated.
compile_kwargs: dict
keyword arguments passed to ``compile_sources``
link_kwargs: dict
keyword arguments passed to ``link_py_so``
extra_objs: list
List of paths to (prebuilt) object files / static libraries to link against.
Returns
=======
The imported module of the Python extension.
"""
if extname is None:
extname = os.path.splitext(os.path.basename(sources[-1]))[0]
compile_kwargs = compile_kwargs or {}
link_kwargs = link_kwargs or {}
try:
mod = import_module_from_file(os.path.join(build_dir, extname), sources)
except ImportError:
objs = compile_sources(list(map(get_abspath, sources)), destdir=build_dir,
cwd=build_dir, **compile_kwargs)
so = link_py_so(objs, cwd=build_dir, fort=any_fortran_src(sources),
cplus=any_cplus_src(sources), extra_objs=extra_objs, **link_kwargs)
mod = import_module_from_file(so)
return mod
def _write_sources_to_build_dir(sources, build_dir):
build_dir = build_dir or tempfile.mkdtemp()
if not os.path.isdir(build_dir):
raise OSError("Non-existent directory: ", build_dir)
source_files = []
for name, src in sources:
dest = os.path.join(build_dir, name)
differs = True
sha256_in_mem = sha256_of_string(src.encode('utf-8')).hexdigest()
if os.path.exists(dest):
if os.path.exists(dest + '.sha256'):
with open(dest + '.sha256') as fh:
sha256_on_disk = fh.read()
else:
sha256_on_disk = sha256_of_file(dest).hexdigest()
differs = sha256_on_disk != sha256_in_mem
if differs:
with open(dest, 'wt') as fh:
fh.write(src)
with open(dest + '.sha256', 'wt') as fh:
fh.write(sha256_in_mem)
source_files.append(dest)
return source_files, build_dir
def compile_link_import_strings(sources, build_dir=None, **kwargs):
""" Compiles, links and imports extension module from source.
Parameters
==========
sources : iterable of name/source pair tuples
build_dir : string (default: None)
Path. ``None`` implies use a temporary directory.
**kwargs:
Keyword arguments passed onto `compile_link_import_py_ext`.
Returns
=======
mod : module
The compiled and imported extension module.
info : dict
Containing ``build_dir`` as 'build_dir'.
"""
source_files, build_dir = _write_sources_to_build_dir(sources, build_dir)
mod = compile_link_import_py_ext(source_files, build_dir=build_dir, **kwargs)
info = {"build_dir": build_dir}
return mod, info
def compile_run_strings(sources, build_dir=None, clean=False, compile_kwargs=None, link_kwargs=None):
""" Compiles, links and runs a program built from sources.
Parameters
==========
sources : iterable of name/source pair tuples
build_dir : string (default: None)
Path. ``None`` implies use a temporary directory.
clean : bool
Whether to remove build_dir after use. This will only have an
effect if ``build_dir`` is ``None`` (which creates a temporary directory).
Passing ``clean == True`` and ``build_dir != None`` raises a ``ValueError``.
This will also set ``build_dir`` in returned info dictionary to ``None``.
compile_kwargs: dict
Keyword arguments passed onto ``compile_sources``
link_kwargs: dict
Keyword arguments passed onto ``link``
Returns
=======
(stdout, stderr): pair of strings
info: dict
Containing exit status as 'exit_status' and ``build_dir`` as 'build_dir'
"""
if clean and build_dir is not None:
raise ValueError("Automatic removal of build_dir is only available for temporary directory.")
try:
source_files, build_dir = _write_sources_to_build_dir(sources, build_dir)
objs = compile_sources(list(map(get_abspath, source_files)), destdir=build_dir,
cwd=build_dir, **(compile_kwargs or {}))
prog = link(objs, cwd=build_dir,
fort=any_fortran_src(source_files),
cplus=any_cplus_src(source_files), **(link_kwargs or {}))
p = subprocess.Popen([prog], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
exit_status = p.wait()
stdout, stderr = [txt.decode('utf-8') for txt in p.communicate()]
finally:
if clean and os.path.isdir(build_dir):
shutil.rmtree(build_dir)
build_dir = None
info = {"exit_status": exit_status, "build_dir": build_dir}
return (stdout, stderr), info
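For reference, a rough sketch of the lower-level pipeline that ``compile_run_strings`` wires together, assuming a C compiler and a POSIX toolchain (the file name and its contents are made up for illustration):

import os
import subprocess
import tempfile
from sympy.utilities._compilation.compilation import compile_sources, link

build_dir = tempfile.mkdtemp()
src = os.path.join(build_dir, 'main.c')
with open(src, 'wt') as fh:
    fh.write('#include <stdio.h>\nint main(){puts("hi"); return 0;}\n')

objs = compile_sources([src], destdir=build_dir, cwd=build_dir, std='c99')
prog = link(objs, cwd=build_dir)        # executable named after the last object file
print(subprocess.check_output([prog]))  # expected to print b'hi\n'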

View File

@@ -0,0 +1,301 @@
from __future__ import annotations
from typing import Callable, Optional
from collections import OrderedDict
import os
import re
import subprocess
import warnings
from .util import (
find_binary_of_command, unique_list, CompileError
)
class CompilerRunner:
""" CompilerRunner base class.
Parameters
==========
sources : list of str
Paths to sources.
out : str
flags : iterable of str
Compiler flags.
run_linker : bool
compiler_name_exe : (str, str) tuple
Tuple of compiler name & command to call.
cwd : str
Path of root of relative paths.
include_dirs : list of str
Include directories.
libraries : list of str
Libraries to link against.
library_dirs : list of str
Paths to search for shared libraries.
std : str
Standard string, e.g. ``'c++11'``, ``'c99'``, ``'f2003'``.
define: iterable of strings
macros to define
undef : iterable of strings
macros to undefine
preferred_vendor : string
name of preferred vendor e.g. 'gnu' or 'intel'
Methods
=======
run():
Invoke compilation as a subprocess.
"""
environ_key_compiler: str # e.g. 'CC', 'CXX', ...
environ_key_flags: str # e.g. 'CFLAGS', 'CXXFLAGS', ...
environ_key_ldflags: str = "LDFLAGS" # typically 'LDFLAGS'
# Subclass to vendor/binary dict
compiler_dict: dict[str, str]
# Standards should be a tuple of supported standards
# (first one will be the default)
standards: tuple[None | str, ...]
# Subclass to dict of binary/formatter-callback
std_formater: dict[str, Callable[[Optional[str]], str]]
# subclass to be e.g. {'gcc': 'gnu', ...}
compiler_name_vendor_mapping: dict[str, str]
def __init__(self, sources, out, flags=None, run_linker=True, compiler=None, cwd='.',
include_dirs=None, libraries=None, library_dirs=None, std=None, define=None,
undef=None, strict_aliasing=None, preferred_vendor=None, linkline=None, **kwargs):
if isinstance(sources, str):
raise ValueError("Expected argument sources to be a list of strings.")
self.sources = list(sources)
self.out = out
self.flags = flags or []
if os.environ.get(self.environ_key_flags):
self.flags += os.environ[self.environ_key_flags].split()
self.cwd = cwd
if compiler:
self.compiler_name, self.compiler_binary = compiler
elif os.environ.get(self.environ_key_compiler):
self.compiler_binary = os.environ[self.environ_key_compiler]
for k, v in self.compiler_dict.items():
if k in self.compiler_binary:
self.compiler_vendor = k
self.compiler_name = v
break
else:
self.compiler_vendor, self.compiler_name = list(self.compiler_dict.items())[0]
warnings.warn("failed to determine what kind of compiler %s is, assuming %s" %
(self.compiler_binary, self.compiler_name))
else:
# Find a compiler
if preferred_vendor is None:
preferred_vendor = os.environ.get('SYMPY_COMPILER_VENDOR', None)
self.compiler_name, self.compiler_binary, self.compiler_vendor = self.find_compiler(preferred_vendor)
if self.compiler_binary is None:
raise ValueError("No compiler found (searched: {})".format(', '.join(self.compiler_dict.values())))
self.define = define or []
self.undef = undef or []
self.include_dirs = include_dirs or []
self.libraries = libraries or []
self.library_dirs = library_dirs or []
self.std = std or self.standards[0]
self.run_linker = run_linker
if self.run_linker:
# both gnu and intel compilers use '-c' for disabling linker
self.flags = list(filter(lambda x: x != '-c', self.flags))
else:
if '-c' not in self.flags:
self.flags.append('-c')
if self.std:
self.flags.append(self.std_formater[
self.compiler_name](self.std))
self.linkline = (linkline or []) + [lf for lf in map(
str.strip, os.environ.get(self.environ_key_ldflags, "").split()
) if lf != ""]
if strict_aliasing is not None:
nsa_re = re.compile("no-strict-aliasing$")
sa_re = re.compile("strict-aliasing$")
if strict_aliasing is True:
if any(map(nsa_re.match, flags)):
raise CompileError("Strict aliasing cannot be both enforced and disabled")
elif any(map(sa_re.match, flags)):
pass # already enforced
else:
flags.append('-fstrict-aliasing')
elif strict_aliasing is False:
if any(map(nsa_re.match, flags)):
pass # already disabled
else:
if any(map(sa_re.match, flags)):
raise CompileError("Strict aliasing cannot be both enforced and disabled")
else:
flags.append('-fno-strict-aliasing')
else:
msg = "Expected argument strict_aliasing to be True/False, got {}"
raise ValueError(msg.format(strict_aliasing))
@classmethod
def find_compiler(cls, preferred_vendor=None):
""" Identify a suitable C/fortran/other compiler. """
candidates = list(cls.compiler_dict.keys())
if preferred_vendor:
if preferred_vendor in candidates:
candidates = [preferred_vendor]+candidates
else:
raise ValueError("Unknown vendor {}".format(preferred_vendor))
name, path = find_binary_of_command([cls.compiler_dict[x] for x in candidates])
return name, path, cls.compiler_name_vendor_mapping[name]
def cmd(self):
""" List of arguments (str) to be passed to e.g. ``subprocess.Popen``. """
cmd = (
[self.compiler_binary] +
self.flags +
['-U'+x for x in self.undef] +
['-D'+x for x in self.define] +
['-I'+x for x in self.include_dirs] +
self.sources
)
if self.run_linker:
cmd += (['-L'+x for x in self.library_dirs] +
['-l'+x for x in self.libraries] +
self.linkline)
counted = []
for envvar in re.findall(r'\$\{(\w+)\}', ' '.join(cmd)):
if os.getenv(envvar) is None:
if envvar not in counted:
counted.append(envvar)
msg = "Environment variable '{}' undefined.".format(envvar)
raise CompileError(msg)
return cmd
def run(self):
self.flags = unique_list(self.flags)
# Append output flag and name to tail of flags
self.flags.extend(['-o', self.out])
env = os.environ.copy()
env['PWD'] = self.cwd
# NOTE: intel compilers seem to need shell=True
p = subprocess.Popen(' '.join(self.cmd()),
shell=True,
cwd=self.cwd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=env)
comm = p.communicate()
try:
self.cmd_outerr = comm[0].decode('utf-8')
except UnicodeDecodeError:
self.cmd_outerr = comm[0].decode('iso-8859-1') # win32
self.cmd_returncode = p.returncode
# Error handling
if self.cmd_returncode != 0:
msg = "Error executing '{}' in {} (exited status {}):\n {}\n".format(
' '.join(self.cmd()), self.cwd, str(self.cmd_returncode), self.cmd_outerr
)
raise CompileError(msg)
return self.cmd_outerr, self.cmd_returncode
class CCompilerRunner(CompilerRunner):
environ_key_compiler = 'CC'
environ_key_flags = 'CFLAGS'
compiler_dict = OrderedDict([
('gnu', 'gcc'),
('intel', 'icc'),
('llvm', 'clang'),
])
standards = ('c89', 'c90', 'c99', 'c11') # First is default
std_formater = {
'gcc': '-std={}'.format,
'icc': '-std={}'.format,
'clang': '-std={}'.format,
}
compiler_name_vendor_mapping = {
'gcc': 'gnu',
'icc': 'intel',
'clang': 'llvm'
}
def _mk_flag_filter(cmplr_name): # helper for class initialization
not_welcome = {'g++': ("Wimplicit-interface",)} # "Wstrict-prototypes",)}
if cmplr_name in not_welcome:
def fltr(x):
for nw in not_welcome[cmplr_name]:
if nw in x:
return False
return True
else:
def fltr(x):
return True
return fltr
class CppCompilerRunner(CompilerRunner):
environ_key_compiler = 'CXX'
environ_key_flags = 'CXXFLAGS'
compiler_dict = OrderedDict([
('gnu', 'g++'),
('intel', 'icpc'),
('llvm', 'clang++'),
])
# First is the default, c++0x == c++11
standards = ('c++98', 'c++0x')
std_formater = {
'g++': '-std={}'.format,
'icpc': '-std={}'.format,
'clang++': '-std={}'.format,
}
compiler_name_vendor_mapping = {
'g++': 'gnu',
'icpc': 'intel',
'clang++': 'llvm'
}
class FortranCompilerRunner(CompilerRunner):
environ_key_compiler = 'FC'
environ_key_flags = 'FFLAGS'
standards = (None, 'f77', 'f95', 'f2003', 'f2008')
std_formater = {
'gfortran': lambda x: '-std=gnu' if x is None else '-std=legacy' if x == 'f77' else '-std={}'.format(x),
'ifort': lambda x: '-stand f08' if x is None else '-stand f{}'.format(x[-2:]), # f2008 => f08
}
compiler_dict = OrderedDict([
('gnu', 'gfortran'),
('intel', 'ifort'),
])
compiler_name_vendor_mapping = {
'gfortran': 'gnu',
'ifort': 'intel',
}
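A rough illustration of how a runner assembles its command line; this assumes one of the compilers listed in ``compiler_dict`` (e.g. gcc) is installed, and the exact flags depend on the compiler found and on any CFLAGS/LDFLAGS in the environment:

from sympy.utilities._compilation.runners import CCompilerRunner

runner = CCompilerRunner(['foo.c'], 'foo.o', run_linker=False,
                         std='c99', include_dirs=['include'], define=['NDEBUG'])
print(runner.cmd())
# e.g. ['/usr/bin/gcc', '-c', '-std=c99', '-DNDEBUG', '-Iinclude', 'foo.c']
# (run() additionally appends '-o foo.o' before invoking the compiler)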

View File

@@ -0,0 +1,101 @@
import shutil
import os
import subprocess
import tempfile
from sympy.external import import_module
from sympy.testing.pytest import skip
from sympy.utilities._compilation.compilation import compile_link_import_py_ext, compile_link_import_strings, compile_sources, get_abspath
numpy = import_module('numpy')
cython = import_module('cython')
_sources1 = [
('sigmoid.c', r"""
#include <math.h>
void sigmoid(int n, const double * const restrict in,
double * const restrict out, double lim){
for (int i=0; i<n; ++i){
const double x = in[i];
out[i] = x*pow(pow(x/lim, 8)+1, -1./8.);
}
}
"""),
('_sigmoid.pyx', r"""
import numpy as np
cimport numpy as cnp
cdef extern void c_sigmoid "sigmoid" (int, const double * const,
double * const, double)
def sigmoid(double [:] inp, double lim=350.0):
cdef cnp.ndarray[cnp.float64_t, ndim=1] out = np.empty(
inp.size, dtype=np.float64)
c_sigmoid(inp.size, &inp[0], &out[0], lim)
return out
""")
]
def npy(data, lim=350.0):
return data/((data/lim)**8+1)**(1/8.)
def test_compile_link_import_strings():
if not numpy:
skip("numpy not installed.")
if not cython:
skip("cython not installed.")
from sympy.utilities._compilation import has_c
if not has_c():
skip("No C compiler found.")
compile_kw = {"std": 'c99', "include_dirs": [numpy.get_include()]}
info = None
try:
mod, info = compile_link_import_strings(_sources1, compile_kwargs=compile_kw)
data = numpy.random.random(1024*1024*8) # 64 MB of RAM needed..
res_mod = mod.sigmoid(data)
res_npy = npy(data)
assert numpy.allclose(res_mod, res_npy)
finally:
if info and info['build_dir']:
shutil.rmtree(info['build_dir'])
def test_compile_sources(tmpdir):
from sympy.utilities._compilation import has_c
if not has_c():
skip("No C compiler found.")
build_dir = str(tmpdir)
_handle, file_path = tempfile.mkstemp('.c', dir=build_dir)
with open(file_path, 'wt') as ofh:
ofh.write("""
int foo(int bar) {
return 2*bar;
}
""")
obj, = compile_sources([file_path], cwd=build_dir)
obj_path = get_abspath(obj, cwd=build_dir)
assert os.path.exists(obj_path)
try:
_ = subprocess.check_output(["nm", "--help"])
except (subprocess.CalledProcessError, FileNotFoundError):
pass # we cannot test contents of object file
else:
nm_out = subprocess.check_output(["nm", obj_path])
assert 'foo' in nm_out.decode('utf-8')
if not cython:
return # the final (optional) part of the test below requires Cython.
_handle, pyx_path = tempfile.mkstemp('.pyx', dir=build_dir)
with open(pyx_path, 'wt') as ofh:
ofh.write(("cdef extern int foo(int)\n"
"def _foo(arg):\n"
" return foo(arg)"))
mod = compile_link_import_py_ext([pyx_path], extra_objs=[obj_path], build_dir=build_dir)
assert mod._foo(21) == 42

View File

@@ -0,0 +1,312 @@
from collections import namedtuple
from hashlib import sha256
import os
import shutil
import sys
import fnmatch
from sympy.testing.pytest import XFAIL
def may_xfail(func):
if sys.platform.lower() == 'darwin' or os.name == 'nt':
# sympy.utilities._compilation needs more testing on Windows and macOS
# once those two platforms are reliably supported this xfail decorator
# may be removed.
return XFAIL(func)
else:
return func
class CompilerNotFoundError(FileNotFoundError):
pass
class CompileError(Exception):
"""Failure to compile one or more C/C++ source files."""
def get_abspath(path, cwd='.'):
""" Returns the absolute path.
Parameters
==========
path : str
(relative) path.
cwd : str
Path to root of relative path.
"""
if os.path.isabs(path):
return path
else:
if not os.path.isabs(cwd):
cwd = os.path.abspath(cwd)
return os.path.abspath(
os.path.join(cwd, path)
)
def make_dirs(path):
""" Create directories (equivalent of ``mkdir -p``). """
if path[-1] == '/':
parent = os.path.dirname(path[:-1])
else:
parent = os.path.dirname(path)
if len(parent) > 0:
if not os.path.exists(parent):
make_dirs(parent)
if not os.path.exists(path):
os.mkdir(path, 0o777)
else:
assert os.path.isdir(path)
def missing_or_other_newer(path, other_path, cwd=None):
"""
Investigate if path is non-existent or older than the provided reference
path.
Parameters
==========
path: string
path to the file which might be missing or too old
other_path: string
reference path
cwd: string
working directory (root of relative paths)
Returns
=======
True if path is older or missing.
"""
cwd = cwd or '.'
path = get_abspath(path, cwd=cwd)
other_path = get_abspath(other_path, cwd=cwd)
if not os.path.exists(path):
return True
if os.path.getmtime(other_path) - 1e-6 >= os.path.getmtime(path):
# 1e-6 is needed because http://stackoverflow.com/questions/17086426/
return True
return False
def copy(src, dst, only_update=False, copystat=True, cwd=None,
dest_is_dir=False, create_dest_dirs=False):
""" Variation of ``shutil.copy`` with extra options.
Parameters
==========
src : str
Path to source file.
dst : str
Path to destination.
only_update : bool
Only copy if source is newer than destination
(returns None if it was newer), default: ``False``.
copystat : bool
See ``shutil.copystat``. default: ``True``.
cwd : str
Path to working directory (root of relative paths).
dest_is_dir : bool
Ensures that dst is treated as a directory. default: ``False``
create_dest_dirs : bool
Creates directories if needed.
Returns
=======
Path to the copied file.
"""
if cwd: # Handle working directory
if not os.path.isabs(src):
src = os.path.join(cwd, src)
if not os.path.isabs(dst):
dst = os.path.join(cwd, dst)
if not os.path.exists(src): # Make sure source file exists
raise FileNotFoundError("Source: `{}` does not exist".format(src))
# We accept both (re)naming destination file _or_
# passing a (possible non-existent) destination directory
if dest_is_dir:
if not dst[-1] == '/':
dst = dst+'/'
else:
if os.path.exists(dst) and os.path.isdir(dst):
dest_is_dir = True
if dest_is_dir:
dest_dir = dst
dest_fname = os.path.basename(src)
dst = os.path.join(dest_dir, dest_fname)
else:
dest_dir = os.path.dirname(dst)
if not os.path.exists(dest_dir):
if create_dest_dirs:
make_dirs(dest_dir)
else:
raise FileNotFoundError("You must create directory first.")
if only_update:
if not missing_or_other_newer(dst, src):
return
if os.path.islink(dst):
dst = get_abspath(os.path.realpath(dst), cwd=cwd)
shutil.copy(src, dst)
if copystat:
shutil.copystat(src, dst)
return dst
Glob = namedtuple('Glob', 'pathname')
ArbitraryDepthGlob = namedtuple('ArbitraryDepthGlob', 'filename')
def glob_at_depth(filename_glob, cwd=None):
if cwd is None:
cwd = '.'
globbed = []
for root, dirs, filenames in os.walk(cwd):
for fn in filenames:
# This is not tested:
if fnmatch.fnmatch(fn, filename_glob):
globbed.append(os.path.join(root, fn))
return globbed
def sha256_of_file(path, nblocks=128):
""" Computes the SHA256 hash of a file.
Parameters
==========
path : string
Path to file to compute hash of.
nblocks : int
Number of blocks to read per iteration.
Returns
=======
hashlib sha256 hash object. Use ``.digest()`` or ``.hexdigest()``
on returned object to get binary or hex encoded string.
"""
sh = sha256()
with open(path, 'rb') as f:
for chunk in iter(lambda: f.read(nblocks*sh.block_size), b''):
sh.update(chunk)
return sh
def sha256_of_string(string):
""" Computes the SHA256 hash of a string. """
sh = sha256()
sh.update(string)
return sh
def pyx_is_cplus(path):
"""
Inspect a Cython source file (.pyx) and look for comment line like:
# distutils: language = c++
Returns True if such a line is present in the file, else False.
"""
with open(path) as fh:
for line in fh:
if line.startswith('#') and '=' in line:
splitted = line.split('=')
if len(splitted) != 2:
continue
lhs, rhs = splitted
if lhs.strip().split()[-1].lower() == 'language' and \
rhs.strip().split()[0].lower() == 'c++':
return True
return False
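For instance (a sketch; the file name is made up):

from sympy.utilities._compilation.util import pyx_is_cplus

with open('_mod.pyx', 'wt') as fh:
    fh.write('# distutils: language = c++\n'
             'def f(): return 1\n')
assert pyx_is_cplus('_mod.pyx')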
def import_module_from_file(filename, only_if_newer_than=None):
""" Imports Python extension (from shared object file)
Provide a list of paths in `only_if_newer_than` to check
timestamps of dependencies. An ``ImportError`` is raised
if any is newer.
Word of warning: The OS may cache shared objects, which makes
reimporting the same path of a shared object file very problematic.
It will not detect the new time stamp, nor the new checksum, but will
instead silently use the old module. Use unique names for this reason.
Parameters
==========
filename : str
Path to shared object.
only_if_newer_than : iterable of strings
Paths to dependencies of the shared object.
Raises
======
``ImportError`` if any of the files specified in ``only_if_newer_than`` are newer
than the file given by filename.
"""
path, name = os.path.split(filename)
name, ext = os.path.splitext(name)
name = name.split('.')[0]
if sys.version_info[0] == 2:
from imp import find_module, load_module
fobj, filename, data = find_module(name, [path])
if only_if_newer_than:
for dep in only_if_newer_than:
if os.path.getmtime(filename) < os.path.getmtime(dep):
raise ImportError("{} is newer than {}".format(dep, filename))
mod = load_module(name, fobj, filename, data)
else:
import importlib.util
spec = importlib.util.spec_from_file_location(name, filename)
if spec is None:
raise ImportError("Failed to import: '%s'" % filename)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
return mod
def find_binary_of_command(candidates):
""" Finds binary first matching name among candidates.
Calls ``which`` from shutil for the provided candidates and returns
first hit.
Parameters
==========
candidates : iterable of str
Names of candidate commands
Raises
======
CompilerNotFoundError if no candidates match.
"""
from shutil import which
for c in candidates:
binary_path = which(c)
if c and binary_path:
return c, binary_path
raise CompilerNotFoundError('No binary located for candidates: {}'.format(candidates))
def unique_list(l):
""" Uniquify a list (skip duplicate items). """
result = []
for x in l:
if x not in result:
result.append(x)
return result
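A small sketch of the ``copy`` helper's update-only behaviour (the file and directory names are made up):

from sympy.utilities._compilation.util import copy, missing_or_other_newer

with open('main.c', 'wt') as fh:
    fh.write('int main(){return 0;}\n')

dst = copy('main.c', 'build/', dest_is_dir=True, create_dest_dirs=True)
# With only_update=True nothing is copied (and None is returned) as long
# as the destination is not older than the source.
assert copy('main.c', 'build/', dest_is_dir=True, only_update=True) is None
assert not missing_or_other_newer(dst, 'main.c')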

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,333 @@
"""Useful utility decorators. """
import sys
import types
import inspect
from functools import wraps, update_wrapper
from sympy.utilities.exceptions import sympy_deprecation_warning
def threaded_factory(func, use_add):
"""A factory for ``threaded`` decorators. """
from sympy.core import sympify
from sympy.matrices import MatrixBase
from sympy.utilities.iterables import iterable
@wraps(func)
def threaded_func(expr, *args, **kwargs):
if isinstance(expr, MatrixBase):
return expr.applyfunc(lambda f: func(f, *args, **kwargs))
elif iterable(expr):
try:
return expr.__class__([func(f, *args, **kwargs) for f in expr])
except TypeError:
return expr
else:
expr = sympify(expr)
if use_add and expr.is_Add:
return expr.__class__(*[ func(f, *args, **kwargs) for f in expr.args ])
elif expr.is_Relational:
return expr.__class__(func(expr.lhs, *args, **kwargs),
func(expr.rhs, *args, **kwargs))
else:
return func(expr, *args, **kwargs)
return threaded_func
def threaded(func):
"""Apply ``func`` to sub--elements of an object, including :class:`~.Add`.
This decorator is intended to make it uniformly possible to apply a
function to all elements of composite objects, e.g. matrices, lists, tuples
and other iterable containers, or just expressions.
This version of :func:`threaded` decorator allows threading over
elements of :class:`~.Add` class. If this behavior is not desirable
use :func:`xthreaded` decorator.
Functions using this decorator must have the following signature::
@threaded
def function(expr, *args, **kwargs):
"""
return threaded_factory(func, True)
def xthreaded(func):
"""Apply ``func`` to sub--elements of an object, excluding :class:`~.Add`.
This decorator is intended to make it uniformly possible to apply a
function to all elements of composite objects, e.g. matrices, lists, tuples
and other iterable containers, or just expressions.
This version of :func:`threaded` decorator disallows threading over
elements of :class:`~.Add` class. If this behavior is not desirable
use :func:`threaded` decorator.
Functions using this decorator must have the following signature::
@xthreaded
def function(expr, *args, **kwargs):
"""
return threaded_factory(func, False)
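A short illustration of the difference between the two decorators (``square`` and ``xsquare`` are made-up names for this sketch):

from sympy import symbols
from sympy.utilities.decorator import threaded, xthreaded

@threaded
def square(expr, *args, **kwargs):
    return expr**2

@xthreaded
def xsquare(expr, *args, **kwargs):
    return expr**2

x, y = symbols('x y')
square(x + y)    # -> x**2 + y**2   (applied term-by-term over the Add)
xsquare(x + y)   # -> (x + y)**2    (the Add is treated as a single object)
square([x, y])   # -> [x**2, y**2]  (both versions thread over containers)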
def conserve_mpmath_dps(func):
"""After the function finishes, resets the value of ``mpmath.mp.dps`` to
the value it had before the function was run."""
import mpmath
def func_wrapper(*args, **kwargs):
dps = mpmath.mp.dps
try:
return func(*args, **kwargs)
finally:
mpmath.mp.dps = dps
func_wrapper = update_wrapper(func_wrapper, func)
return func_wrapper
class no_attrs_in_subclass:
"""Don't 'inherit' certain attributes from a base class
>>> from sympy.utilities.decorator import no_attrs_in_subclass
>>> class A(object):
... x = 'test'
>>> A.x = no_attrs_in_subclass(A, A.x)
>>> class B(A):
... pass
>>> hasattr(A, 'x')
True
>>> hasattr(B, 'x')
False
"""
def __init__(self, cls, f):
self.cls = cls
self.f = f
def __get__(self, instance, owner=None):
if owner == self.cls:
if hasattr(self.f, '__get__'):
return self.f.__get__(instance, owner)
return self.f
raise AttributeError
def doctest_depends_on(exe=None, modules=None, disable_viewers=None,
python_version=None, ground_types=None):
"""
Adds metadata about the dependencies which need to be met for doctesting
the docstrings of the decorated objects.
``exe`` should be a list of executables
``modules`` should be a list of modules
``disable_viewers`` should be a list of viewers for :func:`~sympy.printing.preview.preview` to disable
``python_version`` should be the minimum Python version required, as a tuple
(like ``(3, 0)``)
"""
dependencies = {}
if exe is not None:
dependencies['executables'] = exe
if modules is not None:
dependencies['modules'] = modules
if disable_viewers is not None:
dependencies['disable_viewers'] = disable_viewers
if python_version is not None:
dependencies['python_version'] = python_version
if ground_types is not None:
dependencies['ground_types'] = ground_types
def skiptests():
from sympy.testing.runtests import DependencyError, SymPyDocTests, PyTestReporter # lazy import
r = PyTestReporter()
t = SymPyDocTests(r, None)
try:
t._check_dependencies(**dependencies)
except DependencyError:
return True # Skip doctests
else:
return False # Run doctests
def depends_on_deco(fn):
fn._doctest_depends_on = dependencies
fn.__doctest_skip__ = skiptests
if inspect.isclass(fn):
fn._doctest_depends_on = no_attrs_in_subclass(
fn, fn._doctest_depends_on)
fn.__doctest_skip__ = no_attrs_in_subclass(
fn, fn.__doctest_skip__)
return fn
return depends_on_deco
def public(obj):
"""
Append ``obj``'s name to global ``__all__`` variable (call site).
By using this decorator on functions or classes you achieve the same goal
as by filling the ``__all__`` variable manually, but without having to repeat
the object's name. You also know whether an object is public at its definition
site, not at some random location (where ``__all__`` was set).
Note that in multiple decorator setup (in almost all cases) ``@public``
decorator must be applied before any other decorators, because it relies
on the pointer to object's global namespace. If you apply other decorators
first, ``@public`` may end up modifying the wrong namespace.
Examples
========
>>> from sympy.utilities.decorator import public
>>> __all__ # noqa: F821
Traceback (most recent call last):
...
NameError: name '__all__' is not defined
>>> @public
... def some_function():
... pass
>>> __all__ # noqa: F821
['some_function']
"""
if isinstance(obj, types.FunctionType):
ns = obj.__globals__
name = obj.__name__
elif isinstance(obj, (type(type), type)):
ns = sys.modules[obj.__module__].__dict__
name = obj.__name__
else:
raise TypeError("expected a function or a class, got %s" % obj)
if "__all__" not in ns:
ns["__all__"] = [name]
else:
ns["__all__"].append(name)
return obj
def memoize_property(propfunc):
"""Property decorator that caches the value of potentially expensive
``propfunc`` after the first evaluation. The cached value is stored in
the corresponding property name with an attached underscore."""
attrname = '_' + propfunc.__name__
sentinel = object()
@wraps(propfunc)
def accessor(self):
val = getattr(self, attrname, sentinel)
if val is sentinel:
val = propfunc(self)
setattr(self, attrname, val)
return val
return property(accessor)
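A minimal illustration of the caching behaviour (the class and property names are made up):

from sympy.utilities.decorator import memoize_property

class Dataset:
    @memoize_property
    def summary(self):
        print('computing...')        # runs only on the first access
        return {'rows': 1000}

d = Dataset()
d.summary   # prints 'computing...' and stores the result in d._summary
d.summary   # served from the cached attribute; nothing is recomputed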
def deprecated(message, *, deprecated_since_version,
active_deprecations_target, stacklevel=3):
'''
Mark a function as deprecated.
This decorator should be used if an entire function or class is
deprecated. If only a certain functionality is deprecated, you should use
:func:`~.warns_deprecated_sympy` directly. This decorator is just a
convenience. There is no functional difference between using this
decorator and calling ``warns_deprecated_sympy()`` at the top of the
function.
The decorator takes the same arguments as
:func:`~.warns_deprecated_sympy`. See its
documentation for details on what the keywords to this decorator do.
See the :ref:`deprecation-policy` document for details on when and how
things should be deprecated in SymPy.
Examples
========
>>> from sympy.utilities.decorator import deprecated
>>> from sympy import simplify
>>> @deprecated("""\
... The simplify_this(expr) function is deprecated. Use simplify(expr)
... instead.""", deprecated_since_version="1.1",
... active_deprecations_target='simplify-this-deprecation')
... def simplify_this(expr):
... """
... Simplify ``expr``.
...
... .. deprecated:: 1.1
...
... The ``simplify_this`` function is deprecated. Use :func:`simplify`
... instead. See its documentation for more information. See
... :ref:`simplify-this-deprecation` for details.
...
... """
... return simplify(expr)
>>> from sympy.abc import x
>>> simplify_this(x*(x + 1) - x**2) # doctest: +SKIP
<stdin>:1: SymPyDeprecationWarning:
<BLANKLINE>
The simplify_this(expr) function is deprecated. Use simplify(expr)
instead.
<BLANKLINE>
See https://docs.sympy.org/latest/explanation/active-deprecations.html#simplify-this-deprecation
for details.
<BLANKLINE>
This has been deprecated since SymPy version 1.1. It
will be removed in a future version of SymPy.
<BLANKLINE>
simplify_this(x)
x
See Also
========
sympy.utilities.exceptions.SymPyDeprecationWarning
sympy.utilities.exceptions.sympy_deprecation_warning
sympy.utilities.exceptions.ignore_warnings
sympy.testing.pytest.warns_deprecated_sympy
'''
decorator_kwargs = {"deprecated_since_version": deprecated_since_version,
"active_deprecations_target": active_deprecations_target}
def deprecated_decorator(wrapped):
if hasattr(wrapped, '__mro__'): # wrapped is actually a class
class wrapper(wrapped):
__doc__ = wrapped.__doc__
__module__ = wrapped.__module__
_sympy_deprecated_func = wrapped
if '__new__' in wrapped.__dict__:
def __new__(cls, *args, **kwargs):
sympy_deprecation_warning(message, **decorator_kwargs, stacklevel=stacklevel)
return super().__new__(cls, *args, **kwargs)
else:
def __init__(self, *args, **kwargs):
sympy_deprecation_warning(message, **decorator_kwargs, stacklevel=stacklevel)
super().__init__(*args, **kwargs)
wrapper.__name__ = wrapped.__name__
else:
@wraps(wrapped)
def wrapper(*args, **kwargs):
sympy_deprecation_warning(message, **decorator_kwargs, stacklevel=stacklevel)
return wrapped(*args, **kwargs)
wrapper._sympy_deprecated_func = wrapped
return wrapper
return deprecated_decorator

File diff suppressed because it is too large

View File

@@ -0,0 +1,271 @@
"""
General SymPy exceptions and warnings.
"""
import warnings
import contextlib
from textwrap import dedent
class SymPyDeprecationWarning(DeprecationWarning):
r"""
A warning for deprecated features of SymPy.
See the :ref:`deprecation-policy` document for details on when and how
things should be deprecated in SymPy.
Note that simply constructing this class will not cause a warning to be
issued. To do that, you must call the :func:`sympy_deprecation_warning`
function. For this reason, it is not recommended to ever construct this
class directly.
Explanation
===========
The ``SymPyDeprecationWarning`` class is a subclass of
``DeprecationWarning`` that is used for all deprecations in SymPy. A
special subclass is used so that we can automatically augment the warning
message with additional metadata about the version the deprecation was
introduced in and a link to the documentation. This also allows users to
explicitly filter deprecation warnings from SymPy using ``warnings``
filters (see :ref:`silencing-sympy-deprecation-warnings`).
Additionally, ``SymPyDeprecationWarning`` is enabled to be shown by
default, unlike normal ``DeprecationWarning``\s, which are only shown by
default in interactive sessions. This ensures that deprecation warnings in
SymPy will actually be seen by users.
See the documentation of :func:`sympy_deprecation_warning` for a
description of the parameters to this function.
To mark a function as deprecated, you can use the :func:`@deprecated
<sympy.utilities.decorator.deprecated>` decorator.
See Also
========
sympy.utilities.exceptions.sympy_deprecation_warning
sympy.utilities.exceptions.ignore_warnings
sympy.utilities.decorator.deprecated
sympy.testing.pytest.warns_deprecated_sympy
"""
def __init__(self, message, *, deprecated_since_version, active_deprecations_target):
super().__init__(message, deprecated_since_version,
active_deprecations_target)
self.message = message
if not isinstance(deprecated_since_version, str):
raise TypeError(f"'deprecated_since_version' should be a string, got {deprecated_since_version!r}")
self.deprecated_since_version = deprecated_since_version
self.active_deprecations_target = active_deprecations_target
if any(i in active_deprecations_target for i in '()='):
raise ValueError("active_deprecations_target be the part inside of the '(...)='")
self.full_message = f"""
{dedent(message).strip()}
See https://docs.sympy.org/latest/explanation/active-deprecations.html#{active_deprecations_target}
for details.
This has been deprecated since SymPy version {deprecated_since_version}. It
will be removed in a future version of SymPy.
"""
def __str__(self):
return self.full_message
def __repr__(self):
return f"{self.__class__.__name__}({self.message!r}, deprecated_since_version={self.deprecated_since_version!r}, active_deprecations_target={self.active_deprecations_target!r})"
def __eq__(self, other):
return isinstance(other, SymPyDeprecationWarning) and self.args == other.args
# Make pickling work. By default, pickling tries to recreate the exception
# from its args, but this doesn't work because of our keyword-only
# arguments.
@classmethod
def _new(cls, message, deprecated_since_version,
active_deprecations_target):
return cls(message, deprecated_since_version=deprecated_since_version, active_deprecations_target=active_deprecations_target)
def __reduce__(self):
return (self._new, (self.message, self.deprecated_since_version, self.active_deprecations_target))
# Python by default hides DeprecationWarnings, which we do not want.
warnings.simplefilter("once", SymPyDeprecationWarning)
def sympy_deprecation_warning(message, *, deprecated_since_version,
active_deprecations_target, stacklevel=3):
r'''
Warn that a feature is deprecated in SymPy.
See the :ref:`deprecation-policy` document for details on when and how
things should be deprecated in SymPy.
To mark an entire function or class as deprecated, you can use the
:func:`@deprecated <sympy.utilities.decorator.deprecated>` decorator.
Parameters
==========
message : str
The deprecation message. This may span multiple lines and contain
code examples. Messages should be wrapped to 80 characters. The
message is automatically dedented and leading and trailing whitespace
stripped. Messages may include dynamic content based on the user
input, but avoid using ``str(expression)`` if an expression can be
arbitrary, as it might be huge and make the warning message
unreadable.
deprecated_since_version : str
The version of SymPy the feature has been deprecated since. For new
deprecations, this should be the version in `sympy/release.py
<https://github.com/sympy/sympy/blob/master/sympy/release.py>`_
without the ``.dev``. If the next SymPy version ends up being
different from this, the release manager will need to update any
``SymPyDeprecationWarning``\s using the incorrect version. This
argument is required and must be passed as a keyword argument.
(example: ``deprecated_since_version="1.10"``).
active_deprecations_target : str
The Sphinx target corresponding to the section for the deprecation in
the :ref:`active-deprecations` document (see
``doc/src/explanation/active-deprecations.md``). This is used to
automatically generate a URL to the page in the warning message. This
argument is required and must be passed as a keyword argument.
(example: ``active_deprecations_target="deprecated-feature-abc"``)
stacklevel : int, default: 3
The ``stacklevel`` parameter that is passed to ``warnings.warn``. If
you create a wrapper that calls this function, this should be
increased so that the warning message shows the user line of code that
produced the warning. Note that in some cases there will be multiple
possible different user code paths that could result in the warning.
In that case, just choose the smallest common stacklevel.
Examples
========
>>> from sympy.utilities.exceptions import sympy_deprecation_warning
>>> def is_this_zero(x, y=0):
... """
... Determine if x = 0.
...
... Parameters
... ==========
...
... x : Expr
... The expression to check.
...
... y : Expr, optional
... If provided, check if x = y.
...
... .. deprecated:: 1.1
...
... The ``y`` argument to ``is_this_zero`` is deprecated. Use
... ``is_this_zero(x - y)`` instead.
...
... """
... from sympy import simplify
...
... if y != 0:
... sympy_deprecation_warning("""
... The y argument to is_zero() is deprecated. Use is_zero(x - y) instead.""",
... deprecated_since_version="1.1",
... active_deprecations_target='is-this-zero-y-deprecation')
... return simplify(x - y) == 0
>>> is_this_zero(0)
True
>>> is_this_zero(1, 1) # doctest: +SKIP
<stdin>:1: SymPyDeprecationWarning:
<BLANKLINE>
The y argument to is_zero() is deprecated. Use is_zero(x - y) instead.
<BLANKLINE>
See https://docs.sympy.org/latest/explanation/active-deprecations.html#is-this-zero-y-deprecation
for details.
<BLANKLINE>
This has been deprecated since SymPy version 1.1. It
will be removed in a future version of SymPy.
<BLANKLINE>
is_this_zero(1, 1)
True
See Also
========
sympy.utilities.exceptions.SymPyDeprecationWarning
sympy.utilities.exceptions.ignore_warnings
sympy.utilities.decorator.deprecated
sympy.testing.pytest.warns_deprecated_sympy
'''
w = SymPyDeprecationWarning(message,
deprecated_since_version=deprecated_since_version,
active_deprecations_target=active_deprecations_target)
warnings.warn(w, stacklevel=stacklevel)
@contextlib.contextmanager
def ignore_warnings(warningcls):
'''
Context manager to suppress warnings during tests.
.. note::
Do not use this with SymPyDeprecationWarning in the tests.
warns_deprecated_sympy() should be used instead.
This function is useful for suppressing warnings during tests. The warns
function should be used to assert that a warning is raised. The
ignore_warnings function is useful in situations where the warning is not
guaranteed to be raised (e.g. on importing a module) or if the warning
comes from third-party code.
This function is also useful to prevent the same or similar warnings from
being issued twice due to recursive calls.
When the warning is coming (reliably) from SymPy the warns function should
be preferred to ignore_warnings.
>>> from sympy.utilities.exceptions import ignore_warnings
>>> import warnings
Here's a warning:
>>> with warnings.catch_warnings(): # reset warnings in doctest
... warnings.simplefilter('error')
... warnings.warn('deprecated', UserWarning)
Traceback (most recent call last):
...
UserWarning: deprecated
Let's suppress it with ignore_warnings:
>>> with warnings.catch_warnings(): # reset warnings in doctest
... warnings.simplefilter('error')
... with ignore_warnings(UserWarning):
... warnings.warn('deprecated', UserWarning)
(No warning emitted)
See Also
========
sympy.utilities.exceptions.SymPyDeprecationWarning
sympy.utilities.exceptions.sympy_deprecation_warning
sympy.utilities.decorator.deprecated
sympy.testing.pytest.warns_deprecated_sympy
'''
# Absorbs all warnings in warnrec
with warnings.catch_warnings(record=True) as warnrec:
# Make sure our warning doesn't get filtered
warnings.simplefilter("always", warningcls)
# Now run the test
yield
# Reissue any warnings that we aren't testing for
for w in warnrec:
if not issubclass(w.category, warningcls):
warnings.warn_explicit(w.message, w.category, w.filename, w.lineno)

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,12 @@
"""Functions that involve magic. """
def pollute(names, objects):
"""Pollute the global namespace with symbols -> objects mapping. """
from inspect import currentframe
frame = currentframe().f_back.f_back
try:
for name, obj in zip(names, objects):
frame.f_globals[name] = obj
finally:
del frame # break cyclic dependencies as stated in inspect docs
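# Hedged usage sketch (all names below are illustrative, not part of the
# module): pollute() writes into the globals of its caller's caller, so it
# is meant to be invoked from a setup helper rather than at module level.
def _init_names():
    from sympy import Symbol
    pollute(['x', 'y'], [Symbol('x'), Symbol('y')])
def _session():
    _init_names()  # after this call, ``x`` and ``y`` are module globals here
    return x + y   # noqa: F821 -- names injected by pollute()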

View File

@ -0,0 +1,340 @@
"""
The objects in this module allow the usage of the MatchPy pattern matching
library on SymPy expressions.
"""
import re
from typing import List, Callable, NamedTuple, Any, Dict
from sympy.core.sympify import _sympify
from sympy.external import import_module
from sympy.functions import (log, sin, cos, tan, cot, csc, sec, erf, gamma, uppergamma)
from sympy.functions.elementary.hyperbolic import acosh, asinh, atanh, acoth, acsch, asech, cosh, sinh, tanh, coth, sech, csch
from sympy.functions.elementary.trigonometric import atan, acsc, asin, acot, acos, asec
from sympy.functions.special.error_functions import fresnelc, fresnels, erfc, erfi, Ei
from sympy.core.add import Add
from sympy.core.basic import Basic
from sympy.core.expr import Expr
from sympy.core.mul import Mul
from sympy.core.power import Pow
from sympy.core.relational import (Equality, Unequality)
from sympy.core.symbol import Symbol
from sympy.functions.elementary.exponential import exp
from sympy.integrals.integrals import Integral
from sympy.printing.repr import srepr
from sympy.utilities.decorator import doctest_depends_on
matchpy = import_module("matchpy")
__doctest_requires__ = {('*',): ['matchpy']}
if matchpy:
from matchpy import Operation, CommutativeOperation, AssociativeOperation, OneIdentityOperation
from matchpy.expressions.functions import op_iter, create_operation_expression, op_len
Operation.register(Integral)
Operation.register(Pow)
OneIdentityOperation.register(Pow)
Operation.register(Add)
OneIdentityOperation.register(Add)
CommutativeOperation.register(Add)
AssociativeOperation.register(Add)
Operation.register(Mul)
OneIdentityOperation.register(Mul)
CommutativeOperation.register(Mul)
AssociativeOperation.register(Mul)
Operation.register(Equality)
CommutativeOperation.register(Equality)
Operation.register(Unequality)
CommutativeOperation.register(Unequality)
Operation.register(exp)
Operation.register(log)
Operation.register(gamma)
Operation.register(uppergamma)
Operation.register(fresnels)
Operation.register(fresnelc)
Operation.register(erf)
Operation.register(Ei)
Operation.register(erfc)
Operation.register(erfi)
Operation.register(sin)
Operation.register(cos)
Operation.register(tan)
Operation.register(cot)
Operation.register(csc)
Operation.register(sec)
Operation.register(sinh)
Operation.register(cosh)
Operation.register(tanh)
Operation.register(coth)
Operation.register(csch)
Operation.register(sech)
Operation.register(asin)
Operation.register(acos)
Operation.register(atan)
Operation.register(acot)
Operation.register(acsc)
Operation.register(asec)
Operation.register(asinh)
Operation.register(acosh)
Operation.register(atanh)
Operation.register(acoth)
Operation.register(acsch)
Operation.register(asech)
@op_iter.register(Integral) # type: ignore
def _(operation):
return iter((operation._args[0],) + operation._args[1])
@op_iter.register(Basic) # type: ignore
def _(operation):
return iter(operation._args)
@op_len.register(Integral) # type: ignore
def _(operation):
return 1 + len(operation._args[1])
@op_len.register(Basic) # type: ignore
def _(operation):
return len(operation._args)
@create_operation_expression.register(Basic)
def sympy_op_factory(old_operation, new_operands, variable_name=True):
return type(old_operation)(*new_operands)
if matchpy:
from matchpy import Wildcard
else:
class Wildcard: # type: ignore
def __init__(self, min_length, fixed_size, variable_name, optional):
self.min_count = min_length
self.fixed_size = fixed_size
self.variable_name = variable_name
self.optional = optional
@doctest_depends_on(modules=('matchpy',))
class _WildAbstract(Wildcard, Symbol):
min_length: int # abstract field required in subclasses
fixed_size: bool # abstract field required in subclasses
def __init__(self, variable_name=None, optional=None, **assumptions):
min_length = self.min_length
fixed_size = self.fixed_size
if optional is not None:
optional = _sympify(optional)
Wildcard.__init__(self, min_length, fixed_size, str(variable_name), optional)
def __getstate__(self):
return {
"min_length": self.min_length,
"fixed_size": self.fixed_size,
"min_count": self.min_count,
"variable_name": self.variable_name,
"optional": self.optional,
}
def __new__(cls, variable_name=None, optional=None, **assumptions):
cls._sanitize(assumptions, cls)
return _WildAbstract.__xnew__(cls, variable_name, optional, **assumptions)
def __getnewargs__(self):
return self.variable_name, self.optional
@staticmethod
def __xnew__(cls, variable_name=None, optional=None, **assumptions):
obj = Symbol.__xnew__(cls, variable_name, **assumptions)
return obj
def _hashable_content(self):
if self.optional:
return super()._hashable_content() + (self.min_count, self.fixed_size, self.variable_name, self.optional)
else:
return super()._hashable_content() + (self.min_count, self.fixed_size, self.variable_name)
def __copy__(self) -> '_WildAbstract':
return type(self)(variable_name=self.variable_name, optional=self.optional)
def __repr__(self):
return str(self)
def __str__(self):
return self.name
@doctest_depends_on(modules=('matchpy',))
class WildDot(_WildAbstract):
min_length = 1
fixed_size = True
@doctest_depends_on(modules=('matchpy',))
class WildPlus(_WildAbstract):
min_length = 1
fixed_size = False
@doctest_depends_on(modules=('matchpy',))
class WildStar(_WildAbstract):
min_length = 0
fixed_size = False
def _get_srepr(expr):
s = srepr(expr)
s = re.sub(r"WildDot\('(\w+)'\)", r"\1", s)
s = re.sub(r"WildPlus\('(\w+)'\)", r"*\1", s)
s = re.sub(r"WildStar\('(\w+)'\)", r"*\1", s)
return s
class ReplacementInfo(NamedTuple):
replacement: Any
info: Any
@doctest_depends_on(modules=('matchpy',))
class Replacer:
"""
Replacer object to perform multiple pattern matching and subexpression
replacements in SymPy expressions.
Examples
========
Example of constructing a simple first-degree equation solver:
>>> from sympy.utilities.matchpy_connector import WildDot, Replacer
>>> from sympy import Equality, Symbol
>>> x = Symbol("x")
>>> a_ = WildDot("a_", optional=1)
>>> b_ = WildDot("b_", optional=0)
The lines above have defined two wildcards, ``a_`` and ``b_``, the
coefficients of the equation `a x + b = 0`. The optional values specified
indicate which expression to return in case no match is found; they are
necessary in equations like `a x = 0` and `x + b = 0`.
Create two constraints to make sure that ``a_`` and ``b_`` will not match
any expression containing ``x``:
>>> from matchpy import CustomConstraint
>>> free_x_a = CustomConstraint(lambda a_: not a_.has(x))
>>> free_x_b = CustomConstraint(lambda b_: not b_.has(x))
Now create the rule replacer with the constraints:
>>> replacer = Replacer(common_constraints=[free_x_a, free_x_b])
Add the matching rule:
>>> replacer.add(Equality(a_*x + b_, 0), -b_/a_)
Let's try it:
>>> replacer.replace(Equality(3*x + 4, 0))
-4/3
Notice that it will not match equations expressed with other patterns:
>>> eq = Equality(3*x, 4)
>>> replacer.replace(eq)
Eq(3*x, 4)
In order to extend the matching patterns, define another one (we also need
to clear the cache, because the previous result has already been memorized
and the pattern matcher will not iterate again if given the same expression):
>>> replacer.add(Equality(a_*x, b_), b_/a_)
>>> replacer._matcher.clear()
>>> replacer.replace(eq)
4/3
"""
def __init__(self, common_constraints: list = [], lambdify: bool = False, info: bool = False):
self._matcher = matchpy.ManyToOneMatcher()
self._common_constraint = common_constraints
self._lambdify = lambdify
self._info = info
self._wildcards: Dict[str, Wildcard] = {}
def _get_lambda(self, lambda_str: str) -> Callable[..., Expr]:
exec("from sympy import *")
return eval(lambda_str, locals())
def _get_custom_constraint(self, constraint_expr: Expr, condition_template: str) -> Callable[..., Expr]:
wilds = [x.name for x in constraint_expr.atoms(_WildAbstract)]
lambdaargs = ', '.join(wilds)
fullexpr = _get_srepr(constraint_expr)
condition = condition_template.format(fullexpr)
return matchpy.CustomConstraint(
self._get_lambda(f"lambda {lambdaargs}: ({condition})"))
def _get_custom_constraint_nonfalse(self, constraint_expr: Expr) -> Callable[..., Expr]:
return self._get_custom_constraint(constraint_expr, "({}) != False")
def _get_custom_constraint_true(self, constraint_expr: Expr) -> Callable[..., Expr]:
return self._get_custom_constraint(constraint_expr, "({}) == True")
def add(self, expr: Expr, replacement, conditions_true: List[Expr] = [],
conditions_nonfalse: List[Expr] = [], info: Any = None) -> None:
expr = _sympify(expr)
replacement = _sympify(replacement)
constraints = self._common_constraint[:]
constraint_conditions_true = [
self._get_custom_constraint_true(cond) for cond in conditions_true]
constraint_conditions_nonfalse = [
self._get_custom_constraint_nonfalse(cond) for cond in conditions_nonfalse]
constraints.extend(constraint_conditions_true)
constraints.extend(constraint_conditions_nonfalse)
pattern = matchpy.Pattern(expr, *constraints)
if self._lambdify:
lambda_str = f"lambda {', '.join((x.name for x in expr.atoms(_WildAbstract)))}: {_get_srepr(replacement)}"
lambda_expr = self._get_lambda(lambda_str)
replacement = lambda_expr
else:
self._wildcards.update({str(i): i for i in expr.atoms(Wildcard)})
if self._info:
replacement = ReplacementInfo(replacement, info)
self._matcher.add(pattern, replacement)
def replace(self, expression, max_count: int = -1):
# This method partly rewrites the .replace method of ManyToOneReplacer
# in MatchPy.
# License: https://github.com/HPAC/matchpy/blob/master/LICENSE
infos = []
replaced = True
replace_count = 0
while replaced and (max_count < 0 or replace_count < max_count):
replaced = False
for subexpr, pos in matchpy.preorder_iter_with_position(expression):
try:
replacement_data, subst = next(iter(self._matcher.match(subexpr)))
if self._info:
replacement = replacement_data.replacement
infos.append(replacement_data.info)
else:
replacement = replacement_data
if self._lambdify:
result = replacement(**subst)
else:
result = replacement.xreplace({self._wildcards[k]: v for k, v in subst.items()})
expression = matchpy.functions.replace(expression, pos, result)
replaced = True
break
except StopIteration:
pass
replace_count += 1
if self._info:
return expression, infos
else:
return expression
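# Hedged sketch (requires matchpy; the names below are illustrative and not
# part of the module): with ``info=True`` each rule added to a Replacer can
# carry a tag, and ``replace`` then returns the rewritten expression together
# with the tags of the rules that fired.
if matchpy:
    def _replacer_info_example():
        x = Symbol("x")
        a_ = WildDot("a_", optional=1)
        solver = Replacer(info=True)
        solver.add(Equality(a_*x, 0), _sympify(0), info="zero-rhs")
        return solver.replace(Equality(3*x, 0))  # -> (0, ['zero-rhs'])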

View File

@ -0,0 +1,122 @@
"""Module with some functions for MathML, like transforming MathML
content in MathML presentation.
To use this module, you will need lxml.
"""
from pathlib import Path
from sympy.utilities.decorator import doctest_depends_on
__doctest_requires__ = {('apply_xsl', 'c2p'): ['lxml']}
def add_mathml_headers(s):
return """<math xmlns:mml="http://www.w3.org/1998/Math/MathML"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.w3.org/1998/Math/MathML
http://www.w3.org/Math/XMLSchema/mathml2/mathml2.xsd">""" + s + "</math>"
def _read_binary(pkgname, filename):
import sys
if sys.version_info >= (3, 10):
# files was added in Python 3.9 but only seems to work here in 3.10+
from importlib.resources import files
return files(pkgname).joinpath(filename).read_bytes()
else:
# read_binary was deprecated in Python 3.11
from importlib.resources import read_binary
return read_binary(pkgname, filename)
def _read_xsl(xsl):
# Previously these values were allowed:
if xsl == 'mathml/data/simple_mmlctop.xsl':
xsl = 'simple_mmlctop.xsl'
elif xsl == 'mathml/data/mmlctop.xsl':
xsl = 'mmlctop.xsl'
elif xsl == 'mathml/data/mmltex.xsl':
xsl = 'mmltex.xsl'
if xsl in ['simple_mmlctop.xsl', 'mmlctop.xsl', 'mmltex.xsl']:
xslbytes = _read_binary('sympy.utilities.mathml.data', xsl)
else:
xslbytes = Path(xsl).read_bytes()
return xslbytes
@doctest_depends_on(modules=('lxml',))
def apply_xsl(mml, xsl):
"""Apply a xsl to a MathML string.
Parameters
==========
mml
A string with MathML code.
xsl
A string giving the name of an xsl (xml stylesheet) file which can be
found in sympy/utilities/mathml/data. The following files are supplied
with SymPy:
- mmlctop.xsl
- mmltex.xsl
- simple_mmlctop.xsl
Alternatively, a full path to an xsl file can be given.
Examples
========
>>> from sympy.utilities.mathml import apply_xsl
>>> xsl = 'simple_mmlctop.xsl'
>>> mml = '<apply> <plus/> <ci>a</ci> <ci>b</ci> </apply>'
>>> res = apply_xsl(mml,xsl)
>>> print(res)
<?xml version="1.0"?>
<mrow xmlns="http://www.w3.org/1998/Math/MathML">
<mi>a</mi>
<mo> + </mo>
<mi>b</mi>
</mrow>
"""
from lxml import etree
parser = etree.XMLParser(resolve_entities=False)
ac = etree.XSLTAccessControl.DENY_ALL
s = etree.XML(_read_xsl(xsl), parser=parser)
transform = etree.XSLT(s, access_control=ac)
doc = etree.XML(mml, parser=parser)
result = transform(doc)
s = str(result)
return s
@doctest_depends_on(modules=('lxml',))
def c2p(mml, simple=False):
"""Transforms a document in MathML content (like the one that sympy produces)
in one document in MathML presentation, more suitable for printing, and more
widely accepted
Examples
========
>>> from sympy.utilities.mathml import c2p
>>> mml = '<apply> <exp/> <cn>2</cn> </apply>'
>>> c2p(mml,simple=True) != c2p(mml,simple=False)
True
"""
if not mml.startswith('<math'):
mml = add_mathml_headers(mml)
if simple:
return apply_xsl(mml, 'mathml/data/simple_mmlctop.xsl')
return apply_xsl(mml, 'mathml/data/mmlctop.xsl')
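# Hedged sketch (the helper name is illustrative, not part of the public API):
# c2p() wraps bare content MathML with the headers above before handing the
# document to the content-to-presentation stylesheet; this shows the wrapping
# step in isolation, with no lxml required.
def _mathml_headers_example():
    wrapped = add_mathml_headers('<apply> <plus/> <ci>a</ci> <ci>b</ci> </apply>')
    return wrapped.startswith('<math') and wrapped.endswith('</math>')  # True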

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,76 @@
from functools import wraps
def recurrence_memo(initial):
"""
Memo decorator for sequences defined by recurrence
Examples
========
>>> from sympy.utilities.memoization import recurrence_memo
>>> @recurrence_memo([1]) # 0! = 1
... def factorial(n, prev):
... return n * prev[-1]
>>> factorial(4)
24
>>> factorial(3) # use cache values
6
>>> factorial.cache_length() # cache length can be obtained
5
>>> factorial.fetch_item(slice(2, 4))
[2, 6]
"""
cache = initial
def decorator(f):
@wraps(f)
def g(n):
L = len(cache)
if n < L:
return cache[n]
for i in range(L, n + 1):
cache.append(f(i, cache))
return cache[-1]
g.cache_length = lambda: len(cache)
g.fetch_item = lambda x: cache[x]
return g
return decorator
def assoc_recurrence_memo(base_seq):
"""
Memo decorator for associated sequences defined by recurrence starting from base
base_seq(n) -- callable to get base sequence elements
XXX works only for Pn0 = base_seq(0) cases
XXX works only for m <= n cases
"""
cache = []
def decorator(f):
@wraps(f)
def g(n, m):
L = len(cache)
if n < L:
return cache[n][m]
for i in range(L, n + 1):
# get base sequence
F_i0 = base_seq(i)
F_i_cache = [F_i0]
cache.append(F_i_cache)
# XXX only works for m <= n cases
# generate assoc sequence
for j in range(1, i + 1):
F_ij = f(i, j, cache)
F_i_cache.append(F_ij)
return cache[n][m]
return g
return decorator
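# Hedged sketch (the name ``_binomial_demo`` is illustrative): binomial
# coefficients via Pascal's rule C(n, m) = C(n-1, m-1) + C(n-1, m), with the
# base sequence supplying the first column C(n, 0) = 1; this respects the
# m <= n restriction noted above.
@assoc_recurrence_memo(lambda n: 1)
def _binomial_demo(n, m, prev):
    # prev[n - 1] holds row n - 1, already cached by the decorator
    return prev[n - 1][m - 1] + (prev[n - 1][m] if m < n else 0)
# _binomial_demo(4, 2) returns 6 and caches rows 0..4 of Pascal's triangle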

View File

@ -0,0 +1,565 @@
"""Miscellaneous stuff that does not really fit anywhere else."""
from __future__ import annotations
import operator
import sys
import os
import re as _re
import struct
from textwrap import fill, dedent
class Undecidable(ValueError):
# an error to be raised when a decision cannot be made definitively
# where a definitive answer is needed
pass
def filldedent(s, w=70, **kwargs):
"""
Strips leading and trailing empty lines from a copy of ``s``, then dedents,
fills and returns it.
Empty line stripping serves to deal with docstrings like this one that
start with a newline after the initial triple quote, inserting an empty
line at the beginning of the string.
Additional keyword arguments will be passed to ``textwrap.fill()``.
See Also
========
strlines, rawlines
"""
return '\n' + fill(dedent(str(s)).strip('\n'), width=w, **kwargs)
def strlines(s, c=64, short=False):
"""Return a cut-and-pastable string that, when printed, is
equivalent to the input. The lines will be surrounded by
parentheses and no line will be longer than c (default 64)
characters. If the string contains newline characters, the
`rawlines` result will be returned. If ``short`` is True
(default is False) and there is only one line, it will be
returned without bounding parentheses.
Examples
========
>>> from sympy.utilities.misc import strlines
>>> q = 'this is a long string that should be broken into shorter lines'
>>> print(strlines(q, 40))
(
'this is a long string that should be b'
'roken into shorter lines'
)
>>> q == (
... 'this is a long string that should be b'
... 'roken into shorter lines'
... )
True
See Also
========
filldedent, rawlines
"""
if not isinstance(s, str):
raise ValueError('expecting string input')
if '\n' in s:
return rawlines(s)
q = '"' if repr(s).startswith('"') else "'"
q = (q,)*2
if '\\' in s: # use r-string
m = '(\nr%s%%s%s\n)' % q
j = '%s\nr%s' % q
c -= 3
else:
m = '(\n%s%%s%s\n)' % q
j = '%s\n%s' % q
c -= 2
out = []
while s:
out.append(s[:c])
s = s[c:]
if short and len(out) == 1:
return (m % out[0]).splitlines()[1] # strip bounding (\n...\n)
return m % j.join(out)
def rawlines(s):
"""Return a cut-and-pastable string that, when printed, is equivalent
to the input. Use this when there is more than one line in the
string. The string returned is formatted so it can be indented
nicely within tests; in some cases it is wrapped in the dedent
function which has to be imported from textwrap.
Examples
========
Note: the examples below contain characters that need to be escaped
because they are themselves within a triple-quoted docstring, so the
expressions look more complicated than they would if printed in an
interpreter window.
>>> from sympy.utilities.misc import rawlines
>>> from sympy import TableForm
>>> s = str(TableForm([[1, 10]], headings=(None, ['a', 'bee'])))
>>> print(rawlines(s))
(
'a bee\\n'
'-----\\n'
'1 10 '
)
>>> print(rawlines('''this
... that'''))
dedent('''\\
this
that''')
>>> print(rawlines('''this
... that
... '''))
dedent('''\\
this
that
''')
>>> s = \"\"\"this
... is a triple '''
... \"\"\"
>>> print(rawlines(s))
dedent(\"\"\"\\
this
is a triple '''
\"\"\")
>>> print(rawlines('''this
... that
... '''))
(
'this\\n'
'that\\n'
' '
)
See Also
========
filldedent, strlines
"""
lines = s.split('\n')
if len(lines) == 1:
return repr(lines[0])
triple = ["'''" in s, '"""' in s]
if any(li.endswith(' ') for li in lines) or '\\' in s or all(triple):
rv = []
# add on the newlines
trailing = s.endswith('\n')
last = len(lines) - 1
for i, li in enumerate(lines):
if i != last or trailing:
rv.append(repr(li + '\n'))
else:
rv.append(repr(li))
return '(\n %s\n)' % '\n '.join(rv)
else:
rv = '\n '.join(lines)
if triple[0]:
return 'dedent("""\\\n %s""")' % rv
else:
return "dedent('''\\\n %s''')" % rv
ARCH = str(struct.calcsize('P') * 8) + "-bit"
# XXX: PyPy does not support hash randomization
HASH_RANDOMIZATION = getattr(sys.flags, 'hash_randomization', False)
_debug_tmp: list[str] = []
_debug_iter = 0
def debug_decorator(func):
"""If SYMPY_DEBUG is True, it will print a nice execution tree with
arguments and results of all decorated functions, else do nothing.
"""
from sympy import SYMPY_DEBUG
if not SYMPY_DEBUG:
return func
def maketree(f, *args, **kw):
global _debug_tmp
global _debug_iter
oldtmp = _debug_tmp
_debug_tmp = []
_debug_iter += 1
def tree(subtrees):
def indent(s, variant=1):
x = s.split("\n")
r = "+-%s\n" % x[0]
for a in x[1:]:
if a == "":
continue
if variant == 1:
r += "| %s\n" % a
else:
r += " %s\n" % a
return r
if len(subtrees) == 0:
return ""
f = []
for a in subtrees[:-1]:
f.append(indent(a))
f.append(indent(subtrees[-1], 2))
return ''.join(f)
# If there is a bug and the algorithm enters an infinite loop, enable the
# following lines. It will print the names and parameters of all major functions
# that are called, *before* they are called
#from functools import reduce
#print("%s%s %s%s" % (_debug_iter, reduce(lambda x, y: x + y, \
# map(lambda x: '-', range(1, 2 + _debug_iter))), f.__name__, args))
r = f(*args, **kw)
_debug_iter -= 1
s = "%s%s = %s\n" % (f.__name__, args, r)
if _debug_tmp != []:
s += tree(_debug_tmp)
_debug_tmp = oldtmp
_debug_tmp.append(s)
if _debug_iter == 0:
print(_debug_tmp[0])
_debug_tmp = []
return r
def decorated(*args, **kwargs):
return maketree(func, *args, **kwargs)
return decorated
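# Hedged sketch (illustrative names, not part of the module): decoration is
# done lazily inside a function so the SYMPY_DEBUG lookup only happens when
# the demo is actually called; with SYMPY_DEBUG=True in the environment the
# call prints an execution tree, otherwise it is silent.
def _debug_tree_example():
    @debug_decorator
    def step(x):
        return 2*x
    return step(3)  # 6; prints a "step(3,) = 6" style tree when debugging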
def debug(*args):
"""
Print ``*args`` if SYMPY_DEBUG is True, else do nothing.
"""
from sympy import SYMPY_DEBUG
if SYMPY_DEBUG:
print(*args, file=sys.stderr)
def debugf(string, args):
"""
Print ``string%args`` if SYMPY_DEBUG is True, else do nothing. This is
intended for debug messages using formatted strings.
"""
from sympy import SYMPY_DEBUG
if SYMPY_DEBUG:
print(string%args, file=sys.stderr)
def find_executable(executable, path=None):
"""Try to find 'executable' in the directories listed in 'path' (a
string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']). Returns the complete filename or None if not
found
"""
from .exceptions import sympy_deprecation_warning
sympy_deprecation_warning(
"""
sympy.utilities.misc.find_executable() is deprecated. Use the standard
library shutil.which() function instead.
""",
deprecated_since_version="1.7",
active_deprecations_target="deprecated-find-executable",
)
if path is None:
path = os.environ['PATH']
paths = path.split(os.pathsep)
extlist = ['']
if os.name == 'os2':
(base, ext) = os.path.splitext(executable)
# executable files on OS/2 can have an arbitrary extension, but
# .exe is automatically appended if no dot is present in the name
if not ext:
executable = executable + ".exe"
elif sys.platform == 'win32':
pathext = os.environ['PATHEXT'].lower().split(os.pathsep)
(base, ext) = os.path.splitext(executable)
if ext.lower() not in pathext:
extlist = pathext
for ext in extlist:
execname = executable + ext
if os.path.isfile(execname):
return execname
else:
for p in paths:
f = os.path.join(p, execname)
if os.path.isfile(f):
return f
return None
def func_name(x, short=False):
"""Return function name of `x` (if defined) else the `type(x)`.
If short is True and there is a shorter alias for the result,
return the alias.
Examples
========
>>> from sympy.utilities.misc import func_name
>>> from sympy import Matrix
>>> from sympy.abc import x
>>> func_name(Matrix.eye(3))
'MutableDenseMatrix'
>>> func_name(x < 1)
'StrictLessThan'
>>> func_name(x < 1, short=True)
'Lt'
"""
alias = {
'GreaterThan': 'Ge',
'StrictGreaterThan': 'Gt',
'LessThan': 'Le',
'StrictLessThan': 'Lt',
'Equality': 'Eq',
'Unequality': 'Ne',
}
typ = type(x)
if str(typ).startswith("<type '"):
typ = str(typ).split("'")[1].split("'")[0]
elif str(typ).startswith("<class '"):
typ = str(typ).split("'")[1].split("'")[0]
rv = getattr(getattr(x, 'func', x), '__name__', typ)
if '.' in rv:
rv = rv.split('.')[-1]
if short:
rv = alias.get(rv, rv)
return rv
def _replace(reps):
"""Return a function that can make the replacements, given in
``reps``, on a string. The replacements should be given as a mapping.
Examples
========
>>> from sympy.utilities.misc import _replace
>>> f = _replace(dict(foo='bar', d='t'))
>>> f('food')
'bart'
>>> f = _replace({})
>>> f('food')
'food'
"""
if not reps:
return lambda x: x
D = lambda match: reps[match.group(0)]
pattern = _re.compile("|".join(
[_re.escape(k) for k, v in reps.items()]), _re.M)
return lambda string: pattern.sub(D, string)
def replace(string, *reps):
"""Return ``string`` with all keys in ``reps`` replaced with
their corresponding values, longer strings first, irrespective
of the order they are given. ``reps`` may be passed as tuples
or a single mapping.
Examples
========
>>> from sympy.utilities.misc import replace
>>> replace('foo', {'oo': 'ar', 'f': 'b'})
'bar'
>>> replace("spamham sha", ("spam", "eggs"), ("sha","md5"))
'eggsham md5'
There is no guarantee that a unique answer will be
obtained if keys in a mapping overlap (i.e. are the same
length and have some identical sequence at the
beginning/end):
>>> reps = [
... ('ab', 'x'),
... ('bc', 'y')]
>>> replace('abc', *reps) in ('xc', 'ay')
True
References
==========
.. [1] https://stackoverflow.com/questions/6116978/how-to-replace-multiple-substrings-of-a-string
"""
if len(reps) == 1:
kv = reps[0]
if isinstance(kv, dict):
reps = kv
else:
return string.replace(*kv)
else:
reps = dict(reps)
return _replace(reps)(string)
def translate(s, a, b=None, c=None):
"""Return ``s`` where characters have been replaced or deleted.
SYNTAX
======
translate(s, None, deletechars):
all characters in ``deletechars`` are deleted
translate(s, map [,deletechars]):
all characters in ``deletechars`` (if provided) are deleted
then the replacements defined by map are made; if the keys
of map are strings then the longer ones are handled first.
Multicharacter deletions should have a value of ''.
translate(s, oldchars, newchars, deletechars)
all characters in ``deletechars`` are deleted
then each character in ``oldchars`` is replaced with the
corresponding character in ``newchars``
Examples
========
>>> from sympy.utilities.misc import translate
>>> abc = 'abc'
>>> translate(abc, None, 'a')
'bc'
>>> translate(abc, {'a': 'x'}, 'c')
'xb'
>>> translate(abc, {'abc': 'x', 'a': 'y'})
'x'
>>> translate('abcd', 'ac', 'AC', 'd')
'AbC'
There is no guarantee that a unique answer will be
obtained if keys in a mapping overlap (i.e. are the same
length and have some identical sequences at the
beginning/end):
>>> translate(abc, {'ab': 'x', 'bc': 'y'}) in ('xc', 'ay')
True
"""
mr = {}
if a is None:
if c is not None:
raise ValueError('c should be None when a=None is passed, instead got %s' % c)
if b is None:
return s
c = b
a = b = ''
else:
if isinstance(a, dict):
short = {}
for k in list(a.keys()):
if len(k) == 1 and len(a[k]) == 1:
short[k] = a.pop(k)
mr = a
c = b
if short:
a, b = [''.join(i) for i in list(zip(*short.items()))]
else:
a = b = ''
elif len(a) != len(b):
raise ValueError('oldchars and newchars have different lengths')
if c:
val = str.maketrans('', '', c)
s = s.translate(val)
s = replace(s, mr)
n = str.maketrans(a, b)
return s.translate(n)
def ordinal(num):
"""Return ordinal number string of num, e.g. 1 becomes 1st.
"""
# modified from https://codereview.stackexchange.com/questions/41298/producing-ordinal-numbers
n = as_int(num)
k = abs(n) % 100
if 11 <= k <= 13:
suffix = 'th'
elif k % 10 == 1:
suffix = 'st'
elif k % 10 == 2:
suffix = 'nd'
elif k % 10 == 3:
suffix = 'rd'
else:
suffix = 'th'
return str(n) + suffix
def as_int(n, strict=True):
"""
Convert the argument to a builtin integer.
The return value is guaranteed to be equal to the input. ValueError is
raised if the input has a non-integral value. When ``strict`` is True, this
uses `__index__ <https://docs.python.org/3/reference/datamodel.html#object.__index__>`_
and when it is False it uses ``int``.
Examples
========
>>> from sympy.utilities.misc import as_int
>>> from sympy import sqrt, S
The function is primarily concerned with sanitizing input for
functions that need to work with builtin integers, so anything that
is unambiguously an integer should be returned as an int:
>>> as_int(S(3))
3
Floats, being of limited precision, are not assumed to be exact and
will raise an error unless the ``strict`` flag is False. This
precision issue becomes apparent for large floating point numbers:
>>> big = 1e23
>>> type(big) is float
True
>>> big == int(big)
True
>>> as_int(big)
Traceback (most recent call last):
...
ValueError: ... is not an integer
>>> as_int(big, strict=False)
99999999999999991611392
Input that might be a complex representation of an integer value is
also rejected by default:
>>> one = sqrt(3 + 2*sqrt(2)) - sqrt(2)
>>> int(one) == 1
True
>>> as_int(one)
Traceback (most recent call last):
...
ValueError: ... is not an integer
"""
if strict:
try:
if isinstance(n, bool):
raise TypeError
return operator.index(n)
except TypeError:
raise ValueError('%s is not an integer' % (n,))
else:
try:
result = int(n)
except TypeError:
raise ValueError('%s is not an integer' % (n,))
if n - result:
raise ValueError('%s is not an integer' % (n,))
return result
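# Hedged sketch (illustrative values, not doctests from this module): the
# abs(n) % 100 step in ordinal() handles both the 11-13 special case and
# negative input.
def _ordinal_example():
    assert ordinal(1) == '1st'
    assert ordinal(12) == '12th'
    assert ordinal(23) == '23rd'
    assert ordinal(-2) == '-2nd'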

View File

@ -0,0 +1,33 @@
# This module is deprecated and will be removed.
import sys
import os
from io import StringIO
from sympy.utilities.decorator import deprecated
@deprecated(
"""
The sympy.utilities.pkgdata module and its get_resource function are
deprecated. Use the stdlib importlib.resources module instead.
""",
deprecated_since_version="1.12",
active_deprecations_target="pkgdata",
)
def get_resource(identifier, pkgname=__name__):
mod = sys.modules[pkgname]
fn = getattr(mod, '__file__', None)
if fn is None:
raise OSError("%r has no __file__!" % (pkgname,))
path = os.path.join(os.path.dirname(fn), identifier)
loader = getattr(mod, '__loader__', None)
if loader is not None:
try:
data = loader.get_data(path)
except (OSError, AttributeError):
pass
else:
return StringIO(data.decode('utf-8'))
return open(os.path.normpath(path), 'rb')

View File

@ -0,0 +1,12 @@
"""
.. deprecated:: 1.6
sympy.utilities.pytest has been renamed to sympy.testing.pytest.
"""
from sympy.utilities.exceptions import sympy_deprecation_warning
sympy_deprecation_warning("The sympy.utilities.pytest submodule is deprecated. Use sympy.testing.pytest instead.",
deprecated_since_version="1.6",
active_deprecations_target="deprecated-sympy-utilities-submodules")
from sympy.testing.pytest import * # noqa:F401

View File

@ -0,0 +1,12 @@
"""
.. deprecated:: 1.6
sympy.utilities.randtest has been renamed to sympy.core.random.
"""
from sympy.utilities.exceptions import sympy_deprecation_warning
sympy_deprecation_warning("The sympy.utilities.randtest submodule is deprecated. Use sympy.core.random instead.",
deprecated_since_version="1.6",
active_deprecations_target="deprecated-sympy-utilities-submodules")
from sympy.core.random import * # noqa:F401

View File

@ -0,0 +1,13 @@
"""
.. deprecated:: 1.6
sympy.utilities.runtests has been renamed to sympy.testing.runtests.
"""
from sympy.utilities.exceptions import sympy_deprecation_warning
sympy_deprecation_warning("The sympy.utilities.runtests submodule is deprecated. Use sympy.testing.runtests instead.",
deprecated_since_version="1.6",
active_deprecations_target="deprecated-sympy-utilities-submodules")
from sympy.testing.runtests import * # noqa:F401

View File

@ -0,0 +1,40 @@
"""
This module adds several functions for interactive source code inspection.
"""
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
the class Basic located in the module sympy.core.
"""
if isinstance(lookup_view, str):
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
Splits the string path to a class into a string path to the module
and the name of the class.
Examples
========
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
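# Hedged sketch (the helper name is illustrative): get_class() round-trips a
# dotted path back to the class object, using get_mod_func() to split it.
def _get_class_example():
    from sympy.core.basic import Basic
    return get_class('sympy.core.basic.Basic') is Basic  # True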

View File

@ -0,0 +1,469 @@
# Tests that require installed backends go into
# sympy/test_external/test_autowrap
import os
import tempfile
import shutil
from io import StringIO
from sympy.core import symbols, Eq
from sympy.utilities.autowrap import (autowrap, binary_function,
CythonCodeWrapper, UfuncifyCodeWrapper, CodeWrapper)
from sympy.utilities.codegen import (
CCodeGen, C99CodeGen, CodeGenArgumentListError, make_routine
)
from sympy.testing.pytest import raises
from sympy.testing.tmpfiles import TmpFileManager
def get_string(dump_fn, routines, prefix="file", **kwargs):
"""Wrapper for dump_fn. dump_fn writes its results to a stream object and
this wrapper returns the contents of that stream as a string. This
auxiliary function is used by many tests below.
The header and the empty lines are not generated, to facilitate the
testing of the output.
"""
output = StringIO()
dump_fn(routines, output, prefix, **kwargs)
source = output.getvalue()
output.close()
return source
def test_cython_wrapper_scalar_function():
x, y, z = symbols('x,y,z')
expr = (x + y)*z
routine = make_routine("test", expr)
code_gen = CythonCodeWrapper(CCodeGen())
source = get_string(code_gen.dump_pyx, [routine])
expected = (
"cdef extern from 'file.h':\n"
" double test(double x, double y, double z)\n"
"\n"
"def test_c(double x, double y, double z):\n"
"\n"
" return test(x, y, z)")
assert source == expected
def test_cython_wrapper_outarg():
from sympy.core.relational import Equality
x, y, z = symbols('x,y,z')
code_gen = CythonCodeWrapper(C99CodeGen())
routine = make_routine("test", Equality(z, x + y))
source = get_string(code_gen.dump_pyx, [routine])
expected = (
"cdef extern from 'file.h':\n"
" void test(double x, double y, double *z)\n"
"\n"
"def test_c(double x, double y):\n"
"\n"
" cdef double z = 0\n"
" test(x, y, &z)\n"
" return z")
assert source == expected
def test_cython_wrapper_inoutarg():
from sympy.core.relational import Equality
x, y, z = symbols('x,y,z')
code_gen = CythonCodeWrapper(C99CodeGen())
routine = make_routine("test", Equality(z, x + y + z))
source = get_string(code_gen.dump_pyx, [routine])
expected = (
"cdef extern from 'file.h':\n"
" void test(double x, double y, double *z)\n"
"\n"
"def test_c(double x, double y, double z):\n"
"\n"
" test(x, y, &z)\n"
" return z")
assert source == expected
def test_cython_wrapper_compile_flags():
from sympy.core.relational import Equality
x, y, z = symbols('x,y,z')
routine = make_routine("test", Equality(z, x + y))
code_gen = CythonCodeWrapper(CCodeGen())
expected = """\
from setuptools import setup
from setuptools import Extension
from Cython.Build import cythonize
cy_opts = {'compiler_directives': {'language_level': '3'}}
ext_mods = [Extension(
'wrapper_module_%(num)s', ['wrapper_module_%(num)s.pyx', 'wrapped_code_%(num)s.c'],
include_dirs=[],
library_dirs=[],
libraries=[],
extra_compile_args=['-std=c99'],
extra_link_args=[]
)]
setup(ext_modules=cythonize(ext_mods, **cy_opts))
""" % {'num': CodeWrapper._module_counter}
temp_dir = tempfile.mkdtemp()
TmpFileManager.tmp_folder(temp_dir)
setup_file_path = os.path.join(temp_dir, 'setup.py')
code_gen._prepare_files(routine, build_dir=temp_dir)
with open(setup_file_path) as f:
setup_text = f.read()
assert setup_text == expected
code_gen = CythonCodeWrapper(CCodeGen(),
include_dirs=['/usr/local/include', '/opt/booger/include'],
library_dirs=['/user/local/lib'],
libraries=['thelib', 'nilib'],
extra_compile_args=['-slow-math'],
extra_link_args=['-lswamp', '-ltrident'],
cythonize_options={'compiler_directives': {'boundscheck': False}}
)
expected = """\
from setuptools import setup
from setuptools import Extension
from Cython.Build import cythonize
cy_opts = {'compiler_directives': {'boundscheck': False}}
ext_mods = [Extension(
'wrapper_module_%(num)s', ['wrapper_module_%(num)s.pyx', 'wrapped_code_%(num)s.c'],
include_dirs=['/usr/local/include', '/opt/booger/include'],
library_dirs=['/user/local/lib'],
libraries=['thelib', 'nilib'],
extra_compile_args=['-slow-math', '-std=c99'],
extra_link_args=['-lswamp', '-ltrident']
)]
setup(ext_modules=cythonize(ext_mods, **cy_opts))
""" % {'num': CodeWrapper._module_counter}
code_gen._prepare_files(routine, build_dir=temp_dir)
with open(setup_file_path) as f:
setup_text = f.read()
assert setup_text == expected
expected = """\
from setuptools import setup
from setuptools import Extension
from Cython.Build import cythonize
cy_opts = {'compiler_directives': {'boundscheck': False}}
import numpy as np
ext_mods = [Extension(
'wrapper_module_%(num)s', ['wrapper_module_%(num)s.pyx', 'wrapped_code_%(num)s.c'],
include_dirs=['/usr/local/include', '/opt/booger/include', np.get_include()],
library_dirs=['/user/local/lib'],
libraries=['thelib', 'nilib'],
extra_compile_args=['-slow-math', '-std=c99'],
extra_link_args=['-lswamp', '-ltrident']
)]
setup(ext_modules=cythonize(ext_mods, **cy_opts))
""" % {'num': CodeWrapper._module_counter}
code_gen._need_numpy = True
code_gen._prepare_files(routine, build_dir=temp_dir)
with open(setup_file_path) as f:
setup_text = f.read()
assert setup_text == expected
TmpFileManager.cleanup()
def test_cython_wrapper_unique_dummyvars():
from sympy.core.relational import Equality
from sympy.core.symbol import Dummy
x, y, z = Dummy('x'), Dummy('y'), Dummy('z')
x_id, y_id, z_id = [str(d.dummy_index) for d in [x, y, z]]
expr = Equality(z, x + y)
routine = make_routine("test", expr)
code_gen = CythonCodeWrapper(CCodeGen())
source = get_string(code_gen.dump_pyx, [routine])
expected_template = (
"cdef extern from 'file.h':\n"
" void test(double x_{x_id}, double y_{y_id}, double *z_{z_id})\n"
"\n"
"def test_c(double x_{x_id}, double y_{y_id}):\n"
"\n"
" cdef double z_{z_id} = 0\n"
" test(x_{x_id}, y_{y_id}, &z_{z_id})\n"
" return z_{z_id}")
expected = expected_template.format(x_id=x_id, y_id=y_id, z_id=z_id)
assert source == expected
def test_autowrap_dummy():
x, y, z = symbols('x y z')
# Uses DummyWrapper to test that codegen works as expected
f = autowrap(x + y, backend='dummy')
assert f() == str(x + y)
assert f.args == "x, y"
assert f.returns == "nameless"
f = autowrap(Eq(z, x + y), backend='dummy')
assert f() == str(x + y)
assert f.args == "x, y"
assert f.returns == "z"
f = autowrap(Eq(z, x + y + z), backend='dummy')
assert f() == str(x + y + z)
assert f.args == "x, y, z"
assert f.returns == "z"
def test_autowrap_args():
x, y, z = symbols('x y z')
raises(CodeGenArgumentListError, lambda: autowrap(Eq(z, x + y),
backend='dummy', args=[x]))
f = autowrap(Eq(z, x + y), backend='dummy', args=[y, x])
assert f() == str(x + y)
assert f.args == "y, x"
assert f.returns == "z"
raises(CodeGenArgumentListError, lambda: autowrap(Eq(z, x + y + z),
backend='dummy', args=[x, y]))
f = autowrap(Eq(z, x + y + z), backend='dummy', args=[y, x, z])
assert f() == str(x + y + z)
assert f.args == "y, x, z"
assert f.returns == "z"
f = autowrap(Eq(z, x + y + z), backend='dummy', args=(y, x, z))
assert f() == str(x + y + z)
assert f.args == "y, x, z"
assert f.returns == "z"
def test_autowrap_store_files():
x, y = symbols('x y')
tmp = tempfile.mkdtemp()
TmpFileManager.tmp_folder(tmp)
f = autowrap(x + y, backend='dummy', tempdir=tmp)
assert f() == str(x + y)
assert os.access(tmp, os.F_OK)
TmpFileManager.cleanup()
def test_autowrap_store_files_issue_gh12939():
x, y = symbols('x y')
tmp = './tmp'
saved_cwd = os.getcwd()
temp_cwd = tempfile.mkdtemp()
try:
os.chdir(temp_cwd)
f = autowrap(x + y, backend='dummy', tempdir=tmp)
assert f() == str(x + y)
assert os.access(tmp, os.F_OK)
finally:
os.chdir(saved_cwd)
shutil.rmtree(temp_cwd)
def test_binary_function():
x, y = symbols('x y')
f = binary_function('f', x + y, backend='dummy')
assert f._imp_() == str(x + y)
def test_ufuncify_source():
x, y, z = symbols('x,y,z')
code_wrapper = UfuncifyCodeWrapper(C99CodeGen("ufuncify"))
routine = make_routine("test", x + y + z)
source = get_string(code_wrapper.dump_c, [routine])
expected = """\
#include "Python.h"
#include "math.h"
#include "numpy/ndarraytypes.h"
#include "numpy/ufuncobject.h"
#include "numpy/halffloat.h"
#include "file.h"
static PyMethodDef wrapper_module_%(num)sMethods[] = {
{NULL, NULL, 0, NULL}
};
#ifdef NPY_1_19_API_VERSION
static void test_ufunc(char **args, const npy_intp *dimensions, const npy_intp* steps, void* data)
#else
static void test_ufunc(char **args, npy_intp *dimensions, npy_intp* steps, void* data)
#endif
{
npy_intp i;
npy_intp n = dimensions[0];
char *in0 = args[0];
char *in1 = args[1];
char *in2 = args[2];
char *out0 = args[3];
npy_intp in0_step = steps[0];
npy_intp in1_step = steps[1];
npy_intp in2_step = steps[2];
npy_intp out0_step = steps[3];
for (i = 0; i < n; i++) {
*((double *)out0) = test(*(double *)in0, *(double *)in1, *(double *)in2);
in0 += in0_step;
in1 += in1_step;
in2 += in2_step;
out0 += out0_step;
}
}
PyUFuncGenericFunction test_funcs[1] = {&test_ufunc};
static char test_types[4] = {NPY_DOUBLE, NPY_DOUBLE, NPY_DOUBLE, NPY_DOUBLE};
static void *test_data[1] = {NULL};
#if PY_VERSION_HEX >= 0x03000000
static struct PyModuleDef moduledef = {
PyModuleDef_HEAD_INIT,
"wrapper_module_%(num)s",
NULL,
-1,
wrapper_module_%(num)sMethods,
NULL,
NULL,
NULL,
NULL
};
PyMODINIT_FUNC PyInit_wrapper_module_%(num)s(void)
{
PyObject *m, *d;
PyObject *ufunc0;
m = PyModule_Create(&moduledef);
if (!m) {
return NULL;
}
import_array();
import_umath();
d = PyModule_GetDict(m);
ufunc0 = PyUFunc_FromFuncAndData(test_funcs, test_data, test_types, 1, 3, 1,
PyUFunc_None, "wrapper_module_%(num)s", "Created in SymPy with Ufuncify", 0);
PyDict_SetItemString(d, "test", ufunc0);
Py_DECREF(ufunc0);
return m;
}
#else
PyMODINIT_FUNC initwrapper_module_%(num)s(void)
{
PyObject *m, *d;
PyObject *ufunc0;
m = Py_InitModule("wrapper_module_%(num)s", wrapper_module_%(num)sMethods);
if (m == NULL) {
return;
}
import_array();
import_umath();
d = PyModule_GetDict(m);
ufunc0 = PyUFunc_FromFuncAndData(test_funcs, test_data, test_types, 1, 3, 1,
PyUFunc_None, "wrapper_module_%(num)s", "Created in SymPy with Ufuncify", 0);
PyDict_SetItemString(d, "test", ufunc0);
Py_DECREF(ufunc0);
}
#endif""" % {'num': CodeWrapper._module_counter}
assert source == expected
def test_ufuncify_source_multioutput():
x, y, z = symbols('x,y,z')
var_symbols = (x, y, z)
expr = x + y**3 + 10*z**2
code_wrapper = UfuncifyCodeWrapper(C99CodeGen("ufuncify"))
routines = [make_routine("func{}".format(i), expr.diff(var_symbols[i]), var_symbols) for i in range(len(var_symbols))]
source = get_string(code_wrapper.dump_c, routines, funcname='multitest')
expected = """\
#include "Python.h"
#include "math.h"
#include "numpy/ndarraytypes.h"
#include "numpy/ufuncobject.h"
#include "numpy/halffloat.h"
#include "file.h"
static PyMethodDef wrapper_module_%(num)sMethods[] = {
{NULL, NULL, 0, NULL}
};
#ifdef NPY_1_19_API_VERSION
static void multitest_ufunc(char **args, const npy_intp *dimensions, const npy_intp* steps, void* data)
#else
static void multitest_ufunc(char **args, npy_intp *dimensions, npy_intp* steps, void* data)
#endif
{
npy_intp i;
npy_intp n = dimensions[0];
char *in0 = args[0];
char *in1 = args[1];
char *in2 = args[2];
char *out0 = args[3];
char *out1 = args[4];
char *out2 = args[5];
npy_intp in0_step = steps[0];
npy_intp in1_step = steps[1];
npy_intp in2_step = steps[2];
npy_intp out0_step = steps[3];
npy_intp out1_step = steps[4];
npy_intp out2_step = steps[5];
for (i = 0; i < n; i++) {
*((double *)out0) = func0(*(double *)in0, *(double *)in1, *(double *)in2);
*((double *)out1) = func1(*(double *)in0, *(double *)in1, *(double *)in2);
*((double *)out2) = func2(*(double *)in0, *(double *)in1, *(double *)in2);
in0 += in0_step;
in1 += in1_step;
in2 += in2_step;
out0 += out0_step;
out1 += out1_step;
out2 += out2_step;
}
}
PyUFuncGenericFunction multitest_funcs[1] = {&multitest_ufunc};
static char multitest_types[6] = {NPY_DOUBLE, NPY_DOUBLE, NPY_DOUBLE, NPY_DOUBLE, NPY_DOUBLE, NPY_DOUBLE};
static void *multitest_data[1] = {NULL};
#if PY_VERSION_HEX >= 0x03000000
static struct PyModuleDef moduledef = {
PyModuleDef_HEAD_INIT,
"wrapper_module_%(num)s",
NULL,
-1,
wrapper_module_%(num)sMethods,
NULL,
NULL,
NULL,
NULL
};
PyMODINIT_FUNC PyInit_wrapper_module_%(num)s(void)
{
PyObject *m, *d;
PyObject *ufunc0;
m = PyModule_Create(&moduledef);
if (!m) {
return NULL;
}
import_array();
import_umath();
d = PyModule_GetDict(m);
ufunc0 = PyUFunc_FromFuncAndData(multitest_funcs, multitest_data, multitest_types, 1, 3, 3,
PyUFunc_None, "wrapper_module_%(num)s", "Created in SymPy with Ufuncify", 0);
PyDict_SetItemString(d, "multitest", ufunc0);
Py_DECREF(ufunc0);
return m;
}
#else
PyMODINIT_FUNC initwrapper_module_%(num)s(void)
{
PyObject *m, *d;
PyObject *ufunc0;
m = Py_InitModule("wrapper_module_%(num)s", wrapper_module_%(num)sMethods);
if (m == NULL) {
return;
}
import_array();
import_umath();
d = PyModule_GetDict(m);
ufunc0 = PyUFunc_FromFuncAndData(multitest_funcs, multitest_data, multitest_types, 1, 3, 3,
PyUFunc_None, "wrapper_module_%(num)s", "Created in SymPy with Ufuncify", 0);
PyDict_SetItemString(d, "multitest", ufunc0);
Py_DECREF(ufunc0);
}
#endif""" % {'num': CodeWrapper._module_counter}
assert source == expected

File diff suppressed because it is too large

View File

@ -0,0 +1,620 @@
from io import StringIO
from sympy.core import S, symbols, Eq, pi, Catalan, EulerGamma, Function
from sympy.core.relational import Equality
from sympy.functions.elementary.piecewise import Piecewise
from sympy.matrices import Matrix, MatrixSymbol
from sympy.utilities.codegen import JuliaCodeGen, codegen, make_routine
from sympy.testing.pytest import XFAIL
import sympy
x, y, z = symbols('x,y,z')
def test_empty_jl_code():
code_gen = JuliaCodeGen()
output = StringIO()
code_gen.dump_jl([], output, "file", header=False, empty=False)
source = output.getvalue()
assert source == ""
def test_jl_simple_code():
name_expr = ("test", (x + y)*z)
result, = codegen(name_expr, "Julia", header=False, empty=False)
assert result[0] == "test.jl"
source = result[1]
expected = (
"function test(x, y, z)\n"
" out1 = z .* (x + y)\n"
" return out1\n"
"end\n"
)
assert source == expected
def test_jl_simple_code_with_header():
name_expr = ("test", (x + y)*z)
result, = codegen(name_expr, "Julia", header=True, empty=False)
assert result[0] == "test.jl"
source = result[1]
expected = (
"# Code generated with SymPy " + sympy.__version__ + "\n"
"#\n"
"# See http://www.sympy.org/ for more information.\n"
"#\n"
"# This file is part of 'project'\n"
"function test(x, y, z)\n"
" out1 = z .* (x + y)\n"
" return out1\n"
"end\n"
)
assert source == expected
def test_jl_simple_code_nameout():
expr = Equality(z, (x + y))
name_expr = ("test", expr)
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(x, y)\n"
" z = x + y\n"
" return z\n"
"end\n"
)
assert source == expected
def test_jl_numbersymbol():
name_expr = ("test", pi**Catalan)
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test()\n"
" out1 = pi ^ catalan\n"
" return out1\n"
"end\n"
)
assert source == expected
@XFAIL
def test_jl_numbersymbol_no_inline():
# FIXME: how to pass inline=False to the JuliaCodePrinter?
name_expr = ("test", [pi**Catalan, EulerGamma])
result, = codegen(name_expr, "Julia", header=False,
empty=False, inline=False)
source = result[1]
expected = (
"function test()\n"
" Catalan = 0.915965594177219\n"
" EulerGamma = 0.5772156649015329\n"
" out1 = pi ^ Catalan\n"
" out2 = EulerGamma\n"
" return out1, out2\n"
"end\n"
)
assert source == expected
def test_jl_code_argument_order():
expr = x + y
routine = make_routine("test", expr, argument_sequence=[z, x, y], language="julia")
code_gen = JuliaCodeGen()
output = StringIO()
code_gen.dump_jl([routine], output, "test", header=False, empty=False)
source = output.getvalue()
expected = (
"function test(z, x, y)\n"
" out1 = x + y\n"
" return out1\n"
"end\n"
)
assert source == expected
def test_multiple_results_m():
# Here the output order is the input order
expr1 = (x + y)*z
expr2 = (x - y)*z
name_expr = ("test", [expr1, expr2])
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(x, y, z)\n"
" out1 = z .* (x + y)\n"
" out2 = z .* (x - y)\n"
" return out1, out2\n"
"end\n"
)
assert source == expected
def test_results_named_unordered():
# Here output order is based on name_expr
A, B, C = symbols('A,B,C')
expr1 = Equality(C, (x + y)*z)
expr2 = Equality(A, (x - y)*z)
expr3 = Equality(B, 2*x)
name_expr = ("test", [expr1, expr2, expr3])
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(x, y, z)\n"
" C = z .* (x + y)\n"
" A = z .* (x - y)\n"
" B = 2 * x\n"
" return C, A, B\n"
"end\n"
)
assert source == expected
def test_results_named_ordered():
A, B, C = symbols('A,B,C')
expr1 = Equality(C, (x + y)*z)
expr2 = Equality(A, (x - y)*z)
expr3 = Equality(B, 2*x)
name_expr = ("test", [expr1, expr2, expr3])
result = codegen(name_expr, "Julia", header=False, empty=False,
argument_sequence=(x, z, y))
assert result[0][0] == "test.jl"
source = result[0][1]
expected = (
"function test(x, z, y)\n"
" C = z .* (x + y)\n"
" A = z .* (x - y)\n"
" B = 2 * x\n"
" return C, A, B\n"
"end\n"
)
assert source == expected
def test_complicated_jl_codegen():
from sympy.functions.elementary.trigonometric import (cos, sin, tan)
name_expr = ("testlong",
[ ((sin(x) + cos(y) + tan(z))**3).expand(),
cos(cos(cos(cos(cos(cos(cos(cos(x + y + z))))))))
])
result = codegen(name_expr, "Julia", header=False, empty=False)
assert result[0][0] == "testlong.jl"
source = result[0][1]
expected = (
"function testlong(x, y, z)\n"
" out1 = sin(x) .^ 3 + 3 * sin(x) .^ 2 .* cos(y) + 3 * sin(x) .^ 2 .* tan(z)"
" + 3 * sin(x) .* cos(y) .^ 2 + 6 * sin(x) .* cos(y) .* tan(z) + 3 * sin(x) .* tan(z) .^ 2"
" + cos(y) .^ 3 + 3 * cos(y) .^ 2 .* tan(z) + 3 * cos(y) .* tan(z) .^ 2 + tan(z) .^ 3\n"
" out2 = cos(cos(cos(cos(cos(cos(cos(cos(x + y + z))))))))\n"
" return out1, out2\n"
"end\n"
)
assert source == expected
def test_jl_output_arg_mixed_unordered():
# named outputs are alphabetical, unnamed output appear in the given order
from sympy.functions.elementary.trigonometric import (cos, sin)
a = symbols("a")
name_expr = ("foo", [cos(2*x), Equality(y, sin(x)), cos(x), Equality(a, sin(2*x))])
result, = codegen(name_expr, "Julia", header=False, empty=False)
assert result[0] == "foo.jl"
source = result[1]
expected = (
'function foo(x)\n'
' out1 = cos(2 * x)\n'
' y = sin(x)\n'
' out3 = cos(x)\n'
' a = sin(2 * x)\n'
' return out1, y, out3, a\n'
'end\n'
)
assert source == expected
def test_jl_piecewise_():
pw = Piecewise((0, x < -1), (x**2, x <= 1), (-x+2, x > 1), (1, True), evaluate=False)
name_expr = ("pwtest", pw)
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function pwtest(x)\n"
" out1 = ((x < -1) ? (0) :\n"
" (x <= 1) ? (x .^ 2) :\n"
" (x > 1) ? (2 - x) : (1))\n"
" return out1\n"
"end\n"
)
assert source == expected
@XFAIL
def test_jl_piecewise_no_inline():
# FIXME: how to pass inline=False to the JuliaCodePrinter?
pw = Piecewise((0, x < -1), (x**2, x <= 1), (-x+2, x > 1), (1, True))
name_expr = ("pwtest", pw)
result, = codegen(name_expr, "Julia", header=False, empty=False,
inline=False)
source = result[1]
expected = (
"function pwtest(x)\n"
" if (x < -1)\n"
" out1 = 0\n"
" elseif (x <= 1)\n"
" out1 = x .^ 2\n"
" elseif (x > 1)\n"
" out1 = -x + 2\n"
" else\n"
" out1 = 1\n"
" end\n"
" return out1\n"
"end\n"
)
assert source == expected
def test_jl_multifcns_per_file():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
result = codegen(name_expr, "Julia", header=False, empty=False)
assert result[0][0] == "foo.jl"
source = result[0][1]
expected = (
"function foo(x, y)\n"
" out1 = 2 * x\n"
" out2 = 3 * y\n"
" return out1, out2\n"
"end\n"
"function bar(y)\n"
" out1 = y .^ 2\n"
" out2 = 4 * y\n"
" return out1, out2\n"
"end\n"
)
assert source == expected
def test_jl_multifcns_per_file_w_header():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
result = codegen(name_expr, "Julia", header=True, empty=False)
assert result[0][0] == "foo.jl"
source = result[0][1]
expected = (
"# Code generated with SymPy " + sympy.__version__ + "\n"
"#\n"
"# See http://www.sympy.org/ for more information.\n"
"#\n"
"# This file is part of 'project'\n"
"function foo(x, y)\n"
" out1 = 2 * x\n"
" out2 = 3 * y\n"
" return out1, out2\n"
"end\n"
"function bar(y)\n"
" out1 = y .^ 2\n"
" out2 = 4 * y\n"
" return out1, out2\n"
"end\n"
)
assert source == expected
def test_jl_filename_match_prefix():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
result, = codegen(name_expr, "Julia", prefix="baz", header=False,
empty=False)
assert result[0] == "baz.jl"
def test_jl_matrix_named():
e2 = Matrix([[x, 2*y, pi*z]])
name_expr = ("test", Equality(MatrixSymbol('myout1', 1, 3), e2))
result = codegen(name_expr, "Julia", header=False, empty=False)
assert result[0][0] == "test.jl"
source = result[0][1]
expected = (
"function test(x, y, z)\n"
" myout1 = [x 2 * y pi * z]\n"
" return myout1\n"
"end\n"
)
assert source == expected
def test_jl_matrix_named_matsym():
myout1 = MatrixSymbol('myout1', 1, 3)
e2 = Matrix([[x, 2*y, pi*z]])
name_expr = ("test", Equality(myout1, e2, evaluate=False))
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(x, y, z)\n"
" myout1 = [x 2 * y pi * z]\n"
" return myout1\n"
"end\n"
)
assert source == expected
def test_jl_matrix_output_autoname():
expr = Matrix([[x, x+y, 3]])
name_expr = ("test", expr)
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(x, y)\n"
" out1 = [x x + y 3]\n"
" return out1\n"
"end\n"
)
assert source == expected
def test_jl_matrix_output_autoname_2():
e1 = (x + y)
e2 = Matrix([[2*x, 2*y, 2*z]])
e3 = Matrix([[x], [y], [z]])
e4 = Matrix([[x, y], [z, 16]])
name_expr = ("test", (e1, e2, e3, e4))
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(x, y, z)\n"
" out1 = x + y\n"
" out2 = [2 * x 2 * y 2 * z]\n"
" out3 = [x, y, z]\n"
" out4 = [x y;\n"
" z 16]\n"
" return out1, out2, out3, out4\n"
"end\n"
)
assert source == expected
def test_jl_results_matrix_named_ordered():
B, C = symbols('B,C')
A = MatrixSymbol('A', 1, 3)
expr1 = Equality(C, (x + y)*z)
expr2 = Equality(A, Matrix([[1, 2, x]]))
expr3 = Equality(B, 2*x)
name_expr = ("test", [expr1, expr2, expr3])
result, = codegen(name_expr, "Julia", header=False, empty=False,
argument_sequence=(x, z, y))
source = result[1]
expected = (
"function test(x, z, y)\n"
" C = z .* (x + y)\n"
" A = [1 2 x]\n"
" B = 2 * x\n"
" return C, A, B\n"
"end\n"
)
assert source == expected
def test_jl_matrixsymbol_slice():
A = MatrixSymbol('A', 2, 3)
B = MatrixSymbol('B', 1, 3)
C = MatrixSymbol('C', 1, 3)
D = MatrixSymbol('D', 2, 1)
name_expr = ("test", [Equality(B, A[0, :]),
Equality(C, A[1, :]),
Equality(D, A[:, 2])])
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(A)\n"
" B = A[1,:]\n"
" C = A[2,:]\n"
" D = A[:,3]\n"
" return B, C, D\n"
"end\n"
)
assert source == expected
def test_jl_matrixsymbol_slice2():
A = MatrixSymbol('A', 3, 4)
B = MatrixSymbol('B', 2, 2)
C = MatrixSymbol('C', 2, 2)
name_expr = ("test", [Equality(B, A[0:2, 0:2]),
Equality(C, A[0:2, 1:3])])
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(A)\n"
" B = A[1:2,1:2]\n"
" C = A[1:2,2:3]\n"
" return B, C\n"
"end\n"
)
assert source == expected
def test_jl_matrixsymbol_slice3():
A = MatrixSymbol('A', 8, 7)
B = MatrixSymbol('B', 2, 2)
C = MatrixSymbol('C', 4, 2)
name_expr = ("test", [Equality(B, A[6:, 1::3]),
Equality(C, A[::2, ::3])])
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(A)\n"
" B = A[7:end,2:3:end]\n"
" C = A[1:2:end,1:3:end]\n"
" return B, C\n"
"end\n"
)
assert source == expected
def test_jl_matrixsymbol_slice_autoname():
A = MatrixSymbol('A', 2, 3)
B = MatrixSymbol('B', 1, 3)
name_expr = ("test", [Equality(B, A[0,:]), A[1,:], A[:,0], A[:,1]])
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(A)\n"
" B = A[1,:]\n"
" out2 = A[2,:]\n"
" out3 = A[:,1]\n"
" out4 = A[:,2]\n"
" return B, out2, out3, out4\n"
"end\n"
)
assert source == expected
def test_jl_loops():
# Note: a Julia programmer would probably vectorize this across one or
# more dimensions (an illustrative sketch follows this test). Also,
# size(A) would be used rather than passing in m and n. Perhaps users
# would expect us to vectorize automatically here? Or is it possible to
# represent such things using IndexedBase?
from sympy.tensor import IndexedBase, Idx
from sympy.core.symbol import symbols
n, m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
result, = codegen(('mat_vec_mult', Eq(y[i], A[i, j]*x[j])), "Julia",
header=False, empty=False)
source = result[1]
expected = (
'function mat_vec_mult(y, A, m, n, x)\n'
' for i = 1:m\n'
' y[i] = 0\n'
' end\n'
' for i = 1:m\n'
' for j = 1:n\n'
' y[i] = %(rhs)s + y[i]\n'
' end\n'
' end\n'
' return y\n'
'end\n'
)
assert (source == expected % {'rhs': 'A[%s,%s] .* x[j]' % (i, j)} or
source == expected % {'rhs': 'x[j] .* A[%s,%s]' % (i, j)})
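# Illustrative sketch only (hypothetical, not produced or asserted by any
# test): the hand-vectorized Julia routine alluded to in the note above
# would look roughly like
#
#     function mat_vec_mult(A, x)
#         return A * x
#     end
#
# with the loop bounds taken from size(A) instead of the m and n arguments.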
def test_jl_tensor_loops_multiple_contractions():
# see comments in previous test about vectorizing
from sympy.tensor import IndexedBase, Idx
from sympy.core.symbol import symbols
n, m, o, p = symbols('n m o p', integer=True)
A = IndexedBase('A')
B = IndexedBase('B')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
l = Idx('l', p)
result, = codegen(('tensorthing', Eq(y[i], B[j, k, l]*A[i, j, k, l])),
"Julia", header=False, empty=False)
source = result[1]
expected = (
'function tensorthing(y, A, B, m, n, o, p)\n'
' for i = 1:m\n'
' y[i] = 0\n'
' end\n'
' for i = 1:m\n'
' for j = 1:n\n'
' for k = 1:o\n'
' for l = 1:p\n'
' y[i] = A[i,j,k,l] .* B[j,k,l] + y[i]\n'
' end\n'
' end\n'
' end\n'
' end\n'
' return y\n'
'end\n'
)
assert source == expected
def test_jl_InOutArgument():
expr = Equality(x, x**2)
name_expr = ("mysqr", expr)
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function mysqr(x)\n"
" x = x .^ 2\n"
" return x\n"
"end\n"
)
assert source == expected
def test_jl_InOutArgument_order():
# can specify the order as (x, y)
expr = Equality(x, x**2 + y)
name_expr = ("test", expr)
result, = codegen(name_expr, "Julia", header=False,
empty=False, argument_sequence=(x,y))
source = result[1]
expected = (
"function test(x, y)\n"
" x = x .^ 2 + y\n"
" return x\n"
"end\n"
)
assert source == expected
# make sure it gives (x, y) not (y, x)
expr = Equality(x, x**2 + y)
name_expr = ("test", expr)
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(x, y)\n"
" x = x .^ 2 + y\n"
" return x\n"
"end\n"
)
assert source == expected
def test_jl_not_supported():
f = Function('f')
name_expr = ("test", [f(x).diff(x), S.ComplexInfinity])
result, = codegen(name_expr, "Julia", header=False, empty=False)
source = result[1]
expected = (
"function test(x)\n"
" # unsupported: Derivative(f(x), x)\n"
" # unsupported: zoo\n"
" out1 = Derivative(f(x), x)\n"
" out2 = zoo\n"
" return out1, out2\n"
"end\n"
)
assert source == expected
def test_global_vars_julia():
x, y, z, t = symbols("x y z t")
result = codegen(('f', x*y), "Julia", header=False, empty=False,
global_vars=(y,))
source = result[0][1]
expected = (
"function f(x)\n"
" out1 = x .* y\n"
" return out1\n"
"end\n"
)
assert source == expected
result = codegen(('f', x*y+z), "Julia", header=False, empty=False,
argument_sequence=(x, y), global_vars=(z, t))
source = result[0][1]
expected = (
"function f(x, y)\n"
" out1 = x .* y + z\n"
" return out1\n"
"end\n"
)
assert source == expected

View File

@ -0,0 +1,589 @@
from io import StringIO
from sympy.core import S, symbols, Eq, pi, Catalan, EulerGamma, Function
from sympy.core.relational import Equality
from sympy.functions.elementary.piecewise import Piecewise
from sympy.matrices import Matrix, MatrixSymbol
from sympy.utilities.codegen import OctaveCodeGen, codegen, make_routine
from sympy.testing.pytest import raises
from sympy.testing.pytest import XFAIL
import sympy
x, y, z = symbols('x,y,z')
def test_empty_m_code():
code_gen = OctaveCodeGen()
output = StringIO()
code_gen.dump_m([], output, "file", header=False, empty=False)
source = output.getvalue()
assert source == ""
def test_m_simple_code():
name_expr = ("test", (x + y)*z)
result, = codegen(name_expr, "Octave", header=False, empty=False)
assert result[0] == "test.m"
source = result[1]
expected = (
"function out1 = test(x, y, z)\n"
" out1 = z.*(x + y);\n"
"end\n"
)
assert source == expected
def test_m_simple_code_with_header():
name_expr = ("test", (x + y)*z)
result, = codegen(name_expr, "Octave", header=True, empty=False)
assert result[0] == "test.m"
source = result[1]
expected = (
"function out1 = test(x, y, z)\n"
" %TEST Autogenerated by SymPy\n"
" % Code generated with SymPy " + sympy.__version__ + "\n"
" %\n"
" % See http://www.sympy.org/ for more information.\n"
" %\n"
" % This file is part of 'project'\n"
" out1 = z.*(x + y);\n"
"end\n"
)
assert source == expected
def test_m_simple_code_nameout():
expr = Equality(z, (x + y))
name_expr = ("test", expr)
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function z = test(x, y)\n"
" z = x + y;\n"
"end\n"
)
assert source == expected
def test_m_numbersymbol():
name_expr = ("test", pi**Catalan)
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function out1 = test()\n"
" out1 = pi^%s;\n"
"end\n"
) % Catalan.evalf(17)
assert source == expected
@XFAIL
def test_m_numbersymbol_no_inline():
# FIXME: how to pass inline=False to the OctaveCodePrinter?
name_expr = ("test", [pi**Catalan, EulerGamma])
result, = codegen(name_expr, "Octave", header=False,
empty=False, inline=False)
source = result[1]
expected = (
"function [out1, out2] = test()\n"
" Catalan = 0.915965594177219; % constant\n"
" EulerGamma = 0.5772156649015329; % constant\n"
" out1 = pi^Catalan;\n"
" out2 = EulerGamma;\n"
"end\n"
)
assert source == expected
def test_m_code_argument_order():
expr = x + y
routine = make_routine("test", expr, argument_sequence=[z, x, y], language="octave")
code_gen = OctaveCodeGen()
output = StringIO()
code_gen.dump_m([routine], output, "test", header=False, empty=False)
source = output.getvalue()
expected = (
"function out1 = test(z, x, y)\n"
" out1 = x + y;\n"
"end\n"
)
assert source == expected
def test_multiple_results_m():
# Here the output order is the input order
expr1 = (x + y)*z
expr2 = (x - y)*z
name_expr = ("test", [expr1, expr2])
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function [out1, out2] = test(x, y, z)\n"
" out1 = z.*(x + y);\n"
" out2 = z.*(x - y);\n"
"end\n"
)
assert source == expected
def test_results_named_unordered():
# Here output order is based on name_expr
A, B, C = symbols('A,B,C')
expr1 = Equality(C, (x + y)*z)
expr2 = Equality(A, (x - y)*z)
expr3 = Equality(B, 2*x)
name_expr = ("test", [expr1, expr2, expr3])
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function [C, A, B] = test(x, y, z)\n"
" C = z.*(x + y);\n"
" A = z.*(x - y);\n"
" B = 2*x;\n"
"end\n"
)
assert source == expected
def test_results_named_ordered():
A, B, C = symbols('A,B,C')
expr1 = Equality(C, (x + y)*z)
expr2 = Equality(A, (x - y)*z)
expr3 = Equality(B, 2*x)
name_expr = ("test", [expr1, expr2, expr3])
result = codegen(name_expr, "Octave", header=False, empty=False,
argument_sequence=(x, z, y))
assert result[0][0] == "test.m"
source = result[0][1]
expected = (
"function [C, A, B] = test(x, z, y)\n"
" C = z.*(x + y);\n"
" A = z.*(x - y);\n"
" B = 2*x;\n"
"end\n"
)
assert source == expected
def test_complicated_m_codegen():
from sympy.functions.elementary.trigonometric import (cos, sin, tan)
name_expr = ("testlong",
[ ((sin(x) + cos(y) + tan(z))**3).expand(),
cos(cos(cos(cos(cos(cos(cos(cos(x + y + z))))))))
])
result = codegen(name_expr, "Octave", header=False, empty=False)
assert result[0][0] == "testlong.m"
source = result[0][1]
expected = (
"function [out1, out2] = testlong(x, y, z)\n"
" out1 = sin(x).^3 + 3*sin(x).^2.*cos(y) + 3*sin(x).^2.*tan(z)"
" + 3*sin(x).*cos(y).^2 + 6*sin(x).*cos(y).*tan(z) + 3*sin(x).*tan(z).^2"
" + cos(y).^3 + 3*cos(y).^2.*tan(z) + 3*cos(y).*tan(z).^2 + tan(z).^3;\n"
" out2 = cos(cos(cos(cos(cos(cos(cos(cos(x + y + z))))))));\n"
"end\n"
)
assert source == expected
def test_m_output_arg_mixed_unordered():
# named outputs are alphabetical, unnamed outputs appear in the given order
from sympy.functions.elementary.trigonometric import (cos, sin)
a = symbols("a")
name_expr = ("foo", [cos(2*x), Equality(y, sin(x)), cos(x), Equality(a, sin(2*x))])
result, = codegen(name_expr, "Octave", header=False, empty=False)
assert result[0] == "foo.m"
source = result[1]
expected = (
'function [out1, y, out3, a] = foo(x)\n'
' out1 = cos(2*x);\n'
' y = sin(x);\n'
' out3 = cos(x);\n'
' a = sin(2*x);\n'
'end\n'
)
assert source == expected
def test_m_piecewise_():
pw = Piecewise((0, x < -1), (x**2, x <= 1), (-x+2, x > 1), (1, True), evaluate=False)
name_expr = ("pwtest", pw)
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function out1 = pwtest(x)\n"
" out1 = ((x < -1).*(0) + (~(x < -1)).*( ...\n"
" (x <= 1).*(x.^2) + (~(x <= 1)).*( ...\n"
" (x > 1).*(2 - x) + (~(x > 1)).*(1))));\n"
"end\n"
)
assert source == expected
@XFAIL
def test_m_piecewise_no_inline():
# FIXME: how to pass inline=False to the OctaveCodePrinter?
pw = Piecewise((0, x < -1), (x**2, x <= 1), (-x+2, x > 1), (1, True))
name_expr = ("pwtest", pw)
result, = codegen(name_expr, "Octave", header=False, empty=False,
inline=False)
source = result[1]
expected = (
"function out1 = pwtest(x)\n"
" if (x < -1)\n"
" out1 = 0;\n"
" elseif (x <= 1)\n"
" out1 = x.^2;\n"
" elseif (x > 1)\n"
" out1 = -x + 2;\n"
" else\n"
" out1 = 1;\n"
" end\n"
"end\n"
)
assert source == expected
def test_m_multifcns_per_file():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
result = codegen(name_expr, "Octave", header=False, empty=False)
assert result[0][0] == "foo.m"
source = result[0][1]
expected = (
"function [out1, out2] = foo(x, y)\n"
" out1 = 2*x;\n"
" out2 = 3*y;\n"
"end\n"
"function [out1, out2] = bar(y)\n"
" out1 = y.^2;\n"
" out2 = 4*y;\n"
"end\n"
)
assert source == expected
def test_m_multifcns_per_file_w_header():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
result = codegen(name_expr, "Octave", header=True, empty=False)
assert result[0][0] == "foo.m"
source = result[0][1]
expected = (
"function [out1, out2] = foo(x, y)\n"
" %FOO Autogenerated by SymPy\n"
" % Code generated with SymPy " + sympy.__version__ + "\n"
" %\n"
" % See http://www.sympy.org/ for more information.\n"
" %\n"
" % This file is part of 'project'\n"
" out1 = 2*x;\n"
" out2 = 3*y;\n"
"end\n"
"function [out1, out2] = bar(y)\n"
" out1 = y.^2;\n"
" out2 = 4*y;\n"
"end\n"
)
assert source == expected
def test_m_filename_match_first_fcn():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
raises(ValueError, lambda: codegen(name_expr,
"Octave", prefix="bar", header=False, empty=False))
def test_m_matrix_named():
e2 = Matrix([[x, 2*y, pi*z]])
name_expr = ("test", Equality(MatrixSymbol('myout1', 1, 3), e2))
result = codegen(name_expr, "Octave", header=False, empty=False)
assert result[0][0] == "test.m"
source = result[0][1]
expected = (
"function myout1 = test(x, y, z)\n"
" myout1 = [x 2*y pi*z];\n"
"end\n"
)
assert source == expected
def test_m_matrix_named_matsym():
myout1 = MatrixSymbol('myout1', 1, 3)
e2 = Matrix([[x, 2*y, pi*z]])
name_expr = ("test", Equality(myout1, e2, evaluate=False))
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function myout1 = test(x, y, z)\n"
" myout1 = [x 2*y pi*z];\n"
"end\n"
)
assert source == expected
def test_m_matrix_output_autoname():
expr = Matrix([[x, x+y, 3]])
name_expr = ("test", expr)
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function out1 = test(x, y)\n"
" out1 = [x x + y 3];\n"
"end\n"
)
assert source == expected
def test_m_matrix_output_autoname_2():
e1 = (x + y)
e2 = Matrix([[2*x, 2*y, 2*z]])
e3 = Matrix([[x], [y], [z]])
e4 = Matrix([[x, y], [z, 16]])
name_expr = ("test", (e1, e2, e3, e4))
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function [out1, out2, out3, out4] = test(x, y, z)\n"
" out1 = x + y;\n"
" out2 = [2*x 2*y 2*z];\n"
" out3 = [x; y; z];\n"
" out4 = [x y; z 16];\n"
"end\n"
)
assert source == expected
def test_m_results_matrix_named_ordered():
B, C = symbols('B,C')
A = MatrixSymbol('A', 1, 3)
expr1 = Equality(C, (x + y)*z)
expr2 = Equality(A, Matrix([[1, 2, x]]))
expr3 = Equality(B, 2*x)
name_expr = ("test", [expr1, expr2, expr3])
result, = codegen(name_expr, "Octave", header=False, empty=False,
argument_sequence=(x, z, y))
source = result[1]
expected = (
"function [C, A, B] = test(x, z, y)\n"
" C = z.*(x + y);\n"
" A = [1 2 x];\n"
" B = 2*x;\n"
"end\n"
)
assert source == expected
def test_m_matrixsymbol_slice():
A = MatrixSymbol('A', 2, 3)
B = MatrixSymbol('B', 1, 3)
C = MatrixSymbol('C', 1, 3)
D = MatrixSymbol('D', 2, 1)
name_expr = ("test", [Equality(B, A[0, :]),
Equality(C, A[1, :]),
Equality(D, A[:, 2])])
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function [B, C, D] = test(A)\n"
" B = A(1, :);\n"
" C = A(2, :);\n"
" D = A(:, 3);\n"
"end\n"
)
assert source == expected
def test_m_matrixsymbol_slice2():
A = MatrixSymbol('A', 3, 4)
B = MatrixSymbol('B', 2, 2)
C = MatrixSymbol('C', 2, 2)
name_expr = ("test", [Equality(B, A[0:2, 0:2]),
Equality(C, A[0:2, 1:3])])
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function [B, C] = test(A)\n"
" B = A(1:2, 1:2);\n"
" C = A(1:2, 2:3);\n"
"end\n"
)
assert source == expected
def test_m_matrixsymbol_slice3():
A = MatrixSymbol('A', 8, 7)
B = MatrixSymbol('B', 2, 2)
C = MatrixSymbol('C', 4, 2)
name_expr = ("test", [Equality(B, A[6:, 1::3]),
Equality(C, A[::2, ::3])])
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function [B, C] = test(A)\n"
" B = A(7:end, 2:3:end);\n"
" C = A(1:2:end, 1:3:end);\n"
"end\n"
)
assert source == expected
def test_m_matrixsymbol_slice_autoname():
A = MatrixSymbol('A', 2, 3)
B = MatrixSymbol('B', 1, 3)
name_expr = ("test", [Equality(B, A[0,:]), A[1,:], A[:,0], A[:,1]])
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function [B, out2, out3, out4] = test(A)\n"
" B = A(1, :);\n"
" out2 = A(2, :);\n"
" out3 = A(:, 1);\n"
" out4 = A(:, 2);\n"
"end\n"
)
assert source == expected
def test_m_loops():
# Note: an Octave programmer would probably vectorize this across one or
# more dimensions. Also, size(A) would be used rather than passing in m
# and n. Perhaps users would expect us to vectorize automatically here?
# Or is it possible to represent such things using IndexedBase?
from sympy.tensor import IndexedBase, Idx
from sympy.core.symbol import symbols
n, m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
result, = codegen(('mat_vec_mult', Eq(y[i], A[i, j]*x[j])), "Octave",
header=False, empty=False)
source = result[1]
expected = (
'function y = mat_vec_mult(A, m, n, x)\n'
' for i = 1:m\n'
' y(i) = 0;\n'
' end\n'
' for i = 1:m\n'
' for j = 1:n\n'
' y(i) = %(rhs)s + y(i);\n'
' end\n'
' end\n'
'end\n'
)
assert (source == expected % {'rhs': 'A(%s, %s).*x(j)' % (i, j)} or
source == expected % {'rhs': 'x(j).*A(%s, %s)' % (i, j)})
def test_m_tensor_loops_multiple_contractions():
# see comments in previous test about vectorizing
from sympy.tensor import IndexedBase, Idx
from sympy.core.symbol import symbols
n, m, o, p = symbols('n m o p', integer=True)
A = IndexedBase('A')
B = IndexedBase('B')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
l = Idx('l', p)
result, = codegen(('tensorthing', Eq(y[i], B[j, k, l]*A[i, j, k, l])),
"Octave", header=False, empty=False)
source = result[1]
expected = (
'function y = tensorthing(A, B, m, n, o, p)\n'
' for i = 1:m\n'
' y(i) = 0;\n'
' end\n'
' for i = 1:m\n'
' for j = 1:n\n'
' for k = 1:o\n'
' for l = 1:p\n'
' y(i) = A(i, j, k, l).*B(j, k, l) + y(i);\n'
' end\n'
' end\n'
' end\n'
' end\n'
'end\n'
)
assert source == expected
def test_m_InOutArgument():
expr = Equality(x, x**2)
name_expr = ("mysqr", expr)
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function x = mysqr(x)\n"
" x = x.^2;\n"
"end\n"
)
assert source == expected
def test_m_InOutArgument_order():
# can specify the order as (x, y)
expr = Equality(x, x**2 + y)
name_expr = ("test", expr)
result, = codegen(name_expr, "Octave", header=False,
empty=False, argument_sequence=(x,y))
source = result[1]
expected = (
"function x = test(x, y)\n"
" x = x.^2 + y;\n"
"end\n"
)
assert source == expected
# make sure it gives (x, y) not (y, x)
expr = Equality(x, x**2 + y)
name_expr = ("test", expr)
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function x = test(x, y)\n"
" x = x.^2 + y;\n"
"end\n"
)
assert source == expected
def test_m_not_supported():
f = Function('f')
name_expr = ("test", [f(x).diff(x), S.ComplexInfinity])
result, = codegen(name_expr, "Octave", header=False, empty=False)
source = result[1]
expected = (
"function [out1, out2] = test(x)\n"
" % unsupported: Derivative(f(x), x)\n"
" % unsupported: zoo\n"
" out1 = Derivative(f(x), x);\n"
" out2 = zoo;\n"
"end\n"
)
assert source == expected
def test_global_vars_octave():
x, y, z, t = symbols("x y z t")
result = codegen(('f', x*y), "Octave", header=False, empty=False,
global_vars=(y,))
source = result[0][1]
expected = (
"function out1 = f(x)\n"
" global y\n"
" out1 = x.*y;\n"
"end\n"
)
assert source == expected
result = codegen(('f', x*y+z), "Octave", header=False, empty=False,
argument_sequence=(x, y), global_vars=(z, t))
source = result[0][1]
expected = (
"function out1 = f(x, y)\n"
" global t z\n"
" out1 = x.*y + z;\n"
"end\n"
)
assert source == expected

View File

@ -0,0 +1,401 @@
from io import StringIO
from sympy.core import S, symbols, pi, Catalan, EulerGamma, Function
from sympy.core.relational import Equality
from sympy.functions.elementary.piecewise import Piecewise
from sympy.utilities.codegen import RustCodeGen, codegen, make_routine
from sympy.testing.pytest import XFAIL
import sympy
x, y, z = symbols('x,y,z')
def test_empty_rust_code():
code_gen = RustCodeGen()
output = StringIO()
code_gen.dump_rs([], output, "file", header=False, empty=False)
source = output.getvalue()
assert source == ""
def test_simple_rust_code():
name_expr = ("test", (x + y)*z)
result, = codegen(name_expr, "Rust", header=False, empty=False)
assert result[0] == "test.rs"
source = result[1]
expected = (
"fn test(x: f64, y: f64, z: f64) -> f64 {\n"
" let out1 = z*(x + y);\n"
" out1\n"
"}\n"
)
assert source == expected
def test_simple_code_with_header():
name_expr = ("test", (x + y)*z)
result, = codegen(name_expr, "Rust", header=True, empty=False)
assert result[0] == "test.rs"
source = result[1]
version_str = "Code generated with SymPy %s" % sympy.__version__
version_line = version_str.center(76).rstrip()
expected = (
"/*\n"
" *%(version_line)s\n"
" *\n"
" * See http://www.sympy.org/ for more information.\n"
" *\n"
" * This file is part of 'project'\n"
" */\n"
"fn test(x: f64, y: f64, z: f64) -> f64 {\n"
" let out1 = z*(x + y);\n"
" out1\n"
"}\n"
) % {'version_line': version_line}
assert source == expected
def test_simple_code_nameout():
expr = Equality(z, (x + y))
name_expr = ("test", expr)
result, = codegen(name_expr, "Rust", header=False, empty=False)
source = result[1]
expected = (
"fn test(x: f64, y: f64) -> f64 {\n"
" let z = x + y;\n"
" z\n"
"}\n"
)
assert source == expected
def test_numbersymbol():
name_expr = ("test", pi**Catalan)
result, = codegen(name_expr, "Rust", header=False, empty=False)
source = result[1]
expected = (
"fn test() -> f64 {\n"
" const Catalan: f64 = %s;\n"
" let out1 = PI.powf(Catalan);\n"
" out1\n"
"}\n"
) % Catalan.evalf(17)
assert source == expected
@XFAIL
def test_numbersymbol_inline():
# FIXME: how to pass inline to the RustCodePrinter?
name_expr = ("test", [pi**Catalan, EulerGamma])
result, = codegen(name_expr, "Rust", header=False,
empty=False, inline=True)
source = result[1]
expected = (
"fn test() -> (f64, f64) {\n"
" const Catalan: f64 = %s;\n"
" const EulerGamma: f64 = %s;\n"
" let out1 = PI.powf(Catalan);\n"
" let out2 = EulerGamma);\n"
" (out1, out2)\n"
"}\n"
) % (Catalan.evalf(17), EulerGamma.evalf(17))
assert source == expected
def test_argument_order():
expr = x + y
routine = make_routine("test", expr, argument_sequence=[z, x, y], language="rust")
code_gen = RustCodeGen()
output = StringIO()
code_gen.dump_rs([routine], output, "test", header=False, empty=False)
source = output.getvalue()
expected = (
"fn test(z: f64, x: f64, y: f64) -> f64 {\n"
" let out1 = x + y;\n"
" out1\n"
"}\n"
)
assert source == expected
def test_multiple_results_rust():
# Here the output order is the input order
expr1 = (x + y)*z
expr2 = (x - y)*z
name_expr = ("test", [expr1, expr2])
result, = codegen(name_expr, "Rust", header=False, empty=False)
source = result[1]
expected = (
"fn test(x: f64, y: f64, z: f64) -> (f64, f64) {\n"
" let out1 = z*(x + y);\n"
" let out2 = z*(x - y);\n"
" (out1, out2)\n"
"}\n"
)
assert source == expected
def test_results_named_unordered():
# Here output order is based on name_expr
A, B, C = symbols('A,B,C')
expr1 = Equality(C, (x + y)*z)
expr2 = Equality(A, (x - y)*z)
expr3 = Equality(B, 2*x)
name_expr = ("test", [expr1, expr2, expr3])
result, = codegen(name_expr, "Rust", header=False, empty=False)
source = result[1]
expected = (
"fn test(x: f64, y: f64, z: f64) -> (f64, f64, f64) {\n"
" let C = z*(x + y);\n"
" let A = z*(x - y);\n"
" let B = 2*x;\n"
" (C, A, B)\n"
"}\n"
)
assert source == expected
def test_results_named_ordered():
A, B, C = symbols('A,B,C')
expr1 = Equality(C, (x + y)*z)
expr2 = Equality(A, (x - y)*z)
expr3 = Equality(B, 2*x)
name_expr = ("test", [expr1, expr2, expr3])
result = codegen(name_expr, "Rust", header=False, empty=False,
argument_sequence=(x, z, y))
assert result[0][0] == "test.rs"
source = result[0][1]
expected = (
"fn test(x: f64, z: f64, y: f64) -> (f64, f64, f64) {\n"
" let C = z*(x + y);\n"
" let A = z*(x - y);\n"
" let B = 2*x;\n"
" (C, A, B)\n"
"}\n"
)
assert source == expected
def test_complicated_rs_codegen():
from sympy.functions.elementary.trigonometric import (cos, sin, tan)
name_expr = ("testlong",
[ ((sin(x) + cos(y) + tan(z))**3).expand(),
cos(cos(cos(cos(cos(cos(cos(cos(x + y + z))))))))
])
result = codegen(name_expr, "Rust", header=False, empty=False)
assert result[0][0] == "testlong.rs"
source = result[0][1]
expected = (
"fn testlong(x: f64, y: f64, z: f64) -> (f64, f64) {\n"
" let out1 = x.sin().powi(3) + 3*x.sin().powi(2)*y.cos()"
" + 3*x.sin().powi(2)*z.tan() + 3*x.sin()*y.cos().powi(2)"
" + 6*x.sin()*y.cos()*z.tan() + 3*x.sin()*z.tan().powi(2)"
" + y.cos().powi(3) + 3*y.cos().powi(2)*z.tan()"
" + 3*y.cos()*z.tan().powi(2) + z.tan().powi(3);\n"
" let out2 = (x + y + z).cos().cos().cos().cos()"
".cos().cos().cos().cos();\n"
" (out1, out2)\n"
"}\n"
)
assert source == expected
def test_output_arg_mixed_unordered():
# named outputs are alphabetical, unnamed outputs appear in the given order
from sympy.functions.elementary.trigonometric import (cos, sin)
a = symbols("a")
name_expr = ("foo", [cos(2*x), Equality(y, sin(x)), cos(x), Equality(a, sin(2*x))])
result, = codegen(name_expr, "Rust", header=False, empty=False)
assert result[0] == "foo.rs"
source = result[1]
expected = (
"fn foo(x: f64) -> (f64, f64, f64, f64) {\n"
" let out1 = (2*x).cos();\n"
" let y = x.sin();\n"
" let out3 = x.cos();\n"
" let a = (2*x).sin();\n"
" (out1, y, out3, a)\n"
"}\n"
)
assert source == expected
def test_piecewise_():
pw = Piecewise((0, x < -1), (x**2, x <= 1), (-x+2, x > 1), (1, True), evaluate=False)
name_expr = ("pwtest", pw)
result, = codegen(name_expr, "Rust", header=False, empty=False)
source = result[1]
expected = (
"fn pwtest(x: f64) -> f64 {\n"
" let out1 = if (x < -1) {\n"
" 0\n"
" } else if (x <= 1) {\n"
" x.powi(2)\n"
" } else if (x > 1) {\n"
" 2 - x\n"
" } else {\n"
" 1\n"
" };\n"
" out1\n"
"}\n"
)
assert source == expected
@XFAIL
def test_piecewise_inline():
# FIXME: how to pass inline to the RustCodePrinter?
pw = Piecewise((0, x < -1), (x**2, x <= 1), (-x+2, x > 1), (1, True))
name_expr = ("pwtest", pw)
result, = codegen(name_expr, "Rust", header=False, empty=False,
inline=True)
source = result[1]
expected = (
"fn pwtest(x: f64) -> f64 {\n"
" let out1 = if (x < -1) { 0 } else if (x <= 1) { x.powi(2) }"
" else if (x > 1) { -x + 2 } else { 1 };\n"
" out1\n"
"}\n"
)
assert source == expected
def test_multifcns_per_file():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
result = codegen(name_expr, "Rust", header=False, empty=False)
assert result[0][0] == "foo.rs"
source = result[0][1]
expected = (
"fn foo(x: f64, y: f64) -> (f64, f64) {\n"
" let out1 = 2*x;\n"
" let out2 = 3*y;\n"
" (out1, out2)\n"
"}\n"
"fn bar(y: f64) -> (f64, f64) {\n"
" let out1 = y.powi(2);\n"
" let out2 = 4*y;\n"
" (out1, out2)\n"
"}\n"
)
assert source == expected
def test_multifcns_per_file_w_header():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
result = codegen(name_expr, "Rust", header=True, empty=False)
assert result[0][0] == "foo.rs"
source = result[0][1]
version_str = "Code generated with SymPy %s" % sympy.__version__
version_line = version_str.center(76).rstrip()
expected = (
"/*\n"
" *%(version_line)s\n"
" *\n"
" * See http://www.sympy.org/ for more information.\n"
" *\n"
" * This file is part of 'project'\n"
" */\n"
"fn foo(x: f64, y: f64) -> (f64, f64) {\n"
" let out1 = 2*x;\n"
" let out2 = 3*y;\n"
" (out1, out2)\n"
"}\n"
"fn bar(y: f64) -> (f64, f64) {\n"
" let out1 = y.powi(2);\n"
" let out2 = 4*y;\n"
" (out1, out2)\n"
"}\n"
) % {'version_line': version_line}
assert source == expected
def test_filename_match_prefix():
name_expr = [ ("foo", [2*x, 3*y]), ("bar", [y**2, 4*y]) ]
result, = codegen(name_expr, "Rust", prefix="baz", header=False,
empty=False)
assert result[0] == "baz.rs"
def test_InOutArgument():
expr = Equality(x, x**2)
name_expr = ("mysqr", expr)
result, = codegen(name_expr, "Rust", header=False, empty=False)
source = result[1]
expected = (
"fn mysqr(x: f64) -> f64 {\n"
" let x = x.powi(2);\n"
" x\n"
"}\n"
)
assert source == expected
def test_InOutArgument_order():
# can specify the order as (x, y)
expr = Equality(x, x**2 + y)
name_expr = ("test", expr)
result, = codegen(name_expr, "Rust", header=False,
empty=False, argument_sequence=(x,y))
source = result[1]
expected = (
"fn test(x: f64, y: f64) -> f64 {\n"
" let x = x.powi(2) + y;\n"
" x\n"
"}\n"
)
assert source == expected
# make sure it gives (x, y) not (y, x)
expr = Equality(x, x**2 + y)
name_expr = ("test", expr)
result, = codegen(name_expr, "Rust", header=False, empty=False)
source = result[1]
expected = (
"fn test(x: f64, y: f64) -> f64 {\n"
" let x = x.powi(2) + y;\n"
" x\n"
"}\n"
)
assert source == expected
def test_not_supported():
f = Function('f')
name_expr = ("test", [f(x).diff(x), S.ComplexInfinity])
result, = codegen(name_expr, "Rust", header=False, empty=False)
source = result[1]
expected = (
"fn test(x: f64) -> (f64, f64) {\n"
" // unsupported: Derivative(f(x), x)\n"
" // unsupported: zoo\n"
" let out1 = Derivative(f(x), x);\n"
" let out2 = zoo;\n"
" (out1, out2)\n"
"}\n"
)
assert source == expected
def test_global_vars_rust():
x, y, z, t = symbols("x y z t")
result = codegen(('f', x*y), "Rust", header=False, empty=False,
global_vars=(y,))
source = result[0][1]
expected = (
"fn f(x: f64) -> f64 {\n"
" let out1 = x*y;\n"
" out1\n"
"}\n"
)
assert source == expected
result = codegen(('f', x*y+z), "Rust", header=False, empty=False,
argument_sequence=(x, y), global_vars=(z, t))
source = result[0][1]
expected = (
"fn f(x: f64, y: f64) -> f64 {\n"
" let out1 = x*y + z;\n"
" out1\n"
"}\n"
)
assert source == expected

View File

@ -0,0 +1,129 @@
from functools import wraps
from sympy.utilities.decorator import threaded, xthreaded, memoize_property, deprecated
from sympy.testing.pytest import warns_deprecated_sympy
from sympy.core.basic import Basic
from sympy.core.relational import Eq
from sympy.matrices.dense import Matrix
from sympy.abc import x, y
def test_threaded():
@threaded
def function(expr, *args):
return 2*expr + sum(args)
assert function(Matrix([[x, y], [1, x]]), 1, 2) == \
Matrix([[2*x + 3, 2*y + 3], [5, 2*x + 3]])
assert function(Eq(x, y), 1, 2) == Eq(2*x + 3, 2*y + 3)
assert function([x, y], 1, 2) == [2*x + 3, 2*y + 3]
assert function((x, y), 1, 2) == (2*x + 3, 2*y + 3)
assert function({x, y}, 1, 2) == {2*x + 3, 2*y + 3}
@threaded
def function(expr, n):
return expr**n
assert function(x + y, 2) == x**2 + y**2
assert function(x, 2) == x**2
def test_xthreaded():
@xthreaded
def function(expr, n):
return expr**n
assert function(x + y, 2) == (x + y)**2
def test_wraps():
def my_func(x):
"""My function. """
my_func.is_my_func = True
new_my_func = threaded(my_func)
new_my_func = wraps(my_func)(new_my_func)
assert new_my_func.__name__ == 'my_func'
assert new_my_func.__doc__ == 'My function. '
assert hasattr(new_my_func, 'is_my_func')
assert new_my_func.is_my_func is True
def test_memoize_property():
class TestMemoize(Basic):
@memoize_property
def prop(self):
return Basic()
member = TestMemoize()
obj1 = member.prop
obj2 = member.prop
assert obj1 is obj2
def test_deprecated():
@deprecated('deprecated_function is deprecated',
deprecated_since_version='1.10',
# This is the target at the top of the file, which will never
# go away.
active_deprecations_target='active-deprecations')
def deprecated_function(x):
return x
with warns_deprecated_sympy():
assert deprecated_function(1) == 1
@deprecated('deprecated_class is deprecated',
deprecated_since_version='1.10',
active_deprecations_target='active-deprecations')
class deprecated_class:
pass
with warns_deprecated_sympy():
assert isinstance(deprecated_class(), deprecated_class)
# Ensure the class decorator works even when the class never returns
# itself
@deprecated('deprecated_class_new is deprecated',
deprecated_since_version='1.10',
active_deprecations_target='active-deprecations')
class deprecated_class_new:
def __new__(cls, arg):
return arg
with warns_deprecated_sympy():
assert deprecated_class_new(1) == 1
@deprecated('deprecated_class_init is deprecated',
deprecated_since_version='1.10',
active_deprecations_target='active-deprecations')
class deprecated_class_init:
def __init__(self, arg):
self.arg = 1
with warns_deprecated_sympy():
assert deprecated_class_init(1).arg == 1
@deprecated('deprecated_class_new_init is deprecated',
deprecated_since_version='1.10',
active_deprecations_target='active-deprecations')
class deprecated_class_new_init:
def __new__(cls, arg):
if arg == 0:
return arg
return object.__new__(cls)
def __init__(self, arg):
self.arg = 1
with warns_deprecated_sympy():
assert deprecated_class_new_init(0) == 0
with warns_deprecated_sympy():
assert deprecated_class_new_init(1).arg == 1

View File

@ -0,0 +1,13 @@
from sympy.testing.pytest import warns_deprecated_sympy
# See https://github.com/sympy/sympy/pull/18095
def test_deprecated_utilities():
with warns_deprecated_sympy():
import sympy.utilities.pytest # noqa:F401
with warns_deprecated_sympy():
import sympy.utilities.runtests # noqa:F401
with warns_deprecated_sympy():
import sympy.utilities.randtest # noqa:F401
with warns_deprecated_sympy():
import sympy.utilities.tmpfiles # noqa:F401

View File

@ -0,0 +1,178 @@
from itertools import zip_longest
from sympy.utilities.enumerative import (
list_visitor,
MultisetPartitionTraverser,
multiset_partitions_taocp
)
from sympy.utilities.iterables import _set_partitions
# first some functions only useful as test scaffolding - these provide
# straightforward, but slow reference implementations against which to
# compare the real versions, and also a comparison to verify that
# different versions are giving identical results.
def part_range_filter(partition_iterator, lb, ub):
"""
Filters (on the number of parts) a multiset partition enumeration
Arguments
=========
lb, and ub are a range (in the Python slice sense) on the lpart
variable returned from a multiset partition enumeration. Recall
that lpart is 0-based (it points to the topmost part on the part
stack), so if you want to return parts of sizes 2,3,4,5 you would
use lb=1 and ub=5.
"""
for state in partition_iterator:
f, lpart, pstack = state
if lpart >= lb and lpart < ub:
yield state
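# A usage sketch for part_range_filter (illustrative helper, assumed name,
# not called by the tests below): keep only the partitions of the multiset
# {a, a, b, b} that have two or three parts, i.e. lpart values 1 and 2.
def _part_range_filter_example():
    states = multiset_partitions_taocp([2, 2])
    for state in part_range_filter(states, 1, 3):
        parts = list_visitor(state, 'ab')
        assert 2 <= len(parts) <= 3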
def multiset_partitions_baseline(multiplicities, components):
"""Enumerates partitions of a multiset
Parameters
==========
multiplicities
list of integer multiplicities of the components of the multiset.
components
the components (elements) themselves
Returns
=======
Set of partitions. Each partition is a tuple of parts, and each
part is a tuple of components (with repeats to indicate
multiplicity).
Notes
=====
Multiset partitions can be created as equivalence classes of set
partitions, and this function does just that. This approach is
slow and memory intensive compared to the more advanced algorithms
available, but the code is simple and easy to understand. Hence
this routine is strictly for testing -- to provide a
straightforward baseline against which to regress the production
versions. (This code is a simplified version of an earlier
production implementation.) A short usage sketch follows this
function.
"""
canon = [] # list of components with repeats
for ct, elem in zip(multiplicities, components):
canon.extend([elem]*ct)
# accumulate the multiset partitions in a set to eliminate dups
cache = set()
n = len(canon)
for nc, q in _set_partitions(n):
rv = [[] for i in range(nc)]
for i in range(n):
rv[q[i]].append(canon[i])
canonical = tuple(
sorted([tuple(p) for p in rv]))
cache.add(canonical)
return cache
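# A usage sketch for the baseline enumerator (illustrative helper, assumed
# name, not called by the tests below): the multiset {a, a, b} has exactly
# four partitions -- {aab}, {aa|b}, {ab|a} and {a|a|b}.
def _multiset_partitions_baseline_example():
    parts = multiset_partitions_baseline([2, 1], 'ab')
    assert len(parts) == 4
    assert (('a', 'a'), ('b',)) in parts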
def compare_multiset_w_baseline(multiplicities):
"""
Enumerates the partitions of a multiset with the Knuth (TAOCP)
algorithm and with the baseline implementation, and compares the
results.
"""
letters = "abcdefghijklmnopqrstuvwxyz"
bl_partitions = multiset_partitions_baseline(multiplicities, letters)
# The partitions returned by the different algorithms may have
# their parts in different orders. Also, they generate partitions
# in different orders. Hence the sorting, and set comparison.
aocp_partitions = set()
for state in multiset_partitions_taocp(multiplicities):
p1 = tuple(sorted(
[tuple(p) for p in list_visitor(state, letters)]))
aocp_partitions.add(p1)
assert bl_partitions == aocp_partitions
def compare_multiset_states(s1, s2):
"""compare for equality two instances of multiset partition states
This is useful for comparing different versions of the algorithm
to verify correctness."""
# Comparison is physical, the only use of semantics is to ignore
# trash off the top of the stack.
f1, lpart1, pstack1 = s1
f2, lpart2, pstack2 = s2
if (lpart1 == lpart2) and (f1[0:lpart1+1] == f2[0:lpart2+1]):
if pstack1[0:f1[lpart1+1]] == pstack2[0:f2[lpart2+1]]:
return True
return False
def test_multiset_partitions_taocp():
"""Compares the output of multiset_partitions_taocp with a baseline
(set partition based) implementation."""
# Test cases should not be too large, since the baseline
# implementation is fairly slow.
multiplicities = [2,2]
compare_multiset_w_baseline(multiplicities)
multiplicities = [4,3,1]
compare_multiset_w_baseline(multiplicities)
def test_multiset_partitions_versions():
"""Compares Knuth-based versions of multiset_partitions"""
multiplicities = [5,2,2,1]
m = MultisetPartitionTraverser()
for s1, s2 in zip_longest(m.enum_all(multiplicities),
multiset_partitions_taocp(multiplicities)):
assert compare_multiset_states(s1, s2)
def subrange_exercise(mult, lb, ub):
"""Compare filter-based and more optimized subrange implementations
Helper for tests, called with both small and larger multisets.
"""
m = MultisetPartitionTraverser()
assert m.count_partitions(mult) == \
m.count_partitions_slow(mult)
# Note - multiple traversals from the same
# MultisetPartitionTraverser object cannot execute at the same
# time, hence make several instances here.
ma = MultisetPartitionTraverser()
mc = MultisetPartitionTraverser()
md = MultisetPartitionTraverser()
# Several paths to compute the partitions in the requested part-count range
a_it = ma.enum_range(mult, lb, ub)
b_it = part_range_filter(multiset_partitions_taocp(mult), lb, ub)
c_it = part_range_filter(mc.enum_small(mult, ub), lb, sum(mult))
d_it = part_range_filter(md.enum_large(mult, lb), 0, ub)
for sa, sb, sc, sd in zip_longest(a_it, b_it, c_it, d_it):
assert compare_multiset_states(sa, sb)
assert compare_multiset_states(sa, sc)
assert compare_multiset_states(sa, sd)
def test_subrange():
# Quick, but doesn't hit some of the corner cases
mult = [4,4,2,1] # mississippi
lb = 1
ub = 2
subrange_exercise(mult, lb, ub)
def test_subrange_large():
# takes a second or so, depending on cpu, Python version, etc.
mult = [6,3,2,1]
lb = 4
ub = 7
subrange_exercise(mult, lb, ub)

View File

@ -0,0 +1,12 @@
from sympy.testing.pytest import raises
from sympy.utilities.exceptions import sympy_deprecation_warning
# Only test exceptions here because the other cases are tested in the
# warns_deprecated_sympy tests
def test_sympy_deprecation_warning():
raises(TypeError, lambda: sympy_deprecation_warning('test',
deprecated_since_version=1.10,
active_deprecations_target='active-deprecations'))
raises(ValueError, lambda: sympy_deprecation_warning('test',
deprecated_since_version="1.10", active_deprecations_target='(active-deprecations)='))

View File

@ -0,0 +1,945 @@
from textwrap import dedent
from itertools import islice, product
from sympy.core.basic import Basic
from sympy.core.numbers import Integer
from sympy.core.sorting import ordered
from sympy.core.symbol import (Dummy, symbols)
from sympy.functions.combinatorial.factorials import factorial
from sympy.matrices.dense import Matrix
from sympy.combinatorics import RGS_enum, RGS_unrank, Permutation
from sympy.utilities.iterables import (
_partition, _set_partitions, binary_partitions, bracelets, capture,
cartes, common_prefix, common_suffix, connected_components, dict_merge,
filter_symbols, flatten, generate_bell, generate_derangements,
generate_involutions, generate_oriented_forest, group, has_dups, ibin,
iproduct, kbins, minlex, multiset, multiset_combinations,
multiset_partitions, multiset_permutations, necklaces, numbered_symbols,
partitions, permutations, postfixes,
prefixes, reshape, rotate_left, rotate_right, runs, sift,
strongly_connected_components, subsets, take, topological_sort, unflatten,
uniq, variations, ordered_partitions, rotations, is_palindromic, iterable,
NotIterable, multiset_derangements, signed_permutations,
sequence_partitions, sequence_partitions_empty)
from sympy.utilities.enumerative import (
factoring_visitor, multiset_partitions_taocp )
from sympy.core.singleton import S
from sympy.testing.pytest import raises, warns_deprecated_sympy
w, x, y, z = symbols('w,x,y,z')
def test_deprecated_iterables():
from sympy.utilities.iterables import default_sort_key, ordered
with warns_deprecated_sympy():
assert list(ordered([y, x])) == [x, y]
with warns_deprecated_sympy():
assert sorted([y, x], key=default_sort_key) == [x, y]
def test_is_palindromic():
assert is_palindromic('')
assert is_palindromic('x')
assert is_palindromic('xx')
assert is_palindromic('xyx')
assert not is_palindromic('xy')
assert not is_palindromic('xyzx')
assert is_palindromic('xxyzzyx', 1)
assert not is_palindromic('xxyzzyx', 2)
assert is_palindromic('xxyzzyx', 2, -1)
assert is_palindromic('xxyzzyx', 2, 6)
assert is_palindromic('xxyzyx', 1)
assert not is_palindromic('xxyzyx', 2)
assert is_palindromic('xxyzyx', 2, 2 + 3)
def test_flatten():
assert flatten((1, (1,))) == [1, 1]
assert flatten((x, (x,))) == [x, x]
ls = [[(-2, -1), (1, 2)], [(0, 0)]]
assert flatten(ls, levels=0) == ls
assert flatten(ls, levels=1) == [(-2, -1), (1, 2), (0, 0)]
assert flatten(ls, levels=2) == [-2, -1, 1, 2, 0, 0]
assert flatten(ls, levels=3) == [-2, -1, 1, 2, 0, 0]
raises(ValueError, lambda: flatten(ls, levels=-1))
class MyOp(Basic):
pass
assert flatten([MyOp(x, y), z]) == [MyOp(x, y), z]
assert flatten([MyOp(x, y), z], cls=MyOp) == [x, y, z]
assert flatten({1, 11, 2}) == list({1, 11, 2})
def test_iproduct():
assert list(iproduct()) == [()]
assert list(iproduct([])) == []
assert list(iproduct([1,2,3])) == [(1,),(2,),(3,)]
assert sorted(iproduct([1, 2], [3, 4, 5])) == [
(1,3),(1,4),(1,5),(2,3),(2,4),(2,5)]
assert sorted(iproduct([0,1],[0,1],[0,1])) == [
(0,0,0),(0,0,1),(0,1,0),(0,1,1),(1,0,0),(1,0,1),(1,1,0),(1,1,1)]
assert iterable(iproduct(S.Integers)) is True
assert iterable(iproduct(S.Integers, S.Integers)) is True
assert (3,) in iproduct(S.Integers)
assert (4, 5) in iproduct(S.Integers, S.Integers)
assert (1, 2, 3) in iproduct(S.Integers, S.Integers, S.Integers)
triples = set(islice(iproduct(S.Integers, S.Integers, S.Integers), 1000))
for n1, n2, n3 in triples:
assert isinstance(n1, Integer)
assert isinstance(n2, Integer)
assert isinstance(n3, Integer)
for t in set(product(*([range(-2, 3)]*3))):
assert t in iproduct(S.Integers, S.Integers, S.Integers)
def test_group():
assert group([]) == []
assert group([], multiple=False) == []
assert group([1]) == [[1]]
assert group([1], multiple=False) == [(1, 1)]
assert group([1, 1]) == [[1, 1]]
assert group([1, 1], multiple=False) == [(1, 2)]
assert group([1, 1, 1]) == [[1, 1, 1]]
assert group([1, 1, 1], multiple=False) == [(1, 3)]
assert group([1, 2, 1]) == [[1], [2], [1]]
assert group([1, 2, 1], multiple=False) == [(1, 1), (2, 1), (1, 1)]
assert group([1, 1, 2, 2, 2, 1, 3, 3]) == [[1, 1], [2, 2, 2], [1], [3, 3]]
assert group([1, 1, 2, 2, 2, 1, 3, 3], multiple=False) == [(1, 2),
(2, 3), (1, 1), (3, 2)]
def test_subsets():
# combinations
assert list(subsets([1, 2, 3], 0)) == [()]
assert list(subsets([1, 2, 3], 1)) == [(1,), (2,), (3,)]
assert list(subsets([1, 2, 3], 2)) == [(1, 2), (1, 3), (2, 3)]
assert list(subsets([1, 2, 3], 3)) == [(1, 2, 3)]
l = list(range(4))
assert list(subsets(l, 0, repetition=True)) == [()]
assert list(subsets(l, 1, repetition=True)) == [(0,), (1,), (2,), (3,)]
assert list(subsets(l, 2, repetition=True)) == [(0, 0), (0, 1), (0, 2),
(0, 3), (1, 1), (1, 2),
(1, 3), (2, 2), (2, 3),
(3, 3)]
assert list(subsets(l, 3, repetition=True)) == [(0, 0, 0), (0, 0, 1),
(0, 0, 2), (0, 0, 3),
(0, 1, 1), (0, 1, 2),
(0, 1, 3), (0, 2, 2),
(0, 2, 3), (0, 3, 3),
(1, 1, 1), (1, 1, 2),
(1, 1, 3), (1, 2, 2),
(1, 2, 3), (1, 3, 3),
(2, 2, 2), (2, 2, 3),
(2, 3, 3), (3, 3, 3)]
assert len(list(subsets(l, 4, repetition=True))) == 35
assert list(subsets(l[:2], 3, repetition=False)) == []
assert list(subsets(l[:2], 3, repetition=True)) == [(0, 0, 0),
(0, 0, 1),
(0, 1, 1),
(1, 1, 1)]
assert list(subsets([1, 2], repetition=True)) == \
[(), (1,), (2,), (1, 1), (1, 2), (2, 2)]
assert list(subsets([1, 2], repetition=False)) == \
[(), (1,), (2,), (1, 2)]
assert list(subsets([1, 2, 3], 2)) == \
[(1, 2), (1, 3), (2, 3)]
assert list(subsets([1, 2, 3], 2, repetition=True)) == \
[(1, 1), (1, 2), (1, 3), (2, 2), (2, 3), (3, 3)]
def test_variations():
# permutations
l = list(range(4))
assert list(variations(l, 0, repetition=False)) == [()]
assert list(variations(l, 1, repetition=False)) == [(0,), (1,), (2,), (3,)]
assert list(variations(l, 2, repetition=False)) == [(0, 1), (0, 2), (0, 3), (1, 0), (1, 2), (1, 3), (2, 0), (2, 1), (2, 3), (3, 0), (3, 1), (3, 2)]
assert list(variations(l, 3, repetition=False)) == [(0, 1, 2), (0, 1, 3), (0, 2, 1), (0, 2, 3), (0, 3, 1), (0, 3, 2), (1, 0, 2), (1, 0, 3), (1, 2, 0), (1, 2, 3), (1, 3, 0), (1, 3, 2), (2, 0, 1), (2, 0, 3), (2, 1, 0), (2, 1, 3), (2, 3, 0), (2, 3, 1), (3, 0, 1), (3, 0, 2), (3, 1, 0), (3, 1, 2), (3, 2, 0), (3, 2, 1)]
assert list(variations(l, 0, repetition=True)) == [()]
assert list(variations(l, 1, repetition=True)) == [(0,), (1,), (2,), (3,)]
assert list(variations(l, 2, repetition=True)) == [(0, 0), (0, 1), (0, 2),
(0, 3), (1, 0), (1, 1),
(1, 2), (1, 3), (2, 0),
(2, 1), (2, 2), (2, 3),
(3, 0), (3, 1), (3, 2),
(3, 3)]
assert len(list(variations(l, 3, repetition=True))) == 64
assert len(list(variations(l, 4, repetition=True))) == 256
assert list(variations(l[:2], 3, repetition=False)) == []
assert list(variations(l[:2], 3, repetition=True)) == [
(0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1),
(1, 0, 0), (1, 0, 1), (1, 1, 0), (1, 1, 1)
]
def test_cartes():
assert list(cartes([1, 2], [3, 4, 5])) == \
[(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5)]
assert list(cartes()) == [()]
assert list(cartes('a')) == [('a',)]
assert list(cartes('a', repeat=2)) == [('a', 'a')]
assert list(cartes(list(range(2)))) == [(0,), (1,)]
def test_filter_symbols():
s = numbered_symbols()
filtered = filter_symbols(s, symbols("x0 x2 x3"))
assert take(filtered, 3) == list(symbols("x1 x4 x5"))
def test_numbered_symbols():
s = numbered_symbols(cls=Dummy)
assert isinstance(next(s), Dummy)
assert next(numbered_symbols('C', start=1, exclude=[symbols('C1')])) == \
symbols('C2')
def test_sift():
assert sift(list(range(5)), lambda _: _ % 2) == {1: [1, 3], 0: [0, 2, 4]}
assert sift([x, y], lambda _: _.has(x)) == {False: [y], True: [x]}
assert sift([S.One], lambda _: _.has(x)) == {False: [1]}
assert sift([0, 1, 2, 3], lambda x: x % 2, binary=True) == (
[1, 3], [0, 2])
assert sift([0, 1, 2, 3], lambda x: x % 3 == 1, binary=True) == (
[1], [0, 2, 3])
raises(ValueError, lambda:
sift([0, 1, 2, 3], lambda x: x % 3, binary=True))
def test_take():
X = numbered_symbols()
assert take(X, 5) == list(symbols('x0:5'))
assert take(X, 5) == list(symbols('x5:10'))
assert take([1, 2, 3, 4, 5], 5) == [1, 2, 3, 4, 5]
def test_dict_merge():
assert dict_merge({}, {1: x, y: z}) == {1: x, y: z}
assert dict_merge({1: x, y: z}, {}) == {1: x, y: z}
assert dict_merge({2: z}, {1: x, y: z}) == {1: x, 2: z, y: z}
assert dict_merge({1: x, y: z}, {2: z}) == {1: x, 2: z, y: z}
assert dict_merge({1: y, 2: z}, {1: x, y: z}) == {1: x, 2: z, y: z}
assert dict_merge({1: x, y: z}, {1: y, 2: z}) == {1: y, 2: z, y: z}
def test_prefixes():
assert list(prefixes([])) == []
assert list(prefixes([1])) == [[1]]
assert list(prefixes([1, 2])) == [[1], [1, 2]]
assert list(prefixes([1, 2, 3, 4, 5])) == \
[[1], [1, 2], [1, 2, 3], [1, 2, 3, 4], [1, 2, 3, 4, 5]]
def test_postfixes():
assert list(postfixes([])) == []
assert list(postfixes([1])) == [[1]]
assert list(postfixes([1, 2])) == [[2], [1, 2]]
assert list(postfixes([1, 2, 3, 4, 5])) == \
[[5], [4, 5], [3, 4, 5], [2, 3, 4, 5], [1, 2, 3, 4, 5]]
def test_topological_sort():
V = [2, 3, 5, 7, 8, 9, 10, 11]
E = [(7, 11), (7, 8), (5, 11),
(3, 8), (3, 10), (11, 2),
(11, 9), (11, 10), (8, 9)]
assert topological_sort((V, E)) == [3, 5, 7, 8, 11, 2, 9, 10]
assert topological_sort((V, E), key=lambda v: -v) == \
[7, 5, 11, 3, 10, 8, 9, 2]
raises(ValueError, lambda: topological_sort((V, E + [(10, 7)])))
def test_strongly_connected_components():
assert strongly_connected_components(([], [])) == []
assert strongly_connected_components(([1, 2, 3], [])) == [[1], [2], [3]]
V = [1, 2, 3]
E = [(1, 2), (1, 3), (2, 1), (2, 3), (3, 1)]
assert strongly_connected_components((V, E)) == [[1, 2, 3]]
V = [1, 2, 3, 4]
E = [(1, 2), (2, 3), (3, 2), (3, 4)]
assert strongly_connected_components((V, E)) == [[4], [2, 3], [1]]
V = [1, 2, 3, 4]
E = [(1, 2), (2, 1), (3, 4), (4, 3)]
assert strongly_connected_components((V, E)) == [[1, 2], [3, 4]]
def test_connected_components():
assert connected_components(([], [])) == []
assert connected_components(([1, 2, 3], [])) == [[1], [2], [3]]
V = [1, 2, 3]
E = [(1, 2), (1, 3), (2, 1), (2, 3), (3, 1)]
assert connected_components((V, E)) == [[1, 2, 3]]
V = [1, 2, 3, 4]
E = [(1, 2), (2, 3), (3, 2), (3, 4)]
assert connected_components((V, E)) == [[1, 2, 3, 4]]
V = [1, 2, 3, 4]
E = [(1, 2), (3, 4)]
assert connected_components((V, E)) == [[1, 2], [3, 4]]
def test_rotate():
A = [0, 1, 2, 3, 4]
assert rotate_left(A, 2) == [2, 3, 4, 0, 1]
assert rotate_right(A, 1) == [4, 0, 1, 2, 3]
A = []
B = rotate_right(A, 1)
assert B == []
B.append(1)
assert A == []
B = rotate_left(A, 1)
assert B == []
B.append(1)
assert A == []
def test_multiset_partitions():
A = [0, 1, 2, 3, 4]
assert list(multiset_partitions(A, 5)) == [[[0], [1], [2], [3], [4]]]
assert len(list(multiset_partitions(A, 4))) == 10
assert len(list(multiset_partitions(A, 3))) == 25
assert list(multiset_partitions([1, 1, 1, 2, 2], 2)) == [
[[1, 1, 1, 2], [2]], [[1, 1, 1], [2, 2]], [[1, 1, 2, 2], [1]],
[[1, 1, 2], [1, 2]], [[1, 1], [1, 2, 2]]]
assert list(multiset_partitions([1, 1, 2, 2], 2)) == [
[[1, 1, 2], [2]], [[1, 1], [2, 2]], [[1, 2, 2], [1]],
[[1, 2], [1, 2]]]
assert list(multiset_partitions([1, 2, 3, 4], 2)) == [
[[1, 2, 3], [4]], [[1, 2, 4], [3]], [[1, 2], [3, 4]],
[[1, 3, 4], [2]], [[1, 3], [2, 4]], [[1, 4], [2, 3]],
[[1], [2, 3, 4]]]
assert list(multiset_partitions([1, 2, 2], 2)) == [
[[1, 2], [2]], [[1], [2, 2]]]
assert list(multiset_partitions(3)) == [
[[0, 1, 2]], [[0, 1], [2]], [[0, 2], [1]], [[0], [1, 2]],
[[0], [1], [2]]]
assert list(multiset_partitions(3, 2)) == [
[[0, 1], [2]], [[0, 2], [1]], [[0], [1, 2]]]
assert list(multiset_partitions([1] * 3, 2)) == [[[1], [1, 1]]]
assert list(multiset_partitions([1] * 3)) == [
[[1, 1, 1]], [[1], [1, 1]], [[1], [1], [1]]]
a = [3, 2, 1]
assert list(multiset_partitions(a)) == \
list(multiset_partitions(sorted(a)))
assert list(multiset_partitions(a, 5)) == []
assert list(multiset_partitions(a, 1)) == [[[1, 2, 3]]]
assert list(multiset_partitions(a + [4], 5)) == []
assert list(multiset_partitions(a + [4], 1)) == [[[1, 2, 3, 4]]]
assert list(multiset_partitions(2, 5)) == []
assert list(multiset_partitions(2, 1)) == [[[0, 1]]]
assert list(multiset_partitions('a')) == [[['a']]]
assert list(multiset_partitions('a', 2)) == []
assert list(multiset_partitions('ab')) == [[['a', 'b']], [['a'], ['b']]]
assert list(multiset_partitions('ab', 1)) == [[['a', 'b']]]
assert list(multiset_partitions('aaa', 1)) == [['aaa']]
assert list(multiset_partitions([1, 1], 1)) == [[[1, 1]]]
ans = [('mpsyy',), ('mpsy', 'y'), ('mps', 'yy'), ('mps', 'y', 'y'),
('mpyy', 's'), ('mpy', 'sy'), ('mpy', 's', 'y'), ('mp', 'syy'),
('mp', 'sy', 'y'), ('mp', 's', 'yy'), ('mp', 's', 'y', 'y'),
('msyy', 'p'), ('msy', 'py'), ('msy', 'p', 'y'), ('ms', 'pyy'),
('ms', 'py', 'y'), ('ms', 'p', 'yy'), ('ms', 'p', 'y', 'y'),
('myy', 'ps'), ('myy', 'p', 's'), ('my', 'psy'), ('my', 'ps', 'y'),
('my', 'py', 's'), ('my', 'p', 'sy'), ('my', 'p', 's', 'y'),
('m', 'psyy'), ('m', 'psy', 'y'), ('m', 'ps', 'yy'),
('m', 'ps', 'y', 'y'), ('m', 'pyy', 's'), ('m', 'py', 'sy'),
('m', 'py', 's', 'y'), ('m', 'p', 'syy'),
('m', 'p', 'sy', 'y'), ('m', 'p', 's', 'yy'),
('m', 'p', 's', 'y', 'y')]
assert [tuple("".join(part) for part in p)
for p in multiset_partitions('sympy')] == ans
factorings = [[24], [8, 3], [12, 2], [4, 6], [4, 2, 3],
[6, 2, 2], [2, 2, 2, 3]]
assert [factoring_visitor(p, [2,3]) for
p in multiset_partitions_taocp([3, 1])] == factorings
def test_multiset_combinations():
ans = ['iii', 'iim', 'iip', 'iis', 'imp', 'ims', 'ipp', 'ips',
'iss', 'mpp', 'mps', 'mss', 'pps', 'pss', 'sss']
assert [''.join(i) for i in
list(multiset_combinations('mississippi', 3))] == ans
M = multiset('mississippi')
assert [''.join(i) for i in
list(multiset_combinations(M, 3))] == ans
assert [''.join(i) for i in multiset_combinations(M, 30)] == []
assert list(multiset_combinations([[1], [2, 3]], 2)) == [[[1], [2, 3]]]
assert len(list(multiset_combinations('a', 3))) == 0
assert len(list(multiset_combinations('a', 0))) == 1
assert list(multiset_combinations('abc', 1)) == [['a'], ['b'], ['c']]
raises(ValueError, lambda: list(multiset_combinations({0: 3, 1: -1}, 2)))
def test_multiset_permutations():
ans = ['abby', 'abyb', 'aybb', 'baby', 'bayb', 'bbay', 'bbya', 'byab',
'byba', 'yabb', 'ybab', 'ybba']
assert [''.join(i) for i in multiset_permutations('baby')] == ans
assert [''.join(i) for i in multiset_permutations(multiset('baby'))] == ans
assert list(multiset_permutations([0, 0, 0], 2)) == [[0, 0]]
assert list(multiset_permutations([0, 2, 1], 2)) == [
[0, 1], [0, 2], [1, 0], [1, 2], [2, 0], [2, 1]]
assert len(list(multiset_permutations('a', 0))) == 1
assert len(list(multiset_permutations('a', 3))) == 0
for nul in ([], {}, ''):
assert list(multiset_permutations(nul)) == [[]]
assert list(multiset_permutations(nul, 0)) == [[]]
# impossible requests give no result
assert list(multiset_permutations(nul, 1)) == []
assert list(multiset_permutations(nul, -1)) == []
def test():
for i in range(1, 7):
print(i)
for p in multiset_permutations([0, 0, 1, 0, 1], i):
print(p)
assert capture(lambda: test()) == dedent('''\
1
[0]
[1]
2
[0, 0]
[0, 1]
[1, 0]
[1, 1]
3
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
4
[0, 0, 0, 1]
[0, 0, 1, 0]
[0, 0, 1, 1]
[0, 1, 0, 0]
[0, 1, 0, 1]
[0, 1, 1, 0]
[1, 0, 0, 0]
[1, 0, 0, 1]
[1, 0, 1, 0]
[1, 1, 0, 0]
5
[0, 0, 0, 1, 1]
[0, 0, 1, 0, 1]
[0, 0, 1, 1, 0]
[0, 1, 0, 0, 1]
[0, 1, 0, 1, 0]
[0, 1, 1, 0, 0]
[1, 0, 0, 0, 1]
[1, 0, 0, 1, 0]
[1, 0, 1, 0, 0]
[1, 1, 0, 0, 0]
6\n''')
raises(ValueError, lambda: list(multiset_permutations({0: 3, 1: -1})))
def test_partitions():
ans = [[{}], [(0, {})]]
for i in range(2):
assert list(partitions(0, size=i)) == ans[i]
assert list(partitions(1, 0, size=i)) == ans[i]
assert list(partitions(6, 2, 2, size=i)) == ans[i]
assert list(partitions(6, 2, None, size=i)) != ans[i]
assert list(partitions(6, None, 2, size=i)) != ans[i]
assert list(partitions(6, 2, 0, size=i)) == ans[i]
assert list(partitions(6, k=2)) == [
{2: 3}, {1: 2, 2: 2}, {1: 4, 2: 1}, {1: 6}]
assert list(partitions(6, k=3)) == [
{3: 2}, {1: 1, 2: 1, 3: 1}, {1: 3, 3: 1}, {2: 3}, {1: 2, 2: 2},
{1: 4, 2: 1}, {1: 6}]
assert list(partitions(8, k=4, m=3)) == [
{4: 2}, {1: 1, 3: 1, 4: 1}, {2: 2, 4: 1}, {2: 1, 3: 2}] == [
i for i in partitions(8, k=4, m=3) if all(k <= 4 for k in i)
and sum(i.values()) <= 3]
assert list(partitions(S(3), m=2)) == [
{3: 1}, {1: 1, 2: 1}]
assert list(partitions(4, k=3)) == [
{1: 1, 3: 1}, {2: 2}, {1: 2, 2: 1}, {1: 4}] == [
i for i in partitions(4) if all(k <= 3 for k in i)]
# Consistency check on output of _set_partitions and RGS_unrank.
# This provides a sanity test on both routines. Also verifies that
# the total number of partitions is the same in each case.
# (from pkrathmann2)
for n in range(2, 6):
i = 0
for m, q in _set_partitions(n):
assert q == RGS_unrank(i, n)
i += 1
assert i == RGS_enum(n)
def test_binary_partitions():
assert [i[:] for i in binary_partitions(10)] == [[8, 2], [8, 1, 1],
[4, 4, 2], [4, 4, 1, 1], [4, 2, 2, 2], [4, 2, 2, 1, 1],
[4, 2, 1, 1, 1, 1], [4, 1, 1, 1, 1, 1, 1], [2, 2, 2, 2, 2],
[2, 2, 2, 2, 1, 1], [2, 2, 2, 1, 1, 1, 1], [2, 2, 1, 1, 1, 1, 1, 1],
[2, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
assert len([j[:] for j in binary_partitions(16)]) == 36
def test_bell_perm():
assert [len(set(generate_bell(i))) for i in range(1, 7)] == [
factorial(i) for i in range(1, 7)]
assert list(generate_bell(3)) == [
(0, 1, 2), (0, 2, 1), (2, 0, 1), (2, 1, 0), (1, 2, 0), (1, 0, 2)]
# generate_bell and trotterjohnson are advertised to return the same
# permutations; this is not technically necessary so this test could
# be removed
for n in range(1, 5):
p = Permutation(range(n))
b = generate_bell(n)
for bi in b:
assert bi == tuple(p.array_form)
p = p.next_trotterjohnson()
raises(ValueError, lambda: list(generate_bell(0))) # XXX is this consistent with other permutation algorithms?
def test_involutions():
lengths = [1, 2, 4, 10, 26, 76]
for n, N in enumerate(lengths):
i = list(generate_involutions(n + 1))
assert len(i) == N
assert len({Permutation(j)**2 for j in i}) == 1
def test_derangements():
assert len(list(generate_derangements(list(range(6))))) == 265
assert ''.join(''.join(i) for i in generate_derangements('abcde')) == (
'badecbaecdbcaedbcdeabceadbdaecbdeacbdecabeacdbedacbedcacabedcadebcaebd'
'cdaebcdbeacdeabcdebaceabdcebadcedabcedbadabecdaebcdaecbdcaebdcbeadceab'
'dcebadeabcdeacbdebacdebcaeabcdeadbceadcbecabdecbadecdabecdbaedabcedacb'
'edbacedbca')
assert list(generate_derangements([0, 1, 2, 3])) == [
[1, 0, 3, 2], [1, 2, 3, 0], [1, 3, 0, 2], [2, 0, 3, 1],
[2, 3, 0, 1], [2, 3, 1, 0], [3, 0, 1, 2], [3, 2, 0, 1], [3, 2, 1, 0]]
assert list(generate_derangements([0, 1, 2, 2])) == [
[2, 2, 0, 1], [2, 2, 1, 0]]
assert list(generate_derangements('ba')) == [list('ab')]
# multiset_derangements
D = multiset_derangements
assert list(D('abb')) == []
assert [''.join(i) for i in D('ab')] == ['ba']
assert [''.join(i) for i in D('abc')] == ['bca', 'cab']
assert [''.join(i) for i in D('aabb')] == ['bbaa']
assert [''.join(i) for i in D('aabbcccc')] == [
'ccccaabb', 'ccccabab', 'ccccabba', 'ccccbaab', 'ccccbaba',
'ccccbbaa']
assert [''.join(i) for i in D('aabbccc')] == [
'cccabba', 'cccabab', 'cccaabb', 'ccacbba', 'ccacbab',
'ccacabb', 'cbccbaa', 'cbccaba', 'cbccaab', 'bcccbaa',
'bcccaba', 'bcccaab']
assert [''.join(i) for i in D('books')] == ['kbsoo', 'ksboo',
'sbkoo', 'skboo', 'oksbo', 'oskbo', 'okbso', 'obkso', 'oskob',
'oksob', 'osbok', 'obsok']
assert list(generate_derangements([[3], [2], [2], [1]])) == [
[[2], [1], [3], [2]], [[2], [3], [1], [2]]]
def test_necklaces():
def count(n, k, f):
return len(list(necklaces(n, k, f)))
m = []
for i in range(1, 8):
m.append((
i, count(i, 2, 0), count(i, 2, 1), count(i, 3, 1)))
assert Matrix(m) == Matrix([
[1, 2, 2, 3],
[2, 3, 3, 6],
[3, 4, 4, 10],
[4, 6, 6, 21],
[5, 8, 8, 39],
[6, 14, 13, 92],
[7, 20, 18, 198]])
def test_bracelets():
bc = list(bracelets(2, 4))
assert Matrix(bc) == Matrix([
[0, 0],
[0, 1],
[0, 2],
[0, 3],
[1, 1],
[1, 2],
[1, 3],
[2, 2],
[2, 3],
[3, 3]
])
bc = list(bracelets(4, 2))
assert Matrix(bc) == Matrix([
[0, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 1, 0, 1],
[0, 1, 1, 1],
[1, 1, 1, 1]
])
def test_generate_oriented_forest():
assert list(generate_oriented_forest(5)) == [[0, 1, 2, 3, 4],
[0, 1, 2, 3, 3], [0, 1, 2, 3, 2], [0, 1, 2, 3, 1], [0, 1, 2, 3, 0],
[0, 1, 2, 2, 2], [0, 1, 2, 2, 1], [0, 1, 2, 2, 0], [0, 1, 2, 1, 2],
[0, 1, 2, 1, 1], [0, 1, 2, 1, 0], [0, 1, 2, 0, 1], [0, 1, 2, 0, 0],
[0, 1, 1, 1, 1], [0, 1, 1, 1, 0], [0, 1, 1, 0, 1], [0, 1, 1, 0, 0],
[0, 1, 0, 1, 0], [0, 1, 0, 0, 0], [0, 0, 0, 0, 0]]
assert len(list(generate_oriented_forest(10))) == 1842
def test_unflatten():
r = list(range(10))
assert unflatten(r) == list(zip(r[::2], r[1::2]))
assert unflatten(r, 5) == [tuple(r[:5]), tuple(r[5:])]
raises(ValueError, lambda: unflatten(list(range(10)), 3))
raises(ValueError, lambda: unflatten(list(range(10)), -2))
def test_common_prefix_suffix():
assert common_prefix([], [1]) == []
assert common_prefix(list(range(3))) == [0, 1, 2]
assert common_prefix(list(range(3)), list(range(4))) == [0, 1, 2]
assert common_prefix([1, 2, 3], [1, 2, 5]) == [1, 2]
assert common_prefix([1, 2, 3], [1, 3, 5]) == [1]
assert common_suffix([], [1]) == []
assert common_suffix(list(range(3))) == [0, 1, 2]
assert common_suffix(list(range(3)), list(range(3))) == [0, 1, 2]
assert common_suffix(list(range(3)), list(range(4))) == []
assert common_suffix([1, 2, 3], [9, 2, 3]) == [2, 3]
assert common_suffix([1, 2, 3], [9, 7, 3]) == [3]
def test_minlex():
assert minlex([1, 2, 0]) == (0, 1, 2)
assert minlex((1, 2, 0)) == (0, 1, 2)
assert minlex((1, 0, 2)) == (0, 2, 1)
assert minlex((1, 0, 2), directed=False) == (0, 1, 2)
assert minlex('aba') == 'aab'
assert minlex(('bb', 'aaa', 'c', 'a'), key=len) == ('c', 'a', 'bb', 'aaa')
def test_ordered():
assert list(ordered((x, y), hash, default=False)) in [[x, y], [y, x]]
assert list(ordered((x, y), hash, default=False)) == \
list(ordered((y, x), hash, default=False))
assert list(ordered((x, y))) == [x, y]
seq, keys = [[[1, 2, 1], [0, 3, 1], [1, 1, 3], [2], [1]],
(lambda x: len(x), lambda x: sum(x))]
assert list(ordered(seq, keys, default=False, warn=False)) == \
[[1], [2], [1, 2, 1], [0, 3, 1], [1, 1, 3]]
raises(ValueError, lambda:
list(ordered(seq, keys, default=False, warn=True)))
def test_runs():
assert runs([]) == []
assert runs([1]) == [[1]]
assert runs([1, 1]) == [[1], [1]]
assert runs([1, 1, 2]) == [[1], [1, 2]]
assert runs([1, 2, 1]) == [[1, 2], [1]]
assert runs([2, 1, 1]) == [[2], [1], [1]]
from operator import lt
assert runs([2, 1, 1], lt) == [[2, 1], [1]]
def test_reshape():
seq = list(range(1, 9))
assert reshape(seq, [4]) == \
[[1, 2, 3, 4], [5, 6, 7, 8]]
assert reshape(seq, (4,)) == \
[(1, 2, 3, 4), (5, 6, 7, 8)]
assert reshape(seq, (2, 2)) == \
[(1, 2, 3, 4), (5, 6, 7, 8)]
assert reshape(seq, (2, [2])) == \
[(1, 2, [3, 4]), (5, 6, [7, 8])]
assert reshape(seq, ((2,), [2])) == \
[((1, 2), [3, 4]), ((5, 6), [7, 8])]
assert reshape(seq, (1, [2], 1)) == \
[(1, [2, 3], 4), (5, [6, 7], 8)]
assert reshape(tuple(seq), ([[1], 1, (2,)],)) == \
(([[1], 2, (3, 4)],), ([[5], 6, (7, 8)],))
assert reshape(tuple(seq), ([1], 1, (2,))) == \
(([1], 2, (3, 4)), ([5], 6, (7, 8)))
assert reshape(list(range(12)), [2, [3], {2}, (1, (3,), 1)]) == \
[[0, 1, [2, 3, 4], {5, 6}, (7, (8, 9, 10), 11)]]
raises(ValueError, lambda: reshape([0, 1], [-1]))
raises(ValueError, lambda: reshape([0, 1], [3]))
def test_uniq():
assert list(uniq(p for p in partitions(4))) == \
[{4: 1}, {1: 1, 3: 1}, {2: 2}, {1: 2, 2: 1}, {1: 4}]
assert list(uniq(x % 2 for x in range(5))) == [0, 1]
assert list(uniq('a')) == ['a']
assert list(uniq('ababc')) == list('abc')
assert list(uniq([[1], [2, 1], [1]])) == [[1], [2, 1]]
assert list(uniq(permutations(i for i in [[1], 2, 2]))) == \
[([1], 2, 2), (2, [1], 2), (2, 2, [1])]
assert list(uniq([2, 3, 2, 4, [2], [1], [2], [3], [1]])) == \
[2, 3, 4, [2], [1], [3]]
f = [1]
raises(RuntimeError, lambda: [f.remove(i) for i in uniq(f)])
f = [[1]]
raises(RuntimeError, lambda: [f.remove(i) for i in uniq(f)])
def test_kbins():
assert len(list(kbins('1123', 2, ordered=1))) == 24
assert len(list(kbins('1123', 2, ordered=11))) == 36
assert len(list(kbins('1123', 2, ordered=10))) == 10
assert len(list(kbins('1123', 2, ordered=0))) == 5
assert len(list(kbins('1123', 2, ordered=None))) == 3
def test1():
for orderedval in [None, 0, 1, 10, 11]:
print('ordered =', orderedval)
for p in kbins([0, 0, 1], 2, ordered=orderedval):
print(' ', p)
assert capture(lambda : test1()) == dedent('''\
ordered = None
[[0], [0, 1]]
[[0, 0], [1]]
ordered = 0
[[0, 0], [1]]
[[0, 1], [0]]
ordered = 1
[[0], [0, 1]]
[[0], [1, 0]]
[[1], [0, 0]]
ordered = 10
[[0, 0], [1]]
[[1], [0, 0]]
[[0, 1], [0]]
[[0], [0, 1]]
ordered = 11
[[0], [0, 1]]
[[0, 0], [1]]
[[0], [1, 0]]
[[0, 1], [0]]
[[1], [0, 0]]
[[1, 0], [0]]\n''')
def test2():
for orderedval in [None, 0, 1, 10, 11]:
print('ordered =', orderedval)
for p in kbins(list(range(3)), 2, ordered=orderedval):
print(' ', p)
assert capture(lambda : test2()) == dedent('''\
ordered = None
[[0], [1, 2]]
[[0, 1], [2]]
ordered = 0
[[0, 1], [2]]
[[0, 2], [1]]
[[0], [1, 2]]
ordered = 1
[[0], [1, 2]]
[[0], [2, 1]]
[[1], [0, 2]]
[[1], [2, 0]]
[[2], [0, 1]]
[[2], [1, 0]]
ordered = 10
[[0, 1], [2]]
[[2], [0, 1]]
[[0, 2], [1]]
[[1], [0, 2]]
[[0], [1, 2]]
[[1, 2], [0]]
ordered = 11
[[0], [1, 2]]
[[0, 1], [2]]
[[0], [2, 1]]
[[0, 2], [1]]
[[1], [0, 2]]
[[1, 0], [2]]
[[1], [2, 0]]
[[1, 2], [0]]
[[2], [0, 1]]
[[2, 0], [1]]
[[2], [1, 0]]
[[2, 1], [0]]\n''')
def test_has_dups():
assert has_dups(set()) is False
assert has_dups(list(range(3))) is False
assert has_dups([1, 2, 1]) is True
assert has_dups([[1], [1]]) is True
assert has_dups([[1], [2]]) is False
def test__partition():
assert _partition('abcde', [1, 0, 1, 2, 0]) == [
['b', 'e'], ['a', 'c'], ['d']]
assert _partition('abcde', [1, 0, 1, 2, 0], 3) == [
['b', 'e'], ['a', 'c'], ['d']]
output = (3, [1, 0, 1, 2, 0])
assert _partition('abcde', *output) == [['b', 'e'], ['a', 'c'], ['d']]
def test_ordered_partitions():
from sympy.functions.combinatorial.numbers import nT
f = ordered_partitions
assert list(f(0, 1)) == [[]]
assert list(f(1, 0)) == [[]]
for i in range(1, 7):
for j in [None] + list(range(1, i)):
assert (
sum(1 for p in f(i, j, 1)) ==
sum(1 for p in f(i, j, 0)) ==
nT(i, j))
def test_rotations():
assert list(rotations('ab')) == [['a', 'b'], ['b', 'a']]
assert list(rotations(range(3))) == [[0, 1, 2], [1, 2, 0], [2, 0, 1]]
assert list(rotations(range(3), dir=-1)) == [[0, 1, 2], [2, 0, 1], [1, 2, 0]]
def test_ibin():
assert ibin(3) == [1, 1]
assert ibin(3, 3) == [0, 1, 1]
assert ibin(3, str=True) == '11'
assert ibin(3, 3, str=True) == '011'
assert list(ibin(2, 'all')) == [(0, 0), (0, 1), (1, 0), (1, 1)]
assert list(ibin(2, '', str=True)) == ['00', '01', '10', '11']
raises(ValueError, lambda: ibin(-.5))
raises(ValueError, lambda: ibin(2, 1))
def test_iterable():
assert iterable(0) is False
assert iterable(1) is False
assert iterable(None) is False
class Test1(NotIterable):
pass
assert iterable(Test1()) is False
class Test2(NotIterable):
_iterable = True
assert iterable(Test2()) is True
class Test3:
pass
assert iterable(Test3()) is False
class Test4:
_iterable = True
assert iterable(Test4()) is True
class Test5:
def __iter__(self):
yield 1
assert iterable(Test5()) is True
class Test6(Test5):
_iterable = False
assert iterable(Test6()) is False
def test_sequence_partitions():
assert list(sequence_partitions([1], 1)) == [[[1]]]
assert list(sequence_partitions([1, 2], 1)) == [[[1, 2]]]
assert list(sequence_partitions([1, 2], 2)) == [[[1], [2]]]
assert list(sequence_partitions([1, 2, 3], 1)) == [[[1, 2, 3]]]
assert list(sequence_partitions([1, 2, 3], 2)) == \
[[[1], [2, 3]], [[1, 2], [3]]]
assert list(sequence_partitions([1, 2, 3], 3)) == [[[1], [2], [3]]]
# Exceptional cases
assert list(sequence_partitions([], 0)) == []
assert list(sequence_partitions([], 1)) == []
assert list(sequence_partitions([1, 2], 0)) == []
assert list(sequence_partitions([1, 2], 3)) == []
def test_sequence_partitions_empty():
assert list(sequence_partitions_empty([], 1)) == [[[]]]
assert list(sequence_partitions_empty([], 2)) == [[[], []]]
assert list(sequence_partitions_empty([], 3)) == [[[], [], []]]
assert list(sequence_partitions_empty([1], 1)) == [[[1]]]
assert list(sequence_partitions_empty([1], 2)) == [[[], [1]], [[1], []]]
assert list(sequence_partitions_empty([1], 3)) == \
[[[], [], [1]], [[], [1], []], [[1], [], []]]
assert list(sequence_partitions_empty([1, 2], 1)) == [[[1, 2]]]
assert list(sequence_partitions_empty([1, 2], 2)) == \
[[[], [1, 2]], [[1], [2]], [[1, 2], []]]
assert list(sequence_partitions_empty([1, 2], 3)) == [
[[], [], [1, 2]], [[], [1], [2]], [[], [1, 2], []],
[[1], [], [2]], [[1], [2], []], [[1, 2], [], []]
]
assert list(sequence_partitions_empty([1, 2, 3], 1)) == [[[1, 2, 3]]]
assert list(sequence_partitions_empty([1, 2, 3], 2)) == \
[[[], [1, 2, 3]], [[1], [2, 3]], [[1, 2], [3]], [[1, 2, 3], []]]
assert list(sequence_partitions_empty([1, 2, 3], 3)) == [
[[], [], [1, 2, 3]], [[], [1], [2, 3]],
[[], [1, 2], [3]], [[], [1, 2, 3], []],
[[1], [], [2, 3]], [[1], [2], [3]],
[[1], [2, 3], []], [[1, 2], [], [3]],
[[1, 2], [3], []], [[1, 2, 3], [], []]
]
# Exceptional cases
assert list(sequence_partitions([], 0)) == []
assert list(sequence_partitions([1], 0)) == []
assert list(sequence_partitions([1, 2], 0)) == []
def test_signed_permutations():
ans = [(0, 1, 1), (0, -1, 1), (0, 1, -1), (0, -1, -1),
(1, 0, 1), (-1, 0, 1), (1, 0, -1), (-1, 0, -1),
(1, 1, 0), (-1, 1, 0), (1, -1, 0), (-1, -1, 0)]
assert list(signed_permutations((0, 1, 1))) == ans
assert list(signed_permutations((1, 0, 1))) == ans
assert list(signed_permutations((1, 1, 0))) == ans

File diff suppressed because it is too large

View File

@@ -0,0 +1,164 @@
import pickle
from sympy.core.relational import (Eq, Ne)
from sympy.core.singleton import S
from sympy.core.symbol import symbols
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.functions.elementary.trigonometric import (cos, sin)
from sympy.external import import_module
from sympy.testing.pytest import skip
from sympy.utilities.matchpy_connector import WildDot, WildPlus, WildStar, Replacer
matchpy = import_module("matchpy")
x, y, z = symbols("x y z")
def _get_first_match(expr, pattern):
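# Return the first (Pattern, Substitution) pair that matchpy finds for expr.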
from matchpy import ManyToOneMatcher, Pattern
matcher = ManyToOneMatcher()
matcher.add(Pattern(pattern))
return next(iter(matcher.match(expr)))
def test_matchpy_connector():
if matchpy is None:
skip("matchpy not installed")
from multiset import Multiset
from matchpy import Pattern, Substitution
w_ = WildDot("w_")
w__ = WildPlus("w__")
w___ = WildStar("w___")
expr = x + y
pattern = x + w_
p, subst = _get_first_match(expr, pattern)
assert p == Pattern(pattern)
assert subst == Substitution({'w_': y})
expr = x + y + z
pattern = x + w__
p, subst = _get_first_match(expr, pattern)
assert p == Pattern(pattern)
assert subst == Substitution({'w__': Multiset([y, z])})
expr = x + y + z
pattern = x + y + z + w___
p, subst = _get_first_match(expr, pattern)
assert p == Pattern(pattern)
assert subst == Substitution({'w___': Multiset()})
def test_matchpy_optional():
if matchpy is None:
skip("matchpy not installed")
from matchpy import Pattern, Substitution
from matchpy import ManyToOneReplacer, ReplacementRule
p = WildDot("p", optional=1)
q = WildDot("q", optional=0)
pattern = p*x + q
expr1 = 2*x
pa, subst = _get_first_match(expr1, pattern)
assert pa == Pattern(pattern)
assert subst == Substitution({'p': 2, 'q': 0})
expr2 = x + 3
pa, subst = _get_first_match(expr2, pattern)
assert pa == Pattern(pattern)
assert subst == Substitution({'p': 1, 'q': 3})
expr3 = x
pa, subst = _get_first_match(expr3, pattern)
assert pa == Pattern(pattern)
assert subst == Substitution({'p': 1, 'q': 0})
expr4 = x*y + z
pa, subst = _get_first_match(expr4, pattern)
assert pa == Pattern(pattern)
assert subst == Substitution({'p': y, 'q': z})
replacer = ManyToOneReplacer()
replacer.add(ReplacementRule(Pattern(pattern), lambda p, q: sin(p)*cos(q)))
assert replacer.replace(expr1) == sin(2)*cos(0)
assert replacer.replace(expr2) == sin(1)*cos(3)
assert replacer.replace(expr3) == sin(1)*cos(0)
assert replacer.replace(expr4) == sin(y)*cos(z)
def test_replacer():
if matchpy is None:
skip("matchpy not installed")
for info in [True, False]:
for lambdify in [True, False]:
_perform_test_replacer(info, lambdify)
def _perform_test_replacer(info, lambdify):
x1_ = WildDot("x1_")
x2_ = WildDot("x2_")
a_ = WildDot("a_", optional=S.One)
b_ = WildDot("b_", optional=S.One)
c_ = WildDot("c_", optional=S.Zero)
replacer = Replacer(common_constraints=[
matchpy.CustomConstraint(lambda a_: not a_.has(x)),
matchpy.CustomConstraint(lambda b_: not b_.has(x)),
matchpy.CustomConstraint(lambda c_: not c_.has(x)),
], lambdify=lambdify, info=info)
# Rewrite the equation into implicit form, unless it's already solved:
replacer.add(Eq(x1_, x2_), Eq(x1_ - x2_, 0), conditions_nonfalse=[Ne(x2_, 0), Ne(x1_, 0), Ne(x1_, x), Ne(x2_, x)], info=1)
# Simple equation solver for real numbers:
replacer.add(Eq(a_*x + b_, 0), Eq(x, -b_/a_), info=2)
disc = b_**2 - 4*a_*c_
replacer.add(
Eq(a_*x**2 + b_*x + c_, 0),
Eq(x, (-b_ - sqrt(disc))/(2*a_)) | Eq(x, (-b_ + sqrt(disc))/(2*a_)),
conditions_nonfalse=[disc >= 0],
info=3
)
replacer.add(
Eq(a_*x**2 + c_, 0),
Eq(x, sqrt(-c_/a_)) | Eq(x, -sqrt(-c_/a_)),
conditions_nonfalse=[-c_*a_ > 0],
info=4
)
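# With info=True, replacer.replace returns a (result, infos-of-applied-rules)
# pair; g builds the expected value for either mode.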
g = lambda expr, infos: (expr, infos) if info else expr
assert replacer.replace(Eq(3*x, y)) == g(Eq(x, y/3), [1, 2])
assert replacer.replace(Eq(x**2 + 1, 0)) == g(Eq(x**2 + 1, 0), [])
assert replacer.replace(Eq(x**2, 4)) == g((Eq(x, 2) | Eq(x, -2)), [1, 4])
assert replacer.replace(Eq(x**2 + 4*y*x + 4*y**2, 0)) == g(Eq(x, -2*y), [3])
def test_matchpy_object_pickle():
if matchpy is None:
return
a1 = WildDot("a")
a2 = pickle.loads(pickle.dumps(a1))
assert a1 == a2
a1 = WildDot("a", S(1))
a2 = pickle.loads(pickle.dumps(a1))
assert a1 == a2
a1 = WildPlus("a", S(1))
a2 = pickle.loads(pickle.dumps(a1))
assert a1 == a2
a1 = WildStar("a", S(1))
a2 = pickle.loads(pickle.dumps(a1))
assert a1 == a2

View File

@@ -0,0 +1,33 @@
import os
from textwrap import dedent
from sympy.external import import_module
from sympy.testing.pytest import skip
from sympy.utilities.mathml import apply_xsl
lxml = import_module('lxml')
path = os.path.abspath(os.path.join(os.path.dirname(__file__), "test_xxe.py"))
def test_xxe():
assert os.path.isfile(path)
if not lxml:
skip("lxml not installed.")
mml = dedent(
rf"""
<!--?xml version="1.0" ?-->
<!DOCTYPE replace [<!ENTITY ent SYSTEM "file://{path}"> ]>
<userInfo>
<firstName>John</firstName>
<lastName>&ent;</lastName>
</userInfo>
"""
)
xsl = 'mathml/data/simple_mmlctop.xsl'
res = apply_xsl(mml, xsl)
assert res == \
'<?xml version="1.0"?>\n<userInfo>\n<firstName>John</firstName>\n<lastName/>\n</userInfo>\n'

View File

@@ -0,0 +1,151 @@
from textwrap import dedent
import sys
from subprocess import Popen, PIPE
import os
from sympy.core.singleton import S
from sympy.testing.pytest import (raises, warns_deprecated_sympy,
skip_under_pyodide)
from sympy.utilities.misc import (translate, replace, ordinal, rawlines,
strlines, as_int, find_executable)
from sympy.external import import_module
pyodide_js = import_module('pyodide_js')
def test_translate():
abc = 'abc'
assert translate(abc, None, 'a') == 'bc'
assert translate(abc, None, '') == 'abc'
assert translate(abc, {'a': 'x'}, 'c') == 'xb'
assert translate(abc, {'a': 'bc'}, 'c') == 'bcb'
assert translate(abc, {'ab': 'x'}, 'c') == 'x'
assert translate(abc, {'ab': ''}, 'c') == ''
assert translate(abc, {'bc': 'x'}, 'c') == 'ab'
assert translate(abc, {'abc': 'x', 'a': 'y'}) == 'x'
u = chr(4096)
assert translate(abc, 'a', 'x', u) == 'xbc'
assert (u in translate(abc, 'a', u, u)) is True
def test_replace():
assert replace('abc', ('a', 'b')) == 'bbc'
assert replace('abc', {'a': 'Aa'}) == 'Aabc'
assert replace('abc', ('a', 'b'), ('c', 'C')) == 'bbC'
def test_ordinal():
assert ordinal(-1) == '-1st'
assert ordinal(0) == '0th'
assert ordinal(1) == '1st'
assert ordinal(2) == '2nd'
assert ordinal(3) == '3rd'
assert all(ordinal(i).endswith('th') for i in range(4, 21))
assert ordinal(100) == '100th'
assert ordinal(101) == '101st'
assert ordinal(102) == '102nd'
assert ordinal(103) == '103rd'
assert ordinal(104) == '104th'
assert ordinal(200) == '200th'
assert all(ordinal(i) == str(i) + 'th' for i in range(-220, -203))
def test_rawlines():
assert rawlines('a a\na') == "dedent('''\\\n a a\n a''')"
assert rawlines('a a') == "'a a'"
assert rawlines(strlines('\\le"ft')) == (
'(\n'
" '(\\n'\n"
' \'r\\\'\\\\le"ft\\\'\\n\'\n'
" ')'\n"
')')
def test_strlines():
q = 'this quote (") is in the middle'
# the following assert rhs was prepared with
# print(rawlines(strlines(q, 10)))
assert strlines(q, 10) == dedent('''\
(
'this quo'
'te (") i'
's in the'
' middle'
)''')
assert q == (
'this quo'
'te (") i'
's in the'
' middle'
)
q = "this quote (') is in the middle"
assert strlines(q, 20) == dedent('''\
(
"this quote (') is "
"in the middle"
)''')
assert strlines('\\left') == (
'(\n'
"r'\\left'\n"
')')
assert strlines('\\left', short=True) == r"r'\left'"
assert strlines('\\le"ft') == (
'(\n'
'r\'\\le"ft\'\n'
')')
q = 'this\nother line'
assert strlines(q) == rawlines(q)
def test_translate_args():
try:
translate(None, None, None, 'not_none')
except ValueError:
pass # Exception raised successfully
else:
assert False
assert translate('s', None, None, None) == 's'
try:
translate('s', 'a', 'bc')
except ValueError:
pass # Exception raised successfully
else:
assert False
@skip_under_pyodide("Cannot create subprocess under pyodide.")
def test_debug_output():
env = os.environ.copy()
env['SYMPY_DEBUG'] = 'True'
cmd = 'from sympy import *; x = Symbol("x"); print(integrate((1-cos(x))/x, x))'
cmdline = [sys.executable, '-c', cmd]
proc = Popen(cmdline, env=env, stdout=PIPE, stderr=PIPE)
out, err = proc.communicate()
out = out.decode('ascii') # utf-8?
err = err.decode('ascii')
expected = 'substituted: -x*(1 - cos(x)), u: 1/x, u_var: _u'
assert expected in err, err
def test_as_int():
raises(ValueError, lambda : as_int(True))
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
raises(ValueError, lambda : as_int(S.NaN))
raises(ValueError, lambda : as_int(S.Infinity))
raises(ValueError, lambda : as_int(S.NegativeInfinity))
raises(ValueError, lambda : as_int(S.ComplexInfinity))
# for the following, limited precision makes int(arg) == arg
# but the int value is not necessarily what a user might have
# expected; Q.prime is more nuanced in its response for
# expressions which might be complex representations of an
# integer. This is not -- by design -- as_int's role.
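# (e.g. int(1e23) == 1e23 is True, but int(1e23) is
# 99999999999999991611392, not 10**23)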
raises(ValueError, lambda : as_int(1e23))
raises(ValueError, lambda : as_int(S('1.'+'0'*20+'1')))
assert as_int(True, strict=False) == 1
def test_deprecated_find_executable():
with warns_deprecated_sympy():
find_executable('python')

View File

@@ -0,0 +1,717 @@
import inspect
import copy
import pickle
from sympy.physics.units import meter
from sympy.testing.pytest import XFAIL, raises, ignore_warnings
from sympy.core.basic import Atom, Basic
from sympy.core.singleton import SingletonRegistry
from sympy.core.symbol import Str, Dummy, Symbol, Wild
from sympy.core.numbers import (E, I, pi, oo, zoo, nan, Integer,
Rational, Float, AlgebraicNumber)
from sympy.core.relational import (Equality, GreaterThan, LessThan, Relational,
StrictGreaterThan, StrictLessThan, Unequality)
from sympy.core.add import Add
from sympy.core.mul import Mul
from sympy.core.power import Pow
from sympy.core.function import Derivative, Function, FunctionClass, Lambda, \
WildFunction
from sympy.sets.sets import Interval
from sympy.core.multidimensional import vectorize
from sympy.external.gmpy import gmpy as _gmpy
from sympy.utilities.exceptions import SymPyDeprecationWarning
from sympy.core.singleton import S
from sympy.core.symbol import symbols
from sympy.external import import_module
cloudpickle = import_module('cloudpickle')
not_equal_attrs = {
'_assumptions', # This is a local cache that isn't automatically filled on creation
'_mhash', # Cached after __hash__ is called but set to None after creation
}
deprecated_attrs = {
'is_EmptySet', # Deprecated from SymPy 1.5. This can be removed when is_EmptySet is removed.
'expr_free_symbols', # Deprecated from SymPy 1.9. This can be removed when expr_free_symbols is removed.
}
def check(a, exclude=[], check_attr=True, deprecated=()):
""" Check that pickling and copying round-trips.
"""
# Pickling with protocols 0 and 1 is disabled for Basic instances:
if isinstance(a, Basic):
for protocol in [0, 1]:
raises(NotImplementedError, lambda: pickle.dumps(a, protocol))
protocols = [2, copy.copy, copy.deepcopy, 3, 4]
if cloudpickle:
protocols.extend([cloudpickle])
for protocol in protocols:
if protocol in exclude:
continue
if callable(protocol):
if isinstance(a, type):
# Classes can't be copied, but that's okay.
continue
b = protocol(a)
elif inspect.ismodule(protocol):
b = protocol.loads(protocol.dumps(a))
else:
b = pickle.loads(pickle.dumps(a, protocol))
d1 = dir(a)
d2 = dir(b)
assert set(d1) == set(d2)
if not check_attr:
continue
def c(a, b, d):
for i in d:
if i in not_equal_attrs:
if hasattr(a, i):
assert hasattr(b, i), i
elif i in deprecated_attrs or i in deprecated:
with ignore_warnings(SymPyDeprecationWarning):
assert getattr(a, i) == getattr(b, i), i
elif not hasattr(a, i):
continue
else:
attr = getattr(a, i)
if not hasattr(attr, "__call__"):
assert hasattr(b, i), i
assert getattr(b, i) == attr, "%s != %s, protocol: %s" % (getattr(b, i), attr, protocol)
c(a, b, d1)
c(b, a, d2)
#================== core =========================
def test_core_basic():
for c in (Atom, Atom(), Basic, Basic(), SingletonRegistry, S):
check(c)
def test_core_Str():
check(Str('x'))
def test_core_symbol():
# make the Symbol a unique name that doesn't clash with any other
# testing variable in this file since after this test the symbol
# having the same name will be cached as noncommutative
for c in (Dummy, Dummy("x", commutative=False), Symbol,
Symbol("_issue_3130", commutative=False), Wild, Wild("x")):
check(c)
def test_core_numbers():
for c in (Integer(2), Rational(2, 3), Float("1.2")):
check(c)
for c in (AlgebraicNumber, AlgebraicNumber(sqrt(3))):
check(c, check_attr=False)
def test_core_float_copy():
# See gh-7457
y = Symbol("x") + 1.0
check(y) # does not raise TypeError ("argument is not an mpz")
def test_core_relational():
x = Symbol("x")
y = Symbol("y")
for c in (Equality, Equality(x, y), GreaterThan, GreaterThan(x, y),
LessThan, LessThan(x, y), Relational, Relational(x, y),
StrictGreaterThan, StrictGreaterThan(x, y), StrictLessThan,
StrictLessThan(x, y), Unequality, Unequality(x, y)):
check(c)
def test_core_add():
x = Symbol("x")
for c in (Add, Add(x, 4)):
check(c)
def test_core_mul():
x = Symbol("x")
for c in (Mul, Mul(x, 4)):
check(c)
def test_core_power():
x = Symbol("x")
for c in (Pow, Pow(x, 4)):
check(c)
def test_core_function():
x = Symbol("x")
for f in (Derivative, Derivative(x), Function, FunctionClass, Lambda,
WildFunction):
check(f)
def test_core_undefinedfunctions():
f = Function("f")
# Full XFAILed test below
exclude = list(range(5))
# https://github.com/cloudpipe/cloudpickle/issues/65
# https://github.com/cloudpipe/cloudpickle/issues/190
exclude.append(cloudpickle)
check(f, exclude=exclude)
@XFAIL
def test_core_undefinedfunctions_fail():
# This fails because f is assumed to be a class at sympy.basic.function.f
f = Function("f")
check(f)
def test_core_interval():
for c in (Interval, Interval(0, 2)):
check(c)
def test_core_multidimensional():
for c in (vectorize, vectorize(0)):
check(c)
def test_Singletons():
protocols = [0, 1, 2, 3, 4]
copiers = [copy.copy, copy.deepcopy]
copiers += [lambda x: pickle.loads(pickle.dumps(x, proto))
for proto in protocols]
if cloudpickle:
copiers += [lambda x: cloudpickle.loads(cloudpickle.dumps(x))]
for obj in (Integer(-1), Integer(0), Integer(1), Rational(1, 2), pi, E, I,
oo, -oo, zoo, nan, S.GoldenRatio, S.TribonacciConstant,
S.EulerGamma, S.Catalan, S.EmptySet, S.IdentityFunction):
for func in copiers:
assert func(obj) is obj
#================== functions ===================
from sympy.functions import (Piecewise, lowergamma, acosh, chebyshevu,
chebyshevt, ln, chebyshevt_root, legendre, Heaviside, bernoulli, coth,
tanh, assoc_legendre, sign, arg, asin, DiracDelta, re, rf, Abs,
uppergamma, binomial, sinh, cos, cot, acos, acot, gamma, bell,
hermite, harmonic, LambertW, zeta, log, factorial, asinh, acoth, cosh,
dirichlet_eta, Eijk, loggamma, erf, ceiling, im, fibonacci,
tribonacci, conjugate, tan, chebyshevu_root, floor, atanh, sqrt, sin,
atan, ff, lucas, atan2, polygamma, exp)
def test_functions():
one_var = (acosh, ln, Heaviside, factorial, bernoulli, coth, tanh,
sign, arg, asin, DiracDelta, re, Abs, sinh, cos, cot, acos, acot,
gamma, bell, harmonic, LambertW, zeta, log, factorial, asinh,
acoth, cosh, dirichlet_eta, loggamma, erf, ceiling, im, fibonacci,
tribonacci, conjugate, tan, floor, atanh, sin, atan, lucas, exp)
two_var = (rf, ff, lowergamma, chebyshevu, chebyshevt, binomial,
atan2, polygamma, hermite, legendre, uppergamma)
x, y, z = symbols("x,y,z")
others = (chebyshevt_root, chebyshevu_root, Eijk(x, y, z),
Piecewise( (0, x < -1), (x**2, x <= 1), (x**3, True)),
assoc_legendre)
for cls in one_var:
check(cls)
c = cls(x)
check(c)
for cls in two_var:
check(cls)
c = cls(x, y)
check(c)
for cls in others:
check(cls)
#================== geometry ====================
from sympy.geometry.entity import GeometryEntity
from sympy.geometry.point import Point
from sympy.geometry.ellipse import Circle, Ellipse
from sympy.geometry.line import Line, LinearEntity, Ray, Segment
from sympy.geometry.polygon import Polygon, RegularPolygon, Triangle
def test_geometry():
p1 = Point(1, 2)
p2 = Point(2, 3)
p3 = Point(0, 0)
p4 = Point(0, 1)
for c in (
GeometryEntity, GeometryEntity(), Point, p1, Circle, Circle(p1, 2),
Ellipse, Ellipse(p1, 3, 4), Line, Line(p1, p2), LinearEntity,
LinearEntity(p1, p2), Ray, Ray(p1, p2), Segment, Segment(p1, p2),
Polygon, Polygon(p1, p2, p3, p4), RegularPolygon,
RegularPolygon(p1, 4, 5), Triangle, Triangle(p1, p2, p3)):
check(c, check_attr=False)
#================== integrals ====================
from sympy.integrals.integrals import Integral
def test_integrals():
x = Symbol("x")
for c in (Integral, Integral(x)):
check(c)
#==================== logic =====================
from sympy.core.logic import Logic
def test_logic():
for c in (Logic, Logic(1)):
check(c)
#================== matrices ====================
from sympy.matrices import Matrix, SparseMatrix
def test_matrices():
for c in (Matrix, Matrix([1, 2, 3]), SparseMatrix, SparseMatrix([[1, 2], [3, 4]])):
check(c, deprecated=['_smat', '_mat'])
#================== ntheory =====================
from sympy.ntheory.generate import Sieve
def test_ntheory():
for c in (Sieve, Sieve()):
check(c)
#================== physics =====================
from sympy.physics.paulialgebra import Pauli
from sympy.physics.units import Unit
def test_physics():
for c in (Unit, meter, Pauli, Pauli(1)):
check(c)
#================== plotting ====================
# XXX: These tests are not complete, so XFAIL them
@XFAIL
def test_plotting():
from sympy.plotting.pygletplot.color_scheme import ColorGradient, ColorScheme
from sympy.plotting.pygletplot.managed_window import ManagedWindow
from sympy.plotting.plot import Plot, ScreenShot
from sympy.plotting.pygletplot.plot_axes import PlotAxes, PlotAxesBase, PlotAxesFrame, PlotAxesOrdinate
from sympy.plotting.pygletplot.plot_camera import PlotCamera
from sympy.plotting.pygletplot.plot_controller import PlotController
from sympy.plotting.pygletplot.plot_curve import PlotCurve
from sympy.plotting.pygletplot.plot_interval import PlotInterval
from sympy.plotting.pygletplot.plot_mode import PlotMode
from sympy.plotting.pygletplot.plot_modes import Cartesian2D, Cartesian3D, Cylindrical, \
ParametricCurve2D, ParametricCurve3D, ParametricSurface, Polar, Spherical
from sympy.plotting.pygletplot.plot_object import PlotObject
from sympy.plotting.pygletplot.plot_surface import PlotSurface
from sympy.plotting.pygletplot.plot_window import PlotWindow
for c in (
ColorGradient, ColorGradient(0.2, 0.4), ColorScheme, ManagedWindow,
ManagedWindow, Plot, ScreenShot, PlotAxes, PlotAxesBase,
PlotAxesFrame, PlotAxesOrdinate, PlotCamera, PlotController,
PlotCurve, PlotInterval, PlotMode, Cartesian2D, Cartesian3D,
Cylindrical, ParametricCurve2D, ParametricCurve3D,
ParametricSurface, Polar, Spherical, PlotObject, PlotSurface,
PlotWindow):
check(c)
@XFAIL
def test_plotting2():
#from sympy.plotting.color_scheme import ColorGradient
from sympy.plotting.pygletplot.color_scheme import ColorScheme
#from sympy.plotting.managed_window import ManagedWindow
from sympy.plotting.plot import Plot
#from sympy.plotting.plot import ScreenShot
from sympy.plotting.pygletplot.plot_axes import PlotAxes
#from sympy.plotting.plot_axes import PlotAxesBase, PlotAxesFrame, PlotAxesOrdinate
#from sympy.plotting.plot_camera import PlotCamera
#from sympy.plotting.plot_controller import PlotController
#from sympy.plotting.plot_curve import PlotCurve
#from sympy.plotting.plot_interval import PlotInterval
#from sympy.plotting.plot_mode import PlotMode
#from sympy.plotting.plot_modes import Cartesian2D, Cartesian3D, Cylindrical, \
# ParametricCurve2D, ParametricCurve3D, ParametricSurface, Polar, Spherical
#from sympy.plotting.plot_object import PlotObject
#from sympy.plotting.plot_surface import PlotSurface
# from sympy.plotting.plot_window import PlotWindow
check(ColorScheme("rainbow"))
check(Plot(1, visible=False))
check(PlotAxes())
#================== polys =======================
from sympy.polys.domains.integerring import ZZ
from sympy.polys.domains.rationalfield import QQ
from sympy.polys.orderings import lex
from sympy.polys.polytools import Poly
def test_pickling_polys_polytools():
from sympy.polys.polytools import PurePoly
# from sympy.polys.polytools import GroebnerBasis
x = Symbol('x')
for c in (Poly, Poly(x, x)):
check(c)
for c in (PurePoly, PurePoly(x)):
check(c)
# TODO: fix pickling of Options class (see GroebnerBasis._options)
# for c in (GroebnerBasis, GroebnerBasis([x**2 - 1], x, order=lex)):
# check(c)
def test_pickling_polys_polyclasses():
from sympy.polys.polyclasses import DMP, DMF, ANP
for c in (DMP, DMP([[ZZ(1)], [ZZ(2)], [ZZ(3)]], ZZ)):
check(c, deprecated=['rep'])
for c in (DMF, DMF(([ZZ(1), ZZ(2)], [ZZ(1), ZZ(3)]), ZZ)):
check(c)
for c in (ANP, ANP([QQ(1), QQ(2)], [QQ(1), QQ(2), QQ(3)], QQ)):
check(c)
@XFAIL
def test_pickling_polys_rings():
# NOTE: can't use protocols < 2 because we have to execute __new__ to
# make sure caching of rings works properly.
from sympy.polys.rings import PolyRing
ring = PolyRing("x,y,z", ZZ, lex)
for c in (PolyRing, ring):
check(c, exclude=[0, 1])
for c in (ring.dtype, ring.one):
check(c, exclude=[0, 1], check_attr=False) # TODO: Py3k
def test_pickling_polys_fields():
pass
# NOTE: can't use protocols < 2 because we have to execute __new__ to
# make sure caching of fields works properly.
# from sympy.polys.fields import FracField
# field = FracField("x,y,z", ZZ, lex)
# TODO: AssertionError: assert id(obj) not in self.memo
# for c in (FracField, field):
# check(c, exclude=[0, 1])
# TODO: AssertionError: assert id(obj) not in self.memo
# for c in (field.dtype, field.one):
# check(c, exclude=[0, 1])
def test_pickling_polys_elements():
from sympy.polys.domains.pythonrational import PythonRational
#from sympy.polys.domains.pythonfinitefield import PythonFiniteField
#from sympy.polys.domains.mpelements import MPContext
for c in (PythonRational, PythonRational(1, 7)):
check(c)
#gf = PythonFiniteField(17)
# TODO: fix pickling of ModularInteger
# for c in (gf.dtype, gf(5)):
# check(c)
#mp = MPContext()
# TODO: fix pickling of RealElement
# for c in (mp.mpf, mp.mpf(1.0)):
# check(c)
# TODO: fix pickling of ComplexElement
# for c in (mp.mpc, mp.mpc(1.0, -1.5)):
# check(c)
def test_pickling_polys_domains():
# from sympy.polys.domains.pythonfinitefield import PythonFiniteField
from sympy.polys.domains.pythonintegerring import PythonIntegerRing
from sympy.polys.domains.pythonrationalfield import PythonRationalField
# TODO: fix pickling of ModularInteger
# for c in (PythonFiniteField, PythonFiniteField(17)):
# check(c)
for c in (PythonIntegerRing, PythonIntegerRing()):
check(c, check_attr=False)
for c in (PythonRationalField, PythonRationalField()):
check(c, check_attr=False)
if _gmpy is not None:
# from sympy.polys.domains.gmpyfinitefield import GMPYFiniteField
from sympy.polys.domains.gmpyintegerring import GMPYIntegerRing
from sympy.polys.domains.gmpyrationalfield import GMPYRationalField
# TODO: fix pickling of ModularInteger
# for c in (GMPYFiniteField, GMPYFiniteField(17)):
# check(c)
for c in (GMPYIntegerRing, GMPYIntegerRing()):
check(c, check_attr=False)
for c in (GMPYRationalField, GMPYRationalField()):
check(c, check_attr=False)
#from sympy.polys.domains.realfield import RealField
#from sympy.polys.domains.complexfield import ComplexField
from sympy.polys.domains.algebraicfield import AlgebraicField
#from sympy.polys.domains.polynomialring import PolynomialRing
#from sympy.polys.domains.fractionfield import FractionField
from sympy.polys.domains.expressiondomain import ExpressionDomain
# TODO: fix pickling of RealElement
# for c in (RealField, RealField(100)):
# check(c)
# TODO: fix pickling of ComplexElement
# for c in (ComplexField, ComplexField(100)):
# check(c)
for c in (AlgebraicField, AlgebraicField(QQ, sqrt(3))):
check(c, check_attr=False)
# TODO: AssertionError
# for c in (PolynomialRing, PolynomialRing(ZZ, "x,y,z")):
# check(c)
# TODO: AttributeError: 'PolyElement' object has no attribute 'ring'
# for c in (FractionField, FractionField(ZZ, "x,y,z")):
# check(c)
for c in (ExpressionDomain, ExpressionDomain()):
check(c, check_attr=False)
def test_pickling_polys_orderings():
from sympy.polys.orderings import (LexOrder, GradedLexOrder,
ReversedGradedLexOrder, InverseOrder)
# from sympy.polys.orderings import ProductOrder
for c in (LexOrder, LexOrder()):
check(c)
for c in (GradedLexOrder, GradedLexOrder()):
check(c)
for c in (ReversedGradedLexOrder, ReversedGradedLexOrder()):
check(c)
# TODO: Argh, Python is so naive. No lambdas nor inner function support in
# pickling module. Maybe someone could figure out what to do with this.
#
# for c in (ProductOrder, ProductOrder((LexOrder(), lambda m: m[:2]),
# (GradedLexOrder(), lambda m: m[2:]))):
# check(c)
for c in (InverseOrder, InverseOrder(LexOrder())):
check(c)
def test_pickling_polys_monomials():
from sympy.polys.monomials import MonomialOps, Monomial
x, y, z = symbols("x,y,z")
for c in (MonomialOps, MonomialOps(3)):
check(c)
for c in (Monomial, Monomial((1, 2, 3), (x, y, z))):
check(c)
def test_pickling_polys_errors():
from sympy.polys.polyerrors import (HeuristicGCDFailed,
HomomorphismFailed, IsomorphismFailed, ExtraneousFactors,
EvaluationFailed, RefinementFailed, CoercionFailed, NotInvertible,
NotReversible, NotAlgebraic, DomainError, PolynomialError,
UnificationFailed, GeneratorsError, GeneratorsNeeded,
UnivariatePolynomialError, MultivariatePolynomialError, OptionError,
FlagError)
# from sympy.polys.polyerrors import (ExactQuotientFailed,
# OperationNotSupported, ComputationFailed, PolificationFailed)
# x = Symbol('x')
# TODO: TypeError: __init__() takes at least 3 arguments (1 given)
# for c in (ExactQuotientFailed, ExactQuotientFailed(x, 3*x, ZZ)):
# check(c)
# TODO: TypeError: can't pickle instancemethod objects
# for c in (OperationNotSupported, OperationNotSupported(Poly(x), Poly.gcd)):
# check(c)
for c in (HeuristicGCDFailed, HeuristicGCDFailed()):
check(c)
for c in (HomomorphismFailed, HomomorphismFailed()):
check(c)
for c in (IsomorphismFailed, IsomorphismFailed()):
check(c)
for c in (ExtraneousFactors, ExtraneousFactors()):
check(c)
for c in (EvaluationFailed, EvaluationFailed()):
check(c)
for c in (RefinementFailed, RefinementFailed()):
check(c)
for c in (CoercionFailed, CoercionFailed()):
check(c)
for c in (NotInvertible, NotInvertible()):
check(c)
for c in (NotReversible, NotReversible()):
check(c)
for c in (NotAlgebraic, NotAlgebraic()):
check(c)
for c in (DomainError, DomainError()):
check(c)
for c in (PolynomialError, PolynomialError()):
check(c)
for c in (UnificationFailed, UnificationFailed()):
check(c)
for c in (GeneratorsError, GeneratorsError()):
check(c)
for c in (GeneratorsNeeded, GeneratorsNeeded()):
check(c)
# TODO: PicklingError: Can't pickle <function <lambda> at 0x38578c0>: it's not found as __main__.<lambda>
# for c in (ComputationFailed, ComputationFailed(lambda t: t, 3, None)):
# check(c)
for c in (UnivariatePolynomialError, UnivariatePolynomialError()):
check(c)
for c in (MultivariatePolynomialError, MultivariatePolynomialError()):
check(c)
# TODO: TypeError: __init__() takes at least 3 arguments (1 given)
# for c in (PolificationFailed, PolificationFailed({}, x, x, False)):
# check(c)
for c in (OptionError, OptionError()):
check(c)
for c in (FlagError, FlagError()):
check(c)
#def test_pickling_polys_options():
#from sympy.polys.polyoptions import Options
# TODO: fix pickling of `symbols' flag
# for c in (Options, Options((), dict(domain='ZZ', polys=False))):
# check(c)
# TODO: def test_pickling_polys_rootisolation():
# RealInterval
# ComplexInterval
def test_pickling_polys_rootoftools():
from sympy.polys.rootoftools import CRootOf, RootSum
x = Symbol('x')
f = x**3 + x + 3
for c in (CRootOf, CRootOf(f, 0)):
check(c)
for c in (RootSum, RootSum(f, exp)):
check(c)
#================== printing ====================
from sympy.printing.latex import LatexPrinter
from sympy.printing.mathml import MathMLContentPrinter, MathMLPresentationPrinter
from sympy.printing.pretty.pretty import PrettyPrinter
from sympy.printing.pretty.stringpict import prettyForm, stringPict
from sympy.printing.printer import Printer
from sympy.printing.python import PythonPrinter
def test_printing():
for c in (LatexPrinter, LatexPrinter(), MathMLContentPrinter,
MathMLPresentationPrinter, PrettyPrinter, prettyForm, stringPict,
stringPict("a"), Printer, Printer(), PythonPrinter,
PythonPrinter()):
check(c)
@XFAIL
def test_printing1():
check(MathMLContentPrinter())
@XFAIL
def test_printing2():
check(MathMLPresentationPrinter())
@XFAIL
def test_printing3():
check(PrettyPrinter())
#================== series ======================
from sympy.series.limits import Limit
from sympy.series.order import Order
def test_series():
e = Symbol("e")
x = Symbol("x")
for c in (Limit, Limit(e, x, 1), Order, Order(e)):
check(c)
#================== concrete ==================
from sympy.concrete.products import Product
from sympy.concrete.summations import Sum
def test_concrete():
x = Symbol("x")
for c in (Product, Product(x, (x, 2, 4)), Sum, Sum(x, (x, 2, 4))):
check(c)
def test_deprecation_warning():
w = SymPyDeprecationWarning("message", deprecated_since_version='1.0', active_deprecations_target="active-deprecations")
check(w)
def test_issue_18438():
assert pickle.loads(pickle.dumps(S.Half)) == S.Half
#================= old pickles =================
def test_unpickle_from_older_versions():
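# A protocol-4 pickle of Pow(Integer(2), S.Half), i.e. sqrt(2), produced by
# an older SymPy version; it must still unpickle to the same expression.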
data = (
b'\x80\x04\x95^\x00\x00\x00\x00\x00\x00\x00\x8c\x10sympy.core.power'
b'\x94\x8c\x03Pow\x94\x93\x94\x8c\x12sympy.core.numbers\x94\x8c'
b'\x07Integer\x94\x93\x94K\x02\x85\x94R\x94}\x94bh\x03\x8c\x04Half'
b'\x94\x93\x94)R\x94}\x94b\x86\x94R\x94}\x94b.'
)
assert pickle.loads(data) == sqrt(2)

View File

@@ -0,0 +1,11 @@
from sympy.utilities.source import get_mod_func, get_class
def test_get_mod_func():
assert get_mod_func(
'sympy.core.basic.Basic') == ('sympy.core.basic', 'Basic')
def test_get_class():
_basic = get_class('sympy.core.basic.Basic')
assert _basic.__name__ == 'Basic'

View File

@@ -0,0 +1,10 @@
"""Tests for simple tools for timing functions' execution. """
from sympy.utilities.timeutils import timed
def test_timed():
result = timed(lambda: 1 + 1, limit=100000)
assert result[0] == 100000 and result[3] == "ns", str(result)
result = timed("1 + 1", limit=100000)
assert result[0] == 100000 and result[3] == "ns"

File diff suppressed because it is too large

View File

@@ -0,0 +1,3 @@
# A test file for XXE injection
# Username: Test
# Password: Test

View File

@@ -0,0 +1,76 @@
"""Simple tools for timing functions' execution, when IPython is not available. """
import timeit
import math
_scales = [1e0, 1e3, 1e6, 1e9]
_units = ['s', 'ms', '\N{GREEK SMALL LETTER MU}s', 'ns']
def timed(func, setup="pass", limit=None):
"""Adaptively measure execution time of a function. """
timer = timeit.Timer(func, setup=setup)
repeat, number = 3, 1
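# Grow the per-measurement repetition count by a factor of 10 (at most 9
# times) until one batch takes at least 0.2 seconds or the optional
# ``limit`` on the count is reached.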
for i in range(1, 10):
if timer.timeit(number) >= 0.2:
break
elif limit is not None and number >= limit:
break
else:
number *= 10
time = min(timer.repeat(repeat, number)) / number
if time > 0.0:
order = min(-int(math.floor(math.log10(time)) // 3), 3)
else:
order = 3
return (number, time, time*_scales[order], _units[order])
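# Illustrative use of timed() -- the numbers are machine-dependent and shown
# only as an example:
#   >>> timed(lambda: sum(range(100)))
#   (100000, 1.06e-06, 1.06, 'µs')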
# Code for doing inline timings of recursive algorithms.
def __do_timings():
import os
res = os.getenv('SYMPY_TIMINGS', '')
res = [x.strip() for x in res.split(',')]
return set(res)
_do_timings = __do_timings()
_timestack = None
def _print_timestack(stack, level=1):
print('-'*level, '%.2f %s%s' % (stack[2], stack[0], stack[3]))
for s in stack[1]:
_print_timestack(s, level + 1)
def timethis(name):
def decorator(func):
global _do_timings
if name not in _do_timings:
return func
def wrapper(*args, **kwargs):
from time import time
global _timestack
oldtimestack = _timestack
_timestack = [func.__name__, [], 0, args]
t1 = time()
r = func(*args, **kwargs)
t2 = time()
_timestack[2] = t2 - t1
if oldtimestack is not None:
oldtimestack[1].append(_timestack)
_timestack = oldtimestack
else:
_print_timestack(_timestack)
_timestack = None
return r
return wrapper
return decorator
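# Illustrative use of timethis() -- assumes the SYMPY_TIMINGS environment
# variable contains the name 'mywork' (a hypothetical label):
#   @timethis('mywork')
#   def mywork(n):
#       ...
# When the outermost decorated call returns, the nested call tree is printed
# together with the elapsed time of each call.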

View File

@@ -0,0 +1,12 @@
"""
.. deprecated:: 1.6
sympy.utilities.tmpfiles has been renamed to sympy.testing.tmpfiles.
"""
from sympy.utilities.exceptions import sympy_deprecation_warning
sympy_deprecation_warning("The sympy.utilities.tmpfiles submodule is deprecated. Use sympy.testing.tmpfiles instead.",
deprecated_since_version="1.6",
active_deprecations_target="deprecated-sympy-utilities-submodules")
from sympy.testing.tmpfiles import * # noqa:F401