I am done

2024-10-30 22:14:35 +01:00
parent 720dc28c09
commit 40e2a747cf
36901 changed files with 5011519 additions and 0 deletions


@@ -0,0 +1,22 @@
""" This sub-module is private, i.e. external code should not depend on it.
These functions are used by tests run as part of continuous integration.
Once the implementation is mature (it should support the major
platforms: Windows, OS X & Linux) it may become an official API that
downstream libraries may rely upon. Until then, the API may break
without prior notice.
TODO:
- (optionally) clean up after tempfile.mkdtemp()
- cross-platform testing
- caching of compiler choice and intermediate files
"""
from .compilation import compile_link_import_strings, compile_run_strings
from .availability import has_fortran, has_c, has_cxx
__all__ = [
'compile_link_import_strings', 'compile_run_strings',
'has_fortran', 'has_c', 'has_cxx',
]
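
A minimal usage sketch of the names re-exported above, mirroring how the
availability checks and the test suite below call them; the C source string
and keyword values are illustrative and, as the docstring warns, none of this
is stable API:

from sympy.utilities._compilation import has_c, compile_run_strings

if has_c():  # cached probe, False when no C compiler is found
    (stdout, stderr), info = compile_run_strings(
        [('main.c', '#include <stdio.h>\nint main(){printf("hello world\\n");return 0;}')],
        clean=True,
    )
    assert info['exit_status'] == 0 and 'hello world' in stdout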


@@ -0,0 +1,77 @@
import os
from .compilation import compile_run_strings
from .util import CompilerNotFoundError
def has_fortran():
if not hasattr(has_fortran, 'result'):
try:
(stdout, stderr), info = compile_run_strings(
[('main.f90', (
'program foo\n'
'print *, "hello world"\n'
'end program'
))], clean=True
)
except CompilerNotFoundError:
has_fortran.result = False
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise
else:
if info['exit_status'] != os.EX_OK or 'hello world' not in stdout:
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise ValueError("Failed to compile test program:\n%s\n%s\n" % (stdout, stderr))
has_fortran.result = False
else:
has_fortran.result = True
return has_fortran.result
def has_c():
if not hasattr(has_c, 'result'):
try:
(stdout, stderr), info = compile_run_strings(
[('main.c', (
'#include <stdio.h>\n'
'int main(){\n'
'printf("hello world\\n");\n'
'return 0;\n'
'}'
))], clean=True
)
except CompilerNotFoundError:
has_c.result = False
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise
else:
if info['exit_status'] != os.EX_OK or 'hello world' not in stdout:
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise ValueError("Failed to compile test program:\n%s\n%s\n" % (stdout, stderr))
has_c.result = False
else:
has_c.result = True
return has_c.result
def has_cxx():
if not hasattr(has_cxx, 'result'):
try:
(stdout, stderr), info = compile_run_strings(
[('main.cxx', (
'#include <iostream>\n'
'int main(){\n'
'std::cout << "hello world" << std::endl;\n'
'}'
))], clean=True
)
except CompilerNotFoundError:
has_cxx.result = False
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise
else:
if info['exit_status'] != os.EX_OK or 'hello world' not in stdout:
if os.environ.get('SYMPY_STRICT_COMPILER_CHECKS', '0') == '1':
raise ValueError("Failed to compile test program:\n%s\n%s\n" % (stdout, stderr))
has_cxx.result = False
else:
has_cxx.result = True
return has_cxx.result
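
The three helpers above memoize their result as an attribute on the function
object itself, so the compile-and-run probe happens at most once per process.
A stand-alone sketch of the same pattern (``probe`` and ``expensive_check``
are illustrative names, not part of the module):

def expensive_check():
    return True  # stand-in for compiling and running a test program

def probe():
    if not hasattr(probe, 'result'):   # first call: run the check and cache it
        probe.result = expensive_check()
    return probe.result                # later calls reuse the cached value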


@@ -0,0 +1,657 @@
import glob
import os
import shutil
import subprocess
import sys
import tempfile
import warnings
from sysconfig import get_config_var, get_config_vars, get_path
from .runners import (
CCompilerRunner,
CppCompilerRunner,
FortranCompilerRunner
)
from .util import (
get_abspath, make_dirs, copy, Glob, ArbitraryDepthGlob,
glob_at_depth, import_module_from_file, pyx_is_cplus,
sha256_of_string, sha256_of_file, CompileError
)
if os.name == 'posix':
objext = '.o'
elif os.name == 'nt':
objext = '.obj'
else:
warnings.warn("Unknown os.name: {}".format(os.name))
objext = '.o'
def compile_sources(files, Runner=None, destdir=None, cwd=None, keep_dir_struct=False,
per_file_kwargs=None, **kwargs):
""" Compile source code files to object files.
Parameters
==========
files : iterable of str
Paths to source files; if ``cwd`` is given, the paths are taken as relative.
Runner: CompilerRunner subclass (optional)
Could be e.g. ``FortranCompilerRunner``. Will be inferred from filename
extensions if missing.
destdir: str
Output directory; if ``cwd`` is given, the path is taken as relative.
cwd: str
Working directory. Specify to have the compiler run in another directory;
also used as the root of relative paths.
keep_dir_struct: bool
Reproduce directory structure in `destdir`. default: ``False``
per_file_kwargs: dict
Dict mapping instances in ``files`` to keyword arguments.
\\*\\*kwargs: dict
Default keyword arguments to pass to ``Runner``.
Returns
=======
List of strings (paths of object files).
"""
_per_file_kwargs = {}
if per_file_kwargs is not None:
for k, v in per_file_kwargs.items():
if isinstance(k, Glob):
for path in glob.glob(k.pathname):
_per_file_kwargs[path] = v
elif isinstance(k, ArbitraryDepthGlob):
for path in glob_at_depth(k.filename, cwd):
_per_file_kwargs[path] = v
else:
_per_file_kwargs[k] = v
# Set up destination directory
destdir = destdir or '.'
if not os.path.isdir(destdir):
if os.path.exists(destdir):
raise OSError("{} is not a directory".format(destdir))
else:
make_dirs(destdir)
if cwd is None:
cwd = '.'
for f in files:
copy(f, destdir, only_update=True, dest_is_dir=True)
# Compile files and return list of paths to the objects
dstpaths = []
for f in files:
if keep_dir_struct:
name, ext = os.path.splitext(f)
else:
name, ext = os.path.splitext(os.path.basename(f))
file_kwargs = kwargs.copy()
file_kwargs.update(_per_file_kwargs.get(f, {}))
dstpaths.append(src2obj(f, Runner, cwd=cwd, **file_kwargs))
return dstpaths
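# Usage sketch for compile_sources (illustrative paths and keywords, not part
# of this module): compile two C files, collecting the object-file paths.
#
#     objs = compile_sources(['foo.c', 'bar.c'], destdir='build',
#                            std='c99', include_dirs=['include'])
#     # objs is a list of paths to the generated object files.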
def get_mixed_fort_c_linker(vendor=None, cplus=False, cwd=None):
vendor = vendor or os.environ.get('SYMPY_COMPILER_VENDOR', 'gnu')
if vendor.lower() == 'intel':
if cplus:
return (FortranCompilerRunner,
{'flags': ['-nofor_main', '-cxxlib']}, vendor)
else:
return (FortranCompilerRunner,
{'flags': ['-nofor_main']}, vendor)
elif vendor.lower() in ('gnu', 'llvm'):
if cplus:
return (CppCompilerRunner,
{'lib_options': ['fortran']}, vendor)
else:
return (FortranCompilerRunner,
{}, vendor)
else:
raise ValueError("No vendor found.")
def link(obj_files, out_file=None, shared=False, Runner=None,
cwd=None, cplus=False, fort=False, extra_objs=None, **kwargs):
""" Link object files.
Parameters
==========
obj_files: iterable of str
Paths to object files.
out_file: str (optional)
Path to executable/shared library, if ``None`` it will be
deduced from the last item in obj_files.
shared: bool
Generate a shared library?
Runner: CompilerRunner subclass (optional)
If not given the ``cplus`` and ``fort`` flags will be inspected
(fallback is the C compiler).
cwd: str
Path to the root of relative paths and working directory for compiler.
cplus: bool
C++ objects? default: ``False``.
fort: bool
Fortran objects? default: ``False``.
extra_objs: list
List of paths to extra object files / static libraries.
\\*\\*kwargs: dict
Keyword arguments passed to ``Runner``.
Returns
=======
The absolute path to the generated shared object / executable.
"""
if out_file is None:
out_file, ext = os.path.splitext(os.path.basename(obj_files[-1]))
if shared:
out_file += get_config_var('EXT_SUFFIX')
if not Runner:
if fort:
Runner, extra_kwargs, vendor = \
get_mixed_fort_c_linker(
vendor=kwargs.get('vendor', None),
cplus=cplus,
cwd=cwd,
)
for k, v in extra_kwargs.items():
if k in kwargs:
kwargs[k].extend(v)
else:
kwargs[k] = v
else:
if cplus:
Runner = CppCompilerRunner
else:
Runner = CCompilerRunner
flags = kwargs.pop('flags', [])
if shared:
if '-shared' not in flags:
flags.append('-shared')
run_linker = kwargs.pop('run_linker', True)
if not run_linker:
raise ValueError("run_linker was set to False (nonsensical).")
out_file = get_abspath(out_file, cwd=cwd)
runner = Runner(obj_files+(extra_objs or []), out_file, flags, cwd=cwd, **kwargs)
runner.run()
return out_file
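# Usage sketch for link (illustrative): link previously compiled objects into
# a shared library. With shared=True and out_file=None the '-shared' flag and
# sysconfig's EXT_SUFFIX are applied automatically, as implemented above.
#
#     so_path = link(objs, shared=True, cwd='build', libraries=['m'])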
def link_py_so(obj_files, so_file=None, cwd=None, libraries=None,
cplus=False, fort=False, extra_objs=None, **kwargs):
""" Link Python extension module (shared object) for importing
Parameters
==========
obj_files: iterable of str
Paths to object files to be linked.
so_file: str
Name (path) of shared object file to create. If not specified it will
have the basename of the last object file in `obj_files` but with the
extension '.so' (Unix).
cwd: path string
Root of relative paths and working directory of linker.
libraries: iterable of strings
Libraries to link against, e.g. ['m'].
cplus: bool
Any C++ objects? default: ``False``.
fort: bool
Any Fortran objects? default: ``False``.
extra_objs: list
List of paths of extra object files / static libraries to link against.
\*\*kwargs: dict
Keyword arguments passed to ``link(...)``.
Returns
=======
Absolute path to the generated shared object.
"""
libraries = libraries or []
include_dirs = kwargs.pop('include_dirs', [])
library_dirs = kwargs.pop('library_dirs', [])
# Add Python include and library directories
# PY_LDFLAGS is not available on all Python implementations,
# e.g. PyPy, so we use LDFLAGS instead
if sys.platform == "win32":
warnings.warn("Windows not yet supported.")
elif sys.platform == 'darwin':
cfgDict = get_config_vars()
kwargs['linkline'] = kwargs.get('linkline', []) + [cfgDict['LDFLAGS']]
library_dirs += [cfgDict['LIBDIR']]
# On macOS, the linker needs to link against frameworks explicitly,
# e.g. "-framework CoreFoundation"
is_framework = False
for opt in cfgDict['LIBS'].split():
if is_framework:
kwargs['linkline'] = kwargs.get('linkline', []) + ['-framework', opt]
is_framework = False
elif opt.startswith('-l'):
libraries.append(opt[2:])
elif opt.startswith('-framework'):
is_framework = True
# The python library is not included in LIBS
libfile = cfgDict['LIBRARY']
libname = ".".join(libfile.split('.')[:-1])[3:]
libraries.append(libname)
elif sys.platform[:3] == 'aix':
# Don't use the default code below
pass
else:
if get_config_var('Py_ENABLE_SHARED'):
cfgDict = get_config_vars()
kwargs['linkline'] = kwargs.get('linkline', []) + [cfgDict['LDFLAGS']]
library_dirs += [cfgDict['LIBDIR']]
for opt in cfgDict['BLDLIBRARY'].split():
if opt.startswith('-l'):
libraries += [opt[2:]]
else:
pass
flags = kwargs.pop('flags', [])
needed_flags = ('-pthread',)
for flag in needed_flags:
if flag not in flags:
flags.append(flag)
return link(obj_files, shared=True, flags=flags, cwd=cwd, cplus=cplus, fort=fort,
include_dirs=include_dirs, libraries=libraries,
library_dirs=library_dirs, extra_objs=extra_objs, **kwargs)
def simple_cythonize(src, destdir=None, cwd=None, **cy_kwargs):
""" Generates a C file from a Cython source file.
Parameters
==========
src: str
Path to Cython source.
destdir: str (optional)
Path to output directory (default: '.').
cwd: path string (optional)
Root of relative paths (default: '.').
**cy_kwargs:
Second argument passed to cy_compile. Generates a .cpp file if ``cplus=True`` in ``cy_kwargs``,
else a .c file.
"""
from Cython.Compiler.Main import (
default_options, CompilationOptions
)
from Cython.Compiler.Main import compile as cy_compile
assert src.lower().endswith('.pyx') or src.lower().endswith('.py')
cwd = cwd or '.'
destdir = destdir or '.'
ext = '.cpp' if cy_kwargs.get('cplus', False) else '.c'
c_name = os.path.splitext(os.path.basename(src))[0] + ext
dstfile = os.path.join(destdir, c_name)
if cwd:
ori_dir = os.getcwd()
else:
ori_dir = '.'
os.chdir(cwd)
try:
cy_options = CompilationOptions(default_options)
cy_options.__dict__.update(cy_kwargs)
# Set language_level if not set by cy_kwargs
# as not setting it is deprecated
if 'language_level' not in cy_kwargs:
cy_options.__dict__['language_level'] = 3
cy_result = cy_compile([src], cy_options)
if cy_result.num_errors > 0:
raise ValueError("Cython compilation failed.")
# Move generated C file to destination
# On macOS, the generated C file is in the same directory as the source,
# but /var is a symlink to /private/var, so we compare real paths
if os.path.realpath(os.path.dirname(src)) != os.path.realpath(destdir):
if os.path.exists(dstfile):
os.unlink(dstfile)
shutil.move(os.path.join(os.path.dirname(src), c_name), destdir)
finally:
os.chdir(ori_dir)
return dstfile
extension_mapping = {
'.c': (CCompilerRunner, None),
'.cpp': (CppCompilerRunner, None),
'.cxx': (CppCompilerRunner, None),
'.f': (FortranCompilerRunner, None),
'.for': (FortranCompilerRunner, None),
'.ftn': (FortranCompilerRunner, None),
'.f90': (FortranCompilerRunner, None), # ifort only knows about .f90
'.f95': (FortranCompilerRunner, 'f95'),
'.f03': (FortranCompilerRunner, 'f2003'),
'.f08': (FortranCompilerRunner, 'f2008'),
}
def src2obj(srcpath, Runner=None, objpath=None, cwd=None, inc_py=False, **kwargs):
""" Compiles a source code file to an object file.
Files ending with '.pyx' are assumed to be Cython files and
are dispatched to pyx2obj.
Parameters
==========
srcpath: str
Path to source file.
Runner: CompilerRunner subclass (optional)
If ``None``: deduced from extension of srcpath.
objpath : str (optional)
Path to generated object. If ``None``: deduced from ``srcpath``.
cwd: str (optional)
Working directory and root of relative paths. If ``None``: current dir.
inc_py: bool
Add Python include path to kwarg "include_dirs". Default: False
\\*\\*kwargs: dict
keyword arguments passed to Runner or pyx2obj
"""
name, ext = os.path.splitext(os.path.basename(srcpath))
if objpath is None:
if os.path.isabs(srcpath):
objpath = '.'
else:
objpath = os.path.dirname(srcpath)
objpath = objpath or '.' # avoid objpath == ''
if os.path.isdir(objpath):
objpath = os.path.join(objpath, name + objext)
include_dirs = kwargs.pop('include_dirs', [])
if inc_py:
py_inc_dir = get_path('include')
if py_inc_dir not in include_dirs:
include_dirs.append(py_inc_dir)
if ext.lower() == '.pyx':
return pyx2obj(srcpath, objpath=objpath, include_dirs=include_dirs, cwd=cwd,
**kwargs)
if Runner is None:
Runner, std = extension_mapping[ext.lower()]
if 'std' not in kwargs:
kwargs['std'] = std
flags = kwargs.pop('flags', [])
needed_flags = ('-fPIC',)
for flag in needed_flags:
if flag not in flags:
flags.append(flag)
# src2obj implies not running the linker...
run_linker = kwargs.pop('run_linker', False)
if run_linker:
raise CompileError("src2obj called with run_linker=True")
runner = Runner([srcpath], objpath, include_dirs=include_dirs,
run_linker=run_linker, cwd=cwd, flags=flags, **kwargs)
runner.run()
return objpath
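# Usage sketch for src2obj (illustrative): compile a lone C file to an object
# file with the Python headers on the include path; '.pyx' sources would be
# routed through pyx2obj() below instead.
#
#     obj = src2obj('hello.c', inc_py=True, std='c99')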
def pyx2obj(pyxpath, objpath=None, destdir=None, cwd=None,
include_dirs=None, cy_kwargs=None, cplus=None, **kwargs):
"""
Convenience function for compiling a Cython source file to an object file.
If ``cwd`` is specified, ``pyxpath`` and ``objpath`` are taken to be relative.
If ``only_update`` is set to ``True``, the modification time is checked
and compilation is only run if the source is newer than the
destination.
Parameters
==========
pyxpath: str
Path to Cython source file.
objpath: str (optional)
Path to object file to generate.
destdir: str (optional)
Directory to put generated C file. When ``None``: directory of ``objpath``.
cwd: str (optional)
Working directory and root of relative paths.
include_dirs: iterable of path strings (optional)
Passed onto src2obj and via cy_kwargs['include_path']
to simple_cythonize.
cy_kwargs: dict (optional)
Keyword arguments passed onto `simple_cythonize`
cplus: bool (optional)
Indicate whether C++ is used. default: auto-detect using ``.util.pyx_is_cplus``.
\*\*kwargs: dict
Keyword arguments passed onto ``src2obj``.
Returns
=======
Absolute path of generated object file.
"""
assert pyxpath.endswith('.pyx')
cwd = cwd or '.'
objpath = objpath or '.'
destdir = destdir or os.path.dirname(objpath)
abs_objpath = get_abspath(objpath, cwd=cwd)
if os.path.isdir(abs_objpath):
pyx_fname = os.path.basename(pyxpath)
name, ext = os.path.splitext(pyx_fname)
objpath = os.path.join(objpath, name + objext)
cy_kwargs = cy_kwargs or {}
cy_kwargs['output_dir'] = cwd
if cplus is None:
cplus = pyx_is_cplus(pyxpath)
cy_kwargs['cplus'] = cplus
interm_c_file = simple_cythonize(pyxpath, destdir=destdir, cwd=cwd, **cy_kwargs)
include_dirs = include_dirs or []
flags = kwargs.pop('flags', [])
needed_flags = ('-fwrapv', '-pthread', '-fPIC')
for flag in needed_flags:
if flag not in flags:
flags.append(flag)
options = kwargs.pop('options', [])
if kwargs.pop('strict_aliasing', False):
raise CompileError("Cython requires strict aliasing to be disabled.")
# Let's be explicit about standard
if cplus:
std = kwargs.pop('std', 'c++98')
else:
std = kwargs.pop('std', 'c99')
return src2obj(interm_c_file, objpath=objpath, cwd=cwd,
include_dirs=include_dirs, flags=flags, std=std,
options=options, inc_py=True, strict_aliasing=False,
**kwargs)
def _any_X(srcs, cls):
for src in srcs:
name, ext = os.path.splitext(src)
key = ext.lower()
if key in extension_mapping:
if extension_mapping[key][0] == cls:
return True
return False
def any_fortran_src(srcs):
return _any_X(srcs, FortranCompilerRunner)
def any_cplus_src(srcs):
return _any_X(srcs, CppCompilerRunner)
def compile_link_import_py_ext(sources, extname=None, build_dir='.', compile_kwargs=None,
link_kwargs=None, extra_objs=None):
""" Compiles sources to a shared object (Python extension) and imports it
If the shared object is newer than the sources, they are not
recompiled; the existing extension module is imported as-is.
Parameters
==========
sources : list of strings
List of paths to sources.
extname : string
Name of extension (default: ``None``).
If ``None``: taken from the last file in ``sources`` without extension.
build_dir: str
Path to directory in which object files etc. are generated.
compile_kwargs: dict
keyword arguments passed to ``compile_sources``
link_kwargs: dict
keyword arguments passed to ``link_py_so``
extra_objs: list
List of paths to (prebuilt) object files / static libraries to link against.
Returns
=======
The imported Python extension module.
"""
if extname is None:
extname = os.path.splitext(os.path.basename(sources[-1]))[0]
compile_kwargs = compile_kwargs or {}
link_kwargs = link_kwargs or {}
try:
mod = import_module_from_file(os.path.join(build_dir, extname), sources)
except ImportError:
objs = compile_sources(list(map(get_abspath, sources)), destdir=build_dir,
cwd=build_dir, **compile_kwargs)
so = link_py_so(objs, cwd=build_dir, fort=any_fortran_src(sources),
cplus=any_cplus_src(sources), extra_objs=extra_objs, **link_kwargs)
mod = import_module_from_file(so)
return mod
def _write_sources_to_build_dir(sources, build_dir):
build_dir = build_dir or tempfile.mkdtemp()
if not os.path.isdir(build_dir):
raise OSError("Non-existent directory: {}".format(build_dir))
source_files = []
for name, src in sources:
dest = os.path.join(build_dir, name)
differs = True
sha256_in_mem = sha256_of_string(src.encode('utf-8')).hexdigest()
if os.path.exists(dest):
if os.path.exists(dest + '.sha256'):
with open(dest + '.sha256') as fh:
sha256_on_disk = fh.read()
else:
sha256_on_disk = sha256_of_file(dest).hexdigest()
differs = sha256_on_disk != sha256_in_mem
if differs:
with open(dest, 'wt') as fh:
fh.write(src)
with open(dest + '.sha256', 'wt') as fh:
fh.write(sha256_in_mem)
source_files.append(dest)
return source_files, build_dir
def compile_link_import_strings(sources, build_dir=None, **kwargs):
""" Compiles, links and imports extension module from source.
Parameters
==========
sources : iterable of name/source pair tuples
build_dir : string (default: None)
Path. ``None`` implies use a temporary directory.
**kwargs:
Keyword arguments passed onto `compile_link_import_py_ext`.
Returns
=======
mod : module
The compiled and imported extension module.
info : dict
Containing ``build_dir`` as 'build_dir'.
"""
source_files, build_dir = _write_sources_to_build_dir(sources, build_dir)
mod = compile_link_import_py_ext(source_files, build_dir=build_dir, **kwargs)
info = {"build_dir": build_dir}
return mod, info
def compile_run_strings(sources, build_dir=None, clean=False, compile_kwargs=None, link_kwargs=None):
""" Compiles, links and runs a program built from sources.
Parameters
==========
sources : iterable of name/source pair tuples
build_dir : string (default: None)
Path. ``None`` implies use a temporary directory.
clean : bool
Whether to remove build_dir after use. This will only have an
effect if ``build_dir`` is ``None`` (which creates a temporary directory).
Passing ``clean == True`` and ``build_dir != None`` raises a ``ValueError``.
This will also set ``build_dir`` in returned info dictionary to ``None``.
compile_kwargs: dict
Keyword arguments passed onto ``compile_sources``
link_kwargs: dict
Keyword arguments passed onto ``link``
Returns
=======
(stdout, stderr): pair of strings
info: dict
Containing exit status as 'exit_status' and ``build_dir`` as 'build_dir'
"""
if clean and build_dir is not None:
raise ValueError("Automatic removal of build_dir is only available for temporary directory.")
try:
source_files, build_dir = _write_sources_to_build_dir(sources, build_dir)
objs = compile_sources(list(map(get_abspath, source_files)), destdir=build_dir,
cwd=build_dir, **(compile_kwargs or {}))
prog = link(objs, cwd=build_dir,
fort=any_fortran_src(source_files),
cplus=any_cplus_src(source_files), **(link_kwargs or {}))
p = subprocess.Popen([prog], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
exit_status = p.wait()
stdout, stderr = [txt.decode('utf-8') for txt in p.communicate()]
finally:
if clean and os.path.isdir(build_dir):
shutil.rmtree(build_dir)
build_dir = None
info = {"exit_status": exit_status, "build_dir": build_dir}
return (stdout, stderr), info
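
A short sketch of the string-based entry point defined above; it assumes a
working C compiler and Cython (the ``_answer.pyx`` name and source are
illustrative). ``compile_link_import_strings`` builds in a temporary
directory which the caller is expected to remove, as the test below does:

from sympy.utilities._compilation import compile_link_import_strings

sources = [('_answer.pyx', 'def answer():\n    return 42\n')]
mod, info = compile_link_import_strings(sources)
assert mod.answer() == 42
print(info['build_dir'])  # remove with shutil.rmtree() when done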


@@ -0,0 +1,301 @@
from __future__ import annotations
from typing import Callable, Optional
from collections import OrderedDict
import os
import re
import subprocess
import warnings
from .util import (
find_binary_of_command, unique_list, CompileError
)
class CompilerRunner:
""" CompilerRunner base class.
Parameters
==========
sources : list of str
Paths to sources.
out : str
flags : iterable of str
Compiler flags.
run_linker : bool
compiler_name_exe : (str, str) tuple
Tuple of compiler name & command to call.
cwd : str
Path of root of relative paths.
include_dirs : list of str
Include directories.
libraries : list of str
Libraries to link against.
library_dirs : list of str
Paths to search for shared libraries.
std : str
Standard string, e.g. ``'c++11'``, ``'c99'``, ``'f2003'``.
define: iterable of strings
macros to define
undef : iterable of strings
macros to undefine
preferred_vendor : string
name of preferred vendor e.g. 'gnu' or 'intel'
Methods
=======
run():
Invoke compilation as a subprocess.
"""
environ_key_compiler: str # e.g. 'CC', 'CXX', ...
environ_key_flags: str # e.g. 'CFLAGS', 'CXXFLAGS', ...
environ_key_ldflags: str = "LDFLAGS" # typically 'LDFLAGS'
# Subclass to vendor/binary dict
compiler_dict: dict[str, str]
# Standards should be a tuple of supported standards
# (first one will be the default)
standards: tuple[None | str, ...]
# Subclass to dict of binary/formatter-callback
std_formater: dict[str, Callable[[Optional[str]], str]]
# subclass to be e.g. {'gcc': 'gnu', ...}
compiler_name_vendor_mapping: dict[str, str]
def __init__(self, sources, out, flags=None, run_linker=True, compiler=None, cwd='.',
include_dirs=None, libraries=None, library_dirs=None, std=None, define=None,
undef=None, strict_aliasing=None, preferred_vendor=None, linkline=None, **kwargs):
if isinstance(sources, str):
raise ValueError("Expected argument sources to be a list of strings.")
self.sources = list(sources)
self.out = out
self.flags = flags or []
if os.environ.get(self.environ_key_flags):
self.flags += os.environ[self.environ_key_flags].split()
self.cwd = cwd
if compiler:
self.compiler_name, self.compiler_binary = compiler
elif os.environ.get(self.environ_key_compiler):
self.compiler_binary = os.environ[self.environ_key_compiler]
for k, v in self.compiler_dict.items():
if k in self.compiler_binary:
self.compiler_vendor = k
self.compiler_name = v
break
else:
self.compiler_vendor, self.compiler_name = list(self.compiler_dict.items())[0]
warnings.warn("failed to determine what kind of compiler %s is, assuming %s" %
(self.compiler_binary, self.compiler_name))
else:
# Find a compiler
if preferred_vendor is None:
preferred_vendor = os.environ.get('SYMPY_COMPILER_VENDOR', None)
self.compiler_name, self.compiler_binary, self.compiler_vendor = self.find_compiler(preferred_vendor)
if self.compiler_binary is None:
raise ValueError("No compiler found (searched: {})".format(', '.join(self.compiler_dict.values())))
self.define = define or []
self.undef = undef or []
self.include_dirs = include_dirs or []
self.libraries = libraries or []
self.library_dirs = library_dirs or []
self.std = std or self.standards[0]
self.run_linker = run_linker
if self.run_linker:
# both gnu and intel compilers use '-c' for disabling linker
self.flags = list(filter(lambda x: x != '-c', self.flags))
else:
if '-c' not in self.flags:
self.flags.append('-c')
if self.std:
self.flags.append(self.std_formater[
self.compiler_name](self.std))
self.linkline = (linkline or []) + [lf for lf in map(
str.strip, os.environ.get(self.environ_key_ldflags, "").split()
) if lf != ""]
if strict_aliasing is not None:
nsa_re = re.compile("no-strict-aliasing$")
sa_re = re.compile("strict-aliasing$")
if strict_aliasing is True:
if any(map(nsa_re.match, flags)):
raise CompileError("Strict aliasing cannot be both enforced and disabled")
elif any(map(sa_re.match, flags)):
pass # already enforced
else:
flags.append('-fstrict-aliasing')
elif strict_aliasing is False:
if any(map(nsa_re.match, flags)):
pass # already disabled
else:
if any(map(sa_re.match, flags)):
raise CompileError("Strict aliasing cannot be both enforced and disabled")
else:
flags.append('-fno-strict-aliasing')
else:
msg = "Expected argument strict_aliasing to be True/False, got {}"
raise ValueError(msg.format(strict_aliasing))
@classmethod
def find_compiler(cls, preferred_vendor=None):
""" Identify a suitable C/fortran/other compiler. """
candidates = list(cls.compiler_dict.keys())
if preferred_vendor:
if preferred_vendor in candidates:
candidates = [preferred_vendor]+candidates
else:
raise ValueError("Unknown vendor {}".format(preferred_vendor))
name, path = find_binary_of_command([cls.compiler_dict[x] for x in candidates])
return name, path, cls.compiler_name_vendor_mapping[name]
def cmd(self):
""" List of arguments (str) to be passed to e.g. ``subprocess.Popen``. """
cmd = (
[self.compiler_binary] +
self.flags +
['-U'+x for x in self.undef] +
['-D'+x for x in self.define] +
['-I'+x for x in self.include_dirs] +
self.sources
)
if self.run_linker:
cmd += (['-L'+x for x in self.library_dirs] +
['-l'+x for x in self.libraries] +
self.linkline)
counted = []
for envvar in re.findall(r'\$\{(\w+)\}', ' '.join(cmd)):
if os.getenv(envvar) is None:
if envvar not in counted:
counted.append(envvar)
msg = "Environment variable '{}' undefined.".format(envvar)
raise CompileError(msg)
return cmd
def run(self):
self.flags = unique_list(self.flags)
# Append output flag and name to tail of flags
self.flags.extend(['-o', self.out])
env = os.environ.copy()
env['PWD'] = self.cwd
# NOTE: Intel compilers seem to need shell=True
p = subprocess.Popen(' '.join(self.cmd()),
shell=True,
cwd=self.cwd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=env)
comm = p.communicate()
try:
self.cmd_outerr = comm[0].decode('utf-8')
except UnicodeDecodeError:
self.cmd_outerr = comm[0].decode('iso-8859-1') # win32
self.cmd_returncode = p.returncode
# Error handling
if self.cmd_returncode != 0:
msg = "Error executing '{}' in {} (exited status {}):\n {}\n".format(
' '.join(self.cmd()), self.cwd, str(self.cmd_returncode), self.cmd_outerr
)
raise CompileError(msg)
return self.cmd_outerr, self.cmd_returncode
class CCompilerRunner(CompilerRunner):
environ_key_compiler = 'CC'
environ_key_flags = 'CFLAGS'
compiler_dict = OrderedDict([
('gnu', 'gcc'),
('intel', 'icc'),
('llvm', 'clang'),
])
standards = ('c89', 'c90', 'c99', 'c11') # First is default
std_formater = {
'gcc': '-std={}'.format,
'icc': '-std={}'.format,
'clang': '-std={}'.format,
}
compiler_name_vendor_mapping = {
'gcc': 'gnu',
'icc': 'intel',
'clang': 'llvm'
}
def _mk_flag_filter(cmplr_name): # helper for class initialization
not_welcome = {'g++': ("Wimplicit-interface",)} # "Wstrict-prototypes",)}
if cmplr_name in not_welcome:
def fltr(x):
for nw in not_welcome[cmplr_name]:
if nw in x:
return False
return True
else:
def fltr(x):
return True
return fltr
class CppCompilerRunner(CompilerRunner):
environ_key_compiler = 'CXX'
environ_key_flags = 'CXXFLAGS'
compiler_dict = OrderedDict([
('gnu', 'g++'),
('intel', 'icpc'),
('llvm', 'clang++'),
])
# First is the default, c++0x == c++11
standards = ('c++98', 'c++0x')
std_formater = {
'g++': '-std={}'.format,
'icpc': '-std={}'.format,
'clang++': '-std={}'.format,
}
compiler_name_vendor_mapping = {
'g++': 'gnu',
'icpc': 'intel',
'clang++': 'llvm'
}
class FortranCompilerRunner(CompilerRunner):
environ_key_compiler = 'FC'
environ_key_flags = 'FFLAGS'
standards = (None, 'f77', 'f95', 'f2003', 'f2008')
std_formater = {
'gfortran': lambda x: '-std=gnu' if x is None else '-std=legacy' if x == 'f77' else '-std={}'.format(x),
'ifort': lambda x: '-stand f08' if x is None else '-stand f{}'.format(x[-2:]), # f2008 => f08
}
compiler_dict = OrderedDict([
('gnu', 'gfortran'),
('intel', 'ifort'),
])
compiler_name_vendor_mapping = {
'gfortran': 'gnu',
'ifort': 'intel',
}
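
A sketch of how a runner resolves its compiler and is invoked directly; the
``sympy.utilities._compilation.runners`` import path and the file names are
assumptions for illustration. Resolution order in ``__init__`` above is: an
explicit ``compiler`` argument, then the ``CC``/``CXX``/``FC`` environment
variable, then the first binary found among the vendor candidates:

from sympy.utilities._compilation.runners import CCompilerRunner

name, path, vendor = CCompilerRunner.find_compiler()
print(name, path, vendor)  # e.g. ('gcc', '/usr/bin/gcc', 'gnu')

runner = CCompilerRunner(['hello.c'], 'hello.o', run_linker=False, std='c99')
out, retcode = runner.run()  # raises CompileError on a non-zero exit status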


@@ -0,0 +1,101 @@
import shutil
import os
import subprocess
import tempfile
from sympy.external import import_module
from sympy.testing.pytest import skip
from sympy.utilities._compilation.compilation import compile_link_import_py_ext, compile_link_import_strings, compile_sources, get_abspath
numpy = import_module('numpy')
cython = import_module('cython')
_sources1 = [
('sigmoid.c', r"""
#include <math.h>
void sigmoid(int n, const double * const restrict in,
double * const restrict out, double lim){
for (int i=0; i<n; ++i){
const double x = in[i];
out[i] = x*pow(pow(x/lim, 8)+1, -1./8.);
}
}
"""),
('_sigmoid.pyx', r"""
import numpy as np
cimport numpy as cnp
cdef extern void c_sigmoid "sigmoid" (int, const double * const,
double * const, double)
def sigmoid(double [:] inp, double lim=350.0):
cdef cnp.ndarray[cnp.float64_t, ndim=1] out = np.empty(
inp.size, dtype=np.float64)
c_sigmoid(inp.size, &inp[0], &out[0], lim)
return out
""")
]
def npy(data, lim=350.0):
return data/((data/lim)**8+1)**(1/8.)
def test_compile_link_import_strings():
if not numpy:
skip("numpy not installed.")
if not cython:
skip("cython not installed.")
from sympy.utilities._compilation import has_c
if not has_c():
skip("No C compiler found.")
compile_kw = {"std": 'c99', "include_dirs": [numpy.get_include()]}
info = None
try:
mod, info = compile_link_import_strings(_sources1, compile_kwargs=compile_kw)
data = numpy.random.random(1024*1024*8) # 64 MB of RAM needed..
res_mod = mod.sigmoid(data)
res_npy = npy(data)
assert numpy.allclose(res_mod, res_npy)
finally:
if info and info['build_dir']:
shutil.rmtree(info['build_dir'])
def test_compile_sources(tmpdir):
from sympy.utilities._compilation import has_c
if not has_c():
skip("No C compiler found.")
build_dir = str(tmpdir)
_handle, file_path = tempfile.mkstemp('.c', dir=build_dir)
with open(file_path, 'wt') as ofh:
ofh.write("""
int foo(int bar) {
return 2*bar;
}
""")
obj, = compile_sources([file_path], cwd=build_dir)
obj_path = get_abspath(obj, cwd=build_dir)
assert os.path.exists(obj_path)
try:
_ = subprocess.check_output(["nm", "--help"])
except subprocess.CalledProcessError:
pass # we cannot test contents of object file
else:
nm_out = subprocess.check_output(["nm", obj_path])
assert 'foo' in nm_out.decode('utf-8')
if not cython:
return # the final (optional) part of the test below requires Cython.
_handle, pyx_path = tempfile.mkstemp('.pyx', dir=build_dir)
with open(pyx_path, 'wt') as ofh:
ofh.write(("cdef extern int foo(int)\n"
"def _foo(arg):\n"
" return foo(arg)"))
mod = compile_link_import_py_ext([pyx_path], extra_objs=[obj_path], build_dir=build_dir)
assert mod._foo(21) == 42


@@ -0,0 +1,312 @@
from collections import namedtuple
from hashlib import sha256
import os
import shutil
import sys
import fnmatch
from sympy.testing.pytest import XFAIL
def may_xfail(func):
if sys.platform.lower() == 'darwin' or os.name == 'nt':
# sympy.utilities._compilation needs more testing on Windows and macOS
# once those two platforms are reliably supported this xfail decorator
# may be removed.
return XFAIL(func)
else:
return func
class CompilerNotFoundError(FileNotFoundError):
pass
class CompileError(Exception):
"""Failure to compile one or more C/C++ source files."""
def get_abspath(path, cwd='.'):
""" Returns the absolute path.
Parameters
==========
path : str
(relative) path.
cwd : str
Path to root of relative path.
"""
if os.path.isabs(path):
return path
else:
if not os.path.isabs(cwd):
cwd = os.path.abspath(cwd)
return os.path.abspath(
os.path.join(cwd, path)
)
def make_dirs(path):
""" Create directories (equivalent of ``mkdir -p``). """
if path[-1] == '/':
parent = os.path.dirname(path[:-1])
else:
parent = os.path.dirname(path)
if len(parent) > 0:
if not os.path.exists(parent):
make_dirs(parent)
if not os.path.exists(path):
os.mkdir(path, 0o777)
else:
assert os.path.isdir(path)
def missing_or_other_newer(path, other_path, cwd=None):
"""
Investigate if path is non-existent or older than the provided reference
path.
Parameters
==========
path: string
path to the file which might be missing or too old
other_path: string
reference path
cwd: string
working directory (root of relative paths)
Returns
=======
True if path is older or missing.
"""
cwd = cwd or '.'
path = get_abspath(path, cwd=cwd)
other_path = get_abspath(other_path, cwd=cwd)
if not os.path.exists(path):
return True
if os.path.getmtime(other_path) - 1e-6 >= os.path.getmtime(path):
# 1e-6 is needed because http://stackoverflow.com/questions/17086426/
return True
return False
def copy(src, dst, only_update=False, copystat=True, cwd=None,
dest_is_dir=False, create_dest_dirs=False):
""" Variation of ``shutil.copy`` with extra options.
Parameters
==========
src : str
Path to source file.
dst : str
Path to destination.
only_update : bool
Only copy if source is newer than destination
(returns None if it was newer), default: ``False``.
copystat : bool
See ``shutil.copystat``. default: ``True``.
cwd : str
Path to working directory (root of relative paths).
dest_is_dir : bool
Ensures that dst is treated as a directory. default: ``False``
create_dest_dirs : bool
Creates directories if needed.
Returns
=======
Path to the copied file.
"""
if cwd: # Handle working directory
if not os.path.isabs(src):
src = os.path.join(cwd, src)
if not os.path.isabs(dst):
dst = os.path.join(cwd, dst)
if not os.path.exists(src): # Make sure source file exists
raise FileNotFoundError("Source: `{}` does not exist".format(src))
# We accept both (re)naming the destination file _or_
# passing a (possibly non-existent) destination directory
if dest_is_dir:
if not dst[-1] == '/':
dst = dst+'/'
else:
if os.path.exists(dst) and os.path.isdir(dst):
dest_is_dir = True
if dest_is_dir:
dest_dir = dst
dest_fname = os.path.basename(src)
dst = os.path.join(dest_dir, dest_fname)
else:
dest_dir = os.path.dirname(dst)
if not os.path.exists(dest_dir):
if create_dest_dirs:
make_dirs(dest_dir)
else:
raise FileNotFoundError("You must create directory first.")
if only_update:
if not missing_or_other_newer(dst, src):
return
if os.path.islink(dst):
dst = get_abspath(os.path.realpath(dst), cwd=cwd)
shutil.copy(src, dst)
if copystat:
shutil.copystat(src, dst)
return dst
Glob = namedtuple('Glob', 'pathname')
ArbitraryDepthGlob = namedtuple('ArbitraryDepthGlob', 'filename')
def glob_at_depth(filename_glob, cwd=None):
if cwd is None:
cwd = '.'
globbed = []
for root, dirs, filenames in os.walk(cwd):
for fn in filenames:
# This is not tested:
if fnmatch.fnmatch(fn, filename_glob):
globbed.append(os.path.join(root, fn))
return globbed
def sha256_of_file(path, nblocks=128):
""" Computes the SHA256 hash of a file.
Parameters
==========
path : string
Path to file to compute hash of.
nblocks : int
Number of blocks to read per iteration.
Returns
=======
hashlib sha256 hash object. Use ``.digest()`` or ``.hexdigest()``
on returned object to get binary or hex encoded string.
"""
sh = sha256()
with open(path, 'rb') as f:
for chunk in iter(lambda: f.read(nblocks*sh.block_size), b''):
sh.update(chunk)
return sh
def sha256_of_string(string):
""" Computes the SHA256 hash of a string. """
sh = sha256()
sh.update(string)
return sh
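# Usage sketch (illustrative): this is how _write_sources_to_build_dir() in
# compilation.py decides whether a source string must be (re)written to disk:
#
#     in_mem = sha256_of_string(src.encode('utf-8')).hexdigest()
#     on_disk = sha256_of_file(dest).hexdigest()
#     needs_rewrite = in_mem != on_disk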
def pyx_is_cplus(path):
"""
Inspect a Cython source file (.pyx) and look for comment line like:
# distutils: language = c++
Returns True if such a line is present in the file, else False.
"""
with open(path) as fh:
for line in fh:
if line.startswith('#') and '=' in line:
splitted = line.split('=')
if len(splitted) != 2:
continue
lhs, rhs = splitted
if lhs.strip().split()[-1].lower() == 'language' and \
rhs.strip().split()[0].lower() == 'c++':
return True
return False
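# Example (illustrative): a .pyx file containing the following comment line
# makes pyx_is_cplus() return True; whitespace around '=' is tolerated:
#
#     # distutils: language = c++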
def import_module_from_file(filename, only_if_newer_than=None):
""" Imports Python extension (from shared object file)
Provide a list of paths in ``only_if_newer_than`` to check
timestamps of dependencies. An ImportError is raised
if any dependency is newer.
Word of warning: The OS may cache shared objects, which makes
reimporting the same path of a shared object file very problematic.
It will not detect the new time stamp nor the new checksum, but will
instead silently use the old module. Use unique names for this reason.
Parameters
==========
filename : str
Path to shared object.
only_if_newer_than : iterable of strings
Paths to dependencies of the shared object.
Raises
======
``ImportError`` if any of the files specified in ``only_if_newer_than`` are newer
than the file given by filename.
"""
path, name = os.path.split(filename)
name, ext = os.path.splitext(name)
name = name.split('.')[0]
if sys.version_info[0] == 2:
from imp import find_module, load_module
fobj, filename, data = find_module(name, [path])
if only_if_newer_than:
for dep in only_if_newer_than:
if os.path.getmtime(filename) < os.path.getmtime(dep):
raise ImportError("{} is newer than {}".format(dep, filename))
mod = load_module(name, fobj, filename, data)
else:
import importlib.util
spec = importlib.util.spec_from_file_location(name, filename)
if spec is None:
raise ImportError("Failed to import: '%s'" % filename)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
return mod
def find_binary_of_command(candidates):
""" Finds binary first matching name among candidates.
Calls ``which`` from ``shutil`` for the provided candidates and returns
the first hit.
Parameters
==========
candidates : iterable of str
Names of candidate commands
Raises
======
CompilerNotFoundError if no candidates match.
"""
from shutil import which
for c in candidates:
binary_path = which(c)
if c and binary_path:
return c, binary_path
raise CompilerNotFoundError('No binary located for candidates: {}'.format(candidates))
def unique_list(l):
""" Uniquify a list (skip duplicate items). """
result = []
for x in l:
if x not in result:
result.append(x)
return result