NumPy 源码解析(六)
.\numpy\numpy\distutils\numpy_distribution.py
from distutils.core import Distribution
class NumpyDistribution(Distribution):
    """Distribution subclass used by numpy.distutils.

    Adds bookkeeping slots for scons build scripts and for libraries /
    pkg-config metadata recorded during installation.
    """

    def __init__(self, attrs=None):
        # Extra state consumed by numpy.distutils commands; must exist
        # before the base-class constructor processes `attrs`.
        self.scons_data = []
        self.installed_libraries = []
        self.installed_pkg_config = {}
        Distribution.__init__(self, attrs)

    def has_scons_scripts(self):
        """Return True when at least one scons script is registered."""
        return len(self.scons_data) > 0
.\numpy\numpy\distutils\pathccompiler.py
from distutils.unixccompiler import UnixCCompiler
class PathScaleCCompiler(UnixCCompiler):
    """C compiler wrapper for the PathScale compiler, compatible with a
    gcc-built Python."""

    compiler_type = 'pathcc'
    cc_exe = 'pathcc'
    cxx_exe = 'pathCC'

    def __init__(self, verbose=0, dry_run=0, force=0):
        UnixCCompiler.__init__(self, verbose, dry_run, force)
        cc = self.cc_exe
        cxx = self.cxx_exe
        # Route every build step through the PathScale executables;
        # shared-library links additionally need '-shared'.
        self.set_executables(compiler=cc,
                             compiler_so=cc,
                             compiler_cxx=cxx,
                             linker_exe=cc,
                             linker_so=cc + ' -shared')
.\numpy\numpy\distutils\system_info.py
"""
This file defines a set of system_info classes for getting
information about various resources (libraries, library directories,
include directories, etc.) in the system. Usage:
info_dict = get_info(<name>)
where <name> is a string 'atlas','x11','fftw','lapack','blas',
'lapack_src', 'blas_src', etc. For a complete list of allowed names,
see the definition of get_info() function below.
Returned info_dict is a dictionary which is compatible with
distutils.setup keyword arguments. If info_dict == {}, then the
asked resource is not available (system_info could not find it).
Several *_info classes specify an environment variable to specify
the locations of software. When setting the corresponding environment
variable to 'None' then the software will be ignored, even when it
is available in system.
Global parameters:
system_info.search_static_first - search static libraries (.a)
in precedence to shared ones (.so, .sl) if enabled.
system_info.verbosity - output the results to stdout if enabled.
The file 'site.cfg' is looked for in
1) Directory of main setup.py file being run.
2) Home directory of user running the setup.py file as ~/.numpy-site.cfg
3) System wide directory (location of this file...)
The first one found is used to get system configuration options The
format is that used by ConfigParser (i.e., Windows .INI style). The
section ALL is not intended for general use.
Appropriate defaults are used if nothing is specified.
The order of finding the locations of resources is the following:
1. environment variable
2. section in site.cfg
3. DEFAULT section in site.cfg
4. System default search paths (see ``default_*`` variables below).
Only the first complete match is returned.
Currently, the following classes are available, along with their section names:
Numeric_info:Numeric
_numpy_info:Numeric
_pkg_config_info:None
accelerate_info:accelerate
accelerate_lapack_info:accelerate
agg2_info:agg2
amd_info:amd
atlas_3_10_blas_info:atlas
atlas_3_10_blas_threads_info:atlas
atlas_3_10_info:atlas
atlas_3_10_threads_info:atlas
atlas_blas_info:atlas
atlas_blas_threads_info:atlas
atlas_info:atlas
atlas_threads_info:atlas
blas64__opt_info:ALL # usage recommended (general ILP64 BLAS, 64_ symbol suffix)
blas_ilp64_opt_info:ALL # usage recommended (general ILP64 BLAS)
blas_ilp64_plain_opt_info:ALL # usage recommended (general ILP64 BLAS, no symbol suffix)
blas_info:blas
blas_mkl_info:mkl
blas_ssl2_info:ssl2
blas_opt_info:ALL # usage recommended
blas_src_info:blas_src
blis_info:blis
boost_python_info:boost_python
dfftw_info:fftw
dfftw_threads_info:fftw
djbfft_info:djbfft
f2py_info:ALL
fft_opt_info:ALL
fftw2_info:fftw
fftw3_info:fftw3
fftw_info:fftw
fftw_threads_info:fftw
flame_info:flame
"""
freetype2_info:freetype2
gdk_2_info:gdk_2
gdk_info:gdk
gdk_pixbuf_2_info:gdk_pixbuf_2
gdk_pixbuf_xlib_2_info:gdk_pixbuf_xlib_2
gdk_x11_2_info:gdk_x11_2
gtkp_2_info:gtkp_2
gtkp_x11_2_info:gtkp_x11_2
lapack64__opt_info:ALL
lapack_atlas_3_10_info:atlas
lapack_atlas_3_10_threads_info:atlas
lapack_atlas_info:atlas
lapack_atlas_threads_info:atlas
lapack_ilp64_opt_info:ALL
lapack_ilp64_plain_opt_info:ALL
lapack_info:lapack
lapack_mkl_info:mkl
lapack_ssl2_info:ssl2
lapack_opt_info:ALL
lapack_src_info:lapack_src
mkl_info:mkl
ssl2_info:ssl2
numarray_info:numarray
numerix_info:numerix
numpy_info:numpy
openblas64__info:openblas64_
openblas64__lapack_info:openblas64_
openblas_clapack_info:openblas
openblas_ilp64_info:openblas_ilp64
openblas_ilp64_lapack_info:openblas_ilp64
openblas_info:openblas
openblas_lapack_info:openblas
sfftw_info:fftw
sfftw_threads_info:fftw
system_info:ALL
umfpack_info:umfpack
wx_info:wx
x11_info:x11
xft_info:xft
import sys
import os
import re
import copy
import warnings
import subprocess
import textwrap
from glob import glob
from functools import reduce
from configparser import NoOptionError, RawConfigParser as ConfigParser
from distutils.errors import DistutilsError
from distutils.dist import Distribution
import sysconfig
from numpy.distutils import log
from distutils.util import get_platform
from numpy.distutils.exec_command import (
find_executable, filepath_from_subprocess_output,
)
from numpy.distutils.misc_util import (is_sequence, is_string,
get_shared_lib_extension)
from numpy.distutils.command.config import config as cmd_config
from numpy.distutils import customized_ccompiler as _customized_ccompiler
from numpy.distutils import _shell_utils
import distutils.ccompiler
import tempfile
import shutil
# Only the `system_info` entry point is part of the public API.
__all__ = ['system_info']

import platform

# Map platform.architecture() strings to integer pointer sizes; used to
# decide whether '64'-suffixed library directories should be searched.
_bits = {'32bit': 32, '64bit': 64}
platform_bits = _bits[platform.architecture()[0]]
# Process-wide cached compiler instance; building one is expensive, so it is
# created lazily on first request and then reused.
global_compiler = None


def customized_ccompiler():
    """Return the shared, lazily-created customized C compiler instance."""
    global global_compiler
    if not global_compiler:
        # First call: build and memoize the compiler.
        global_compiler = _customized_ccompiler()
    return global_compiler
def _c_string_literal(s):
"""
Convert a python string into a literal suitable for inclusion into C code
"""
s = s.replace('\\', r'\\')
s = s.replace('"', r'\"')
s = s.replace('\n', r'\n')
return '"{}"'.format(s)
def libpaths(paths, bits):
    """Return a list of library paths valid on 32 or 64 bit systems.

    Parameters
    ----------
    paths : sequence
        A sequence of strings (typically paths)
    bits : int
        Either 32 or 64; any other value raises ValueError.

    Returns
    -------
    list of str
        On 32-bit platforms, `paths` unchanged; on 64-bit platforms, each
        path preceded by its '64'-suffixed variant, e.g. '/usr/lib' becomes
        ['/usr/lib64', '/usr/lib'].
    """
    if bits not in (32, 64):
        raise ValueError("Invalid bit size in libpaths: 32 or 64 only")
    if bits == 32:
        # Nothing to rewrite for a 32-bit platform.
        return paths
    # 64-bit: search the '64'-suffixed directory before the plain one.
    result = []
    for path in paths:
        result.append(path + '64')
        result.append(path)
    return result
# Build the platform-dependent default search paths for libraries, headers
# and sources.  These module-level lists seed the ConfigParser defaults in
# system_info.__init__ below.
if sys.platform == 'win32':
    default_lib_dirs = ['C:\\',
                        os.path.join(sysconfig.get_config_var('exec_prefix'),
                                     'libs')]
    default_runtime_dirs = []
    default_include_dirs = []
    default_src_dirs = ['.']
    default_x11_lib_dirs = []
    default_x11_include_dirs = []

    # Sub-directories probed under each package-manager root.
    _include_dirs = [
        'include',
        'include/suitesparse',
    ]
    _lib_dirs = [
        'lib',
    ]

    _include_dirs = [d.replace('/', os.sep) for d in _include_dirs]
    _lib_dirs = [d.replace('/', os.sep) for d in _lib_dirs]

    def add_system_root(library_root):
        """Add a package manager root to the include directories"""
        global default_lib_dirs
        global default_include_dirs

        library_root = os.path.normpath(library_root)

        default_lib_dirs.extend(
            os.path.join(library_root, d) for d in _lib_dirs)
        default_include_dirs.extend(
            os.path.join(library_root, d) for d in _include_dirs)

    # If vcpkg is on PATH, register its installed tree (both the dynamic and
    # the static triplet) as additional search roots.
    vcpkg = shutil.which('vcpkg')
    if vcpkg:
        vcpkg_dir = os.path.dirname(vcpkg)
        if platform.architecture()[0] == '32bit':
            specifier = 'x86'
        else:
            specifier = 'x64'

        vcpkg_installed = os.path.join(vcpkg_dir, 'installed')
        for vcpkg_root in [
            os.path.join(vcpkg_installed, specifier + '-windows'),
            os.path.join(vcpkg_installed, specifier + '-windows-static'),
        ]:
            add_system_root(vcpkg_root)

    # Likewise for a conda environment's Library tree.
    conda = shutil.which('conda')
    if conda:
        conda_dir = os.path.dirname(conda)
        add_system_root(os.path.join(conda_dir, '..', 'Library'))
        add_system_root(os.path.join(conda_dir, 'Library'))

else:
    # POSIX-like platforms: conventional prefixes, with '64'-suffixed
    # variants prepended on 64-bit systems via libpaths().
    default_lib_dirs = libpaths(['/usr/local/lib', '/opt/lib', '/usr/lib',
                                 '/opt/local/lib', '/sw/lib'], platform_bits)
    default_runtime_dirs = []
    default_include_dirs = ['/usr/local/include',
                            '/opt/include',
                            # path of umfpack under macports
                            '/opt/local/include/ufsparse',
                            '/opt/local/include', '/sw/include',
                            '/usr/include/suitesparse']
    default_src_dirs = ['.', '/usr/local/src', '/opt/src', '/sw/src']

    default_x11_lib_dirs = libpaths(['/usr/X11R6/lib', '/usr/X11/lib',
                                     '/usr/lib'], platform_bits)
    default_x11_include_dirs = ['/usr/X11R6/include', '/usr/X11/include']

    if os.path.exists('/usr/lib/X11'):
        globbed_x11_dir = glob('/usr/lib/*/libX11.so')
        if globbed_x11_dir:
            x11_so_dir = os.path.split(globbed_x11_dir[0])[0]
            default_x11_lib_dirs.extend([x11_so_dir, '/usr/lib/X11'])
            default_x11_include_dirs.extend(['/usr/lib/X11/include',
                                             '/usr/include/X11'])

        # Ask gcc for the multiarch triplet (e.g. x86_64-linux-gnu) so the
        # Debian/Ubuntu multiarch library directory is searched too.
        with open(os.devnull, 'w') as tmp:
            try:
                p = subprocess.Popen(["gcc", "-print-multiarch"], stdout=subprocess.PIPE,
                                     stderr=tmp)
            except (OSError, DistutilsError):
                # OSError if gcc is not installed; DistutilsError covers a
                # setuptools sandbox violation on old versions.
                pass
            else:
                triplet = str(p.communicate()[0].decode().strip())
                if p.returncode == 0:
                    # gcc supports "-print-multiarch": add the triplet dirs.
                    default_x11_lib_dirs += [os.path.join("/usr/lib/", triplet)]
                    default_lib_dirs += [os.path.join("/usr/lib/", triplet)]

# Always search inside the Python installation prefix as well.
if os.path.join(sys.prefix, 'lib') not in default_lib_dirs:
    default_lib_dirs.insert(0, os.path.join(sys.prefix, 'lib'))
default_include_dirs.append(os.path.join(sys.prefix, 'include'))
default_src_dirs.append(os.path.join(sys.prefix, 'src'))

# Drop every candidate directory that does not actually exist.
default_lib_dirs = [_m for _m in default_lib_dirs if os.path.isdir(_m)]
default_runtime_dirs = [_m for _m in default_runtime_dirs if os.path.isdir(_m)]
default_include_dirs = [_m for _m in default_include_dirs if os.path.isdir(_m)]
default_src_dirs = [_m for _m in default_src_dirs if os.path.isdir(_m)]

# Shared-library suffix for this platform ('.so', '.dylib', '.pyd', ...).
so_ext = get_shared_lib_extension()
def get_standard_file(fname):
    """Return existing files named `fname`, searched in this order:
    1) the system-wide directory (the directory of this module),
    2) the user's HOME directory (via os.path.expanduser('~')),
    3) the current working directory.
    """
    found = []

    # 1) directory containing this module (fall back to the script path
    #    when __file__ is undefined, e.g. frozen interpreters).
    try:
        here = __file__
    except NameError:
        here = sys.argv[0]
    sysfile = os.path.join(os.path.split(os.path.abspath(here))[0], fname)
    if os.path.isfile(sysfile):
        found.append(sysfile)

    # 2) user's home directory; expanduser can raise KeyError when no home
    #    is resolvable.
    try:
        home = os.path.expanduser('~')
    except KeyError:
        pass
    else:
        user_file = os.path.join(home, fname)
        if os.path.isfile(user_file):
            found.append(user_file)

    # 3) local (current working) directory.
    if os.path.isfile(fname):
        found.append(os.path.abspath(fname))

    return found
def _parse_env_order(base_order, env):
""" Parse an environment variable `env` by splitting with "," and only returning elements from `base_order`
This method will sequence the environment variable and check for their
individual elements in `base_order`.
The items in the environment variable may be negated via '^item' or '!itema,itemb'.
It must start with ^/! to negate all options.
Raises
------
ValueError: for mixed negated and non-negated orders or multiple negated orders
Parameters
----------
base_order : list of str
the base list of orders
env : str
the environment variable to be parsed, if none is found, `base_order` is returned
Returns
-------
allow_order : list of str
allowed orders in lower-case
unknown_order : list of str
for values not overlapping with `base_order`
"""
order_str = os.environ.get(env, None)
base_order = [order.lower() for order in base_order]
if order_str is None:
return base_order, []
neg = order_str.startswith('^') or order_str.startswith('!')
order_str_l = list(order_str)
sum_neg = order_str_l.count('^') + order_str_l.count('!')
if neg:
if sum_neg > 1:
raise ValueError(f"Environment variable '{env}' may only contain a single (prefixed) negation: {order_str}")
order_str = order_str[1:]
elif sum_neg > 0:
raise ValueError(f"Environment variable '{env}' may not mix negated an non-negated items: {order_str}")
orders = order_str.lower().split(',')
unknown_order = []
if neg:
allow_order = base_order.copy()
for order in orders:
if not order:
continue
if order not in base_order:
unknown_order.append(order)
continue
if order in allow_order:
allow_order.remove(order)
else:
allow_order = []
for order in orders:
if not order:
continue
if order not in base_order:
unknown_order.append(order)
continue
if order not in allow_order:
allow_order.append(order)
return allow_order, unknown_order
class NotFoundError(DistutilsError):
    """Some third-party program or library is not found."""


class AliasedOptionError(DistutilsError):
    """
    Aliases entries in config files should not be existing.
    In section '{section}' we found multiple appearances of options {options}.
    """
# NOTE(review): these two exception classes were garbled in this copy (only a
# fragment of the AliasedOptionError docstring survived); both are required
# by the code below (NotFoundError as the *_info base error type, and
# AliasedOptionError.__doc__ is .format()-ed by
# system_info.get_option_single).
# The docstrings of the *NotFoundError classes below are user-facing:
# system_info.get_info() warns with (or raises) notfounderror.__doc__, so
# the text must remain stable and must not be reworded.
# NOTE(review): the NotFoundError base class is defined earlier in the
# original module.
class AtlasNotFoundError(NotFoundError):
    """
    Atlas (http://github.com/math-atlas/math-atlas) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [atlas]) or by setting
    the ATLAS environment variable.
    """


class FlameNotFoundError(NotFoundError):
    """
    FLAME (http://www.cs.utexas.edu/~flame/web/) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [flame]).
    """


class LapackNotFoundError(NotFoundError):
    """
    Lapack (http://www.netlib.org/lapack/) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [lapack]) or by setting
    the LAPACK environment variable.
    """


class LapackSrcNotFoundError(LapackNotFoundError):
    """
    Lapack (http://www.netlib.org/lapack/) sources not found.
    Directories to search for the sources can be specified in the
    numpy/distutils/site.cfg file (section [lapack_src]) or by setting
    the LAPACK_SRC environment variable.
    """


class LapackILP64NotFoundError(NotFoundError):
    """
    64-bit Lapack libraries not found.
    Known libraries in numpy/distutils/site.cfg file are:
    openblas64_, openblas_ilp64
    """


class BlasOptNotFoundError(NotFoundError):
    """
    Optimized (vendor) Blas libraries are not found.
    Falls back to netlib Blas library which has worse performance.
    A better performance should be easily gained by switching
    Blas library.
    """


class BlasNotFoundError(NotFoundError):
    """
    Blas (http://www.netlib.org/blas/) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [blas]) or by setting
    the BLAS environment variable.
    """


class BlasILP64NotFoundError(NotFoundError):
    """
    64-bit Blas libraries not found.
    Known libraries in numpy/distutils/site.cfg file are:
    openblas64_, openblas_ilp64
    """


class BlasSrcNotFoundError(BlasNotFoundError):
    """
    Blas (http://www.netlib.org/blas/) sources not found.
    Directories to search for the sources can be specified in the
    numpy/distutils/site.cfg file (section [blas_src]) or by setting
    the BLAS_SRC environment variable.
    """


class FFTWNotFoundError(NotFoundError):
    """
    FFTW (http://www.fftw.org/) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [fftw]) or by setting
    the FFTW environment variable.
    """


class DJBFFTNotFoundError(NotFoundError):
    """
    DJBFFT (https://cr.yp.to/djbfft.html) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [djbfft]) or by setting
    the DJBFFT environment variable.
    """


class NumericNotFoundError(NotFoundError):
    """
    Placeholder for additional specific libraries not found.
    """


class X11NotFoundError(NotFoundError):
    """X11 libraries not found."""


class UmfpackNotFoundError(NotFoundError):
    """
    UMFPACK sparse solver (https://www.cise.ufl.edu/research/sparse/umfpack/)
    not found. Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [umfpack]) or by setting
    the UMFPACK environment variable."""
class system_info:

    """ get_info() is the only public method. Don't use others.
    """
    # Subclasses override these class attributes:
    # env var (string or sequence of strings) pointing at the package root
    dir_env_var = None
    # when truthy, static libraries (.a) take precedence over shared ones
    search_static_first = 0
    # site.cfg section read by this class
    section = 'ALL'
    # class-wide cache: class name -> info dict from a previous calc_info()
    saved_results = {}

    notfounderror = NotFoundError

    def __init__(self,
                 default_lib_dirs=default_lib_dirs,
                 default_include_dirs=default_include_dirs,
                 ):
        # Reset per-class scratch info; populated by calc_info in subclasses.
        self.__class__.info = {}
        self.local_prefixes = []
        # Defaults feed the ConfigParser so every section can resolve these
        # keys even when site.cfg does not set them.
        defaults = {'library_dirs': os.pathsep.join(default_lib_dirs),
                    'include_dirs': os.pathsep.join(default_include_dirs),
                    'runtime_library_dirs': os.pathsep.join(default_runtime_dirs),
                    'rpath': '',
                    'src_dirs': os.pathsep.join(default_src_dirs),
                    'search_static_first': str(self.search_static_first),
                    'extra_compile_args': '', 'extra_link_args': ''}
        self.cp = ConfigParser(defaults)
        self.files = []
        self.files.extend(get_standard_file('.numpy-site.cfg'))
        self.files.extend(get_standard_file('site.cfg'))
        self.parse_config_files()

        if self.section is not None:
            self.search_static_first = self.cp.getboolean(
                self.section, 'search_static_first')
        assert isinstance(self.search_static_first, int)

    def parse_config_files(self):
        # Read every located config file; ensure our section exists so
        # later cp.get(self.section, ...) calls fall back to the defaults.
        self.cp.read(self.files)
        if not self.cp.has_section(self.section):
            if self.section is not None:
                self.cp.add_section(self.section)

    def calc_libraries_info(self):
        """Resolve the configured libraries against the configured library
        (and runtime-library) directories, returning a distutils info dict."""
        libs = self.get_libraries()
        dirs = self.get_lib_dirs()
        # The extensions use runtime_library_dirs; 'rpath' entries are simply
        # appended and treated the same way.
        r_dirs = self.get_runtime_lib_dirs()
        r_dirs.extend(self.get_runtime_lib_dirs(key='rpath'))
        info = {}
        for lib in libs:
            i = self.check_libs(dirs, [lib])
            if i is not None:
                dict_append(info, **i)
            else:
                log.info('Library %s was not found. Ignoring' % (lib))

            if r_dirs:
                i = self.check_libs(r_dirs, [lib])
                if i is not None:
                    # Re-key the found directories as runtime dirs: the
                    # library itself was already recorded above.
                    del i['libraries']
                    i['runtime_library_dirs'] = i.pop('library_dirs')
                    dict_append(info, **i)
                else:
                    log.info('Runtime library %s was not found. Ignoring' % (lib))

        return info

    def set_info(self, **info):
        # An empty info dict means "not found"; only augment non-empty ones.
        if info:
            lib_info = self.calc_libraries_info()
            dict_append(info, **lib_info)
            # Add extra compile/link flags from the config section.
            extra_info = self.calc_extra_info()
            dict_append(info, **extra_info)
        self.saved_results[self.__class__.__name__] = info

    def get_option_single(self, *options):
        """ Ensure that only one of `options` are found in the section

        Parameters
        ----------
        *options : list of str
           a list of options to be found in the section (``self.section``)

        Returns
        -------
        str :
            the option that is uniquely found in the section

        Raises
        ------
        AliasedOptionError :
            in case more than one of the options are found
        """
        found = [self.cp.has_option(self.section, opt) for opt in options]
        if sum(found) == 1:
            return options[found.index(True)]
        elif sum(found) == 0:
            # None present: the first option acts as the default key.
            return options[0]

        # More than one alias present: raise.  Under `python -OO` docstrings
        # are stripped, hence the None guard before .format().
        if AliasedOptionError.__doc__ is None:
            raise AliasedOptionError()
        raise AliasedOptionError(AliasedOptionError.__doc__.format(
            section=self.section, options='[{}]'.format(', '.join(options))))

    def has_info(self):
        # True once calc_info()/set_info() ran for this class.
        return self.__class__.__name__ in self.saved_results

    def calc_extra_info(self):
        """ Updates the information in the current information with
        respect to these flags:
          extra_compile_args
          extra_link_args
        """
        info = {}
        for key in ['extra_compile_args', 'extra_link_args']:
            # Get values; split using shell-like rules of the host platform.
            opt = self.cp.get(self.section, key)
            opt = _shell_utils.NativeParser.split(opt)
            if opt:
                tmp = {key: opt}
                dict_append(info, **tmp)
        return info

    def get_info(self, notfound_action=0):
        """ Return a dictionary with items that are compatible
            with numpy.distutils.setup keyword arguments.
        """
        flag = 0
        if not self.has_info():
            flag = 1
            log.info(self.__class__.__name__ + ':')
            if hasattr(self, 'calc_info'):
                self.calc_info()
            if notfound_action:
                if not self.has_info():
                    if notfound_action == 1:
                        warnings.warn(self.notfounderror.__doc__, stacklevel=2)
                    elif notfound_action == 2:
                        raise self.notfounderror(self.notfounderror.__doc__)
                    else:
                        raise ValueError(repr(notfound_action))

            if not self.has_info():
                log.info('  NOT AVAILABLE')
                self.set_info()
            else:
                log.info('  FOUND:')

        res = self.saved_results.get(self.__class__.__name__)
        if log.get_threshold() <= log.INFO and flag:
            for k, v in res.items():
                v = str(v)
                # Truncate long source/library lists in the log output.
                if k in ['sources', 'libraries'] and len(v) > 270:
                    v = v[:120] + '...\n...\n...' + v[-120:]
                log.info('    %s = %s', k, v)
            log.info('')

        return copy.deepcopy(res)

    def get_paths(self, section, key):
        """Resolve a path-list option: config value, then the directory env
        var (which overrides/prepends), then the section defaults; only
        existing directories are returned, deduplicated in order."""
        dirs = self.cp.get(section, key).split(os.pathsep)
        env_var = self.dir_env_var
        if env_var:
            if is_sequence(env_var):
                # Pick the first env var of the sequence that is actually
                # set; fall back to the last one.
                e0 = env_var[-1]
                for e in env_var:
                    if e in os.environ:
                        e0 = e
                        break
                if not env_var[0] == e0:
                    log.info('Setting %s=%s' % (env_var[0], e0))
                env_var = e0
        if env_var and env_var in os.environ:
            d = os.environ[env_var]
            if d == 'None':
                # Explicitly disabled via <ENVVAR>=None.
                log.info('Disabled %s: %s', self.__class__.__name__, '(%s is None)' % (env_var,))
                return []
            if os.path.isfile(d):
                # Env var points at a library file: search its directory and
                # adopt its basename as the library name (sans 'lib' prefix).
                dirs = [os.path.dirname(d)] + dirs
                l = getattr(self, '_lib_names', [])
                if len(l) == 1:
                    b = os.path.basename(d)
                    b = os.path.splitext(b)[0]
                    if b[:3] == 'lib':
                        log.info('Replacing _lib_names[0]==%r with %r' % (self._lib_names[0], b[3:]))
                        self._lib_names[0] = b[3:]
            else:
                # Env var holds one or more directories; also probe their
                # 'include'/'lib' subdirectories.
                ds = d.split(os.pathsep)
                ds2 = []
                for d in ds:
                    if os.path.isdir(d):
                        ds2.append(d)
                        for dd in ['include', 'lib']:
                            d1 = os.path.join(d, dd)
                            if os.path.isdir(d1):
                                ds2.append(d1)
                dirs = ds2 + dirs
        default_dirs = self.cp.get(self.section, key).split(os.pathsep)
        dirs.extend(default_dirs)
        ret = []
        for d in dirs:
            if len(d) > 0 and not os.path.isdir(d):
                warnings.warn('Specified path %s is invalid.' % d, stacklevel=2)
                continue

            if d not in ret:
                ret.append(d)

        log.debug('( %s = %s )', key, ':'.join(ret))
        return ret

    def get_lib_dirs(self, key='library_dirs'):
        return self.get_paths(self.section, key)

    def get_runtime_lib_dirs(self, key='runtime_library_dirs'):
        path = self.get_paths(self.section, key)
        if path == ['']:
            path = []
        return path

    def get_include_dirs(self, key='include_dirs'):
        return self.get_paths(self.section, key)

    def get_src_dirs(self, key='src_dirs'):
        return self.get_paths(self.section, key)

    def get_libs(self, key, default):
        """Return the comma-separated library list for `key`, falling back
        to `default` (string or list) when the option is absent."""
        try:
            libs = self.cp.get(self.section, key)
        except NoOptionError:
            if not default:
                return []
            if is_string(default):
                return [default]
            return default
        return [b for b in [a.strip() for a in libs.split(',')] if b]

    def get_libraries(self, key='libraries'):
        if hasattr(self, '_lib_names'):
            return self.get_libs(key, default=self._lib_names)
        else:
            return self.get_libs(key, '')

    def library_extensions(self):
        """Return library filename extensions to try, ordered by the
        search_static_first preference and the host platform."""
        c = customized_ccompiler()
        static_exts = []
        if c.compiler_type != 'msvc':
            # MSVC cannot link against plain .a archives.
            static_exts.append('.a')
        if sys.platform == 'win32':
            static_exts.append('.lib')  # .lib is used by MSVC and others
        if self.search_static_first:
            exts = static_exts + [so_ext]
        else:
            exts = [so_ext] + static_exts
        if sys.platform == 'cygwin':
            exts.append('.dll.a')
        if sys.platform == 'darwin':
            exts.append('.dylib')
        return exts

    def check_libs(self, lib_dirs, libs, opt_libs=[]):
        """If static or shared libraries are available then return
        their info dictionary.

        Checks for all libraries as shared libraries first, then
        static (or vice versa if self.search_static_first is True).
        """
        exts = self.library_extensions()
        info = None
        for ext in exts:
            # All requested libs must be found with a single extension.
            info = self._check_libs(lib_dirs, libs, opt_libs, [ext])
            if info is not None:
                break
        if not info:
            log.info('  libraries %s not found in %s', ','.join(libs),
                     lib_dirs)
        return info

    def check_libs2(self, lib_dirs, libs, opt_libs=[]):
        """If static or shared libraries are available then return
        their info dictionary.

        Checks each library for shared or static.
        """
        exts = self.library_extensions()
        # Unlike check_libs, each library may match any extension.
        info = self._check_libs(lib_dirs, libs, opt_libs, exts)
        if not info:
            log.info('  libraries %s not found in %s', ','.join(libs),
                     lib_dirs)

        return info

    def _find_lib(self, lib_dir, lib, exts):
        """Return the (possibly adjusted) library name if a file for `lib`
        exists in `lib_dir` with one of `exts`, else False."""
        assert is_string(lib_dir)
        # Under windows, also try without the 'lib' prefix.
        if sys.platform == 'win32':
            lib_prefixes = ['', 'lib']
        else:
            lib_prefixes = ['lib']
        # For each extension, see if we can find a file for the library.
        for ext in exts:
            for prefix in lib_prefixes:
                p = self.combine_paths(lib_dir, prefix + lib + ext)
                if p:
                    break
            if p:
                assert len(p) == 1
                # The name adjustments below keep the returned name usable
                # as a `-l`/linker argument for the matched file kind.
                if ext == '.dll.a':
                    lib += '.dll'
                if ext == '.lib':
                    lib = prefix + lib
                return lib

        return False

    def _find_libs(self, lib_dirs, libs, exts):
        # Return (directories, names) for every library found; each library
        # keeps the first directory in which it appears.
        found_dirs, found_libs = [], []
        for lib in libs:
            for lib_dir in lib_dirs:
                found_lib = self._find_lib(lib_dir, lib, exts)
                if found_lib:
                    found_libs.append(found_lib)
                    if lib_dir not in found_dirs:
                        found_dirs.append(lib_dir)
                    break
        return found_dirs, found_libs

    def _check_libs(self, lib_dirs, libs, opt_libs, exts):
        """Find mandatory and optional libs in expected paths.

        Missing optional libraries are silently forgotten; missing
        mandatory ones make the whole lookup fail (returns None).
        """
        if not is_sequence(lib_dirs):
            lib_dirs = [lib_dirs]
        # First, try to find the mandatory libraries.
        found_dirs, found_libs = self._find_libs(lib_dirs, libs, exts)
        if len(found_libs) > 0 and len(found_libs) == len(libs):
            # Now, check for optional libraries.
            opt_found_dirs, opt_found_libs = self._find_libs(lib_dirs, opt_libs, exts)
            found_libs.extend(opt_found_libs)
            for lib_dir in opt_found_dirs:
                if lib_dir not in found_dirs:
                    found_dirs.append(lib_dir)
            info = {'libraries': found_libs, 'library_dirs': found_dirs}
            return info
        else:
            return None

    def combine_paths(self, *args):
        """Return a list of existing paths composed by all combinations
        of items from the arguments.
        """
        # Delegates to the module-level combine_paths() helper.
        return combine_paths(*args)
class fft_opt_info(system_info):
    """Aggregate FFT info: the best available FFTW flavour, augmented with
    djbfft when that is present as well."""

    def calc_info(self):
        info = {}
        fftw_info = get_info('fftw3') or get_info('fftw2') or get_info('dfftw')
        djbfft_info = get_info('djbfft')
        # Merge whichever components were found into a single info dict.
        for component in (fftw_info, djbfft_info):
            if component:
                dict_append(info, **component)
        self.set_info(**info)
        return
class fftw_info(system_info):
    """Base class for the FFTW family: probes each candidate version listed
    in `ver_info` (libraries + headers) and keeps the first full match."""
    section = 'fftw'
    dir_env_var = 'FFTW'
    notfounderror = FFTWNotFoundError
    # Candidate versions, probed in order; fftw3 is preferred over fftw2.
    ver_info = [{'name':'fftw3',
                 'libs':['fftw3'],
                 'includes':['fftw3.h'],
                 'macros':[('SCIPY_FFTW3_H', None)]},
                {'name':'fftw2',
                 'libs':['rfftw', 'fftw'],
                 'includes':['fftw.h', 'rfftw.h'],
                 'macros':[('SCIPY_FFTW_H', None)]}]

    def calc_ver_info(self, ver_param):
        """Returns True on successful version detection, else False"""
        lib_dirs = self.get_lib_dirs()
        incl_dirs = self.get_include_dirs()

        opt = self.get_option_single(self.section + '_libs', 'libraries')
        libs = self.get_libs(opt, ver_param['libs'])
        info = self.check_libs(lib_dirs, libs)
        if info is not None:
            flag = 0
            # All headers of the version must be found in one include dir.
            for d in incl_dirs:
                if len(self.combine_paths(d, ver_param['includes'])) \
                   == len(ver_param['includes']):
                    dict_append(info, include_dirs=[d])
                    flag = 1
                    break
            if flag:
                dict_append(info, define_macros=ver_param['macros'])
            else:
                info = None
        if info is not None:
            self.set_info(**info)
            return True
        else:
            log.info('  %s not found' % (ver_param['name']))
            return False

    def calc_info(self):
        # Stop at the first version that probes successfully.
        for i in self.ver_info:
            if self.calc_ver_info(i):
                break
# Variant subclasses of fftw_info: each narrows `ver_info` to one specific
# FFTW flavour (version 2/3, double/single precision, threaded).
class fftw2_info(fftw_info):
    section = 'fftw'
    dir_env_var = 'FFTW'
    notfounderror = FFTWNotFoundError
    ver_info = [{'name':'fftw2',
                 'libs':['rfftw', 'fftw'],
                 'includes':['fftw.h', 'rfftw.h'],
                 'macros':[('SCIPY_FFTW_H', None)]}
                ]


class fftw3_info(fftw_info):
    section = 'fftw3'
    dir_env_var = 'FFTW3'
    notfounderror = FFTWNotFoundError
    ver_info = [{'name':'fftw3',
                 'libs':['fftw3'],
                 'includes':['fftw3.h'],
                 'macros':[('SCIPY_FFTW3_H', None)]},
                ]


class fftw3_armpl_info(fftw_info):
    # FFTW3-compatible interface provided by the ARM Performance Libraries.
    section = 'fftw3'
    dir_env_var = 'ARMPL_DIR'
    notfounderror = FFTWNotFoundError
    ver_info = [{'name': 'fftw3',
                 'libs': ['armpl_lp64_mp'],
                 'includes': ['fftw3.h'],
                 'macros': [('SCIPY_FFTW3_H', None)]}]


class dfftw_info(fftw_info):
    # Double-precision FFTW2 build.
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'dfftw',
                 'libs':['drfftw', 'dfftw'],
                 'includes':['dfftw.h', 'drfftw.h'],
                 'macros':[('SCIPY_DFFTW_H', None)]}]


# NOTE(review): this class (and several below) is a verbatim duplicate of
# the definition directly above — almost certainly a paste artifact in this
# copy.  Rebinding the same name is harmless, so it is kept as-is.
class dfftw_info(fftw_info):
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'dfftw',
                 'libs':['drfftw', 'dfftw'],
                 'includes':['dfftw.h', 'drfftw.h'],
                 'macros':[('SCIPY_DFFTW_H', None)]}]


class sfftw_info(fftw_info):
    # Single-precision FFTW2 build.
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'sfftw',
                 'libs':['srfftw', 'sfftw'],
                 'includes':['sfftw.h', 'srfftw.h'],
                 'macros':[('SCIPY_SFFTW_H', None)]}]


# NOTE(review): duplicate of sfftw_info above.
class sfftw_info(fftw_info):
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'sfftw',
                 'libs':['srfftw', 'sfftw'],
                 'includes':['sfftw.h', 'srfftw.h'],
                 'macros':[('SCIPY_SFFTW_H', None)]}]


class fftw_threads_info(fftw_info):
    # Threaded FFTW2 libraries.
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'fftw threads',
                 'libs':['rfftw_threads', 'fftw_threads'],
                 'includes':['fftw_threads.h', 'rfftw_threads.h'],
                 'macros':[('SCIPY_FFTW_THREADS_H', None)]}]


# NOTE(review): duplicate of fftw_threads_info above.
class fftw_threads_info(fftw_info):
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'fftw threads',
                 'libs':['rfftw_threads', 'fftw_threads'],
                 'includes':['fftw_threads.h', 'rfftw_threads.h'],
                 'macros':[('SCIPY_FFTW_THREADS_H', None)]}]


class dfftw_threads_info(fftw_info):
    # Threaded, double-precision FFTW2 libraries.
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'dfftw threads',
                 'libs':['drfftw_threads', 'dfftw_threads'],
                 'includes':['dfftw_threads.h', 'drfftw_threads.h'],
                 'macros':[('SCIPY_DFFTW_THREADS_H', None)]}]


# NOTE(review): duplicate of dfftw_threads_info above.
class dfftw_threads_info(fftw_info):
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'dfftw threads',
                 'libs':['drfftw_threads', 'dfftw_threads'],
                 'includes':['dfftw_threads.h', 'drfftw_threads.h'],
                 'macros':[('SCIPY_DFFTW_THREADS_H', None)]}]


class sfftw_threads_info(fftw_info):
    # Threaded, single-precision FFTW2 libraries.
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'sfftw threads',
                 'libs':['srfftw_threads', 'sfftw_threads'],
                 'includes':['sfftw_threads.h', 'srfftw_threads.h'],
                 'macros':[('SCIPY_SFFTW_THREADS_H', None)]}]


# NOTE(review): duplicate of sfftw_threads_info above.
class sfftw_threads_info(fftw_info):
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'sfftw threads',
                 'libs':['srfftw_threads', 'sfftw_threads'],
                 'includes':['sfftw_threads.h', 'srfftw_threads.h'],
                 'macros':[('SCIPY_SFFTW_THREADS_H', None)]}]
class djbfft_info(system_info):
    """Probe for the djbfft library (static archive or shared library) and
    its headers."""
    section = 'djbfft'
    dir_env_var = 'DJBFFT'
    notfounderror = DJBFFTNotFoundError

    def get_paths(self, section, key):
        # Also search a 'djbfft' subdirectory of each configured path.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend(self.combine_paths(d, ['djbfft']) + [d])
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        incl_dirs = self.get_include_dirs()
        info = None
        for d in lib_dirs:
            # djbfft ships as a bare 'djbfft.a' archive by default; prefer
            # linking it as an extra object over a -l library.
            p = self.combine_paths(d, ['djbfft.a'])
            if p:
                info = {'extra_objects': p}
                break
            p = self.combine_paths(d, ['libdjbfft.a', 'libdjbfft' + so_ext])
            if p:
                info = {'libraries': ['djbfft'], 'library_dirs': [d]}
                break
        if info is None:
            return
        # Both headers must be present in the same include directory.
        for d in incl_dirs:
            if len(self.combine_paths(d, ['fftc8.h', 'fftfreq.h'])) == 2:
                dict_append(info, include_dirs=[d],
                            define_macros=[('SCIPY_DJBFFT_H', None)])
                self.set_info(**info)
                return
        return
class mkl_info(system_info):
    """Probe for Intel MKL via MKLROOT, LD_LIBRARY_PATH or /etc/ld.so.conf."""
    section = 'mkl'
    dir_env_var = 'MKLROOT'
    _lib_mkl = ['mkl_rt']  # single-dynamic-library interface

    def get_mkl_rootdir(self):
        """Return the MKL install root, or None if nothing is found."""
        mklroot = os.environ.get('MKLROOT', None)
        if mklroot is not None:
            return mklroot
        # Fall back to scanning the dynamic-linker search paths.
        paths = os.environ.get('LD_LIBRARY_PATH', '').split(os.pathsep)
        ld_so_conf = '/etc/ld.so.conf'
        if os.path.isfile(ld_so_conf):
            with open(ld_so_conf) as f:
                for d in f:
                    d = d.strip()
                    if d:
                        paths.append(d)
        # NOTE(review): intel_mkl_dirs is computed but never used below;
        # kept byte-for-byte from the original.
        intel_mkl_dirs = []
        for path in paths:
            path_atoms = path.split(os.sep)
            for m in path_atoms:
                if m.startswith('mkl'):
                    d = os.sep.join(path_atoms[:path_atoms.index(m) + 2])
                    intel_mkl_dirs.append(d)
                    break
        for d in paths:
            # A root qualifies when it contains an mkl*/lib subtree.
            dirs = glob(os.path.join(d, 'mkl', '*'))
            dirs += glob(os.path.join(d, 'mkl*'))
            for sub_dir in dirs:
                if os.path.isdir(os.path.join(sub_dir, 'lib')):
                    return sub_dir
        return None

    def __init__(self):
        mklroot = self.get_mkl_rootdir()
        if mklroot is None:
            system_info.__init__(self)
        else:
            from .cpuinfo import cpu
            # Select the lib subdirectory matching the CPU architecture.
            if cpu.is_Itanium():
                plt = '64'
            elif cpu.is_Intel() and cpu.is_64bit():
                plt = 'intel64'
            else:
                plt = '32'
            system_info.__init__(
                self,
                default_lib_dirs=[os.path.join(mklroot, 'lib', plt)],
                default_include_dirs=[os.path.join(mklroot, 'include')])

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        incl_dirs = self.get_include_dirs()
        opt = self.get_option_single('mkl_libs', 'libraries')
        mkl_libs = self.get_libs(opt, self._lib_mkl)
        info = self.check_libs2(lib_dirs, mkl_libs)
        if info is None:
            return
        dict_append(info,
                    define_macros=[('SCIPY_MKL_H', None),
                                   ('HAVE_CBLAS', None)],
                    include_dirs=incl_dirs)
        if sys.platform == 'win32':
            pass  # win32 has no pthread library
        else:
            dict_append(info, libraries=['pthread'])
        self.set_info(**info)
# MKL provides both LAPACK and BLAS from the same install, so these aliases
# reuse mkl_info unchanged; the distinct class names matter only for the
# saved_results cache and the section-name table in the module docstring.
class lapack_mkl_info(mkl_info):
    pass


class blas_mkl_info(mkl_info):
    pass
class ssl2_info(system_info):
    """Probe for Fujitsu SSL2 BLAS/LAPACK (found via the TCSDS_PATH env var
    of Fujitsu's Technical Computing Suite)."""
    section = 'ssl2'
    dir_env_var = 'SSL2_DIR'
    # Default library name — TODO confirm this is the intended SSL2 variant.
    _lib_ssl2 = ['fjlapackexsve']

    def get_tcsds_rootdir(self):
        # Root of the Fujitsu TCS installation, if the env var is set.
        tcsdsroot = os.environ.get('TCSDS_PATH', None)
        if tcsdsroot is not None:
            return tcsdsroot
        return None

    def __init__(self):
        tcsdsroot = self.get_tcsds_rootdir()
        if tcsdsroot is None:
            system_info.__init__(self)
        else:
            system_info.__init__(
                self,
                default_lib_dirs=[os.path.join(tcsdsroot, 'lib64')],
                default_include_dirs=[os.path.join(tcsdsroot,
                                                   'clang-comp/include')])

    def calc_info(self):
        tcsdsroot = self.get_tcsds_rootdir()
        lib_dirs = self.get_lib_dirs()
        if lib_dirs is None:
            # NOTE(review): get_lib_dirs() always returns a list, so this
            # branch looks unreachable; kept as in the original.
            lib_dirs = os.path.join(tcsdsroot, 'lib64')
        incl_dirs = self.get_include_dirs()
        if incl_dirs is None:
            incl_dirs = os.path.join(tcsdsroot, 'clang-comp/include')
        ssl2_libs = self.get_libs('ssl2_libs', self._lib_ssl2)
        info = self.check_libs2(lib_dirs, ssl2_libs)
        if info is None:
            return
        dict_append(info,
                    define_macros=[('HAVE_CBLAS', None),
                                   ('HAVE_SSL2', 1)],
                    include_dirs=incl_dirs,)
        self.set_info(**info)
class lapack_ssl2_info(ssl2_info):
    """LAPACK flavour of the SSL2 probe; same detection as ssl2_info."""
    pass
class blas_ssl2_info(ssl2_info):
    """BLAS flavour of the SSL2 probe; same detection as ssl2_info."""
    pass
class armpl_info(system_info):
    """Arm Performance Libraries (BLAS/LAPACK) probe; env var ARMPL_DIR."""
    section = 'armpl'
    dir_env_var = 'ARMPL_DIR'
    # LP64, OpenMP-threaded variant.
    _lib_armpl = ['armpl_lp64_mp']

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        incl_dirs = self.get_include_dirs()
        armpl_libs = self.get_libs('armpl_libs', self._lib_armpl)
        info = self.check_libs2(lib_dirs, armpl_libs)
        if info is None:
            return
        dict_append(info,
                    # NOTE(review): SCIPY_MKL_H here mirrors the MKL code
                    # path (MKL-compatible header handling) -- confirm
                    # intentional for ArmPL.
                    define_macros=[('SCIPY_MKL_H', None),
                                   ('HAVE_CBLAS', None)],
                    include_dirs=incl_dirs)
        self.set_info(**info)
class lapack_armpl_info(armpl_info):
    """LAPACK flavour of the ArmPL probe; same detection as armpl_info."""
    pass
class blas_armpl_info(armpl_info):
    """BLAS flavour of the ArmPL probe; same detection as armpl_info."""
    pass
class atlas_info(system_info):
    """ATLAS (optimized BLAS/LAPACK) probe; env var ATLAS."""
    section = 'atlas'
    dir_env_var = 'ATLAS'
    _lib_names = ['f77blas', 'cblas']
    # FreeBSD packages ship the re-entrant variants under different names.
    if sys.platform[:7] == 'freebsd':
        _lib_atlas = ['atlas_r']
        _lib_lapack = ['alapack_r']
    else:
        _lib_atlas = ['atlas']
        _lib_lapack = ['lapack']
    notfounderror = AtlasNotFoundError

    def get_paths(self, section, key):
        # In addition to each configured directory, search common ATLAS
        # build subdirectories (atlas*, ATLAS*, sse, 3dnow, sse2) under it.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend(self.combine_paths(d, ['atlas*', 'ATLAS*',
                                               'sse', '3dnow', 'sse2']) + [d])
        return [d for d in dirs if os.path.isdir(d)]
class atlas_blas_info(atlas_info):
    """BLAS-only subset of an ATLAS install (no LAPACK required)."""
    _lib_names = ['f77blas', 'cblas']

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        info = {}
        opt = self.get_option_single('atlas_libs', 'libraries')
        atlas_libs = self.get_libs(opt, self._lib_names + self._lib_atlas)
        atlas = self.check_libs2(lib_dirs, atlas_libs, [])
        if atlas is None:
            return
        include_dirs = self.get_include_dirs()
        # Prefer the directory that actually contains cblas.h, if any.
        h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
        h = h[0]
        if h:
            h = os.path.dirname(h)
            dict_append(info, include_dirs=[h])
        info['language'] = 'c'
        info['define_macros'] = [('HAVE_CBLAS', None)]
        # Ask the installed ATLAS for its version; merges extra macros
        # (e.g. ATLAS_INFO / NO_ATLAS_INFO) into the result.
        atlas_version, atlas_extra_info = get_atlas_version(**atlas)
        dict_append(atlas, **atlas_extra_info)
        dict_append(info, **atlas)
        self.set_info(**info)
        return
notfounderror = LapackNotFoundError
def calc_info(self):
    # Probe for a plain Fortran LAPACK library via the 'lapack_libs' /
    # 'libraries' option.
    # NOTE(review): the enclosing class statement is not visible in this
    # chunk (only its calc_info body and a preceding notfounderror line);
    # judging by the option names it is the generic lapack_info -- confirm.
    lib_dirs = self.get_lib_dirs()
    opt = self.get_option_single('lapack_libs', 'libraries')
    lapack_libs = self.get_libs(opt, self._lib_names)
    info = self.check_libs(lib_dirs, lapack_libs, [])
    if info is None:
        return
    info['language'] = 'f77'
    self.set_info(**info)
class lapack_src_info(system_info):
    """Locate a LAPACK Fortran source tree; env var LAPACK_SRC."""
    section = 'lapack_src'
    dir_env_var = 'LAPACK_SRC'
    notfounderror = LapackSrcNotFoundError

    def get_paths(self, section, key):
        # For every configured base directory, also consider its
        # LAPACK*/SRC and SRC subdirectories; keep only existing dirs.
        candidates = []
        for base in system_info.get_paths(self, section, key):
            candidates.append(base)
            candidates.extend(self.combine_paths(base, ['LAPACK*/SRC', 'SRC']))
        return [path for path in candidates if os.path.isdir(path)]
atlas_version_c_text = r'''
/* This file is generated from numpy/distutils/system_info.py */
void ATL_buildinfo(void);
int main(void) {
ATL_buildinfo();
return 0;
}
'''
_cached_atlas_version = {}
def get_atlas_version(**config):
    # Determine the version of an installed ATLAS by linking and running a
    # tiny program that calls ATL_buildinfo(); results are memoized in
    # _cached_atlas_version.  Returns (version_string, info_dict) where
    # info_dict carries define_macros such as ATLAS_INFO / NO_ATLAS_INFO.
    libraries = config.get('libraries', [])
    library_dirs = config.get('library_dirs', [])
    key = (tuple(libraries), tuple(library_dirs))
    if key in _cached_atlas_version:
        return _cached_atlas_version[key]
    c = cmd_config(Distribution())
    atlas_version = None
    info = {}
    try:
        s, o = c.get_output(atlas_version_c_text,
                            libraries=libraries, library_dirs=library_dirs,
                            )
        # ATLAS built with gfortran needs -lgfortran at link time; retry
        # with it and warn so extension builds pick the right fcompiler.
        if s and re.search(r'undefined reference to `_gfortran', o, re.M):
            s, o = c.get_output(atlas_version_c_text,
                                libraries=libraries + ['gfortran'],
                                library_dirs=library_dirs,
                                )
            if not s:
                warnings.warn(textwrap.dedent("""
                    *****************************************************
                    Linkage with ATLAS requires gfortran. Use
                      python setup.py config_fc --fcompiler=gnu95 ...
                    when building extension libraries that use ATLAS.
                    Make sure that -lgfortran is used for C++ extensions.
                    *****************************************************
                    """), stacklevel=2)
                dict_append(info, language='f90',
                            define_macros=[('ATLAS_REQUIRES_GFORTRAN', None)])
    except Exception:  # failed to run the probe (e.g. cross-compiling)
        # Fall back: try to read the version out of the directory names.
        for o in library_dirs:
            m = re.search(r'ATLAS_(?P<version>\d+[.]\d+[.]\d+)_', o)
            if m:
                atlas_version = m.group('version')
            if atlas_version is not None:
                break
        # Last resort: the ATLAS_VERSION environment variable.
        if atlas_version is None:
            atlas_version = os.environ.get('ATLAS_VERSION', None)
        if atlas_version:
            dict_append(info, define_macros=[(
                'ATLAS_INFO', _c_string_literal(atlas_version))
            ])
        else:
            dict_append(info, define_macros=[('NO_ATLAS_INFO', -1)])
        return atlas_version or '?.?.?', info
    if not s:
        # Probe ran successfully: parse the version from its output.
        m = re.search(r'ATLAS version (?P<version>\d+[.]\d+[.]\d+)', o)
        if m:
            atlas_version = m.group('version')
    if atlas_version is None:
        if re.search(r'undefined symbol: ATL_buildinfo', o, re.M):
            # Pre-3.3.6 releases lack ATL_buildinfo entirely.
            atlas_version = '3.2.1_pre3.3.6'
        else:
            log.info('Status: %d', s)
            log.info('Output: %s', o)
    elif atlas_version == '3.2.1_pre3.3.6':
        dict_append(info, define_macros=[('NO_ATLAS_INFO', -2)])
    else:
        dict_append(info, define_macros=[(
            'ATLAS_INFO', _c_string_literal(atlas_version))
        ])
    result = _cached_atlas_version[key] = atlas_version, info
    return result
class lapack_opt_info(system_info):
    """Aggregate LAPACK probe: try each backend in ``lapack_order`` (user
    overridable via NPY_LAPACK_ORDER) and register the first one found.

    BUGFIX: in this chunk several method ``def`` headers and two guard
    ``if`` lines had been dropped, leaving orphaned method bodies.  They are
    restored here, mirroring the structure of the intact blas_opt_info
    class below (same _calc_info_* / _get_info_* / calc_info layout).
    """
    notfounderror = LapackNotFoundError
    lapack_order = ['armpl', 'mkl', 'ssl2', 'openblas', 'flame',
                    'accelerate', 'atlas', 'lapack']
    order_env_var_name = 'NPY_LAPACK_ORDER'

    def _calc_info_armpl(self):
        info = get_info('lapack_armpl')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_mkl(self):
        info = get_info('lapack_mkl')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_ssl2(self):
        info = get_info('lapack_ssl2')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_openblas(self):
        # Prefer OpenBLAS with built-in LAPACK; fall back to OpenBLAS plus
        # an external reference LAPACK (clapack variant).
        info = get_info('openblas_lapack')
        if info:
            self.set_info(**info)
            return True
        info = get_info('openblas_clapack')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_flame(self):
        info = get_info('flame')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_atlas(self):
        # Try the threaded/newer ATLAS layouts first.
        info = get_info('atlas_3_10_threads')
        if not info:
            info = get_info('atlas_3_10')
        if not info:
            info = get_info('atlas_threads')
        if not info:
            info = get_info('atlas')
        if info:
            # When ATLAS lacks a complete LAPACK, merge in a standalone one.
            l = info.get('define_macros', [])
            if ('ATLAS_WITH_LAPACK_ATLAS', None) in l \
               or ('ATLAS_WITHOUT_LAPACK', None) in l:
                lapack_info = self._get_info_lapack()
                if not lapack_info:
                    return False
                dict_append(info, **lapack_info)
            self.set_info(**info)
            return True
        return False

    def _calc_info_accelerate(self):
        info = get_info('accelerate')
        if info:
            self.set_info(**info)
            return True
        return False

    def _get_info_blas(self):
        # Optimized BLAS if available, else fall back to BLAS sources.
        info = get_info('blas_opt')
        if not info:
            warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=3)
            info_src = get_info('blas_src')
            if not info_src:
                warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=3)
                return {}
            dict_append(info, libraries=[('fblas_src', info_src)])
        return info

    def _get_info_lapack(self):
        # Installed LAPACK if available, else fall back to LAPACK sources.
        info = get_info('lapack')
        if not info:
            warnings.warn(LapackNotFoundError.__doc__ or '', stacklevel=3)
            info_src = get_info('lapack_src')
            if not info_src:
                warnings.warn(LapackSrcNotFoundError.__doc__ or '', stacklevel=3)
                return {}
            dict_append(info, libraries=[('flapack_src', info_src)])
        return info

    def _calc_info_lapack(self):
        # Generic fallback: plain LAPACK plus whatever BLAS is available.
        info = self._get_info_lapack()
        if info:
            info_blas = self._get_info_blas()
            dict_append(info, **info_blas)
            dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
            self.set_info(**info)
            return True
        return False

    def _calc_info_from_envvar(self):
        # Bypass autodetection entirely: link flags come straight from the
        # NPY_LAPACK_LIBS environment variable.
        info = {}
        info['language'] = 'f77'
        info['libraries'] = []
        info['include_dirs'] = []
        info['define_macros'] = []
        info['extra_link_args'] = os.environ['NPY_LAPACK_LIBS'].split()
        self.set_info(**info)
        return True

    def _calc_info(self, name):
        # Dispatch to the matching _calc_info_<name>() probe.
        return getattr(self, '_calc_info_{}'.format(name))()

    def calc_info(self):
        lapack_order, unknown_order = _parse_env_order(self.lapack_order, self.order_env_var_name)
        if len(unknown_order) > 0:
            raise ValueError("lapack_opt_info user defined "
                             "LAPACK order has unacceptable "
                             "values: {}".format(unknown_order))
        if 'NPY_LAPACK_LIBS' in os.environ:
            # User supplied link flags directly; skip all probing.
            self._calc_info_from_envvar()
            return
        for lapack in lapack_order:
            if self._calc_info(lapack):
                return
        if 'lapack' not in lapack_order:
            # The user may request *not* to use any library; still warn so
            # missing packages are signalled (mirrors blas_opt_info).
            warnings.warn(LapackNotFoundError.__doc__ or '', stacklevel=2)
            warnings.warn(LapackSrcNotFoundError.__doc__ or '', stacklevel=2)
class _ilp64_opt_info_mixin:
symbol_suffix = None
symbol_prefix = None
def _check_info(self, info):
macros = dict(info.get('define_macros', []))
prefix = macros.get('BLAS_SYMBOL_PREFIX', '')
suffix = macros.get('BLAS_SYMBOL_SUFFIX', '')
if self.symbol_prefix not in (None, prefix):
return False
if self.symbol_suffix not in (None, suffix):
return False
return bool(info)
class lapack_ilp64_opt_info(lapack_opt_info, _ilp64_opt_info_mixin):
    """Aggregate probe for an ILP64 (64-bit integer) LAPACK."""
    notfounderror = LapackILP64NotFoundError
    lapack_order = ['openblas64_', 'openblas_ilp64', 'accelerate']
    order_env_var_name = 'NPY_LAPACK_ILP64_ORDER'

    def _calc_info(self, name):
        # BUGFIX: route diagnostics through distutils' log instead of bare
        # print() calls, which polluted stdout of every build.
        log.info('lapack_ilp64_opt_info._calc_info(name=%s)', name)
        info = get_info(name + '_lapack')
        if self._check_info(info):
            self.set_info(**info)
            return True
        log.info('%s_lapack does not exist', name)
        return False
class lapack_ilp64_plain_opt_info(lapack_ilp64_opt_info):
    """ILP64 LAPACK with no symbol prefix/suffix (plain names required)."""
    symbol_prefix = ''
    symbol_suffix = ''
class lapack64__opt_info(lapack_ilp64_opt_info):
    """ILP64 LAPACK whose symbols carry the '64_' suffix."""
    symbol_prefix = ''
    symbol_suffix = '64_'
class blas_opt_info(system_info):
    """Aggregate BLAS probe: try each backend in ``blas_order`` (user
    overridable via NPY_BLAS_ORDER) and register the first one found."""
    notfounderror = BlasNotFoundError
    blas_order = ['armpl', 'mkl', 'ssl2', 'blis', 'openblas',
                  'accelerate', 'atlas', 'blas']
    order_env_var_name = 'NPY_BLAS_ORDER'

    def _calc_info_armpl(self):
        info = get_info('blas_armpl')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_mkl(self):
        info = get_info('blas_mkl')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_ssl2(self):
        info = get_info('blas_ssl2')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_blis(self):
        info = get_info('blis')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_openblas(self):
        info = get_info('openblas')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_atlas(self):
        # Try the threaded/newer ATLAS BLAS layouts first.
        info = get_info('atlas_3_10_blas_threads')
        if not info:
            info = get_info('atlas_3_10_blas')
        if not info:
            info = get_info('atlas_blas_threads')
        if not info:
            info = get_info('atlas_blas')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_accelerate(self):
        info = get_info('accelerate')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_blas(self):
        # Generic fallback: plain (unoptimized) BLAS, or BLAS sources.
        warnings.warn(BlasOptNotFoundError.__doc__ or '', stacklevel=3)
        info = {}
        dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
        blas = get_info('blas')
        if blas:
            dict_append(info, **blas)
        else:
            warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=3)
            blas_src = get_info('blas_src')
            if not blas_src:
                warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=3)
                return False
            dict_append(info, libraries=[('fblas_src', blas_src)])
        self.set_info(**info)
        return True

    def _calc_info_from_envvar(self):
        # Bypass autodetection: linker flags come from NPY_BLAS_LIBS, with
        # optional CBLAS from NPY_CBLAS_LIBS.
        info = {}
        info['language'] = 'f77'
        info['libraries'] = []
        info['include_dirs'] = []
        info['define_macros'] = []
        info['extra_link_args'] = os.environ['NPY_BLAS_LIBS'].split()
        if 'NPY_CBLAS_LIBS' in os.environ:
            info['define_macros'].append(('HAVE_CBLAS', None))
            info['extra_link_args'].extend(
                os.environ['NPY_CBLAS_LIBS'].split())
        self.set_info(**info)
        return True

    def _calc_info(self, name):
        # Dispatch to the matching _calc_info_<name>() probe.
        return getattr(self, '_calc_info_{}'.format(name))()

    def calc_info(self):
        blas_order, unknown_order = _parse_env_order(self.blas_order, self.order_env_var_name)
        if len(unknown_order) > 0:
            raise ValueError("blas_opt_info user defined BLAS order has unacceptable values: {}".format(unknown_order))
        if 'NPY_BLAS_LIBS' in os.environ:
            # User supplied link flags directly; skip all probing.
            self._calc_info_from_envvar()
            return
        for blas in blas_order:
            if self._calc_info(blas):
                return
        if 'blas' not in blas_order:
            # The user may request *not* to use any library; still warn so
            # missing packages are signalled.
            warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=2)
            warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=2)
class blas_ilp64_opt_info(blas_opt_info, _ilp64_opt_info_mixin):
    """Aggregate probe for an ILP64 (64-bit integer) BLAS."""
    notfounderror = BlasILP64NotFoundError
    blas_order = ['openblas64_', 'openblas_ilp64', 'accelerate']
    order_env_var_name = 'NPY_BLAS_ILP64_ORDER'

    def _calc_info(self, name):
        info = get_info(name)
        # Only accept candidates whose symbol decoration matches.
        if self._check_info(info):
            self.set_info(**info)
            return True
        return False
class blas_ilp64_plain_opt_info(blas_ilp64_opt_info):
    """ILP64 BLAS with no symbol prefix/suffix (plain names required)."""
    symbol_prefix = ''
    symbol_suffix = ''
class blas64__opt_info(blas_ilp64_opt_info):
    """ILP64 BLAS whose symbols carry the '64_' suffix."""
    symbol_prefix = ''
    symbol_suffix = '64_'
class cblas_info(system_info):
    """Standalone CBLAS probe; only used as a helper by blas_info on
    Windows (no default library names of its own)."""
    section = 'cblas'
    dir_env_var = 'CBLAS'
    # No default as it's used only in blas_info.
    _lib_names = []
    notfounderror = BlasNotFoundError
class blas_info(system_info):
    """Plain (reference) BLAS probe; env var BLAS."""
    section = 'blas'
    dir_env_var = 'BLAS'
    _lib_names = ['blas']
    notfounderror = BlasNotFoundError

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        opt = self.get_option_single('blas_libs', 'libraries')
        blas_libs = self.get_libs(opt, self._lib_names)
        info = self.check_libs(lib_dirs, blas_libs, [])
        if info is None:
            return
        else:
            info['include_dirs'] = self.get_include_dirs()
        if platform.system() == 'Windows':
            # The Windows branch avoids get_cblas_libs(): it uses the same
            # compiler that built Python, and MSVC is often not installed.
            info['language'] = 'f77'
            # If cblas is configured explicitly, use it.
            cblas_info_obj = cblas_info()
            cblas_opt = cblas_info_obj.get_option_single('cblas_libs', 'libraries')
            cblas_libs = cblas_info_obj.get_libs(cblas_opt, None)
            if cblas_libs:
                info['libraries'] = cblas_libs + blas_libs
                info['define_macros'] = [('HAVE_CBLAS', None)]
        else:
            # Elsewhere, detect a usable CBLAS interface by test-linking.
            lib = self.get_cblas_libs(info)
            if lib is not None:
                info['language'] = 'c'
                info['libraries'] = lib
                info['define_macros'] = [('HAVE_CBLAS', None)]
        self.set_info(**info)

    def get_cblas_libs(self, info):
        """ Check whether we can link with CBLAS interface

        This method will search through several combinations of libraries
        to check whether CBLAS is present:

        1. Libraries in ``info['libraries']``, as is
        2. As 1. but also explicitly adding ``'cblas'`` as a library
        3. As 1. but also explicitly adding ``'blas'`` as a library
        4. Check only library ``'cblas'``
        5. Check only library ``'blas'``

        Parameters
        ----------
        info : dict
           system information dictionary for compilation and linking

        Returns
        -------
        libraries : list of str or None
            a list of libraries that enables the use of CBLAS interface.
            Returns None if not found or a compilation error occurs.

            Since 1.17 returns a list.
        """
        # Compile a tiny cblas_ddot() caller, then try to link it against
        # each candidate library combination in turn.
        c = customized_ccompiler()
        tmpdir = tempfile.mkdtemp()
        s = textwrap.dedent("""\
            #include <cblas.h>
            int main(int argc, const char *argv[])
            {
                double a[4] = {1,2,3,4};
                double b[4] = {5,6,7,8};
                return cblas_ddot(4, a, 1, b, 1) > 10;
            }""")
        src = os.path.join(tmpdir, 'source.c')
        try:
            with open(src, 'w') as f:
                f.write(s)
            try:
                obj = c.compile([src], output_dir=tmpdir,
                                include_dirs=self.get_include_dirs())
            except (distutils.ccompiler.CompileError, distutils.ccompiler.LinkError):
                return None
            for libs in [info['libraries'], ['cblas'] + info['libraries'],
                         ['blas'] + info['libraries'], ['cblas'], ['blas']]:
                try:
                    c.link_executable(obj, os.path.join(tmpdir, "a.out"),
                                      libraries=libs,
                                      library_dirs=info['library_dirs'],
                                      extra_postargs=info.get('extra_link_args', []))
                    return libs
                except distutils.ccompiler.LinkError:
                    pass
        finally:
            # Always clean up the scratch build directory.
            shutil.rmtree(tmpdir)
        return None
class openblas_info(blas_info):
    """OpenBLAS probe; env var OPENBLAS.  Verifies required symbols by
    test-linking, and supports gfortran-built OpenBLAS under MSVC."""
    section = 'openblas'
    dir_env_var = 'OPENBLAS'
    _lib_names = ['openblas']
    # Symbols that must resolve at link time (extended by subclasses).
    _require_symbols = []

    @property
    def symbol_prefix(self):
        # Optional site.cfg override for decorated symbol names.
        try:
            return self.cp.get(self.section, 'symbol_prefix')
        except NoOptionError:
            return ''

    @property
    def symbol_suffix(self):
        try:
            return self.cp.get(self.section, 'symbol_suffix')
        except NoOptionError:
            return ''

    def _calc_info(self):
        c = customized_ccompiler()
        lib_dirs = self.get_lib_dirs()
        opt = self.get_option_single('openblas_libs', 'libraries')
        openblas_libs = self.get_libs(opt, self._lib_names)
        info = self.check_libs(lib_dirs, openblas_libs, [])
        if c.compiler_type == "msvc" and info is None:
            # MSVC can't link gfortran-built .a archives directly; fake a
            # library via check_msvc_gfortran_libs and skip symbol checks
            # (they would need build_ext).
            from numpy.distutils.fcompiler import new_fcompiler
            f = new_fcompiler(c_compiler=c)
            if f and f.compiler_type == 'gnu95':
                info = self.check_msvc_gfortran_libs(lib_dirs, openblas_libs)
                skip_symbol_check = True
        elif info:
            skip_symbol_check = False
            info['language'] = 'c'
        if info is None:
            return None
        # Merge extra user-configured flags.
        extra_info = self.calc_extra_info()
        dict_append(info, **extra_info)
        if not (skip_symbol_check or self.check_symbols(info)):
            return None
        info['define_macros'] = [('HAVE_CBLAS', None)]
        if self.symbol_prefix:
            info['define_macros'] += [('BLAS_SYMBOL_PREFIX', self.symbol_prefix)]
        if self.symbol_suffix:
            info['define_macros'] += [('BLAS_SYMBOL_SUFFIX', self.symbol_suffix)]
        return info

    def calc_info(self):
        info = self._calc_info()
        if info is not None:
            self.set_info(**info)

    def check_msvc_gfortran_libs(self, library_dirs, libraries):
        # First, find the full paths to each library directory.
        library_paths = []
        for library in libraries:
            for library_dir in library_dirs:
                # MinGW static ext will be .a
                fullpath = os.path.join(library_dir, library + '.a')
                if os.path.isfile(fullpath):
                    library_paths.append(fullpath)
                    break
            else:
                return None
        # Generate numpy.distutils virtual static library files: lists of
        # the object archives, consumed later by the build machinery.
        basename = self.__class__.__name__
        tmpdir = os.path.join(os.getcwd(), 'build', basename)
        if not os.path.isdir(tmpdir):
            os.makedirs(tmpdir)
        info = {'library_dirs': [tmpdir],
                'libraries': [basename],
                'language': 'f77'}
        fake_lib_file = os.path.join(tmpdir, basename + '.fobjects')
        fake_clib_file = os.path.join(tmpdir, basename + '.cobjects')
        with open(fake_lib_file, 'w') as f:
            f.write("\n".join(library_paths))
        with open(fake_clib_file, 'w') as f:
            pass
        return info

    def check_symbols(self, info):
        # Link a tiny program that references every required symbol
        # (with the configured prefix/suffix applied).
        res = False
        c = customized_ccompiler()
        tmpdir = tempfile.mkdtemp()
        prototypes = "\n".join("void %s%s%s();" % (self.symbol_prefix,
                                                   symbol_name,
                                                   self.symbol_suffix)
                               for symbol_name in self._require_symbols)
        calls = "\n".join("%s%s%s();" % (self.symbol_prefix,
                                         symbol_name,
                                         self.symbol_suffix)
                          for symbol_name in self._require_symbols)
        s = textwrap.dedent("""\
            %(prototypes)s
            int main(int argc, const char *argv[])
            {
                %(calls)s
                return 0;
            }""") % dict(prototypes=prototypes, calls=calls)
        src = os.path.join(tmpdir, 'source.c')
        out = os.path.join(tmpdir, 'a.out')
        # Add the additional "extra" arguments
        try:
            extra_args = info['extra_link_args']
        except Exception:
            extra_args = []
        try:
            with open(src, 'w') as f:
                f.write(s)
            obj = c.compile([src], output_dir=tmpdir)
            try:
                c.link_executable(obj, out, libraries=info['libraries'],
                                  library_dirs=info['library_dirs'],
                                  extra_postargs=extra_args)
                res = True
            except distutils.ccompiler.LinkError:
                res = False
        finally:
            shutil.rmtree(tmpdir)
        return res
class openblas_lapack_info(openblas_info):
    """OpenBLAS with built-in LAPACK; verified by requiring zungqr_."""
    section = 'openblas'
    dir_env_var = 'OPENBLAS'
    _lib_names = ['openblas']
    _require_symbols = ['zungqr_']
    notfounderror = BlasNotFoundError
class openblas_clapack_info(openblas_lapack_info):
    """OpenBLAS paired with a separate reference LAPACK library."""
    _lib_names = ['openblas', 'lapack']
class openblas_ilp64_info(openblas_info):
    """ILP64 OpenBLAS probe; env var OPENBLAS_ILP64."""
    section = 'openblas_ilp64'
    dir_env_var = 'OPENBLAS_ILP64'
    _lib_names = ['openblas64']
    _require_symbols = ['dgemm_', 'cblas_dgemm']
    notfounderror = BlasILP64NotFoundError

    def _calc_info(self):
        info = super()._calc_info()
        if info is not None:
            # Flag 64-bit integer BLAS for the build.
            info['define_macros'] += [('HAVE_BLAS_ILP64', None)]
        return info
class openblas_ilp64_lapack_info(openblas_ilp64_info):
    """ILP64 OpenBLAS that also provides LAPACK(E) symbols."""
    _require_symbols = ['dgemm_', 'cblas_dgemm', 'zungqr_', 'LAPACKE_zungqr']

    def _calc_info(self):
        info = super()._calc_info()
        if info:
            info['define_macros'] += [('HAVE_LAPACKE', None)]
        return info
class openblas64__info(openblas_ilp64_info):
    """ILP64 OpenBLAS built with fixed '64_' symbol suffix; env var
    OPENBLAS64_."""
    # ILP64 OpenBLAS, with symbol suffix
    section = 'openblas64_'
    dir_env_var = 'OPENBLAS64_'
    _lib_names = ['openblas64_']
    symbol_suffix = '64_'
    symbol_prefix = ''
class openblas64__lapack_info(openblas_ilp64_lapack_info, openblas64__info):
    """openblas64_ variant that also requires LAPACK(E) symbols."""
    pass
class blis_info(blas_info):
    """BLIS (BLAS-like Library Instantiation Software) probe; env var BLIS."""
    section = 'blis'
    dir_env_var = 'BLIS'
    _lib_names = ['blis']
    notfounderror = BlasNotFoundError

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        opt = self.get_option_single('blis_libs', 'libraries')
        blis_libs = self.get_libs(opt, self._lib_names)
        info = self.check_libs2(lib_dirs, blis_libs, [])
        if info is None:
            return
        # Add include dirs; BLIS ships a CBLAS-compatible interface.
        incl_dirs = self.get_include_dirs()
        dict_append(info,
                    language='c',
                    define_macros=[('HAVE_CBLAS', None)],
                    include_dirs=incl_dirs)
        self.set_info(**info)
class flame_info(system_info):
    """ Usage of libflame for LAPACK operations

    This requires libflame to be compiled with lapack wrappers:

    ./configure --enable-lapack2flame ...

    Be aware that libflame 5.1.0 has some missing names in the shared library, so
    if you have problems, try the static flame library.
    """
    section = 'flame'
    _lib_names = ['flame']
    notfounderror = FlameNotFoundError

    def check_embedded_lapack(self, info):
        """ libflame does not necessarily have a wrapper for fortran LAPACK, we need to check """
        # Test-link a reference to zungqr_ against the candidate libraries.
        c = customized_ccompiler()
        tmpdir = tempfile.mkdtemp()
        s = textwrap.dedent("""\
            void zungqr_();
            int main(int argc, const char *argv[])
            {
                zungqr_();
                return 0;
            }""")
        src = os.path.join(tmpdir, 'source.c')
        out = os.path.join(tmpdir, 'a.out')
        # Add the additional "extra" arguments
        extra_args = info.get('extra_link_args', [])
        try:
            with open(src, 'w') as f:
                f.write(s)
            obj = c.compile([src], output_dir=tmpdir)
            try:
                c.link_executable(obj, out, libraries=info['libraries'],
                                  library_dirs=info['library_dirs'],
                                  extra_postargs=extra_args)
                return True
            except distutils.ccompiler.LinkError:
                return False
        finally:
            shutil.rmtree(tmpdir)

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        flame_libs = self.get_libs('libraries', self._lib_names)
        info = self.check_libs2(lib_dirs, flame_libs, [])
        if info is None:
            return
        # Add the extra flag args to info
        extra_info = self.calc_extra_info()
        dict_append(info, **extra_info)
        if self.check_embedded_lapack(info):
            # check if the user has supplied all information required
            self.set_info(**info)
        else:
            # Try and get the BLAS lib to see if we can get it to work
            blas_info = get_info('blas_opt')
            if not blas_info:
                # since we already failed once, this ain't going to work either
                return
            # Merge blas_info into info, concatenating by value type.
            for key in blas_info:
                if isinstance(blas_info[key], list):
                    info[key] = info.get(key, []) + blas_info[key]
                elif isinstance(blas_info[key], tuple):
                    info[key] = info.get(key, ()) + blas_info[key]
                else:
                    info[key] = info.get(key, '') + blas_info[key]
            # Now we need to merge the two dictionaries
            if self.check_embedded_lapack(info):
                self.set_info(**info)
class accelerate_info(system_info):
    """Probe for Apple's Accelerate / vecLib framework (macOS only)."""
    section = 'accelerate'
    _lib_names = ['accelerate', 'veclib']
    notfounderror = BlasNotFoundError

    def calc_info(self):
        # Make possible to enable/disable from config file/env var.
        libraries = os.environ.get('ACCELERATE')
        if libraries:
            libraries = [libraries]
        else:
            libraries = self.get_libs('libraries', self._lib_names)
        libraries = [lib.strip().lower() for lib in libraries]

        # Skip entirely when cross-compiling (_PYTHON_HOST_PLATFORM set).
        if (sys.platform == 'darwin' and
                not os.getenv('_PYTHON_HOST_PLATFORM', None)):
            # BUGFIX: these accumulators were referenced below without ever
            # being initialised, raising NameError on every darwin run.
            args = []
            link_args = []
            if get_platform()[-4:] == 'i386' or 'intel' in get_platform() or \
               'x86_64' in get_platform() or \
               'i386' in platform.platform():
                intel = 1
            else:
                intel = 0
            if (os.path.exists('/System/Library/Frameworks'
                               '/Accelerate.framework/') and
                    'accelerate' in libraries):
                if intel:
                    args.extend(['-msse3'])
                args.extend([
                    '-I/System/Library/Frameworks/vecLib.framework/Headers'])
                link_args.extend(['-Wl,-framework', '-Wl,Accelerate'])
            elif (os.path.exists('/System/Library/Frameworks'
                                 '/vecLib.framework/') and
                      'veclib' in libraries):
                if intel:
                    args.extend(['-msse3'])
                args.extend([
                    '-I/System/Library/Frameworks/vecLib.framework/Headers'])
                link_args.extend(['-Wl,-framework', '-Wl,vecLib'])

            if args:
                macros = [
                    ('NO_ATLAS_INFO', 3),
                    ('HAVE_CBLAS', None),
                    ('ACCELERATE_NEW_LAPACK', None),
                ]
                if os.getenv('NPY_USE_BLAS_ILP64', None):
                    # Route through the distutils log instead of print().
                    log.info('Setting HAVE_BLAS_ILP64')
                    macros += [
                        ('HAVE_BLAS_ILP64', None),
                        ('ACCELERATE_LAPACK_ILP64', None),
                    ]
                self.set_info(extra_compile_args=args,
                              extra_link_args=link_args,
                              define_macros=macros)
        return
class accelerate_lapack_info(accelerate_info):
    """LAPACK flavour of the Accelerate probe (same detection logic)."""
    def _calc_info(self):
        # NOTE(review): accelerate_info defines calc_info, not _calc_info,
        # in this chunk -- confirm where this super() call resolves.
        return super()._calc_info()
class blas_src_info(system_info):
    """Locate reference BLAS Fortran sources; env var BLAS_SRC."""
    section = 'blas_src'
    dir_env_var = 'BLAS_SRC'
    notfounderror = BlasSrcNotFoundError

    def get_paths(self, section, key):
        # Also look in a 'blas' subdirectory of each configured directory.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend([d] + self.combine_paths(d, ['blas']))
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        src_dirs = self.get_src_dirs()
        src_dir = ''
        # daxpy.f is used as a marker for a BLAS source tree.
        for d in src_dirs:
            if os.path.isfile(os.path.join(d, 'daxpy.f')):
                src_dir = d
                break
        if not src_dir:
            # XXX: Get sources from netlib. May be ask first.
            return
        # Routine names for BLAS levels 1-3; joined and split below to
        # build the .f source file list.
        blas1 = '''
        caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot
        dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2
        srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg
        dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax
        snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap
        scabs1
        '''
        blas2 = '''
        cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv
        chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv
        dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv
        sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger
        stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc
        zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2
        ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv
        '''
        blas3 = '''
        cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k
        dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm
        ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm
        '''
        sources = [os.path.join(src_dir, f + '.f') \
                   for f in (blas1 + blas2 + blas3).split()]
        # XXX: should we check here actual existence of source files?
        sources = [f for f in sources if os.path.isfile(f)]
        info = {'sources': sources, 'language': 'f77'}
        self.set_info(**info)
class x11_info(system_info):
    """X11 client library probe (skipped on win32)."""
    section = 'x11'
    notfounderror = X11NotFoundError

    def __init__(self):
        system_info.__init__(self,
                             default_lib_dirs=default_x11_lib_dirs,
                             default_include_dirs=default_x11_include_dirs)

    def calc_info(self):
        if sys.platform in ['win32']:
            return
        lib_dirs = self.get_lib_dirs()
        include_dirs = self.get_include_dirs()
        opt = self.get_option_single('x11_libs', 'libraries')
        # NOTE(review): self._lib_names is not defined in this chunk;
        # presumably ['X11'] on the class -- confirm upstream.
        x11_libs = self.get_libs(opt, self._lib_names)
        info = self.check_libs(lib_dirs, x11_libs, [])
        if info is None:
            return
        # Record the first include dir that actually contains X11/X.h.
        inc_dir = None
        for d in include_dirs:
            if self.combine_paths(d, 'X11/X.h'):
                inc_dir = d
                break
        if inc_dir is not None:
            dict_append(info, include_dirs=[inc_dir])
        self.set_info(**info)
class _numpy_info(system_info):
    """Base probe for an importable array module (Numeric/numarray/numpy):
    derives include dirs and version macros from the installed module."""
    section = 'Numeric'
    modulename = 'Numeric'

    def __init__(self):
        include_dirs = []
        try:
            module = __import__(self.modulename)
            prefix = []
            for name in module.__file__.split(os.sep):
                if name == 'lib':
                    break
                prefix.append(name)
            # Ask the module for its own include path before anything else.
            try:
                include_dirs.append(getattr(module, 'get_include')())
            except AttributeError:
                pass
            include_dirs.append(sysconfig.get_path('include'))
        except ImportError:
            pass
        py_incl_dir = sysconfig.get_path('include')
        include_dirs.append(py_incl_dir)
        py_pincl_dir = sysconfig.get_path('platinclude')
        if py_pincl_dir not in include_dirs:
            include_dirs.append(py_pincl_dir)
        for d in default_include_dirs:
            d = os.path.join(d, os.path.basename(py_incl_dir))
            if d not in include_dirs:
                include_dirs.append(d)
        system_info.__init__(self,
                             default_lib_dirs=[],
                             default_include_dirs=include_dirs)

    def calc_info(self):
        try:
            module = __import__(self.modulename)
        except ImportError:
            return
        info = {}
        macros = []
        # Export <MODULE>_VERSION and <MODULE> macros from the module's
        # version attribute, whichever spelling it uses.
        for v in ['__version__', 'version']:
            vrs = getattr(module, v, None)
            if vrs is None:
                continue
            macros = [(self.modulename.upper() + '_VERSION',
                       _c_string_literal(vrs)),
                      (self.modulename.upper(), None)]
            break
        dict_append(info, define_macros=macros)
        include_dirs = self.get_include_dirs()
        # Use the first include dir that contains <module>/arrayobject.h.
        inc_dir = None
        for d in include_dirs:
            if self.combine_paths(d,
                                  os.path.join(self.modulename,
                                               'arrayobject.h')):
                inc_dir = d
                break
        if inc_dir is not None:
            dict_append(info, include_dirs=[inc_dir])
        if info:
            self.set_info(**info)
        return
class numarray_info(_numpy_info):
    """Probe for the legacy numarray package."""
    section = 'numarray'
    modulename = 'numarray'
class Numeric_info(_numpy_info):
    """Probe for the legacy Numeric package."""
    section = 'Numeric'
    modulename = 'Numeric'
class numpy_info(_numpy_info):
    """Probe for numpy itself."""
    section = 'numpy'
    modulename = 'numpy'
class numerix_info(system_info):
    """Select an array backend (numpy/Numeric/numarray) via the NUMERIX
    environment variable, defaulting to whichever module imports."""
    section = 'numerix'

    def calc_info(self):
        which = None, None
        if os.getenv("NUMERIX"):
            which = os.getenv("NUMERIX"), "environment var"
        # If all the above fail, default to numpy.
        if which[0] is None:
            which = "numpy", "defaulted"
            try:
                import numpy  # noqa: F401
                which = "numpy", "defaulted"
            except ImportError as e:
                msg1 = str(e)
                try:
                    import Numeric  # noqa: F401
                    which = "numeric", "defaulted"
                except ImportError as e:
                    msg2 = str(e)
                    try:
                        import numarray  # noqa: F401
                        which = "numarray", "defaulted"
                    except ImportError as e:
                        msg3 = str(e)
                        log.info(msg1)
                        log.info(msg2)
                        log.info(msg3)
        which = which[0].strip().lower(), which[1]
        if which[0] not in ["numeric", "numarray", "numpy"]:
            raise ValueError("numerix selector must be either 'Numeric' "
                             "or 'numarray' or 'numpy' but the value obtained"
                             " from the %s was '%s'." % (which[1], which[0]))
        # Record the selection and delegate to the chosen backend's probe.
        os.environ['NUMERIX'] = which[0]
        self.set_info(**get_info(which[0]))
class f2py_info(system_info):
    """Source/include info for building against f2py's fortranobject.c."""
    def calc_info(self):
        try:
            import numpy.f2py as f2py
        except ImportError:
            return
        f2py_dir = os.path.join(os.path.dirname(f2py.__file__), 'src')
        self.set_info(sources=[os.path.join(f2py_dir, 'fortranobject.c')],
                      include_dirs=[f2py_dir])
        return
class boost_python_info(system_info):
    """Locate a Boost.Python source tree; env var BOOST."""
    section = 'boost_python'
    dir_env_var = 'BOOST'

    def get_paths(self, section, key):
        # Also look in boost* subdirectories of each configured directory.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend([d] + self.combine_paths(d, ['boost*']))
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        src_dirs = self.get_src_dirs()
        src_dir = ''
        # module.cpp is used as a marker for a Boost.Python source tree.
        for d in src_dirs:
            if os.path.isfile(os.path.join(d, 'libs', 'python', 'src', 'module.cpp')):
                src_dir = d
                break
        if not src_dir:
            return
        py_incl_dirs = [sysconfig.get_path('include')]
        py_pincl_dir = sysconfig.get_path('platinclude')
        if py_pincl_dir not in py_incl_dirs:
            py_incl_dirs.append(py_pincl_dir)
        srcs_dir = os.path.join(src_dir, 'libs', 'python', 'src')
        bpl_srcs = glob(os.path.join(srcs_dir, '*.cpp'))
        bpl_srcs += glob(os.path.join(srcs_dir, '*', '*.cpp'))
        info = {
            'libraries': [
                ('boost_python_src', {
                    'include_dirs': [src_dir] + py_incl_dirs,
                    'sources': bpl_srcs
                })
            ],
            'include_dirs': [src_dir],
        }
        if info:
            self.set_info(**info)
        return
class agg2_info(system_info):
    """Locate an Anti-Grain Geometry (agg2) source tree."""
    section = 'agg2'

    def get_paths(self, section, key):
        # Also look in agg2* subdirectories of each configured directory.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend([d] + self.combine_paths(d, ['agg2*']))
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        src_dirs = self.get_src_dirs()
        src_dir = ''
        # agg_affine_matrix.cpp marks an agg2 source tree.
        for d in src_dirs:
            if os.path.isfile(os.path.join(d, 'src', 'agg_affine_matrix.cpp')):
                src_dir = d
                break
        if not src_dir:
            return
        if sys.platform == 'win32':
            agg2_srcs = glob(os.path.join(src_dir, 'src', 'platform',
                                          'win32', 'agg_win32_bmp.cpp'))
        else:
            agg2_srcs = glob(os.path.join(src_dir, 'src', '*.cpp'))
            agg2_srcs += [os.path.join(src_dir, 'src', 'platform',
                                       'X11',
                                       'agg_platform_support.cpp')]
        info = {'libraries':
                [('agg2_src',
                  {'sources': agg2_srcs,
                   'include_dirs': [os.path.join(src_dir, 'include')],
                   }
                  )],
                'include_dirs': [os.path.join(src_dir, 'include')],
                }
        if info:
            self.set_info(**info)
        return
class _pkg_config_info(system_info):
    """Base class for components probed through pkg-config-style tools.

    Subclasses set ``append_config_exe`` (the package name passed to
    pkg-config) and the macro-name attributes; calc_info() then converts
    --libs/--cflags output into distutils info entries.
    """
    section = None
    config_env_var = 'PKG_CONFIG'
    default_config_exe = 'pkg-config'
    append_config_exe = ''
    version_macro_name = None
    release_macro_name = None
    version_flag = '--modversion'
    # BUGFIX: restore the base-class default; subclasses that do not
    # override it (everything except wx_info) otherwise hit AttributeError
    # in calc_info() when querying compile flags.
    cflags_flag = '--cflags'

    def get_config_exe(self):
        """Return the config tool, honouring the env-var override."""
        if self.config_env_var in os.environ:
            return os.environ[self.config_env_var]
        return self.default_config_exe

    def get_config_output(self, config_exe, option):
        """Run ``config_exe [append_config_exe] option``; return its stdout
        as a string, or None on failure."""
        # BUGFIX: pass an argv list to check_output. The previous
        # whitespace-joined string is only a valid command on Windows; on
        # POSIX check_output() would try to exec the entire string as a
        # single file name and the OSError was silently swallowed below,
        # making every pkg-config probe fail.
        cmd = [config_exe] + self.append_config_exe.split() + [option]
        try:
            o = subprocess.check_output(cmd)
        except (OSError, subprocess.CalledProcessError):
            pass
        else:
            o = filepath_from_subprocess_output(o)
            return o

    def calc_info(self):
        config_exe = find_executable(self.get_config_exe())
        if not config_exe:
            log.warn('File not found: %s. Cannot determine %s info.' \
                  % (config_exe, self.section))
            return
        info = {}
        macros = []
        libraries = []
        library_dirs = []
        include_dirs = []
        extra_link_args = []
        extra_compile_args = []
        # Version macros: <CLASS>="version", plus optional
        # <version_macro_name>_x_y and <release_macro_name>_x_y flags.
        version = self.get_config_output(config_exe, self.version_flag)
        if version:
            macros.append((self.__class__.__name__.split('.')[-1].upper(),
                           _c_string_literal(version)))
            if self.version_macro_name:
                macros.append((self.version_macro_name + '_%s'
                               % (version.replace('.', '_')), None))
        if self.release_macro_name:
            release = self.get_config_output(config_exe, '--release')
            if release:
                macros.append((self.release_macro_name + '_%s'
                               % (release.replace('.', '_')), None))
        # Parse --libs output into -l/-L/other link arguments.
        opts = self.get_config_output(config_exe, '--libs')
        if opts:
            for opt in opts.split():
                if opt[:2] == '-l':
                    libraries.append(opt[2:])
                elif opt[:2] == '-L':
                    library_dirs.append(opt[2:])
                else:
                    extra_link_args.append(opt)
        # Parse --cflags output into -I/-D/other compile arguments.
        opts = self.get_config_output(config_exe, self.cflags_flag)
        if opts:
            for opt in opts.split():
                if opt[:2] == '-I':
                    include_dirs.append(opt[2:])
                elif opt[:2] == '-D':
                    if '=' in opt:
                        n, v = opt[2:].split('=')
                        macros.append((n, v))
                    else:
                        macros.append((opt[2:], None))
                else:
                    extra_compile_args.append(opt)
        if macros:
            dict_append(info, define_macros=macros)
        if libraries:
            dict_append(info, libraries=libraries)
        if library_dirs:
            dict_append(info, library_dirs=library_dirs)
        if include_dirs:
            dict_append(info, include_dirs=include_dirs)
        if extra_link_args:
            dict_append(info, extra_link_args=extra_link_args)
        if extra_compile_args:
            dict_append(info, extra_compile_args=extra_compile_args)
        if info:
            self.set_info(**info)
        return
class wx_info(_pkg_config_info):
    """wxWidgets info via the ``wx-config`` tool (C++ flags)."""
    section = 'wx'
    config_env_var = 'WX_CONFIG'
    default_config_exe = 'wx-config'
    append_config_exe = ''
    version_macro_name = 'WX_VERSION'
    release_macro_name = 'WX_RELEASE'
    version_flag = '--version'
    cflags_flag = '--cxxflags'
class gdk_pixbuf_xlib_2_info(_pkg_config_info):
    """gdk-pixbuf-xlib-2.0 info via pkg-config."""
    section = 'gdk_pixbuf_xlib_2'
    append_config_exe = 'gdk-pixbuf-xlib-2.0'
    version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'
class gdk_pixbuf_2_info(_pkg_config_info):
    """gdk-pixbuf-2.0 info via pkg-config."""
    section = 'gdk_pixbuf_2'
    append_config_exe = 'gdk-pixbuf-2.0'
    version_macro_name = 'GDK_PIXBUF_VERSION'
class gdk_x11_2_info(_pkg_config_info):
    """gdk-x11-2.0 info via pkg-config."""
    section = 'gdk_x11_2'
    append_config_exe = 'gdk-x11-2.0'
    version_macro_name = 'GDK_X11_VERSION'
class gdk_2_info(_pkg_config_info):
    """gdk-2.0 info via pkg-config."""
    section = 'gdk_2'
    append_config_exe = 'gdk-2.0'
    version_macro_name = 'GDK_VERSION'
class gdk_info(_pkg_config_info):
    """gdk (1.x) info via pkg-config."""
    section = 'gdk'
    append_config_exe = 'gdk'
    version_macro_name = 'GDK_VERSION'
class gtkp_x11_2_info(_pkg_config_info):
    """gtk+-x11-2.0 info via pkg-config."""
    section = 'gtkp_x11_2'
    append_config_exe = 'gtk+-x11-2.0'
    version_macro_name = 'GTK_X11_VERSION'
class gtkp_2_info(_pkg_config_info):
    """gtk+-2.0 info via pkg-config."""
    section = 'gtkp_2'
    append_config_exe = 'gtk+-2.0'
    version_macro_name = 'GTK_VERSION'
class xft_info(_pkg_config_info):
    """Xft info via pkg-config."""
    section = 'xft'
    append_config_exe = 'xft'
    version_macro_name = 'XFT_VERSION'
class freetype2_info(_pkg_config_info):
    """FreeType 2 info via pkg-config."""
    section = 'freetype2'
    append_config_exe = 'freetype2'
    version_macro_name = 'FREETYPE2_VERSION'
class amd_info(system_info):
    """Locate the AMD (Approximate Minimum Degree) library and header."""
    section = 'amd'
    dir_env_var = 'AMD'
    _lib_names = ['amd']

    def calc_info(self):
        """Find libamd plus its amd.h header and record the result."""
        opt = self.get_option_single('amd_libs', 'libraries')
        candidates = self.get_libs(opt, self._lib_names)
        info = self.check_libs(self.get_lib_dirs(), candidates, [])
        if info is None:
            return
        # Scan the configured include dirs for the first amd.h hit.
        header_dir = None
        for candidate_dir in self.get_include_dirs():
            hits = self.combine_paths(candidate_dir, 'amd.h')
            if hits:
                header_dir = os.path.dirname(hits[0])
                break
        if header_dir is not None:
            dict_append(info,
                        include_dirs=[header_dir],
                        define_macros=[('SCIPY_AMD_H', None)],
                        swig_opts=['-I' + header_dir])
        self.set_info(**info)
class umfpack_info(system_info):
    """Locate the UMFPACK sparse solver, its header, and its AMD dependency."""
    section = 'umfpack'
    dir_env_var = 'UMFPACK'
    notfounderror = UmfpackNotFoundError
    _lib_names = ['umfpack']

    def calc_info(self):
        """Find libumfpack and umfpack.h, merge in AMD info, and record it."""
        opt = self.get_option_single('umfpack_libs', 'libraries')
        candidates = self.get_libs(opt, self._lib_names)
        info = self.check_libs(self.get_lib_dirs(), candidates, [])
        if info is None:
            return
        # umfpack.h may sit directly in an include dir or one level
        # down inside an 'umfpack' subdirectory.
        header_dir = None
        for candidate_dir in self.get_include_dirs():
            hits = self.combine_paths(candidate_dir, ['', 'umfpack'], 'umfpack.h')
            if hits:
                header_dir = os.path.dirname(hits[0])
                break
        if header_dir is not None:
            dict_append(info,
                        include_dirs=[header_dir],
                        define_macros=[('SCIPY_UMFPACK_H', None)],
                        swig_opts=['-I' + header_dir])
        # UMFPACK links against AMD; fold its settings in as well.
        dict_append(info, **get_info('amd'))
        self.set_info(**info)
def combine_paths(*args, **kws):
    """Return all existing paths obtained by joining one entry from each
    argument and expanding the joined pattern with glob().

    Each argument may be a string or a list of strings; empty arguments
    are ignored.  With a single argument, its entries are globbed
    directly.
    """
    # Normalize: drop falsy arguments, wrap bare strings in lists.
    groups = []
    for arg in args:
        if not arg:
            continue
        groups.append([arg] if is_string(arg) else arg)
    if not groups:
        return []
    if len(groups) == 1:
        matches = []
        for pattern in groups[0]:
            matches.extend(glob(pattern))
    elif len(groups) == 2:
        matches = [found
                   for left in groups[0]
                   for right in groups[1]
                   for found in glob(os.path.join(left, right))]
    else:
        # Fold the first two groups together, then recurse on the rest.
        matches = combine_paths(*(combine_paths(groups[0], groups[1]) + groups[2:]))
    log.debug('(paths: %s)', ','.join(matches))
    return matches
# Ranking of implementation languages used by dict_append(): when several
# merged info dicts specify a 'language', the highest-ranked one wins.
language_map = {'c': 0, 'c++': 1, 'f77': 2, 'f90': 3}
# Inverse mapping: winning rank back to a language name.
inv_language_map = {0: 'c', 1: 'c++', 2: 'f77', 3: 'f90'}
def dict_append(d, **kws):
    """Merge keyword values into the info dict *d* in place.

    List values are concatenated onto existing entries; for path/flag
    keys duplicates are skipped.  The special 'language' keyword is not
    stored directly: the highest-ranked language seen wins (see
    language_map above).
    """
    # Keys whose merged entries must stay free of duplicates.
    uniq_keys = ('library_dirs', 'include_dirs',
                 'extra_compile_args', 'extra_link_args',
                 'runtime_library_dirs', 'define_macros')
    languages = []
    for k, v in kws.items():
        if k == 'language':
            languages.append(v)
            continue
        if k in d:
            if k in uniq_keys:
                # Plain loop (not a side-effect comprehension): append
                # only values not already present, preserving order.
                for item in v:
                    if item not in d[k]:
                        d[k].append(item)
            else:
                d[k].extend(v)
        else:
            d[k] = v
    if languages:
        # Keep the most demanding language; unknown names rank lowest.
        best = max(language_map.get(lang, 0) for lang in languages)
        d['language'] = inv_language_map[best]
    return
def parseCmdLine(argv=(None,)):
    """Parse command-line options for show_all().

    Returns an ``(options, args)`` pair: ``options.verbose`` is a
    boolean, and ``args`` lists the requested info-class names.
    """
    import optparse
    cli = optparse.OptionParser("usage: %prog [-v] [info objs]")
    cli.add_option('-v', '--verbose',
                   action='store_true', dest='verbose', default=False,
                   help='be verbose and print more messages')
    return cli.parse_args(args=argv[1:])
def show_all(argv=None):
    """Probe every (or each requested) system_info class and print results.

    Command-line entry point; ``argv`` follows sys.argv conventions and
    positional arguments select specific *_info classes to probe.
    """
    import inspect
    if argv is None:
        argv = sys.argv
    opts, args = parseCmdLine(argv)
    if opts.verbose:
        log.set_threshold(log.DEBUG)
    else:
        log.set_threshold(log.INFO)
    show_only = []
    for n in args:
        # Accept both 'atlas' and 'atlas_info' spellings.
        if n[-5:] != '_info':
            n = n + '_info'
        show_only.append(n)
    show_all = not show_only
    _gdict_ = globals().copy()
    for name, c in _gdict_.items():
        if not inspect.isclass(c):
            continue
        # Probe only concrete system_info subclasses from this module.
        if not issubclass(c, system_info) or c is system_info:
            continue
        if not show_all:
            if name not in show_only:
                continue
            del show_only[show_only.index(name)]
        conf = c()
        conf.verbosity = 2
        # get_info() triggers calc_info(); verbosity 2 prints the result.
        conf.get_info()
    if show_only:
        log.info('Info classes not defined: %s', ','.join(show_only))
# Allow "python system_info.py [-v] [name ...]" as a diagnostic tool.
if __name__ == "__main__":
    show_all()
.\numpy\numpy\distutils\tests\test_build_ext.py
'''Tests for numpy.distutils.build_ext.'''
import os
import subprocess
import sys
from textwrap import indent, dedent
import pytest
from numpy.testing import IS_WASM
@pytest.mark.skipif(IS_WASM, reason="cannot start subprocess in wasm")
@pytest.mark.slow
def test_multi_fortran_libs_link(tmp_path):
    '''
    Ensures multiple "fake" static libraries are correctly linked.
    see gh-18295
    '''
    # Both fake libraries are built from Fortran sources, so a real
    # F77 compiler is required.
    from numpy.distutils.tests.utilities import has_f77_compiler
    if not has_f77_compiler():
        pytest.skip('No F77 compiler found')
    # Two trivial Fortran sources, each becoming its own static library.
    with open(tmp_path / '_dummy1.f', 'w') as fid:
        fid.write(indent(dedent('''\
            FUNCTION dummy_one()
            RETURN
            END FUNCTION'''), prefix=' '*6))
    with open(tmp_path / '_dummy2.f', 'w') as fid:
        fid.write(indent(dedent('''\
            FUNCTION dummy_two()
            RETURN
            END FUNCTION'''), prefix=' '*6))
    # Minimal C source: just enough symbol for a linkable module.
    with open(tmp_path / '_dummy.c', 'w') as fid:
        fid.write('int PyInit_dummyext;')
    # Generated setup.py linking the extension against both libraries.
    with open(tmp_path / 'setup.py', 'w') as fid:
        srctree = os.path.join(os.path.dirname(__file__), '..', '..', '..')
        fid.write(dedent(f'''\
            def configuration(parent_package="", top_path=None):
                from numpy.distutils.misc_util import Configuration
                config = Configuration("", parent_package, top_path)
                config.add_library("dummy1", sources=["_dummy1.f"])
                config.add_library("dummy2", sources=["_dummy2.f"])
                config.add_extension("dummyext", sources=["_dummy.c"], libraries=["dummy1", "dummy2"])
                return config
            if __name__ == "__main__":
                import sys
                sys.path.insert(0, r"{srctree}")
                from numpy.distutils.core import setup
                setup(**configuration(top_path="").todict())'''))
    # Build and install into a scratch prefix, recording installed files.
    build_dir = tmp_path
    subprocess.check_call([sys.executable, 'setup.py', 'build', 'install',
                           '--prefix', str(tmp_path / 'installdir'),
                           '--record', str(tmp_path / 'tmp_install_log.txt'),
                           ],
                          cwd=str(build_dir),
                          )
    # The install log must mention the built extension module; if the
    # two libraries failed to link, the build above would have aborted.
    so = None
    with open(tmp_path / 'tmp_install_log.txt') as fid:
        for line in fid:
            if 'dummyext' in line:
                so = line.strip()
                break
    assert so is not None
.\numpy\numpy\distutils\tests\test_ccompiler_opt.py
import re, textwrap, os
from os import sys, path
from distutils.errors import DistutilsError
is_standalone = __name__ == '__main__' and __package__ is None
if is_standalone:
    # Running this file directly: import CCompilerOpt straight from the
    # source tree and provide minimal substitutes for the numpy.testing
    # helpers used below.
    import unittest, contextlib, tempfile, shutil
    sys.path.append(path.abspath(path.join(path.dirname(__file__), "..")))
    from ccompiler_opt import CCompilerOpt

    @contextlib.contextmanager
    def tempdir(*args, **kwargs):
        # Minimal stand-in for numpy.testing.tempdir.
        tmpdir = tempfile.mkdtemp(*args, **kwargs)
        try:
            yield tmpdir
        finally:
            shutil.rmtree(tmpdir)

    def assert_(expr, msg=''):
        # Minimal stand-in for numpy.testing.assert_.
        if not expr:
            raise AssertionError(msg)
else:
    from numpy.distutils.ccompiler_opt import CCompilerOpt
    from numpy.testing import assert_, tempdir
# Compilers exercised per architecture; each (arch, cc) pair below gets
# its own generated TestCCompilerOpt_* class at the bottom of the file.
arch_compilers = dict(
    x86 = ("gcc", "clang", "icc", "iccw", "msvc"),
    x64 = ("gcc", "clang", "icc", "iccw", "msvc"),
    ppc64 = ("gcc", "clang"),
    ppc64le = ("gcc", "clang"),
    armhf = ("gcc", "clang"),
    aarch64 = ("gcc", "clang", "fcc"),
    s390x = ("gcc", "clang"),
    noarch = ("gcc",)
)
class FakeCCompilerOpt(CCompilerOpt):
    """CCompilerOpt double that never runs a real compiler.

    Compilation is simulated; the 'trap' regexes let a test force a
    failure for particular source files or flags.
    """
    fake_info = ""

    def __init__(self, trap_files="", trap_flags="", *args, **kwargs):
        self.fake_trap_files = trap_files
        self.fake_trap_flags = trap_flags
        CCompilerOpt.__init__(self, None, **kwargs)

    def __repr__(self):
        return textwrap.dedent("""\
            <<<<
            march : {}
            compiler : {}
            ----------------
            {}
            >>>>
        """).format(self.cc_march, self.cc_name, self.report())

    def dist_compile(self, sources, flags, **kwargs):
        """Pretend to compile: report trapped items, else echo back."""
        assert(isinstance(sources, list))
        assert(isinstance(flags, list))
        if self.fake_trap_files:
            for trapped in (s for s in sources
                            if re.match(self.fake_trap_files, s)):
                self.dist_error("source is trapped by a fake interface")
        if self.fake_trap_flags:
            for trapped in (f for f in flags
                            if re.match(self.fake_trap_flags, f)):
                self.dist_error("flag is trapped by a fake interface")
        # Each "object" is the source paired with the joined flag string.
        return zip(sources, [' '.join(flags)] * len(sources))

    def dist_info(self):
        return FakeCCompilerOpt.fake_info

    @staticmethod
    def dist_log(*args, stderr=False):
        # Keep test output quiet.
        pass
class _Test_CCompilerOpt:
    """Shared test matrix; generated subclasses pin 'arch' and 'cc'."""
    # Target architecture / compiler name, set by generated subclasses.
    arch = None
    cc = None

    def setup_class(self):
        FakeCCompilerOpt.conf_nocache = True
        self._opt = None

    def nopt(self, *args, **kwargs):
        # Fresh FakeCCompilerOpt configured for this (arch, cc) pair.
        FakeCCompilerOpt.fake_info = (self.arch, self.cc, "")
        return FakeCCompilerOpt(*args, **kwargs)

    def opt(self):
        # Lazily created default instance shared by simple queries.
        if not self._opt:
            self._opt = self.nopt()
        return self._opt

    def march(self):
        return self.opt().cc_march

    def cc_name(self):
        return self.opt().cc_name
def get_targets(self, targets, groups={}, **kwargs):
    """Compile a snippet carrying a /*@targets*/ header and return the
    (targets, flags-per-target) that the dispatcher actually produced.

    NOTE(review): the 'def' line of this method was lost in this dump;
    the signature is reconstructed from its call sites in
    expect_targets() and expect_target_flags().  'groups' is read-only
    here, so the mutable default is harmless.
    """
    FakeCCompilerOpt.conf_target_groups = groups
    opt = self.nopt(
        cpu_baseline=kwargs.get("baseline", "min"),
        cpu_dispatch=kwargs.get("dispatch", "max"),
        trap_files=kwargs.get("trap_files", ""),
        trap_flags=kwargs.get("trap_flags", "")
    )
    with tempdir() as tmpdir:
        file = os.path.join(tmpdir, "test_targets.c")
        with open(file, 'w') as f:
            f.write(targets)
        gtargets = []
        gflags = {}
        fake_objects = opt.try_dispatch([file])
        for source, flags in fake_objects:
            # Encoded target names sit between the first and last dots
            # of the generated object name.
            gtar = path.basename(source).split('.')[1:-1]
            glen = len(gtar)
            if glen == 0:
                gtar = "baseline"
            elif glen == 1:
                gtar = gtar[0].upper()
            else:
                gtar = ('('+' '.join(gtar)+')').upper()
            gtargets.append(gtar)
            gflags[gtar] = flags
    has_baseline, targets = opt.sources_status[file]
    targets = targets + ["baseline"] if has_baseline else targets
    # Normalize multi-target tuples into "(A B)" strings.
    targets = [
        '('+' '.join(tar)+')' if isinstance(tar, tuple) else tar
        for tar in targets
    ]
    if len(targets) != len(gtargets) or not all(t in gtargets for t in targets):
        raise AssertionError(
            "'sources_status' returns different targets than the compiled targets\n"
            "%s != %s" % (targets, gtargets)
        )
    return targets, gflags
def arg_regex(self, **kwargs):
    """Pick the expectation regex that applies to the current arch/cc.

    Keyword names are tried from generic to specific ('march', 'cc',
    mapped fallbacks, then 'march_cc' combinations); returns None when
    no key applies, which callers treat as "skip this expectation".
    Plain literal patterns are anchored with a trailing '$'.
    """
    # Some targets share expectations with a canonical origin.
    map2origin = dict(
        x64 = "x86",
        ppc64le = "ppc64",
        aarch64 = "armhf",
        clang = "gcc",
    )
    march = self.march(); cc_name = self.cc_name()
    map_march = map2origin.get(march, march)
    map_cc = map2origin.get(cc_name, cc_name)
    for key in (
        march, cc_name, map_march, map_cc,
        march + '_' + cc_name,
        map_march + '_' + cc_name,
        march + '_' + map_cc,
        map_march + '_' + map_cc,
    ) :
        regex = kwargs.pop(key, None)
        if regex is not None:
            break
    if regex:
        if isinstance(regex, dict):
            for k, v in regex.items():
                # Anchor plain literals at the end of the string.
                if v[-1:] not in ')}$?\\.+*':
                    regex[k] = v + '$'
        else:
            assert(isinstance(regex, str))
            if regex[-1:] not in ')}$?\\.+*':
                regex += '$'
    return regex

def expect(self, dispatch, baseline="", **kwargs):
    """Assert the resolved dispatch features match the expected regex."""
    match = self.arg_regex(**kwargs)
    if match is None:
        return
    opt = self.nopt(
        cpu_baseline=baseline, cpu_dispatch=dispatch,
        trap_files=kwargs.get("trap_files", ""),
        trap_flags=kwargs.get("trap_flags", "")
    )
    features = ' '.join(opt.cpu_dispatch_names())
    if not match:
        # Empty expectation means no features may be resolved at all.
        if len(features) != 0:
            raise AssertionError(
                'expected empty features, not "%s"' % features
            )
        return
    if not re.match(match, features, re.IGNORECASE):
        raise AssertionError(
            'dispatch features "%s" not match "%s"' % (features, match)
        )

def expect_baseline(self, baseline, dispatch="", **kwargs):
    """Assert the resolved baseline features match the expected regex."""
    match = self.arg_regex(**kwargs)
    if match is None:
        return
    opt = self.nopt(
        cpu_baseline=baseline, cpu_dispatch=dispatch,
        trap_files=kwargs.get("trap_files", ""),
        trap_flags=kwargs.get("trap_flags", "")
    )
    features = ' '.join(opt.cpu_baseline_names())
    if not match:
        if len(features) != 0:
            raise AssertionError(
                'expected empty features, not "%s"' % features
            )
        return
    if not re.match(match, features, re.IGNORECASE):
        raise AssertionError(
            'baseline features "%s" not match "%s"' % (features, match)
        )

def expect_flags(self, baseline, dispatch="", **kwargs):
    """Assert the baseline compiler flags match the expected regex."""
    match = self.arg_regex(**kwargs)
    if match is None:
        return
    opt = self.nopt(
        cpu_baseline=baseline, cpu_dispatch=dispatch,
        trap_files=kwargs.get("trap_files", ""),
        trap_flags=kwargs.get("trap_flags", "")
    )
    flags = ' '.join(opt.cpu_baseline_flags())
    if not match:
        if len(flags) != 0:
            raise AssertionError(
                'expected empty flags not "%s"' % flags
            )
        return
    # Flags are case-sensitive, unlike feature names above.
    if not re.match(match, flags):
        raise AssertionError(
            'flags "%s" not match "%s"' % (flags, match)
        )

def expect_targets(self, targets, groups={}, **kwargs):
    """Assert the dispatched targets of a snippet match the regex."""
    match = self.arg_regex(**kwargs)
    if match is None:
        return
    targets, _ = self.get_targets(targets=targets, groups=groups, **kwargs)
    targets = ' '.join(targets)
    if not match:
        if len(targets) != 0:
            raise AssertionError(
                'expected empty targets, not "%s"' % targets
            )
        return
    if not re.match(match, targets, re.IGNORECASE):
        raise AssertionError(
            'targets "%s" not match "%s"' % (targets, match)
        )

def expect_target_flags(self, targets, groups={}, **kwargs):
    """Assert per-target flags match a {target: regex} mapping."""
    match_dict = self.arg_regex(**kwargs)
    if match_dict is None:
        return
    assert(isinstance(match_dict, dict))
    _, tar_flags = self.get_targets(targets=targets, groups=groups)
    for match_tar, match_flags in match_dict.items():
        if match_tar not in tar_flags:
            raise AssertionError(
                'expected to find target "%s"' % match_tar
            )
        flags = tar_flags[match_tar]
        if not match_flags:
            if len(flags) != 0:
                raise AssertionError(
                    'expected to find empty flags in target "%s"' % match_tar
                )
        # NOTE(review): no 'continue' above — an empty pattern falls
        # through to re.match('', flags), which always matches, so the
        # fall-through is harmless.
        if not re.match(match_flags, flags):
            raise AssertionError(
                '"%s" flags "%s" not match "%s"' % (match_tar, flags, match_flags)
            )
def test_interface(self):
    """cc_on_*/cc_is_* attributes must reflect the configured pair."""
    wrong_arch = "ppc64" if self.arch != "ppc64" else "x86"
    wrong_cc = "clang" if self.cc != "clang" else "icc"
    opt = self.opt()
    assert_(getattr(opt, "cc_on_" + self.arch))
    assert_(not getattr(opt, "cc_on_" + wrong_arch))
    assert_(getattr(opt, "cc_is_" + self.cc))
    assert_(not getattr(opt, "cc_is_" + wrong_cc))

def test_args_empty(self):
    """Requests that cancel themselves out must resolve to no features."""
    for baseline, dispatch in (
        ("", "none"),
        (None, ""),
        ("none +none", "none - none"),
        ("none -max", "min - max"),
        ("+vsx2 -VSX2", "vsx avx2 avx512f -max"),
        ("max -vsx - avx + avx512f neon -MAX ",
         "min -min + max -max -vsx + avx2 -avx2 +NONE")
    ) :
        opt = self.nopt(cpu_baseline=baseline, cpu_dispatch=dispatch)
        assert(len(opt.cpu_baseline_names()) == 0)
        assert(len(opt.cpu_dispatch_names()) == 0)

def test_args_validation(self):
    """Unknown feature names and bad tokens must raise DistutilsError."""
    if self.march() == "unknown":
        return
    # errors
    for baseline, dispatch in (
        ("unkown_feature - max +min", "unknown max min"),
        ("#avx2", "$vsx")
    ) :
        try:
            self.nopt(cpu_baseline=baseline, cpu_dispatch=dispatch)
            raise AssertionError("excepted an exception for invalid arguments")
        except DistutilsError:
            pass

def test_skip(self):
    """Features of other architectures (or already in the baseline) are skipped."""
    # only takes what platform supports and skip the others
    # without casing exceptions
    self.expect(
        "sse vsx neon",
        x86="sse", ppc64="vsx", armhf="neon", unknown=""
    )
    self.expect(
        "sse41 avx avx2 vsx2 vsx3 neon_vfpv4 asimd",
        x86 = "sse41 avx avx2",
        ppc64 = "vsx2 vsx3",
        armhf = "neon_vfpv4 asimd",
        unknown = ""
    )
    # any features in cpu_dispatch that's part of cpu_baseline
    # must be skipped
    self.expect(
        "sse neon vsx", baseline="sse neon vsx",
        x86="", ppc64="", armhf=""
    )
    self.expect(
        "avx2 vsx3 asimdhp", baseline="avx2 vsx3 asimdhp",
        x86="", ppc64="", armhf=""
    )

def test_implies(self):
    """Requesting a feature must pull in everything it implies."""
    # baseline combining implied features, so we count
    # the implied features and compare them to the expected
    self.expect_baseline(
        "fma3 avx2 asimd vsx3",
        x86 = "sse .* sse41 .* fma3.*avx2",
        ppc64 = "vsx vsx2 vsx3",
        armhf = "neon neon_fp16 neon_vfpv4 asimd"
    )
    """
    special cases
    """
    # in icc and msvc, FMA3 and AVX2 can't be separated
    # both should implies each other
    for f0, f1 in (
        ("fma3", "avx2"),
        ("avx512f", "avx512cd"),
    ):
        diff = ".* sse42 .* %s .*%s$" % (f0, f1)
        self.expect_baseline(f0,
            x86_gcc=".* sse42 .* %s$" % f0,
            x86_icc=diff, x86_iccw=diff
        )
        self.expect_baseline(f1,
            x86_gcc=".* avx .* %s$" % f1,
            x86_icc=diff, x86_iccw=diff
        )
    # in msvc, following features can't be separated too
    for f in (("fma3", "avx2"), ("avx512f", "avx512cd", "avx512_skx")):
        for ff in f:
            self.expect_baseline(ff,
                x86_msvc=".*%s" % ' '.join(f)
            )
    # in ppc64le VSX and VSX2 can't be separated
    self.expect_baseline("vsx", ppc64le="vsx vsx2")
    # in aarch64 following features can't be separated
    for f in ("neon", "neon_fp16", "neon_vfpv4", "asimd"):
        self.expect_baseline(f, aarch64="neon neon_fp16 neon_vfpv4 asimd")
def test_args_options(self):
    """'max'/'native'/'min' keywords resolve to the right feature sets,
    shrinking gracefully when compiler probes are trapped."""
    for o in ("max", "native"):
        if o == "native" and self.cc_name() == "msvc":
            continue
        self.expect(o,
            trap_files=".*cpu_(sse|vsx|neon|vx).c",
            x86="", ppc64="", armhf="", s390x=""
        )
        self.expect(o,
            trap_files=".*cpu_(sse3|vsx2|neon_vfpv4|vxe).c",
            x86="sse sse2", ppc64="vsx", armhf="neon neon_fp16",
            aarch64="", ppc64le="", s390x="vx"
        )
        self.expect(o,
            trap_files=".*cpu_(popcnt|vsx3).c",
            x86="sse .* sse41", ppc64="vsx vsx2",
            armhf="neon neon_fp16 .* asimd .*",
            s390x="vx vxe vxe2"
        )
        self.expect(o,
            x86_gcc=".* xop fma4 .* avx512f .* avx512_knl avx512_knm avx512_skx .*",
            x86_icc=".* avx512f .* avx512_knl avx512_knm avx512_skx .*",
            x86_iccw=".* avx512f .* avx512_knl avx512_knm avx512_skx .*",
            # msvc doesn't support avx512_knl avx512_knm
            x86_msvc=".* xop fma4 .* avx512f .* avx512_skx .*",
            armhf=".* asimd asimdhp asimddp .*",
            ppc64="vsx vsx2 vsx3 vsx4.*",
            s390x="vx vxe vxe2.*"
        )
    self.expect("min",
        x86="sse sse2", x64="sse sse2 sse3",
        armhf="", aarch64="neon neon_fp16 .* asimd",
        ppc64="", ppc64le="vsx vsx2", s390x=""
    )
    self.expect(
        "min", trap_files=".*cpu_(sse2|vsx2).c",
        x86="", ppc64le=""
    )
    # 'native' must fail hard when the host-detection flag is trapped.
    try:
        self.expect("native",
            trap_flags=".*(-march=native|-xHost|/QxHost|-mcpu=a64fx).*",
            x86=".*", ppc64=".*", armhf=".*", s390x=".*", aarch64=".*",
        )
        if self.march() != "unknown":
            raise AssertionError(
                "excepted an exception for %s" % self.march()
            )
    except DistutilsError:
        if self.march() == "unknown":
            raise AssertionError("excepted no exceptions")

def test_flags(self):
    """Baseline feature sets map to the expected compiler flags."""
    self.expect_flags(
        "sse sse2 vsx vsx2 neon neon_fp16 vx vxe",
        x86_gcc="-msse -msse2", x86_icc="-msse -msse2",
        x86_iccw="/arch:SSE2",
        x86_msvc="/arch:SSE2" if self.march() == "x86" else "",
        ppc64_gcc= "-mcpu=power8",
        ppc64_clang="-mcpu=power8",
        armhf_gcc="-mfpu=neon-fp16 -mfp16-format=ieee",
        aarch64="",
        s390x="-mzvector -march=arch12"
    )
    # testing fusion of flags (armv8 extensions accumulate in -march=)
    self.expect_flags(
        "asimd",
        aarch64="",
        armhf_gcc=r"-mfp16-format=ieee -mfpu=neon-fp-armv8 -march=armv8-a\+simd"
    )
    self.expect_flags(
        "asimdhp",
        aarch64_gcc=r"-march=armv8.2-a\+fp16",
        armhf_gcc=r"-mfp16-format=ieee -mfpu=neon-fp-armv8 -march=armv8.2-a\+fp16"
    )
    self.expect_flags(
        "asimddp", aarch64_gcc=r"-march=armv8.2-a\+dotprod"
    )
    self.expect_flags(
        "asimdfhm", aarch64_gcc=r"-march=armv8.2-a\+fp16\+fp16fml"
    )
    self.expect_flags(
        "asimddp asimdhp asimdfhm",
        aarch64_gcc=r"-march=armv8.2-a\+dotprod\+fp16\+fp16fml"
    )
    self.expect_flags(
        "vx vxe vxe2",
        s390x=r"-mzvector -march=arch13"
    )
def test_targets_exceptions(self):
    """Malformed /*@targets*/ headers must raise DistutilsError."""
    for targets in (
        "bla bla", "/*@targets",
        "/*@targets */",
        "/*@targets unknown */",
        "/*@targets $unknown_policy avx2 */",
        "/*@targets #unknown_group avx2 */",
        "/*@targets $ */",
        "/*@targets # vsx */",
        "/*@targets #$ vsx */",
        "/*@targets vsx avx2 ) */",
        "/*@targets vsx avx2 (avx2 */",
        "/*@targets vsx avx2 () */",
        "/*@targets vsx avx2 ($autovec) */",
        "/*@targets vsx avx2 (xxx) */",
        "/*@targets vsx avx2 (baseline) */",
    ):
        try:
            self.expect_targets(
                targets,
                x86="", armhf="", ppc64="", s390x=""
            )
            if self.march() != "unknown":
                raise AssertionError(
                    "excepted an exception for %s" % self.march()
                )
        except DistutilsError:
            if self.march() == "unknown":
                raise AssertionError("excepted no exceptions")

def test_targets_syntax(self):
    """The header parser tolerates varied separators and comment decoration."""
    for targets in (
        "/*@targets $keep_baseline sse vsx neon vx*/",
        "/*@targets,$keep_baseline,sse,vsx,neon vx*/",
        "/*@targets*$keep_baseline*sse*vsx*neon*vx*/",
        """
        /*
        ** @targets
        ** $keep_baseline, sse vsx,neon, vx
        */
        """,
        """
        /*
        ************@targets****************
        ** $keep_baseline, sse vsx, neon, vx
        ************************************
        */
        """,
        """
        /*
        /////////////@targets/////////////////
        //$keep_baseline//sse//vsx//neon//vx
        /////////////////////////////////////
        */
        """,
        """
        /*
        @targets
        $keep_baseline
        SSE VSX NEON VX*/
        """
    ):
        self.expect_targets(targets,
            x86="sse", ppc64="vsx", armhf="neon", s390x="vx", unknown=""
        )
def test_targets(self):
    """Targets are filtered against baseline/dispatch and availability,
    and emitted highest-interest first."""
    # test skipping baseline features
    self.expect_targets(
        """
        /*@targets
            sse sse2 sse41 avx avx2 avx512f
            vsx vsx2 vsx3 vsx4
            neon neon_fp16 asimdhp asimddp
            vx vxe vxe2
        */
        """,
        baseline="avx vsx2 asimd vx vxe",
        x86="avx512f avx2",
        armhf="asimddp asimdhp",
        ppc64="vsx4 vsx3",
        s390x="vxe2"
    )
    # test skipping non-dispatch features
    self.expect_targets(
        """
        /*@targets
            sse41 avx avx2 avx512f
            vsx2 vsx3 vsx4
            asimd asimdhp asimddp
            vx vxe vxe2
        */
        """,
        baseline="",
        dispatch="sse41 avx2 vsx2 asimd asimddp vxe2",
        x86="avx2 sse41",
        armhf="asimddp asimd",
        ppc64="vsx2",
        s390x="vxe2"
    )
    # test skipping features that not supported
    self.expect_targets(
        """
        /*@targets
            sse2 sse41 avx2 avx512f
            vsx2 vsx3 vsx4
            neon asimdhp asimddp
            vx vxe vxe2
        */
        """,
        baseline="",
        trap_files=".*(avx2|avx512f|vsx3|vsx4|asimddp|vxe2).c",
        x86="sse41 sse2",
        ppc64="vsx2",
        armhf="asimdhp neon",
        s390x="vxe vx"
    )
    # test skipping features that implies each other
    self.expect_targets(
        """
        /*@targets
            sse sse2 avx fma3 avx2 avx512f avx512cd
            vsx vsx2 vsx3
            neon neon_vfpv4 neon_fp16 neon_fp16 asimd asimdhp
            asimddp asimdfhm
        */
        """,
        baseline="",
        x86_gcc="avx512cd avx512f avx2 fma3 avx sse2",
        x86_msvc="avx512cd avx2 avx sse2",
        x86_icc="avx512cd avx2 avx sse2",
        x86_iccw="avx512cd avx2 avx sse2",
        ppc64="vsx3 vsx2 vsx",
        ppc64le="vsx3 vsx2",
        armhf="asimdfhm asimddp asimdhp asimd neon_vfpv4 neon_fp16 neon",
        aarch64="asimdfhm asimddp asimdhp asimd"
    )

def test_targets_policies(self):
    """$keep_baseline, $keep_sort, $autovec, $maxopt and $werror
    policies alter filtering, ordering, and emitted flags."""
    # 'keep_baseline', generate objects for baseline features
    self.expect_targets(
        """
        /*@targets
            $keep_baseline
            sse2 sse42 avx2 avx512f
            vsx2 vsx3
            neon neon_vfpv4 asimd asimddp
            vx vxe vxe2
        */
        """,
        baseline="sse41 avx2 vsx2 asimd vsx3 vxe",
        x86="avx512f avx2 sse42 sse2",
        ppc64="vsx3 vsx2",
        armhf="asimddp asimd neon_vfpv4 neon",
        # neon, neon_vfpv4, asimd implies each other
        aarch64="asimddp asimd",
        s390x="vxe2 vxe vx"
    )
    # 'keep_sort', leave the sort as-is
    self.expect_targets(
        """
        /*@targets
            $keep_baseline $keep_sort
            avx512f sse42 avx2 sse2
            vsx2 vsx3
            asimd neon neon_vfpv4 asimddp
            vxe vxe2
        */
        """,
        x86="avx512f sse42 avx2 sse2",
        ppc64="vsx2 vsx3",
        armhf="asimd neon neon_vfpv4 asimddp",
        # neon, neon_vfpv4, asimd implies each other
        aarch64="asimd asimddp",
        s390x="vxe vxe2"
    )
    # 'autovec', skip features that can't be
    # vectorized by the compiler
    self.expect_targets(
        """
        /*@targets
            $keep_baseline $keep_sort $autovec
            avx512f avx2 sse42 sse41 sse2
            vsx3 vsx2
            asimddp asimd neon_vfpv4 neon
        */
        """,
        x86_gcc="avx512f avx2 sse42 sse41 sse2",
        x86_icc="avx512f avx2 sse42 sse41 sse2",
        x86_iccw="avx512f avx2 sse42 sse41 sse2",
        # msvc doesn't support sse41 sse42 autovectorization
        x86_msvc="avx512f avx2 sse2" if self.march() == 'x86' else "avx512f avx2",
        ppc64="vsx3 vsx2",
        armhf="asimddp asimd neon_vfpv4 neon",
        aarch64="asimddp asimd"
    )
    for policy in ("$maxopt", "$autovec"):
        # 'maxopt' and autovec set the max acceptable optimization flags
        self.expect_target_flags(
            "/*@targets baseline %s */" % policy,
            gcc={"baseline":".*-O3.*"}, icc={"baseline":".*-O3.*"},
            iccw={"baseline":".*/O3.*"}, msvc={"baseline":".*/O2.*"},
            unknown={"baseline":".*"}
        )
    # 'werror', force compilers to treat warnings as errors
    self.expect_target_flags(
        "/*@targets baseline $werror */",
        gcc={"baseline":".*-Werror.*"}, icc={"baseline":".*-Werror.*"},
        iccw={"baseline":".*/Werror.*"}, msvc={"baseline":".*/WX.*"},
        unknown={"baseline":".*"}
    )
def test_targets_groups(self):
    """#group_name tokens expand (case-insensitively) into their
    configured feature lists."""
    self.expect_targets(
        """
        /*@targets $keep_baseline baseline #test_group */
        """,
        groups=dict(
            test_group=("""
            $keep_baseline
            asimddp sse2 vsx2 avx2 vsx3
            avx512f asimdhp
            """)
        ),
        x86="avx512f avx2 sse2 baseline",
        ppc64="vsx3 vsx2 baseline",
        armhf="asimddp asimdhp baseline"
    )
    # test skip duplicating and sorting
    self.expect_targets(
        """
        /*@targets
         * sse42 avx avx512f
         * #test_group_1
         * vsx2
         * #test_group_2
         * asimddp asimdfhm
        */
        """,
        groups=dict(
            test_group_1=("""
            VSX2 vsx3 asimd avx2 SSE41
            """),
            test_group_2=("""
            vsx2 vsx3 asImd aVx2 sse41
            """)
        ),
        x86="avx512f avx2 avx sse42 sse41",
        ppc64="vsx3 vsx2",
        # vsx2 part of the default baseline of ppc64le, option ("min")
        ppc64le="vsx3",
        armhf="asimdfhm asimddp asimd",
        # asimd part of the default baseline of aarch64, option ("min")
        aarch64="asimdfhm asimddp"
    )

def test_targets_multi(self):
    """Parenthesized groups produce single multi-target objects."""
    self.expect_targets(
        """
        /*@targets
            (avx512_clx avx512_cnl) (asimdhp asimddp)
        */
        """,
        x86=r"\(avx512_clx avx512_cnl\)",
        armhf=r"\(asimdhp asimddp\)",
    )
    # test skipping implied features and auto-sort multi-targets before
    # the highest interest
    self.expect_targets(
        """
        /*@targets
            f16c (sse41 avx sse42) (sse3 avx2 avx512f)
            vsx2 (vsx vsx3 vsx2)
            (neon neon_vfpv4 asimd asimdhp asimddp)
        */
        """,
        x86="avx512f f16c avx",
        ppc64="vsx3 vsx2",
        # vsx2 part of the default baseline of ppc64le, option ("min")
        ppc64le="vsx3",
        armhf=r"\(asimdhp asimddp\)",
    )
    # test skipping implied features and keeping sort
    self.expect_targets(
        """
        /*@targets $keep_sort
            (sse41 avx sse42) (sse3 avx2 avx512f)
            (vsx vsx3 vsx2)
            (asimddp neon neon_vfpv4 asimd asimdhp)
            (vx vxe vxe2)
        */
        """,
        x86="avx avx512f",
        ppc64="vsx3",
        armhf=r"\(asimdhp asimddp\)",
        s390x="vxe2"
    )
    # test compiler variety and avoiding duplicating
    self.expect_targets(
        """
        /*@targets $keep_sort
            fma3 avx2 (fma3 avx2) (avx2 fma3) avx2 fma3
        */
        """,
        x86_gcc=r"fma3 avx2 \(fma3 avx2\)",
        # fma3 and avx2 implies each other in icc/iccw/msvc
        x86_icc="avx2", x86_iccw="avx2",
        x86_msvc="avx2"
    )
def new_test(arch, cc):
    """Return the source of a concrete test class for (arch, cc).

    Standalone runs derive from unittest.TestCase; under pytest the
    plain _Test_CCompilerOpt base is enough.
    """
    if is_standalone:
        template = textwrap.dedent("""\
        class TestCCompilerOpt_{class_name}(_Test_CCompilerOpt, unittest.TestCase):
            arch = '{arch}'
            cc = '{cc}'
            def __init__(self, methodName="runTest"):
                unittest.TestCase.__init__(self, methodName)
                self.setup_class()
        """)
    else:
        template = textwrap.dedent("""\
        class TestCCompilerOpt_{class_name}(_Test_CCompilerOpt):
            arch = '{arch}'
            cc = '{cc}'
        """)
    return template.format(class_name=arch + '_' + cc, arch=arch, cc=cc)
# Standalone quick probe: print the baseline names/flags for a canned
# "x86_icc" configuration, then hand control to unittest.  (The '1 and'
# is a leftover manual on/off switch for this debug path.)
if 1 and is_standalone:
    FakeCCompilerOpt.fake_info = "x86_icc"
    cco = FakeCCompilerOpt(None, cpu_baseline="avx2")
    print(' '.join(cco.cpu_baseline_names()))
    print(cco.cpu_baseline_flags())
    unittest.main()
    sys.exit()
# Materialize one concrete test class per (arch, compiler) combination.
for arch, compilers in arch_compilers.items():
    for cc in compilers:
        exec(new_test(arch, cc))

# Direct execution falls back to plain unittest discovery.
if is_standalone:
    unittest.main()
.\numpy\numpy\distutils\tests\test_ccompiler_opt_conf.py
import unittest
from os import sys, path
is_standalone = __name__ == '__main__' and __package__ is None
if is_standalone:
    # Direct execution: import CCompilerOpt straight from the source tree.
    sys.path.append(path.abspath(path.join(path.dirname(__file__), "..")))
    from ccompiler_opt import CCompilerOpt
else:
    from numpy.distutils.ccompiler_opt import CCompilerOpt
# Architectures and compilers whose feature tables get sanity-checked.
# NOTE(review): 'narch' looks like a typo for 'noarch' (compare the
# matching table in test_ccompiler_opt.py); the key only labels the fake
# platform so behavior is unaffected — confirm before renaming.
arch_compilers = dict(
    x86 = ("gcc", "clang", "icc", "iccw", "msvc"),
    x64 = ("gcc", "clang", "icc", "iccw", "msvc"),
    ppc64 = ("gcc", "clang"),
    ppc64le = ("gcc", "clang"),
    armhf = ("gcc", "clang"),
    aarch64 = ("gcc", "clang"),
    narch = ("gcc",)
)
class FakeCCompilerOpt(CCompilerOpt):
    """CCompilerOpt stub: no real compiler, canned (arch, cc, args) info."""
    # Overridden per test iteration with the pair under validation.
    fake_info = ("arch", "compiler", "extra_args")

    def __init__(self, *args, **kwargs):
        CCompilerOpt.__init__(self, None, **kwargs)

    def dist_compile(self, sources, flags, **kwargs):
        # Pretend every source compiled successfully.
        return sources

    def dist_info(self):
        return FakeCCompilerOpt.fake_info

    @staticmethod
    def dist_log(*args, stderr=False):
        # Silence all logging during the checks.
        pass
class _TestConfFeatures(FakeCCompilerOpt):
    """A hook to check the sanity of configured features
    - before it called by the abstract class '_Feature'
    """

    def conf_features_partial(self):
        # Validate both the full table and the partial view that the
        # base class will actually consume.
        conf_all = self.conf_features
        for feature_name, feature in conf_all.items():
            self.test_feature(
                "attribute conf_features",
                conf_all, feature_name, feature
            )
        conf_partial = FakeCCompilerOpt.conf_features_partial(self)
        for feature_name, feature in conf_partial.items():
            self.test_feature(
                "conf_features_partial()",
                conf_partial, feature_name, feature
            )
        return conf_partial

    def test_feature(self, log, search_in, feature_name, feature_dict):
        """Run every per-feature sanity check with a contextual error prefix."""
        error_msg = (
            "during validate '{}' within feature '{}', "
            "march '{}' and compiler '{}'\n>> "
        ).format(log, feature_name, self.cc_march, self.cc_name)
        if not feature_name.isupper():
            raise AssertionError(error_msg + "feature name must be in uppercase")
        for option, val in feature_dict.items():
            self.test_option_types(error_msg, option, val)
            self.test_duplicates(error_msg, option, val)
        self.test_implies(error_msg, search_in, feature_name, feature_dict)
        self.test_group(error_msg, search_in, feature_name, feature_dict)
        self.test_extra_checks(error_msg, search_in, feature_name, feature_dict)

    def test_option_types(self, error_msg, option, val):
        """Each option name must be known and carry a value of the right type."""
        for tp, available in (
            ((str, list), (
                "implies", "headers", "flags", "group", "detect", "extra_checks"
            )),
            ((str,), ("disable",)),
            ((int,), ("interest",)),
            ((bool,), ("implies_detect",)),
            ((bool, type(None)), ("autovec",)),
        ) :
            found_it = option in available
            if not found_it:
                continue
            if not isinstance(val, tp):
                error_tp = [t.__name__ for t in (*tp,)]
                error_tp = ' or '.join(error_tp)
                raise AssertionError(error_msg +
                    "expected '%s' type for option '%s' not '%s'" % (
                    error_tp, option, type(val).__name__
                ))
            break
        if not found_it:
            raise AssertionError(error_msg + "invalid option name '%s'" % option)

    def test_duplicates(self, error_msg, option, val):
        """List-valued options must not contain duplicate entries."""
        if option not in (
            "implies", "headers", "flags", "group", "detect", "extra_checks"
        ) : return
        if isinstance(val, str):
            val = val.split()
        if len(val) != len(set(val)):
            raise AssertionError(error_msg + "duplicated values in option '%s'" % option)

    def test_implies(self, error_msg, search_in, feature_name, feature_dict):
        """'implies' entries must reference existing, enabled features."""
        # Fixed: the option is named 'disable' (see test_option_types);
        # the original tested 'disabled', a key that never exists, so
        # disabled features were still being validated here.
        if feature_dict.get("disable") is not None:
            return
        implies = feature_dict.get("implies", "")
        if not implies:
            return
        if isinstance(implies, str):
            implies = implies.split()
        if feature_name in implies:
            raise AssertionError(error_msg + "feature implies itself")
        for impl in implies:
            impl_dict = search_in.get(impl)
            if impl_dict is not None:
                if "disable" in impl_dict:
                    raise AssertionError(error_msg + "implies disabled feature '%s'" % impl)
                continue
            raise AssertionError(error_msg + "implies non-exist feature '%s'" % impl)

    def test_group(self, error_msg, search_in, feature_name, feature_dict):
        """'group' names must not collide with enabled feature names."""
        # Fixed: 'disable' (was 'disabled'); see test_implies.
        if feature_dict.get("disable") is not None:
            return
        group = feature_dict.get("group", "")
        if not group:
            return
        if isinstance(group, str):
            group = group.split()
        for f in group:
            impl_dict = search_in.get(f)
            if not impl_dict or "disable" in impl_dict:
                continue
            raise AssertionError(error_msg +
                "in option 'group', '%s' already exists as a feature name" % f
            )

    def test_extra_checks(self, error_msg, search_in, feature_name, feature_dict):
        """'extra_checks' names must not collide with enabled feature names."""
        # Fixed: 'disable' (was 'disabled'); see test_implies.
        if feature_dict.get("disable") is not None:
            return
        extra_checks = feature_dict.get("extra_checks", "")
        if not extra_checks:
            return
        if isinstance(extra_checks, str):
            extra_checks = extra_checks.split()
        for f in extra_checks:
            impl_dict = search_in.get(f)
            if not impl_dict or "disable" in impl_dict:
                continue
            raise AssertionError(error_msg +
                "in option 'extra_checks', extra test case '%s' already exists as a feature name" % f
            )
class TestConfFeatures(unittest.TestCase):
    """Drive _TestConfFeatures across every configured (arch, cc) pair."""

    def __init__(self, methodName="runTest"):
        unittest.TestCase.__init__(self, methodName)
        self._setup()

    def _setup(self):
        # Disable the on-disk cache so each instantiation re-validates.
        FakeCCompilerOpt.conf_nocache = True

    def test_features(self):
        # Instantiating _TestConfFeatures triggers the validation hooks
        # via its conf_features_partial() override.
        pairs = [(a, c) for a, ccs in arch_compilers.items() for c in ccs]
        for arch, cc in pairs:
            FakeCCompilerOpt.fake_info = (arch, cc, "")
            _TestConfFeatures()
# Direct execution falls back to plain unittest discovery.
if is_standalone:
    unittest.main()