Updated script that can be controlled by the Node.js web app

This commit is contained in:
mac OS
2024-11-25 12:24:18 +07:00
parent c440eda1f4
commit 8b0ab2bd3a
8662 changed files with 1803808 additions and 34 deletions

View File

@ -0,0 +1,22 @@
"""Common test support for all numpy test scripts.
This single module should provide all the common functionality for numpy tests
in a single location, so that test scripts can just import it and work right
away.
"""
from unittest import TestCase
from . import _private
from ._private.utils import *
from ._private.utils import (_assert_valid_refcount, _gen_alignment_data)
from ._private import extbuild
from . import overrides
__all__ = (
_private.utils.__all__ + ['TestCase', 'overrides']
)
from numpy._pytesttester import PytestTester
test = PytestTester(__name__)
del PytestTester

View File

@ -0,0 +1,49 @@
from numpy._pytesttester import PytestTester
from unittest import (
TestCase as TestCase,
)
from numpy.testing._private.utils import (
assert_equal as assert_equal,
assert_almost_equal as assert_almost_equal,
assert_approx_equal as assert_approx_equal,
assert_array_equal as assert_array_equal,
assert_array_less as assert_array_less,
assert_string_equal as assert_string_equal,
assert_array_almost_equal as assert_array_almost_equal,
assert_raises as assert_raises,
build_err_msg as build_err_msg,
decorate_methods as decorate_methods,
jiffies as jiffies,
memusage as memusage,
print_assert_equal as print_assert_equal,
rundocs as rundocs,
runstring as runstring,
verbose as verbose,
measure as measure,
assert_ as assert_,
assert_array_almost_equal_nulp as assert_array_almost_equal_nulp,
assert_raises_regex as assert_raises_regex,
assert_array_max_ulp as assert_array_max_ulp,
assert_warns as assert_warns,
assert_no_warnings as assert_no_warnings,
assert_allclose as assert_allclose,
IgnoreException as IgnoreException,
clear_and_catch_warnings as clear_and_catch_warnings,
SkipTest as SkipTest,
KnownFailureException as KnownFailureException,
temppath as temppath,
tempdir as tempdir,
IS_PYPY as IS_PYPY,
IS_PYSTON as IS_PYSTON,
HAS_REFCOUNT as HAS_REFCOUNT,
suppress_warnings as suppress_warnings,
assert_array_compare as assert_array_compare,
assert_no_gc_cycles as assert_no_gc_cycles,
break_cycles as break_cycles,
HAS_LAPACK64 as HAS_LAPACK64,
)
__all__: list[str]  # populated at runtime; the stub only declares the type
test: PytestTester  # entry point for running the sub-package's test suite

View File

@ -0,0 +1,252 @@
"""
Build a c-extension module on-the-fly in tests.
See build_and_import_extensions for usage hints
"""
import os
import pathlib
import subprocess
import sys
import sysconfig
import textwrap
__all__ = ['build_and_import_extension', 'compile_extension_module']
def build_and_import_extension(
        modname, functions, *, prologue="", build_dir=None,
        include_dirs=None, more_init=""):
    """
    Build and imports a c-extension module `modname` from a list of function
    fragments `functions`.

    Parameters
    ----------
    functions : list of fragments
        Each fragment is a sequence of func_name, calling convention, snippet.
    prologue : string
        Code to precede the rest, usually extra ``#include`` or ``#define``
        macros.
    build_dir : pathlib.Path
        Where to build the module, usually a temporary directory
    include_dirs : list
        Extra directories to find include files when compiling
    more_init : string
        Code to appear in the module PyMODINIT_FUNC

    Returns
    -------
    out: module
        The module will have been loaded and is ready for use

    Examples
    --------
    >>> functions = [("test_bytes", "METH_O", \"\"\"
        if ( !PyBytesCheck(args)) {
            Py_RETURN_FALSE;
        }
        Py_RETURN_TRUE;
    \"\"\")]
    >>> mod = build_and_import_extension("testme", functions)
    >>> assert not mod.test_bytes('abc')
    >>> assert mod.test_bytes(b'abc')
    """
    # Avoid a mutable default argument: a shared list default would leak
    # state across calls if any callee ever mutated it.
    if include_dirs is None:
        include_dirs = []
    body = prologue + _make_methods(functions, modname)
    init = """
PyObject *mod = PyModule_Create(&moduledef);
#ifdef Py_GIL_DISABLED
PyUnstable_Module_SetGIL(mod, Py_MOD_GIL_NOT_USED);
#endif
"""
    if not build_dir:
        build_dir = pathlib.Path('.')
    if more_init:
        init += """#define INITERROR return NULL
"""
        init += more_init
    init += "\nreturn mod;"
    source_string = _make_source(modname, init, body)
    try:
        mod_so = compile_extension_module(
            modname, build_dir, include_dirs, source_string)
    except Exception as e:
        # shorten the exception chain
        raise RuntimeError(f"could not compile in {build_dir}:") from e
    # Imported lazily: only needed when compilation succeeded.
    import importlib.util
    spec = importlib.util.spec_from_file_location(modname, mod_so)
    foo = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(foo)
    return foo
def compile_extension_module(
        name, builddir, include_dirs,
        source_string, libraries=None, library_dirs=None):
    """
    Build an extension module and return the filename of the resulting
    native code file.

    Parameters
    ----------
    name : string
        name of the module, possibly including dots if it is a module inside a
        package.
    builddir : pathlib.Path
        Where to build the module, usually a temporary directory
    include_dirs : list
        Extra directories to find include files when compiling
    libraries : list
        Libraries to link into the extension module
    library_dirs: list
        Where to find the libraries, ``-L`` passed to the linker
    """
    modname = name.split('.')[-1]
    dirname = builddir / name
    dirname.mkdir(exist_ok=True)
    cfile = _convert_str_to_file(source_string, dirname)
    include_dirs = list(include_dirs) + [sysconfig.get_config_var('INCLUDEPY')]
    # Bug fix: the original hardcoded `libraries=[]` and `library_dirs=[]`
    # here, silently discarding the caller's arguments.
    return _c_compile(
        cfile, outputfilename=dirname / modname,
        include_dirs=include_dirs,
        libraries=list(libraries) if libraries else [],
        library_dirs=list(library_dirs) if library_dirs else [],
    )
def _convert_str_to_file(source, dirname):
"""Helper function to create a file ``source.c`` in `dirname` that contains
the string in `source`. Returns the file name
"""
filename = dirname / 'source.c'
with filename.open('w') as f:
f.write(str(source))
return filename
def _make_methods(functions, modname):
""" Turns the name, signature, code in functions into complete functions
and lists them in a methods_table. Then turns the methods_table into a
``PyMethodDef`` structure and returns the resulting code fragment ready
for compilation
"""
methods_table = []
codes = []
for funcname, flags, code in functions:
cfuncname = "%s_%s" % (modname, funcname)
if 'METH_KEYWORDS' in flags:
signature = '(PyObject *self, PyObject *args, PyObject *kwargs)'
else:
signature = '(PyObject *self, PyObject *args)'
methods_table.append(
"{\"%s\", (PyCFunction)%s, %s}," % (funcname, cfuncname, flags))
func_code = """
static PyObject* {cfuncname}{signature}
{{
{code}
}}
""".format(cfuncname=cfuncname, signature=signature, code=code)
codes.append(func_code)
body = "\n".join(codes) + """
static PyMethodDef methods[] = {
%(methods)s
{ NULL }
};
static struct PyModuleDef moduledef = {
PyModuleDef_HEAD_INIT,
"%(modname)s", /* m_name */
NULL, /* m_doc */
-1, /* m_size */
methods, /* m_methods */
};
""" % dict(methods='\n'.join(methods_table), modname=modname)
return body
def _make_source(name, init, body):
""" Combines the code fragments into source code ready to be compiled
"""
code = """
#include <Python.h>
%(body)s
PyMODINIT_FUNC
PyInit_%(name)s(void) {
%(init)s
}
""" % dict(
name=name, init=init, body=body,
)
return code
def _c_compile(cfile, outputfilename, include_dirs=None, libraries=None,
               library_dirs=None):
    """Compile `cfile` into a shared module at `outputfilename`.

    Chooses per-platform compiler/linker flags, then delegates to ``build``
    (meson).  Returns the path of the built module (with the platform's
    extension-module suffix appended).
    """
    # Copy the list arguments: the darwin branch below appends to them, and
    # the original used mutable `[]` defaults and mutated the caller's lists
    # in place, leaking state across calls.
    include_dirs = list(include_dirs) if include_dirs else []
    libraries = list(libraries) if libraries else []
    library_dirs = list(library_dirs) if library_dirs else []
    if sys.platform == 'win32':
        # /we4013: treat implicit function declarations as errors.
        compile_extra = ["/we4013"]
        link_extra = ["/LIBPATH:" + os.path.join(sys.base_prefix, 'libs')]
    elif sys.platform.startswith('linux'):
        compile_extra = [
            "-O0", "-g", "-Werror=implicit-function-declaration", "-fPIC"]
        link_extra = []
    else:
        compile_extra = link_extra = []
    if sys.platform == 'win32':
        link_extra = link_extra + ['/DEBUG']  # generate .pdb file
    if sys.platform == 'darwin':
        # support Fink & Darwinports
        for s in ('/sw/', '/opt/local/'):
            if (s + 'include' not in include_dirs
                    and os.path.exists(s + 'include')):
                include_dirs.append(s + 'include')
            if s + 'lib' not in library_dirs and os.path.exists(s + 'lib'):
                library_dirs.append(s + 'lib')
    outputfilename = outputfilename.with_suffix(get_so_suffix())
    build(
        cfile, outputfilename,
        compile_extra, link_extra,
        include_dirs, libraries, library_dirs)
    return outputfilename
def build(cfile, outputfilename, compile_extra, link_extra,
          include_dirs, libraries, library_dirs):
    "use meson to build"
    # Generate a minimal meson project next to the C source and build it in
    # a ``build`` subdirectory.
    build_dir = cfile.parent / "build"
    os.makedirs(build_dir, exist_ok=True)
    so_name = outputfilename.parts[-1]
    with open(cfile.parent / "meson.build", "wt") as fid:
        includes = ['-I' + d for d in include_dirs]
        link_dirs = ['-L' + d for d in library_dirs]
        # name_suffix 'dummy' gives the artifact a predictable name that is
        # renamed to the real module name below.
        fid.write(textwrap.dedent(f"""\
        project('foo', 'c')
        shared_module('{so_name}', '{cfile.parts[-1]}',
            c_args: {includes} + {compile_extra},
            link_args: {link_dirs} + {link_extra},
            link_with: {libraries},
            name_prefix: '',
            name_suffix: 'dummy',
        )
        """))
    if sys.platform == "win32":
        subprocess.check_call(["meson", "setup",
                               "--buildtype=release",
                               "--vsenv", ".."],
                              cwd=build_dir,
                              )
    else:
        # NOTE(review): "--vsenv" is a Visual-Studio-environment option, yet
        # it is also passed on non-Windows here — confirm this is intended.
        subprocess.check_call(["meson", "setup", "--vsenv", ".."],
                              cwd=build_dir
                              )
    subprocess.check_call(["meson", "compile"], cwd=build_dir)
    # Move the '.dummy'-suffixed artifact up next to the source, under the
    # real module name.
    os.rename(str(build_dir / so_name) + ".dummy", cfile.parent / so_name)
def get_so_suffix():
    """Return the platform's extension-module filename suffix (e.g. ``.so``).

    Raises
    ------
    RuntimeError
        If ``sysconfig`` reports no ``EXT_SUFFIX``.
    """
    ret = sysconfig.get_config_var('EXT_SUFFIX')
    if not ret:
        # An `assert` here would be silently stripped under `python -O`;
        # raise explicitly instead.
        raise RuntimeError("sysconfig reports no EXT_SUFFIX config var")
    return ret

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,413 @@
import os
import sys
import ast
import types
import warnings
import unittest
import contextlib
from re import Pattern
from collections.abc import Callable, Iterable, Sequence
from typing import (
Literal as L,
Any,
AnyStr,
ClassVar,
NoReturn,
overload,
type_check_only,
TypeVar,
Final,
SupportsIndex,
ParamSpec
)
import numpy as np
from numpy import number, object_, _FloatValue
from numpy._typing import (
NDArray,
ArrayLike,
DTypeLike,
_ArrayLikeNumber_co,
_ArrayLikeObject_co,
_ArrayLikeTD64_co,
_ArrayLikeDT64_co,
)
from unittest.case import (
SkipTest as SkipTest,
)
_P = ParamSpec("_P")
_T = TypeVar("_T")
_ET = TypeVar("_ET", bound=BaseException)
_FT = TypeVar("_FT", bound=Callable[..., Any])
# Must return a bool or an ndarray/generic type
# that is supported by `np.logical_and.reduce`
_ComparisonFunc = Callable[
[NDArray[Any], NDArray[Any]],
(
bool
| np.bool
| number[Any]
| NDArray[np.bool | number[Any] | object_]
)
]
__all__: list[str]
# Exception types re-exported by numpy.testing; runtime behavior lives in
# numpy.testing._private.utils.
class KnownFailureException(Exception): ...
class IgnoreException(Exception): ...
class clear_and_catch_warnings(warnings.catch_warnings):
    # Modules whose warning registries are handled for every instance.
    class_modules: ClassVar[tuple[types.ModuleType, ...]]
    # Modules configured for this particular instance.
    modules: set[types.ModuleType]
    # Overloads mirror `warnings.catch_warnings`: the `__enter__` return type
    # depends on the literal value of `record`.
    @overload
    def __new__(
        cls,
        record: L[False] = ...,
        modules: Iterable[types.ModuleType] = ...,
    ) -> _clear_and_catch_warnings_without_records: ...
    @overload
    def __new__(
        cls,
        record: L[True],
        modules: Iterable[types.ModuleType] = ...,
    ) -> _clear_and_catch_warnings_with_records: ...
    @overload
    def __new__(
        cls,
        record: bool,
        modules: Iterable[types.ModuleType] = ...,
    ) -> clear_and_catch_warnings: ...
    def __enter__(self) -> None | list[warnings.WarningMessage]: ...
    def __exit__(
        self,
        __exc_type: None | type[BaseException] = ...,
        __exc_val: None | BaseException = ...,
        __exc_tb: None | types.TracebackType = ...,
    ) -> None: ...
# Type-check only `clear_and_catch_warnings` subclasses for both values of the
# `record` parameter. Copied from the stdlib `warnings` stubs.
@type_check_only
class _clear_and_catch_warnings_with_records(clear_and_catch_warnings):
    # `record=True`: entering yields the list of captured warnings.
    def __enter__(self) -> list[warnings.WarningMessage]: ...
@type_check_only
class _clear_and_catch_warnings_without_records(clear_and_catch_warnings):
    # `record=False`: entering yields nothing.
    def __enter__(self) -> None: ...
class suppress_warnings:
    # Warnings recorded while the context is active.
    log: list[warnings.WarningMessage]
    def __init__(
        self,
        forwarding_rule: L["always", "module", "once", "location"] = ...,
    ) -> None: ...
    # Suppress matching warnings without recording them.
    def filter(
        self,
        category: type[Warning] = ...,
        message: str = ...,
        module: None | types.ModuleType = ...,
    ) -> None: ...
    # Suppress matching warnings; returns the list they are recorded into.
    def record(
        self,
        category: type[Warning] = ...,
        message: str = ...,
        module: None | types.ModuleType = ...,
    ) -> list[warnings.WarningMessage]: ...
    def __enter__(self: _T) -> _T: ...
    def __exit__(
        self,
        __exc_type: None | type[BaseException] = ...,
        __exc_val: None | BaseException = ...,
        __exc_tb: None | types.TracebackType = ...,
    ) -> None: ...
    # Also usable as a decorator.
    def __call__(self, func: _FT) -> _FT: ...
# Module-level flags and knobs; values are assigned at runtime.
verbose: int
IS_PYPY: Final[bool]
IS_PYSTON: Final[bool]
HAS_REFCOUNT: Final[bool]
HAS_LAPACK64: Final[bool]

def assert_(val: object, msg: str | Callable[[], str] = ...) -> None: ...

# Contrary to runtime we can't do `os.name` checks while type checking,
# only `sys.platform` checks
if sys.platform == "win32" or sys.platform == "cygwin":
    def memusage(processName: str = ..., instance: int = ...) -> int: ...
elif sys.platform == "linux":
    def memusage(_proc_pid_stat: str | bytes | os.PathLike[Any] = ...) -> None | int: ...
else:
    def memusage() -> NoReturn: ...

if sys.platform == "linux":
    def jiffies(
        _proc_pid_stat: str | bytes | os.PathLike[Any] = ...,
        _load_time: list[float] = ...,
    ) -> int: ...
else:
    def jiffies(_load_time: list[float] = ...) -> int: ...
# Stubs for the assertion helpers; implementations live in
# numpy.testing._private.utils.
def build_err_msg(
    arrays: Iterable[object],
    err_msg: str,
    header: str = ...,
    verbose: bool = ...,
    names: Sequence[str] = ...,
    precision: None | SupportsIndex = ...,
) -> str: ...

def assert_equal(
    actual: object,
    desired: object,
    err_msg: object = ...,
    verbose: bool = ...,
    *,
    strict: bool = ...
) -> None: ...

def print_assert_equal(
    test_string: str,
    actual: object,
    desired: object,
) -> None: ...

def assert_almost_equal(
    actual: _ArrayLikeNumber_co | _ArrayLikeObject_co,
    desired: _ArrayLikeNumber_co | _ArrayLikeObject_co,
    decimal: int = ...,
    err_msg: object = ...,
    verbose: bool = ...,
) -> None: ...

# Anything that can be coerced into `builtins.float`
def assert_approx_equal(
    actual: _FloatValue,
    desired: _FloatValue,
    significant: int = ...,
    err_msg: object = ...,
    verbose: bool = ...,
) -> None: ...

def assert_array_compare(
    comparison: _ComparisonFunc,
    x: ArrayLike,
    y: ArrayLike,
    err_msg: object = ...,
    verbose: bool = ...,
    header: str = ...,
    precision: SupportsIndex = ...,
    equal_nan: bool = ...,
    equal_inf: bool = ...,
    *,
    strict: bool = ...
) -> None: ...

def assert_array_equal(
    x: ArrayLike,
    y: ArrayLike,
    /,
    err_msg: object = ...,
    verbose: bool = ...,
    *,
    strict: bool = ...
) -> None: ...

def assert_array_almost_equal(
    x: _ArrayLikeNumber_co | _ArrayLikeObject_co,
    y: _ArrayLikeNumber_co | _ArrayLikeObject_co,
    /,
    decimal: float = ...,
    err_msg: object = ...,
    verbose: bool = ...,
) -> None: ...
# Overloads cover numeric/object, timedelta64 and datetime64 array-likes.
@overload
def assert_array_less(
    x: _ArrayLikeNumber_co | _ArrayLikeObject_co,
    y: _ArrayLikeNumber_co | _ArrayLikeObject_co,
    err_msg: object = ...,
    verbose: bool = ...,
    *,
    strict: bool = ...
) -> None: ...
@overload
def assert_array_less(
    x: _ArrayLikeTD64_co,
    y: _ArrayLikeTD64_co,
    err_msg: object = ...,
    verbose: bool = ...,
    *,
    strict: bool = ...
) -> None: ...
@overload
def assert_array_less(
    x: _ArrayLikeDT64_co,
    y: _ArrayLikeDT64_co,
    err_msg: object = ...,
    verbose: bool = ...,
    *,
    strict: bool = ...
) -> None: ...

def runstring(
    astr: str | bytes | types.CodeType,
    dict: None | dict[str, Any],
) -> Any: ...

def assert_string_equal(actual: str, desired: str) -> None: ...

def rundocs(
    filename: None | str | os.PathLike[str] = ...,
    raise_on_error: bool = ...,
) -> None: ...

# Decorator flavour of assert_raises.
def raises(*args: type[BaseException]) -> Callable[[_FT], _FT]: ...
# With a callable argument the call is executed and checked; without one a
# unittest-style context manager is returned.
@overload
def assert_raises(  # type: ignore
    expected_exception: type[BaseException] | tuple[type[BaseException], ...],
    callable: Callable[_P, Any],
    /,
    *args: _P.args,
    **kwargs: _P.kwargs,
) -> None: ...
@overload
def assert_raises(
    expected_exception: type[_ET] | tuple[type[_ET], ...],
    *,
    msg: None | str = ...,
) -> unittest.case._AssertRaisesContext[_ET]: ...

@overload
def assert_raises_regex(
    expected_exception: type[BaseException] | tuple[type[BaseException], ...],
    expected_regex: str | bytes | Pattern[Any],
    callable: Callable[_P, Any],
    /,
    *args: _P.args,
    **kwargs: _P.kwargs,
) -> None: ...
@overload
def assert_raises_regex(
    expected_exception: type[_ET] | tuple[type[_ET], ...],
    expected_regex: str | bytes | Pattern[Any],
    *,
    msg: None | str = ...,
) -> unittest.case._AssertRaisesContext[_ET]: ...

def decorate_methods(
    cls: type[Any],
    decorator: Callable[[Callable[..., Any]], Any],
    testmatch: None | str | bytes | Pattern[Any] = ...,
) -> None: ...

def measure(
    code_str: str | bytes | ast.mod | ast.AST,
    times: int = ...,
    label: None | str = ...,
) -> float: ...
# Overloads cover numeric/object and timedelta64 array-likes.
@overload
def assert_allclose(
    actual: _ArrayLikeNumber_co | _ArrayLikeObject_co,
    desired: _ArrayLikeNumber_co | _ArrayLikeObject_co,
    rtol: float = ...,
    atol: float = ...,
    equal_nan: bool = ...,
    err_msg: object = ...,
    verbose: bool = ...,
    *,
    strict: bool = ...
) -> None: ...
@overload
def assert_allclose(
    actual: _ArrayLikeTD64_co,
    desired: _ArrayLikeTD64_co,
    rtol: float = ...,
    atol: float = ...,
    equal_nan: bool = ...,
    err_msg: object = ...,
    verbose: bool = ...,
    *,
    strict: bool = ...
) -> None: ...

def assert_array_almost_equal_nulp(
    x: _ArrayLikeNumber_co,
    y: _ArrayLikeNumber_co,
    nulp: float = ...,
) -> None: ...

def assert_array_max_ulp(
    a: _ArrayLikeNumber_co,
    b: _ArrayLikeNumber_co,
    maxulp: float = ...,
    dtype: DTypeLike = ...,
) -> NDArray[Any]: ...
# For each helper below: the zero-/one-argument overload returns a context
# manager; the callable overload invokes the callable and returns its result.
@overload
def assert_warns(
    warning_class: type[Warning],
) -> contextlib._GeneratorContextManager[None]: ...
@overload
def assert_warns(
    warning_class: type[Warning],
    func: Callable[_P, _T],
    /,
    *args: _P.args,
    **kwargs: _P.kwargs,
) -> _T: ...

@overload
def assert_no_warnings() -> contextlib._GeneratorContextManager[None]: ...
@overload
def assert_no_warnings(
    func: Callable[_P, _T],
    /,
    *args: _P.args,
    **kwargs: _P.kwargs,
) -> _T: ...

# str vs AnyStr result depends on whether any path component is given.
@overload
def tempdir(
    suffix: None = ...,
    prefix: None = ...,
    dir: None = ...,
) -> contextlib._GeneratorContextManager[str]: ...
@overload
def tempdir(
    suffix: None | AnyStr = ...,
    prefix: None | AnyStr = ...,
    dir: None | AnyStr | os.PathLike[AnyStr] = ...,
) -> contextlib._GeneratorContextManager[AnyStr]: ...

@overload
def temppath(
    suffix: None = ...,
    prefix: None = ...,
    dir: None = ...,
    text: bool = ...,
) -> contextlib._GeneratorContextManager[str]: ...
@overload
def temppath(
    suffix: None | AnyStr = ...,
    prefix: None | AnyStr = ...,
    dir: None | AnyStr | os.PathLike[AnyStr] = ...,
    text: bool = ...,
) -> contextlib._GeneratorContextManager[AnyStr]: ...

@overload
def assert_no_gc_cycles() -> contextlib._GeneratorContextManager[None]: ...
@overload
def assert_no_gc_cycles(
    func: Callable[_P, Any],
    /,
    *args: _P.args,
    **kwargs: _P.kwargs,
) -> None: ...

def break_cycles() -> None: ...

View File

@ -0,0 +1,83 @@
"""Tools for testing implementations of __array_function__ and ufunc overrides
"""
from numpy._core.overrides import ARRAY_FUNCTIONS as _array_functions
from numpy import ufunc as _ufunc
import numpy._core.umath as _umath
def get_overridable_numpy_ufuncs():
"""List all numpy ufuncs overridable via `__array_ufunc__`
Parameters
----------
None
Returns
-------
set
A set containing all overridable ufuncs in the public numpy API.
"""
ufuncs = {obj for obj in _umath.__dict__.values()
if isinstance(obj, _ufunc)}
return ufuncs
def allows_array_ufunc_override(func):
    """Determine if a function can be overridden via `__array_ufunc__`

    Parameters
    ----------
    func : callable
        Function that may be overridable via `__array_ufunc__`

    Returns
    -------
    bool
        `True` if `func` is overridable via `__array_ufunc__` and
        `False` otherwise.

    Notes
    -----
    This function is equivalent to ``isinstance(func, np.ufunc)`` and
    will work correctly for ufuncs defined outside of Numpy.
    """
    # Bug fix: this module never imports `numpy as np`, so the original
    # `isinstance(func, np.ufunc)` raised NameError at call time.  Use the
    # `_ufunc` alias imported at the top of the module instead.
    return isinstance(func, _ufunc)
def get_overridable_numpy_array_functions():
"""List all numpy functions overridable via `__array_function__`
Parameters
----------
None
Returns
-------
set
A set containing all functions in the public numpy API that are
overridable via `__array_function__`.
"""
# 'import numpy' doesn't import recfunctions, so make sure it's imported
# so ufuncs defined there show up in the ufunc listing
from numpy.lib import recfunctions
return _array_functions.copy()
def allows_array_function_override(func):
"""Determine if a Numpy function can be overridden via `__array_function__`
Parameters
----------
func : callable
Function that may be overridable via `__array_function__`
Returns
-------
bool
`True` if `func` is a function in the Numpy API that is
overridable via `__array_function__` and `False` otherwise.
"""
return func in _array_functions

View File

@ -0,0 +1,201 @@
#!/usr/bin/env python3
"""Prints type-coercion tables for the built-in NumPy types
"""
import numpy as np
from numpy._core.numerictypes import obj2sctype
from collections import namedtuple
# Generic object that can be added, but doesn't do anything else
class GenericObject:
    """Minimal object-dtype stand-in: addition (either side) returns self,
    and it supports nothing else."""

    # Placed in arrays under the object dtype.
    dtype = np.dtype('O')

    def __init__(self, v):
        self.v = v

    def __add__(self, other):
        return self

    def __radd__(self, other):
        return self
def print_cancast_table(ntypes):
    """Print the casting table for every pair of the given dtype characters.

    Cell symbols: ``#`` equivalent, ``=`` safe, ``~`` same-kind, ``.``
    unsafe, blank when no cast exists.
    """
    print('X', end=' ')
    for char in ntypes:
        print(char, end=' ')
    print()
    # Check the strictest casting level first and report the first that holds.
    levels = (("equiv", "#"), ("safe", "="), ("same_kind", "~"),
              ("unsafe", "."))
    for row in ntypes:
        print(row, end=' ')
        for col in ntypes:
            symbol = " "
            for casting, mark in levels:
                if np.can_cast(row, col, casting):
                    symbol = mark
                    break
            print(symbol, end=' ')
        print()
def print_coercion_table(ntypes, inputfirstvalue, inputsecondvalue, firstarray, use_promote_types=False):
    """Print the result dtype character of coercing every pair of typecodes.

    For each (row, col) pair the two values are combined with ``np.add``
    (or ``np.promote_types`` when `use_promote_types` is true) and the
    resulting dtype character is printed.  Exceptions are encoded as:
    ValueError '!', OverflowError '@', TypeError '#'.

    Parameters
    ----------
    ntypes : iterable of str
        Dtype characters forming the rows and columns; 'O' is replaced by
        the GenericObject helper class.
    inputfirstvalue, inputsecondvalue : object
        Values used to construct the row and column operands.
    firstarray : bool
        If true, wrap the row operand in a 1-element ndarray.
    use_promote_types : bool, optional
        Use ``np.promote_types`` instead of actually performing ``np.add``.
    """
    print('+', end=' ')
    for char in ntypes:
        print(char, end=' ')
    print()
    for row in ntypes:
        if row == 'O':
            rowtype = GenericObject
        else:
            rowtype = obj2sctype(row)

        print(row, end=' ')
        for col in ntypes:
            if col == 'O':
                coltype = GenericObject
            else:
                coltype = obj2sctype(col)
            try:
                if firstarray:
                    rowvalue = np.array([rowtype(inputfirstvalue)], dtype=rowtype)
                else:
                    rowvalue = rowtype(inputfirstvalue)
                colvalue = coltype(inputsecondvalue)
                if use_promote_types:
                    char = np.promote_types(rowvalue.dtype, colvalue.dtype).char
                else:
                    value = np.add(rowvalue, colvalue)
                    if isinstance(value, np.ndarray):
                        char = value.dtype.char
                    else:
                        # Scalar result: report the dtype of its Python type.
                        char = np.dtype(type(value)).char
            except ValueError:
                char = '!'
            except OverflowError:
                char = '@'
            except TypeError:
                char = '#'
            print(char, end=' ')
        print()
def print_new_cast_table(*, can_cast=True, legacy=False, flags=False):
    """Prints new casts, the values given are default "can-cast" values, not
    actual ones.
    """
    # Internal numpy test hook exposing the full cast-implementation table.
    from numpy._core._multiarray_tests import get_all_cast_information

    # Map the numeric casting level reported per cast to a table symbol.
    cast_table = {
        -1: " ",
        0: "#",  # No cast (classify as equivalent here)
        1: "#",  # equivalent casting
        2: "=",  # safe casting
        3: "~",  # same-kind casting
        4: ".",  # unsafe casting
    }
    # NOTE(review): the glyphs for the flag bitmask appear to have been lost
    # in transcription (all values are empty strings) — confirm against the
    # original source before relying on the flags table output.
    flags_table = {
        0: "", 7: "",
        1: "", 2: "", 4: "",
        3: "", 5: "",
        6: "",
    }

    cast_info = namedtuple("cast_info", ["can_cast", "legacy", "flags"])
    no_cast_info = cast_info(" ", " ", " ")

    casts = get_all_cast_information()
    table = {}
    dtypes = set()
    for cast in casts:
        dtypes.add(cast["from"])
        dtypes.add(cast["to"])
        if cast["from"] not in table:
            table[cast["from"]] = {}
        to_dict = table[cast["from"]]

        can_cast = cast_table[cast["casting"]]
        legacy = "L" if cast["legacy"] else "."
        # Fold the three boolean properties into one bitmask index.
        flags = 0
        if cast["requires_pyapi"]:
            flags |= 1
        if cast["supports_unaligned"]:
            flags |= 2
        if cast["no_floatingpoint_errors"]:
            flags |= 4

        flags = flags_table[flags]
        to_dict[cast["to"]] = cast_info(can_cast=can_cast, legacy=legacy, flags=flags)

    # The np.dtype(x.type) is a bit strange, because dtype classes do
    # not expose much yet.
    types = np.typecodes["All"]
    def sorter(x):
        # This is a bit weird hack, to get a table as close as possible to
        # the one printing all typecodes (but expecting user-dtypes).
        dtype = np.dtype(x.type)
        try:
            indx = types.index(dtype.char)
        except ValueError:
            # Unknown (user) dtypes sort after all built-in typecodes.
            indx = np.inf
        return (indx, dtype.char)

    dtypes = sorted(dtypes, key=sorter)

    def print_table(field="can_cast"):
        # Print one symbol per (from, to) pair for the selected field.
        print('X', end=' ')
        for dt in dtypes:
            print(np.dtype(dt.type).char, end=' ')
        print()
        for from_dt in dtypes:
            print(np.dtype(from_dt.type).char, end=' ')
            row = table.get(from_dt, {})
            for to_dt in dtypes:
                print(getattr(row.get(to_dt, no_cast_info), field), end=' ')
            print()

    if can_cast:
        # Print the actual table:
        print()
        print("Casting: # is equivalent, = is safe, ~ is same-kind, and . is unsafe")
        print()
        print_table("can_cast")

    if legacy:
        print()
        print("L denotes a legacy cast . a non-legacy one.")
        print()
        print_table("legacy")

    if flags:
        print()
        print(f"{flags_table[0]}: no flags, {flags_table[1]}: PyAPI, "
              f"{flags_table[2]}: supports unaligned, {flags_table[4]}: no-float-errors")
        print()
        print_table("flags")
if __name__ == '__main__':
    # Print every coercion/casting table for all built-in typecodes.
    print("can cast")
    print_cancast_table(np.typecodes['All'])
    print()
    print("In these tables, ValueError is '!', OverflowError is '@', TypeError is '#'")
    print()
    print("scalar + scalar")
    print_coercion_table(np.typecodes['All'], 0, 0, False)
    print()
    print("scalar + neg scalar")
    print_coercion_table(np.typecodes['All'], 0, -1, False)
    print()
    print("array + scalar")
    print_coercion_table(np.typecodes['All'], 0, 0, True)
    print()
    print("array + neg scalar")
    print_coercion_table(np.typecodes['All'], 0, -1, True)
    print()
    print("promote_types")
    # use_promote_types=True: show np.promote_types instead of np.add results.
    print_coercion_table(np.typecodes['All'], 0, 0, False, True)
    print("New casting type promotion:")
    print_new_cast_table(can_cast=True, legacy=True, flags=True)

File diff suppressed because it is too large Load Diff