#!/usr/bin/env python
import atexit
import base64
import doctest
import gc
import glob
import heapq
import locale
import math
import operator
import os
import re
import shutil
import subprocess
import sys
import tempfile
import time
import traceback
import unittest
import warnings
import zlib
from collections import defaultdict
from contextlib import contextmanager
try:
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
IS_CPYTHON = platform.python_implementation() == 'CPython'
IS_GRAAL = platform.python_implementation() == 'GraalVM'
except (ImportError, AttributeError):
IS_CPYTHON = True
IS_PYPY = False
IS_GRAAL = False
CAN_SYMLINK = sys.platform != 'win32' and hasattr(os, 'symlink')
from io import open as io_open
try:
from StringIO import StringIO
except ImportError:
from io import StringIO # doesn't accept 'str' in Py2
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import threading
except ImportError: # No threads, no problems
threading = None
try:
from unittest import SkipTest
except ImportError:
class SkipTest(Exception): # don't raise, only provided to allow except-ing it!
pass
def skip_test(reason):
sys.stderr.write("Skipping test: %s\n" % reason)
else:
def skip_test(reason):
raise SkipTest(reason)
try:
basestring
except NameError:
basestring = str
WITH_CYTHON = True
try:
# Py3.12+ doesn't have distutils any more and requires setuptools to provide it.
import setuptools
except ImportError:
pass
from distutils.command.build_ext import build_ext as _build_ext
from distutils import sysconfig
_to_clean = []
@atexit.register
def _cleanup_files():
"""
This is only used on Cygwin to clean up shared libraries that are unsafe
to delete while the test suite is running.
"""
for filename in _to_clean:
if os.path.isdir(filename):
shutil.rmtree(filename, ignore_errors=True)
else:
try:
os.remove(filename)
except OSError:
pass
def get_distutils_distro(_cache=[]):
if _cache:
return _cache[0]
# late import to accommodate the setuptools override
from distutils.dist import Distribution
distutils_distro = Distribution()
if sys.platform == 'win32':
# TODO: Figure out why this hackery is needed (see https://thread.gmane.org/gmane.comp.python.cython.devel/8280/).
config_files = distutils_distro.find_config_files()
try:
config_files.remove('setup.cfg')
except ValueError:
pass
distutils_distro.parse_config_files(config_files)
cfgfiles = distutils_distro.find_config_files()
try:
cfgfiles.remove('setup.cfg')
except ValueError:
pass
distutils_distro.parse_config_files(cfgfiles)
_cache.append(distutils_distro)
return distutils_distro
def import_refnanny():
try:
# try test copy first
import refnanny
return refnanny
except ImportError:
pass
import Cython.Runtime.refnanny
return Cython.Runtime.refnanny
EXT_DEP_MODULES = {
'tag:numpy': 'numpy',
'tag:pythran': 'pythran',
'tag:setuptools': 'setuptools.sandbox',
'tag:asyncio': 'asyncio',
'tag:pstats': 'pstats',
'tag:posix': 'posix',
'tag:array': 'array',
'tag:coverage': 'Cython.Coverage',
'Coverage': 'Cython.Coverage',
'tag:ipython': 'IPython.testing.globalipapp',
'tag:jedi': 'jedi_BROKEN_AND_DISABLED',
'tag:test.support': 'test.support', # support module for CPython unit tests
}
def patch_inspect_isfunction():
import inspect
orig_isfunction = inspect.isfunction
def isfunction(obj):
return orig_isfunction(obj) or type(obj).__name__ == 'cython_function_or_method'
isfunction._orig_isfunction = orig_isfunction
inspect.isfunction = isfunction
def unpatch_inspect_isfunction():
import inspect
try:
orig_isfunction = inspect.isfunction._orig_isfunction
except AttributeError:
pass
else:
inspect.isfunction = orig_isfunction
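# Illustrative use of the patch pair above (a hedged sketch, not part of the original file;
# the helper name is hypothetical):
#
#     patch_inspect_isfunction()
#     try:
#         run_doctests_against_compiled_module()  # hypothetical helper; with the patch
#         # in place, inspect.isfunction() also accepts objects of type
#         # 'cython_function_or_method', so doctest collection picks them up.
#     finally:
#         unpatch_inspect_isfunction()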
def def_to_cdef(source):
'''
Converts module-level def functions into cdef functions, i.e.
@decorator
def foo([args]):
"""
[tests]
"""
[body]
becomes
def foo([args]):
"""
[tests]
"""
return foo_c([args])
cdef foo_c([args]):
[body]
'''
output = []
skip = False
def_node = re.compile(r'def (\w+)\(([^()*]*)\):').match
lines = iter(source.split('\n'))
for line in lines:
if not line.strip():
output.append(line)
continue
if skip:
if line[0] != ' ':
skip = False
else:
continue
if line[0] == '@':
skip = True
continue
m = def_node(line)
if m:
name = m.group(1)
args = m.group(2)
if args:
args_no_types = ", ".join(arg.split()[-1] for arg in args.split(','))
else:
args_no_types = ""
output.append("def %s(%s):" % (name, args_no_types))
line = next(lines)
if '"""' in line:
has_docstring = True
output.append(line)
for line in lines:
output.append(line)
if '"""' in line:
break
else:
has_docstring = False
output.append(" return %s_c(%s)" % (name, args_no_types))
output.append('')
output.append("cdef %s_c(%s):" % (name, args))
if not has_docstring:
output.append(line)
else:
output.append(line)
return '\n'.join(output)
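# Illustrative input/output for def_to_cdef() (a hedged sketch; 'add' is a hypothetical
# example, not a real test module):
#
#     source = (
#         "def add(int a, int b):\n"
#         '    """\n'
#         "    >>> add(1, 2)\n"
#         "    3\n"
#         '    """\n'
#         "    return a + b\n"
#     )
#     converted = def_to_cdef(source)
#     # 'converted' keeps a plain 'def add(a, b):' wrapper (docstring included) that
#     # returns add_c(a, b), and moves the original body into 'cdef add_c(int a, int b):'.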
def exclude_test_in_pyver(*versions):
return sys.version_info[:2] in versions
def exclude_test_on_platform(*platforms):
return sys.platform in platforms
def update_linetrace_extension(ext):
if sys.version_info[:2] == (3, 12):
# Line tracing is generally fragile in Py3.12.
return EXCLUDE_EXT
if not IS_CPYTHON and sys.version_info[:2] < (3, 13):
# Tracing/profiling requires PEP-669 monitoring or old CPython tracing.
return EXCLUDE_EXT
ext.define_macros.append(('CYTHON_TRACE', 1))
return ext
def update_numpy_extension(ext, set_api17_macro=True):
import numpy as np
# Add paths for npyrandom and npymath libraries:
lib_path = [
os.path.abspath(os.path.join(np.get_include(), '..', '..', 'random', 'lib')),
os.path.abspath(os.path.join(np.get_include(), '..', 'lib'))
]
ext.library_dirs += lib_path
if sys.platform == "win32":
ext.libraries += ["npymath"]
else:
ext.libraries += ["npymath", "m"]
ext.include_dirs.append(np.get_include())
if set_api17_macro and getattr(np, '__version__', '') not in ('1.19.0', '1.19.1'):
ext.define_macros.append(('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION'))
del np
def update_gdb_extension(ext, _has_gdb=[None]):
# We should probably also check for Python support.
if not include_debugger:
_has_gdb[0] = False
if _has_gdb[0] is None:
try:
subprocess.check_call(["gdb", "--version"])
except (IOError, subprocess.CalledProcessError):
_has_gdb[0] = False
else:
_has_gdb[0] = True
if not _has_gdb[0]:
return EXCLUDE_EXT
return ext
def update_openmp_extension(ext):
ext.openmp = True
language = ext.language
if sys.platform == 'win32' and sys.version_info[:2] == (3,4):
# OpenMP tests fail in appveyor in Py3.4 -> just ignore them, EoL of Py3.4 is early 2019...
return EXCLUDE_EXT
if language == 'cpp':
flags = OPENMP_CPP_COMPILER_FLAGS
else:
flags = OPENMP_C_COMPILER_FLAGS
if flags:
compile_flags, link_flags = flags
ext.extra_compile_args.extend(compile_flags.split())
ext.extra_link_args.extend(link_flags.split())
return ext
elif sys.platform == 'win32':
return ext
return EXCLUDE_EXT
def update_cpp_extension(cpp_std, min_gcc_version=None, min_clang_version=None, min_macos_version=None):
def _update_cpp_extension(ext):
"""
Update cpp[cpp_std] extensions (e.g. the cpp11/cpp17/cpp20 tags) so that they build and run with the given minimum versions of gcc / clang / macOS.
"""
# If the extension provides a -std=... option, and it's greater than the one
# we're about to give, assume that whatever C compiler we use will probably be ok with it.
extra_compile_args = []
already_has_std = False
if ext.extra_compile_args:
std_regex = re.compile(r"-std(?!lib).*(?P<number>[0-9]+)")
for ca in ext.extra_compile_args:
match = std_regex.search(ca)
if match:
number = int(match.group("number"))
if number < cpp_std:
continue # and drop the argument
already_has_std = True
extra_compile_args.append(ca)
ext.extra_compile_args = extra_compile_args
use_gcc = use_clang = already_has_std
# check for a usable gcc version
gcc_version = get_gcc_version(ext.language)
if gcc_version:
if cpp_std >= 17 and sys.version_info[0] < 3:
# The Python 2.7 headers contain the 'register' modifier
# which gcc warns about in C++17 mode.
ext.extra_compile_args.append('-Wno-register')
if not already_has_std:
compiler_version = gcc_version.group(1)
if not min_gcc_version or float(compiler_version) >= float(min_gcc_version):
use_gcc = True
ext.extra_compile_args.append("-std=c++%s" % cpp_std)
if use_gcc:
return ext
# check for a usable clang version
clang_version = get_clang_version(ext.language)
if clang_version:
if cpp_std >= 17 and sys.version_info[0] < 3:
# The Python 2.7 headers contain the 'register' modifier
# which clang warns about in C++17 mode.
ext.extra_compile_args.append('-Wno-register')
if not already_has_std:
compiler_version = clang_version.group(1)
if not min_clang_version or float(compiler_version) >= float(min_clang_version):
use_clang = True
ext.extra_compile_args.append("-std=c++%s" % cpp_std)
if sys.platform == "darwin":
ext.extra_compile_args.append("-stdlib=libc++")
if min_macos_version is not None:
ext.extra_compile_args.append("-mmacosx-version-min=" + min_macos_version)
if use_clang:
return ext
# no usable C compiler found => exclude the extension
return EXCLUDE_EXT
return _update_cpp_extension
def require_gcc(version):
def check(ext):
gcc_version = get_gcc_version(ext.language)
if gcc_version:
if float(gcc_version.group(1)) >= float(version):
return ext
return EXCLUDE_EXT
return check
def get_cc_version(language):
"""
Returns the raw version output of the configured C/C++ compiler ('<cc> -v'), obtained via Popen.
"""
cc = ''
if language == 'cpp':
cc = os.environ.get('CXX') or sysconfig.get_config_var('CXX')
if not cc:
cc = os.environ.get('CC') or sysconfig.get_config_var('CC')
if not cc:
from distutils import ccompiler
cc = ccompiler.get_default_compiler()
if not cc:
return ''
# For some reason, cc can be e.g. 'gcc -pthread'
cc = cc.split()[0]
# Force english output
env = os.environ.copy()
env['LC_MESSAGES'] = 'C'
try:
p = subprocess.Popen([cc, "-v"], stderr=subprocess.PIPE, env=env)
except EnvironmentError as exc:
warnings.warn("Unable to find the %s compiler: %s: %s" %
(language, os.strerror(exc.errno), cc))
return ''
_, output = p.communicate()
return output.decode(locale.getpreferredencoding() or 'ASCII', 'replace')
def get_gcc_version(language):
matcher = re.compile(r"gcc version (\d+\.\d+)").search
return matcher(get_cc_version(language))
def get_clang_version(language):
matcher = re.compile(r"clang(?:-|\s+version\s+)(\d+\.\d+)").search
return matcher(get_cc_version(language))
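# Illustrative matches (hedged; the version numbers below are made up): get_cc_version()
# returns the raw "<cc> -v" output, and the two matchers above pull a "major.minor"
# version string out of it, e.g.
#
#     get_gcc_version('c')      # matches "gcc version 12.2"  -> .group(1) == "12.2"
#     get_clang_version('cpp')  # matches "clang version 15.0" or Apple's "clang-1403.0..."
#
# Both return an re.Match object or None, so callers must check before calling .group(1).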
def get_openmp_compiler_flags(language):
"""
gcc supports OpenMP 2.5 as of version 4.2 and OpenMP 3.0 as of version 4.4. We don't
(currently) check for other compilers.
Returns a two-tuple of (CFLAGS, LDFLAGS) for building the OpenMP extension.
"""
gcc_version = get_gcc_version(language)
if not gcc_version:
if sys.platform == 'win32':
return '/openmp', ''
else:
return None # not gcc - FIXME: do something about other compilers
# gcc defines "__int128_t", assume that at least all 64 bit architectures have it
global COMPILER_HAS_INT128
COMPILER_HAS_INT128 = getattr(sys, 'maxsize', getattr(sys, 'maxint', 0)) > 2**60
compiler_version = gcc_version.group(1)
if compiler_version:
compiler_version = [int(num) for num in compiler_version.split('.')]
if compiler_version >= [4, 2]:
return '-fopenmp', '-fopenmp'
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
pass
COMPILER = None
COMPILER_HAS_INT128 = False
OPENMP_C_COMPILER_FLAGS = get_openmp_compiler_flags('c')
OPENMP_CPP_COMPILER_FLAGS = get_openmp_compiler_flags('cpp')
# Return this from the EXT_EXTRAS matcher callback to exclude the extension
EXCLUDE_EXT = object()
EXT_EXTRAS = {
'tag:numpy' : update_numpy_extension,
'tag:openmp': update_openmp_extension,
'tag:gdb': update_gdb_extension,
'tag:cpp11': update_cpp_extension(11, min_gcc_version="4.9", min_macos_version="10.7"),
'tag:cpp17': update_cpp_extension(17, min_gcc_version="5.0", min_macos_version="10.13"),
'tag:cpp20': update_cpp_extension(20, min_gcc_version="11.0", min_clang_version="13.0", min_macos_version="10.13"),
'tag:trace' : update_linetrace_extension,
'tag:cppexecpolicies': require_gcc("9.1"),
}
TAG_EXCLUDERS = sorted({
'no-macos': exclude_test_on_platform('darwin'),
'pstats': exclude_test_in_pyver((3,12)),
'trace': not IS_CPYTHON,
}.items())
# TODO: use tags
VER_DEP_MODULES = {
# tests are excluded if 'CurrentPythonVersion OP VersionTuple', i.e.
# (2,4) : (operator.lt, ...) excludes ... when PyVer < 2.4.x
# FIXME: fix? delete?
(3,4,999): (operator.gt, lambda x: x in ['run.initial_file_path',
]),
(3,8): (operator.lt, lambda x: x in ['run.special_methods_T561_py38',
]),
(3,12): (operator.ge, lambda x: x in [
'run.py_unicode_strings', # Py_UNICODE was removed
'compile.pylong', # PyLongObject changed its structure
'run.longintrepr', # PyLongObject changed its structure
'run.line_trace', # sys.monitoring broke sys.set_trace() line tracing
]),
}
INCLUDE_DIRS = [ d for d in os.getenv('INCLUDE', '').split(os.pathsep) if d ]
CFLAGS = os.getenv('CFLAGS', '').split()
CCACHE = os.getenv('CYTHON_RUNTESTS_CCACHE', '').split()
CDEFS = []
TEST_SUPPORT_DIR = 'testsupport'
BACKENDS = ['c', 'cpp']
UTF8_BOM_BYTES = r'\xef\xbb\xbf'.encode('ISO-8859-1').decode('unicode_escape')
# A selector that can be used to determine whether to run with Py_LIMITED_API
# (if run in limited api mode)
limited_api_full_tests = None
def memoize(f):
uncomputed = object()
f._cache = {}
get = f._cache.get
def func(*args):
res = get(args, uncomputed)
if res is uncomputed:
res = f._cache[args] = f(*args)
return res
return func
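# Illustrative usage of memoize() (hedged; 'expensive_lookup' and its helper are
# hypothetical names, not part of the original file):
#
#     @memoize
#     def expensive_lookup(name):
#         return compute_something_slow(name)   # hypothetical helper
#
#     expensive_lookup('x')  # computed once
#     expensive_lookup('x')  # returned from the cache keyed on the positional args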
@memoize
def parse_tags(filepath):
tags = defaultdict(list)
parse_tag = re.compile(r'#\s*(\w+)\s*:(.*)$').match
with io_open(filepath, encoding='ISO-8859-1', errors='ignore') as f:
for line in f:
if line[0] != '#':
# ignore BOM-like bytes and whitespace
line = line.lstrip(UTF8_BOM_BYTES).strip()
if not line:
if tags:
break # assume all tags are in one block
continue
if line[0] != '#':
break
parsed = parse_tag(line)
if parsed is not None:
tag, values = parsed.groups()
if tag not in ('mode', 'tag', 'ticket', 'cython', 'distutils', 'preparse'):
if tag in ('coding', 'encoding'):
continue
if tag == 'tags':
raise RuntimeError("test tags use the 'tag' directive, not 'tags' (%s)" % filepath)
print("WARNING: unknown test directive '%s' found (%s)" % (tag, filepath))
values = values.split(',')
tags[tag].extend(filter(None, [value.strip() for value in values]))
elif tags:
break # assume all tags are in one block
return tags
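# Illustrative test file header understood by parse_tags() (the values are examples only):
#
#     # mode: run
#     # tag: numpy, openmp
#     # ticket: 1234
#
# which would yield a defaultdict(list) like
#     {'mode': ['run'], 'tag': ['numpy', 'openmp'], 'ticket': ['1234']}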
list_unchanging_dir = memoize(lambda x: os.listdir(x)) # needs lambda to set function attribute
@memoize
def _list_pyregr_data_files(test_directory):
is_data_file = re.compile('(?:[.](txt|pem|db|html)|^bad.*[.]py)$').search
return ['__init__.py'] + [
filename for filename in list_unchanging_dir(test_directory)
if is_data_file(filename)]
def import_module_from_file(module_name, file_path, execute=True):
import importlib.util
spec = importlib.util.spec_from_file_location(module_name, file_path)
m = importlib.util.module_from_spec(spec)
if execute:
sys.modules[module_name] = m
spec.loader.exec_module(m)
return m
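# Illustrative usage (hedged; the module name and path are hypothetical):
#
#     mod = import_module_from_file("my_ext", "BUILD/run/c/my_ext.so")
#     # Builds an importlib spec for the file, registers the module in sys.modules
#     # and executes it, which also works for compiled extension modules built by
#     # the test runs.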
def import_ext(module_name, file_path=None):
if file_path:
return import_module_from_file(module_name, file_path)
else:
try:
from importlib import invalidate_caches
except ImportError:
pass
else:
invalidate_caches()
return __import__(module_name, globals(), locals(), ['*'])
class build_ext(_build_ext):
def build_extension(self, ext):
try:
try: # Py2.7+ & Py3.2+
compiler_obj = self.compiler_obj
except AttributeError:
compiler_obj = self.compiler
if ext.language == 'c++':
compiler_obj.compiler_so.remove('-Wstrict-prototypes')
if CCACHE:
compiler_obj.compiler_so = CCACHE + compiler_obj.compiler_so
if getattr(ext, 'openmp', None) and compiler_obj.compiler_type == 'msvc':
ext.extra_compile_args.append('/openmp')
except Exception:
pass
_build_ext.build_extension(self, ext)
class ErrorWriter(object):
match_error = re.compile(
r'(?:(warning|performance hint):)?(?:.*:)?\s*([-0-9]+)\s*:\s*([-0-9]+)\s*:\s*(.*)').match
def __init__(self, encoding=None):
self.output = []
self.encoding = encoding
def write(self, value):
if self.encoding:
value = value.encode('ISO-8859-1').decode(self.encoding)
self.output.append(value)
def _collect(self):
s = ''.join(self.output)
results = {'error': [], 'warning': [], 'performance hint': []}
for line in s.splitlines():
match = self.match_error(line)
if match:
message_type, line, column, message = match.groups()
results[message_type or 'error'].append((int(line), int(column), message.strip()))
return [
["%d:%d: %s" % values for values in sorted(results[key])]
for key in ('error', 'warning', 'performance hint')
]
def geterrors(self):
return self._collect()[0]
def getall(self):
return self._collect()
def close(self):
pass # ignore, only to match file-like interface
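# Illustrative ErrorWriter input (hedged; the file name and message are made up):
#
#     writer = ErrorWriter()
#     writer.write("example.pyx:10:4: undeclared name not builtin: foo\n")
#     writer.geterrors()  # -> ['10:4: undeclared name not builtin: foo']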
class Stats(object):
def __init__(self, top_n=8):
self.top_n = top_n
self.test_counts = defaultdict(int)
self.test_times = defaultdict(float)
self.top_tests = defaultdict(list)
def add_time(self, name, language, metric, t, count=1):
self.test_counts[metric] += count
self.test_times[metric] += t
top = self.top_tests[metric]
push = heapq.heappushpop if len(top) >= self.top_n else heapq.heappush
# min-heap => pop smallest/shortest until longest times remain
push(top, (t, name, language))
@contextmanager
def time(self, name, language, metric):
t = time.time()
yield
t = time.time() - t
self.add_time(name, language, metric, t)
def update(self, stats):
# type: (Stats) -> None
for metric, t in stats.test_times.items():
self.test_times[metric] += t
self.test_counts[metric] += stats.test_counts[metric]
top = self.top_tests[metric]
for entry in stats.top_tests[metric]:
push = heapq.heappushpop if len(top) >= self.top_n else heapq.heappush
push(top, entry)
def print_stats(self, out=sys.stderr):
if not self.test_times:
return
lines = ['Times:\n']
for metric, t in sorted(self.test_times.items(), key=operator.itemgetter(1), reverse=True):
count = self.test_counts[metric]
top = self.top_tests[metric]
lines.append("%-12s: %8.2f sec (%4d, %6.3f / run) - slowest: %s\n" % (
metric, t, count, t / count,
', '.join("'{2}:{1}' ({0:.2f}s)".format(*item) for item in heapq.nlargest(self.top_n, top))))
out.write(''.join(lines))
class TestBuilder(object):
def __init__(self, rootdir, workdir, selectors, exclude_selectors, options,
with_pyregr, languages, test_bugs, language_level,
common_utility_dir, pythran_dir=None,
default_mode='run', stats=None,
add_embedded_test=False, add_cython_import=False,
add_cpp_locals_extra_tests=False):
self.rootdir = rootdir
self.workdir = workdir
self.selectors = selectors
self.exclude_selectors = exclude_selectors
self.shard_num = options.shard_num
self.annotate = options.annotate_source
self.cleanup_workdir = options.cleanup_workdir
self.cleanup_sharedlibs = options.cleanup_sharedlibs
self.cleanup_failures = options.cleanup_failures
self.with_pyregr = with_pyregr
self.cython_only = options.cython_only
self.test_selector = re.compile(options.only_pattern).search if options.only_pattern else None
self.languages = languages
self.test_bugs = test_bugs
self.fork = options.fork
self.language_level = language_level
self.test_determinism = options.test_determinism
self.common_utility_dir = common_utility_dir
self.pythran_dir = pythran_dir
self.default_mode = default_mode
self.stats = stats
self.add_embedded_test = add_embedded_test
self.add_cython_import = add_cython_import
self.capture = options.capture
self.add_cpp_locals_extra_tests = add_cpp_locals_extra_tests
def build_suite(self):
suite = unittest.TestSuite()
filenames = os.listdir(self.rootdir)
filenames.sort()
# TODO: parallelise I/O with a thread pool for the different directories once we drop Py2 support
for filename in filenames:
path = os.path.join(self.rootdir, filename)
if os.path.isdir(path) and filename != TEST_SUPPORT_DIR:
if filename == 'pyregr' and not self.with_pyregr:
continue
if filename == 'broken' and not self.test_bugs:
continue
suite.addTest(
self.handle_directory(path, filename))
if (sys.platform not in ['win32'] and self.add_embedded_test
# the embedding test is currently broken in Py3.8+ and Py2.7, except on Linux.
and ((3, 0) <= sys.version_info < (3, 8) or sys.platform != 'darwin')
# broken on graal too
and not IS_GRAAL):
# Non-Windows makefile.
if [1 for selector in self.selectors if selector("embedded")] \
and not [1 for selector in self.exclude_selectors if selector("embedded")]:
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(EmbedTest))
return suite
def handle_directory(self, path, context):
workdir = os.path.join(self.workdir, context)
if not os.path.exists(workdir):
os.makedirs(workdir)
suite = unittest.TestSuite()
filenames = list_unchanging_dir(path)
filenames.sort()
for filename in filenames:
filepath = os.path.join(path, filename)
module, ext = os.path.splitext(filename)
if ext not in ('.py', '.pyx', '.srctree'):
continue
if filename.startswith('.'):
continue # certain emacs backup files
if context == 'pyregr':
tags = defaultdict(list)
else:
tags = parse_tags(filepath)
fqmodule = "%s.%s" % (context, module)
if not [ 1 for match in self.selectors
if match(fqmodule, tags) ]:
continue
if self.exclude_selectors:
if [1 for match in self.exclude_selectors
if match(fqmodule, tags)]:
continue
full_limited_api_mode = False
if limited_api_full_tests and limited_api_full_tests(fqmodule):
# TODO: this (and CYTHON_LIMITED_API) doesn't yet make it into end-to-end tests
full_limited_api_mode = True
mode = self.default_mode
if tags['mode']:
mode = tags['mode'][0]
elif context == 'pyregr':
mode = 'pyregr'
if ext == '.srctree':
if self.cython_only:
# EndToEnd tests always execute arbitrary build and test code
continue
if skip_limited(tags):
continue
if 'cpp' not in tags['tag'] or 'cpp' in self.languages:
suite.addTest(EndToEndTest(filepath, workdir,
self.cleanup_workdir, stats=self.stats,
capture=self.capture, shard_num=self.shard_num))
continue
# Choose the test suite.
if mode == 'pyregr':
if not filename.startswith('test_'):
continue
test_class = CythonPyregrTestCase
elif mode == 'run':
if module.startswith("test_"):
test_class = CythonUnitTestCase
else:
test_class = CythonRunTestCase
elif mode in ['compile', 'error']:
test_class = CythonCompileTestCase
else:
raise KeyError('Invalid test mode: ' + mode)
for test in self.build_tests(test_class, path, workdir,
module, filepath, mode == 'error', tags,
full_limited_api_mode=full_limited_api_mode):
suite.addTest(test)
if mode == 'run' and ext == '.py' and not self.cython_only and not filename.startswith('test_'):
# additionally test file in real Python
min_py_ver = [
(int(pyver.group(1)), int(pyver.group(2)))
for pyver in map(re.compile(r'pure([0-9]+)[.]([0-9]+)').match, tags['tag'])
if pyver
]
if not min_py_ver or any(sys.version_info >= min_ver for min_ver in min_py_ver):
suite.addTest(PureDoctestTestCase(
module, filepath, tags, stats=self.stats, shard_num=self.shard_num))
return suite
def build_tests(self, test_class, path, workdir, module, module_path, expect_errors, tags, full_limited_api_mode):
warning_errors = 'werror' in tags['tag']
expect_log = ("errors",) if expect_errors else ()
if 'warnings' in tags['tag']:
expect_log += ("warnings",)
if "perf_hints" in tags['tag']:
expect_log += ("perf_hints",)
extra_directives_list = [{}]
if expect_errors:
if skip_c(tags) and 'cpp' in self.languages:
languages = ['cpp']
else:
languages = self.languages[:1]
else:
languages = self.languages
if 'c' in languages and skip_c(tags):
languages = list(languages)
languages.remove('c')
if 'cpp' in languages and 'no-cpp' in tags['tag']:
languages = list(languages)
languages.remove('cpp')
if (self.add_cpp_locals_extra_tests and 'cpp' in languages and
'cpp' in tags['tag'] and not 'no-cpp-locals' in tags['tag']):
extra_directives_list.append({'cpp_locals': True})
if not languages:
return []
if skip_limited(tags):
return []
language_levels = [2, 3] if 'all_language_levels' in tags['tag'] else [None]
pythran_dir = self.pythran_dir
if 'pythran' in tags['tag'] and not pythran_dir and 'cpp' in languages:
import pythran.config
try:
pythran_ext = pythran.config.make_extension(python=True)
except TypeError: # old pythran version syntax
pythran_ext = pythran.config.make_extension()
pythran_dir = pythran_ext['include_dirs'][0]
add_cython_import = self.add_cython_import and module_path.endswith('.py')
preparse_list = tags.get('preparse', ['id'])
tests = [ self.build_test(test_class, path, workdir, module, module_path,
tags, language, language_level,
expect_log,
warning_errors, preparse,
pythran_dir if language == "cpp" else None,
add_cython_import=add_cython_import,
extra_directives=extra_directives,
full_limited_api_mode=full_limited_api_mode)
for language in languages
for preparse in preparse_list
for language_level in language_levels
for extra_directives in extra_directives_list
]
return tests
def build_test(self, test_class, path, workdir, module, module_path, tags, language, language_level,
expect_log, warning_errors, preparse, pythran_dir, add_cython_import,
extra_directives, full_limited_api_mode):
language_workdir = os.path.join(workdir, language)
if not os.path.exists(language_workdir):
os.makedirs(language_workdir)
workdir = os.path.join(language_workdir, module)
if preparse != 'id':
workdir += '_%s' % (preparse,)
if language_level:
workdir += '_cy%d' % (language_level,)
if extra_directives:
workdir += ('_directives_'+ '_'.join('%s_%s' % (k, v) for k,v in extra_directives.items()))
return test_class(path, workdir, module, module_path, tags,
language=language,
preparse=preparse,
expect_log=expect_log,
annotate=self.annotate,
cleanup_workdir=self.cleanup_workdir,
cleanup_sharedlibs=self.cleanup_sharedlibs,
cleanup_failures=self.cleanup_failures,
cython_only=self.cython_only,
test_selector=self.test_selector,
shard_num=self.shard_num,
fork=self.fork,
language_level=language_level or self.language_level,
warning_errors=warning_errors,
test_determinism=self.test_determinism,
common_utility_dir=self.common_utility_dir,
pythran_dir=pythran_dir,
stats=self.stats,
add_cython_import=add_cython_import,
full_limited_api_mode=full_limited_api_mode,
)
def skip_c(tags):
if 'cpp' in tags['tag']:
return True
# We don't want to create a distutils key in the
# dictionary so we check before looping.
if 'distutils' in tags:
for option in tags['distutils']:
split = option.split('=')
if len(split) == 2:
argument, value = split
if argument.strip() == 'language' and value.strip() == 'c++':
return True
return False
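# Illustrative tag headers that make skip_c() return True (examples only):
#
#     # tag: cpp
#
# or, via a distutils directive in the test file:
#
#     # distutils: language = c++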
def skip_limited(tags):
if 'limited-api' in tags['tag']:
# Run limited-api tests only on CPython.
if sys.implementation.name != 'cpython':
return True
return False
def filter_stderr(stderr_bytes):
"""
Filter annoying warnings from output.
"""
if b"Command line warning D9025" in stderr_bytes:
# MSVC: cl : Command line warning D9025 : overriding '/Ox' with '/Od'
stderr_bytes = b'\n'.join(
line for line in stderr_bytes.splitlines()
if b"Command line warning D9025" not in line)
return stderr_bytes
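# Illustrative stderr line dropped by filter_stderr() (flags taken from the comment
# above; real MSVC output varies):
#
#     cl : Command line warning D9025 : overriding '/Ox' with '/Od'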
def filter_test_suite(test_suite, selector):
filtered_tests = []
for test in test_suite._tests:
if isinstance(test, unittest.TestSuite):
filter_test_suite(test, selector)
elif not selector(test.id()):
continue
filtered_tests.append(test)
test_suite._tests[:] = filtered_tests
class CythonCompileTestCase(unittest.TestCase):
def __init__(self, test_directory, workdir, module, module_path, tags, language='c', preparse='id',
expect_log=(),
annotate=False, cleanup_workdir=True,
cleanup_sharedlibs=True, cleanup_failures=True, cython_only=False, test_selector=None,
fork=True, language_level=2, warning_errors=False,
test_determinism=False, shard_num=0,
common_utility_dir=None, pythran_dir=None, stats=None, add_cython_import=False,
extra_directives=None, full_limited_api_mode=False):
if extra_directives is None:
extra_directives = {}
self.test_directory = test_directory
self.tags = tags
self.workdir = workdir
self.module = module
self.module_path = module_path
self.language = language
self.preparse = preparse