ZTWHHH commited on
Commit
5480190
·
verified ·
1 Parent(s): e11c059

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +12 -0
  2. deepseekvl2/lib/python3.10/__pycache__/glob.cpython-310.pyc +0 -0
  3. deepseekvl2/lib/python3.10/lib-dynload/_asyncio.cpython-310-x86_64-linux-gnu.so +3 -0
  4. deepseekvl2/lib/python3.10/lib-dynload/_bisect.cpython-310-x86_64-linux-gnu.so +0 -0
  5. deepseekvl2/lib/python3.10/lib-dynload/_blake2.cpython-310-x86_64-linux-gnu.so +3 -0
  6. deepseekvl2/lib/python3.10/lib-dynload/_crypt.cpython-310-x86_64-linux-gnu.so +0 -0
  7. deepseekvl2/lib/python3.10/lib-dynload/_csv.cpython-310-x86_64-linux-gnu.so +3 -0
  8. deepseekvl2/lib/python3.10/lib-dynload/_ctypes.cpython-310-x86_64-linux-gnu.so +3 -0
  9. deepseekvl2/lib/python3.10/lib-dynload/_ctypes_test.cpython-310-x86_64-linux-gnu.so +0 -0
  10. deepseekvl2/lib/python3.10/lib-dynload/_curses.cpython-310-x86_64-linux-gnu.so +3 -0
  11. deepseekvl2/lib/python3.10/lib-dynload/_datetime.cpython-310-x86_64-linux-gnu.so +3 -0
  12. deepseekvl2/lib/python3.10/lib-dynload/_elementtree.cpython-310-x86_64-linux-gnu.so +3 -0
  13. deepseekvl2/lib/python3.10/lib-dynload/_posixsubprocess.cpython-310-x86_64-linux-gnu.so +0 -0
  14. deepseekvl2/lib/python3.10/lib-dynload/_struct.cpython-310-x86_64-linux-gnu.so +3 -0
  15. deepseekvl2/lib/python3.10/lib-dynload/_testcapi.cpython-310-x86_64-linux-gnu.so +3 -0
  16. deepseekvl2/lib/python3.10/lib-dynload/array.cpython-310-x86_64-linux-gnu.so +3 -0
  17. deepseekvl2/lib/python3.10/lib-dynload/audioop.cpython-310-x86_64-linux-gnu.so +3 -0
  18. deepseekvl2/lib/python3.10/lib-dynload/binascii.cpython-310-x86_64-linux-gnu.so +3 -0
  19. deepseekvl2/lib/python3.10/lib-dynload/resource.cpython-310-x86_64-linux-gnu.so +0 -0
  20. deepseekvl2/lib/python3.10/lib-dynload/syslog.cpython-310-x86_64-linux-gnu.so +0 -0
  21. deepseekvl2/lib/python3.10/test/__init__.py +1 -0
  22. deepseekvl2/lib/python3.10/test/__pycache__/__init__.cpython-310.pyc +0 -0
  23. deepseekvl2/lib/python3.10/test/__pycache__/test_script_helper.cpython-310.pyc +0 -0
  24. deepseekvl2/lib/python3.10/test/__pycache__/test_support.cpython-310.pyc +0 -0
  25. deepseekvl2/lib/python3.10/test/support/__init__.py +2124 -0
  26. deepseekvl2/lib/python3.10/test/support/__pycache__/__init__.cpython-310.pyc +0 -0
  27. deepseekvl2/lib/python3.10/test/support/__pycache__/bytecode_helper.cpython-310.pyc +0 -0
  28. deepseekvl2/lib/python3.10/test/support/__pycache__/hashlib_helper.cpython-310.pyc +0 -0
  29. deepseekvl2/lib/python3.10/test/support/__pycache__/import_helper.cpython-310.pyc +0 -0
  30. deepseekvl2/lib/python3.10/test/support/__pycache__/interpreters.cpython-310.pyc +0 -0
  31. deepseekvl2/lib/python3.10/test/support/__pycache__/logging_helper.cpython-310.pyc +0 -0
  32. deepseekvl2/lib/python3.10/test/support/__pycache__/os_helper.cpython-310.pyc +0 -0
  33. deepseekvl2/lib/python3.10/test/support/__pycache__/script_helper.cpython-310.pyc +0 -0
  34. deepseekvl2/lib/python3.10/test/support/__pycache__/socket_helper.cpython-310.pyc +0 -0
  35. deepseekvl2/lib/python3.10/test/support/__pycache__/testresult.cpython-310.pyc +0 -0
  36. deepseekvl2/lib/python3.10/test/support/__pycache__/threading_helper.cpython-310.pyc +0 -0
  37. deepseekvl2/lib/python3.10/test/support/__pycache__/warnings_helper.cpython-310.pyc +0 -0
  38. deepseekvl2/lib/python3.10/test/support/bytecode_helper.py +42 -0
  39. deepseekvl2/lib/python3.10/test/support/hashlib_helper.py +51 -0
  40. deepseekvl2/lib/python3.10/test/support/import_helper.py +220 -0
  41. deepseekvl2/lib/python3.10/test/support/interpreters.py +197 -0
  42. deepseekvl2/lib/python3.10/test/support/logging_helper.py +29 -0
  43. deepseekvl2/lib/python3.10/test/support/os_helper.py +623 -0
  44. deepseekvl2/lib/python3.10/test/support/script_helper.py +294 -0
  45. deepseekvl2/lib/python3.10/test/support/socket_helper.py +269 -0
  46. deepseekvl2/lib/python3.10/test/support/testresult.py +185 -0
  47. deepseekvl2/lib/python3.10/test/support/threading_helper.py +209 -0
  48. deepseekvl2/lib/python3.10/test/support/warnings_helper.py +199 -0
  49. deepseekvl2/lib/python3.10/test/test_script_helper.py +125 -0
  50. deepseekvl2/lib/python3.10/test/test_support.py +713 -0
.gitattributes CHANGED
@@ -733,3 +733,15 @@ deepseekvl2/lib/python3.10/lib-dynload/pyexpat.cpython-310-x86_64-linux-gnu.so f
733
  deepseekvl2/lib/python3.10/lib-dynload/_multibytecodec.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
734
  deepseekvl2/lib/python3.10/lib-dynload/select.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
735
  deepseekvl2/lib/python3.10/lib-dynload/_codecs_jp.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
733
  deepseekvl2/lib/python3.10/lib-dynload/_multibytecodec.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
734
  deepseekvl2/lib/python3.10/lib-dynload/select.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
735
  deepseekvl2/lib/python3.10/lib-dynload/_codecs_jp.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
736
+ deepseekvl2/lib/python3.10/lib-dynload/_csv.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
737
+ deepseekvl2/lib/python3.10/lib-dynload/_elementtree.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
738
+ deepseekvl2/lib/python3.10/lib-dynload/_datetime.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
739
+ deepseekvl2/lib/python3.10/lib-dynload/binascii.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
740
+ deepseekvl2/lib/python3.10/lib-dynload/_ctypes.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
741
+ deepseekvl2/lib/python3.10/lib-dynload/array.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
742
+ deepseekvl2/lib/python3.10/lib-dynload/_asyncio.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
743
+ deepseekvl2/lib/python3.10/lib-dynload/_blake2.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
744
+ deepseekvl2/lib/python3.10/lib-dynload/_curses.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
745
+ deepseekvl2/lib/python3.10/lib-dynload/_struct.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
746
+ deepseekvl2/lib/python3.10/lib-dynload/audioop.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
747
+ deepseekvl2/lib/python3.10/lib-dynload/_testcapi.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
deepseekvl2/lib/python3.10/__pycache__/glob.cpython-310.pyc ADDED
Binary file (6.11 kB). View file
 
deepseekvl2/lib/python3.10/lib-dynload/_asyncio.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:10e10c5a50a6caa77c0d10cbc2042a1266fd91f591e9238c5185929972c195d4
3
+ size 228752
deepseekvl2/lib/python3.10/lib-dynload/_bisect.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (62.5 kB). View file
 
deepseekvl2/lib/python3.10/lib-dynload/_blake2.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8f39890de7e68c5a35b8b58df1ccbefb2449ae5e7011b3da531623e520706d74
3
+ size 251704
deepseekvl2/lib/python3.10/lib-dynload/_crypt.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (29.7 kB). View file
 
deepseekvl2/lib/python3.10/lib-dynload/_csv.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:54c23d5033e7faf000004ed531de1c2530a6737895f5592aadcc92c60dd84289
3
+ size 142696
deepseekvl2/lib/python3.10/lib-dynload/_ctypes.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:679b8c5b2ea999168ab98fd52076ae0bfde863226dcb5d24431a7bb16dafaa9a
3
+ size 548912
deepseekvl2/lib/python3.10/lib-dynload/_ctypes_test.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (77.2 kB). View file
 
deepseekvl2/lib/python3.10/lib-dynload/_curses.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf00a0480e575042eafa4aeff7267e56f9c35b9d96f2386d74adaa4f01e1e856
3
+ size 485160
deepseekvl2/lib/python3.10/lib-dynload/_datetime.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d929ec19b7987cf9a92d8d767c11edfdae6c92a791ad7342cc7ba4ed433ad35b
3
+ size 580080
deepseekvl2/lib/python3.10/lib-dynload/_elementtree.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6f9679081554fa8307d3a423d59f499173ad32ee49fa5eb0f0f72ac9a726ff51
3
+ size 341528
deepseekvl2/lib/python3.10/lib-dynload/_posixsubprocess.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (76.2 kB). View file
 
deepseekvl2/lib/python3.10/lib-dynload/_struct.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:263a18f8985b343b3af8c5b693968de0bc58951f895f1816a9a55b9734b33e2c
3
+ size 219400
deepseekvl2/lib/python3.10/lib-dynload/_testcapi.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:517b2b45e3943262d02807cad5b34e724b384e85553fd62b495e012d5ed897a0
3
+ size 481232
deepseekvl2/lib/python3.10/lib-dynload/array.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:12e45ee9da9d37cf82992df35fed43551d3f6990306c9cc7652b776334253b6f
3
+ size 231120
deepseekvl2/lib/python3.10/lib-dynload/audioop.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:233abb4b71e0236796b6cff91d7e78ae2723b52ed42ffba821d385f8d91534d2
3
+ size 231584
deepseekvl2/lib/python3.10/lib-dynload/binascii.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5fe7e9e043cae383510159adef847a131bb35d03d64f3293d6cc40c4a94c4815
3
+ size 122768
deepseekvl2/lib/python3.10/lib-dynload/resource.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (50.3 kB). View file
 
deepseekvl2/lib/python3.10/lib-dynload/syslog.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (40.7 kB). View file
 
deepseekvl2/lib/python3.10/test/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ # Dummy file to make this directory a package.
deepseekvl2/lib/python3.10/test/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (383 Bytes). View file
 
deepseekvl2/lib/python3.10/test/__pycache__/test_script_helper.cpython-310.pyc ADDED
Binary file (5.97 kB). View file
 
deepseekvl2/lib/python3.10/test/__pycache__/test_support.cpython-310.pyc ADDED
Binary file (23.2 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__init__.py ADDED
@@ -0,0 +1,2124 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Supporting definitions for the Python regression tests."""
2
+
3
+ if __name__ != 'test.support':
4
+ raise ImportError('support must be imported from the test package')
5
+
6
+ import contextlib
7
+ import functools
8
+ import os
9
+ import re
10
+ import stat
11
+ import sys
12
+ import sysconfig
13
+ import time
14
+ import types
15
+ import unittest
16
+ import warnings
17
+
18
+ from .testresult import get_test_runner
19
+
20
+
21
+ try:
22
+ from _testcapi import unicode_legacy_string
23
+ except ImportError:
24
+ unicode_legacy_string = None
25
+
26
+ __all__ = [
27
+ # globals
28
+ "PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast",
29
+ # exceptions
30
+ "Error", "TestFailed", "TestDidNotRun", "ResourceDenied",
31
+ # io
32
+ "record_original_stdout", "get_original_stdout", "captured_stdout",
33
+ "captured_stdin", "captured_stderr",
34
+ # unittest
35
+ "is_resource_enabled", "requires", "requires_freebsd_version",
36
+ "requires_linux_version", "requires_mac_ver",
37
+ "check_syntax_error",
38
+ "BasicTestRunner", "run_unittest", "run_doctest",
39
+ "requires_gzip", "requires_bz2", "requires_lzma",
40
+ "bigmemtest", "bigaddrspacetest", "cpython_only", "get_attribute",
41
+ "requires_IEEE_754", "requires_zlib",
42
+ "anticipate_failure", "load_package_tests", "detect_api_mismatch",
43
+ "check__all__", "skip_if_buggy_ucrt_strfptime",
44
+ "check_disallow_instantiation", "check_sanitizer", "skip_if_sanitizer",
45
+ # sys
46
+ "is_jython", "is_android", "check_impl_detail", "unix_shell",
47
+ "setswitchinterval",
48
+ # network
49
+ "open_urlresource",
50
+ # processes
51
+ "reap_children",
52
+ # miscellaneous
53
+ "run_with_locale", "swap_item", "findfile",
54
+ "swap_attr", "Matcher", "set_memlimit", "SuppressCrashReport", "sortdict",
55
+ "run_with_tz", "PGO", "missing_compiler_executable",
56
+ "ALWAYS_EQ", "NEVER_EQ", "LARGEST", "SMALLEST",
57
+ "LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT",
58
+ ]
59
+
60
+
61
+ # Timeout in seconds for tests using a network server listening on the network
62
+ # local loopback interface like 127.0.0.1.
63
+ #
64
+ # The timeout is long enough to prevent test failure: it takes into account
65
+ # that the client and the server can run in different threads or even different
66
+ # processes.
67
+ #
68
+ # The timeout should be long enough for connect(), recv() and send() methods
69
+ # of socket.socket.
70
+ LOOPBACK_TIMEOUT = 5.0
71
+ if sys.platform == 'win32' and ' 32 bit (ARM)' in sys.version:
72
+ # bpo-37553: test_socket.SendfileUsingSendTest is taking longer than 2
73
+ # seconds on Windows ARM32 buildbot
74
+ LOOPBACK_TIMEOUT = 10
75
+ elif sys.platform == 'vxworks':
76
+ LOOPBACK_TIMEOUT = 10
77
+
78
+ # Timeout in seconds for network requests going to the internet. The timeout is
79
+ # short enough to prevent a test to wait for too long if the internet request
80
+ # is blocked for whatever reason.
81
+ #
82
+ # Usually, a timeout using INTERNET_TIMEOUT should not mark a test as failed,
83
+ # but skip the test instead: see transient_internet().
84
+ INTERNET_TIMEOUT = 60.0
85
+
86
+ # Timeout in seconds to mark a test as failed if the test takes "too long".
87
+ #
88
+ # The timeout value depends on the regrtest --timeout command line option.
89
+ #
90
+ # If a test using SHORT_TIMEOUT starts to fail randomly on slow buildbots, use
91
+ # LONG_TIMEOUT instead.
92
+ SHORT_TIMEOUT = 30.0
93
+
94
+ # Timeout in seconds to detect when a test hangs.
95
+ #
96
+ # It is long enough to reduce the risk of test failure on the slowest Python
97
+ # buildbots. It should not be used to mark a test as failed if the test takes
98
+ # "too long". The timeout value depends on the regrtest --timeout command line
99
+ # option.
100
+ LONG_TIMEOUT = 5 * 60.0
101
+
102
+
103
+ class Error(Exception):
104
+ """Base class for regression test exceptions."""
105
+
106
+ class TestFailed(Error):
107
+ """Test failed."""
108
+
109
+ class TestFailedWithDetails(TestFailed):
110
+ """Test failed."""
111
+ def __init__(self, msg, errors, failures):
112
+ self.msg = msg
113
+ self.errors = errors
114
+ self.failures = failures
115
+ super().__init__(msg, errors, failures)
116
+
117
+ def __str__(self):
118
+ return self.msg
119
+
120
+ class TestDidNotRun(Error):
121
+ """Test did not run any subtests."""
122
+
123
+ class ResourceDenied(unittest.SkipTest):
124
+ """Test skipped because it requested a disallowed resource.
125
+
126
+ This is raised when a test calls requires() for a resource that
127
+ has not be enabled. It is used to distinguish between expected
128
+ and unexpected skips.
129
+ """
130
+
131
+ def anticipate_failure(condition):
132
+ """Decorator to mark a test that is known to be broken in some cases
133
+
134
+ Any use of this decorator should have a comment identifying the
135
+ associated tracker issue.
136
+ """
137
+ if condition:
138
+ return unittest.expectedFailure
139
+ return lambda f: f
140
+
141
+ def load_package_tests(pkg_dir, loader, standard_tests, pattern):
142
+ """Generic load_tests implementation for simple test packages.
143
+
144
+ Most packages can implement load_tests using this function as follows:
145
+
146
+ def load_tests(*args):
147
+ return load_package_tests(os.path.dirname(__file__), *args)
148
+ """
149
+ if pattern is None:
150
+ pattern = "test*"
151
+ top_dir = os.path.dirname( # Lib
152
+ os.path.dirname( # test
153
+ os.path.dirname(__file__))) # support
154
+ package_tests = loader.discover(start_dir=pkg_dir,
155
+ top_level_dir=top_dir,
156
+ pattern=pattern)
157
+ standard_tests.addTests(package_tests)
158
+ return standard_tests
159
+
160
+
161
+ def get_attribute(obj, name):
162
+ """Get an attribute, raising SkipTest if AttributeError is raised."""
163
+ try:
164
+ attribute = getattr(obj, name)
165
+ except AttributeError:
166
+ raise unittest.SkipTest("object %r has no attribute %r" % (obj, name))
167
+ else:
168
+ return attribute
169
+
170
+ verbose = 1 # Flag set to 0 by regrtest.py
171
+ use_resources = None # Flag set to [] by regrtest.py
172
+ max_memuse = 0 # Disable bigmem tests (they will still be run with
173
+ # small sizes, to make sure they work.)
174
+ real_max_memuse = 0
175
+ junit_xml_list = None # list of testsuite XML elements
176
+ failfast = False
177
+
178
+ # _original_stdout is meant to hold stdout at the time regrtest began.
179
+ # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
180
+ # The point is to have some flavor of stdout the user can actually see.
181
+ _original_stdout = None
182
+ def record_original_stdout(stdout):
183
+ global _original_stdout
184
+ _original_stdout = stdout
185
+
186
+ def get_original_stdout():
187
+ return _original_stdout or sys.stdout
188
+
189
+
190
+ def _force_run(path, func, *args):
191
+ try:
192
+ return func(*args)
193
+ except OSError as err:
194
+ if verbose >= 2:
195
+ print('%s: %s' % (err.__class__.__name__, err))
196
+ print('re-run %s%r' % (func.__name__, args))
197
+ os.chmod(path, stat.S_IRWXU)
198
+ return func(*args)
199
+
200
+
201
+ # Check whether a gui is actually available
202
+ def _is_gui_available():
203
+ if hasattr(_is_gui_available, 'result'):
204
+ return _is_gui_available.result
205
+ import platform
206
+ reason = None
207
+ if sys.platform.startswith('win') and platform.win32_is_iot():
208
+ reason = "gui is not available on Windows IoT Core"
209
+ elif sys.platform.startswith('win'):
210
+ # if Python is running as a service (such as the buildbot service),
211
+ # gui interaction may be disallowed
212
+ import ctypes
213
+ import ctypes.wintypes
214
+ UOI_FLAGS = 1
215
+ WSF_VISIBLE = 0x0001
216
+ class USEROBJECTFLAGS(ctypes.Structure):
217
+ _fields_ = [("fInherit", ctypes.wintypes.BOOL),
218
+ ("fReserved", ctypes.wintypes.BOOL),
219
+ ("dwFlags", ctypes.wintypes.DWORD)]
220
+ dll = ctypes.windll.user32
221
+ h = dll.GetProcessWindowStation()
222
+ if not h:
223
+ raise ctypes.WinError()
224
+ uof = USEROBJECTFLAGS()
225
+ needed = ctypes.wintypes.DWORD()
226
+ res = dll.GetUserObjectInformationW(h,
227
+ UOI_FLAGS,
228
+ ctypes.byref(uof),
229
+ ctypes.sizeof(uof),
230
+ ctypes.byref(needed))
231
+ if not res:
232
+ raise ctypes.WinError()
233
+ if not bool(uof.dwFlags & WSF_VISIBLE):
234
+ reason = "gui not available (WSF_VISIBLE flag not set)"
235
+ elif sys.platform == 'darwin':
236
+ # The Aqua Tk implementations on OS X can abort the process if
237
+ # being called in an environment where a window server connection
238
+ # cannot be made, for instance when invoked by a buildbot or ssh
239
+ # process not running under the same user id as the current console
240
+ # user. To avoid that, raise an exception if the window manager
241
+ # connection is not available.
242
+ from ctypes import cdll, c_int, pointer, Structure
243
+ from ctypes.util import find_library
244
+
245
+ app_services = cdll.LoadLibrary(find_library("ApplicationServices"))
246
+
247
+ if app_services.CGMainDisplayID() == 0:
248
+ reason = "gui tests cannot run without OS X window manager"
249
+ else:
250
+ class ProcessSerialNumber(Structure):
251
+ _fields_ = [("highLongOfPSN", c_int),
252
+ ("lowLongOfPSN", c_int)]
253
+ psn = ProcessSerialNumber()
254
+ psn_p = pointer(psn)
255
+ if ( (app_services.GetCurrentProcess(psn_p) < 0) or
256
+ (app_services.SetFrontProcess(psn_p) < 0) ):
257
+ reason = "cannot run without OS X gui process"
258
+
259
+ # check on every platform whether tkinter can actually do anything
260
+ if not reason:
261
+ try:
262
+ from tkinter import Tk
263
+ root = Tk()
264
+ root.withdraw()
265
+ root.update()
266
+ root.destroy()
267
+ except Exception as e:
268
+ err_string = str(e)
269
+ if len(err_string) > 50:
270
+ err_string = err_string[:50] + ' [...]'
271
+ reason = 'Tk unavailable due to {}: {}'.format(type(e).__name__,
272
+ err_string)
273
+
274
+ _is_gui_available.reason = reason
275
+ _is_gui_available.result = not reason
276
+
277
+ return _is_gui_available.result
278
+
279
+ def is_resource_enabled(resource):
280
+ """Test whether a resource is enabled.
281
+
282
+ Known resources are set by regrtest.py. If not running under regrtest.py,
283
+ all resources are assumed enabled unless use_resources has been set.
284
+ """
285
+ return use_resources is None or resource in use_resources
286
+
287
+ def requires(resource, msg=None):
288
+ """Raise ResourceDenied if the specified resource is not available."""
289
+ if not is_resource_enabled(resource):
290
+ if msg is None:
291
+ msg = "Use of the %r resource not enabled" % resource
292
+ raise ResourceDenied(msg)
293
+ if resource == 'gui' and not _is_gui_available():
294
+ raise ResourceDenied(_is_gui_available.reason)
295
+
296
+ def _requires_unix_version(sysname, min_version):
297
+ """Decorator raising SkipTest if the OS is `sysname` and the version is less
298
+ than `min_version`.
299
+
300
+ For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if
301
+ the FreeBSD version is less than 7.2.
302
+ """
303
+ import platform
304
+ min_version_txt = '.'.join(map(str, min_version))
305
+ version_txt = platform.release().split('-', 1)[0]
306
+ if platform.system() == sysname:
307
+ try:
308
+ version = tuple(map(int, version_txt.split('.')))
309
+ except ValueError:
310
+ skip = False
311
+ else:
312
+ skip = version < min_version
313
+ else:
314
+ skip = False
315
+
316
+ return unittest.skipIf(
317
+ skip,
318
+ f"{sysname} version {min_version_txt} or higher required, not "
319
+ f"{version_txt}"
320
+ )
321
+
322
+
323
+ def requires_freebsd_version(*min_version):
324
+ """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is
325
+ less than `min_version`.
326
+
327
+ For example, @requires_freebsd_version(7, 2) raises SkipTest if the FreeBSD
328
+ version is less than 7.2.
329
+ """
330
+ return _requires_unix_version('FreeBSD', min_version)
331
+
332
+ def requires_linux_version(*min_version):
333
+ """Decorator raising SkipTest if the OS is Linux and the Linux version is
334
+ less than `min_version`.
335
+
336
+ For example, @requires_linux_version(2, 6, 32) raises SkipTest if the Linux
337
+ version is less than 2.6.32.
338
+ """
339
+ return _requires_unix_version('Linux', min_version)
340
+
341
+ def requires_mac_ver(*min_version):
342
+ """Decorator raising SkipTest if the OS is Mac OS X and the OS X
343
+ version if less than min_version.
344
+
345
+ For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version
346
+ is lesser than 10.5.
347
+ """
348
+ def decorator(func):
349
+ @functools.wraps(func)
350
+ def wrapper(*args, **kw):
351
+ if sys.platform == 'darwin':
352
+ import platform
353
+ version_txt = platform.mac_ver()[0]
354
+ try:
355
+ version = tuple(map(int, version_txt.split('.')))
356
+ except ValueError:
357
+ pass
358
+ else:
359
+ if version < min_version:
360
+ min_version_txt = '.'.join(map(str, min_version))
361
+ raise unittest.SkipTest(
362
+ "Mac OS X %s or higher required, not %s"
363
+ % (min_version_txt, version_txt))
364
+ return func(*args, **kw)
365
+ wrapper.min_version = min_version
366
+ return wrapper
367
+ return decorator
368
+
369
+
370
+ def check_sanitizer(*, address=False, memory=False, ub=False):
371
+ """Returns True if Python is compiled with sanitizer support"""
372
+ if not (address or memory or ub):
373
+ raise ValueError('At least one of address, memory, or ub must be True')
374
+
375
+
376
+ _cflags = sysconfig.get_config_var('CFLAGS') or ''
377
+ _config_args = sysconfig.get_config_var('CONFIG_ARGS') or ''
378
+ memory_sanitizer = (
379
+ '-fsanitize=memory' in _cflags or
380
+ '--with-memory-sanitizer' in _config_args
381
+ )
382
+ address_sanitizer = (
383
+ '-fsanitize=address' in _cflags or
384
+ '--with-memory-sanitizer' in _config_args
385
+ )
386
+ ub_sanitizer = (
387
+ '-fsanitize=undefined' in _cflags or
388
+ '--with-undefined-behavior-sanitizer' in _config_args
389
+ )
390
+ return (
391
+ (memory and memory_sanitizer) or
392
+ (address and address_sanitizer) or
393
+ (ub and ub_sanitizer)
394
+ )
395
+
396
+
397
+ def skip_if_sanitizer(reason=None, *, address=False, memory=False, ub=False):
398
+ """Decorator raising SkipTest if running with a sanitizer active."""
399
+ if not reason:
400
+ reason = 'not working with sanitizers active'
401
+ skip = check_sanitizer(address=address, memory=memory, ub=ub)
402
+ return unittest.skipIf(skip, reason)
403
+
404
+
405
+ def system_must_validate_cert(f):
406
+ """Skip the test on TLS certificate validation failures."""
407
+ @functools.wraps(f)
408
+ def dec(*args, **kwargs):
409
+ try:
410
+ f(*args, **kwargs)
411
+ except OSError as e:
412
+ if "CERTIFICATE_VERIFY_FAILED" in str(e):
413
+ raise unittest.SkipTest("system does not contain "
414
+ "necessary certificates")
415
+ raise
416
+ return dec
417
+
418
+ # A constant likely larger than the underlying OS pipe buffer size, to
419
+ # make writes blocking.
420
+ # Windows limit seems to be around 512 B, and many Unix kernels have a
421
+ # 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure.
422
+ # (see issue #17835 for a discussion of this number).
423
+ PIPE_MAX_SIZE = 4 * 1024 * 1024 + 1
424
+
425
+ # A constant likely larger than the underlying OS socket buffer size, to make
426
+ # writes blocking.
427
+ # The socket buffer sizes can usually be tuned system-wide (e.g. through sysctl
428
+ # on Linux), or on a per-socket basis (SO_SNDBUF/SO_RCVBUF). See issue #18643
429
+ # for a discussion of this number.
430
+ SOCK_MAX_SIZE = 16 * 1024 * 1024 + 1
431
+
432
+ # decorator for skipping tests on non-IEEE 754 platforms
433
+ requires_IEEE_754 = unittest.skipUnless(
434
+ float.__getformat__("double").startswith("IEEE"),
435
+ "test requires IEEE 754 doubles")
436
+
437
+ def requires_zlib(reason='requires zlib'):
438
+ try:
439
+ import zlib
440
+ except ImportError:
441
+ zlib = None
442
+ return unittest.skipUnless(zlib, reason)
443
+
444
+ def requires_gzip(reason='requires gzip'):
445
+ try:
446
+ import gzip
447
+ except ImportError:
448
+ gzip = None
449
+ return unittest.skipUnless(gzip, reason)
450
+
451
+ def requires_bz2(reason='requires bz2'):
452
+ try:
453
+ import bz2
454
+ except ImportError:
455
+ bz2 = None
456
+ return unittest.skipUnless(bz2, reason)
457
+
458
+ def requires_lzma(reason='requires lzma'):
459
+ try:
460
+ import lzma
461
+ except ImportError:
462
+ lzma = None
463
+ return unittest.skipUnless(lzma, reason)
464
+
465
# Skip decorator for tests that exercise the deprecated Unicode C API.
requires_legacy_unicode_capi = unittest.skipUnless(unicode_legacy_string,
                   'requires legacy Unicode C API')

# Platform detection flags.
is_jython = sys.platform.startswith('java')
is_android = hasattr(sys, 'getandroidapilevel')

# Path of the POSIX shell, or None on platforms without one.
if sys.platform in ('win32', 'vxworks'):
    unix_shell = None
else:
    unix_shell = '/system/bin/sh' if is_android else '/bin/sh'

# Define the URL of a dedicated HTTP server for the network tests.
# The URL must use clear-text HTTP: no redirection to encrypted HTTPS.
TEST_HTTP_URL = "http://www.pythontest.net"

# Set by libregrtest/main.py so we can skip tests that are not
# useful for PGO.
PGO = False

# Set by libregrtest/main.py if we are running the extended (time
# consuming) PGO task.  If this is True, PGO is also True.
PGO_EXTENDED = False

# TEST_SUPPORT_DIR is the directory of this package; TEST_HOME_DIR is the
# top level directory of the "test" package that contains Python's
# regression test suite.
TEST_SUPPORT_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_HOME_DIR = os.path.dirname(TEST_SUPPORT_DIR)

# TEST_DATA_DIR is used as a target download location for remote resources.
TEST_DATA_DIR = os.path.join(TEST_HOME_DIR, "data")
496
+
497
+
498
def darwin_malloc_err_warning(test_name):
    """Assure user that loud errors generated by macOS libc's malloc are
    expected."""
    if sys.platform != 'darwin':
        # Only macOS emits these malloc diagnostics.
        return

    import shutil
    msg = ' NOTICE '
    detail = (f'{test_name} may generate "malloc can\'t allocate region"\n'
              'warnings on macOS systems. This behavior is known. Do not\n'
              'report a bug unless tests are also failing. See bpo-40928.')

    # Frame the notice with dashes across the full terminal width.
    width, _ = shutil.get_terminal_size()
    print(msg.center(width, '-'))
    print(detail)
    print('-' * width)
514
+
515
+
516
def findfile(filename, subdir=None):
    """Try to find a file on sys.path or in the test directory.  If it is not
    found the argument passed to the function is returned (this does not
    necessarily signal failure; could still be the legitimate path).

    Setting *subdir* indicates a relative path to use to find the file
    rather than looking directly in the path directories.
    """
    if os.path.isabs(filename):
        return filename
    if subdir is not None:
        filename = os.path.join(subdir, filename)
    # Search the test directory first, then every sys.path entry.
    for directory in [TEST_HOME_DIR] + sys.path:
        candidate = os.path.join(directory, filename)
        if os.path.exists(candidate):
            return candidate
    return filename
533
+
534
+
535
def sortdict(dict):
    "Like repr(dict), but in sorted order."
    pairs = ("%r: %r" % pair for pair in sorted(dict.items()))
    return "{%s}" % ", ".join(pairs)
541
+
542
def check_syntax_error(testcase, statement, errtext='', *, lineno=None, offset=None):
    """Assert that compiling *statement* raises a SyntaxError whose message
    matches the regex *errtext*; optionally check its lineno/offset."""
    with testcase.assertRaisesRegex(SyntaxError, errtext) as cm:
        compile(statement, '<test string>', 'exec')
    exc = cm.exception
    # The error must always carry a location, even if one was not requested.
    testcase.assertIsNotNone(exc.lineno)
    if lineno is not None:
        testcase.assertEqual(exc.lineno, lineno)
    testcase.assertIsNotNone(exc.offset)
    if offset is not None:
        testcase.assertEqual(exc.offset, offset)
552
+
553
+
554
def open_urlresource(url, *args, **kw):
    """Open a local copy of the resource at *url*, downloading it into
    TEST_DATA_DIR first if necessary.

    Extra positional/keyword arguments are forwarded to open().  The
    optional *check* keyword is a predicate called with the open file;
    a false result invalidates the cached copy and forces a re-download.
    Raises TestFailed if the downloaded resource does not pass *check*.
    """
    import urllib.request, urllib.parse
    from .os_helper import unlink
    try:
        import gzip
    except ImportError:
        gzip = None

    check = kw.pop('check', None)

    filename = urllib.parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!

    fn = os.path.join(TEST_DATA_DIR, filename)

    def check_valid_file(fn):
        # Return an open file for a usable cached copy, or None (implicitly)
        # when the predicate rejects it.
        f = open(fn, *args, **kw)
        if check is None:
            return f
        elif check(f):
            f.seek(0)  # rewind after the predicate consumed the file
            return f
        f.close()

    if os.path.exists(fn):
        f = check_valid_file(fn)
        if f is not None:
            return f
        unlink(fn)  # stale or corrupt cache: remove and re-download

    # Verify the requirement before downloading the file
    requires('urlfetch')

    if verbose:
        print('\tfetching %s ...' % url, file=get_original_stdout())
    opener = urllib.request.build_opener()
    if gzip:
        # Ask for a gzip-compressed transfer when we can decompress it.
        opener.addheaders.append(('Accept-Encoding', 'gzip'))
    f = opener.open(url, timeout=INTERNET_TIMEOUT)
    if gzip and f.headers.get('Content-Encoding') == 'gzip':
        f = gzip.GzipFile(fileobj=f)
    try:
        # Stream the response to disk chunk by chunk.
        with open(fn, "wb") as out:
            s = f.read()
            while s:
                out.write(s)
                s = f.read()
    finally:
        f.close()

    f = check_valid_file(fn)
    if f is not None:
        return f
    raise TestFailed('invalid resource %r' % fn)
607
+
608
+
609
@contextlib.contextmanager
def captured_output(stream_name):
    """Context manager used by captured_stdout/stdin/stderr: temporarily
    replace the sys stream *stream_name* with a StringIO and yield it."""
    import io
    original = getattr(sys, stream_name)
    setattr(sys, stream_name, io.StringIO())
    try:
        yield getattr(sys, stream_name)
    finally:
        # Always restore the real stream, even if the body raised.
        setattr(sys, stream_name, original)

def captured_stdout():
    """Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print("hello")
       self.assertEqual(stdout.getvalue(), "hello\\n")
    """
    return captured_output("stdout")

def captured_stderr():
    """Capture the output of sys.stderr:

       with captured_stderr() as stderr:
           print("hello", file=sys.stderr)
       self.assertEqual(stderr.getvalue(), "hello\\n")
    """
    return captured_output("stderr")

def captured_stdin():
    """Capture the input to sys.stdin:

       with captured_stdin() as stdin:
           stdin.write('hello\\n')
           stdin.seek(0)
           # call test code that consumes from sys.stdin
           captured = input()
       self.assertEqual(captured, "hello")
    """
    return captured_output("stdin")
650
+
651
+
652
def gc_collect():
    """Force as many objects as possible to be collected.

    In non-CPython implementations of Python, this is needed because timely
    deallocation is not guaranteed by the garbage collector. (Even in CPython
    this can be the case in case of reference cycles.) This means that __del__
    methods may be called later than expected and weakrefs may remain alive for
    longer than expected. This function tries its best to force all garbage
    objects to disappear.
    """
    import gc
    gc.collect()
    if is_jython:
        # Jython's collector needs a moment plus an extra pass.
        time.sleep(0.1)
        gc.collect()
    gc.collect()
668
+
669
@contextlib.contextmanager
def disable_gc():
    """Context manager: disable the cyclic GC for the duration of the block,
    re-enabling it on exit only if it was enabled on entry."""
    import gc
    was_enabled = gc.isenabled()
    gc.disable()
    try:
        yield
    finally:
        if was_enabled:
            gc.enable()
679
+
680
+
681
def python_is_optimized():
    """Find if Python was built with optimizations."""
    cflags = sysconfig.get_config_var('PY_CFLAGS') or ''
    # The last -O flag on the compiler command line wins.
    opt_flags = [tok for tok in cflags.split() if tok.startswith('-O')]
    final_opt = opt_flags[-1] if opt_flags else ""
    return final_opt not in ('', '-O0', '-Og')
689
+
690
+
691
# struct format of the common object header (refcount + type pointer);
# trace-ref debug builds (sys.getobjects) prepend two extra pointers.
_header = 'nP'
_align = '0n'
if hasattr(sys, "getobjects"):
    _header = '2P' + _header
    _align = '0P'
# Variable-size objects add an ob_size field.
_vheader = _header + 'n'

def calcobjsize(fmt):
    """Return the expected size of a fixed-size object whose fields are
    described by the struct format *fmt*."""
    import struct
    return struct.calcsize(_header + fmt + _align)

def calcvobjsize(fmt):
    """Return the expected size of a variable-size object whose fields are
    described by the struct format *fmt*."""
    import struct
    return struct.calcsize(_vheader + fmt + _align)
705
+
706
+
707
# Type-flag bits mirrored from Include/object.h.
_TPFLAGS_HAVE_GC = 1<<14
_TPFLAGS_HEAPTYPE = 1<<9

def check_sizeof(test, o, size):
    """Assert that sys.getsizeof(*o*) equals *size*, adjusting *size* for
    the GC header when the object participates in cyclic GC."""
    import _testinternalcapi
    result = sys.getsizeof(o)
    # add GC header size
    if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or\
        ((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))):
        size += _testinternalcapi.SIZEOF_PYGC_HEAD
    msg = 'wrong size for %s: got %d, expected %d' \
            % (type(o), result, size)
    test.assertEqual(result, size, msg)
720
+
721
+ #=======================================================================
722
+ # Decorator for running a function in a different locale, correctly resetting
723
+ # it afterwards.
724
+
725
@contextlib.contextmanager
def run_with_locale(catstr, *locales):
    """Context manager: set the locale category named *catstr* (e.g.
    'LC_ALL') to the first of *locales* that can be installed, restoring
    the original locale on exit.  Does nothing if the original locale
    cannot be determined."""
    try:
        import locale
        category = getattr(locale, catstr)
        orig_locale = locale.setlocale(category)
    except AttributeError:
        # if the test author gives us an invalid category string
        raise
    except:
        # cannot retrieve original locale, so do nothing
        locale = orig_locale = None
    else:
        # Try each candidate until one is accepted by the C library.
        for loc in locales:
            try:
                locale.setlocale(category, loc)
                break
            except:
                pass

    try:
        yield
    finally:
        # Restore only when both the module and the saved locale are usable.
        if locale and orig_locale:
            locale.setlocale(category, orig_locale)
750
+
751
+ #=======================================================================
752
+ # Decorator for running a function in a specific timezone, correctly
753
+ # resetting it afterwards.
754
+
755
def run_with_tz(tz):
    """Decorator: run the test with the TZ environment variable set to *tz*,
    restoring the previous timezone afterwards.  Skips the test on platforms
    without time.tzset() (e.g. Windows)."""
    def decorator(func):
        def inner(*args, **kwds):
            try:
                tzset = time.tzset
            except AttributeError:
                raise unittest.SkipTest("tzset required")
            orig_tz = os.environ.get('TZ')
            os.environ['TZ'] = tz
            tzset()

            # now run the function, resetting the tz on exceptions
            try:
                return func(*args, **kwds)
            finally:
                if orig_tz is None:
                    del os.environ['TZ']
                else:
                    os.environ['TZ'] = orig_tz
                time.tzset()

        inner.__name__ = func.__name__
        inner.__doc__ = func.__doc__
        return inner
    return decorator
783
+
784
+ #=======================================================================
785
+ # Big-memory-test support. Separate from 'resources' because memory use
786
+ # should be configurable.
787
+
788
+ # Some handy shorthands. Note that these are used for byte-limits as well
789
+ # as size-limits, in the various bigmem tests
790
# Handy size shorthands, used both as byte-limits and as size-limits in
# the various bigmem tests.
_1M = 1024*1024
_1G = 1024 * _1M
_2G = 2 * _1G
_4G = 4 * _1G

MAX_Py_ssize_t = sys.maxsize

def set_memlimit(limit):
    """Parse a human-readable memory limit such as '2g' or '512m' and store
    it in the module globals max_memuse / real_max_memuse.

    Raises ValueError for unparseable limits or limits below ~2 GiB.
    """
    global max_memuse
    global real_max_memuse
    sizes = {
        'k': 1024,
        'm': _1M,
        'g': _1G,
        't': 1024*_1G,
    }
    match = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
                     re.IGNORECASE | re.VERBOSE)
    if match is None:
        raise ValueError('Invalid memory limit %r' % (limit,))
    memlimit = int(float(match.group(1)) * sizes[match.group(3).lower()])
    real_max_memuse = memlimit
    # Clamp to the largest size a Py_ssize_t can address.
    if memlimit > MAX_Py_ssize_t:
        memlimit = MAX_Py_ssize_t
    if memlimit < _2G - 1:
        raise ValueError('Memory limit %r too low to be useful' % (limit,))
    max_memuse = memlimit
817
+
818
class _MemoryWatchdog:
    """An object which periodically watches the process' memory consumption
    and prints it out.
    """

    def __init__(self):
        # Linux procfs file with this process's memory statistics (pages).
        self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
        self.started = False

    def start(self):
        """Spawn the watchdog subprocess, feeding it our statm via stdin."""
        import warnings
        try:
            f = open(self.procfile, 'r')
        except OSError as e:
            # No procfs available (non-Linux): warn and run without watchdog.
            warnings.warn('/proc not available for stats: {}'.format(e),
                          RuntimeWarning)
            sys.stderr.flush()
            return

        import subprocess
        with f:
            watchdog_script = findfile("memory_watchdog.py")
            self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script],
                                                 stdin=f,
                                                 stderr=subprocess.DEVNULL)
        self.started = True

    def stop(self):
        """Terminate the watchdog subprocess, if one was started."""
        if self.started:
            self.mem_watchdog.terminate()
            self.mem_watchdog.wait()
849
+
850
+
851
def bigmemtest(size, memuse, dry_run=True):
    """Decorator for bigmem tests.

    'size' is a requested size for the test (in arbitrary, test-interpreted
    units.) 'memuse' is the number of bytes per unit for the test, or a good
    estimate of it. For example, a test that needs two byte buffers, of 4 GiB
    each, could be decorated with @bigmemtest(size=_4G, memuse=2).

    The 'size' argument is normally passed to the decorated test method as an
    extra argument. If 'dry_run' is true, the value passed to the test method
    may be less than the requested value. If 'dry_run' is false, it means the
    test doesn't support dummy runs when -M is not specified.
    """
    def decorator(f):
        def wrapper(self):
            size = wrapper.size
            memuse = wrapper.memuse
            if not real_max_memuse:
                # -M was not given: use a tiny odd size for a dry run.
                maxsize = 5147
            else:
                maxsize = size

            # Skip when the configured limit cannot accommodate the test.
            if ((real_max_memuse or not dry_run)
                and real_max_memuse < maxsize * memuse):
                raise unittest.SkipTest(
                    "not enough memory: %.1fG minimum needed"
                    % (size * memuse / (1024 ** 3)))

            if real_max_memuse and verbose:
                print()
                print(" ... expected peak memory use: {peak:.1f}G"
                      .format(peak=size * memuse / (1024 ** 3)))
                watchdog = _MemoryWatchdog()
                watchdog.start()
            else:
                watchdog = None

            try:
                return f(self, maxsize)
            finally:
                if watchdog:
                    watchdog.stop()

        # Expose the requested parameters on the wrapper so the runner
        # (and the wrapper itself) can read them back.
        wrapper.size = size
        wrapper.memuse = memuse
        return wrapper
    return decorator
898
+
899
def bigaddrspacetest(f):
    """Decorator for tests that fill the address space."""
    def wrapper(self):
        if max_memuse >= MAX_Py_ssize_t:
            return f(self)
        # Not enough memory configured for an address-space-filling test.
        if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31:
            raise unittest.SkipTest(
                "not enough memory: try a 32-bit build instead")
        else:
            raise unittest.SkipTest(
                "not enough memory: %.1fG minimum needed"
                % (MAX_Py_ssize_t / (1024 ** 3)))
    return wrapper
913
+
914
+ #=======================================================================
915
+ # unittest integration.
916
+
917
class BasicTestRunner:
    """Minimal test runner: execute a suite into a plain TestResult."""
    def run(self, test):
        result = unittest.TestResult()
        test(result)
        return result
922
+
923
+ def _id(obj):
924
+ return obj
925
+
926
def requires_resource(resource):
    """Return a decorator that skips the test unless *resource* is enabled."""
    if resource == 'gui' and not _is_gui_available():
        return unittest.skip(_is_gui_available.reason)
    if is_resource_enabled(resource):
        return _id
    return unittest.skip("resource {0!r} is not enabled".format(resource))
933
+
934
+ def cpython_only(test):
935
+ """
936
+ Decorator for tests only applicable on CPython.
937
+ """
938
+ return impl_detail(cpython=True)(test)
939
+
940
+ def impl_detail(msg=None, **guards):
941
+ if check_impl_detail(**guards):
942
+ return _id
943
+ if msg is None:
944
+ guardnames, default = _parse_guards(guards)
945
+ if default:
946
+ msg = "implementation detail not available on {0}"
947
+ else:
948
+ msg = "implementation detail specific to {0}"
949
+ guardnames = sorted(guardnames.keys())
950
+ msg = msg.format(' or '.join(guardnames))
951
+ return unittest.skip(msg)
952
+
953
+ def _parse_guards(guards):
954
+ # Returns a tuple ({platform_name: run_me}, default_value)
955
+ if not guards:
956
+ return ({'cpython': True}, False)
957
+ is_true = list(guards.values())[0]
958
+ assert list(guards.values()) == [is_true] * len(guards) # all True or all False
959
+ return (guards, not is_true)
960
+
961
+ # Use the following check to guard CPython's implementation-specific tests --
962
+ # or to run them only on the implementation(s) guarded by the arguments.
963
+ def check_impl_detail(**guards):
964
+ """This function returns True or False depending on the host platform.
965
+ Examples:
966
+ if check_impl_detail(): # only on CPython (default)
967
+ if check_impl_detail(jython=True): # only on Jython
968
+ if check_impl_detail(cpython=False): # everywhere except on CPython
969
+ """
970
+ guards, default = _parse_guards(guards)
971
+ return guards.get(sys.implementation.name, default)
972
+
973
+
974
def no_tracing(func):
    """Decorator to temporarily turn off tracing for the duration of a test."""
    if not hasattr(sys, 'gettrace'):
        return func

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        saved_trace = sys.gettrace()
        try:
            sys.settrace(None)
            return func(*args, **kwargs)
        finally:
            sys.settrace(saved_trace)
    return wrapper
988
+
989
+
990
def refcount_test(test):
    """Decorator for tests which involve reference counting.

    To start, the decorator does not run the test if is not run by CPython.
    After that, any trace function is unset during the test to prevent
    unexpected refcounts caused by the trace function.

    """
    guarded = cpython_only(test)
    return no_tracing(guarded)
999
+
1000
+
1001
+ def _filter_suite(suite, pred):
1002
+ """Recursively filter test cases in a suite based on a predicate."""
1003
+ newtests = []
1004
+ for test in suite._tests:
1005
+ if isinstance(test, unittest.TestSuite):
1006
+ _filter_suite(test, pred)
1007
+ newtests.append(test)
1008
+ else:
1009
+ if pred(test):
1010
+ newtests.append(test)
1011
+ suite._tests = newtests
1012
+
1013
def _run_suite(suite):
    """Run tests from a unittest.TestSuite-derived class."""
    runner = get_test_runner(sys.stdout,
                             verbosity=verbose,
                             capture_output=(junit_xml_list is not None))

    result = runner.run(suite)

    if junit_xml_list is not None:
        junit_xml_list.append(result.get_xml_element())

    # A suite that ran nothing and skipped nothing is itself an error.
    if not result.testsRun and not result.skipped:
        raise TestDidNotRun
    if not result.wasSuccessful():
        # With exactly one failure or error, report its traceback directly;
        # otherwise point the user at verbose mode.
        if len(result.errors) == 1 and not result.failures:
            err = result.errors[0][1]
        elif len(result.failures) == 1 and not result.errors:
            err = result.failures[0][1]
        else:
            err = "multiple errors occurred"
            if not verbose: err += "; run in verbose mode for details"
        errors = [(str(tc), exc_str) for tc, exc_str in result.errors]
        failures = [(str(tc), exc_str) for tc, exc_str in result.failures]
        raise TestFailedWithDetails(err, errors, failures)
1037
+
1038
+
1039
+ # By default, don't filter tests
1040
+ _match_test_func = None
1041
+
1042
+ _accept_test_patterns = None
1043
+ _ignore_test_patterns = None
1044
+
1045
+
1046
+ def match_test(test):
1047
+ # Function used by support.run_unittest() and regrtest --list-cases
1048
+ if _match_test_func is None:
1049
+ return True
1050
+ else:
1051
+ return _match_test_func(test.id())
1052
+
1053
+
1054
+ def _is_full_match_test(pattern):
1055
+ # If a pattern contains at least one dot, it's considered
1056
+ # as a full test identifier.
1057
+ # Example: 'test.test_os.FileTests.test_access'.
1058
+ #
1059
+ # ignore patterns which contain fnmatch patterns: '*', '?', '[...]'
1060
+ # or '[!...]'. For example, ignore 'test_access*'.
1061
+ return ('.' in pattern) and (not re.search(r'[?*\[\]]', pattern))
1062
+
1063
+
1064
def set_match_tests(accept_patterns=None, ignore_patterns=None):
    """Install the filter used by match_test().

    *accept_patterns* and *ignore_patterns* are sequences of fnmatch-style
    patterns or full dotted test identifiers; None is treated as an empty
    sequence.  Match functions are recompiled only when the corresponding
    pattern list actually changed since the previous call.
    """
    global _match_test_func, _accept_test_patterns, _ignore_test_patterns


    if accept_patterns is None:
        accept_patterns = ()
    if ignore_patterns is None:
        ignore_patterns = ()

    accept_func = ignore_func = None

    # Recompile only when the patterns differ from the cached ones.
    if accept_patterns != _accept_test_patterns:
        accept_patterns, accept_func = _compile_match_function(accept_patterns)
    if ignore_patterns != _ignore_test_patterns:
        ignore_patterns, ignore_func = _compile_match_function(ignore_patterns)

    # Create a copy since patterns can be mutable and so modified later
    _accept_test_patterns = tuple(accept_patterns)
    _ignore_test_patterns = tuple(ignore_patterns)

    if accept_func is not None or ignore_func is not None:
        def match_function(test_id):
            # A test runs when it matches the accept filter (or there is
            # none) and does not match the ignore filter.
            accept = True
            ignore = False
            if accept_func:
                accept = accept_func(test_id)
            if ignore_func:
                ignore = ignore_func(test_id)
            return accept and not ignore

        _match_test_func = match_function
1095
+
1096
+
1097
+ def _compile_match_function(patterns):
1098
+ if not patterns:
1099
+ func = None
1100
+ # set_match_tests(None) behaves as set_match_tests(())
1101
+ patterns = ()
1102
+ elif all(map(_is_full_match_test, patterns)):
1103
+ # Simple case: all patterns are full test identifier.
1104
+ # The test.bisect_cmd utility only uses such full test identifiers.
1105
+ func = set(patterns).__contains__
1106
+ else:
1107
+ import fnmatch
1108
+ regex = '|'.join(map(fnmatch.translate, patterns))
1109
+ # The search *is* case sensitive on purpose:
1110
+ # don't use flags=re.IGNORECASE
1111
+ regex_match = re.compile(regex).match
1112
+
1113
+ def match_test_regex(test_id):
1114
+ if regex_match(test_id):
1115
+ # The regex matches the whole identifier, for example
1116
+ # 'test.test_os.FileTests.test_access'.
1117
+ return True
1118
+ else:
1119
+ # Try to match parts of the test identifier.
1120
+ # For example, split 'test.test_os.FileTests.test_access'
1121
+ # into: 'test', 'test_os', 'FileTests' and 'test_access'.
1122
+ return any(map(regex_match, test_id.split(".")))
1123
+
1124
+ func = match_test_regex
1125
+
1126
+ return patterns, func
1127
+
1128
+
1129
def run_unittest(*classes):
    """Run tests from unittest.TestCase-derived classes."""
    valid_types = (unittest.TestSuite, unittest.TestCase)
    suite = unittest.TestSuite()
    for cls in classes:
        if isinstance(cls, str):
            # A string names an already-imported module to scan for tests.
            if cls not in sys.modules:
                raise ValueError("str arguments must be keys in sys.modules")
            suite.addTest(unittest.findTestCases(sys.modules[cls]))
        elif isinstance(cls, valid_types):
            suite.addTest(cls)
        else:
            suite.addTest(unittest.makeSuite(cls))
    _filter_suite(suite, match_test)
    _run_suite(suite)
1145
+
1146
+ #=======================================================================
1147
+ # Check for the presence of docstrings.
1148
+
1149
+ # Rather than trying to enumerate all the cases where docstrings may be
1150
+ # disabled, we just check for that directly
1151
+
1152
def _check_docstrings():
    """Just used to check if docstrings are enabled"""

# C docstrings are compiled out of non-Windows CPython builds configured
# without WITH_DOC_STRINGS.
MISSING_C_DOCSTRINGS = (check_impl_detail() and
                        sys.platform != 'win32' and
                        not sysconfig.get_config_var('WITH_DOC_STRINGS'))

# Python-level docstrings vanish under -OO; probe our own function.
HAVE_DOCSTRINGS = (_check_docstrings.__doc__ is not None and
                   not MISSING_C_DOCSTRINGS)

requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS,
                                          "test requires docstrings")
1164
+
1165
+
1166
+ #=======================================================================
1167
+ # doctest driver.
1168
+
1169
def run_doctest(module, verbosity=None, optionflags=0):
    """Run doctest on the given module.  Return (#failures, #tests).

    If optional argument verbosity is not specified (or is None), pass
    support's belief about verbosity on to doctest.  Else doctest's
    usual behavior is used (it searches sys.argv for -v).
    """

    import doctest

    if verbosity is None:
        verbosity = verbose
    else:
        # Any explicit value means "let doctest decide from sys.argv".
        verbosity = None

    f, t = doctest.testmod(module, verbose=verbosity, optionflags=optionflags)
    if f:
        raise TestFailed("%d of %d doctests failed" % (f, t))
    if verbose:
        print('doctest (%s) ... %d tests with zero failures' %
              (module.__name__, t))
    return f, t
1191
+
1192
+
1193
+ #=======================================================================
1194
+ # Support for saving and restoring the imported modules.
1195
+
1196
def print_warning(msg):
    # bpo-39983: write to sys.__stderr__ so the warning stays visible even
    # while sys.stderr is temporarily captured by a test.
    for line in msg.splitlines():
        print(f"Warning -- {line}", file=sys.__stderr__, flush=True)
1201
+
1202
+
1203
+ # Flag used by saved_test_environment of test.libregrtest.save_env,
1204
+ # to check if a test modified the environment. The flag should be set to False
1205
+ # before running a new test.
1206
+ #
1207
+ # For example, threading_helper.threading_cleanup() sets the flag is the function fails
1208
+ # to cleanup threads.
1209
environment_altered = False

def reap_children():
    """Use this function at the end of test_main() whenever sub-processes
    are started. This will help ensure that no extra children (zombies)
    stick around to hog resources and create problems when looking
    for refleaks.
    """
    global environment_altered

    # Need os.waitpid(-1, os.WNOHANG): Windows is not supported
    if not (hasattr(os, 'waitpid') and hasattr(os, 'WNOHANG')):
        return

    # Reap all our dead child processes so we don't leave zombies around.
    # These hog resources and might be causing some of the buildbots to die.
    while True:
        try:
            # Read the exit status of any child process which already completed
            pid, status = os.waitpid(-1, os.WNOHANG)
        except OSError:
            # No children left (ECHILD) or waitpid failed: stop reaping.
            break

        if pid == 0:
            # Children exist but none have exited yet.
            break

        # A leftover child means the test altered the environment.
        print_warning(f"reap_children() reaped child process {pid}")
        environment_altered = True
1237
+
1238
+
1239
@contextlib.contextmanager
def swap_attr(obj, attr, new_val):
    """Temporary swap out an attribute with a new object.

    Usage:
        with swap_attr(obj, "attr", 5):
            ...

    This will set obj.attr to 5 for the duration of the with: block,
    restoring the old value at the end of the block. If `attr` doesn't
    exist on `obj`, it will be created and then deleted at the end of the
    block.

    The old value (or None if it doesn't exist) will be assigned to the
    target of the "as" clause, if there is one.
    """
    if not hasattr(obj, attr):
        # Attribute absent: create it, then remove it again on exit.
        setattr(obj, attr, new_val)
        try:
            yield
        finally:
            if hasattr(obj, attr):
                delattr(obj, attr)
    else:
        saved = getattr(obj, attr)
        setattr(obj, attr, new_val)
        try:
            yield saved
        finally:
            setattr(obj, attr, saved)
1269
+
1270
@contextlib.contextmanager
def swap_item(obj, item, new_val):
    """Temporary swap out an item with a new object.

    Usage:
        with swap_item(obj, "item", 5):
            ...

    This will set obj["item"] to 5 for the duration of the with: block,
    restoring the old value at the end of the block. If `item` doesn't
    exist on `obj`, it will be created and then deleted at the end of the
    block.

    The old value (or None if it doesn't exist) will be assigned to the
    target of the "as" clause, if there is one.
    """
    if item not in obj:
        # Key absent: create it, then remove it again on exit.
        obj[item] = new_val
        try:
            yield
        finally:
            if item in obj:
                del obj[item]
    else:
        saved = obj[item]
        obj[item] = new_val
        try:
            yield saved
        finally:
            obj[item] = saved
1300
+
1301
def args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    settings in sys.flags and sys.warnoptions."""
    # Delegate to the (private) helper subprocess already maintains.
    import subprocess
    return subprocess._args_from_interpreter_flags()

def optim_args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    optimization settings in sys.flags."""
    import subprocess
    return subprocess._optim_args_from_interpreter_flags()
1312
+
1313
+
1314
class Matcher(object):

    # Keys whose string values are compared by substring, not equality.
    _partial_matches = ('msg', 'message')

    def matches(self, d, **kwargs):
        """
        Try to match a single dict with the supplied arguments.

        Keys whose values are strings and which are in self._partial_matches
        will be checked for partial (i.e. substring) matches. You can extend
        this scheme to (for example) do regular expression matching, etc.
        """
        return all(self.match_value(k, d.get(k), v)
                   for k, v in kwargs.items())

    def match_value(self, k, dv, v):
        """
        Try to match a single stored value (dv) with a supplied value (v).
        """
        if type(v) != type(dv):
            # Different types can never match.
            return False
        if type(dv) is not str or k not in self._partial_matches:
            return v == dv
        # Substring match for the designated message-like keys.
        return dv.find(v) >= 0
1346
+
1347
+
1348
# Cached result of the UCRT bug probe below (None = not probed yet).
_buggy_ucrt = None

def skip_if_buggy_ucrt_strfptime(test):
    """
    Skip decorator for tests that use buggy strptime/strftime

    If the UCRT bugs are present time.localtime().tm_zone will be
    an empty string, otherwise we assume the UCRT bugs are fixed

    See bpo-37552 [Windows] strptime/strftime return invalid
    results with UCRT version 17763.615
    """
    import locale
    global _buggy_ucrt
    if _buggy_ucrt is None:
        _buggy_ucrt = bool(
            sys.platform == 'win32'
            and locale.getdefaultlocale()[1] == 'cp65001'
            and time.localtime().tm_zone == ''
        )
    if _buggy_ucrt:
        return unittest.skip("buggy MSVC UCRT strptime/strftime")(test)
    return test
1369
+
1370
+ class PythonSymlink:
1371
+ """Creates a symlink for the current Python executable"""
1372
+ def __init__(self, link=None):
1373
+ from .os_helper import TESTFN
1374
+
1375
+ self.link = link or os.path.abspath(TESTFN)
1376
+ self._linked = []
1377
+ self.real = os.path.realpath(sys.executable)
1378
+ self._also_link = []
1379
+
1380
+ self._env = None
1381
+
1382
+ self._platform_specific()
1383
+
1384
+ if sys.platform == "win32":
1385
+ def _platform_specific(self):
1386
+ import glob
1387
+ import _winapi
1388
+
1389
+ if os.path.lexists(self.real) and not os.path.exists(self.real):
1390
+ # App symlink appears to not exist, but we want the
1391
+ # real executable here anyway
1392
+ self.real = _winapi.GetModuleFileName(0)
1393
+
1394
+ dll = _winapi.GetModuleFileName(sys.dllhandle)
1395
+ src_dir = os.path.dirname(dll)
1396
+ dest_dir = os.path.dirname(self.link)
1397
+ self._also_link.append((
1398
+ dll,
1399
+ os.path.join(dest_dir, os.path.basename(dll))
1400
+ ))
1401
+ for runtime in glob.glob(os.path.join(glob.escape(src_dir), "vcruntime*.dll")):
1402
+ self._also_link.append((
1403
+ runtime,
1404
+ os.path.join(dest_dir, os.path.basename(runtime))
1405
+ ))
1406
+
1407
+ self._env = {k.upper(): os.getenv(k) for k in os.environ}
1408
+ self._env["PYTHONHOME"] = os.path.dirname(self.real)
1409
+ if sysconfig.is_python_build(True):
1410
+ self._env["PYTHONPATH"] = os.path.dirname(os.__file__)
1411
+ else:
1412
+ def _platform_specific(self):
1413
+ pass
1414
+
1415
+ def __enter__(self):
1416
+ os.symlink(self.real, self.link)
1417
+ self._linked.append(self.link)
1418
+ for real, link in self._also_link:
1419
+ os.symlink(real, link)
1420
+ self._linked.append(link)
1421
+ return self
1422
+
1423
+ def __exit__(self, exc_type, exc_value, exc_tb):
1424
+ for link in self._linked:
1425
+ try:
1426
+ os.remove(link)
1427
+ except IOError as ex:
1428
+ if verbose:
1429
+ print("failed to clean up {}: {}".format(link, ex))
1430
+
1431
+ def _call(self, python, args, env, returncode):
1432
+ import subprocess
1433
+ cmd = [python, *args]
1434
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
1435
+ stderr=subprocess.PIPE, env=env)
1436
+ r = p.communicate()
1437
+ if p.returncode != returncode:
1438
+ if verbose:
1439
+ print(repr(r[0]))
1440
+ print(repr(r[1]), file=sys.stderr)
1441
+ raise RuntimeError(
1442
+ 'unexpected return code: {0} (0x{0:08X})'.format(p.returncode))
1443
+ return r
1444
+
1445
    def call_real(self, *args, returncode=0):
        """Run the real executable with the inherited environment."""
        return self._call(self.real, args, None, returncode)
1447
+
1448
    def call_link(self, *args, returncode=0):
        """Run the symlinked executable with the prepared environment."""
        return self._call(self.link, args, self._env, returncode)
1450
+
1451
+
1452
def skip_if_pgo_task(test):
    """Skip decorator for tests not run in (non-extended) PGO task"""
    # PGO / PGO_EXTENDED are module-level flags (set by the test driver).
    ok = not PGO or PGO_EXTENDED
    msg = "Not run for (non-extended) PGO task"
    return test if ok else unittest.skip(msg)(test)
1457
+
1458
+
1459
def detect_api_mismatch(ref_api, other_api, *, ignore=()):
    """Return the set of items present in *ref_api* but not *other_api*.

    Names listed in *ignore* are excluded from the check.  Private names
    (leading underscore) are skipped, except for magic/dunder names, which
    are always compared.
    """
    missing = set(dir(ref_api)) - set(dir(other_api)) - set(ignore)
    return {name for name in missing
            if not name.startswith('_') or name.endswith('__')}
1472
+
1473
+
1474
def check__all__(test_case, module, name_of_module=None, extra=(),
                 not_exported=()):
    """Assert that the __all__ variable of 'module' contains all public names.

    The module's public names (its API) are detected automatically based on
    whether they match the public name convention and were defined in
    'module'.

    The 'name_of_module' argument can specify (as a string or tuple thereof)
    what module(s) an API could be defined in in order to be detected as a
    public API. One case for this is when 'module' imports part of its public
    API from other modules, possibly a C backend (like 'csv' and its '_csv').

    The 'extra' argument can be a set of names that wouldn't otherwise be
    automatically detected as "public", like objects without a proper
    '__module__' attribute. If provided, it will be added to the
    automatically detected ones.

    The 'not_exported' argument can be a set of names that must not be treated
    as part of the public API even though their names indicate otherwise.

    Usage:
        import bar
        import foo
        import unittest
        from test import support

        class MiscTestCase(unittest.TestCase):
            def test__all__(self):
                support.check__all__(self, foo)

        class OtherTestCase(unittest.TestCase):
            def test__all__(self):
                extra = {'BAR_CONST', 'FOO_CONST'}
                not_exported = {'baz'}  # Undocumented name.
                # bar imports part of its API from _bar.
                support.check__all__(self, bar, ('bar', '_bar'),
                                     extra=extra, not_exported=not_exported)

    """

    if name_of_module is None:
        name_of_module = (module.__name__, )
    elif isinstance(name_of_module, str):
        name_of_module = (name_of_module, )

    expected = set(extra)

    for name in dir(module):
        # Private names and explicitly excluded names are never expected.
        if name.startswith('_') or name in not_exported:
            continue
        obj = getattr(module, name)
        # Public if defined in one of the accepted modules, or if it lacks
        # __module__ entirely (some C-level objects) and is not a module.
        if (getattr(obj, '__module__', None) in name_of_module or
                (not hasattr(obj, '__module__') and
                 not isinstance(obj, types.ModuleType))):
            expected.add(name)
    test_case.assertCountEqual(module.__all__, expected)
1531
+
1532
+
1533
def suppress_msvcrt_asserts(verbose=False):
    """Disable Windows CRT error dialogs so failures don't block test runs.

    No-op on platforms without the msvcrt module.  With *verbose* true,
    CRT reports are redirected to stderr instead of being silenced.
    """
    try:
        import msvcrt
    except ImportError:
        # Not Windows: nothing to configure.
        return

    msvcrt.SetErrorMode(
        msvcrt.SEM_FAILCRITICALERRORS
        | msvcrt.SEM_NOALIGNMENTFAULTEXCEPT
        | msvcrt.SEM_NOGPFAULTERRORBOX
        | msvcrt.SEM_NOOPENFILEERRORBOX
    )

    # CrtSetReportMode() is only available in debug build
    if hasattr(msvcrt, 'CrtSetReportMode'):
        for report_type in (msvcrt.CRT_WARN, msvcrt.CRT_ERROR,
                            msvcrt.CRT_ASSERT):
            if verbose:
                msvcrt.CrtSetReportMode(report_type, msvcrt.CRTDBG_MODE_FILE)
                msvcrt.CrtSetReportFile(report_type, msvcrt.CRTDBG_FILE_STDERR)
            else:
                msvcrt.CrtSetReportMode(report_type, 0)
1552
+
1553
+
1554
class SuppressCrashReport:
    """Try to prevent a crash report from popping up.

    On Windows, don't display the Windows Error Reporting dialog. On UNIX,
    disable the creation of coredump file.
    """
    # Previous Windows error mode / RLIMIT_CORE value, saved by __enter__.
    old_value = None
    # Previous CRT report modes (Windows debug builds only).
    old_modes = None

    def __enter__(self):
        """On Windows, disable Windows Error Reporting dialogs using
        SetErrorMode() and CrtSetReportMode().

        On UNIX, try to save the previous core file size limit, then set
        soft limit to 0.
        """
        if sys.platform.startswith('win'):
            # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621.aspx
            try:
                import msvcrt
            except ImportError:
                return

            self.old_value = msvcrt.GetErrorMode()

            msvcrt.SetErrorMode(self.old_value | msvcrt.SEM_NOGPFAULTERRORBOX)

            # bpo-23314: Suppress assert dialogs in debug builds.
            # CrtSetReportMode() is only available in debug build.
            if hasattr(msvcrt, 'CrtSetReportMode'):
                self.old_modes = {}
                for report_type in [msvcrt.CRT_WARN,
                                    msvcrt.CRT_ERROR,
                                    msvcrt.CRT_ASSERT]:
                    old_mode = msvcrt.CrtSetReportMode(report_type,
                                                       msvcrt.CRTDBG_MODE_FILE)
                    old_file = msvcrt.CrtSetReportFile(report_type,
                                                       msvcrt.CRTDBG_FILE_STDERR)
                    self.old_modes[report_type] = old_mode, old_file

        else:
            try:
                import resource
                self.resource = resource
            except ImportError:
                self.resource = None
            if self.resource is not None:
                try:
                    # Disable core dumps: soft limit 0, hard limit unchanged.
                    self.old_value = self.resource.getrlimit(self.resource.RLIMIT_CORE)
                    self.resource.setrlimit(self.resource.RLIMIT_CORE,
                                            (0, self.old_value[1]))
                except (ValueError, OSError):
                    pass

            if sys.platform == 'darwin':
                import subprocess
                # Check if the 'Crash Reporter' on OSX was configured
                # in 'Developer' mode and warn that it will get triggered
                # when it is.
                #
                # This assumes that this context manager is used in tests
                # that might trigger the next manager.
                cmd = ['/usr/bin/defaults', 'read',
                       'com.apple.CrashReporter', 'DialogType']
                proc = subprocess.Popen(cmd,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
                with proc:
                    stdout = proc.communicate()[0]
                if stdout.strip() == b'developer':
                    print("this test triggers the Crash Reporter, "
                          "that is intentional", end='', flush=True)

        return self

    def __exit__(self, *ignore_exc):
        """Restore Windows ErrorMode or core file behavior to initial value."""
        if self.old_value is None:
            return

        if sys.platform.startswith('win'):
            import msvcrt
            msvcrt.SetErrorMode(self.old_value)

            if self.old_modes:
                for report_type, (old_mode, old_file) in self.old_modes.items():
                    msvcrt.CrtSetReportMode(report_type, old_mode)
                    msvcrt.CrtSetReportFile(report_type, old_file)
        else:
            if self.resource is not None:
                try:
                    self.resource.setrlimit(self.resource.RLIMIT_CORE, self.old_value)
                except (ValueError, OSError):
                    pass
1648
+
1649
+
1650
def patch(test_instance, object_to_patch, attr_name, new_value):
    """Override 'object_to_patch'.'attr_name' with 'new_value'.

    Also, add a cleanup procedure to 'test_instance' to restore
    'object_to_patch' value for 'attr_name'.
    The 'attr_name' should be a valid attribute for 'object_to_patch'.
    """
    # Fail fast (AttributeError) if the attribute does not exist at all.
    getattr(object_to_patch, attr_name)

    # Remember whether the attribute lives directly on the object (as
    # opposed to being inherited), and what its previous value was.
    try:
        old_value = object_to_patch.__dict__[attr_name]
    except (AttributeError, KeyError):
        attr_is_local = False
        old_value = getattr(object_to_patch, attr_name, None)
    else:
        attr_is_local = True

    def cleanup():
        # Restore a local attribute in place; otherwise delete our
        # override so the inherited value becomes visible again.
        if attr_is_local:
            setattr(object_to_patch, attr_name, old_value)
        else:
            delattr(object_to_patch, attr_name)

    test_instance.addCleanup(cleanup)

    # Actually install the override.
    setattr(object_to_patch, attr_name, new_value)
1682
+
1683
+
1684
@contextlib.contextmanager
def patch_list(orig):
    """Like unittest.mock.patch.dict, but for lists."""
    snapshot = orig[:]
    try:
        yield
    finally:
        # Restore in place so existing references see the old contents.
        orig[:] = snapshot
1692
+
1693
+
1694
def run_in_subinterp(code):
    """
    Run code in a subinterpreter. Raise unittest.SkipTest if the tracemalloc
    module is enabled.
    """
    # Issue #10915, #15751: PyGILState_*() functions don't work with
    # sub-interpreters, the tracemalloc module uses these functions internally
    try:
        import tracemalloc
    except ImportError:
        # No tracemalloc: nothing to guard against.
        pass
    else:
        if tracemalloc.is_tracing():
            raise unittest.SkipTest("run_in_subinterp() cannot be used "
                                    "if tracemalloc module is tracing "
                                    "memory allocations")
    import _testcapi
    return _testcapi.run_in_subinterp(code)
1712
+
1713
+
1714
def check_free_after_iterating(test, iter, cls, args=()):
    """Check that exhausting an iterator frees the underlying sequence.

    *iter* is the callable producing the iterator (typically the builtin
    iter, intentionally shadowed here); *cls* is instantiated with *args*.
    The subclass's __del__ records deallocation and verifies that calling
    next() on the exhausted iterator still raises StopIteration.
    """
    class A(cls):
        def __del__(self):
            nonlocal done
            done = True
            try:
                next(it)
            except StopIteration:
                pass

    done = False
    it = iter(A(*args))
    # Issue 26494: Shouldn't crash
    test.assertRaises(StopIteration, next, it)
    # The sequence should be deallocated just after the end of iterating
    gc_collect()
    test.assertTrue(done)
1731
+
1732
+
1733
def missing_compiler_executable(cmd_names=()):
    """Check if the compiler components used to build the interpreter exist.

    Check for the existence of the compiler executables whose names are listed
    in 'cmd_names' or all the compiler executables when 'cmd_names' is empty
    and return the first missing executable or None when none is found
    missing.
    """
    # NOTE: the default was a mutable list ([]); it is only ever iterated
    # and truth-tested, so an empty tuple is a safe, lint-clean equivalent.
    # TODO (PEP 632): alternate check without using distutils
    from distutils import ccompiler, sysconfig, spawn, errors
    compiler = ccompiler.new_compiler()
    sysconfig.customize_compiler(compiler)
    if compiler.compiler_type == "msvc":
        # MSVC has no executables, so check whether initialization succeeds
        try:
            compiler.initialize()
        except errors.DistutilsPlatformError:
            return "msvc"
    for name in compiler.executables:
        if cmd_names and name not in cmd_names:
            continue
        cmd = getattr(compiler, name)
        if cmd_names:
            # An explicitly requested executable must be configured.
            assert cmd is not None, \
                   "the '%s' executable is not configured" % name
        elif not cmd:
            continue
        if spawn.find_executable(cmd[0]) is None:
            return cmd[0]
1763
+
1764
+
1765
# Cached result of the emulator probe below (None until first computed).
_is_android_emulator = None
def setswitchinterval(interval):
    # Setting a very low gil interval on the Android emulator causes python
    # to hang (issue #26939).
    minimum_interval = 1e-5
    if is_android and interval < minimum_interval:
        global _is_android_emulator
        if _is_android_emulator is None:
            import subprocess
            # 'ro.kernel.qemu' is 1 only on the Android emulator (QEMU).
            _is_android_emulator = (subprocess.check_output(
                ['getprop', 'ro.kernel.qemu']).strip() == b'1')
        if _is_android_emulator:
            # Clamp to a safe minimum on the emulator.
            interval = minimum_interval
    return sys.setswitchinterval(interval)
1779
+
1780
+
1781
@contextlib.contextmanager
def disable_faulthandler():
    """Temporarily disable the faulthandler module.

    If faulthandler was enabled on entry, it is re-enabled on exit,
    writing to sys.__stderr__'s file descriptor.
    """
    import faulthandler

    # use sys.__stderr__ instead of sys.stderr, since regrtest replaces
    # sys.stderr with a StringIO which has no file descriptor when a test
    # is run with -W/--verbose3.
    stderr_fd = sys.__stderr__.fileno()

    was_enabled = faulthandler.is_enabled()
    try:
        faulthandler.disable()
        yield
    finally:
        if was_enabled:
            faulthandler.enable(file=stderr_fd, all_threads=True)
1797
+
1798
+
1799
class SaveSignals:
    """
    Save and restore signal handlers.

    This class is only able to save/restore signal handlers registered
    by the Python signal module: see bpo-13285 for "external" signal
    handlers.
    """

    def __init__(self):
        import signal
        self.signal = signal
        self.signals = signal.valid_signals()
        # SIGKILL and SIGSTOP signals cannot be ignored nor caught
        for signame in ('SIGKILL', 'SIGSTOP'):
            signum = getattr(signal, signame, None)
            if signum is not None:
                self.signals.remove(signum)
        self.handlers = {}

    def save(self):
        """Record the current handler of every saveable signal."""
        for signum in self.signals:
            handler = self.signal.getsignal(signum)
            if handler is None:
                # getsignal() returns None for a handler that was not
                # registered by the Python signal module and is neither
                # SIG_DFL nor SIG_IGN: it cannot be restored, skip it.
                continue
            self.handlers[signum] = handler

    def restore(self):
        """Reinstall every handler recorded by save()."""
        for signum, handler in self.handlers.items():
            self.signal.signal(signum, handler)
1836
+
1837
+
1838
def with_pymalloc():
    """Return True if Python was built with the pymalloc allocator."""
    import _testcapi
    return _testcapi.WITH_PYMALLOC
1841
+
1842
+
1843
class _ALWAYS_EQ:
    """Helper object that compares equal to every other object."""

    def __eq__(self, other):
        return True

    def __ne__(self, other):
        return False


ALWAYS_EQ = _ALWAYS_EQ()
1853
+
1854
class _NEVER_EQ:
    """Helper object that compares unequal to every object, itself included."""

    def __eq__(self, other):
        return False

    def __ne__(self, other):
        return True

    def __hash__(self):
        # Constant hash keeps the object usable in sets/dicts.
        return 1


NEVER_EQ = _NEVER_EQ()
1866
+
1867
@functools.total_ordering
class _LARGEST:
    """Helper object that orders above everything except itself."""

    def __eq__(self, other):
        return isinstance(other, _LARGEST)

    def __lt__(self, other):
        return False


LARGEST = _LARGEST()
1878
+
1879
@functools.total_ordering
class _SMALLEST:
    """Helper object that orders below everything except itself."""

    def __eq__(self, other):
        return isinstance(other, _SMALLEST)

    def __gt__(self, other):
        return False


SMALLEST = _SMALLEST()
1890
+
1891
def maybe_get_event_loop_policy():
    """Return the global event loop policy if one is set, else return None."""
    import asyncio.events
    # Peek at the private module-level slot rather than calling
    # get_event_loop_policy(), which would instantiate a default policy.
    return asyncio.events._event_loop_policy
1895
+
1896
+ # Helpers for testing hashing.
1897
# Width of the platform's hash() result; CPython only ever uses 32 or 64 bits.
NHASHBITS = sys.hash_info.width # number of bits in hash() result
assert NHASHBITS in (32, 64)
1899
+
1900
+ # Return mean and sdev of number of collisions when tossing nballs balls
1901
+ # uniformly at random into nbins bins. By definition, the number of
1902
+ # collisions is the number of balls minus the number of occupied bins at
1903
+ # the end.
1904
def collision_stats(nbins, nballs):
    """Return (mean, sdev) of the number of collisions when *nballs* balls
    are tossed uniformly at random into *nbins* bins.

    By definition, the number of collisions is the number of balls minus
    the number of occupied bins at the end.
    """
    # prob a bin empty after k trials = (1 - 1/n)**k
    # mean # empty is then n * (1 - 1/n)**k
    # so mean # occupied is n - n * (1 - 1/n)**k
    # so collisions = k - (n - n*(1 - 1/n)**k)
    #
    # For the variance:
    # n*(n-1)*(1-2/n)**k + meanempty - meanempty**2 =
    # n*(n-1)*(1-2/n)**k + meanempty * (1 - meanempty)
    #
    # Massive cancellation occurs, and, e.g., for a 64-bit hash code
    # 1-1/2**64 rounds uselessly to 1.0.  Rather than make heroic (and
    # error-prone) efforts to rework the naive formulas to avoid those,
    # we use the `decimal` module to get plenty of extra precision.
    # (Exact rational arithmetic would work, but is unbearably slow here.)
    import decimal
    with decimal.localcontext() as ctx:
        # Roughly n**2 worth of bits can cancel, so use that many decimal
        # digits (at least 30) of working precision.
        ctx.prec = max(nbins.bit_length() * 2, 30)
        dbins = decimal.Decimal(nbins)
        p_empty = ((dbins - 1) / dbins) ** nballs
        mean_empty = nbins * p_empty
        mean_occupied = nbins - mean_empty
        mean_collisions = nballs - mean_occupied
        variance = (dbins * (dbins - 1) * ((dbins - 2) / dbins) ** nballs
                    + mean_empty * (1 - mean_empty))
        return float(mean_collisions), float(variance.sqrt())
1936
+
1937
+
1938
class catch_unraisable_exception:
    """
    Context manager catching unraisable exception using sys.unraisablehook.

    Storing the exception value (cm.unraisable.exc_value) creates a reference
    cycle. The reference cycle is broken explicitly when the context manager
    exits.

    Storing the object (cm.unraisable.object) can resurrect it if it is set to
    an object which is being finalized. Exiting the context manager clears the
    stored object.

    Usage:

        with support.catch_unraisable_exception() as cm:
            # code creating an "unraisable exception"
            ...

            # check the unraisable exception: use cm.unraisable
            ...

        # cm.unraisable attribute no longer exists at this point
        # (to break a reference cycle)
    """

    def __init__(self):
        self.unraisable = None
        self._old_hook = None

    def _hook(self, unraisable):
        # Keep only the most recent event.  Storing unraisable.object can
        # resurrect an object being finalized, and unraisable.exc_value
        # creates a reference cycle; __exit__ cleans both up.
        self.unraisable = unraisable

    def __enter__(self):
        self._old_hook = sys.unraisablehook
        sys.unraisablehook = self._hook
        return self

    def __exit__(self, *exc_info):
        sys.unraisablehook = self._old_hook
        # Drop the stored event: break the cycle / release the object.
        del self.unraisable
1980
+
1981
+
1982
def wait_process(pid, *, exitcode, timeout=None):
    """
    Wait until process pid completes and check that the process exit code is
    exitcode.

    Raise an AssertionError if the process exit code is not equal to exitcode.

    If the process runs longer than timeout seconds (LONG_TIMEOUT by default),
    kill the process (if signal.SIGKILL is available) and raise an
    AssertionError. The timeout feature is not available on Windows.
    """
    if os.name != "nt":
        import signal

        if timeout is None:
            timeout = LONG_TIMEOUT
        t0 = time.monotonic()
        sleep = 0.001
        max_sleep = 0.1
        while True:
            # Non-blocking poll: pid2 stays 0 while the child is alive.
            pid2, status = os.waitpid(pid, os.WNOHANG)
            if pid2 != 0:
                break
            # process is still running

            dt = time.monotonic() - t0
            if dt > timeout:
                try:
                    os.kill(pid, signal.SIGKILL)
                    os.waitpid(pid, 0)
                except OSError:
                    # Ignore errors like ChildProcessError or PermissionError
                    pass

                raise AssertionError(f"process {pid} is still running "
                                     f"after {dt:.1f} seconds")

            # Exponential backoff between polls, capped at max_sleep.
            sleep = min(sleep * 2, max_sleep)
            time.sleep(sleep)
    else:
        # Windows implementation
        pid2, status = os.waitpid(pid, 0)

    exitcode2 = os.waitstatus_to_exitcode(status)
    if exitcode2 != exitcode:
        raise AssertionError(f"process {pid} exited with code {exitcode2}, "
                             f"but exit code {exitcode} is expected")

    # sanity check: it should not fail in practice
    if pid2 != pid:
        raise AssertionError(f"pid {pid2} != pid {pid}")
2033
+
2034
def skip_if_broken_multiprocessing_synchronize():
    """
    Skip tests if the multiprocessing.synchronize module is missing, if there
    is no available semaphore implementation, or if creating a lock raises an
    OSError (on Linux only).
    """
    from .import_helper import import_module

    # Skip tests if the _multiprocessing extension is missing.
    import_module('_multiprocessing')

    # Skip tests if there is no available semaphore implementation:
    # multiprocessing.synchronize requires _multiprocessing.SemLock.
    synchronize = import_module('multiprocessing.synchronize')

    if sys.platform == "linux":
        try:
            # bpo-38377: On Linux, creating a semaphore fails with OSError
            # if the current user does not have the permission to create
            # a file in /dev/shm/ directory.
            synchronize.Lock(ctx=None)
        except OSError as exc:
            raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}")
2057
+
2058
+
2059
@contextlib.contextmanager
def infinite_recursion(max_depth=75):
    """Temporarily lower the recursion limit to *max_depth* so runaway
    recursion is detected quickly; restores the previous limit on exit."""
    saved_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(max_depth)
    try:
        yield
    finally:
        sys.setrecursionlimit(saved_limit)
2067
+
2068
+
2069
def check_disallow_instantiation(testcase, tp, *args, **kwds):
    """
    Check that given type cannot be instantiated using *args and **kwds.

    See bpo-43916: Add Py_TPFLAGS_DISALLOW_INSTANTIATION type flag.
    """
    mod, name = tp.__module__, tp.__name__
    # Builtins are reported by their bare name; everything else is qualified.
    qualname = name if mod == 'builtins' else f"{mod}.{name}"
    msg = f"cannot create '{re.escape(qualname)}' instances"
    testcase.assertRaisesRegex(TypeError, msg, tp, *args, **kwds)
2083
+
2084
+
2085
def ignore_deprecations_from(module: str, *, like: str) -> object:
    """Install a warnings filter ignoring DeprecationWarnings raised from
    *module* whose message matches the regex *like*.

    Returns an opaque token to pass to clear_ignored_deprecations().
    """
    token = object()
    # Embed the token's id in a regex comment so the filter can be
    # identified (and removed) later without affecting matching.
    warnings.filterwarnings(
        "ignore",
        category=DeprecationWarning,
        module=module,
        message=like + fr"(?#support{id(token)})",
    )
    return token
2094
+
2095
+
2096
def clear_ignored_deprecations(*tokens: object) -> None:
    """Remove the warnings filters installed by ignore_deprecations_from()
    for the given *tokens*; raises ValueError when called with no tokens."""
    if not tokens:
        raise ValueError("Provide token or tokens returned by ignore_deprecations_from")

    markers = tuple(rf"(?#support{id(token)})" for token in tokens)
    kept = []
    for flt in warnings.filters:
        action, message, category, module, lineno = flt
        if action == "ignore" and category is DeprecationWarning:
            if isinstance(message, re.Pattern):
                pattern = message.pattern
            else:
                pattern = message or ""
            if pattern.endswith(markers):
                # Installed by one of our tokens: drop it.
                continue
        kept.append(flt)
    if warnings.filters != kept:
        warnings.filters[:] = kept
        warnings._filters_mutated()
2114
+
2115
+
2116
+ @contextlib.contextmanager
2117
+ def adjust_int_max_str_digits(max_digits):
2118
+ """Temporarily change the integer string conversion length limit."""
2119
+ current = sys.get_int_max_str_digits()
2120
+ try:
2121
+ sys.set_int_max_str_digits(max_digits)
2122
+ yield
2123
+ finally:
2124
+ sys.set_int_max_str_digits(current)
deepseekvl2/lib/python3.10/test/support/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (57.9 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/bytecode_helper.cpython-310.pyc ADDED
Binary file (1.7 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/hashlib_helper.cpython-310.pyc ADDED
Binary file (2.15 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/import_helper.cpython-310.pyc ADDED
Binary file (7.57 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/interpreters.cpython-310.pyc ADDED
Binary file (7.36 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/logging_helper.cpython-310.pyc ADDED
Binary file (1.42 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/os_helper.cpython-310.pyc ADDED
Binary file (14.4 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/script_helper.cpython-310.pyc ADDED
Binary file (8.06 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/socket_helper.cpython-310.pyc ADDED
Binary file (9.62 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/testresult.cpython-310.pyc ADDED
Binary file (7.04 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/threading_helper.cpython-310.pyc ADDED
Binary file (6.32 kB). View file
 
deepseekvl2/lib/python3.10/test/support/__pycache__/warnings_helper.cpython-310.pyc ADDED
Binary file (6.35 kB). View file
 
deepseekvl2/lib/python3.10/test/support/bytecode_helper.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """bytecode_helper - support tools for testing correct bytecode generation"""
2
+
3
+ import unittest
4
+ import dis
5
+ import io
6
+
7
# Sentinel distinguishing "argval not given" from an explicit None.
_UNSPECIFIED = object()

class BytecodeTestCase(unittest.TestCase):
    """Custom assertion methods for inspecting bytecode."""

    def get_disassembly_as_string(self, co):
        """Return dis.dis() output for *co* as a string."""
        buffer = io.StringIO()
        dis.dis(co, file=buffer)
        return buffer.getvalue()

    def assertInBytecode(self, x, opname, argval=_UNSPECIFIED):
        """Returns instr if opname is found, otherwise throws AssertionError"""
        for instr in dis.get_instructions(x):
            if instr.opname == opname and (
                    argval is _UNSPECIFIED or instr.argval == argval):
                return instr
        disassembly = self.get_disassembly_as_string(x)
        if argval is _UNSPECIFIED:
            msg = '%s not found in bytecode:\n%s' % (opname, disassembly)
        else:
            msg = '(%s,%r) not found in bytecode:\n%s'
            msg = msg % (opname, argval, disassembly)
        self.fail(msg)

    def assertNotInBytecode(self, x, opname, argval=_UNSPECIFIED):
        """Throws AssertionError if opname is found"""
        for instr in dis.get_instructions(x):
            if instr.opname != opname:
                continue
            disassembly = self.get_disassembly_as_string(x)
            if argval is _UNSPECIFIED:
                msg = '%s occurs in bytecode:\n%s' % (opname, disassembly)
                self.fail(msg)
            elif instr.argval == argval:
                msg = '(%s,%r) occurs in bytecode:\n%s'
                msg = msg % (opname, argval, disassembly)
                self.fail(msg)
deepseekvl2/lib/python3.10/test/support/hashlib_helper.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import hashlib
3
+ import unittest
4
+
5
+ try:
6
+ import _hashlib
7
+ except ImportError:
8
+ _hashlib = None
9
+
10
+
11
def requires_hashdigest(digestname, openssl=None, usedforsecurity=True):
    """Decorator raising SkipTest if a hashing algorithm is not available

    The hashing algorithm could be missing or blocked by a strict crypto
    policy.

    If 'openssl' is True, then the decorator checks that OpenSSL provides
    the algorithm. Otherwise the check falls back to built-in
    implementations. The usedforsecurity flag is passed to the constructor.

    ValueError: [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS
    ValueError: unsupported hash type md4
    """
    def decorator(func_or_class):
        if isinstance(func_or_class, type):
            # Decorating a class: wrap (or synthesize) setUpClass so the
            # availability check skips the whole class.
            setUpClass = func_or_class.__dict__.get('setUpClass')
            if setUpClass is None:
                def setUpClass(cls):
                    super(func_or_class, cls).setUpClass()
                setUpClass.__qualname__ = func_or_class.__qualname__ + '.setUpClass'
                setUpClass.__module__ = func_or_class.__module__
            else:
                setUpClass = setUpClass.__func__
            func_or_class.setUpClass = classmethod(decorator(setUpClass))
            return func_or_class

        @functools.wraps(func_or_class)
        def wrapper(*args, **kwargs):
            try:
                # Probe availability by constructing a digest object.
                if openssl and _hashlib is not None:
                    _hashlib.new(digestname, usedforsecurity=usedforsecurity)
                else:
                    hashlib.new(digestname, usedforsecurity=usedforsecurity)
            except ValueError:
                raise unittest.SkipTest(
                    f"hash digest '{digestname}' is not available."
                )
            return func_or_class(*args, **kwargs)
        return wrapper
    return decorator
deepseekvl2/lib/python3.10/test/support/import_helper.py ADDED
@@ -0,0 +1,220 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import importlib
3
+ import importlib.util
4
+ import os
5
+ import shutil
6
+ import sys
7
+ import unittest
8
+ import warnings
9
+
10
+ from .os_helper import unlink
11
+
12
+
13
+ @contextlib.contextmanager
14
+ def _ignore_deprecated_imports(ignore=True):
15
+ """Context manager to suppress package and module deprecation
16
+ warnings when importing them.
17
+
18
+ If ignore is False, this context manager has no effect.
19
+ """
20
+ if ignore:
21
+ with warnings.catch_warnings():
22
+ warnings.filterwarnings("ignore", ".+ (module|package)",
23
+ DeprecationWarning)
24
+ yield
25
+ else:
26
+ yield
27
+
28
+
29
def unload(name):
    """Drop *name* from sys.modules; a missing entry is silently ignored."""
    sys.modules.pop(name, None)
+
35
+
36
def forget(modname):
    """'Forget' a module was ever imported.

    This removes the module from sys.modules and deletes any PEP 3147/488 or
    legacy .pyc files.
    """
    unload(modname)
    for dirname in sys.path:
        source = os.path.join(dirname, modname + '.py')
        # It doesn't matter if they exist or not, unlink all possible
        # combinations of PEP 3147/488 and legacy pyc files.
        unlink(source + 'c')
        # Remove the cache file for each optimization level ('' = none).
        for opt in ('', 1, 2):
            unlink(importlib.util.cache_from_source(source, optimization=opt))
50
+
51
+
52
def make_legacy_pyc(source):
    """Move a PEP 3147/488 pyc file to its legacy pyc location.

    :param source: The file system path to the source file.  The source file
        does not need to exist, however the PEP 3147/488 pyc file must exist.
    :return: The file system path to the legacy pyc file.
    """
    pyc_file = importlib.util.cache_from_source(source)
    up_one = os.path.dirname(os.path.abspath(source))
    # Legacy location: '<source>c' next to the source file.
    legacy_pyc = os.path.join(up_one, source + 'c')
    shutil.move(pyc_file, legacy_pyc)
    return legacy_pyc
64
+
65
+
66
def import_module(name, deprecated=False, *, required_on=()):
    """Import and return the module to be tested, raising SkipTest if
    it is not available.

    If deprecated is True, any module or package deprecation messages
    will be suppressed. If a module is required on a platform but optional for
    others, set required_on to an iterable of platform prefixes which will be
    compared against sys.platform.
    """
    with _ignore_deprecated_imports(deprecated):
        try:
            return importlib.import_module(name)
        except ImportError as msg:
            # Required on this platform: propagate the real failure.
            if sys.platform.startswith(tuple(required_on)):
                raise
            raise unittest.SkipTest(str(msg))
82
+
83
+
84
+ def _save_and_remove_modules(names):
85
+ orig_modules = {}
86
+ prefixes = tuple(name + '.' for name in names)
87
+ for modname in list(sys.modules):
88
+ if modname in names or modname.startswith(prefixes):
89
+ orig_modules[modname] = sys.modules.pop(modname)
90
+ return orig_modules
91
+
92
+
93
def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
    """Import and return a module, deliberately bypassing sys.modules.

    This function imports and returns a fresh copy of the named Python module
    by removing the named module from sys.modules before doing the import.
    Note that unlike reload, the original module is not affected by
    this operation.

    *fresh* is an iterable of additional module names that are also removed
    from the sys.modules cache before doing the import. If one of these
    modules can't be imported, None is returned.

    *blocked* is an iterable of module names that are replaced with None
    in the module cache during the import to ensure that attempts to import
    them raise ImportError.

    The named module and any modules named in the *fresh* and *blocked*
    parameters are saved before starting the import and then reinserted into
    sys.modules when the fresh import is complete.

    Module and package deprecation messages are suppressed during this import
    if *deprecated* is True.

    This function will raise ImportError if the named module cannot be
    imported.
    """
    # NOTE: test_heapq, test_json and test_warnings include extra sanity checks
    # to make sure that this utility function is working as expected
    with _ignore_deprecated_imports(deprecated):
        # Keep track of modules saved for later restoration as well
        # as those which just need a blocking entry removed
        fresh = list(fresh)
        blocked = list(blocked)
        names = {name, *fresh, *blocked}
        orig_modules = _save_and_remove_modules(names)
        # sys.modules[mod] = None makes importing mod raise ImportError.
        for modname in blocked:
            sys.modules[modname] = None

        try:
            # Return None when one of the "fresh" modules can not be imported.
            try:
                for modname in fresh:
                    __import__(modname)
            except ImportError:
                return None
            return importlib.import_module(name)
        finally:
            # Drop everything imported above, then restore the originals.
            _save_and_remove_modules(names)
            sys.modules.update(orig_modules)
142
+
143
+
144
+ class CleanImport(object):
145
+ """Context manager to force import to return a new module reference.
146
+
147
+ This is useful for testing module-level behaviours, such as
148
+ the emission of a DeprecationWarning on import.
149
+
150
+ Use like this:
151
+
152
+ with CleanImport("foo"):
153
+ importlib.import_module("foo") # new reference
154
+ """
155
+
156
+ def __init__(self, *module_names):
157
+ self.original_modules = sys.modules.copy()
158
+ for module_name in module_names:
159
+ if module_name in sys.modules:
160
+ module = sys.modules[module_name]
161
+ # It is possible that module_name is just an alias for
162
+ # another module (e.g. stub for modules renamed in 3.x).
163
+ # In that case, we also need delete the real module to clear
164
+ # the import cache.
165
+ if module.__name__ != module_name:
166
+ del sys.modules[module.__name__]
167
+ del sys.modules[module_name]
168
+
169
+ def __enter__(self):
170
+ return self
171
+
172
+ def __exit__(self, *ignore_exc):
173
+ sys.modules.update(self.original_modules)
174
+
175
+
176
+ class DirsOnSysPath(object):
177
+ """Context manager to temporarily add directories to sys.path.
178
+
179
+ This makes a copy of sys.path, appends any directories given
180
+ as positional arguments, then reverts sys.path to the copied
181
+ settings when the context ends.
182
+
183
+ Note that *all* sys.path modifications in the body of the
184
+ context manager, including replacement of the object,
185
+ will be reverted at the end of the block.
186
+ """
187
+
188
+ def __init__(self, *paths):
189
+ self.original_value = sys.path[:]
190
+ self.original_object = sys.path
191
+ sys.path.extend(paths)
192
+
193
+ def __enter__(self):
194
+ return self
195
+
196
+ def __exit__(self, *ignore_exc):
197
+ sys.path = self.original_object
198
+ sys.path[:] = self.original_value
199
+
200
+
201
+ def modules_setup():
202
+ return sys.modules.copy(),
203
+
204
+
205
+ def modules_cleanup(oldmodules):
206
+ # Encoders/decoders are registered permanently within the internal
207
+ # codec cache. If we destroy the corresponding modules their
208
+ # globals will be set to None which will trip up the cached functions.
209
+ encodings = [(k, v) for k, v in sys.modules.items()
210
+ if k.startswith('encodings.')]
211
+ sys.modules.clear()
212
+ sys.modules.update(encodings)
213
+ # XXX: This kind of problem can affect more than just encodings.
214
+ # In particular extension modules (such as _ssl) don't cope
215
+ # with reloading properly. Really, test modules should be cleaning
216
+ # out the test specific modules they know they added (ala test_runpy)
217
+ # rather than relying on this function (as test_importhooks and test_pkg
218
+ # do currently). Implicitly imported *real* modules should be left alone
219
+ # (see issue 10556).
220
+ sys.modules.update(oldmodules)
deepseekvl2/lib/python3.10/test/support/interpreters.py ADDED
@@ -0,0 +1,197 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Subinterpreters High Level Module."""
2
+
3
+ import time
4
+ import _xxsubinterpreters as _interpreters
5
+
6
+ # aliases:
7
+ from _xxsubinterpreters import (
8
+ ChannelError, ChannelNotFoundError, ChannelEmptyError,
9
+ is_shareable,
10
+ )
11
+
12
+
13
+ __all__ = [
14
+ 'Interpreter', 'get_current', 'get_main', 'create', 'list_all',
15
+ 'SendChannel', 'RecvChannel',
16
+ 'create_channel', 'list_all_channels', 'is_shareable',
17
+ 'ChannelError', 'ChannelNotFoundError',
18
+ 'ChannelEmptyError',
19
+ ]
20
+
21
+
22
+ def create(*, isolated=True):
23
+ """Return a new (idle) Python interpreter."""
24
+ id = _interpreters.create(isolated=isolated)
25
+ return Interpreter(id, isolated=isolated)
26
+
27
+
28
+ def list_all():
29
+ """Return all existing interpreters."""
30
+ return [Interpreter(id) for id in _interpreters.list_all()]
31
+
32
+
33
+ def get_current():
34
+ """Return the currently running interpreter."""
35
+ id = _interpreters.get_current()
36
+ return Interpreter(id)
37
+
38
+
39
+ def get_main():
40
+ """Return the main interpreter."""
41
+ id = _interpreters.get_main()
42
+ return Interpreter(id)
43
+
44
+
45
+ class Interpreter:
46
+ """A single Python interpreter."""
47
+
48
+ def __init__(self, id, *, isolated=None):
49
+ if not isinstance(id, (int, _interpreters.InterpreterID)):
50
+ raise TypeError(f'id must be an int, got {id!r}')
51
+ self._id = id
52
+ self._isolated = isolated
53
+
54
+ def __repr__(self):
55
+ data = dict(id=int(self._id), isolated=self._isolated)
56
+ kwargs = (f'{k}={v!r}' for k, v in data.items())
57
+ return f'{type(self).__name__}({", ".join(kwargs)})'
58
+
59
+ def __hash__(self):
60
+ return hash(self._id)
61
+
62
+ def __eq__(self, other):
63
+ if not isinstance(other, Interpreter):
64
+ return NotImplemented
65
+ else:
66
+ return other._id == self._id
67
+
68
+ @property
69
+ def id(self):
70
+ return self._id
71
+
72
+ @property
73
+ def isolated(self):
74
+ if self._isolated is None:
75
+ # XXX The low-level function has not been added yet.
76
+ # See bpo-....
77
+ self._isolated = _interpreters.is_isolated(self._id)
78
+ return self._isolated
79
+
80
+ def is_running(self):
81
+ """Return whether or not the identified interpreter is running."""
82
+ return _interpreters.is_running(self._id)
83
+
84
+ def close(self):
85
+ """Finalize and destroy the interpreter.
86
+
87
+ Attempting to destroy the current interpreter results
88
+ in a RuntimeError.
89
+ """
90
+ return _interpreters.destroy(self._id)
91
+
92
+ def run(self, src_str, /, *, channels=None):
93
+ """Run the given source code in the interpreter.
94
+
95
+ This blocks the current Python thread until done.
96
+ """
97
+ _interpreters.run_string(self._id, src_str, channels)
98
+
99
+
100
+ def create_channel():
101
+ """Return (recv, send) for a new cross-interpreter channel.
102
+
103
+ The channel may be used to pass data safely between interpreters.
104
+ """
105
+ cid = _interpreters.channel_create()
106
+ recv, send = RecvChannel(cid), SendChannel(cid)
107
+ return recv, send
108
+
109
+
110
+ def list_all_channels():
111
+ """Return a list of (recv, send) for all open channels."""
112
+ return [(RecvChannel(cid), SendChannel(cid))
113
+ for cid in _interpreters.channel_list_all()]
114
+
115
+
116
+ class _ChannelEnd:
117
+ """The base class for RecvChannel and SendChannel."""
118
+
119
+ def __init__(self, id):
120
+ if not isinstance(id, (int, _interpreters.ChannelID)):
121
+ raise TypeError(f'id must be an int, got {id!r}')
122
+ self._id = id
123
+
124
+ def __repr__(self):
125
+ return f'{type(self).__name__}(id={int(self._id)})'
126
+
127
+ def __hash__(self):
128
+ return hash(self._id)
129
+
130
+ def __eq__(self, other):
131
+ if isinstance(self, RecvChannel):
132
+ if not isinstance(other, RecvChannel):
133
+ return NotImplemented
134
+ elif not isinstance(other, SendChannel):
135
+ return NotImplemented
136
+ return other._id == self._id
137
+
138
+ @property
139
+ def id(self):
140
+ return self._id
141
+
142
+
143
+ _NOT_SET = object()
144
+
145
+
146
+ class RecvChannel(_ChannelEnd):
147
+ """The receiving end of a cross-interpreter channel."""
148
+
149
+ def recv(self, *, _sentinel=object(), _delay=10 / 1000): # 10 milliseconds
150
+ """Return the next object from the channel.
151
+
152
+ This blocks until an object has been sent, if none have been
153
+ sent already.
154
+ """
155
+ obj = _interpreters.channel_recv(self._id, _sentinel)
156
+ while obj is _sentinel:
157
+ time.sleep(_delay)
158
+ obj = _interpreters.channel_recv(self._id, _sentinel)
159
+ return obj
160
+
161
+ def recv_nowait(self, default=_NOT_SET):
162
+ """Return the next object from the channel.
163
+
164
+ If none have been sent then return the default if one
165
+ is provided or fail with ChannelEmptyError. Otherwise this
166
+ is the same as recv().
167
+ """
168
+ if default is _NOT_SET:
169
+ return _interpreters.channel_recv(self._id)
170
+ else:
171
+ return _interpreters.channel_recv(self._id, default)
172
+
173
+
174
+ class SendChannel(_ChannelEnd):
175
+ """The sending end of a cross-interpreter channel."""
176
+
177
+ def send(self, obj):
178
+ """Send the object (i.e. its data) to the channel's receiving end.
179
+
180
+ This blocks until the object is received.
181
+ """
182
+ _interpreters.channel_send(self._id, obj)
183
+ # XXX We are missing a low-level channel_send_wait().
184
+ # See bpo-32604 and gh-19829.
185
+ # Until that shows up we fake it:
186
+ time.sleep(2)
187
+
188
+ def send_nowait(self, obj):
189
+ """Send the object to the channel's receiving end.
190
+
191
+ If the object is immediately received then return True
192
+ (else False). Otherwise this is the same as send().
193
+ """
194
+ # XXX Note that at the moment channel_send() only ever returns
195
+ # None. This should be fixed when channel_send_wait() is added.
196
+ # See bpo-32604 and gh-19829.
197
+ return _interpreters.channel_send(self._id, obj)
deepseekvl2/lib/python3.10/test/support/logging_helper.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging.handlers
2
+
3
+ class TestHandler(logging.handlers.BufferingHandler):
4
+ def __init__(self, matcher):
5
+ # BufferingHandler takes a "capacity" argument
6
+ # so as to know when to flush. As we're overriding
7
+ # shouldFlush anyway, we can set a capacity of zero.
8
+ # You can call flush() manually to clear out the
9
+ # buffer.
10
+ logging.handlers.BufferingHandler.__init__(self, 0)
11
+ self.matcher = matcher
12
+
13
+ def shouldFlush(self):
14
+ return False
15
+
16
+ def emit(self, record):
17
+ self.format(record)
18
+ self.buffer.append(record.__dict__)
19
+
20
+ def matches(self, **kwargs):
21
+ """
22
+ Look for a saved dict whose keys/values match the supplied arguments.
23
+ """
24
+ result = False
25
+ for d in self.buffer:
26
+ if self.matcher.matches(d, **kwargs):
27
+ result = True
28
+ break
29
+ return result
deepseekvl2/lib/python3.10/test/support/os_helper.py ADDED
@@ -0,0 +1,623 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections.abc
2
+ import contextlib
3
+ import errno
4
+ import os
5
+ import re
6
+ import stat
7
+ import sys
8
+ import time
9
+ import unittest
10
+ import warnings
11
+
12
+
13
+ # Filename used for testing
14
+ if os.name == 'java':
15
+ # Jython disallows @ in module names
16
+ TESTFN_ASCII = '$test'
17
+ else:
18
+ TESTFN_ASCII = '@test'
19
+
20
+ # Disambiguate TESTFN for parallel testing, while letting it remain a valid
21
+ # module name.
22
+ TESTFN_ASCII = "{}_{}_tmp".format(TESTFN_ASCII, os.getpid())
23
+
24
+ # TESTFN_UNICODE is a non-ascii filename
25
+ TESTFN_UNICODE = TESTFN_ASCII + "-\xe0\xf2\u0258\u0141\u011f"
26
+ if sys.platform == 'darwin':
27
+ # In Mac OS X's VFS API file names are, by definition, canonically
28
+ # decomposed Unicode, encoded using UTF-8. See QA1173:
29
+ # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
30
+ import unicodedata
31
+ TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE)
32
+
33
+ # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be
34
+ # encoded by the filesystem encoding (in strict mode). It can be None if we
35
+ # cannot generate such filename.
36
+ TESTFN_UNENCODABLE = None
37
+ if os.name == 'nt':
38
+ # skip win32s (0) or Windows 9x/ME (1)
39
+ if sys.getwindowsversion().platform >= 2:
40
+ # Different kinds of characters from various languages to minimize the
41
+ # probability that the whole name is encodable to MBCS (issue #9819)
42
+ TESTFN_UNENCODABLE = TESTFN_ASCII + "-\u5171\u0141\u2661\u0363\uDC80"
43
+ try:
44
+ TESTFN_UNENCODABLE.encode(sys.getfilesystemencoding())
45
+ except UnicodeEncodeError:
46
+ pass
47
+ else:
48
+ print('WARNING: The filename %r CAN be encoded by the filesystem '
49
+ 'encoding (%s). Unicode filename tests may not be effective'
50
+ % (TESTFN_UNENCODABLE, sys.getfilesystemencoding()))
51
+ TESTFN_UNENCODABLE = None
52
+ # Mac OS X denies unencodable filenames (invalid utf-8)
53
+ elif sys.platform != 'darwin':
54
+ try:
55
+ # ascii and utf-8 cannot encode the byte 0xff
56
+ b'\xff'.decode(sys.getfilesystemencoding())
57
+ except UnicodeDecodeError:
58
+ # 0xff will be encoded using the surrogate character u+DCFF
59
+ TESTFN_UNENCODABLE = TESTFN_ASCII \
60
+ + b'-\xff'.decode(sys.getfilesystemencoding(), 'surrogateescape')
61
+ else:
62
+ # File system encoding (eg. ISO-8859-* encodings) can encode
63
+ # the byte 0xff. Skip some unicode filename tests.
64
+ pass
65
+
66
+ # FS_NONASCII: non-ASCII character encodable by os.fsencode(),
67
+ # or an empty string if there is no such character.
68
+ FS_NONASCII = ''
69
+ for character in (
70
+ # First try printable and common characters to have a readable filename.
71
+ # For each character, the encoding list are just example of encodings able
72
+ # to encode the character (the list is not exhaustive).
73
+
74
+ # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1
75
+ '\u00E6',
76
+ # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3
77
+ '\u0130',
78
+ # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257
79
+ '\u0141',
80
+ # U+03C6 (Greek Small Letter Phi): cp1253
81
+ '\u03C6',
82
+ # U+041A (Cyrillic Capital Letter Ka): cp1251
83
+ '\u041A',
84
+ # U+05D0 (Hebrew Letter Alef): Encodable to cp424
85
+ '\u05D0',
86
+ # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic
87
+ '\u060C',
88
+ # U+062A (Arabic Letter Teh): cp720
89
+ '\u062A',
90
+ # U+0E01 (Thai Character Ko Kai): cp874
91
+ '\u0E01',
92
+
93
+ # Then try more "special" characters. "special" because they may be
94
+ # interpreted or displayed differently depending on the exact locale
95
+ # encoding and the font.
96
+
97
+ # U+00A0 (No-Break Space)
98
+ '\u00A0',
99
+ # U+20AC (Euro Sign)
100
+ '\u20AC',
101
+ ):
102
+ try:
103
+ # If Python is set up to use the legacy 'mbcs' in Windows,
104
+ # 'replace' error mode is used, and encode() returns b'?'
105
+ # for characters missing in the ANSI codepage
106
+ if os.fsdecode(os.fsencode(character)) != character:
107
+ raise UnicodeError
108
+ except UnicodeError:
109
+ pass
110
+ else:
111
+ FS_NONASCII = character
112
+ break
113
+
114
+ # Save the initial cwd
115
+ SAVEDCWD = os.getcwd()
116
+
117
+ # TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be
118
+ # decoded from the filesystem encoding (in strict mode). It can be None if we
119
+ # cannot generate such filename (ex: the latin1 encoding can decode any byte
120
+ # sequence). On UNIX, TESTFN_UNDECODABLE can be decoded by os.fsdecode() thanks
121
+ # to the surrogateescape error handler (PEP 383), but not from the filesystem
122
+ # encoding in strict mode.
123
+ TESTFN_UNDECODABLE = None
124
+ for name in (
125
+ # b'\xff' is not decodable by os.fsdecode() with code page 932. Windows
126
+ # accepts it to create a file or a directory, or don't accept to enter to
127
+ # such directory (when the bytes name is used). So test b'\xe7' first:
128
+ # it is not decodable from cp932.
129
+ b'\xe7w\xf0',
130
+ # undecodable from ASCII, UTF-8
131
+ b'\xff',
132
+ # undecodable from iso8859-3, iso8859-6, iso8859-7, cp424, iso8859-8, cp856
133
+ # and cp857
134
+ b'\xae\xd5'
135
+ # undecodable from UTF-8 (UNIX and Mac OS X)
136
+ b'\xed\xb2\x80', b'\xed\xb4\x80',
137
+ # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252,
138
+ # cp1253, cp1254, cp1255, cp1257, cp1258
139
+ b'\x81\x98',
140
+ ):
141
+ try:
142
+ name.decode(sys.getfilesystemencoding())
143
+ except UnicodeDecodeError:
144
+ TESTFN_UNDECODABLE = os.fsencode(TESTFN_ASCII) + name
145
+ break
146
+
147
+ if FS_NONASCII:
148
+ TESTFN_NONASCII = TESTFN_ASCII + FS_NONASCII
149
+ else:
150
+ TESTFN_NONASCII = None
151
+ TESTFN = TESTFN_NONASCII or TESTFN_ASCII
152
+
153
+
154
+ def make_bad_fd():
155
+ """
156
+ Create an invalid file descriptor by opening and closing a file and return
157
+ its fd.
158
+ """
159
+ file = open(TESTFN, "wb")
160
+ try:
161
+ return file.fileno()
162
+ finally:
163
+ file.close()
164
+ unlink(TESTFN)
165
+
166
+
167
+ _can_symlink = None
168
+
169
+
170
+ def can_symlink():
171
+ global _can_symlink
172
+ if _can_symlink is not None:
173
+ return _can_symlink
174
+ symlink_path = TESTFN + "can_symlink"
175
+ try:
176
+ os.symlink(TESTFN, symlink_path)
177
+ can = True
178
+ except (OSError, NotImplementedError, AttributeError):
179
+ can = False
180
+ else:
181
+ os.remove(symlink_path)
182
+ _can_symlink = can
183
+ return can
184
+
185
+
186
+ def skip_unless_symlink(test):
187
+ """Skip decorator for tests that require functional symlink"""
188
+ ok = can_symlink()
189
+ msg = "Requires functional symlink implementation"
190
+ return test if ok else unittest.skip(msg)(test)
191
+
192
+
193
+ _can_xattr = None
194
+
195
+
196
+ def can_xattr():
197
+ import tempfile
198
+ global _can_xattr
199
+ if _can_xattr is not None:
200
+ return _can_xattr
201
+ if not hasattr(os, "setxattr"):
202
+ can = False
203
+ else:
204
+ import platform
205
+ tmp_dir = tempfile.mkdtemp()
206
+ tmp_fp, tmp_name = tempfile.mkstemp(dir=tmp_dir)
207
+ try:
208
+ with open(TESTFN, "wb") as fp:
209
+ try:
210
+ # TESTFN & tempfile may use different file systems with
211
+ # different capabilities
212
+ os.setxattr(tmp_fp, b"user.test", b"")
213
+ os.setxattr(tmp_name, b"trusted.foo", b"42")
214
+ os.setxattr(fp.fileno(), b"user.test", b"")
215
+ # Kernels < 2.6.39 don't respect setxattr flags.
216
+ kernel_version = platform.release()
217
+ m = re.match(r"2.6.(\d{1,2})", kernel_version)
218
+ can = m is None or int(m.group(1)) >= 39
219
+ except OSError:
220
+ can = False
221
+ finally:
222
+ unlink(TESTFN)
223
+ unlink(tmp_name)
224
+ rmdir(tmp_dir)
225
+ _can_xattr = can
226
+ return can
227
+
228
+
229
+ def skip_unless_xattr(test):
230
+ """Skip decorator for tests that require functional extended attributes"""
231
+ ok = can_xattr()
232
+ msg = "no non-broken extended attribute support"
233
+ return test if ok else unittest.skip(msg)(test)
234
+
235
+
236
+ def unlink(filename):
237
+ try:
238
+ _unlink(filename)
239
+ except (FileNotFoundError, NotADirectoryError):
240
+ pass
241
+
242
+
243
+ if sys.platform.startswith("win"):
244
+ def _waitfor(func, pathname, waitall=False):
245
+ # Perform the operation
246
+ func(pathname)
247
+ # Now setup the wait loop
248
+ if waitall:
249
+ dirname = pathname
250
+ else:
251
+ dirname, name = os.path.split(pathname)
252
+ dirname = dirname or '.'
253
+ # Check for `pathname` to be removed from the filesystem.
254
+ # The exponential backoff of the timeout amounts to a total
255
+ # of ~1 second after which the deletion is probably an error
256
+ # anyway.
257
+ # Testing on an i7@4.3GHz shows that usually only 1 iteration is
258
+ # required when contention occurs.
259
+ timeout = 0.001
260
+ while timeout < 1.0:
261
+ # Note we are only testing for the existence of the file(s) in
262
+ # the contents of the directory regardless of any security or
263
+ # access rights. If we have made it this far, we have sufficient
264
+ # permissions to do that much using Python's equivalent of the
265
+ # Windows API FindFirstFile.
266
+ # Other Windows APIs can fail or give incorrect results when
267
+ # dealing with files that are pending deletion.
268
+ L = os.listdir(dirname)
269
+ if not (L if waitall else name in L):
270
+ return
271
+ # Increase the timeout and try again
272
+ time.sleep(timeout)
273
+ timeout *= 2
274
+ warnings.warn('tests may fail, delete still pending for ' + pathname,
275
+ RuntimeWarning, stacklevel=4)
276
+
277
+ def _unlink(filename):
278
+ _waitfor(os.unlink, filename)
279
+
280
+ def _rmdir(dirname):
281
+ _waitfor(os.rmdir, dirname)
282
+
283
+ def _rmtree(path):
284
+ from test.support import _force_run
285
+
286
+ def _rmtree_inner(path):
287
+ for name in _force_run(path, os.listdir, path):
288
+ fullname = os.path.join(path, name)
289
+ try:
290
+ mode = os.lstat(fullname).st_mode
291
+ except OSError as exc:
292
+ print("support.rmtree(): os.lstat(%r) failed with %s"
293
+ % (fullname, exc),
294
+ file=sys.__stderr__)
295
+ mode = 0
296
+ if stat.S_ISDIR(mode):
297
+ _waitfor(_rmtree_inner, fullname, waitall=True)
298
+ _force_run(fullname, os.rmdir, fullname)
299
+ else:
300
+ _force_run(fullname, os.unlink, fullname)
301
+ _waitfor(_rmtree_inner, path, waitall=True)
302
+ _waitfor(lambda p: _force_run(p, os.rmdir, p), path)
303
+
304
+ def _longpath(path):
305
+ try:
306
+ import ctypes
307
+ except ImportError:
308
+ # No ctypes means we can't expands paths.
309
+ pass
310
+ else:
311
+ buffer = ctypes.create_unicode_buffer(len(path) * 2)
312
+ length = ctypes.windll.kernel32.GetLongPathNameW(path, buffer,
313
+ len(buffer))
314
+ if length:
315
+ return buffer[:length]
316
+ return path
317
+ else:
318
+ _unlink = os.unlink
319
+ _rmdir = os.rmdir
320
+
321
+ def _rmtree(path):
322
+ import shutil
323
+ try:
324
+ shutil.rmtree(path)
325
+ return
326
+ except OSError:
327
+ pass
328
+
329
+ def _rmtree_inner(path):
330
+ from test.support import _force_run
331
+ for name in _force_run(path, os.listdir, path):
332
+ fullname = os.path.join(path, name)
333
+ try:
334
+ mode = os.lstat(fullname).st_mode
335
+ except OSError:
336
+ mode = 0
337
+ if stat.S_ISDIR(mode):
338
+ _rmtree_inner(fullname)
339
+ _force_run(path, os.rmdir, fullname)
340
+ else:
341
+ _force_run(path, os.unlink, fullname)
342
+ _rmtree_inner(path)
343
+ os.rmdir(path)
344
+
345
+ def _longpath(path):
346
+ return path
347
+
348
+
349
+ def rmdir(dirname):
350
+ try:
351
+ _rmdir(dirname)
352
+ except FileNotFoundError:
353
+ pass
354
+
355
+
356
+ def rmtree(path):
357
+ try:
358
+ _rmtree(path)
359
+ except FileNotFoundError:
360
+ pass
361
+
362
+
363
+ @contextlib.contextmanager
364
+ def temp_dir(path=None, quiet=False):
365
+ """Return a context manager that creates a temporary directory.
366
+
367
+ Arguments:
368
+
369
+ path: the directory to create temporarily. If omitted or None,
370
+ defaults to creating a temporary directory using tempfile.mkdtemp.
371
+
372
+ quiet: if False (the default), the context manager raises an exception
373
+ on error. Otherwise, if the path is specified and cannot be
374
+ created, only a warning is issued.
375
+
376
+ """
377
+ import tempfile
378
+ dir_created = False
379
+ if path is None:
380
+ path = tempfile.mkdtemp()
381
+ dir_created = True
382
+ path = os.path.realpath(path)
383
+ else:
384
+ try:
385
+ os.mkdir(path)
386
+ dir_created = True
387
+ except OSError as exc:
388
+ if not quiet:
389
+ raise
390
+ warnings.warn(f'tests may fail, unable to create '
391
+ f'temporary directory {path!r}: {exc}',
392
+ RuntimeWarning, stacklevel=3)
393
+ if dir_created:
394
+ pid = os.getpid()
395
+ try:
396
+ yield path
397
+ finally:
398
+ # In case the process forks, let only the parent remove the
399
+ # directory. The child has a different process id. (bpo-30028)
400
+ if dir_created and pid == os.getpid():
401
+ rmtree(path)
402
+
403
+
404
+ @contextlib.contextmanager
405
+ def change_cwd(path, quiet=False):
406
+ """Return a context manager that changes the current working directory.
407
+
408
+ Arguments:
409
+
410
+ path: the directory to use as the temporary current working directory.
411
+
412
+ quiet: if False (the default), the context manager raises an exception
413
+ on error. Otherwise, it issues only a warning and keeps the current
414
+ working directory the same.
415
+
416
+ """
417
+ saved_dir = os.getcwd()
418
+ try:
419
+ os.chdir(os.path.realpath(path))
420
+ except OSError as exc:
421
+ if not quiet:
422
+ raise
423
+ warnings.warn(f'tests may fail, unable to change the current working '
424
+ f'directory to {path!r}: {exc}',
425
+ RuntimeWarning, stacklevel=3)
426
+ try:
427
+ yield os.getcwd()
428
+ finally:
429
+ os.chdir(saved_dir)
430
+
431
+
432
+ @contextlib.contextmanager
433
+ def temp_cwd(name='tempcwd', quiet=False):
434
+ """
435
+ Context manager that temporarily creates and changes the CWD.
436
+
437
+ The function temporarily changes the current working directory
438
+ after creating a temporary directory in the current directory with
439
+ name *name*. If *name* is None, the temporary directory is
440
+ created using tempfile.mkdtemp.
441
+
442
+ If *quiet* is False (default) and it is not possible to
443
+ create or change the CWD, an error is raised. If *quiet* is True,
444
+ only a warning is raised and the original CWD is used.
445
+
446
+ """
447
+ with temp_dir(path=name, quiet=quiet) as temp_path:
448
+ with change_cwd(temp_path, quiet=quiet) as cwd_dir:
449
+ yield cwd_dir
450
+
451
+
452
+ def create_empty_file(filename):
453
+ """Create an empty file. If the file already exists, truncate it."""
454
+ fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
455
+ os.close(fd)
456
+
457
+
458
+ @contextlib.contextmanager
459
+ def open_dir_fd(path):
460
+ """Open a file descriptor to a directory."""
461
+ assert os.path.isdir(path)
462
+ dir_fd = os.open(path, os.O_RDONLY)
463
+ try:
464
+ yield dir_fd
465
+ finally:
466
+ os.close(dir_fd)
467
+
468
+
469
+ def fs_is_case_insensitive(directory):
470
+ """Detects if the file system for the specified directory
471
+ is case-insensitive."""
472
+ import tempfile
473
+ with tempfile.NamedTemporaryFile(dir=directory) as base:
474
+ base_path = base.name
475
+ case_path = base_path.upper()
476
+ if case_path == base_path:
477
+ case_path = base_path.lower()
478
+ try:
479
+ return os.path.samefile(base_path, case_path)
480
+ except FileNotFoundError:
481
+ return False
482
+
483
+
484
+ class FakePath:
485
+ """Simple implementing of the path protocol.
486
+ """
487
+ def __init__(self, path):
488
+ self.path = path
489
+
490
+ def __repr__(self):
491
+ return f'<FakePath {self.path!r}>'
492
+
493
+ def __fspath__(self):
494
+ if (isinstance(self.path, BaseException) or
495
+ isinstance(self.path, type) and
496
+ issubclass(self.path, BaseException)):
497
+ raise self.path
498
+ else:
499
+ return self.path
500
+
501
+
502
+ def fd_count():
503
+ """Count the number of open file descriptors.
504
+ """
505
+ if sys.platform.startswith(('linux', 'freebsd')):
506
+ try:
507
+ names = os.listdir("/proc/self/fd")
508
+ # Subtract one because listdir() internally opens a file
509
+ # descriptor to list the content of the /proc/self/fd/ directory.
510
+ return len(names) - 1
511
+ except FileNotFoundError:
512
+ pass
513
+
514
+ MAXFD = 256
515
+ if hasattr(os, 'sysconf'):
516
+ try:
517
+ MAXFD = os.sysconf("SC_OPEN_MAX")
518
+ except OSError:
519
+ pass
520
+
521
+ old_modes = None
522
+ if sys.platform == 'win32':
523
+ # bpo-25306, bpo-31009: Call CrtSetReportMode() to not kill the process
524
+ # on invalid file descriptor if Python is compiled in debug mode
525
+ try:
526
+ import msvcrt
527
+ msvcrt.CrtSetReportMode
528
+ except (AttributeError, ImportError):
529
+ # no msvcrt or a release build
530
+ pass
531
+ else:
532
+ old_modes = {}
533
+ for report_type in (msvcrt.CRT_WARN,
534
+ msvcrt.CRT_ERROR,
535
+ msvcrt.CRT_ASSERT):
536
+ old_modes[report_type] = msvcrt.CrtSetReportMode(report_type,
537
+ 0)
538
+
539
+ try:
540
+ count = 0
541
+ for fd in range(MAXFD):
542
+ try:
543
+ # Prefer dup() over fstat(). fstat() can require input/output
544
+ # whereas dup() doesn't.
545
+ fd2 = os.dup(fd)
546
+ except OSError as e:
547
+ if e.errno != errno.EBADF:
548
+ raise
549
+ else:
550
+ os.close(fd2)
551
+ count += 1
552
+ finally:
553
+ if old_modes is not None:
554
+ for report_type in (msvcrt.CRT_WARN,
555
+ msvcrt.CRT_ERROR,
556
+ msvcrt.CRT_ASSERT):
557
+ msvcrt.CrtSetReportMode(report_type, old_modes[report_type])
558
+
559
+ return count
560
+
561
+
562
+ if hasattr(os, "umask"):
563
+ @contextlib.contextmanager
564
+ def temp_umask(umask):
565
+ """Context manager that temporarily sets the process umask."""
566
+ oldmask = os.umask(umask)
567
+ try:
568
+ yield
569
+ finally:
570
+ os.umask(oldmask)
571
+
572
+
573
+ class EnvironmentVarGuard(collections.abc.MutableMapping):
574
+
575
+ """Class to help protect the environment variable properly. Can be used as
576
+ a context manager."""
577
+
578
+ def __init__(self):
579
+ self._environ = os.environ
580
+ self._changed = {}
581
+
582
+ def __getitem__(self, envvar):
583
+ return self._environ[envvar]
584
+
585
+ def __setitem__(self, envvar, value):
586
+ # Remember the initial value on the first access
587
+ if envvar not in self._changed:
588
+ self._changed[envvar] = self._environ.get(envvar)
589
+ self._environ[envvar] = value
590
+
591
+ def __delitem__(self, envvar):
592
+ # Remember the initial value on the first access
593
+ if envvar not in self._changed:
594
+ self._changed[envvar] = self._environ.get(envvar)
595
+ if envvar in self._environ:
596
+ del self._environ[envvar]
597
+
598
+ def keys(self):
599
+ return self._environ.keys()
600
+
601
+ def __iter__(self):
602
+ return iter(self._environ)
603
+
604
+ def __len__(self):
605
+ return len(self._environ)
606
+
607
+ def set(self, envvar, value):
608
+ self[envvar] = value
609
+
610
+ def unset(self, envvar):
611
+ del self[envvar]
612
+
613
+ def __enter__(self):
614
+ return self
615
+
616
+ def __exit__(self, *ignore_exc):
617
+ for (k, v) in self._changed.items():
618
+ if v is None:
619
+ if k in self._environ:
620
+ del self._environ[k]
621
+ else:
622
+ self._environ[k] = v
623
+ os.environ = self._environ
deepseekvl2/lib/python3.10/test/support/script_helper.py ADDED
@@ -0,0 +1,294 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Common utility functions used by various script execution tests
2
+ # e.g. test_cmd_line, test_cmd_line_script and test_runpy
3
+
4
+ import collections
5
+ import importlib
6
+ import sys
7
+ import os
8
+ import os.path
9
+ import subprocess
10
+ import py_compile
11
+ import zipfile
12
+
13
+ from importlib.util import source_from_cache
14
+ from test import support
15
+ from test.support.import_helper import make_legacy_pyc
16
+
17
+
18
+ # Cached result of the expensive test performed in the function below.
19
+ __cached_interp_requires_environment = None
20
+
21
+
22
def interpreter_requires_environment():
    """Return True if sys.executable cannot run without environment variables.

    Intended for @unittest.skipIf() on tests that launch sub-interpreters
    with an assert_python*() function in isolated (-I) or no-environment
    (-E) mode.

    A normal build & test never hits this, but it can happen when the test
    suite is run from an interpreter without an obvious home for Python's
    home-finding logic; PYTHONHOME (and sometimes PYTHONPATH or
    PYTHONUSERSITE) then become necessary for the child to start at all.

    The (expensive) probe result is cached for the life of the process.
    """
    global __cached_interp_requires_environment
    if __cached_interp_requires_environment is not None:
        return __cached_interp_requires_environment

    # If PYTHONHOME is set, assume that we need it.
    if 'PYTHONHOME' in os.environ:
        __cached_interp_requires_environment = True
        return True

    # Probe: does an interpreter started with -E (environment ignored)
    # come up at all?
    try:
        subprocess.check_call([sys.executable, '-E',
                               '-c', 'import sys; sys.exit(0)'])
    except subprocess.CalledProcessError:
        needs_env = True
    else:
        needs_env = False

    __cached_interp_requires_environment = needs_env
    return needs_env
56
+
57
+
58
+ class _PythonRunResult(collections.namedtuple("_PythonRunResult",
59
+ ("rc", "out", "err"))):
60
+ """Helper for reporting Python subprocess run results"""
61
+ def fail(self, cmd_line):
62
+ """Provide helpful details about failed subcommand runs"""
63
+ # Limit to 80 lines to ASCII characters
64
+ maxlen = 80 * 100
65
+ out, err = self.out, self.err
66
+ if len(out) > maxlen:
67
+ out = b'(... truncated stdout ...)' + out[-maxlen:]
68
+ if len(err) > maxlen:
69
+ err = b'(... truncated stderr ...)' + err[-maxlen:]
70
+ out = out.decode('ascii', 'replace').rstrip()
71
+ err = err.decode('ascii', 'replace').rstrip()
72
+ raise AssertionError("Process return code is %d\n"
73
+ "command line: %r\n"
74
+ "\n"
75
+ "stdout:\n"
76
+ "---\n"
77
+ "%s\n"
78
+ "---\n"
79
+ "\n"
80
+ "stderr:\n"
81
+ "---\n"
82
+ "%s\n"
83
+ "---"
84
+ % (self.rc, cmd_line,
85
+ out,
86
+ err))
87
+
88
+
89
+ # Executing the interpreter in a subprocess
90
def run_python_until_end(*args, **env_vars):
    """Run sys.executable with *args* until it exits.

    Double-underscore keys in *env_vars* are options (__cwd, __isolated,
    __cleanenv); all other entries become environment variables for the
    child process.  Returns a (_PythonRunResult, cmd_line) tuple.
    """
    env_required = interpreter_requires_environment()
    cwd = env_vars.pop('__cwd', None)
    try:
        isolated = env_vars.pop('__isolated')
    except KeyError:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        # Isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path.
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        # Only ignore Python environment variables.
        cmd_line.append('-E')

    # __cleanenv overrides the default: the caller is then responsible
    # for passing the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
        if sys.platform == 'win32':
            # Windows cannot start Python without SYSTEMROOT.
            env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
            # Other interesting environment variables, not copied currently:
            # COMSPEC, HOME, PATH, TEMP, TMPDIR, TMP.
    else:
        # Preserve the original environment, for in-place testing of
        # shared library builds.
        env = os.environ.copy()

    # Set TERM='' unless TERM is passed explicitly; readline may emit
    # junk depending on the TERM setting (issues #11390, #18300).
    if 'TERM' not in env_vars:
        env['TERM'] = ''

    env.update(env_vars)
    cmd_line.extend(args)
    proc = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            env=env, cwd=cwd)
    with proc:
        try:
            out, err = proc.communicate()
        finally:
            proc.kill()
            subprocess._cleanup()
    return _PythonRunResult(proc.returncode, out, err), cmd_line
140
+
141
+
142
def _assert_python(expected_success, /, *args, **env_vars):
    """Run the interpreter and fail loudly when the outcome (success or
    failure) does not match *expected_success*.  Returns the run result."""
    res, cmd_line = run_python_until_end(*args, **env_vars)
    succeeded = not res.rc
    if succeeded != bool(expected_success):
        res.fail(cmd_line)
    return res
147
+
148
+
149
def assert_python_ok(*args, **env_vars):
    """Run sys.executable with *args* and the given environment variables,
    asserting a zero exit status.  Returns an (rc, stdout, stderr) tuple.

    With the __cleanenv keyword, env_vars becomes the entire child
    environment.  The child runs in isolated mode (-I) unless the
    __isolated keyword is set to False.
    """
    return _assert_python(True, *args, **env_vars)
161
+
162
+
163
def assert_python_failure(*args, **env_vars):
    """Like assert_python_ok(), but asserts a non-zero exit status.

    Returns an (rc, stdout, stderr) tuple; see assert_python_ok() for the
    supported keyword options.
    """
    return _assert_python(False, *args, **env_vars)
172
+
173
+
174
def spawn_python(*args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kw):
    """Start a Python interpreter subprocess with the given arguments.

    Extra keyword arguments are forwarded to subprocess.Popen; the Popen
    object is returned.
    """
    cmd_line = [sys.executable]
    if not interpreter_requires_environment():
        cmd_line.append('-E')
    cmd_line.extend(args)
    # GNU readline may print junk on stderr when initialized, depending on
    # the TERM setting (seen e.g. under Fedora); TERM=vt100 is supposed to
    # suppress that.  References:
    # - http://reinout.vanrees.org/weblog/2009/08/14/readline-invisible-character-hack.html
    # - http://stackoverflow.com/questions/15760712/python-readline-module-prints-escape-character-during-import
    # - http://lists.gnu.org/archive/html/bug-readline/2007-08/msg00004.html
    env = kw.setdefault('env', dict(os.environ))
    env['TERM'] = 'vt100'
    return subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                            stdout=stdout, stderr=stderr,
                            **kw)
195
+
196
+
197
def kill_python(p):
    """Close stdin of Popen *p*, drain its stdout, reap the process and
    return the collected stdout bytes."""
    p.stdin.close()
    output = p.stdout.read()
    p.stdout.close()
    # Reap the child so it does not appear to leak when running with
    # regrtest -R.
    p.wait()
    subprocess._cleanup()
    return output
207
+
208
+
209
def make_script(script_dir, script_basename, source, omit_suffix=False):
    """Write *source* to <script_dir>/<script_basename>[.py] as UTF-8 and
    return the resulting path."""
    filename = script_basename
    if not omit_suffix:
        filename = filename + os.extsep + 'py'
    script_name = os.path.join(script_dir, filename)
    # Scripts are encoded to UTF-8, the default source encoding.
    with open(script_name, 'w', encoding='utf-8') as script_file:
        script_file.write(source)
    # The new file may shadow a stale import-system cache entry.
    importlib.invalidate_caches()
    return script_name
219
+
220
+
221
def make_zip_script(zip_dir, zip_basename, script_name, name_in_zip=None):
    """Pack *script_name* into <zip_dir>/<zip_basename>.zip.

    Returns (zip path, path of the script inside the archive).  When
    *name_in_zip* is omitted it defaults to the script's basename; a .pyc
    located in a __pycache__ directory is first converted to a legacy
    (source-adjacent) pyc so it can be imported from the zip.
    """
    zip_name = os.path.join(zip_dir, zip_basename + os.extsep + 'zip')
    with zipfile.ZipFile(zip_name, 'w') as zip_file:
        if name_in_zip is None:
            parts = script_name.split(os.sep)
            if len(parts) >= 2 and parts[-2] == '__pycache__':
                legacy_pyc = make_legacy_pyc(source_from_cache(script_name))
                name_in_zip = os.path.basename(legacy_pyc)
                script_name = legacy_pyc
            else:
                name_in_zip = os.path.basename(script_name)
        zip_file.write(script_name, name_in_zip)
    return zip_name, os.path.join(zip_name, name_in_zip)
239
+
240
+
241
def make_pkg(pkg_dir, init_source=''):
    """Create directory *pkg_dir* and give it an __init__.py containing
    *init_source*, making it an importable package."""
    os.mkdir(pkg_dir)
    make_script(pkg_dir, '__init__', init_source)
244
+
245
+
246
def make_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename,
                 source, depth=1, compiled=False):
    """Build <zip_dir>/<zip_basename>.zip containing package *pkg_name*
    nested *depth* levels deep, with the given script at the innermost
    level.  With compiled=True the .pyc files are stored instead of the
    sources.  The temporary on-disk files are removed afterwards.

    Returns (zip path, path of the script inside the archive).
    """
    to_remove = []
    init_name = make_script(zip_dir, '__init__', '')
    to_remove.append(init_name)
    init_basename = os.path.basename(init_name)
    script_name = make_script(zip_dir, script_basename, source)
    to_remove.append(script_name)
    if compiled:
        init_name = py_compile.compile(init_name, doraise=True)
        script_name = py_compile.compile(script_name, doraise=True)
        to_remove.extend((init_name, script_name))
    # One entry per nesting level: pkg, pkg/pkg, pkg/pkg/pkg, ...
    pkg_names = [os.sep.join([pkg_name] * i) for i in range(1, depth + 1)]
    script_name_in_zip = os.path.join(pkg_names[-1],
                                      os.path.basename(script_name))
    zip_name = os.path.join(zip_dir, zip_basename + os.extsep + 'zip')
    with zipfile.ZipFile(zip_name, 'w') as zip_file:
        for name in pkg_names:
            zip_file.write(init_name, os.path.join(name, init_basename))
        zip_file.write(script_name, script_name_in_zip)
    for name in to_remove:
        os.unlink(name)
    return zip_name, os.path.join(zip_name, script_name_in_zip)
274
+
275
+
276
def run_test_script(script):
    """Execute *script* in a child interpreter and raise AssertionError if
    it exits non-zero.  -u is used so output survives a hang or crash."""
    if not support.verbose:
        assert_python_ok("-u", script, "-v")
        return

    def title(text):
        return f"===== {text} ======"

    name = f"script {os.path.basename(script)}"
    print()
    print(title(name), flush=True)
    # In verbose mode the child inherits stdout/stderr, so output shows up
    # in real time and is less likely to be lost.
    args = [sys.executable, "-E", "-X", "faulthandler", "-u", script, "-v"]
    proc = subprocess.run(args)
    print(title(f"{name} completed: exit code {proc.returncode}"),
          flush=True)
    if proc.returncode:
        raise AssertionError(f"{name} failed")
deepseekvl2/lib/python3.10/test/support/socket_helper.py ADDED
@@ -0,0 +1,269 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import errno
3
+ import socket
4
+ import unittest
5
+ import sys
6
+
7
+ from .. import support
8
+
9
+
10
# Hostnames/addresses used when binding test sockets.
HOST = "localhost"    # name-based loopback; may resolve to IPv4 and/or IPv6
HOSTv4 = "127.0.0.1"  # explicit IPv4 loopback
HOSTv6 = "::1"        # explicit IPv6 loopback
13
+
14
+
15
def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
    """Pick and return a currently unused ephemeral port.

    A temporary socket of the given family/type is bound with port 0 so the
    OS chooses a free port; the socket is then closed and the port number
    returned.  Use this only when the port must be handed to a constructor
    or an external program (e.g. openssl's s_server -accept argument);
    otherwise always prefer bind_port().  Hard-coded ports must *never* be
    used: they break running multiple test instances on one host, which on
    Windows can even wedge the whole process (SO_REUSEADDR has different
    semantics there — see http://bugs.python.org/issue2550; the cure is
    SO_EXCLUSIVEADDRUSE, which bind_port() applies).

    XXX: note the inherent race — the OS may hand the returned port to
    another process after we close our temporary socket but before the
    caller binds it.  We can deal with this issue if/when we come across it.
    """
    tempsock = socket.socket(family, socktype)
    with tempsock:
        port = bind_port(tempsock)
    del tempsock
    return port
75
+
76
def bind_port(sock, host=HOST):
    """Bind *sock* to (*host*, 0) and return the OS-chosen port number.

    Relying on ephemeral ports keeps simultaneously-running tests from
    colliding.  For AF_INET/SOCK_STREAM sockets this refuses to proceed
    when SO_REUSEADDR or SO_REUSEPORT is already set — tests must never
    set those on TCP/IP sockets (only multi-socket UDP multicast tests
    legitimately need them).  Where SO_EXCLUSIVEADDRUSE exists (Windows)
    it is enabled, preventing anyone else from binding our host/port for
    the duration of the test.
    """
    if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM:
        if hasattr(socket, 'SO_REUSEADDR'):
            if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1:
                raise support.TestFailed("tests should never set the "
                                         "SO_REUSEADDR socket option on "
                                         "TCP/IP sockets!")
        if hasattr(socket, 'SO_REUSEPORT'):
            try:
                reuse = sock.getsockopt(socket.SOL_SOCKET,
                                        socket.SO_REUSEPORT)
            except OSError:
                # Python's socket module was compiled against headers that
                # define SO_REUSEPORT, but the running kernel is older and
                # does not support it.
                reuse = 0
            if reuse == 1:
                raise support.TestFailed("tests should never set the "
                                         "SO_REUSEPORT socket option on "
                                         "TCP/IP sockets!")
        if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'):
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)

    sock.bind((host, 0))
    return sock.getsockname()[1]
114
+
115
def bind_unix_socket(sock, addr):
    """Bind AF_UNIX socket *sock* to *addr*, raising SkipTest when the
    platform forbids it (PermissionError)."""
    assert sock.family == socket.AF_UNIX
    try:
        sock.bind(addr)
    except PermissionError:
        sock.close()
        raise unittest.SkipTest('cannot bind AF_UNIX sockets')
123
+
124
def _is_ipv6_enabled():
    """Return True when an IPv6 TCP socket can actually be bound to ::1."""
    if not socket.has_ipv6:
        return False
    sock = None
    try:
        sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        sock.bind((HOSTv6, 0))
        return True
    except OSError:
        return False
    finally:
        if sock:
            sock.close()

# Probed once at import time.
IPV6_ENABLED = _is_ipv6_enabled()
140
+
141
+
142
# Cached probe outcome: None = not probed yet, False = bind works,
# an OSError instance = bind failed with that error.
_bind_nix_socket_error = None
def skip_unless_bind_unix_socket(test):
    """Decorator: skip *test* unless unix-domain sockets can be bound."""
    if not hasattr(socket, 'AF_UNIX'):
        return unittest.skip('No UNIX Sockets')(test)
    global _bind_nix_socket_error
    if _bind_nix_socket_error is None:
        # Probe once per process and cache the outcome.
        from .os_helper import TESTFN, unlink
        path = TESTFN + "can_bind_unix_socket"
        with socket.socket(socket.AF_UNIX) as sock:
            try:
                sock.bind(path)
            except OSError as exc:
                _bind_nix_socket_error = exc
            else:
                _bind_nix_socket_error = False
            finally:
                unlink(path)
    if not _bind_nix_socket_error:
        return test
    msg = 'Requires a functional unix bind(): %s' % _bind_nix_socket_error
    return unittest.skip(msg)(test)
164
+
165
+
166
def get_socket_conn_refused_errs():
    """Return the errno values that may signal a refused connection."""
    errors = [errno.ECONNREFUSED]
    # Platform-dependent variants, kept in the historical order:
    # ENETUNREACH is sometimes returned instead of ECONNREFUSED on Solaris;
    # EADDRNOTAVAIL shows up randomly from socket.create_connection() on
    # Travis CI (bpo-31910); EHOSTUNREACH means the destination host cannot
    # be reached (bpo-37583).
    for name in ('ENETUNREACH', 'EADDRNOTAVAIL', 'EHOSTUNREACH'):
        if hasattr(errno, name):
            errors.append(getattr(errno, name))
    if not IPV6_ENABLED:
        errors.append(errno.EAFNOSUPPORT)
    return errors
185
+
186
+
187
# Sentinel: lets callers pass timeout=None (meaning "no default timeout")
# while still detecting "no timeout argument given at all".
_NOT_SET = object()

@contextlib.contextmanager
def transient_internet(resource_name, *, timeout=_NOT_SET, errnos=()):
    """Return a context manager that raises ResourceDenied when various issues
    with the internet connection manifest themselves as exceptions.

    resource_name: label used in the ResourceDenied message.
    timeout: socket default timeout installed while the block runs
        (support.INTERNET_TIMEOUT when not given; None leaves it unset).
    errnos: explicit list of errno values to treat as transient; when empty,
        a platform-tolerant default list is used.
    """
    # Imported lazily so importing this module does not require them.
    # NOTE(review): nntplib was removed in Python 3.12 (PEP 594) — this
    # helper assumes Python <= 3.11; confirm before reusing elsewhere.
    import nntplib
    import urllib.error
    if timeout is _NOT_SET:
        timeout = support.INTERNET_TIMEOUT

    # (name, fallback-number) pairs: the numeric fallback is used when the
    # running platform's errno/socket module lacks the symbolic constant.
    default_errnos = [
        ('ECONNREFUSED', 111),
        ('ECONNRESET', 104),
        ('EHOSTUNREACH', 113),
        ('ENETUNREACH', 101),
        ('ETIMEDOUT', 110),
        # socket.create_connection() fails randomly with
        # EADDRNOTAVAIL on Travis CI.
        ('EADDRNOTAVAIL', 99),
    ]
    default_gai_errnos = [
        ('EAI_AGAIN', -3),
        ('EAI_FAIL', -4),
        ('EAI_NONAME', -2),
        ('EAI_NODATA', -5),
        # Encountered when trying to resolve IPv6-only hostnames
        ('WSANO_DATA', 11004),
    ]

    denied = support.ResourceDenied("Resource %r is not available" % resource_name)
    captured_errnos = errnos
    gai_errnos = []
    # An explicit errnos argument disables the getaddrinfo() error list too.
    if not captured_errnos:
        captured_errnos = [getattr(errno, name, num)
                           for (name, num) in default_errnos]
        gai_errnos = [getattr(socket, name, num)
                      for (name, num) in default_gai_errnos]

    def filter_error(err):
        # Convert *err* into ResourceDenied when it looks transient:
        # timeouts, matching getaddrinfo failures, HTTP 5xx, URLError
        # wrapping a refused/timed-out/EOF condition, or a captured errno.
        n = getattr(err, 'errno', None)
        if (isinstance(err, TimeoutError) or
            (isinstance(err, socket.gaierror) and n in gai_errnos) or
            (isinstance(err, urllib.error.HTTPError) and
             500 <= err.code <= 599) or
            (isinstance(err, urllib.error.URLError) and
                 (("ConnectionRefusedError" in err.reason) or
                  ("TimeoutError" in err.reason) or
                  ("EOFError" in err.reason))) or
            n in captured_errnos):
            # NOTE(review): this writes only when NOT verbose, while the
            # NNTP handler below writes only when verbose — confirm the
            # asymmetry is intentional.
            if not support.verbose:
                sys.stderr.write(denied.args[0] + "\n")
            raise denied from err

    old_timeout = socket.getdefaulttimeout()
    try:
        if timeout is not None:
            socket.setdefaulttimeout(timeout)
        yield
    except nntplib.NNTPTemporaryError as err:
        if support.verbose:
            sys.stderr.write(denied.args[0] + "\n")
        raise denied from err
    except OSError as err:
        # urllib can wrap original socket errors multiple times (!), we must
        # unwrap to get at the original error.
        while True:
            a = err.args
            if len(a) >= 1 and isinstance(a[0], OSError):
                err = a[0]
            # The error can also be wrapped as args[1]:
            #    except socket.error as msg:
            #        raise OSError('socket error', msg).with_traceback(sys.exc_info()[2])
            elif len(a) >= 2 and isinstance(a[1], OSError):
                err = a[1]
            else:
                break
        # Re-raise unchanged when the unwrapped error is not transient.
        filter_error(err)
        raise
    # XXX should we catch generic exceptions and look for their
    # __cause__ or __context__?
    finally:
        socket.setdefaulttimeout(old_timeout)
deepseekvl2/lib/python3.10/test/support/testresult.py ADDED
@@ -0,0 +1,185 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ '''Test runner and result class for the regression test suite.
2
+
3
+ '''
4
+
5
+ import functools
6
+ import io
7
+ import sys
8
+ import time
9
+ import traceback
10
+ import unittest
11
+
12
class RegressionTestResult(unittest.TextTestResult):
    """TextTestResult that can additionally record results as an XML
    <testsuite> element (JUnit-style) when USE_XML is true."""

    # Class-wide switch; regrtest flips it on when --junit-xml is requested.
    USE_XML = False

    def __init__(self, stream, descriptions, verbosity):
        # Verbosity is collapsed to either 2 (verbose) or 0 (quiet).
        super().__init__(stream=stream, descriptions=descriptions,
                         verbosity=2 if verbosity else 0)
        # Always capture stdout/stderr during tests (unittest buffering).
        self.buffer = True
        if self.USE_XML:
            from xml.etree import ElementTree as ET
            from datetime import datetime
            self.__ET = ET
            self.__suite = ET.Element('testsuite')
            self.__suite.set('start', datetime.utcnow().isoformat(' '))
        # Per-test state: current <testcase> element and start timestamp.
        self.__e = None
        self.__start_time = None

    @classmethod
    def __getId(cls, test):
        """Best-effort stable identifier for *test* (its id() when callable)."""
        try:
            test_id = test.id
        except AttributeError:
            return str(test)
        try:
            return test_id()
        except TypeError:
            # id is present but not callable — use its string form.
            return str(test_id)
        # NOTE(review): unreachable — both try blocks above return;
        # kept byte-identical for fidelity with upstream.
        return repr(test)

    def startTest(self, test):
        super().startTest(test)
        if self.USE_XML:
            # Open a fresh <testcase> element for this test.
            self.__e = e = self.__ET.SubElement(self.__suite, 'testcase')
        self.__start_time = time.perf_counter()

    def _add_result(self, test, capture=False, **args):
        """Finalize the current <testcase>: set name/status/result/time,
        optionally attach captured output, and append extra sub-elements
        from **args (dict values become attributes, '' keys become text)."""
        if not self.USE_XML:
            return
        e = self.__e
        self.__e = None
        if e is None:
            return
        ET = self.__ET

        e.set('name', args.pop('name', self.__getId(test)))
        e.set('status', args.pop('status', 'run'))
        e.set('result', args.pop('result', 'completed'))
        if self.__start_time:
            e.set('time', f'{time.perf_counter() - self.__start_time:0.6f}')

        if capture:
            # _stdout_buffer/_stderr_buffer are unittest.TestResult
            # internals populated when self.buffer is true — presumably
            # stable across the supported versions; verify on upgrade.
            if self._stdout_buffer is not None:
                stdout = self._stdout_buffer.getvalue().rstrip()
                ET.SubElement(e, 'system-out').text = stdout
            if self._stderr_buffer is not None:
                stderr = self._stderr_buffer.getvalue().rstrip()
                ET.SubElement(e, 'system-err').text = stderr

        for k, v in args.items():
            # Skip empty keys and falsy values.
            if not k or not v:
                continue
            e2 = ET.SubElement(e, k)
            if hasattr(v, 'items'):
                for k2, v2 in v.items():
                    if k2:
                        e2.set(k2, str(v2))
                    else:
                        # Empty key: the value becomes the element text.
                        e2.text = str(v2)
            else:
                e2.text = str(v)

    @classmethod
    def __makeErrorDict(cls, err_type, err_value, err_tb):
        """Convert an exc_info triple into {'type', 'message', ''(=traceback)}."""
        if isinstance(err_type, type):
            if err_type.__module__ == 'builtins':
                typename = err_type.__name__
            else:
                typename = f'{err_type.__module__}.{err_type.__name__}'
        else:
            typename = repr(err_type)

        # msg: exception line only (no traceback); tb: full traceback.
        msg = traceback.format_exception(err_type, err_value, None)
        tb = traceback.format_exception(err_type, err_value, err_tb)

        return {
            'type': typename,
            'message': ''.join(msg),
            '': ''.join(tb),
        }

    def addError(self, test, err):
        self._add_result(test, True, error=self.__makeErrorDict(*err))
        super().addError(test, err)

    def addExpectedFailure(self, test, err):
        self._add_result(test, True, output=self.__makeErrorDict(*err))
        super().addExpectedFailure(test, err)

    def addFailure(self, test, err):
        self._add_result(test, True, failure=self.__makeErrorDict(*err))
        super().addFailure(test, err)

    def addSkip(self, test, reason):
        self._add_result(test, skipped=reason)
        super().addSkip(test, reason)

    def addSuccess(self, test):
        self._add_result(test)
        super().addSuccess(test)

    def addUnexpectedSuccess(self, test):
        self._add_result(test, outcome='UNEXPECTED_SUCCESS')
        super().addUnexpectedSuccess(test)

    def get_xml_element(self):
        """Return the accumulated <testsuite> element with summary counts.

        Raises ValueError when XML recording was not enabled.
        """
        if not self.USE_XML:
            raise ValueError("USE_XML is false")
        e = self.__suite
        e.set('tests', str(self.testsRun))
        e.set('errors', str(len(self.errors)))
        e.set('failures', str(len(self.failures)))
        return e
133
+
134
class QuietRegressionTestRunner:
    """Minimal runner: feeds a suite into a RegressionTestResult without
    printing per-test progress."""

    def __init__(self, stream, buffer=False):
        self.result = RegressionTestResult(stream, None, 0)
        self.result.buffer = buffer

    def run(self, test):
        test(self.result)
        return self.result
142
+
143
def get_test_runner_class(verbosity, buffer=False):
    """Return a runner factory: unittest.TextTestRunner when *verbosity*
    is truthy, the quiet runner otherwise."""
    if not verbosity:
        return functools.partial(QuietRegressionTestRunner, buffer=buffer)
    return functools.partial(unittest.TextTestRunner,
                             resultclass=RegressionTestResult,
                             buffer=buffer,
                             verbosity=verbosity)
150
+
151
def get_test_runner(stream, verbosity, capture_output=False):
    """Instantiate on *stream* the runner chosen by get_test_runner_class()."""
    runner_factory = get_test_runner_class(verbosity, capture_output)
    return runner_factory(stream)
153
+
154
if __name__ == '__main__':
    # Self-test: run a tiny suite through the runner machinery with XML
    # recording enabled, then dump the resulting JUnit-style element.
    import xml.etree.ElementTree as ET
    RegressionTestResult.USE_XML = True

    class TestTests(unittest.TestCase):
        def test_pass(self):
            pass

        def test_pass_slow(self):
            # Non-trivial duration so the 'time' attribute is visibly non-zero.
            time.sleep(1.0)

        def test_fail(self):
            print('stdout', file=sys.stdout)
            print('stderr', file=sys.stderr)
            self.fail('failure message')

        def test_error(self):
            print('stdout', file=sys.stdout)
            print('stderr', file=sys.stderr)
            raise RuntimeError('error message')

    suite = unittest.TestSuite()
    # NOTE(review): unittest.makeSuite() is deprecated since 3.11;
    # TestLoader().loadTestsFromTestCase() is the modern replacement.
    suite.addTest(unittest.makeSuite(TestTests))
    stream = io.StringIO()
    runner_cls = get_test_runner_class(sum(a == '-v' for a in sys.argv))
    # NOTE(review): the runner writes to sys.stdout, not `stream`, so the
    # 'Output:' line below always prints an empty string — confirm intent.
    runner = runner_cls(sys.stdout)
    result = runner.run(suite)
    print('Output:', stream.getvalue())
    print('XML: ', end='')
    for s in ET.tostringlist(result.get_xml_element()):
        print(s.decode(), end='')
    print()
deepseekvl2/lib/python3.10/test/support/threading_helper.py ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import _thread
2
+ import contextlib
3
+ import functools
4
+ import sys
5
+ import threading
6
+ import time
7
+
8
+ from test import support
9
+
10
+
11
+ #=======================================================================
12
+ # Threading support to prevent reporting refleaks when running regrtest.py -R
13
+
14
+ # NOTE: we use thread._count() rather than threading.enumerate() (or the
15
+ # moral equivalent thereof) because a threading.Thread object is still alive
16
+ # until its __bootstrap() method has returned, even after it has been
17
+ # unregistered from the threading module.
18
+ # thread._count(), on the other hand, only gets decremented *after* the
19
+ # __bootstrap() method has returned, which gives us reliable reference counts
20
+ # at the end of a test run.
21
+
22
+
23
def threading_setup():
    """Snapshot (_thread._count(), copy of threading._dangling) for a later
    threading_cleanup() check.

    _thread._count() is used rather than threading.enumerate() because it
    is only decremented *after* a thread's bootstrap code has fully
    returned, giving reliable counts at the end of a test run.
    """
    return _thread._count(), threading._dangling.copy()
25
+
26
+
27
def threading_cleanup(*original_values):
    """Wait (up to ~1 second, 100 x 10 ms polls) for the thread count and
    dangling-thread set to return to *original_values*; emit warnings if
    they never do."""
    _MAX_COUNT = 100

    for attempt in range(_MAX_COUNT):
        values = _thread._count(), threading._dangling
        if values == original_values:
            break

        if attempt == 0:
            # Warn once, on the first failed comparison.
            support.environment_altered = True
            dangling_threads = values[1]
            support.print_warning(f"threading_cleanup() failed to cleanup "
                                  f"{values[0] - original_values[0]} threads "
                                  f"(count: {values[0]}, "
                                  f"dangling: {len(dangling_threads)})")
            for thread in dangling_threads:
                support.print_warning(f"Dangling thread: {thread!r}")

            # Don't hold references to threads
            dangling_threads = None
        values = None

        time.sleep(0.01)
        support.gc_collect()
53
+
54
def reap_threads(func):
    """Decorator for tests that start threads: guarantees the threads are
    cleaned up (and warned about) even when the test fails."""
    @functools.wraps(func)
    def wrapper(*args):
        snapshot = threading_setup()
        try:
            return func(*args)
        finally:
            threading_cleanup(*snapshot)
    return wrapper
66
+
67
+
68
+ @contextlib.contextmanager
69
+ def wait_threads_exit(timeout=None):
70
+ """
71
+ bpo-31234: Context manager to wait until all threads created in the with
72
+ statement exit.
73
+
74
+ Use _thread.count() to check if threads exited. Indirectly, wait until
75
+ threads exit the internal t_bootstrap() C function of the _thread module.
76
+
77
+ threading_setup() and threading_cleanup() are designed to emit a warning
78
+ if a test leaves running threads in the background. This context manager
79
+ is designed to cleanup threads started by the _thread.start_new_thread()
80
+ which doesn't allow to wait for thread exit, whereas thread.Thread has a
81
+ join() method.
82
+ """
83
+ if timeout is None:
84
+ timeout = support.SHORT_TIMEOUT
85
+ old_count = _thread._count()
86
+ try:
87
+ yield
88
+ finally:
89
+ start_time = time.monotonic()
90
+ deadline = start_time + timeout
91
+ while True:
92
+ count = _thread._count()
93
+ if count <= old_count:
94
+ break
95
+ if time.monotonic() > deadline:
96
+ dt = time.monotonic() - start_time
97
+ msg = (f"wait_threads() failed to cleanup {count - old_count} "
98
+ f"threads after {dt:.1f} seconds "
99
+ f"(count: {count}, old count: {old_count})")
100
+ raise AssertionError(msg)
101
+ time.sleep(0.010)
102
+ support.gc_collect()
103
+
104
+
105
+ def join_thread(thread, timeout=None):
106
+ """Join a thread. Raise an AssertionError if the thread is still alive
107
+ after timeout seconds.
108
+ """
109
+ if timeout is None:
110
+ timeout = support.SHORT_TIMEOUT
111
+ thread.join(timeout)
112
+ if thread.is_alive():
113
+ msg = f"failed to join the thread in {timeout:.1f} seconds"
114
+ raise AssertionError(msg)
115
+
116
+
117
+ @contextlib.contextmanager
118
+ def start_threads(threads, unlock=None):
119
+ import faulthandler
120
+ threads = list(threads)
121
+ started = []
122
+ try:
123
+ try:
124
+ for t in threads:
125
+ t.start()
126
+ started.append(t)
127
+ except:
128
+ if support.verbose:
129
+ print("Can't start %d threads, only %d threads started" %
130
+ (len(threads), len(started)))
131
+ raise
132
+ yield
133
+ finally:
134
+ try:
135
+ if unlock:
136
+ unlock()
137
+ endtime = time.monotonic()
138
+ for timeout in range(1, 16):
139
+ endtime += 60
140
+ for t in started:
141
+ t.join(max(endtime - time.monotonic(), 0.01))
142
+ started = [t for t in started if t.is_alive()]
143
+ if not started:
144
+ break
145
+ if support.verbose:
146
+ print('Unable to join %d threads during a period of '
147
+ '%d minutes' % (len(started), timeout))
148
+ finally:
149
+ started = [t for t in started if t.is_alive()]
150
+ if started:
151
+ faulthandler.dump_traceback(sys.stdout)
152
+ raise AssertionError('Unable to join %d threads' % len(started))
153
+
154
+
155
+ class catch_threading_exception:
156
+ """
157
+ Context manager catching threading.Thread exception using
158
+ threading.excepthook.
159
+
160
+ Attributes set when an exception is caught:
161
+
162
+ * exc_type
163
+ * exc_value
164
+ * exc_traceback
165
+ * thread
166
+
167
+ See threading.excepthook() documentation for these attributes.
168
+
169
+ These attributes are deleted at the context manager exit.
170
+
171
+ Usage:
172
+
173
+ with threading_helper.catch_threading_exception() as cm:
174
+ # code spawning a thread which raises an exception
175
+ ...
176
+
177
+ # check the thread exception, use cm attributes:
178
+ # exc_type, exc_value, exc_traceback, thread
179
+ ...
180
+
181
+ # exc_type, exc_value, exc_traceback, thread attributes of cm no longer
182
+ # exists at this point
183
+ # (to avoid reference cycles)
184
+ """
185
+
186
+ def __init__(self):
187
+ self.exc_type = None
188
+ self.exc_value = None
189
+ self.exc_traceback = None
190
+ self.thread = None
191
+ self._old_hook = None
192
+
193
+ def _hook(self, args):
194
+ self.exc_type = args.exc_type
195
+ self.exc_value = args.exc_value
196
+ self.exc_traceback = args.exc_traceback
197
+ self.thread = args.thread
198
+
199
+ def __enter__(self):
200
+ self._old_hook = threading.excepthook
201
+ threading.excepthook = self._hook
202
+ return self
203
+
204
+ def __exit__(self, *exc_info):
205
+ threading.excepthook = self._old_hook
206
+ del self.exc_type
207
+ del self.exc_value
208
+ del self.exc_traceback
209
+ del self.thread
deepseekvl2/lib/python3.10/test/support/warnings_helper.py ADDED
@@ -0,0 +1,199 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import functools
3
+ import re
4
+ import sys
5
+ import warnings
6
+
7
+
8
+ def check_syntax_warning(testcase, statement, errtext='',
9
+ *, lineno=1, offset=None):
10
+ # Test also that a warning is emitted only once.
11
+ from test.support import check_syntax_error
12
+ with warnings.catch_warnings(record=True) as warns:
13
+ warnings.simplefilter('always', SyntaxWarning)
14
+ compile(statement, '<testcase>', 'exec')
15
+ testcase.assertEqual(len(warns), 1, warns)
16
+
17
+ warn, = warns
18
+ testcase.assertTrue(issubclass(warn.category, SyntaxWarning),
19
+ warn.category)
20
+ if errtext:
21
+ testcase.assertRegex(str(warn.message), errtext)
22
+ testcase.assertEqual(warn.filename, '<testcase>')
23
+ testcase.assertIsNotNone(warn.lineno)
24
+ if lineno is not None:
25
+ testcase.assertEqual(warn.lineno, lineno)
26
+
27
+ # SyntaxWarning should be converted to SyntaxError when raised,
28
+ # since the latter contains more information and provides better
29
+ # error report.
30
+ with warnings.catch_warnings(record=True) as warns:
31
+ warnings.simplefilter('error', SyntaxWarning)
32
+ check_syntax_error(testcase, statement, errtext,
33
+ lineno=lineno, offset=offset)
34
+ # No warnings are leaked when a SyntaxError is raised.
35
+ testcase.assertEqual(warns, [])
36
+
37
+
38
+ def ignore_warnings(*, category):
39
+ """Decorator to suppress warnings.
40
+
41
+ Use of context managers to hide warnings make diffs
42
+ more noisy and tools like 'git blame' less useful.
43
+ """
44
+ def decorator(test):
45
+ @functools.wraps(test)
46
+ def wrapper(self, *args, **kwargs):
47
+ with warnings.catch_warnings():
48
+ warnings.simplefilter('ignore', category=category)
49
+ return test(self, *args, **kwargs)
50
+ return wrapper
51
+ return decorator
52
+
53
+
54
+ class WarningsRecorder(object):
55
+ """Convenience wrapper for the warnings list returned on
56
+ entry to the warnings.catch_warnings() context manager.
57
+ """
58
+ def __init__(self, warnings_list):
59
+ self._warnings = warnings_list
60
+ self._last = 0
61
+
62
+ def __getattr__(self, attr):
63
+ if len(self._warnings) > self._last:
64
+ return getattr(self._warnings[-1], attr)
65
+ elif attr in warnings.WarningMessage._WARNING_DETAILS:
66
+ return None
67
+ raise AttributeError("%r has no attribute %r" % (self, attr))
68
+
69
+ @property
70
+ def warnings(self):
71
+ return self._warnings[self._last:]
72
+
73
+ def reset(self):
74
+ self._last = len(self._warnings)
75
+
76
+
77
+ @contextlib.contextmanager
78
+ def check_warnings(*filters, **kwargs):
79
+ """Context manager to silence warnings.
80
+
81
+ Accept 2-tuples as positional arguments:
82
+ ("message regexp", WarningCategory)
83
+
84
+ Optional argument:
85
+ - if 'quiet' is True, it does not fail if a filter catches nothing
86
+ (default True without argument,
87
+ default False if some filters are defined)
88
+
89
+ Without argument, it defaults to:
90
+ check_warnings(("", Warning), quiet=True)
91
+ """
92
+ quiet = kwargs.get('quiet')
93
+ if not filters:
94
+ filters = (("", Warning),)
95
+ # Preserve backward compatibility
96
+ if quiet is None:
97
+ quiet = True
98
+ return _filterwarnings(filters, quiet)
99
+
100
+
101
+ @contextlib.contextmanager
102
+ def check_no_warnings(testcase, message='', category=Warning, force_gc=False):
103
+ """Context manager to check that no warnings are emitted.
104
+
105
+ This context manager enables a given warning within its scope
106
+ and checks that no warnings are emitted even with that warning
107
+ enabled.
108
+
109
+ If force_gc is True, a garbage collection is attempted before checking
110
+ for warnings. This may help to catch warnings emitted when objects
111
+ are deleted, such as ResourceWarning.
112
+
113
+ Other keyword arguments are passed to warnings.filterwarnings().
114
+ """
115
+ from test.support import gc_collect
116
+ with warnings.catch_warnings(record=True) as warns:
117
+ warnings.filterwarnings('always',
118
+ message=message,
119
+ category=category)
120
+ yield
121
+ if force_gc:
122
+ gc_collect()
123
+ testcase.assertEqual(warns, [])
124
+
125
+
126
+ @contextlib.contextmanager
127
+ def check_no_resource_warning(testcase):
128
+ """Context manager to check that no ResourceWarning is emitted.
129
+
130
+ Usage:
131
+
132
+ with check_no_resource_warning(self):
133
+ f = open(...)
134
+ ...
135
+ del f
136
+
137
+ You must remove the object which may emit ResourceWarning before
138
+ the end of the context manager.
139
+ """
140
+ with check_no_warnings(testcase, category=ResourceWarning, force_gc=True):
141
+ yield
142
+
143
+
144
+ def _filterwarnings(filters, quiet=False):
145
+ """Catch the warnings, then check if all the expected
146
+ warnings have been raised and re-raise unexpected warnings.
147
+ If 'quiet' is True, only re-raise the unexpected warnings.
148
+ """
149
+ # Clear the warning registry of the calling module
150
+ # in order to re-raise the warnings.
151
+ frame = sys._getframe(2)
152
+ registry = frame.f_globals.get('__warningregistry__')
153
+ if registry:
154
+ registry.clear()
155
+ with warnings.catch_warnings(record=True) as w:
156
+ # Set filter "always" to record all warnings. Because
157
+ # test_warnings swap the module, we need to look up in
158
+ # the sys.modules dictionary.
159
+ sys.modules['warnings'].simplefilter("always")
160
+ yield WarningsRecorder(w)
161
+ # Filter the recorded warnings
162
+ reraise = list(w)
163
+ missing = []
164
+ for msg, cat in filters:
165
+ seen = False
166
+ for w in reraise[:]:
167
+ warning = w.message
168
+ # Filter out the matching messages
169
+ if (re.match(msg, str(warning), re.I) and
170
+ issubclass(warning.__class__, cat)):
171
+ seen = True
172
+ reraise.remove(w)
173
+ if not seen and not quiet:
174
+ # This filter caught nothing
175
+ missing.append((msg, cat.__name__))
176
+ if reraise:
177
+ raise AssertionError("unhandled warning %s" % reraise[0])
178
+ if missing:
179
+ raise AssertionError("filter (%r, %s) did not catch any warning" %
180
+ missing[0])
181
+
182
+
183
+ @contextlib.contextmanager
184
+ def save_restore_warnings_filters():
185
+ old_filters = warnings.filters[:]
186
+ try:
187
+ yield
188
+ finally:
189
+ warnings.filters[:] = old_filters
190
+
191
+
192
+ def _warn_about_deprecation():
193
+ warnings.warn(
194
+ "This is used in test_support test to ensure"
195
+ " support.ignore_deprecations_from() works as expected."
196
+ " You should not be seeing this.",
197
+ DeprecationWarning,
198
+ stacklevel=0,
199
+ )
deepseekvl2/lib/python3.10/test/test_script_helper.py ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unittests for test.support.script_helper. Who tests the test helper?"""
2
+
3
+ import subprocess
4
+ import sys
5
+ import os
6
+ from test.support import script_helper
7
+ import unittest
8
+ from unittest import mock
9
+
10
+
11
+ class TestScriptHelper(unittest.TestCase):
12
+
13
+ def test_assert_python_ok(self):
14
+ t = script_helper.assert_python_ok('-c', 'import sys; sys.exit(0)')
15
+ self.assertEqual(0, t[0], 'return code was not 0')
16
+
17
+ def test_assert_python_failure(self):
18
+ # I didn't import the sys module so this child will fail.
19
+ rc, out, err = script_helper.assert_python_failure('-c', 'sys.exit(0)')
20
+ self.assertNotEqual(0, rc, 'return code should not be 0')
21
+
22
+ def test_assert_python_ok_raises(self):
23
+ # I didn't import the sys module so this child will fail.
24
+ with self.assertRaises(AssertionError) as error_context:
25
+ script_helper.assert_python_ok('-c', 'sys.exit(0)')
26
+ error_msg = str(error_context.exception)
27
+ self.assertIn('command line:', error_msg)
28
+ self.assertIn('sys.exit(0)', error_msg, msg='unexpected command line')
29
+
30
+ def test_assert_python_failure_raises(self):
31
+ with self.assertRaises(AssertionError) as error_context:
32
+ script_helper.assert_python_failure('-c', 'import sys; sys.exit(0)')
33
+ error_msg = str(error_context.exception)
34
+ self.assertIn('Process return code is 0\n', error_msg)
35
+ self.assertIn('import sys; sys.exit(0)', error_msg,
36
+ msg='unexpected command line.')
37
+
38
+ @mock.patch('subprocess.Popen')
39
+ def test_assert_python_isolated_when_env_not_required(self, mock_popen):
40
+ with mock.patch.object(script_helper,
41
+ 'interpreter_requires_environment',
42
+ return_value=False) as mock_ire_func:
43
+ mock_popen.side_effect = RuntimeError('bail out of unittest')
44
+ try:
45
+ script_helper._assert_python(True, '-c', 'None')
46
+ except RuntimeError as err:
47
+ self.assertEqual('bail out of unittest', err.args[0])
48
+ self.assertEqual(1, mock_popen.call_count)
49
+ self.assertEqual(1, mock_ire_func.call_count)
50
+ popen_command = mock_popen.call_args[0][0]
51
+ self.assertEqual(sys.executable, popen_command[0])
52
+ self.assertIn('None', popen_command)
53
+ self.assertIn('-I', popen_command)
54
+ self.assertNotIn('-E', popen_command) # -I overrides this
55
+
56
+ @mock.patch('subprocess.Popen')
57
+ def test_assert_python_not_isolated_when_env_is_required(self, mock_popen):
58
+ """Ensure that -I is not passed when the environment is required."""
59
+ with mock.patch.object(script_helper,
60
+ 'interpreter_requires_environment',
61
+ return_value=True) as mock_ire_func:
62
+ mock_popen.side_effect = RuntimeError('bail out of unittest')
63
+ try:
64
+ script_helper._assert_python(True, '-c', 'None')
65
+ except RuntimeError as err:
66
+ self.assertEqual('bail out of unittest', err.args[0])
67
+ popen_command = mock_popen.call_args[0][0]
68
+ self.assertNotIn('-I', popen_command)
69
+ self.assertNotIn('-E', popen_command)
70
+
71
+
72
+ class TestScriptHelperEnvironment(unittest.TestCase):
73
+ """Code coverage for interpreter_requires_environment()."""
74
+
75
+ def setUp(self):
76
+ self.assertTrue(
77
+ hasattr(script_helper, '__cached_interp_requires_environment'))
78
+ # Reset the private cached state.
79
+ script_helper.__dict__['__cached_interp_requires_environment'] = None
80
+
81
+ def tearDown(self):
82
+ # Reset the private cached state.
83
+ script_helper.__dict__['__cached_interp_requires_environment'] = None
84
+
85
+ @mock.patch('subprocess.check_call')
86
+ def test_interpreter_requires_environment_true(self, mock_check_call):
87
+ with mock.patch.dict(os.environ):
88
+ os.environ.pop('PYTHONHOME', None)
89
+ mock_check_call.side_effect = subprocess.CalledProcessError('', '')
90
+ self.assertTrue(script_helper.interpreter_requires_environment())
91
+ self.assertTrue(script_helper.interpreter_requires_environment())
92
+ self.assertEqual(1, mock_check_call.call_count)
93
+
94
+ @mock.patch('subprocess.check_call')
95
+ def test_interpreter_requires_environment_false(self, mock_check_call):
96
+ with mock.patch.dict(os.environ):
97
+ os.environ.pop('PYTHONHOME', None)
98
+ # The mocked subprocess.check_call fakes a no-error process.
99
+ script_helper.interpreter_requires_environment()
100
+ self.assertFalse(script_helper.interpreter_requires_environment())
101
+ self.assertEqual(1, mock_check_call.call_count)
102
+
103
+ @mock.patch('subprocess.check_call')
104
+ def test_interpreter_requires_environment_details(self, mock_check_call):
105
+ with mock.patch.dict(os.environ):
106
+ os.environ.pop('PYTHONHOME', None)
107
+ script_helper.interpreter_requires_environment()
108
+ self.assertFalse(script_helper.interpreter_requires_environment())
109
+ self.assertFalse(script_helper.interpreter_requires_environment())
110
+ self.assertEqual(1, mock_check_call.call_count)
111
+ check_call_command = mock_check_call.call_args[0][0]
112
+ self.assertEqual(sys.executable, check_call_command[0])
113
+ self.assertIn('-E', check_call_command)
114
+
115
+ @mock.patch('subprocess.check_call')
116
+ def test_interpreter_requires_environment_with_pythonhome(self, mock_check_call):
117
+ with mock.patch.dict(os.environ):
118
+ os.environ['PYTHONHOME'] = 'MockedHome'
119
+ self.assertTrue(script_helper.interpreter_requires_environment())
120
+ self.assertTrue(script_helper.interpreter_requires_environment())
121
+ self.assertEqual(0, mock_check_call.call_count)
122
+
123
+
124
+ if __name__ == '__main__':
125
+ unittest.main()
deepseekvl2/lib/python3.10/test/test_support.py ADDED
@@ -0,0 +1,713 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import errno
2
+ import importlib
3
+ import io
4
+ import os
5
+ import shutil
6
+ import socket
7
+ import stat
8
+ import subprocess
9
+ import sys
10
+ import tempfile
11
+ import textwrap
12
+ import time
13
+ import unittest
14
+ import warnings
15
+
16
+ from test import support
17
+ from test.support import import_helper
18
+ from test.support import os_helper
19
+ from test.support import script_helper
20
+ from test.support import socket_helper
21
+ from test.support import warnings_helper
22
+
23
+ TESTFN = os_helper.TESTFN
24
+
25
+
26
+ class TestSupport(unittest.TestCase):
27
+ @classmethod
28
+ def setUpClass(cls):
29
+ orig_filter_len = len(warnings.filters)
30
+ cls._warnings_helper_token = support.ignore_deprecations_from(
31
+ "test.support.warnings_helper", like=".*used in test_support.*"
32
+ )
33
+ cls._test_support_token = support.ignore_deprecations_from(
34
+ "test.test_support", like=".*You should NOT be seeing this.*"
35
+ )
36
+ assert len(warnings.filters) == orig_filter_len + 2
37
+
38
+ @classmethod
39
+ def tearDownClass(cls):
40
+ orig_filter_len = len(warnings.filters)
41
+ support.clear_ignored_deprecations(
42
+ cls._warnings_helper_token,
43
+ cls._test_support_token,
44
+ )
45
+ assert len(warnings.filters) == orig_filter_len - 2
46
+
47
+ def test_ignored_deprecations_are_silent(self):
48
+ """Test support.ignore_deprecations_from() silences warnings"""
49
+ with warnings.catch_warnings(record=True) as warning_objs:
50
+ warnings_helper._warn_about_deprecation()
51
+ warnings.warn("You should NOT be seeing this.", DeprecationWarning)
52
+ messages = [str(w.message) for w in warning_objs]
53
+ self.assertEqual(len(messages), 0, messages)
54
+
55
+ def test_import_module(self):
56
+ import_helper.import_module("ftplib")
57
+ self.assertRaises(unittest.SkipTest,
58
+ import_helper.import_module, "foo")
59
+
60
+ def test_import_fresh_module(self):
61
+ import_helper.import_fresh_module("ftplib")
62
+
63
+ def test_get_attribute(self):
64
+ self.assertEqual(support.get_attribute(self, "test_get_attribute"),
65
+ self.test_get_attribute)
66
+ self.assertRaises(unittest.SkipTest, support.get_attribute, self, "foo")
67
+
68
+ @unittest.skip("failing buildbots")
69
+ def test_get_original_stdout(self):
70
+ self.assertEqual(support.get_original_stdout(), sys.stdout)
71
+
72
+ def test_unload(self):
73
+ import sched
74
+ self.assertIn("sched", sys.modules)
75
+ import_helper.unload("sched")
76
+ self.assertNotIn("sched", sys.modules)
77
+
78
+ def test_unlink(self):
79
+ with open(TESTFN, "w", encoding="utf-8") as f:
80
+ pass
81
+ os_helper.unlink(TESTFN)
82
+ self.assertFalse(os.path.exists(TESTFN))
83
+ os_helper.unlink(TESTFN)
84
+
85
+ def test_rmtree(self):
86
+ dirpath = os_helper.TESTFN + 'd'
87
+ subdirpath = os.path.join(dirpath, 'subdir')
88
+ os.mkdir(dirpath)
89
+ os.mkdir(subdirpath)
90
+ os_helper.rmtree(dirpath)
91
+ self.assertFalse(os.path.exists(dirpath))
92
+ with support.swap_attr(support, 'verbose', 0):
93
+ os_helper.rmtree(dirpath)
94
+
95
+ os.mkdir(dirpath)
96
+ os.mkdir(subdirpath)
97
+ os.chmod(dirpath, stat.S_IRUSR|stat.S_IXUSR)
98
+ with support.swap_attr(support, 'verbose', 0):
99
+ os_helper.rmtree(dirpath)
100
+ self.assertFalse(os.path.exists(dirpath))
101
+
102
+ os.mkdir(dirpath)
103
+ os.mkdir(subdirpath)
104
+ os.chmod(dirpath, 0)
105
+ with support.swap_attr(support, 'verbose', 0):
106
+ os_helper.rmtree(dirpath)
107
+ self.assertFalse(os.path.exists(dirpath))
108
+
109
+ def test_forget(self):
110
+ mod_filename = TESTFN + '.py'
111
+ with open(mod_filename, 'w', encoding="utf-8") as f:
112
+ print('foo = 1', file=f)
113
+ sys.path.insert(0, os.curdir)
114
+ importlib.invalidate_caches()
115
+ try:
116
+ mod = __import__(TESTFN)
117
+ self.assertIn(TESTFN, sys.modules)
118
+
119
+ import_helper.forget(TESTFN)
120
+ self.assertNotIn(TESTFN, sys.modules)
121
+ finally:
122
+ del sys.path[0]
123
+ os_helper.unlink(mod_filename)
124
+ os_helper.rmtree('__pycache__')
125
+
126
+ def test_HOST(self):
127
+ s = socket.create_server((socket_helper.HOST, 0))
128
+ s.close()
129
+
130
+ def test_find_unused_port(self):
131
+ port = socket_helper.find_unused_port()
132
+ s = socket.create_server((socket_helper.HOST, port))
133
+ s.close()
134
+
135
+ def test_bind_port(self):
136
+ s = socket.socket()
137
+ socket_helper.bind_port(s)
138
+ s.listen()
139
+ s.close()
140
+
141
+ # Tests for temp_dir()
142
+
143
+ def test_temp_dir(self):
144
+ """Test that temp_dir() creates and destroys its directory."""
145
+ parent_dir = tempfile.mkdtemp()
146
+ parent_dir = os.path.realpath(parent_dir)
147
+
148
+ try:
149
+ path = os.path.join(parent_dir, 'temp')
150
+ self.assertFalse(os.path.isdir(path))
151
+ with os_helper.temp_dir(path) as temp_path:
152
+ self.assertEqual(temp_path, path)
153
+ self.assertTrue(os.path.isdir(path))
154
+ self.assertFalse(os.path.isdir(path))
155
+ finally:
156
+ os_helper.rmtree(parent_dir)
157
+
158
+ def test_temp_dir__path_none(self):
159
+ """Test passing no path."""
160
+ with os_helper.temp_dir() as temp_path:
161
+ self.assertTrue(os.path.isdir(temp_path))
162
+ self.assertFalse(os.path.isdir(temp_path))
163
+
164
+ def test_temp_dir__existing_dir__quiet_default(self):
165
+ """Test passing a directory that already exists."""
166
+ def call_temp_dir(path):
167
+ with os_helper.temp_dir(path) as temp_path:
168
+ raise Exception("should not get here")
169
+
170
+ path = tempfile.mkdtemp()
171
+ path = os.path.realpath(path)
172
+ try:
173
+ self.assertTrue(os.path.isdir(path))
174
+ self.assertRaises(FileExistsError, call_temp_dir, path)
175
+ # Make sure temp_dir did not delete the original directory.
176
+ self.assertTrue(os.path.isdir(path))
177
+ finally:
178
+ shutil.rmtree(path)
179
+
180
+ def test_temp_dir__existing_dir__quiet_true(self):
181
+ """Test passing a directory that already exists with quiet=True."""
182
+ path = tempfile.mkdtemp()
183
+ path = os.path.realpath(path)
184
+
185
+ try:
186
+ with warnings_helper.check_warnings() as recorder:
187
+ with os_helper.temp_dir(path, quiet=True) as temp_path:
188
+ self.assertEqual(path, temp_path)
189
+ warnings = [str(w.message) for w in recorder.warnings]
190
+ # Make sure temp_dir did not delete the original directory.
191
+ self.assertTrue(os.path.isdir(path))
192
+ finally:
193
+ shutil.rmtree(path)
194
+
195
+ self.assertEqual(len(warnings), 1, warnings)
196
+ warn = warnings[0]
197
+ self.assertTrue(warn.startswith(f'tests may fail, unable to create '
198
+ f'temporary directory {path!r}: '),
199
+ warn)
200
+
201
+ @unittest.skipUnless(hasattr(os, "fork"), "test requires os.fork")
202
+ def test_temp_dir__forked_child(self):
203
+ """Test that a forked child process does not remove the directory."""
204
+ # See bpo-30028 for details.
205
+ # Run the test as an external script, because it uses fork.
206
+ script_helper.assert_python_ok("-c", textwrap.dedent("""
207
+ import os
208
+ from test import support
209
+ from test.support import os_helper
210
+ with os_helper.temp_cwd() as temp_path:
211
+ pid = os.fork()
212
+ if pid != 0:
213
+ # parent process
214
+
215
+ # wait for the child to terminate
216
+ support.wait_process(pid, exitcode=0)
217
+
218
+ # Make sure that temp_path is still present. When the child
219
+ # process leaves the 'temp_cwd'-context, the __exit__()-
220
+ # method of the context must not remove the temporary
221
+ # directory.
222
+ if not os.path.isdir(temp_path):
223
+ raise AssertionError("Child removed temp_path.")
224
+ """))
225
+
226
+ # Tests for change_cwd()
227
+
228
+ def test_change_cwd(self):
229
+ original_cwd = os.getcwd()
230
+
231
+ with os_helper.temp_dir() as temp_path:
232
+ with os_helper.change_cwd(temp_path) as new_cwd:
233
+ self.assertEqual(new_cwd, temp_path)
234
+ self.assertEqual(os.getcwd(), new_cwd)
235
+
236
+ self.assertEqual(os.getcwd(), original_cwd)
237
+
238
+ def test_change_cwd__non_existent_dir(self):
239
+ """Test passing a non-existent directory."""
240
+ original_cwd = os.getcwd()
241
+
242
+ def call_change_cwd(path):
243
+ with os_helper.change_cwd(path) as new_cwd:
244
+ raise Exception("should not get here")
245
+
246
+ with os_helper.temp_dir() as parent_dir:
247
+ non_existent_dir = os.path.join(parent_dir, 'does_not_exist')
248
+ self.assertRaises(FileNotFoundError, call_change_cwd,
249
+ non_existent_dir)
250
+
251
+ self.assertEqual(os.getcwd(), original_cwd)
252
+
253
+ def test_change_cwd__non_existent_dir__quiet_true(self):
254
+ """Test passing a non-existent directory with quiet=True."""
255
+ original_cwd = os.getcwd()
256
+
257
+ with os_helper.temp_dir() as parent_dir:
258
+ bad_dir = os.path.join(parent_dir, 'does_not_exist')
259
+ with warnings_helper.check_warnings() as recorder:
260
+ with os_helper.change_cwd(bad_dir, quiet=True) as new_cwd:
261
+ self.assertEqual(new_cwd, original_cwd)
262
+ self.assertEqual(os.getcwd(), new_cwd)
263
+ warnings = [str(w.message) for w in recorder.warnings]
264
+
265
+ self.assertEqual(len(warnings), 1, warnings)
266
+ warn = warnings[0]
267
+ self.assertTrue(warn.startswith(f'tests may fail, unable to change '
268
+ f'the current working directory '
269
+ f'to {bad_dir!r}: '),
270
+ warn)
271
+
272
+ # Tests for change_cwd()
273
+
274
+ def test_change_cwd__chdir_warning(self):
275
+ """Check the warning message when os.chdir() fails."""
276
+ path = TESTFN + '_does_not_exist'
277
+ with warnings_helper.check_warnings() as recorder:
278
+ with os_helper.change_cwd(path=path, quiet=True):
279
+ pass
280
+ messages = [str(w.message) for w in recorder.warnings]
281
+
282
+ self.assertEqual(len(messages), 1, messages)
283
+ msg = messages[0]
284
+ self.assertTrue(msg.startswith(f'tests may fail, unable to change '
285
+ f'the current working directory '
286
+ f'to {path!r}: '),
287
+ msg)
288
+
289
+ # Tests for temp_cwd()
290
+
291
+ def test_temp_cwd(self):
292
+ here = os.getcwd()
293
+ with os_helper.temp_cwd(name=TESTFN):
294
+ self.assertEqual(os.path.basename(os.getcwd()), TESTFN)
295
+ self.assertFalse(os.path.exists(TESTFN))
296
+ self.assertEqual(os.getcwd(), here)
297
+
298
+
299
+ def test_temp_cwd__name_none(self):
300
+ """Test passing None to temp_cwd()."""
301
+ original_cwd = os.getcwd()
302
+ with os_helper.temp_cwd(name=None) as new_cwd:
303
+ self.assertNotEqual(new_cwd, original_cwd)
304
+ self.assertTrue(os.path.isdir(new_cwd))
305
+ self.assertEqual(os.getcwd(), new_cwd)
306
+ self.assertEqual(os.getcwd(), original_cwd)
307
+
308
+ def test_sortdict(self):
309
+ self.assertEqual(support.sortdict({3:3, 2:2, 1:1}), "{1: 1, 2: 2, 3: 3}")
310
+
311
+ def test_make_bad_fd(self):
312
+ fd = os_helper.make_bad_fd()
313
+ with self.assertRaises(OSError) as cm:
314
+ os.write(fd, b"foo")
315
+ self.assertEqual(cm.exception.errno, errno.EBADF)
316
+
317
+ def test_check_syntax_error(self):
318
+ support.check_syntax_error(self, "def class", lineno=1, offset=5)
319
+ with self.assertRaises(AssertionError):
320
+ support.check_syntax_error(self, "x=1")
321
+
322
+ def test_CleanImport(self):
323
+ import importlib
324
+ with import_helper.CleanImport("pprint"):
325
+ importlib.import_module("pprint")
326
+
327
+ def test_DirsOnSysPath(self):
328
+ with import_helper.DirsOnSysPath('foo', 'bar'):
329
+ self.assertIn("foo", sys.path)
330
+ self.assertIn("bar", sys.path)
331
+ self.assertNotIn("foo", sys.path)
332
+ self.assertNotIn("bar", sys.path)
333
+
334
+ def test_captured_stdout(self):
335
+ with support.captured_stdout() as stdout:
336
+ print("hello")
337
+ self.assertEqual(stdout.getvalue(), "hello\n")
338
+
339
+ def test_captured_stderr(self):
340
+ with support.captured_stderr() as stderr:
341
+ print("hello", file=sys.stderr)
342
+ self.assertEqual(stderr.getvalue(), "hello\n")
343
+
344
+ def test_captured_stdin(self):
345
+ with support.captured_stdin() as stdin:
346
+ stdin.write('hello\n')
347
+ stdin.seek(0)
348
+ # call test code that consumes from sys.stdin
349
+ captured = input()
350
+ self.assertEqual(captured, "hello")
351
+
352
+ def test_gc_collect(self):
353
+ support.gc_collect()
354
+
355
+ def test_python_is_optimized(self):
356
+ self.assertIsInstance(support.python_is_optimized(), bool)
357
+
358
+ def test_swap_attr(self):
359
+ class Obj:
360
+ pass
361
+ obj = Obj()
362
+ obj.x = 1
363
+ with support.swap_attr(obj, "x", 5) as x:
364
+ self.assertEqual(obj.x, 5)
365
+ self.assertEqual(x, 1)
366
+ self.assertEqual(obj.x, 1)
367
+ with support.swap_attr(obj, "y", 5) as y:
368
+ self.assertEqual(obj.y, 5)
369
+ self.assertIsNone(y)
370
+ self.assertFalse(hasattr(obj, 'y'))
371
+ with support.swap_attr(obj, "y", 5):
372
+ del obj.y
373
+ self.assertFalse(hasattr(obj, 'y'))
374
+
375
+ def test_swap_item(self):
376
+ D = {"x":1}
377
+ with support.swap_item(D, "x", 5) as x:
378
+ self.assertEqual(D["x"], 5)
379
+ self.assertEqual(x, 1)
380
+ self.assertEqual(D["x"], 1)
381
+ with support.swap_item(D, "y", 5) as y:
382
+ self.assertEqual(D["y"], 5)
383
+ self.assertIsNone(y)
384
+ self.assertNotIn("y", D)
385
+ with support.swap_item(D, "y", 5):
386
+ del D["y"]
387
+ self.assertNotIn("y", D)
388
+
389
class RefClass:
    """Fixture: reference API surface for the detect_api_mismatch() tests."""
    attribute1 = None
    attribute2 = None
    _hidden_attribute1 = None  # private names are ignored by the check
    __magic_1__ = None         # dunder names are compared too
394
+
395
class OtherClass:
    """Fixture: API surface that partially overlaps RefClass."""
    attribute2 = None
    attribute3 = None
    __magic_1__ = None
    __magic_2__ = None
400
+
401
+ def test_detect_api_mismatch(self):
402
+ missing_items = support.detect_api_mismatch(self.RefClass,
403
+ self.OtherClass)
404
+ self.assertEqual({'attribute1'}, missing_items)
405
+
406
+ missing_items = support.detect_api_mismatch(self.OtherClass,
407
+ self.RefClass)
408
+ self.assertEqual({'attribute3', '__magic_2__'}, missing_items)
409
+
410
+ def test_detect_api_mismatch__ignore(self):
411
+ ignore = ['attribute1', 'attribute3', '__magic_2__', 'not_in_either']
412
+
413
+ missing_items = support.detect_api_mismatch(
414
+ self.RefClass, self.OtherClass, ignore=ignore)
415
+ self.assertEqual(set(), missing_items)
416
+
417
+ missing_items = support.detect_api_mismatch(
418
+ self.OtherClass, self.RefClass, ignore=ignore)
419
+ self.assertEqual(set(), missing_items)
420
+
421
def test_check__all__(self):
    """check__all__() validates a module's __all__ against its public names."""
    # tempfile: a module with a small, well-known set of exceptions.
    support.check__all__(self,
                         tempfile,
                         extra={'tempdir'},
                         not_exported={'template'})

    # unittest: a package whose public API is re-exported from submodules,
    # so the submodule names must be listed explicitly.
    extra = {'TextTestResult', 'installHandler'}
    not_exported = {'load_tests', "TestProgram", "BaseTestSuite"}
    support.check__all__(self,
                         unittest,
                         ("unittest.result", "unittest.case",
                          "unittest.suite", "unittest.loader",
                          "unittest.main", "unittest.runner",
                          "unittest.signals", "unittest.async_case"),
                         extra=extra,
                         not_exported=not_exported)

    # Without the submodule list the unittest check must fail.
    self.assertRaises(AssertionError, support.check__all__, self, unittest)
442
+
443
@unittest.skipUnless(hasattr(os, 'waitpid') and hasattr(os, 'WNOHANG'),
                     'need os.waitpid() and os.WNOHANG')
def test_reap_children(self):
    """reap_children() must reap a finished child and warn about it.

    Forks a child that exits immediately, then polls reap_children()
    until it reports — via support.environment_altered and a warning
    written to sys.__stderr__ — that the child was collected.

    Fix: the original assigned ``t0 = time.monotonic()`` but never read
    it; the dead local has been removed.
    """
    # Make sure that there is no other pending child process.
    support.reap_children()

    # Create a child process that exits straight away.
    pid = os.fork()
    if pid == 0:
        # Child process: do nothing, just exit.
        os._exit(0)

    deadline = time.monotonic() + support.SHORT_TIMEOUT

    was_altered = support.environment_altered
    try:
        support.environment_altered = False
        stderr = io.StringIO()

        while True:
            if time.monotonic() > deadline:
                self.fail("timeout")

            # reap_children() writes its warning to sys.__stderr__;
            # temporarily capture that stream.
            old_stderr = sys.__stderr__
            try:
                sys.__stderr__ = stderr
                support.reap_children()
            finally:
                sys.__stderr__ = old_stderr

            # Use environment_altered to check if reap_children() found
            # the child process.
            if support.environment_altered:
                break

            # Loop until the child process has completed.
            time.sleep(0.100)

        msg = "Warning -- reap_children() reaped child process %s" % pid
        self.assertIn(msg, stderr.getvalue())
        self.assertTrue(support.environment_altered)
    finally:
        support.environment_altered = was_altered

    # Just in case, check again that there is no other
    # pending child process.
    support.reap_children()
491
+
492
def check_options(self, args, func, expected=None):
    """Run *func* from test.support in a subprocess started with *args*.

    The helper is executed as ``sys.executable *args -c ...`` in an
    environment stripped of all PYTHON* variables, and the repr() of its
    return value is compared against *expected* (default: *args*).
    """
    code = f'from test.support import {func}; print(repr({func}()))'
    cmd = [sys.executable, *args, '-c', code]
    # Drop PYTHON* variables so they cannot influence the child's flags.
    env = {name: value for name, value in os.environ.items()
           if not name.startswith('PYTHON')}
    proc = subprocess.run(cmd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.DEVNULL,
                          universal_newlines=True,
                          env=env)
    if expected is None:
        expected = args
    self.assertEqual(proc.stdout.rstrip(), repr(expected))
    self.assertEqual(proc.returncode, 0)
506
+
507
def test_args_from_interpreter_flags(self):
    """args_from_interpreter_flags() round-trips interpreter options."""
    cases = [
        # no option
        [],
        # single options
        ['-B'], ['-s'], ['-S'], ['-E'], ['-v'], ['-b'], ['-q'], ['-I'],
        # same option repeated
        ['-bb'],
        ['-vvv'],
        # -W options
        ['-Wignore'],
        # -X options
        ['-X', 'dev'],
        ['-Wignore', '-X', 'dev'],
        ['-X', 'faulthandler'],
        ['-X', 'importtime'],
        ['-X', 'showrefcount'],
        ['-X', 'tracemalloc'],
        ['-X', 'tracemalloc=3'],
    ]
    for opts in cases:
        with self.subTest(opts=opts):
            self.check_options(opts, 'args_from_interpreter_flags')

    # -I implies -E and -s: only -I is reported back.
    self.check_options(['-I', '-E', '-s'], 'args_from_interpreter_flags',
                       ['-I'])
540
+
541
def test_optim_args_from_interpreter_flags(self):
    """optim_args_from_interpreter_flags() round-trips -O optimization levels."""
    for opts in ([], ['-O'], ['-OO'], ['-OOOO']):
        with self.subTest(opts=opts):
            self.check_options(opts, 'optim_args_from_interpreter_flags')
552
+
553
def test_match_test(self):
    """Exercise support.set_match_tests() accept and reject patterns.

    The accept patterns (first argument) and ignore patterns (second
    argument) are tested separately; the cached matcher is reset around
    each group via swap_attr on support._match_test_func.
    """
    class FakeTest:
        def __init__(self, test_id):
            self.test_id = test_id

        def id(self):
            return self.test_id

    test_access = FakeTest('test.test_os.FileTests.test_access')
    test_chdir = FakeTest('test.test_os.Win32ErrorTests.test_chdir')

    # Test acceptance
    with support.swap_attr(support, '_match_test_func', None):
        # match all
        support.set_match_tests([])
        self.assertTrue(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        # match all using None
        support.set_match_tests(None, None)
        self.assertTrue(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        # match the full test identifier
        support.set_match_tests([test_access.id()], None)
        self.assertTrue(support.match_test(test_access))
        self.assertFalse(support.match_test(test_chdir))

        # match the module name
        support.set_match_tests(['test_os'], None)
        self.assertTrue(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        # Test '*' pattern
        support.set_match_tests(['test_*'], None)
        self.assertTrue(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        # Test case sensitivity
        support.set_match_tests(['filetests'], None)
        self.assertFalse(support.match_test(test_access))
        support.set_match_tests(['FileTests'], None)
        self.assertTrue(support.match_test(test_access))

        # Test pattern containing '.' and a '*' metacharacter
        support.set_match_tests(['*test_os.*.test_*'], None)
        self.assertTrue(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        # Multiple patterns
        support.set_match_tests([test_access.id(), test_chdir.id()], None)
        self.assertTrue(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        support.set_match_tests(['test_access', 'DONTMATCH'], None)
        self.assertTrue(support.match_test(test_access))
        self.assertFalse(support.match_test(test_chdir))

    # Test rejection
    with support.swap_attr(support, '_match_test_func', None):
        # match all
        support.set_match_tests(ignore_patterns=[])
        self.assertTrue(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        # match all using None
        support.set_match_tests(None, None)
        self.assertTrue(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        # match the full test identifier
        support.set_match_tests(None, [test_access.id()])
        self.assertFalse(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))

        # match the module name
        support.set_match_tests(None, ['test_os'])
        self.assertFalse(support.match_test(test_access))
        self.assertFalse(support.match_test(test_chdir))

        # Test '*' pattern
        support.set_match_tests(None, ['test_*'])
        self.assertFalse(support.match_test(test_access))
        self.assertFalse(support.match_test(test_chdir))

        # Test case sensitivity
        support.set_match_tests(None, ['filetests'])
        self.assertTrue(support.match_test(test_access))
        support.set_match_tests(None, ['FileTests'])
        self.assertFalse(support.match_test(test_access))

        # Test pattern containing '.' and a '*' metacharacter
        support.set_match_tests(None, ['*test_os.*.test_*'])
        self.assertFalse(support.match_test(test_access))
        self.assertFalse(support.match_test(test_chdir))

        # Multiple patterns
        support.set_match_tests(None, [test_access.id(), test_chdir.id()])
        self.assertFalse(support.match_test(test_access))
        self.assertFalse(support.match_test(test_chdir))

        support.set_match_tests(None, ['test_access', 'DONTMATCH'])
        self.assertFalse(support.match_test(test_access))
        self.assertTrue(support.match_test(test_chdir))
657
+
658
+ def test_fd_count(self):
659
+ # We cannot test the absolute value of fd_count(): on old Linux
660
+ # kernel or glibc versions, os.urandom() keeps a FD open on
661
+ # /dev/urandom device and Python has 4 FD opens instead of 3.
662
+ start = os_helper.fd_count()
663
+ fd = os.open(__file__, os.O_RDONLY)
664
+ try:
665
+ more = os_helper.fd_count()
666
+ finally:
667
+ os.close(fd)
668
+ self.assertEqual(more - start, 1)
669
+
670
def check_print_warning(self, msg, expected):
    """Assert that support.print_warning(msg) writes *expected* to sys.__stderr__."""
    buffer = io.StringIO()

    # print_warning() targets sys.__stderr__, not sys.stderr.
    old_stderr = sys.__stderr__
    try:
        sys.__stderr__ = buffer
        support.print_warning(msg)
    finally:
        sys.__stderr__ = old_stderr

    self.assertEqual(buffer.getvalue(), expected)
681
+
682
def test_print_warning(self):
    """print_warning() prefixes every output line with 'Warning -- '."""
    self.check_print_warning("msg",
                             "Warning -- msg\n")
    # Each embedded newline starts a new prefixed line.
    self.check_print_warning("a\nb",
                             'Warning -- a\nWarning -- b\n')
687
+
688
+     # XXX - the following is a list of untested APIs
689
+ # make_legacy_pyc
690
+ # is_resource_enabled
691
+ # requires
692
+ # fcmp
693
+     # umask
694
+ # findfile
695
+ # check_warnings
696
+ # EnvironmentVarGuard
697
+ # transient_internet
698
+ # run_with_locale
699
+ # set_memlimit
700
+ # bigmemtest
701
+ # precisionbigmemtest
702
+ # bigaddrspacetest
703
+ # requires_resource
704
+ # run_doctest
705
+ # threading_cleanup
706
+ # reap_threads
707
+ # can_symlink
708
+ # skip_unless_symlink
709
+ # SuppressCrashReport
710
+
711
+
712
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()