ZTWHHH commited on
Commit
fedd766
·
verified ·
1 Parent(s): d4f2d59

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__init__.py +5 -0
  2. evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__main__.py +5 -0
  3. evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__pycache__/__init__.cpython-310.pyc +0 -0
  4. evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__pycache__/__main__.cpython-310.pyc +0 -0
  5. evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__pycache__/cpuinfo.cpython-310.pyc +0 -0
  6. evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/cpuinfo.py +2827 -0
  7. evalkit_cambrian/lib/python3.10/site-packages/ftfy/__pycache__/__init__.cpython-310.pyc +0 -0
  8. evalkit_cambrian/lib/python3.10/site-packages/ftfy/__pycache__/badness.cpython-310.pyc +0 -0
  9. evalkit_cambrian/lib/python3.10/site-packages/ftfy/__pycache__/chardata.cpython-310.pyc +0 -0
  10. evalkit_cambrian/lib/python3.10/site-packages/referencing/__init__.py +7 -0
  11. evalkit_cambrian/lib/python3.10/site-packages/referencing/__pycache__/_attrs.cpython-310.pyc +0 -0
  12. evalkit_cambrian/lib/python3.10/site-packages/referencing/__pycache__/exceptions.cpython-310.pyc +0 -0
  13. evalkit_cambrian/lib/python3.10/site-packages/referencing/__pycache__/jsonschema.cpython-310.pyc +0 -0
  14. evalkit_cambrian/lib/python3.10/site-packages/referencing/__pycache__/retrieval.cpython-310.pyc +0 -0
  15. evalkit_cambrian/lib/python3.10/site-packages/referencing/_attrs.py +31 -0
  16. evalkit_cambrian/lib/python3.10/site-packages/referencing/_attrs.pyi +20 -0
  17. evalkit_cambrian/lib/python3.10/site-packages/referencing/_core.py +739 -0
  18. evalkit_cambrian/lib/python3.10/site-packages/referencing/exceptions.py +165 -0
  19. evalkit_cambrian/lib/python3.10/site-packages/referencing/jsonschema.py +642 -0
  20. evalkit_cambrian/lib/python3.10/site-packages/referencing/py.typed +0 -0
  21. evalkit_cambrian/lib/python3.10/site-packages/referencing/retrieval.py +92 -0
  22. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__init__.py +0 -0
  23. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  24. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_core.cpython-310.pyc +0 -0
  25. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_exceptions.cpython-310.pyc +0 -0
  26. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_jsonschema.cpython-310.pyc +0 -0
  27. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_referencing_suite.cpython-310.pyc +0 -0
  28. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_retrieval.cpython-310.pyc +0 -0
  29. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_core.py +1057 -0
  30. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_exceptions.py +34 -0
  31. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_jsonschema.py +382 -0
  32. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_referencing_suite.py +66 -0
  33. evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_retrieval.py +106 -0
  34. evalkit_cambrian/lib/python3.10/site-packages/referencing/typing.py +61 -0
  35. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__init__.py +59 -0
  36. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/api.cpython-310.pyc +0 -0
  37. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/exceptions.cpython-310.pyc +0 -0
  38. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/items.cpython-310.pyc +0 -0
  39. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/parser.cpython-310.pyc +0 -0
  40. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/toml_file.cpython-310.pyc +0 -0
  41. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/_types.py +65 -0
  42. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/parser.py +1141 -0
  43. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/py.typed +0 -0
  44. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/toml_char.py +52 -0
  45. evalkit_cambrian/lib/python3.10/site-packages/tomlkit/toml_file.py +58 -0
  46. infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_adaptive_avg_pool3d_backward_cpu_dispatch.h +23 -0
  47. infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_add_relu_ops.h +83 -0
  48. infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_addmm_activation.h +39 -0
  49. infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_coalesced_native.h +23 -0
  50. infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cufft_get_plan_cache_size_native.h +21 -0
evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__init__.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+
2
+ import sys
3
+ from cpuinfo.cpuinfo import *
4
+
5
+
evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__main__.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+
2
+ import cpuinfo
3
+
4
+ cpuinfo.main()
5
+
evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (216 Bytes). View file
 
evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__pycache__/__main__.cpython-310.pyc ADDED
Binary file (204 Bytes). View file
 
evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/__pycache__/cpuinfo.cpython-310.pyc ADDED
Binary file (58.8 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/cpuinfo/cpuinfo.py ADDED
@@ -0,0 +1,2827 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # -*- coding: UTF-8 -*-
3
+
4
+ # Copyright (c) 2014-2022 Matthew Brennan Jones <matthew.brennan.jones@gmail.com>
5
+ # Py-cpuinfo gets CPU info with pure Python
6
+ # It uses the MIT License
7
+ # It is hosted at: https://github.com/workhorsy/py-cpuinfo
8
+ #
9
+ # Permission is hereby granted, free of charge, to any person obtaining
10
+ # a copy of this software and associated documentation files (the
11
+ # "Software"), to deal in the Software without restriction, including
12
+ # without limitation the rights to use, copy, modify, merge, publish,
13
+ # distribute, sublicense, and/or sell copies of the Software, and to
14
+ # permit persons to whom the Software is furnished to do so, subject to
15
+ # the following conditions:
16
+ #
17
+ # The above copyright notice and this permission notice shall be included
18
+ # in all copies or substantial portions of the Software.
19
+ #
20
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
21
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
22
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
23
+ # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
24
+ # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
25
+ # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
26
+ # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27
+
28
+ CPUINFO_VERSION = (9, 0, 0)
29
+ CPUINFO_VERSION_STRING = '.'.join([str(n) for n in CPUINFO_VERSION])
30
+
31
+ import os, sys
32
+ import platform
33
+ import multiprocessing
34
+ import ctypes
35
+
36
+
37
+ CAN_CALL_CPUID_IN_SUBPROCESS = True
38
+
39
+ g_trace = None
40
+
41
+
42
+ class Trace(object):
43
+ def __init__(self, is_active, is_stored_in_string):
44
+ self._is_active = is_active
45
+ if not self._is_active:
46
+ return
47
+
48
+ from datetime import datetime
49
+ from io import StringIO
50
+
51
+ if is_stored_in_string:
52
+ self._output = StringIO()
53
+ else:
54
+ date = datetime.now().strftime("%Y-%m-%d_%H-%M-%S-%f")
55
+ self._output = open('cpuinfo_trace_{0}.trace'.format(date), 'w')
56
+
57
+ self._stdout = StringIO()
58
+ self._stderr = StringIO()
59
+ self._err = None
60
+
61
+ def header(self, msg):
62
+ if not self._is_active: return
63
+
64
+ from inspect import stack
65
+ frame = stack()[1]
66
+ file = frame[1]
67
+ line = frame[2]
68
+ self._output.write("{0} ({1} {2})\n".format(msg, file, line))
69
+ self._output.flush()
70
+
71
+ def success(self):
72
+ if not self._is_active: return
73
+
74
+ from inspect import stack
75
+ frame = stack()[1]
76
+ file = frame[1]
77
+ line = frame[2]
78
+
79
+ self._output.write("Success ... ({0} {1})\n\n".format(file, line))
80
+ self._output.flush()
81
+
82
+ def fail(self, msg):
83
+ if not self._is_active: return
84
+
85
+ from inspect import stack
86
+ frame = stack()[1]
87
+ file = frame[1]
88
+ line = frame[2]
89
+
90
+ if isinstance(msg, str):
91
+ msg = ''.join(['\t' + line for line in msg.split('\n')]) + '\n'
92
+
93
+ self._output.write(msg)
94
+ self._output.write("Failed ... ({0} {1})\n\n".format(file, line))
95
+ self._output.flush()
96
+ elif isinstance(msg, Exception):
97
+ from traceback import format_exc
98
+ err_string = format_exc()
99
+ self._output.write("\tFailed ... ({0} {1})\n".format(file, line))
100
+ self._output.write(''.join(['\t\t{0}\n'.format(n) for n in err_string.split('\n')]) + '\n')
101
+ self._output.flush()
102
+
103
+ def command_header(self, msg):
104
+ if not self._is_active: return
105
+
106
+ from inspect import stack
107
+ frame = stack()[3]
108
+ file = frame[1]
109
+ line = frame[2]
110
+ self._output.write("\t{0} ({1} {2})\n".format(msg, file, line))
111
+ self._output.flush()
112
+
113
+ def command_output(self, msg, output):
114
+ if not self._is_active: return
115
+
116
+ self._output.write("\t\t{0}\n".format(msg))
117
+ self._output.write(''.join(['\t\t\t{0}\n'.format(n) for n in output.split('\n')]) + '\n')
118
+ self._output.flush()
119
+
120
+ def keys(self, keys, info, new_info):
121
+ if not self._is_active: return
122
+
123
+ from inspect import stack
124
+ frame = stack()[2]
125
+ file = frame[1]
126
+ line = frame[2]
127
+
128
+ # List updated keys
129
+ self._output.write("\tChanged keys ({0} {1})\n".format(file, line))
130
+ changed_keys = [key for key in keys if key in info and key in new_info and info[key] != new_info[key]]
131
+ if changed_keys:
132
+ for key in changed_keys:
133
+ self._output.write('\t\t{0}: {1} to {2}\n'.format(key, info[key], new_info[key]))
134
+ else:
135
+ self._output.write('\t\tNone\n')
136
+
137
+ # List new keys
138
+ self._output.write("\tNew keys ({0} {1})\n".format(file, line))
139
+ new_keys = [key for key in keys if key in new_info and key not in info]
140
+ if new_keys:
141
+ for key in new_keys:
142
+ self._output.write('\t\t{0}: {1}\n'.format(key, new_info[key]))
143
+ else:
144
+ self._output.write('\t\tNone\n')
145
+
146
+ self._output.write('\n')
147
+ self._output.flush()
148
+
149
+ def write(self, msg):
150
+ if not self._is_active: return
151
+
152
+ self._output.write(msg + '\n')
153
+ self._output.flush()
154
+
155
+ def to_dict(self, info, is_fail):
156
+ return {
157
+ 'output' : self._output.getvalue(),
158
+ 'stdout' : self._stdout.getvalue(),
159
+ 'stderr' : self._stderr.getvalue(),
160
+ 'info' : info,
161
+ 'err' : self._err,
162
+ 'is_fail' : is_fail
163
+ }
164
+
165
+ class DataSource(object):
166
+ bits = platform.architecture()[0]
167
+ cpu_count = multiprocessing.cpu_count()
168
+ is_windows = platform.system().lower() == 'windows'
169
+ arch_string_raw = platform.machine()
170
+ uname_string_raw = platform.uname()[5]
171
+ can_cpuid = True
172
+
173
+ @staticmethod
174
+ def has_proc_cpuinfo():
175
+ return os.path.exists('/proc/cpuinfo')
176
+
177
+ @staticmethod
178
+ def has_dmesg():
179
+ return len(_program_paths('dmesg')) > 0
180
+
181
+ @staticmethod
182
+ def has_var_run_dmesg_boot():
183
+ uname = platform.system().strip().strip('"').strip("'").strip().lower()
184
+ return 'linux' in uname and os.path.exists('/var/run/dmesg.boot')
185
+
186
+ @staticmethod
187
+ def has_cpufreq_info():
188
+ return len(_program_paths('cpufreq-info')) > 0
189
+
190
+ @staticmethod
191
+ def has_sestatus():
192
+ return len(_program_paths('sestatus')) > 0
193
+
194
+ @staticmethod
195
+ def has_sysctl():
196
+ return len(_program_paths('sysctl')) > 0
197
+
198
+ @staticmethod
199
+ def has_isainfo():
200
+ return len(_program_paths('isainfo')) > 0
201
+
202
+ @staticmethod
203
+ def has_kstat():
204
+ return len(_program_paths('kstat')) > 0
205
+
206
+ @staticmethod
207
+ def has_sysinfo():
208
+ uname = platform.system().strip().strip('"').strip("'").strip().lower()
209
+ is_beos = 'beos' in uname or 'haiku' in uname
210
+ return is_beos and len(_program_paths('sysinfo')) > 0
211
+
212
+ @staticmethod
213
+ def has_lscpu():
214
+ return len(_program_paths('lscpu')) > 0
215
+
216
+ @staticmethod
217
+ def has_ibm_pa_features():
218
+ return len(_program_paths('lsprop')) > 0
219
+
220
+ @staticmethod
221
+ def has_wmic():
222
+ returncode, output = _run_and_get_stdout(['wmic', 'os', 'get', 'Version'])
223
+ return returncode == 0 and len(output) > 0
224
+
225
+ @staticmethod
226
+ def cat_proc_cpuinfo():
227
+ return _run_and_get_stdout(['cat', '/proc/cpuinfo'])
228
+
229
+ @staticmethod
230
+ def cpufreq_info():
231
+ return _run_and_get_stdout(['cpufreq-info'])
232
+
233
+ @staticmethod
234
+ def sestatus_b():
235
+ return _run_and_get_stdout(['sestatus', '-b'])
236
+
237
+ @staticmethod
238
+ def dmesg_a():
239
+ return _run_and_get_stdout(['dmesg', '-a'])
240
+
241
+ @staticmethod
242
+ def cat_var_run_dmesg_boot():
243
+ return _run_and_get_stdout(['cat', '/var/run/dmesg.boot'])
244
+
245
+ @staticmethod
246
+ def sysctl_machdep_cpu_hw_cpufrequency():
247
+ return _run_and_get_stdout(['sysctl', 'machdep.cpu', 'hw.cpufrequency'])
248
+
249
+ @staticmethod
250
+ def isainfo_vb():
251
+ return _run_and_get_stdout(['isainfo', '-vb'])
252
+
253
+ @staticmethod
254
+ def kstat_m_cpu_info():
255
+ return _run_and_get_stdout(['kstat', '-m', 'cpu_info'])
256
+
257
+ @staticmethod
258
+ def sysinfo_cpu():
259
+ return _run_and_get_stdout(['sysinfo', '-cpu'])
260
+
261
+ @staticmethod
262
+ def lscpu():
263
+ return _run_and_get_stdout(['lscpu'])
264
+
265
+ @staticmethod
266
+ def ibm_pa_features():
267
+ import glob
268
+
269
+ ibm_features = glob.glob('/proc/device-tree/cpus/*/ibm,pa-features')
270
+ if ibm_features:
271
+ return _run_and_get_stdout(['lsprop', ibm_features[0]])
272
+
273
+ @staticmethod
274
+ def wmic_cpu():
275
+ return _run_and_get_stdout(['wmic', 'cpu', 'get', 'Name,CurrentClockSpeed,L2CacheSize,L3CacheSize,Description,Caption,Manufacturer', '/format:list'])
276
+
277
+ @staticmethod
278
+ def winreg_processor_brand():
279
+ processor_brand = _read_windows_registry_key(r"Hardware\Description\System\CentralProcessor\0", "ProcessorNameString")
280
+ return processor_brand.strip()
281
+
282
+ @staticmethod
283
+ def winreg_vendor_id_raw():
284
+ vendor_id_raw = _read_windows_registry_key(r"Hardware\Description\System\CentralProcessor\0", "VendorIdentifier")
285
+ return vendor_id_raw
286
+
287
+ @staticmethod
288
+ def winreg_arch_string_raw():
289
+ arch_string_raw = _read_windows_registry_key(r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", "PROCESSOR_ARCHITECTURE")
290
+ return arch_string_raw
291
+
292
+ @staticmethod
293
+ def winreg_hz_actual():
294
+ hz_actual = _read_windows_registry_key(r"Hardware\Description\System\CentralProcessor\0", "~Mhz")
295
+ hz_actual = _to_decimal_string(hz_actual)
296
+ return hz_actual
297
+
298
+ @staticmethod
299
+ def winreg_feature_bits():
300
+ feature_bits = _read_windows_registry_key(r"Hardware\Description\System\CentralProcessor\0", "FeatureSet")
301
+ return feature_bits
302
+
303
+
304
+ def _program_paths(program_name):
305
+ paths = []
306
+ exts = filter(None, os.environ.get('PATHEXT', '').split(os.pathsep))
307
+ for p in os.environ['PATH'].split(os.pathsep):
308
+ p = os.path.join(p, program_name)
309
+ if os.access(p, os.X_OK):
310
+ paths.append(p)
311
+ for e in exts:
312
+ pext = p + e
313
+ if os.access(pext, os.X_OK):
314
+ paths.append(pext)
315
+ return paths
316
+
317
+ def _run_and_get_stdout(command, pipe_command=None):
318
+ from subprocess import Popen, PIPE
319
+
320
+ g_trace.command_header('Running command "' + ' '.join(command) + '" ...')
321
+
322
+ # Run the command normally
323
+ if not pipe_command:
324
+ p1 = Popen(command, stdout=PIPE, stderr=PIPE, stdin=PIPE)
325
+ # Run the command and pipe it into another command
326
+ else:
327
+ p2 = Popen(command, stdout=PIPE, stderr=PIPE, stdin=PIPE)
328
+ p1 = Popen(pipe_command, stdin=p2.stdout, stdout=PIPE, stderr=PIPE)
329
+ p2.stdout.close()
330
+
331
+ # Get the stdout and stderr
332
+ stdout_output, stderr_output = p1.communicate()
333
+ stdout_output = stdout_output.decode(encoding='UTF-8')
334
+ stderr_output = stderr_output.decode(encoding='UTF-8')
335
+
336
+ # Send the result to the logger
337
+ g_trace.command_output('return code:', str(p1.returncode))
338
+ g_trace.command_output('stdout:', stdout_output)
339
+
340
+ # Return the return code and stdout
341
+ return p1.returncode, stdout_output
342
+
343
+ def _read_windows_registry_key(key_name, field_name):
344
+ g_trace.command_header('Reading Registry key "{0}" field "{1}" ...'.format(key_name, field_name))
345
+
346
+ try:
347
+ import _winreg as winreg
348
+ except ImportError as err:
349
+ try:
350
+ import winreg
351
+ except ImportError as err:
352
+ pass
353
+
354
+ key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_name)
355
+ value = winreg.QueryValueEx(key, field_name)[0]
356
+ winreg.CloseKey(key)
357
+ g_trace.command_output('value:', str(value))
358
+ return value
359
+
360
+ # Make sure we are running on a supported system
361
+ def _check_arch():
362
+ arch, bits = _parse_arch(DataSource.arch_string_raw)
363
+ if not arch in ['X86_32', 'X86_64', 'ARM_7', 'ARM_8',
364
+ 'PPC_64', 'S390X', 'MIPS_32', 'MIPS_64',
365
+ "RISCV_32", "RISCV_64"]:
366
+ raise Exception("py-cpuinfo currently only works on X86 "
367
+ "and some ARM/PPC/S390X/MIPS/RISCV CPUs.")
368
+
369
+ def _obj_to_b64(thing):
370
+ import pickle
371
+ import base64
372
+
373
+ a = thing
374
+ b = pickle.dumps(a)
375
+ c = base64.b64encode(b)
376
+ d = c.decode('utf8')
377
+ return d
378
+
379
+ def _b64_to_obj(thing):
380
+ import pickle
381
+ import base64
382
+
383
+ try:
384
+ a = base64.b64decode(thing)
385
+ b = pickle.loads(a)
386
+ return b
387
+ except Exception:
388
+ return {}
389
+
390
+ def _utf_to_str(input):
391
+ if isinstance(input, list):
392
+ return [_utf_to_str(element) for element in input]
393
+ elif isinstance(input, dict):
394
+ return {_utf_to_str(key): _utf_to_str(value)
395
+ for key, value in input.items()}
396
+ else:
397
+ return input
398
+
399
+ def _copy_new_fields(info, new_info):
400
+ keys = [
401
+ 'vendor_id_raw', 'hardware_raw', 'brand_raw', 'hz_advertised_friendly', 'hz_actual_friendly',
402
+ 'hz_advertised', 'hz_actual', 'arch', 'bits', 'count',
403
+ 'arch_string_raw', 'uname_string_raw',
404
+ 'l2_cache_size', 'l2_cache_line_size', 'l2_cache_associativity',
405
+ 'stepping', 'model', 'family',
406
+ 'processor_type', 'flags',
407
+ 'l3_cache_size', 'l1_data_cache_size', 'l1_instruction_cache_size'
408
+ ]
409
+
410
+ g_trace.keys(keys, info, new_info)
411
+
412
+ # Update the keys with new values
413
+ for key in keys:
414
+ if new_info.get(key, None) and not info.get(key, None):
415
+ info[key] = new_info[key]
416
+ elif key == 'flags' and new_info.get('flags'):
417
+ for f in new_info['flags']:
418
+ if f not in info['flags']: info['flags'].append(f)
419
+ info['flags'].sort()
420
+
421
+ def _get_field_actual(cant_be_number, raw_string, field_names):
422
+ for line in raw_string.splitlines():
423
+ for field_name in field_names:
424
+ field_name = field_name.lower()
425
+ if ':' in line:
426
+ left, right = line.split(':', 1)
427
+ left = left.strip().lower()
428
+ right = right.strip()
429
+ if left == field_name and len(right) > 0:
430
+ if cant_be_number:
431
+ if not right.isdigit():
432
+ return right
433
+ else:
434
+ return right
435
+
436
+ return None
437
+
438
+ def _get_field(cant_be_number, raw_string, convert_to, default_value, *field_names):
439
+ retval = _get_field_actual(cant_be_number, raw_string, field_names)
440
+
441
+ # Convert the return value
442
+ if retval and convert_to:
443
+ try:
444
+ retval = convert_to(retval)
445
+ except Exception:
446
+ retval = default_value
447
+
448
+ # Return the default if there is no return value
449
+ if retval is None:
450
+ retval = default_value
451
+
452
+ return retval
453
+
454
+ def _to_decimal_string(ticks):
455
+ try:
456
+ # Convert to string
457
+ ticks = '{0}'.format(ticks)
458
+ # Sometimes ',' is used as a decimal separator
459
+ ticks = ticks.replace(',', '.')
460
+
461
+ # Strip off non numbers and decimal places
462
+ ticks = "".join(n for n in ticks if n.isdigit() or n=='.').strip()
463
+ if ticks == '':
464
+ ticks = '0'
465
+
466
+ # Add decimal if missing
467
+ if '.' not in ticks:
468
+ ticks = '{0}.0'.format(ticks)
469
+
470
+ # Remove trailing zeros
471
+ ticks = ticks.rstrip('0')
472
+
473
+ # Add one trailing zero for empty right side
474
+ if ticks.endswith('.'):
475
+ ticks = '{0}0'.format(ticks)
476
+
477
+ # Make sure the number can be converted to a float
478
+ ticks = float(ticks)
479
+ ticks = '{0}'.format(ticks)
480
+ return ticks
481
+ except Exception:
482
+ return '0.0'
483
+
484
+ def _hz_short_to_full(ticks, scale):
485
+ try:
486
+ # Make sure the number can be converted to a float
487
+ ticks = float(ticks)
488
+ ticks = '{0}'.format(ticks)
489
+
490
+ # Scale the numbers
491
+ hz = ticks.lstrip('0')
492
+ old_index = hz.index('.')
493
+ hz = hz.replace('.', '')
494
+ hz = hz.ljust(scale + old_index+1, '0')
495
+ new_index = old_index + scale
496
+ hz = '{0}.{1}'.format(hz[:new_index], hz[new_index:])
497
+ left, right = hz.split('.')
498
+ left, right = int(left), int(right)
499
+ return (left, right)
500
+ except Exception:
501
+ return (0, 0)
502
+
503
+ def _hz_friendly_to_full(hz_string):
504
+ try:
505
+ hz_string = hz_string.strip().lower()
506
+ hz, scale = (None, None)
507
+
508
+ if hz_string.endswith('ghz'):
509
+ scale = 9
510
+ elif hz_string.endswith('mhz'):
511
+ scale = 6
512
+ elif hz_string.endswith('hz'):
513
+ scale = 0
514
+
515
+ hz = "".join(n for n in hz_string if n.isdigit() or n=='.').strip()
516
+ if not '.' in hz:
517
+ hz += '.0'
518
+
519
+ hz, scale = _hz_short_to_full(hz, scale)
520
+
521
+ return (hz, scale)
522
+ except Exception:
523
+ return (0, 0)
524
+
525
+ def _hz_short_to_friendly(ticks, scale):
526
+ try:
527
+ # Get the raw Hz as a string
528
+ left, right = _hz_short_to_full(ticks, scale)
529
+ result = '{0}.{1}'.format(left, right)
530
+
531
+ # Get the location of the dot, and remove said dot
532
+ dot_index = result.index('.')
533
+ result = result.replace('.', '')
534
+
535
+ # Get the Hz symbol and scale
536
+ symbol = "Hz"
537
+ scale = 0
538
+ if dot_index > 9:
539
+ symbol = "GHz"
540
+ scale = 9
541
+ elif dot_index > 6:
542
+ symbol = "MHz"
543
+ scale = 6
544
+ elif dot_index > 3:
545
+ symbol = "KHz"
546
+ scale = 3
547
+
548
+ # Get the Hz with the dot at the new scaled point
549
+ result = '{0}.{1}'.format(result[:-scale-1], result[-scale-1:])
550
+
551
+ # Format the ticks to have 4 numbers after the decimal
552
+ # and remove any superfluous zeroes.
553
+ result = '{0:.4f} {1}'.format(float(result), symbol)
554
+ result = result.rstrip('0')
555
+ return result
556
+ except Exception:
557
+ return '0.0000 Hz'
558
+
559
+ def _to_friendly_bytes(input):
560
+ import re
561
+
562
+ if not input:
563
+ return input
564
+ input = "{0}".format(input)
565
+
566
+ formats = {
567
+ r"^[0-9]+B$" : 'B',
568
+ r"^[0-9]+K$" : 'KB',
569
+ r"^[0-9]+M$" : 'MB',
570
+ r"^[0-9]+G$" : 'GB'
571
+ }
572
+
573
+ for pattern, friendly_size in formats.items():
574
+ if re.match(pattern, input):
575
+ return "{0} {1}".format(input[ : -1].strip(), friendly_size)
576
+
577
+ return input
578
+
579
+ def _friendly_bytes_to_int(friendly_bytes):
580
+ input = friendly_bytes.lower()
581
+
582
+ formats = [
583
+ {'gib' : 1024 * 1024 * 1024},
584
+ {'mib' : 1024 * 1024},
585
+ {'kib' : 1024},
586
+
587
+ {'gb' : 1024 * 1024 * 1024},
588
+ {'mb' : 1024 * 1024},
589
+ {'kb' : 1024},
590
+
591
+ {'g' : 1024 * 1024 * 1024},
592
+ {'m' : 1024 * 1024},
593
+ {'k' : 1024},
594
+ {'b' : 1},
595
+ ]
596
+
597
+ try:
598
+ for entry in formats:
599
+ pattern = list(entry.keys())[0]
600
+ multiplier = list(entry.values())[0]
601
+ if input.endswith(pattern):
602
+ return int(input.split(pattern)[0].strip()) * multiplier
603
+
604
+ except Exception as err:
605
+ pass
606
+
607
+ return friendly_bytes
608
+
609
+ def _parse_cpu_brand_string(cpu_string):
610
+ # Just return 0 if the processor brand does not have the Hz
611
+ if not 'hz' in cpu_string.lower():
612
+ return ('0.0', 0)
613
+
614
+ hz = cpu_string.lower()
615
+ scale = 0
616
+
617
+ if hz.endswith('mhz'):
618
+ scale = 6
619
+ elif hz.endswith('ghz'):
620
+ scale = 9
621
+ if '@' in hz:
622
+ hz = hz.split('@')[1]
623
+ else:
624
+ hz = hz.rsplit(None, 1)[1]
625
+
626
+ hz = hz.rstrip('mhz').rstrip('ghz').strip()
627
+ hz = _to_decimal_string(hz)
628
+
629
+ return (hz, scale)
630
+
631
def _parse_cpu_brand_string_dx(cpu_string):
	'''
	Parses a BSD dmesg style CPU line, including any bracketed "(...)"
	sub fields such as origin/stepping/model/family.
	Returns the tuple (hz, scale, brand, vendor_id, stepping, model, family)
	where unfound fields are None; hz is a decimal string and scale is the
	power of ten it is expressed in (0 when no Hz figure was found).
	'''
	import re

	# Find all the strings inside brackets ()
	starts = [m.start() for m in re.finditer(r"\(", cpu_string)]
	ends = [m.start() for m in re.finditer(r"\)", cpu_string)]
	insides = {k: v for k, v in zip(starts, ends)}
	insides = [cpu_string[start+1 : end] for start, end in insides.items()]

	# Find all the fields
	vendor_id, stepping, model, family = (None, None, None, None)
	for inside in insides:
		for pair in inside.split(','):
			pair = [n.strip() for n in pair.split(':')]
			if len(pair) > 1:
				name, value = pair[0], pair[1]
				if name == 'origin':
					vendor_id = value.strip('"')
				elif name == 'stepping':
					# FIX: int(value, 16) accepts an optional "0x" prefix.
					# The previous value.lstrip('0x') stripped ALL leading
					# '0'/'x' characters, so e.g. "0x0" became "" and
					# int("", 16) raised ValueError.
					stepping = int(value, 16)
				elif name == 'model':
					model = int(value, 16)
				elif name in ['fam', 'family']:
					family = int(value, 16)

	# Find the Processor Brand:
	# strip off the extra bracketed strings at the end of the line
	brand = cpu_string.strip()
	is_working = True
	while is_working:
		is_working = False
		for inside in insides:
			full = "({0})".format(inside)
			if brand.endswith(full):
				brand = brand[ : -len(full)].strip()
				is_working = True

	# Find the Hz in the brand string
	hz_brand, scale = _parse_cpu_brand_string(brand)

	# Fall back to any Hz figure inside brackets () after the brand string
	if hz_brand == '0.0':
		for inside in insides:
			hz = inside
			for entry in ['GHz', 'MHz', 'Hz']:
				if entry in hz:
					# Re-wrap as a "CPU @ N.NGHz" style string so the
					# regular brand-string parser can handle it
					hz = "CPU @ " + hz[ : hz.find(entry) + len(entry)]
					hz_brand, scale = _parse_cpu_brand_string(hz)
					break

	return (hz_brand, scale, brand, vendor_id, stepping, model, family)
682
+
683
def _parse_dmesg_output(output):
	'''
	Parses BSD style dmesg output for CPU information.
	Returns a dict that may contain: vendor_id_raw, brand_raw, stepping,
	model, family, flags, and the hz_advertised/hz_actual fields (both
	full and friendly forms). Empty values are filtered out.
	Returns {} when nothing usable is found or when parsing fails.
	'''
	try:
		# Get all the dmesg lines that might contain a CPU string
		lines = output.split(' CPU0:')[1:] + \
				output.split(' CPU1:')[1:] + \
				output.split(' CPU:')[1:] + \
				output.split('\nCPU0:')[1:] + \
				output.split('\nCPU1:')[1:] + \
				output.split('\nCPU:')[1:]
		lines = [l.split('\n')[0].strip() for l in lines]

		# Convert the lines to CPU strings
		cpu_strings = [_parse_cpu_brand_string_dx(l) for l in lines]

		# Find the CPU string that has the most fields filled in
		best_string = None
		highest_count = 0
		for cpu_string in cpu_strings:
			count = sum([n is not None for n in cpu_string])
			if count > highest_count:
				highest_count = count
				best_string = cpu_string

		# If no CPU string was found, return {}
		if not best_string:
			return {}

		hz_actual, scale, processor_brand, vendor_id, stepping, model, family = best_string

		# Origin line, e.g. ' Origin="GenuineIntel" Family=0x6 Model=0x1e Stepping=5'
		if ' Origin=' in output:
			fields = output[output.find(' Origin=') : ].split('\n')[0]
			fields = fields.strip().split()
			fields = [n.strip().split('=') for n in fields]
			fields = [{n[0].strip().lower() : n[1].strip()} for n in fields]

			for field in fields:
				name = list(field.keys())[0]
				value = list(field.values())[0]

				if name == 'origin':
					vendor_id = value.strip('"')
				elif name == 'stepping':
					# FIX: int(value, 16) accepts an optional "0x" prefix.
					# The previous value.lstrip('0x') stripped ALL leading
					# '0'/'x' characters, so e.g. "0x0" became "" and
					# int("", 16) raised ValueError.
					stepping = int(value, 16)
				elif name == 'model':
					model = int(value, 16)
				elif name in ['fam', 'family']:
					family = int(value, 16)

		# Feature flags come from lines like ' Features=0x...<FPU,VME,...>'
		flag_lines = []
		for category in [' Features=', ' Features2=', ' AMD Features=', ' AMD Features2=']:
			if category in output:
				flag_lines.append(output.split(category)[1].split('\n')[0])

		flags = []
		for line in flag_lines:
			line = line.split('<')[1].split('>')[0].lower()
			for flag in line.split(','):
				flags.append(flag)
		flags.sort()

		# Convert from GHz/MHz string to Hz
		hz_advertised, scale = _parse_cpu_brand_string(processor_brand)

		# If advertised hz not found, use the actual hz (a MHz figure)
		if hz_advertised == '0.0':
			scale = 6
			hz_advertised = _to_decimal_string(hz_actual)

		info = {
			'vendor_id_raw' : vendor_id,
			'brand_raw' : processor_brand,

			'stepping' : stepping,
			'model' : model,
			'family' : family,
			'flags' : flags
		}

		# Only report Hz fields when a real (non-zero) value was found.
		# (The original had two identical conditionals; merged into one.)
		if hz_advertised and hz_advertised != '0.0':
			info['hz_advertised_friendly'] = _hz_short_to_friendly(hz_advertised, scale)
			info['hz_actual_friendly'] = _hz_short_to_friendly(hz_actual, scale)
			info['hz_advertised'] = _hz_short_to_full(hz_advertised, scale)
			info['hz_actual'] = _hz_short_to_full(hz_actual, scale)

		return {k: v for k, v in info.items() if v}
	except Exception as err:
		g_trace.fail(err)
		#raise

	return {}
777
+
778
+ def _parse_arch(arch_string_raw):
779
+ import re
780
+
781
+ arch, bits = None, None
782
+ arch_string_raw = arch_string_raw.lower()
783
+
784
+ # X86
785
+ if re.match(r'^i\d86$|^x86$|^x86_32$|^i86pc$|^ia32$|^ia-32$|^bepc$', arch_string_raw):
786
+ arch = 'X86_32'
787
+ bits = 32
788
+ elif re.match(r'^x64$|^x86_64$|^x86_64t$|^i686-64$|^amd64$|^ia64$|^ia-64$', arch_string_raw):
789
+ arch = 'X86_64'
790
+ bits = 64
791
+ # ARM
792
+ elif re.match(r'^armv8-a|aarch64|arm64$', arch_string_raw):
793
+ arch = 'ARM_8'
794
+ bits = 64
795
+ elif re.match(r'^armv7$|^armv7[a-z]$|^armv7-[a-z]$|^armv6[a-z]$', arch_string_raw):
796
+ arch = 'ARM_7'
797
+ bits = 32
798
+ elif re.match(r'^armv8$|^armv8[a-z]$|^armv8-[a-z]$', arch_string_raw):
799
+ arch = 'ARM_8'
800
+ bits = 32
801
+ # PPC
802
+ elif re.match(r'^ppc32$|^prep$|^pmac$|^powermac$', arch_string_raw):
803
+ arch = 'PPC_32'
804
+ bits = 32
805
+ elif re.match(r'^powerpc$|^ppc64$|^ppc64le$', arch_string_raw):
806
+ arch = 'PPC_64'
807
+ bits = 64
808
+ # SPARC
809
+ elif re.match(r'^sparc32$|^sparc$', arch_string_raw):
810
+ arch = 'SPARC_32'
811
+ bits = 32
812
+ elif re.match(r'^sparc64$|^sun4u$|^sun4v$', arch_string_raw):
813
+ arch = 'SPARC_64'
814
+ bits = 64
815
+ # S390X
816
+ elif re.match(r'^s390x$', arch_string_raw):
817
+ arch = 'S390X'
818
+ bits = 64
819
+ elif arch_string_raw == 'mips':
820
+ arch = 'MIPS_32'
821
+ bits = 32
822
+ elif arch_string_raw == 'mips64':
823
+ arch = 'MIPS_64'
824
+ bits = 64
825
+ # RISCV
826
+ elif re.match(r'^riscv$|^riscv32$|^riscv32be$', arch_string_raw):
827
+ arch = 'RISCV_32'
828
+ bits = 32
829
+ elif re.match(r'^riscv64$|^riscv64be$', arch_string_raw):
830
+ arch = 'RISCV_64'
831
+ bits = 64
832
+
833
+ return (arch, bits)
834
+
835
+ def _is_bit_set(reg, bit):
836
+ mask = 1 << bit
837
+ is_set = reg & mask > 0
838
+ return is_set
839
+
840
+
841
def _is_selinux_enforcing(trace):
	'''
	Returns True when SELinux looks like it would block executing
	self-generated machine code (which would crash the CPUID approach).
	Based on "sestatus -b" output: True when the reported mode is
	"enforcing", or when either heap or memory execution is disallowed.
	Returns False when sestatus is missing, fails to run, or reports a
	non-enforcing mode. Progress is reported through the trace object.
	'''
	# Just return if the SE Linux Status Tool is not installed
	if not DataSource.has_sestatus():
		trace.fail('Failed to find sestatus.')
		return False

	# Run the sestatus, and just return if it failed to run
	returncode, output = DataSource.sestatus_b()
	if returncode != 0:
		trace.fail('Failed to run sestatus. Skipping ...')
		return False

	# Figure out if explicitly in enforcing mode; this line is
	# authoritative, so return as soon as it is found
	for line in output.splitlines():
		line = line.strip().lower()
		if line.startswith("current mode:"):
			if line.endswith("enforcing"):
				return True
			else:
				return False

	# No explicit mode line: figure out if we can execute heap and memory
	can_selinux_exec_heap = False
	can_selinux_exec_memory = False
	for line in output.splitlines():
		line = line.strip().lower()
		if line.startswith("allow_execheap") and line.endswith("on"):
			can_selinux_exec_heap = True
		elif line.startswith("allow_execmem") and line.endswith("on"):
			can_selinux_exec_memory = True

	trace.command_output('can_selinux_exec_heap:', can_selinux_exec_heap)
	trace.command_output('can_selinux_exec_memory:', can_selinux_exec_memory)

	# Treat as enforcing if either exec-heap or exec-memory is disallowed
	return (not can_selinux_exec_heap or not can_selinux_exec_memory)
876
+
877
+ def _filter_dict_keys_with_empty_values(info, acceptable_values = {}):
878
+ filtered_info = {}
879
+ for key in info:
880
+ value = info[key]
881
+
882
+ # Keep if value is acceptable
883
+ if key in acceptable_values:
884
+ if acceptable_values[key] == value:
885
+ filtered_info[key] = value
886
+ continue
887
+
888
+ # Filter out None, 0, "", (), {}, []
889
+ if not value:
890
+ continue
891
+
892
+ # Filter out (0, 0)
893
+ if value == (0, 0):
894
+ continue
895
+
896
+ # Filter out -1
897
+ if value == -1:
898
+ continue
899
+
900
+ # Filter out strings that start with "0.0"
901
+ if type(value) == str and value.startswith('0.0'):
902
+ continue
903
+
904
+ filtered_info[key] = value
905
+
906
+ return filtered_info
907
+
908
class ASM(object):
	'''
	Compiles a list of raw machine-code byte strings into an executable
	memory segment and exposes it as a callable ctypes function.
	Used to run CPUID/RDTSC instructions directly from Python.
	Lifecycle: construct, compile(), run() (or call .func), then free().
	'''

	def __init__(self, restype=None, argtypes=(), machine_code=[]):
		# ctypes result type and argument types of the generated function
		self.restype = restype
		self.argtypes = argtypes
		# List of byte strings that get concatenated into the code segment
		self.machine_code = machine_code
		self.prochandle = None  # Windows process handle (used to flush icache)
		self.mm = None          # POSIX mmap segment holding the code
		self.func = None        # ctypes function pointer into the segment
		self.address = None     # address of the executable memory
		self.size = 0

	def compile(self):
		'''
		Copies the machine code into newly allocated executable memory and
		binds self.func to it. Raises on any Windows allocation/protection
		failure. Must be called before run().
		'''
		machine_code = bytes.join(b'', self.machine_code)
		self.size = ctypes.c_size_t(len(machine_code))

		if DataSource.is_windows:
			# Allocate a memory segment the size of the machine code, and make it executable
			size = len(machine_code)
			# Alloc at least 1 page to ensure we own all pages that we want to change protection on
			if size < 0x1000: size = 0x1000
			MEM_COMMIT = ctypes.c_ulong(0x1000)
			PAGE_READWRITE = ctypes.c_ulong(0x4)
			pfnVirtualAlloc = ctypes.windll.kernel32.VirtualAlloc
			pfnVirtualAlloc.restype = ctypes.c_void_p
			self.address = pfnVirtualAlloc(None, ctypes.c_size_t(size), MEM_COMMIT, PAGE_READWRITE)
			if not self.address:
				raise Exception("Failed to VirtualAlloc")

			# Copy the machine code into the memory segment
			memmove = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_size_t)(ctypes._memmove_addr)
			if memmove(self.address, machine_code, size) < 0:
				raise Exception("Failed to memmove")

			# Enable execute permissions (write is dropped: W^X)
			PAGE_EXECUTE = ctypes.c_ulong(0x10)
			old_protect = ctypes.c_ulong(0)
			pfnVirtualProtect = ctypes.windll.kernel32.VirtualProtect
			res = pfnVirtualProtect(ctypes.c_void_p(self.address), ctypes.c_size_t(size), PAGE_EXECUTE, ctypes.byref(old_protect))
			if not res:
				raise Exception("Failed VirtualProtect")

			# Flush Instruction Cache
			# First, get process Handle
			if not self.prochandle:
				pfnGetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess
				pfnGetCurrentProcess.restype = ctypes.c_void_p
				self.prochandle = ctypes.c_void_p(pfnGetCurrentProcess())
			# Actually flush cache
			res = ctypes.windll.kernel32.FlushInstructionCache(self.prochandle, ctypes.c_void_p(self.address), ctypes.c_size_t(size))
			if not res:
				raise Exception("Failed FlushInstructionCache")
		else:
			from mmap import mmap, MAP_PRIVATE, MAP_ANONYMOUS, PROT_WRITE, PROT_READ, PROT_EXEC

			# Allocate a private and executable memory segment the size of the machine code
			machine_code = bytes.join(b'', self.machine_code)
			self.size = len(machine_code)
			self.mm = mmap(-1, self.size, flags=MAP_PRIVATE | MAP_ANONYMOUS, prot=PROT_WRITE | PROT_READ | PROT_EXEC)

			# Copy the machine code into the memory segment
			self.mm.write(machine_code)
			# from_buffer gives the address of the mmap's backing memory
			self.address = ctypes.addressof(ctypes.c_int.from_buffer(self.mm))

		# Cast the memory segment into a function
		functype = ctypes.CFUNCTYPE(self.restype, *self.argtypes)
		self.func = functype(self.address)

	def run(self):
		'''Calls the compiled machine code (no arguments) and returns its result.'''
		retval = self.func()

		return retval

	def free(self):
		'''Releases the executable memory segment and resets all state.'''
		if DataSource.is_windows:
			MEM_RELEASE = ctypes.c_ulong(0x8000)
			ctypes.windll.kernel32.VirtualFree(ctypes.c_void_p(self.address), ctypes.c_size_t(0), MEM_RELEASE)
		else:
			self.mm.close()

		self.prochandle = None
		self.mm = None
		self.func = None
		self.address = None
		self.size = 0
994
+
995
+
996
class CPUID(object):
	'''
	Queries the x86 CPUID instruction (and RDTSC) by generating machine
	code at runtime through the ASM helper class.
	WARNING: running self-generated machine code can crash the process on
	non-x86 CPUs or locked-down systems (e.g. SELinux enforcing); callers
	are expected to run this in a sacrificial subprocess.
	'''

	def __init__(self, trace=None):
		if trace is None:
			trace = Trace(False, False)

		# Figure out if SE Linux is on and in enforcing mode
		self.is_selinux_enforcing = _is_selinux_enforcing(trace)

	def _asm_func(self, restype=None, argtypes=(), machine_code=[]):
		# Compiles the machine code and returns the live ASM wrapper.
		# The caller is responsible for calling .free() on it.
		asm = ASM(restype, argtypes, machine_code)
		asm.compile()
		return asm

	def _run_asm(self, *machine_code):
		# Compiles and runs the machine code once as "uint32 f(void)",
		# frees the segment, and returns the 32 bit result.
		asm = ASM(ctypes.c_uint32, (), machine_code)
		asm.compile()
		retval = asm.run()
		asm.free()
		return retval

	# http://en.wikipedia.org/wiki/CPUID#EAX.3D0:_Get_vendor_ID
	def get_vendor_id(self):
		'''Returns the 12 character vendor ID string (e.g. "GenuineIntel").'''
		# EBX
		ebx = self._run_asm(
			b"\x31\xC0",         # xor eax,eax
			b"\x0F\xA2"          # cpuid
			b"\x89\xD8"          # mov ax,bx
			b"\xC3"              # ret
		)

		# ECX
		ecx = self._run_asm(
			b"\x31\xC0",         # xor eax,eax
			b"\x0f\xa2"          # cpuid
			b"\x89\xC8"          # mov ax,cx
			b"\xC3"              # ret
		)

		# EDX
		edx = self._run_asm(
			b"\x31\xC0",         # xor eax,eax
			b"\x0f\xa2"          # cpuid
			b"\x89\xD0"          # mov ax,dx
			b"\xC3"              # ret
		)

		# Each byte of EBX, EDX, ECX (in that order) is one ASCII letter
		# of the vendor name
		vendor_id = []
		for reg in [ebx, edx, ecx]:
			for n in [0, 8, 16, 24]:
				vendor_id.append(chr((reg >> n) & 0xFF))
		vendor_id = ''.join(vendor_id)

		return vendor_id

	# http://en.wikipedia.org/wiki/CPUID#EAX.3D1:_Processor_Info_and_Feature_Bits
	def get_info(self):
		'''Returns a dict with stepping, model, family and processor_type.'''
		# EAX
		eax = self._run_asm(
			b"\xB8\x01\x00\x00\x00",   # mov eax,0x1
			b"\x0f\xa2"                # cpuid
			b"\xC3"                    # ret
		)

		# Unpack the bit fields of EAX
		stepping_id = (eax >> 0) & 0xF # 4 bits
		model = (eax >> 4) & 0xF # 4 bits
		family_id = (eax >> 8) & 0xF # 4 bits
		processor_type = (eax >> 12) & 0x3 # 2 bits
		extended_model_id = (eax >> 16) & 0xF # 4 bits
		extended_family_id = (eax >> 20) & 0xFF # 8 bits
		family = 0

		# Family/model are extended with the extended fields per the
		# CPUID spec when the base fields hit their sentinel values
		if family_id in [15]:
			family = extended_family_id + family_id
		else:
			family = family_id

		if family_id in [6, 15]:
			model = (extended_model_id << 4) + model

		return {
			'stepping' : stepping_id,
			'model' : model,
			'family' : family,
			'processor_type' : processor_type
		}

	# http://en.wikipedia.org/wiki/CPUID#EAX.3D80000000h:_Get_Highest_Extended_Function_Supported
	def get_max_extension_support(self):
		'''Returns the highest extended CPUID function supported (EAX of leaf 0x80000000).'''
		# Check for extension support
		max_extension_support = self._run_asm(
			b"\xB8\x00\x00\x00\x80" # mov ax,0x80000000
			b"\x0f\xa2"             # cpuid
			b"\xC3"                 # ret
		)

		return max_extension_support

	# http://en.wikipedia.org/wiki/CPUID#EAX.3D1:_Processor_Info_and_Feature_Bits
	def get_flags(self, max_extension_support):
		'''
		Returns a sorted list of the CPU feature-flag names that are set,
		gathered from leaf 1, leaf 7 and extended leaf 0x80000001 (the
		latter two only when max_extension_support allows).
		'''
		# EDX
		edx = self._run_asm(
			b"\xB8\x01\x00\x00\x00",   # mov eax,0x1
			b"\x0f\xa2"                # cpuid
			b"\x89\xD0"                # mov ax,dx
			b"\xC3"                    # ret
		)

		# ECX
		ecx = self._run_asm(
			b"\xB8\x01\x00\x00\x00",   # mov eax,0x1
			b"\x0f\xa2"                # cpuid
			b"\x89\xC8"                # mov ax,cx
			b"\xC3"                    # ret
		)

		# Leaf 1 flags: name -> bit set?
		flags = {
			'fpu' : _is_bit_set(edx, 0),
			'vme' : _is_bit_set(edx, 1),
			'de' : _is_bit_set(edx, 2),
			'pse' : _is_bit_set(edx, 3),
			'tsc' : _is_bit_set(edx, 4),
			'msr' : _is_bit_set(edx, 5),
			'pae' : _is_bit_set(edx, 6),
			'mce' : _is_bit_set(edx, 7),
			'cx8' : _is_bit_set(edx, 8),
			'apic' : _is_bit_set(edx, 9),
			#'reserved1' : _is_bit_set(edx, 10),
			'sep' : _is_bit_set(edx, 11),
			'mtrr' : _is_bit_set(edx, 12),
			'pge' : _is_bit_set(edx, 13),
			'mca' : _is_bit_set(edx, 14),
			'cmov' : _is_bit_set(edx, 15),
			'pat' : _is_bit_set(edx, 16),
			'pse36' : _is_bit_set(edx, 17),
			'pn' : _is_bit_set(edx, 18),
			'clflush' : _is_bit_set(edx, 19),
			#'reserved2' : _is_bit_set(edx, 20),
			'dts' : _is_bit_set(edx, 21),
			'acpi' : _is_bit_set(edx, 22),
			'mmx' : _is_bit_set(edx, 23),
			'fxsr' : _is_bit_set(edx, 24),
			'sse' : _is_bit_set(edx, 25),
			'sse2' : _is_bit_set(edx, 26),
			'ss' : _is_bit_set(edx, 27),
			'ht' : _is_bit_set(edx, 28),
			'tm' : _is_bit_set(edx, 29),
			'ia64' : _is_bit_set(edx, 30),
			'pbe' : _is_bit_set(edx, 31),

			'pni' : _is_bit_set(ecx, 0),
			'pclmulqdq' : _is_bit_set(ecx, 1),
			'dtes64' : _is_bit_set(ecx, 2),
			'monitor' : _is_bit_set(ecx, 3),
			'ds_cpl' : _is_bit_set(ecx, 4),
			'vmx' : _is_bit_set(ecx, 5),
			'smx' : _is_bit_set(ecx, 6),
			'est' : _is_bit_set(ecx, 7),
			'tm2' : _is_bit_set(ecx, 8),
			'ssse3' : _is_bit_set(ecx, 9),
			'cid' : _is_bit_set(ecx, 10),
			#'reserved3' : _is_bit_set(ecx, 11),
			'fma' : _is_bit_set(ecx, 12),
			'cx16' : _is_bit_set(ecx, 13),
			'xtpr' : _is_bit_set(ecx, 14),
			'pdcm' : _is_bit_set(ecx, 15),
			#'reserved4' : _is_bit_set(ecx, 16),
			'pcid' : _is_bit_set(ecx, 17),
			'dca' : _is_bit_set(ecx, 18),
			'sse4_1' : _is_bit_set(ecx, 19),
			'sse4_2' : _is_bit_set(ecx, 20),
			'x2apic' : _is_bit_set(ecx, 21),
			'movbe' : _is_bit_set(ecx, 22),
			'popcnt' : _is_bit_set(ecx, 23),
			'tscdeadline' : _is_bit_set(ecx, 24),
			'aes' : _is_bit_set(ecx, 25),
			'xsave' : _is_bit_set(ecx, 26),
			'osxsave' : _is_bit_set(ecx, 27),
			'avx' : _is_bit_set(ecx, 28),
			'f16c' : _is_bit_set(ecx, 29),
			'rdrnd' : _is_bit_set(ecx, 30),
			'hypervisor' : _is_bit_set(ecx, 31)
		}

		# Get a list of only the flags that are true
		flags = [k for k, v in flags.items() if v]

		# http://en.wikipedia.org/wiki/CPUID#EAX.3D7.2C_ECX.3D0:_Extended_Features
		if max_extension_support >= 7:
			# EBX
			ebx = self._run_asm(
				b"\x31\xC9",            # xor ecx,ecx
				b"\xB8\x07\x00\x00\x00" # mov eax,7
				b"\x0f\xa2"             # cpuid
				b"\x89\xD8"             # mov ax,bx
				b"\xC3"                 # ret
			)

			# ECX
			ecx = self._run_asm(
				b"\x31\xC9",            # xor ecx,ecx
				b"\xB8\x07\x00\x00\x00" # mov eax,7
				b"\x0f\xa2"             # cpuid
				b"\x89\xC8"             # mov ax,cx
				b"\xC3"                 # ret
			)

			# Leaf 7 (extended feature) flags
			extended_flags = {
				#'fsgsbase' : _is_bit_set(ebx, 0),
				#'IA32_TSC_ADJUST' : _is_bit_set(ebx, 1),
				'sgx' : _is_bit_set(ebx, 2),
				'bmi1' : _is_bit_set(ebx, 3),
				'hle' : _is_bit_set(ebx, 4),
				'avx2' : _is_bit_set(ebx, 5),
				#'reserved' : _is_bit_set(ebx, 6),
				'smep' : _is_bit_set(ebx, 7),
				'bmi2' : _is_bit_set(ebx, 8),
				'erms' : _is_bit_set(ebx, 9),
				'invpcid' : _is_bit_set(ebx, 10),
				'rtm' : _is_bit_set(ebx, 11),
				'pqm' : _is_bit_set(ebx, 12),
				#'FPU CS and FPU DS deprecated' : _is_bit_set(ebx, 13),
				'mpx' : _is_bit_set(ebx, 14),
				'pqe' : _is_bit_set(ebx, 15),
				'avx512f' : _is_bit_set(ebx, 16),
				'avx512dq' : _is_bit_set(ebx, 17),
				'rdseed' : _is_bit_set(ebx, 18),
				'adx' : _is_bit_set(ebx, 19),
				'smap' : _is_bit_set(ebx, 20),
				'avx512ifma' : _is_bit_set(ebx, 21),
				'pcommit' : _is_bit_set(ebx, 22),
				'clflushopt' : _is_bit_set(ebx, 23),
				'clwb' : _is_bit_set(ebx, 24),
				'intel_pt' : _is_bit_set(ebx, 25),
				'avx512pf' : _is_bit_set(ebx, 26),
				'avx512er' : _is_bit_set(ebx, 27),
				'avx512cd' : _is_bit_set(ebx, 28),
				'sha' : _is_bit_set(ebx, 29),
				'avx512bw' : _is_bit_set(ebx, 30),
				'avx512vl' : _is_bit_set(ebx, 31),

				'prefetchwt1' : _is_bit_set(ecx, 0),
				'avx512vbmi' : _is_bit_set(ecx, 1),
				'umip' : _is_bit_set(ecx, 2),
				'pku' : _is_bit_set(ecx, 3),
				'ospke' : _is_bit_set(ecx, 4),
				#'reserved' : _is_bit_set(ecx, 5),
				'avx512vbmi2' : _is_bit_set(ecx, 6),
				#'reserved' : _is_bit_set(ecx, 7),
				'gfni' : _is_bit_set(ecx, 8),
				'vaes' : _is_bit_set(ecx, 9),
				'vpclmulqdq' : _is_bit_set(ecx, 10),
				'avx512vnni' : _is_bit_set(ecx, 11),
				'avx512bitalg' : _is_bit_set(ecx, 12),
				#'reserved' : _is_bit_set(ecx, 13),
				'avx512vpopcntdq' : _is_bit_set(ecx, 14),
				#'reserved' : _is_bit_set(ecx, 15),
				#'reserved' : _is_bit_set(ecx, 16),
				#'mpx0' : _is_bit_set(ecx, 17),
				#'mpx1' : _is_bit_set(ecx, 18),
				#'mpx2' : _is_bit_set(ecx, 19),
				#'mpx3' : _is_bit_set(ecx, 20),
				#'mpx4' : _is_bit_set(ecx, 21),
				'rdpid' : _is_bit_set(ecx, 22),
				#'reserved' : _is_bit_set(ecx, 23),
				#'reserved' : _is_bit_set(ecx, 24),
				#'reserved' : _is_bit_set(ecx, 25),
				#'reserved' : _is_bit_set(ecx, 26),
				#'reserved' : _is_bit_set(ecx, 27),
				#'reserved' : _is_bit_set(ecx, 28),
				#'reserved' : _is_bit_set(ecx, 29),
				'sgx_lc' : _is_bit_set(ecx, 30),
				#'reserved' : _is_bit_set(ecx, 31)
			}

			# Get a list of only the flags that are true
			extended_flags = [k for k, v in extended_flags.items() if v]
			flags += extended_flags

		# http://en.wikipedia.org/wiki/CPUID#EAX.3D80000001h:_Extended_Processor_Info_and_Feature_Bits
		if max_extension_support >= 0x80000001:
			# EBX
			ebx = self._run_asm(
				b"\xB8\x01\x00\x00\x80" # mov ax,0x80000001
				b"\x0f\xa2"             # cpuid
				b"\x89\xD8"             # mov ax,bx
				b"\xC3"                 # ret
			)

			# ECX
			ecx = self._run_asm(
				b"\xB8\x01\x00\x00\x80" # mov ax,0x80000001
				b"\x0f\xa2"             # cpuid
				b"\x89\xC8"             # mov ax,cx
				b"\xC3"                 # ret
			)

			# Extended leaf 0x80000001 (AMD-style) flags
			extended_flags = {
				'fpu' : _is_bit_set(ebx, 0),
				'vme' : _is_bit_set(ebx, 1),
				'de' : _is_bit_set(ebx, 2),
				'pse' : _is_bit_set(ebx, 3),
				'tsc' : _is_bit_set(ebx, 4),
				'msr' : _is_bit_set(ebx, 5),
				'pae' : _is_bit_set(ebx, 6),
				'mce' : _is_bit_set(ebx, 7),
				'cx8' : _is_bit_set(ebx, 8),
				'apic' : _is_bit_set(ebx, 9),
				#'reserved' : _is_bit_set(ebx, 10),
				'syscall' : _is_bit_set(ebx, 11),
				'mtrr' : _is_bit_set(ebx, 12),
				'pge' : _is_bit_set(ebx, 13),
				'mca' : _is_bit_set(ebx, 14),
				'cmov' : _is_bit_set(ebx, 15),
				'pat' : _is_bit_set(ebx, 16),
				'pse36' : _is_bit_set(ebx, 17),
				#'reserved' : _is_bit_set(ebx, 18),
				'mp' : _is_bit_set(ebx, 19),
				'nx' : _is_bit_set(ebx, 20),
				#'reserved' : _is_bit_set(ebx, 21),
				'mmxext' : _is_bit_set(ebx, 22),
				'mmx' : _is_bit_set(ebx, 23),
				'fxsr' : _is_bit_set(ebx, 24),
				'fxsr_opt' : _is_bit_set(ebx, 25),
				'pdpe1gp' : _is_bit_set(ebx, 26),
				'rdtscp' : _is_bit_set(ebx, 27),
				#'reserved' : _is_bit_set(ebx, 28),
				'lm' : _is_bit_set(ebx, 29),
				'3dnowext' : _is_bit_set(ebx, 30),
				'3dnow' : _is_bit_set(ebx, 31),

				'lahf_lm' : _is_bit_set(ecx, 0),
				'cmp_legacy' : _is_bit_set(ecx, 1),
				'svm' : _is_bit_set(ecx, 2),
				'extapic' : _is_bit_set(ecx, 3),
				'cr8_legacy' : _is_bit_set(ecx, 4),
				'abm' : _is_bit_set(ecx, 5),
				'sse4a' : _is_bit_set(ecx, 6),
				'misalignsse' : _is_bit_set(ecx, 7),
				'3dnowprefetch' : _is_bit_set(ecx, 8),
				'osvw' : _is_bit_set(ecx, 9),
				'ibs' : _is_bit_set(ecx, 10),
				'xop' : _is_bit_set(ecx, 11),
				'skinit' : _is_bit_set(ecx, 12),
				'wdt' : _is_bit_set(ecx, 13),
				#'reserved' : _is_bit_set(ecx, 14),
				'lwp' : _is_bit_set(ecx, 15),
				'fma4' : _is_bit_set(ecx, 16),
				'tce' : _is_bit_set(ecx, 17),
				#'reserved' : _is_bit_set(ecx, 18),
				'nodeid_msr' : _is_bit_set(ecx, 19),
				#'reserved' : _is_bit_set(ecx, 20),
				'tbm' : _is_bit_set(ecx, 21),
				'topoext' : _is_bit_set(ecx, 22),
				'perfctr_core' : _is_bit_set(ecx, 23),
				'perfctr_nb' : _is_bit_set(ecx, 24),
				#'reserved' : _is_bit_set(ecx, 25),
				'dbx' : _is_bit_set(ecx, 26),
				'perftsc' : _is_bit_set(ecx, 27),
				'pci_l2i' : _is_bit_set(ecx, 28),
				#'reserved' : _is_bit_set(ecx, 29),
				#'reserved' : _is_bit_set(ecx, 30),
				#'reserved' : _is_bit_set(ecx, 31)
			}

			# Get a list of only the flags that are true
			extended_flags = [k for k, v in extended_flags.items() if v]
			flags += extended_flags

		flags.sort()
		return flags

	# http://en.wikipedia.org/wiki/CPUID#EAX.3D80000002h.2C80000003h.2C80000004h:_Processor_Brand_String
	def get_processor_brand(self, max_extension_support):
		'''
		Returns the 48 character processor brand string from extended
		leaves 0x80000002..0x80000004, or "" when not supported.
		'''
		processor_brand = ""

		# Processor brand string
		if max_extension_support >= 0x80000004:
			instructions = [
				b"\xB8\x02\x00\x00\x80", # mov ax,0x80000002
				b"\xB8\x03\x00\x00\x80", # mov ax,0x80000003
				b"\xB8\x04\x00\x00\x80"  # mov ax,0x80000004
			]
			for instruction in instructions:
				# EAX
				eax = self._run_asm(
					instruction,  # mov ax,0x8000000?
					b"\x0f\xa2"   # cpuid
					b"\x89\xC0"   # mov ax,ax
					b"\xC3"       # ret
				)

				# EBX
				ebx = self._run_asm(
					instruction,  # mov ax,0x8000000?
					b"\x0f\xa2"   # cpuid
					b"\x89\xD8"   # mov ax,bx
					b"\xC3"       # ret
				)

				# ECX
				ecx = self._run_asm(
					instruction,  # mov ax,0x8000000?
					b"\x0f\xa2"   # cpuid
					b"\x89\xC8"   # mov ax,cx
					b"\xC3"       # ret
				)

				# EDX
				edx = self._run_asm(
					instruction,  # mov ax,0x8000000?
					b"\x0f\xa2"   # cpuid
					b"\x89\xD0"   # mov ax,dx
					b"\xC3"       # ret
				)

				# Combine each of the 4 bytes in each register into the string
				for reg in [eax, ebx, ecx, edx]:
					for n in [0, 8, 16, 24]:
						processor_brand += chr((reg >> n) & 0xFF)

		# Strip off any trailing NULL terminators and white space
		processor_brand = processor_brand.strip("\0").strip()

		return processor_brand

	# http://en.wikipedia.org/wiki/CPUID#EAX.3D80000006h:_Extended_L2_Cache_Features
	def get_cache(self, max_extension_support):
		'''
		Returns the L2 cache info dict (size_b, associativity, line_size_b)
		from extended leaf 0x80000006, or {} when not supported.
		'''
		cache_info = {}

		# Just return if the cache feature is not supported
		if max_extension_support < 0x80000006:
			return cache_info

		# ECX
		ecx = self._run_asm(
			b"\xB8\x06\x00\x00\x80" # mov ax,0x80000006
			b"\x0f\xa2"             # cpuid
			b"\x89\xC8"             # mov ax,cx
			b"\xC3"                 # ret
		)

		cache_info = {
			'size_b' : (ecx & 0xFF) * 1024,
			'associativity' : (ecx >> 12) & 0xF,
			'line_size_b' : (ecx >> 16) & 0xFFFF
		}

		return cache_info

	def get_ticks_func(self):
		'''
		Returns a compiled ASM object whose .func() reads the CPU time
		stamp counter (RDTSC) as a 64 bit integer, or None when
		DataSource.bits is neither '32bit' nor '64bit'.
		The caller must call .free() on the returned object when done.
		'''
		retval = None

		if DataSource.bits == '32bit':
			# Works on x86_32
			restype = None
			argtypes = (ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(ctypes.c_uint))
			get_ticks_x86_32 = self._asm_func(restype, argtypes,
				[
				b"\x55",         # push bp
				b"\x89\xE5",     # mov bp,sp
				b"\x31\xC0",     # xor ax,ax
				b"\x0F\xA2",     # cpuid
				b"\x0F\x31",     # rdtsc
				b"\x8B\x5D\x08", # mov bx,[di+0x8]
				b"\x8B\x4D\x0C", # mov cx,[di+0xc]
				b"\x89\x13",     # mov [bp+di],dx
				b"\x89\x01",     # mov [bx+di],ax
				b"\x5D",         # pop bp
				b"\xC3"          # ret
				]
			)

			# Monkey patch func to combine high and low args into one return
			old_func = get_ticks_x86_32.func
			def new_func():
				# Pass two uint32s into function
				high = ctypes.c_uint32(0)
				low = ctypes.c_uint32(0)
				old_func(ctypes.byref(high), ctypes.byref(low))

				# Shift the two uint32s into one uint64
				retval = ((high.value << 32) & 0xFFFFFFFF00000000) | low.value
				return retval
			get_ticks_x86_32.func = new_func

			retval = get_ticks_x86_32
		elif DataSource.bits == '64bit':
			# Works on x86_64
			restype = ctypes.c_uint64
			argtypes = ()
			get_ticks_x86_64 = self._asm_func(restype, argtypes,
				[
				b"\x48",         # dec ax
				b"\x31\xC0",     # xor ax,ax
				b"\x0F\xA2",     # cpuid
				b"\x0F\x31",     # rdtsc
				b"\x48",         # dec ax
				b"\xC1\xE2\x20", # shl dx,byte 0x20
				b"\x48",         # dec ax
				b"\x09\xD0",     # or ax,dx
				b"\xC3",         # ret
				]
			)

			retval = get_ticks_x86_64
		return retval

	def get_raw_hz(self):
		'''
		Estimates the CPU speed in Hz by counting time stamp counter
		ticks over a one second sleep.
		'''
		from time import sleep

		ticks_fn = self.get_ticks_func()

		start = ticks_fn.func()
		sleep(1)
		end = ticks_fn.func()

		ticks = (end - start)
		ticks_fn.free()

		return ticks
1521
+
1522
def _get_cpu_info_from_cpuid_actual():
	'''
	Warning! This function has the potential to crash the Python runtime.
	Do not call it directly. Use the _get_cpu_info_from_cpuid function instead.
	It will safely call this function in another process.

	Returns a trace dict (via Trace.to_dict) containing the gathered CPU
	info fields, the captured stdout/stderr, and any error text. The boolean
	passed to to_dict marks whether the attempt failed.
	'''

	from io import StringIO

	trace = Trace(True, True)
	info = {}

	# Pipe stdout and stderr to strings
	sys.stdout = trace._stdout
	sys.stderr = trace._stderr

	try:
		# Get the CPU arch and bits
		arch, bits = _parse_arch(DataSource.arch_string_raw)

		# Return none if this is not an X86 CPU
		if not arch in ['X86_32', 'X86_64']:
			trace.fail('Not running on X86_32 or X86_64. Skipping ...')
			return trace.to_dict(info, True)

		# Return none if SE Linux is in enforcing mode
		cpuid = CPUID(trace)
		if cpuid.is_selinux_enforcing:
			trace.fail('SELinux is enforcing. Skipping ...')
			return trace.to_dict(info, True)

		# Get the cpu info from the CPUID register
		max_extension_support = cpuid.get_max_extension_support()
		# NOTE(review): cache_info is {} when leaf 0x80000006 is unsupported,
		# so the cache_info[...] lookups below would raise KeyError; that is
		# swallowed by the except below and reported as a failure — confirm
		# this is the intended behavior.
		cache_info = cpuid.get_cache(max_extension_support)
		info = cpuid.get_info()

		processor_brand = cpuid.get_processor_brand(max_extension_support)

		# Get the actual Hz by timing the time stamp counter
		hz_actual = cpuid.get_raw_hz()
		hz_actual = _to_decimal_string(hz_actual)

		# Get the advertised Hz and scale from the brand string
		hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
		info = {
			'vendor_id_raw' : cpuid.get_vendor_id(),
			'hardware_raw' : '',
			'brand_raw' : processor_brand,

			'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
			'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, 0),
			'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
			'hz_actual' : _hz_short_to_full(hz_actual, 0),

			'l2_cache_size' : cache_info['size_b'],
			'l2_cache_line_size' : cache_info['line_size_b'],
			'l2_cache_associativity' : cache_info['associativity'],

			'stepping' : info['stepping'],
			'model' : info['model'],
			'family' : info['family'],
			'processor_type' : info['processor_type'],
			'flags' : cpuid.get_flags(max_extension_support)
		}

		info = _filter_dict_keys_with_empty_values(info)
		trace.success()
	except Exception as err:
		# Record the full traceback (indented) in the trace and flag failure
		from traceback import format_exc
		err_string = format_exc()
		trace._err = ''.join(['\t\t{0}\n'.format(n) for n in err_string.split('\n')]) + '\n'
		return trace.to_dict(info, True)

	return trace.to_dict(info, False)
1596
+
1597
def _get_cpu_info_from_cpuid_subprocess_wrapper(queue):
	# Child-process entry point: runs the potentially-crashy CPUID query
	# (which redirects sys.stdout/stderr into its trace), restores the
	# original streams, and pushes the base64-encoded result onto queue.
	orig_stdout = sys.stdout
	orig_stderr = sys.stderr

	output = _get_cpu_info_from_cpuid_actual()

	sys.stdout = orig_stdout
	sys.stderr = orig_stderr

	queue.put(_obj_to_b64(output))
1607
+
1608
def _get_cpu_info_from_cpuid():
    '''
    Returns the CPU info gathered by querying the X86 cpuid register in a new process.
    Returns {} on non X86 cpus.
    Returns {} if SELinux is in enforcing mode.
    '''

    g_trace.header('Trying to get info from CPUID ...')

    from multiprocessing import Process, Queue

    # Return {} if can't cpuid
    if not DataSource.can_cpuid:
        g_trace.fail('Can\'t CPUID. Skipping ...')
        return {}

    # Get the CPU arch and bits
    arch, bits = _parse_arch(DataSource.arch_string_raw)

    # Return {} if this is not an X86 CPU
    if not arch in ['X86_32', 'X86_64']:
        g_trace.fail('Not running on X86_32 or X86_64. Skipping ...')
        return {}

    try:
        if CAN_CALL_CPUID_IN_SUBPROCESS:
            # Start running the function in a subprocess, since running
            # raw machine code in-process can crash the interpreter
            queue = Queue()
            p = Process(target=_get_cpu_info_from_cpuid_subprocess_wrapper, args=(queue,))
            p.start()

            # Wait for the process to end, while it is still alive
            while p.is_alive():
                p.join(0)

            # Return {} if it failed
            if p.exitcode != 0:
                g_trace.fail('Failed to run CPUID in process. Skipping ...')
                return {}

            # Return {} if no results
            if queue.empty():
                g_trace.fail('Failed to get anything from CPUID process. Skipping ...')
                return {}
            # Return the result, only if there is something to read
            else:
                output = _b64_to_obj(queue.get())

                # Forward any trace/stdout/stderr text captured in the child
                if 'output' in output and output['output']:
                    g_trace.write(output['output'])

                if 'stdout' in output and output['stdout']:
                    sys.stdout.write('{0}\n'.format(output['stdout']))
                    sys.stdout.flush()

                if 'stderr' in output and output['stderr']:
                    sys.stderr.write('{0}\n'.format(output['stderr']))
                    sys.stderr.flush()

                if 'is_fail' not in output:
                    g_trace.fail('Failed to get is_fail from CPUID process. Skipping ...')
                    return {}

                # Fail if there was an exception
                if 'err' in output and output['err']:
                    g_trace.fail('Failed to run CPUID in process. Skipping ...')
                    g_trace.write(output['err'])
                    g_trace.write('Failed ...')
                    return {}

                if 'is_fail' in output and output['is_fail']:
                    g_trace.write('Failed ...')
                    return {}

                if 'info' not in output or not output['info']:
                    g_trace.fail('Failed to get return info from CPUID process. Skipping ...')
                    return {}

                return output['info']
        else:
            # FIXME: This should write the values like in the above call to actual
            orig_stdout = sys.stdout
            orig_stderr = sys.stderr

            output = _get_cpu_info_from_cpuid_actual()

            sys.stdout = orig_stdout
            sys.stderr = orig_stderr

            g_trace.success()
            return output['info']
    except Exception as err:
        g_trace.fail(err)

    # Return {} if everything failed
    return {}
1708
def _get_cpu_info_from_proc_cpuinfo():
    '''
    Returns the CPU info gathered from /proc/cpuinfo.
    Returns {} if /proc/cpuinfo is not found.
    '''

    g_trace.header('Trying to get info from /proc/cpuinfo ...')

    try:
        # Just return {} if there is no cpuinfo
        if not DataSource.has_proc_cpuinfo():
            g_trace.fail('Failed to find /proc/cpuinfo. Skipping ...')
            return {}

        returncode, output = DataSource.cat_proc_cpuinfo()
        if returncode != 0:
            g_trace.fail('Failed to run cat /proc/cpuinfo. Skipping ...')
            return {}

        # Various fields
        vendor_id = _get_field(False, output, None, '', 'vendor_id', 'vendor id', 'vendor')
        processor_brand = _get_field(True, output, None, None, 'model name', 'cpu', 'processor', 'uarch')
        cache_size = _get_field(False, output, None, '', 'cache size')
        stepping = _get_field(False, output, int, -1, 'stepping')
        model = _get_field(False, output, int, -1, 'model')
        family = _get_field(False, output, int, -1, 'cpu family')
        hardware = _get_field(False, output, None, '', 'Hardware')

        # Flags
        flags = _get_field(False, output, None, None, 'flags', 'Features', 'ASEs implemented')
        if flags:
            flags = flags.split()
            flags.sort()

        # Check for other cache format, e.g. "cacheN: level=3 size=..." entries
        if not cache_size:
            try:
                for i in range(10):
                    name = "cache{0}".format(i)
                    value = _get_field(False, output, None, None, name)
                    if value:
                        value = [entry.split('=') for entry in value.split(' ')]
                        value = dict(value)
                        # Only the L3 cache size is wanted here
                        if 'level' in value and value['level'] == '3' and 'size' in value:
                            cache_size = value['size']
                            break
            except Exception:
                # Best-effort parse of an optional format; ignore failures
                pass

        # Convert from MHz string to Hz
        hz_actual = _get_field(False, output, None, '', 'cpu MHz', 'cpu speed', 'clock', 'cpu MHz dynamic', 'cpu MHz static')
        hz_actual = hz_actual.lower().rstrip('mhz').strip()
        hz_actual = _to_decimal_string(hz_actual)

        # Convert from GHz/MHz string to Hz
        hz_advertised, scale = (None, 0)
        try:
            hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
        except Exception:
            pass

        info = {
            'hardware_raw' : hardware,
            'brand_raw' : processor_brand,

            'l3_cache_size' : _friendly_bytes_to_int(cache_size),
            'flags' : flags,
            'vendor_id_raw' : vendor_id,
            'stepping' : stepping,
            'model' : model,
            'family' : family,
        }

        # Make the Hz the same for actual and advertised if missing any
        if not hz_advertised or hz_advertised == '0.0':
            hz_advertised = hz_actual
            scale = 6
        elif not hz_actual or hz_actual == '0.0':
            hz_actual = hz_advertised

        # Add the Hz if there is one
        if _hz_short_to_full(hz_advertised, scale) > (0, 0):
            info['hz_advertised_friendly'] = _hz_short_to_friendly(hz_advertised, scale)
            info['hz_advertised'] = _hz_short_to_full(hz_advertised, scale)
        # hz_actual comes from /proc/cpuinfo in MHz, so always use scale 6 here.
        # (The original guarded with the brand-string scale but stored with 6.)
        if _hz_short_to_full(hz_actual, 6) > (0, 0):
            info['hz_actual_friendly'] = _hz_short_to_friendly(hz_actual, 6)
            info['hz_actual'] = _hz_short_to_full(hz_actual, 6)

        info = _filter_dict_keys_with_empty_values(info, {'stepping':0, 'model':0, 'family':0})
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        #raise # NOTE: To have this throw on error, uncomment this line
        return {}
1804
def _get_cpu_info_from_cpufreq_info():
    '''
    Returns the CPU info gathered from cpufreq-info.
    Returns {} if cpufreq-info is not found.
    '''

    g_trace.header('Trying to get info from cpufreq-info ...')

    try:
        hz_brand, scale = '0.0', 0

        if not DataSource.has_cpufreq_info():
            g_trace.fail('Failed to find cpufreq-info. Skipping ...')
            return {}

        returncode, output = DataSource.cpufreq_info()
        if returncode != 0:
            g_trace.fail('Failed to run cpufreq-info. Skipping ...')
            return {}

        # Pull out a string like "1.60 GHz" after "current CPU frequency is"
        hz_brand = output.split('current CPU frequency is')[1].split('\n')[0]
        i = hz_brand.find('Hz')
        assert(i != -1)
        hz_brand = hz_brand[0 : i+2].strip().lower()

        # Determine the scale from the unit suffix, then strip the suffix.
        # NOTE: str.rstrip takes a character SET, not a suffix, so slicing
        # is used here instead of the fragile rstrip('mhz').rstrip('ghz').
        if hz_brand.endswith('mhz'):
            scale = 6
            hz_brand = hz_brand[:-3]
        elif hz_brand.endswith('ghz'):
            scale = 9
            hz_brand = hz_brand[:-3]
        elif hz_brand.endswith('hz'):
            # Plain Hz: no scaling needed
            hz_brand = hz_brand[:-2]
        hz_brand = hz_brand.strip()
        hz_brand = _to_decimal_string(hz_brand)

        info = {
            'hz_advertised_friendly' : _hz_short_to_friendly(hz_brand, scale),
            'hz_actual_friendly' : _hz_short_to_friendly(hz_brand, scale),
            'hz_advertised' : _hz_short_to_full(hz_brand, scale),
            'hz_actual' : _hz_short_to_full(hz_brand, scale),
        }

        info = _filter_dict_keys_with_empty_values(info)
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        #raise # NOTE: To have this throw on error, uncomment this line
        return {}
1851
def _get_cpu_info_from_lscpu():
    '''
    Returns the CPU info gathered from lscpu.
    Returns {} if lscpu is not found.
    '''

    g_trace.header('Trying to get info from lscpu ...')

    try:
        if not DataSource.has_lscpu():
            g_trace.fail('Failed to find lscpu. Skipping ...')
            return {}

        returncode, output = DataSource.lscpu()
        if returncode != 0:
            g_trace.fail('Failed to run lscpu. Skipping ...')
            return {}

        info = {}

        # Store both advertised and actual Hz from a raw lscpu MHz value
        def set_hz_fields(raw_mhz):
            raw_mhz = _to_decimal_string(raw_mhz)
            scale = 6  # lscpu reports MHz
            info['hz_advertised_friendly'] = _hz_short_to_friendly(raw_mhz, scale)
            info['hz_actual_friendly'] = _hz_short_to_friendly(raw_mhz, scale)
            info['hz_advertised'] = _hz_short_to_full(raw_mhz, scale)
            info['hz_actual'] = _hz_short_to_full(raw_mhz, scale)

        new_hz = _get_field(False, output, None, None, 'CPU max MHz', 'CPU MHz')
        if new_hz:
            set_hz_fields(new_hz)

        # Some machines (e.g. S390X) report dynamic/static MHz instead;
        # when present these override the fields set above.
        new_hz = _get_field(False, output, None, None, 'CPU dynamic MHz', 'CPU static MHz')
        if new_hz:
            set_hz_fields(new_hz)

        vendor_id = _get_field(False, output, None, None, 'Vendor ID')
        if vendor_id:
            info['vendor_id_raw'] = vendor_id

        brand = _get_field(False, output, None, None, 'Model name')
        if brand:
            info['brand_raw'] = brand
        else:
            # Fall back to "Model", but only when it is a name, not a number
            brand = _get_field(False, output, None, None, 'Model')
            if brand and not brand.isdigit():
                info['brand_raw'] = brand

        family = _get_field(False, output, None, None, 'CPU family')
        if family and family.isdigit():
            info['family'] = int(family)

        stepping = _get_field(False, output, None, None, 'Stepping')
        if stepping and stepping.isdigit():
            info['stepping'] = int(stepping)

        model = _get_field(False, output, None, None, 'Model')
        if model and model.isdigit():
            info['model'] = int(model)

        # Parse a cache field like "32 KiB (1 instance)" into a byte count
        def set_cache_field(key, *field_names):
            raw = _get_field(False, output, None, None, *field_names)
            if raw:
                raw = raw.split('(')[0].strip()
                info[key] = _friendly_bytes_to_int(raw)

        set_cache_field('l1_data_cache_size', 'L1d cache')
        set_cache_field('l1_instruction_cache_size', 'L1i cache')
        set_cache_field('l2_cache_size', 'L2 cache', 'L2d cache')
        set_cache_field('l3_cache_size', 'L3 cache')

        # Flags
        flags = _get_field(False, output, None, None, 'flags', 'Features', 'ASEs implemented')
        if flags:
            flags = flags.split()
            flags.sort()
            info['flags'] = flags

        info = _filter_dict_keys_with_empty_values(info, {'stepping':0, 'model':0, 'family':0})
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        #raise # NOTE: To have this throw on error, uncomment this line
        return {}
1948
def _get_cpu_info_from_dmesg():
    '''
    Returns the CPU info gathered from dmesg.
    Returns {} if dmesg is not found or does not have the desired info.
    '''

    g_trace.header('Trying to get info from the dmesg ...')

    # Just return {} if this arch has an unreliable dmesg log
    arch, bits = _parse_arch(DataSource.arch_string_raw)
    if arch in ['S390X']:
        g_trace.fail('Running on S390X. Skipping ...')
        return {}

    # Just return {} if there is no dmesg
    if not DataSource.has_dmesg():
        g_trace.fail('Failed to find dmesg. Skipping ...')
        return {}

    # If dmesg fails return {}
    returncode, output = DataSource.dmesg_a()
    if output is None or returncode != 0:
        g_trace.fail('Failed to run \"dmesg -a\". Skipping ...')
        return {}

    info = _parse_dmesg_output(output)
    g_trace.success()
    return info
1978
# https://openpowerfoundation.org/wp-content/uploads/2016/05/LoPAPR_DRAFT_v11_24March2016_cmt1.pdf
# page 767
def _get_cpu_info_from_ibm_pa_features():
    '''
    Returns the CPU info gathered from lsprop /proc/device-tree/cpus/*/ibm,pa-features
    Returns {} if lsprop is not found or ibm,pa-features does not have the desired info.
    '''

    g_trace.header('Trying to get info from lsprop ...')

    try:
        # Just return {} if there is no lsprop
        if not DataSource.has_ibm_pa_features():
            g_trace.fail('Failed to find lsprop. Skipping ...')
            return {}

        # If ibm,pa-features fails return {}
        returncode, output = DataSource.ibm_pa_features()
        if output is None or returncode != 0:
            g_trace.fail('Failed to glob /proc/device-tree/cpus/*/ibm,pa-features. Skipping ...')
            return {}

        # Keep only hex digits from the property dump
        hex_digits = set('0123456789abcdef')
        value = output.split("ibm,pa-features")[1].lower()
        value = ''.join([s for s in value if s in hex_digits])

        # Get data converted to Uint32 chunks
        left = int(value[0 : 8], 16)
        right = int(value[8 : 16], 16)

        # Flag names and the bit that advertises each, per the LoPAPR spec
        # (bytes 0-3 in `left`, bytes 4-7 in `right`; MSB is bit 0).
        # Reserved bits are intentionally omitted.
        left_flag_bits = (
            # Byte 0
            (0, 'mmu'), (1, 'fpu'), (2, 'slb'), (3, 'run'),
            (5, 'dabr'), (6, 'ne'), (7, 'wtr'),
            # Byte 1
            (8, 'mcr'), (9, 'dsisr'), (10, 'lp'), (11, 'ri'),
            (12, 'dabrx'), (13, 'sprg3'), (14, 'rislb'), (15, 'pp'),
            # Byte 2
            (16, 'vpm'), (17, 'dss_2.05'), (19, 'dar'), (21, 'ppr'),
            (22, 'dss_2.02'), (23, 'dss_2.06'),
            # Byte 3
            (24, 'lsd_in_dscr'), (25, 'ugr_in_dscr'),
        )
        right_flag_bits = (
            # Byte 4
            (0, 'sso_2.06'),
            # Byte 5
            (8, 'le'), (9, 'cfar'), (10, 'eb'), (11, 'lsq_2.07'),
            # Byte 6
            (16, 'dss_2.07'),
        )

        # Get a sorted list of only the flags whose bit is set
        flags = [name for bit, name in left_flag_bits if _is_bit_set(left, bit)]
        flags += [name for bit, name in right_flag_bits if _is_bit_set(right, bit)]
        flags.sort()

        info = {
            'flags' : flags
        }
        info = _filter_dict_keys_with_empty_values(info)
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        return {}
2106
+
2107
def _get_cpu_info_from_cat_var_run_dmesg_boot():
    '''
    Returns the CPU info gathered from /var/run/dmesg.boot.
    Returns {} if dmesg is not found or does not have the desired info.
    '''

    g_trace.header('Trying to get info from the /var/run/dmesg.boot log ...')

    # Just return {} if there is no /var/run/dmesg.boot
    if not DataSource.has_var_run_dmesg_boot():
        g_trace.fail('Failed to find /var/run/dmesg.boot file. Skipping ...')
        return {}

    # If dmesg.boot fails return {}
    returncode, output = DataSource.cat_var_run_dmesg_boot()
    if output is None or returncode != 0:
        g_trace.fail('Failed to run \"cat /var/run/dmesg.boot\". Skipping ...')
        return {}

    info = _parse_dmesg_output(output)
    g_trace.success()
    return info
2130
+
2131
def _get_cpu_info_from_sysctl():
    '''
    Returns the CPU info gathered from sysctl.
    Returns {} if sysctl is not found.
    '''

    g_trace.header('Trying to get info from sysctl ...')

    try:
        # Just return {} if there is no sysctl
        if not DataSource.has_sysctl():
            g_trace.fail('Failed to find sysctl. Skipping ...')
            return {}

        # If sysctl fails return {}
        returncode, output = DataSource.sysctl_machdep_cpu_hw_cpufrequency()
        if output is None or returncode != 0:
            g_trace.fail('Failed to run \"sysctl machdep.cpu hw.cpufrequency\". Skipping ...')
            return {}

        # Various fields
        vendor_id = _get_field(False, output, None, None, 'machdep.cpu.vendor')
        processor_brand = _get_field(True, output, None, None, 'machdep.cpu.brand_string')
        cache_size = _get_field(False, output, int, 0, 'machdep.cpu.cache.size')
        stepping = _get_field(False, output, int, 0, 'machdep.cpu.stepping')
        model = _get_field(False, output, int, 0, 'machdep.cpu.model')
        family = _get_field(False, output, int, 0, 'machdep.cpu.family')

        # Flags from all three feature lists, merged and sorted
        flags = _get_field(False, output, None, '', 'machdep.cpu.features').lower().split()
        flags.extend(_get_field(False, output, None, '', 'machdep.cpu.leaf7_features').lower().split())
        flags.extend(_get_field(False, output, None, '', 'machdep.cpu.extfeatures').lower().split())
        flags.sort()

        # Convert from GHz/MHz string to Hz
        hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
        # hw.cpufrequency is already in Hz, so no scaling (scale 0) below
        hz_actual = _get_field(False, output, None, None, 'hw.cpufrequency')
        hz_actual = _to_decimal_string(hz_actual)

        info = {
            'vendor_id_raw' : vendor_id,
            'brand_raw' : processor_brand,

            'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
            'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, 0),
            'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
            'hz_actual' : _hz_short_to_full(hz_actual, 0),

            # machdep.cpu.cache.size is reported in kilobytes
            'l2_cache_size' : int(cache_size) * 1024,

            'stepping' : stepping,
            'model' : model,
            'family' : family,
            'flags' : flags
        }

        info = _filter_dict_keys_with_empty_values(info)
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        return {}
2194
+
2195
def _get_cpu_info_from_sysinfo():
    '''
    Returns the CPU info gathered from sysinfo.
    Returns {} if sysinfo is not found.
    '''

    # Merge both parser versions, letting v2 entries win on any conflicts
    combined = _get_cpu_info_from_sysinfo_v1()
    for key, val in _get_cpu_info_from_sysinfo_v2().items():
        combined[key] = val
    return combined
2205
def _get_cpu_info_from_sysinfo_v1():
    '''
    Returns the CPU info gathered from sysinfo.
    Returns {} if sysinfo is not found.
    '''

    g_trace.header('Trying to get info from sysinfo version 1 ...')

    try:
        # Just return {} if there is no sysinfo
        if not DataSource.has_sysinfo():
            g_trace.fail('Failed to find sysinfo. Skipping ...')
            return {}

        # If sysinfo fails return {}
        returncode, output = DataSource.sysinfo_cpu()
        if output is None or returncode != 0:
            g_trace.fail('Failed to run \"sysinfo -cpu\". Skipping ...')
            return {}

        # Various fields; vendor/cache are not exposed by this output format
        vendor_id = '' #_get_field(False, output, None, None, 'CPU #0: ')
        processor_brand = output.split('CPU #0: "')[1].split('"\n')[0].strip()
        cache_size = '' #_get_field(False, output, None, None, 'machdep.cpu.cache.size')
        stepping = int(output.split(', stepping ')[1].split(',')[0].strip())
        model = int(output.split(', model ')[1].split(',')[0].strip())
        family = int(output.split(', family ')[1].split(',')[0].strip())

        # Flags: double-tab-indented lines hold the feature names
        flags = []
        for line in output.split('\n'):
            if line.startswith('\t\t'):
                for flag in line.strip().lower().split():
                    flags.append(flag)
        flags.sort()

        # Convert from GHz/MHz string to Hz; no separate actual Hz is
        # available here, so reuse the advertised value
        hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
        hz_actual = hz_advertised

        info = {
            'vendor_id_raw' : vendor_id,
            'brand_raw' : processor_brand,

            'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
            'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, scale),
            'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
            'hz_actual' : _hz_short_to_full(hz_actual, scale),

            'l2_cache_size' : _to_friendly_bytes(cache_size),

            'stepping' : stepping,
            'model' : model,
            'family' : family,
            'flags' : flags
        }

        info = _filter_dict_keys_with_empty_values(info)
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        #raise # NOTE: To have this throw on error, uncomment this line
        return {}
2270
def _get_cpu_info_from_sysinfo_v2():
    '''
    Returns the CPU info gathered from sysinfo.
    Returns {} if sysinfo is not found.
    '''

    g_trace.header('Trying to get info from sysinfo version 2 ...')

    try:
        # Just return {} if there is no sysinfo
        if not DataSource.has_sysinfo():
            g_trace.fail('Failed to find sysinfo. Skipping ...')
            return {}

        # If sysinfo fails return {}
        returncode, output = DataSource.sysinfo_cpu()
        if output is None or returncode != 0:
            g_trace.fail('Failed to run \"sysinfo -cpu\". Skipping ...')
            return {}

        # Various fields; vendor/cache are not exposed by this output format
        vendor_id = '' #_get_field(False, output, None, None, 'CPU #0: ')
        processor_brand = output.split('CPU #0: "')[1].split('"\n')[0].strip()
        cache_size = '' #_get_field(False, output, None, None, 'machdep.cpu.cache.size')
        signature = output.split('Signature:')[1].split('\n')[0].strip()

        # Stepping, model, and family are embedded in the signature line
        stepping = int(signature.split('stepping ')[1].split(',')[0].strip())
        model = int(signature.split('model ')[1].split(',')[0].strip())
        family = int(signature.split('family ')[1].split(',')[0].strip())

        # Flags: collect the indented lines under a feature section header
        def get_subsection_flags(output):
            retval = []
            for line in output.split('\n')[1:]:
                # Flag lines are whitespace-indented; stop at the first
                # non-indented line. NOTE(review): the original checked two
                # prefixes that render identically in this copy (likely
                # different indent widths); accepting space or tab here —
                # confirm against real "sysinfo -cpu" output.
                if not (line.startswith(' ') or line.startswith('\t')): break
                for entry in line.strip().lower().split(' '):
                    retval.append(entry)
            return retval

        flags = get_subsection_flags(output.split('Features: ')[1]) + \
            get_subsection_flags(output.split('Extended Features (0x00000001): ')[1]) + \
            get_subsection_flags(output.split('Extended Features (0x80000001): ')[1])
        flags.sort()

        # Convert from GHz/MHz string to Hz; the first non-empty line ends
        # with e.g. "running at 2400 MHz"
        lines = [n for n in output.split('\n') if n]
        raw_hz = lines[0].split('running at ')[1].strip().lower()
        hz_advertised = raw_hz.rstrip('mhz').rstrip('ghz').strip()
        hz_advertised = _to_decimal_string(hz_advertised)
        hz_actual = hz_advertised

        scale = 0
        if raw_hz.endswith('mhz'):
            scale = 6
        elif raw_hz.endswith('ghz'):
            scale = 9

        info = {
            'vendor_id_raw' : vendor_id,
            'brand_raw' : processor_brand,

            'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
            'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, scale),
            'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
            'hz_actual' : _hz_short_to_full(hz_actual, scale),

            'l2_cache_size' : _to_friendly_bytes(cache_size),

            'stepping' : stepping,
            'model' : model,
            'family' : family,
            'flags' : flags
        }

        info = _filter_dict_keys_with_empty_values(info)
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        #raise # NOTE: To have this throw on error, uncomment this line
        return {}
2352
def _get_cpu_info_from_wmic():
    '''
    Returns the CPU info gathered from WMI.
    Returns {} if not on Windows, or wmic is not installed.
    '''
    g_trace.header('Trying to get info from wmic ...')

    try:
        # Just return {} if not Windows or there is no wmic
        if not DataSource.is_windows or not DataSource.has_wmic():
            g_trace.fail('Failed to find WMIC, or not on Windows. Skipping ...')
            return {}

        returncode, output = DataSource.wmic_cpu()
        if output is None or returncode != 0:
            g_trace.fail('Failed to run wmic. Skipping ...')
            return {}

        # Break the list into key values pairs.
        # maxsplit=1 so values that themselves contain '=' do not blow up
        # the dict comprehension below.
        value = output.split("\n")
        value = [s.rstrip().split('=', 1) for s in value if '=' in s]
        value = {k: v for k, v in value if v}

        # Get the advertised MHz
        processor_brand = value.get('Name')
        hz_advertised, scale_advertised = _parse_cpu_brand_string(processor_brand)

        # Get the actual MHz (CurrentClockSpeed is in MHz)
        hz_actual = value.get('CurrentClockSpeed')
        scale_actual = 6
        if hz_actual:
            hz_actual = _to_decimal_string(hz_actual)

        # Get cache sizes
        l2_cache_size = value.get('L2CacheSize') # NOTE: L2CacheSize is in kilobytes
        if l2_cache_size:
            l2_cache_size = int(l2_cache_size) * 1024

        l3_cache_size = value.get('L3CacheSize') # NOTE: L3CacheSize is in kilobytes
        if l3_cache_size:
            l3_cache_size = int(l3_cache_size) * 1024

        # Get family, model, and stepping from a description like
        # "Intel64 Family 6 Model 158 Stepping 10".
        # Default to '' when neither field is present, instead of raising
        # and discarding all the info collected above.
        family, model, stepping = '', '', ''
        description = value.get('Description') or value.get('Caption') or ''
        entries = description.split(' ')

        if 'Family' in entries and entries.index('Family') < len(entries)-1:
            i = entries.index('Family')
            family = int(entries[i + 1])

        if 'Model' in entries and entries.index('Model') < len(entries)-1:
            i = entries.index('Model')
            model = int(entries[i + 1])

        if 'Stepping' in entries and entries.index('Stepping') < len(entries)-1:
            i = entries.index('Stepping')
            stepping = int(entries[i + 1])

        info = {
            'vendor_id_raw' : value.get('Manufacturer'),
            'brand_raw' : processor_brand,

            'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale_advertised),
            'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, scale_actual),
            'hz_advertised' : _hz_short_to_full(hz_advertised, scale_advertised),
            'hz_actual' : _hz_short_to_full(hz_actual, scale_actual),

            'l2_cache_size' : l2_cache_size,
            'l3_cache_size' : l3_cache_size,

            'stepping' : stepping,
            'model' : model,
            'family' : family,
        }

        info = _filter_dict_keys_with_empty_values(info)
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        #raise # NOTE: To have this throw on error, uncomment this line
        return {}
2436
def _get_cpu_info_from_registry():
    '''
    Returns the CPU info gathered from the Windows Registry.
    Returns {} if not on Windows.
    '''

    g_trace.header('Trying to get info from Windows registry ...')

    try:
        # Just return {} if not on Windows
        if not DataSource.is_windows:
            g_trace.fail('Not running on Windows. Skipping ...')
            return {}

        # Get the CPU name
        processor_brand = DataSource.winreg_processor_brand().strip()

        # Get the CPU vendor id
        vendor_id = DataSource.winreg_vendor_id_raw()

        # Get the CPU arch and bits
        arch_string_raw = DataSource.winreg_arch_string_raw()
        arch, bits = _parse_arch(arch_string_raw)

        # Get the actual CPU Hz (the registry reports MHz, hence scale 6 below)
        hz_actual = DataSource.winreg_hz_actual()
        hz_actual = _to_decimal_string(hz_actual)

        # Get the advertised CPU Hz from the brand string
        hz_advertised, scale = _parse_cpu_brand_string(processor_brand)

        # If advertised hz not found, use the actual hz
        if hz_advertised == '0.0':
            scale = 6
            hz_advertised = _to_decimal_string(hz_actual)

        # Get the CPU features
        feature_bits = DataSource.winreg_feature_bits()

        def is_set(bit):
            # The feature bits are stored MSB-first, so test from the top
            mask = 0x80000000 >> bit
            retval = mask & feature_bits > 0
            return retval

        # http://en.wikipedia.org/wiki/CPUID
        # http://unix.stackexchange.com/questions/43539/what-do-the-flags-in-proc-cpuinfo-mean
        # http://www.lohninger.com/helpcsuite/public_constants_cpuid.htm
        # Bits 20 and 28 are reserved and intentionally omitted.
        flags = {
            'fpu' : is_set(0), # Floating Point Unit
            'vme' : is_set(1), # V86 Mode Extensions
            'de' : is_set(2), # Debug Extensions - I/O breakpoints supported
            'pse' : is_set(3), # Page Size Extensions (4 MB pages supported)
            'tsc' : is_set(4), # Time Stamp Counter and RDTSC instruction are available
            'msr' : is_set(5), # Model Specific Registers
            'pae' : is_set(6), # Physical Address Extensions (36 bit address, 2MB pages)
            'mce' : is_set(7), # Machine Check Exception supported
            'cx8' : is_set(8), # Compare Exchange Eight Byte instruction available
            'apic' : is_set(9), # Local APIC present (multiprocessor operation support)
            'sepamd' : is_set(10), # Fast system calls (AMD only)
            'sep' : is_set(11), # Fast system calls
            'mtrr' : is_set(12), # Memory Type Range Registers
            'pge' : is_set(13), # Page Global Enable
            'mca' : is_set(14), # Machine Check Architecture
            'cmov' : is_set(15), # Conditional MOVe instructions
            'pat' : is_set(16), # Page Attribute Table
            'pse36' : is_set(17), # 36 bit Page Size Extensions
            'serial' : is_set(18), # Processor Serial Number
            'clflush' : is_set(19), # Cache Flush
            'dts' : is_set(21), # Debug Trace Store
            'acpi' : is_set(22), # ACPI support
            'mmx' : is_set(23), # MultiMedia Extensions
            'fxsr' : is_set(24), # FXSAVE and FXRSTOR instructions
            'sse' : is_set(25), # SSE instructions
            'sse2' : is_set(26), # SSE2 (WNI) instructions
            'ss' : is_set(27), # self snoop
            'tm' : is_set(29), # Automatic clock control
            'ia64' : is_set(30), # IA64 instructions
            '3dnow' : is_set(31) # 3DNow! instructions available
        }

        # Get a list of only the flags that are true
        flags = [k for k, v in flags.items() if v]
        flags.sort()

        info = {
            'vendor_id_raw' : vendor_id,
            'brand_raw' : processor_brand,

            'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
            'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, 6),
            'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
            'hz_actual' : _hz_short_to_full(hz_actual, 6),

            'flags' : flags
        }

        info = _filter_dict_keys_with_empty_values(info)
        g_trace.success()
        return info
    except Exception as err:
        g_trace.fail(err)
        return {}
2541
+ def _get_cpu_info_from_kstat():
2542
+ '''
2543
+ Returns the CPU info gathered from isainfo and kstat.
2544
+ Returns {} if isainfo or kstat are not found.
2545
+ '''
2546
+
2547
+ g_trace.header('Tying to get info from kstat ...')
2548
+
2549
+ try:
2550
+ # Just return {} if there is no isainfo or kstat
2551
+ if not DataSource.has_isainfo() or not DataSource.has_kstat():
2552
+ g_trace.fail('Failed to find isinfo or kstat. Skipping ...')
2553
+ return {}
2554
+
2555
+ # If isainfo fails return {}
2556
+ returncode, flag_output = DataSource.isainfo_vb()
2557
+ if flag_output is None or returncode != 0:
2558
+ g_trace.fail('Failed to run \"isainfo -vb\". Skipping ...')
2559
+ return {}
2560
+
2561
+ # If kstat fails return {}
2562
+ returncode, kstat = DataSource.kstat_m_cpu_info()
2563
+ if kstat is None or returncode != 0:
2564
+ g_trace.fail('Failed to run \"kstat -m cpu_info\". Skipping ...')
2565
+ return {}
2566
+
2567
+ # Various fields
2568
+ vendor_id = kstat.split('\tvendor_id ')[1].split('\n')[0].strip()
2569
+ processor_brand = kstat.split('\tbrand ')[1].split('\n')[0].strip()
2570
+ stepping = int(kstat.split('\tstepping ')[1].split('\n')[0].strip())
2571
+ model = int(kstat.split('\tmodel ')[1].split('\n')[0].strip())
2572
+ family = int(kstat.split('\tfamily ')[1].split('\n')[0].strip())
2573
+
2574
+ # Flags
2575
+ flags = flag_output.strip().split('\n')[-1].strip().lower().split()
2576
+ flags.sort()
2577
+
2578
+ # Convert from GHz/MHz string to Hz
2579
+ scale = 6
2580
+ hz_advertised = kstat.split('\tclock_MHz ')[1].split('\n')[0].strip()
2581
+ hz_advertised = _to_decimal_string(hz_advertised)
2582
+
2583
+ # Convert from GHz/MHz string to Hz
2584
+ hz_actual = kstat.split('\tcurrent_clock_Hz ')[1].split('\n')[0].strip()
2585
+ hz_actual = _to_decimal_string(hz_actual)
2586
+
2587
+ info = {
2588
+ 'vendor_id_raw' : vendor_id,
2589
+ 'brand_raw' : processor_brand,
2590
+
2591
+ 'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
2592
+ 'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, 0),
2593
+ 'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
2594
+ 'hz_actual' : _hz_short_to_full(hz_actual, 0),
2595
+
2596
+ 'stepping' : stepping,
2597
+ 'model' : model,
2598
+ 'family' : family,
2599
+ 'flags' : flags
2600
+ }
2601
+
2602
+ info = _filter_dict_keys_with_empty_values(info)
2603
+ g_trace.success()
2604
+ return info
2605
+ except Exception as err:
2606
+ g_trace.fail(err)
2607
+ return {}
2608
+
2609
+ def _get_cpu_info_from_platform_uname():
2610
+
2611
+ g_trace.header('Tying to get info from platform.uname ...')
2612
+
2613
+ try:
2614
+ uname = DataSource.uname_string_raw.split(',')[0]
2615
+
2616
+ family, model, stepping = (None, None, None)
2617
+ entries = uname.split(' ')
2618
+
2619
+ if 'Family' in entries and entries.index('Family') < len(entries)-1:
2620
+ i = entries.index('Family')
2621
+ family = int(entries[i + 1])
2622
+
2623
+ if 'Model' in entries and entries.index('Model') < len(entries)-1:
2624
+ i = entries.index('Model')
2625
+ model = int(entries[i + 1])
2626
+
2627
+ if 'Stepping' in entries and entries.index('Stepping') < len(entries)-1:
2628
+ i = entries.index('Stepping')
2629
+ stepping = int(entries[i + 1])
2630
+
2631
+ info = {
2632
+ 'family' : family,
2633
+ 'model' : model,
2634
+ 'stepping' : stepping
2635
+ }
2636
+ info = _filter_dict_keys_with_empty_values(info)
2637
+ g_trace.success()
2638
+ return info
2639
+ except Exception as err:
2640
+ g_trace.fail(err)
2641
+ return {}
2642
+
2643
+ def _get_cpu_info_internal():
2644
+ '''
2645
+ Returns the CPU info by using the best sources of information for your OS.
2646
+ Returns {} if nothing is found.
2647
+ '''
2648
+
2649
+ g_trace.write('!' * 80)
2650
+
2651
+ # Get the CPU arch and bits
2652
+ arch, bits = _parse_arch(DataSource.arch_string_raw)
2653
+
2654
+ friendly_maxsize = { 2**31-1: '32 bit', 2**63-1: '64 bit' }.get(sys.maxsize) or 'unknown bits'
2655
+ friendly_version = "{0}.{1}.{2}.{3}.{4}".format(*sys.version_info)
2656
+ PYTHON_VERSION = "{0} ({1})".format(friendly_version, friendly_maxsize)
2657
+
2658
+ info = {
2659
+ 'python_version' : PYTHON_VERSION,
2660
+ 'cpuinfo_version' : CPUINFO_VERSION,
2661
+ 'cpuinfo_version_string' : CPUINFO_VERSION_STRING,
2662
+ 'arch' : arch,
2663
+ 'bits' : bits,
2664
+ 'count' : DataSource.cpu_count,
2665
+ 'arch_string_raw' : DataSource.arch_string_raw,
2666
+ }
2667
+
2668
+ g_trace.write("python_version: {0}".format(info['python_version']))
2669
+ g_trace.write("cpuinfo_version: {0}".format(info['cpuinfo_version']))
2670
+ g_trace.write("arch: {0}".format(info['arch']))
2671
+ g_trace.write("bits: {0}".format(info['bits']))
2672
+ g_trace.write("count: {0}".format(info['count']))
2673
+ g_trace.write("arch_string_raw: {0}".format(info['arch_string_raw']))
2674
+
2675
+ # Try the Windows wmic
2676
+ _copy_new_fields(info, _get_cpu_info_from_wmic())
2677
+
2678
+ # Try the Windows registry
2679
+ _copy_new_fields(info, _get_cpu_info_from_registry())
2680
+
2681
+ # Try /proc/cpuinfo
2682
+ _copy_new_fields(info, _get_cpu_info_from_proc_cpuinfo())
2683
+
2684
+ # Try cpufreq-info
2685
+ _copy_new_fields(info, _get_cpu_info_from_cpufreq_info())
2686
+
2687
+ # Try LSCPU
2688
+ _copy_new_fields(info, _get_cpu_info_from_lscpu())
2689
+
2690
+ # Try sysctl
2691
+ _copy_new_fields(info, _get_cpu_info_from_sysctl())
2692
+
2693
+ # Try kstat
2694
+ _copy_new_fields(info, _get_cpu_info_from_kstat())
2695
+
2696
+ # Try dmesg
2697
+ _copy_new_fields(info, _get_cpu_info_from_dmesg())
2698
+
2699
+ # Try /var/run/dmesg.boot
2700
+ _copy_new_fields(info, _get_cpu_info_from_cat_var_run_dmesg_boot())
2701
+
2702
+ # Try lsprop ibm,pa-features
2703
+ _copy_new_fields(info, _get_cpu_info_from_ibm_pa_features())
2704
+
2705
+ # Try sysinfo
2706
+ _copy_new_fields(info, _get_cpu_info_from_sysinfo())
2707
+
2708
+ # Try querying the CPU cpuid register
2709
+ # FIXME: This should print stdout and stderr to trace log
2710
+ _copy_new_fields(info, _get_cpu_info_from_cpuid())
2711
+
2712
+ # Try platform.uname
2713
+ _copy_new_fields(info, _get_cpu_info_from_platform_uname())
2714
+
2715
+ g_trace.write('!' * 80)
2716
+
2717
+ return info
2718
+
2719
+ def get_cpu_info_json():
2720
+ '''
2721
+ Returns the CPU info by using the best sources of information for your OS.
2722
+ Returns the result in a json string
2723
+ '''
2724
+
2725
+ import json
2726
+
2727
+ output = None
2728
+
2729
+ # If running under pyinstaller, run normally
2730
+ if getattr(sys, 'frozen', False):
2731
+ info = _get_cpu_info_internal()
2732
+ output = json.dumps(info)
2733
+ output = "{0}".format(output)
2734
+ # if not running under pyinstaller, run in another process.
2735
+ # This is done because multiprocesing has a design flaw that
2736
+ # causes non main programs to run multiple times on Windows.
2737
+ else:
2738
+ from subprocess import Popen, PIPE
2739
+
2740
+ command = [sys.executable, __file__, '--json']
2741
+ p1 = Popen(command, stdout=PIPE, stderr=PIPE, stdin=PIPE)
2742
+ output = p1.communicate()[0]
2743
+
2744
+ if p1.returncode != 0:
2745
+ return "{}"
2746
+
2747
+ output = output.decode(encoding='UTF-8')
2748
+
2749
+ return output
2750
+
2751
+ def get_cpu_info():
2752
+ '''
2753
+ Returns the CPU info by using the best sources of information for your OS.
2754
+ Returns the result in a dict
2755
+ '''
2756
+
2757
+ import json
2758
+
2759
+ output = get_cpu_info_json()
2760
+
2761
+ # Convert JSON to Python with non unicode strings
2762
+ output = json.loads(output, object_hook = _utf_to_str)
2763
+
2764
+ return output
2765
+
2766
+ def main():
2767
+ from argparse import ArgumentParser
2768
+ import json
2769
+
2770
+ # Parse args
2771
+ parser = ArgumentParser(description='Gets CPU info with pure Python')
2772
+ parser.add_argument('--json', action='store_true', help='Return the info in JSON format')
2773
+ parser.add_argument('--version', action='store_true', help='Return the version of py-cpuinfo')
2774
+ parser.add_argument('--trace', action='store_true', help='Traces code paths used to find CPU info to file')
2775
+ args = parser.parse_args()
2776
+
2777
+ global g_trace
2778
+ g_trace = Trace(args.trace, False)
2779
+
2780
+ try:
2781
+ _check_arch()
2782
+ except Exception as err:
2783
+ sys.stderr.write(str(err) + "\n")
2784
+ sys.exit(1)
2785
+
2786
+ info = _get_cpu_info_internal()
2787
+
2788
+ if not info:
2789
+ sys.stderr.write("Failed to find cpu info\n")
2790
+ sys.exit(1)
2791
+
2792
+ if args.json:
2793
+ print(json.dumps(info))
2794
+ elif args.version:
2795
+ print(CPUINFO_VERSION_STRING)
2796
+ else:
2797
+ print('Python Version: {0}'.format(info.get('python_version', '')))
2798
+ print('Cpuinfo Version: {0}'.format(info.get('cpuinfo_version_string', '')))
2799
+ print('Vendor ID Raw: {0}'.format(info.get('vendor_id_raw', '')))
2800
+ print('Hardware Raw: {0}'.format(info.get('hardware_raw', '')))
2801
+ print('Brand Raw: {0}'.format(info.get('brand_raw', '')))
2802
+ print('Hz Advertised Friendly: {0}'.format(info.get('hz_advertised_friendly', '')))
2803
+ print('Hz Actual Friendly: {0}'.format(info.get('hz_actual_friendly', '')))
2804
+ print('Hz Advertised: {0}'.format(info.get('hz_advertised', '')))
2805
+ print('Hz Actual: {0}'.format(info.get('hz_actual', '')))
2806
+ print('Arch: {0}'.format(info.get('arch', '')))
2807
+ print('Bits: {0}'.format(info.get('bits', '')))
2808
+ print('Count: {0}'.format(info.get('count', '')))
2809
+ print('Arch String Raw: {0}'.format(info.get('arch_string_raw', '')))
2810
+ print('L1 Data Cache Size: {0}'.format(info.get('l1_data_cache_size', '')))
2811
+ print('L1 Instruction Cache Size: {0}'.format(info.get('l1_instruction_cache_size', '')))
2812
+ print('L2 Cache Size: {0}'.format(info.get('l2_cache_size', '')))
2813
+ print('L2 Cache Line Size: {0}'.format(info.get('l2_cache_line_size', '')))
2814
+ print('L2 Cache Associativity: {0}'.format(info.get('l2_cache_associativity', '')))
2815
+ print('L3 Cache Size: {0}'.format(info.get('l3_cache_size', '')))
2816
+ print('Stepping: {0}'.format(info.get('stepping', '')))
2817
+ print('Model: {0}'.format(info.get('model', '')))
2818
+ print('Family: {0}'.format(info.get('family', '')))
2819
+ print('Processor Type: {0}'.format(info.get('processor_type', '')))
2820
+ print('Flags: {0}'.format(', '.join(info.get('flags', ''))))
2821
+
2822
+
2823
+ if __name__ == '__main__':
2824
+ main()
2825
+ else:
2826
+ g_trace = Trace(False, False)
2827
+ _check_arch()
evalkit_cambrian/lib/python3.10/site-packages/ftfy/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (21.7 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/ftfy/__pycache__/badness.cpython-310.pyc ADDED
Binary file (6.59 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/ftfy/__pycache__/chardata.cpython-310.pyc ADDED
Binary file (5.54 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ """
2
+ Cross-specification, implementation-agnostic JSON referencing.
3
+ """
4
+
5
+ from referencing._core import Anchor, Registry, Resource, Specification
6
+
7
+ __all__ = ["Anchor", "Registry", "Resource", "Specification"]
evalkit_cambrian/lib/python3.10/site-packages/referencing/__pycache__/_attrs.cpython-310.pyc ADDED
Binary file (1.28 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (5.13 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/__pycache__/jsonschema.cpython-310.pyc ADDED
Binary file (11.5 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/__pycache__/retrieval.cpython-310.pyc ADDED
Binary file (3.17 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/_attrs.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from typing import NoReturn, TypeVar
4
+
5
+ from attrs import define as _define, frozen as _frozen
6
+
7
+ _T = TypeVar("_T")
8
+
9
+
10
+ def define(cls: type[_T]) -> type[_T]: # pragma: no cover
11
+ cls.__init_subclass__ = _do_not_subclass
12
+ return _define(cls)
13
+
14
+
15
+ def frozen(cls: type[_T]) -> type[_T]:
16
+ cls.__init_subclass__ = _do_not_subclass
17
+ return _frozen(cls)
18
+
19
+
20
+ class UnsupportedSubclassing(Exception):
21
+ def __str__(self):
22
+ return (
23
+ "Subclassing is not part of referencing's public API. "
24
+ "If no other suitable API exists for what you're trying to do, "
25
+ "feel free to file an issue asking for one."
26
+ )
27
+
28
+
29
+ @staticmethod
30
+ def _do_not_subclass() -> NoReturn: # pragma: no cover
31
+ raise UnsupportedSubclassing()
evalkit_cambrian/lib/python3.10/site-packages/referencing/_attrs.pyi ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Callable, TypeVar, Union
2
+
3
+ from attr import attrib, field
4
+
5
+ class UnsupportedSubclassing(Exception): ...
6
+
7
+ _T = TypeVar("_T")
8
+
9
+ def __dataclass_transform__(
10
+ *,
11
+ frozen_default: bool = False,
12
+ field_descriptors: tuple[Union[type, Callable[..., Any]], ...] = ...,
13
+ ) -> Callable[[_T], _T]: ...
14
+ @__dataclass_transform__(field_descriptors=(attrib, field))
15
+ def define(cls: type[_T]) -> type[_T]: ...
16
+ @__dataclass_transform__(
17
+ frozen_default=True,
18
+ field_descriptors=(attrib, field),
19
+ )
20
+ def frozen(cls: type[_T]) -> type[_T]: ...
evalkit_cambrian/lib/python3.10/site-packages/referencing/_core.py ADDED
@@ -0,0 +1,739 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import Iterable, Iterator, Sequence
4
+ from enum import Enum
5
+ from typing import Any, Callable, ClassVar, Generic, Protocol
6
+ from urllib.parse import unquote, urldefrag, urljoin
7
+
8
+ from attrs import evolve, field
9
+ from rpds import HashTrieMap, HashTrieSet, List
10
+
11
+ try:
12
+ from typing_extensions import TypeVar
13
+ except ImportError: # pragma: no cover
14
+ from typing import TypeVar
15
+
16
+ from referencing import exceptions
17
+ from referencing._attrs import frozen
18
+ from referencing.typing import URI, Anchor as AnchorType, D, Mapping, Retrieve
19
+
20
+ EMPTY_UNCRAWLED: HashTrieSet[URI] = HashTrieSet()
21
+ EMPTY_PREVIOUS_RESOLVERS: List[URI] = List()
22
+
23
+
24
+ class _Unset(Enum):
25
+ """
26
+ What sillyness...
27
+ """
28
+
29
+ SENTINEL = 1
30
+
31
+
32
+ _UNSET = _Unset.SENTINEL
33
+
34
+
35
+ class _MaybeInSubresource(Protocol[D]):
36
+ def __call__(
37
+ self,
38
+ segments: Sequence[int | str],
39
+ resolver: Resolver[D],
40
+ subresource: Resource[D],
41
+ ) -> Resolver[D]: ...
42
+
43
+
44
+ def _detect_or_error(contents: D) -> Specification[D]:
45
+ if not isinstance(contents, Mapping):
46
+ raise exceptions.CannotDetermineSpecification(contents)
47
+
48
+ jsonschema_dialect_id = contents.get("$schema") # type: ignore[reportUnknownMemberType]
49
+ if not isinstance(jsonschema_dialect_id, str):
50
+ raise exceptions.CannotDetermineSpecification(contents)
51
+
52
+ from referencing.jsonschema import specification_with
53
+
54
+ return specification_with(jsonschema_dialect_id)
55
+
56
+
57
+ def _detect_or_default(
58
+ default: Specification[D],
59
+ ) -> Callable[[D], Specification[D]]:
60
+ def _detect(contents: D) -> Specification[D]:
61
+ if not isinstance(contents, Mapping):
62
+ return default
63
+
64
+ jsonschema_dialect_id = contents.get("$schema") # type: ignore[reportUnknownMemberType]
65
+ if jsonschema_dialect_id is None:
66
+ return default
67
+
68
+ from referencing.jsonschema import specification_with
69
+
70
+ return specification_with(
71
+ jsonschema_dialect_id, # type: ignore[reportUnknownArgumentType]
72
+ default=default,
73
+ )
74
+
75
+ return _detect
76
+
77
+
78
+ class _SpecificationDetector:
79
+ def __get__(
80
+ self,
81
+ instance: Specification[D] | None,
82
+ cls: type[Specification[D]],
83
+ ) -> Callable[[D], Specification[D]]:
84
+ if instance is None:
85
+ return _detect_or_error
86
+ else:
87
+ return _detect_or_default(instance)
88
+
89
+
90
+ @frozen
91
+ class Specification(Generic[D]):
92
+ """
93
+ A specification which defines referencing behavior.
94
+
95
+ The various methods of a `Specification` allow for varying referencing
96
+ behavior across JSON Schema specification versions, etc.
97
+ """
98
+
99
+ #: A short human-readable name for the specification, used for debugging.
100
+ name: str
101
+
102
+ #: Find the ID of a given document.
103
+ id_of: Callable[[D], URI | None]
104
+
105
+ #: Retrieve the subresources of the given document (without traversing into
106
+ #: the subresources themselves).
107
+ subresources_of: Callable[[D], Iterable[D]]
108
+
109
+ #: While resolving a JSON pointer, conditionally enter a subresource
110
+ #: (if e.g. we have just entered a keyword whose value is a subresource)
111
+ maybe_in_subresource: _MaybeInSubresource[D]
112
+
113
+ #: Retrieve the anchors contained in the given document.
114
+ _anchors_in: Callable[
115
+ [Specification[D], D],
116
+ Iterable[AnchorType[D]],
117
+ ] = field(alias="anchors_in")
118
+
119
+ #: An opaque specification where resources have no subresources
120
+ #: nor internal identifiers.
121
+ OPAQUE: ClassVar[Specification[Any]]
122
+
123
+ #: Attempt to discern which specification applies to the given contents.
124
+ #:
125
+ #: May be called either as an instance method or as a class method, with
126
+ #: slightly different behavior in the following case:
127
+ #:
128
+ #: Recall that not all contents contains enough internal information about
129
+ #: which specification it is written for -- the JSON Schema ``{}``,
130
+ #: for instance, is valid under many different dialects and may be
131
+ #: interpreted as any one of them.
132
+ #:
133
+ #: When this method is used as an instance method (i.e. called on a
134
+ #: specific specification), that specification is used as the default
135
+ #: if the given contents are unidentifiable.
136
+ #:
137
+ #: On the other hand when called as a class method, an error is raised.
138
+ #:
139
+ #: To reiterate, ``DRAFT202012.detect({})`` will return ``DRAFT202012``
140
+ #: whereas the class method ``Specification.detect({})`` will raise an
141
+ #: error.
142
+ #:
143
+ #: (Note that of course ``DRAFT202012.detect(...)`` may return some other
144
+ #: specification when given a schema which *does* identify as being for
145
+ #: another version).
146
+ #:
147
+ #: Raises:
148
+ #:
149
+ #: `CannotDetermineSpecification`
150
+ #:
151
+ #: if the given contents don't have any discernible
152
+ #: information which could be used to guess which
153
+ #: specification they identify as
154
+ detect = _SpecificationDetector()
155
+
156
+ def __repr__(self) -> str:
157
+ return f"<Specification name={self.name!r}>"
158
+
159
+ def anchors_in(self, contents: D):
160
+ """
161
+ Retrieve the anchors contained in the given document.
162
+ """
163
+ return self._anchors_in(self, contents)
164
+
165
+ def create_resource(self, contents: D) -> Resource[D]:
166
+ """
167
+ Create a resource which is interpreted using this specification.
168
+ """
169
+ return Resource(contents=contents, specification=self)
170
+
171
+
172
+ Specification.OPAQUE = Specification(
173
+ name="opaque",
174
+ id_of=lambda contents: None,
175
+ subresources_of=lambda contents: [],
176
+ anchors_in=lambda specification, contents: [],
177
+ maybe_in_subresource=lambda segments, resolver, subresource: resolver,
178
+ )
179
+
180
+
181
+ @frozen
182
+ class Resource(Generic[D]):
183
+ r"""
184
+ A document (deserialized JSON) with a concrete interpretation under a spec.
185
+
186
+ In other words, a Python object, along with an instance of `Specification`
187
+ which describes how the document interacts with referencing -- both
188
+ internally (how it refers to other `Resource`\ s) and externally (how it
189
+ should be identified such that it is referenceable by other documents).
190
+ """
191
+
192
+ contents: D
193
+ _specification: Specification[D] = field(alias="specification")
194
+
195
+ @classmethod
196
+ def from_contents(
197
+ cls,
198
+ contents: D,
199
+ default_specification: (
200
+ type[Specification[D]] | Specification[D]
201
+ ) = Specification,
202
+ ) -> Resource[D]:
203
+ """
204
+ Create a resource guessing which specification applies to the contents.
205
+
206
+ Raises:
207
+
208
+ `CannotDetermineSpecification`
209
+
210
+ if the given contents don't have any discernible
211
+ information which could be used to guess which
212
+ specification they identify as
213
+
214
+ """
215
+ specification = default_specification.detect(contents)
216
+ return specification.create_resource(contents=contents)
217
+
218
+ @classmethod
219
+ def opaque(cls, contents: D) -> Resource[D]:
220
+ """
221
+ Create an opaque `Resource` -- i.e. one with opaque specification.
222
+
223
+ See `Specification.OPAQUE` for details.
224
+ """
225
+ return Specification.OPAQUE.create_resource(contents=contents)
226
+
227
+ def id(self) -> URI | None:
228
+ """
229
+ Retrieve this resource's (specification-specific) identifier.
230
+ """
231
+ id = self._specification.id_of(self.contents)
232
+ if id is None:
233
+ return
234
+ return id.rstrip("#")
235
+
236
+ def subresources(self) -> Iterable[Resource[D]]:
237
+ """
238
+ Retrieve this resource's subresources.
239
+ """
240
+ return (
241
+ Resource.from_contents(
242
+ each,
243
+ default_specification=self._specification,
244
+ )
245
+ for each in self._specification.subresources_of(self.contents)
246
+ )
247
+
248
+ def anchors(self) -> Iterable[AnchorType[D]]:
249
+ """
250
+ Retrieve this resource's (specification-specific) identifier.
251
+ """
252
+ return self._specification.anchors_in(self.contents)
253
+
254
+ def pointer(self, pointer: str, resolver: Resolver[D]) -> Resolved[D]:
255
+ """
256
+ Resolve the given JSON pointer.
257
+
258
+ Raises:
259
+
260
+ `exceptions.PointerToNowhere`
261
+
262
+ if the pointer points to a location not present in the document
263
+
264
+ """
265
+ if not pointer:
266
+ return Resolved(contents=self.contents, resolver=resolver)
267
+
268
+ contents = self.contents
269
+ segments: list[int | str] = []
270
+ for segment in unquote(pointer[1:]).split("/"):
271
+ if isinstance(contents, Sequence):
272
+ segment = int(segment)
273
+ else:
274
+ segment = segment.replace("~1", "/").replace("~0", "~")
275
+ try:
276
+ contents = contents[segment] # type: ignore[reportUnknownArgumentType]
277
+ except LookupError as lookup_error:
278
+ error = exceptions.PointerToNowhere(ref=pointer, resource=self)
279
+ raise error from lookup_error
280
+
281
+ segments.append(segment)
282
+ last = resolver
283
+ resolver = self._specification.maybe_in_subresource(
284
+ segments=segments,
285
+ resolver=resolver,
286
+ subresource=self._specification.create_resource(contents),
287
+ )
288
+ if resolver is not last:
289
+ segments = []
290
+ return Resolved(contents=contents, resolver=resolver) # type: ignore[reportUnknownArgumentType]
291
+
292
+
293
+ def _fail_to_retrieve(uri: URI):
294
+ raise exceptions.NoSuchResource(ref=uri)
295
+
296
+
297
+ @frozen
298
+ class Registry(Mapping[URI, Resource[D]]):
299
+ r"""
300
+ A registry of `Resource`\ s, each identified by their canonical URIs.
301
+
302
+ Registries store a collection of in-memory resources, and optionally
303
+ enable additional resources which may be stored elsewhere (e.g. in a
304
+ database, a separate set of files, over the network, etc.).
305
+
306
+ They also lazily walk their known resources, looking for subresources
307
+ within them. In other words, subresources contained within any added
308
+ resources will be retrievable via their own IDs (though this discovery of
309
+ subresources will be delayed until necessary).
310
+
311
+ Registries are immutable, and their methods return new instances of the
312
+ registry with the additional resources added to them.
313
+
314
+ The ``retrieve`` argument can be used to configure retrieval of resources
315
+ dynamically, either over the network, from a database, or the like.
316
+ Pass it a callable which will be called if any URI not present in the
317
+ registry is accessed. It must either return a `Resource` or else raise a
318
+ `NoSuchResource` exception indicating that the resource does not exist
319
+ even according to the retrieval logic.
320
+ """
321
+
322
+ _resources: HashTrieMap[URI, Resource[D]] = field(
323
+ default=HashTrieMap(),
324
+ converter=HashTrieMap.convert, # type: ignore[reportGeneralTypeIssues]
325
+ alias="resources",
326
+ )
327
+ _anchors: HashTrieMap[tuple[URI, str], AnchorType[D]] = HashTrieMap()
328
+ _uncrawled: HashTrieSet[URI] = EMPTY_UNCRAWLED
329
+ _retrieve: Retrieve[D] = field(default=_fail_to_retrieve, alias="retrieve")
330
+
331
+ def __getitem__(self, uri: URI) -> Resource[D]:
332
+ """
333
+ Return the (already crawled) `Resource` identified by the given URI.
334
+ """
335
+ try:
336
+ return self._resources[uri.rstrip("#")]
337
+ except KeyError:
338
+ raise exceptions.NoSuchResource(ref=uri) from None
339
+
340
+ def __iter__(self) -> Iterator[URI]:
341
+ """
342
+ Iterate over all crawled URIs in the registry.
343
+ """
344
+ return iter(self._resources)
345
+
346
+ def __len__(self) -> int:
347
+ """
348
+ Count the total number of fully crawled resources in this registry.
349
+ """
350
+ return len(self._resources)
351
+
352
+ def __rmatmul__(
353
+ self,
354
+ new: Resource[D] | Iterable[Resource[D]],
355
+ ) -> Registry[D]:
356
+ """
357
+ Create a new registry with resource(s) added using their internal IDs.
358
+
359
+ Resources must have a internal IDs (e.g. the :kw:`$id` keyword in
360
+ modern JSON Schema versions), otherwise an error will be raised.
361
+
362
+ Both a single resource as well as an iterable of resources works, i.e.:
363
+
364
+ * ``resource @ registry`` or
365
+
366
+ * ``[iterable, of, multiple, resources] @ registry``
367
+
368
+ which -- again, assuming the resources have internal IDs -- is
369
+ equivalent to calling `Registry.with_resources` as such:
370
+
371
+ .. code:: python
372
+
373
+ registry.with_resources(
374
+ (resource.id(), resource) for resource in new_resources
375
+ )
376
+
377
+ Raises:
378
+
379
+ `NoInternalID`
380
+
381
+ if the resource(s) in fact do not have IDs
382
+
383
+ """
384
+ if isinstance(new, Resource):
385
+ new = (new,)
386
+
387
+ resources = self._resources
388
+ uncrawled = self._uncrawled
389
+ for resource in new:
390
+ id = resource.id()
391
+ if id is None:
392
+ raise exceptions.NoInternalID(resource=resource)
393
+ uncrawled = uncrawled.insert(id)
394
+ resources = resources.insert(id, resource)
395
+ return evolve(self, resources=resources, uncrawled=uncrawled)
396
+
397
+ def __repr__(self) -> str:
398
+ size = len(self)
399
+ pluralized = "resource" if size == 1 else "resources"
400
+ if self._uncrawled:
401
+ uncrawled = len(self._uncrawled)
402
+ if uncrawled == size:
403
+ summary = f"uncrawled {pluralized}"
404
+ else:
405
+ summary = f"{pluralized}, {uncrawled} uncrawled"
406
+ else:
407
+ summary = f"{pluralized}"
408
+ return f"<Registry ({size} {summary})>"
409
+
410
+ def get_or_retrieve(self, uri: URI) -> Retrieved[D, Resource[D]]:
411
+ """
412
+ Get a resource from the registry, crawling or retrieving if necessary.
413
+
414
+ May involve crawling to find the given URI if it is not already known,
415
+ so the returned object is a `Retrieved` object which contains both the
416
+ resource value as well as the registry which ultimately contained it.
417
+ """
418
+ resource = self._resources.get(uri)
419
+ if resource is not None:
420
+ return Retrieved(registry=self, value=resource)
421
+
422
+ registry = self.crawl()
423
+ resource = registry._resources.get(uri)
424
+ if resource is not None:
425
+ return Retrieved(registry=registry, value=resource)
426
+
427
+ try:
428
+ resource = registry._retrieve(uri)
429
+ except (
430
+ exceptions.CannotDetermineSpecification,
431
+ exceptions.NoSuchResource,
432
+ ):
433
+ raise
434
+ except Exception as error:
435
+ raise exceptions.Unretrievable(ref=uri) from error
436
+ else:
437
+ registry = registry.with_resource(uri, resource)
438
+ return Retrieved(registry=registry, value=resource)
439
+
440
+ def remove(self, uri: URI):
441
+ """
442
+ Return a registry with the resource identified by a given URI removed.
443
+ """
444
+ if uri not in self._resources:
445
+ raise exceptions.NoSuchResource(ref=uri)
446
+
447
+ return evolve(
448
+ self,
449
+ resources=self._resources.remove(uri),
450
+ uncrawled=self._uncrawled.discard(uri),
451
+ anchors=HashTrieMap(
452
+ (k, v) for k, v in self._anchors.items() if k[0] != uri
453
+ ),
454
+ )
455
+
456
+ def anchor(self, uri: URI, name: str):
457
+ """
458
+ Retrieve a given anchor from a resource which must already be crawled.
459
+ """
460
+ value = self._anchors.get((uri, name))
461
+ if value is not None:
462
+ return Retrieved(value=value, registry=self)
463
+
464
+ registry = self.crawl()
465
+ value = registry._anchors.get((uri, name))
466
+ if value is not None:
467
+ return Retrieved(value=value, registry=registry)
468
+
469
+ resource = self[uri]
470
+ canonical_uri = resource.id()
471
+ if canonical_uri is not None:
472
+ value = registry._anchors.get((canonical_uri, name))
473
+ if value is not None:
474
+ return Retrieved(value=value, registry=registry)
475
+
476
+ if "/" in name:
477
+ raise exceptions.InvalidAnchor(
478
+ ref=uri,
479
+ resource=resource,
480
+ anchor=name,
481
+ )
482
+ raise exceptions.NoSuchAnchor(ref=uri, resource=resource, anchor=name)
483
+
484
+ def contents(self, uri: URI) -> D:
485
+ """
486
+ Retrieve the (already crawled) contents identified by the given URI.
487
+ """
488
+ return self[uri].contents
489
+
490
+ def crawl(self) -> Registry[D]:
491
+ """
492
+ Crawl all added resources, discovering subresources.
493
+ """
494
+ resources = self._resources
495
+ anchors = self._anchors
496
+ uncrawled = [(uri, resources[uri]) for uri in self._uncrawled]
497
+ while uncrawled:
498
+ uri, resource = uncrawled.pop()
499
+
500
+ id = resource.id()
501
+ if id is not None:
502
+ uri = urljoin(uri, id)
503
+ resources = resources.insert(uri, resource)
504
+ for each in resource.anchors():
505
+ anchors = anchors.insert((uri, each.name), each)
506
+ uncrawled.extend((uri, each) for each in resource.subresources())
507
+ return evolve(
508
+ self,
509
+ resources=resources,
510
+ anchors=anchors,
511
+ uncrawled=EMPTY_UNCRAWLED,
512
+ )
513
+
514
+ def with_resource(self, uri: URI, resource: Resource[D]):
515
+ """
516
+ Add the given `Resource` to the registry, without crawling it.
517
+ """
518
+ return self.with_resources([(uri, resource)])
519
+
520
+ def with_resources(
521
+ self,
522
+ pairs: Iterable[tuple[URI, Resource[D]]],
523
+ ) -> Registry[D]:
524
+ r"""
525
+ Add the given `Resource`\ s to the registry, without crawling them.
526
+ """
527
+ resources = self._resources
528
+ uncrawled = self._uncrawled
529
+ for uri, resource in pairs:
530
+ # Empty fragment URIs are equivalent to URIs without the fragment.
531
+ # TODO: Is this true for non JSON Schema resources? Probably not.
532
+ uri = uri.rstrip("#")
533
+ uncrawled = uncrawled.insert(uri)
534
+ resources = resources.insert(uri, resource)
535
+ return evolve(self, resources=resources, uncrawled=uncrawled)
536
+
537
+ def with_contents(
538
+ self,
539
+ pairs: Iterable[tuple[URI, D]],
540
+ **kwargs: Any,
541
+ ) -> Registry[D]:
542
+ r"""
543
+ Add the given contents to the registry, autodetecting when necessary.
544
+ """
545
+ return self.with_resources(
546
+ (uri, Resource.from_contents(each, **kwargs))
547
+ for uri, each in pairs
548
+ )
549
+
550
+ def combine(self, *registries: Registry[D]) -> Registry[D]:
551
+ """
552
+ Combine together one or more other registries, producing a unified one.
553
+ """
554
+ if registries == (self,):
555
+ return self
556
+ resources = self._resources
557
+ anchors = self._anchors
558
+ uncrawled = self._uncrawled
559
+ retrieve = self._retrieve
560
+ for registry in registries:
561
+ resources = resources.update(registry._resources)
562
+ anchors = anchors.update(registry._anchors)
563
+ uncrawled = uncrawled.update(registry._uncrawled)
564
+
565
+ if registry._retrieve is not _fail_to_retrieve: # type: ignore[reportUnnecessaryComparison] ???
566
+ if registry._retrieve is not retrieve is not _fail_to_retrieve: # type: ignore[reportUnnecessaryComparison] ???
567
+ raise ValueError( # noqa: TRY003
568
+ "Cannot combine registries with conflicting retrieval "
569
+ "functions.",
570
+ )
571
+ retrieve = registry._retrieve
572
+ return evolve(
573
+ self,
574
+ anchors=anchors,
575
+ resources=resources,
576
+ uncrawled=uncrawled,
577
+ retrieve=retrieve,
578
+ )
579
+
580
+ def resolver(self, base_uri: URI = "") -> Resolver[D]:
581
+ """
582
+ Return a `Resolver` which resolves references against this registry.
583
+ """
584
+ return Resolver(base_uri=base_uri, registry=self)
585
+
586
+ def resolver_with_root(self, resource: Resource[D]) -> Resolver[D]:
587
+ """
588
+ Return a `Resolver` with a specific root resource.
589
+ """
590
+ uri = resource.id() or ""
591
+ return Resolver(
592
+ base_uri=uri,
593
+ registry=self.with_resource(uri, resource),
594
+ )
595
+
596
+
597
+ #: An anchor or resource.
598
+ AnchorOrResource = TypeVar(
599
+ "AnchorOrResource",
600
+ AnchorType[Any],
601
+ Resource[Any],
602
+ default=Resource[Any],
603
+ )
604
+
605
+
606
+ @frozen
607
+ class Retrieved(Generic[D, AnchorOrResource]):
608
+ """
609
+ A value retrieved from a `Registry`.
610
+ """
611
+
612
+ value: AnchorOrResource
613
+ registry: Registry[D]
614
+
615
+
616
+ @frozen
617
+ class Resolved(Generic[D]):
618
+ """
619
+ A reference resolved to its contents by a `Resolver`.
620
+ """
621
+
622
+ contents: D
623
+ resolver: Resolver[D]
624
+
625
+
626
+ @frozen
627
+ class Resolver(Generic[D]):
628
+ """
629
+ A reference resolver.
630
+
631
+ Resolvers help resolve references (including relative ones) by
632
+ pairing a fixed base URI with a `Registry`.
633
+
634
+ This object, under normal circumstances, is expected to be used by
635
+ *implementers of libraries* built on top of `referencing` (e.g. JSON Schema
636
+ implementations or other libraries resolving JSON references),
637
+ not directly by end-users populating registries or while writing
638
+ schemas or other resources.
639
+
640
+ References are resolved against the base URI, and the combined URI
641
+ is then looked up within the registry.
642
+
643
+ The process of resolving a reference may itself involve calculating
644
+ a *new* base URI for future reference resolution (e.g. if an
645
+ intermediate resource sets a new base URI), or may involve encountering
646
+ additional subresources and adding them to a new registry.
647
+ """
648
+
649
+ _base_uri: URI = field(alias="base_uri")
650
+ _registry: Registry[D] = field(alias="registry")
651
+ _previous: List[URI] = field(default=List(), repr=False, alias="previous")
652
+
653
+ def lookup(self, ref: URI) -> Resolved[D]:
654
+ """
655
+ Resolve the given reference to the resource it points to.
656
+
657
+ Raises:
658
+
659
+ `exceptions.Unresolvable`
660
+
661
+ or a subclass thereof (see below) if the reference isn't
662
+ resolvable
663
+
664
+ `exceptions.NoSuchAnchor`
665
+
666
+ if the reference is to a URI where a resource exists but
667
+ contains a plain name fragment which does not exist within
668
+ the resource
669
+
670
+ `exceptions.PointerToNowhere`
671
+
672
+ if the reference is to a URI where a resource exists but
673
+ contains a JSON pointer to a location within the resource
674
+ that does not exist
675
+
676
+ """
677
+ if ref.startswith("#"):
678
+ uri, fragment = self._base_uri, ref[1:]
679
+ else:
680
+ uri, fragment = urldefrag(urljoin(self._base_uri, ref))
681
+ try:
682
+ retrieved = self._registry.get_or_retrieve(uri)
683
+ except exceptions.NoSuchResource:
684
+ raise exceptions.Unresolvable(ref=ref) from None
685
+ except exceptions.Unretrievable as error:
686
+ raise exceptions.Unresolvable(ref=ref) from error
687
+
688
+ if fragment.startswith("/"):
689
+ resolver = self._evolve(registry=retrieved.registry, base_uri=uri)
690
+ return retrieved.value.pointer(pointer=fragment, resolver=resolver)
691
+
692
+ if fragment:
693
+ retrieved = retrieved.registry.anchor(uri, fragment)
694
+ resolver = self._evolve(registry=retrieved.registry, base_uri=uri)
695
+ return retrieved.value.resolve(resolver=resolver)
696
+
697
+ resolver = self._evolve(registry=retrieved.registry, base_uri=uri)
698
+ return Resolved(contents=retrieved.value.contents, resolver=resolver)
699
+
700
+ def in_subresource(self, subresource: Resource[D]) -> Resolver[D]:
701
+ """
702
+ Create a resolver for a subresource (which may have a new base URI).
703
+ """
704
+ id = subresource.id()
705
+ if id is None:
706
+ return self
707
+ return evolve(self, base_uri=urljoin(self._base_uri, id))
708
+
709
+ def dynamic_scope(self) -> Iterable[tuple[URI, Registry[D]]]:
710
+ """
711
+ In specs with such a notion, return the URIs in the dynamic scope.
712
+ """
713
+ for uri in self._previous:
714
+ yield uri, self._registry
715
+
716
+ def _evolve(self, base_uri: URI, **kwargs: Any):
717
+ """
718
+ Evolve, appending to the dynamic scope.
719
+ """
720
+ previous = self._previous
721
+ if self._base_uri and (not previous or base_uri != self._base_uri):
722
+ previous = previous.push_front(self._base_uri)
723
+ return evolve(self, base_uri=base_uri, previous=previous, **kwargs)
724
+
725
+
726
+ @frozen
727
+ class Anchor(Generic[D]):
728
+ """
729
+ A simple anchor in a `Resource`.
730
+ """
731
+
732
+ name: str
733
+ resource: Resource[D]
734
+
735
+ def resolve(self, resolver: Resolver[D]):
736
+ """
737
+ Return the resource for this anchor.
738
+ """
739
+ return Resolved(contents=self.resource.contents, resolver=resolver)
evalkit_cambrian/lib/python3.10/site-packages/referencing/exceptions.py ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Errors, oh no!
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ from typing import TYPE_CHECKING, Any
8
+
9
+ import attrs
10
+
11
+ from referencing._attrs import frozen
12
+
13
+ if TYPE_CHECKING:
14
+ from referencing import Resource
15
+ from referencing.typing import URI
16
+
17
+
18
+ @frozen
19
+ class NoSuchResource(KeyError):
20
+ """
21
+ The given URI is not present in a registry.
22
+
23
+ Unlike most exceptions, this class *is* intended to be publicly
24
+ instantiable and *is* part of the public API of the package.
25
+ """
26
+
27
+ ref: URI
28
+
29
+ def __eq__(self, other: object) -> bool:
30
+ if self.__class__ is not other.__class__:
31
+ return NotImplemented
32
+ return attrs.astuple(self) == attrs.astuple(other)
33
+
34
+ def __hash__(self) -> int:
35
+ return hash(attrs.astuple(self))
36
+
37
+
38
+ @frozen
39
+ class NoInternalID(Exception):
40
+ """
41
+ A resource has no internal ID, but one is needed.
42
+
43
+ E.g. in modern JSON Schema drafts, this is the :kw:`$id` keyword.
44
+
45
+ One might be needed if a resource was to-be added to a registry but no
46
+ other URI is available, and the resource doesn't declare its canonical URI.
47
+ """
48
+
49
+ resource: Resource[Any]
50
+
51
+ def __eq__(self, other: object) -> bool:
52
+ if self.__class__ is not other.__class__:
53
+ return NotImplemented
54
+ return attrs.astuple(self) == attrs.astuple(other)
55
+
56
+ def __hash__(self) -> int:
57
+ return hash(attrs.astuple(self))
58
+
59
+
60
+ @frozen
61
+ class Unretrievable(KeyError):
62
+ """
63
+ The given URI is not present in a registry, and retrieving it failed.
64
+ """
65
+
66
+ ref: URI
67
+
68
+ def __eq__(self, other: object) -> bool:
69
+ if self.__class__ is not other.__class__:
70
+ return NotImplemented
71
+ return attrs.astuple(self) == attrs.astuple(other)
72
+
73
+ def __hash__(self) -> int:
74
+ return hash(attrs.astuple(self))
75
+
76
+
77
+ @frozen
78
+ class CannotDetermineSpecification(Exception):
79
+ """
80
+ Attempting to detect the appropriate `Specification` failed.
81
+
82
+ This happens if no discernible information is found in the contents of the
83
+ new resource which would help identify it.
84
+ """
85
+
86
+ contents: Any
87
+
88
+ def __eq__(self, other: object) -> bool:
89
+ if self.__class__ is not other.__class__:
90
+ return NotImplemented
91
+ return attrs.astuple(self) == attrs.astuple(other)
92
+
93
+ def __hash__(self) -> int:
94
+ return hash(attrs.astuple(self))
95
+
96
+
97
+ @attrs.frozen # Because here we allow subclassing below.
98
+ class Unresolvable(Exception):
99
+ """
100
+ A reference was unresolvable.
101
+ """
102
+
103
+ ref: URI
104
+
105
+ def __eq__(self, other: object) -> bool:
106
+ if self.__class__ is not other.__class__:
107
+ return NotImplemented
108
+ return attrs.astuple(self) == attrs.astuple(other)
109
+
110
+ def __hash__(self) -> int:
111
+ return hash(attrs.astuple(self))
112
+
113
+
114
+ @frozen
115
+ class PointerToNowhere(Unresolvable):
116
+ """
117
+ A JSON Pointer leads to a part of a document that does not exist.
118
+ """
119
+
120
+ resource: Resource[Any]
121
+
122
+ def __str__(self) -> str:
123
+ msg = f"{self.ref!r} does not exist within {self.resource.contents!r}"
124
+ if self.ref == "/":
125
+ msg += (
126
+ ". The pointer '/' is a valid JSON Pointer but it points to "
127
+ "an empty string property ''. If you intended to point "
128
+ "to the entire resource, you should use '#'."
129
+ )
130
+ return msg
131
+
132
+
133
+ @frozen
134
+ class NoSuchAnchor(Unresolvable):
135
+ """
136
+ An anchor does not exist within a particular resource.
137
+ """
138
+
139
+ resource: Resource[Any]
140
+ anchor: str
141
+
142
+ def __str__(self) -> str:
143
+ return (
144
+ f"{self.anchor!r} does not exist within {self.resource.contents!r}"
145
+ )
146
+
147
+
148
+ @frozen
149
+ class InvalidAnchor(Unresolvable):
150
+ """
151
+ An anchor which could never exist in a resource was dereferenced.
152
+
153
+ It is somehow syntactically invalid.
154
+ """
155
+
156
+ resource: Resource[Any]
157
+ anchor: str
158
+
159
+ def __str__(self) -> str:
160
+ return (
161
+ f"'#{self.anchor}' is not a valid anchor, neither as a "
162
+ "plain name anchor nor as a JSON Pointer. You may have intended "
163
+ f"to use '#/{self.anchor}', as the slash is required *before each "
164
+ "segment* of a JSON pointer."
165
+ )
evalkit_cambrian/lib/python3.10/site-packages/referencing/jsonschema.py ADDED
@@ -0,0 +1,642 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Referencing implementations for JSON Schema specs (historic & current).
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ from collections.abc import Iterable, Sequence, Set
8
+ from typing import Any, Union
9
+
10
+ from referencing import Anchor, Registry, Resource, Specification, exceptions
11
+ from referencing._attrs import frozen
12
+ from referencing._core import (
13
+ _UNSET, # type: ignore[reportPrivateUsage]
14
+ Resolved as _Resolved,
15
+ Resolver as _Resolver,
16
+ _Unset, # type: ignore[reportPrivateUsage]
17
+ )
18
+ from referencing.typing import URI, Anchor as AnchorType, Mapping
19
+
20
+ #: A JSON Schema which is a JSON object
21
+ ObjectSchema = Mapping[str, Any]
22
+
23
+ #: A JSON Schema of any kind
24
+ Schema = Union[bool, ObjectSchema]
25
+
26
+ #: A Resource whose contents are JSON Schemas
27
+ SchemaResource = Resource[Schema]
28
+
29
+ #: A JSON Schema Registry
30
+ SchemaRegistry = Registry[Schema]
31
+
32
+ #: The empty JSON Schema Registry
33
+ EMPTY_REGISTRY: SchemaRegistry = Registry()
34
+
35
+
36
+ @frozen
37
+ class UnknownDialect(Exception):
38
+ """
39
+ A dialect identifier was found for a dialect unknown by this library.
40
+
41
+ If it's a custom ("unofficial") dialect, be sure you've registered it.
42
+ """
43
+
44
+ uri: URI
45
+
46
+
47
+ def _dollar_id(contents: Schema) -> URI | None:
48
+ if isinstance(contents, bool):
49
+ return
50
+ return contents.get("$id")
51
+
52
+
53
+ def _legacy_dollar_id(contents: Schema) -> URI | None:
54
+ if isinstance(contents, bool) or "$ref" in contents:
55
+ return
56
+ id = contents.get("$id")
57
+ if id is not None and not id.startswith("#"):
58
+ return id
59
+
60
+
61
+ def _legacy_id(contents: ObjectSchema) -> URI | None:
62
+ if "$ref" in contents:
63
+ return
64
+ id = contents.get("id")
65
+ if id is not None and not id.startswith("#"):
66
+ return id
67
+
68
+
69
+ def _anchor(
70
+ specification: Specification[Schema],
71
+ contents: Schema,
72
+ ) -> Iterable[AnchorType[Schema]]:
73
+ if isinstance(contents, bool):
74
+ return
75
+ anchor = contents.get("$anchor")
76
+ if anchor is not None:
77
+ yield Anchor(
78
+ name=anchor,
79
+ resource=specification.create_resource(contents),
80
+ )
81
+
82
+ dynamic_anchor = contents.get("$dynamicAnchor")
83
+ if dynamic_anchor is not None:
84
+ yield DynamicAnchor(
85
+ name=dynamic_anchor,
86
+ resource=specification.create_resource(contents),
87
+ )
88
+
89
+
90
+ def _anchor_2019(
91
+ specification: Specification[Schema],
92
+ contents: Schema,
93
+ ) -> Iterable[Anchor[Schema]]:
94
+ if isinstance(contents, bool):
95
+ return []
96
+ anchor = contents.get("$anchor")
97
+ if anchor is None:
98
+ return []
99
+ return [
100
+ Anchor(
101
+ name=anchor,
102
+ resource=specification.create_resource(contents),
103
+ ),
104
+ ]
105
+
106
+
107
+ def _legacy_anchor_in_dollar_id(
108
+ specification: Specification[Schema],
109
+ contents: Schema,
110
+ ) -> Iterable[Anchor[Schema]]:
111
+ if isinstance(contents, bool):
112
+ return []
113
+ id = contents.get("$id", "")
114
+ if not id.startswith("#"):
115
+ return []
116
+ return [
117
+ Anchor(
118
+ name=id[1:],
119
+ resource=specification.create_resource(contents),
120
+ ),
121
+ ]
122
+
123
+
124
+ def _legacy_anchor_in_id(
125
+ specification: Specification[ObjectSchema],
126
+ contents: ObjectSchema,
127
+ ) -> Iterable[Anchor[ObjectSchema]]:
128
+ id = contents.get("id", "")
129
+ if not id.startswith("#"):
130
+ return []
131
+ return [
132
+ Anchor(
133
+ name=id[1:],
134
+ resource=specification.create_resource(contents),
135
+ ),
136
+ ]
137
+
138
+
139
+ def _subresources_of(
140
+ in_value: Set[str] = frozenset(),
141
+ in_subvalues: Set[str] = frozenset(),
142
+ in_subarray: Set[str] = frozenset(),
143
+ ):
144
+ """
145
+ Create a callable returning JSON Schema specification-style subschemas.
146
+
147
+ Relies on specifying the set of keywords containing subschemas in their
148
+ values, in a subobject's values, or in a subarray.
149
+ """
150
+
151
+ def subresources_of(contents: Schema) -> Iterable[ObjectSchema]:
152
+ if isinstance(contents, bool):
153
+ return
154
+ for each in in_value:
155
+ if each in contents:
156
+ yield contents[each]
157
+ for each in in_subarray:
158
+ if each in contents:
159
+ yield from contents[each]
160
+ for each in in_subvalues:
161
+ if each in contents:
162
+ yield from contents[each].values()
163
+
164
+ return subresources_of
165
+
166
+
167
+ def _subresources_of_with_crazy_items(
168
+ in_value: Set[str] = frozenset(),
169
+ in_subvalues: Set[str] = frozenset(),
170
+ in_subarray: Set[str] = frozenset(),
171
+ ):
172
+ """
173
+ Specifically handle older drafts where there are some funky keywords.
174
+ """
175
+
176
+ def subresources_of(contents: Schema) -> Iterable[ObjectSchema]:
177
+ if isinstance(contents, bool):
178
+ return
179
+ for each in in_value:
180
+ if each in contents:
181
+ yield contents[each]
182
+ for each in in_subarray:
183
+ if each in contents:
184
+ yield from contents[each]
185
+ for each in in_subvalues:
186
+ if each in contents:
187
+ yield from contents[each].values()
188
+
189
+ items = contents.get("items")
190
+ if items is not None:
191
+ if isinstance(items, Sequence):
192
+ yield from items
193
+ else:
194
+ yield items
195
+
196
+ return subresources_of
197
+
198
+
199
+ def _subresources_of_with_crazy_items_dependencies(
200
+ in_value: Set[str] = frozenset(),
201
+ in_subvalues: Set[str] = frozenset(),
202
+ in_subarray: Set[str] = frozenset(),
203
+ ):
204
+ """
205
+ Specifically handle older drafts where there are some funky keywords.
206
+ """
207
+
208
+ def subresources_of(contents: Schema) -> Iterable[ObjectSchema]:
209
+ if isinstance(contents, bool):
210
+ return
211
+ for each in in_value:
212
+ if each in contents:
213
+ yield contents[each]
214
+ for each in in_subarray:
215
+ if each in contents:
216
+ yield from contents[each]
217
+ for each in in_subvalues:
218
+ if each in contents:
219
+ yield from contents[each].values()
220
+
221
+ items = contents.get("items")
222
+ if items is not None:
223
+ if isinstance(items, Sequence):
224
+ yield from items
225
+ else:
226
+ yield items
227
+ dependencies = contents.get("dependencies")
228
+ if dependencies is not None:
229
+ values = iter(dependencies.values())
230
+ value = next(values, None)
231
+ if isinstance(value, Mapping):
232
+ yield value
233
+ yield from values
234
+
235
+ return subresources_of
236
+
237
+
238
+ def _subresources_of_with_crazy_aP_items_dependencies(
239
+ in_value: Set[str] = frozenset(),
240
+ in_subvalues: Set[str] = frozenset(),
241
+ in_subarray: Set[str] = frozenset(),
242
+ ):
243
+ """
244
+ Specifically handle even older drafts where there are some funky keywords.
245
+ """
246
+
247
+ def subresources_of(contents: ObjectSchema) -> Iterable[ObjectSchema]:
248
+ for each in in_value:
249
+ if each in contents:
250
+ yield contents[each]
251
+ for each in in_subarray:
252
+ if each in contents:
253
+ yield from contents[each]
254
+ for each in in_subvalues:
255
+ if each in contents:
256
+ yield from contents[each].values()
257
+
258
+ items = contents.get("items")
259
+ if items is not None:
260
+ if isinstance(items, Sequence):
261
+ yield from items
262
+ else:
263
+ yield items
264
+ dependencies = contents.get("dependencies")
265
+ if dependencies is not None:
266
+ values = iter(dependencies.values())
267
+ value = next(values, None)
268
+ if isinstance(value, Mapping):
269
+ yield value
270
+ yield from values
271
+
272
+ for each in "additionalItems", "additionalProperties":
273
+ value = contents.get(each)
274
+ if isinstance(value, Mapping):
275
+ yield value
276
+
277
+ return subresources_of
278
+
279
+
280
+ def _maybe_in_subresource(
281
+ in_value: Set[str] = frozenset(),
282
+ in_subvalues: Set[str] = frozenset(),
283
+ in_subarray: Set[str] = frozenset(),
284
+ ):
285
+ in_child = in_subvalues | in_subarray
286
+
287
+ def maybe_in_subresource(
288
+ segments: Sequence[int | str],
289
+ resolver: _Resolver[Any],
290
+ subresource: Resource[Any],
291
+ ) -> _Resolver[Any]:
292
+ _segments = iter(segments)
293
+ for segment in _segments:
294
+ if segment not in in_value and (
295
+ segment not in in_child or next(_segments, None) is None
296
+ ):
297
+ return resolver
298
+ return resolver.in_subresource(subresource)
299
+
300
+ return maybe_in_subresource
301
+
302
+
303
+ def _maybe_in_subresource_crazy_items(
304
+ in_value: Set[str] = frozenset(),
305
+ in_subvalues: Set[str] = frozenset(),
306
+ in_subarray: Set[str] = frozenset(),
307
+ ):
308
+ in_child = in_subvalues | in_subarray
309
+
310
+ def maybe_in_subresource(
311
+ segments: Sequence[int | str],
312
+ resolver: _Resolver[Any],
313
+ subresource: Resource[Any],
314
+ ) -> _Resolver[Any]:
315
+ _segments = iter(segments)
316
+ for segment in _segments:
317
+ if segment == "items" and isinstance(
318
+ subresource.contents,
319
+ Mapping,
320
+ ):
321
+ return resolver.in_subresource(subresource)
322
+ if segment not in in_value and (
323
+ segment not in in_child or next(_segments, None) is None
324
+ ):
325
+ return resolver
326
+ return resolver.in_subresource(subresource)
327
+
328
+ return maybe_in_subresource
329
+
330
+
331
+ def _maybe_in_subresource_crazy_items_dependencies(
332
+ in_value: Set[str] = frozenset(),
333
+ in_subvalues: Set[str] = frozenset(),
334
+ in_subarray: Set[str] = frozenset(),
335
+ ):
336
+ in_child = in_subvalues | in_subarray
337
+
338
+ def maybe_in_subresource(
339
+ segments: Sequence[int | str],
340
+ resolver: _Resolver[Any],
341
+ subresource: Resource[Any],
342
+ ) -> _Resolver[Any]:
343
+ _segments = iter(segments)
344
+ for segment in _segments:
345
+ if segment in {"items", "dependencies"} and isinstance(
346
+ subresource.contents,
347
+ Mapping,
348
+ ):
349
+ return resolver.in_subresource(subresource)
350
+ if segment not in in_value and (
351
+ segment not in in_child or next(_segments, None) is None
352
+ ):
353
+ return resolver
354
+ return resolver.in_subresource(subresource)
355
+
356
+ return maybe_in_subresource
357
+
358
+
359
+ #: JSON Schema draft 2020-12
360
+ DRAFT202012 = Specification(
361
+ name="draft2020-12",
362
+ id_of=_dollar_id,
363
+ subresources_of=_subresources_of(
364
+ in_value={
365
+ "additionalProperties",
366
+ "contains",
367
+ "contentSchema",
368
+ "else",
369
+ "if",
370
+ "items",
371
+ "not",
372
+ "propertyNames",
373
+ "then",
374
+ "unevaluatedItems",
375
+ "unevaluatedProperties",
376
+ },
377
+ in_subarray={"allOf", "anyOf", "oneOf", "prefixItems"},
378
+ in_subvalues={
379
+ "$defs",
380
+ "definitions",
381
+ "dependentSchemas",
382
+ "patternProperties",
383
+ "properties",
384
+ },
385
+ ),
386
+ anchors_in=_anchor,
387
+ maybe_in_subresource=_maybe_in_subresource(
388
+ in_value={
389
+ "additionalProperties",
390
+ "contains",
391
+ "contentSchema",
392
+ "else",
393
+ "if",
394
+ "items",
395
+ "not",
396
+ "propertyNames",
397
+ "then",
398
+ "unevaluatedItems",
399
+ "unevaluatedProperties",
400
+ },
401
+ in_subarray={"allOf", "anyOf", "oneOf", "prefixItems"},
402
+ in_subvalues={
403
+ "$defs",
404
+ "definitions",
405
+ "dependentSchemas",
406
+ "patternProperties",
407
+ "properties",
408
+ },
409
+ ),
410
+ )
411
+ #: JSON Schema draft 2019-09
412
+ DRAFT201909 = Specification(
413
+ name="draft2019-09",
414
+ id_of=_dollar_id,
415
+ subresources_of=_subresources_of_with_crazy_items(
416
+ in_value={
417
+ "additionalItems",
418
+ "additionalProperties",
419
+ "contains",
420
+ "contentSchema",
421
+ "else",
422
+ "if",
423
+ "not",
424
+ "propertyNames",
425
+ "then",
426
+ "unevaluatedItems",
427
+ "unevaluatedProperties",
428
+ },
429
+ in_subarray={"allOf", "anyOf", "oneOf"},
430
+ in_subvalues={
431
+ "$defs",
432
+ "definitions",
433
+ "dependentSchemas",
434
+ "patternProperties",
435
+ "properties",
436
+ },
437
+ ),
438
+ anchors_in=_anchor_2019,
439
+ maybe_in_subresource=_maybe_in_subresource_crazy_items(
440
+ in_value={
441
+ "additionalItems",
442
+ "additionalProperties",
443
+ "contains",
444
+ "contentSchema",
445
+ "else",
446
+ "if",
447
+ "not",
448
+ "propertyNames",
449
+ "then",
450
+ "unevaluatedItems",
451
+ "unevaluatedProperties",
452
+ },
453
+ in_subarray={"allOf", "anyOf", "oneOf"},
454
+ in_subvalues={
455
+ "$defs",
456
+ "definitions",
457
+ "dependentSchemas",
458
+ "patternProperties",
459
+ "properties",
460
+ },
461
+ ),
462
+ )
463
+ #: JSON Schema draft 7
464
+ DRAFT7 = Specification(
465
+ name="draft-07",
466
+ id_of=_legacy_dollar_id,
467
+ subresources_of=_subresources_of_with_crazy_items_dependencies(
468
+ in_value={
469
+ "additionalItems",
470
+ "additionalProperties",
471
+ "contains",
472
+ "else",
473
+ "if",
474
+ "not",
475
+ "propertyNames",
476
+ "then",
477
+ },
478
+ in_subarray={"allOf", "anyOf", "oneOf"},
479
+ in_subvalues={"definitions", "patternProperties", "properties"},
480
+ ),
481
+ anchors_in=_legacy_anchor_in_dollar_id,
482
+ maybe_in_subresource=_maybe_in_subresource_crazy_items_dependencies(
483
+ in_value={
484
+ "additionalItems",
485
+ "additionalProperties",
486
+ "contains",
487
+ "else",
488
+ "if",
489
+ "not",
490
+ "propertyNames",
491
+ "then",
492
+ },
493
+ in_subarray={"allOf", "anyOf", "oneOf"},
494
+ in_subvalues={"definitions", "patternProperties", "properties"},
495
+ ),
496
+ )
497
+ #: JSON Schema draft 6
498
+ DRAFT6 = Specification(
499
+ name="draft-06",
500
+ id_of=_legacy_dollar_id,
501
+ subresources_of=_subresources_of_with_crazy_items_dependencies(
502
+ in_value={
503
+ "additionalItems",
504
+ "additionalProperties",
505
+ "contains",
506
+ "not",
507
+ "propertyNames",
508
+ },
509
+ in_subarray={"allOf", "anyOf", "oneOf"},
510
+ in_subvalues={"definitions", "patternProperties", "properties"},
511
+ ),
512
+ anchors_in=_legacy_anchor_in_dollar_id,
513
+ maybe_in_subresource=_maybe_in_subresource_crazy_items_dependencies(
514
+ in_value={
515
+ "additionalItems",
516
+ "additionalProperties",
517
+ "contains",
518
+ "not",
519
+ "propertyNames",
520
+ },
521
+ in_subarray={"allOf", "anyOf", "oneOf"},
522
+ in_subvalues={"definitions", "patternProperties", "properties"},
523
+ ),
524
+ )
525
+ #: JSON Schema draft 4
526
+ DRAFT4 = Specification(
527
+ name="draft-04",
528
+ id_of=_legacy_id,
529
+ subresources_of=_subresources_of_with_crazy_aP_items_dependencies(
530
+ in_value={"not"},
531
+ in_subarray={"allOf", "anyOf", "oneOf"},
532
+ in_subvalues={"definitions", "patternProperties", "properties"},
533
+ ),
534
+ anchors_in=_legacy_anchor_in_id,
535
+ maybe_in_subresource=_maybe_in_subresource_crazy_items_dependencies(
536
+ in_value={"additionalItems", "additionalProperties", "not"},
537
+ in_subarray={"allOf", "anyOf", "oneOf"},
538
+ in_subvalues={"definitions", "patternProperties", "properties"},
539
+ ),
540
+ )
541
+ #: JSON Schema draft 3
542
+ DRAFT3 = Specification(
543
+ name="draft-03",
544
+ id_of=_legacy_id,
545
+ subresources_of=_subresources_of_with_crazy_aP_items_dependencies(
546
+ in_subarray={"extends"},
547
+ in_subvalues={"definitions", "patternProperties", "properties"},
548
+ ),
549
+ anchors_in=_legacy_anchor_in_id,
550
+ maybe_in_subresource=_maybe_in_subresource_crazy_items_dependencies(
551
+ in_value={"additionalItems", "additionalProperties"},
552
+ in_subarray={"extends"},
553
+ in_subvalues={"definitions", "patternProperties", "properties"},
554
+ ),
555
+ )
556
+
557
+
558
+ _SPECIFICATIONS: Registry[Specification[Schema]] = Registry(
559
+ {
560
+ dialect_id: Resource.opaque(specification)
561
+ for dialect_id, specification in [
562
+ ("https://json-schema.org/draft/2020-12/schema", DRAFT202012),
563
+ ("https://json-schema.org/draft/2019-09/schema", DRAFT201909),
564
+ ("http://json-schema.org/draft-07/schema", DRAFT7),
565
+ ("http://json-schema.org/draft-06/schema", DRAFT6),
566
+ ("http://json-schema.org/draft-04/schema", DRAFT4),
567
+ ("http://json-schema.org/draft-03/schema", DRAFT3),
568
+ ]
569
+ },
570
+ )
571
+
572
+
573
+ def specification_with(
574
+ dialect_id: URI,
575
+ default: Specification[Any] | _Unset = _UNSET,
576
+ ) -> Specification[Any]:
577
+ """
578
+ Retrieve the `Specification` with the given dialect identifier.
579
+
580
+ Raises:
581
+
582
+ `UnknownDialect`
583
+
584
+ if the given ``dialect_id`` isn't known
585
+
586
+ """
587
+ resource = _SPECIFICATIONS.get(dialect_id.rstrip("#"))
588
+ if resource is not None:
589
+ return resource.contents
590
+ if default is _UNSET:
591
+ raise UnknownDialect(dialect_id)
592
+ return default
593
+
594
+
595
+ @frozen
596
+ class DynamicAnchor:
597
+ """
598
+ Dynamic anchors, introduced in draft 2020.
599
+ """
600
+
601
+ name: str
602
+ resource: SchemaResource
603
+
604
+ def resolve(self, resolver: _Resolver[Schema]) -> _Resolved[Schema]:
605
+ """
606
+ Resolve this anchor dynamically.
607
+ """
608
+ last = self.resource
609
+ for uri, registry in resolver.dynamic_scope():
610
+ try:
611
+ anchor = registry.anchor(uri, self.name).value
612
+ except exceptions.NoSuchAnchor:
613
+ continue
614
+ if isinstance(anchor, DynamicAnchor):
615
+ last = anchor.resource
616
+ return _Resolved(
617
+ contents=last.contents,
618
+ resolver=resolver.in_subresource(last),
619
+ )
620
+
621
+
622
+ def lookup_recursive_ref(resolver: _Resolver[Schema]) -> _Resolved[Schema]:
623
+ """
624
+ Recursive references (via recursive anchors), present only in draft 2019.
625
+
626
+ As per the 2019 specification (§ 8.2.4.2.1), only the ``#`` recursive
627
+ reference is supported (and is therefore assumed to be the relevant
628
+ reference).
629
+ """
630
+ resolved = resolver.lookup("#")
631
+ if isinstance(resolved.contents, Mapping) and resolved.contents.get(
632
+ "$recursiveAnchor",
633
+ ):
634
+ for uri, _ in resolver.dynamic_scope():
635
+ next_resolved = resolver.lookup(uri)
636
+ if not isinstance(
637
+ next_resolved.contents,
638
+ Mapping,
639
+ ) or not next_resolved.contents.get("$recursiveAnchor"):
640
+ break
641
+ resolved = next_resolved
642
+ return resolved
evalkit_cambrian/lib/python3.10/site-packages/referencing/py.typed ADDED
File without changes
evalkit_cambrian/lib/python3.10/site-packages/referencing/retrieval.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Helpers related to (dynamic) resource retrieval.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ from functools import lru_cache
8
+ from typing import TYPE_CHECKING, Callable
9
+ import json
10
+
11
+ try:
12
+ from typing_extensions import TypeVar
13
+ except ImportError: # pragma: no cover
14
+ from typing import TypeVar
15
+
16
+ from referencing import Resource
17
+
18
+ if TYPE_CHECKING:
19
+ from referencing.typing import URI, D, Retrieve
20
+
21
+ #: A serialized document (e.g. a JSON string)
22
+ _T = TypeVar("_T", default=str)
23
+
24
+
25
def to_cached_resource(
    cache: Callable[[Retrieve[D]], Retrieve[D]] | None = None,
    loads: Callable[[_T], D] = json.loads,
    from_contents: Callable[[D], Resource[D]] = Resource.from_contents,
) -> Callable[[Callable[[URI], _T]], Retrieve[D]]:
    """
    Create a retriever which caches its return values from a simpler callable.

    Takes a function which returns things like serialized JSON (strings) and
    returns something suitable for passing to `Registry` as a retrieve
    function.

    This decorator both reduces a small bit of boilerplate for a common case
    (deserializing JSON from strings and creating `Resource` objects from the
    result) as well as makes the probable need for caching a bit easier.
    Retrievers which otherwise do expensive operations (like hitting the
    network) might otherwise be called repeatedly.

    Examples
    --------

    .. testcode::

        from referencing import Registry
        from referencing.typing import URI
        import referencing.retrieval


        @referencing.retrieval.to_cached_resource()
        def retrieve(uri: URI):
            print(f"Retrieved {uri}")

            # Normally, go get some expensive JSON from the network, a file ...
            return '''
                {
                    "$schema": "https://json-schema.org/draft/2020-12/schema",
                    "foo": "bar"
                }
            '''

        one = Registry(retrieve=retrieve).get_or_retrieve("urn:example:foo")
        print(one.value.contents["foo"])

        # Retrieving the same URI again reuses the same value (and thus doesn't
        # print another retrieval message here)
        two = Registry(retrieve=retrieve).get_or_retrieve("urn:example:foo")
        print(two.value.contents["foo"])

    .. testoutput::

        Retrieved urn:example:foo
        bar
        bar

    """
    # Default to an unbounded LRU cache when no caching decorator was given.
    memoize = lru_cache(maxsize=None) if cache is None else cache

    def decorator(retrieve: Callable[[URI], _T]):
        @memoize
        def retrieve_then_deserialize(uri: URI):
            serialized = retrieve(uri)
            return from_contents(loads(serialized))

        return retrieve_then_deserialize

    return decorator
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__init__.py ADDED
File without changes
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (179 Bytes). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_core.cpython-310.pyc ADDED
Binary file (36.7 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_exceptions.cpython-310.pyc ADDED
Binary file (1.89 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_jsonschema.cpython-310.pyc ADDED
Binary file (6.73 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_referencing_suite.cpython-310.pyc ADDED
Binary file (2.56 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/__pycache__/test_retrieval.cpython-310.pyc ADDED
Binary file (3.59 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_core.py ADDED
@@ -0,0 +1,1057 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from rpds import HashTrieMap
2
+ import pytest
3
+
4
+ from referencing import Anchor, Registry, Resource, Specification, exceptions
5
+ from referencing.jsonschema import DRAFT202012
6
+
7
def _id_of(contents):
    """A resource's ID, if any, lives under its "ID" key."""
    return contents.get("ID")


def _subresources_of(contents):
    """Subresources live in a list under the "children" key."""
    return contents.get("children", [])


def _anchors_in(specification, contents):
    """Each entry of the "anchors" mapping names an anchored resource."""
    return [
        Anchor(
            name=name,
            resource=specification.create_resource(contents=each),
        )
        for name, each in contents.get("anchors", {}).items()
    ]


def _maybe_in_subresource(segments, resolver, subresource):
    """
    Enter the subresource only for paths of the shape children/<i>/children/<j>/...

    i.e. an even number of segments whose even-indexed ones are all "children".
    """
    descends_through_children = not len(segments) % 2 and all(
        each == "children" for each in segments[::2]
    )
    if descends_through_children:
        return resolver.in_subresource(subresource)
    return resolver


#: A toy specification used throughout these tests, whose resources carry
#: their ID under "ID", subresources under "children" and anchors under
#: "anchors".
ID_AND_CHILDREN = Specification(
    name="id-and-children",
    id_of=_id_of,
    subresources_of=_subresources_of,
    anchors_in=_anchors_in,
    maybe_in_subresource=_maybe_in_subresource,
)
25
+
26
+
27
+ def blow_up(uri): # pragma: no cover
28
+ """
29
+ A retriever suitable for use in tests which expect it never to be used.
30
+ """
31
+ raise RuntimeError("This retrieve function expects to never be called!")
32
+
33
+
34
class TestRegistry:
    """
    Tests for `referencing.Registry`: construction, combination, crawling,
    (dynamic) retrieval and ``repr``.
    """

    def test_with_resource(self):
        """
        Adding a resource to the registry then allows re-retrieving it.
        """

        resource = Resource.opaque(contents={"foo": "bar"})
        uri = "urn:example"
        registry = Registry().with_resource(uri=uri, resource=resource)
        assert registry[uri] is resource

    def test_with_resources(self):
        """
        Adding multiple resources to the registry is like adding each one.
        """

        one = Resource.opaque(contents={})
        two = Resource(contents={"foo": "bar"}, specification=ID_AND_CHILDREN)
        registry = Registry().with_resources(
            [
                ("http://example.com/1", one),
                ("http://example.com/foo/bar", two),
            ],
        )
        assert registry == Registry().with_resource(
            uri="http://example.com/1",
            resource=one,
        ).with_resource(
            uri="http://example.com/foo/bar",
            resource=two,
        )

    # --- the @ operator registers resources under their internal ID ---

    def test_matmul_resource(self):
        uri = "urn:example:resource"
        resource = ID_AND_CHILDREN.create_resource({"ID": uri, "foo": 12})
        registry = resource @ Registry()
        assert registry == Registry().with_resource(uri, resource)

    def test_matmul_many_resources(self):
        one_uri = "urn:example:one"
        one = ID_AND_CHILDREN.create_resource({"ID": one_uri, "foo": 12})

        two_uri = "urn:example:two"
        two = ID_AND_CHILDREN.create_resource({"ID": two_uri, "foo": 12})

        registry = [one, two] @ Registry()
        assert registry == Registry().with_resources(
            [(one_uri, one), (two_uri, two)],
        )

    def test_matmul_resource_without_id(self):
        # A resource with no internal ID cannot be registered via ``@``.
        resource = Resource.opaque(contents={"foo": "bar"})
        with pytest.raises(exceptions.NoInternalID) as e:
            resource @ Registry()
        assert e.value == exceptions.NoInternalID(resource=resource)

    def test_with_contents_from_json_schema(self):
        uri = "urn:example"
        schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"}
        registry = Registry().with_contents([(uri, schema)])

        expected = Resource(contents=schema, specification=DRAFT202012)
        assert registry[uri] == expected

    def test_with_contents_and_default_specification(self):
        uri = "urn:example"
        registry = Registry().with_contents(
            [(uri, {"foo": "bar"})],
            default_specification=Specification.OPAQUE,
        )
        assert registry[uri] == Resource.opaque({"foo": "bar"})

    # --- container protocol: len / bool / iter ---

    def test_len(self):
        total = 5
        registry = Registry().with_contents(
            [(str(i), {"foo": "bar"}) for i in range(total)],
            default_specification=Specification.OPAQUE,
        )
        assert len(registry) == total

    def test_bool_empty(self):
        assert not Registry()

    def test_bool_not_empty(self):
        registry = Registry().with_contents(
            [(str(i), {"foo": "bar"}) for i in range(3)],
            default_specification=Specification.OPAQUE,
        )
        assert registry

    def test_iter(self):
        registry = Registry().with_contents(
            [(str(i), {"foo": "bar"}) for i in range(8)],
            default_specification=Specification.OPAQUE,
        )
        assert set(registry) == {str(i) for i in range(8)}

    # --- crawling discovers subresources and anchors ---

    def test_crawl_still_has_top_level_resource(self):
        resource = Resource.opaque({"foo": "bar"})
        uri = "urn:example"
        registry = Registry({uri: resource}).crawl()
        assert registry[uri] is resource

    def test_crawl_finds_a_subresource(self):
        child_id = "urn:child"
        root = ID_AND_CHILDREN.create_resource(
            {"ID": "urn:root", "children": [{"ID": child_id, "foo": 12}]},
        )
        registry = root @ Registry()
        # Before crawling, the child is not directly addressable.
        with pytest.raises(LookupError):
            registry[child_id]

        expected = ID_AND_CHILDREN.create_resource({"ID": child_id, "foo": 12})
        assert registry.crawl()[child_id] == expected

    def test_crawl_finds_anchors_with_id(self):
        resource = ID_AND_CHILDREN.create_resource(
            {"ID": "urn:bar", "anchors": {"foo": 12}},
        )
        registry = resource @ Registry()

        assert registry.crawl().anchor(resource.id(), "foo").value == Anchor(
            name="foo",
            resource=ID_AND_CHILDREN.create_resource(12),
        )

    def test_crawl_finds_anchors_no_id(self):
        resource = ID_AND_CHILDREN.create_resource({"anchors": {"foo": 12}})
        registry = Registry().with_resource("urn:root", resource)

        assert registry.crawl().anchor("urn:root", "foo").value == Anchor(
            name="foo",
            resource=ID_AND_CHILDREN.create_resource(12),
        )

    def test_contents(self):
        resource = Resource.opaque({"foo": "bar"})
        uri = "urn:example"
        registry = Registry().with_resource(uri, resource)
        assert registry.contents(uri) == {"foo": "bar"}

    # An empty URI fragment is equivalent to no fragment at all.

    def test_getitem_strips_empty_fragments(self):
        uri = "http://example.com/"
        resource = ID_AND_CHILDREN.create_resource({"ID": uri + "#"})
        registry = resource @ Registry()
        assert registry[uri] == registry[uri + "#"] == resource

    def test_contents_strips_empty_fragments(self):
        uri = "http://example.com/"
        resource = ID_AND_CHILDREN.create_resource({"ID": uri + "#"})
        registry = resource @ Registry()
        assert (
            registry.contents(uri)
            == registry.contents(uri + "#")
            == {"ID": uri + "#"}
        )

    def test_contents_nonexistent_resource(self):
        registry = Registry()
        with pytest.raises(exceptions.NoSuchResource) as e:
            registry.contents("urn:example")
        assert e.value == exceptions.NoSuchResource(ref="urn:example")

    def test_crawled_anchor(self):
        resource = ID_AND_CHILDREN.create_resource({"anchors": {"foo": "bar"}})
        registry = Registry().with_resource("urn:example", resource)
        retrieved = registry.anchor("urn:example", "foo")
        assert retrieved.value == Anchor(
            name="foo",
            resource=ID_AND_CHILDREN.create_resource("bar"),
        )
        assert retrieved.registry == registry.crawl()

    def test_anchor_in_nonexistent_resource(self):
        registry = Registry()
        with pytest.raises(exceptions.NoSuchResource) as e:
            registry.anchor("urn:example", "foo")
        assert e.value == exceptions.NoSuchResource(ref="urn:example")

    def test_init(self):
        # Constructing with a mapping is equivalent to with_resources + crawl.
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        registry = Registry(
            {
                "http://example.com/1": one,
                "http://example.com/foo/bar": two,
            },
        )
        assert (
            registry
            == Registry()
            .with_resources(
                [
                    ("http://example.com/1", one),
                    ("http://example.com/foo/bar", two),
                ],
            )
            .crawl()
        )

    def test_dict_conversion(self):
        """
        Passing a `dict` to `Registry` gets converted to a `HashTrieMap`.

        So continuing to use the registry works.
        """

        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        registry = Registry(
            {"http://example.com/1": one},
        ).with_resource("http://example.com/foo/bar", two)
        assert (
            registry.crawl()
            == Registry()
            .with_resources(
                [
                    ("http://example.com/1", one),
                    ("http://example.com/foo/bar", two),
                ],
            )
            .crawl()
        )

    def test_no_such_resource(self):
        registry = Registry()
        with pytest.raises(exceptions.NoSuchResource) as e:
            registry["urn:bigboom"]
        assert e.value == exceptions.NoSuchResource(ref="urn:bigboom")

    # --- combining registries ---

    def test_combine(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        three = ID_AND_CHILDREN.create_resource({"baz": "quux"})
        four = ID_AND_CHILDREN.create_resource({"anchors": {"foo": 12}})

        first = Registry({"http://example.com/1": one})
        second = Registry().with_resource("http://example.com/foo/bar", two)
        third = Registry(
            {
                "http://example.com/1": one,
                "http://example.com/baz": three,
            },
        )
        fourth = (
            Registry()
            .with_resource(
                "http://example.com/foo/quux",
                four,
            )
            .crawl()
        )
        assert first.combine(second, third, fourth) == Registry(
            [
                ("http://example.com/1", one),
                ("http://example.com/baz", three),
                ("http://example.com/foo/quux", four),
            ],
            anchors=HashTrieMap(
                {
                    ("http://example.com/foo/quux", "foo"): Anchor(
                        name="foo",
                        resource=ID_AND_CHILDREN.create_resource(12),
                    ),
                },
            ),
        ).with_resource("http://example.com/foo/bar", two)

    def test_combine_self(self):
        """
        Combining a registry with itself short-circuits.

        This is a performance optimization -- otherwise we do lots more work
        (in jsonschema this seems to correspond to making the test suite take
        *3x* longer).
        """

        registry = Registry({"urn:foo": "bar"})
        assert registry.combine(registry) is registry

    def test_combine_with_uncrawled_resources(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        three = ID_AND_CHILDREN.create_resource({"baz": "quux"})

        first = Registry().with_resource("http://example.com/1", one)
        second = Registry().with_resource("http://example.com/foo/bar", two)
        third = Registry(
            {
                "http://example.com/1": one,
                "http://example.com/baz": three,
            },
        )
        expected = Registry(
            [
                ("http://example.com/1", one),
                ("http://example.com/foo/bar", two),
                ("http://example.com/baz", three),
            ],
        )
        # Combining does not itself crawl; equality holds only post-crawl.
        combined = first.combine(second, third)
        assert combined != expected
        assert combined.crawl() == expected

    def test_combine_with_single_retrieve(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        three = ID_AND_CHILDREN.create_resource({"baz": "quux"})

        def retrieve(uri):  # pragma: no cover
            pass

        first = Registry().with_resource("http://example.com/1", one)
        second = Registry(
            retrieve=retrieve,
        ).with_resource("http://example.com/2", two)
        third = Registry().with_resource("http://example.com/3", three)

        assert first.combine(second, third) == Registry(
            retrieve=retrieve,
        ).with_resources(
            [
                ("http://example.com/1", one),
                ("http://example.com/2", two),
                ("http://example.com/3", three),
            ],
        )
        assert second.combine(first, third) == Registry(
            retrieve=retrieve,
        ).with_resources(
            [
                ("http://example.com/1", one),
                ("http://example.com/2", two),
                ("http://example.com/3", three),
            ],
        )

    def test_combine_with_common_retrieve(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        three = ID_AND_CHILDREN.create_resource({"baz": "quux"})

        def retrieve(uri):  # pragma: no cover
            pass

        first = Registry(retrieve=retrieve).with_resource(
            "http://example.com/1",
            one,
        )
        second = Registry(
            retrieve=retrieve,
        ).with_resource("http://example.com/2", two)
        third = Registry(retrieve=retrieve).with_resource(
            "http://example.com/3",
            three,
        )

        assert first.combine(second, third) == Registry(
            retrieve=retrieve,
        ).with_resources(
            [
                ("http://example.com/1", one),
                ("http://example.com/2", two),
                ("http://example.com/3", three),
            ],
        )
        assert second.combine(first, third) == Registry(
            retrieve=retrieve,
        ).with_resources(
            [
                ("http://example.com/1", one),
                ("http://example.com/2", two),
                ("http://example.com/3", three),
            ],
        )

    def test_combine_conflicting_retrieve(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        three = ID_AND_CHILDREN.create_resource({"baz": "quux"})

        def foo_retrieve(uri):  # pragma: no cover
            pass

        def bar_retrieve(uri):  # pragma: no cover
            pass

        first = Registry(retrieve=foo_retrieve).with_resource(
            "http://example.com/1",
            one,
        )
        second = Registry().with_resource("http://example.com/2", two)
        third = Registry(retrieve=bar_retrieve).with_resource(
            "http://example.com/3",
            three,
        )

        # Two distinct retrieve functions cannot be merged.
        with pytest.raises(Exception, match="conflict.*retriev"):
            first.combine(second, third)

    # --- removing resources ---

    def test_remove(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        registry = Registry({"urn:foo": one, "urn:bar": two})
        assert registry.remove("urn:foo") == Registry({"urn:bar": two})

    def test_remove_uncrawled(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        registry = Registry().with_resources(
            [("urn:foo", one), ("urn:bar", two)],
        )
        assert registry.remove("urn:foo") == Registry().with_resource(
            "urn:bar",
            two,
        )

    def test_remove_with_anchors(self):
        # Removing a resource also removes its crawled anchors.
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"anchors": {"foo": "bar"}})
        registry = (
            Registry()
            .with_resources(
                [("urn:foo", one), ("urn:bar", two)],
            )
            .crawl()
        )
        assert (
            registry.remove("urn:bar")
            == Registry()
            .with_resource(
                "urn:foo",
                one,
            )
            .crawl()
        )

    def test_remove_nonexistent_uri(self):
        with pytest.raises(exceptions.NoSuchResource) as e:
            Registry().remove("urn:doesNotExist")
        assert e.value == exceptions.NoSuchResource(ref="urn:doesNotExist")

    # --- dynamic retrieval ---

    def test_retrieve(self):
        foo = Resource.opaque({"foo": "bar"})
        registry = Registry(retrieve=lambda uri: foo)
        assert registry.get_or_retrieve("urn:example").value == foo

    def test_retrieve_arbitrary_exception(self):
        # Arbitrary retrieval errors are wrapped in Unretrievable.
        foo = Resource.opaque({"foo": "bar"})

        def retrieve(uri):
            if uri == "urn:succeed":
                return foo
            raise Exception("Oh no!")

        registry = Registry(retrieve=retrieve)
        assert registry.get_or_retrieve("urn:succeed").value == foo
        with pytest.raises(exceptions.Unretrievable):
            registry.get_or_retrieve("urn:uhoh")

    def test_retrieve_no_such_resource(self):
        # NoSuchResource raised by the retriever propagates unchanged.
        foo = Resource.opaque({"foo": "bar"})

        def retrieve(uri):
            if uri == "urn:succeed":
                return foo
            raise exceptions.NoSuchResource(ref=uri)

        registry = Registry(retrieve=retrieve)
        assert registry.get_or_retrieve("urn:succeed").value == foo
        with pytest.raises(exceptions.NoSuchResource):
            registry.get_or_retrieve("urn:uhoh")

    def test_retrieve_cannot_determine_specification(self):
        def retrieve(uri):
            return Resource.from_contents({})

        registry = Registry(retrieve=retrieve)
        with pytest.raises(exceptions.CannotDetermineSpecification):
            registry.get_or_retrieve("urn:uhoh")

    def test_retrieve_already_available_resource(self):
        # blow_up ensures the retriever is never consulted for known URIs.
        foo = Resource.opaque({"foo": "bar"})
        registry = Registry({"urn:example": foo}, retrieve=blow_up)
        assert registry["urn:example"] == foo
        assert registry.get_or_retrieve("urn:example").value == foo

    def test_retrieve_first_checks_crawlable_resource(self):
        child = ID_AND_CHILDREN.create_resource({"ID": "urn:child", "foo": 12})
        root = ID_AND_CHILDREN.create_resource({"children": [child.contents]})
        registry = Registry(retrieve=blow_up).with_resource("urn:root", root)
        assert registry.crawl()["urn:child"] == child

    # --- resolver construction ---

    def test_resolver(self):
        one = Resource.opaque(contents={})
        registry = Registry({"http://example.com": one})
        resolver = registry.resolver(base_uri="http://example.com")
        assert resolver.lookup("#").contents == {}

    def test_resolver_with_root_identified(self):
        root = ID_AND_CHILDREN.create_resource({"ID": "http://example.com"})
        resolver = Registry().resolver_with_root(root)
        assert resolver.lookup("http://example.com").contents == root.contents
        assert resolver.lookup("#").contents == root.contents

    def test_resolver_with_root_unidentified(self):
        root = Resource.opaque(contents={})
        resolver = Registry().resolver_with_root(root)
        assert resolver.lookup("#").contents == root.contents

    # --- repr pins exact human-readable summaries ---

    def test_repr(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        registry = Registry().with_resources(
            [
                ("http://example.com/1", one),
                ("http://example.com/foo/bar", two),
            ],
        )
        assert repr(registry) == "<Registry (2 uncrawled resources)>"
        assert repr(registry.crawl()) == "<Registry (2 resources)>"

    def test_repr_mixed_crawled(self):
        one = Resource.opaque(contents={})
        two = ID_AND_CHILDREN.create_resource({"foo": "bar"})
        registry = (
            Registry(
                {"http://example.com/1": one},
            )
            .crawl()
            .with_resource(uri="http://example.com/foo/bar", resource=two)
        )
        assert repr(registry) == "<Registry (2 resources, 1 uncrawled)>"

    def test_repr_one_resource(self):
        registry = Registry().with_resource(
            uri="http://example.com/1",
            resource=Resource.opaque(contents={}),
        )
        assert repr(registry) == "<Registry (1 uncrawled resource)>"

    def test_repr_empty(self):
        assert repr(Registry()) == "<Registry (0 resources)>"
577
+
578
+
579
class TestResource:
    """
    Tests for `referencing.Resource`: creation from contents, ID /
    subresource / anchor delegation to its `Specification`, and pointers.
    """

    def test_from_contents_from_json_schema(self):
        schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"}
        resource = Resource.from_contents(schema)
        assert resource == Resource(contents=schema, specification=DRAFT202012)

    def test_from_contents_with_no_discernible_information(self):
        """
        Creating a resource with no discernible way to see what
        specification it belongs to (e.g. no ``$schema`` keyword for JSON
        Schema) raises an error.
        """

        with pytest.raises(exceptions.CannotDetermineSpecification):
            Resource.from_contents({"foo": "bar"})

    def test_from_contents_with_no_discernible_information_and_default(self):
        resource = Resource.from_contents(
            {"foo": "bar"},
            default_specification=Specification.OPAQUE,
        )
        assert resource == Resource.opaque(contents={"foo": "bar"})

    def test_from_contents_unneeded_default(self):
        # An explicit $schema wins over the provided default.
        schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"}
        resource = Resource.from_contents(
            schema,
            default_specification=Specification.OPAQUE,
        )
        assert resource == Resource(
            contents=schema,
            specification=DRAFT202012,
        )

    def test_non_mapping_from_contents(self):
        resource = Resource.from_contents(
            True,
            default_specification=ID_AND_CHILDREN,
        )
        assert resource == Resource(
            contents=True,
            specification=ID_AND_CHILDREN,
        )

    def test_from_contents_with_fallback(self):
        resource = Resource.from_contents(
            {"foo": "bar"},
            default_specification=Specification.OPAQUE,
        )
        assert resource == Resource.opaque(contents={"foo": "bar"})

    def test_id_delegates_to_specification(self):
        specification = Specification(
            name="",
            id_of=lambda contents: "urn:fixedID",
            subresources_of=lambda contents: [],
            anchors_in=lambda specification, contents: [],
            maybe_in_subresource=(
                lambda segments, resolver, subresource: resolver
            ),
        )
        resource = Resource(
            contents={"foo": "baz"},
            specification=specification,
        )
        assert resource.id() == "urn:fixedID"

    def test_id_strips_empty_fragment(self):
        uri = "http://example.com/"
        root = ID_AND_CHILDREN.create_resource({"ID": uri + "#"})
        assert root.id() == uri

    def test_subresources_delegates_to_specification(self):
        resource = ID_AND_CHILDREN.create_resource({"children": [{}, 12]})
        assert list(resource.subresources()) == [
            ID_AND_CHILDREN.create_resource(each) for each in [{}, 12]
        ]

    def test_subresource_with_different_specification(self):
        # Subresources may detect their own (different) specification.
        schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"}
        resource = ID_AND_CHILDREN.create_resource({"children": [schema]})
        assert list(resource.subresources()) == [
            DRAFT202012.create_resource(schema),
        ]

    def test_anchors_delegates_to_specification(self):
        resource = ID_AND_CHILDREN.create_resource(
            {"anchors": {"foo": {}, "bar": 1, "baz": ""}},
        )
        assert list(resource.anchors()) == [
            Anchor(name="foo", resource=ID_AND_CHILDREN.create_resource({})),
            Anchor(name="bar", resource=ID_AND_CHILDREN.create_resource(1)),
            Anchor(name="baz", resource=ID_AND_CHILDREN.create_resource("")),
        ]

    # --- JSON pointer resolution within a resource ---

    def test_pointer_to_mapping(self):
        resource = Resource.opaque(contents={"foo": "baz"})
        resolver = Registry().resolver()
        assert resource.pointer("/foo", resolver=resolver).contents == "baz"

    def test_pointer_to_array(self):
        resource = Resource.opaque(contents={"foo": {"bar": [3]}})
        resolver = Registry().resolver()
        assert resource.pointer("/foo/bar/0", resolver=resolver).contents == 3

    def test_root_pointer(self):
        contents = {"foo": "baz"}
        resource = Resource.opaque(contents=contents)
        resolver = Registry().resolver()
        assert resource.pointer("", resolver=resolver).contents == contents

    def test_opaque(self):
        contents = {"foo": "bar"}
        assert Resource.opaque(contents) == Resource(
            contents=contents,
            specification=Specification.OPAQUE,
        )
696
+
697
+
698
+ class TestResolver:
699
+ def test_lookup_exact_uri(self):
700
+ resource = Resource.opaque(contents={"foo": "baz"})
701
+ resolver = Registry({"http://example.com/1": resource}).resolver()
702
+ resolved = resolver.lookup("http://example.com/1")
703
+ assert resolved.contents == resource.contents
704
+
705
+ def test_lookup_subresource(self):
706
+ root = ID_AND_CHILDREN.create_resource(
707
+ {
708
+ "ID": "http://example.com/",
709
+ "children": [
710
+ {"ID": "http://example.com/a", "foo": 12},
711
+ ],
712
+ },
713
+ )
714
+ registry = root @ Registry()
715
+ resolved = registry.resolver().lookup("http://example.com/a")
716
+ assert resolved.contents == {"ID": "http://example.com/a", "foo": 12}
717
+
718
+ def test_lookup_anchor_with_id(self):
719
+ root = ID_AND_CHILDREN.create_resource(
720
+ {
721
+ "ID": "http://example.com/",
722
+ "anchors": {"foo": 12},
723
+ },
724
+ )
725
+ registry = root @ Registry()
726
+ resolved = registry.resolver().lookup("http://example.com/#foo")
727
+ assert resolved.contents == 12
728
+
729
+ def test_lookup_anchor_without_id(self):
730
+ root = ID_AND_CHILDREN.create_resource({"anchors": {"foo": 12}})
731
+ resolver = Registry().with_resource("urn:example", root).resolver()
732
+ resolved = resolver.lookup("urn:example#foo")
733
+ assert resolved.contents == 12
734
+
735
+ def test_lookup_unknown_reference(self):
736
+ resolver = Registry().resolver()
737
+ ref = "http://example.com/does/not/exist"
738
+ with pytest.raises(exceptions.Unresolvable) as e:
739
+ resolver.lookup(ref)
740
+ assert e.value == exceptions.Unresolvable(ref=ref)
741
+
742
+ def test_lookup_non_existent_pointer(self):
743
+ resource = Resource.opaque({"foo": {}})
744
+ resolver = Registry({"http://example.com/1": resource}).resolver()
745
+ ref = "http://example.com/1#/foo/bar"
746
+ with pytest.raises(exceptions.Unresolvable) as e:
747
+ resolver.lookup(ref)
748
+ assert e.value == exceptions.PointerToNowhere(
749
+ ref="/foo/bar",
750
+ resource=resource,
751
+ )
752
+ assert str(e.value) == "'/foo/bar' does not exist within {'foo': {}}"
753
+
754
+ def test_lookup_non_existent_pointer_to_array_index(self):
755
+ resource = Resource.opaque([1, 2, 4, 8])
756
+ resolver = Registry({"http://example.com/1": resource}).resolver()
757
+ ref = "http://example.com/1#/10"
758
+ with pytest.raises(exceptions.Unresolvable) as e:
759
+ resolver.lookup(ref)
760
+ assert e.value == exceptions.PointerToNowhere(
761
+ ref="/10",
762
+ resource=resource,
763
+ )
764
+
765
+ def test_lookup_pointer_to_empty_string(self):
766
+ resolver = Registry().resolver_with_root(Resource.opaque({"": {}}))
767
+ assert resolver.lookup("#/").contents == {}
768
+
769
+ def test_lookup_non_existent_pointer_to_empty_string(self):
770
+ resource = Resource.opaque({"foo": {}})
771
+ resolver = Registry().resolver_with_root(resource)
772
+ with pytest.raises(
773
+ exceptions.Unresolvable,
774
+ match="^'/' does not exist within {'foo': {}}.*'#'",
775
+ ) as e:
776
+ resolver.lookup("#/")
777
+ assert e.value == exceptions.PointerToNowhere(
778
+ ref="/",
779
+ resource=resource,
780
+ )
781
+
782
+ def test_lookup_non_existent_anchor(self):
783
+ root = ID_AND_CHILDREN.create_resource({"anchors": {}})
784
+ resolver = Registry().with_resource("urn:example", root).resolver()
785
+ resolved = resolver.lookup("urn:example")
786
+ assert resolved.contents == root.contents
787
+
788
+ ref = "urn:example#noSuchAnchor"
789
+ with pytest.raises(exceptions.Unresolvable) as e:
790
+ resolver.lookup(ref)
791
+ assert "'noSuchAnchor' does not exist" in str(e.value)
792
+ assert e.value == exceptions.NoSuchAnchor(
793
+ ref="urn:example",
794
+ resource=root,
795
+ anchor="noSuchAnchor",
796
+ )
797
+
798
+ def test_lookup_invalid_JSON_pointerish_anchor(self):
799
+ resolver = Registry().resolver_with_root(
800
+ ID_AND_CHILDREN.create_resource(
801
+ {
802
+ "ID": "http://example.com/",
803
+ "foo": {"bar": 12},
804
+ },
805
+ ),
806
+ )
807
+
808
+ valid = resolver.lookup("#/foo/bar")
809
+ assert valid.contents == 12
810
+
811
+ with pytest.raises(exceptions.InvalidAnchor) as e:
812
+ resolver.lookup("#foo/bar")
813
+ assert " '#/foo/bar'" in str(e.value)
814
+
815
+ def test_lookup_retrieved_resource(self):
816
+ resource = Resource.opaque(contents={"foo": "baz"})
817
+ resolver = Registry(retrieve=lambda uri: resource).resolver()
818
+ resolved = resolver.lookup("http://example.com/")
819
+ assert resolved.contents == resource.contents
820
+
821
+ def test_lookup_failed_retrieved_resource(self):
822
+ """
823
+ Unretrievable exceptions are also wrapped in Unresolvable.
824
+ """
825
+
826
+ uri = "http://example.com/"
827
+
828
+ registry = Registry(retrieve=blow_up)
829
+ with pytest.raises(exceptions.Unretrievable):
830
+ registry.get_or_retrieve(uri)
831
+
832
+ resolver = registry.resolver()
833
+ with pytest.raises(exceptions.Unresolvable):
834
+ resolver.lookup(uri)
835
+
836
+ def test_repeated_lookup_from_retrieved_resource(self):
837
+ """
838
+ A (custom-)retrieved resource is added to the registry returned by
839
+ looking it up.
840
+ """
841
+ resource = Resource.opaque(contents={"foo": "baz"})
842
+ once = [resource]
843
+
844
+ def retrieve(uri):
845
+ return once.pop()
846
+
847
+ resolver = Registry(retrieve=retrieve).resolver()
848
+ resolved = resolver.lookup("http://example.com/")
849
+ assert resolved.contents == resource.contents
850
+
851
+ resolved = resolved.resolver.lookup("http://example.com/")
852
+ assert resolved.contents == resource.contents
853
+
854
+ def test_repeated_anchor_lookup_from_retrieved_resource(self):
855
+ resource = Resource.opaque(contents={"foo": "baz"})
856
+ once = [resource]
857
+
858
+ def retrieve(uri):
859
+ return once.pop()
860
+
861
+ resolver = Registry(retrieve=retrieve).resolver()
862
+ resolved = resolver.lookup("http://example.com/")
863
+ assert resolved.contents == resource.contents
864
+
865
+ resolved = resolved.resolver.lookup("#")
866
+ assert resolved.contents == resource.contents
867
+
868
+ # FIXME: The tests below aren't really representable in the current
869
+ # suite, though we should probably think of ways to do so.
870
+
871
+ def test_in_subresource(self):
872
+ root = ID_AND_CHILDREN.create_resource(
873
+ {
874
+ "ID": "http://example.com/",
875
+ "children": [
876
+ {
877
+ "ID": "child/",
878
+ "children": [{"ID": "grandchild"}],
879
+ },
880
+ ],
881
+ },
882
+ )
883
+ registry = root @ Registry()
884
+
885
+ resolver = registry.resolver()
886
+ first = resolver.lookup("http://example.com/")
887
+ assert first.contents == root.contents
888
+
889
+ with pytest.raises(exceptions.Unresolvable):
890
+ first.resolver.lookup("grandchild")
891
+
892
+ sub = first.resolver.in_subresource(
893
+ ID_AND_CHILDREN.create_resource(first.contents["children"][0]),
894
+ )
895
+ second = sub.lookup("grandchild")
896
+ assert second.contents == {"ID": "grandchild"}
897
+
898
+ def test_in_pointer_subresource(self):
899
+ root = ID_AND_CHILDREN.create_resource(
900
+ {
901
+ "ID": "http://example.com/",
902
+ "children": [
903
+ {
904
+ "ID": "child/",
905
+ "children": [{"ID": "grandchild"}],
906
+ },
907
+ ],
908
+ },
909
+ )
910
+ registry = root @ Registry()
911
+
912
+ resolver = registry.resolver()
913
+ first = resolver.lookup("http://example.com/")
914
+ assert first.contents == root.contents
915
+
916
+ with pytest.raises(exceptions.Unresolvable):
917
+ first.resolver.lookup("grandchild")
918
+
919
+ second = first.resolver.lookup("#/children/0")
920
+ third = second.resolver.lookup("grandchild")
921
+ assert third.contents == {"ID": "grandchild"}
922
+
923
+ def test_dynamic_scope(self):
924
+ one = ID_AND_CHILDREN.create_resource(
925
+ {
926
+ "ID": "http://example.com/",
927
+ "children": [
928
+ {
929
+ "ID": "child/",
930
+ "children": [{"ID": "grandchild"}],
931
+ },
932
+ ],
933
+ },
934
+ )
935
+ two = ID_AND_CHILDREN.create_resource(
936
+ {
937
+ "ID": "http://example.com/two",
938
+ "children": [{"ID": "two-child/"}],
939
+ },
940
+ )
941
+ registry = [one, two] @ Registry()
942
+
943
+ resolver = registry.resolver()
944
+ first = resolver.lookup("http://example.com/")
945
+ second = first.resolver.lookup("#/children/0")
946
+ third = second.resolver.lookup("grandchild")
947
+ fourth = third.resolver.lookup("http://example.com/two")
948
+ assert list(fourth.resolver.dynamic_scope()) == [
949
+ ("http://example.com/child/grandchild", fourth.resolver._registry),
950
+ ("http://example.com/child/", fourth.resolver._registry),
951
+ ("http://example.com/", fourth.resolver._registry),
952
+ ]
953
+ assert list(third.resolver.dynamic_scope()) == [
954
+ ("http://example.com/child/", third.resolver._registry),
955
+ ("http://example.com/", third.resolver._registry),
956
+ ]
957
+ assert list(second.resolver.dynamic_scope()) == [
958
+ ("http://example.com/", second.resolver._registry),
959
+ ]
960
+ assert list(first.resolver.dynamic_scope()) == []
961
+
962
+
963
class TestSpecification:
    """Tests for constructing `Specification` objects and detecting dialects."""

    def test_create_resource(self):
        """A hand-built specification attaches itself to resources it creates."""

        def fixed_id(contents):
            # Every resource claims the same ID regardless of contents.
            return "urn:fixedID"

        specification = Specification(
            name="",
            id_of=fixed_id,
            subresources_of=lambda contents: [],
            anchors_in=lambda specification, contents: [],
            maybe_in_subresource=(
                lambda segments, resolver, subresource: resolver
            ),
        )
        created = specification.create_resource(contents={"foo": "baz"})
        expected = Resource(
            contents={"foo": "baz"},
            specification=specification,
        )
        assert created == expected
        assert created.id() == "urn:fixedID"

    def test_detect_from_json_schema(self):
        """A $schema keyword identifies the JSON Schema dialect."""
        detected = Specification.detect(
            {"$schema": "https://json-schema.org/draft/2020-12/schema"},
        )
        assert detected == DRAFT202012

    def test_detect_with_no_discernible_information(self):
        with pytest.raises(exceptions.CannotDetermineSpecification):
            Specification.detect({"foo": "bar"})

    def test_detect_with_non_URI_schema(self):
        # A non-string $schema value cannot identify a dialect.
        with pytest.raises(exceptions.CannotDetermineSpecification):
            Specification.detect({"$schema": 37})

    def test_detect_with_no_discernible_information_and_default(self):
        detected = Specification.OPAQUE.detect({"foo": "bar"})
        assert detected is Specification.OPAQUE

    def test_detect_unneeded_default(self):
        # An explicit $schema wins over the default specification.
        detected = Specification.OPAQUE.detect(
            {"$schema": "https://json-schema.org/draft/2020-12/schema"},
        )
        assert detected == DRAFT202012

    def test_non_mapping_detect(self):
        with pytest.raises(exceptions.CannotDetermineSpecification):
            Specification.detect(True)

    def test_non_mapping_detect_with_default(self):
        detected = ID_AND_CHILDREN.detect(True)
        assert detected is ID_AND_CHILDREN

    def test_detect_with_fallback(self):
        detected = Specification.OPAQUE.detect({"foo": "bar"})
        assert detected is Specification.OPAQUE

    def test_repr(self):
        assert (
            repr(ID_AND_CHILDREN) == "<Specification name='id-and-children'>"
        )
1019
+
1020
+
1021
class TestOpaqueSpecification:
    """The opaque specification assigns no structure to arbitrary values."""

    THINGS = [{"foo": "bar"}, True, 37, "foo", object()]

    @pytest.mark.parametrize("thing", THINGS)
    def test_no_id(self, thing):
        """
        An arbitrary thing has no ID.
        """

        assert Specification.OPAQUE.id_of(thing) is None

    @pytest.mark.parametrize("thing", THINGS)
    def test_no_subresources(self, thing):
        """
        An arbitrary thing has no subresources.
        """

        subresources = list(Specification.OPAQUE.subresources_of(thing))
        assert subresources == []

    @pytest.mark.parametrize("thing", THINGS)
    def test_no_anchors(self, thing):
        """
        An arbitrary thing has no anchors.
        """

        anchors = list(Specification.OPAQUE.anchors_in(thing))
        assert anchors == []
1047
+
1048
+
1049
@pytest.mark.parametrize(
    "cls",
    [Anchor, Registry, Resource, Specification, exceptions.PointerToNowhere],
)
def test_nonsubclassable(cls):
    """Attempting to subclass any public referencing type raises."""
    with pytest.raises(Exception, match="(?i)subclassing"):

        class Boom(cls):  # pragma: no cover
            pass
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_exceptions.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools
2
+
3
+ import pytest
4
+
5
+ from referencing import Resource, exceptions
6
+
7
+
8
def pairs(choices):
    """Return every unordered 2-combination of ``choices``, in input order."""
    every_pair = itertools.combinations(choices, 2)
    return every_pair
10
+
11
+
12
# A trivial resource shared by the exception fixtures below.
TRUE = Resource.opaque(True)

# Zero-argument callables, one per exception type, so each test gets a
# freshly constructed instance rather than a shared one.
thunks = (
    lambda: exceptions.CannotDetermineSpecification(TRUE),
    lambda: exceptions.NoSuchResource("urn:example:foo"),
    lambda: exceptions.NoInternalID(TRUE),
    lambda: exceptions.InvalidAnchor(resource=TRUE, anchor="foo", ref="a#b"),
    lambda: exceptions.NoSuchAnchor(resource=TRUE, anchor="foo", ref="a#b"),
    lambda: exceptions.PointerToNowhere(resource=TRUE, ref="urn:example:foo"),
    lambda: exceptions.Unresolvable("urn:example:foo"),
    lambda: exceptions.Unretrievable("urn:example:foo"),
)
25
+
26
+
27
@pytest.mark.parametrize("one, two", pairs(each() for each in thunks))
def test_eq_incompatible_types(one, two):
    """Exception instances of differing types never compare equal."""
    assert one != two
30
+
31
+
32
@pytest.mark.parametrize("thunk", thunks)
def test_hash(thunk):
    """Equal exception instances hash equally and so are usable in sets."""
    first, second = thunk(), thunk()
    assert first in {second}
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_jsonschema.py ADDED
@@ -0,0 +1,382 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from referencing import Registry, Resource, Specification
4
+ import referencing.jsonschema
5
+
6
+
7
+ @pytest.mark.parametrize(
8
+ "uri, expected",
9
+ [
10
+ (
11
+ "https://json-schema.org/draft/2020-12/schema",
12
+ referencing.jsonschema.DRAFT202012,
13
+ ),
14
+ (
15
+ "https://json-schema.org/draft/2019-09/schema",
16
+ referencing.jsonschema.DRAFT201909,
17
+ ),
18
+ (
19
+ "http://json-schema.org/draft-07/schema#",
20
+ referencing.jsonschema.DRAFT7,
21
+ ),
22
+ (
23
+ "http://json-schema.org/draft-06/schema#",
24
+ referencing.jsonschema.DRAFT6,
25
+ ),
26
+ (
27
+ "http://json-schema.org/draft-04/schema#",
28
+ referencing.jsonschema.DRAFT4,
29
+ ),
30
+ (
31
+ "http://json-schema.org/draft-03/schema#",
32
+ referencing.jsonschema.DRAFT3,
33
+ ),
34
+ ],
35
+ )
36
+ def test_schemas_with_explicit_schema_keywords_are_detected(uri, expected):
37
+ """
38
+ The $schema keyword in JSON Schema is a dialect identifier.
39
+ """
40
+ contents = {"$schema": uri}
41
+ resource = Resource.from_contents(contents)
42
+ assert resource == Resource(contents=contents, specification=expected)
43
+
44
+
45
def test_unknown_dialect():
    """A $schema naming an unknown dialect raises, carrying the URI."""
    dialect_id = "http://example.com/unknown-json-schema-dialect-id"
    with pytest.raises(referencing.jsonschema.UnknownDialect) as excinfo:
        Resource.from_contents({"$schema": dialect_id})
    assert excinfo.value.uri == dialect_id
50
+
51
+
52
+ @pytest.mark.parametrize(
53
+ "id, specification",
54
+ [
55
+ ("$id", referencing.jsonschema.DRAFT202012),
56
+ ("$id", referencing.jsonschema.DRAFT201909),
57
+ ("$id", referencing.jsonschema.DRAFT7),
58
+ ("$id", referencing.jsonschema.DRAFT6),
59
+ ("id", referencing.jsonschema.DRAFT4),
60
+ ("id", referencing.jsonschema.DRAFT3),
61
+ ],
62
+ )
63
+ def test_id_of_mapping(id, specification):
64
+ uri = "http://example.com/some-schema"
65
+ assert specification.id_of({id: uri}) == uri
66
+
67
+
68
+ @pytest.mark.parametrize(
69
+ "specification",
70
+ [
71
+ referencing.jsonschema.DRAFT202012,
72
+ referencing.jsonschema.DRAFT201909,
73
+ referencing.jsonschema.DRAFT7,
74
+ referencing.jsonschema.DRAFT6,
75
+ ],
76
+ )
77
+ @pytest.mark.parametrize("value", [True, False])
78
+ def test_id_of_bool(specification, value):
79
+ assert specification.id_of(value) is None
80
+
81
+
82
+ @pytest.mark.parametrize(
83
+ "specification",
84
+ [
85
+ referencing.jsonschema.DRAFT202012,
86
+ referencing.jsonschema.DRAFT201909,
87
+ referencing.jsonschema.DRAFT7,
88
+ referencing.jsonschema.DRAFT6,
89
+ ],
90
+ )
91
+ @pytest.mark.parametrize("value", [True, False])
92
+ def test_anchors_in_bool(specification, value):
93
+ assert list(specification.anchors_in(value)) == []
94
+
95
+
96
+ @pytest.mark.parametrize(
97
+ "specification",
98
+ [
99
+ referencing.jsonschema.DRAFT202012,
100
+ referencing.jsonschema.DRAFT201909,
101
+ referencing.jsonschema.DRAFT7,
102
+ referencing.jsonschema.DRAFT6,
103
+ ],
104
+ )
105
+ @pytest.mark.parametrize("value", [True, False])
106
+ def test_subresources_of_bool(specification, value):
107
+ assert list(specification.subresources_of(value)) == []
108
+
109
+
110
+ @pytest.mark.parametrize(
111
+ "uri, expected",
112
+ [
113
+ (
114
+ "https://json-schema.org/draft/2020-12/schema",
115
+ referencing.jsonschema.DRAFT202012,
116
+ ),
117
+ (
118
+ "https://json-schema.org/draft/2019-09/schema",
119
+ referencing.jsonschema.DRAFT201909,
120
+ ),
121
+ (
122
+ "http://json-schema.org/draft-07/schema#",
123
+ referencing.jsonschema.DRAFT7,
124
+ ),
125
+ (
126
+ "http://json-schema.org/draft-06/schema#",
127
+ referencing.jsonschema.DRAFT6,
128
+ ),
129
+ (
130
+ "http://json-schema.org/draft-04/schema#",
131
+ referencing.jsonschema.DRAFT4,
132
+ ),
133
+ (
134
+ "http://json-schema.org/draft-03/schema#",
135
+ referencing.jsonschema.DRAFT3,
136
+ ),
137
+ ],
138
+ )
139
+ def test_specification_with(uri, expected):
140
+ assert referencing.jsonschema.specification_with(uri) == expected
141
+
142
+
143
+ @pytest.mark.parametrize(
144
+ "uri, expected",
145
+ [
146
+ (
147
+ "http://json-schema.org/draft-07/schema",
148
+ referencing.jsonschema.DRAFT7,
149
+ ),
150
+ (
151
+ "http://json-schema.org/draft-06/schema",
152
+ referencing.jsonschema.DRAFT6,
153
+ ),
154
+ (
155
+ "http://json-schema.org/draft-04/schema",
156
+ referencing.jsonschema.DRAFT4,
157
+ ),
158
+ (
159
+ "http://json-schema.org/draft-03/schema",
160
+ referencing.jsonschema.DRAFT3,
161
+ ),
162
+ ],
163
+ )
164
+ def test_specification_with_no_empty_fragment(uri, expected):
165
+ assert referencing.jsonschema.specification_with(uri) == expected
166
+
167
+
168
def test_specification_with_unknown_dialect():
    """With no default, an unknown dialect URI raises UnknownDialect."""
    unknown = "http://example.com/unknown-json-schema-dialect-id"
    with pytest.raises(referencing.jsonschema.UnknownDialect) as excinfo:
        referencing.jsonschema.specification_with(unknown)
    assert excinfo.value.uri == unknown
173
+
174
+
175
def test_specification_with_default():
    """An explicit default is returned for dialects which are unknown."""
    result = referencing.jsonschema.specification_with(
        "http://example.com/unknown-json-schema-dialect-id",
        default=Specification.OPAQUE,
    )
    assert result is Specification.OPAQUE
182
+
183
+
184
+ # FIXME: The tests below should move to the referencing suite but I haven't yet
185
+ # figured out how to represent dynamic (& recursive) ref lookups in it.
186
+ def test_lookup_trivial_dynamic_ref():
187
+ one = referencing.jsonschema.DRAFT202012.create_resource(
188
+ {"$dynamicAnchor": "foo"},
189
+ )
190
+ resolver = Registry().with_resource("http://example.com", one).resolver()
191
+ resolved = resolver.lookup("http://example.com#foo")
192
+ assert resolved.contents == one.contents
193
+
194
+
195
+ def test_multiple_lookup_trivial_dynamic_ref():
196
+ TRUE = referencing.jsonschema.DRAFT202012.create_resource(True)
197
+ root = referencing.jsonschema.DRAFT202012.create_resource(
198
+ {
199
+ "$id": "http://example.com",
200
+ "$dynamicAnchor": "fooAnchor",
201
+ "$defs": {
202
+ "foo": {
203
+ "$id": "foo",
204
+ "$dynamicAnchor": "fooAnchor",
205
+ "$defs": {
206
+ "bar": True,
207
+ "baz": {
208
+ "$dynamicAnchor": "fooAnchor",
209
+ },
210
+ },
211
+ },
212
+ },
213
+ },
214
+ )
215
+ resolver = (
216
+ Registry()
217
+ .with_resources(
218
+ [
219
+ ("http://example.com", root),
220
+ ("http://example.com/foo/", TRUE),
221
+ ("http://example.com/foo/bar", root),
222
+ ],
223
+ )
224
+ .resolver()
225
+ )
226
+
227
+ first = resolver.lookup("http://example.com")
228
+ second = first.resolver.lookup("foo/")
229
+ resolver = second.resolver.lookup("bar").resolver
230
+ fourth = resolver.lookup("#fooAnchor")
231
+ assert fourth.contents == root.contents
232
+
233
+
234
+ def test_multiple_lookup_dynamic_ref_to_nondynamic_ref():
235
+ one = referencing.jsonschema.DRAFT202012.create_resource(
236
+ {"$anchor": "fooAnchor"},
237
+ )
238
+ two = referencing.jsonschema.DRAFT202012.create_resource(
239
+ {
240
+ "$id": "http://example.com",
241
+ "$dynamicAnchor": "fooAnchor",
242
+ "$defs": {
243
+ "foo": {
244
+ "$id": "foo",
245
+ "$dynamicAnchor": "fooAnchor",
246
+ "$defs": {
247
+ "bar": True,
248
+ "baz": {
249
+ "$dynamicAnchor": "fooAnchor",
250
+ },
251
+ },
252
+ },
253
+ },
254
+ },
255
+ )
256
+ resolver = (
257
+ Registry()
258
+ .with_resources(
259
+ [
260
+ ("http://example.com", two),
261
+ ("http://example.com/foo/", one),
262
+ ("http://example.com/foo/bar", two),
263
+ ],
264
+ )
265
+ .resolver()
266
+ )
267
+
268
+ first = resolver.lookup("http://example.com")
269
+ second = first.resolver.lookup("foo/")
270
+ resolver = second.resolver.lookup("bar").resolver
271
+ fourth = resolver.lookup("#fooAnchor")
272
+ assert fourth.contents == two.contents
273
+
274
+
275
+ def test_lookup_trivial_recursive_ref():
276
+ one = referencing.jsonschema.DRAFT201909.create_resource(
277
+ {"$recursiveAnchor": True},
278
+ )
279
+ resolver = Registry().with_resource("http://example.com", one).resolver()
280
+ first = resolver.lookup("http://example.com")
281
+ resolved = referencing.jsonschema.lookup_recursive_ref(
282
+ resolver=first.resolver,
283
+ )
284
+ assert resolved.contents == one.contents
285
+
286
+
287
+ def test_lookup_recursive_ref_to_bool():
288
+ TRUE = referencing.jsonschema.DRAFT201909.create_resource(True)
289
+ registry = Registry({"http://example.com": TRUE})
290
+ resolved = referencing.jsonschema.lookup_recursive_ref(
291
+ resolver=registry.resolver(base_uri="http://example.com"),
292
+ )
293
+ assert resolved.contents == TRUE.contents
294
+
295
+
296
+ def test_multiple_lookup_recursive_ref_to_bool():
297
+ TRUE = referencing.jsonschema.DRAFT201909.create_resource(True)
298
+ root = referencing.jsonschema.DRAFT201909.create_resource(
299
+ {
300
+ "$id": "http://example.com",
301
+ "$recursiveAnchor": True,
302
+ "$defs": {
303
+ "foo": {
304
+ "$id": "foo",
305
+ "$recursiveAnchor": True,
306
+ "$defs": {
307
+ "bar": True,
308
+ "baz": {
309
+ "$recursiveAnchor": True,
310
+ "$anchor": "fooAnchor",
311
+ },
312
+ },
313
+ },
314
+ },
315
+ },
316
+ )
317
+ resolver = (
318
+ Registry()
319
+ .with_resources(
320
+ [
321
+ ("http://example.com", root),
322
+ ("http://example.com/foo/", TRUE),
323
+ ("http://example.com/foo/bar", root),
324
+ ],
325
+ )
326
+ .resolver()
327
+ )
328
+
329
+ first = resolver.lookup("http://example.com")
330
+ second = first.resolver.lookup("foo/")
331
+ resolver = second.resolver.lookup("bar").resolver
332
+ fourth = referencing.jsonschema.lookup_recursive_ref(resolver=resolver)
333
+ assert fourth.contents == root.contents
334
+
335
+
336
+ def test_multiple_lookup_recursive_ref_with_nonrecursive_ref():
337
+ one = referencing.jsonschema.DRAFT201909.create_resource(
338
+ {"$recursiveAnchor": True},
339
+ )
340
+ two = referencing.jsonschema.DRAFT201909.create_resource(
341
+ {
342
+ "$id": "http://example.com",
343
+ "$recursiveAnchor": True,
344
+ "$defs": {
345
+ "foo": {
346
+ "$id": "foo",
347
+ "$recursiveAnchor": True,
348
+ "$defs": {
349
+ "bar": True,
350
+ "baz": {
351
+ "$recursiveAnchor": True,
352
+ "$anchor": "fooAnchor",
353
+ },
354
+ },
355
+ },
356
+ },
357
+ },
358
+ )
359
+ three = referencing.jsonschema.DRAFT201909.create_resource(
360
+ {"$recursiveAnchor": False},
361
+ )
362
+ resolver = (
363
+ Registry()
364
+ .with_resources(
365
+ [
366
+ ("http://example.com", three),
367
+ ("http://example.com/foo/", two),
368
+ ("http://example.com/foo/bar", one),
369
+ ],
370
+ )
371
+ .resolver()
372
+ )
373
+
374
+ first = resolver.lookup("http://example.com")
375
+ second = first.resolver.lookup("foo/")
376
+ resolver = second.resolver.lookup("bar").resolver
377
+ fourth = referencing.jsonschema.lookup_recursive_ref(resolver=resolver)
378
+ assert fourth.contents == two.contents
379
+
380
+
381
def test_empty_registry():
    """The premade empty registry equals a freshly constructed one."""
    assert referencing.jsonschema.EMPTY_REGISTRY == Registry()
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_referencing_suite.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pathlib import Path
2
+ import json
3
+ import os
4
+
5
+ import pytest
6
+
7
+ from referencing import Registry
8
+ from referencing.exceptions import Unresolvable
9
+ import referencing.jsonschema
10
+
11
+
12
class SuiteNotFound(Exception):
    """The referencing test suite could not be located on disk."""

    def __str__(self):  # pragma: no cover
        return (
            "Cannot find the referencing suite. "
            "Set the REFERENCING_SUITE environment variable to the path to "
            "the suite, or run the test suite from alongside a full checkout "
            "of the git repository."
        )
+
21
+
22
+ if "REFERENCING_SUITE" in os.environ: # pragma: no cover
23
+ SUITE = Path(os.environ["REFERENCING_SUITE"]) / "tests"
24
+ else:
25
+ SUITE = Path(__file__).parent.parent.parent / "suite/tests"
26
+ if not SUITE.is_dir(): # pragma: no cover
27
+ raise SuiteNotFound()
28
+ DIALECT_IDS = json.loads(SUITE.joinpath("specifications.json").read_text())
29
+
30
+
31
@pytest.mark.parametrize(
    "test_path",
    [
        pytest.param(each, id=f"{each.parent.name}-{each.stem}")
        for each in SUITE.glob("*/**/*.json")
    ],
)
def test_referencing_suite(test_path, subtests):
    """
    Run one file of the referencing suite against this implementation.

    Each JSON file supplies a ``registry`` mapping URIs to resource contents
    plus a list of ``tests``; every test gives a ``ref`` to look up and either
    a ``target`` to expect or an ``error`` flag when resolution must fail.
    Optional ``then`` entries chain further lookups from the previous result.
    """
    dialect_id = DIALECT_IDS[test_path.relative_to(SUITE).parts[0]]
    specification = referencing.jsonschema.specification_with(dialect_id)
    loaded = json.loads(test_path.read_text())
    # NOTE: the original bound ``registry = loaded["registry"]`` here and
    # immediately overwrote it below — that dead assignment is removed.
    registry = Registry().with_resources(
        (uri, specification.create_resource(contents))
        for uri, contents in loaded["registry"].items()
    )
    for test in loaded["tests"]:
        with subtests.test(test=test):
            if "normalization" in test_path.stem:
                pytest.xfail("APIs need to change for proper URL support.")

            resolver = registry.resolver(base_uri=test.get("base_uri", ""))

            if test.get("error"):
                with pytest.raises(Unresolvable):
                    resolver.lookup(test["ref"])
            else:
                resolved = resolver.lookup(test["ref"])
                assert resolved.contents == test["target"]

                # Chained lookups continue from the previous resolution.
                then = test.get("then")
                while then:  # pragma: no cover
                    with subtests.test(test=test, then=then):
                        resolved = resolved.resolver.lookup(then["ref"])
                        assert resolved.contents == then["target"]
                    then = then.get("then")
evalkit_cambrian/lib/python3.10/site-packages/referencing/tests/test_retrieval.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from functools import lru_cache
2
+ import json
3
+
4
+ import pytest
5
+
6
+ from referencing import Registry, Resource, exceptions
7
+ from referencing.jsonschema import DRAFT202012
8
+ from referencing.retrieval import to_cached_resource
9
+
10
+
11
class TestToCachedResource:
    """Tests for `referencing.retrieval.to_cached_resource`."""

    def test_it_caches_retrieved_resources(self):
        contents = {"$schema": "https://json-schema.org/draft/2020-12/schema"}
        # Only one serialized copy exists; an uncached second retrieval
        # would pop from an empty list and blow up.
        serialized = [json.dumps(contents)]

        @to_cached_resource()
        def retrieve(uri):
            return serialized.pop()

        registry = Registry(retrieve=retrieve)
        expected = Resource.from_contents(contents)

        first = registry.get_or_retrieve("urn:example:schema")
        assert first.value == expected

        # And a second time we get the same value.
        second = registry.get_or_retrieve("urn:example:schema")
        assert second.value is first.value

    def test_custom_loader(self):
        contents = {"$schema": "https://json-schema.org/draft/2020-12/schema"}
        # The payload is stored reversed; the custom loader un-reverses it.
        serialized = [json.dumps(contents)[::-1]]

        @to_cached_resource(loads=lambda s: json.loads(s[::-1]))
        def retrieve(uri):
            return serialized.pop()

        registry = Registry(retrieve=retrieve)
        expected = Resource.from_contents(contents)

        first = registry.get_or_retrieve("urn:example:schema")
        assert first.value == expected

        # And a second time we get the same value.
        second = registry.get_or_retrieve("urn:example:schema")
        assert second.value is first.value

    def test_custom_from_contents(self):
        contents = {}
        serialized = [json.dumps(contents)]

        # Empty contents carry no $schema, so a specification is supplied.
        @to_cached_resource(from_contents=DRAFT202012.create_resource)
        def retrieve(uri):
            return serialized.pop()

        registry = Registry(retrieve=retrieve)
        expected = DRAFT202012.create_resource(contents)

        first = registry.get_or_retrieve("urn:example:schema")
        assert first.value == expected

        # And a second time we get the same value.
        second = registry.get_or_retrieve("urn:example:schema")
        assert second.value is first.value

    def test_custom_cache(self):
        schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"}
        mapping = {
            "urn:example:1": dict(schema, foo=1),
            "urn:example:2": dict(schema, foo=2),
            "urn:example:3": dict(schema, foo=3),
        }
        resources = {
            uri: Resource.from_contents(contents)
            for uri, contents in mapping.items()
        }

        # Retrieval destroys its source, so only the cache can answer twice.
        @to_cached_resource(cache=lru_cache(maxsize=2))
        def retrieve(uri):
            return json.dumps(mapping.pop(uri))

        registry = Registry(retrieve=retrieve)

        # Retrieving urn:example:3 below evicts urn:example:1 (maxsize=2).
        for uri in "urn:example:1", "urn:example:2", "urn:example:3":
            got = registry.get_or_retrieve(uri)
            assert got.value == resources[uri]
            assert registry.get_or_retrieve(uri).value is got.value
            assert registry.get_or_retrieve(uri).value is got.value

        # And now this fails: urn:example:1 was evicted, and its contents
        # were already popped out of `mapping`.
        with pytest.raises(exceptions.Unretrievable):
            registry.get_or_retrieve("urn:example:1")
evalkit_cambrian/lib/python3.10/site-packages/referencing/typing.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Type-annotation related support for the referencing library.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ from collections.abc import Mapping as Mapping
8
+ from typing import TYPE_CHECKING, Any, Protocol
9
+
10
+ try:
11
+ from typing_extensions import TypeVar
12
+ except ImportError: # pragma: no cover
13
+ from typing import TypeVar
14
+
15
+ if TYPE_CHECKING:
16
+ from referencing._core import Resolved, Resolver, Resource
17
+
18
+ #: A URI which identifies a `Resource`.
19
+ URI = str
20
+
21
+ #: The type of documents within a registry.
22
+ D = TypeVar("D", default=Any)
23
+
24
+
25
class Retrieve(Protocol[D]):
    """
    A retrieval callable, usable within a `Registry` for resource retrieval.

    Does not make assumptions about where the resource might be coming from.
    """

    def __call__(self, uri: URI) -> Resource[D]:
        """
        Retrieve the resource with the given URI.

        Raise `referencing.exceptions.NoSuchResource` if you wish to indicate
        the retriever cannot lookup the given URI.

        NOTE(review): presumably only invoked when the registry has no
        in-memory resource for ``uri`` — confirm against ``Registry``.
        """
        ...
40
+
41
+
42
class Anchor(Protocol[D]):
    """
    An anchor within a `Resource`.

    Beyond "simple" anchors, some specifications like JSON Schema's 2020
    version have dynamic anchors.
    """

    @property
    def name(self) -> str:
        """
        Return the name of this anchor.
        """
        ...

    def resolve(self, resolver: Resolver[D]) -> Resolved[D]:
        """
        Return the resource for this anchor.

        The ``resolver`` is provided so implementations (e.g. dynamic
        anchors) may consult resolution state when producing the result.
        """
        ...
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__init__.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from tomlkit.api import TOMLDocument
2
+ from tomlkit.api import aot
3
+ from tomlkit.api import array
4
+ from tomlkit.api import boolean
5
+ from tomlkit.api import comment
6
+ from tomlkit.api import date
7
+ from tomlkit.api import datetime
8
+ from tomlkit.api import document
9
+ from tomlkit.api import dump
10
+ from tomlkit.api import dumps
11
+ from tomlkit.api import float_
12
+ from tomlkit.api import inline_table
13
+ from tomlkit.api import integer
14
+ from tomlkit.api import item
15
+ from tomlkit.api import key
16
+ from tomlkit.api import key_value
17
+ from tomlkit.api import load
18
+ from tomlkit.api import loads
19
+ from tomlkit.api import nl
20
+ from tomlkit.api import parse
21
+ from tomlkit.api import register_encoder
22
+ from tomlkit.api import string
23
+ from tomlkit.api import table
24
+ from tomlkit.api import time
25
+ from tomlkit.api import unregister_encoder
26
+ from tomlkit.api import value
27
+ from tomlkit.api import ws
28
+
29
+
30
+ __version__ = "0.12.0"
31
+ __all__ = [
32
+ "aot",
33
+ "array",
34
+ "boolean",
35
+ "comment",
36
+ "date",
37
+ "datetime",
38
+ "document",
39
+ "dump",
40
+ "dumps",
41
+ "float_",
42
+ "inline_table",
43
+ "integer",
44
+ "item",
45
+ "key",
46
+ "key_value",
47
+ "load",
48
+ "loads",
49
+ "nl",
50
+ "parse",
51
+ "string",
52
+ "table",
53
+ "time",
54
+ "TOMLDocument",
55
+ "value",
56
+ "ws",
57
+ "register_encoder",
58
+ "unregister_encoder",
59
+ ]
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/api.cpython-310.pyc ADDED
Binary file (9.22 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (8.11 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/items.cpython-310.pyc ADDED
Binary file (56.7 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/parser.cpython-310.pyc ADDED
Binary file (21.8 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/__pycache__/toml_file.cpython-310.pyc ADDED
Binary file (1.77 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/_types.py ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING
4
+ from typing import Any
5
+ from typing import TypeVar
6
+
7
+
8
+ WT = TypeVar("WT", bound="WrapperType")
9
+
10
+ if TYPE_CHECKING: # pragma: no cover
11
+ # Define _CustomList and _CustomDict as a workaround for:
12
+ # https://github.com/python/mypy/issues/11427
13
+ #
14
+ # According to this issue, the typeshed contains a "lie"
15
+ # (it adds MutableSequence to the ancestry of list and MutableMapping to
16
+ # the ancestry of dict) which completely messes with the type inference for
17
+ # Table, InlineTable, Array and Container.
18
+ #
19
+ # Importing from builtins is preferred over simple assignment, see issues:
20
+ # https://github.com/python/mypy/issues/8715
21
+ # https://github.com/python/mypy/issues/10068
22
+ from builtins import dict as _CustomDict # noqa: N812
23
+ from builtins import float as _CustomFloat # noqa: N812
24
+ from builtins import int as _CustomInt # noqa: N812
25
+ from builtins import list as _CustomList # noqa: N812
26
+ from typing import Callable
27
+ from typing import Concatenate
28
+ from typing import ParamSpec
29
+ from typing import Protocol
30
+
31
+ P = ParamSpec("P")
32
+
33
+ class WrapperType(Protocol):
34
+ def _new(self: WT, value: Any) -> WT:
35
+ ...
36
+
37
+ else:
38
+ from collections.abc import MutableMapping
39
+ from collections.abc import MutableSequence
40
+ from numbers import Integral
41
+ from numbers import Real
42
+
43
+ class _CustomList(MutableSequence, list):
44
+ """Adds MutableSequence mixin while pretending to be a builtin list"""
45
+
46
+ class _CustomDict(MutableMapping, dict):
47
+ """Adds MutableMapping mixin while pretending to be a builtin dict"""
48
+
49
+ class _CustomInt(Integral, int):
50
+ """Adds Integral mixin while pretending to be a builtin int"""
51
+
52
+ class _CustomFloat(Real, float):
53
+ """Adds Real mixin while pretending to be a builtin float"""
54
+
55
+
56
+ def wrap_method(
57
+ original_method: Callable[Concatenate[WT, P], Any]
58
+ ) -> Callable[Concatenate[WT, P], Any]:
59
+ def wrapper(self: WT, *args: P.args, **kwargs: P.kwargs) -> Any:
60
+ result = original_method(self, *args, **kwargs)
61
+ if result is NotImplemented:
62
+ return result
63
+ return self._new(result)
64
+
65
+ return wrapper
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/parser.py ADDED
@@ -0,0 +1,1141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import datetime
4
+ import re
5
+ import string
6
+
7
+ from tomlkit._compat import decode
8
+ from tomlkit._utils import RFC_3339_LOOSE
9
+ from tomlkit._utils import _escaped
10
+ from tomlkit._utils import parse_rfc3339
11
+ from tomlkit.container import Container
12
+ from tomlkit.exceptions import EmptyKeyError
13
+ from tomlkit.exceptions import EmptyTableNameError
14
+ from tomlkit.exceptions import InternalParserError
15
+ from tomlkit.exceptions import InvalidCharInStringError
16
+ from tomlkit.exceptions import InvalidControlChar
17
+ from tomlkit.exceptions import InvalidDateError
18
+ from tomlkit.exceptions import InvalidDateTimeError
19
+ from tomlkit.exceptions import InvalidNumberError
20
+ from tomlkit.exceptions import InvalidTimeError
21
+ from tomlkit.exceptions import InvalidUnicodeValueError
22
+ from tomlkit.exceptions import ParseError
23
+ from tomlkit.exceptions import UnexpectedCharError
24
+ from tomlkit.exceptions import UnexpectedEofError
25
+ from tomlkit.items import AoT
26
+ from tomlkit.items import Array
27
+ from tomlkit.items import Bool
28
+ from tomlkit.items import BoolType
29
+ from tomlkit.items import Comment
30
+ from tomlkit.items import Date
31
+ from tomlkit.items import DateTime
32
+ from tomlkit.items import Float
33
+ from tomlkit.items import InlineTable
34
+ from tomlkit.items import Integer
35
+ from tomlkit.items import Item
36
+ from tomlkit.items import Key
37
+ from tomlkit.items import KeyType
38
+ from tomlkit.items import Null
39
+ from tomlkit.items import SingleKey
40
+ from tomlkit.items import String
41
+ from tomlkit.items import StringType
42
+ from tomlkit.items import Table
43
+ from tomlkit.items import Time
44
+ from tomlkit.items import Trivia
45
+ from tomlkit.items import Whitespace
46
+ from tomlkit.source import Source
47
+ from tomlkit.toml_char import TOMLChar
48
+ from tomlkit.toml_document import TOMLDocument
49
+
50
+
51
+ CTRL_I = 0x09 # Tab
52
+ CTRL_J = 0x0A # Line feed
53
+ CTRL_M = 0x0D # Carriage return
54
+ CTRL_CHAR_LIMIT = 0x1F
55
+ CHR_DEL = 0x7F
56
+
57
+
58
+ class Parser:
59
+ """
60
+ Parser for TOML documents.
61
+ """
62
+
63
+ def __init__(self, string: str | bytes) -> None:
64
+ # Input to parse
65
+ self._src = Source(decode(string))
66
+
67
+ self._aot_stack: list[Key] = []
68
+
69
+ @property
70
+ def _state(self):
71
+ return self._src.state
72
+
73
+ @property
74
+ def _idx(self):
75
+ return self._src.idx
76
+
77
+ @property
78
+ def _current(self):
79
+ return self._src.current
80
+
81
+ @property
82
+ def _marker(self):
83
+ return self._src.marker
84
+
85
+ def extract(self) -> str:
86
+ """
87
+ Extracts the value between marker and index
88
+ """
89
+ return self._src.extract()
90
+
91
+ def inc(self, exception: type[ParseError] | None = None) -> bool:
92
+ """
93
+ Increments the parser if the end of the input has not been reached.
94
+ Returns whether or not it was able to advance.
95
+ """
96
+ return self._src.inc(exception=exception)
97
+
98
+ def inc_n(self, n: int, exception: type[ParseError] | None = None) -> bool:
99
+ """
100
+ Increments the parser by n characters
101
+ if the end of the input has not been reached.
102
+ """
103
+ return self._src.inc_n(n=n, exception=exception)
104
+
105
+ def consume(self, chars, min=0, max=-1):
106
+ """
107
+ Consume chars until min/max is satisfied is valid.
108
+ """
109
+ return self._src.consume(chars=chars, min=min, max=max)
110
+
111
+ def end(self) -> bool:
112
+ """
113
+ Returns True if the parser has reached the end of the input.
114
+ """
115
+ return self._src.end()
116
+
117
+ def mark(self) -> None:
118
+ """
119
+ Sets the marker to the index's current position
120
+ """
121
+ self._src.mark()
122
+
123
+ def parse_error(self, exception=ParseError, *args, **kwargs):
124
+ """
125
+ Creates a generic "parse error" at the current position.
126
+ """
127
+ return self._src.parse_error(exception, *args, **kwargs)
128
+
129
+ def parse(self) -> TOMLDocument:
130
+ body = TOMLDocument(True)
131
+
132
+ # Take all keyvals outside of tables/AoT's.
133
+ while not self.end():
134
+ # Break out if a table is found
135
+ if self._current == "[":
136
+ break
137
+
138
+ # Otherwise, take and append one KV
139
+ item = self._parse_item()
140
+ if not item:
141
+ break
142
+
143
+ key, value = item
144
+ if (key is not None and key.is_multi()) or not self._merge_ws(value, body):
145
+ # We actually have a table
146
+ try:
147
+ body.append(key, value)
148
+ except Exception as e:
149
+ raise self.parse_error(ParseError, str(e)) from e
150
+
151
+ self.mark()
152
+
153
+ while not self.end():
154
+ key, value = self._parse_table()
155
+ if isinstance(value, Table) and value.is_aot_element():
156
+ # This is just the first table in an AoT. Parse the rest of the array
157
+ # along with it.
158
+ value = self._parse_aot(value, key)
159
+
160
+ try:
161
+ body.append(key, value)
162
+ except Exception as e:
163
+ raise self.parse_error(ParseError, str(e)) from e
164
+
165
+ body.parsing(False)
166
+
167
+ return body
168
+
169
+ def _merge_ws(self, item: Item, container: Container) -> bool:
170
+ """
171
+ Merges the given Item with the last one currently in the given Container if
172
+ both are whitespace items.
173
+
174
+ Returns True if the items were merged.
175
+ """
176
+ last = container.last_item()
177
+ if not last:
178
+ return False
179
+
180
+ if not isinstance(item, Whitespace) or not isinstance(last, Whitespace):
181
+ return False
182
+
183
+ start = self._idx - (len(last.s) + len(item.s))
184
+ container.body[-1] = (
185
+ container.body[-1][0],
186
+ Whitespace(self._src[start : self._idx]),
187
+ )
188
+
189
+ return True
190
+
191
+ def _is_child(self, parent: Key, child: Key) -> bool:
192
+ """
193
+ Returns whether a key is strictly a child of another key.
194
+ AoT siblings are not considered children of one another.
195
+ """
196
+ parent_parts = tuple(parent)
197
+ child_parts = tuple(child)
198
+
199
+ if parent_parts == child_parts:
200
+ return False
201
+
202
+ return parent_parts == child_parts[: len(parent_parts)]
203
+
204
+ def _parse_item(self) -> tuple[Key | None, Item] | None:
205
+ """
206
+ Attempts to parse the next item and returns it, along with its key
207
+ if the item is value-like.
208
+ """
209
+ self.mark()
210
+ with self._state as state:
211
+ while True:
212
+ c = self._current
213
+ if c == "\n":
214
+ # Found a newline; Return all whitespace found up to this point.
215
+ self.inc()
216
+
217
+ return None, Whitespace(self.extract())
218
+ elif c in " \t\r":
219
+ # Skip whitespace.
220
+ if not self.inc():
221
+ return None, Whitespace(self.extract())
222
+ elif c == "#":
223
+ # Found a comment, parse it
224
+ indent = self.extract()
225
+ cws, comment, trail = self._parse_comment_trail()
226
+
227
+ return None, Comment(Trivia(indent, cws, comment, trail))
228
+ elif c == "[":
229
+ # Found a table, delegate to the calling function.
230
+ return
231
+ else:
232
+ # Beginning of a KV pair.
233
+ # Return to beginning of whitespace so it gets included
234
+ # as indentation for the KV about to be parsed.
235
+ state.restore = True
236
+ break
237
+
238
+ return self._parse_key_value(True)
239
+
240
+ def _parse_comment_trail(self, parse_trail: bool = True) -> tuple[str, str, str]:
241
+ """
242
+ Returns (comment_ws, comment, trail)
243
+ If there is no comment, comment_ws and comment will
244
+ simply be empty.
245
+ """
246
+ if self.end():
247
+ return "", "", ""
248
+
249
+ comment = ""
250
+ comment_ws = ""
251
+ self.mark()
252
+
253
+ while True:
254
+ c = self._current
255
+
256
+ if c == "\n":
257
+ break
258
+ elif c == "#":
259
+ comment_ws = self.extract()
260
+
261
+ self.mark()
262
+ self.inc() # Skip #
263
+
264
+ # The comment itself
265
+ while not self.end() and not self._current.is_nl():
266
+ code = ord(self._current)
267
+ if code == CHR_DEL or code <= CTRL_CHAR_LIMIT and code != CTRL_I:
268
+ raise self.parse_error(InvalidControlChar, code, "comments")
269
+
270
+ if not self.inc():
271
+ break
272
+
273
+ comment = self.extract()
274
+ self.mark()
275
+
276
+ break
277
+ elif c in " \t\r":
278
+ self.inc()
279
+ else:
280
+ raise self.parse_error(UnexpectedCharError, c)
281
+
282
+ if self.end():
283
+ break
284
+
285
+ trail = ""
286
+ if parse_trail:
287
+ while self._current.is_spaces() and self.inc():
288
+ pass
289
+
290
+ if self._current == "\r":
291
+ self.inc()
292
+
293
+ if self._current == "\n":
294
+ self.inc()
295
+
296
+ if self._idx != self._marker or self._current.is_ws():
297
+ trail = self.extract()
298
+
299
+ return comment_ws, comment, trail
300
+
301
+ def _parse_key_value(self, parse_comment: bool = False) -> tuple[Key, Item]:
302
+ # Leading indent
303
+ self.mark()
304
+
305
+ while self._current.is_spaces() and self.inc():
306
+ pass
307
+
308
+ indent = self.extract()
309
+
310
+ # Key
311
+ key = self._parse_key()
312
+
313
+ self.mark()
314
+
315
+ found_equals = self._current == "="
316
+ while self._current.is_kv_sep() and self.inc():
317
+ if self._current == "=":
318
+ if found_equals:
319
+ raise self.parse_error(UnexpectedCharError, "=")
320
+ else:
321
+ found_equals = True
322
+ if not found_equals:
323
+ raise self.parse_error(UnexpectedCharError, self._current)
324
+
325
+ if not key.sep:
326
+ key.sep = self.extract()
327
+ else:
328
+ key.sep += self.extract()
329
+
330
+ # Value
331
+ val = self._parse_value()
332
+ # Comment
333
+ if parse_comment:
334
+ cws, comment, trail = self._parse_comment_trail()
335
+ meta = val.trivia
336
+ if not meta.comment_ws:
337
+ meta.comment_ws = cws
338
+
339
+ meta.comment = comment
340
+ meta.trail = trail
341
+ else:
342
+ val.trivia.trail = ""
343
+
344
+ val.trivia.indent = indent
345
+
346
+ return key, val
347
+
348
+ def _parse_key(self) -> Key:
349
+ """
350
+ Parses a Key at the current position;
351
+ WS before the key must be exhausted first at the callsite.
352
+ """
353
+ self.mark()
354
+ while self._current.is_spaces() and self.inc():
355
+ # Skip any leading whitespace
356
+ pass
357
+ if self._current in "\"'":
358
+ return self._parse_quoted_key()
359
+ else:
360
+ return self._parse_bare_key()
361
+
362
+ def _parse_quoted_key(self) -> Key:
363
+ """
364
+ Parses a key enclosed in either single or double quotes.
365
+ """
366
+ # Extract the leading whitespace
367
+ original = self.extract()
368
+ quote_style = self._current
369
+ key_type = next((t for t in KeyType if t.value == quote_style), None)
370
+
371
+ if key_type is None:
372
+ raise RuntimeError("Should not have entered _parse_quoted_key()")
373
+
374
+ key_str = self._parse_string(
375
+ StringType.SLB if key_type == KeyType.Basic else StringType.SLL
376
+ )
377
+ if key_str._t.is_multiline():
378
+ raise self.parse_error(UnexpectedCharError, key_str._t.value)
379
+ original += key_str.as_string()
380
+ self.mark()
381
+ while self._current.is_spaces() and self.inc():
382
+ pass
383
+ original += self.extract()
384
+ key = SingleKey(str(key_str), t=key_type, sep="", original=original)
385
+ if self._current == ".":
386
+ self.inc()
387
+ key = key.concat(self._parse_key())
388
+
389
+ return key
390
+
391
+ def _parse_bare_key(self) -> Key:
392
+ """
393
+ Parses a bare key.
394
+ """
395
+ while (
396
+ self._current.is_bare_key_char() or self._current.is_spaces()
397
+ ) and self.inc():
398
+ pass
399
+
400
+ original = self.extract()
401
+ key = original.strip()
402
+ if not key:
403
+ # Empty key
404
+ raise self.parse_error(EmptyKeyError)
405
+
406
+ if " " in key:
407
+ # Bare key with spaces in it
408
+ raise self.parse_error(ParseError, f'Invalid key "{key}"')
409
+
410
+ key = SingleKey(key, KeyType.Bare, "", original)
411
+
412
+ if self._current == ".":
413
+ self.inc()
414
+ key = key.concat(self._parse_key())
415
+
416
+ return key
417
+
418
+ def _parse_value(self) -> Item:
419
+ """
420
+ Attempts to parse a value at the current position.
421
+ """
422
+ self.mark()
423
+ c = self._current
424
+ trivia = Trivia()
425
+
426
+ if c == StringType.SLB.value:
427
+ return self._parse_basic_string()
428
+ elif c == StringType.SLL.value:
429
+ return self._parse_literal_string()
430
+ elif c == BoolType.TRUE.value[0]:
431
+ return self._parse_true()
432
+ elif c == BoolType.FALSE.value[0]:
433
+ return self._parse_false()
434
+ elif c == "[":
435
+ return self._parse_array()
436
+ elif c == "{":
437
+ return self._parse_inline_table()
438
+ elif c in "+-" or self._peek(4) in {
439
+ "+inf",
440
+ "-inf",
441
+ "inf",
442
+ "+nan",
443
+ "-nan",
444
+ "nan",
445
+ }:
446
+ # Number
447
+ while self._current not in " \t\n\r#,]}" and self.inc():
448
+ pass
449
+
450
+ raw = self.extract()
451
+
452
+ item = self._parse_number(raw, trivia)
453
+ if item is not None:
454
+ return item
455
+
456
+ raise self.parse_error(InvalidNumberError)
457
+ elif c in string.digits:
458
+ # Integer, Float, Date, Time or DateTime
459
+ while self._current not in " \t\n\r#,]}" and self.inc():
460
+ pass
461
+
462
+ raw = self.extract()
463
+
464
+ m = RFC_3339_LOOSE.match(raw)
465
+ if m:
466
+ if m.group(1) and m.group(5):
467
+ # datetime
468
+ try:
469
+ dt = parse_rfc3339(raw)
470
+ assert isinstance(dt, datetime.datetime)
471
+ return DateTime(
472
+ dt.year,
473
+ dt.month,
474
+ dt.day,
475
+ dt.hour,
476
+ dt.minute,
477
+ dt.second,
478
+ dt.microsecond,
479
+ dt.tzinfo,
480
+ trivia,
481
+ raw,
482
+ )
483
+ except ValueError:
484
+ raise self.parse_error(InvalidDateTimeError)
485
+
486
+ if m.group(1):
487
+ try:
488
+ dt = parse_rfc3339(raw)
489
+ assert isinstance(dt, datetime.date)
490
+ date = Date(dt.year, dt.month, dt.day, trivia, raw)
491
+ self.mark()
492
+ while self._current not in "\t\n\r#,]}" and self.inc():
493
+ pass
494
+
495
+ time_raw = self.extract()
496
+ time_part = time_raw.rstrip()
497
+ trivia.comment_ws = time_raw[len(time_part) :]
498
+ if not time_part:
499
+ return date
500
+
501
+ dt = parse_rfc3339(raw + time_part)
502
+ assert isinstance(dt, datetime.datetime)
503
+ return DateTime(
504
+ dt.year,
505
+ dt.month,
506
+ dt.day,
507
+ dt.hour,
508
+ dt.minute,
509
+ dt.second,
510
+ dt.microsecond,
511
+ dt.tzinfo,
512
+ trivia,
513
+ raw + time_part,
514
+ )
515
+ except ValueError:
516
+ raise self.parse_error(InvalidDateError)
517
+
518
+ if m.group(5):
519
+ try:
520
+ t = parse_rfc3339(raw)
521
+ assert isinstance(t, datetime.time)
522
+ return Time(
523
+ t.hour,
524
+ t.minute,
525
+ t.second,
526
+ t.microsecond,
527
+ t.tzinfo,
528
+ trivia,
529
+ raw,
530
+ )
531
+ except ValueError:
532
+ raise self.parse_error(InvalidTimeError)
533
+
534
+ item = self._parse_number(raw, trivia)
535
+ if item is not None:
536
+ return item
537
+
538
+ raise self.parse_error(InvalidNumberError)
539
+ else:
540
+ raise self.parse_error(UnexpectedCharError, c)
541
+
542
+ def _parse_true(self):
543
+ return self._parse_bool(BoolType.TRUE)
544
+
545
+ def _parse_false(self):
546
+ return self._parse_bool(BoolType.FALSE)
547
+
548
+ def _parse_bool(self, style: BoolType) -> Bool:
549
+ with self._state:
550
+ style = BoolType(style)
551
+
552
+ # only keep parsing for bool if the characters match the style
553
+ # try consuming rest of chars in style
554
+ for c in style:
555
+ self.consume(c, min=1, max=1)
556
+
557
+ return Bool(style, Trivia())
558
+
559
+ def _parse_array(self) -> Array:
560
+ # Consume opening bracket, EOF here is an issue (middle of array)
561
+ self.inc(exception=UnexpectedEofError)
562
+
563
+ elems: list[Item] = []
564
+ prev_value = None
565
+ while True:
566
+ # consume whitespace
567
+ mark = self._idx
568
+ self.consume(TOMLChar.SPACES + TOMLChar.NL)
569
+ indent = self._src[mark : self._idx]
570
+ newline = set(TOMLChar.NL) & set(indent)
571
+ if newline:
572
+ elems.append(Whitespace(indent))
573
+ continue
574
+
575
+ # consume comment
576
+ if self._current == "#":
577
+ cws, comment, trail = self._parse_comment_trail(parse_trail=False)
578
+ elems.append(Comment(Trivia(indent, cws, comment, trail)))
579
+ continue
580
+
581
+ # consume indent
582
+ if indent:
583
+ elems.append(Whitespace(indent))
584
+ continue
585
+
586
+ # consume value
587
+ if not prev_value:
588
+ try:
589
+ elems.append(self._parse_value())
590
+ prev_value = True
591
+ continue
592
+ except UnexpectedCharError:
593
+ pass
594
+
595
+ # consume comma
596
+ if prev_value and self._current == ",":
597
+ self.inc(exception=UnexpectedEofError)
598
+ elems.append(Whitespace(","))
599
+ prev_value = False
600
+ continue
601
+
602
+ # consume closing bracket
603
+ if self._current == "]":
604
+ # consume closing bracket, EOF here doesn't matter
605
+ self.inc()
606
+ break
607
+
608
+ raise self.parse_error(UnexpectedCharError, self._current)
609
+
610
+ try:
611
+ res = Array(elems, Trivia())
612
+ except ValueError:
613
+ pass
614
+ else:
615
+ return res
616
+
617
+ def _parse_inline_table(self) -> InlineTable:
618
+ # consume opening bracket, EOF here is an issue (middle of array)
619
+ self.inc(exception=UnexpectedEofError)
620
+
621
+ elems = Container(True)
622
+ trailing_comma = None
623
+ while True:
624
+ # consume leading whitespace
625
+ mark = self._idx
626
+ self.consume(TOMLChar.SPACES)
627
+ raw = self._src[mark : self._idx]
628
+ if raw:
629
+ elems.add(Whitespace(raw))
630
+
631
+ if not trailing_comma:
632
+ # None: empty inline table
633
+ # False: previous key-value pair was not followed by a comma
634
+ if self._current == "}":
635
+ # consume closing bracket, EOF here doesn't matter
636
+ self.inc()
637
+ break
638
+
639
+ if (
640
+ trailing_comma is False
641
+ or trailing_comma is None
642
+ and self._current == ","
643
+ ):
644
+ # Either the previous key-value pair was not followed by a comma
645
+ # or the table has an unexpected leading comma.
646
+ raise self.parse_error(UnexpectedCharError, self._current)
647
+ else:
648
+ # True: previous key-value pair was followed by a comma
649
+ if self._current == "}" or self._current == ",":
650
+ raise self.parse_error(UnexpectedCharError, self._current)
651
+
652
+ key, val = self._parse_key_value(False)
653
+ elems.add(key, val)
654
+
655
+ # consume trailing whitespace
656
+ mark = self._idx
657
+ self.consume(TOMLChar.SPACES)
658
+ raw = self._src[mark : self._idx]
659
+ if raw:
660
+ elems.add(Whitespace(raw))
661
+
662
+ # consume trailing comma
663
+ trailing_comma = self._current == ","
664
+ if trailing_comma:
665
+ # consume closing bracket, EOF here is an issue (middle of inline table)
666
+ self.inc(exception=UnexpectedEofError)
667
+
668
+ return InlineTable(elems, Trivia())
669
+
670
+ def _parse_number(self, raw: str, trivia: Trivia) -> Item | None:
671
+ # Leading zeros are not allowed
672
+ sign = ""
673
+ if raw.startswith(("+", "-")):
674
+ sign = raw[0]
675
+ raw = raw[1:]
676
+
677
+ if len(raw) > 1 and (
678
+ raw.startswith("0")
679
+ and not raw.startswith(("0.", "0o", "0x", "0b", "0e"))
680
+ or sign
681
+ and raw.startswith(".")
682
+ ):
683
+ return None
684
+
685
+ if raw.startswith(("0o", "0x", "0b")) and sign:
686
+ return None
687
+
688
+ digits = "[0-9]"
689
+ base = 10
690
+ if raw.startswith("0b"):
691
+ digits = "[01]"
692
+ base = 2
693
+ elif raw.startswith("0o"):
694
+ digits = "[0-7]"
695
+ base = 8
696
+ elif raw.startswith("0x"):
697
+ digits = "[0-9a-f]"
698
+ base = 16
699
+
700
+ # Underscores should be surrounded by digits
701
+ clean = re.sub(f"(?i)(?<={digits})_(?={digits})", "", raw).lower()
702
+
703
+ if "_" in clean:
704
+ return None
705
+
706
+ if (
707
+ clean.endswith(".")
708
+ or not clean.startswith("0x")
709
+ and clean.split("e", 1)[0].endswith(".")
710
+ ):
711
+ return None
712
+
713
+ try:
714
+ return Integer(int(sign + clean, base), trivia, sign + raw)
715
+ except ValueError:
716
+ try:
717
+ return Float(float(sign + clean), trivia, sign + raw)
718
+ except ValueError:
719
+ return None
720
+
721
+ def _parse_literal_string(self) -> String:
722
+ with self._state:
723
+ return self._parse_string(StringType.SLL)
724
+
725
+ def _parse_basic_string(self) -> String:
726
+ with self._state:
727
+ return self._parse_string(StringType.SLB)
728
+
729
+ def _parse_escaped_char(self, multiline):
730
+ if multiline and self._current.is_ws():
731
+ # When the last non-whitespace character on a line is
732
+ # a \, it will be trimmed along with all whitespace
733
+ # (including newlines) up to the next non-whitespace
734
+ # character or closing delimiter.
735
+ # """\
736
+ # hello \
737
+ # world"""
738
+ tmp = ""
739
+ while self._current.is_ws():
740
+ tmp += self._current
741
+ # consume the whitespace, EOF here is an issue
742
+ # (middle of string)
743
+ self.inc(exception=UnexpectedEofError)
744
+ continue
745
+
746
+ # the escape followed by whitespace must have a newline
747
+ # before any other chars
748
+ if "\n" not in tmp:
749
+ raise self.parse_error(InvalidCharInStringError, self._current)
750
+
751
+ return ""
752
+
753
+ if self._current in _escaped:
754
+ c = _escaped[self._current]
755
+
756
+ # consume this char, EOF here is an issue (middle of string)
757
+ self.inc(exception=UnexpectedEofError)
758
+
759
+ return c
760
+
761
+ if self._current in {"u", "U"}:
762
+ # this needs to be a unicode
763
+ u, ue = self._peek_unicode(self._current == "U")
764
+ if u is not None:
765
+ # consume the U char and the unicode value
766
+ self.inc_n(len(ue) + 1)
767
+
768
+ return u
769
+
770
+ raise self.parse_error(InvalidUnicodeValueError)
771
+
772
+ raise self.parse_error(InvalidCharInStringError, self._current)
773
+
774
+ def _parse_string(self, delim: StringType) -> String:
775
+ # only keep parsing for string if the current character matches the delim
776
+ if self._current != delim.unit:
777
+ raise self.parse_error(
778
+ InternalParserError,
779
+ f"Invalid character for string type {delim}",
780
+ )
781
+
782
+ # consume the opening/first delim, EOF here is an issue
783
+ # (middle of string or middle of delim)
784
+ self.inc(exception=UnexpectedEofError)
785
+
786
+ if self._current == delim.unit:
787
+ # consume the closing/second delim, we do not care if EOF occurs as
788
+ # that would simply imply an empty single line string
789
+ if not self.inc() or self._current != delim.unit:
790
+ # Empty string
791
+ return String(delim, "", "", Trivia())
792
+
793
+ # consume the third delim, EOF here is an issue (middle of string)
794
+ self.inc(exception=UnexpectedEofError)
795
+
796
+ delim = delim.toggle() # convert delim to multi delim
797
+
798
+ self.mark() # to extract the original string with whitespace and all
799
+ value = ""
800
+
801
+ # A newline immediately following the opening delimiter will be trimmed.
802
+ if delim.is_multiline():
803
+ if self._current == "\n":
804
+ # consume the newline, EOF here is an issue (middle of string)
805
+ self.inc(exception=UnexpectedEofError)
806
+ else:
807
+ cur = self._current
808
+ with self._state(restore=True):
809
+ if self.inc():
810
+ cur += self._current
811
+ if cur == "\r\n":
812
+ self.inc_n(2, exception=UnexpectedEofError)
813
+
814
+ escaped = False # whether the previous key was ESCAPE
815
+ while True:
816
+ code = ord(self._current)
817
+ if (
818
+ delim.is_singleline()
819
+ and not escaped
820
+ and (code == CHR_DEL or code <= CTRL_CHAR_LIMIT and code != CTRL_I)
821
+ ) or (
822
+ delim.is_multiline()
823
+ and not escaped
824
+ and (
825
+ code == CHR_DEL
826
+ or code <= CTRL_CHAR_LIMIT
827
+ and code not in [CTRL_I, CTRL_J, CTRL_M]
828
+ )
829
+ ):
830
+ raise self.parse_error(InvalidControlChar, code, "strings")
831
+ elif not escaped and self._current == delim.unit:
832
+ # try to process current as a closing delim
833
+ original = self.extract()
834
+
835
+ close = ""
836
+ if delim.is_multiline():
837
+ # Consume the delimiters to see if we are at the end of the string
838
+ close = ""
839
+ while self._current == delim.unit:
840
+ close += self._current
841
+ self.inc()
842
+
843
+ if len(close) < 3:
844
+ # Not a triple quote, leave in result as-is.
845
+ # Adding back the characters we already consumed
846
+ value += close
847
+ continue
848
+
849
+ if len(close) == 3:
850
+ # We are at the end of the string
851
+ return String(delim, value, original, Trivia())
852
+
853
+ if len(close) >= 6:
854
+ raise self.parse_error(InvalidCharInStringError, self._current)
855
+
856
+ value += close[:-3]
857
+ original += close[:-3]
858
+
859
+ return String(delim, value, original, Trivia())
860
+ else:
861
+ # consume the closing delim, we do not care if EOF occurs as
862
+ # that would simply imply the end of self._src
863
+ self.inc()
864
+
865
+ return String(delim, value, original, Trivia())
866
+ elif delim.is_basic() and escaped:
867
+ # attempt to parse the current char as an escaped value, an exception
868
+ # is raised if this fails
869
+ value += self._parse_escaped_char(delim.is_multiline())
870
+
871
+ # no longer escaped
872
+ escaped = False
873
+ elif delim.is_basic() and self._current == "\\":
874
+ # the next char is being escaped
875
+ escaped = True
876
+
877
+ # consume this char, EOF here is an issue (middle of string)
878
+ self.inc(exception=UnexpectedEofError)
879
+ else:
880
+ # this is either a literal string where we keep everything as is,
881
+ # or this is not a special escaped char in a basic string
882
+ value += self._current
883
+
884
+ # consume this char, EOF here is an issue (middle of string)
885
+ self.inc(exception=UnexpectedEofError)
886
+
887
+ def _parse_table(
888
+ self, parent_name: Key | None = None, parent: Table | None = None
889
+ ) -> tuple[Key, Table | AoT]:
890
+ """
891
+ Parses a table element.
892
+ """
893
+ if self._current != "[":
894
+ raise self.parse_error(
895
+ InternalParserError, "_parse_table() called on non-bracket character."
896
+ )
897
+
898
+ indent = self.extract()
899
+ self.inc() # Skip opening bracket
900
+
901
+ if self.end():
902
+ raise self.parse_error(UnexpectedEofError)
903
+
904
+ is_aot = False
905
+ if self._current == "[":
906
+ if not self.inc():
907
+ raise self.parse_error(UnexpectedEofError)
908
+
909
+ is_aot = True
910
+ try:
911
+ key = self._parse_key()
912
+ except EmptyKeyError:
913
+ raise self.parse_error(EmptyTableNameError) from None
914
+ if self.end():
915
+ raise self.parse_error(UnexpectedEofError)
916
+ elif self._current != "]":
917
+ raise self.parse_error(UnexpectedCharError, self._current)
918
+
919
+ key.sep = ""
920
+ full_key = key
921
+ name_parts = tuple(key)
922
+ if any(" " in part.key.strip() and part.is_bare() for part in name_parts):
923
+ raise self.parse_error(
924
+ ParseError, f'Invalid table name "{full_key.as_string()}"'
925
+ )
926
+
927
+ missing_table = False
928
+ if parent_name:
929
+ parent_name_parts = tuple(parent_name)
930
+ else:
931
+ parent_name_parts = ()
932
+
933
+ if len(name_parts) > len(parent_name_parts) + 1:
934
+ missing_table = True
935
+
936
+ name_parts = name_parts[len(parent_name_parts) :]
937
+
938
+ values = Container(True)
939
+
940
+ self.inc() # Skip closing bracket
941
+ if is_aot:
942
+ # TODO: Verify close bracket
943
+ self.inc()
944
+
945
+ cws, comment, trail = self._parse_comment_trail()
946
+
947
+ result = Null()
948
+ table = Table(
949
+ values,
950
+ Trivia(indent, cws, comment, trail),
951
+ is_aot,
952
+ name=name_parts[0].key if name_parts else key.key,
953
+ display_name=full_key.as_string(),
954
+ is_super_table=False,
955
+ )
956
+
957
+ if len(name_parts) > 1:
958
+ if missing_table:
959
+ # Missing super table
960
+ # i.e. a table initialized like this: [foo.bar]
961
+ # without initializing [foo]
962
+ #
963
+ # So we have to create the parent tables
964
+ table = Table(
965
+ Container(True),
966
+ Trivia("", cws, comment, trail),
967
+ is_aot and name_parts[0] in self._aot_stack,
968
+ is_super_table=True,
969
+ name=name_parts[0].key,
970
+ )
971
+
972
+ result = table
973
+ key = name_parts[0]
974
+
975
+ for i, _name in enumerate(name_parts[1:]):
976
+ child = table.get(
977
+ _name,
978
+ Table(
979
+ Container(True),
980
+ Trivia(indent, cws, comment, trail),
981
+ is_aot and i == len(name_parts) - 2,
982
+ is_super_table=i < len(name_parts) - 2,
983
+ name=_name.key,
984
+ display_name=full_key.as_string()
985
+ if i == len(name_parts) - 2
986
+ else None,
987
+ ),
988
+ )
989
+
990
+ if is_aot and i == len(name_parts) - 2:
991
+ table.raw_append(_name, AoT([child], name=table.name, parsed=True))
992
+ else:
993
+ table.raw_append(_name, child)
994
+
995
+ table = child
996
+ values = table.value
997
+ else:
998
+ if name_parts:
999
+ key = name_parts[0]
1000
+
1001
+ while not self.end():
1002
+ item = self._parse_item()
1003
+ if item:
1004
+ _key, item = item
1005
+ if not self._merge_ws(item, values):
1006
+ table.raw_append(_key, item)
1007
+ else:
1008
+ if self._current == "[":
1009
+ _, key_next = self._peek_table()
1010
+
1011
+ if self._is_child(full_key, key_next):
1012
+ key_next, table_next = self._parse_table(full_key, table)
1013
+
1014
+ table.raw_append(key_next, table_next)
1015
+
1016
+ # Picking up any sibling
1017
+ while not self.end():
1018
+ _, key_next = self._peek_table()
1019
+
1020
+ if not self._is_child(full_key, key_next):
1021
+ break
1022
+
1023
+ key_next, table_next = self._parse_table(full_key, table)
1024
+
1025
+ table.raw_append(key_next, table_next)
1026
+
1027
+ break
1028
+ else:
1029
+ raise self.parse_error(
1030
+ InternalParserError,
1031
+ "_parse_item() returned None on a non-bracket character.",
1032
+ )
1033
+
1034
+ if isinstance(result, Null):
1035
+ result = table
1036
+
1037
+ if is_aot and (not self._aot_stack or full_key != self._aot_stack[-1]):
1038
+ result = self._parse_aot(result, full_key)
1039
+
1040
+ return key, result
1041
+
1042
+ def _peek_table(self) -> tuple[bool, Key]:
1043
+ """
1044
+ Peeks ahead non-intrusively by cloning then restoring the
1045
+ initial state of the parser.
1046
+
1047
+ Returns the name of the table about to be parsed,
1048
+ as well as whether it is part of an AoT.
1049
+ """
1050
+ # we always want to restore after exiting this scope
1051
+ with self._state(save_marker=True, restore=True):
1052
+ if self._current != "[":
1053
+ raise self.parse_error(
1054
+ InternalParserError,
1055
+ "_peek_table() entered on non-bracket character",
1056
+ )
1057
+
1058
+ # AoT
1059
+ self.inc()
1060
+ is_aot = False
1061
+ if self._current == "[":
1062
+ self.inc()
1063
+ is_aot = True
1064
+ try:
1065
+ return is_aot, self._parse_key()
1066
+ except EmptyKeyError:
1067
+ raise self.parse_error(EmptyTableNameError) from None
1068
+
1069
+ def _parse_aot(self, first: Table, name_first: Key) -> AoT:
1070
+ """
1071
+ Parses all siblings of the provided table first and bundles them into
1072
+ an AoT.
1073
+ """
1074
+ payload = [first]
1075
+ self._aot_stack.append(name_first)
1076
+ while not self.end():
1077
+ is_aot_next, name_next = self._peek_table()
1078
+ if is_aot_next and name_next == name_first:
1079
+ _, table = self._parse_table(name_first)
1080
+ payload.append(table)
1081
+ else:
1082
+ break
1083
+
1084
+ self._aot_stack.pop()
1085
+
1086
+ return AoT(payload, parsed=True)
1087
+
1088
+ def _peek(self, n: int) -> str:
1089
+ """
1090
+ Peeks ahead n characters.
1091
+
1092
+ n is the max number of characters that will be peeked.
1093
+ """
1094
+ # we always want to restore after exiting this scope
1095
+ with self._state(restore=True):
1096
+ buf = ""
1097
+ for _ in range(n):
1098
+ if self._current not in " \t\n\r#,]}" + self._src.EOF:
1099
+ buf += self._current
1100
+ self.inc()
1101
+ continue
1102
+
1103
+ break
1104
+ return buf
1105
+
1106
+ def _peek_unicode(self, is_long: bool) -> tuple[str | None, str | None]:
1107
+ """
1108
+ Peeks ahead non-intrusively by cloning then restoring the
1109
+ initial state of the parser.
1110
+
1111
+ Returns the unicode value is it's a valid one else None.
1112
+ """
1113
+ # we always want to restore after exiting this scope
1114
+ with self._state(save_marker=True, restore=True):
1115
+ if self._current not in {"u", "U"}:
1116
+ raise self.parse_error(
1117
+ InternalParserError, "_peek_unicode() entered on non-unicode value"
1118
+ )
1119
+
1120
+ self.inc() # Dropping prefix
1121
+ self.mark()
1122
+
1123
+ if is_long:
1124
+ chars = 8
1125
+ else:
1126
+ chars = 4
1127
+
1128
+ if not self.inc_n(chars):
1129
+ value, extracted = None, None
1130
+ else:
1131
+ extracted = self.extract()
1132
+
1133
+ if extracted[0].lower() == "d" and extracted[1].strip("01234567"):
1134
+ return None, None
1135
+
1136
+ try:
1137
+ value = chr(int(extracted, 16))
1138
+ except (ValueError, OverflowError):
1139
+ value = None
1140
+
1141
+ return value, extracted
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/py.typed ADDED
File without changes
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/toml_char.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import string
2
+
3
+
4
+ class TOMLChar(str):
5
+ def __init__(self, c):
6
+ super().__init__()
7
+
8
+ if len(self) > 1:
9
+ raise ValueError("A TOML character must be of length 1")
10
+
11
+ BARE = string.ascii_letters + string.digits + "-_"
12
+ KV = "= \t"
13
+ NUMBER = string.digits + "+-_.e"
14
+ SPACES = " \t"
15
+ NL = "\n\r"
16
+ WS = SPACES + NL
17
+
18
+ def is_bare_key_char(self) -> bool:
19
+ """
20
+ Whether the character is a valid bare key name or not.
21
+ """
22
+ return self in self.BARE
23
+
24
+ def is_kv_sep(self) -> bool:
25
+ """
26
+ Whether the character is a valid key/value separator or not.
27
+ """
28
+ return self in self.KV
29
+
30
+ def is_int_float_char(self) -> bool:
31
+ """
32
+ Whether the character if a valid integer or float value character or not.
33
+ """
34
+ return self in self.NUMBER
35
+
36
+ def is_ws(self) -> bool:
37
+ """
38
+ Whether the character is a whitespace character or not.
39
+ """
40
+ return self in self.WS
41
+
42
+ def is_nl(self) -> bool:
43
+ """
44
+ Whether the character is a new line character or not.
45
+ """
46
+ return self in self.NL
47
+
48
+ def is_spaces(self) -> bool:
49
+ """
50
+ Whether the character is a space or not
51
+ """
52
+ return self in self.SPACES
evalkit_cambrian/lib/python3.10/site-packages/tomlkit/toml_file.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import re
3
+
4
+ from typing import TYPE_CHECKING
5
+
6
+ from tomlkit.api import loads
7
+ from tomlkit.toml_document import TOMLDocument
8
+
9
+
10
+ if TYPE_CHECKING:
11
+ from _typeshed import StrPath as _StrPath
12
+ else:
13
+ from typing import Union
14
+
15
+ _StrPath = Union[str, os.PathLike]
16
+
17
+
18
+ class TOMLFile:
19
+ """
20
+ Represents a TOML file.
21
+
22
+ :param path: path to the TOML file
23
+ """
24
+
25
+ def __init__(self, path: _StrPath) -> None:
26
+ self._path = path
27
+ self._linesep = os.linesep
28
+
29
+ def read(self) -> TOMLDocument:
30
+ """Read the file content as a :class:`tomlkit.toml_document.TOMLDocument`."""
31
+ with open(self._path, encoding="utf-8", newline="") as f:
32
+ content = f.read()
33
+
34
+ # check if consistent line endings
35
+ num_newline = content.count("\n")
36
+ if num_newline > 0:
37
+ num_win_eol = content.count("\r\n")
38
+ if num_win_eol == num_newline:
39
+ self._linesep = "\r\n"
40
+ elif num_win_eol == 0:
41
+ self._linesep = "\n"
42
+ else:
43
+ self._linesep = "mixed"
44
+
45
+ return loads(content)
46
+
47
+ def write(self, data: TOMLDocument) -> None:
48
+ """Write the TOMLDocument to the file."""
49
+ content = data.as_string()
50
+
51
+ # apply linesep
52
+ if self._linesep == "\n":
53
+ content = content.replace("\r\n", "\n")
54
+ elif self._linesep == "\r\n":
55
+ content = re.sub(r"(?<!\r)\n", "\r\n", content)
56
+
57
+ with open(self._path, "w", encoding="utf-8", newline="") as f:
58
+ f.write(content)
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_adaptive_avg_pool3d_backward_cpu_dispatch.h ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+ // @generated by torchgen/gen.py from DispatchKeyFunction.h
3
+
4
+ // NB: The implementing C++ file is RegisterDispatchKey.cpp
5
+
6
+ // The only #includes we need are for custom classes that have defaults in the C++ API
7
+ #include <c10/core/MemoryFormat.h>
8
+ #include <c10/core/Scalar.h>
9
+ #include <ATen/core/Reduction.h>
10
+
11
+ // Forward declarations of any types needed in the operator signatures.
12
+ // We can't directly include these classes because it will cause circular include dependencies.
13
+ // This file is included by TensorBody.h, which defines the Tensor class.
14
+ #include <ATen/core/ATen_fwd.h>
15
+
16
+ namespace at {
17
+
18
+ namespace cpu {
19
+
20
+ TORCH_API at::Tensor _adaptive_avg_pool3d_backward(const at::Tensor & grad_output, const at::Tensor & self);
21
+
22
+ } // namespace cpu
23
+ } // namespace at
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_add_relu_ops.h ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ // @generated by torchgen/gen.py from Operator.h
4
+
5
+ #include <tuple>
6
+ #include <vector>
7
+
8
+ // Forward declarations of any types needed in the operator signatures.
9
+ // We can't directly include these classes because it will cause circular include dependencies.
10
+ // This file is included by TensorBody.h, which defines the Tensor class.
11
+ #include <ATen/core/ATen_fwd.h>
12
+
13
+ namespace at {
14
+ namespace _ops {
15
+
16
+
17
+ struct TORCH_API _add_relu_Tensor {
18
+ using schema = at::Tensor (const at::Tensor &, const at::Tensor &, const at::Scalar &);
19
+ using ptr_schema = schema*;
20
+ // See Note [static constexpr char* members for windows NVCC]
21
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_add_relu")
22
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor")
23
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_add_relu.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor")
24
+ static at::Tensor call(const at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha);
25
+ static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha);
26
+ };
27
+
28
+ struct TORCH_API _add_relu__Tensor {
29
+ using schema = at::Tensor & (at::Tensor &, const at::Tensor &, const at::Scalar &);
30
+ using ptr_schema = schema*;
31
+ // See Note [static constexpr char* members for windows NVCC]
32
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_add_relu_")
33
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor")
34
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_add_relu_.Tensor(Tensor(a!) self, Tensor other, *, Scalar alpha=1) -> Tensor(a!)")
35
+ static at::Tensor & call(at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha);
36
+ static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha);
37
+ };
38
+
39
+ struct TORCH_API _add_relu_out {
40
+ using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, const at::Scalar &, at::Tensor &);
41
+ using ptr_schema = schema*;
42
+ // See Note [static constexpr char* members for windows NVCC]
43
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_add_relu")
44
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
45
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_add_relu.out(Tensor self, Tensor other, *, Scalar alpha=1, Tensor(a!) out) -> Tensor(a!)")
46
+ static at::Tensor & call(const at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha, at::Tensor & out);
47
+ static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha, at::Tensor & out);
48
+ };
49
+
50
+ struct TORCH_API _add_relu_Scalar {
51
+ using schema = at::Tensor (const at::Tensor &, const at::Scalar &, const at::Scalar &);
52
+ using ptr_schema = schema*;
53
+ // See Note [static constexpr char* members for windows NVCC]
54
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_add_relu")
55
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar")
56
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_add_relu.Scalar(Tensor self, Scalar other, Scalar alpha=1) -> Tensor")
57
+ static at::Tensor call(const at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha);
58
+ static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha);
59
+ };
60
+
61
+ struct TORCH_API _add_relu__Scalar {
62
+ using schema = at::Tensor & (at::Tensor &, const at::Scalar &, const at::Scalar &);
63
+ using ptr_schema = schema*;
64
+ // See Note [static constexpr char* members for windows NVCC]
65
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_add_relu_")
66
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar")
67
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_add_relu_.Scalar(Tensor(a!) self, Scalar other, Scalar alpha=1) -> Tensor(a!)")
68
+ static at::Tensor & call(at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha);
69
+ static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha);
70
+ };
71
+
72
+ struct TORCH_API _add_relu_Scalar_out {
73
+ using schema = at::Tensor & (const at::Tensor &, const at::Scalar &, const at::Scalar &, at::Tensor &);
74
+ using ptr_schema = schema*;
75
+ // See Note [static constexpr char* members for windows NVCC]
76
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_add_relu")
77
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar_out")
78
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_add_relu.Scalar_out(Tensor self, Scalar other, Scalar alpha=1, *, Tensor(a!) out) -> Tensor(a!)")
79
+ static at::Tensor & call(const at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha, at::Tensor & out);
80
+ static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha, at::Tensor & out);
81
+ };
82
+
83
+ }} // namespace at::_ops
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_addmm_activation.h ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ // @generated by torchgen/gen.py from Function.h
4
+
5
+ #include <ATen/Context.h>
6
+ #include <ATen/DeviceGuard.h>
7
+ #include <ATen/TensorUtils.h>
8
+ #include <ATen/TracerMode.h>
9
+ #include <ATen/core/Generator.h>
10
+ #include <ATen/core/Reduction.h>
11
+ #include <ATen/core/Tensor.h>
12
+ #include <c10/core/Scalar.h>
13
+ #include <c10/core/Storage.h>
14
+ #include <c10/core/TensorOptions.h>
15
+ #include <c10/util/Deprecated.h>
16
+ #include <optional>
17
+
18
+
19
+
20
+ #include <ATen/ops/_addmm_activation_ops.h>
21
+
22
+ namespace at {
23
+
24
+
25
+ // aten::_addmm_activation.out(Tensor self, Tensor mat1, Tensor mat2, *, Scalar beta=1, Scalar alpha=1, bool use_gelu=False, Tensor(a!) out) -> Tensor(a!)
26
+ inline at::Tensor & _addmm_activation_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & mat1, const at::Tensor & mat2, const at::Scalar & beta=1, const at::Scalar & alpha=1, bool use_gelu=false) {
27
+ return at::_ops::_addmm_activation_out::call(self, mat1, mat2, beta, alpha, use_gelu, out);
28
+ }
29
+ // aten::_addmm_activation.out(Tensor self, Tensor mat1, Tensor mat2, *, Scalar beta=1, Scalar alpha=1, bool use_gelu=False, Tensor(a!) out) -> Tensor(a!)
30
+ inline at::Tensor & _addmm_activation_outf(const at::Tensor & self, const at::Tensor & mat1, const at::Tensor & mat2, const at::Scalar & beta, const at::Scalar & alpha, bool use_gelu, at::Tensor & out) {
31
+ return at::_ops::_addmm_activation_out::call(self, mat1, mat2, beta, alpha, use_gelu, out);
32
+ }
33
+
34
+ // aten::_addmm_activation(Tensor self, Tensor mat1, Tensor mat2, *, Scalar beta=1, Scalar alpha=1, bool use_gelu=False) -> Tensor
35
+ inline at::Tensor _addmm_activation(const at::Tensor & self, const at::Tensor & mat1, const at::Tensor & mat2, const at::Scalar & beta=1, const at::Scalar & alpha=1, bool use_gelu=false) {
36
+ return at::_ops::_addmm_activation::call(self, mat1, mat2, beta, alpha, use_gelu);
37
+ }
38
+
39
+ }
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_coalesced_native.h ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ // @generated by torchgen/gen.py from NativeFunction.h
4
+
5
+ #include <c10/core/Scalar.h>
6
+ #include <c10/core/Storage.h>
7
+ #include <c10/core/TensorOptions.h>
8
+ #include <c10/util/Deprecated.h>
9
+ #include <optional>
10
+ #include <c10/core/QScheme.h>
11
+ #include <ATen/core/Reduction.h>
12
+ #include <ATen/core/Tensor.h>
13
+ #include <tuple>
14
+ #include <vector>
15
+
16
+
17
+ namespace at {
18
+ namespace native {
19
+ TORCH_API at::Tensor _coalesced(const at::Tensor & self, bool coalesced);
20
+ TORCH_API at::Tensor & _coalesced_out(const at::Tensor & self, bool coalesced, at::Tensor & out);
21
+ TORCH_API at::Tensor & _coalesced_sparse_(at::Tensor & self, bool coalesced);
22
+ } // namespace native
23
+ } // namespace at
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cufft_get_plan_cache_size_native.h ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ // @generated by torchgen/gen.py from NativeFunction.h
4
+
5
+ #include <c10/core/Scalar.h>
6
+ #include <c10/core/Storage.h>
7
+ #include <c10/core/TensorOptions.h>
8
+ #include <c10/util/Deprecated.h>
9
+ #include <optional>
10
+ #include <c10/core/QScheme.h>
11
+ #include <ATen/core/Reduction.h>
12
+ #include <ATen/core/Tensor.h>
13
+ #include <tuple>
14
+ #include <vector>
15
+
16
+
17
+ namespace at {
18
+ namespace native {
19
+ TORCH_API int64_t _cufft_get_plan_cache_size(at::DeviceIndex device_index);
20
+ } // namespace native
21
+ } // namespace at