diff --git a/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/__init__.py b/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/__init__.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73347aa906e831c8b402743b727d8e74bc6faef0 Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/test_crypto.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/test_crypto.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0898de46c18f2ad4ce8c12333b2ec3c80ffab49 Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/test_crypto.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/test_crypto.py b/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/test_crypto.py new file mode 100644 index 0000000000000000000000000000000000000000..c671138f9a61325f6e65cc7cafddc7cd46f19229 --- /dev/null +++ b/openflamingo/lib/python3.10/site-packages/sympy/crypto/tests/test_crypto.py @@ -0,0 +1,562 @@ +from sympy.core import symbols +from sympy.crypto.crypto import (cycle_list, + encipher_shift, encipher_affine, encipher_substitution, + check_and_join, encipher_vigenere, decipher_vigenere, + encipher_hill, decipher_hill, encipher_bifid5, encipher_bifid6, + bifid5_square, bifid6_square, bifid5, bifid6, + decipher_bifid5, decipher_bifid6, encipher_kid_rsa, + decipher_kid_rsa, kid_rsa_private_key, kid_rsa_public_key, + decipher_rsa, rsa_private_key, rsa_public_key, encipher_rsa, + lfsr_connection_polynomial, lfsr_autocorrelation, lfsr_sequence, + encode_morse, decode_morse, elgamal_private_key, elgamal_public_key, + encipher_elgamal, decipher_elgamal, dh_private_key, dh_public_key, + dh_shared_key, decipher_shift, decipher_affine, encipher_bifid, + decipher_bifid, bifid_square, padded_key, uniq, decipher_gm, + encipher_gm, gm_public_key, gm_private_key, encipher_bg, decipher_bg, + bg_private_key, bg_public_key, encipher_rot13, decipher_rot13, + encipher_atbash, decipher_atbash, NonInvertibleCipherWarning, + encipher_railfence, decipher_railfence) +from sympy.external.gmpy import gcd +from sympy.matrices import Matrix +from sympy.ntheory import isprime, is_primitive_root +from sympy.polys.domains import FF + +from sympy.testing.pytest import raises, warns + +from sympy.core.random import randrange + +def test_encipher_railfence(): + assert encipher_railfence("hello world",2) == "hlowrdel ol" + assert encipher_railfence("hello world",3) == "horel ollwd" + assert encipher_railfence("hello world",4) == "hwe olordll" + +def test_decipher_railfence(): + assert decipher_railfence("hlowrdel ol",2) == "hello world" + assert decipher_railfence("horel ollwd",3) == "hello world" + assert decipher_railfence("hwe olordll",4) == "hello world" + + +def test_cycle_list(): + assert cycle_list(3, 4) == [3, 0, 1, 2] + assert cycle_list(-1, 4) == [3, 0, 1, 2] + assert cycle_list(1, 4) == [1, 2, 3, 0] + + +def test_encipher_shift(): + assert 
encipher_shift("ABC", 0) == "ABC" + assert encipher_shift("ABC", 1) == "BCD" + assert encipher_shift("ABC", -1) == "ZAB" + assert decipher_shift("ZAB", -1) == "ABC" + +def test_encipher_rot13(): + assert encipher_rot13("ABC") == "NOP" + assert encipher_rot13("NOP") == "ABC" + assert decipher_rot13("ABC") == "NOP" + assert decipher_rot13("NOP") == "ABC" + + +def test_encipher_affine(): + assert encipher_affine("ABC", (1, 0)) == "ABC" + assert encipher_affine("ABC", (1, 1)) == "BCD" + assert encipher_affine("ABC", (-1, 0)) == "AZY" + assert encipher_affine("ABC", (-1, 1), symbols="ABCD") == "BAD" + assert encipher_affine("123", (-1, 1), symbols="1234") == "214" + assert encipher_affine("ABC", (3, 16)) == "QTW" + assert decipher_affine("QTW", (3, 16)) == "ABC" + +def test_encipher_atbash(): + assert encipher_atbash("ABC") == "ZYX" + assert encipher_atbash("ZYX") == "ABC" + assert decipher_atbash("ABC") == "ZYX" + assert decipher_atbash("ZYX") == "ABC" + +def test_encipher_substitution(): + assert encipher_substitution("ABC", "BAC", "ABC") == "BAC" + assert encipher_substitution("123", "1243", "1234") == "124" + + +def test_check_and_join(): + assert check_and_join("abc") == "abc" + assert check_and_join(uniq("aaabc")) == "abc" + assert check_and_join("ab c".split()) == "abc" + assert check_and_join("abc", "a", filter=True) == "a" + raises(ValueError, lambda: check_and_join('ab', 'a')) + + +def test_encipher_vigenere(): + assert encipher_vigenere("ABC", "ABC") == "ACE" + assert encipher_vigenere("ABC", "ABC", symbols="ABCD") == "ACA" + assert encipher_vigenere("ABC", "AB", symbols="ABCD") == "ACC" + assert encipher_vigenere("AB", "ABC", symbols="ABCD") == "AC" + assert encipher_vigenere("A", "ABC", symbols="ABCD") == "A" + + +def test_decipher_vigenere(): + assert decipher_vigenere("ABC", "ABC") == "AAA" + assert decipher_vigenere("ABC", "ABC", symbols="ABCD") == "AAA" + assert decipher_vigenere("ABC", "AB", symbols="ABCD") == "AAC" + assert decipher_vigenere("AB", "ABC", symbols="ABCD") == "AA" + assert decipher_vigenere("A", "ABC", symbols="ABCD") == "A" + + +def test_encipher_hill(): + A = Matrix(2, 2, [1, 2, 3, 5]) + assert encipher_hill("ABCD", A) == "CFIV" + A = Matrix(2, 2, [1, 0, 0, 1]) + assert encipher_hill("ABCD", A) == "ABCD" + assert encipher_hill("ABCD", A, symbols="ABCD") == "ABCD" + A = Matrix(2, 2, [1, 2, 3, 5]) + assert encipher_hill("ABCD", A, symbols="ABCD") == "CBAB" + assert encipher_hill("AB", A, symbols="ABCD") == "CB" + # message length, n, does not need to be a multiple of k; + # it is padded + assert encipher_hill("ABA", A) == "CFGC" + assert encipher_hill("ABA", A, pad="Z") == "CFYV" + + +def test_decipher_hill(): + A = Matrix(2, 2, [1, 2, 3, 5]) + assert decipher_hill("CFIV", A) == "ABCD" + A = Matrix(2, 2, [1, 0, 0, 1]) + assert decipher_hill("ABCD", A) == "ABCD" + assert decipher_hill("ABCD", A, symbols="ABCD") == "ABCD" + A = Matrix(2, 2, [1, 2, 3, 5]) + assert decipher_hill("CBAB", A, symbols="ABCD") == "ABCD" + assert decipher_hill("CB", A, symbols="ABCD") == "AB" + # n does not need to be a multiple of k + assert decipher_hill("CFA", A) == "ABAA" + + +def test_encipher_bifid5(): + assert encipher_bifid5("AB", "AB") == "AB" + assert encipher_bifid5("AB", "CD") == "CO" + assert encipher_bifid5("ab", "c") == "CH" + assert encipher_bifid5("a bc", "b") == "BAC" + + +def test_bifid5_square(): + A = bifid5 + f = lambda i, j: symbols(A[5*i + j]) + M = Matrix(5, 5, f) + assert bifid5_square("") == M + + +def test_decipher_bifid5(): + assert decipher_bifid5("AB", "AB") 
== "AB" + assert decipher_bifid5("CO", "CD") == "AB" + assert decipher_bifid5("ch", "c") == "AB" + assert decipher_bifid5("b ac", "b") == "ABC" + + +def test_encipher_bifid6(): + assert encipher_bifid6("AB", "AB") == "AB" + assert encipher_bifid6("AB", "CD") == "CP" + assert encipher_bifid6("ab", "c") == "CI" + assert encipher_bifid6("a bc", "b") == "BAC" + + +def test_decipher_bifid6(): + assert decipher_bifid6("AB", "AB") == "AB" + assert decipher_bifid6("CP", "CD") == "AB" + assert decipher_bifid6("ci", "c") == "AB" + assert decipher_bifid6("b ac", "b") == "ABC" + + +def test_bifid6_square(): + A = bifid6 + f = lambda i, j: symbols(A[6*i + j]) + M = Matrix(6, 6, f) + assert bifid6_square("") == M + + +def test_rsa_public_key(): + assert rsa_public_key(2, 3, 1) == (6, 1) + assert rsa_public_key(5, 3, 3) == (15, 3) + + with warns(NonInvertibleCipherWarning): + assert rsa_public_key(2, 2, 1) == (4, 1) + assert rsa_public_key(8, 8, 8) is False + + +def test_rsa_private_key(): + assert rsa_private_key(2, 3, 1) == (6, 1) + assert rsa_private_key(5, 3, 3) == (15, 3) + assert rsa_private_key(23,29,5) == (667,493) + + with warns(NonInvertibleCipherWarning): + assert rsa_private_key(2, 2, 1) == (4, 1) + assert rsa_private_key(8, 8, 8) is False + + +def test_rsa_large_key(): + # Sample from + # http://www.herongyang.com/Cryptography/JCE-Public-Key-RSA-Private-Public-Key-Pair-Sample.html + p = int('101565610013301240713207239558950144682174355406589305284428666'\ + '903702505233009') + q = int('894687191887545488935455605955948413812376003053143521429242133'\ + '12069293984003') + e = int('65537') + d = int('893650581832704239530398858744759129594796235440844479456143566'\ + '6999402846577625762582824202269399672579058991442587406384754958587'\ + '400493169361356902030209') + assert rsa_public_key(p, q, e) == (p*q, e) + assert rsa_private_key(p, q, e) == (p*q, d) + + +def test_encipher_rsa(): + puk = rsa_public_key(2, 3, 1) + assert encipher_rsa(2, puk) == 2 + puk = rsa_public_key(5, 3, 3) + assert encipher_rsa(2, puk) == 8 + + with warns(NonInvertibleCipherWarning): + puk = rsa_public_key(2, 2, 1) + assert encipher_rsa(2, puk) == 2 + + +def test_decipher_rsa(): + prk = rsa_private_key(2, 3, 1) + assert decipher_rsa(2, prk) == 2 + prk = rsa_private_key(5, 3, 3) + assert decipher_rsa(8, prk) == 2 + + with warns(NonInvertibleCipherWarning): + prk = rsa_private_key(2, 2, 1) + assert decipher_rsa(2, prk) == 2 + + +def test_mutltiprime_rsa_full_example(): + # Test example from + # https://iopscience.iop.org/article/10.1088/1742-6596/995/1/012030 + puk = rsa_public_key(2, 3, 5, 7, 11, 13, 7) + prk = rsa_private_key(2, 3, 5, 7, 11, 13, 7) + assert puk == (30030, 7) + assert prk == (30030, 823) + + msg = 10 + encrypted = encipher_rsa(2 * msg - 15, puk) + assert encrypted == 18065 + decrypted = (decipher_rsa(encrypted, prk) + 15) / 2 + assert decrypted == msg + + # Test example from + # https://www.scirp.org/pdf/JCC_2018032215502008.pdf + puk1 = rsa_public_key(53, 41, 43, 47, 41) + prk1 = rsa_private_key(53, 41, 43, 47, 41) + puk2 = rsa_public_key(53, 41, 43, 47, 97) + prk2 = rsa_private_key(53, 41, 43, 47, 97) + + assert puk1 == (4391633, 41) + assert prk1 == (4391633, 294041) + assert puk2 == (4391633, 97) + assert prk2 == (4391633, 455713) + + msg = 12321 + encrypted = encipher_rsa(encipher_rsa(msg, puk1), puk2) + assert encrypted == 1081588 + decrypted = decipher_rsa(decipher_rsa(encrypted, prk2), prk1) + assert decrypted == msg + + +def test_rsa_crt_extreme(): + p = int( + 
'10177157607154245068023861503693082120906487143725062283406501' \ + '54082258226204046999838297167140821364638180697194879500245557' \ + '65445186962893346463841419427008800341257468600224049986260471' \ + '92257248163014468841725476918639415726709736077813632961290911' \ + '0256421232977833028677441206049309220354796014376698325101693') + + q = int( + '28752342353095132872290181526607275886182793241660805077850801' \ + '75689512797754286972952273553128181861830576836289738668745250' \ + '34028199691128870676414118458442900035778874482624765513861643' \ + '27966696316822188398336199002306588703902894100476186823849595' \ + '103239410527279605442148285816149368667083114802852804976893') + + r = int( + '17698229259868825776879500736350186838850961935956310134378261' \ + '89771862186717463067541369694816245225291921138038800171125596' \ + '07315449521981157084370187887650624061033066022458512942411841' \ + '18747893789972315277160085086164119879536041875335384844820566' \ + '0287479617671726408053319619892052000850883994343378882717849') + + s = int( + '68925428438585431029269182233502611027091755064643742383515623' \ + '64321310582896893395529367074942808353187138794422745718419645' \ + '28291231865157212604266903677599180789896916456120289112752835' \ + '98502265889669730331688206825220074713977607415178738015831030' \ + '364290585369150502819743827343552098197095520550865360159439' + ) + + t = int( + '69035483433453632820551311892368908779778144568711455301541094' \ + '31487047642322695357696860925747923189635033183069823820910521' \ + '71172909106797748883261493224162414050106920442445896819806600' \ + '15448444826108008217972129130625571421904893252804729877353352' \ + '739420480574842850202181462656251626522910618936534699566291' + ) + + e = 65537 + puk = rsa_public_key(p, q, r, s, t, e) + prk = rsa_private_key(p, q, r, s, t, e) + + plaintext = 1000 + ciphertext_1 = encipher_rsa(plaintext, puk) + ciphertext_2 = encipher_rsa(plaintext, puk, [p, q, r, s, t]) + assert ciphertext_1 == ciphertext_2 + assert decipher_rsa(ciphertext_1, prk) == \ + decipher_rsa(ciphertext_1, prk, [p, q, r, s, t]) + + +def test_rsa_exhaustive(): + p, q = 61, 53 + e = 17 + puk = rsa_public_key(p, q, e, totient='Carmichael') + prk = rsa_private_key(p, q, e, totient='Carmichael') + + for msg in range(puk[0]): + encrypted = encipher_rsa(msg, puk) + decrypted = decipher_rsa(encrypted, prk) + try: + assert decrypted == msg + except AssertionError: + raise AssertionError( + "The RSA is not correctly decrypted " \ + "(Original : {}, Encrypted : {}, Decrypted : {})" \ + .format(msg, encrypted, decrypted) + ) + + +def test_rsa_multiprime_exhanstive(): + primes = [3, 5, 7, 11] + e = 7 + args = primes + [e] + puk = rsa_public_key(*args, totient='Carmichael') + prk = rsa_private_key(*args, totient='Carmichael') + n = puk[0] + + for msg in range(n): + encrypted = encipher_rsa(msg, puk) + decrypted = decipher_rsa(encrypted, prk) + try: + assert decrypted == msg + except AssertionError: + raise AssertionError( + "The RSA is not correctly decrypted " \ + "(Original : {}, Encrypted : {}, Decrypted : {})" \ + .format(msg, encrypted, decrypted) + ) + + +def test_rsa_multipower_exhanstive(): + primes = [5, 5, 7] + e = 7 + args = primes + [e] + puk = rsa_public_key(*args, multipower=True) + prk = rsa_private_key(*args, multipower=True) + n = puk[0] + + for msg in range(n): + if gcd(msg, n) != 1: + continue + + encrypted = encipher_rsa(msg, puk) + decrypted = decipher_rsa(encrypted, prk) + try: + assert decrypted == msg + except 
AssertionError: + raise AssertionError( + "The RSA is not correctly decrypted " \ + "(Original : {}, Encrypted : {}, Decrypted : {})" \ + .format(msg, encrypted, decrypted) + ) + + +def test_kid_rsa_public_key(): + assert kid_rsa_public_key(1, 2, 1, 1) == (5, 2) + assert kid_rsa_public_key(1, 2, 2, 1) == (8, 3) + assert kid_rsa_public_key(1, 2, 1, 2) == (7, 2) + + +def test_kid_rsa_private_key(): + assert kid_rsa_private_key(1, 2, 1, 1) == (5, 3) + assert kid_rsa_private_key(1, 2, 2, 1) == (8, 3) + assert kid_rsa_private_key(1, 2, 1, 2) == (7, 4) + + +def test_encipher_kid_rsa(): + assert encipher_kid_rsa(1, (5, 2)) == 2 + assert encipher_kid_rsa(1, (8, 3)) == 3 + assert encipher_kid_rsa(1, (7, 2)) == 2 + + +def test_decipher_kid_rsa(): + assert decipher_kid_rsa(2, (5, 3)) == 1 + assert decipher_kid_rsa(3, (8, 3)) == 1 + assert decipher_kid_rsa(2, (7, 4)) == 1 + + +def test_encode_morse(): + assert encode_morse('ABC') == '.-|-...|-.-.' + assert encode_morse('SMS ') == '...|--|...||' + assert encode_morse('SMS\n') == '...|--|...||' + assert encode_morse('') == '' + assert encode_morse(' ') == '||' + assert encode_morse(' ', sep='`') == '``' + assert encode_morse(' ', sep='``') == '````' + assert encode_morse('!@#$%^&*()_+') == '-.-.--|.--.-.|...-..-|-.--.|-.--.-|..--.-|.-.-.' + assert encode_morse('12345') == '.----|..---|...--|....-|.....' + assert encode_morse('67890') == '-....|--...|---..|----.|-----' + + +def test_decode_morse(): + assert decode_morse('-.-|.|-.--') == 'KEY' + assert decode_morse('.-.|..-|-.||') == 'RUN' + raises(KeyError, lambda: decode_morse('.....----')) + + +def test_lfsr_sequence(): + raises(TypeError, lambda: lfsr_sequence(1, [1], 1)) + raises(TypeError, lambda: lfsr_sequence([1], 1, 1)) + F = FF(2) + assert lfsr_sequence([F(1)], [F(1)], 2) == [F(1), F(1)] + assert lfsr_sequence([F(0)], [F(1)], 2) == [F(1), F(0)] + F = FF(3) + assert lfsr_sequence([F(1)], [F(1)], 2) == [F(1), F(1)] + assert lfsr_sequence([F(0)], [F(2)], 2) == [F(2), F(0)] + assert lfsr_sequence([F(1)], [F(2)], 2) == [F(2), F(2)] + + +def test_lfsr_autocorrelation(): + raises(TypeError, lambda: lfsr_autocorrelation(1, 2, 3)) + F = FF(2) + s = lfsr_sequence([F(1), F(0)], [F(0), F(1)], 5) + assert lfsr_autocorrelation(s, 2, 0) == 1 + assert lfsr_autocorrelation(s, 2, 1) == -1 + + +def test_lfsr_connection_polynomial(): + F = FF(2) + x = symbols("x") + s = lfsr_sequence([F(1), F(0)], [F(0), F(1)], 5) + assert lfsr_connection_polynomial(s) == x**2 + 1 + s = lfsr_sequence([F(1), F(1)], [F(0), F(1)], 5) + assert lfsr_connection_polynomial(s) == x**2 + x + 1 + + +def test_elgamal_private_key(): + a, b, _ = elgamal_private_key(digit=100) + assert isprime(a) + assert is_primitive_root(b, a) + assert len(bin(a)) >= 102 + + +def test_elgamal(): + dk = elgamal_private_key(5) + ek = elgamal_public_key(dk) + P = ek[0] + assert P - 1 == decipher_elgamal(encipher_elgamal(P - 1, ek), dk) + raises(ValueError, lambda: encipher_elgamal(P, dk)) + raises(ValueError, lambda: encipher_elgamal(-1, dk)) + + +def test_dh_private_key(): + p, g, _ = dh_private_key(digit = 100) + assert isprime(p) + assert is_primitive_root(g, p) + assert len(bin(p)) >= 102 + + +def test_dh_public_key(): + p1, g1, a = dh_private_key(digit = 100) + p2, g2, ga = dh_public_key((p1, g1, a)) + assert p1 == p2 + assert g1 == g2 + assert ga == pow(g1, a, p1) + + +def test_dh_shared_key(): + prk = dh_private_key(digit = 100) + p, _, ga = dh_public_key(prk) + b = randrange(2, p) + sk = dh_shared_key((p, _, ga), b) + assert sk == pow(ga, b, p) + 
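+ # An illustrative note on the symmetry being asserted (not upstream code): with + # gb = pow(g, b, p) the other party computes pow(gb, a, p), and + # pow(ga, b, p) == pow(gb, a, p) == pow(g, a*b, p).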
raises(ValueError, lambda: dh_shared_key((1031, 14, 565), 2000)) + + +def test_padded_key(): + assert padded_key('b', 'ab') == 'ba' + raises(ValueError, lambda: padded_key('ab', 'ace')) + raises(ValueError, lambda: padded_key('ab', 'abba')) + + +def test_bifid(): + raises(ValueError, lambda: encipher_bifid('abc', 'b', 'abcde')) + assert encipher_bifid('abc', 'b', 'abcd') == 'bdb' + raises(ValueError, lambda: decipher_bifid('bdb', 'b', 'abcde')) + assert decipher_bifid('bdb', 'b', 'abcd') == 'abc' + raises(ValueError, lambda: bifid_square('abcde')) + assert bifid5_square("B") == \ + bifid5_square('BACDEFGHIKLMNOPQRSTUVWXYZ') + assert bifid6_square('B0') == \ + bifid6_square('B0ACDEFGHIJKLMNOPQRSTUVWXYZ123456789') + + +def test_encipher_decipher_gm(): + ps = [131, 137, 139, 149, 151, 157, 163, 167, + 173, 179, 181, 191, 193, 197, 199] + qs = [89, 97, 101, 103, 107, 109, 113, 127, + 131, 137, 139, 149, 151, 157, 47] + messages = [ + 0, 32855, 34303, 14805, 1280, 75859, 38368, + 724, 60356, 51675, 76697, 61854, 18661, + ] + for p, q in zip(ps, qs): + pri = gm_private_key(p, q) + for msg in messages: + pub = gm_public_key(p, q) + enc = encipher_gm(msg, pub) + dec = decipher_gm(enc, pri) + assert dec == msg + + +def test_gm_private_key(): + raises(ValueError, lambda: gm_public_key(13, 15)) + raises(ValueError, lambda: gm_public_key(0, 0)) + raises(ValueError, lambda: gm_public_key(0, 5)) + assert gm_private_key(17, 19) == (17, 19) + + +def test_gm_public_key(): + assert 323 == gm_public_key(17, 19)[1] + assert 15 == gm_public_key(3, 5)[1] + raises(ValueError, lambda: gm_public_key(15, 19)) + +def test_encipher_decipher_bg(): + ps = [67, 7, 71, 103, 11, 43, 107, 47, + 79, 19, 83, 23, 59, 127, 31] + qs = [7, 71, 103, 11, 43, 107, 47, + 79, 19, 83, 23, 59, 127, 31, 67] + messages = [ + 0, 328, 343, 148, 1280, 758, 383, + 724, 603, 516, 766, 618, 186, + ] + + for p, q in zip(ps, qs): + pri = bg_private_key(p, q) + for msg in messages: + pub = bg_public_key(p, q) + enc = encipher_bg(msg, pub) + dec = decipher_bg(enc, pri) + assert dec == msg + +def test_bg_private_key(): + raises(ValueError, lambda: bg_private_key(8, 16)) + raises(ValueError, lambda: bg_private_key(8, 8)) + raises(ValueError, lambda: bg_private_key(13, 17)) + assert bg_private_key(23, 31) == (23, 31) + +def test_bg_public_key(): + assert 5293 == bg_public_key(67, 79) + assert 713 == bg_public_key(23, 31) + raises(ValueError, lambda: bg_private_key(13, 17)) diff --git a/openflamingo/lib/python3.10/site-packages/sympy/series/__pycache__/limits.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/series/__pycache__/limits.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30109fe57acc75722d46c96fdbda627d158d4e0b Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/series/__pycache__/limits.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/__pycache__/__init__.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1be468e0cfc5e99969cb038da808e711c57fa3fd Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/__pycache__/__init__.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/__pycache__/bench_order.cpython-310.pyc
b/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/__pycache__/bench_order.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa16b1e4fba1aedec5249185533a5c25c7b2bb51 Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/__pycache__/bench_order.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/bench_order.py b/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/bench_order.py new file mode 100644 index 0000000000000000000000000000000000000000..1c85fa173dfc2a478792de8ab816c23ba9d408ef --- /dev/null +++ b/openflamingo/lib/python3.10/site-packages/sympy/series/benchmarks/bench_order.py @@ -0,0 +1,10 @@ +from sympy.core.add import Add +from sympy.core.symbol import Symbol +from sympy.series.order import O + +x = Symbol('x') +l = [x**i for i in range(1000)] +l.append(O(x**1001)) + +def timeit_order_1x(): + Add(*l) diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/conditionset.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/conditionset.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f44a75f7b8aa9a393f6baec9344fac602760c9f2 Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/conditionset.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/contains.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/contains.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70d8e6464dab673a6aeb229d33d8e8bffeaf6d46 Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/contains.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/ordinals.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/ordinals.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a497dc4ada1d23dba596ea6ce185e4e0126a6ebc Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/ordinals.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/setexpr.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/setexpr.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4bbb544bdc5d2393dd3c129a82d8e6001a0ceeda Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/sets/__pycache__/setexpr.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/intersection.py b/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/intersection.py new file mode 100644 index 0000000000000000000000000000000000000000..fcb9309ef3e9d2722ab1bfe664f1d1644f17da5d --- /dev/null +++ b/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/intersection.py @@ -0,0 +1,533 @@ +from sympy.core.basic import _aresame +from sympy.core.function import Lambda, expand_complex +from sympy.core.mul import Mul +from sympy.core.numbers import ilcm, Float +from sympy.core.relational import Eq +from sympy.core.singleton import S +from sympy.core.symbol import (Dummy, symbols) +from sympy.core.sorting import ordered +from sympy.functions.elementary.complexes import sign +from sympy.functions.elementary.integers import floor, 
ceiling +from sympy.sets.fancysets import ComplexRegion +from sympy.sets.sets import (FiniteSet, Intersection, Interval, Set, Union) +from sympy.multipledispatch import Dispatcher +from sympy.sets.conditionset import ConditionSet +from sympy.sets.fancysets import (Integers, Naturals, Reals, Range, + ImageSet, Rationals) +from sympy.sets.sets import EmptySet, UniversalSet, imageset, ProductSet +from sympy.simplify.radsimp import numer + + +intersection_sets = Dispatcher('intersection_sets') + + +@intersection_sets.register(ConditionSet, ConditionSet) +def _(a, b): + return None + +@intersection_sets.register(ConditionSet, Set) +def _(a, b): + return ConditionSet(a.sym, a.condition, Intersection(a.base_set, b)) + +@intersection_sets.register(Naturals, Integers) +def _(a, b): + return a + +@intersection_sets.register(Naturals, Naturals) +def _(a, b): + return a if a is S.Naturals else b + +@intersection_sets.register(Interval, Naturals) +def _(a, b): + return intersection_sets(b, a) + +@intersection_sets.register(ComplexRegion, Set) +def _(self, other): + if other.is_ComplexRegion: + # self in rectangular form + if (not self.polar) and (not other.polar): + return ComplexRegion(Intersection(self.sets, other.sets)) + + # self in polar form + elif self.polar and other.polar: + r1, theta1 = self.a_interval, self.b_interval + r2, theta2 = other.a_interval, other.b_interval + new_r_interval = Intersection(r1, r2) + new_theta_interval = Intersection(theta1, theta2) + + # 0 and 2*Pi means the same + if ((2*S.Pi in theta1 and S.Zero in theta2) or + (2*S.Pi in theta2 and S.Zero in theta1)): + new_theta_interval = Union(new_theta_interval, + FiniteSet(0)) + return ComplexRegion(new_r_interval*new_theta_interval, + polar=True) + + + if other.is_subset(S.Reals): + new_interval = [] + x = symbols("x", cls=Dummy, real=True) + + # self in rectangular form + if not self.polar: + for element in self.psets: + if S.Zero in element.args[1]: + new_interval.append(element.args[0]) + new_interval = Union(*new_interval) + return Intersection(new_interval, other) + + # self in polar form + elif self.polar: + for element in self.psets: + if S.Zero in element.args[1]: + new_interval.append(element.args[0]) + if S.Pi in element.args[1]: + new_interval.append(ImageSet(Lambda(x, -x), element.args[0])) + if S.Zero in element.args[0]: + new_interval.append(FiniteSet(0)) + new_interval = Union(*new_interval) + return Intersection(new_interval, other) + +@intersection_sets.register(Integers, Reals) +def _(a, b): + return a + +@intersection_sets.register(Range, Interval) +def _(a, b): + # Check that there are no symbolic arguments + if not all(i.is_number for i in a.args + b.args[:2]): + return + + # In case of null Range, return an EmptySet. + if a.size == 0: + return S.EmptySet + + # trim down to self's size, and represent + # as a Range with step 1. 
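+ # e.g. Range(0, 10) & Interval(2.5, 7.5) clips to start = ceiling(2.5) = 3 + # and end = floor(7.5) = 7, then recurses as Range(0, 10) & Range(3, 8).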
+ start = ceiling(max(b.inf, a.inf)) + if start not in b: + start += 1 + end = floor(min(b.sup, a.sup)) + if end not in b: + end -= 1 + return intersection_sets(a, Range(start, end + 1)) + +@intersection_sets.register(Range, Naturals) +def _(a, b): + return intersection_sets(a, Interval(b.inf, S.Infinity)) + +@intersection_sets.register(Range, Range) +def _(a, b): + # Check that there are no symbolic range arguments + if not all(all(v.is_number for v in r.args) for r in [a, b]): + return None + + # non-overlap quick exits + if not b: + return S.EmptySet + if not a: + return S.EmptySet + if b.sup < a.inf: + return S.EmptySet + if b.inf > a.sup: + return S.EmptySet + + # work with finite end at the start + r1 = a + if r1.start.is_infinite: + r1 = r1.reversed + r2 = b + if r2.start.is_infinite: + r2 = r2.reversed + + # If both ends are infinite then it means that one Range is just the set + # of all integers (the step must be 1). + if r1.start.is_infinite: + return b + if r2.start.is_infinite: + return a + + from sympy.solvers.diophantine.diophantine import diop_linear + + # this equation represents the values of the Range; + # it's a linear equation + eq = lambda r, i: r.start + i*r.step + + # we want to know when the two equations might + # have integer solutions so we use the diophantine + # solver + va, vb = diop_linear(eq(r1, Dummy('a')) - eq(r2, Dummy('b'))) + + # check for no solution + no_solution = va is None and vb is None + if no_solution: + return S.EmptySet + + # there is a solution + # ------------------- + + # find the coincident point, c + a0 = va.as_coeff_Add()[0] + c = eq(r1, a0) + + # find the first point, if possible, in each range + # since c may not be that point + def _first_finite_point(r1, c): + if c == r1.start: + return c + # st is the signed step we need to take to + # get from c to r1.start + st = sign(r1.start - c)*step + # use Range to calculate the first point: + # we want to get as close as possible to + # r1.start; the Range will not be null since + # it will at least contain c + s1 = Range(c, r1.start + st, st)[-1] + if s1 == r1.start: + pass + else: + # if we didn't hit r1.start then, if the + # sign of st didn't match the sign of r1.step + # we are off by one and s1 is not in r1 + if sign(r1.step) != sign(st): + s1 -= st + if s1 not in r1: + return + return s1 + + # calculate the step size of the new Range + step = abs(ilcm(r1.step, r2.step)) + s1 = _first_finite_point(r1, c) + if s1 is None: + return S.EmptySet + s2 = _first_finite_point(r2, c) + if s2 is None: + return S.EmptySet + + # replace the corresponding start or stop in + # the original Ranges with these points; the + # result must have at least one point since + # we know that s1 and s2 are in the Ranges + def _updated_range(r, first): + st = sign(r.step)*step + if r.start.is_finite: + rv = Range(first, r.stop, st) + else: + rv = Range(r.start, first + st, st) + return rv + r1 = _updated_range(a, s1) + r2 = _updated_range(b, s2) + + # work with them both in the increasing direction + if sign(r1.step) < 0: + r1 = r1.reversed + if sign(r2.step) < 0: + r2 = r2.reversed + + # return clipped Range with positive step; it + # can't be empty at this point + start = max(r1.start, r2.start) + stop = min(r1.stop, r2.stop) + return Range(start, stop, step) + + +@intersection_sets.register(Range, Integers) +def _(a, b): + return a + + +@intersection_sets.register(Range, Rationals) +def _(a, b): + return a + + +@intersection_sets.register(ImageSet, Set) +def _(self, other): + from sympy.solvers.diophantine 
import diophantine + + # Only handle the straight-forward univariate case + if (len(self.lamda.variables) > 1 + or self.lamda.signature != self.lamda.variables): + return None + base_set = self.base_sets[0] + + # Intersection between ImageSets with Integers as base set + # For {f(n) : n in Integers} & {g(m) : m in Integers} we solve the + # diophantine equations f(n)=g(m). + # If the solutions for n are {h(t) : t in Integers} then we return + # {f(h(t)) : t in integers}. + # If the solutions for n are {n_1, n_2, ..., n_k} then we return + # {f(n_i) : 1 <= i <= k}. + if base_set is S.Integers: + gm = None + if isinstance(other, ImageSet) and other.base_sets == (S.Integers,): + gm = other.lamda.expr + var = other.lamda.variables[0] + # Symbol of second ImageSet lambda must be distinct from first + m = Dummy('m') + gm = gm.subs(var, m) + elif other is S.Integers: + m = gm = Dummy('m') + if gm is not None: + fn = self.lamda.expr + n = self.lamda.variables[0] + try: + solns = list(diophantine(fn - gm, syms=(n, m), permute=True)) + except (TypeError, NotImplementedError): + # TypeError if equation not polynomial with rational coeff. + # NotImplementedError if correct format but no solver. + return + # 3 cases are possible for solns: + # - empty set, + # - one or more parametric (infinite) solutions, + # - a finite number of (non-parametric) solution couples. + # Among those, there is one type of solution set that is + # not helpful here: multiple parametric solutions. + if len(solns) == 0: + return S.EmptySet + elif any(s.free_symbols for tupl in solns for s in tupl): + if len(solns) == 1: + soln, solm = solns[0] + (t,) = soln.free_symbols + expr = fn.subs(n, soln.subs(t, n)).expand() + return imageset(Lambda(n, expr), S.Integers) + else: + return + else: + return FiniteSet(*(fn.subs(n, s[0]) for s in solns)) + + if other == S.Reals: + from sympy.solvers.solvers import denoms, solve_linear + + def _solution_union(exprs, sym): + # return a union of linear solutions to i in expr; + # if i cannot be solved, use a ConditionSet for solution + sols = [] + for i in exprs: + x, xis = solve_linear(i, 0, [sym]) + if x == sym: + sols.append(FiniteSet(xis)) + else: + sols.append(ConditionSet(sym, Eq(i, 0))) + return Union(*sols) + + f = self.lamda.expr + n = self.lamda.variables[0] + + n_ = Dummy(n.name, real=True) + f_ = f.subs(n, n_) + + re, im = f_.as_real_imag() + im = expand_complex(im) + + re = re.subs(n_, n) + im = im.subs(n_, n) + ifree = im.free_symbols + lam = Lambda(n, re) + if im.is_zero: + # allow re-evaluation + # of self in this case to make + # the result canonical + pass + elif im.is_zero is False: + return S.EmptySet + elif ifree != {n}: + return None + else: + # univariate imaginary part in same variable; + # use numer instead of as_numer_denom to keep + # this as fast as possible while still handling + # simple cases + base_set &= _solution_union( + Mul.make_args(numer(im)), n) + # exclude values that make denominators 0 + base_set -= _solution_union(denoms(f), n) + return imageset(lam, base_set) + + elif isinstance(other, Interval): + from sympy.solvers.solveset import (invert_real, invert_complex, + solveset) + + f = self.lamda.expr + n = self.lamda.variables[0] + new_inf, new_sup = None, None + new_lopen, new_ropen = other.left_open, other.right_open + + if f.is_real: + inverter = invert_real + else: + inverter = invert_complex + + g1, h1 = inverter(f, other.inf, n) + g2, h2 = inverter(f, other.sup, n) + + if all(isinstance(i, FiniteSet) for i in (h1, h2)): + if g1 == n: + if len(h1)
== 1: + new_inf = h1.args[0] + if g2 == n: + if len(h2) == 1: + new_sup = h2.args[0] + # TODO: Design a technique to handle multiple-inverse + # functions + + # Any of the new boundary values cannot be determined + if any(i is None for i in (new_sup, new_inf)): + return + + + range_set = S.EmptySet + + if all(i.is_real for i in (new_sup, new_inf)): + # this assumes continuity of underlying function + # however fixes the case when it is decreasing + if new_inf > new_sup: + new_inf, new_sup = new_sup, new_inf + new_interval = Interval(new_inf, new_sup, new_lopen, new_ropen) + range_set = base_set.intersect(new_interval) + else: + if other.is_subset(S.Reals): + solutions = solveset(f, n, S.Reals) + if not isinstance(range_set, (ImageSet, ConditionSet)): + range_set = solutions.intersect(other) + else: + return + + if range_set is S.EmptySet: + return S.EmptySet + elif isinstance(range_set, Range) and range_set.size is not S.Infinity: + range_set = FiniteSet(*list(range_set)) + + if range_set is not None: + return imageset(Lambda(n, f), range_set) + return + else: + return + + +@intersection_sets.register(ProductSet, ProductSet) +def _(a, b): + if len(b.args) != len(a.args): + return S.EmptySet + return ProductSet(*(i.intersect(j) for i, j in zip(a.sets, b.sets))) + + +@intersection_sets.register(Interval, Interval) +def _(a, b): + # handle (-oo, oo) + infty = S.NegativeInfinity, S.Infinity + if a == Interval(*infty): + l, r = a.left, a.right + if l.is_real or l in infty or r.is_real or r in infty: + return b + + # We can't intersect [0,3] with [x,6] -- we don't know if x>0 or x<0 + if not a._is_comparable(b): + return None + + empty = False + + if a.start <= b.end and b.start <= a.end: + # Get topology right. + if a.start < b.start: + start = b.start + left_open = b.left_open + elif a.start > b.start: + start = a.start + left_open = a.left_open + else: + start = a.start + if not _aresame(a.start, b.start): + # For example Integer(2) != Float(2) + # Prefer the Float boundary because Floats should be + # contagious in calculations. + if b.start.has(Float) and not a.start.has(Float): + start = b.start + elif a.start.has(Float) and not b.start.has(Float): + start = a.start + else: + #this is to ensure that if Eq(a.start, b.start) but + #type(a.start) != type(b.start) the order of a and b + #does not matter for the result + start = list(ordered([a,b]))[0].start + left_open = a.left_open or b.left_open + + if a.end < b.end: + end = a.end + right_open = a.right_open + elif a.end > b.end: + end = b.end + right_open = b.right_open + else: + # see above for logic with start + end = a.end + if not _aresame(a.end, b.end): + if b.end.has(Float) and not a.end.has(Float): + end = b.end + elif a.end.has(Float) and not b.end.has(Float): + end = a.end + else: + end = list(ordered([a,b]))[0].end + right_open = a.right_open or b.right_open + + if end - start == 0 and (left_open or right_open): + empty = True + else: + empty = True + + if empty: + return S.EmptySet + + return Interval(start, end, left_open, right_open) + +@intersection_sets.register(EmptySet, Set) +def _(a, b): + return S.EmptySet + +@intersection_sets.register(UniversalSet, Set) +def _(a, b): + return b + +@intersection_sets.register(FiniteSet, FiniteSet) +def _(a, b): + return FiniteSet(*(a._elements & b._elements)) + +@intersection_sets.register(FiniteSet, Set) +def _(a, b): + try: + return FiniteSet(*[el for el in a if el in b]) + except TypeError: + return None # could not evaluate `el in b` due to symbolic ranges. 
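+ +# A quick sanity sketch of the behaviour the handlers above implement, as +# reached through the public Intersection/& API (illustrative only): +# +#     >>> from sympy import FiniteSet, Interval, Range, S +#     >>> Range(0, 10) & Range(4, 20) +#     Range(4, 10) +#     >>> FiniteSet(1, 2, 3) & Interval(2, 5) +#     {2, 3} +#     >>> S.Integers & Interval(-1.5, 2.5) +#     Range(-1, 3)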
+ +@intersection_sets.register(Set, Set) +def _(a, b): + return None + +@intersection_sets.register(Integers, Rationals) +def _(a, b): + return a + +@intersection_sets.register(Naturals, Rationals) +def _(a, b): + return a + +@intersection_sets.register(Rationals, Reals) +def _(a, b): + return a + +def _intlike_interval(a, b): + try: + if b._inf is S.NegativeInfinity and b._sup is S.Infinity: + return a + s = Range(max(a.inf, ceiling(b.left)), floor(b.right) + 1) + return intersection_sets(s, b) # take out endpoints if open interval + except ValueError: + return None + +@intersection_sets.register(Integers, Interval) +def _(a, b): + return _intlike_interval(a, b) + +@intersection_sets.register(Naturals, Interval) +def _(a, b): + return _intlike_interval(a, b) diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/issubset.py b/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/issubset.py new file mode 100644 index 0000000000000000000000000000000000000000..cc23e8bf56f1743cd7f08452dd09a0acf981f5da --- /dev/null +++ b/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/issubset.py @@ -0,0 +1,144 @@ +from sympy.core.singleton import S +from sympy.core.symbol import Symbol +from sympy.core.logic import fuzzy_and, fuzzy_bool, fuzzy_not, fuzzy_or +from sympy.core.relational import Eq +from sympy.sets.sets import FiniteSet, Interval, Set, Union, ProductSet +from sympy.sets.fancysets import Complexes, Reals, Range, Rationals +from sympy.multipledispatch import Dispatcher + + +_inf_sets = [S.Naturals, S.Naturals0, S.Integers, S.Rationals, S.Reals, S.Complexes] + + +is_subset_sets = Dispatcher('is_subset_sets') + + +@is_subset_sets.register(Set, Set) +def _(a, b): + return None + +@is_subset_sets.register(Interval, Interval) +def _(a, b): + # This is correct but can be made more comprehensive... + if fuzzy_bool(a.start < b.start): + return False + if fuzzy_bool(a.end > b.end): + return False + if (b.left_open and not a.left_open and fuzzy_bool(Eq(a.start, b.start))): + return False + if (b.right_open and not a.right_open and fuzzy_bool(Eq(a.end, b.end))): + return False + +@is_subset_sets.register(Interval, FiniteSet) +def _(a_interval, b_fs): + # An Interval can only be a subset of a finite set if it is finite + # which can only happen if it has zero measure. 
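+ # e.g. Interval(1, 2) has measure 1 and so is never a subset of a FiniteSet, + # while a zero-measure (degenerate) interval may be; a definite False is + # returned below only when the measure is known to be nonzero.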
+ if fuzzy_not(a_interval.measure.is_zero): + return False + +@is_subset_sets.register(Interval, Union) +def _(a_interval, b_u): + if all(isinstance(s, (Interval, FiniteSet)) for s in b_u.args): + intervals = [s for s in b_u.args if isinstance(s, Interval)] + if all(fuzzy_bool(a_interval.start < s.start) for s in intervals): + return False + if all(fuzzy_bool(a_interval.end > s.end) for s in intervals): + return False + if a_interval.measure.is_nonzero: + no_overlap = lambda s1, s2: fuzzy_or([ + fuzzy_bool(s1.end <= s2.start), + fuzzy_bool(s1.start >= s2.end), + ]) + if all(no_overlap(s, a_interval) for s in intervals): + return False + +@is_subset_sets.register(Range, Range) +def _(a, b): + if a.step == b.step == 1: + return fuzzy_and([fuzzy_bool(a.start >= b.start), + fuzzy_bool(a.stop <= b.stop)]) + +@is_subset_sets.register(Range, Interval) +def _(a_range, b_interval): + if a_range.step.is_positive: + if b_interval.left_open and a_range.inf.is_finite: + cond_left = a_range.inf > b_interval.left + else: + cond_left = a_range.inf >= b_interval.left + if b_interval.right_open and a_range.sup.is_finite: + cond_right = a_range.sup < b_interval.right + else: + cond_right = a_range.sup <= b_interval.right + return fuzzy_and([cond_left, cond_right]) + +@is_subset_sets.register(Range, FiniteSet) +def _(a_range, b_finiteset): + try: + a_size = a_range.size + except ValueError: + # symbolic Range of unknown size + return None + if a_size > len(b_finiteset): + return False + elif any(arg.has(Symbol) for arg in a_range.args): + return fuzzy_and(b_finiteset.contains(x) for x in a_range) + else: + # Checking A \ B == EmptySet is more efficient than repeated naive + # membership checks on an arbitrary FiniteSet. + a_set = set(a_range) + b_remaining = len(b_finiteset) + # Symbolic expressions and numbers of unknown type (integer or not) are + # all counted as "candidates", i.e. *potentially* matching some a in + # a_range. 
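+ # e.g. for Range(3) vs FiniteSet(0, 1, 2.5, x): 0 and 1 are discarded + # from a_set, 2.5 is known not to be an integer, and the Symbol x + # remains a candidate that could still cover the leftover 2.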
+ cnt_candidate = 0 + for b in b_finiteset: + if b.is_Integer: + a_set.discard(b) + elif fuzzy_not(b.is_integer): + pass + else: + cnt_candidate += 1 + b_remaining -= 1 + if len(a_set) > b_remaining + cnt_candidate: + return False + if len(a_set) == 0: + return True + return None + +@is_subset_sets.register(Interval, Range) +def _(a_interval, b_range): + if a_interval.measure.is_extended_nonzero: + return False + +@is_subset_sets.register(Interval, Rationals) +def _(a_interval, b_rationals): + if a_interval.measure.is_extended_nonzero: + return False + +@is_subset_sets.register(Range, Complexes) +def _(a, b): + return True + +@is_subset_sets.register(Complexes, Interval) +def _(a, b): + return False + +@is_subset_sets.register(Complexes, Range) +def _(a, b): + return False + +@is_subset_sets.register(Complexes, Rationals) +def _(a, b): + return False + +@is_subset_sets.register(Rationals, Reals) +def _(a, b): + return True + +@is_subset_sets.register(Rationals, Range) +def _(a, b): + return False + +@is_subset_sets.register(ProductSet, FiniteSet) +def _(a_ps, b_fs): + return fuzzy_and(b_fs.contains(x) for x in a_ps) diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/power.py b/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/power.py new file mode 100644 index 0000000000000000000000000000000000000000..3cad4ee49ab27770143bc121d1fbcd024bf01548 --- /dev/null +++ b/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/power.py @@ -0,0 +1,107 @@ +from sympy.core import Basic, Expr +from sympy.core.function import Lambda +from sympy.core.numbers import oo, Infinity, NegativeInfinity, Zero, Integer +from sympy.core.singleton import S +from sympy.core.symbol import symbols +from sympy.functions.elementary.miscellaneous import (Max, Min) +from sympy.sets.fancysets import ImageSet +from sympy.sets.setexpr import set_div +from sympy.sets.sets import Set, Interval, FiniteSet, Union +from sympy.multipledispatch import Dispatcher + + +_x, _y = symbols("x y") + + +_set_pow = Dispatcher('_set_pow') + + +@_set_pow.register(Basic, Basic) +def _(x, y): + return None + +@_set_pow.register(Set, Set) +def _(x, y): + return ImageSet(Lambda((_x, _y), (_x ** _y)), x, y) + +@_set_pow.register(Expr, Expr) +def _(x, y): + return x**y + +@_set_pow.register(Interval, Zero) +def _(x, z): + return FiniteSet(S.One) + +@_set_pow.register(Interval, Integer) +def _(x, exponent): + """ + Powers in interval arithmetic + https://en.wikipedia.org/wiki/Interval_arithmetic + """ + s1 = x.start**exponent + s2 = x.end**exponent + if ((s2 > s1) if exponent > 0 else (x.end > -x.start)) == True: + left_open = x.left_open + right_open = x.right_open + # TODO: handle unevaluated condition. + sleft = s2 + else: + # TODO: `s2 > s1` could be unevaluated. 
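+ # e.g. Interval(-2, 1)**2 has s1 = 4 and s2 = 1: the maximum comes from + # x.start, and since the interval straddles zero the even-power image is + # Interval(0, 4), which the sign-straddling branches below produce.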
+ left_open = x.right_open + right_open = x.left_open + sleft = s1 + + if x.start.is_positive: + return Interval( + Min(s1, s2), + Max(s1, s2), left_open, right_open) + elif x.end.is_negative: + return Interval( + Min(s1, s2), + Max(s1, s2), left_open, right_open) + + # Case where x.start < 0 and x.end > 0: + if exponent.is_odd: + if exponent.is_negative: + if x.start.is_zero: + return Interval(s2, oo, x.right_open) + if x.end.is_zero: + return Interval(-oo, s1, True, x.left_open) + return Union(Interval(-oo, s1, True, x.left_open), Interval(s2, oo, x.right_open)) + else: + return Interval(s1, s2, x.left_open, x.right_open) + elif exponent.is_even: + if exponent.is_negative: + if x.start.is_zero: + return Interval(s2, oo, x.right_open) + if x.end.is_zero: + return Interval(s1, oo, x.left_open) + return Interval(0, oo) + else: + return Interval(S.Zero, sleft, S.Zero not in x, left_open) + +@_set_pow.register(Interval, Infinity) +def _(b, e): + # TODO: add logic for open intervals? + if b.start.is_nonnegative: + if b.end < 1: + return FiniteSet(S.Zero) + if b.start > 1: + return FiniteSet(S.Infinity) + return Interval(0, oo) + elif b.end.is_negative: + if b.start > -1: + return FiniteSet(S.Zero) + if b.end < -1: + return FiniteSet(-oo, oo) + return Interval(-oo, oo) + else: + if b.start > -1: + if b.end < 1: + return FiniteSet(S.Zero) + return Interval(0, oo) + return Interval(-oo, oo) + +@_set_pow.register(Interval, NegativeInfinity) +def _(b, e): + return _set_pow(set_div(S.One, b), oo) diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/union.py b/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/union.py new file mode 100644 index 0000000000000000000000000000000000000000..75d867b49969ae2aeea76155dbaae7e05c1a6847 --- /dev/null +++ b/openflamingo/lib/python3.10/site-packages/sympy/sets/handlers/union.py @@ -0,0 +1,147 @@ +from sympy.core.singleton import S +from sympy.core.sympify import sympify +from sympy.functions.elementary.miscellaneous import Min, Max +from sympy.sets.sets import (EmptySet, FiniteSet, Intersection, + Interval, ProductSet, Set, Union, UniversalSet) +from sympy.sets.fancysets import (ComplexRegion, Naturals, Naturals0, + Integers, Rationals, Reals) +from sympy.multipledispatch import Dispatcher + + +union_sets = Dispatcher('union_sets') + + +@union_sets.register(Naturals0, Naturals) +def _(a, b): + return a + +@union_sets.register(Rationals, Naturals) +def _(a, b): + return a + +@union_sets.register(Rationals, Naturals0) +def _(a, b): + return a + +@union_sets.register(Reals, Naturals) +def _(a, b): + return a + +@union_sets.register(Reals, Naturals0) +def _(a, b): + return a + +@union_sets.register(Reals, Rationals) +def _(a, b): + return a + +@union_sets.register(Integers, Set) +def _(a, b): + intersect = Intersection(a, b) + if intersect == a: + return b + elif intersect == b: + return a + +@union_sets.register(ComplexRegion, Set) +def _(a, b): + if b.is_subset(S.Reals): + # treat a subset of reals as a complex region + b = ComplexRegion.from_real(b) + + if b.is_ComplexRegion: + # a in rectangular form + if (not a.polar) and (not b.polar): + return ComplexRegion(Union(a.sets, b.sets)) + # a in polar form + elif a.polar and b.polar: + return ComplexRegion(Union(a.sets, b.sets), polar=True) + return None + +@union_sets.register(EmptySet, Set) +def _(a, b): + return b + + +@union_sets.register(UniversalSet, Set) +def _(a, b): + return a + +@union_sets.register(ProductSet, ProductSet) +def _(a, b): + if b.is_subset(a): + return a + if 
len(b.sets) != len(a.sets): + return None + if len(a.sets) == 2: + a1, a2 = a.sets + b1, b2 = b.sets + if a1 == b1: + return a1 * Union(a2, b2) + if a2 == b2: + return Union(a1, b1) * a2 + return None + +@union_sets.register(ProductSet, Set) +def _(a, b): + if b.is_subset(a): + return a + return None + +@union_sets.register(Interval, Interval) +def _(a, b): + if a._is_comparable(b): + # Check for non-overlapping intervals + end = Min(a.end, b.end) + start = Max(a.start, b.start) + if (end < start or + (end == start and (end not in a and end not in b))): + return None + else: + start = Min(a.start, b.start) + end = Max(a.end, b.end) + + left_open = ((a.start != start or a.left_open) and + (b.start != start or b.left_open)) + right_open = ((a.end != end or a.right_open) and + (b.end != end or b.right_open)) + return Interval(start, end, left_open, right_open) + +@union_sets.register(Interval, UniversalSet) +def _(a, b): + return S.UniversalSet + +@union_sets.register(Interval, Set) +def _(a, b): + # If `a` has open endpoints that are contained in `b`, fill them in, + # but only when those endpoints are finite: an Interval never + # contains oo or -oo. + open_left_in_b_and_finite = (a.left_open and + sympify(b.contains(a.start)) is S.true and + a.start.is_finite) + open_right_in_b_and_finite = (a.right_open and + sympify(b.contains(a.end)) is S.true and + a.end.is_finite) + if open_left_in_b_and_finite or open_right_in_b_and_finite: + # Fill in my end points and return + open_left = a.left_open and a.start not in b + open_right = a.right_open and a.end not in b + new_a = Interval(a.start, a.end, open_left, open_right) + return {new_a, b} + return None + +@union_sets.register(FiniteSet, FiniteSet) +def _(a, b): + return FiniteSet(*(a._elements | b._elements)) + +@union_sets.register(FiniteSet, Set) +def _(a, b): + # If `b` contains any of `a`'s elements, remove them from `a` + if any(b.contains(x) == True for x in a): + return { + FiniteSet(*[x for x in a if b.contains(x) != True]), b} + return None + +@union_sets.register(Set, Set) +def _(a, b): + return None diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_setexpr.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_setexpr.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d842133434895237df3972ce217e3b5d61259075 Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_setexpr.cpython-310.pyc differ diff --git a/openflamingo/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_sets.cpython-310.pyc b/openflamingo/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_sets.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f63aa6e027341bb11d0d5b87690525aa5baa7889 Binary files /dev/null and b/openflamingo/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_sets.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/__init__.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c90bf08d3093b3f82dd4b6f3c7027a24cd0dbb12 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/__init__.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/_version.cpython-310.pyc
b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..24a3df8aa6793bb2d5e11bcde42fd82ed4a08f82 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/_version.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/archive.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/archive.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f52ec0b3fc8cdf6f04154df9f7aafc71eb6c4189 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/archive.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/asyn.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/asyn.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..174dccc6997b29aa05dde44df393f2fbce846a86 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/asyn.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/caching.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/caching.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..031e94a67fa09a1c4cbd7b437c131cbbf4e9e1c4 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/caching.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/callbacks.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/callbacks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4d850b7cbf8d66b3d2e861f3d57aa5116cbc67a Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/callbacks.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/compression.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/compression.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..debb6452e688c2f8caef0c82598a03c34c96baa5 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/compression.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/config.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70fee364f38ada7bf9e1fe012c8f497fe7d2e4ba Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/config.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/conftest.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/conftest.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ca979f7482fa2db5b08a36464148259be1cbdf0 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/conftest.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/core.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/core.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..096c784dca63771af149a92876225cad5e5a0655 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/core.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/dircache.cpython-310.pyc 
b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/dircache.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..216dd3893bb1b639894380590fea07a778e60df5 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/dircache.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/exceptions.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a5143144b6cf127f158cdc245fc431447318b42 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/exceptions.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/fuse.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/fuse.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b43a91a069540fc4a70aec47065f5eb2e026e70 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/fuse.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/generic.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/generic.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68336806846226b6b9757e52db5e2412da487e70 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/generic.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/gui.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/gui.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f064545368a6eec17e47e63f7db487ccd2292f8a Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/gui.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/json.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/json.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3608503c76a3153273b733a8db9c3eb89facb6c2 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/json.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/mapping.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/mapping.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..83b8665bd708fb7a529ea63cff64ae201fa6311f Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/mapping.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/parquet.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/parquet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..13be9894b2b93ab10de97fbe16f68bf4dc816bf3 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/parquet.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/registry.cpython-310.pyc b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/registry.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..325430f839be25951d8df5a8e77656195344ceb6 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/fsspec/__pycache__/registry.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/fsspec/__pycache__/spec.cpython-310.pyc 
diff --git a/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8ed2ad802ecaf021106c25c03112f29e75c7b2f8 --- /dev/null +++ b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py @@ -0,0 +1,289 @@ +import os +from hashlib import md5 + +import pytest + +from fsspec.implementations.local import LocalFileSystem +from fsspec.tests.abstract.copy import AbstractCopyTests # noqa: F401 +from fsspec.tests.abstract.get import AbstractGetTests # noqa: F401 +from fsspec.tests.abstract.open import AbstractOpenTests # noqa: F401 +from fsspec.tests.abstract.pipe import AbstractPipeTests # noqa: F401 +from fsspec.tests.abstract.put import AbstractPutTests # noqa: F401 + + +class BaseAbstractFixtures: + """ + Abstract base class containing fixtures that are used by, but never need to + be overridden in, derived filesystem-specific classes to run the abstract + tests on such filesystems.
+ """ + + @pytest.fixture + def fs_bulk_operations_scenario_0(self, fs, fs_join, fs_path): + """ + Scenario on remote filesystem that is used for many cp/get/put tests. + + Cleans up at the end of each test it which it is used. + """ + source = self._bulk_operations_scenario_0(fs, fs_join, fs_path) + yield source + fs.rm(source, recursive=True) + + @pytest.fixture + def fs_glob_edge_cases_files(self, fs, fs_join, fs_path): + """ + Scenario on remote filesystem that is used for glob edge cases cp/get/put tests. + + Cleans up at the end of each test it which it is used. + """ + source = self._glob_edge_cases_files(fs, fs_join, fs_path) + yield source + fs.rm(source, recursive=True) + + @pytest.fixture + def fs_dir_and_file_with_same_name_prefix(self, fs, fs_join, fs_path): + """ + Scenario on remote filesystem that is used to check cp/get/put on directory + and file with the same name prefixes. + + Cleans up at the end of each test it which it is used. + """ + source = self._dir_and_file_with_same_name_prefix(fs, fs_join, fs_path) + yield source + fs.rm(source, recursive=True) + + @pytest.fixture + def fs_10_files_with_hashed_names(self, fs, fs_join, fs_path): + """ + Scenario on remote filesystem that is used to check cp/get/put files order + when source and destination are lists. + + Cleans up at the end of each test it which it is used. + """ + source = self._10_files_with_hashed_names(fs, fs_join, fs_path) + yield source + fs.rm(source, recursive=True) + + @pytest.fixture + def fs_target(self, fs, fs_join, fs_path): + """ + Return name of remote directory that does not yet exist to copy into. + + Cleans up at the end of each test it which it is used. + """ + target = fs_join(fs_path, "target") + yield target + if fs.exists(target): + fs.rm(target, recursive=True) + + @pytest.fixture + def local_bulk_operations_scenario_0(self, local_fs, local_join, local_path): + """ + Scenario on local filesystem that is used for many cp/get/put tests. + + Cleans up at the end of each test it which it is used. + """ + source = self._bulk_operations_scenario_0(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_glob_edge_cases_files(self, local_fs, local_join, local_path): + """ + Scenario on local filesystem that is used for glob edge cases cp/get/put tests. + + Cleans up at the end of each test it which it is used. + """ + source = self._glob_edge_cases_files(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_dir_and_file_with_same_name_prefix( + self, local_fs, local_join, local_path + ): + """ + Scenario on local filesystem that is used to check cp/get/put on directory + and file with the same name prefixes. + + Cleans up at the end of each test it which it is used. + """ + source = self._dir_and_file_with_same_name_prefix( + local_fs, local_join, local_path + ) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_10_files_with_hashed_names(self, local_fs, local_join, local_path): + """ + Scenario on local filesystem that is used to check cp/get/put files order + when source and destination are lists. + + Cleans up at the end of each test it which it is used. 
+ """ + source = self._10_files_with_hashed_names(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_target(self, local_fs, local_join, local_path): + """ + Return name of local directory that does not yet exist to copy into. + + Cleans up at the end of each test it which it is used. + """ + target = local_join(local_path, "target") + yield target + if local_fs.exists(target): + local_fs.rm(target, recursive=True) + + def _glob_edge_cases_files(self, some_fs, some_join, some_path): + """ + Scenario that is used for glob edge cases cp/get/put tests. + Creates the following directory and file structure: + + 📁 source + ├── 📄 file1 + ├── 📄 file2 + ├── 📁 subdir0 + │ ├── 📄 subfile1 + │ ├── 📄 subfile2 + │ └── 📁 nesteddir + │ └── 📄 nestedfile + └── 📁 subdir1 + ├── 📄 subfile1 + ├── 📄 subfile2 + └── 📁 nesteddir + └── 📄 nestedfile + """ + source = some_join(some_path, "source") + some_fs.touch(some_join(source, "file1")) + some_fs.touch(some_join(source, "file2")) + + for subdir_idx in range(2): + subdir = some_join(source, f"subdir{subdir_idx}") + nesteddir = some_join(subdir, "nesteddir") + some_fs.makedirs(nesteddir) + some_fs.touch(some_join(subdir, "subfile1")) + some_fs.touch(some_join(subdir, "subfile2")) + some_fs.touch(some_join(nesteddir, "nestedfile")) + + return source + + def _bulk_operations_scenario_0(self, some_fs, some_join, some_path): + """ + Scenario that is used for many cp/get/put tests. Creates the following + directory and file structure: + + 📁 source + ├── 📄 file1 + ├── 📄 file2 + └── 📁 subdir + ├── 📄 subfile1 + ├── 📄 subfile2 + └── 📁 nesteddir + └── 📄 nestedfile + """ + source = some_join(some_path, "source") + subdir = some_join(source, "subdir") + nesteddir = some_join(subdir, "nesteddir") + some_fs.makedirs(nesteddir) + some_fs.touch(some_join(source, "file1")) + some_fs.touch(some_join(source, "file2")) + some_fs.touch(some_join(subdir, "subfile1")) + some_fs.touch(some_join(subdir, "subfile2")) + some_fs.touch(some_join(nesteddir, "nestedfile")) + return source + + def _dir_and_file_with_same_name_prefix(self, some_fs, some_join, some_path): + """ + Scenario that is used to check cp/get/put on directory and file with + the same name prefixes. Creates the following directory and file structure: + + 📁 source + ├── 📄 subdir.txt + └── 📁 subdir + └── 📄 subfile.txt + """ + source = some_join(some_path, "source") + subdir = some_join(source, "subdir") + file = some_join(source, "subdir.txt") + subfile = some_join(subdir, "subfile.txt") + some_fs.makedirs(subdir) + some_fs.touch(file) + some_fs.touch(subfile) + return source + + def _10_files_with_hashed_names(self, some_fs, some_join, some_path): + """ + Scenario that is used to check cp/get/put files order when source and + destination are lists. Creates the following directory and file structure: + + 📁 source + └── 📄 {hashed([0-9])}.txt + """ + source = some_join(some_path, "source") + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + path = some_join(source, f"{hashed_i}.txt") + some_fs.pipe(path=path, value=f"{i}".encode()) + return source + + +class AbstractFixtures(BaseAbstractFixtures): + """ + Abstract base class containing fixtures that may be overridden in derived + filesystem-specific classes to run the abstract tests on such filesystems. 
+ + For any particular filesystem some of these fixtures must be overridden, + such as ``fs`` and ``fs_path``, and others may be overridden if the + default functions here are not appropriate, such as ``fs_join``. + """ + + @pytest.fixture + def fs(self): + raise NotImplementedError("This function must be overridden in derived classes") + + @pytest.fixture + def fs_join(self): + """ + Return a function that joins its arguments together into a path. + + Most fsspec implementations join paths in a platform-dependent way, + but some will override this to always use a forward slash. + """ + return os.path.join + + @pytest.fixture + def fs_path(self): + raise NotImplementedError("This function must be overridden in derived classes") + + @pytest.fixture(scope="class") + def local_fs(self): + # Maybe need an option for auto_mkdir=False? This is only relevant + # for certain implementations. + return LocalFileSystem(auto_mkdir=True) + + @pytest.fixture + def local_join(self): + """ + Return a function that joins its arguments together into a path, on + the local filesystem. + """ + return os.path.join + + @pytest.fixture + def local_path(self, tmpdir): + return tmpdir + + @pytest.fixture + def supports_empty_directories(self): + """ + Return whether this implementation supports empty directories. + """ + return True + + @pytest.fixture + def fs_sanitize_path(self): + return lambda x: x
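The `AbstractFixtures` contract above is the hook for concrete backends: override the two fixtures that raise `NotImplementedError` (`fs`, `fs_path`), plus any defaults that do not fit, then mix the fixture class into the abstract test classes. A minimal sketch of such a wiring, assuming fsspec's in-memory backend (the class names here are illustrative and not part of this diff):

```python
import pytest

import fsspec.tests.abstract as abstract
from fsspec.implementations.memory import MemoryFileSystem


class MemoryFixtures(abstract.AbstractFixtures):
    @pytest.fixture
    def fs(self):
        # `fs` and `fs_path` are the minimum a concrete backend must supply.
        return MemoryFileSystem()

    @pytest.fixture
    def fs_path(self):
        return ""

    @pytest.fixture
    def fs_join(self):
        # MemoryFileSystem uses forward slashes on every platform, so the
        # platform-dependent os.path.join default is overridden here.
        return lambda *args: "/".join(args)


# Mixing the fixtures into a per-suite class makes pytest collect the whole
# abstract suite against the chosen backend.
class TestMemoryCopy(abstract.AbstractCopyTests, MemoryFixtures):
    pass
```

With that in place, each `AbstractCopyTests`/`AbstractGetTests`/`AbstractPutTests` method below runs unchanged against `MemoryFileSystem`.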
diff --git a/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/common.py b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/common.py new file mode 100644 index 0000000000000000000000000000000000000000..22e7c4140404ab2a8928689721419cf05c2760b9 --- /dev/null +++ b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/common.py @@ -0,0 +1,175 @@ +GLOB_EDGE_CASES_TESTS = { + "argnames": ("path", "recursive", "maxdepth", "expected"), + "argvalues": [ + ("fil?1", False, None, ["file1"]), + ("fil?1", True, None, ["file1"]), + ("file[1-2]", False, None, ["file1", "file2"]), + ("file[1-2]", True, None, ["file1", "file2"]), + ("*", False, None, ["file1", "file2"]), + ( + "*", + True, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("*", True, 1, ["file1", "file2"]), + ( + "*", + True, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ("*1", False, None, ["file1"]), + ( + "*1", + True, + None, + [ + "file1", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("*1", True, 2, ["file1", "subdir1/subfile1", "subdir1/subfile2"]), + ( + "**", + False, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "**", + True, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("**", True, 1, ["file1", "file2"]), + ( + "**", + True, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "**", + False, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ("**/*1", False, None, ["file1", "subdir0/subfile1", "subdir1/subfile1"]), + ( + "**/*1", + True, + None, + [
+ "file1", + "subdir0/subfile1", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("**/*1", True, 1, ["file1"]), + ( + "**/*1", + True, + 2, + ["file1", "subdir0/subfile1", "subdir1/subfile1", "subdir1/subfile2"], + ), + ("**/*1", False, 2, ["file1", "subdir0/subfile1", "subdir1/subfile1"]), + ("**/subdir0", False, None, []), + ("**/subdir0", True, None, ["subfile1", "subfile2", "nesteddir/nestedfile"]), + ("**/subdir0/nested*", False, 2, []), + ("**/subdir0/nested*", True, 2, ["nestedfile"]), + ("subdir[1-2]", False, None, []), + ("subdir[1-2]", True, None, ["subfile1", "subfile2", "nesteddir/nestedfile"]), + ("subdir[1-2]", True, 2, ["subfile1", "subfile2"]), + ("subdir[0-1]", False, None, []), + ( + "subdir[0-1]", + True, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "subdir[0-1]/*fil[e]*", + False, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ( + "subdir[0-1]/*fil[e]*", + True, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ], +} diff --git a/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py new file mode 100644 index 0000000000000000000000000000000000000000..e39e57e5f7d52bfda8ab5e2398b04cc2303630a0 --- /dev/null +++ b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py @@ -0,0 +1,557 @@ +from hashlib import md5 +from itertools import product + +import pytest + +from fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractCopyTests: + def test_copy_file_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1a + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + target_file2 = fs_join(target, "file2") + target_subfile1 = fs_join(target, "subfile1") + + # Copy from source directory + fs.cp(fs_join(source, "file2"), target) + assert fs.isfile(target_file2) + + # Copy from sub directory + fs.cp(fs_join(source, "subdir", "subfile1"), target) + assert fs.isfile(target_subfile1) + + # Remove copied files + fs.rm([target_file2, target_subfile1]) + assert not fs.exists(target_file2) + assert not fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.cp(fs_join(source, "file2"), target + "/") + assert fs.isdir(target) + assert fs.isfile(target_file2) + + fs.cp(fs_join(source, "subdir", "subfile1"), target + "/") + assert fs.isfile(target_subfile1) + + def test_copy_file_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 1b + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.cp( + fs_join(source, "subdir", "subfile1"), fs_join(target, "newdir/") + ) # Note trailing slash + assert fs.isdir(target) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_copy_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1c + source = 
fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + fs.cp(fs_join(source, "subdir", "subfile1"), fs_join(target, "newfile")) + assert fs.isfile(fs_join(target, "newfile")) + + def test_copy_file_to_file_in_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 1d + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.cp( + fs_join(source, "subdir", "subfile1"), fs_join(target, "newdir", "newfile") + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "newfile")) + + def test_copy_directory_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1e + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.cp(s, t) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + fs.cp(s, t, recursive=True) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert fs.isdir(fs_join(target, "subdir", "nesteddir")) + assert fs.isfile(fs_join(target, "subdir", "nesteddir", "nestedfile")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.cp(s, t, recursive=True, maxdepth=1) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert not fs.exists(fs_join(target, "subdir", "nesteddir")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_directory_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1f + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = 
fs_join(source, "subdir") + if source_slash: + s += "/" + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.cp(s, t) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + with pytest.raises(FileNotFoundError): + fs.ls(target) + + # With recursive + fs.cp(s, t, recursive=True) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.cp(s, t, recursive=True, maxdepth=1) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + def test_copy_glob_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1g + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.cp(fs_join(source, "subdir", "*"), t) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.isdir(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.cp(fs_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.cp( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_glob_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy 
scenario 1h + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive + fs.cp(fs_join(source, "subdir", "*"), t) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.cp(fs_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.cp( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + @pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def test_copy_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_glob_edge_cases_files, + fs_target, + fs_sanitize_path, + ): + # Copy scenario 1g + source = fs_glob_edge_cases_files + + target = fs_target + + for new_dir, target_slash in product([True, False], [True, False]): + fs.mkdir(target) + + t = fs_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.copy(fs_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = fs.find(target) + if new_dir: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_copy_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 2a + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, 
True]: + t = target + "/" if target_slash else target + + fs.cp(source_files, t) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, "file2")) + assert fs.isfile(fs_join(target, "subfile1")) + + fs.rm( + [ + fs_join(target, "file1"), + fs_join(target, "file2"), + fs_join(target, "subfile1"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_list_of_files_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 2b + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + fs.cp(source_files, fs_join(target, "newdir") + "/") # Note trailing slash + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "file1")) + assert fs.isfile(fs_join(target, "newdir", "file2")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_copy_two_files_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # This is a duplicate of test_copy_list_of_files_to_new_directory and + # can eventually be removed. + source = fs_bulk_operations_scenario_0 + + target = fs_target + assert not fs.exists(target) + fs.cp([fs_join(source, "file1"), fs_join(source, "file2")], target) + + assert fs.isdir(target) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, "file2")) + + def test_copy_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + fs_target, + fs_dir_and_file_with_same_name_prefix, + supports_empty_directories, + ): + # Create the test dirs + source = fs_dir_and_file_with_same_name_prefix + target = fs_target + + # Test without glob + fs.cp(fs_join(source, "subdir"), target, recursive=True) + + assert fs.isfile(fs_join(target, "subfile.txt")) + assert not fs.isfile(fs_join(target, "subdir.txt")) + + fs.rm([fs_join(target, "subfile.txt")]) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + assert not fs.exists(target) + + # Test with glob + fs.cp(fs_join(source, "subdir*"), target, recursive=True) + + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile.txt")) + assert fs.isfile(fs_join(target, "subdir.txt")) + + def test_copy_with_source_and_destination_as_list( + self, fs, fs_target, fs_join, fs_10_files_with_hashed_names + ): + # Create the test dir + source = fs_10_files_with_hashed_names + target = fs_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(fs_join(source, f"{hashed_i}.txt")) + destination_files.append(fs_join(target, f"{hashed_i}.txt")) + + # Copy and assert order was kept + fs.copy(path1=source_files, path2=destination_files) + + for i in range(10): + file_content = fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/get.py b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/get.py new file mode 100644 index 0000000000000000000000000000000000000000..851ab81ee581e74cac41c64c83ef0af75826d6b0 --- /dev/null +++ b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/get.py @@ -0,0 +1,587 @@ +from hashlib import md5 +from itertools import product + +import 
pytest + +from fsspec.implementations.local import make_path_posix +from fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractGetTests: + def test_get_file_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1a + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + assert local_fs.isdir(target) + + target_file2 = local_join(target, "file2") + target_subfile1 = local_join(target, "subfile1") + + # Copy from source directory + fs.get(fs_join(source, "file2"), target) + assert local_fs.isfile(target_file2) + + # Copy from sub directory + fs.get(fs_join(source, "subdir", "subfile1"), target) + assert local_fs.isfile(target_subfile1) + + # Remove copied files + local_fs.rm([target_file2, target_subfile1]) + assert not local_fs.exists(target_file2) + assert not local_fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.get(fs_join(source, "file2"), target + "/") + assert local_fs.isdir(target) + assert local_fs.isfile(target_file2) + + fs.get(fs_join(source, "subdir", "subfile1"), target + "/") + assert local_fs.isfile(target_subfile1) + + def test_get_file_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1b + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get( + fs_join(source, "subdir", "subfile1"), local_join(target, "newdir/") + ) # Note trailing slash + + assert local_fs.isdir(target) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + + def test_get_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1c + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get(fs_join(source, "subdir", "subfile1"), local_join(target, "newfile")) + assert local_fs.isfile(local_join(target, "newfile")) + + def test_get_file_to_file_in_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1d + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get( + fs_join(source, "subdir", "subfile1"), + local_join(target, "newdir", "newfile"), + ) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "newfile")) + + def test_get_directory_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1e + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + assert local_fs.isdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.get(s, t) + assert local_fs.ls(target) == [] + + # With recursive + fs.get(s, t, recursive=True) + if source_slash: + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert local_fs.isdir(local_join(target, "nesteddir")) + assert local_fs.isfile(local_join(target, "nesteddir", "nestedfile")) + assert not 
local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + local_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", "subfile1")) + assert local_fs.isfile(local_join(target, "subdir", "subfile2")) + assert local_fs.isdir(local_join(target, "subdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "subdir", "nesteddir", "nestedfile") + ) + + local_fs.rm(local_join(target, "subdir"), recursive=True) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get(s, t, recursive=True, maxdepth=1) + if source_slash: + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.exists(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", "subfile1")) + assert local_fs.isfile(local_join(target, "subdir", "subfile2")) + assert not local_fs.exists(local_join(target, "subdir", "nesteddir")) + + local_fs.rm(local_join(target, "subdir"), recursive=True) + assert local_fs.ls(target) == [] + + def test_get_directory_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1f + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = local_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.get(s, t) + assert local_fs.ls(target) == [] + + # With recursive + fs.get(s, t, recursive=True) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert local_fs.isdir(local_join(target, "newdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get(s, t, recursive=True, maxdepth=1) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + def test_get_glob_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1g + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.get(fs_join(source, "subdir", "*"), t) + assert local_fs.isfile(local_join(target, "subfile1")) + 
assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.isdir(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "nesteddir", "nestedfile")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.get(fs_join(source, "subdir", glob), t, recursive=recursive) + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert local_fs.isdir(local_join(target, "nesteddir")) + assert local_fs.isfile(local_join(target, "nesteddir", "nestedfile")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + local_join(target, "nesteddir"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.exists(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + def test_get_glob_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1h + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive + fs.get(fs_join(source, "subdir", "*"), t) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert local_fs.ls(target) == [] + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.get(fs_join(source, "subdir", glob), t, recursive=recursive) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert local_fs.isdir(local_join(target, "newdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.get( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert 
local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_fs.ls(target, detail=False), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + @pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def test_get_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_glob_edge_cases_files, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1g + source = fs_glob_edge_cases_files + + target = local_target + + for new_dir, target_slash in product([True, False], [True, False]): + local_fs.mkdir(target) + + t = local_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.get(fs_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = local_fs.find(target) + if new_dir: + prefixed_expected = [ + make_path_posix(local_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + make_path_posix(local_join(target, p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + local_fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_get_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 2a + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + fs.get(source_files, t) + assert local_fs.isfile(local_join(target, "file1")) + assert local_fs.isfile(local_join(target, "file2")) + assert local_fs.isfile(local_join(target, "subfile1")) + + local_fs.rm( + [ + local_join(target, "file1"), + local_join(target, "file2"), + local_join(target, "subfile1"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + def test_get_list_of_files_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 2b + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + fs.get(source_files, local_join(target, "newdir") + "/") # Note trailing slash + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "file1")) + assert local_fs.isfile(local_join(target, "newdir", "file2")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + + def test_get_directory_recursive( + self, fs, fs_join, fs_path, local_fs, local_join, local_target + ): + # https://github.com/fsspec/filesystem_spec/issues/1062 + # Recursive cp/get/put of source directory into non-existent target directory. 
+ src = fs_join(fs_path, "src") + src_file = fs_join(src, "file") + fs.mkdir(src) + fs.touch(src_file) + + target = local_target + + # get without slash + assert not local_fs.exists(target) + for loop in range(2): + fs.get(src, target, recursive=True) + assert local_fs.isdir(target) + + if loop == 0: + assert local_fs.isfile(local_join(target, "file")) + assert not local_fs.exists(local_join(target, "src")) + else: + assert local_fs.isfile(local_join(target, "file")) + assert local_fs.isdir(local_join(target, "src")) + assert local_fs.isfile(local_join(target, "src", "file")) + + local_fs.rm(target, recursive=True) + + # get with slash + assert not local_fs.exists(target) + for loop in range(2): + fs.get(src + "/", target, recursive=True) + assert local_fs.isdir(target) + assert local_fs.isfile(local_join(target, "file")) + assert not local_fs.exists(local_join(target, "src")) + + def test_get_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + local_fs, + local_join, + local_target, + fs_dir_and_file_with_same_name_prefix, + ): + # Create the test dirs + source = fs_dir_and_file_with_same_name_prefix + target = local_target + + # Test without glob + fs.get(fs_join(source, "subdir"), target, recursive=True) + + assert local_fs.isfile(local_join(target, "subfile.txt")) + assert not local_fs.isfile(local_join(target, "subdir.txt")) + + local_fs.rm([local_join(target, "subfile.txt")]) + assert local_fs.ls(target) == [] + + # Test with glob + fs.get(fs_join(source, "subdir*"), target, recursive=True) + + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", "subfile.txt")) + assert local_fs.isfile(local_join(target, "subdir.txt")) + + def test_get_with_source_and_destination_as_list( + self, + fs, + fs_join, + local_fs, + local_join, + local_target, + fs_10_files_with_hashed_names, + ): + # Create the test dir + source = fs_10_files_with_hashed_names + target = local_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(fs_join(source, f"{hashed_i}.txt")) + destination_files.append( + make_path_posix(local_join(target, f"{hashed_i}.txt")) + ) + + # Copy and assert order was kept + fs.get(rpath=source_files, lpath=destination_files) + + for i in range(10): + file_content = local_fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py new file mode 100644 index 0000000000000000000000000000000000000000..39f6caa3de815e024fa84de2acecc986c823ed29 --- /dev/null +++ b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py @@ -0,0 +1,57 @@ +import os + +import pytest + +import fsspec + + +def test_move_raises_error_with_tmpdir(tmpdir): + # Create a file in the temporary directory + source = tmpdir.join("source_file.txt") + source.write("content") + + # Define a destination that simulates a protected or invalid path + destination = tmpdir.join("non_existent_directory/destination_file.txt") + + # Instantiate the filesystem (assuming the local file system interface) + fs = fsspec.filesystem("file") + + # Use the actual file paths as string + with pytest.raises(FileNotFoundError): + fs.mv(str(source), str(destination)) + + +@pytest.mark.parametrize("recursive", (True, False)) +def 
test_move_raises_error_with_tmpdir_permission(recursive, tmpdir): + # Create a file in the temporary directory + source = tmpdir.join("source_file.txt") + source.write("content") + + # Create a protected directory (non-writable) + protected_dir = tmpdir.mkdir("protected_directory") + protected_path = str(protected_dir) + + # Set the directory to read-only + if os.name == "nt": + os.system(f'icacls "{protected_path}" /deny Everyone:(W)') + else: + os.chmod(protected_path, 0o555) # Sets the directory to read-only + + # Define a destination inside the protected directory + destination = protected_dir.join("destination_file.txt") + + # Instantiate the filesystem (assuming the local file system interface) + fs = fsspec.filesystem("file") + + # Try to move the file to the read-only directory, expecting a permission error + with pytest.raises(PermissionError): + fs.mv(str(source), str(destination), recursive=recursive) + + # Assert the file was not created in the destination + assert not os.path.exists(destination) + + # Cleanup: Restore permissions so the directory can be cleaned up + if os.name == "nt": + os.system(f'icacls "{protected_path}" /remove:d Everyone') + else: + os.chmod(protected_path, 0o755) # Restore write permission for cleanup diff --git a/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/open.py b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/open.py new file mode 100644 index 0000000000000000000000000000000000000000..bb75ea852276fb8d834345883813b8e27a0ae24c --- /dev/null +++ b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/open.py @@ -0,0 +1,11 @@ +import pytest + + +class AbstractOpenTests: + def test_open_exclusive(self, fs, fs_target): + with fs.open(fs_target, "wb") as f: + f.write(b"data") + with fs.open(fs_target, "rb") as f: + assert f.read() == b"data" + with pytest.raises(FileExistsError): + fs.open(fs_target, "xb") diff --git a/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py new file mode 100644 index 0000000000000000000000000000000000000000..8ecca96e9d23ff268a253c48269d5cca451ea270 --- /dev/null +++ b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py @@ -0,0 +1,11 @@ +import pytest + + +class AbstractPipeTests: + def test_pipe_exclusive(self, fs, fs_target): + fs.pipe_file(fs_target, b"data") + assert fs.cat_file(fs_target) == b"data" + with pytest.raises(FileExistsError): + fs.pipe_file(fs_target, b"data", mode="create") + fs.pipe_file(fs_target, b"new data", mode="overwrite") + assert fs.cat_file(fs_target) == b"new data" diff --git a/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/put.py b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/put.py new file mode 100644 index 0000000000000000000000000000000000000000..9fc349977f0384d9fc86126498be5c6ad99a21d3 --- /dev/null +++ b/phi4/lib/python3.10/site-packages/fsspec/tests/abstract/put.py @@ -0,0 +1,591 @@ +from hashlib import md5 +from itertools import product + +import pytest + +from fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractPutTests: + def test_put_file_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1a + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert 
fs.isdir(target) + + target_file2 = fs_join(target, "file2") + target_subfile1 = fs_join(target, "subfile1") + + # Copy from source directory + fs.put(local_join(source, "file2"), target) + assert fs.isfile(target_file2) + + # Copy from sub directory + fs.put(local_join(source, "subdir", "subfile1"), target) + assert fs.isfile(target_subfile1) + + # Remove copied files + fs.rm([target_file2, target_subfile1]) + assert not fs.exists(target_file2) + assert not fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.put(local_join(source, "file2"), target + "/") + assert fs.isdir(target) + assert fs.isfile(target_file2) + + fs.put(local_join(source, "subdir", "subfile1"), target + "/") + assert fs.isfile(target_subfile1) + + def test_put_file_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1b + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.put( + local_join(source, "subdir", "subfile1"), fs_join(target, "newdir/") + ) # Note trailing slash + assert fs.isdir(target) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_put_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + supports_empty_directories, + local_bulk_operations_scenario_0, + ): + # Copy scenario 1c + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + fs.put(local_join(source, "subdir", "subfile1"), fs_join(target, "newfile")) + assert fs.isfile(fs_join(target, "newfile")) + + def test_put_file_to_file_in_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1d + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.put( + local_join(source, "subdir", "subfile1"), + fs_join(target, "newdir", "newfile"), + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "newfile")) + + def test_put_directory_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1e + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.put(s, t) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + fs.put(s, t, recursive=True) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", 
"subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert fs.isdir(fs_join(target, "subdir", "nesteddir")) + assert fs.isfile(fs_join(target, "subdir", "nesteddir", "nestedfile")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.put(s, t, recursive=True, maxdepth=1) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert not fs.exists(fs_join(target, "subdir", "nesteddir")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_directory_to_new_directory( + self, + fs, + fs_join, + fs_target, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1f + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.put(s, t) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + with pytest.raises(FileNotFoundError): + fs.ls(target) + + # With recursive + fs.put(s, t, recursive=True) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.put(s, t, recursive=True, maxdepth=1) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + def test_put_glob_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + supports_empty_directories, + local_bulk_operations_scenario_0, + ): + # Copy scenario 1g + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.put(local_join(source, "subdir", "*"), t) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.isdir(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "nesteddir", "nestedfile")) + assert not 
fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.put(local_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.put( + local_join(source, "subdir", glob), + t, + recursive=recursive, + maxdepth=1, + ) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_glob_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1h + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive + fs.put(local_join(source, "subdir", "*"), t) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.put(local_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.put( + local_join(source, "subdir", glob), + t, + recursive=recursive, + maxdepth=1, + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + 
@pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def test_put_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_target, + local_glob_edge_cases_files, + local_join, + fs_sanitize_path, + ): + # Copy scenario 1g + source = local_glob_edge_cases_files + + target = fs_target + + for new_dir, target_slash in product([True, False], [True, False]): + fs.mkdir(target) + + t = fs_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.put(local_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = fs.find(target) + if new_dir: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_put_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 2a + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + source_files = [ + local_join(source, "file1"), + local_join(source, "file2"), + local_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + fs.put(source_files, t) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, "file2")) + assert fs.isfile(fs_join(target, "subfile1")) + + fs.rm( + [ + fs_join(target, "file1"), + fs_join(target, "file2"), + fs_join(target, "subfile1"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_list_of_files_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 2b + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + source_files = [ + local_join(source, "file1"), + local_join(source, "file2"), + local_join(source, "subdir", "subfile1"), + ] + + fs.put(source_files, fs_join(target, "newdir") + "/") # Note trailing slash + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "file1")) + assert fs.isfile(fs_join(target, "newdir", "file2")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_put_directory_recursive( + self, fs, fs_join, fs_target, local_fs, local_join, local_path + ): + # https://github.com/fsspec/filesystem_spec/issues/1062 + # Recursive cp/get/put of source directory into non-existent target directory. 
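        # First pass: target does not exist, so the contents of src land
        # directly in target. Second pass: target now exists, so src itself
        # is copied inside it. With a trailing slash on src, only the
        # contents are copied on both passes.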
+ src = local_join(local_path, "src") + src_file = local_join(src, "file") + local_fs.mkdir(src) + local_fs.touch(src_file) + + target = fs_target + + # put without slash + assert not fs.exists(target) + for loop in range(2): + fs.put(src, target, recursive=True) + assert fs.isdir(target) + + if loop == 0: + assert fs.isfile(fs_join(target, "file")) + assert not fs.exists(fs_join(target, "src")) + else: + assert fs.isfile(fs_join(target, "file")) + assert fs.isdir(fs_join(target, "src")) + assert fs.isfile(fs_join(target, "src", "file")) + + fs.rm(target, recursive=True) + + # put with slash + assert not fs.exists(target) + for loop in range(2): + fs.put(src + "/", target, recursive=True) + assert fs.isdir(target) + assert fs.isfile(fs_join(target, "file")) + assert not fs.exists(fs_join(target, "src")) + + def test_put_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + fs_target, + local_join, + local_dir_and_file_with_same_name_prefix, + supports_empty_directories, + ): + # Create the test dirs + source = local_dir_and_file_with_same_name_prefix + target = fs_target + + # Test without glob + fs.put(local_join(source, "subdir"), fs_target, recursive=True) + + assert fs.isfile(fs_join(fs_target, "subfile.txt")) + assert not fs.isfile(fs_join(fs_target, "subdir.txt")) + + fs.rm([fs_join(target, "subfile.txt")]) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + assert not fs.exists(target) + + # Test with glob + fs.put(local_join(source, "subdir*"), fs_target, recursive=True) + + assert fs.isdir(fs_join(fs_target, "subdir")) + assert fs.isfile(fs_join(fs_target, "subdir", "subfile.txt")) + assert fs.isfile(fs_join(fs_target, "subdir.txt")) + + def test_copy_with_source_and_destination_as_list( + self, fs, fs_target, fs_join, local_join, local_10_files_with_hashed_names + ): + # Create the test dir + source = local_10_files_with_hashed_names + target = fs_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(local_join(source, f"{hashed_i}.txt")) + destination_files.append(fs_join(target, f"{hashed_i}.txt")) + + # Copy and assert order was kept + fs.put(lpath=source_files, rpath=destination_files) + + for i in range(10): + file_content = fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/phi4/lib/python3.10/site-packages/packaging/__init__.py b/phi4/lib/python3.10/site-packages/packaging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d79f73c574ffc759ef5d2145b1ec742d85c2500b --- /dev/null +++ b/phi4/lib/python3.10/site-packages/packaging/__init__.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "24.2" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = f"2014 {__author__}" diff --git a/phi4/lib/python3.10/site-packages/packaging/__pycache__/_musllinux.cpython-310.pyc b/phi4/lib/python3.10/site-packages/packaging/__pycache__/_musllinux.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f153c35e31d1a2e91d9db7b8cbd9e8f444f0ac8a Binary files /dev/null and b/phi4/lib/python3.10/site-packages/packaging/__pycache__/_musllinux.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/packaging/__pycache__/_parser.cpython-310.pyc b/phi4/lib/python3.10/site-packages/packaging/__pycache__/_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c2fa585bcf56cf298d3c4b06162dba9c77663f40 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/packaging/__pycache__/_parser.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/packaging/__pycache__/_structures.cpython-310.pyc b/phi4/lib/python3.10/site-packages/packaging/__pycache__/_structures.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3331ec060e894f63e00e51a51339d4f5313509c2 Binary files /dev/null and b/phi4/lib/python3.10/site-packages/packaging/__pycache__/_structures.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/packaging/__pycache__/_tokenizer.cpython-310.pyc b/phi4/lib/python3.10/site-packages/packaging/__pycache__/_tokenizer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6319e66230e02d6bd1e4191863b808d4a64c361a Binary files /dev/null and b/phi4/lib/python3.10/site-packages/packaging/__pycache__/_tokenizer.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/packaging/__pycache__/requirements.cpython-310.pyc b/phi4/lib/python3.10/site-packages/packaging/__pycache__/requirements.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53aad2df93ae51d0baa02d7cebe66f9dbc05942a Binary files /dev/null and b/phi4/lib/python3.10/site-packages/packaging/__pycache__/requirements.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/packaging/__pycache__/specifiers.cpython-310.pyc b/phi4/lib/python3.10/site-packages/packaging/__pycache__/specifiers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b96b7ebeae527df5edac89a188361bb2dab8a2f Binary files /dev/null and b/phi4/lib/python3.10/site-packages/packaging/__pycache__/specifiers.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/packaging/__pycache__/version.cpython-310.pyc b/phi4/lib/python3.10/site-packages/packaging/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ceebd827d16436e5293ca9edfc964992f53822fd Binary files /dev/null and b/phi4/lib/python3.10/site-packages/packaging/__pycache__/version.cpython-310.pyc differ diff --git a/phi4/lib/python3.10/site-packages/packaging/_elffile.py b/phi4/lib/python3.10/site-packages/packaging/_elffile.py new file mode 100644 index 0000000000000000000000000000000000000000..25f4282cc29cb03d7be881f03dee841d7dbc215a --- /dev/null +++ b/phi4/lib/python3.10/site-packages/packaging/_elffile.py @@ -0,0 +1,110 @@ +""" +ELF file parser. 
+
+This provides a class ``ELFFile`` that parses an ELF executable in a similar
+interface to ``ZipFile``. Only the read interface is implemented.
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+from __future__ import annotations
+
+import enum
+import os
+import struct
+from typing import IO
+
+
+class ELFInvalid(ValueError):
+    pass
+
+
+class EIClass(enum.IntEnum):
+    C32 = 1
+    C64 = 2
+
+
+class EIData(enum.IntEnum):
+    Lsb = 1
+    Msb = 2
+
+
+class EMachine(enum.IntEnum):
+    I386 = 3
+    S390 = 22
+    Arm = 40
+    X8664 = 62
+    AArc64 = 183
+
+
+class ELFFile:
+    """
+    Representation of an ELF executable.
+    """
+
+    def __init__(self, f: IO[bytes]) -> None:
+        self._f = f
+
+        try:
+            ident = self._read("16B")
+        except struct.error as e:
+            raise ELFInvalid("unable to parse identification") from e
+        magic = bytes(ident[:4])
+        if magic != b"\x7fELF":
+            raise ELFInvalid(f"invalid magic: {magic!r}")
+
+        self.capacity = ident[4]  # Format for program header (bitness).
+        self.encoding = ident[5]  # Data structure encoding (endianness).
+
+        try:
+            # e_fmt: Format for program header.
+            # p_fmt: Format for section header.
+            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+            e_fmt, self._p_fmt, self._p_idx = {
+                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
+                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
+                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
+                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
+            }[(self.capacity, self.encoding)]
+        except KeyError as e:
+            raise ELFInvalid(
+                f"unrecognized capacity ({self.capacity}) or "
+                f"encoding ({self.encoding})"
+            ) from e
+
+        try:
+            (
+                _,
+                self.machine,  # Architecture type.
+                _,
+                _,
+                self._e_phoff,  # Offset of program header.
+                _,
+                self.flags,  # Processor-specific flags.
+                _,
+                self._e_phentsize,  # Size of section.
+                self._e_phnum,  # Number of sections.
+            ) = self._read(e_fmt)
+        except struct.error as e:
+            raise ELFInvalid("unable to parse machine and section information") from e
+
+    def _read(self, fmt: str) -> tuple[int, ...]:
+        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+    @property
+    def interpreter(self) -> str | None:
+        """
+        The path recorded in the ``PT_INTERP`` section header.
+        """
+        for index in range(self._e_phnum):
+            self._f.seek(self._e_phoff + self._e_phentsize * index)
+            try:
+                data = self._read(self._p_fmt)
+            except struct.error:
+                continue
+            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
+                continue
+            self._f.seek(data[self._p_idx[1]])
+            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
+        return None
diff --git a/phi4/lib/python3.10/site-packages/packaging/_parser.py b/phi4/lib/python3.10/site-packages/packaging/_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1238c06eab95f8c90c393383a703aa3b8c366a5
--- /dev/null
+++ b/phi4/lib/python3.10/site-packages/packaging/_parser.py
@@ -0,0 +1,354 @@
+"""Handwritten parser of dependency specifiers.
+
+The docstring for each __parse_* function contains EBNF-inspired grammar representing
+the implementation.
+""" + +from __future__ import annotations + +import ast +from typing import NamedTuple, Sequence, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]] +MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: list[str] + specifier: str + marker: MarkerList | None + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> tuple[str, str, MarkerList | None]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, span_start=url_start, after="URL and whitespace" + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + after=( + "version specifier" + if specifier + else "name and no valid version specifier" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, after: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? 
+ """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected end or semicolon (after {after})", + span_start=span_start, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> list[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens( + "LEFT_BRACKET", + "RIGHT_BRACKET", + around="extras", + ): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> list[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: list[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? + """ + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="version specifier", + ): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? + """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + span_start = tokenizer.position + parsed_specifiers += tokenizer.read().text + if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): + tokenizer.raise_syntax_error( + ".* suffix can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position + 1, + ) + if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): + tokenizer.raise_syntax_error( + "Local version label can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position, + ) + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: + retval = _parse_marker(tokenizer) + tokenizer.expect("END", expected="end of marker expression") + return retval + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? 
LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="marker expression", + ): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? + """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if env_var in ("platform_python_implementation", "python_implementation"): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of " + "<=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/phi4/lib/python3.10/site-packages/packaging/_structures.py b/phi4/lib/python3.10/site-packages/packaging/_structures.py new file mode 100644 index 0000000000000000000000000000000000000000..90a6465f9682c886363eea5327dac64bf623a6ff --- /dev/null +++ b/phi4/lib/python3.10/site-packages/packaging/_structures.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
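+
+# These sentinel singletons compare greater than (Infinity) and less than
+# (NegativeInfinity) any other object. packaging's version-comparison keys
+# rely on this ordering, e.g. so a release without a pre-release segment
+# sorts after one that has it.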
+ + +class InfinityType: + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/phi4/lib/python3.10/site-packages/packaging/markers.py b/phi4/lib/python3.10/site-packages/packaging/markers.py new file mode 100644 index 0000000000000000000000000000000000000000..fb7f49cf8cd43ffae71e3e8d15174d7536f9da02 --- /dev/null +++ b/phi4/lib/python3.10/site-packages/packaging/markers.py @@ -0,0 +1,331 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import operator +import os +import platform +import sys +from typing import Any, Callable, TypedDict, cast + +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable +from ._parser import parse_marker as _parse_marker +from ._tokenizer import ParserSyntaxError +from .specifiers import InvalidSpecifier, Specifier +from .utils import canonicalize_name + +__all__ = [ + "InvalidMarker", + "Marker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "default_environment", +] + +Operator = Callable[[str, str], bool] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. + """ + + +class Environment(TypedDict): + implementation_name: str + """The implementation's identifier, e.g. ``'cpython'``.""" + + implementation_version: str + """ + The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or + ``'7.3.13'`` for PyPy3.10 v7.3.13. + """ + + os_name: str + """ + The value of :py:data:`os.name`. The name of the operating system dependent module + imported, e.g. ``'posix'``. + """ + + platform_machine: str + """ + Returns the machine type, e.g. ``'i386'``. + + An empty string if the value cannot be determined. + """ + + platform_release: str + """ + The system's release, e.g. ``'2.2.0'`` or ``'NT'``. + + An empty string if the value cannot be determined. + """ + + platform_system: str + """ + The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``. + + An empty string if the value cannot be determined. 
+    """
+
+    platform_version: str
+    """
+    The system's release version, e.g. ``'#3 on degas'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    python_full_version: str
+    """
+    The Python version as string ``'major.minor.patchlevel'``.
+
+    Note that unlike the Python :py:data:`sys.version`, this value will always include
+    the patchlevel (it defaults to 0).
+    """
+
+    platform_python_implementation: str
+    """
+    A string identifying the Python implementation, e.g. ``'CPython'``.
+    """
+
+    python_version: str
+    """The Python version as string ``'major.minor'``."""
+
+    sys_platform: str
+    """
+    This string contains a platform identifier that can be used to append
+    platform-specific components to :py:data:`sys.path`, for instance.
+
+    For Unix systems, except on Linux and AIX, this is the lowercased OS name as
+    returned by ``uname -s`` with the first part of the version as returned by
+    ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python
+    was built.
+    """
+
+
+def _normalize_extra_values(results: Any) -> Any:
+    """
+    Normalize extra values.
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results
+
+
+def _format_marker(
+    marker: list[str] | MarkerAtom | str, first: bool | None = True
+) -> str:
+    assert isinstance(marker, (list, tuple, str))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself its own list. In that case we want to skip
+    # the rest of this function so that we don't get extraneous () on the
+    # outside.
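+    # For example, [[(<Variable('a')>, <Op('==')>, <Value('b')>)]] formats as
+    # 'a == "b"' rather than '(a == "b")'.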
+ if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators: dict[str, Operator] = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs: str, op: Op, rhs: str) -> bool: + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs, prereleases=True) + + oper: Operator | None = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") + + return oper(lhs, rhs) + + +def _normalize(*values: str, key: str) -> tuple[str, ...]: + # PEP 685 – Comparison of extra names for optional distribution dependencies + # https://peps.python.org/pep-0685/ + # > When comparing extra names, tools MUST normalize the names being + # > compared using the semantics outlined in PEP 503 for names + if key == "extra": + return tuple(canonicalize_name(v) for v in values) + + # other environment markers don't have such standards + return values + + +def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool: + groups: list[list[bool]] = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, str)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + environment_key = lhs.value + lhs_value = environment[environment_key] + rhs_value = rhs.value + else: + lhs_value = lhs.value + environment_key = rhs.value + rhs_value = environment[environment_key] + + lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info: sys._version_info) -> str: + version = f"{info.major}.{info.minor}.{info.micro}" + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment() -> Environment: + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker: + def __init__(self, marker: str) -> None: + # Note: We create a Marker object without calling this constructor in + # packaging.requirements.Requirement. If any additional logic is + # added here, make sure to mirror/adapt Requirement. 
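+        # Usage sketch (editor's addition, not upstream): e.g.
+        # Marker('python_version >= "3.8"').evaluate() -> True/False for the
+        # running interpreter; construction only parses and validates.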
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(self, environment: dict[str, str] | None = None) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = cast("dict[str, str]", default_environment())
+        current_environment["extra"] = ""
+        if environment is not None:
+            current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
+
+        return _evaluate_markers(
+            self._markers, _repair_python_full_version(current_environment)
+        )
+
+
+def _repair_python_full_version(env: dict[str, str]) -> dict[str, str]:
+    """
+    Work around platform.python_version() returning something that is not PEP 440
+    compliant for non-tagged Python builds.
+    """
+    if env["python_full_version"].endswith("+"):
+        env["python_full_version"] += "local"
+    return env
diff --git a/phi4/lib/python3.10/site-packages/packaging/metadata.py b/phi4/lib/python3.10/site-packages/packaging/metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..721f411cfc44f6d24c13112e4246b5ad776a5e0b
--- /dev/null
+++ b/phi4/lib/python3.10/site-packages/packaging/metadata.py
@@ -0,0 +1,863 @@
+from __future__ import annotations
+
+import email.feedparser
+import email.header
+import email.message
+import email.parser
+import email.policy
+import pathlib
+import sys
+import typing
+from typing import (
+    Any,
+    Callable,
+    Generic,
+    Literal,
+    TypedDict,
+    cast,
+)
+
+from . import licenses, requirements, specifiers, utils
+from . import version as version_module
+from .licenses import NormalizedLicenseExpression
+
+T = typing.TypeVar("T")
+
+
+if sys.version_info >= (3, 11):  # pragma: no cover
+    ExceptionGroup = ExceptionGroup
+else:  # pragma: no cover
+
+    class ExceptionGroup(Exception):
+        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+        If :external:exc:`ExceptionGroup` is already defined by Python itself,
+        that version is used instead.
+        """
+
+        message: str
+        exceptions: list[Exception]
+
+        def __init__(self, message: str, exceptions: list[Exception]) -> None:
+            self.message = message
+            self.exceptions = exceptions
+
+        def __repr__(self) -> str:
+            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+
+class InvalidMetadata(ValueError):
+    """A metadata field contains invalid data."""
+
+    field: str
+    """The name of the field that contains invalid data."""
+
+    def __init__(self, field: str, message: str) -> None:
+        self.field = field
+        super().__init__(message)
+
+
+# The RawMetadata class attempts to make as few assumptions about the underlying
+# serialization formats as possible. The idea is that as long as a serialization
+# format offers some very basic primitives in *some* way, then we can support
+# serializing to and from that format.
+class RawMetadata(TypedDict, total=False):
+    """A dictionary of raw core metadata.
+
+    Each field in core metadata maps to a key of this dictionary (when data is
+    provided). The key is lower-case and underscores are used instead of dashes
+    compared to the equivalent core metadata field. Any core metadata field that
+    can be specified multiple times or can hold multiple values in a single
+    field has a key with a plural name. See :class:`Metadata` whose attributes
+    match the keys of this dictionary.
+
+    Core metadata fields that can be specified multiple times are stored as a
+    list or dict depending on which is appropriate for the field. Any fields
+    which hold multiple values in a single field are stored as a list.
+
+    """
+
+    # Metadata 1.0 - PEP 241
+    metadata_version: str
+    name: str
+    version: str
+    platforms: list[str]
+    summary: str
+    description: str
+    keywords: list[str]
+    home_page: str
+    author: str
+    author_email: str
+    license: str
+
+    # Metadata 1.1 - PEP 314
+    supported_platforms: list[str]
+    download_url: str
+    classifiers: list[str]
+    requires: list[str]
+    provides: list[str]
+    obsoletes: list[str]
+
+    # Metadata 1.2 - PEP 345
+    maintainer: str
+    maintainer_email: str
+    requires_dist: list[str]
+    provides_dist: list[str]
+    obsoletes_dist: list[str]
+    requires_python: str
+    requires_external: list[str]
+    project_urls: dict[str, str]
+
+    # Metadata 2.0
+    # PEP 426 attempted to completely revamp the metadata format
+    # but got stuck without ever being able to build consensus on
+    # it and ultimately ended up withdrawn.
+    #
+    # However, a number of tools had started emitting METADATA with
+    # `2.0` Metadata-Version, so for historical reasons, this version
+    # was skipped.
+
+    # Metadata 2.1 - PEP 566
+    description_content_type: str
+    provides_extra: list[str]
+
+    # Metadata 2.2 - PEP 643
+    dynamic: list[str]
+
+    # Metadata 2.3 - PEP 685
+    # No new fields were added in PEP 685, just some edge cases were
+    # tightened up to provide better interoperability.
+
+    # Metadata 2.4 - PEP 639
+    license_expression: str
+    license_files: list[str]
+
+
+_STRING_FIELDS = {
+    "author",
+    "author_email",
+    "description",
+    "description_content_type",
+    "download_url",
+    "home_page",
+    "license",
+    "license_expression",
+    "maintainer",
+    "maintainer_email",
+    "metadata_version",
+    "name",
+    "requires_python",
+    "summary",
+    "version",
+}
+
+_LIST_FIELDS = {
+    "classifiers",
+    "dynamic",
+    "license_files",
+    "obsoletes",
+    "obsoletes_dist",
+    "platforms",
+    "provides",
+    "provides_dist",
+    "provides_extra",
+    "requires",
+    "requires_dist",
+    "requires_external",
+    "supported_platforms",
+}
+
+_DICT_FIELDS = {
+    "project_urls",
+}
+
+
+def _parse_keywords(data: str) -> list[str]:
+    """Split a string of comma-separated keywords into a list of keywords."""
+    return [k.strip() for k in data.split(",")]
+
+
+def _parse_project_urls(data: list[str]) -> dict[str, str]:
+    """Parse a list of label/URL string pairings separated by a comma."""
+    urls = {}
+    for pair in data:
+        # Our logic is slightly tricky here as we want to try and do
+        # *something* reasonable with malformed data.
+        #
+        # The main thing that we have to worry about is data that does
+        # not have a ',' at all to split the label from the value. There
+        # isn't a singular right answer here, and we will fail validation
+        # later on (if the caller is validating) so it doesn't *really*
+        # matter, but since the missing value has to be an empty str
+        # and our return value is dict[str, str], if we let the key
+        # be the missing value, then they'd have multiple '' values that
+        # overwrite each other in an accumulating dict.
+        #
+        # The other potential issue is that it's possible to have the
+        # same label multiple times in the metadata, with no solid "right"
+        # answer with what to do in that case. As such, we'll do the only
+        # thing we can, which is treat the field as unparseable and add it
+        # to our list of unparsed fields.
+        parts = [p.strip() for p in pair.split(",", 1)]
+        parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items
+
+        # TODO: The spec doesn't say anything about if the keys should be
+        #       considered case sensitive or not... logically they should
+        #       be case-preserving and case-insensitive, but doing that
+        #       would open up more cases where we might have duplicate
+        #       entries.
+        label, url = parts
+        if label in urls:
+            # The label already exists in our set of urls, so this field
+            # is unparseable, and we can just add the whole thing to our
+            # unparseable data and stop processing it.
+            raise KeyError("duplicate labels in project urls")
+        urls[label] = url
+
+    return urls
+
+
+def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
+    """Get the body of the message."""
+    # If our source is a str, then our caller has managed encodings for us,
+    # and we don't need to deal with it.
+    if isinstance(source, str):
+        payload = msg.get_payload()
+        assert isinstance(payload, str)
+        return payload
+    # If our source is a bytes, then we're managing the encoding and we need
+    # to deal with it.
+    else:
+        bpayload = msg.get_payload(decode=True)
+        assert isinstance(bpayload, bytes)
+        try:
+            return bpayload.decode("utf8", "strict")
+        except UnicodeDecodeError as exc:
+            raise ValueError("payload in an invalid encoding") from exc
+
+
+# The various parse_FORMAT functions here are intended to be as lenient as
+# possible in their parsing, while still returning a correctly typed
+# RawMetadata.
+# +# To aid in this, we also generally want to do as little touching of the +# data as possible, except where there are possibly some historic holdovers +# that make valid data awkward to work with. +# +# While this is a lower level, intermediate format than our ``Metadata`` +# class, some light touch ups can make a massive difference in usability. + +# Map METADATA fields to RawMetadata. +_EMAIL_TO_RAW_MAPPING = { + "author": "author", + "author-email": "author_email", + "classifier": "classifiers", + "description": "description", + "description-content-type": "description_content_type", + "download-url": "download_url", + "dynamic": "dynamic", + "home-page": "home_page", + "keywords": "keywords", + "license": "license", + "license-expression": "license_expression", + "license-file": "license_files", + "maintainer": "maintainer", + "maintainer-email": "maintainer_email", + "metadata-version": "metadata_version", + "name": "name", + "obsoletes": "obsoletes", + "obsoletes-dist": "obsoletes_dist", + "platform": "platforms", + "project-url": "project_urls", + "provides": "provides", + "provides-dist": "provides_dist", + "provides-extra": "provides_extra", + "requires": "requires", + "requires-dist": "requires_dist", + "requires-external": "requires_external", + "requires-python": "requires_python", + "summary": "summary", + "supported-platform": "supported_platforms", + "version": "version", +} +_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} + + +def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]: + """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). + + This function returns a two-item tuple of dicts. The first dict is of + recognized fields from the core metadata specification. Fields that can be + parsed and translated into Python's built-in types are converted + appropriately. All other fields are left as-is. Fields that are allowed to + appear multiple times are stored as lists. + + The second dict contains all other fields from the metadata. This includes + any unrecognized fields. It also includes any fields which are expected to + be parsed into a built-in type but were not formatted appropriately. Finally, + any fields that are expected to appear only once but are repeated are + included in this dict. + + """ + raw: dict[str, str | list[str] | dict[str, str]] = {} + unparsed: dict[str, list[str]] = {} + + if isinstance(data, str): + parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) + else: + parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) + + # We have to wrap parsed.keys() in a set, because in the case of multiple + # values for a key (a list), the key will appear multiple times in the + # list of keys, but we're avoiding that by using get_all(). + for name in frozenset(parsed.keys()): + # Header names in RFC are case insensitive, so we'll normalize to all + # lower case to make comparisons easier. + name = name.lower() + + # We use get_all() here, even for fields that aren't multiple use, + # because otherwise someone could have e.g. two Name fields, and we + # would just silently ignore it rather than doing something about it. + headers = parsed.get_all(name) or [] + + # The way the email module works when parsing bytes is that it + # unconditionally decodes the bytes as ascii using the surrogateescape + # handler. 
When you pull that data back out (such as with get_all()),
+        # it looks to see if the str has any surrogate escapes, and if it does
+        # it wraps it in a Header object instead of returning the string.
+        #
+        # As such, we'll look for those Header objects, and fix up the encoding.
+        value = []
+        # Flag if we have run into any issues processing the headers, thus
+        # signalling that the data belongs in 'unparsed'.
+        valid_encoding = True
+        for h in headers:
+            # It's unclear if this can return more types than just a Header or
+            # a str, so we'll just assert here to make sure.
+            assert isinstance(h, (email.header.Header, str))
+
+            # If it's a header object, we need to do our little dance to get
+            # the real data out of it. In cases where there is invalid data
+            # we're going to end up with mojibake, but there's no obvious, good
+            # way around that without reimplementing parts of the Header object
+            # ourselves.
+            #
+            # That should be fine since, if mojibake happens, this key is
+            # going into the unparsed dict anyways.
+            if isinstance(h, email.header.Header):
+                # The Header object stores its data as chunks, and each chunk
+                # can be independently encoded, so we'll need to check each
+                # of them.
+                chunks: list[tuple[bytes, str | None]] = []
+                for bin, encoding in email.header.decode_header(h):
+                    try:
+                        bin.decode("utf8", "strict")
+                    except UnicodeDecodeError:
+                        # Enable mojibake.
+                        encoding = "latin1"
+                        valid_encoding = False
+                    else:
+                        encoding = "utf8"
+                    chunks.append((bin, encoding))
+
+                # Turn our chunks back into a Header object, then let that
+                # Header object do the right thing to turn them into a
+                # string for us.
+                value.append(str(email.header.make_header(chunks)))
+            # This is already a string, so just add it.
+            else:
+                value.append(h)
+
+        # We've processed all of our values to get them into a list of str,
+        # but we may have mojibake data, in which case this is an unparsed
+        # field.
+        if not valid_encoding:
+            unparsed[name] = value
+            continue
+
+        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
+        if raw_name is None:
+            # This is a bit of a weird situation, we've encountered a key that
+            # we don't know the meaning of, so we don't know whether it's meant
+            # to be a list or not.
+            #
+            # Since we can't really tell one way or another, we'll just leave it
+            # as a list, even though it may be a single item list, because that's
+            # what makes the most sense for email headers.
+            unparsed[name] = value
+            continue
+
+        # If this is one of our string fields, then we'll check to see if our
+        # value is a list of a single item. If it is then we'll assume that
+        # it was emitted as a single string, and unwrap the str from inside
+        # the list.
+        #
+        # If it's any other kind of data, then we haven't the faintest clue
+        # what we should parse it as, and we have to just add it to our list
+        # of unparsed stuff.
+        if raw_name in _STRING_FIELDS and len(value) == 1:
+            raw[raw_name] = value[0]
+        # If this is one of our list of string fields, then we can just assign
+        # the value, since email *only* has strings, and our get_all() call
+        # above ensures that this is a list.
+        elif raw_name in _LIST_FIELDS:
+            raw[raw_name] = value
+        # Special Case: Keywords
+        # The keywords field is implemented in the metadata spec as a str,
+        # but it conceptually is a list of strings, and is serialized using
+        # ", ".join(keywords), so we'll do some light data massaging to turn
+        # this into what it logically is.
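+        # For example, a 'Keywords: pep, packaging' header is parsed into
+        # ["pep", "packaging"].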
+        elif raw_name == "keywords" and len(value) == 1:
+            raw[raw_name] = _parse_keywords(value[0])
+        # Special Case: Project-URL
+        # The project urls field is implemented in the metadata spec as a list of
+        # specially-formatted strings that represent a key and a value, which
+        # is fundamentally a mapping, however the email format doesn't support
+        # mappings in a sane way, so it was crammed into a list of strings
+        # instead.
+        #
+        # We will do a little light data massaging to turn this into a map as
+        # it logically should be.
+        elif raw_name == "project_urls":
+            try:
+                raw[raw_name] = _parse_project_urls(value)
+            except KeyError:
+                unparsed[name] = value
+        # Nothing that we've done has managed to parse this, so we'll just
+        # throw it in our unparseable data and move on.
+        else:
+            unparsed[name] = value
+
+    # We need to support getting the Description from the message payload in
+    # addition to getting it from the headers. This does mean, though, there
+    # is the possibility of it being set both ways, in which case we put both
+    # in 'unparsed' since we don't know which is right.
+    try:
+        payload = _get_payload(parsed, data)
+    except ValueError:
+        unparsed.setdefault("description", []).append(
+            parsed.get_payload(decode=isinstance(data, bytes))  # type: ignore[call-overload]
+        )
+    else:
+        if payload:
+            # Check to see if we've already got a description, if so then both
+            # it and this body move to unparseable.
+            if "description" in raw:
+                description_header = cast(str, raw.pop("description"))
+                unparsed.setdefault("description", []).extend(
+                    [description_header, payload]
+                )
+            elif "description" in unparsed:
+                unparsed["description"].append(payload)
+            else:
+                raw["description"] = payload
+
+    # We need to cast our `raw` to a metadata, because a TypedDict only supports
+    # literal key names; we're computing our key names on purpose, but the
+    # way this function is implemented, our `TypedDict` can only have valid key
+    # names.
+    return cast(RawMetadata, raw), unparsed
+
+
+_NOT_FOUND = object()
+
+
+# Keep the two values in sync.
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
+
+_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
+class _Validator(Generic[T]):
+    """Validate a metadata field.
+
+    All _process_*() methods correspond to a core metadata field. The method is
+    called with the field's raw value. If the raw value is valid it is returned
+    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
+    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
+    as appropriate).
+    """
+
+    name: str
+    raw_name: str
+    added: _MetadataVersion
+
+    def __init__(
+        self,
+        *,
+        added: _MetadataVersion = "1.0",
+    ) -> None:
+        self.added = added
+
+    def __set_name__(self, _owner: Metadata, name: str) -> None:
+        self.name = name
+        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
+
+    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
+        # With Python 3.8, the caching can be replaced with functools.cached_property().
+        # No need to check the cache as attribute lookup will resolve into the
+        # instance's __dict__ before __get__ is called.
+        cache = instance.__dict__
+        value = instance._raw.get(self.name)
+
+        # To make the _process_* methods easier, we'll check if the value is None
+        # and if this field is NOT a required attribute, and if both of those
+        # things are true, we'll skip the converter.
+        if self.name in _REQUIRED_ATTRS or value is not None:
+            try:
+                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
+            except AttributeError:
+                pass
+            else:
+                value = converter(value)
+
+        cache[self.name] = value
+        try:
+            del instance._raw[self.name]  # type: ignore[misc]
+        except KeyError:
+            pass
+
+        return cast(T, value)
+
+    def _invalid_metadata(
+        self, msg: str, cause: Exception | None = None
+    ) -> InvalidMetadata:
+        exc = InvalidMetadata(
+            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
+        )
+        exc.__cause__ = cause
+        return exc
+
+    def _process_metadata_version(self, value: str) -> _MetadataVersion:
+        # Implicitly makes Metadata-Version required.
+        if value not in _VALID_METADATA_VERSIONS:
+            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+        return cast(_MetadataVersion, value)
+
+    def _process_name(self, value: str) -> str:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        # Validate the name as a side-effect.
+        try:
+            utils.canonicalize_name(value, validate=True)
+        except utils.InvalidName as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            ) from exc
+        else:
+            return value
+
+    def _process_version(self, value: str) -> version_module.Version:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        try:
+            return version_module.parse(value)
+        except version_module.InvalidVersion as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            ) from exc
+
+    def _process_summary(self, value: str) -> str:
+        """Check the field contains no newlines."""
+        if "\n" in value:
+            raise self._invalid_metadata("{field} must be a single line")
+        return value
+
+    def _process_description_content_type(self, value: str) -> str:
+        content_types = {"text/plain", "text/x-rst", "text/markdown"}
+        message = email.message.EmailMessage()
+        message["content-type"] = value
+
+        content_type, parameters = (
+            # Defaults to `text/plain` if parsing failed.
+            message.get_content_type().lower(),
+            message["content-type"].params,
+        )
+        # Check if content-type is valid or defaulted to `text/plain` and thus was
+        # not parseable.
+        if content_type not in content_types or content_type not in value.lower():
+            raise self._invalid_metadata(
+                f"{{field}} must be one of {list(content_types)}, not {value!r}"
+            )
+
+        charset = parameters.get("charset", "UTF-8")
+        if charset != "UTF-8":
+            raise self._invalid_metadata(
+                f"{{field}} can only specify the UTF-8 charset, not {charset!r}"
+            )
+
+        markdown_variants = {"GFM", "CommonMark"}
+        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
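+        # e.g. (illustrative) "text/markdown; charset=UTF-8; variant=CommonMark"
+        # passes the checks above and the variant check below.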
+ if content_type == "text/markdown" and variant not in markdown_variants: + raise self._invalid_metadata( + f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " + f"not {variant!r}", + ) + return value + + def _process_dynamic(self, value: list[str]) -> list[str]: + for dynamic_field in map(str.lower, value): + if dynamic_field in {"name", "version", "metadata-version"}: + raise self._invalid_metadata( + f"{dynamic_field!r} is not allowed as a dynamic field" + ) + elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: + raise self._invalid_metadata( + f"{dynamic_field!r} is not a valid dynamic field" + ) + return list(map(str.lower, value)) + + def _process_provides_extra( + self, + value: list[str], + ) -> list[utils.NormalizedName]: + normalized_names = [] + try: + for name in value: + normalized_names.append(utils.canonicalize_name(name, validate=True)) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return normalized_names + + def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: + try: + return specifiers.SpecifierSet(value) + except specifiers.InvalidSpecifier as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_requires_dist( + self, + value: list[str], + ) -> list[requirements.Requirement]: + reqs = [] + try: + for req in value: + reqs.append(requirements.Requirement(req)) + except requirements.InvalidRequirement as exc: + raise self._invalid_metadata( + f"{req!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return reqs + + def _process_license_expression( + self, value: str + ) -> NormalizedLicenseExpression | None: + try: + return licenses.canonicalize_license_expression(value) + except ValueError as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_license_files(self, value: list[str]) -> list[str]: + paths = [] + for path in value: + if ".." in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, " + "parent directory indicators are not allowed" + ) + if "*" in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be resolved" + ) + if ( + pathlib.PurePosixPath(path).is_absolute() + or pathlib.PureWindowsPath(path).is_absolute() + ): + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be relative" + ) + if pathlib.PureWindowsPath(path).as_posix() != path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, " + "paths must use '/' delimiter" + ) + paths.append(path) + return paths + + +class Metadata: + """Representation of distribution metadata. + + Compared to :class:`RawMetadata`, this class provides objects representing + metadata fields instead of only using built-in types. Any invalid metadata + will cause :exc:`InvalidMetadata` to be raised (with a + :py:attr:`~BaseException.__cause__` attribute as appropriate). + """ + + _raw: RawMetadata + + @classmethod + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata: + """Create an instance from :class:`RawMetadata`. + + If *validate* is true, all metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + ins = cls() + ins._raw = data.copy() # Mutations occur due to caching enriched values. 
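+        # (A plain .copy() is sufficient here: enriched values migrate from
+        # _raw into the instance __dict__ as each field is first accessed.)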
+ + if validate: + exceptions: list[Exception] = [] + try: + metadata_version = ins.metadata_version + metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) + except InvalidMetadata as metadata_version_exc: + exceptions.append(metadata_version_exc) + metadata_version = None + + # Make sure to check for the fields that are present, the required + # fields (so their absence can be reported). + fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS + # Remove fields that have already been checked. + fields_to_check -= {"metadata_version"} + + for key in fields_to_check: + try: + if metadata_version: + # Can't use getattr() as that triggers descriptor protocol which + # will fail due to no value for the instance argument. + try: + field_metadata_version = cls.__dict__[key].added + except KeyError: + exc = InvalidMetadata(key, f"unrecognized field: {key!r}") + exceptions.append(exc) + continue + field_age = _VALID_METADATA_VERSIONS.index( + field_metadata_version + ) + if field_age > metadata_age: + field = _RAW_TO_EMAIL_MAPPING[key] + exc = InvalidMetadata( + field, + f"{field} introduced in metadata version " + f"{field_metadata_version}, not {metadata_version}", + ) + exceptions.append(exc) + continue + getattr(ins, key) + except InvalidMetadata as exc: + exceptions.append(exc) + + if exceptions: + raise ExceptionGroup("invalid metadata", exceptions) + + return ins + + @classmethod + def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata: + """Parse metadata from email headers. + + If *validate* is true, the metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + raw, unparsed = parse_email(data) + + if validate: + exceptions: list[Exception] = [] + for unparsed_key in unparsed: + if unparsed_key in _EMAIL_TO_RAW_MAPPING: + message = f"{unparsed_key!r} has invalid data" + else: + message = f"unrecognized field: {unparsed_key!r}" + exceptions.append(InvalidMetadata(unparsed_key, message)) + + if exceptions: + raise ExceptionGroup("unparsed", exceptions) + + try: + return cls.from_raw(raw, validate=validate) + except ExceptionGroup as exc_group: + raise ExceptionGroup( + "invalid or unparsed metadata", exc_group.exceptions + ) from None + + metadata_version: _Validator[_MetadataVersion] = _Validator() + """:external:ref:`core-metadata-metadata-version` + (required; validated to be a valid metadata version)""" + # `name` is not normalized/typed to NormalizedName so as to provide access to + # the original/raw name. 
+ name: _Validator[str] = _Validator() + """:external:ref:`core-metadata-name` + (required; validated using :func:`~packaging.utils.canonicalize_name` and its + *validate* parameter)""" + version: _Validator[version_module.Version] = _Validator() + """:external:ref:`core-metadata-version` (required)""" + dynamic: _Validator[list[str] | None] = _Validator( + added="2.2", + ) + """:external:ref:`core-metadata-dynamic` + (validated against core metadata field names and lowercased)""" + platforms: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-platform`""" + supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-supported-platform`""" + summary: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" + description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body + """:external:ref:`core-metadata-description`""" + description_content_type: _Validator[str | None] = _Validator(added="2.1") + """:external:ref:`core-metadata-description-content-type` (validated)""" + keywords: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-keywords`""" + home_page: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-home-page`""" + download_url: _Validator[str | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-download-url`""" + author: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author`""" + author_email: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author-email`""" + maintainer: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer`""" + maintainer_email: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer-email`""" + license: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-license`""" + license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator( + added="2.4" + ) + """:external:ref:`core-metadata-license-expression`""" + license_files: _Validator[list[str] | None] = _Validator(added="2.4") + """:external:ref:`core-metadata-license-file`""" + classifiers: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-classifier`""" + requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-dist`""" + requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-python`""" + # Because `Requires-External` allows for non-PEP 440 version specifiers, we + # don't do any processing on the values. + requires_external: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-requires-external`""" + project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-project-url`""" + # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation + # regardless of metadata version. 
+ provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator( + added="2.1", + ) + """:external:ref:`core-metadata-provides-extra`""" + provides_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-provides-dist`""" + obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-obsoletes-dist`""" + requires: _Validator[list[str] | None] = _Validator(added="1.1") + """``Requires`` (deprecated)""" + provides: _Validator[list[str] | None] = _Validator(added="1.1") + """``Provides`` (deprecated)""" + obsoletes: _Validator[list[str] | None] = _Validator(added="1.1") + """``Obsoletes`` (deprecated)""" diff --git a/phi4/lib/python3.10/site-packages/packaging/py.typed b/phi4/lib/python3.10/site-packages/packaging/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/phi4/lib/python3.10/site-packages/packaging/specifiers.py b/phi4/lib/python3.10/site-packages/packaging/specifiers.py new file mode 100644 index 0000000000000000000000000000000000000000..b30926af8bf4f47efe98eea44d5ded4cb6f7e07d --- /dev/null +++ b/phi4/lib/python3.10/site-packages/packaging/specifiers.py @@ -0,0 +1,1020 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. testsetup:: + + from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from packaging.version import Version +""" + +from __future__ import annotations + +import abc +import itertools +import re +from typing import Callable, Iterable, Iterator, TypeVar, Union + +from .utils import canonicalize_version +from .version import Version + +UnparsedVersion = Union[Version, str] +UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) +CallableOperator = Callable[[Version, str], bool] + + +def _coerce_version(version: UnparsedVersion) -> Version: + if not isinstance(version, Version): + version = Version(version) + return version + + +class InvalidSpecifier(ValueError): + """ + Raised when attempting to create a :class:`Specifier` with a specifier + string that is invalid. + + >>> Specifier("lolwat") + Traceback (most recent call last): + ... + packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat' + """ + + +class BaseSpecifier(metaclass=abc.ABCMeta): + @abc.abstractmethod + def __str__(self) -> str: + """ + Returns the str representation of this Specifier-like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self) -> int: + """ + Returns a hash value for this Specifier-like object. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Returns a boolean representing whether or not the two Specifier-like + objects are equal. + + :param other: The other object to check against. + """ + + @property + @abc.abstractmethod + def prereleases(self) -> bool | None: + """Whether or not pre-releases as a whole are allowed. + + This can be set to either ``True`` or ``False`` to explicitly enable or disable + prereleases or it can be set to ``None`` (the default) to use default semantics. + """ + + @prereleases.setter + def prereleases(self, value: bool) -> None: + """Setter for :attr:`prereleases`. + + :param value: The value to set. 
+ """ + + @abc.abstractmethod + def contains(self, item: str, prereleases: bool | None = None) -> bool: + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class Specifier(BaseSpecifier): + """This class abstracts handling of version specifiers. + + .. tip:: + + It is generally not required to instantiate this manually. You should instead + prefer to work with :class:`SpecifierSet` instead, which can parse + comma-separated version specifiers (which is what package metadata contains). + """ + + _operator_regex_str = r""" + (?P(~=|==|!=|<=|>=|<|>|===)) + """ + _version_regex_str = r""" + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s;)]* # The arbitrary version can be just about anything, + # we match everything except for whitespace, a + # semi-colon for marker support, and a closing paren + # since versions can be enclosed in them. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + + # You cannot use a wild card and a pre-release, post-release, a dev or + # local version together so group them with a | and make them optional. + (?: + \.\* # Wild card syntax of .* + | + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: + """Initialize a Specifier instance. + + :param spec: + The string representation of a specifier which will be parsed and + normalized before use. + :param prereleases: + This tells the specifier if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + :raises InvalidSpecifier: + If the given specifier is invalid (i.e. bad syntax). 
+ """ + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier(f"Invalid specifier: {spec!r}") + + self._spec: tuple[str, str] = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515 + @property # type: ignore[override] + def prereleases(self) -> bool: + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "===", ">", "<"]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. + if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if Version(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + @property + def operator(self) -> str: + """The operator of this specifier. + + >>> Specifier("==1.2.3").operator + '==' + """ + return self._spec[0] + + @property + def version(self) -> str: + """The version of this specifier. + + >>> Specifier("==1.2.3").version + '1.2.3' + """ + return self._spec[1] + + def __repr__(self) -> str: + """A representation of the Specifier that shows all internal state. + + >>> Specifier('>=1.0.0') + =1.0.0')> + >>> Specifier('>=1.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> Specifier('>=1.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the Specifier that can be round-tripped. + + >>> str(Specifier('>=1.0.0')) + '>=1.0.0' + >>> str(Specifier('>=1.0.0', prereleases=False)) + '>=1.0.0' + """ + return "{}{}".format(*self._spec) + + @property + def _canonical_spec(self) -> tuple[str, str]: + canonical_version = canonicalize_version( + self._spec[1], + strip_trailing_zero=(self._spec[0] != "~="), + ) + return self._spec[0], canonical_version + + def __hash__(self) -> int: + return hash(self._canonical_spec) + + def __eq__(self, other: object) -> bool: + """Whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") + True + >>> (Specifier("==1.2.3", prereleases=False) == + ... 
Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore suffix segments. + prefix = _version_join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + def _compare_equal(self, prospective: Version, spec: str) -> bool: + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + normalized_prospective = canonicalize_version( + prospective.public, strip_trailing_zero=False + ) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) + # Split the spec out by bangs and dots, and pretend that there is + # an implicit dot in between a release segment and a pre-release segment. + split_spec = _version_split(normalized_spec) + + # Split the prospective version out by bangs and dots, and pretend + # that there is an implicit dot in between a release segment and + # a pre-release segment. + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = padded_prospective[: len(split_spec)] + + return shortened_prospective == split_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = Version(prospective.public) + + return prospective == spec_version + + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: + return not self._compare_equal(prospective, spec) + + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. 
+        return Version(prospective.public) <= Version(spec)
+
+    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) >= Version(spec)
+
+    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not, we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not, we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept post-release
+        # versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is both
+        # greater than the spec version *and* it's not a post-release or local
+        # version of the same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        return str(prospective).lower() == str(spec).lower()
+
+    def __contains__(self, item: str | Version) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in Specifier(">=1.2.3")
+        True
+        >>> Version("1.2.3") in Specifier(">=1.2.3")
+        True
+        >>> "1.0.0" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this Specifier. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> Specifier(">=1.2.3").contains("1.2.3")
+        True
+        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
+        True
+        >>> Specifier(">=1.2.3").contains("1.0.0")
+        False
+        >>> Specifier(">=1.2.3").contains("1.3.0a1")
+        False
+        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
+        True
+        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
+        True
+        """
+
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version, this allows us to have a shortcut for
+        # "2.0" in Specifier(">=2")
+        normalized_item = _coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not. If we do not support prereleases, then we can short circuit
+        # the logic if this version is a prerelease.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable that match the specifier.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(Specifier().contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
+        ['1.2.3', '1.3', <Version('1.4')>]
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
+        ['1.5a1']
+        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        """
+
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = _coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version: str) -> list[str]:
+    """Split version into components.
+
+    The split components are intended for version comparison. The logic does
+    not attempt to retain the original version string, so joining the
+    components back with :func:`_version_join` may not produce the original
+    version string.
+    """
+    result: list[str] = []
+
+    epoch, _, rest = version.rpartition("!")
+    result.append(epoch or "0")
+
+    for item in rest.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _version_join(components: list[str]) -> str:
+    """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component must be the epoch (either empty or numeric), and all other
+    components numeric.
+    """
+    epoch, *rest = components
+    return f"{epoch}!{'.'.join(rest)}"
+
+
+def _is_not_suffix(segment: str) -> bool:
+    return not any(
+        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+    )
+
+
+def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
+
+    # Insert our padding
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+    return (
+        list(itertools.chain.from_iterable(left_split)),
+        list(itertools.chain.from_iterable(right_split)),
+    )
+
+
+class SpecifierSet(BaseSpecifier):
+    """This class abstracts handling of a set of version specifiers.
+
+    It can be passed a single specifier (``>=3.0``), a comma-separated list of
+    specifiers (``>=3.0,!=3.1``), or no specifier at all.
+    """
+
+    def __init__(
+        self,
+        specifiers: str | Iterable[Specifier] = "",
+        prereleases: bool | None = None,
+    ) -> None:
+        """Initialize a SpecifierSet instance.
+
+        :param specifiers:
+            The string representation of a specifier or a comma-separated list of
+            specifiers which will be parsed and normalized before use.
+            May also be an iterable of ``Specifier`` instances, which will be used
+            as is.
+        :param prereleases:
+            This tells the SpecifierSet if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+
+        :raises InvalidSpecifier:
+            If the given ``specifiers`` are not parseable, then this exception will
+            be raised.
+        """
+
+        if isinstance(specifiers, str):
+            # Split on `,` to break each individual specifier into its own item, and
+            # strip each item to remove leading/trailing whitespace.
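+            # e.g. (illustrative) ">=3.0, !=3.1" splits into [">=3.0", "!=3.1"].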
+            split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+            # Make each individual specifier a Specifier and save in a frozen set
+            # for later.
+            self._specs = frozenset(map(Specifier, split_specifiers))
+        else:
+            # Save the supplied specifiers in a frozen set.
+            self._specs = frozenset(specifiers)
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    @property
+    def prereleases(self) -> bool | None:
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __repr__(self) -> str:
+        """A representation of the specifier set that shows all internal state.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> SpecifierSet('>=1.0.0,!=2.0.0')
+        <SpecifierSet('!=2.0.0,>=1.0.0')>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<SpecifierSet({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the specifier set that can be round-tripped.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
+        '!=1.0.1,>=1.0.0'
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
+        '!=1.0.1,>=1.0.0'
+        """
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self) -> int:
+        return hash(self._specs)
+
+    def __and__(self, other: SpecifierSet | str) -> SpecifierSet:
+        """Return a SpecifierSet which is a combination of the two sets.
+
+        :param other: The other object to combine with.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        """
+        if isinstance(other, str):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two SpecifierSet-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
+        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
+        False
+        """
+        if isinstance(other, (str, Specifier)):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __len__(self) -> int:
+        """Returns the number of specifiers in this specifier set."""
+        return len(self._specs)
+
+    def __iter__(self) -> Iterator[Specifier]:
+        """
+        Returns an iterator over all the underlying :class:`Specifier` instances
+        in this specifier set.
+
+        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
+        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
+        """
+        return iter(self._specs)
+
+    def __contains__(self, item: UnparsedVersion) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self,
+        item: UnparsedVersion,
+        prereleases: bool | None = None,
+        installed: bool | None = None,
+    ) -> bool:
+        """Return whether or not the item is contained in this SpecifierSet.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this SpecifierSet. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
+        True
+        """
+        # Ensure that our item is a Version instance.
+        if not isinstance(item, Version):
+            item = Version(item)
+
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        # like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        if installed and item.is_prerelease:
+            item = Version(item.base_version)
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        # will always return True, this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable that match the specifiers in this set.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
+        ['1.3', <Version('1.4')>]
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
+        []
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+
+        An "empty" SpecifierSet will filter items based on the presence of prerelease
+        versions in the set.
+
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet("").filter(["1.5a1"]))
+        ['1.5a1']
+        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        """
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one, this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iter(iterable)
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases.
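+        # e.g. (illustrative) an empty set yields "1.3" from ["1.3", "1.5a1"],
+        # but yields "1.5a1" from ["1.5a1"] alone, per the doctests above.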
+        else:
+            filtered: list[UnparsedVersionVar] = []
+            found_prereleases: list[UnparsedVersionVar] = []
+
+            for item in iterable:
+                parsed_version = _coerce_version(item)
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return iter(found_prereleases)
+
+            return iter(filtered)
diff --git a/phi4/lib/python3.10/site-packages/packaging/version.py b/phi4/lib/python3.10/site-packages/packaging/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9bbda20e463b8d9389ecd65f74af33810a02bdd
--- /dev/null
+++ b/phi4/lib/python3.10/site-packages/packaging/version.py
@@ -0,0 +1,582 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.version import parse, Version
+"""
+
+from __future__ import annotations
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]
+
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
+    NegativeInfinityType,
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
+]
+CmpKey = Tuple[
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
+class _Version(NamedTuple):
+    epoch: int
+    release: tuple[int, ...]
+    dev: tuple[str, int] | None
+    pre: tuple[str, int] | None
+    post: tuple[str, int] | None
+    local: LocalType | None
+
+
+def parse(version: str) -> Version:
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
+    """
+    return Version(version)
+
+
+class InvalidVersion(ValueError):
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+        ...
+    packaging.version.InvalidVersion: Invalid version: 'invalid'
+    """
+
+
+class _BaseVersion:
+    _key: tuple[Any, ...]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+_VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
+
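+# A minimal usage sketch (illustrative, not part of the library): embedding
+# VERSION_PATTERN in a larger expression requires the VERBOSE and IGNORECASE
+# flags, e.g.
+#
+#     _is_version = re.compile(
+#         r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE
+#     )
+#     assert _is_version.match("1!2.0.post1.dev3+local.7") is not None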
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _key: CmpKey
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: {version!r}")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f""
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be round-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._version.epoch
+
+    @property
+    def release(self) -> tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._version.release
+
+    @property
+    def pre(self) -> tuple[str, int] | None:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._version.pre
+
+    @property
+    def post(self) -> int | None:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> int | None:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> str | None:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").public
+        '1!1.2.3.dev1'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+class _TrimmedRelease(Version):
+    @property
+    def release(self) -> tuple[int, ...]:
+        """
+        Release segment without any trailing zeros.
+
+        >>> _TrimmedRelease('1.0.0').release
+        (1,)
+        >>> _TrimmedRelease('0.0').release
+        (0,)
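+        >>> _TrimmedRelease('1.2.0').release
+        (1, 2)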
+        """
+        rel = super().release
+        nonzeros = (index for index, val in enumerate(rel) if val)
+        last_nonzero = max(nonzeros, default=0)
+        return rel[: last_nonzero + 1]
+
+
+def _parse_letter_version(
+    letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
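+    """Normalize a raw (letter, number) pre/post/dev pair.
+
+    A sketch of the behavior implemented below:
+
+    >>> _parse_letter_version("ALPHA", None)
+    ('a', 0)
+    >>> _parse_letter_version("c", "2")
+    ('rc', 2)
+    >>> _parse_letter_version(None, "1")
+    ('post', 1)
+    >>> print(_parse_letter_version(None, None))
+    None
+    """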
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # Some words are alternate spellings of other words; in those cases
+        # we normalize to our preferred spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+
+    assert not letter
+    if number:
+        # We assume that if we are given a number but not a letter, the
+        # version is using the implicit post-release syntax (e.g. 1.0-1).
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str | None) -> LocalType | None:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
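+
+    Separators ".", "_" and "-" are treated alike:
+
+    >>> _parse_local_version("ubuntu-1_2")
+    ('ubuntu', 1, 2)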
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: tuple[int, ...],
+    pre: tuple[str, int] | None,
+    post: tuple[str, int] | None,
+    dev: tuple[str, int] | None,
+    local: LocalType | None,
+) -> CmpKey:
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all the
+    # now-leading zeros until we come to something non-zero, re-reverse the
+    # rest back into the correct order, and use that tuple as our sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
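+    # e.g. (1, 0, 0) -> (1,), so 1.0, 1.0.0 and 1 all compare equal.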
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP 440:
+        # - Alphanumeric segments sort before numeric segments
+        # - Alphanumeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
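+
+
+# Net effect of the key above (illustrative annotation): for a given release
+# segment, versions sort as
+#   1.0.dev0 < 1.0a0 < 1.0rc1 < 1.0 < 1.0.post0 < 1.0.post0+local
+# and, because trailing zeros are trimmed, 1.0 == 1.0.0.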