Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- evalkit_tf446/lib/python3.10/lib2to3/Grammar.txt +196 -0
- evalkit_tf446/lib/python3.10/lib2to3/PatternGrammar.txt +28 -0
- evalkit_tf446/lib/python3.10/lib2to3/PatternGrammar3.10.16.final.0.pickle +3 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__init__.py +1 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_apply.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_asserts.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_except.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_execfile.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_filter.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_funcattrs.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_idioms.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_import.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_map.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_methodattrs.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_numliterals.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_paren.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_print.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_raise.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_throw.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_tuple_params.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_zip.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_apply.py +68 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_asserts.py +34 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_basestring.py +14 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_buffer.py +22 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_dict.py +106 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_except.py +93 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_exec.py +39 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_execfile.py +53 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_exitfunc.py +72 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_filter.py +94 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_funcattrs.py +21 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_future.py +22 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_getcwdu.py +19 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_has_key.py +109 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_idioms.py +152 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_import.py +99 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_imports.py +145 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_imports2.py +16 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_input.py +26 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_intern.py +39 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_isinstance.py +52 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_itertools.py +43 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_itertools_imports.py +57 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_long.py +19 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_map.py +110 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_metaclass.py +228 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_methodattrs.py +24 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_ne.py +23 -0
- evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_next.py +103 -0
evalkit_tf446/lib/python3.10/lib2to3/Grammar.txt
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Grammar for 2to3. This grammar supports Python 2.x and 3.x.
|
| 2 |
+
|
| 3 |
+
# NOTE WELL: You should also follow all the steps listed at
|
| 4 |
+
# https://devguide.python.org/grammar/
|
| 5 |
+
|
| 6 |
+
# Start symbols for the grammar:
|
| 7 |
+
# file_input is a module or sequence of commands read from an input file;
|
| 8 |
+
# single_input is a single interactive statement;
|
| 9 |
+
# eval_input is the input for the eval() and input() functions.
|
| 10 |
+
# NB: compound_stmt in single_input is followed by extra NEWLINE!
|
| 11 |
+
file_input: (NEWLINE | stmt)* ENDMARKER
|
| 12 |
+
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
|
| 13 |
+
eval_input: testlist NEWLINE* ENDMARKER
|
| 14 |
+
|
| 15 |
+
decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
|
| 16 |
+
decorators: decorator+
|
| 17 |
+
decorated: decorators (classdef | funcdef | async_funcdef)
|
| 18 |
+
async_funcdef: ASYNC funcdef
|
| 19 |
+
funcdef: 'def' NAME parameters ['->' test] ':' suite
|
| 20 |
+
parameters: '(' [typedargslist] ')'
|
| 21 |
+
|
| 22 |
+
# The following definition for typedarglist is equivalent to this set of rules:
|
| 23 |
+
#
|
| 24 |
+
# arguments = argument (',' argument)*
|
| 25 |
+
# argument = tfpdef ['=' test]
|
| 26 |
+
# kwargs = '**' tname [',']
|
| 27 |
+
# args = '*' [tname]
|
| 28 |
+
# kwonly_kwargs = (',' argument)* [',' [kwargs]]
|
| 29 |
+
# args_kwonly_kwargs = args kwonly_kwargs | kwargs
|
| 30 |
+
# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]]
|
| 31 |
+
# typedargslist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs
|
| 32 |
+
# typedarglist = arguments ',' '/' [',' [typedargslist_no_posonly]])|(typedargslist_no_posonly)"
|
| 33 |
+
#
|
| 34 |
+
# It needs to be fully expanded to allow our LL(1) parser to work on it.
|
| 35 |
+
|
| 36 |
+
typedargslist: tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [
|
| 37 |
+
',' [((tfpdef ['=' test] ',')* ('*' [tname] (',' tname ['=' test])*
|
| 38 |
+
[',' ['**' tname [',']]] | '**' tname [','])
|
| 39 |
+
| tfpdef ['=' test] (',' tfpdef ['=' test])* [','])]
|
| 40 |
+
] | ((tfpdef ['=' test] ',')* ('*' [tname] (',' tname ['=' test])*
|
| 41 |
+
[',' ['**' tname [',']]] | '**' tname [','])
|
| 42 |
+
| tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
|
| 43 |
+
|
| 44 |
+
tname: NAME [':' test]
|
| 45 |
+
tfpdef: tname | '(' tfplist ')'
|
| 46 |
+
tfplist: tfpdef (',' tfpdef)* [',']
|
| 47 |
+
|
| 48 |
+
# The following definition for varargslist is equivalent to this set of rules:
|
| 49 |
+
#
|
| 50 |
+
# arguments = argument (',' argument )*
|
| 51 |
+
# argument = vfpdef ['=' test]
|
| 52 |
+
# kwargs = '**' vname [',']
|
| 53 |
+
# args = '*' [vname]
|
| 54 |
+
# kwonly_kwargs = (',' argument )* [',' [kwargs]]
|
| 55 |
+
# args_kwonly_kwargs = args kwonly_kwargs | kwargs
|
| 56 |
+
# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]]
|
| 57 |
+
# vararglist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs
|
| 58 |
+
# varargslist = arguments ',' '/' [','[(vararglist_no_posonly)]] | (vararglist_no_posonly)
|
| 59 |
+
#
|
| 60 |
+
# It needs to be fully expanded to allow our LL(1) parser to work on it.
|
| 61 |
+
|
| 62 |
+
varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [
|
| 63 |
+
((vfpdef ['=' test] ',')* ('*' [vname] (',' vname ['=' test])*
|
| 64 |
+
[',' ['**' vname [',']]] | '**' vname [','])
|
| 65 |
+
| vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
|
| 66 |
+
]] | ((vfpdef ['=' test] ',')*
|
| 67 |
+
('*' [vname] (',' vname ['=' test])* [',' ['**' vname [',']]]| '**' vname [','])
|
| 68 |
+
| vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
|
| 69 |
+
|
| 70 |
+
vname: NAME
|
| 71 |
+
vfpdef: vname | '(' vfplist ')'
|
| 72 |
+
vfplist: vfpdef (',' vfpdef)* [',']
|
| 73 |
+
|
| 74 |
+
stmt: simple_stmt | compound_stmt
|
| 75 |
+
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
|
| 76 |
+
small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
|
| 77 |
+
import_stmt | global_stmt | exec_stmt | assert_stmt)
|
| 78 |
+
expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) |
|
| 79 |
+
('=' (yield_expr|testlist_star_expr))*)
|
| 80 |
+
annassign: ':' test ['=' test]
|
| 81 |
+
testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
|
| 82 |
+
augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' |
|
| 83 |
+
'<<=' | '>>=' | '**=' | '//=')
|
| 84 |
+
# For normal and annotated assignments, additional restrictions enforced by the interpreter
|
| 85 |
+
print_stmt: 'print' ( [ test (',' test)* [','] ] |
|
| 86 |
+
'>>' test [ (',' test)+ [','] ] )
|
| 87 |
+
del_stmt: 'del' exprlist
|
| 88 |
+
pass_stmt: 'pass'
|
| 89 |
+
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
|
| 90 |
+
break_stmt: 'break'
|
| 91 |
+
continue_stmt: 'continue'
|
| 92 |
+
return_stmt: 'return' [testlist_star_expr]
|
| 93 |
+
yield_stmt: yield_expr
|
| 94 |
+
raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]]
|
| 95 |
+
import_stmt: import_name | import_from
|
| 96 |
+
import_name: 'import' dotted_as_names
|
| 97 |
+
import_from: ('from' ('.'* dotted_name | '.'+)
|
| 98 |
+
'import' ('*' | '(' import_as_names ')' | import_as_names))
|
| 99 |
+
import_as_name: NAME ['as' NAME]
|
| 100 |
+
dotted_as_name: dotted_name ['as' NAME]
|
| 101 |
+
import_as_names: import_as_name (',' import_as_name)* [',']
|
| 102 |
+
dotted_as_names: dotted_as_name (',' dotted_as_name)*
|
| 103 |
+
dotted_name: NAME ('.' NAME)*
|
| 104 |
+
global_stmt: ('global' | 'nonlocal') NAME (',' NAME)*
|
| 105 |
+
exec_stmt: 'exec' expr ['in' test [',' test]]
|
| 106 |
+
assert_stmt: 'assert' test [',' test]
|
| 107 |
+
|
| 108 |
+
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
|
| 109 |
+
async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
|
| 110 |
+
if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite]
|
| 111 |
+
while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite]
|
| 112 |
+
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
|
| 113 |
+
try_stmt: ('try' ':' suite
|
| 114 |
+
((except_clause ':' suite)+
|
| 115 |
+
['else' ':' suite]
|
| 116 |
+
['finally' ':' suite] |
|
| 117 |
+
'finally' ':' suite))
|
| 118 |
+
with_stmt: 'with' with_item (',' with_item)* ':' suite
|
| 119 |
+
with_item: test ['as' expr]
|
| 120 |
+
with_var: 'as' expr
|
| 121 |
+
# NB compile.c makes sure that the default except clause is last
|
| 122 |
+
except_clause: 'except' [test [(',' | 'as') test]]
|
| 123 |
+
suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
|
| 124 |
+
|
| 125 |
+
# Backward compatibility cruft to support:
|
| 126 |
+
# [ x for x in lambda: True, lambda: False if x() ]
|
| 127 |
+
# even while also allowing:
|
| 128 |
+
# lambda x: 5 if x else 2
|
| 129 |
+
# (But not a mix of the two)
|
| 130 |
+
testlist_safe: old_test [(',' old_test)+ [',']]
|
| 131 |
+
old_test: or_test | old_lambdef
|
| 132 |
+
old_lambdef: 'lambda' [varargslist] ':' old_test
|
| 133 |
+
|
| 134 |
+
namedexpr_test: test [':=' test]
|
| 135 |
+
test: or_test ['if' or_test 'else' test] | lambdef
|
| 136 |
+
or_test: and_test ('or' and_test)*
|
| 137 |
+
and_test: not_test ('and' not_test)*
|
| 138 |
+
not_test: 'not' not_test | comparison
|
| 139 |
+
comparison: expr (comp_op expr)*
|
| 140 |
+
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
|
| 141 |
+
star_expr: '*' expr
|
| 142 |
+
expr: xor_expr ('|' xor_expr)*
|
| 143 |
+
xor_expr: and_expr ('^' and_expr)*
|
| 144 |
+
and_expr: shift_expr ('&' shift_expr)*
|
| 145 |
+
shift_expr: arith_expr (('<<'|'>>') arith_expr)*
|
| 146 |
+
arith_expr: term (('+'|'-') term)*
|
| 147 |
+
term: factor (('*'|'@'|'/'|'%'|'//') factor)*
|
| 148 |
+
factor: ('+'|'-'|'~') factor | power
|
| 149 |
+
power: [AWAIT] atom trailer* ['**' factor]
|
| 150 |
+
atom: ('(' [yield_expr|testlist_gexp] ')' |
|
| 151 |
+
'[' [listmaker] ']' |
|
| 152 |
+
'{' [dictsetmaker] '}' |
|
| 153 |
+
'`' testlist1 '`' |
|
| 154 |
+
NAME | NUMBER | STRING+ | '.' '.' '.')
|
| 155 |
+
listmaker: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] )
|
| 156 |
+
testlist_gexp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] )
|
| 157 |
+
lambdef: 'lambda' [varargslist] ':' test
|
| 158 |
+
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
|
| 159 |
+
subscriptlist: subscript (',' subscript)* [',']
|
| 160 |
+
subscript: test | [test] ':' [test] [sliceop]
|
| 161 |
+
sliceop: ':' [test]
|
| 162 |
+
exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
|
| 163 |
+
testlist: test (',' test)* [',']
|
| 164 |
+
dictsetmaker: ( ((test ':' test | '**' expr)
|
| 165 |
+
(comp_for | (',' (test ':' test | '**' expr))* [','])) |
|
| 166 |
+
((test | star_expr)
|
| 167 |
+
(comp_for | (',' (test | star_expr))* [','])) )
|
| 168 |
+
|
| 169 |
+
classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
|
| 170 |
+
|
| 171 |
+
arglist: argument (',' argument)* [',']
|
| 172 |
+
|
| 173 |
+
# "test '=' test" is really "keyword '=' test", but we have no such token.
|
| 174 |
+
# These need to be in a single rule to avoid grammar that is ambiguous
|
| 175 |
+
# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr,
|
| 176 |
+
# we explicitly match '*' here, too, to give it proper precedence.
|
| 177 |
+
# Illegal combinations and orderings are blocked in ast.c:
|
| 178 |
+
# multiple (test comp_for) arguments are blocked; keyword unpackings
|
| 179 |
+
# that precede iterable unpackings are blocked; etc.
|
| 180 |
+
argument: ( test [comp_for] |
|
| 181 |
+
test ':=' test |
|
| 182 |
+
test '=' test |
|
| 183 |
+
'**' test |
|
| 184 |
+
'*' test )
|
| 185 |
+
|
| 186 |
+
comp_iter: comp_for | comp_if
|
| 187 |
+
comp_for: [ASYNC] 'for' exprlist 'in' testlist_safe [comp_iter]
|
| 188 |
+
comp_if: 'if' old_test [comp_iter]
|
| 189 |
+
|
| 190 |
+
testlist1: test (',' test)*
|
| 191 |
+
|
| 192 |
+
# not used in grammar, but may appear in "node" passed from Parser to Compiler
|
| 193 |
+
encoding_decl: NAME
|
| 194 |
+
|
| 195 |
+
yield_expr: 'yield' [yield_arg]
|
| 196 |
+
yield_arg: 'from' test | testlist_star_expr
|
evalkit_tf446/lib/python3.10/lib2to3/PatternGrammar.txt
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
# A grammar to describe tree matching patterns.
|
| 5 |
+
# Not shown here:
|
| 6 |
+
# - 'TOKEN' stands for any token (leaf node)
|
| 7 |
+
# - 'any' stands for any node (leaf or interior)
|
| 8 |
+
# With 'any' we can still specify the sub-structure.
|
| 9 |
+
|
| 10 |
+
# The start symbol is 'Matcher'.
|
| 11 |
+
|
| 12 |
+
Matcher: Alternatives ENDMARKER
|
| 13 |
+
|
| 14 |
+
Alternatives: Alternative ('|' Alternative)*
|
| 15 |
+
|
| 16 |
+
Alternative: (Unit | NegatedUnit)+
|
| 17 |
+
|
| 18 |
+
Unit: [NAME '='] ( STRING [Repeater]
|
| 19 |
+
| NAME [Details] [Repeater]
|
| 20 |
+
| '(' Alternatives ')' [Repeater]
|
| 21 |
+
| '[' Alternatives ']'
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')')
|
| 25 |
+
|
| 26 |
+
Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}'
|
| 27 |
+
|
| 28 |
+
Details: '<' Alternatives '>'
|
evalkit_tf446/lib/python3.10/lib2to3/PatternGrammar3.10.16.final.0.pickle
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:36ee934395b9209737b13893ddaff05fad8e239c2fdfac29d401d3fceeb30768
|
| 3 |
+
size 1225
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Dummy file to make this directory a package.
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_apply.cpython-310.pyc
ADDED
|
Binary file (1.91 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_asserts.cpython-310.pyc
ADDED
|
Binary file (1.27 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_except.cpython-310.pyc
ADDED
|
Binary file (3.03 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_execfile.cpython-310.pyc
ADDED
|
Binary file (1.67 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_filter.cpython-310.pyc
ADDED
|
Binary file (2.69 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_funcattrs.cpython-310.pyc
ADDED
|
Binary file (1.23 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_idioms.cpython-310.pyc
ADDED
|
Binary file (3.9 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_import.cpython-310.pyc
ADDED
|
Binary file (3.07 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_map.cpython-310.pyc
ADDED
|
Binary file (3.07 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_methodattrs.cpython-310.pyc
ADDED
|
Binary file (1.19 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_numliterals.cpython-310.pyc
ADDED
|
Binary file (1.28 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_paren.cpython-310.pyc
ADDED
|
Binary file (1.64 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_print.cpython-310.pyc
ADDED
|
Binary file (2.56 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_raise.cpython-310.pyc
ADDED
|
Binary file (2.24 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_throw.cpython-310.pyc
ADDED
|
Binary file (1.8 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_tuple_params.cpython-310.pyc
ADDED
|
Binary file (4.58 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/__pycache__/fix_zip.cpython-310.pyc
ADDED
|
Binary file (1.57 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_apply.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for apply().
|
| 5 |
+
|
| 6 |
+
This converts apply(func, v, k) into (func)(*v, **k)."""
|
| 7 |
+
|
| 8 |
+
# Local imports
|
| 9 |
+
from .. import pytree
|
| 10 |
+
from ..pgen2 import token
|
| 11 |
+
from .. import fixer_base
|
| 12 |
+
from ..fixer_util import Call, Comma, parenthesize
|
| 13 |
+
|
| 14 |
+
class FixApply(fixer_base.BaseFix):
|
| 15 |
+
BM_compatible = True
|
| 16 |
+
|
| 17 |
+
PATTERN = """
|
| 18 |
+
power< 'apply'
|
| 19 |
+
trailer<
|
| 20 |
+
'('
|
| 21 |
+
arglist<
|
| 22 |
+
(not argument<NAME '=' any>) func=any ','
|
| 23 |
+
(not argument<NAME '=' any>) args=any [','
|
| 24 |
+
(not argument<NAME '=' any>) kwds=any] [',']
|
| 25 |
+
>
|
| 26 |
+
')'
|
| 27 |
+
>
|
| 28 |
+
>
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
def transform(self, node, results):
|
| 32 |
+
syms = self.syms
|
| 33 |
+
assert results
|
| 34 |
+
func = results["func"]
|
| 35 |
+
args = results["args"]
|
| 36 |
+
kwds = results.get("kwds")
|
| 37 |
+
# I feel like we should be able to express this logic in the
|
| 38 |
+
# PATTERN above but I don't know how to do it so...
|
| 39 |
+
if args:
|
| 40 |
+
if (args.type == self.syms.argument and
|
| 41 |
+
args.children[0].value in {'**', '*'}):
|
| 42 |
+
return # Make no change.
|
| 43 |
+
if kwds and (kwds.type == self.syms.argument and
|
| 44 |
+
kwds.children[0].value == '**'):
|
| 45 |
+
return # Make no change.
|
| 46 |
+
prefix = node.prefix
|
| 47 |
+
func = func.clone()
|
| 48 |
+
if (func.type not in (token.NAME, syms.atom) and
|
| 49 |
+
(func.type != syms.power or
|
| 50 |
+
func.children[-2].type == token.DOUBLESTAR)):
|
| 51 |
+
# Need to parenthesize
|
| 52 |
+
func = parenthesize(func)
|
| 53 |
+
func.prefix = ""
|
| 54 |
+
args = args.clone()
|
| 55 |
+
args.prefix = ""
|
| 56 |
+
if kwds is not None:
|
| 57 |
+
kwds = kwds.clone()
|
| 58 |
+
kwds.prefix = ""
|
| 59 |
+
l_newargs = [pytree.Leaf(token.STAR, "*"), args]
|
| 60 |
+
if kwds is not None:
|
| 61 |
+
l_newargs.extend([Comma(),
|
| 62 |
+
pytree.Leaf(token.DOUBLESTAR, "**"),
|
| 63 |
+
kwds])
|
| 64 |
+
l_newargs[-2].prefix = " " # that's the ** token
|
| 65 |
+
# XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t)
|
| 66 |
+
# can be translated into f(x, y, *t) instead of f(*(x, y) + t)
|
| 67 |
+
#new = pytree.Node(syms.power, (func, ArgList(l_newargs)))
|
| 68 |
+
return Call(func, l_newargs, prefix=prefix)
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_asserts.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer that replaces deprecated unittest method names."""
|
| 2 |
+
|
| 3 |
+
# Author: Ezio Melotti
|
| 4 |
+
|
| 5 |
+
from ..fixer_base import BaseFix
|
| 6 |
+
from ..fixer_util import Name
|
| 7 |
+
|
| 8 |
+
NAMES = dict(
|
| 9 |
+
assert_="assertTrue",
|
| 10 |
+
assertEquals="assertEqual",
|
| 11 |
+
assertNotEquals="assertNotEqual",
|
| 12 |
+
assertAlmostEquals="assertAlmostEqual",
|
| 13 |
+
assertNotAlmostEquals="assertNotAlmostEqual",
|
| 14 |
+
assertRegexpMatches="assertRegex",
|
| 15 |
+
assertRaisesRegexp="assertRaisesRegex",
|
| 16 |
+
failUnlessEqual="assertEqual",
|
| 17 |
+
failIfEqual="assertNotEqual",
|
| 18 |
+
failUnlessAlmostEqual="assertAlmostEqual",
|
| 19 |
+
failIfAlmostEqual="assertNotAlmostEqual",
|
| 20 |
+
failUnless="assertTrue",
|
| 21 |
+
failUnlessRaises="assertRaises",
|
| 22 |
+
failIf="assertFalse",
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class FixAsserts(BaseFix):
|
| 27 |
+
|
| 28 |
+
PATTERN = """
|
| 29 |
+
power< any+ trailer< '.' meth=(%s)> any* >
|
| 30 |
+
""" % '|'.join(map(repr, NAMES))
|
| 31 |
+
|
| 32 |
+
def transform(self, node, results):
|
| 33 |
+
name = results["meth"][0]
|
| 34 |
+
name.replace(Name(NAMES[str(name)], prefix=name.prefix))
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_basestring.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for basestring -> str."""
|
| 2 |
+
# Author: Christian Heimes
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name
|
| 7 |
+
|
| 8 |
+
class FixBasestring(fixer_base.BaseFix):
|
| 9 |
+
BM_compatible = True
|
| 10 |
+
|
| 11 |
+
PATTERN = "'basestring'"
|
| 12 |
+
|
| 13 |
+
def transform(self, node, results):
|
| 14 |
+
return Name("str", prefix=node.prefix)
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_buffer.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that changes buffer(...) into memoryview(...)."""
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import fixer_base
|
| 8 |
+
from ..fixer_util import Name
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class FixBuffer(fixer_base.BaseFix):
|
| 12 |
+
BM_compatible = True
|
| 13 |
+
|
| 14 |
+
explicit = True # The user must ask for this fixer
|
| 15 |
+
|
| 16 |
+
PATTERN = """
|
| 17 |
+
power< name='buffer' trailer< '(' [any] ')' > any* >
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
def transform(self, node, results):
|
| 21 |
+
name = results["name"]
|
| 22 |
+
name.replace(Name("memoryview", prefix=name.prefix))
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_dict.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for dict methods.
|
| 5 |
+
|
| 6 |
+
d.keys() -> list(d.keys())
|
| 7 |
+
d.items() -> list(d.items())
|
| 8 |
+
d.values() -> list(d.values())
|
| 9 |
+
|
| 10 |
+
d.iterkeys() -> iter(d.keys())
|
| 11 |
+
d.iteritems() -> iter(d.items())
|
| 12 |
+
d.itervalues() -> iter(d.values())
|
| 13 |
+
|
| 14 |
+
d.viewkeys() -> d.keys()
|
| 15 |
+
d.viewitems() -> d.items()
|
| 16 |
+
d.viewvalues() -> d.values()
|
| 17 |
+
|
| 18 |
+
Except in certain very specific contexts: the iter() can be dropped
|
| 19 |
+
when the context is list(), sorted(), iter() or for...in; the list()
|
| 20 |
+
can be dropped when the context is list() or sorted() (but not iter()
|
| 21 |
+
or for...in!). Special contexts that apply to both: list(), sorted(), tuple()
|
| 22 |
+
set(), any(), all(), sum().
|
| 23 |
+
|
| 24 |
+
Note: iter(d.keys()) could be written as iter(d) but since the
|
| 25 |
+
original d.iterkeys() was also redundant we don't fix this. And there
|
| 26 |
+
are (rare) contexts where it makes a difference (e.g. when passing it
|
| 27 |
+
as an argument to a function that introspects the argument).
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
# Local imports
|
| 31 |
+
from .. import pytree
|
| 32 |
+
from .. import patcomp
|
| 33 |
+
from .. import fixer_base
|
| 34 |
+
from ..fixer_util import Name, Call, Dot
|
| 35 |
+
from .. import fixer_util
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
iter_exempt = fixer_util.consuming_calls | {"iter"}
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class FixDict(fixer_base.BaseFix):
|
| 42 |
+
BM_compatible = True
|
| 43 |
+
|
| 44 |
+
PATTERN = """
|
| 45 |
+
power< head=any+
|
| 46 |
+
trailer< '.' method=('keys'|'items'|'values'|
|
| 47 |
+
'iterkeys'|'iteritems'|'itervalues'|
|
| 48 |
+
'viewkeys'|'viewitems'|'viewvalues') >
|
| 49 |
+
parens=trailer< '(' ')' >
|
| 50 |
+
tail=any*
|
| 51 |
+
>
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
def transform(self, node, results):
|
| 55 |
+
head = results["head"]
|
| 56 |
+
method = results["method"][0] # Extract node for method name
|
| 57 |
+
tail = results["tail"]
|
| 58 |
+
syms = self.syms
|
| 59 |
+
method_name = method.value
|
| 60 |
+
isiter = method_name.startswith("iter")
|
| 61 |
+
isview = method_name.startswith("view")
|
| 62 |
+
if isiter or isview:
|
| 63 |
+
method_name = method_name[4:]
|
| 64 |
+
assert method_name in ("keys", "items", "values"), repr(method)
|
| 65 |
+
head = [n.clone() for n in head]
|
| 66 |
+
tail = [n.clone() for n in tail]
|
| 67 |
+
special = not tail and self.in_special_context(node, isiter)
|
| 68 |
+
args = head + [pytree.Node(syms.trailer,
|
| 69 |
+
[Dot(),
|
| 70 |
+
Name(method_name,
|
| 71 |
+
prefix=method.prefix)]),
|
| 72 |
+
results["parens"].clone()]
|
| 73 |
+
new = pytree.Node(syms.power, args)
|
| 74 |
+
if not (special or isview):
|
| 75 |
+
new.prefix = ""
|
| 76 |
+
new = Call(Name("iter" if isiter else "list"), [new])
|
| 77 |
+
if tail:
|
| 78 |
+
new = pytree.Node(syms.power, [new] + tail)
|
| 79 |
+
new.prefix = node.prefix
|
| 80 |
+
return new
|
| 81 |
+
|
| 82 |
+
P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
|
| 83 |
+
p1 = patcomp.compile_pattern(P1)
|
| 84 |
+
|
| 85 |
+
P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
|
| 86 |
+
| comp_for< 'for' any 'in' node=any any* >
|
| 87 |
+
"""
|
| 88 |
+
p2 = patcomp.compile_pattern(P2)
|
| 89 |
+
|
| 90 |
+
def in_special_context(self, node, isiter):
|
| 91 |
+
if node.parent is None:
|
| 92 |
+
return False
|
| 93 |
+
results = {}
|
| 94 |
+
if (node.parent.parent is not None and
|
| 95 |
+
self.p1.match(node.parent.parent, results) and
|
| 96 |
+
results["node"] is node):
|
| 97 |
+
if isiter:
|
| 98 |
+
# iter(d.iterkeys()) -> iter(d.keys()), etc.
|
| 99 |
+
return results["func"].value in iter_exempt
|
| 100 |
+
else:
|
| 101 |
+
# list(d.keys()) -> list(d.keys()), etc.
|
| 102 |
+
return results["func"].value in fixer_util.consuming_calls
|
| 103 |
+
if not isiter:
|
| 104 |
+
return False
|
| 105 |
+
# for ... in d.iterkeys() -> for ... in d.keys(), etc.
|
| 106 |
+
return self.p2.match(node.parent, results) and results["node"] is node
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_except.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for except statements with named exceptions.
|
| 2 |
+
|
| 3 |
+
The following cases will be converted:
|
| 4 |
+
|
| 5 |
+
- "except E, T:" where T is a name:
|
| 6 |
+
|
| 7 |
+
except E as T:
|
| 8 |
+
|
| 9 |
+
- "except E, T:" where T is not a name, tuple or list:
|
| 10 |
+
|
| 11 |
+
except E as t:
|
| 12 |
+
T = t
|
| 13 |
+
|
| 14 |
+
This is done because the target of an "except" clause must be a
|
| 15 |
+
name.
|
| 16 |
+
|
| 17 |
+
- "except E, T:" where T is a tuple or list literal:
|
| 18 |
+
|
| 19 |
+
except E as t:
|
| 20 |
+
T = t.args
|
| 21 |
+
"""
|
| 22 |
+
# Author: Collin Winter
|
| 23 |
+
|
| 24 |
+
# Local imports
|
| 25 |
+
from .. import pytree
|
| 26 |
+
from ..pgen2 import token
|
| 27 |
+
from .. import fixer_base
|
| 28 |
+
from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms
|
| 29 |
+
|
| 30 |
+
def find_excepts(nodes):
|
| 31 |
+
for i, n in enumerate(nodes):
|
| 32 |
+
if n.type == syms.except_clause:
|
| 33 |
+
if n.children[0].value == 'except':
|
| 34 |
+
yield (n, nodes[i+2])
|
| 35 |
+
|
| 36 |
+
class FixExcept(fixer_base.BaseFix):
    """Rewrite Python 2 ``except E, T:`` clauses as ``except E as T:``.

    When T is not a plain name, a temporary name is generated and an
    assignment to the original target is inserted at the top of the
    except body (see the module docstring for the exact shapes).
    """

    BM_compatible = True

    PATTERN = """
    try_stmt< 'try' ':' (simple_stmt | suite)
                  cleanup=(except_clause ':' (simple_stmt | suite))+
                  tail=(['except' ':' (simple_stmt | suite)]
                        ['else' ':' (simple_stmt | suite)]
                        ['finally' ':' (simple_stmt | suite)]) >
    """

    def transform(self, node, results):
        """Return a new try_stmt node with every except clause converted."""
        syms = self.syms

        tail = [n.clone() for n in results["tail"]]

        try_cleanup = [ch.clone() for ch in results["cleanup"]]
        for except_clause, e_suite in find_excepts(try_cleanup):
            # 4 children means the clause has the "except E, T" form.
            if len(except_clause.children) == 4:
                (E, comma, N) = except_clause.children[1:4]
                comma.replace(Name("as", prefix=" "))

                if N.type != token.NAME:
                    # Generate a new N for the except clause
                    new_N = Name(self.new_name(), prefix=" ")
                    target = N.clone()
                    target.prefix = ""
                    N.replace(new_N)
                    new_N = new_N.clone()

                    # Insert "old_N = new_N" as the first statement in
                    # the except body. This loop skips leading whitespace
                    # and indents
                    #TODO(cwinter) suite-cleanup
                    suite_stmts = e_suite.children
                    for i, stmt in enumerate(suite_stmts):
                        if isinstance(stmt, pytree.Node):
                            break

                    # The assignment is different if old_N is a tuple or list
                    # In that case, the assignment is old_N = new_N.args
                    if is_tuple(N) or is_list(N):
                        assign = Assign(target, Attr(new_N, Name('args')))
                    else:
                        assign = Assign(target, new_N)

                    #TODO(cwinter) stopgap until children becomes a smart list
                    for child in reversed(suite_stmts[:i]):
                        e_suite.insert_child(0, child)
                    e_suite.insert_child(i, assign)
                elif N.prefix == "":
                    # No space after a comma is legal; no space after "as",
                    # not so much.
                    N.prefix = " "

        #TODO(cwinter) fix this when children becomes a smart list
        children = [c.clone() for c in node.children[:3]] + try_cleanup + tail
        return pytree.Node(node.type, children)
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_exec.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for exec.
|
| 5 |
+
|
| 6 |
+
This converts usages of the exec statement into calls to a built-in
|
| 7 |
+
exec() function.
|
| 8 |
+
|
| 9 |
+
exec code in ns1, ns2 -> exec(code, ns1, ns2)
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
# Local imports
|
| 13 |
+
from .. import fixer_base
|
| 14 |
+
from ..fixer_util import Comma, Name, Call
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class FixExec(fixer_base.BaseFix):
    """Convert the Python 2 ``exec`` statement into an ``exec()`` call.

    ``exec code in ns1, ns2`` becomes ``exec(code, ns1, ns2)``.
    """

    BM_compatible = True

    PATTERN = """
    exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >
    |
    exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any >
    """

    def transform(self, node, results):
        """Return the replacement ``exec(...)`` call node."""
        assert results
        syms = self.syms
        # The code object/string always comes first; strip its prefix so it
        # sits directly after the opening parenthesis.
        code = results["a"].clone()
        code.prefix = ""
        call_args = [code]
        # Optional namespaces become additional comma-separated arguments.
        for key in ("b", "c"):
            extra = results.get(key)
            if extra is not None:
                call_args.extend([Comma(), extra.clone()])

        return Call(Name("exec"), call_args, prefix=node.prefix)
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_execfile.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for execfile.
|
| 5 |
+
|
| 6 |
+
This converts usages of the execfile function into calls to the built-in
|
| 7 |
+
exec() function.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from .. import fixer_base
|
| 11 |
+
from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node,
|
| 12 |
+
ArgList, String, syms)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FixExecfile(fixer_base.BaseFix):
    """Replace ``execfile(f, ...)`` calls with
    ``exec(compile(open(f, "rb").read(), f, 'exec'), ...)``.
    """

    BM_compatible = True

    PATTERN = """
    power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
    |
    power< 'execfile' trailer< '(' filename=any ')' > >
    """

    def transform(self, node, results):
        """Build and return the replacement ``exec(compile(...), ...)`` node."""
        assert results
        filename = results["filename"]
        globals = results.get("globals")
        locals = results.get("locals")

        # Copy over the prefix from the right parentheses end of the execfile
        # call.
        execfile_paren = node.children[-1].children[-1].clone()
        # Construct open().read().
        open_args = ArgList([filename.clone(), Comma(), String('"rb"', ' ')],
                            rparen=execfile_paren)
        open_call = Node(syms.power, [Name("open"), open_args])
        read = [Node(syms.trailer, [Dot(), Name('read')]),
                Node(syms.trailer, [LParen(), RParen()])]
        open_expr = [open_call] + read
        # Wrap the open call in a compile call. This is so the filename will be
        # preserved in the execed code.
        filename_arg = filename.clone()
        filename_arg.prefix = " "
        exec_str = String("'exec'", " ")
        compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str]
        compile_call = Call(Name("compile"), compile_args, "")
        # Finally, replace the execfile call with an exec call.
        args = [compile_call]
        if globals is not None:
            args.extend([Comma(), globals.clone()])
        if locals is not None:
            args.extend([Comma(), locals.clone()])
        return Call(Name("exec"), args, prefix=node.prefix)
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_exitfunc.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Convert use of sys.exitfunc to use the atexit module.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Author: Benjamin Peterson
|
| 6 |
+
|
| 7 |
+
from lib2to3 import pytree, fixer_base
|
| 8 |
+
from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class FixExitfunc(fixer_base.BaseFix):
    """Turn assignments to ``sys.exitfunc`` into ``atexit.register()`` calls,
    adding an ``atexit`` import next to the recorded ``sys`` import.
    """

    keep_line_order = True
    BM_compatible = True

    PATTERN = """
              (
                  sys_import=import_name<'import'
                      ('sys'
                      |
                      dotted_as_names< (any ',')* 'sys' (',' any)* >
                      )
                  >
              |
                  expr_stmt<
                      power< 'sys' trailer< '.' 'exitfunc' > >
                  '=' func=any >
              )
              """

    def __init__(self, *args):
        super(FixExitfunc, self).__init__(*args)

    def start_tree(self, tree, filename):
        """Reset per-tree state: no sys import has been seen yet."""
        super(FixExitfunc, self).start_tree(tree, filename)
        # First matching "import sys" statement seen in this tree, if any.
        self.sys_import = None

    def transform(self, node, results):
        """Record sys imports and rewrite exitfunc assignments."""
        # First, find the sys import. We'll just hope it's global scope.
        if "sys_import" in results:
            if self.sys_import is None:
                self.sys_import = results["sys_import"]
            return

        func = results["func"].clone()
        func.prefix = ""
        register = pytree.Node(syms.power,
                               Attr(Name("atexit"), Name("register"))
                               )
        call = Call(register, [func], node.prefix)
        node.replace(call)

        if self.sys_import is None:
            # That's interesting.
            self.warning(node, "Can't find sys import; Please add an atexit "
                               "import at the top of your file.")
            return

        # Now add an atexit import after the sys import.
        names = self.sys_import.children[1]
        if names.type == syms.dotted_as_names:
            # "import sys, foo" -> append ", atexit" to the same statement.
            names.append_child(Comma())
            names.append_child(Name("atexit", " "))
        else:
            # Plain "import sys" -> insert a new "import atexit" line after it.
            containing_stmt = self.sys_import.parent
            position = containing_stmt.children.index(self.sys_import)
            # NOTE(review): stmt_container is unused — kept as in upstream.
            stmt_container = containing_stmt.parent
            new_import = pytree.Node(syms.import_name,
                              [Name("import"), Name("atexit", " ")]
                              )
            new = pytree.Node(syms.simple_stmt, [new_import])
            containing_stmt.insert_child(position + 1, Newline())
            containing_stmt.insert_child(position + 2, new)
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_filter.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that changes filter(F, X) into list(filter(F, X)).
|
| 5 |
+
|
| 6 |
+
We avoid the transformation if the filter() call is directly contained
|
| 7 |
+
in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
|
| 8 |
+
for V in <>:.
|
| 9 |
+
|
| 10 |
+
NOTE: This is still not correct if the original code was depending on
|
| 11 |
+
filter(F, X) to return a string if X is a string and a tuple if X is a
|
| 12 |
+
tuple. That would require type inference, which we don't do. Let
|
| 13 |
+
Python 2.6 figure it out.
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
# Local imports
|
| 17 |
+
from .. import fixer_base
|
| 18 |
+
from ..pytree import Node
|
| 19 |
+
from ..pygram import python_symbols as syms
|
| 20 |
+
from ..fixer_util import Name, ArgList, ListComp, in_special_context, parenthesize
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class FixFilter(fixer_base.ConditionalFix):
    """Wrap ``filter(...)`` in ``list(...)``, or rewrite lambda/None forms
    as list comprehensions (see the module docstring for the caveats).
    """

    BM_compatible = True

    PATTERN = """
    filter_lambda=power<
        'filter'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        trailer< '(' arglist< none='None' ',' seq=any > ')' >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    # Skip when the module imports filter from future_builtins.
    skip_on = "future_builtins.filter"

    def transform(self, node, results):
        """Return the rewritten node, or None to leave the call alone."""
        if self.should_skip(node):
            return

        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if "filter_lambda" in results:
            # filter(lambda fp: xp, it) -> [fp for fp in it if xp]
            xp = results.get("xp").clone()
            if xp.type == syms.test:
                xp.prefix = ""
                xp = parenthesize(xp)

            new = ListComp(results.get("fp").clone(),
                           results.get("fp").clone(),
                           results.get("it").clone(), xp)
            new = Node(syms.power, [new] + trailers, prefix="")

        elif "none" in results:
            # filter(None, seq) -> [_f for _f in seq if _f]
            new = ListComp(Name("_f"),
                           Name("_f"),
                           results["seq"].clone(),
                           Name("_f"))
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            # General case: wrap in list(), unless the call already sits in a
            # consuming context (iter(), list(), a for-loop target, ...).
            if in_special_context(node):
                return None

            args = results['args'].clone()
            new = Node(syms.power, [Name("filter"), args], prefix="")
            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""
        new.prefix = node.prefix
        return new
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_funcattrs.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix function attribute names (f.func_x -> f.__x__)."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class FixFuncattrs(fixer_base.BaseFix):
    """Rename old function attributes (``f.func_x`` -> ``f.__x__``)."""

    BM_compatible = True

    PATTERN = """
    power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
                                  | 'func_name' | 'func_defaults' | 'func_code'
                                  | 'func_dict') > any* >
    """

    def transform(self, node, results):
        """Replace the matched ``func_*`` leaf with its dunder spelling."""
        old_attr = results["attr"][0]
        # Drop the "func_" prefix (5 chars) and wrap the rest in dunders.
        dunder = "__%s__" % old_attr.value[5:]
        old_attr.replace(Name(dunder, prefix=old_attr.prefix))
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_future.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Remove __future__ imports
|
| 2 |
+
|
| 3 |
+
from __future__ import foo is replaced with an empty line.
|
| 4 |
+
"""
|
| 5 |
+
# Author: Christian Heimes
|
| 6 |
+
|
| 7 |
+
# Local imports
|
| 8 |
+
from .. import fixer_base
|
| 9 |
+
from ..fixer_util import BlankLine
|
| 10 |
+
|
| 11 |
+
class FixFuture(fixer_base.BaseFix):
    """Strip ``from __future__ import ...`` lines, leaving a blank line."""

    BM_compatible = True

    PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""

    # This should be run last -- some things check for the import
    run_order = 10

    def transform(self, node, results):
        """Return a blank line carrying the original node's prefix."""
        replacement = BlankLine()
        replacement.prefix = node.prefix
        return replacement
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_getcwdu.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Fixer that changes os.getcwdu() to os.getcwd().
|
| 3 |
+
"""
|
| 4 |
+
# Author: Victor Stinner
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import fixer_base
|
| 8 |
+
from ..fixer_util import Name
|
| 9 |
+
|
| 10 |
+
class FixGetcwdu(fixer_base.BaseFix):
    """Change ``os.getcwdu()`` calls to ``os.getcwd()``."""

    BM_compatible = True

    PATTERN = """
              power< 'os' trailer< dot='.' name='getcwdu' > any* >
              """

    def transform(self, node, results):
        """Swap the ``getcwdu`` leaf for ``getcwd``, keeping its prefix."""
        old_name = results["name"]
        old_name.replace(Name("getcwd", prefix=old_name.prefix))
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_has_key.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for has_key().
|
| 5 |
+
|
| 6 |
+
Calls to .has_key() methods are expressed in terms of the 'in'
|
| 7 |
+
operator:
|
| 8 |
+
|
| 9 |
+
d.has_key(k) -> k in d
|
| 10 |
+
|
| 11 |
+
CAVEATS:
|
| 12 |
+
1) While the primary target of this fixer is dict.has_key(), the
|
| 13 |
+
fixer will change any has_key() method call, regardless of its
|
| 14 |
+
class.
|
| 15 |
+
|
| 16 |
+
2) Cases like this will not be converted:
|
| 17 |
+
|
| 18 |
+
m = d.has_key
|
| 19 |
+
if m(k):
|
| 20 |
+
...
|
| 21 |
+
|
| 22 |
+
Only *calls* to has_key() are converted. While it is possible to
|
| 23 |
+
convert the above to something like
|
| 24 |
+
|
| 25 |
+
m = d.__contains__
|
| 26 |
+
if m(k):
|
| 27 |
+
...
|
| 28 |
+
|
| 29 |
+
this is currently not done.
|
| 30 |
+
"""
|
| 31 |
+
|
| 32 |
+
# Local imports
|
| 33 |
+
from .. import pytree
|
| 34 |
+
from .. import fixer_base
|
| 35 |
+
from ..fixer_util import Name, parenthesize
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class FixHasKey(fixer_base.BaseFix):
    """Rewrite ``x.has_key(k)`` calls as ``k in x`` comparisons
    (and ``not x.has_key(k)`` as ``k not in x``); see module docstring
    for the caveats.
    """

    BM_compatible = True

    PATTERN = """
    anchor=power<
        before=any+
        trailer< '.' 'has_key' >
        trailer<
            '('
            ( not(arglist | argument<any '=' any>) arg=any
            | arglist<(not argument<any '=' any>) arg=any ','>
            )
            ')'
        >
        after=any*
    >
    |
    negation=not_test<
        'not'
        anchor=power<
            before=any+
            trailer< '.' 'has_key' >
            trailer<
                '('
                ( not(arglist | argument<any '=' any>) arg=any
                | arglist<(not argument<any '=' any>) arg=any ','>
                )
                ')'
            >
        >
    >
    """

    def transform(self, node, results):
        """Build the replacement comparison, parenthesizing where needed."""
        assert results
        syms = self.syms
        if (node.parent.type == syms.not_test and
            self.pattern.match(node.parent)):
            # Don't transform a node matching the first alternative of the
            # pattern when its parent matches the second alternative
            return None
        negation = results.get("negation")
        anchor = results["anchor"]
        prefix = node.prefix
        before = [n.clone() for n in results["before"]]
        arg = results["arg"].clone()
        after = results.get("after")
        if after:
            after = [n.clone() for n in after]
        # An argument that binds looser than "in" must be parenthesized.
        if arg.type in (syms.comparison, syms.not_test, syms.and_test,
                        syms.or_test, syms.test, syms.lambdef, syms.argument):
            arg = parenthesize(arg)
        if len(before) == 1:
            before = before[0]
        else:
            before = pytree.Node(syms.power, before)
        before.prefix = " "
        n_op = Name("in", prefix=" ")
        if negation:
            n_not = Name("not", prefix=" ")
            n_op = pytree.Node(syms.comp_op, (n_not, n_op))
        new = pytree.Node(syms.comparison, (arg, n_op, before))
        if after:
            # Trailing trailers (calls, subscripts) apply to the whole
            # comparison, so wrap it in parentheses first.
            new = parenthesize(new)
            new = pytree.Node(syms.power, (new,) + tuple(after))
        # Parenthesize again when the result is embedded in a tighter-binding
        # parent expression.
        if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr,
                                syms.and_expr, syms.shift_expr,
                                syms.arith_expr, syms.term,
                                syms.factor, syms.power):
            new = parenthesize(new)
        new.prefix = prefix
        return new
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_idioms.py
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Adjust some old Python 2 idioms to their modern counterparts.
|
| 2 |
+
|
| 3 |
+
* Change some type comparisons to isinstance() calls:
|
| 4 |
+
type(x) == T -> isinstance(x, T)
|
| 5 |
+
type(x) is T -> isinstance(x, T)
|
| 6 |
+
type(x) != T -> not isinstance(x, T)
|
| 7 |
+
type(x) is not T -> not isinstance(x, T)
|
| 8 |
+
|
| 9 |
+
* Change "while 1:" into "while True:".
|
| 10 |
+
|
| 11 |
+
* Change both
|
| 12 |
+
|
| 13 |
+
v = list(EXPR)
|
| 14 |
+
v.sort()
|
| 15 |
+
foo(v)
|
| 16 |
+
|
| 17 |
+
and the more general
|
| 18 |
+
|
| 19 |
+
v = EXPR
|
| 20 |
+
v.sort()
|
| 21 |
+
foo(v)
|
| 22 |
+
|
| 23 |
+
into
|
| 24 |
+
|
| 25 |
+
v = sorted(EXPR)
|
| 26 |
+
foo(v)
|
| 27 |
+
"""
|
| 28 |
+
# Author: Jacques Frechet, Collin Winter
|
| 29 |
+
|
| 30 |
+
# Local imports
|
| 31 |
+
from .. import fixer_base
|
| 32 |
+
from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms
|
| 33 |
+
|
| 34 |
+
CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
|
| 35 |
+
TYPE = "power< 'type' trailer< '(' x=any ')' > >"
|
| 36 |
+
|
| 37 |
+
class FixIdioms(fixer_base.BaseFix):
    """Rewrite a few classic Python 2 idioms (see the module docstring):
    type comparisons -> isinstance(), ``while 1`` -> ``while True``, and
    assign-then-``.sort()`` -> ``sorted()``.
    """

    explicit = True # The user must ask for this fixer

    PATTERN = r"""
        isinstance=comparison< %s %s T=any >
        |
        isinstance=comparison< T=any %s %s >
        |
        while_stmt< 'while' while='1' ':' any+ >
        |
        sorted=any<
            any*
            simple_stmt<
              expr_stmt< id1=any '='
                         power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
              >
              '\n'
            >
            sort=
            simple_stmt<
              power< id2=any
                     trailer< '.' 'sort' > trailer< '(' ')' >
              >
              '\n'
            >
            next=any*
        >
        |
        sorted=any<
            any*
            simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
            sort=
            simple_stmt<
              power< id2=any
                     trailer< '.' 'sort' > trailer< '(' ')' >
              >
              '\n'
            >
            next=any*
        >
    """ % (TYPE, CMP, CMP, TYPE)

    def match(self, node):
        """Match, rejecting sort/sorted hits over different identifiers."""
        r = super(FixIdioms, self).match(node)
        # If we've matched one of the sort/sorted subpatterns above, we
        # want to reject matches where the initial assignment and the
        # subsequent .sort() call involve different identifiers.
        if r and "sorted" in r:
            if r["id1"] == r["id2"]:
                return r
            return None
        return r

    def transform(self, node, results):
        """Dispatch to the idiom-specific transform for the matched pattern."""
        if "isinstance" in results:
            return self.transform_isinstance(node, results)
        elif "while" in results:
            return self.transform_while(node, results)
        elif "sorted" in results:
            return self.transform_sort(node, results)
        else:
            raise RuntimeError("Invalid match")

    def transform_isinstance(self, node, results):
        """type(x) == T -> isinstance(x, T); negated when 'n' was captured."""
        x = results["x"].clone() # The thing inside of type()
        T = results["T"].clone() # The type being compared against
        x.prefix = ""
        T.prefix = " "
        test = Call(Name("isinstance"), [x, Comma(), T])
        if "n" in results:
            test.prefix = " "
            test = Node(syms.not_test, [Name("not"), test])
        test.prefix = node.prefix
        return test

    def transform_while(self, node, results):
        """Replace the literal 1 in ``while 1:`` with ``True``."""
        one = results["while"]
        one.replace(Name("True", prefix=one.prefix))

    def transform_sort(self, node, results):
        """v = list(EXPR); v.sort() -> v = sorted(EXPR), preserving comments."""
        sort_stmt = results["sort"]
        next_stmt = results["next"]
        list_call = results.get("list")
        simple_expr = results.get("expr")

        if list_call:
            list_call.replace(Name("sorted", prefix=list_call.prefix))
        elif simple_expr:
            new = simple_expr.clone()
            new.prefix = ""
            simple_expr.replace(Call(Name("sorted"), [new],
                                     prefix=simple_expr.prefix))
        else:
            raise RuntimeError("should not have reached here")
        sort_stmt.remove()

        btwn = sort_stmt.prefix
        # Keep any prefix lines between the sort_stmt and the list_call and
        # shove them right after the sorted() call.
        if "\n" in btwn:
            if next_stmt:
                # The new prefix should be everything from the sort_stmt's
                # prefix up to the last newline, then the old prefix after a new
                # line.
                prefix_lines = (btwn.rpartition("\n")[0], next_stmt[0].prefix)
                next_stmt[0].prefix = "\n".join(prefix_lines)
            else:
                assert list_call.parent
                assert list_call.next_sibling is None
                # Put a blank line after list_call and set its prefix.
                end_line = BlankLine()
                list_call.parent.append_child(end_line)
                assert list_call.next_sibling is end_line
                # The new prefix should be everything up to the first new line
                # of sort_stmt's prefix.
                end_line.prefix = btwn.rpartition("\n")[0]
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_import.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for import statements.
|
| 2 |
+
If spam is being imported from the local directory, this import:
|
| 3 |
+
from spam import eggs
|
| 4 |
+
Becomes:
|
| 5 |
+
from .spam import eggs
|
| 6 |
+
|
| 7 |
+
And this import:
|
| 8 |
+
import spam
|
| 9 |
+
Becomes:
|
| 10 |
+
from . import spam
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
# Local imports
|
| 14 |
+
from .. import fixer_base
|
| 15 |
+
from os.path import dirname, join, exists, sep
|
| 16 |
+
from ..fixer_util import FromImport, syms, token
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def traverse_imports(names):
    """Yield every dotted module name imported by a dotted_as_names node.

    Plain NAME leaves yield their value; dotted names are re-joined;
    "as" clauses contribute only the module being imported.
    """
    stack = [names]
    while stack:
        node = stack.pop()
        if node.type == token.NAME:
            yield node.value
        elif node.type == syms.dotted_name:
            yield "".join([child.value for child in node.children])
        elif node.type == syms.dotted_as_name:
            # Only the module (first child) matters, not the alias.
            stack.append(node.children[0])
        elif node.type == syms.dotted_as_names:
            # Children alternate name, comma, name, ... — take every name.
            stack.extend(node.children[::-2])
        else:
            raise AssertionError("unknown node type")
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class FixImport(fixer_base.BaseFix):
    """Make implicit relative imports explicit:
    ``from spam import eggs`` -> ``from .spam import eggs`` and
    ``import spam`` -> ``from . import spam`` when spam is a sibling module.
    """

    BM_compatible = True

    PATTERN = """
    import_from< 'from' imp=any 'import' ['('] any [')'] >
    |
    import_name< 'import' imp=any >
    """

    def start_tree(self, tree, name):
        """Record whether this module already uses absolute_import."""
        super(FixImport, self).start_tree(tree, name)
        # Nothing to do if absolute imports are already in effect.
        self.skip = "absolute_import" in tree.future_features

    def transform(self, node, results):
        """Rewrite local imports as explicit relative imports."""
        if self.skip:
            return
        imp = results['imp']

        if node.type == syms.import_from:
            # Some imps are top-level (eg: 'import ham')
            # some are first level (eg: 'import ham.eggs')
            # some are third level (eg: 'import ham.eggs as spam')
            # Hence, the loop
            while not hasattr(imp, 'value'):
                imp = imp.children[0]
            if self.probably_a_local_import(imp.value):
                imp.value = "." + imp.value
                imp.changed()
        else:
            have_local = False
            have_absolute = False
            for mod_name in traverse_imports(imp):
                if self.probably_a_local_import(mod_name):
                    have_local = True
                else:
                    have_absolute = True
            if have_absolute:
                if have_local:
                    # We won't handle both sibling and absolute imports in the
                    # same statement at the moment.
                    self.warning(node, "absolute and local imports together")
                return

            new = FromImport(".", [imp])
            new.prefix = node.prefix
            return new

    def probably_a_local_import(self, imp_name):
        """Heuristic: does *imp_name* live next to the file being fixed?"""
        if imp_name.startswith("."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file its not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
            if exists(base_path + ext):
                return True
        return False
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_imports.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix incompatible imports and module references."""
|
| 2 |
+
# Authors: Collin Winter, Nick Edds
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name, attr_chain
|
| 7 |
+
|
| 8 |
+
# Mapping of Python 2 module names to their Python 3 replacements.
# Keys are the old 2.x names; values are the new (possibly dotted) names.
MAPPING = {'StringIO':  'io',
           'cStringIO': 'io',
           'cPickle': 'pickle',
           '__builtin__' : 'builtins',
           'copy_reg': 'copyreg',
           'Queue': 'queue',
           'SocketServer': 'socketserver',
           'ConfigParser': 'configparser',
           'repr': 'reprlib',
           'FileDialog': 'tkinter.filedialog',
           'tkFileDialog': 'tkinter.filedialog',
           'SimpleDialog': 'tkinter.simpledialog',
           'tkSimpleDialog': 'tkinter.simpledialog',
           'tkColorChooser': 'tkinter.colorchooser',
           'tkCommonDialog': 'tkinter.commondialog',
           'Dialog': 'tkinter.dialog',
           'Tkdnd': 'tkinter.dnd',
           'tkFont': 'tkinter.font',
           'tkMessageBox': 'tkinter.messagebox',
           'ScrolledText': 'tkinter.scrolledtext',
           'Tkconstants': 'tkinter.constants',
           'Tix': 'tkinter.tix',
           'ttk': 'tkinter.ttk',
           'Tkinter': 'tkinter',
           'markupbase': '_markupbase',
           '_winreg': 'winreg',
           'thread': '_thread',
           'dummy_thread': '_dummy_thread',
           # anydbm and whichdb are handled by fix_imports2
           'dbhash': 'dbm.bsd',
           'dumbdbm': 'dbm.dumb',
           'dbm': 'dbm.ndbm',
           'gdbm': 'dbm.gnu',
           'xmlrpclib': 'xmlrpc.client',
           'DocXMLRPCServer': 'xmlrpc.server',
           'SimpleXMLRPCServer': 'xmlrpc.server',
           'httplib': 'http.client',
           'htmlentitydefs' : 'html.entities',
           'HTMLParser' : 'html.parser',
           'Cookie': 'http.cookies',
           'cookielib': 'http.cookiejar',
           'BaseHTTPServer': 'http.server',
           'SimpleHTTPServer': 'http.server',
           'CGIHTTPServer': 'http.server',
           #'test.test_support': 'test.support',
           'commands': 'subprocess',
           'UserString' : 'collections',
           'UserList' : 'collections',
           'urlparse' : 'urllib.parse',
           'robotparser' : 'urllib.robotparser',
}
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def alternates(members):
    """Return a pattern alternation like ``('a'|'b')`` for *members*."""
    return "({})".format("|".join(repr(member) for member in members))
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def build_pattern(mapping=MAPPING):
    """Yield lib2to3 match patterns covering every import form of the
    modules in *mapping*: plain imports, from-imports, aliased imports,
    and bare attribute usage such as ``thread.foo(bar)``.
    """
    # One alternative per old module name, each binding 'module_name'.
    mod_list = ' | '.join(["module_name='%s'" % key for key in mapping])
    bare_names = alternates(mapping.keys())

    # "import X" and "import X, Y" forms.
    yield """name_import=import_name< 'import' ((%s) |
               multiple_imports=dotted_as_names< any* (%s) any* >) >
          """ % (mod_list, mod_list)
    # "from X import ..." forms (with or without parentheses).
    yield """import_from< 'from' (%s) 'import' ['(']
              ( any | import_as_name< any 'as' any > |
                import_as_names< any* >)  [')'] >
          """ % mod_list
    # "import X as Y" forms, possibly inside a multi-import line.
    yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > |
               multiple_imports=dotted_as_names<
                 any* dotted_as_name< (%s) 'as' any > any* >) >
          """ % (mod_list, mod_list)

    # Find usages of module members in code e.g. thread.foo(bar)
    yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class FixImports(fixer_base.BaseFix):
    """Rename imports (and bare usages) of moved stdlib modules."""

    BM_compatible = True
    keep_line_order = True
    # This is overridden in fix_imports2.
    mapping = MAPPING

    # We want to run this fixer late, so fix_import doesn't try to make
    # stdlib renames into relative imports.
    run_order = 6

    def build_pattern(self):
        # Join the module-level pattern alternatives for this mapping.
        return "|".join(build_pattern(self.mapping))

    def compile_pattern(self):
        # We override this, so MAPPING can be programmatically altered and
        # the changes will be reflected in PATTERN.
        self.PATTERN = self.build_pattern()
        super(FixImports, self).compile_pattern()

    # Don't match the node if it's within another match.
    def match(self, node):
        match = super(FixImports, self).match
        results = match(node)
        if results:
            # Module usage could be in the trailer of an attribute lookup,
            # so we might have nested matches when "bare_with_attr" is
            # present; reject matches nested inside a matching ancestor.
            if "bare_with_attr" not in results and \
               any(match(obj) for obj in attr_chain(node, "parent")):
                return False
            return results
        return False

    def start_tree(self, tree, filename):
        super(FixImports, self).start_tree(tree, filename)
        # Per-tree map of old module name -> new name, used to rewrite
        # bare "oldmod.attr" usages later in the same file.
        self.replace = {}

    def transform(self, node, results):
        import_mod = results.get("module_name")
        if import_mod:
            mod_name = import_mod.value
            new_name = self.mapping[mod_name]
            import_mod.replace(Name(new_name, prefix=import_mod.prefix))
            if "name_import" in results:
                # If it's not a "from x import x, y" or "import x as y"
                # import, mark its usage to be replaced.
                self.replace[mod_name] = new_name
            if "multiple_imports" in results:
                # This is a nasty hack to fix multiple imports on a line
                # (e.g., "import StringIO, urlparse").  The problem is that
                # I can't figure out an easy way to make a pattern recognize
                # the keys of MAPPING randomly sprinkled in an import
                # statement, so re-match and recurse until no key matches.
                results = self.match(node)
                if results:
                    self.transform(node, results)
        else:
            # Replace usage of the module (bare "oldmod.attr" reference).
            bare_name = results["bare_with_attr"][0]
            new_name = self.replace.get(bare_name.value)
            if new_name:
                bare_name.replace(Name(new_name, prefix=bare_name.prefix))
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_imports2.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix incompatible imports and module references that must be fixed after
|
| 2 |
+
fix_imports."""
|
| 3 |
+
from . import fix_imports
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
# Renames that must wait until after fix_imports has run: fix_imports maps
# 'dbm' itself, so anydbm/whichdb -> dbm can only be applied afterwards.
MAPPING = {
            'whichdb': 'dbm',
            'anydbm': 'dbm',
          }


class FixImports2(fix_imports.FixImports):
    """Second pass of import renaming; see module docstring."""

    # Run after FixImports (which uses run_order = 6).
    run_order = 7

    mapping = MAPPING
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_input.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer that changes input(...) into eval(input(...))."""
|
| 2 |
+
# Author: Andre Roberge
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Call, Name
|
| 7 |
+
from .. import patcomp
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
# Matches an eval(...) call wrapping some expression; used to detect
# input() calls that are already wrapped.
context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >")


class FixInput(fixer_base.BaseFix):
    """Rewrite input(...) into eval(input(...)) to keep 2.x semantics."""

    BM_compatible = True
    PATTERN = """
              power< 'input' args=trailer< '(' [any] ')' > >
              """

    def transform(self, node, results):
        # Nothing to do when some earlier pass already wrapped us in eval().
        if context.match(node.parent.parent):
            return None

        wrapped = node.clone()
        wrapped.prefix = ""
        return Call(Name("eval"), [wrapped], prefix=node.prefix)
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_intern.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Georg Brandl.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for intern().
|
| 5 |
+
|
| 6 |
+
intern(s) -> sys.intern(s)"""
|
| 7 |
+
|
| 8 |
+
# Local imports
|
| 9 |
+
from .. import fixer_base
|
| 10 |
+
from ..fixer_util import ImportAndCall, touch_import
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class FixIntern(fixer_base.BaseFix):
    """Rewrite intern(s) into sys.intern(s), adding 'import sys' if needed."""

    BM_compatible = True
    order = "pre"

    PATTERN = """
    power< 'intern'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """

    def transform(self, node, results):
        # The pattern cannot conveniently exclude star-arguments, so do it
        # here: intern(*a) / intern(**k) are left untouched.
        obj = results.get('obj') if results else None
        if (obj is not None and obj.type == self.syms.argument
                and obj.children[0].value in ('**', '*')):
            return
        rewritten = ImportAndCall(node, results, ('sys', 'intern'))
        touch_import(None, 'sys', node)
        return rewritten
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_isinstance.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2008 Armin Ronacher.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that cleans up a tuple argument to isinstance after the tokens
|
| 5 |
+
in it were fixed. This is mainly used to remove double occurrences of
|
| 6 |
+
tokens as a leftover of the long -> int / unicode -> str conversion.
|
| 7 |
+
|
| 8 |
+
eg. isinstance(x, (int, long)) -> isinstance(x, (int, int))
|
| 9 |
+
-> isinstance(x, int)
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
from .. import fixer_base
|
| 13 |
+
from ..fixer_util import token
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class FixIsinstance(fixer_base.BaseFix):
    """Deduplicate names in an isinstance() type tuple, e.g.
    isinstance(x, (int, int)) -> isinstance(x, int)."""

    BM_compatible = True
    PATTERN = """
    power<
        'isinstance'
        trailer< '(' arglist< any ',' atom< '('
            args=testlist_gexp< any+ >
        ')' > > ')' >
    >
    """

    # Must run after the fixers (long -> int, unicode -> str) that can
    # create the duplicates this fixer removes.
    run_order = 6

    def transform(self, node, results):
        names_inserted = set()  # NAME values already kept once
        testlist = results["args"]
        args = testlist.children
        new_args = []
        iterator = enumerate(args)
        for idx, arg in iterator:
            if arg.type == token.NAME and arg.value in names_inserted:
                # Duplicate name: drop it, and also swallow the comma that
                # follows it (advance the shared iterator one extra step).
                if idx < len(args) - 1 and args[idx + 1].type == token.COMMA:
                    next(iterator)
                    continue
            else:
                new_args.append(arg)
                if arg.type == token.NAME:
                    names_inserted.add(arg.value)
        # A trailing comma may be left over after removals.
        if new_args and new_args[-1].type == token.COMMA:
            del new_args[-1]
        if len(new_args) == 1:
            # Only one type left: unwrap the parenthesized tuple entirely.
            atom = testlist.parent
            new_args[0].prefix = atom.prefix
            atom.replace(new_args[0])
        else:
            args[:] = new_args
            node.changed()
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_itertools.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
|
| 2 |
+
itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
|
| 3 |
+
|
| 4 |
+
imports from itertools are fixed in fix_itertools_import.py
|
| 5 |
+
|
| 6 |
+
If itertools is imported as something else (ie: import itertools as it;
|
| 7 |
+
it.izip(spam, eggs)) method calls will not get fixed.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# Local imports
|
| 11 |
+
from .. import fixer_base
|
| 12 |
+
from ..fixer_util import Name
|
| 13 |
+
|
| 14 |
+
class FixItertools(fixer_base.BaseFix):
    """Rewrite itertools.(imap|ifilter|izip)(...) as the builtins
    map/filter/zip, and i(filterfalse|zip_longest) to their renamed forms."""

    BM_compatible = True
    it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')"
    # %(locals())s interpolation splices it_funcs into both alternatives.
    PATTERN = """
              power< it='itertools'
                  trailer<
                     dot='.' func=%(it_funcs)s > trailer< '(' [any] ')' > >
              |
              power< func=%(it_funcs)s trailer< '(' [any] ')' > >
              """ %(locals())

    # Needs to be run after fix_(map|zip|filter)
    run_order = 6

    def transform(self, node, results):
        prefix = None
        func = results['func'][0]
        # ifilterfalse/izip_longest stay in itertools (only renamed), so
        # the 'itertools.' prefix is stripped only for the other functions.
        if ('it' in results and
            func.value not in ('ifilterfalse', 'izip_longest')):
            dot, it = (results['dot'], results['it'])
            # Remove the 'itertools', remembering its prefix (whitespace).
            prefix = it.prefix
            it.remove()
            # Replace the node which contains ('.', 'function') with the
            # function (to be consistent with the second part of the pattern)
            dot.remove()
            func.parent.replace(func)

        prefix = prefix or func.prefix
        # Drop the leading 'i' from the function name.
        func.replace(Name(func.value[1:], prefix=prefix))
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_itertools_imports.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """
|
| 2 |
+
|
| 3 |
+
# Local imports
|
| 4 |
+
from lib2to3 import fixer_base
|
| 5 |
+
from lib2to3.fixer_util import BlankLine, syms, token
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class FixItertoolsImports(fixer_base.BaseFix):
    """Rewrite 'from itertools import ...' lists for Python 3.

    imap/izip/ifilter are dropped (they became the builtin map/zip/filter);
    ifilterfalse and izip_longest are renamed to filterfalse/zip_longest.
    If nothing is left of the import, the statement becomes a blank line.
    """
    BM_compatible = True
    # NOTE: the original applied "% (locals())" to this string even though it
    # contains no conversion specifiers -- a no-op that would misbehave if a
    # literal '%' were ever added to the pattern.  Removed.
    PATTERN = """
              import_from< 'from' 'itertools' 'import' imports=any >
              """

    def transform(self, node, results):
        imports = results['imports']
        # Normalize to a flat list of NAME / import_as_name children.
        if imports.type == syms.import_as_name or not imports.children:
            children = [imports]
        else:
            children = imports.children
        # Every other child is a comma, hence the [::2] stride.
        for child in children[::2]:
            if child.type == token.NAME:
                member = child.value
                name_node = child
            elif child.type == token.STAR:
                # Star import: just leave the statement as is.
                return
            else:
                assert child.type == syms.import_as_name
                name_node = child.children[0]
            member_name = name_node.value
            if member_name in ('imap', 'izip', 'ifilter'):
                # These became builtins; drop them from the import list.
                child.value = None
                child.remove()
            elif member_name in ('ifilterfalse', 'izip_longest'):
                node.changed()
                name_node.value = ('filterfalse' if member_name[1] == 'f'
                                   else 'zip_longest')

        # Make sure the import statement is still sane: collapse the comma
        # runs left behind by the removals above.
        children = imports.children[:] or [imports]
        remove_comma = True
        for child in children:
            if remove_comma and child.type == token.COMMA:
                child.remove()
            else:
                remove_comma ^= True

        while children and children[-1].type == token.COMMA:
            children.pop().remove()

        # If there are no imports left, just get rid of the entire statement
        # by returning a BlankLine carrying the original prefix.
        if (not (imports.children or getattr(imports, 'value', None)) or
            imports.parent is None):
            p = node.prefix
            node = BlankLine()
            node.prefix = p
            return node
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_long.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that turns 'long' into 'int' everywhere.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# Local imports
|
| 8 |
+
from lib2to3 import fixer_base
|
| 9 |
+
from lib2to3.fixer_util import is_probably_builtin
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixLong(fixer_base.BaseFix):
    """Rewrite the builtin name 'long' to 'int' everywhere it is used
    as a builtin (not as an attribute, parameter name, etc.)."""

    BM_compatible = True
    PATTERN = "'long'"

    def transform(self, node, results):
        if not is_probably_builtin(node):
            return
        node.value = "int"
        node.changed()
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_map.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there
|
| 5 |
+
exists a 'from future_builtins import map' statement in the top-level
|
| 6 |
+
namespace.
|
| 7 |
+
|
| 8 |
+
As a special case, map(None, X) is changed into list(X). (This is
|
| 9 |
+
necessary because the semantics are changed in this case -- the new
|
| 10 |
+
map(None, X) is equivalent to [(x,) for x in X].)
|
| 11 |
+
|
| 12 |
+
We avoid the transformation (except for the special case mentioned
|
| 13 |
+
above) if the map() call is directly contained in iter(<>), list(<>),
|
| 14 |
+
tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
|
| 15 |
+
|
| 16 |
+
NOTE: This is still not correct if the original code was depending on
|
| 17 |
+
map(F, X, Y, ...) to go on until the longest argument is exhausted,
|
| 18 |
+
substituting None for missing values -- like zip(), it now stops as
|
| 19 |
+
soon as the shortest argument is exhausted.
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
# Local imports
|
| 23 |
+
from ..pgen2 import token
|
| 24 |
+
from .. import fixer_base
|
| 25 |
+
from ..fixer_util import Name, ArgList, Call, ListComp, in_special_context
|
| 26 |
+
from ..pygram import python_symbols as syms
|
| 27 |
+
from ..pytree import Node
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class FixMap(fixer_base.ConditionalFix):
    """Wrap map(...) calls in list(...); see the module docstring for the
    special cases (map(None, X), lambda arguments, special contexts)."""

    BM_compatible = True

    PATTERN = """
    map_none=power<
        'map'
        trailer< '(' arglist< 'None' ',' arg=any [','] > ')' >
        [extra_trailers=trailer*]
    >
    |
    map_lambda=power<
        'map'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'map' args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    # Skip files that do 'from future_builtins import map'.
    skip_on = 'future_builtins.map'

    def transform(self, node, results):
        if self.should_skip(node):
            return

        # Trailers following the call (e.g. subscripts) must be re-attached
        # to whatever replacement node we build.
        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if node.parent.type == syms.simple_stmt:
            # map() used as a bare statement: result is discarded.
            self.warning(node, "You should use a for loop here")
            new = node.clone()
            new.prefix = ""
            new = Call(Name("list"), [new])
        elif "map_lambda" in results:
            # map(lambda x: e, it) -> [e for x in it]
            new = ListComp(results["xp"].clone(),
                           results["fp"].clone(),
                           results["it"].clone())
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            if "map_none" in results:
                # map(None, X) -> list(X)
                new = results["arg"].clone()
                new.prefix = ""
            else:
                if "args" in results:
                    args = results["args"]
                    # map(None, a, b, ...) cannot be converted faithfully.
                    if args.type == syms.trailer and \
                       args.children[1].type == syms.arglist and \
                       args.children[1].children[0].type == token.NAME and \
                       args.children[1].children[0].value == "None":
                        self.warning(node, "cannot convert map(None, ...) "
                                     "with multiple arguments because map() "
                                     "now truncates to the shortest sequence")
                        return

                    new = Node(syms.power, [Name("map"), args.clone()])
                    new.prefix = ""

            # Inside iter()/list()/for ...: the lazy map is already fine.
            if in_special_context(node):
                return None

            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""

        new.prefix = node.prefix
        return new
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_metaclass.py
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for __metaclass__ = X -> (metaclass=X) methods.
|
| 2 |
+
|
| 3 |
+
The various forms of classef (inherits nothing, inherits once, inherits
|
| 4 |
+
many) don't parse the same in the CST so we look at ALL classes for
|
| 5 |
+
a __metaclass__ and if we find one normalize the inherits to all be
|
| 6 |
+
an arglist.
|
| 7 |
+
|
| 8 |
+
For one-liner classes ('class X: pass') there is no indent/dedent so
|
| 9 |
+
we normalize those into having a suite.
|
| 10 |
+
|
| 11 |
+
Moving the __metaclass__ into the classdef can also cause the class
|
| 12 |
+
body to be empty so there is some special casing for that as well.
|
| 13 |
+
|
| 14 |
+
This fixer also tries very hard to keep original indenting and spacing
|
| 15 |
+
in all those corner cases.
|
| 16 |
+
|
| 17 |
+
"""
|
| 18 |
+
# Author: Jack Diederich
|
| 19 |
+
|
| 20 |
+
# Local imports
|
| 21 |
+
from .. import fixer_base
|
| 22 |
+
from ..pygram import token
|
| 23 |
+
from ..fixer_util import syms, Node, Leaf
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def has_metaclass(parent):
    """Report whether *parent* contains a '__metaclass__ = ...' assignment
    without mutating the tree.

    Two tree shapes are possible:
        classdef => suite => simple_stmt => expr_stmt => Leaf('__metaclass__')
        classdef => simple_stmt => expr_stmt => Leaf('__metaclass__')
    """
    for child in parent.children:
        if child.type == syms.suite:
            # The class has a real body: the answer lives inside it.
            return has_metaclass(child)
        if child.type != syms.simple_stmt or not child.children:
            continue
        expr = child.children[0]
        if expr.type != syms.expr_stmt or not expr.children:
            continue
        target = expr.children[0]
        if isinstance(target, Leaf) and target.value == '__metaclass__':
            return True
    return False
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def fixup_parse_tree(cls_node):
    """ one-line classes don't get a suite in the parse tree so we add
        one to normalize the tree
    """
    for node in cls_node.children:
        if node.type == syms.suite:
            # already in the preferred format, do nothing
            return

    # !%@#! one-liners have no suite node, we have to fake one up
    # Find the colon; everything after it is the class body.
    for i, node in enumerate(cls_node.children):
        if node.type == token.COLON:
            break
    else:
        raise ValueError("No class suite and no ':'!")

    # move everything into a suite node
    # (clone before remove: removing re-parents, so work off a copy)
    suite = Node(syms.suite, [])
    while cls_node.children[i+1:]:
        move_node = cls_node.children[i+1]
        suite.append_child(move_node.clone())
        move_node.remove()
    cls_node.append_child(suite)
    node = suite
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def fixup_simple_stmt(parent, i, stmt_node):
    """ if there is a semi-colon all the parts count as part of the same
        simple_stmt.  We just want the __metaclass__ part so we move
        everything after the semi-colon into its own simple_stmt node
    """
    for semi_ind, node in enumerate(stmt_node.children):
        if node.type == token.SEMI: # *sigh*
            break
    else:
        # No semicolon: nothing to split off.
        return

    node.remove() # kill the semicolon
    new_expr = Node(syms.expr_stmt, [])
    new_stmt = Node(syms.simple_stmt, [new_expr])
    # Move everything after the (removed) semicolon into the new statement.
    while stmt_node.children[semi_ind:]:
        move_node = stmt_node.children[semi_ind]
        new_expr.append_child(move_node.clone())
        move_node.remove()
    parent.insert_child(i, new_stmt)
    # Carry the original statement's prefix (indentation) over to the new
    # statement so the output keeps its formatting.
    new_leaf1 = new_stmt.children[0].children[0]
    old_leaf1 = stmt_node.children[0].children[0]
    new_leaf1.prefix = old_leaf1.prefix
|
| 94 |
+
|
| 95 |
+
def remove_trailing_newline(node):
    """Drop a trailing NEWLINE token from *node*'s children, if present."""
    kids = node.children
    if not kids:
        return
    last = kids[-1]
    if last.type == token.NEWLINE:
        last.remove()
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def find_metas(cls_node):
    """Yield (suite, index, stmt) for each '__metaclass__ = ...' statement
    in the class body, splitting it out of any semicolon-joined statement
    first (mutates the tree via fixup_simple_stmt)."""
    # find the suite node (Mmm, sweet nodes)
    for node in cls_node.children:
        if node.type == syms.suite:
            break
    else:
        raise ValueError("No class suite!")

    # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ]
    # (list(...) snapshots the enumeration because we mutate node.children)
    for i, simple_node in list(enumerate(node.children)):
        if simple_node.type == syms.simple_stmt and simple_node.children:
            expr_node = simple_node.children[0]
            if expr_node.type == syms.expr_stmt and expr_node.children:
                # Check if the expr_node is a simple assignment.
                left_node = expr_node.children[0]
                if isinstance(left_node, Leaf) and \
                   left_node.value == '__metaclass__':
                    # We found an assignment to __metaclass__.
                    fixup_simple_stmt(node, i, simple_node)
                    remove_trailing_newline(simple_node)
                    yield (node, i, simple_node)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def fixup_indent(suite):
    """ If an INDENT is followed by a thing with a prefix then nuke the prefix
        Otherwise we get in trouble when removing __metaclass__ at suite start
    """
    # Depth-first walk using an explicit stack (reversed so pops come in
    # document order).
    kids = suite.children[::-1]
    # find the first indent
    while kids:
        node = kids.pop()
        if node.type == token.INDENT:
            break

    # find the first Leaf
    while kids:
        node = kids.pop()
        if isinstance(node, Leaf) and node.type != token.DEDENT:
            if node.prefix:
                # The INDENT token already supplies the indentation.
                node.prefix = ''
            return
        else:
            # Interior node: descend into its children.
            kids.extend(node.children[::-1])
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
class FixMetaclass(fixer_base.BaseFix):
    """Move a '__metaclass__ = X' class-body assignment into a
    'metaclass=X' keyword argument in the class signature."""

    BM_compatible = True

    PATTERN = """
    classdef<any*>
    """

    def transform(self, node, results):
        if not has_metaclass(node):
            return

        # Normalize one-liner classes to always have a suite node.
        fixup_parse_tree(node)

        # find metaclasses, keep the last one
        last_metaclass = None
        for suite, i, stmt in find_metas(node):
            last_metaclass = stmt
            stmt.remove()

        text_type = node.children[0].type # always Leaf(nnn, 'class')

        # figure out what kind of classdef we have
        if len(node.children) == 7:
            # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite])
            #                 0        1       2    3        4    5    6
            if node.children[3].type == syms.arglist:
                arglist = node.children[3]
            # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite])
            else:
                # Single base: wrap it in an arglist so we can append to it.
                parent = node.children[3].clone()
                arglist = Node(syms.arglist, [parent])
                node.set_child(3, arglist)
        elif len(node.children) == 6:
            # Node(classdef, ['class', 'name', '(',  ')', ':', suite])
            #                 0        1       2     3    4    5
            arglist = Node(syms.arglist, [])
            node.insert_child(3, arglist)
        elif len(node.children) == 4:
            # Node(classdef, ['class', 'name', ':', suite])
            #                 0        1       2    3
            # No parens at all: synthesize '(' arglist ')'.
            arglist = Node(syms.arglist, [])
            node.insert_child(2, Leaf(token.RPAR, ')'))
            node.insert_child(2, arglist)
            node.insert_child(2, Leaf(token.LPAR, '('))
        else:
            raise ValueError("Unexpected class definition")

        # now stick the metaclass in the arglist
        meta_txt = last_metaclass.children[0].children[0]
        meta_txt.value = 'metaclass'
        orig_meta_prefix = meta_txt.prefix

        if arglist.children:
            arglist.append_child(Leaf(token.COMMA, ','))
            meta_txt.prefix = ' '
        else:
            meta_txt.prefix = ''

        # compact the expression "metaclass = Meta" -> "metaclass=Meta"
        expr_stmt = last_metaclass.children[0]
        assert expr_stmt.type == syms.expr_stmt
        expr_stmt.children[1].prefix = ''
        expr_stmt.children[2].prefix = ''

        arglist.append_child(last_metaclass)

        # Removing the statement may leave stray indentation; clean it up.
        fixup_indent(suite)

        # check for empty suite
        if not suite.children:
            # one-liner that was just __metaclass__
            suite.remove()
            pass_leaf = Leaf(text_type, 'pass')
            pass_leaf.prefix = orig_meta_prefix
            node.append_child(pass_leaf)
            node.append_child(Leaf(token.NEWLINE, '\n'))

        elif len(suite.children) > 1 and \
                 (suite.children[-2].type == token.INDENT and
                  suite.children[-1].type == token.DEDENT):
            # there was only one line in the class body and it was
            # __metaclass__; keep the suite valid with a 'pass'
            pass_leaf = Leaf(text_type, 'pass')
            suite.insert_child(-1, pass_leaf)
            suite.insert_child(-1, Leaf(token.NEWLINE, '\n'))
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_methodattrs.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix bound method attributes (method.im_? -> method.__?__).
|
| 2 |
+
"""
|
| 3 |
+
# Author: Christian Heimes
|
| 4 |
+
|
| 5 |
+
# Local imports
|
| 6 |
+
from .. import fixer_base
|
| 7 |
+
from ..fixer_util import Name
|
| 8 |
+
|
| 9 |
+
# Translation table: Python 2 bound-method attributes -> Python 3 names.
MAP = dict(
    im_func="__func__",
    im_self="__self__",
    im_class="__self__.__class__",
)
|
| 14 |
+
|
| 15 |
+
class FixMethodattrs(fixer_base.BaseFix):
    """Rewrite method.im_* attribute accesses using the MAP table."""
    BM_compatible = True
    PATTERN = """
    power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
    """

    def transform(self, node, results):
        # 'attr' captures a one-element sequence holding the attribute leaf.
        old_attr = results["attr"][0]
        replacement = MAP[old_attr.value]
        # Preserve the original leaf's prefix (surrounding whitespace).
        old_attr.replace(Name(replacement, prefix=old_attr.prefix))
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_ne.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that turns <> into !=."""
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import pytree
|
| 8 |
+
from ..pgen2 import token
|
| 9 |
+
from .. import fixer_base
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixNe(fixer_base.BaseFix):
    """Rewrite the legacy '<>' inequality operator as '!='."""
    # Simple enough that the pattern compiler is unnecessary; accept any
    # NOTEQUAL token and filter in match().

    _accept_type = token.NOTEQUAL

    def match(self, node):
        # Override: only the spelled-out '<>' form needs rewriting.
        return node.value == "<>"

    def transform(self, node, results):
        replacement = pytree.Leaf(token.NOTEQUAL, "!=", prefix=node.prefix)
        return replacement
|
evalkit_tf446/lib/python3.10/lib2to3/fixes/fix_next.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for it.next() -> next(it), per PEP 3114."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Things that currently aren't covered:
|
| 5 |
+
# - listcomp "next" names aren't warned
|
| 6 |
+
# - "with" statement targets aren't checked
|
| 7 |
+
|
| 8 |
+
# Local imports
|
| 9 |
+
from ..pgen2 import token
|
| 10 |
+
from ..pygram import python_symbols as syms
|
| 11 |
+
from .. import fixer_base
|
| 12 |
+
from ..fixer_util import Name, Call, find_binding
|
| 13 |
+
|
| 14 |
+
# Warning text emitted when a module-level binding of "next" (or a
# "global next" statement) may shadow the builtin next() this fixer emits.
bind_warning = "Calls to builtin next() possibly shadowed by global binding"
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class FixNext(fixer_base.BaseFix):
    """Rewrite it.next() -> next(it) and next() methods -> __next__ (PEP 3114)."""
    BM_compatible = True
    PATTERN = """
    power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
    |
    power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
    |
    classdef< 'class' any+ ':'
              suite< any*
                     funcdef< 'def'
                              name='next'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    |
    global=global_stmt< 'global' any* 'next' any* >
    """

    order = "pre" # Pre-order tree traversal

    def start_tree(self, tree, filename):
        super(FixNext, self).start_tree(tree, filename)

        # A module-level binding of the name "next" would shadow the
        # builtin; remember that so call sites use __next__ instead.
        binding = find_binding('next', tree)
        self.shadowed_next = bool(binding)
        if binding:
            self.warning(binding, bind_warning)

    def transform(self, node, results):
        assert results

        base = results.get("base")
        attr = results.get("attr")
        name = results.get("name")

        if base:
            # Matched a call site: it.next()
            if self.shadowed_next:
                attr.replace(Name("__next__", prefix=attr.prefix))
                return
            call_args = [n.clone() for n in base]
            call_args[0].prefix = ""
            node.replace(Call(Name("next", prefix=node.prefix), call_args))
            return
        if name:
            # Matched a "def next(self)" method definition.
            name.replace(Name("__next__", prefix=name.prefix))
            return
        if attr:
            # Bare "x.next" reference without a call.
            # We don't do this transformation if we're assigning to "x.next".
            # Unfortunately, it doesn't seem possible to do this in PATTERN,
            # so it's being done here.
            if is_assign_target(node):
                head = results["head"]
                if "".join(map(str, head)).strip() == '__builtin__':
                    self.warning(node, bind_warning)
                return
            attr.replace(Name("__next__"))
            return
        if "global" in results:
            # "global next" inside some scope: builtin next() may be shadowed.
            self.warning(node, bind_warning)
            self.shadowed_next = True
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
### The following functions help test if node is part of an assignment
|
| 79 |
+
### target.
|
| 80 |
+
|
| 81 |
+
def is_assign_target(node):
    """Return True if *node* sits on the target (left) side of an assignment."""
    assign = find_assign(node)
    if assign is None:
        return False

    for child in assign.children:
        # Once we reach '=', everything after it is the RHS, not a target.
        if child.type == token.EQUAL:
            return False
        if is_subtree(child, node):
            return True
    return False
|
| 92 |
+
|
| 93 |
+
def find_assign(node):
    """Walk up from *node* to the enclosing expr_stmt, or None if there isn't one."""
    current = node
    while True:
        if current.type == syms.expr_stmt:
            return current
        # Stop at statement level or at the tree root.
        if current.type == syms.simple_stmt or current.parent is None:
            return None
        current = current.parent
|
| 99 |
+
|
| 100 |
+
def is_subtree(root, node):
    """Return True if *node* occurs anywhere in the tree rooted at *root*."""
    if root == node:
        return True
    for child in root.children:
        if is_subtree(child, node):
            return True
    return False
|