idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
240,700
def _handle_token ( self , token ) : try : return _HANDLERS [ type ( token ) ] ( self , token ) except KeyError : err = "_handle_token() got unexpected {0}" raise ParserError ( err . format ( type ( token ) . __name__ ) )
Handle a single token .
65
5
240,701
def build ( self , tokenlist ) : self . _tokens = tokenlist self . _tokens . reverse ( ) self . _push ( ) while self . _tokens : node = self . _handle_token ( self . _tokens . pop ( ) ) self . _write ( node ) return self . _pop ( )
Build a Wikicode object from a list tokens and return it .
76
14
240,702
def _select_theory ( theories ) : if theories : values = tuple ( theories . values ( ) ) best = max ( values ) confidence = float ( best ) / sum ( values ) if confidence > 0.5 : return tuple ( theories . keys ( ) ) [ values . index ( best ) ]
Return the most likely spacing convention given different options .
64
10
240,703
def _get_spacing_conventions ( self , use_names ) : before_theories = defaultdict ( lambda : 0 ) after_theories = defaultdict ( lambda : 0 ) for param in self . params : if not param . showkey : continue if use_names : component = str ( param . name ) else : component = str ( param . value ) match = re . search ( r"^(\s*).*?(\s*)$" , component , FLAGS ) before , after = match . group ( 1 ) , match . group ( 2 ) if not use_names and component . isspace ( ) and "\n" in before : # If the value is empty, we expect newlines in the whitespace # to be after the content, not before it: before , after = before . split ( "\n" , 1 ) after = "\n" + after before_theories [ before ] += 1 after_theories [ after ] += 1 before = self . _select_theory ( before_theories ) after = self . _select_theory ( after_theories ) return before , after
Try to determine the whitespace conventions for parameters .
241
10
240,704
def _should_remove ( self , i , name ) : if self . params [ i ] . showkey : following = self . params [ i + 1 : ] better_matches = [ after . name . strip ( ) == name and not after . showkey for after in following ] return any ( better_matches ) return False
Look ahead for a parameter with the same name but hidden .
71
12
240,705
def _push ( self , context = 0 ) : new_ident = ( self . _head , context ) if new_ident in self . _bad_routes : raise BadRoute ( context ) self . _stacks . append ( [ [ ] , context , [ ] , new_ident ] ) self . _depth += 1
Add a new token stack context and textbuffer to the list .
71
13
240,706
def _push_textbuffer ( self ) : if self . _textbuffer : self . _stack . append ( tokens . Text ( text = "" . join ( self . _textbuffer ) ) ) self . _textbuffer = [ ]
Push the textbuffer onto the stack as a Text node and clear it .
50
15
240,707
def _fail_route ( self ) : context = self . _context self . _memoize_bad_route ( ) self . _pop ( ) raise BadRoute ( context )
Fail the current tokenization route .
39
7
240,708
def _emit_all ( self , tokenlist ) : if tokenlist and isinstance ( tokenlist [ 0 ] , tokens . Text ) : self . _emit_text ( tokenlist . pop ( 0 ) . text ) self . _push_textbuffer ( ) self . _stack . extend ( tokenlist )
Write a series of tokens to the current stack at once .
68
12
240,709
def _read ( self , delta = 0 , wrap = False , strict = False ) : index = self . _head + delta if index < 0 and ( not wrap or abs ( index ) > len ( self . _text ) ) : return self . START try : return self . _text [ index ] except IndexError : if strict : self . _fail_route ( ) return self . END
Read the value at a relative point in the wikicode .
83
13
240,710
def _parse_template ( self , has_content ) : reset = self . _head context = contexts . TEMPLATE_NAME if has_content : context |= contexts . HAS_TEMPLATE try : template = self . _parse ( context ) except BadRoute : self . _head = reset raise self . _emit_first ( tokens . TemplateOpen ( ) ) self . _emit_all ( template ) self . _emit ( tokens . TemplateClose ( ) )
Parse a template at the head of the wikicode string .
105
14
240,711
def _parse_argument ( self ) : reset = self . _head try : argument = self . _parse ( contexts . ARGUMENT_NAME ) except BadRoute : self . _head = reset raise self . _emit_first ( tokens . ArgumentOpen ( ) ) self . _emit_all ( argument ) self . _emit ( tokens . ArgumentClose ( ) )
Parse an argument at the head of the wikicode string .
82
14
240,712
def _parse_template_or_argument ( self ) : self . _head += 2 braces = 2 while self . _read ( ) == "{" : self . _head += 1 braces += 1 has_content = False self . _push ( ) while braces : if braces == 1 : return self . _emit_text_then_stack ( "{" ) if braces == 2 : try : self . _parse_template ( has_content ) except BadRoute : return self . _emit_text_then_stack ( "{{" ) break try : self . _parse_argument ( ) braces -= 3 except BadRoute : try : self . _parse_template ( has_content ) braces -= 2 except BadRoute : return self . _emit_text_then_stack ( "{" * braces ) if braces : has_content = True self . _head += 1 self . _emit_all ( self . _pop ( ) ) if self . _context & contexts . FAIL_NEXT : self . _context ^= contexts . FAIL_NEXT
Parse a template or argument at the head of the wikicode string .
227
16
240,713
def _handle_template_param ( self ) : if self . _context & contexts . TEMPLATE_NAME : if not self . _context & ( contexts . HAS_TEXT | contexts . HAS_TEMPLATE ) : self . _fail_route ( ) self . _context ^= contexts . TEMPLATE_NAME elif self . _context & contexts . TEMPLATE_PARAM_VALUE : self . _context ^= contexts . TEMPLATE_PARAM_VALUE else : self . _emit_all ( self . _pop ( ) ) self . _context |= contexts . TEMPLATE_PARAM_KEY self . _emit ( tokens . TemplateParamSeparator ( ) ) self . _push ( self . _context )
Handle a template parameter at the head of the string .
168
11
240,714
def _handle_template_param_value ( self ) : self . _emit_all ( self . _pop ( ) ) self . _context ^= contexts . TEMPLATE_PARAM_KEY self . _context |= contexts . TEMPLATE_PARAM_VALUE self . _emit ( tokens . TemplateParamEquals ( ) )
Handle a template parameter s value at the head of the string .
77
13
240,715
def _handle_template_end ( self ) : if self . _context & contexts . TEMPLATE_NAME : if not self . _context & ( contexts . HAS_TEXT | contexts . HAS_TEMPLATE ) : self . _fail_route ( ) elif self . _context & contexts . TEMPLATE_PARAM_KEY : self . _emit_all ( self . _pop ( ) ) self . _head += 1 return self . _pop ( )
Handle the end of a template at the head of the string .
105
13
240,716
def _handle_argument_separator ( self ) : self . _context ^= contexts . ARGUMENT_NAME self . _context |= contexts . ARGUMENT_DEFAULT self . _emit ( tokens . ArgumentSeparator ( ) )
Handle the separator between an argument s name and default .
56
12
240,717
def _parse_wikilink ( self ) : reset = self . _head + 1 self . _head += 2 try : # If the wikilink looks like an external link, parse it as such: link , extra , delta = self . _really_parse_external_link ( True ) except BadRoute : self . _head = reset + 1 try : # Otherwise, actually parse it as a wikilink: wikilink = self . _parse ( contexts . WIKILINK_TITLE ) except BadRoute : self . _head = reset self . _emit_text ( "[[" ) else : self . _emit ( tokens . WikilinkOpen ( ) ) self . _emit_all ( wikilink ) self . _emit ( tokens . WikilinkClose ( ) ) else : if self . _context & contexts . EXT_LINK_TITLE : # In this exceptional case, an external link that looks like a # wikilink inside of an external link is parsed as text: self . _head = reset self . _emit_text ( "[[" ) return self . _emit_text ( "[" ) self . _emit ( tokens . ExternalLinkOpen ( brackets = True ) ) self . _emit_all ( link ) self . _emit ( tokens . ExternalLinkClose ( ) )
Parse an internal wikilink at the head of the wikicode string .
288
17
240,718
def _handle_wikilink_separator ( self ) : self . _context ^= contexts . WIKILINK_TITLE self . _context |= contexts . WIKILINK_TEXT self . _emit ( tokens . WikilinkSeparator ( ) )
Handle the separator between a wikilink s title and its text .
60
15
240,719
def _parse_bracketed_uri_scheme ( self ) : self . _push ( contexts . EXT_LINK_URI ) if self . _read ( ) == self . _read ( 1 ) == "/" : self . _emit_text ( "//" ) self . _head += 2 else : valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-" all_valid = lambda : all ( char in valid for char in self . _read ( ) ) scheme = "" while self . _read ( ) is not self . END and all_valid ( ) : scheme += self . _read ( ) self . _emit_text ( self . _read ( ) ) self . _head += 1 if self . _read ( ) != ":" : self . _fail_route ( ) self . _emit_text ( ":" ) self . _head += 1 slashes = self . _read ( ) == self . _read ( 1 ) == "/" if slashes : self . _emit_text ( "//" ) self . _head += 2 if not is_scheme ( scheme , slashes ) : self . _fail_route ( )
Parse the URI scheme of a bracket - enclosed external link .
265
13
240,720
def _handle_free_link_text ( self , punct , tail , this ) : if "(" in this and ")" in punct : punct = punct [ : - 1 ] # ')' is not longer valid punctuation if this . endswith ( punct ) : for i in range ( len ( this ) - 1 , 0 , - 1 ) : if this [ i - 1 ] not in punct : break else : i = 0 stripped = this [ : i ] if stripped and tail : self . _emit_text ( tail ) tail = "" tail += this [ i : ] this = stripped elif tail : self . _emit_text ( tail ) tail = "" self . _emit_text ( this ) return punct , tail
Handle text in a free ext link including trailing punctuation .
158
12
240,721
def _is_free_link_end ( self , this , next ) : # Built from _parse()'s end sentinels: after , ctx = self . _read ( 2 ) , self . _context equal_sign_contexts = contexts . TEMPLATE_PARAM_KEY | contexts . HEADING return ( this in ( self . END , "\n" , "[" , "]" , "<" , ">" ) or this == next == "'" or ( this == "|" and ctx & contexts . TEMPLATE ) or ( this == "=" and ctx & equal_sign_contexts ) or ( this == next == "}" and ctx & contexts . TEMPLATE ) or ( this == next == after == "}" and ctx & contexts . ARGUMENT ) )
Return whether the current head is the end of a free link .
178
13
240,722
def _really_parse_external_link ( self , brackets ) : if brackets : self . _parse_bracketed_uri_scheme ( ) invalid = ( "\n" , " " , "]" ) else : self . _parse_free_uri_scheme ( ) invalid = ( "\n" , " " , "[" , "]" ) punct = tuple ( ",;\\.:!?)" ) if self . _read ( ) is self . END or self . _read ( ) [ 0 ] in invalid : self . _fail_route ( ) tail = "" while True : this , next = self . _read ( ) , self . _read ( 1 ) if this == "&" : if tail : self . _emit_text ( tail ) tail = "" self . _parse_entity ( ) elif ( this == "<" and next == "!" and self . _read ( 2 ) == self . _read ( 3 ) == "-" ) : if tail : self . _emit_text ( tail ) tail = "" self . _parse_comment ( ) elif not brackets and self . _is_free_link_end ( this , next ) : return self . _pop ( ) , tail , - 1 elif this is self . END or this == "\n" : self . _fail_route ( ) elif this == next == "{" and self . _can_recurse ( ) : if tail : self . _emit_text ( tail ) tail = "" self . _parse_template_or_argument ( ) elif this == "]" : return self . _pop ( ) , tail , 0 elif " " in this : before , after = this . split ( " " , 1 ) if brackets : self . _emit_text ( before ) self . _emit ( tokens . ExternalLinkSeparator ( ) ) if after : self . _emit_text ( after ) self . _context ^= contexts . EXT_LINK_URI self . _context |= contexts . EXT_LINK_TITLE self . _head += 1 return self . _parse ( push = False ) , None , 0 punct , tail = self . _handle_free_link_text ( punct , tail , before ) return self . _pop ( ) , tail + " " + after , 0 elif not brackets : punct , tail = self . _handle_free_link_text ( punct , tail , this ) else : self . _emit_text ( this ) self . _head += 1
Really parse an external link .
545
6
240,723
def _remove_uri_scheme_from_textbuffer ( self , scheme ) : length = len ( scheme ) while length : if length < len ( self . _textbuffer [ - 1 ] ) : self . _textbuffer [ - 1 ] = self . _textbuffer [ - 1 ] [ : - length ] break length -= len ( self . _textbuffer [ - 1 ] ) self . _textbuffer . pop ( )
Remove the URI scheme of a new external link from the textbuffer .
92
14
240,724
def _parse_external_link ( self , brackets ) : if self . _context & contexts . NO_EXT_LINKS or not self . _can_recurse ( ) : if not brackets and self . _context & contexts . DL_TERM : self . _handle_dl_term ( ) else : self . _emit_text ( self . _read ( ) ) return reset = self . _head self . _head += 1 try : link , extra , delta = self . _really_parse_external_link ( brackets ) except BadRoute : self . _head = reset if not brackets and self . _context & contexts . DL_TERM : self . _handle_dl_term ( ) else : self . _emit_text ( self . _read ( ) ) else : if not brackets : scheme = link [ 0 ] . text . split ( ":" , 1 ) [ 0 ] self . _remove_uri_scheme_from_textbuffer ( scheme ) self . _emit ( tokens . ExternalLinkOpen ( brackets = brackets ) ) self . _emit_all ( link ) self . _emit ( tokens . ExternalLinkClose ( ) ) self . _head += delta if extra : self . _emit_text ( extra )
Parse an external link at the head of the wikicode string .
272
15
240,725
def _parse_heading ( self ) : self . _global |= contexts . GL_HEADING reset = self . _head self . _head += 1 best = 1 while self . _read ( ) == "=" : best += 1 self . _head += 1 context = contexts . HEADING_LEVEL_1 << min ( best - 1 , 5 ) try : title , level = self . _parse ( context ) except BadRoute : self . _head = reset + best - 1 self . _emit_text ( "=" * best ) else : self . _emit ( tokens . HeadingStart ( level = level ) ) if level < best : self . _emit_text ( "=" * ( best - level ) ) self . _emit_all ( title ) self . _emit ( tokens . HeadingEnd ( ) ) finally : self . _global ^= contexts . GL_HEADING
Parse a section heading at the head of the wikicode string .
195
15
240,726
def _handle_heading_end ( self ) : reset = self . _head self . _head += 1 best = 1 while self . _read ( ) == "=" : best += 1 self . _head += 1 current = int ( log ( self . _context / contexts . HEADING_LEVEL_1 , 2 ) ) + 1 level = min ( current , min ( best , 6 ) ) try : # Try to check for a heading closure after this one after , after_level = self . _parse ( self . _context ) except BadRoute : if level < best : self . _emit_text ( "=" * ( best - level ) ) self . _head = reset + best - 1 return self . _pop ( ) , level else : # Found another closure self . _emit_text ( "=" * best ) self . _emit_all ( after ) return self . _pop ( ) , after_level
Handle the end of a section heading at the head of the string .
199
14
240,727
def _really_parse_entity ( self ) : self . _emit ( tokens . HTMLEntityStart ( ) ) self . _head += 1 this = self . _read ( strict = True ) if this == "#" : numeric = True self . _emit ( tokens . HTMLEntityNumeric ( ) ) self . _head += 1 this = self . _read ( strict = True ) if this [ 0 ] . lower ( ) == "x" : hexadecimal = True self . _emit ( tokens . HTMLEntityHex ( char = this [ 0 ] ) ) this = this [ 1 : ] if not this : self . _fail_route ( ) else : hexadecimal = False else : numeric = hexadecimal = False valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789" if not numeric and not hexadecimal : valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" if not all ( [ char in valid for char in this ] ) : self . _fail_route ( ) self . _head += 1 if self . _read ( ) != ";" : self . _fail_route ( ) if numeric : test = int ( this , 16 ) if hexadecimal else int ( this ) if test < 1 or test > 0x10FFFF : self . _fail_route ( ) else : if this not in htmlentities . entitydefs : self . _fail_route ( ) self . _emit ( tokens . Text ( text = this ) ) self . _emit ( tokens . HTMLEntityEnd ( ) )
Actually parse an HTML entity and ensure that it is valid .
382
12
240,728
def _parse_entity ( self ) : reset = self . _head try : self . _push ( contexts . HTML_ENTITY ) self . _really_parse_entity ( ) except BadRoute : self . _head = reset self . _emit_text ( self . _read ( ) ) else : self . _emit_all ( self . _pop ( ) )
Parse an HTML entity at the head of the wikicode string .
81
15
240,729
def _parse_comment ( self ) : self . _head += 4 reset = self . _head - 1 self . _push ( ) while True : this = self . _read ( ) if this == self . END : self . _pop ( ) self . _head = reset self . _emit_text ( "<!--" ) return if this == self . _read ( 1 ) == "-" and self . _read ( 2 ) == ">" : self . _emit_first ( tokens . CommentStart ( ) ) self . _emit ( tokens . CommentEnd ( ) ) self . _emit_all ( self . _pop ( ) ) self . _head += 2 if self . _context & contexts . FAIL_NEXT : # _verify_safe() sets this flag while parsing a template # or link when it encounters what might be a comment -- we # must unset it to let _verify_safe() know it was correct: self . _context ^= contexts . FAIL_NEXT return self . _emit_text ( this ) self . _head += 1
Parse an HTML comment at the head of the wikicode string .
235
15
240,730
def _handle_blacklisted_tag ( self ) : strip = lambda text : text . rstrip ( ) . lower ( ) while True : this , next = self . _read ( ) , self . _read ( 1 ) if this is self . END : self . _fail_route ( ) elif this == "<" and next == "/" : self . _head += 3 if self . _read ( ) != ">" or ( strip ( self . _read ( - 1 ) ) != strip ( self . _stack [ 1 ] . text ) ) : self . _head -= 1 self . _emit_text ( "</" ) continue self . _emit ( tokens . TagOpenClose ( ) ) self . _emit_text ( self . _read ( - 1 ) ) self . _emit ( tokens . TagCloseClose ( ) ) return self . _pop ( ) elif this == "&" : self . _parse_entity ( ) else : self . _emit_text ( this ) self . _head += 1
Handle the body of an HTML tag that is parser - blacklisted .
224
14
240,731
def _handle_single_only_tag_end ( self ) : padding = self . _stack . pop ( ) . padding self . _emit ( tokens . TagCloseSelfclose ( padding = padding , implicit = True ) ) self . _head -= 1 # Offset displacement done by _handle_tag_close_open return self . _pop ( )
Handle the end of an implicitly closing single - only HTML tag .
76
13
240,732
def _handle_single_tag_end ( self ) : stack = self . _stack # We need to find the index of the TagCloseOpen token corresponding to # the TagOpenOpen token located at index 0: depth = 1 for index , token in enumerate ( stack [ 2 : ] , 2 ) : if isinstance ( token , tokens . TagOpenOpen ) : depth += 1 elif isinstance ( token , tokens . TagCloseOpen ) : depth -= 1 if depth == 0 : break elif isinstance ( token , tokens . TagCloseSelfclose ) : depth -= 1 if depth == 0 : # pragma: no cover (untestable/exceptional) raise ParserError ( "_handle_single_tag_end() got an unexpected " "TagCloseSelfclose" ) else : # pragma: no cover (untestable/exceptional case) raise ParserError ( "_handle_single_tag_end() missed a TagCloseOpen" ) padding = stack [ index ] . padding stack [ index ] = tokens . TagCloseSelfclose ( padding = padding , implicit = True ) return self . _pop ( )
Handle the stream end when inside a single - supporting HTML tag .
239
13
240,733
def _parse_tag ( self ) : reset = self . _head self . _head += 1 try : tag = self . _really_parse_tag ( ) except BadRoute : self . _head = reset self . _emit_text ( "<" ) else : self . _emit_all ( tag )
Parse an HTML tag at the head of the wikicode string .
68
15
240,734
def _emit_style_tag ( self , tag , markup , body ) : self . _emit ( tokens . TagOpenOpen ( wiki_markup = markup ) ) self . _emit_text ( tag ) self . _emit ( tokens . TagCloseOpen ( ) ) self . _emit_all ( body ) self . _emit ( tokens . TagOpenClose ( ) ) self . _emit_text ( tag ) self . _emit ( tokens . TagCloseClose ( ) )
Write the body of a tag and the tokens that should surround it .
110
14
240,735
def _parse_italics ( self ) : reset = self . _head try : stack = self . _parse ( contexts . STYLE_ITALICS ) except BadRoute as route : self . _head = reset if route . context & contexts . STYLE_PASS_AGAIN : new_ctx = contexts . STYLE_ITALICS | contexts . STYLE_SECOND_PASS stack = self . _parse ( new_ctx ) else : return self . _emit_text ( "''" ) self . _emit_style_tag ( "i" , "''" , stack )
Parse wiki - style italics .
131
8
240,736
def _parse_bold ( self ) : reset = self . _head try : stack = self . _parse ( contexts . STYLE_BOLD ) except BadRoute : self . _head = reset if self . _context & contexts . STYLE_SECOND_PASS : self . _emit_text ( "'" ) return True elif self . _context & contexts . STYLE_ITALICS : self . _context |= contexts . STYLE_PASS_AGAIN self . _emit_text ( "'''" ) else : self . _emit_text ( "'" ) self . _parse_italics ( ) else : self . _emit_style_tag ( "b" , "'''" , stack )
Parse wiki - style bold .
161
7
240,737
def _emit_table_tag ( self , open_open_markup , tag , style , padding , close_open_markup , contents , open_close_markup ) : self . _emit ( tokens . TagOpenOpen ( wiki_markup = open_open_markup ) ) self . _emit_text ( tag ) if style : self . _emit_all ( style ) if close_open_markup : self . _emit ( tokens . TagCloseOpen ( wiki_markup = close_open_markup , padding = padding ) ) else : self . _emit ( tokens . TagCloseOpen ( padding = padding ) ) if contents : self . _emit_all ( contents ) self . _emit ( tokens . TagOpenClose ( wiki_markup = open_close_markup ) ) self . _emit_text ( tag ) self . _emit ( tokens . TagCloseClose ( ) )
Emit a table tag .
207
6
240,738
def _handle_table_style ( self , end_token ) : data = _TagOpenData ( ) data . context = _TagOpenData . CX_ATTR_READY while True : this = self . _read ( ) can_exit = ( not data . context & data . CX_QUOTED or data . context & data . CX_NOTE_SPACE ) if this == end_token and can_exit : if data . context & ( data . CX_ATTR_NAME | data . CX_ATTR_VALUE ) : self . _push_tag_buffer ( data ) if this . isspace ( ) : data . padding_buffer [ "first" ] += this return data . padding_buffer [ "first" ] elif this is self . END or this == end_token : if self . _context & contexts . TAG_ATTR : if data . context & data . CX_QUOTED : # Unclosed attribute quote: reset, don't die data . context = data . CX_ATTR_VALUE self . _memoize_bad_route ( ) self . _pop ( ) self . _head = data . reset continue self . _pop ( ) self . _fail_route ( ) else : self . _handle_tag_data ( data , this ) self . _head += 1
Handle style attributes for a table until end_token .
291
11
240,739
def _parse_table ( self ) : reset = self . _head self . _head += 2 try : self . _push ( contexts . TABLE_OPEN ) padding = self . _handle_table_style ( "\n" ) except BadRoute : self . _head = reset self . _emit_text ( "{" ) return style = self . _pop ( ) self . _head += 1 restore_point = self . _stack_ident try : table = self . _parse ( contexts . TABLE_OPEN ) except BadRoute : while self . _stack_ident != restore_point : self . _memoize_bad_route ( ) self . _pop ( ) self . _head = reset self . _emit_text ( "{" ) return self . _emit_table_tag ( "{|" , "table" , style , padding , None , table , "|}" ) # Offset displacement done by _parse(): self . _head -= 1
Parse a wikicode table by starting with the first line .
209
14
240,740
def _handle_table_row ( self ) : self . _head += 2 if not self . _can_recurse ( ) : self . _emit_text ( "|-" ) self . _head -= 1 return self . _push ( contexts . TABLE_OPEN | contexts . TABLE_ROW_OPEN ) padding = self . _handle_table_style ( "\n" ) style = self . _pop ( ) # Don't parse the style separator: self . _head += 1 row = self . _parse ( contexts . TABLE_OPEN | contexts . TABLE_ROW_OPEN ) self . _emit_table_tag ( "|-" , "tr" , style , padding , None , row , "" ) # Offset displacement done by parse(): self . _head -= 1
Parse as style until end of the line then continue .
174
12
240,741
def _handle_table_cell ( self , markup , tag , line_context ) : old_context = self . _context padding , style = "" , None self . _head += len ( markup ) reset = self . _head if not self . _can_recurse ( ) : self . _emit_text ( markup ) self . _head -= 1 return cell = self . _parse ( contexts . TABLE_OPEN | contexts . TABLE_CELL_OPEN | line_context | contexts . TABLE_CELL_STYLE ) cell_context = self . _context self . _context = old_context reset_for_style = cell_context & contexts . TABLE_CELL_STYLE if reset_for_style : self . _head = reset self . _push ( contexts . TABLE_OPEN | contexts . TABLE_CELL_OPEN | line_context ) padding = self . _handle_table_style ( "|" ) style = self . _pop ( ) # Don't parse the style separator: self . _head += 1 cell = self . _parse ( contexts . TABLE_OPEN | contexts . TABLE_CELL_OPEN | line_context ) cell_context = self . _context self . _context = old_context close_open_markup = "|" if reset_for_style else None self . _emit_table_tag ( markup , tag , style , padding , close_open_markup , cell , "" ) # Keep header/cell line contexts: self . _context |= cell_context & ( contexts . TABLE_TH_LINE | contexts . TABLE_TD_LINE ) # Offset displacement done by parse(): self . _head -= 1
Parse as normal syntax unless we hit a style marker then parse style as HTML attributes and the remainder as normal syntax .
370
24
240,742
def _handle_table_cell_end ( self , reset_for_style = False ) : if reset_for_style : self . _context |= contexts . TABLE_CELL_STYLE else : self . _context &= ~ contexts . TABLE_CELL_STYLE return self . _pop ( keep_context = True )
Returns the current context with the TABLE_CELL_STYLE flag set if it is necessary to reset and parse style attributes .
75
27
240,743
def _handle_end ( self ) : if self . _context & contexts . FAIL : if self . _context & contexts . TAG_BODY : if is_single ( self . _stack [ 1 ] . text ) : return self . _handle_single_tag_end ( ) if self . _context & contexts . TABLE_CELL_OPEN : self . _pop ( ) if self . _context & contexts . DOUBLE : self . _pop ( ) self . _fail_route ( ) return self . _pop ( )
Handle the end of the stream of wikitext .
117
11
240,744
def _verify_safe ( self , this ) : context = self . _context if context & contexts . FAIL_NEXT : return False if context & contexts . WIKILINK_TITLE : if this == "]" or this == "{" : self . _context |= contexts . FAIL_NEXT elif this == "\n" or this == "[" or this == "}" or this == ">" : return False elif this == "<" : if self . _read ( 1 ) == "!" : self . _context |= contexts . FAIL_NEXT else : return False return True elif context & contexts . EXT_LINK_TITLE : return this != "\n" elif context & contexts . TEMPLATE_NAME : if this == "{" : self . _context |= contexts . HAS_TEMPLATE | contexts . FAIL_NEXT return True if this == "}" or ( this == "<" and self . _read ( 1 ) == "!" ) : self . _context |= contexts . FAIL_NEXT return True if this == "[" or this == "]" or this == "<" or this == ">" : return False if this == "|" : return True if context & contexts . HAS_TEXT : if context & contexts . FAIL_ON_TEXT : if this is self . END or not this . isspace ( ) : return False elif this == "\n" : self . _context |= contexts . FAIL_ON_TEXT elif this is self . END or not this . isspace ( ) : self . _context |= contexts . HAS_TEXT return True elif context & contexts . TAG_CLOSE : return this != "<" else : if context & contexts . FAIL_ON_EQUALS : if this == "=" : return False elif context & contexts . FAIL_ON_LBRACE : if this == "{" or ( self . _read ( - 1 ) == self . _read ( - 2 ) == "{" ) : if context & contexts . TEMPLATE : self . _context |= contexts . FAIL_ON_EQUALS else : self . _context |= contexts . FAIL_NEXT return True self . _context ^= contexts . FAIL_ON_LBRACE elif context & contexts . FAIL_ON_RBRACE : if this == "}" : self . _context |= contexts . FAIL_NEXT return True self . _context ^= contexts . FAIL_ON_RBRACE elif this == "{" : self . _context |= contexts . FAIL_ON_LBRACE elif this == "}" : self . _context |= contexts . FAIL_ON_RBRACE return True
Make sure we are not trying to write an invalid character .
596
12
240,745
def tokenize ( self , text , context = 0 , skip_style_tags = False ) : split = self . regex . split ( text ) self . _text = [ segment for segment in split if segment ] self . _head = self . _global = self . _depth = 0 self . _bad_routes = set ( ) self . _skip_style_tags = skip_style_tags try : tokens = self . _parse ( context ) except BadRoute : # pragma: no cover (untestable/exceptional case) raise ParserError ( "Python tokenizer exited with BadRoute" ) if self . _stacks : # pragma: no cover (untestable/exceptional case) err = "Python tokenizer exited with non-empty token stack" raise ParserError ( err ) return tokens
Build a list of tokens from a string of wikicode and return it .
177
16
240,746
def _value_needs_quotes ( val ) : if not val : return None val = "" . join ( str ( node ) for node in val . filter_text ( recursive = False ) ) if not any ( char . isspace ( ) for char in val ) : return None if "'" in val and '"' not in val : return '"' if '"' in val and "'" not in val : return "'" return "\"'"
Return valid quotes for the given value or None if unneeded .
94
13
240,747
def _set_padding ( self , attr , value ) : if not value : setattr ( self , attr , "" ) else : value = str ( value ) if not value . isspace ( ) : raise ValueError ( "padding must be entirely whitespace" ) setattr ( self , attr , value )
Setter for the value of a padding attribute .
68
10
240,748
def coerce_quotes ( quotes ) : orig , quotes = quotes , str ( quotes ) if quotes else None if quotes not in [ None , '"' , "'" ] : raise ValueError ( "{!r} is not a valid quote type" . format ( orig ) ) return quotes
Coerce a quote type into an acceptable value or raise an error .
61
15
240,749
def _indexed_ifilter ( self , recursive = True , matches = None , flags = FLAGS , forcetype = None ) : match = self . _build_matcher ( matches , flags ) if recursive : restrict = forcetype if recursive == self . RECURSE_OTHERS else None def getter ( i , node ) : for ch in self . _get_children ( node , restrict = restrict ) : yield ( i , ch ) inodes = chain ( * ( getter ( i , n ) for i , n in enumerate ( self . nodes ) ) ) else : inodes = enumerate ( self . nodes ) for i , node in inodes : if ( not forcetype or isinstance ( node , forcetype ) ) and match ( node ) : yield ( i , node )
Iterate over nodes and their corresponding indices in the node list .
176
13
240,750
def _get_tree ( self , code , lines , marker , indent ) : def write ( * args ) : """Write a new line following the proper indentation rules.""" if lines and lines [ - 1 ] is marker : # Continue from the last line lines . pop ( ) # Remove the marker last = lines . pop ( ) lines . append ( last + " " . join ( args ) ) else : lines . append ( " " * 6 * indent + " " . join ( args ) ) get = lambda code : self . _get_tree ( code , lines , marker , indent + 1 ) mark = lambda : lines . append ( marker ) for node in code . nodes : node . __showtree__ ( write , get , mark ) return lines
Build a tree to illustrate the way the Wikicode object was parsed .
159
15
240,751
def _build_filter_methods ( cls , * * meths ) : doc = """Iterate over {0}. This is equivalent to :meth:`{1}` with *forcetype* set to :class:`~{2.__module__}.{2.__name__}`. """ make_ifilter = lambda ftype : ( lambda self , * a , * * kw : self . ifilter ( forcetype = ftype , * a , * * kw ) ) make_filter = lambda ftype : ( lambda self , * a , * * kw : self . filter ( forcetype = ftype , * a , * * kw ) ) for name , ftype in ( meths . items ( ) if py3k else meths . iteritems ( ) ) : ifilter = make_ifilter ( ftype ) filter = make_filter ( ftype ) ifilter . __doc__ = doc . format ( name , "ifilter" , ftype ) filter . __doc__ = doc . format ( name , "filter" , ftype ) setattr ( cls , "ifilter_" + name , ifilter ) setattr ( cls , "filter_" + name , filter )
Given Node types build the corresponding i?filter shortcuts .
279
11
240,752
def matches ( self , other ) : cmp = lambda a , b : ( a [ 0 ] . upper ( ) + a [ 1 : ] == b [ 0 ] . upper ( ) + b [ 1 : ] if a and b else a == b ) this = self . strip_code ( ) . strip ( ) if isinstance ( other , ( str , bytes , Wikicode , Node ) ) : that = parse_anything ( other ) . strip_code ( ) . strip ( ) return cmp ( this , that ) for obj in other : that = parse_anything ( obj ) . strip_code ( ) . strip ( ) if cmp ( this , that ) : return True return False
Do a loose equivalency test suitable for comparing page names .
149
12
240,753
def ifilter ( self , recursive = True , matches = None , flags = FLAGS , forcetype = None ) : gen = self . _indexed_ifilter ( recursive , matches , flags , forcetype ) return ( node for i , node in gen )
Iterate over nodes in our list matching certain conditions .
60
11
240,754
def get_sections ( self , levels = None , matches = None , flags = FLAGS , flat = False , include_lead = None , include_headings = True ) : title_matcher = self . _build_matcher ( matches , flags ) matcher = lambda heading : ( title_matcher ( heading . title ) and ( not levels or heading . level in levels ) ) iheadings = self . _indexed_ifilter ( recursive = False , forcetype = Heading ) sections = [ ] # Tuples of (index_of_first_node, section) open_headings = [ ] # Tuples of (index, heading), where index and # heading.level are both monotonically increasing # Add the lead section if appropriate: if include_lead or not ( include_lead is not None or matches or levels ) : itr = self . _indexed_ifilter ( recursive = False , forcetype = Heading ) try : first = next ( itr ) [ 0 ] sections . append ( ( 0 , Wikicode ( self . nodes [ : first ] ) ) ) except StopIteration : # No headings in page sections . append ( ( 0 , Wikicode ( self . nodes [ : ] ) ) ) # Iterate over headings, adding sections to the list as they end: for i , heading in iheadings : if flat : # With flat, all sections close at the next heading newly_closed , open_headings = open_headings , [ ] else : # Otherwise, figure out which sections have closed, if any closed_start_index = len ( open_headings ) for j , ( start , last_heading ) in enumerate ( open_headings ) : if heading . level <= last_heading . level : closed_start_index = j break newly_closed = open_headings [ closed_start_index : ] del open_headings [ closed_start_index : ] for start , closed_heading in newly_closed : if matcher ( closed_heading ) : sections . append ( ( start , Wikicode ( self . nodes [ start : i ] ) ) ) start = i if include_headings else ( i + 1 ) open_headings . append ( ( start , heading ) ) # Add any remaining open headings to the list of sections: for start , heading in open_headings : if matcher ( heading ) : sections . append ( ( start , Wikicode ( self . 
nodes [ start : ] ) ) ) # Ensure that earlier sections are earlier in the returned list: return [ section for i , section in sorted ( sections ) ]
Return a list of sections within the page .
567
9
240,755
def strip_code ( self , normalize = True , collapse = True , keep_template_params = False ) : kwargs = { "normalize" : normalize , "collapse" : collapse , "keep_template_params" : keep_template_params } nodes = [ ] for node in self . nodes : stripped = node . __strip__ ( * * kwargs ) if stripped : nodes . append ( str ( stripped ) ) if collapse : stripped = "" . join ( nodes ) . strip ( "\n" ) while "\n\n\n" in stripped : stripped = stripped . replace ( "\n\n\n" , "\n\n" ) return stripped else : return "" . join ( nodes )
Return a rendered string without unprintable code such as templates .
156
13
240,756
def write_pid_file ( fn ) : if not fn : return None if fn == '' or fn == "''" : # work around argument passings in init-scripts return None f = open ( fn , "w" ) f . write ( "%s\n" % ( os . getpid ( ) ) ) f . close ( )
Create a file with our PID .
73
7
240,757
def input_validate_str ( string , name , max_len = None , exact_len = None ) : if type ( string ) is not str : raise pyhsm . exception . YHSM_WrongInputType ( name , str , type ( string ) ) if max_len != None and len ( string ) > max_len : raise pyhsm . exception . YHSM_InputTooLong ( name , max_len , len ( string ) ) if exact_len != None and len ( string ) != exact_len : raise pyhsm . exception . YHSM_WrongInputSize ( name , exact_len , len ( string ) ) return string
Input validation for strings .
146
5
240,758
def input_validate_int ( value , name , max_value = None ) : if type ( value ) is not int : raise pyhsm . exception . YHSM_WrongInputType ( name , int , type ( value ) ) if max_value != None and value > max_value : raise pyhsm . exception . YHSM_WrongInputSize ( name , max_value , value ) return value
Input validation for integers .
92
5
240,759
def input_validate_nonce ( nonce , name = 'nonce' , pad = False ) : if type ( nonce ) is not str : raise pyhsm . exception . YHSM_WrongInputType ( name , str , type ( nonce ) ) if len ( nonce ) > pyhsm . defines . YSM_AEAD_NONCE_SIZE : raise pyhsm . exception . YHSM_InputTooLong ( name , pyhsm . defines . YSM_AEAD_NONCE_SIZE , len ( nonce ) ) if pad : return nonce . ljust ( pyhsm . defines . YSM_AEAD_NONCE_SIZE , chr ( 0x0 ) ) else : return nonce
Input validation for nonces .
167
6
240,760
def input_validate_key_handle ( key_handle , name = 'key_handle' ) : if type ( key_handle ) is not int : try : return key_handle_to_int ( key_handle ) except pyhsm . exception . YHSM_Error : raise pyhsm . exception . YHSM_WrongInputType ( name , int , type ( key_handle ) ) return key_handle
Input validation for key_handles .
93
8
240,761
def input_validate_yubikey_secret ( data , name = 'data' ) : if isinstance ( data , pyhsm . aead_cmd . YHSM_YubiKeySecret ) : data = data . pack ( ) return input_validate_str ( data , name )
Input validation for YHSM_YubiKeySecret or string .
66
14
240,762
def input_validate_aead ( aead , name = 'aead' , expected_len = None , max_aead_len = pyhsm . defines . YSM_AEAD_MAX_SIZE ) : if isinstance ( aead , pyhsm . aead_cmd . YHSM_GeneratedAEAD ) : aead = aead . data if expected_len != None : return input_validate_str ( aead , name , exact_len = expected_len ) else : return input_validate_str ( aead , name , max_len = max_aead_len )
Input validation for YHSM_GeneratedAEAD or string .
136
14
240,763
def validate_cmd_response_nonce ( got , used ) : if used == '000000000000' . decode ( 'hex' ) : if got == used : raise ( pyhsm . exception . YHSM_Error ( "Bad nonce in response (got %s, expected HSM generated nonce)" % ( got . encode ( 'hex' ) ) ) ) return got return validate_cmd_response_str ( 'nonce' , got , used )
Check that the returned nonce matches nonce used in request .
100
13
240,764
def _raw_pack ( key_handle , flags , data ) : # #define YHSM_HMAC_RESET 0x01 // Flag to indicate reset at first packet # #define YHSM_HMAC_FINAL 0x02 // Flag to indicate that the hash shall be calculated # typedef struct { # uint32_t keyHandle; // Key handle # uint8_t flags; // Flags # uint8_t numBytes; // Number of bytes in data packet # uint8_t data[YHSM_MAX_PKT_SIZE - 6]; // Data to be written # } YHSM_HMAC_SHA1_GENERATE_REQ; return struct . pack ( '<IBB' , key_handle , flags , len ( data ) ) + data
Common code for packing payload to YHSM_HMAC_SHA1_GENERATE command .
171
21
240,765
def next ( self , data , final = False , to_buffer = False ) : if final : self . flags = pyhsm . defines . YSM_HMAC_SHA1_FINAL else : self . flags = 0x0 if to_buffer : self . flags |= pyhsm . defines . YSM_HMAC_SHA1_TO_BUFFER self . payload = _raw_pack ( self . key_handle , self . flags , data ) self . final = final return self
Add more input to the HMAC SHA1 .
108
10
240,766
def get_hash ( self ) : if not self . executed : raise pyhsm . exception . YHSM_Error ( "HMAC-SHA1 hash not available, before execute()." ) return self . result . hash_result
Get the HMAC - SHA1 that has been calculated this far .
50
14
240,767
def check_signature ( params ) : if 'id' in params : try : id_int = int ( params [ 'id' ] [ 0 ] ) except : my_log_message ( args , syslog . LOG_INFO , "Non-numerical client id (%s) in request." % ( params [ 'id' ] [ 0 ] ) ) return False , None key = client_ids . get ( id_int ) if key : if 'h' in params : sig = params [ 'h' ] [ 0 ] good_sig = make_signature ( params , key ) if sig == good_sig : #my_log_message(args, syslog.LOG_DEBUG, "Good signature (client id '%i')" % id_int) return True , key else : my_log_message ( args , syslog . LOG_INFO , "Bad signature from client id '%i' (%s, expected %s)." % ( id_int , sig , good_sig ) ) else : my_log_message ( args , syslog . LOG_INFO , "Client id (%i) but no HMAC in request." % ( id_int ) ) return False , key else : my_log_message ( args , syslog . LOG_INFO , "Unknown client id '%i'" % ( id_int ) ) return False , None return True , None
Verify the signature of the parameters in an OTP v2 . 0 verify request .
302
18
240,768
def validate_oath_hotp ( self , params ) : from_key = params [ "hotp" ] [ 0 ] if not re . match ( hotp_valid_input , from_key ) : self . log_error ( "IN: %s, Invalid OATH-HOTP OTP" % ( params ) ) return "ERR Invalid OATH-HOTP OTP" uid , otp , = get_oath_hotp_bits ( params ) if not uid or not otp : self . log_error ( "IN: %s, could not get UID/OTP ('%s'/'%s')" % ( params , uid , otp ) ) return "ERR Invalid OATH-HOTP input" if args . debug : print "OATH-HOTP uid %s, OTP %s" % ( uid , otp ) # Fetch counter value for `uid' from database try : db = ValOathDb ( args . db_file ) entry = db . get ( uid ) except Exception , e : self . log_error ( "IN: %s, database error : '%s'" % ( params , e ) ) return "ERR Internal error" # Check for correct OATH-HOTP OTP nonce = entry . data [ "nonce" ] . decode ( 'hex' ) aead = entry . data [ "aead" ] . decode ( 'hex' ) new_counter = pyhsm . oath_hotp . search_for_oath_code ( hsm , entry . data [ "key_handle" ] , nonce , aead , entry . data [ "oath_c" ] , otp , args . look_ahead ) if args . debug : print "OATH-HOTP %i..%i -> new C == %s" % ( entry . data [ "oath_c" ] , entry . data [ "oath_c" ] + args . look_ahead , new_counter ) if type ( new_counter ) != int : # XXX increase 'throttling parameter' to make brute forcing harder/impossible return "ERR Could not validate OATH-HOTP OTP" try : # Must successfully store new_counter before we return OK if db . update_oath_hotp_c ( entry , new_counter ) : return "OK counter=%04x" % ( new_counter ) else : return "ERR replayed OATH-HOTP" except Exception , e : self . log_error ( "IN: %s, database error updating counter : %s" % ( params , e ) ) return "ERR Internal error"
Validate OATH - HOTP code using YubiHSM HMAC - SHA1 hashing with token keys secured in AEAD s that we have stored in an SQLite3 database .
590
38
240,769
def validate_oath_totp ( self , params ) : from_key = params [ "totp" ] [ 0 ] if not re . match ( totp_valid_input , from_key ) : self . log_error ( "IN: %s, Invalid OATH-TOTP OTP" % ( params ) ) return "ERR Invalid OATH-TOTP OTP" uid , otp , = get_oath_totp_bits ( params ) if not uid or not otp : self . log_error ( "IN: %s, could not get UID/OTP ('%s'/'%s')" % ( params , uid , otp ) ) return "ERR Invalid OATH-TOTP input" if args . debug : print "OATH-TOTP uid %s, OTP %s" % ( uid , otp ) # Fetch counter value for `uid' from database try : db = ValOathDb ( args . db_file ) entry = db . get ( uid ) except Exception , e : self . log_error ( "IN: %s, database error : '%s'" % ( params , e ) ) return "ERR Internal error" # Check for correct OATH-TOTP OTP nonce = entry . data [ "nonce" ] . decode ( 'hex' ) aead = entry . data [ "aead" ] . decode ( 'hex' ) new_timecounter = pyhsm . oath_totp . search_for_oath_code ( hsm , entry . data [ "key_handle" ] , nonce , aead , otp , args . interval , args . tolerance ) if args . debug : print "OATH-TOTP counter: %i, interval: %i -> new timecounter == %s" % ( entry . data [ "oath_c" ] , args . interval , new_timecounter ) if type ( new_timecounter ) != int : return "ERR Could not validate OATH-TOTP OTP" try : # Must successfully store new_timecounter before we return OK # Can use existing hotp function since it would be identical if db . update_oath_hotp_c ( entry , new_timecounter ) : return "OK timecounter=%04x" % ( new_timecounter ) else : return "ERR replayed OATH-TOTP" except Exception , e : self . log_error ( "IN: %s, database error updating counter : %s" % ( params , e ) ) return "ERR Internal error"
Validate OATH - TOTP code using YubiHSM HMAC - SHA1 hashing with token keys secured in AEAD s that we have stored in an SQLite3 database .
577
39
240,770
def validate_pwhash ( _self , params ) : pwhash , nonce , aead , key_handle = get_pwhash_bits ( params ) d_aead = aead . decode ( 'hex' ) plaintext_len = len ( d_aead ) - pyhsm . defines . YSM_AEAD_MAC_SIZE pw = pwhash . ljust ( plaintext_len , chr ( 0x0 ) ) if hsm . validate_aead ( nonce . decode ( 'hex' ) , key_handle , d_aead , pw ) : return "OK pwhash validated" return "ERR Could not validate pwhash"
Validate password hash using YubiHSM .
153
10
240,771
def get_pwhash_bits ( params ) : if not "pwhash" in params or not "nonce" in params or not "aead" in params or not "kh" in params : raise Exception ( "Missing required parameter in request (pwhash, nonce, aead or kh)" ) pwhash = params [ "pwhash" ] [ 0 ] nonce = params [ "nonce" ] [ 0 ] aead = params [ "aead" ] [ 0 ] key_handle = pyhsm . util . key_handle_to_int ( params [ "kh" ] [ 0 ] ) return pwhash , nonce , aead , key_handle
Extract bits for password hash validation from params .
151
10
240,772
def get_oath_hotp_bits ( params ) : if "uid" in params : return params [ "uid" ] [ 0 ] , int ( params [ "hotp" ] [ 0 ] ) m = re . match ( "^([cbdefghijklnrtuv]*)([0-9]{6,8})" , params [ "hotp" ] [ 0 ] ) uid , otp , = m . groups ( ) return uid , int ( otp ) ,
Extract the OATH - HOTP uid and OTP from params .
109
16
240,773
def load_clients_file ( filename ) : res = { } content = [ ] try : fhandle = file ( filename ) content = fhandle . readlines ( ) fhandle . close ( ) except IOError : return None linenum = 0 for line in content : linenum += 1 while line . endswith ( "\r" ) or line . endswith ( "\n" ) : line = line [ : - 1 ] if re . match ( "(^\s*#|^\s*$)" , line ) : # skip comments and empty lines continue parts = [ x . strip ( ) for x in line . split ( ',' ) ] try : if len ( parts ) != 2 : raise Exception ( ) id_num = int ( parts [ 0 ] ) key = base64 . b64decode ( parts [ 1 ] ) res [ id_num ] = key except : my_log_message ( args , syslog . LOG_ERR , 'Bad data on line %i of clients file "%s" : "%s"' % ( linenum , filename , line ) ) return None return res
Load a list of base64 encoded shared secrets for numerical client ids .
238
15
240,774
def run ( ) : server_address = ( args . listen_addr , args . listen_port ) httpd = YHSM_VALServer ( server_address , YHSM_VALRequestHandler ) my_log_message ( args , syslog . LOG_INFO , "Serving requests to 'http://%s:%s%s' (YubiHSM: '%s')" % ( args . listen_addr , args . listen_port , args . serve_url , args . device ) ) httpd . serve_forever ( )
Start the BaseHTTPServer and serve requests forever .
120
11
240,775
def main ( ) : my_name = os . path . basename ( sys . argv [ 0 ] ) if not my_name : my_name = "yhsm-validation-server" syslog . openlog ( my_name , syslog . LOG_PID , syslog . LOG_LOCAL0 ) global args args = parse_args ( ) args_fixup ( ) global hsm try : hsm = pyhsm . YHSM ( device = args . device , debug = args . debug ) except serial . SerialException , e : my_log_message ( args , syslog . LOG_ERR , 'Failed opening YubiHSM device "%s" : %s' % ( args . device , e ) ) return 1 write_pid_file ( args . pid_file ) try : run ( ) except KeyboardInterrupt : print "" print "Shutting down" print ""
The main function that will be executed when running this as a stand alone script .
197
16
240,776
def do_GET ( self ) : if self . path . startswith ( args . serve_url ) : res = None log_res = None mode = None params = urlparse . parse_qs ( self . path [ len ( args . serve_url ) : ] ) if "otp" in params : if args . mode_short_otp : # YubiKey internal db OTP in KSM mode mode = 'YubiKey OTP (short)' res = validate_yubikey_otp_short ( self , params ) elif args . mode_otp : # YubiKey internal db OTP validation 2.0 mode = 'YubiKey OTP' res = validate_yubikey_otp ( self , params ) #status = [x for x in res.split('\n') if x.startswith("status=")] #if len(status) == 1: # res = status[0][7:] log_res = '&' . join ( res . split ( '\n' ) ) else : res = "ERR 'otp/otp2' disabled" elif "hotp" in params : if args . mode_hotp : mode = 'OATH-HOTP' res = validate_oath_hotp ( self , params ) else : res = "ERR 'hotp' disabled" elif "totp" in params : if args . mode_totp : mode = 'OATH-TOTP' res = validate_oath_totp ( self , params ) else : res = "ERR 'totp' disabled" elif "pwhash" in params : if args . mode_pwhash : mode = 'Password hash' res = validate_pwhash ( self , params ) else : res = "ERR 'pwhash' disabled" if not log_res : log_res = res self . log_message ( "%s validation result: %s -> %s" , mode , self . path , log_res ) if res != None : self . send_response ( 200 ) self . send_header ( 'Content-type' , 'text/plain' ) self . end_headers ( ) self . wfile . write ( res ) self . wfile . write ( "\n" ) else : self . log_error ( "No validation result to '%s' (responding 403)" % ( self . path ) ) self . send_response ( 403 , 'Forbidden' ) self . end_headers ( ) else : self . log_error ( "Bad URL '%s' - I'm serving '%s' (responding 403)" % ( self . path , args . serve_url ) ) self . send_response ( 403 , 'Forbidden' ) self . end_headers ( )
Process validation GET requests .
614
5
240,777
def get ( self , key ) : c = self . conn . cursor ( ) for row in c . execute ( "SELECT key, nonce, key_handle, aead, oath_C, oath_T FROM oath WHERE key = ?" , ( key , ) ) : return ValOathEntry ( row ) raise Exception ( "OATH token for '%s' not found in database (%s)" % ( key , self . filename ) )
Fetch entry from database .
95
6
240,778
def update_oath_hotp_c ( self , entry , new_c ) : key = entry . data [ "key" ] c = self . conn . cursor ( ) c . execute ( "UPDATE oath SET oath_c = ? WHERE key = ? AND ? > oath_c" , ( new_c , key , new_c , ) ) self . conn . commit ( ) return c . rowcount == 1
Update the OATH - HOTP counter value for entry in the database .
91
15
240,779
def generate_aead ( hsm , args , password ) : try : pw = password . ljust ( args . min_len , chr ( 0x0 ) ) return hsm . generate_aead_simple ( args . nonce . decode ( 'hex' ) , args . key_handle , pw ) except pyhsm . exception . YHSM_CommandFailed , e : if e . status_str == 'YHSM_FUNCTION_DISABLED' : print "ERROR: The key handle %s is not permitted to YSM_AEAD_GENERATE." % ( args . key_handle ) return None else : print "ERROR: %s" % ( e . reason )
Generate an AEAD using the YubiHSM .
156
12
240,780
def aead_filename ( aead_dir , key_handle , public_id ) : parts = [ aead_dir , key_handle ] + pyhsm . util . group ( public_id , 2 ) path = os . path . join ( * parts ) if not os . path . isdir ( path ) : os . makedirs ( path ) return os . path . join ( path , public_id )
Return the filename of the AEAD for this public_id and create any missing directories .
91
19
240,781
def safe_process_files ( path , files , args , state ) : for fn in files : full_fn = os . path . join ( path , fn ) try : if not process_file ( path , fn , args , state ) : return False except Exception , e : sys . stderr . write ( "error: %s\n%s\n" % ( os . path . join ( path , fn ) , traceback . format_exc ( ) ) ) state . log_failed ( full_fn ) if state . should_quit ( ) : return False return True
Process a number of files in a directory . Catches any exception from the processing and checks if we should fail directly or keep going .
125
27
240,782
def walk_dir ( path , args , state ) : if args . debug : sys . stderr . write ( "Walking %s\n" % path ) for root , _dirs , files in os . walk ( path ) : if not safe_process_files ( root , files , args , state ) : return False if state . should_quit ( ) : return False return True
Check all files in path to see if there is any requests that we should send out on the bus .
84
21
240,783
def main ( ) : global args args = parse_args ( ) if not args : return 1 state = MyState ( args ) for path in args . paths : if os . path . isdir ( path ) : walk_dir ( path , args , state ) else : safe_process_files ( os . path . dirname ( path ) , [ os . path . basename ( path ) ] , args , state ) if state . should_quit ( ) : break if state . failed_files : sys . stderr . write ( "error: %i/%i AEADs failed\n" % ( len ( state . failed_files ) , state . file_count ) ) return 1 if args . debug : sys . stderr . write ( "Successfully processed %i AEADs\n" % ( state . file_count ) )
Main function when running as a program .
183
8
240,784
def search_for_oath_code ( hsm , key_handle , nonce , aead , counter , user_code , look_ahead = 1 ) : key_handle = pyhsm . util . input_validate_key_handle ( key_handle ) nonce = pyhsm . util . input_validate_nonce ( nonce , pad = False ) aead = pyhsm . util . input_validate_aead ( aead ) counter = pyhsm . util . input_validate_int ( counter , 'counter' ) user_code = pyhsm . util . input_validate_int ( user_code , 'user_code' ) hsm . load_temp_key ( nonce , key_handle , aead ) # User might have produced codes never sent to us, so we support trying look_ahead # codes to see if we find the user's current code. for j in xrange ( look_ahead ) : this_counter = counter + j secret = struct . pack ( "> Q" , this_counter ) hmac_result = hsm . hmac_sha1 ( pyhsm . defines . YSM_TEMP_KEY_HANDLE , secret ) . get_hash ( ) this_code = truncate ( hmac_result ) if this_code == user_code : return this_counter + 1 return None
Try to validate an OATH HOTP OTP generated by a token whose secret key is available to the YubiHSM through the AEAD .
303
30
240,785
def truncate ( hmac_result , length = 6 ) : assert ( len ( hmac_result ) == 20 ) offset = ord ( hmac_result [ 19 ] ) & 0xf bin_code = ( ord ( hmac_result [ offset ] ) & 0x7f ) << 24 | ( ord ( hmac_result [ offset + 1 ] ) & 0xff ) << 16 | ( ord ( hmac_result [ offset + 2 ] ) & 0xff ) << 8 | ( ord ( hmac_result [ offset + 3 ] ) & 0xff ) return bin_code % ( 10 ** length )
Perform the truncating .
132
6
240,786
def flush ( self ) : if self . debug : sys . stderr . write ( "%s: FLUSH INPUT (%i bytes waiting)\n" % ( self . __class__ . __name__ , self . ser . inWaiting ( ) ) ) self . ser . flushInput ( )
Flush input buffers .
64
5
240,787
def drain ( self ) : if self . debug : sys . stderr . write ( "%s: DRAIN INPUT (%i bytes waiting)\n" % ( self . __class__ . __name__ , self . ser . inWaiting ( ) ) ) old_timeout = self . ser . timeout self . ser . timeout = 0.1 data = self . ser . read ( 1 ) while len ( data ) : if self . debug : sys . stderr . write ( "%s: DRAINED 0x%x (%c)\n" % ( self . __class__ . __name__ , ord ( data [ 0 ] ) , data [ 0 ] ) ) data = self . ser . read ( 1 ) self . ser . timeout = old_timeout return True
Drain input .
167
4
240,788
def extract_keyhandle ( path , filepath ) : keyhandle = filepath . lstrip ( path ) keyhandle = keyhandle . split ( "/" ) return keyhandle [ 0 ]
Extract keyhandle value from the path
40
8
240,789
def insert_query ( connection , publicId , aead , keyhandle , aeadobj ) : # turn the keyhandle into an integer keyhandle = key_handle_to_int ( keyhandle ) if not keyhandle == aead . key_handle : print ( "WARNING: keyhandle does not match aead.key_handle" ) return None # creates the query object try : sql = aeadobj . insert ( ) . values ( public_id = publicId , keyhandle = aead . key_handle , nonce = aead . nonce , aead = aead . data ) # insert the query result = connection . execute ( sql ) return result except sqlalchemy . exc . IntegrityError : pass return None
This function reads the response fields and creates sql query . Then inserts everything inside the database
155
17
240,790
def import_keys ( hsm , args ) : res = True # ykksm 1 #123456,ftftftcccc,534543524554,fcacd309a20ce1809c2db257f0e8d6ea,000000000000,,, for line in sys . stdin : if line [ 0 ] == '#' : continue l = line . split ( ',' ) modhex_id = l [ 1 ] uid = l [ 2 ] . decode ( 'hex' ) key = l [ 3 ] . decode ( 'hex' ) if modhex_id and uid and key : public_id = pyhsm . yubikey . modhex_decode ( modhex_id ) padded_id = modhex_id . rjust ( args . public_id_chars , 'c' ) if int ( public_id , 16 ) == 0 : print "WARNING: Skipping import of key with public ID: %s" % ( padded_id ) print "This public ID is unsupported by the YubiHSM.\n" continue if args . verbose : print " %s" % ( padded_id ) secret = pyhsm . aead_cmd . YHSM_YubiKeySecret ( key , uid ) hsm . load_secret ( secret ) for kh in args . key_handles . keys ( ) : if ( args . random_nonce ) : nonce = "" else : nonce = public_id . decode ( 'hex' ) aead = hsm . generate_aead ( nonce , kh ) if args . internal_db : if not store_in_internal_db ( args , hsm , modhex_id , public_id , kh , aead ) : res = False continue filename = output_filename ( args . output_dir , args . key_handles [ kh ] , padded_id ) if args . verbose : print " %4s, %i bytes (%s) -> %s" % ( args . key_handles [ kh ] , len ( aead . data ) , shorten_aead ( aead ) , filename ) aead . save ( filename ) if args . verbose : print "" if res : print "\nDone\n" else : print "\nDone (one or more entries rejected)" return res
The main stdin iteration loop .
507
7
240,791
def shorten_aead ( aead ) : head = aead . data [ : 4 ] . encode ( 'hex' ) tail = aead . data [ - 4 : ] . encode ( 'hex' ) return "%s...%s" % ( head , tail )
Produce pretty - printable version of long AEAD .
58
12
240,792
def output_filename ( output_dir , key_handle , public_id ) : parts = [ output_dir , key_handle ] + pyhsm . util . group ( public_id , 2 ) path = os . path . join ( * parts ) if not os . path . isdir ( path ) : os . makedirs ( path ) return os . path . join ( path , public_id )
Return an output filename for a generated AEAD . Creates a hashed directory structure using the last three bytes of the public id to get equal usage .
88
31
240,793
def parse_result ( self , data ) : # typedef struct { # uint8_t publicId[YSM_PUBLIC_ID_SIZE]; // Public id (nonce) # uint32_t keyHandle; // Key handle # YSM_STATUS status; // Validation status # } YSM_DB_YUBIKEY_AEAD_STORE_RESP; public_id , key_handle , self . status = struct . unpack ( "< %is I B" % ( pyhsm . defines . YSM_AEAD_NONCE_SIZE ) , data ) pyhsm . util . validate_cmd_response_str ( 'public_id' , public_id , self . public_id ) pyhsm . util . validate_cmd_response_hex ( 'key_handle' , key_handle , self . key_handle ) if self . status == pyhsm . defines . YSM_STATUS_OK : return True else : raise pyhsm . exception . YHSM_CommandFailed ( pyhsm . defines . cmd2str ( self . command ) , self . status )
Return True if the AEAD was stored successfully .
247
12
240,794
def gen_keys ( hsm , args ) : if args . verbose : print "Generating %i keys :\n" % ( args . count ) else : print "Generating %i keys" % ( args . count ) for int_id in range ( args . start_id , args . start_id + args . count ) : public_id = ( "%x" % int_id ) . rjust ( args . public_id_chars , '0' ) padded_id = pyhsm . yubikey . modhex_encode ( public_id ) if args . verbose : print " %s" % ( padded_id ) num_bytes = len ( pyhsm . aead_cmd . YHSM_YubiKeySecret ( 'a' * 16 , 'b' * 6 ) . pack ( ) ) hsm . load_random ( num_bytes ) for kh in args . key_handles . keys ( ) : if args . random_nonce : nonce = "" else : nonce = public_id . decode ( 'hex' ) aead = hsm . generate_aead ( nonce , kh ) filename = output_filename ( args . output_dir , args . key_handles [ kh ] , padded_id ) if args . verbose : print " %4s, %i bytes (%s) -> %s" % ( args . key_handles [ kh ] , len ( aead . data ) , shorten_aead ( aead ) , filename ) aead . save ( filename ) if args . verbose : print "" print "\nDone\n"
The main key generating loop .
354
6
240,795
def reset ( stick ) : nulls = ( pyhsm . defines . YSM_MAX_PKT_SIZE - 1 ) * '\x00' res = YHSM_Cmd ( stick , pyhsm . defines . YSM_NULL , payload = nulls ) . execute ( read_response = False ) unlock = stick . acquire ( ) try : stick . drain ( ) stick . flush ( ) finally : unlock ( ) return res == 0
Send a bunch of zero - bytes to the YubiHSM and flush the input buffer .
97
19
240,796
def execute ( self , read_response = True ) : # // Up- and downlink packet # typedef struct { # uint8_t bcnt; // Number of bytes (cmd + payload) # uint8_t cmd; // YSM_xxx command # uint8_t payload[YSM_MAX_PKT_SIZE]; // Payload # } YSM_PKT; if self . command != pyhsm . defines . YSM_NULL : # YSM_NULL is the exception to the rule - it should NOT be prefixed with YSM_PKT.bcnt cmd_buf = struct . pack ( 'BB' , len ( self . payload ) + 1 , self . command ) else : cmd_buf = chr ( self . command ) cmd_buf += self . payload debug_info = None unlock = self . stick . acquire ( ) try : if self . stick . debug : debug_info = "%s (payload %i/0x%x)" % ( pyhsm . defines . cmd2str ( self . command ) , len ( self . payload ) , len ( self . payload ) ) self . stick . write ( cmd_buf , debug_info ) if not read_response : return None return self . _read_response ( ) finally : unlock ( )
Write command to HSM and read response .
278
9
240,797
def _read_response ( self ) : # // Up- and downlink packet # typedef struct { # uint8_t bcnt; // Number of bytes (cmd + payload) # uint8_t cmd; // YSM_xxx command # uint8_t payload[YSM_MAX_PKT_SIZE]; // Payload # } YSM_PKT; # read YSM_PKT.bcnt and YSM_PKT.cmd res = self . stick . read ( 2 , 'response length + response status' ) if len ( res ) != 2 : self . _handle_invalid_read_response ( res , 2 ) response_len , response_status = struct . unpack ( 'BB' , res ) response_len -= 1 # the status byte has been read already debug_info = None if response_status & pyhsm . defines . YSM_RESPONSE : debug_info = "%s response (%i/0x%x bytes)" % ( pyhsm . defines . cmd2str ( response_status - pyhsm . defines . YSM_RESPONSE ) , response_len , response_len ) # read YSM_PKT.payload res = self . stick . read ( response_len , debug_info ) if res : if response_status == self . command | pyhsm . defines . YSM_RESPONSE : self . executed = True self . response_status = response_status return self . parse_result ( res ) else : reset ( self . stick ) raise pyhsm . exception . YHSM_Error ( 'YubiHSM responded to wrong command' ) else : raise pyhsm . exception . YHSM_Error ( 'YubiHSM did not respond' )
After writing a command read response .
383
7
240,798
def reset ( self , test_sync = True ) : pyhsm . cmd . reset ( self . stick ) if test_sync : # Now verify we are in sync data = 'ekoeko' echo = self . echo ( data ) # XXX analyze 'echo' to see if we are in config mode, and produce a # nice exception if we are. return data == echo else : return True
Perform stream resynchronization .
85
7
240,799
def set_debug ( self , new ) : if type ( new ) is not bool : raise pyhsm . exception . YHSM_WrongInputType ( 'new' , bool , type ( new ) ) old = self . debug self . debug = new self . stick . set_debug ( new ) return old
Set debug mode .
68
4