idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
20,200 | def _add_annots ( self , layout , annots ) : if annots : for annot in resolve1 ( annots ) : annot = resolve1 ( annot ) if annot . get ( 'Rect' ) is not None : annot [ 'bbox' ] = annot . pop ( 'Rect' ) annot = self . _set_hwxy_attrs ( annot ) try : annot [ 'URI' ] = resolve1 ( annot [ 'A' ] ) [ 'URI' ] except KeyError : pass for k , v in six . iteritems ( annot ) : if not isinstance ( v , six . string_types ) : annot [ k ] = obj_to_string ( v ) elem = parser . makeelement ( 'Annot' , annot ) layout . add ( elem ) return layout | Adds annotations to the layout object |
20,201 | def _set_hwxy_attrs ( attr ) : bbox = attr [ 'bbox' ] attr [ 'x0' ] = bbox [ 0 ] attr [ 'x1' ] = bbox [ 2 ] attr [ 'y0' ] = bbox [ 1 ] attr [ 'y1' ] = bbox [ 3 ] attr [ 'height' ] = attr [ 'y1' ] - attr [ 'y0' ] attr [ 'width' ] = attr [ 'x1' ] - attr [ 'x0' ] return attr | Using the bbox attribute set the h w x0 x1 y0 and y1 attributes . |
20,202 | def _check_bool ( result , func , args ) : if not result : raise ctypes . WinError ( ctypes . get_last_error ( ) ) return args | Used as an error handler for Windows calls Gets last error if call is not successful |
20,203 | def get_csbi ( filehandle = None ) : if filehandle is None : filehandle = msvcrt . get_osfhandle ( sys . __stdout__ . fileno ( ) ) csbi = ConsoleScreenBufferInfo ( ) KERNEL32 . GetConsoleScreenBufferInfo ( filehandle , ctypes . byref ( csbi ) ) return csbi | Returns a CONSOLE_SCREEN_BUFFER_INFO structure for the given console or stdout |
20,204 | def enable_vt_mode ( filehandle = None ) : if filehandle is None : filehandle = msvcrt . get_osfhandle ( sys . __stdout__ . fileno ( ) ) current_mode = wintypes . DWORD ( ) KERNEL32 . GetConsoleMode ( filehandle , ctypes . byref ( current_mode ) ) new_mode = 0x0004 | current_mode . value KERNEL32 . SetConsoleMode ( filehandle , new_mode ) | Enables virtual terminal processing mode for the given console or stdout |
20,205 | def create_color_method ( color , code ) : def func ( self , content = '' ) : return self . _apply_color ( code , content ) setattr ( Terminal , color , func ) | Create a function for the given color Done inside this function to keep the variables out of the main scope |
20,206 | def _apply_color ( code , content ) : normal = u'\x1B[0m' seq = u'\x1B[%sm' % code return seq + ( normal + seq ) . join ( content . split ( normal ) ) + normal | Apply a color code to text |
20,207 | def color ( self , code ) : def func ( content = '' ) : return self . _apply_color ( u'38;5;%d' % code , content ) return func | When color is given as a number apply that color to the content While this is designed to support 256 color terminals Windows will approximate this with 16 colors |
20,208 | def download ( ) : ftp = ftplib . FTP ( SITE ) ftp . set_debuglevel ( DEBUG ) ftp . login ( USER , PASSWD ) ftp . cwd ( DIR ) filelist = ftp . nlst ( ) filecounter = MANAGER . counter ( total = len ( filelist ) , desc = 'Downloading' , unit = 'files' ) for filename in filelist : with Writer ( filename , ftp . size ( filename ) , DEST ) as writer : ftp . retrbinary ( 'RETR %s' % filename , writer . write ) print ( filename ) filecounter . update ( ) ftp . close ( ) | Download all files from an FTP share |
20,209 | def write ( self , block ) : self . fileobj . write ( block ) self . status . update ( len ( block ) ) | Write to local file and update progress bar |
20,210 | def process_files ( ) : with enlighten . Manager ( ) as manager : with manager . counter ( total = SPLINES , desc = 'Reticulating:' , unit = 'splines' ) as retic : for num in range ( SPLINES ) : time . sleep ( random . uniform ( 0.1 , 0.5 ) ) retic . update ( ) with manager . counter ( total = LLAMAS , desc = 'Herding:' , unit = 'llamas' ) as herd : for num in range ( SPLINES ) : time . sleep ( random . uniform ( 0.1 , 0.5 ) ) herd . update ( ) | Use Manager and Counter as context managers |
20,211 | def print_spelling_errors ( filename , encoding = 'utf8' ) : filesize = os . stat ( filename ) . st_size if filesize : sys . stdout . write ( 'Misspelled Words:\n' ) with io . open ( filename , encoding = encoding ) as wordlist : for line in wordlist : sys . stdout . write ( ' ' + line ) return 1 if filesize else 0 | Print misspelled words returned by sphinxcontrib - spelling |
20,212 | def initialize ( manager , initials = 15 ) : pbar = manager . counter ( total = initials , desc = 'Initializing:' , unit = 'initials' ) for num in range ( initials ) : time . sleep ( random . uniform ( 0.1 , 0.5 ) ) pbar . update ( ) pbar . close ( ) | Simple progress bar example |
20,213 | def _resize_handler ( self , * args , ** kwarg ) : try : assert self . resize_lock except AssertionError : self . resize_lock = True term = self . term term . clear_cache ( ) newHeight = term . height newWidth = term . width lastHeight = lastWidth = 0 while newHeight != lastHeight or newWidth != lastWidth : lastHeight = newHeight lastWidth = newWidth time . sleep ( .2 ) term . clear_cache ( ) newHeight = term . height newWidth = term . width if newWidth < self . width : offset = ( self . scroll_offset - 1 ) * ( 1 + self . width // newWidth ) term . move_to ( 0 , max ( 0 , newHeight - offset ) ) self . stream . write ( term . clear_eos ) self . width = newWidth self . _set_scroll_area ( force = True ) for cter in self . counters : cter . refresh ( flush = False ) self . stream . flush ( ) self . resize_lock = False | Called when a window resize signal is detected |
20,214 | def _at_exit ( self ) : if self . process_exit : try : term = self . term if self . set_scroll : term . reset ( ) else : term . move_to ( 0 , term . height ) self . term . feed ( ) except ValueError : pass | Resets terminal to normal configuration |
20,215 | def stop ( self ) : if self . enabled : term = self . term stream = self . stream positions = self . counters . values ( ) if not self . no_resize and RESIZE_SUPPORTED : signal . signal ( signal . SIGWINCH , self . sigwinch_orig ) try : for num in range ( self . scroll_offset - 1 , 0 , - 1 ) : if num not in positions : term . move_to ( 0 , term . height - num ) stream . write ( term . clear_eol ) stream . flush ( ) finally : if self . set_scroll : self . term . reset ( ) if self . companion_term : self . companion_term . reset ( ) else : term . move_to ( 0 , term . height ) self . process_exit = False self . enabled = False for cter in self . counters : cter . enabled = False if 1 in positions : term . feed ( ) | Clean up and reset terminal |
20,216 | def close ( self , clear = False ) : if clear and not self . leave : self . clear ( ) else : self . refresh ( ) self . manager . remove ( self ) | Do final refresh and remove from manager |
20,217 | def process_files ( manager ) : enterprise = manager . counter ( total = DATACENTERS , desc = 'Processing:' , unit = 'datacenters' ) for dnum in range ( 1 , DATACENTERS + 1 ) : systems = random . randint ( * SYSTEMS ) currCenter = manager . counter ( total = systems , desc = ' Datacenter %d:' % dnum , unit = 'systems' , leave = False ) for snum in range ( 1 , systems + 1 ) : system = manager . counter ( desc = ' System %d:' % snum , unit = 'files' , leave = False ) files = random . randint ( * FILES ) for fnum in range ( files ) : system . update ( ) time . sleep ( random . uniform ( 0.0001 , 0.0005 ) ) system . close ( ) LOGGER . info ( 'Updated %d files on System %d in Datacenter %d' , files , snum , dnum ) currCenter . update ( ) currCenter . close ( ) enterprise . update ( ) enterprise . close ( ) | Process a random number of files on a random number of systems across multiple data centers |
20,218 | def _state ( self , variable , num ) : value = getattr ( self , variable ) if value is None : return False if value is True : return True if random . randint ( 1 , num ) == num : setattr ( self , variable , True ) return True return False | Generic method to randomly determine if state is reached |
20,219 | def reset ( self ) : self . stream . write ( self . normal_cursor ) self . stream . write ( self . csr ( 0 , self . height ) ) self . stream . write ( self . move ( self . height , 0 ) ) | Reset scroll window and cursor to default |
20,220 | def move_to ( self , xpos , ypos ) : self . stream . write ( self . move ( ypos , xpos ) ) | Move cursor to specified position |
20,221 | def _height_and_width ( self ) : try : return self . _cache [ 'height_and_width' ] except KeyError : handw = self . _cache [ 'height_and_width' ] = super ( Terminal , self ) . _height_and_width ( ) return handw | Override for blessings . Terminal . _height_and_width Adds caching |
20,222 | def get_stack_info ( ) : stack = traceback . walk_stack ( sys . _getframe ( ) . f_back ) for frame , _ in stack : code = frame . f_code if code . co_name . startswith ( 'test_' ) : return ( frame . f_locals . copy ( ) , frame . f_globals [ '__name__' ] , code . co_filename , frame . f_lineno ) | Capture locals module name filename and line number from the stacktrace to provide the source of the assertion error and formatted note . |
20,223 | def assertBetween ( self , obj , lower , upper , strict = True , msg = None ) : if strict : standardMsg = '%s is not strictly between %s and %s' % ( obj , lower , upper ) op = operator . lt else : standardMsg = '%s is not between %s and %s' % ( obj , lower , upper ) op = operator . le if not ( op ( lower , obj ) and op ( obj , upper ) ) : self . fail ( self . _formatMessage ( msg , standardMsg ) ) | Fail if obj is not between lower and upper . |
20,224 | def assertMonotonicIncreasing ( self , sequence , strict = True , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if strict : standardMsg = ( 'Elements in %s are not strictly monotonically ' 'increasing' ) % ( sequence , ) op = operator . lt else : standardMsg = ( 'Elements in %s are not monotonically ' 'increasing' ) % ( sequence , ) op = operator . le if not self . _monotonic ( op , sequence ) : self . fail ( self . _formatMessage ( msg , standardMsg ) ) | Fail if sequence is not monotonically increasing . |
20,225 | def assertNotMonotonicDecreasing ( self , sequence , strict = True , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if strict : standardMsg = ( 'Elements in %s are strictly monotonically ' 'decreasing' ) % ( sequence , ) op = operator . gt else : standardMsg = ( 'Elements in %s are monotonically ' 'decreasing' ) % ( sequence , ) op = operator . ge if self . _monotonic ( op , sequence ) : self . fail ( self . _formatMessage ( msg , standardMsg ) ) | Fail if sequence is monotonically decreasing . |
20,226 | def assertUnique ( self , container , msg = None ) : if not isinstance ( container , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) standardMsg = 'Elements in %s are not unique' % ( container , ) for idx , elem in enumerate ( container ) : if elem in container [ : idx ] or elem in container [ idx + 1 : ] : self . fail ( self . _formatMessage ( msg , standardMsg ) ) | Fail if elements in container are not unique . |
20,227 | def _get_or_open_file ( filename ) : if isinstance ( filename , ( str , bytes ) ) : f = open ( filename ) elif hasattr ( filename , 'read' ) and hasattr ( filename , 'write' ) : f = filename else : raise TypeError ( 'filename must be str or bytes, or a file' ) return f | If filename is a string or bytes object open the filename and return the file object . If filename is file - like ( i . e . it has read and write attributes return filename . |
20,228 | def assertFileNameEqual ( self , filename , name , msg = None ) : fname = self . _get_file_name ( filename ) self . assertEqual ( fname , name , msg = msg ) | Fail if filename does not have the given name as determined by the == operator . |
20,229 | def assertFileNameNotEqual ( self , filename , name , msg = None ) : fname = self . _get_file_name ( filename ) self . assertNotEqual ( fname , name , msg = msg ) | Fail if filename has the given name as determined by the ! = operator . |
20,230 | def assertFileNameRegex ( self , filename , expected_regex , msg = None ) : fname = self . _get_file_name ( filename ) self . assertRegex ( fname , expected_regex , msg = msg ) | Fail unless filename matches expected_regex . |
20,231 | def assertFileNameNotRegex ( self , filename , expected_regex , msg = None ) : fname = self . _get_file_name ( filename ) self . assertNotRegex ( fname , expected_regex , msg = msg ) | Fail if filename matches expected_regex . |
20,232 | def assertFileTypeEqual ( self , filename , extension , msg = None ) : ftype = self . _get_file_type ( filename ) self . assertEqual ( ftype , extension , msg = msg ) | Fail if filename does not have the given extension as determined by the == operator . |
20,233 | def assertFileTypeNotEqual ( self , filename , extension , msg = None ) : ftype = self . _get_file_type ( filename ) self . assertNotEqual ( ftype , extension , msg = msg ) | Fail if filename has the given extension as determined by the ! = operator . |
20,234 | def assertFileEncodingEqual ( self , filename , encoding , msg = None ) : fencoding = self . _get_file_encoding ( filename ) fname = self . _get_file_name ( filename ) standardMsg = '%s is not %s encoded' % ( fname , encoding ) self . assertEqual ( fencoding . lower ( ) , encoding . lower ( ) , self . _formatMessage ( msg , standardMsg ) ) | Fail if filename is not encoded with the given encoding as determined by the == operator . |
20,235 | def assertFileEncodingNotEqual ( self , filename , encoding , msg = None ) : fencoding = self . _get_file_encoding ( filename ) fname = self . _get_file_name ( filename ) standardMsg = '%s is %s encoded' % ( fname , encoding ) self . assertNotEqual ( fencoding . lower ( ) , encoding . lower ( ) , self . _formatMessage ( msg , standardMsg ) ) | Fail if filename is encoded with the given encoding as determined by the ! = operator . |
20,236 | def assertFileSizeEqual ( self , filename , size , msg = None ) : fsize = self . _get_file_size ( filename ) self . assertEqual ( fsize , size , msg = msg ) | Fail if filename does not have the given size as determined by the == operator . |
20,237 | def assertFileSizeNotEqual ( self , filename , size , msg = None ) : fsize = self . _get_file_size ( filename ) self . assertNotEqual ( fsize , size , msg = msg ) | Fail if filename has the given size as determined by the ! = operator . |
20,238 | def assertFileSizeGreater ( self , filename , size , msg = None ) : fsize = self . _get_file_size ( filename ) self . assertGreater ( fsize , size , msg = msg ) | Fail if filename s size is not greater than size as determined by the > operator . |
20,239 | def assertFileSizeGreaterEqual ( self , filename , size , msg = None ) : fsize = self . _get_file_size ( filename ) self . assertGreaterEqual ( fsize , size , msg = msg ) | Fail if filename s size is not greater than or equal to size as determined by the > = operator . |
20,240 | def assertFileSizeLess ( self , filename , size , msg = None ) : fsize = self . _get_file_size ( filename ) self . assertLess ( fsize , size , msg = msg ) | Fail if filename s size is not less than size as determined by the < operator . |
20,241 | def assertFileSizeLessEqual ( self , filename , size , msg = None ) : fsize = self . _get_file_size ( filename ) self . assertLessEqual ( fsize , size , msg = msg ) | Fail if filename s size is not less than or equal to size as determined by the < = operator . |
20,242 | def assertCategoricalLevelsEqual ( self , levels1 , levels2 , msg = None ) : if not isinstance ( levels1 , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if not isinstance ( levels2 , collections . Iterable ) : raise TypeError ( 'Second argument is not iterable' ) standardMsg = '%s levels != %s levels' % ( levels1 , levels2 ) if not all ( level in levels2 for level in levels1 ) : self . fail ( self . _formatMessage ( msg , standardMsg ) ) if not all ( level in levels1 for level in levels2 ) : self . fail ( self . _formatMessage ( msg , standardMsg ) ) | Fail if levels1 and levels2 do not have the same domain . |
20,243 | def assertCategoricalLevelsNotEqual ( self , levels1 , levels2 , msg = None ) : if not isinstance ( levels1 , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if not isinstance ( levels2 , collections . Iterable ) : raise TypeError ( 'Second argument is not iterable' ) standardMsg = '%s levels == %s levels' % ( levels1 , levels2 ) unshared_levels = False if not all ( level in levels2 for level in levels1 ) : unshared_levels = True if not all ( level in levels1 for level in levels2 ) : unshared_levels = True if not unshared_levels : self . fail ( self . _formatMessage ( msg , standardMsg ) ) | Fail if levels1 and levels2 have the same domain . |
20,244 | def assertCategoricalLevelIn ( self , level , levels , msg = None ) : if not isinstance ( levels , collections . Iterable ) : raise TypeError ( 'Second argument is not iterable' ) self . assertIn ( level , levels , msg = msg ) | Fail if level is not in levels . |
20,245 | def assertCategoricalLevelNotIn ( self , level , levels , msg = None ) : if not isinstance ( levels , collections . Iterable ) : raise TypeError ( 'Second argument is not iterable' ) self . assertNotIn ( level , levels , msg = msg ) | Fail if level is in levels . |
20,246 | def assertDateTimesBefore ( self , sequence , target , strict = True , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if strict : standardMsg = '%s is not strictly less than %s' % ( sequence , target ) op = operator . lt else : standardMsg = '%s is not less than %s' % ( sequence , target ) op = operator . le if isinstance ( target , collections . Iterable ) : if len ( target ) != len ( sequence ) : raise ValueError ( ( 'Length mismatch: ' 'first argument contains %s elements, ' 'second argument contains %s elements' % ( len ( sequence ) , len ( target ) ) ) ) if not all ( op ( i , j ) for i , j in zip ( sequence , target ) ) : self . fail ( self . _formatMessage ( msg , standardMsg ) ) elif isinstance ( target , ( date , datetime ) ) : if not all ( op ( element , target ) for element in sequence ) : self . fail ( self . _formatMessage ( msg , standardMsg ) ) else : raise TypeError ( 'Second argument is not a datetime or date object or iterable' ) | Fail if any elements in sequence are not before target . |
20,247 | def assertDateTimesPast ( self , sequence , strict = True , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if isinstance ( max ( sequence ) , datetime ) : target = datetime . today ( ) elif isinstance ( max ( sequence ) , date ) : target = date . today ( ) else : raise TypeError ( 'Expected iterable of datetime or date objects' ) self . assertDateTimesBefore ( sequence , target , strict = strict , msg = msg ) | Fail if any elements in sequence are not in the past . |
20,248 | def assertDateTimesFuture ( self , sequence , strict = True , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if isinstance ( min ( sequence ) , datetime ) : target = datetime . today ( ) elif isinstance ( min ( sequence ) , date ) : target = date . today ( ) else : raise TypeError ( 'Expected iterable of datetime or date objects' ) self . assertDateTimesAfter ( sequence , target , strict = strict , msg = msg ) | Fail if any elements in sequence are not in the future . |
20,249 | def assertDateTimesFrequencyEqual ( self , sequence , frequency , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if not isinstance ( frequency , timedelta ) : raise TypeError ( 'Second argument is not a timedelta object' ) standardMsg = 'unexpected frequencies found in %s' % sequence s1 = pd . Series ( sequence ) s2 = s1 . shift ( - 1 ) freq = s2 - s1 if not all ( f == frequency for f in freq [ : - 1 ] ) : self . fail ( self . _formatMessage ( msg , standardMsg ) ) | Fail if any elements in sequence aren t separated by the expected fequency . |
20,250 | def assertDateTimesLagEqual ( self , sequence , lag , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if not isinstance ( lag , timedelta ) : raise TypeError ( 'Second argument is not a timedelta object' ) if isinstance ( max ( sequence ) , datetime ) : target = datetime . today ( ) elif isinstance ( max ( sequence ) , date ) : target = date . today ( ) else : raise TypeError ( 'Expected iterable of datetime or date objects' ) self . assertEqual ( target - max ( sequence ) , lag , msg = msg ) | Fail unless max element in sequence is separated from the present by lag as determined by the == operator . |
20,251 | def assertDateTimesLagLess ( self , sequence , lag , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if not isinstance ( lag , timedelta ) : raise TypeError ( 'Second argument is not a timedelta object' ) if isinstance ( max ( sequence ) , datetime ) : target = datetime . today ( ) elif isinstance ( max ( sequence ) , date ) : target = date . today ( ) else : raise TypeError ( 'Expected iterable of datetime or date objects' ) self . assertLess ( target - max ( sequence ) , lag , msg = msg ) | Fail if max element in sequence is separated from the present by lag or more as determined by the < operator . |
20,252 | def assertDateTimesLagLessEqual ( self , sequence , lag , msg = None ) : if not isinstance ( sequence , collections . Iterable ) : raise TypeError ( 'First argument is not iterable' ) if not isinstance ( lag , timedelta ) : raise TypeError ( 'Second argument is not a timedelta object' ) if isinstance ( max ( sequence ) , datetime ) : target = datetime . today ( ) elif isinstance ( max ( sequence ) , date ) : target = date . today ( ) else : raise TypeError ( 'Expected iterable of datetime or date objects' ) self . assertLessEqual ( target - max ( sequence ) , lag , msg = msg ) | Fail if max element in sequence is separated from the present by more than lag as determined by the < = operator . |
20,253 | def assertTimeZoneIsNone ( self , dt , msg = None ) : if not isinstance ( dt , datetime ) : raise TypeError ( 'First argument is not a datetime object' ) self . assertIsNone ( dt . tzinfo , msg = msg ) | Fail if dt has a non - null tzinfo attribute . |
20,254 | def assertTimeZoneIsNotNone ( self , dt , msg = None ) : if not isinstance ( dt , datetime ) : raise TypeError ( 'First argument is not a datetime object' ) self . assertIsNotNone ( dt . tzinfo , msg = msg ) | Fail unless dt has a non - null tzinfo attribute . |
20,255 | def assertTimeZoneEqual ( self , dt , tz , msg = None ) : if not isinstance ( dt , datetime ) : raise TypeError ( 'First argument is not a datetime object' ) if not isinstance ( tz , timezone ) : raise TypeError ( 'Second argument is not a timezone object' ) self . assertEqual ( dt . tzinfo , tz , msg = msg ) | Fail unless dt s tzinfo attribute equals tz as determined by the == operator . |
20,256 | def assertTimeZoneNotEqual ( self , dt , tz , msg = None ) : if not isinstance ( dt , datetime ) : raise TypeError ( 'First argument is not a datetime object' ) if not isinstance ( tz , timezone ) : raise TypeError ( 'Second argument is not a timezone object' ) self . assertNotEqual ( dt . tzinfo , tz , msg = msg ) | Fail if dt s tzinfo attribute equals tz as determined by the ! = operator . |
20,257 | def _class_defining_method ( meth ) : if inspect . ismethod ( meth ) : for cls in inspect . getmro ( meth . __self__ . __class__ ) : if cls . __dict__ . get ( meth . __name__ ) is meth : return '{}.{}' . format ( cls . __module__ , cls . __name__ ) meth = meth . __func__ if inspect . isfunction ( meth ) : module = meth . __qualname__ . split ( '.<locals>' , 1 ) [ 0 ] cls = getattr ( inspect . getmodule ( meth ) , module . rsplit ( '.' , 1 ) [ 0 ] ) if isinstance ( cls , type ) : return '{}.{}' . format ( cls . __module__ , cls . __name__ ) | Gets the name of the class that defines meth . |
20,258 | def configure ( self , ** kwargs ) : if 'logfile' in kwargs : self . _logfilename = kwargs [ 'logfile' ] if 'attrs' in kwargs : self . _attrs = kwargs [ 'attrs' ] if 'verbose_attrs' in kwargs : self . _verbose_attrs = kwargs [ 'verbose_attrs' ] if 'verbose' in kwargs : self . _verbose = kwargs [ 'verbose' ] | Configure what assertion logging is done . |
20,259 | def _find_msg_argument ( signature ) : names = signature . parameters . keys ( ) try : msg_idx = list ( names ) . index ( 'msg' ) default_msg = signature . parameters [ 'msg' ] . default except ValueError : msg_idx = - 1 default_msg = None kinds = ( inspect . Parameter . POSITIONAL_ONLY , inspect . Parameter . POSITIONAL_OR_KEYWORD ) non_msg_params = itertools . takewhile ( lambda param : param . name != 'msg' and param . kind in kinds , signature . parameters . values ( ) ) non_msg_params = sum ( 1 for _ in non_msg_params ) return msg_idx , default_msg , non_msg_params | Locates the msg argument in a function signature . |
20,260 | def _extract_msg ( args , kwargs , msg_idx , default_msg , non_msg_params ) : rem_args = [ ] if 'msg' in kwargs : msg = kwargs . pop ( 'msg' ) elif len ( args ) > non_msg_params and msg_idx < len ( args ) : msg = args [ msg_idx ] if 0 <= msg_idx : rem_args = args [ msg_idx + 1 : ] args = args [ : msg_idx ] else : msg = default_msg return msg , args , rem_args , kwargs | Extracts the msg argument from the passed args . |
20,261 | def wrap ( self , text , ** kwargs ) : pilcrow = re . compile ( r'(\n\s*\n)' , re . MULTILINE ) list_prefix = re . compile ( r'\s*(?:\w|[0-9]+)[\.\)]\s+' ) paragraphs = pilcrow . split ( text ) wrapped_lines = [ ] for paragraph in paragraphs : if paragraph . isspace ( ) : wrapped_lines . append ( '' ) else : wrapper = textwrap . TextWrapper ( ** vars ( self ) ) list_item = re . match ( list_prefix , paragraph ) if list_item : wrapper . subsequent_indent += ' ' * len ( list_item . group ( 0 ) ) wrapped_lines . extend ( wrapper . wrap ( paragraph ) ) return wrapped_lines | Wraps each paragraph in text individually . |
20,262 | def assert_stmt ( self ) : module_globals = vars ( sys . modules [ self . module ] ) line_range , lineno = self . _find_assert_stmt ( self . filename , self . linenumber , module_globals = module_globals ) source = [ linecache . getline ( self . filename , x , module_globals = module_globals ) for x in line_range ] dedented_lines = textwrap . dedent ( '' . join ( source ) ) . split ( '\n' ) [ : - 1 ] formatted_lines = [ ] for i , line in zip ( line_range , dedented_lines ) : prefix = '>' if i == lineno else ' ' formatted_lines . append ( ' {0} {1:4d} {2}' . format ( prefix , i , line ) ) return '\n' . join ( formatted_lines ) | Returns a string displaying the whole statement that failed with a > indicator on the line starting the expression . |
20,263 | def _find_assert_stmt ( filename , linenumber , leading = 1 , following = 2 , module_globals = None ) : lines = linecache . getlines ( filename , module_globals = module_globals ) _source = '' . join ( lines ) _tree = ast . parse ( _source ) finder = _StatementFinder ( linenumber ) finder . visit ( _tree ) line_range = range ( finder . found - leading , linenumber + following ) return line_range , finder . found | Given a Python module name filename and line number find the lines that are part of the statement containing that line . |
20,264 | def _validate_annotation ( self , annotation ) : required_keys = set ( self . _required_keys ) keys = set ( key for key , val in annotation . items ( ) if val ) missing_keys = required_keys . difference ( keys ) if missing_keys : error = 'Annotation missing required fields: {0}' . format ( missing_keys ) raise AnnotationError ( error ) | Ensures that the annotation has the right fields . |
20,265 | def _tchelper ( tc_deps , evals , deps ) : for e in evals : if e in tc_deps : continue else : if e in deps : tc_deps [ e ] = deps [ e ] _tchelper ( tc_deps , deps [ e ] , deps ) return tc_deps | modifies graph in place |
20,266 | def run ( self , evals , feed_dict = None , breakpoints = None , break_immediately = False ) : if not isinstance ( evals , list ) : evals = [ evals ] if feed_dict is None : feed_dict = { } if breakpoints is None : breakpoints = [ ] self . state = RUNNING self . _original_evals = evals self . _original_feed_dict = feed_dict self . _exe_order = op_store . compute_exe_order ( evals ) self . _init_evals_bps ( evals , breakpoints ) for k , v in feed_dict . items ( ) : if not isinstance ( k , str ) : k = k . name self . _cache [ k ] = v op_store . register_dbsession ( self ) if break_immediately : return self . _break ( ) else : return self . c ( ) | starts the debug session |
20,267 | def s ( self ) : next_node = self . _exe_order [ self . step ] self . _eval ( next_node ) self . step += 1 if self . step == len ( self . _exe_order ) : return self . _finish ( ) else : return self . _break ( value = self . _cache . get ( next_node . name ) ) | step to the next node in the execution order |
20,268 | def get_value ( self , node ) : if isinstance ( node , tf . Tensor ) : return self . _cache . get ( node . name , None ) elif isinstance ( node , tf . Operation ) : return None else : return self . _cache . get ( node , None ) | retrieve a node value from the cache |
20,269 | def _init_evals_bps ( self , evals , breakpoints ) : evals2 = [ op_store . get_op ( t ) if op_store . is_htop_out ( t ) else t for t in evals ] breakpoints2 = [ op_store . get_op ( t ) if op_store . is_htop_out ( t ) else t for t in breakpoints ] self . _exe_order = op_store . compute_exe_order ( evals2 ) self . _evalset = set ( [ e . name for e in evals2 ] ) for e in self . _exe_order : if isinstance ( e , HTOp ) : self . _evalset . add ( e . name ) for t in e . inputs : if not op_store . is_htop_out ( t ) : self . _evalset . add ( t . name ) self . _bpset = set ( [ bp . name for bp in breakpoints2 ] ) | HTOps may depend on tf . Tensors that are not in eval . We need to have all inputs to HTOps ready upon evaluation . |
20,270 | def _eval ( self , node ) : if isinstance ( node , HTOp ) : feed_dict = dict ( ( t , self . _cache [ t . name ] ) for t in node . inputs ) node . run ( feed_dict ) else : if isinstance ( node , tf . Tensor ) : result = self . session . run ( node , self . _cache ) self . _cache [ node . name ] = result else : if node . type == 'Assign' or node . type == 'AssignAdd' or node . type == 'AssignSub' : self . session . run ( node , self . _original_feed_dict ) | node is a TensorFlow Op or Tensor from self . _exe_order |
20,271 | def error_rate ( predictions , labels ) : return 100.0 - ( 100.0 * np . sum ( np . argmax ( predictions , 1 ) == np . argmax ( labels , 1 ) ) / predictions . shape [ 0 ] ) | Return the error rate based on dense predictions and 1 - hot labels . |
20,272 | def get_node ( name ) : if name in _ops : return _ops [ name ] else : g = tf . get_default_graph ( ) return g . as_graph_element ( name ) | returns HTOp or tf graph element corresponding to requested node name |
20,273 | def cache_values ( self , results ) : if results is None : return elif isinstance ( results , np . ndarray ) : results = [ results ] elif isinstance ( results , list ) : if len ( results ) is not len ( self . outputs ) : raise ValueError ( 'Number of output tensors does not match number of outputs produced by function' ) elif isinstance ( results , np . number ) : if len ( self . outputs ) != 1 : raise ValueError ( 'Fn produces scalar but %d outputs expected' % ( len ( self . outputs ) ) ) results = [ results ] for i , ndarray in enumerate ( results ) : self . session . _cache_value ( self . outputs [ i ] , ndarray ) | loads into DebugSession cache |
def debug(evals, feed_dict=None, breakpoints=None, break_immediately=False, session=None):
    """Spawn a new debug session and run it over the requested evals."""
    global _dbsession
    # Replace any previous module-level session with a fresh one.
    _dbsession = debug_session.DebugSession(session)
    return _dbsession.run(evals, feed_dict, breakpoints, break_immediately)
def connect():
    """Establish a comm connection to the frontend notebook."""
    global _comm
    if not is_notebook():
        print('Python session is not running in a Notebook Kernel')
        return
    kernel = get_ipython().kernel
    kernel.comm_manager.register_target('tdb', handle_comm_opened)
    # Open the channel and leave message handling disabled until requested.
    _comm = Comm(target_name='tdb', data={})
    _comm.on_msg(None)
def send_action(action, params=None):
    """Helper for sending an action message over the comm channel."""
    payload = {"msg_type": "action", "action": action}
    if params is not None:
        payload['params'] = params
    _comm.send(payload)
def send_fig(fig, name):
    """Send a matplotlib figure to the frontend as a base64 PNG data URI."""
    buf = StringIO.StringIO()
    fig.savefig(buf, format='png')
    buf.seek(0)
    encoded = urllib.quote(b64encode(buf.buf))
    send_action("update_plot",
                params={"src": 'data:image/png;base64,' + encoded, "name": name})
def create_engine(url, con=None, header=True, show_progress=5.0, clear_progress=True):
    """Create a handler for a query engine based on a URL."""
    parsed = urlparse(url)
    engine_type = parsed.scheme if parsed.scheme else 'presto'
    if con is None:
        if parsed.netloc:
            # The netloc embeds credentials as apikey@host.
            apikey, host = parsed.netloc.split('@')
            con = Connection(apikey=apikey, endpoint="https://{0}/".format(host))
        else:
            con = Connection()
    path = parsed.path
    database = path[1:] if path.startswith('/') else path
    params = {'type': engine_type}
    params.update(parse_qsl(parsed.query))
    return QueryEngine(con, database, params, header=header,
                       show_progress=show_progress, clear_progress=clear_progress)
def read_td_query(query, engine, index_col=None, parse_dates=None, distributed_join=False, params=None):
    """Read a Treasure Data query result into a DataFrame."""
    if params is None:
        params = {}
    header = engine.create_header("read_td_query")
    if engine.type == 'presto' and distributed_join is not None:
        flag = 'true' if distributed_join else 'false'
        header += "-- set session distributed_join = '{0}'\n".format(flag)
    result = engine.execute(header + query, **params)
    return result.to_dataframe(index_col=index_col, parse_dates=parse_dates)
def read_td_job(job_id, engine, index_col=None, parse_dates=None):
    """Read a Treasure Data job result into a DataFrame."""
    job = engine.connection.client.job(job_id)
    # Block until the job result is available.
    result = engine.get_result(job, wait=True)
    return result.to_dataframe(index_col=index_col, parse_dates=parse_dates)
def read_td_table(table_name, engine, index_col=None, parse_dates=None, columns=None, time_range=None, limit=10000):
    """Read a Treasure Data table into a DataFrame."""
    # NOTE(review): table/column names are interpolated directly into SQL;
    # callers must not pass untrusted input here.
    select_list = '*' if columns is None else ', '.join(columns)
    query = engine.create_header("read_td_table('{0}')".format(table_name))
    query += "SELECT {0}\n".format(select_list)
    query += "FROM {0}\n".format(table_name)
    if time_range is not None:
        start, end = time_range
        query += "WHERE td_time_range(time, {0}, {1})\n".format(
            _convert_time(start), _convert_time(end))
    if limit is not None:
        query += "LIMIT {0}\n".format(limit)
    return engine.execute(query).to_dataframe(index_col=index_col, parse_dates=parse_dates)
def to_td(frame, name, con, if_exists='fail', time_col=None, time_index=None, index=True, index_label=None, chunksize=10000, date_format=None):
    """Write a DataFrame to a Treasure Data table.

    Args:
        name: fully qualified 'database.table' destination.
        if_exists: 'fail' (default), 'replace', or 'append'.

    Raises:
        RuntimeError: if the table exists and if_exists == 'fail'.
        ValueError: if if_exists has an unknown value.
    """
    database, table = name.split('.')
    uploader = StreamingUploader(con.client, database, table,
                                 show_progress=True, clear_progress=True)
    uploader.message('Streaming import into: {0}.{1}'.format(database, table))

    # Resolve the target table according to the if_exists policy; a missing
    # table is signalled by tdclient.api.NotFoundError.
    if if_exists == 'fail':
        try:
            con.client.table(database, table)
        except tdclient.api.NotFoundError:
            uploader.message('creating new table...')
            con.client.create_log_table(database, table)
        else:
            raise RuntimeError('table "%s" already exists' % name)
    elif if_exists == 'replace':
        try:
            con.client.table(database, table)
        except tdclient.api.NotFoundError:
            pass
        else:
            uploader.message('deleting old table...')
            con.client.delete_table(database, table)
        uploader.message('creating new table...')
        con.client.create_log_table(database, table)
    elif if_exists == 'append':
        try:
            con.client.table(database, table)
        except tdclient.api.NotFoundError:
            uploader.message('creating new table...')
            con.client.create_log_table(database, table)
    else:
        raise ValueError('invalid value for if_exists: %s' % if_exists)

    if time_index:
        # A generated time index replaces any frame index.
        index = None

    # Work on a copy so the caller's frame is never mutated.
    frame = frame.copy()
    frame = _convert_time_column(frame, time_col, time_index)
    frame = _convert_index_column(frame, index, index_label)
    frame = _convert_date_format(frame, date_format)

    uploader.upload_frame(frame, chunksize)
    uploader.wait_for_import(len(frame))
def ensure_dir(path):
    """Ensure the parent directory of *path* exists, creating it if needed.

    Args:
        path: a file path; its dirname (if any) is created recursively.
    """
    dirpath = os.path.dirname(path)
    if dirpath and not os.path.isdir(dirpath):
        try:
            os.makedirs(dirpath)
        except OSError:
            # Another process may have created the directory between the
            # check and makedirs (TOCTOU race in the original exists() guard);
            # only re-raise if it still does not exist.
            if not os.path.isdir(dirpath):
                raise
def normalize_value(value):
    """Convert *value* to a (unicode) string and lowercase it."""
    if six.PY2:
        # On Python 2 use unicode() to avoid byte-string casefolding issues.
        return unicode(value).lower()
    return str(value).lower()
def infer(data, row_limit, confidence, encoding, to_file):
    """Infer a Table Schema descriptor from data, optionally writing it to a file."""
    descriptor = tableschema.infer(
        data, encoding=encoding, limit=row_limit, confidence=confidence)
    if to_file:
        with io.open(to_file, mode='w+t', encoding='utf-8') as dest:
            dest.write(json.dumps(descriptor, ensure_ascii=False, indent=4))
    # Always echo the descriptor, even when it was written to a file.
    click.echo(descriptor)
def validate(schema):
    """Validate that a supposed schema is in fact a Table Schema.

    Exits with status 0 when valid, 1 otherwise.
    """
    try:
        tableschema.validate(schema)
    except tableschema.exceptions.ValidationError as exception:
        click.echo("Schema is not valid")
        click.echo(exception.errors)
        sys.exit(1)
    click.echo("Schema is valid")
    sys.exit(0)
20,287 | def _CheckKeyPath ( self , registry_key , search_depth ) : if self . _key_path_segments is None : return False if search_depth < 0 or search_depth > self . _number_of_key_path_segments : return False if search_depth == 0 : segment_name = '' else : segment_name = self . _key_path_segments [ search_depth - 1 ] if self . _is_regex : if isinstance ( segment_name , py2to3 . STRING_TYPES ) : flags = re . DOTALL | re . IGNORECASE | re . UNICODE try : segment_name = r'^{0:s}$' . format ( segment_name ) segment_name = re . compile ( segment_name , flags = flags ) except sre_constants . error : return False self . _key_path_segments [ search_depth - 1 ] = segment_name else : segment_name = segment_name . lower ( ) self . _key_path_segments [ search_depth - 1 ] = segment_name if search_depth > 0 : if self . _is_regex : if not segment_name . match ( registry_key . name ) : return False elif segment_name != registry_key . name . lower ( ) : return False return True | Checks the key path find specification . |
def Matches(self, registry_key, search_depth):
    """Determines if the Windows Registry key matches the find specification.

    Args:
        registry_key: the Windows Registry key to check.
        search_depth: current search depth (number of path segments matched).

    Returns:
        tuple: (bool match, key_path_match) where key_path_match is None
        when the specification has no key path, otherwise the result of
        the key path check.
    """
    if self._key_path_segments is None:
        # BUG FIX: with no key path the specification matches every key;
        # the flattened original fell through into `if not key_path_match`
        # and the depth-equality check, so it always returned (False, None)
        # and a default FindSpec could never match anything.
        key_path_match = None
    else:
        key_path_match = self._CheckKeyPath(registry_key, search_depth)
        if not key_path_match:
            return False, key_path_match
        # A full match also requires having consumed every path segment.
        if search_depth != self._number_of_key_path_segments:
            return False, key_path_match
    return True, key_path_match
20,289 | def _FindInKey ( self , registry_key , find_specs , search_depth ) : sub_find_specs = [ ] for find_spec in find_specs : match , key_path_match = find_spec . Matches ( registry_key , search_depth ) if match : yield registry_key . path if key_path_match != False and not find_spec . AtMaximumDepth ( search_depth ) : sub_find_specs . append ( find_spec ) if sub_find_specs : search_depth += 1 for sub_registry_key in registry_key . GetSubkeys ( ) : for matching_path in self . _FindInKey ( sub_registry_key , sub_find_specs , search_depth ) : yield matching_path | Searches for matching keys within the Windows Registry key . |
def Find(self, find_specs=None):
    """Searches for matching keys within the Windows Registry.

    Args:
        find_specs: list of find specifications; defaults to a single
            catch-all FindSpec.

    Yields:
        str: key paths of matching Registry keys.
    """
    specs = find_specs or [FindSpec()]
    root_key = self._win_registry.GetRootKey()
    for matching_path in self._FindInKey(root_key, specs, 0):
        yield matching_path
def RecurseKeys(self):
    """Recurses the Windows Registry keys starting with the root key.

    Yields:
        each Registry key, depth first, starting at the root; nothing when
        there is no root key.
    """
    root_key = self.GetRootKey()
    if not root_key:
        return
    for registry_key in root_key.RecurseKeys():
        yield registry_key
def SetKeyPathPrefix(self, key_path_prefix):
    """Sets the Windows Registry key path prefix.

    Args:
        key_path_prefix: the key path prefix string.
    """
    # Cache the length and upper-cased form alongside the prefix so later
    # lookups do not have to recompute them.
    self._key_path_prefix = key_path_prefix
    self._key_path_prefix_length = len(key_path_prefix)
    self._key_path_prefix_upper = key_path_prefix.upper()
def RecurseKeys(self):
    """Recurses the subkeys starting with this key.

    Yields:
        this key first, then every descendant key, depth first.
    """
    yield self
    for subkey in self.GetSubkeys():
        for descendant in subkey.RecurseKeys():
            yield descendant
def DataIsInteger(self):
    """Determines, based on the data type, if the data is an integer.

    Returns:
        bool: True for REG_DWORD, REG_DWORD_BIG_ENDIAN or REG_QWORD data.
    """
    integer_types = (
        definitions.REG_DWORD,
        definitions.REG_DWORD_BIG_ENDIAN,
        definitions.REG_QWORD)
    return self.data_type in integer_types
def AddKeyByPath(self, key_path, registry_key):
    """Adds a Windows Registry key for a specific key path.

    Intermediate keys that do not exist yet are created as empty
    FakeWinRegistryKey instances.

    Args:
        key_path: absolute key path (must start with the path separator).
        registry_key: the Windows Registry key to add.

    Raises:
        ValueError: if the key path is not absolute.
    """
    if not key_path.startswith(definitions.KEY_PATH_SEPARATOR):
        raise ValueError('Key path does not start with: {0:s}'.format(
            definitions.KEY_PATH_SEPARATOR))

    if not self._root_key:
        self._root_key = FakeWinRegistryKey(self._key_path_prefix)

    parent_key = self._root_key
    for path_segment in key_paths.SplitKeyPath(key_path):
        try:
            # EAFP: AddSubkey raises KeyError when the subkey already
            # exists, in which case the existing subkey is reused.
            subkey = FakeWinRegistryKey(path_segment)
            parent_key.AddSubkey(subkey)
        except KeyError:
            subkey = parent_key.GetSubkeyByName(path_segment)
        parent_key = subkey

    parent_key.AddSubkey(registry_key)
20,296 | def _BuildKeyHierarchy ( self , subkeys , values ) : if subkeys : for registry_key in subkeys : name = registry_key . name . upper ( ) if name in self . _subkeys : continue self . _subkeys [ name ] = registry_key registry_key . _key_path = key_paths . JoinKeyPath ( [ self . _key_path , registry_key . name ] ) if values : for registry_value in values : name = registry_value . name . upper ( ) if name in self . _values : continue self . _values [ name ] = registry_value | Builds the Windows Registry key hierarchy . |
def AddValue(self, registry_value):
    """Adds a value.

    Args:
        registry_value: the Windows Registry value to add.

    Raises:
        KeyError: if a value with the same (case-insensitive) name exists.
    """
    lookup_name = registry_value.name.upper()
    if lookup_name in self._values:
        raise KeyError('Value: {0:s} already exists.'.format(registry_value.name))
    self._values[lookup_name] = registry_value
20,298 | def _GetCachedFileByPath ( self , key_path_upper ) : longest_key_path_prefix_upper = '' longest_key_path_prefix_length = len ( longest_key_path_prefix_upper ) for key_path_prefix_upper in self . _registry_files : if key_path_upper . startswith ( key_path_prefix_upper ) : key_path_prefix_length = len ( key_path_prefix_upper ) if key_path_prefix_length > longest_key_path_prefix_length : longest_key_path_prefix_upper = key_path_prefix_upper longest_key_path_prefix_length = key_path_prefix_length if not longest_key_path_prefix_upper : return None , None registry_file = self . _registry_files . get ( longest_key_path_prefix_upper , None ) return longest_key_path_prefix_upper , registry_file | Retrieves a cached Windows Registry file for a key path . |
20,299 | def _GetCurrentControlSet ( self , key_path_suffix ) : select_key_path = 'HKEY_LOCAL_MACHINE\\System\\Select' select_key = self . GetKeyByPath ( select_key_path ) if not select_key : return None control_set = None for value_name in ( 'Current' , 'Default' , 'LastKnownGood' ) : value = select_key . GetValueByName ( value_name ) if not value or not value . DataIsInteger ( ) : continue control_set = value . GetDataAsObject ( ) if control_set > 0 or control_set <= 999 : break if not control_set or control_set <= 0 or control_set > 999 : return None control_set_path = 'HKEY_LOCAL_MACHINE\\System\\ControlSet{0:03d}' . format ( control_set ) key_path = '' . join ( [ control_set_path , key_path_suffix ] ) return self . GetKeyByPath ( key_path ) | Virtual key callback to determine the current control set . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.