idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
3,200
def pack(self, out: IO):
    """Write the Field to the file-like object `out`.

    Layout: access flags, then the name/descriptor constant-pool indexes
    as two big-endian u16 values, then the attribute table.
    """
    write = out.write
    write(self.access_flags.pack())
    write(pack('>HH', self._name_index, self._descriptor_index))
    self.attributes.pack(out)
Write the Field to the file - like object out .
3,201
def remove(self, field: Field):
    """Remove `field` from the table, comparing by identity (not equality)."""
    kept = []
    for existing in self._table:
        if existing is not field:
            kept.append(existing)
    self._table = kept
Removes a Field from the table by identity .
3,202
def unpack(self, source: IO):
    """Read the FieldTable from the file-like object `source`.

    Reads a big-endian u16 count followed by that many Field records.
    """
    field_count = unpack('>H', source.read(2))[0]
    for _ in range(field_count):
        field = Field(self._cf)
        field.unpack(source)
        self.append(field)
Read the FieldTable from the file - like object source .
3,203
def pack(self, out: IO):
    """Write the FieldTable to the file-like object `out`.

    Writes a big-endian u16 count followed by each packed Field.
    """
    out.write(pack('>H', len(self)))
    for entry in self._table:
        entry.pack(out)
Write the FieldTable to the file - like object out .
3,204
def find ( self , * , name : str = None , type_ : str = None , f : Callable = None ) -> Iterator [ Field ] : for field in self . _table : if name is not None and field . name . value != name : continue descriptor = field . descriptor . value if type_ is not None and type_ != descriptor : continue if f is not None and n...
Iterates over the fields table, yielding each matching field. Calling without any arguments is equivalent to iterating over the table.
3,205
def is_valid_host(value):
    """Check if the given value is a valid host string.

    A host is valid when it is an IPv4 address, an IPv6 address or a domain.
    """
    for validate in (validators.ipv4, validators.ipv6, validators.domain):
        if validate(value):
            return True
    return False
Check if given value is a valid host string .
3,206
def is_valid_url(value):
    """Check if the given value is a valid URL string.

    Returns a bool (the original returned the regex match object or None),
    and guards against URLs that match the regex but have no hostname
    (e.g. 'http://'), which would otherwise pass None to is_valid_host.
    """
    if URL_REGEX.match(value) is None:
        return False
    host_str = urlparse(value).hostname
    # urlparse().hostname is None when no network location is present.
    return host_str is not None and is_valid_host(host_str)
Check if given value is a valid URL string .
3,207
def accepts_valid_host(func):
    """Return a wrapper that runs the given method only for valid hosts.

    The wrapped method raises InvalidHostError when `value` fails
    host validation; otherwise the call is forwarded unchanged.
    """
    @functools.wraps(func)
    def wrapper(obj, value, *args, **kwargs):
        if is_valid_host(value):
            return func(obj, value, *args, **kwargs)
        raise InvalidHostError
    return wrapper
Return a wrapper that runs given method only for valid hosts .
3,208
def accepts_valid_urls ( func ) : @ functools . wraps ( func ) def wrapper ( obj , urls , * args , ** kwargs ) : invalid_urls = [ u for u in urls if not is_valid_url ( u ) ] if invalid_urls : msg_tpl = 'The values: {} are not valid URLs' msg = msg_tpl . format ( ',' . join ( invalid_urls ) ) raise InvalidURLError ( msg...
Return a wrapper that runs given method only for valid URLs .
3,209
def get(self, index):
    """Return the Constant at `index`, raising KeyError if it does not exist.

    Raw pool entries are lazily inflated into Constant instances and
    cached back into the pool so the work happens at most once per index.
    """
    entry = self._pool[index]
    if isinstance(entry, Constant):
        return entry
    inflated = _constant_types[entry[0]](self, index, *entry[1:])
    self._pool[index] = inflated
    return inflated
Returns the Constant at index raising a KeyError if it does not exist .
3,210
def find(self, type_=None, f=None):
    """Iterate over the pool, yielding each matching Constant.

    `type_` filters by isinstance; `f` is an arbitrary predicate.
    Calling without any arguments is equivalent to iterating over the pool.
    """
    for constant in self:
        if type_ is not None and not isinstance(constant, type_):
            continue
        if f is None or f(constant):
            yield constant
Iterates over the pool yielding each matching Constant . Calling without any arguments is equivalent to iterating over the pool .
3,211
def pack(self, fout):
    """Write the ConstantPool to the file-like object `fout`.

    Writes the raw entry count as a big-endian u16, then each packed
    constant in pool order.
    """
    fout.write(pack('>H', self.raw_count))
    for constant in self:
        fout.write(constant.pack())
Write the ConstantPool to the file - like object fout .
3,212
def checkout_and_create_branch ( repo , name ) : local_branch = repo . branches [ name ] if name in repo . branches else None if not local_branch : if name in repo . remotes . origin . refs : msg = repo . git . checkout ( name ) _LOGGER . debug ( msg ) return local_branch = repo . create_head ( name ) local_branch . ch...
Checkout branch . Create it if necessary
3,213
def checkout_create_push_branch ( repo , name ) : try : repo . git . checkout ( name ) _LOGGER . info ( "Checkout %s success" , name ) except GitCommandError : _LOGGER . info ( "Checkout %s was impossible (branch does not exist). Creating it and push it." , name ) checkout_and_create_branch ( repo , name ) repo . git ....
Checkout this branch . Create it if necessary and push it to origin .
3,214
def get_repo_hexsha ( git_folder ) : repo = Repo ( str ( git_folder ) ) if repo . bare : not_git_hexsha = "notgitrepo" _LOGGER . warning ( "Not a git repo, SHA1 used will be: %s" , not_git_hexsha ) return not_git_hexsha hexsha = repo . head . commit . hexsha _LOGGER . info ( "Found REST API repo SHA1: %s" , hexsha ) re...
Get the SHA1 of the current repo
3,215
def checkout_with_fetch(git_folder, refspec, repository="origin"):
    """Fetch `refspec` from `repository` and check out FETCH_HEAD.

    Note: this leaves the repository in detached-HEAD mode.
    """
    _LOGGER.info("Trying to fetch and checkout %s", refspec)
    git = Repo(str(git_folder)).git
    git.fetch(repository, refspec)
    git.checkout("FETCH_HEAD")
    _LOGGER.info("Fetch and checkout success for %s", refspec)
Fetch the refspec and checkout FETCH_HEAD. Beware that you will be in detached-head mode.
3,216
def clone_to_path ( https_authenticated_url , folder , branch_or_commit = None ) : _LOGGER . info ( "Cloning repo" ) repo = Repo . clone_from ( https_authenticated_url , str ( folder ) ) if branch_or_commit : _LOGGER . info ( "Checkout branch_or_commit %s" , branch_or_commit ) repo . git . checkout ( branch_or_commit )...
Clone the given URL to the folder .
3,217
def get_files_in_commit(git_folder, commit_id="HEAD"):
    """Return the list of file paths changed in the given commit (default HEAD).

    Diffs the commit against its first parent; NOTE(review): this will fail
    for a root commit, which has no parent — confirm callers never pass one.
    """
    repo = Repo(str(git_folder))
    diff_output = repo.git.diff("--name-only", "{}^".format(commit_id), commit_id)
    return diff_output.splitlines()
List of files changed in the given commit (default HEAD).
3,218
def parse_values(self, query):
    """Extract values from `query` via each registered filter.

    Filters whose parse_value returns None are omitted from the result.
    """
    values = {}
    for name, filt in self.filters.items():
        parsed = filt.parse_value(query)
        if parsed is not None:
            values[name] = parsed
    return values
extract values from query
3,219
def filter_queryset ( self , queryset ) : for name , filt in self . filters . items ( ) : val = self . values . get ( name , None ) if name is None : continue params = filt . filter_params ( val ) if not params : continue if isinstance ( params , dict ) : queryset = queryset . filter ( ** params ) if isinstance ( param...
convert values to filtering params and apply to queryset
3,220
def media_image_url ( self ) : if self . is_nowplaying : base = self . server . construct_url ( API_URL ) try : image_id = self . session [ 'NowPlayingItem' ] [ 'ImageTags' ] [ 'Thumb' ] image_type = 'Thumb' except KeyError : try : image_id = self . session [ 'NowPlayingItem' ] [ 'ImageTags' ] [ 'Primary' ] image_type ...
Image url of current playing media .
3,221
def state(self):
    """Return the current play state of the device.

    OFF when inactive, IDLE when nothing is playing, otherwise
    PAUSED/PLAYING based on the session's play state.
    """
    if not self.is_active:
        return STATE_OFF
    if 'NowPlayingItem' not in self.session:
        return STATE_IDLE
    if self.session['PlayState']['IsPaused']:
        return STATE_PAUSED
    return STATE_PLAYING
Return current playstate of the device .
3,222
async def set_playstate ( self , state , pos = 0 ) : url = '{}/Sessions/{}/Playing/{}' . format ( self . server . construct_url ( API_URL ) , self . session_id , state ) params = { 'api_key' : self . server . api_key } if state == 'seek' : params [ 'SeekPositionTicks' ] = int ( pos * 10000000 ) params [ 'static' ] = 't...
Send media commands to server .
3,223
def start_shell(local_ns: Dict = None, banner: str = ''):
    """Create and immediately drop into an interactive Python shell.

    Prefers an embedded IPython shell when available; otherwise falls
    back to the stdlib `code.interact`.
    """
    if not IPYTHON_SHELL_AVAILABLE:
        code.interact(banner=banner, local=local_ns)
        return
    ipython_shell = embed.InteractiveShellEmbed(user_ns={})
    ipython_shell.mainloop(local_ns=local_ns)
Create and immediately drop into a Python shell .
3,224
def expand_constants(ins: Instruction, *, cf) -> Instruction:
    """Replace CONSTANT_INDEX operands with the Constant they reference.

    Mutates `ins.operands` in place and returns the same instruction.
    """
    for position, operand in enumerate(ins.operands):
        if not isinstance(operand, Operand):
            continue
        if operand.op_type == OperandTypes.CONSTANT_INDEX:
            ins.operands[position] = cf.constants[operand.value]
    return ins
Replace CONSTANT_INDEX operands with the literal Constant object from the constant pool .
3,225
def simple_swap ( ins : Instruction ) -> Instruction : try : rule = ins . details [ 'transform' ] [ 'simple_swap' ] except KeyError : return ins replacement_ins = opcode_table [ rule [ 'op' ] ] return Instruction ( replacement_ins [ 'mnemonic' ] , replacement_ins [ 'op' ] , [ Operand ( replacement_ins [ 'operands' ] [ ...
Replaces one instruction with another based on the transform rules in the bytecode definitions . This can help simplify your code as it reduces the overall number of instructions . For example aload_0 will become aload 0 .
3,226
def find_request():
    """Inspect the running call stack for an HttpRequest object.

    There should be one, but don't rely on it: returns None when no
    frame holds a local named 'request' of the right type.
    """
    frame = inspect.currentframe()
    request = None
    current = frame
    while request is None and current is not None:
        candidate = current.f_locals.get('request')
        if isinstance(candidate, HttpRequest):
            request = candidate
        current = current.f_back
    del frame  # break the reference cycle created by holding our own frame
    return request
Inspect the running environment for a request object. There should be one, but don't rely on it.
3,227
def error_view ( template_dir = None ) : if not template_dir : template_dir = "Pylot/Error" template_page = "%s/index.html" % template_dir class Error ( Pylot ) : @ classmethod def register ( cls , app , ** kwargs ) : super ( cls , cls ) . register ( app , ** kwargs ) @ app . errorhandler ( 400 ) def error_400 ( error ...
Create the Error view Must be instantiated
3,228
def sign_s3_upload ( self ) : AWS_ACCESS_KEY = self . config_ ( 'AWS_ACCESS_KEY_ID' ) AWS_SECRET_KEY = self . config_ ( 'AWS_SECRET_ACCESS_KEY' ) S3_BUCKET = self . config_ ( 'AWS_S3_BUCKET_NAME' ) object_name = request . args . get ( 's3_object_name' ) mime_type = request . args . get ( 's3_object_type' ) expires = lo...
Allow to create Signed object to upload to S3 via JS
3,229
def add_new_devices_callback(self, callback):
    """Register `callback` to be invoked when new devices are added."""
    callbacks = self._new_devices_callbacks
    callbacks.append(callback)
    _LOGGER.debug('Added new devices callback to %s', callback)
Register as callback for when new devices are added .
3,230
def add_stale_devices_callback(self, callback):
    """Register `callback` to be invoked when stale devices exist."""
    callbacks = self._stale_devices_callbacks
    callbacks.append(callback)
    _LOGGER.debug('Added stale devices callback to %s', callback)
Register as callback for when stale devices exist .
3,231
def add_update_callback(self, callback, device):
    """Register `callback` to fire when the matching `device` changes."""
    entry = [callback, device]
    self._update_callbacks.append(entry)
    _LOGGER.debug('Added update callback to %s on %s', callback, device)
Register as callback for when a matching device changes .
3,232
def remove_update_callback(self, callback, device):
    """Remove a previously registered update callback; no-op if absent."""
    entry = [callback, device]
    if entry in self._update_callbacks:
        self._update_callbacks.remove(entry)
        _LOGGER.debug('Removed update callback %s for %s', callback, device)
Remove a registered update callback .
3,233
def start(self):
    """Public method for initiating connectivity with the emby server.

    Schedules registration on the event loop; when this object owns the
    loop, also runs it until stopped and then closes it.
    """
    asyncio.ensure_future(self.register(), loop=self._event_loop)
    if not self._own_loop:
        return
    _LOGGER.info("Starting up our own event loop.")
    self._event_loop.run_forever()
    self._event_loop.close()
    _LOGGER.info("Connection shut down.")
Public method for initiating connectivity with the emby server .
3,234
async def stop(self):
    """Async method for stopping connectivity with the emby server.

    Closes the websocket if open and, when this object owns the event
    loop, schedules the loop to stop from a thread-safe context.
    """
    self._shutdown = True
    websocket = self.wsck
    if websocket:
        _LOGGER.info('Closing Emby server websocket.')
        await websocket.close()
        self.wsck = None
    if self._own_loop:
        _LOGGER.info("Shutting down Emby server loop...")
        self._event_loop.call_soon_threadsafe(self._event_loop.stop)
Async method for stopping connectivity with the emby server .
3,235
async def register ( self ) : url = '{}/Sessions' . format ( self . construct_url ( API_URL ) ) params = { 'api_key' : self . _api_key } reg = await self . api_request ( url , params ) if reg is None : self . _registered = False _LOGGER . error ( 'Unable to register emby client.' ) else : self . _registered = True _LOG...
Register library device id and get initial device list .
3,236
async def socket_connection ( self ) : if not self . _registered : _LOGGER . error ( 'Client not registered, cannot start socket.' ) return url = '{}?DeviceID={}&api_key={}' . format ( self . construct_url ( SOCKET_URL ) , self . _api_id , self . _api_key ) fail_count = 0 while True : _LOGGER . debug ( 'Attempting Sock...
Open websocket connection .
3,237
def process_msg(self, msg):
    """Process a message from the event stream.

    Only 'Sessions' messages are acted on: they refresh the cached
    session list and trigger a device-list update.
    """
    parsed = json.loads(msg)
    msgtype = parsed['MessageType']
    msgdata = parsed['Data']
    _LOGGER.debug('New websocket message recieved of type: %s', msgtype)
    if msgtype == 'Sessions':
        self._sessions = msgdata
        self.update_device_list(self._sessions)
Process messages from the event stream .
3,238
def update_device_list ( self , sessions ) : if sessions is None : _LOGGER . error ( 'Error updating Emby devices.' ) return new_devices = [ ] active_devices = [ ] dev_update = False for device in sessions : dev_name = '{}.{}' . format ( device [ 'DeviceId' ] , device [ 'Client' ] ) try : _LOGGER . debug ( 'Session msg...
Update device list .
3,239
def update_check ( self , existing , new ) : old_state = existing . state if 'NowPlayingItem' in existing . session_raw : try : old_theme = existing . session_raw [ 'NowPlayingItem' ] [ 'IsThemeMedia' ] except KeyError : old_theme = False else : old_theme = False if 'NowPlayingItem' in new : if new [ 'PlayState' ] [ 'I...
Check device state to see if we need to fire the callback .
3,240
def main ( ) : parser = get_parser ( ) args = parser . parse_args ( ) ARCHIVE = args . archive_path archive = ( not args . no_archive ) os . environ [ 'F2FORMAT_VERSION' ] = args . python os . environ [ 'F2FORMAT_ENCODING' ] = args . encoding def find ( root ) : flst = list ( ) temp = os . listdir ( root ) for file in ...
Entry point for f2format .
3,241
def create(cls, this: str, super_: str = u'java/lang/Object') -> 'ClassFile':
    """Set up reasonable defaults for a new public class.

    The class is marked public + super and its this/super references are
    interned in the constant pool.
    """
    cf = ClassFile()
    flags = cf.access_flags
    flags.acc_public = True
    flags.acc_super = True
    cf.this = cf.constants.create_class(this)
    cf.super_ = cf.constants.create_class(super_)
    return cf
A utility which sets up reasonable defaults for a new public class .
3,242
def save ( self , source : IO ) : write = source . write write ( pack ( '>IHH' , ClassFile . MAGIC , self . version . minor , self . version . major ) ) self . _constants . pack ( source ) write ( self . access_flags . pack ( ) ) write ( pack ( f'>HHH{len(self._interfaces)}H' , self . _this , self . _super , len ( self...
Saves the class to the file - like object source .
3,243
def _from_io ( self , source : IO ) : read = source . read if unpack ( '>I' , source . read ( 4 ) ) [ 0 ] != ClassFile . MAGIC : raise ValueError ( 'invalid magic number' ) self . version = unpack ( '>HH' , source . read ( 4 ) ) [ : : - 1 ] self . _constants . unpack ( source ) self . access_flags . unpack ( read ( 2 )...
Loads an existing JVM ClassFile from any file - like object .
3,244
def interfaces(self) -> Iterable[ConstantClass]:
    """The direct superinterfaces of this class, resolved from the
    constant pool, in left-to-right order."""
    resolved = []
    for idx in self._interfaces:
        resolved.append(self._constants[idx])
    return resolved
A list of direct superinterfaces of this class, resolved from the constant pool, in left-to-right order.
3,245
def bootstrap_methods(self) -> BootstrapMethod:
    """Return the bootstrap methods table from the BootstrapMethods
    attribute, creating the attribute first if it does not exist."""
    attr = self.attributes.find_one(name='BootstrapMethods')
    if attr is None:
        attr = self.attributes.create(ATTRIBUTE_CLASSES['BootstrapMethods'])
    return attr.table
Returns the bootstrap methods table from the BootstrapMethods attribute if one exists . If it does not one will be created .
3,246
def attributes ( ) : attribute_classes = get_attribute_classes ( ) for name , class_ in attribute_classes . items ( ) : click . echo ( u'{name} - Added in: {ai} ({cv})' . format ( name = click . style ( name , fg = 'green' ) , ai = click . style ( class_ . ADDED_IN , fg = 'yellow' ) , cv = click . style ( ClassVersion ...
List enabled Attributes .
3,247
def ins ( mnemonic ) : try : opcode = bytecode . opcode_table [ mnemonic ] except KeyError : click . secho ( u'No definition found.' , fg = 'red' ) return click . echo ( u'{mnemonic} (0x{op})' . format ( mnemonic = click . style ( opcode [ 'mnemonic' ] , fg = 'green' , underline = True ) , op = click . style ( format (...
Lookup instruction information .
3,248
def shell_command(class_path):
    """Drop into a debugging shell preloaded with a ClassLoader."""
    namespace = {
        'ClassFile': ClassFile,
        'loader': ClassLoader(*class_path),
        'constants': importlib.import_module('jawa.constants'),
    }
    shell.start_shell(local_ns=namespace)
Drop into a debugging shell .
3,249
def definition_to_json ( source ) : try : import yaml except ImportError : click . echo ( 'The pyyaml module could not be found and is required' ' to use this command.' , err = True ) return y = yaml . load ( source ) for k , v in y . items ( ) : v . setdefault ( 'operands' , None ) v . setdefault ( 'can_be_wide' , Fal...
Convert a bytecode . yaml file into a prepared bytecode . json .
3,250
def dependencies(source):
    """Output a list of all classes referenced by the given source.

    Each dependency is echoed exactly once, the first time it is seen.
    """
    loader = ClassLoader(source, max_cache=-1)
    seen = set()
    for klass in loader.classes:
        fresh = loader.dependencies(klass) - seen
        seen |= fresh
        for dep in fresh:
            click.echo(dep)
Output a list of all classes referenced by the given source .
3,251
def grep ( source , regex , stop_on_first = False ) : loader = ClassLoader ( source , max_cache = - 1 ) r = re . compile ( regex ) def _matches ( constant ) : return r . match ( constant . value ) for klass in loader . classes : it = loader . search_constant_pool ( path = klass , type_ = UTF8 , f = _matches ) if next (...
Grep the constant pool of all classes in source .
3,252
def fetch(*args, **kwargs):
    """Fetch a URL.

    Dispatches to post() when a string/dict payload or files are
    supplied, otherwise to get().
    """
    data = kwargs.get('data', None)
    files = kwargs.get('files', {})
    has_payload = bool(files) or (data and isinstance(data, (basestring, dict)))
    if has_payload:
        return post(*args, **kwargs)
    return get(*args, **kwargs)
fetch an URL .
3,253
def parse_url ( url ) : try : url = unicode ( url ) except UnicodeDecodeError : pass if py3k : make_utf8 = lambda x : x else : make_utf8 = lambda x : isinstance ( x , unicode ) and x . encode ( 'utf-8' ) or x if '://' in url : scheme , url = url . split ( '://' , 1 ) else : scheme = 'http' url = 'http://' + url parsed ...
Return a dictionary of parsed url
3,254
def get_proxies_from_environ():
    """Build a proxies dict from os.environ.

    Honours both lower- and upper-case http(s)_proxy variables, with the
    lower-case form taking precedence.
    """
    proxies = {}
    for scheme in ('http', 'https'):
        value = os.getenv(scheme + '_proxy') or os.getenv(scheme.upper() + '_PROXY')
        if value:
            proxies[scheme] = value
    return proxies
Get proxies from os . environ .
3,255
def random_useragent ( filename = True ) : import random default_ua = 'urlfetch/%s' % __version__ if isinstance ( filename , basestring ) : filenames = [ filename ] else : filenames = [ ] if filename and UAFILE : filenames . append ( UAFILE ) for filename in filenames : try : st = os . stat ( filename ) if stat . S_ISR...
Returns a User - Agent string randomly from file .
3,256
def url_concat ( url , args , keep_existing = True ) : if not args : return url if keep_existing : if url [ - 1 ] not in ( '?' , '&' ) : url += '&' if ( '?' in url ) else '?' return url + urlencode ( args , 1 ) else : url , seq , query = url . partition ( '?' ) query = urlparse . parse_qs ( query , True ) query . updat...
Concatenate url and argument dictionary
3,257
def choose_boundary ( ) : global BOUNDARY_PREFIX if BOUNDARY_PREFIX is None : BOUNDARY_PREFIX = "urlfetch" try : uid = repr ( os . getuid ( ) ) BOUNDARY_PREFIX += "." + uid except AttributeError : pass try : pid = repr ( os . getpid ( ) ) BOUNDARY_PREFIX += "." + pid except AttributeError : pass return "%s.%s" % ( BOUN...
Generate a multipart boundary.
3,258
def encode_multipart ( data , files ) : body = BytesIO ( ) boundary = choose_boundary ( ) part_boundary = b ( '--%s\r\n' % boundary ) writer = codecs . lookup ( 'utf-8' ) [ 3 ] if isinstance ( data , dict ) : for name , values in data . items ( ) : if not isinstance ( values , ( list , tuple , set ) ) : values = ( valu...
Encode multipart .
3,259
def body(self):
    """Response body as bytes.

    Closes the connection and raises ContentLimitExceeded if the
    accumulated length exceeds `length_limit`.
    """
    chunks = []
    total = 0
    for chunk in self:
        chunks.append(chunk)
        total += len(chunk)
        if self.length_limit and total > self.length_limit:
            self.close()
            raise ContentLimitExceeded("Content length is more than %d "
                                       "bytes" % self.length_limit)
    return b("").join(chunks)
Response body .
3,260
def json(self):
    """Load the response body as JSON.

    Any decoding failure is re-raised as ContentDecodingError.
    """
    try:
        decoded = json.loads(self.text)
    except Exception as exc:
        raise ContentDecodingError(exc)
    return decoded
Load response body as json .
3,261
def headers(self):
    """Response headers as a dict (keys lower-cased on Python 3)."""
    pairs = self.getheaders()
    if py3k:
        return {key.lower(): value for key, value in pairs}
    return dict(pairs)
Response headers .
3,262
def cookies(self):
    """Cookies from the Set-Cookie header, as a name -> value dict."""
    jar = Cookie.SimpleCookie(self.getheader('set-cookie'))
    return {morsel.key: morsel.value for morsel in jar.values()}
Cookies in dict
3,263
def links ( self ) : ret = [ ] linkheader = self . getheader ( 'link' ) if not linkheader : return ret for i in linkheader . split ( ',' ) : try : url , params = i . split ( ';' , 1 ) except ValueError : url , params = i , '' link = { } link [ 'url' ] = url . strip ( ) for param in params . split ( ';' ) : try : k , v ...
Links parsed from HTTP Link header
3,264
def cookiestring(self, value):
    """Cookie string setter: parse `value` and replace self.cookies."""
    parsed = Cookie.SimpleCookie(value)
    self.cookies = {morsel.key: morsel.value for morsel in parsed.values()}
Cookie string setter
3,265
def request(self, *args, **kwargs):
    """Issue a request with the session's headers and cookie string.

    Per-call headers override session headers; cookies from the response
    are merged back into the session.
    """
    merged = self.headers.copy()
    if self.cookiestring:
        merged['Cookie'] = self.cookiestring
    merged.update(kwargs.get('headers', {}))
    kwargs['headers'] = merged
    response = request(*args, **kwargs)
    self.cookies.update(response.cookies)
    return response
Issue a request .
3,266
def f2format ( filename ) : print ( 'Now converting %r...' % filename ) encoding = os . getenv ( 'F2FORMAT_ENCODING' , LOCALE_ENCODING ) lineno = dict ( ) content = list ( ) with open ( filename , 'r' , encoding = encoding ) as file : lineno [ 1 ] = 0 for lnum , line in enumerate ( file , start = 1 ) : content . append...
Wrapper works for conversion .
3,267
def exception_to_github ( github_obj_to_comment , summary = "" ) : context = ExceptionContext ( ) try : yield context except Exception : if summary : summary = ": ({})" . format ( summary ) error_type = "an unknown error" try : raise except CalledProcessError as err : error_type = "a Subprocess error" content = "Comman...
If any exception comes log them in the given Github obj .
3,268
def create_comment(github_object, body):
    """Create a comment, whatever the object is: a PR, a commit or an issue.

    PRs/issues expose create_issue_comment; commits expose create_comment.
    The attribute is looked up *before* the call so that an AttributeError
    raised inside create_issue_comment itself is not silently masked by
    falling back to the wrong API (the original wrapped the whole call).
    """
    try:
        method = github_object.create_issue_comment
    except AttributeError:
        method = github_object.create_comment
    return method(body)
Create a comment whatever the object is a PR a commit or an issue .
3,269
def get_full_sdk_id(gh_token, sdk_git_id):
    """Return a full '<owner>/<repo>' id.

    If the SDK git id is a bare repo name, complete it with the login of
    the user owning `gh_token`; otherwise return it unchanged.
    """
    # Idiomatic membership test ('/' not in x) instead of `not '/' in x`.
    if '/' not in sdk_git_id:
        login = user_from_token(gh_token).login
        return '{}/{}'.format(login, sdk_git_id)
    return sdk_git_id
If the SDK git id is incomplete try to complete it with user login
3,270
def sync_fork ( gh_token , github_repo_id , repo , push = True ) : if not gh_token : _LOGGER . warning ( 'Skipping the upstream repo sync, no token' ) return _LOGGER . info ( 'Check if repo has to be sync with upstream' ) github_con = Github ( gh_token ) github_repo = github_con . get_repo ( github_repo_id ) if not git...
Sync the current branch in this fork against the direct parent on Github
3,271
def get_or_create_pull ( github_repo , title , body , head , base , * , none_if_no_commit = False ) : try : return github_repo . create_pull ( title = title , body = body , head = head , base = base ) except GithubException as err : err_message = err . data [ 'errors' ] [ 0 ] . get ( 'message' , '' ) if err . status ==...
Try to create the PR . If the PR exists try to find it instead . Raises otherwise .
3,272
def clone_to_path ( gh_token , folder , sdk_git_id , branch_or_commit = None , * , pr_number = None ) : _LOGGER . info ( "Clone SDK repository %s" , sdk_git_id ) url_parsing = urlsplit ( sdk_git_id ) sdk_git_id = url_parsing . path if sdk_git_id . startswith ( "/" ) : sdk_git_id = sdk_git_id [ 1 : ] credentials_part = ...
Clone the given repo_id to the folder .
3,273
def do_pr ( gh_token , sdk_git_id , sdk_pr_target_repo_id , branch_name , base_branch , pr_body = "" ) : "Do the PR" if not gh_token : _LOGGER . info ( 'Skipping the PR, no token found' ) return None if not sdk_pr_target_repo_id : _LOGGER . info ( 'Skipping the PR, no target repo id' ) return None github_con = Github (...
Do the PR
3,274
def remove_readonly(func, path, _):
    """Clear the readonly bit and reattempt the removal.

    Intended as an `onerror` handler for shutil.rmtree: `func` is the
    failing os function, `path` the offending file, `_` the exc info.
    """
    os.chmod(path, stat.S_IWRITE)
    func(path)
Clear the readonly bit and reattempt the removal
3,275
def manage_git_folder ( gh_token , temp_dir , git_id , * , pr_number = None ) : _LOGGER . debug ( "Git ID %s" , git_id ) if Path ( git_id ) . exists ( ) : yield git_id return split_git_id = git_id . split ( "@" ) branch = split_git_id [ 1 ] if len ( split_git_id ) > 1 else None clone_to_path ( gh_token , temp_dir , spl...
Context manager to avoid readonly problem while cleanup the temp dir .
3,276
def as_raw_link(self):
    """Return a GithubLink to raw content.

    Raw links are returned as-is; blob links are rebuilt as raw links;
    tree links have no download URL, so a ValueError is raised.
    """
    link_type = self.link_type
    if link_type == "raw":
        return self
    if link_type != "blob":
        raise ValueError("Cannot get a download link from a tree link")
    link_cls = self.__class__
    return link_cls(self.gitid, "raw", self.branch_or_commit, self.path, self.token)
Returns a GithubLink to a raw content .
3,277
def create_comment(self, text):
    """Mimic the issue API so we can use it everywhere.

    Returns the dashboard comment for this issue/PR.
    """
    target = self._issue_or_pr
    return DashboardComment.get_or_create(target, self._header, text)
Mimic issue API so we can use it everywhere . Return dashboard comment .
3,278
def get_or_create ( cls , issue , header , text = None ) : for comment in get_comments ( issue ) : try : if comment . body . splitlines ( ) [ 0 ] == header : obj = cls ( comment , header ) break except IndexError : pass else : comment = create_comment ( issue , header ) obj = cls ( comment , header ) if text : obj . ed...
Get or create the dashboard comment in this issue .
3,279
def disconnect(self, name, func, dispatch_uid=None):
    """Disconnect `func` from the hook `name`; silently no-op if the
    hook is not registered."""
    try:
        signal = self._registry[name]
    except KeyError:
        pass
    else:
        signal.disconnect(func, dispatch_uid=dispatch_uid)
Disconnects a function from a hook
3,280
def create_host ( factories , value ) : data = [ value ] for func in factories : try : return func ( value ) except InvalidHostError as ex : data . append ( str ( ex ) ) msg_tpl = ( "Failed to create a host object for '{}', raising the following errors" " in the process:" + "\n" . join ( data ) ) raise InvalidHostError...
Use the factories to create a host object .
3,281
def is_subdomain(self, other):
    """Test if this object is a subdomain of `other`.

    Returns False when the underlying value does not support the check.
    """
    compared = getattr(other, 'value', other)
    try:
        return self.value.is_subdomain(compared)
    except AttributeError:
        return False
Test if the object is a subdomain of the other .
3,282
def assemble ( code ) : final = [ ] for line in code : if isinstance ( line , Label ) : final . append ( line ) continue mnemonic , operands = line [ 0 ] , line [ 1 : ] operand_fmts = opcode_table [ mnemonic ] [ 'operands' ] final_operands = [ ] for i , operand in enumerate ( operands ) : if isinstance ( operand , Oper...
Assemble the given iterable of mnemonics operands and lables .
3,283
def register(self, hook):
    """Register a hook.

    The hook must be a callable class inheriting from HookBase.
    NOTE(review): assert-based validation is stripped under `python -O`.
    """
    assert callable(hook), "Hook must be a callable"
    assert issubclass(hook, HookBase), "The hook does not inherit from HookBase"
    self._registry.append(hook)
Register a hook .
3,284
def save(self, *args, **kwargs):
    """Save all the forms, returning a list of (form, save-result) pairs."""
    saved = []
    for form in self.instances:
        saved.append((form, form.save(*args, **kwargs)))
    return saved
Save all the forms
3,285
def get_attribute_classes ( ) -> Dict [ str , Attribute ] : attribute_children = pkgutil . iter_modules ( importlib . import_module ( 'jawa.attributes' ) . __path__ , prefix = 'jawa.attributes.' ) result = { } for _ , name , _ in attribute_children : classes = inspect . getmembers ( importlib . import_module ( name ) ,...
Lookup all builtin Attribute subclasses load them and return a dict
3,286
def unpack(self, source: IO):
    """Read the attribute table from the file-like object `source`.

    Entries are stored raw as (name_index, info_blob) tuples; they are
    not inflated into Attribute objects here.
    """
    count = unpack('>H', source.read(2))[0]
    for _ in range(count):
        name_index, length = unpack('>HI', source.read(6))
        info_blob = source.read(length)
        self._table.append((name_index, info_blob))
Read the AttributeTable from the file-like object source.
3,287
def pack(self, out: IO):
    """Write the AttributeTable to the file-like object `out`.

    Writes the entry count, then for each attribute its name index,
    payload length and payload bytes.
    """
    write = out.write
    write(pack('>H', len(self._table)))
    for attribute in self:
        info = attribute.pack()
        write(pack('>HI', attribute.name.index, len(info)))
        write(info)
Write the AttributeTable to the file - like object out .
3,288
def create(self, type_, *args, **kwargs) -> Any:
    """Create a new attribute of `type_`, append it to the attribute
    table and return it. Extra arguments go to the attribute constructor."""
    new_attribute = type_(self, *args, **kwargs)
    self._table.append(new_attribute)
    return new_attribute
Creates a new attribute of type_ appending it to the attribute table and returning it .
3,289
def get_locations ( self , url ) : if not is_valid_url ( url ) : raise InvalidURLError ( '{} is not a valid URL' . format ( url ) ) try : response = self . session . head ( url ) except ( ConnectionError , InvalidSchema , Timeout ) : raise StopIteration try : generator = self . session . resolve_redirects ( response , ...
Get valid location header values from responses .
3,290
def get_new_locations(self, urls):
    """Yield location header values for all given URLs.

    The input URLs themselves and already-yielded locations are
    deduplicated via a `seen` set.
    """
    seen = set(urls)
    for url in urls:
        for location in self.get_locations(url):
            if location in seen:
                continue
            seen.add(location)
            yield location
Get valid location header values for all given URLs .
3,291
def get_urls_and_locations(self, urls):
    """Get URLs and their redirection addresses as a CachedIterable."""
    initial_cache = list(set(urls))
    return CachedIterable(self.get_new_locations(urls), initial_cache)
Get URLs and their redirection addresses .
3,292
def _handle_get ( self , request_data ) : der = base64 . b64decode ( request_data ) ocsp_request = self . _parse_ocsp_request ( der ) return self . _build_http_response ( ocsp_request )
An OCSP GET request contains the DER - in - base64 encoded OCSP request in the HTTP request URL .
3,293
def _handle_post(self):
    """Handle an OCSP POST request.

    The HTTP request body carries the DER-encoded OCSP request.
    """
    der = request.body.read()
    parsed = self._parse_ocsp_request(der)
    return self._build_http_response(parsed)
An OCSP POST request contains the DER encoded OCSP request in the HTTP request body .
3,294
def _build_ocsp_response ( self , ocsp_request : OCSPRequest ) -> OCSPResponse : tbs_request = ocsp_request [ 'tbs_request' ] request_list = tbs_request [ 'request_list' ] if len ( request_list ) != 1 : logger . warning ( 'Received OCSP request with multiple sub requests' ) raise NotImplemented ( 'Combined requests not...
Create and return an OCSP response from an OCSP request .
3,295
def hook_tag(context, name, *args, **kwargs):
    """Template tag: render every response of the named hook, joined
    with newlines and HTML-escaped via format_html_join."""
    responses = hook(name, context, *args, **kwargs)
    return format_html_join(
        sep="\n",
        format_string="{}",
        args_generator=((response,) for response in responses),
    )
Hook tag to call within templates
3,296
def template_hook_collect(module, hook_name, *args, **kwargs):
    """Helper for static TemplateHooks: render `module.<hook_name>`.

    Returns an empty string when the module has no such hook.
    """
    try:
        templatehook = getattr(module, hook_name)
    except AttributeError:
        return ""
    responses = templatehook(*args, **kwargs)
    return format_html_join(
        sep="\n",
        format_string="{}",
        args_generator=((response,) for response in responses),
    )
Helper to include in your own templatetag for static TemplateHooks
3,297
def _extract(self, source, *args, **kwargs):
    """Extract data from mbox files; mutates self._data.

    Adds a sequential 'MessageID' column to the parsed frame.
    """
    frame = mbox_to_pandas(source)
    frame['MessageID'] = pd.Series(range(0, len(frame)))
    self._data = frame
Extracts data from mbox files . Mutates _data .
3,298
def build_from_issue_comment ( gh_token , body ) : if body [ "action" ] in [ "created" , "edited" ] : github_con = Github ( gh_token ) repo = github_con . get_repo ( body [ 'repository' ] [ 'full_name' ] ) issue = repo . get_issue ( body [ 'issue' ] [ 'number' ] ) text = body [ 'comment' ] [ 'body' ] try : comment = is...
Create a WebhookMetadata from a comment added to an issue .
3,299
def build_from_issues ( gh_token , body ) : if body [ "action" ] in [ "opened" , "edited" ] : github_con = Github ( gh_token ) repo = github_con . get_repo ( body [ 'repository' ] [ 'full_name' ] ) issue = repo . get_issue ( body [ 'issue' ] [ 'number' ] ) text = body [ 'issue' ] [ 'body' ] comment = issue return Webho...
Create a WebhookMetadata from an opening issue text .