idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
19,600 | def create ( klass , account , name ) : audience = klass ( account ) getattr ( audience , '__create_audience__' ) ( name ) try : return audience . reload ( ) except BadRequest as e : audience . delete ( ) raise e | Creates a new tailored audience . |
19,601 | def users ( self , params ) : resource = self . RESOURCE_USERS . format ( account_id = self . account . id , id = self . id ) headers = { 'Content-Type' : 'application/json' } response = Request ( self . account . client , 'post' , resource , headers = headers , body = json . dumps ( params ) ) . perform ( ) success_count = response . body [ 'data' ] [ 'success_count' ] total_count = response . body [ 'data' ] [ 'total_count' ] return ( success_count , total_count ) | This is a private API and requires whitelisting from Twitter . This endpoint will allow partners to add update and remove users from a given tailored_audience_id . The endpoint will also accept multiple user identifier types per user as well . |
19,602 | def permissions ( self , ** kwargs ) : self . _validate_loaded ( ) return TailoredAudiencePermission . all ( self . account , self . id , ** kwargs ) | Returns a collection of permissions for the curent tailored audience . |
19,603 | def all ( klass , account , tailored_audience_id , ** kwargs ) : resource = klass . RESOURCE_COLLECTION . format ( account_id = account . id , tailored_audience_id = tailored_audience_id ) request = Request ( account . client , 'get' , resource , params = kwargs ) return Cursor ( klass , request , init_with = [ account ] ) | Returns a Cursor instance for the given tailored audience permission resource . |
19,604 | def save ( self ) : if self . id : method = 'put' resource = self . RESOURCE . format ( account_id = self . account . id , tailored_audience_id = self . tailored_audience_id , id = self . id ) else : method = 'post' resource = self . RESOURCE_COLLECTION . format ( account_id = self . account . id , tailored_audience_id = self . tailored_audience_id ) response = Request ( self . account . client , method , resource , params = self . to_params ( ) ) . perform ( ) return self . from_response ( response . body [ 'data' ] ) | Saves or updates the current tailored audience permission . |
19,605 | def delete ( self ) : resource = self . RESOURCE . format ( account_id = self . account . id , tailored_audience_id = self . tailored_audience_id , id = self . id ) response = Request ( self . account . client , 'delete' , resource ) . perform ( ) return self . from_response ( response . body [ 'data' ] ) | Deletes the current tailored audience permission . |
19,606 | def __get ( klass , account , client , params ) : resource = klass . RESOURCE_CONVERSATIONS . format ( account_id = account . id ) request = Request ( account . client , klass . METHOD , resource , headers = klass . HEADERS , body = params ) return Cursor ( klass , request , init_with = [ account ] ) | Helper function to get the conversation data Returns a Cursor instance |
19,607 | def conversations ( self ) : body = { "conversation_type" : self . conversation_type , "audience_definition" : self . audience_definition , "targeting_inputs" : self . targeting_inputs } return self . __get ( account = self . account , client = self . account . client , params = json . dumps ( body ) ) | Get the conversation topics for an input targeting criteria |
19,608 | def demographics ( self ) : body = { "audience_definition" : self . audience_definition , "targeting_inputs" : self . targeting_inputs } resource = self . RESOURCE_DEMOGRAPHICS . format ( account_id = self . account . id ) response = Request ( self . account . client , self . METHOD , resource , headers = self . HEADERS , body = json . dumps ( body ) ) . perform ( ) return response . body [ 'data' ] | Get the demographic breakdown for an input targeting criteria |
19,609 | def setupEnvironment ( self , cmd ) : shell . ShellCommand . setupEnvironment ( self , cmd ) env = { } for k , v in self . build . getProperties ( ) . properties . items ( ) : env [ str ( k ) ] = str ( v [ 0 ] ) if cmd . args [ 'env' ] is None : cmd . args [ 'env' ] = { } cmd . args [ 'env' ] . update ( env ) | Turn all build properties into environment variables |
19,610 | def updateStats ( self , log ) : stdio = log . getText ( ) total = passed = skipped = fails = warnings = errors = 0 hastests = False if not hastests : outputs = re . findall ( "Ran (?P<count>[\d]+) tests with (?P<fail>[\d]+) failures and (?P<error>[\d]+) errors" , stdio ) for output in outputs : total += int ( output [ 0 ] ) fails += int ( output [ 1 ] ) errors += int ( output [ 2 ] ) hastests = True if not hastests : for line in stdio . split ( "\n" ) : if line . startswith ( "FAILED (" ) or line . startswith ( "PASSED (" ) : hastests = True line = line [ 8 : ] [ : - 1 ] stats = line . split ( ", " ) data = { } for stat in stats : k , v = stat . split ( "=" ) data [ k ] = int ( v ) if "successes" not in data : total = 0 for number in re . findall ( "Ran (?P<count>[\d]+) tests in " , stdio ) : total += int ( number ) data [ "successes" ] = total - sum ( data . values ( ) ) if not hastests : fails += len ( re . findall ( 'FAIL:' , stdio ) ) errors += len ( re . findall ( '======================================================================\nERROR:' , stdio ) ) for number in re . findall ( "Ran (?P<count>[\d]+)" , stdio ) : total += int ( number ) hastests = True passed = total - ( skipped + fails + errors + warnings ) if hastests : self . setStatistic ( 'total' , total ) self . setStatistic ( 'fails' , fails ) self . setStatistic ( 'errors' , errors ) self . setStatistic ( 'warnings' , warnings ) self . setStatistic ( 'skipped' , skipped ) self . setStatistic ( 'passed' , passed ) | Parse test results out of common test harnesses . |
19,611 | def saveConfig ( self , request ) : res = yield self . assertAllowed ( request ) if res : defer . returnValue ( res ) request . setHeader ( 'Content-Type' , 'application/json' ) if self . _in_progress : defer . returnValue ( json . dumps ( { 'success' : False , 'errors' : [ 'reconfig already in progress' ] } ) ) self . _in_progress = True cfg = json . loads ( request . content . read ( ) ) if cfg != self . _cfg : try : err = yield self . saveCfg ( cfg ) except Exception as e : err = [ repr ( e ) ] if err is not None : self . _in_progress = False yield self . saveCfg ( self . _cfg ) defer . returnValue ( json . dumps ( { 'success' : False , 'errors' : err } ) ) yield self . ep . master . reconfig ( ) defer . returnValue ( json . dumps ( { 'success' : True } ) ) | I save the config and run check_config potencially returning errors |
19,612 | def anti_alias ( map_in , steps ) : height , width = map_in . shape map_part = ( 2.0 / 11.0 ) * map_in w = - 1.0 / numpy . sqrt ( 3.0 ) kernel = [ w , w , w ] def _anti_alias_step ( original ) : result = original * ( 3.0 / 11.0 ) result = numpy . append ( result , [ result [ 0 , : ] ] , 0 ) result = numpy . append ( result , numpy . transpose ( [ result [ : , 0 ] ] ) , 1 ) result = numpy . insert ( result , [ 0 ] , [ result [ - 2 , : ] ] , 0 ) result = numpy . insert ( result , [ 0 ] , numpy . transpose ( [ result [ : , - 2 ] ] ) , 1 ) for y in range ( height + 2 ) : result [ y , 1 : - 1 ] = numpy . convolve ( result [ y , : ] , kernel , 'valid' ) for x in range ( width + 2 ) : result [ 1 : - 1 , x ] = numpy . convolve ( result [ : , x ] , kernel , 'valid' ) result = result [ 1 : - 1 , 1 : - 1 ] result += map_part return result current = map_in for i in range ( steps ) : current = _anti_alias_step ( current ) return current | Execute the anti_alias operation steps times on the given map |
19,613 | def count_neighbours ( mask , radius = 1 ) : height , width = mask . shape f = 2.0 * radius + 1.0 w = - 1.0 / numpy . sqrt ( f ) kernel = [ w ] * radius + [ w ] + [ w ] * radius result = mask * f for y in range ( height ) : result [ y , : ] = numpy . convolve ( result [ y , : ] , kernel , 'same' ) for x in range ( width ) : result [ : , x ] = numpy . convolve ( result [ : , x ] , kernel , 'same' ) return result - mask | Count how many neighbours of a coordinate are set to one . This uses the same principles as anti_alias compare comments there . |
19,614 | def average_colors ( c1 , c2 ) : r = int ( ( c1 [ 0 ] + c2 [ 0 ] ) / 2 ) g = int ( ( c1 [ 1 ] + c2 [ 1 ] ) / 2 ) b = int ( ( c1 [ 2 ] + c2 [ 2 ] ) / 2 ) return ( r , g , b ) | Average the values of two colors together |
19,615 | def get_normalized_elevation_array ( world ) : e = world . layers [ 'elevation' ] . data ocean = world . layers [ 'ocean' ] . data mask = numpy . ma . array ( e , mask = ocean ) min_elev_land = mask . min ( ) max_elev_land = mask . max ( ) elev_delta_land = max_elev_land - min_elev_land mask = numpy . ma . array ( e , mask = numpy . logical_not ( ocean ) ) min_elev_sea = mask . min ( ) max_elev_sea = mask . max ( ) elev_delta_sea = max_elev_sea - min_elev_sea c = numpy . empty ( e . shape , dtype = numpy . float ) c [ numpy . invert ( ocean ) ] = ( e [ numpy . invert ( ocean ) ] - min_elev_land ) * 127 / elev_delta_land + 128 c [ ocean ] = ( e [ ocean ] - min_elev_sea ) * 127 / elev_delta_sea c = numpy . rint ( c ) . astype ( dtype = numpy . int32 ) return c | Convert raw elevation into normalized values between 0 and 255 and return a numpy array of these values |
19,616 | def get_biome_color_based_on_elevation ( world , elev , x , y , rng ) : v = world . biome_at ( ( x , y ) ) . name ( ) biome_color = _biome_satellite_colors [ v ] noise = ( 0 , 0 , 0 ) if world . is_land ( ( x , y ) ) : noise = rng . randint ( - NOISE_RANGE , NOISE_RANGE , size = 3 ) if elev > HIGH_MOUNTAIN_ELEV : noise = add_colors ( noise , HIGH_MOUNTAIN_NOISE_MODIFIER ) biome_color = average_colors ( biome_color , MOUNTAIN_COLOR ) elif elev > MOUNTAIN_ELEV : noise = add_colors ( noise , MOUNTAIN_NOISE_MODIFIER ) biome_color = average_colors ( biome_color , MOUNTAIN_COLOR ) elif elev > HIGH_HILL_ELEV : noise = add_colors ( noise , HIGH_HILL_NOISE_MODIFIER ) elif elev > HILL_ELEV : noise = add_colors ( noise , HILL_NOISE_MODIFIER ) modification_amount = int ( elev / BASE_ELEVATION_INTENSITY_MODIFIER ) base_elevation_modifier = ( modification_amount , modification_amount , modification_amount ) this_tile_color = add_colors ( biome_color , noise , base_elevation_modifier ) return this_tile_color | This is the business logic for determining the base biome color in satellite view . This includes generating some noise at each spot in a pixel s rgb value potentially modifying the noise based on elevation and finally incorporating this with the base biome color . |
19,617 | def find_water_flow ( self , world , water_path ) : for x in range ( world . width - 1 ) : for y in range ( world . height - 1 ) : path = self . find_quick_path ( [ x , y ] , world ) if path : tx , ty = path flow_dir = [ tx - x , ty - y ] key = 0 for direction in DIR_NEIGHBORS_CENTER : if direction == flow_dir : water_path [ y , x ] = key key += 1 | Find the flow direction for each cell in heightmap |
19,618 | def river_sources ( world , water_flow , water_path ) : river_source_list = [ ] for y in range ( 0 , world . height - 1 ) : for x in range ( 0 , world . width - 1 ) : rain_fall = world . layers [ 'precipitation' ] . data [ y , x ] water_flow [ y , x ] = rain_fall if water_path [ y , x ] == 0 : continue cx , cy = x , y neighbour_seed_found = False while not neighbour_seed_found : if world . is_mountain ( ( cx , cy ) ) and water_flow [ cy , cx ] >= RIVER_TH : for seed in river_source_list : sx , sy = seed if in_circle ( 9 , cx , cy , sx , sy ) : neighbour_seed_found = True if neighbour_seed_found : break river_source_list . append ( [ cx , cy ] ) break if water_path [ cy , cx ] == 0 : break dx , dy = DIR_NEIGHBORS_CENTER [ water_path [ cy , cx ] ] nx , ny = cx + dx , cy + dy water_flow [ ny , nx ] += rain_fall cx , cy = nx , ny return river_source_list | Find places on map where sources of river can be found |
19,619 | def cleanUpFlow ( self , river , world ) : celevation = 1.0 for r in river : rx , ry = r relevation = world . layers [ 'elevation' ] . data [ ry , rx ] if relevation <= celevation : celevation = relevation elif relevation > celevation : world . layers [ 'elevation' ] . data [ ry , rx ] = celevation return river | Validate that for each point in river is equal to or lower than the last |
19,620 | def findLowerElevation ( self , source , world ) : x , y = source currentRadius = 1 maxRadius = 40 lowestElevation = world . layers [ 'elevation' ] . data [ y , x ] destination = [ ] notFound = True isWrapped = False wrapped = [ ] while notFound and currentRadius <= maxRadius : for cx in range ( - currentRadius , currentRadius + 1 ) : for cy in range ( - currentRadius , currentRadius + 1 ) : rx , ry = x + cx , y + cy if not self . wrap and not world . contains ( ( rx , ry ) ) : continue if not in_circle ( currentRadius , x , y , rx , ry ) : continue rx , ry = overflow ( rx , world . width ) , overflow ( ry , world . height ) elevation = world . layers [ 'elevation' ] . data [ ry , rx ] if elevation < lowestElevation : lowestElevation = elevation destination = [ rx , ry ] notFound = False if not world . contains ( ( x + cx , y + cy ) ) : wrapped . append ( destination ) currentRadius += 1 if destination in wrapped : isWrapped = True return isWrapped , destination | Try to find a lower elevation with in a range of an increasing circle s radius and try to find the best path and return it |
19,621 | def rivermap_update ( self , river , water_flow , rivermap , precipitations ) : isSeed = True px , py = ( 0 , 0 ) for x , y in river : if isSeed : rivermap [ y , x ] = water_flow [ y , x ] isSeed = False else : rivermap [ y , x ] = precipitations [ y , x ] + rivermap [ py , px ] px , py = x , y | Update the rivermap with the rainfall that is to become the waterflow |
19,622 | def draw_rivers_on_image ( world , target , factor = 1 ) : for y in range ( world . height ) : for x in range ( world . width ) : if world . is_land ( ( x , y ) ) and ( world . layers [ 'river_map' ] . data [ y , x ] > 0.0 ) : for dx in range ( factor ) : for dy in range ( factor ) : target . set_pixel ( x * factor + dx , y * factor + dy , ( 0 , 0 , 128 , 255 ) ) if world . is_land ( ( x , y ) ) and ( world . layers [ 'lake_map' ] . data [ y , x ] != 0 ) : for dx in range ( factor ) : for dy in range ( factor ) : target . set_pixel ( x * factor + dx , y * factor + dy , ( 0 , 100 , 128 , 255 ) ) | Draw only the rivers it expect the background to be in place |
19,623 | def center_land ( world ) : y_sums = world . layers [ 'elevation' ] . data . sum ( 1 ) y_with_min_sum = y_sums . argmin ( ) if get_verbose ( ) : print ( "geo.center_land: height complete" ) x_sums = world . layers [ 'elevation' ] . data . sum ( 0 ) x_with_min_sum = x_sums . argmin ( ) if get_verbose ( ) : print ( "geo.center_land: width complete" ) latshift = 0 world . layers [ 'elevation' ] . data = numpy . roll ( numpy . roll ( world . layers [ 'elevation' ] . data , - y_with_min_sum + latshift , axis = 0 ) , - x_with_min_sum , axis = 1 ) world . layers [ 'plates' ] . data = numpy . roll ( numpy . roll ( world . layers [ 'plates' ] . data , - y_with_min_sum + latshift , axis = 0 ) , - x_with_min_sum , axis = 1 ) if get_verbose ( ) : print ( "geo.center_land: width complete" ) | Translate the map horizontally and vertically to put as much ocean as possible at the borders . It operates on elevation and plates map |
19,624 | def place_oceans_at_map_borders ( world ) : ocean_border = int ( min ( 30 , max ( world . width / 5 , world . height / 5 ) ) ) def place_ocean ( x , y , i ) : world . layers [ 'elevation' ] . data [ y , x ] = ( world . layers [ 'elevation' ] . data [ y , x ] * i ) / ocean_border for x in range ( world . width ) : for i in range ( ocean_border ) : place_ocean ( x , i , i ) place_ocean ( x , world . height - i - 1 , i ) for y in range ( world . height ) : for i in range ( ocean_border ) : place_ocean ( i , y , i ) place_ocean ( world . width - i - 1 , y , i ) | Lower the elevation near the border of the map |
19,625 | def harmonize_ocean ( ocean , elevation , ocean_level ) : shallow_sea = ocean_level * 0.85 midpoint = shallow_sea / 2.0 ocean_points = numpy . logical_and ( elevation < shallow_sea , ocean ) shallow_ocean = numpy . logical_and ( elevation < midpoint , ocean_points ) elevation [ shallow_ocean ] = midpoint - ( ( midpoint - elevation [ shallow_ocean ] ) / 5.0 ) deep_ocean = numpy . logical_and ( elevation > midpoint , ocean_points ) elevation [ deep_ocean ] = midpoint + ( ( elevation [ deep_ocean ] - midpoint ) / 5.0 ) | The goal of this function is to make the ocean floor less noisy . The underwater erosion should cause the ocean floor to be more uniform |
19,626 | def _always_strings ( env_dict ) : if IS_WINDOWS or PY2 : env_dict . update ( ( key , str ( value ) ) for ( key , value ) in env_dict . items ( ) ) return env_dict | On Windows and Python 2 environment dictionaries must be strings and not unicode . |
19,627 | def compat_stat ( path ) : stat = os . stat ( path ) info = get_file_info ( path ) return nt . stat_result ( ( stat . st_mode , ) + ( info . file_index , info . volume_serial_number , info . number_of_links ) + stat [ 4 : ] ) | Generate stat as found on Python 3 . 2 and later . |
19,628 | def scm_find_files ( path , scm_files , scm_dirs ) : realpath = os . path . normcase ( os . path . realpath ( path ) ) seen = set ( ) res = [ ] for dirpath , dirnames , filenames in os . walk ( realpath , followlinks = True ) : realdirpath = os . path . normcase ( os . path . realpath ( dirpath ) ) def _link_not_in_scm ( n ) : fn = os . path . join ( realdirpath , os . path . normcase ( n ) ) return os . path . islink ( fn ) and fn not in scm_files if realdirpath not in scm_dirs : dirnames [ : ] = [ ] continue if ( os . path . islink ( dirpath ) and not os . path . relpath ( realdirpath , realpath ) . startswith ( os . pardir ) ) : res . append ( os . path . join ( path , os . path . relpath ( dirpath , path ) ) ) dirnames [ : ] = [ ] continue if realdirpath in seen : dirnames [ : ] = [ ] continue dirnames [ : ] = [ dn for dn in dirnames if not _link_not_in_scm ( dn ) ] for filename in filenames : if _link_not_in_scm ( filename ) : continue fullfilename = os . path . join ( dirpath , filename ) if os . path . normcase ( os . path . realpath ( fullfilename ) ) in scm_files : res . append ( os . path . join ( path , os . path . relpath ( fullfilename , path ) ) ) seen . add ( realdirpath ) return res | setuptools compatible file finder that follows symlinks |
19,629 | def session ( self ) : engine = self . engine connection = engine . connect ( ) db_session = scoped_session ( sessionmaker ( autocommit = False , autoflush = True , bind = engine ) ) yield db_session db_session . close ( ) connection . close ( ) | Creates a context with an open SQLAlchemy session . |
19,630 | def kill ( arg1 , arg2 ) : from subprocess import Popen , PIPE t0 = time . time ( ) time_out = 30 running = True while running and time . time ( ) - t0 < time_out : if os . name == 'nt' : p = Popen ( 'tasklist | find "%s"' % arg1 , shell = True , stdin = PIPE , stdout = PIPE , stderr = PIPE , close_fds = False ) else : p = Popen ( 'ps aux | grep %s' % arg1 , shell = True , stdin = PIPE , stdout = PIPE , stderr = PIPE , close_fds = True ) lines = p . stdout . readlines ( ) running = False for line in lines : if ( '%s' % arg2 in line ) or ( os . name == 'nt' and '%s' % arg1 in line ) : running = True fields = line . strip ( ) . split ( ) info ( 'Stopping %s (process number %s)' % ( arg1 , fields [ 1 ] ) ) if os . name == 'nt' : kill = 'taskkill /F /PID "%s"' % fields [ 1 ] else : kill = 'kill -9 %s 2> /dev/null' % fields [ 1 ] os . system ( kill ) time . sleep ( 1 ) else : pass if running : raise Exception ( 'Could not stop %s: ' 'Running processes are\n%s' % ( arg1 , '\n' . join ( [ l . strip ( ) for l in lines ] ) ) ) | Stops a proces that contains arg1 and is filtered by arg2 |
19,631 | def sign ( self , consumer_secret , method , url , oauth_token_secret = None , ** params ) : url = yarl . URL ( url ) . with_query ( sorted ( params . items ( ) ) ) url , params = str ( url ) . split ( '?' , 1 ) method = method . upper ( ) signature = b"&" . join ( map ( self . _escape , ( method , url , params ) ) ) key = self . _escape ( consumer_secret ) + b"&" if oauth_token_secret : key += self . _escape ( oauth_token_secret ) hashed = hmac . new ( key , signature , sha1 ) return base64 . b64encode ( hashed . digest ( ) ) . decode ( ) | Create a signature using HMAC - SHA1 . |
19,632 | def sign ( self , consumer_secret , method , url , oauth_token_secret = None , ** params ) : key = self . _escape ( consumer_secret ) + b'&' if oauth_token_secret : key += self . _escape ( oauth_token_secret ) return key . decode ( ) | Create a signature using PLAINTEXT . |
19,633 | def _get_url ( self , url ) : if self . base_url and not url . startswith ( ( 'http://' , 'https://' ) ) : return urljoin ( self . base_url , url ) return url | Build provider s url . Join with base_url part if needed . |
19,634 | async def _request ( self , method , url , loop = None , timeout = None , ** kwargs ) : session = self . session or aiohttp . ClientSession ( loop = loop , conn_timeout = timeout , read_timeout = timeout ) try : async with session . request ( method , url , ** kwargs ) as response : if response . status / 100 > 2 : raise web . HTTPBadRequest ( reason = 'HTTP status code: %s' % response . status ) if 'json' in response . headers . get ( 'CONTENT-TYPE' ) : data = await response . json ( ) else : data = await response . text ( ) data = dict ( parse_qsl ( data ) ) return data except asyncio . TimeoutError : raise web . HTTPBadRequest ( reason = 'HTTP Timeout' ) finally : if not self . session and not session . closed : await session . close ( ) | Make a request through AIOHTTP . |
19,635 | async def user_info ( self , loop = None , ** kwargs ) : if not self . user_info_url : raise NotImplementedError ( 'The provider doesnt support user_info method.' ) data = await self . request ( 'GET' , self . user_info_url , loop = loop , ** kwargs ) user = User ( ** dict ( self . user_parse ( data ) ) ) return user , data | Load user information from provider . |
19,636 | def request ( self , method , url , params = None , ** aio_kwargs ) : oparams = { 'oauth_consumer_key' : self . consumer_key , 'oauth_nonce' : sha1 ( str ( RANDOM ( ) ) . encode ( 'ascii' ) ) . hexdigest ( ) , 'oauth_signature_method' : self . signature . name , 'oauth_timestamp' : str ( int ( time . time ( ) ) ) , 'oauth_version' : self . version , } oparams . update ( params or { } ) if self . oauth_token : oparams [ 'oauth_token' ] = self . oauth_token url = self . _get_url ( url ) if urlsplit ( url ) . query : raise ValueError ( 'Request parameters should be in the "params" parameter, ' 'not inlined in the URL' ) oparams [ 'oauth_signature' ] = self . signature . sign ( self . consumer_secret , method , url , oauth_token_secret = self . oauth_token_secret , ** oparams ) self . logger . debug ( "%s %s" , url , oparams ) return self . _request ( method , url , params = oparams , ** aio_kwargs ) | Make a request to provider . |
19,637 | async def get_request_token ( self , loop = None , ** params ) : params = dict ( self . params , ** params ) data = await self . request ( 'GET' , self . request_token_url , params = params , loop = loop ) self . oauth_token = data . get ( 'oauth_token' ) self . oauth_token_secret = data . get ( 'oauth_token_secret' ) return self . oauth_token , self . oauth_token_secret , data | Get a request_token and request_token_secret from OAuth1 provider . |
19,638 | async def get_access_token ( self , oauth_verifier , request_token = None , loop = None , ** params ) : if not isinstance ( oauth_verifier , str ) and self . shared_key in oauth_verifier : oauth_verifier = oauth_verifier [ self . shared_key ] if request_token and self . oauth_token != request_token : raise web . HTTPBadRequest ( reason = 'Failed to obtain OAuth 1.0 access token. ' 'Request token is invalid' ) data = await self . request ( 'POST' , self . access_token_url , params = { 'oauth_verifier' : oauth_verifier , 'oauth_token' : request_token } , loop = loop ) self . oauth_token = data . get ( 'oauth_token' ) self . oauth_token_secret = data . get ( 'oauth_token_secret' ) return self . oauth_token , self . oauth_token_secret , data | Get access_token from OAuth1 provider . |
19,639 | def get_authorize_url ( self , ** params ) : params = dict ( self . params , ** params ) params . update ( { 'client_id' : self . client_id , 'response_type' : 'code' } ) return self . authorize_url + '?' + urlencode ( params ) | Return formatted authorize URL . |
19,640 | def request ( self , method , url , params = None , headers = None , access_token = None , ** aio_kwargs ) : url = self . _get_url ( url ) params = params or { } access_token = access_token or self . access_token if access_token : if isinstance ( params , list ) : if self . access_token_key not in dict ( params ) : params . append ( ( self . access_token_key , access_token ) ) else : params [ self . access_token_key ] = access_token headers = headers or { 'Accept' : 'application/json' , 'Content-Type' : 'application/x-www-form-urlencoded;charset=UTF-8' , } return self . _request ( method , url , params = params , headers = headers , ** aio_kwargs ) | Request OAuth2 resource . |
19,641 | async def get_access_token ( self , code , loop = None , redirect_uri = None , ** payload ) : payload . setdefault ( 'grant_type' , 'authorization_code' ) payload . update ( { 'client_id' : self . client_id , 'client_secret' : self . client_secret } ) if not isinstance ( code , str ) and self . shared_key in code : code = code [ self . shared_key ] payload [ 'refresh_token' if payload [ 'grant_type' ] == 'refresh_token' else 'code' ] = code redirect_uri = redirect_uri or self . params . get ( 'redirect_uri' ) if redirect_uri : payload [ 'redirect_uri' ] = redirect_uri self . access_token = None data = await self . request ( 'POST' , self . access_token_url , data = payload , loop = loop ) try : self . access_token = data [ 'access_token' ] except KeyError : self . logger . error ( 'Error when getting the access token.\nData returned by OAuth server: %r' , data , ) raise web . HTTPBadRequest ( reason = 'Failed to obtain OAuth access token.' ) return self . access_token , data | Get an access_token from OAuth provider . |
19,642 | async def user_info ( self , params = None , ** kwargs ) : params = params or { } params [ 'fields' ] = 'id,email,first_name,last_name,name,link,locale,' 'gender,location' return await super ( FacebookClient , self ) . user_info ( params = params , ** kwargs ) | Facebook required fields - param . |
19,643 | def hijack_require_http_methods ( fn ) : required_methods = [ 'POST' ] if hijack_settings . HIJACK_ALLOW_GET_REQUESTS : required_methods . append ( 'GET' ) return require_http_methods ( required_methods ) ( fn ) | Wrapper for require_http_methods decorator . POST required by default GET can optionally be allowed |
19,644 | def is_authorized_default ( hijacker , hijacked ) : if hijacker . is_superuser : return True if hijacked . is_superuser : return False if hijacker . is_staff and hijack_settings . HIJACK_AUTHORIZE_STAFF : if hijacked . is_staff and not hijack_settings . HIJACK_AUTHORIZE_STAFF_TO_HIJACK_STAFF : return False return True return False | Checks if the user has the correct permission to Hijack another user . |
19,645 | def is_authorized ( hijack , hijacked ) : authorization_check = import_string ( hijack_settings . HIJACK_AUTHORIZATION_CHECK ) return authorization_check ( hijack , hijacked ) | Evaluates the authorization check specified in settings |
19,646 | def is_downloadable ( self , response ) : content_type = response . headers . get ( 'Content-Type' , '' ) content_disp = response . headers . get ( 'Content-Disposition' , '' ) if 'text/html' in content_type and 'attachment' not in content_disp : return False return True | Checks whether the response object is a html page or a likely downloadable file . Intended to detect error pages or prompts such as kaggle s competition rules acceptance prompt . |
19,647 | def count ( self ) : self . request_params . update ( { 'sysparm_count' : True } ) response = self . session . get ( self . _get_stats_url ( ) , params = self . _get_formatted_query ( fields = list ( ) , limit = None , order_by = list ( ) , offset = None ) ) content = self . _get_content ( response ) return int ( content [ 'stats' ] [ 'count' ] ) | Returns the number of records the query would yield |
19,648 | def _all_inner ( self , fields , limit , order_by , offset ) : response = self . session . get ( self . _get_table_url ( ) , params = self . _get_formatted_query ( fields , limit , order_by , offset ) ) yield self . _get_content ( response ) while 'next' in response . links : self . url_link = response . links [ 'next' ] [ 'url' ] response = self . session . get ( self . url_link ) yield self . _get_content ( response ) | Yields all records for the query and follows links if present on the response after validating |
19,649 | def get_one ( self , fields = list ( ) ) : response = self . session . get ( self . _get_table_url ( ) , params = self . _get_formatted_query ( fields , limit = None , order_by = list ( ) , offset = None ) ) content = self . _get_content ( response ) l = len ( content ) if l > 1 : raise MultipleResults ( 'Multiple results for get_one()' ) if len ( content ) == 0 : return { } return content [ 0 ] | Convenience function for queries returning only one result . Validates response before returning . |
19,650 | def insert ( self , payload ) : response = self . session . post ( self . _get_table_url ( ) , data = json . dumps ( payload ) ) return self . _get_content ( response ) | Inserts a new record with the payload passed as an argument |
19,651 | def delete ( self ) : try : result = self . get_one ( ) if 'sys_id' not in result : raise NoResults ( ) except MultipleResults : raise MultipleResults ( "Deletion of multiple records is not supported" ) except NoResults as e : e . args = ( 'Cannot delete a non-existing record' , ) raise response = self . session . delete ( self . _get_table_url ( sys_id = result [ 'sys_id' ] ) ) return self . _get_content ( response ) | Deletes the queried record and returns response content after response validation |
19,652 | def update ( self , payload ) : try : result = self . get_one ( ) if 'sys_id' not in result : raise NoResults ( ) except MultipleResults : raise MultipleResults ( "Update of multiple records is not supported" ) except NoResults as e : e . args = ( 'Cannot update a non-existing record' , ) raise if not isinstance ( payload , dict ) : raise InvalidUsage ( "Update payload must be of type dict" ) response = self . session . put ( self . _get_table_url ( sys_id = result [ 'sys_id' ] ) , data = json . dumps ( payload ) ) return self . _get_content ( response ) | Updates the queried record with payload and returns the updated record after validating the response |
19,653 | def clone ( self , reset_fields = list ( ) ) : if not isinstance ( reset_fields , list ) : raise InvalidUsage ( "reset_fields must be a `list` of fields" ) try : response = self . get_one ( ) if 'sys_id' not in response : raise NoResults ( ) except MultipleResults : raise MultipleResults ( 'Cloning multiple records is not supported' ) except NoResults as e : e . args = ( 'Cannot clone a non-existing record' , ) raise payload = { } for field in response : if field in reset_fields : continue item = response [ field ] if isinstance ( item , dict ) and 'value' in item : payload [ field ] = item [ 'value' ] else : payload [ field ] = item try : return self . insert ( payload ) except UnexpectedResponse as e : if e . status_code == 403 : e . args = ( 'Unable to create clone. Make sure unique fields has been reset.' , ) raise | Clones the queried record |
def attach(self, file):
    """Attach the regular file at path *file* to the queried record.

    :param file: path to an existing regular file
    :raises MultipleResults / NoResults: if not exactly one record matched
    :raises InvalidUsage: if *file* is not an existing regular file
    :return: validated response content
    """
    try:
        result = self.get_one()
        if 'sys_id' not in result:
            raise NoResults()
    except MultipleResults:
        raise MultipleResults('Attaching a file to multiple records is not supported')
    except NoResults:
        raise NoResults('Attempted to attach file to a non-existing record')

    if not os.path.isfile(file):
        raise InvalidUsage("Attachment '%s' must be an existing regular file" % file)

    # FIX: the original called open() without ever closing the handle;
    # a context manager closes it deterministically.
    with open(file, 'rb') as f:
        response = self.session.post(
            self._get_attachment_url('upload'),
            data={
                'table_name': self.table,
                'table_sys_id': result['sys_id'],
                'file_name': ntpath.basename(file),
            },
            files={'file': f},
            headers={'content-type': None},  # let the HTTP lib pick the multipart type
        )

    return self._get_content(response)
19,655 | def _get_content ( self , response ) : method = response . request . method self . last_response = response server_error = { 'summary' : None , 'details' : None } try : content_json = response . json ( ) if 'error' in content_json : e = content_json [ 'error' ] if 'message' in e : server_error [ 'summary' ] = e [ 'message' ] if 'detail' in e : server_error [ 'details' ] = e [ 'detail' ] except ValueError : content_json = { } if method == 'DELETE' : if response . status_code == 204 : return { 'success' : True } else : raise UnexpectedResponse ( 204 , response . status_code , method , server_error [ 'summary' ] , server_error [ 'details' ] ) elif method == 'POST' and response . status_code != 201 : raise UnexpectedResponse ( 201 , response . status_code , method , server_error [ 'summary' ] , server_error [ 'details' ] ) if ( 'result' in content_json and len ( content_json [ 'result' ] ) == 0 ) or response . status_code == 404 : if self . raise_on_empty is True : raise NoResults ( 'Query yielded no results' ) elif 'error' in content_json : raise UnexpectedResponse ( 200 , response . status_code , method , server_error [ 'summary' ] , server_error [ 'details' ] ) if 'result' not in content_json : raise MissingResult ( "The request was successful but the content didn't contain the expected 'result'" ) return content_json [ 'result' ] | Checks for errors in the response . Returns response content in bytes . |
def _get_session(self, session):
    """Return *session*, or a fresh basic-auth session if none was given,
    with the JSON headers and User-Agent this client requires.
    """
    if session:
        logger.debug('(SESSION_CREATE) Object: %s' % session)
        s = session
    else:
        logger.debug('(SESSION_CREATE) User: %s' % self._user)
        s = requests.Session()
        s.auth = HTTPBasicAuth(self._user, self._password)

    s.headers.update({
        'content-type': 'application/json',
        'accept': 'application/json',
        'User-Agent': 'pysnow/%s' % pysnow.__version__,
    })
    return s
def get(self, *args, **kwargs):
    """Fetch one or more records.

    Accepts the query either positionally or as ``query=``; also accepts
    ``limit``, ``offset``, ``fields`` and ``stream`` keyword arguments.
    :return: the response wrapper from ``_get_response``
    """
    self._parameters.query = kwargs.pop('query', {}) if len(args) == 0 else args[0]
    self._parameters.limit = kwargs.pop('limit', 10000)
    self._parameters.offset = kwargs.pop('offset', 0)
    # FIX: the original nested pop — kwargs.pop('fields', kwargs.pop('fields', []))
    # — pops the key twice; a single pop is equivalent and intentional.
    self._parameters.fields = kwargs.pop('fields', [])
    return self._get_response('GET', stream=kwargs.pop('stream', False))
def update(self, query, payload):
    """Update the record matched by *query* with *payload*.

    :raises InvalidUsage: if payload is not a dict
    :return: the response wrapper for the PUT request
    """
    if not isinstance(payload, dict):
        raise InvalidUsage("Update payload must be of type dict")

    target = self.get(query).one()
    self._url = self._url_builder.get_appended_custom("/{0}".format(target['sys_id']))
    return self._get_response('PUT', data=json.dumps(payload))
def delete(self, query):
    """Delete the record matched by *query* and return the API response."""
    sys_id = self.get(query=query).one()['sys_id']
    self._url = self._url_builder.get_appended_custom("/{0}".format(sys_id))
    return self._get_response('DELETE').one()
def custom(self, method, path_append=None, headers=None, **kwargs):
    """Issue a custom request, optionally with extra headers and an
    appended URL path segment.
    """
    if headers:
        self._session.headers.update(headers)

    if path_append is not None:
        try:
            self._url = self._url_builder.get_appended_custom(path_append)
        except InvalidUsage:
            raise InvalidUsage("Argument 'path_append' must be a string in the following format: "
                               "/path-to-append[/.../...]")

    return self._get_response(method, **kwargs)
def attachments(self):
    """Return an Attachment API bound to this (table) resource.

    :raises InvalidUsage: if this resource is not a table resource
    """
    segments = self._api_path.strip('/').split('/')
    if segments[0] != 'table':
        raise InvalidUsage('The attachment API can only be used with the table API')

    resource = copy(self)
    resource._url_builder = URLBuilder(self._base_url, self._base_path, '/attachment')
    return Attachment(resource, segments[1])
def request(self, method, path_append=None, headers=None, **kwargs):
    """Delegate a custom request to the underlying request helper."""
    return self._request.custom(method, path_append=path_append,
                                headers=headers, **kwargs)
19,663 | def _get_buffered_response ( self ) : response = self . _get_response ( ) if response . request . method == 'DELETE' and response . status_code == 204 : return [ { 'status' : 'record deleted' } ] , 1 result = self . _response . json ( ) . get ( 'result' , None ) if result is None : raise MissingResult ( 'The expected `result` key was missing in the response. Cannot continue' ) length = 0 if isinstance ( result , list ) : length = len ( result ) elif isinstance ( result , dict ) : result = [ result ] length = 1 return result , length | Returns a buffered response |
def all(self):
    """Return all matching records, chained across pages when streaming."""
    if self._stream:
        return chain.from_iterable(self._get_streamed_response())
    return self._get_buffered_response()[0]
def first(self):
    """Return the first record of a streamed result set.

    :raises InvalidUsage: if the request was not made with stream=True
    :raises NoResults: if the stream is empty
    """
    if not self._stream:
        raise InvalidUsage('first() is only available when stream=True')
    try:
        return next(self.all())
    except StopIteration:
        raise NoResults("No records found")
def one(self):
    """Return exactly one record; raise if zero or many matched."""
    records, count = self._get_buffered_response()
    if count == 0:
        raise NoResults("No records found")
    if count > 1:
        raise MultipleResults("Expected single-record result, got multiple")
    return records[0]
def upload(self, *args, **kwargs):
    """Attach a file to this fetched record via the resource's attachment API."""
    return self._resource.attachments.upload(self['sys_id'], *args, **kwargs)
def get(self, sys_id=None, limit=100):
    """List attachments: for one record if *sys_id* is given, otherwise
    for the whole table (capped at *limit*).
    """
    if sys_id:
        return self.resource.get(query={'table_sys_id': sys_id,
                                        'table_name': self.table_name}).all()
    return self.resource.get(query={'table_name': self.table_name},
                             limit=limit).all()
def upload(self, sys_id, file_path, name=None, multipart=False):
    """Attach the file at *file_path* to the record *sys_id*.

    :param name: attachment file name; defaults to the file's basename
    :param multipart: use the multipart upload endpoint instead of /file
    :raises InvalidUsage: if multipart is not a bool
    """
    if not isinstance(multipart, bool):
        raise InvalidUsage('Multipart must be of type bool')

    resource = self.resource

    if name is None:
        name = os.path.basename(file_path)

    resource.parameters.add_custom({
        'table_name': self.table_name,
        'table_sys_id': sys_id,
        'file_name': name,
    })

    # FIX: the original did open(file_path, 'rb').read() and never closed
    # the handle; read inside a context manager instead.
    with open(file_path, 'rb') as f:
        data = f.read()

    headers = {}
    if multipart:
        headers["Content-Type"] = "multipart/form-data"
        path_append = '/upload'
    else:
        headers["Content-Type"] = "text/plain"
        path_append = '/file'

    return resource.request(method='POST', data=data, headers=headers,
                            path_append=path_append)
def validate_path(path):
    """Validate that *path* is a slash-delimited component string.

    :raises InvalidUsage: if path is not a string of the expected shape
    :return: True on success
    """
    if not isinstance(path, six.string_types) or not re.match('^/(?:[._a-zA-Z0-9-]/?)+[^/]$', path):
        # FIX: the original message had an unbalanced quote/bracket:
        # "Expected: '/<component>[/component], got: %s"
        raise InvalidUsage("Path validation failed - Expected: '/<component>[/<component>]', got: %s" % path)

    return True
def get_base_url(use_ssl, instance=None, host=None):
    """Build the base URL from either an *instance* name or a *host*.

    *instance* takes precedence and is expanded to
    ``<instance>.service-now.com``.
    """
    if instance is not None:
        host = ("%s.service-now.com" % instance).rstrip('/')

    scheme = "https" if use_ssl is True else "http"
    return "%s://%s" % (scheme, host)
def _get_oauth_session(self):
    """Build an OAuth2-backed session wired for automatic token refresh."""
    oauth = OAuth2Session(
        client_id=self.client_id,
        token=self.token,
        token_updater=self.token_updater,
        auto_refresh_url=self.token_url,
        auto_refresh_kwargs={
            "client_id": self.client_id,
            "client_secret": self.client_secret,
        },
    )
    return self._get_session(oauth)
def set_token(self, token):
    """Validate *token* and store a filtered copy on the instance.

    A falsy token clears the stored token.
    :raises InvalidUsage: if token is not a dict with all expected keys
    """
    if not token:
        self.token = None
        return

    expected_keys = ['token_type', 'refresh_token', 'access_token', 'scope',
                     'expires_in', 'expires_at']
    if not isinstance(token, dict) or not set(token) >= set(expected_keys):
        raise InvalidUsage("Expected a token dictionary containing the following keys: {0}".format(expected_keys))

    # keep only the keys we know about
    self.token = {k: v for k, v in token.items() if k in expected_keys}
def generate_token(self, user, password):
    """Exchange *user*/*password* credentials for a new OAuth token dict.

    :raises TokenCreateError: if the token endpoint rejects the request
    """
    logger.debug('(TOKEN_CREATE) :: User: %s' % user)
    session = OAuth2Session(client=LegacyApplicationClient(client_id=self.client_id))
    try:
        token = session.fetch_token(
            token_url=self.token_url,
            username=user,
            password=password,
            client_id=self.client_id,
            client_secret=self.client_secret,
        )
    except OAuth2Error as exception:
        raise TokenCreateError('Error creating user token',
                               exception.description, exception.status_code)
    return dict(token)
def order_descending(self):
    """Order the query by the current field, descending; fluent return."""
    self._query.append('ORDERBYDESC{0}'.format(self.current_field))
    # record the calling method's name as the last applied operator
    self.c_oper = inspect.currentframe().f_back.f_code.co_name
    return self
def equals(self, data):
    """Add an '=' condition for a string or an 'IN' condition for a list.

    :raises QueryTypeError: for any other type
    """
    if isinstance(data, six.string_types):
        return self._add_condition('=', data, types=[int, str])
    if isinstance(data, list):
        return self._add_condition('IN', ",".join(map(str, data)), types=[str])
    raise QueryTypeError('Expected value of type `str` or `list`, not %s' % type(data))
def greater_than(self, greater_than):
    """Add a '>' condition from an int or a datetime-like value.

    :raises QueryTypeError: if a plain string is passed
    """
    if hasattr(greater_than, 'strftime'):
        # normalize datetimes to a UTC timestamp string
        greater_than = datetime_as_utc(greater_than).strftime('%Y-%m-%d %H:%M:%S')
    elif isinstance(greater_than, six.string_types):
        raise QueryTypeError('Expected value of type `int` or instance of `datetime`, not %s' % type(greater_than))
    return self._add_condition('>', greater_than, types=[int, str])
def less_than(self, less_than):
    """Add a '<' condition from an int or a datetime-like value.

    :raises QueryTypeError: if a plain string is passed
    """
    if hasattr(less_than, 'strftime'):
        # normalize datetimes to a UTC timestamp string
        less_than = datetime_as_utc(less_than).strftime('%Y-%m-%d %H:%M:%S')
    elif isinstance(less_than, six.string_types):
        raise QueryTypeError('Expected value of type `int` or instance of `datetime`, not %s' % type(less_than))
    return self._add_condition('<', less_than, types=[int, str])
def between(self, start, end):
    """Add a BETWEEN condition from two datetimes or two ints.

    :raises QueryTypeError: for mixed or unsupported types
    """
    if hasattr(start, 'strftime') and hasattr(end, 'strftime'):
        template = ('javascript:gs.dateGenerate("%(start)s")'
                    '@'
                    'javascript:gs.dateGenerate("%(end)s")')
        operand = template % {
            'start': start.strftime('%Y-%m-%d %H:%M:%S'),
            'end': end.strftime('%Y-%m-%d %H:%M:%S'),
        }
    elif isinstance(start, int) and isinstance(end, int):
        operand = '%d@%d' % (start, end)
    else:
        raise QueryTypeError("Expected `start` and `end` of type `int` "
                             "or instance of `datetime`, not %s and %s" % (type(start), type(end)))

    return self._add_condition('BETWEEN', operand, types=[str])
19,680 | def _add_condition ( self , operator , operand , types ) : if not self . current_field : raise QueryMissingField ( "Conditions requires a field()" ) elif not type ( operand ) in types : caller = inspect . currentframe ( ) . f_back . f_code . co_name raise QueryTypeError ( "Invalid type passed to %s() , expected: %s" % ( caller , types ) ) elif self . c_oper : raise QueryMultipleExpressions ( "Expected logical operator after expression" ) self . c_oper = inspect . currentframe ( ) . f_back . f_code . co_name self . _query . append ( "%(current_field)s%(operator)s%(operand)s" % { 'current_field' : self . current_field , 'operator' : operator , 'operand' : operand } ) return self | Appends condition to self . _query after performing validation |
19,681 | def _add_logical_operator ( self , operator ) : if not self . c_oper : raise QueryExpressionError ( "Logical operators must be preceded by an expression" ) self . current_field = None self . c_oper = None self . l_oper = inspect . currentframe ( ) . f_back . f_code . co_name self . _query . append ( operator ) return self | Adds a logical operator in query |
def add_custom(self, params):
    """Merge *params* (a dict) into the custom request parameters.

    :raises InvalidUsage: if params is not a dict
    """
    if not isinstance(params, dict):
        raise InvalidUsage("custom parameters must be of type `dict`")
    self._custom_params.update(params)
def offset(self, offset):
    """Set ``sysparm_offset``, typically for pagination.

    :raises InvalidUsage: if offset is not a (non-bool) int — bool is
        excluded explicitly because it is an int subclass.
    """
    if not isinstance(offset, int) or isinstance(offset, bool):
        raise InvalidUsage('Offset must be an integer')
    self._sysparms['sysparm_offset'] = offset
def fields(self, fields):
    """Set ``sysparm_fields`` from a list of field names.

    :raises InvalidUsage: if fields is not a list
    """
    if not isinstance(fields, list):
        raise InvalidUsage('fields must be of type `list`')
    self._sysparms['sysparm_fields'] = ",".join(fields)
def exclude_reference_link(self, exclude):
    """Set ``sysparm_exclude_reference_link`` to a bool value.

    :raises InvalidUsage: if exclude is not a bool
    """
    if not isinstance(exclude, bool):
        raise InvalidUsage('exclude_reference_link must be of type bool')
    self._sysparms['sysparm_exclude_reference_link'] = exclude
def suppress_pagination_header(self, suppress):
    """Enable or disable the pagination header via
    ``sysparm_suppress_pagination_header``.

    :raises InvalidUsage: if suppress is not a bool
    """
    if not isinstance(suppress, bool):
        raise InvalidUsage('suppress_pagination_header must be of type bool')
    self._sysparms['sysparm_suppress_pagination_header'] = suppress
def quit(self):
    """Shut down the worker pools and stop the urwid main loop.

    May be called from another thread, hence the exit is scheduled as an
    alarm on the loop instead of raising directly.
    """
    def stop(*args):
        raise urwid.ExitMainLoop()

    self.worker.shutdown(wait=False)
    self.ui_worker.shutdown(wait=False)
    self.loop.set_alarm_in(0, stop)
19,688 | def _set_main_widget ( self , widget , redraw ) : self . set_body ( widget ) self . reload_footer ( ) if redraw : logger . debug ( "redraw main widget" ) self . refresh ( ) | add provided widget to widget list and display it |
def display_buffer(self, buffer, redraw=True):
    """Display *buffer* and record it in the movement history."""
    logger.debug("display buffer %r", buffer)
    self.buffer_movement_history.append(buffer)
    self.current_buffer = buffer
    self._set_main_widget(buffer.widget, redraw=redraw)
def add_and_display_buffer(self, buffer, redraw=True):
    """Add *buffer* to the buffer list (if not already there) and display it."""
    if buffer not in self.buffers:
        logger.debug("adding new buffer {!r}".format(buffer))
        self.buffers.append(buffer)
    self.display_buffer(buffer, redraw=redraw)
def pick_and_display_buffer(self, i):
    """Display the i-th buffer; fall back to the first on a bad index."""
    if len(self.buffers) == 1:
        # nothing to switch to
        return
    try:
        self.display_buffer(self.buffers[i])
    except IndexError:
        self.display_buffer(self.buffers[0])
def refresh(self):
    """Explicitly redraw the screen; useful after changing widgets dynamically."""
    logger.debug("refresh user interface")
    try:
        with self.refresh_lock:
            self.draw_screen()
    except AssertionError:
        # urwid asserts when the loop isn't running; nothing to draw yet
        logger.warning("application is not running")
def strip_from_ansi_esc_sequences(text):
    """Remove ANSI escape (color/cursor-control) sequences from *text*.

    :param text: string possibly containing ``ESC[...X`` sequences
    :return: the string with all such sequences removed
    """
    # IMPROVEMENT: the original walked match boundaries and spliced substrings
    # by hand; re.sub performs exactly that scan-and-remove in one call.
    return re.sub(r"\x1b\[[0-9;]*[mKJusDCBAfH]", "", text)
def realtime_updates(self):
    """Forward docker's realtime event stream to the current buffer."""
    logger.info("starting receiving events from docker")
    events = self.d.realtime_updates()
    while True:
        try:
            event = next(events)
        except NotifyError as ex:
            self.ui.notify_message(
                "error when receiving realtime events from docker: %s" % ex,
                level="error")
            return
        logger.debug("pass event to current buffer %s", self.ui.current_buffer)
        try:
            self.ui.current_buffer.process_realtime_event(event)
        except Exception as ex:
            # never let a buffer bug kill the event loop
            logger.error("error while processing runtime event: %r", ex)
def setup_dirs():
    """Create (if needed) and return the cache directory that holds the logfile.

    Honors ``$XDG_CACHE_HOME``, falling back to ``~/.cache``.
    """
    top_dir = os.path.abspath(os.path.expanduser(
        os.environ.get("XDG_CACHE_HOME", "~/.cache")))
    our_cache_dir = os.path.join(top_dir, PROJECT_NAME)
    os.makedirs(our_cache_dir, mode=0o775, exist_ok=True)
    return our_cache_dir
def humanize_bytes(bytesize, precision=2):
    """Format a byte count as a human readable string (e.g. ``1.50 kB``)."""
    if bytesize == 1:
        return '1 byte'

    abbrevs = ((1 << 50, 'PB'), (1 << 40, 'TB'), (1 << 30, 'GB'),
               (1 << 20, 'MB'), (1 << 10, 'kB'), (1, 'bytes'))
    # first unit whose factor fits; fall through to plain bytes
    factor, suffix = next(((f, s) for f, s in abbrevs if bytesize >= f),
                          abbrevs[-1])
    if factor == 1:
        precision = 0
    return '%.*f %s' % (precision, bytesize / float(factor), suffix)
def metadata_get(self, path, cached=True):
    """Look up *path* (a sequence of keys) in this object's inspect metadata.

    :raises NotAvailableAnymore: if docker no longer knows the object
    """
    try:
        return graceful_chain_get(self.inspect(cached=cached).response, *path)
    except docker.errors.NotFound:
        logger.warning("object %s is not available anymore", self)
        raise NotAvailableAnymore()
def unique_size(self):
    """Size of ONLY this layer: virtual size minus shared size.

    Returns 0 when the shared size is unknown.
    """
    if not self._virtual_size:
        self._virtual_size = graceful_chain_get(self.data, "VirtualSize", default=0)
    try:
        return self._virtual_size - self._shared_size
    except TypeError:
        # shared size not known (None)
        return 0
def image_id(self):
    """ID of the image this container was created from.

    Falls back to the inspect metadata when the listing data lacks 'ImageID'.
    """
    try:
        return self.data["ImageID"]
    except KeyError:
        return self.metadata_get(["Image"])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.