query
stringlengths 5
1.23k
| positive
stringlengths 53
15.2k
| id_
int64 0
252k
| task_name
stringlengths 87
242
| negative
listlengths 20
553
|
|---|---|---|---|---|
Returns a once - assembled dict of this module s storable attributes .
|
def defaults ( values = { } ) : if values : return values save_types = basestring , int , float , tuple , list , dict , type ( None ) for k , v in globals ( ) . items ( ) : if isinstance ( v , save_types ) and not k . startswith ( "_" ) : values [ k ] = v return values
| 2,100
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/conf.py#L297-L303
|
[
"def",
"_should_allocate_port",
"(",
"pid",
")",
":",
"if",
"pid",
"<=",
"0",
":",
"log",
".",
"info",
"(",
"'Not allocating a port to invalid pid'",
")",
"return",
"False",
"if",
"pid",
"==",
"1",
":",
"# The client probably meant to send us its parent pid but",
"# had been reparented to init.",
"log",
".",
"info",
"(",
"'Not allocating a port to init.'",
")",
"return",
"False",
"try",
":",
"os",
".",
"kill",
"(",
"pid",
",",
"0",
")",
"except",
"ProcessLookupError",
":",
"log",
".",
"info",
"(",
"'Not allocating a port to a non-existent process'",
")",
"return",
"False",
"return",
"True"
] |
Fix malformed pdf files when data are present after %%EOF
|
def fix_pdf ( pdf_file , destination ) : tmp = tempfile . NamedTemporaryFile ( ) with open ( tmp . name , 'wb' ) as output : with open ( pdf_file , "rb" ) as fh : for line in fh : output . write ( line ) if b'%%EOF' in line : break shutil . copy ( tmp . name , destination )
| 2,101
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/papers/tearpages.py#L24-L42
|
[
"def",
"similar_item_values",
"(",
"self",
",",
"key",
",",
"replaces",
")",
":",
"return",
"self",
".",
"_similar_item_values",
"(",
"0",
",",
"key",
",",
"self",
".",
"dct",
".",
"ROOT",
",",
"replaces",
")"
] |
Copy filename to a tempfile write pages to filename except the teared one .
|
def tearpage_backend ( filename , teared_pages = None ) : # Handle default argument if teared_pages is None : teared_pages = [ 0 ] # Copy the pdf to a tmp file with tempfile . NamedTemporaryFile ( ) as tmp : # Copy the input file to tmp shutil . copy ( filename , tmp . name ) # Read the copied pdf # TODO: Use with syntax try : input_file = PdfFileReader ( open ( tmp . name , 'rb' ) ) except PdfReadError : fix_pdf ( filename , tmp . name ) input_file = PdfFileReader ( open ( tmp . name , 'rb' ) ) # Seek for the number of pages num_pages = input_file . getNumPages ( ) # Write pages excepted the first one output_file = PdfFileWriter ( ) for i in range ( num_pages ) : if i in teared_pages : continue output_file . addPage ( input_file . getPage ( i ) ) tmp . close ( ) outputStream = open ( filename , "wb" ) output_file . write ( outputStream )
| 2,102
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/papers/tearpages.py#L45-L85
|
[
"def",
"create_dimension_groups",
"(",
"dimension_positions",
")",
":",
"dimension_groups",
"=",
"[",
"]",
"for",
"dim_group_label",
",",
"position",
"in",
"dimension_positions",
":",
"dim_group",
"=",
"DimensionGroup",
"(",
"dim_group_label",
",",
"position",
")",
"for",
"dim_label",
"in",
"nmrstarlib",
".",
"RESONANCE_CLASSES",
"[",
"dim_group_label",
"]",
":",
"dim_group",
".",
"dimensions",
".",
"append",
"(",
"Dimension",
"(",
"dim_label",
",",
"position",
")",
")",
"dimension_groups",
".",
"append",
"(",
"dim_group",
")",
"return",
"dimension_groups"
] |
Check whether a given paper needs some pages to be teared or not .
|
def tearpage_needed ( bibtex ) : for publisher in BAD_JOURNALS : if publisher in bibtex . get ( "journal" , "" ) . lower ( ) : # Bad journal is found, add pages to tear return BAD_JOURNALS [ publisher ] # If no bad journals are found, return an empty list return [ ]
| 2,103
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/papers/tearpages.py#L88-L102
|
[
"def",
"construct",
"(",
"self",
",",
"max_message_size",
",",
"remote_name",
"=",
"None",
",",
"python_path",
"=",
"None",
",",
"debug",
"=",
"False",
",",
"connect_timeout",
"=",
"None",
",",
"profiling",
"=",
"False",
",",
"unidirectional",
"=",
"False",
",",
"old_router",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"Stream",
",",
"self",
")",
".",
"construct",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"max_message_size",
"=",
"max_message_size",
"if",
"python_path",
":",
"self",
".",
"python_path",
"=",
"python_path",
"if",
"connect_timeout",
":",
"self",
".",
"connect_timeout",
"=",
"connect_timeout",
"if",
"remote_name",
"is",
"None",
":",
"remote_name",
"=",
"get_default_remote_name",
"(",
")",
"if",
"'/'",
"in",
"remote_name",
"or",
"'\\\\'",
"in",
"remote_name",
":",
"raise",
"ValueError",
"(",
"'remote_name= cannot contain slashes'",
")",
"self",
".",
"remote_name",
"=",
"remote_name",
"self",
".",
"debug",
"=",
"debug",
"self",
".",
"profiling",
"=",
"profiling",
"self",
".",
"unidirectional",
"=",
"unidirectional",
"self",
".",
"max_message_size",
"=",
"max_message_size",
"self",
".",
"connect_deadline",
"=",
"time",
".",
"time",
"(",
")",
"+",
"self",
".",
"connect_timeout"
] |
Tear some pages of the file if needed .
|
def tearpage ( filename , bibtex = None , force = None ) : # Fetch pages to tear pages_to_tear = [ ] if force is not None : pages_to_tear = force elif bibtex is not None : pages_to_tear = tearpage_needed ( bibtex ) if len ( pages_to_tear ) > 0 : # If tearing is needed, do it and return True tearpage_backend ( filename , teared_pages = pages_to_tear ) return True # Else, simply return False return False
| 2,104
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/papers/tearpages.py#L105-L130
|
[
"def",
"disconnect",
"(",
"self",
",",
"receiver",
")",
":",
"if",
"receiver",
"not",
"in",
"self",
".",
"receivers",
".",
"keys",
"(",
")",
":",
"raise",
"Exception",
"(",
"\"No receiver %s was registered\"",
"%",
"receiver",
")",
"self",
".",
"receivers",
"[",
"receiver",
"]",
".",
"disconnect",
"(",
")",
"del",
"(",
"self",
".",
"receivers",
"[",
"receiver",
"]",
")",
"self",
".",
"__log",
".",
"debug",
"(",
"\"Receiver %s disconnected\"",
"%",
"receiver",
")"
] |
Checks out a file into the default changelist
|
def edit ( filename , connection = None ) : c = connection or connect ( ) rev = c . ls ( filename ) if rev : rev [ 0 ] . edit ( )
| 2,105
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/api.py#L28-L39
|
[
"def",
"formatData",
"(",
"data",
")",
":",
"condensed",
"=",
"PyVDF",
".",
"__CondensedOutput",
"indentation",
"=",
"PyVDF",
".",
"__OutputIndentation",
"spacing",
"=",
"PyVDF",
".",
"__OutputSpacing",
"def",
"loop",
"(",
"array",
",",
"tab",
"=",
"''",
")",
":",
"string",
"=",
"''",
"for",
"k",
",",
"v",
"in",
"array",
".",
"items",
"(",
")",
":",
"string",
"+=",
"'{}\"{}\"'",
".",
"format",
"(",
"tab",
",",
"k",
")",
"if",
"isinstance",
"(",
"v",
",",
"dict",
")",
":",
"string",
"+=",
"'{}{{\\n{}{}}}\\n'",
".",
"format",
"(",
"''",
"if",
"condensed",
"else",
"'\\n'",
"+",
"tab",
",",
"loop",
"(",
"v",
",",
"tab",
"+",
"indentation",
")",
",",
"tab",
")",
"else",
":",
"string",
"+=",
"'{}\"{}\"\\n'",
".",
"format",
"(",
"spacing",
",",
"v",
")",
"return",
"string",
"return",
"loop",
"(",
"data",
")"
] |
Syncs a file
|
def sync ( filename , connection = None ) : c = connection or connect ( ) rev = c . ls ( filename ) if rev : rev [ 0 ] . sync ( )
| 2,106
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/api.py#L42-L53
|
[
"def",
"_try_to_release_dynamic_segment",
"(",
"self",
",",
"context",
",",
"migration",
"=",
"False",
")",
":",
"if",
"migration",
":",
"binding_levels",
"=",
"context",
".",
"original_binding_levels",
"else",
":",
"binding_levels",
"=",
"context",
".",
"binding_levels",
"LOG",
".",
"debug",
"(",
"\"_try_release_dynamic_segment: \"",
"\"binding_levels=%(bl)s\"",
",",
"{",
"'bl'",
":",
"binding_levels",
"}",
")",
"if",
"not",
"binding_levels",
":",
"return",
"for",
"prior_level",
",",
"binding",
"in",
"enumerate",
"(",
"binding_levels",
"[",
"1",
":",
"]",
")",
":",
"allocating_driver",
"=",
"binding_levels",
"[",
"prior_level",
"]",
".",
"get",
"(",
"driver_api",
".",
"BOUND_DRIVER",
")",
"if",
"allocating_driver",
"!=",
"a_const",
".",
"MECHANISM_DRV_NAME",
":",
"continue",
"bound_segment",
"=",
"binding",
".",
"get",
"(",
"driver_api",
".",
"BOUND_SEGMENT",
",",
"{",
"}",
")",
"segment_id",
"=",
"bound_segment",
".",
"get",
"(",
"'id'",
")",
"if",
"not",
"db_lib",
".",
"segment_is_dynamic",
"(",
"segment_id",
")",
":",
"continue",
"if",
"not",
"db_lib",
".",
"segment_bound",
"(",
"segment_id",
")",
":",
"context",
".",
"release_dynamic_segment",
"(",
"segment_id",
")",
"LOG",
".",
"debug",
"(",
"\"Released dynamic segment %(seg)s allocated \"",
"\"by %(drv)s\"",
",",
"{",
"'seg'",
":",
"segment_id",
",",
"'drv'",
":",
"allocating_driver",
"}",
")"
] |
Edits or Adds a filename ensuring the file is in perforce and editable
|
def open ( filename , connection = None ) : c = connection or connect ( ) res = c . ls ( filename ) if res and res [ 0 ] . revision : res [ 0 ] . edit ( ) else : c . add ( filename )
| 2,107
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/api.py#L81-L94
|
[
"def",
"__get_registry_key",
"(",
"self",
",",
"key",
")",
":",
"import",
"winreg",
"root",
"=",
"winreg",
".",
"OpenKey",
"(",
"winreg",
".",
"HKEY_CURRENT_USER",
",",
"r'SOFTWARE\\GSettings\\org\\gnucash\\general'",
",",
"0",
",",
"winreg",
".",
"KEY_READ",
")",
"[",
"pathname",
",",
"regtype",
"]",
"=",
"(",
"winreg",
".",
"QueryValueEx",
"(",
"root",
",",
"key",
")",
")",
"winreg",
".",
"CloseKey",
"(",
"root",
")",
"return",
"pathname"
] |
Check that a given arXiv ID is a valid one .
|
def is_valid ( arxiv_id ) : match = REGEX . match ( arxiv_id ) return ( match is not None ) and ( match . group ( 0 ) == arxiv_id )
| 2,108
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/repositories/arxiv.py#L235-L273
|
[
"def",
"init",
"(",
"path",
"=",
"None",
")",
":",
"default",
"=",
"get_default",
"(",
")",
"if",
"default",
"is",
"not",
"None",
"and",
"not",
"isinstance",
"(",
"default",
",",
"VoidLogKeeper",
")",
":",
"return",
"default",
"tee",
"=",
"LogTee",
"(",
")",
"set_default",
"(",
"tee",
")",
"FluLogKeeper",
".",
"init",
"(",
"path",
")",
"tee",
".",
"add_keeper",
"(",
"'flulog'",
",",
"FluLogKeeper",
"(",
")",
")",
"return",
"tee"
] |
Get a BibTeX entry for a given arXiv ID .
|
def get_bibtex ( arxiv_id ) : # Fetch bibtex using arxiv2bib module try : bibtex = arxiv2bib . arxiv2bib ( [ arxiv_id ] ) except HTTPError : bibtex = [ ] for bib in bibtex : if isinstance ( bib , arxiv2bib . ReferenceErrorInfo ) : continue else : # Return fetched bibtex return bib . bibtex ( ) # An error occurred, return None return None
| 2,109
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/repositories/arxiv.py#L276-L306
|
[
"def",
"set_notification_callback",
"(",
"self",
",",
"notification_cb",
")",
":",
"self",
".",
"_notification_cb",
"=",
"notification_cb",
"if",
"not",
"notification_cb",
"and",
"self",
".",
"dimensions",
":",
"self",
".",
"measured_value",
".",
"notify_value_set",
"=",
"None",
"return",
"self"
] |
Extract arXiv IDs from a text .
|
def extract_from_text ( text ) : # Remove the leading "arxiv:". return tools . remove_duplicates ( [ re . sub ( "arxiv:" , "" , i [ 0 ] , flags = re . IGNORECASE ) for i in REGEX . findall ( text ) if i [ 0 ] != '' ] )
| 2,110
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/repositories/arxiv.py#L309-L322
|
[
"def",
"merged",
"(",
"self",
")",
":",
"stats",
"=",
"{",
"}",
"for",
"topic",
"in",
"self",
".",
"client",
".",
"topics",
"(",
")",
"[",
"'topics'",
"]",
":",
"for",
"producer",
"in",
"self",
".",
"client",
".",
"lookup",
"(",
"topic",
")",
"[",
"'producers'",
"]",
":",
"hostname",
"=",
"producer",
"[",
"'broadcast_address'",
"]",
"port",
"=",
"producer",
"[",
"'http_port'",
"]",
"host",
"=",
"'%s_%s'",
"%",
"(",
"hostname",
",",
"port",
")",
"stats",
"[",
"host",
"]",
"=",
"nsqd",
".",
"Client",
"(",
"'http://%s:%s/'",
"%",
"(",
"hostname",
",",
"port",
")",
")",
".",
"clean_stats",
"(",
")",
"return",
"stats"
] |
Get the arXiv eprint id for a given DOI .
|
def from_doi ( doi ) : try : request = requests . get ( "http://export.arxiv.org/api/query" , params = { "search_query" : "doi:%s" % ( doi , ) , "max_results" : 1 } ) request . raise_for_status ( ) except RequestException : return None root = xml . etree . ElementTree . fromstring ( request . content ) for entry in root . iter ( "{http://www.w3.org/2005/Atom}entry" ) : arxiv_id = entry . find ( "{http://www.w3.org/2005/Atom}id" ) . text # arxiv_id is an arXiv full URL. We only want the id which is the last # URL component. return arxiv_id . split ( "/" ) [ - 1 ] return None
| 2,111
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/repositories/arxiv.py#L366-L397
|
[
"def",
"merge_config",
"(",
"self",
",",
"user_config",
")",
":",
"# provisioanlly update the default configurations with the user preferences",
"temp_data_config",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"data_config",
")",
".",
"update",
"(",
"user_config",
")",
"temp_model_config",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"model_config",
")",
".",
"update",
"(",
"user_config",
")",
"temp_conversation_config",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"conversation_config",
")",
".",
"update",
"(",
"user_config",
")",
"# if the new configurations validate, apply them",
"if",
"validate_data_config",
"(",
"temp_data_config",
")",
":",
"self",
".",
"data_config",
"=",
"temp_data_config",
"if",
"validate_model_config",
"(",
"temp_model_config",
")",
":",
"self",
".",
"model_config",
"=",
"temp_model_config",
"if",
"validate_conversation_config",
"(",
"temp_conversation_config",
")",
":",
"self",
".",
"conversation_config",
"=",
"temp_conversation_config"
] |
Download sources on arXiv for a given preprint .
|
def get_sources ( arxiv_id ) : try : request = requests . get ( ARXIV_EPRINT_URL . format ( arxiv_id = arxiv_id ) ) request . raise_for_status ( ) file_object = io . BytesIO ( request . content ) return tarfile . open ( fileobj = file_object ) except ( RequestException , AssertionError , tarfile . TarError ) : return None
| 2,112
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/repositories/arxiv.py#L435-L455
|
[
"def",
"rebalance_replication_groups",
"(",
"self",
")",
":",
"# Balance replicas over replication-groups for each partition",
"if",
"any",
"(",
"b",
".",
"inactive",
"for",
"b",
"in",
"six",
".",
"itervalues",
"(",
"self",
".",
"cluster_topology",
".",
"brokers",
")",
")",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"Impossible to rebalance replication groups because of inactive \"",
"\"brokers.\"",
")",
"raise",
"RebalanceError",
"(",
"\"Impossible to rebalance replication groups because of inactive \"",
"\"brokers\"",
")",
"# Balance replica-count over replication-groups",
"self",
".",
"rebalance_replicas",
"(",
")",
"# Balance partition-count over replication-groups",
"self",
".",
"_rebalance_groups_partition_cnt",
"(",
")"
] |
Extract semantic date information from an input string . This is a convenience method which would only be used if you d rather not initialize a DateService object .
|
def extractDates ( inp , tz = None , now = None ) : service = DateService ( tz = tz , now = now ) return service . extractDates ( inp )
| 2,113
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/dates.py#L476-L495
|
[
"def",
"clear",
"(",
"self",
")",
":",
"io_loop",
"=",
"IOLoop",
".",
"current",
"(",
")",
"while",
"self",
".",
"_tombstones",
":",
"_",
",",
"req_timeout",
"=",
"self",
".",
"_tombstones",
".",
"popitem",
"(",
")",
"io_loop",
".",
"remove_timeout",
"(",
"req_timeout",
")"
] |
Extracts time - related information from an input string . Ignores any information related to the specific date focusing on the time - of - day .
|
def extractTimes ( self , inp ) : def handleMatch ( time ) : relative = False if not time : return None # Default times: 8am, 12pm, 7pm elif time . group ( 1 ) == 'morning' : h = 8 m = 0 elif time . group ( 1 ) == 'afternoon' : h = 12 m = 0 elif time . group ( 1 ) == 'evening' : h = 19 m = 0 elif time . group ( 4 ) and time . group ( 5 ) : h , m = 0 , 0 # Extract hours difference converter = NumberService ( ) try : diff = converter . parse ( time . group ( 4 ) ) except : return None if time . group ( 5 ) == 'hours' : h += diff else : m += diff # Extract minutes difference if time . group ( 6 ) : converter = NumberService ( ) try : diff = converter . parse ( time . group ( 7 ) ) except : return None if time . group ( 8 ) == 'hours' : h += diff else : m += diff relative = True else : # Convert from "HH:MM pm" format t = time . group ( 2 ) h , m = int ( t . split ( ':' ) [ 0 ] ) % 12 , int ( t . split ( ':' ) [ 1 ] ) try : if time . group ( 3 ) == 'pm' : h += 12 except IndexError : pass if relative : return self . now + datetime . timedelta ( hours = h , minutes = m ) else : return datetime . datetime ( self . now . year , self . now . month , self . now . day , h , m ) inp = self . _preprocess ( inp ) return [ handleMatch ( time ) for time in self . _timeRegex . finditer ( inp ) ]
| 2,114
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/dates.py#L263-L339
|
[
"def",
"restore_checkpoint",
"(",
"filename",
")",
":",
"with",
"gzip",
".",
"open",
"(",
"filename",
")",
"as",
"f",
":",
"generation",
",",
"config",
",",
"population",
",",
"species_set",
",",
"rndstate",
"=",
"pickle",
".",
"load",
"(",
"f",
")",
"random",
".",
"setstate",
"(",
"rndstate",
")",
"return",
"Population",
"(",
"config",
",",
"(",
"population",
",",
"species_set",
",",
"generation",
")",
")"
] |
Extract semantic date information from an input string . In effect runs both parseDay and parseTime on the input string and merges the results to produce a comprehensive datetime object .
|
def extractDates ( self , inp ) : def merge ( param ) : day , time = param if not ( day or time ) : return None if not day : return time if not time : return day return datetime . datetime ( day . year , day . month , day . day , time . hour , time . minute ) days = self . extractDays ( inp ) times = self . extractTimes ( inp ) return map ( merge , zip_longest ( days , times , fillvalue = None ) )
| 2,115
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/dates.py#L349-L378
|
[
"def",
"undefine",
"(",
"self",
")",
":",
"if",
"lib",
".",
"EnvUndefgeneric",
"(",
"self",
".",
"_env",
",",
"self",
".",
"_gnc",
")",
"!=",
"1",
":",
"raise",
"CLIPSError",
"(",
"self",
".",
"_env",
")",
"self",
".",
"_env",
"=",
"None"
] |
Returns the first date found in the input string or None if not found .
|
def extractDate ( self , inp ) : dates = self . extractDates ( inp ) for date in dates : return date return None
| 2,116
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/dates.py#L380-L386
|
[
"def",
"crypto_box_keypair",
"(",
")",
":",
"pk",
"=",
"ffi",
".",
"new",
"(",
"\"unsigned char[]\"",
",",
"crypto_box_PUBLICKEYBYTES",
")",
"sk",
"=",
"ffi",
".",
"new",
"(",
"\"unsigned char[]\"",
",",
"crypto_box_SECRETKEYBYTES",
")",
"rc",
"=",
"lib",
".",
"crypto_box_keypair",
"(",
"pk",
",",
"sk",
")",
"ensure",
"(",
"rc",
"==",
"0",
",",
"'Unexpected library error'",
",",
"raising",
"=",
"exc",
".",
"RuntimeError",
")",
"return",
"(",
"ffi",
".",
"buffer",
"(",
"pk",
",",
"crypto_box_PUBLICKEYBYTES",
")",
"[",
":",
"]",
",",
"ffi",
".",
"buffer",
"(",
"sk",
",",
"crypto_box_SECRETKEYBYTES",
")",
"[",
":",
"]",
",",
")"
] |
Convert a datetime object representing a day into a human - ready string that can be read spoken aloud etc .
|
def convertDay ( self , day , prefix = "" , weekday = False ) : def sameDay ( d1 , d2 ) : d = d1 . day == d2 . day m = d1 . month == d2 . month y = d1 . year == d2 . year return d and m and y tom = self . now + datetime . timedelta ( days = 1 ) if sameDay ( day , self . now ) : return "today" elif sameDay ( day , tom ) : return "tomorrow" if weekday : dayString = day . strftime ( "%A, %B %d" ) else : dayString = day . strftime ( "%B %d" ) # Ex) Remove '0' from 'August 03' if not int ( dayString [ - 2 ] ) : dayString = dayString [ : - 2 ] + dayString [ - 1 ] return prefix + " " + dayString
| 2,117
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/dates.py#L388-L427
|
[
"def",
"node_intersection",
"(",
"graphs",
")",
":",
"graphs",
"=",
"tuple",
"(",
"graphs",
")",
"n_graphs",
"=",
"len",
"(",
"graphs",
")",
"if",
"n_graphs",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"'no graphs given'",
")",
"if",
"n_graphs",
"==",
"1",
":",
"return",
"graphs",
"[",
"0",
"]",
"nodes",
"=",
"set",
"(",
"graphs",
"[",
"0",
"]",
".",
"nodes",
"(",
")",
")",
"for",
"graph",
"in",
"graphs",
"[",
"1",
":",
"]",
":",
"nodes",
".",
"intersection_update",
"(",
"graph",
")",
"return",
"union",
"(",
"subgraph",
"(",
"graph",
",",
"nodes",
")",
"for",
"graph",
"in",
"graphs",
")"
] |
Convert a datetime object representing a time into a human - ready string that can be read spoken aloud etc .
|
def convertTime ( self , time ) : # if ':00', ignore reporting minutes m_format = "" if time . minute : m_format = ":%M" timeString = time . strftime ( "%I" + m_format + " %p" ) # if '07:30', cast to '7:30' if not int ( timeString [ 0 ] ) : timeString = timeString [ 1 : ] return timeString
| 2,118
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/dates.py#L429-L451
|
[
"def",
"node_intersection",
"(",
"graphs",
")",
":",
"graphs",
"=",
"tuple",
"(",
"graphs",
")",
"n_graphs",
"=",
"len",
"(",
"graphs",
")",
"if",
"n_graphs",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"'no graphs given'",
")",
"if",
"n_graphs",
"==",
"1",
":",
"return",
"graphs",
"[",
"0",
"]",
"nodes",
"=",
"set",
"(",
"graphs",
"[",
"0",
"]",
".",
"nodes",
"(",
")",
")",
"for",
"graph",
"in",
"graphs",
"[",
"1",
":",
"]",
":",
"nodes",
".",
"intersection_update",
"(",
"graph",
")",
"return",
"union",
"(",
"subgraph",
"(",
"graph",
",",
"nodes",
")",
"for",
"graph",
"in",
"graphs",
")"
] |
Convert a datetime object representing into a human - ready string that can be read spoken aloud etc . In effect runs both convertDay and convertTime on the input merging the results .
|
def convertDate ( self , date , prefix = "" , weekday = False ) : dayString = self . convertDay ( date , prefix = prefix , weekday = weekday ) timeString = self . convertTime ( date ) return dayString + " at " + timeString
| 2,119
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/dates.py#L453-L473
|
[
"def",
"has_nvme_ssd",
"(",
"self",
")",
":",
"for",
"member",
"in",
"self",
".",
"_drives_list",
"(",
")",
":",
"if",
"(",
"member",
".",
"media_type",
"==",
"constants",
".",
"MEDIA_TYPE_SSD",
"and",
"member",
".",
"protocol",
"==",
"constants",
".",
"PROTOCOL_NVMe",
")",
":",
"return",
"True",
"return",
"False"
] |
Called during a PUT request where the action specifies a move operation . Returns resource URI of the destination file .
|
def _move ( self ) : newpath = self . action [ 'newpath' ] try : self . fs . move ( self . fp , newpath ) except OSError : raise tornado . web . HTTPError ( 400 ) return newpath
| 2,120
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L32-L42
|
[
"async",
"def",
"add_unknown_id",
"(",
"self",
",",
"unknown_id",
",",
"timeout",
"=",
"OTGW_DEFAULT_TIMEOUT",
")",
":",
"cmd",
"=",
"OTGW_CMD_UNKNOWN_ID",
"unknown_id",
"=",
"int",
"(",
"unknown_id",
")",
"if",
"unknown_id",
"<",
"1",
"or",
"unknown_id",
">",
"255",
":",
"return",
"None",
"ret",
"=",
"await",
"self",
".",
"_wait_for_cmd",
"(",
"cmd",
",",
"unknown_id",
",",
"timeout",
")",
"if",
"ret",
"is",
"not",
"None",
":",
"return",
"int",
"(",
"ret",
")"
] |
Called during a PUT request where the action specifies a copy operation . Returns resource URI of the new file .
|
def _copy ( self ) : copypath = self . action [ 'copypath' ] try : self . fs . copy ( self . fp , copypath ) except OSError : raise tornado . web . HTTPError ( 400 ) return copypath
| 2,121
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L44-L54
|
[
"def",
"devices",
"(",
"self",
",",
"timeout",
"=",
"None",
")",
":",
"# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw",
"# from Android system/core/adb/transport.c statename()",
"re_device_info",
"=",
"re",
".",
"compile",
"(",
"r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'",
")",
"devices",
"=",
"[",
"]",
"lines",
"=",
"self",
".",
"command_output",
"(",
"[",
"\"devices\"",
",",
"\"-l\"",
"]",
",",
"timeout",
"=",
"timeout",
")",
".",
"split",
"(",
"'\\n'",
")",
"for",
"line",
"in",
"lines",
":",
"if",
"line",
"==",
"'List of devices attached '",
":",
"continue",
"match",
"=",
"re_device_info",
".",
"match",
"(",
"line",
")",
"if",
"match",
":",
"device",
"=",
"{",
"'device_serial'",
":",
"match",
".",
"group",
"(",
"1",
")",
",",
"'state'",
":",
"match",
".",
"group",
"(",
"2",
")",
"}",
"remainder",
"=",
"line",
"[",
"match",
".",
"end",
"(",
"2",
")",
":",
"]",
".",
"strip",
"(",
")",
"if",
"remainder",
":",
"try",
":",
"device",
".",
"update",
"(",
"dict",
"(",
"[",
"j",
".",
"split",
"(",
"':'",
")",
"for",
"j",
"in",
"remainder",
".",
"split",
"(",
"' '",
")",
"]",
")",
")",
"except",
"ValueError",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"'devices: Unable to parse '",
"'remainder for device %s'",
"%",
"line",
")",
"devices",
".",
"append",
"(",
"device",
")",
"return",
"devices"
] |
Called during a PUT request where the action specifies a rename operation . Returns resource URI of the renamed file .
|
def _rename ( self ) : newname = self . action [ 'newname' ] try : newpath = self . fs . rename ( self . fp , newname ) except OSError : raise tornado . web . HTTPError ( 400 ) return newpath
| 2,122
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L56-L66
|
[
"def",
"fetch",
"(",
"self",
")",
":",
"from",
"neobolt",
".",
"exceptions",
"import",
"ConnectionExpired",
"if",
"self",
".",
"_connection",
":",
"try",
":",
"detail_count",
",",
"_",
"=",
"self",
".",
"_connection",
".",
"fetch",
"(",
")",
"except",
"ConnectionExpired",
"as",
"error",
":",
"raise",
"SessionExpired",
"(",
"*",
"error",
".",
"args",
")",
"else",
":",
"return",
"detail_count",
"return",
"0"
] |
Return details for the filesystem including configured volumes .
|
def get ( self ) : res = self . fs . get_filesystem_details ( ) res = res . to_dict ( ) self . write ( res )
| 2,123
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L69-L75
|
[
"def",
"StoreCSRFCookie",
"(",
"user",
",",
"response",
")",
":",
"csrf_token",
"=",
"GenerateCSRFToken",
"(",
"user",
",",
"None",
")",
"response",
".",
"set_cookie",
"(",
"\"csrftoken\"",
",",
"csrf_token",
",",
"max_age",
"=",
"CSRF_TOKEN_DURATION",
".",
"seconds",
")"
] |
Provides move copy and rename functionality . An action must be specified when calling this method .
|
def put ( self ) : self . fp = self . get_body_argument ( 'filepath' ) self . action = self . get_body_argument ( 'action' ) try : ptype = self . fs . get_type_from_path ( self . fp ) except OSError : raise tornado . web . HTTPError ( 404 ) if ptype == 'directory' : self . handler_name = 'filesystem:directories-details' else : self . handler_name = 'filesystem:files-details' if self . action [ 'action' ] == 'move' : newpath = self . _move ( ) self . write ( { 'filepath' : newpath } ) elif self . action [ 'action' ] == 'copy' : newpath = self . _copy ( ) self . write ( { 'filepath' : newpath } ) elif self . action [ 'action' ] == 'rename' : newpath = self . _rename ( ) self . write ( { 'filepath' : newpath } ) else : raise tornado . web . HTTPError ( 400 )
| 2,124
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L78-L105
|
[
"def",
"get_cuda_visible_devices",
"(",
")",
":",
"gpu_ids_str",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"\"CUDA_VISIBLE_DEVICES\"",
",",
"None",
")",
"if",
"gpu_ids_str",
"is",
"None",
":",
"return",
"None",
"if",
"gpu_ids_str",
"==",
"\"\"",
":",
"return",
"[",
"]",
"return",
"[",
"int",
"(",
"i",
")",
"for",
"i",
"in",
"gpu_ids_str",
".",
"split",
"(",
"\",\"",
")",
"]"
] |
Start a new filewatcher at the specified path .
|
def post ( self , * args ) : filepath = self . get_body_argument ( 'filepath' ) if not self . fs . exists ( filepath ) : raise tornado . web . HTTPError ( 404 ) Filewatcher . add_directory_to_watch ( filepath ) self . write ( { 'msg' : 'Watcher added for {}' . format ( filepath ) } )
| 2,125
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L113-L122
|
[
"def",
"alias_gradebook",
"(",
"self",
",",
"gradebook_id",
",",
"alias_id",
")",
":",
"# Implemented from template for",
"# osid.resource.BinLookupSession.alias_bin_template",
"if",
"self",
".",
"_catalog_session",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_catalog_session",
".",
"alias_catalog",
"(",
"catalog_id",
"=",
"gradebook_id",
",",
"alias_id",
"=",
"alias_id",
")",
"self",
".",
"_alias_id",
"(",
"primary_id",
"=",
"gradebook_id",
",",
"equivalent_id",
"=",
"alias_id",
")"
] |
Stop and delete the specified filewatcher .
|
def delete ( self , filepath ) : Filewatcher . remove_directory_to_watch ( filepath ) self . write ( { 'msg' : 'Watcher deleted for {}' . format ( filepath ) } )
| 2,126
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L129-L134
|
[
"def",
"show_halogen",
"(",
"self",
")",
":",
"grp",
"=",
"self",
".",
"getPseudoBondGroup",
"(",
"\"HalogenBonds-%i\"",
"%",
"self",
".",
"tid",
",",
"associateWith",
"=",
"[",
"self",
".",
"model",
"]",
")",
"grp",
".",
"lineWidth",
"=",
"3",
"for",
"i",
"in",
"self",
".",
"plcomplex",
".",
"halogen_bonds",
":",
"b",
"=",
"grp",
".",
"newPseudoBond",
"(",
"self",
".",
"atoms",
"[",
"i",
"[",
"0",
"]",
"]",
",",
"self",
".",
"atoms",
"[",
"i",
"[",
"1",
"]",
"]",
")",
"b",
".",
"color",
"=",
"self",
".",
"colorbyname",
"(",
"'turquoise'",
")",
"self",
".",
"bs_res_ids",
".",
"append",
"(",
"i",
".",
"acc_id",
")"
] |
Get file details for the specified file .
|
def get ( self , filepath ) : try : res = self . fs . get_file_details ( filepath ) res = res . to_dict ( ) self . write ( res ) except OSError : raise tornado . web . HTTPError ( 404 )
| 2,127
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L144-L153
|
[
"def",
"user_deleted_from_site_event",
"(",
"event",
")",
":",
"userid",
"=",
"event",
".",
"principal",
"catalog",
"=",
"api",
".",
"portal",
".",
"get_tool",
"(",
"'portal_catalog'",
")",
"query",
"=",
"{",
"'object_provides'",
":",
"WORKSPACE_INTERFACE",
"}",
"query",
"[",
"'workspace_members'",
"]",
"=",
"userid",
"workspaces",
"=",
"[",
"IWorkspace",
"(",
"b",
".",
"_unrestrictedGetObject",
"(",
")",
")",
"for",
"b",
"in",
"catalog",
".",
"unrestrictedSearchResults",
"(",
"query",
")",
"]",
"for",
"workspace",
"in",
"workspaces",
":",
"workspace",
".",
"remove_from_team",
"(",
"userid",
")"
] |
Change the group or permissions of the specified file . Action must be specified when calling this method .
|
def put ( self , filepath ) : action = self . get_body_argument ( 'action' ) if action [ 'action' ] == 'update_group' : newgrp = action [ 'group' ] try : self . fs . update_group ( filepath , newgrp ) self . write ( { 'msg' : 'Updated group for {}' . format ( filepath ) } ) except OSError : raise tornado . web . HTTPError ( 404 ) elif action [ 'action' ] == 'update_permissions' : newperms = action [ 'permissions' ] try : self . fs . update_permissions ( filepath , newperms ) self . write ( { 'msg' : 'Updated permissions for {}' . format ( filepath ) } ) except OSError : raise tornado . web . HTTPError ( 404 ) else : raise tornado . web . HTTPError ( 400 )
| 2,128
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L156-L178
|
[
"def",
"createExternalTable",
"(",
"self",
",",
"tableName",
",",
"path",
"=",
"None",
",",
"source",
"=",
"None",
",",
"schema",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"return",
"self",
".",
"sparkSession",
".",
"catalog",
".",
"createExternalTable",
"(",
"tableName",
",",
"path",
",",
"source",
",",
"schema",
",",
"*",
"*",
"options",
")"
] |
Delete the specified file .
|
def delete ( self , filepath ) : try : self . fs . delete ( filepath ) self . write ( { 'msg' : 'File deleted at {}' . format ( filepath ) } ) except OSError : raise tornado . web . HTTPError ( 404 )
| 2,129
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L181-L189
|
[
"def",
"_get_site_response_term",
"(",
"self",
",",
"C",
",",
"imt",
",",
"vs30",
",",
"sa1180",
")",
":",
"# vs30 star",
"vs30_star",
"=",
"self",
".",
"_get_vs30star",
"(",
"vs30",
",",
"imt",
")",
"# compute the site term",
"site_resp_term",
"=",
"np",
".",
"zeros_like",
"(",
"vs30",
")",
"gt_vlin",
"=",
"vs30",
">=",
"C",
"[",
"'vlin'",
"]",
"lw_vlin",
"=",
"vs30",
"<",
"C",
"[",
"'vlin'",
"]",
"# compute site response term for sites with vs30 greater than vlin",
"vs30_rat",
"=",
"vs30_star",
"/",
"C",
"[",
"'vlin'",
"]",
"site_resp_term",
"[",
"gt_vlin",
"]",
"=",
"(",
"(",
"C",
"[",
"'a10'",
"]",
"+",
"C",
"[",
"'b'",
"]",
"*",
"self",
".",
"CONSTS",
"[",
"'n'",
"]",
")",
"*",
"np",
".",
"log",
"(",
"vs30_rat",
"[",
"gt_vlin",
"]",
")",
")",
"# compute site response term for sites with vs30 lower than vlin",
"site_resp_term",
"[",
"lw_vlin",
"]",
"=",
"(",
"C",
"[",
"'a10'",
"]",
"*",
"np",
".",
"log",
"(",
"vs30_rat",
"[",
"lw_vlin",
"]",
")",
"-",
"C",
"[",
"'b'",
"]",
"*",
"np",
".",
"log",
"(",
"sa1180",
"[",
"lw_vlin",
"]",
"+",
"C",
"[",
"'c'",
"]",
")",
"+",
"C",
"[",
"'b'",
"]",
"*",
"np",
".",
"log",
"(",
"sa1180",
"[",
"lw_vlin",
"]",
"+",
"C",
"[",
"'c'",
"]",
"*",
"vs30_rat",
"[",
"lw_vlin",
"]",
"**",
"self",
".",
"CONSTS",
"[",
"'n'",
"]",
")",
")",
"return",
"site_resp_term"
] |
Create a new directory at the specified path .
|
def post ( self ) : filepath = self . get_body_argument ( 'filepath' ) try : self . fs . create_directory ( filepath ) encoded_filepath = tornado . escape . url_escape ( filepath , plus = True ) resource_uri = self . reverse_url ( 'filesystem:directories-details' , encoded_filepath ) self . write ( { 'uri' : resource_uri } ) except OSError : raise tornado . web . HTTPError ( 404 )
| 2,130
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L255-L267
|
[
"def",
"get_correlated_report_ids",
"(",
"self",
",",
"indicators",
")",
":",
"params",
"=",
"{",
"'indicators'",
":",
"indicators",
"}",
"resp",
"=",
"self",
".",
"_client",
".",
"get",
"(",
"\"reports/correlate\"",
",",
"params",
"=",
"params",
")",
"return",
"resp",
".",
"json",
"(",
")"
] |
Get the contents of the specified file .
|
def get ( self , filepath ) : exists = self . fs . exists ( filepath ) if exists : mime = magic . Magic ( mime = True ) mime_type = mime . from_file ( filepath ) if mime_type in self . unsupported_types : self . set_status ( 204 ) return else : contents = self . fs . read_file ( filepath ) self . write ( { 'filepath' : filepath , 'contents' : contents } ) else : raise tornado . web . HTTPError ( 404 )
| 2,131
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L277-L292
|
[
"def",
"user_deleted_from_site_event",
"(",
"event",
")",
":",
"userid",
"=",
"event",
".",
"principal",
"catalog",
"=",
"api",
".",
"portal",
".",
"get_tool",
"(",
"'portal_catalog'",
")",
"query",
"=",
"{",
"'object_provides'",
":",
"WORKSPACE_INTERFACE",
"}",
"query",
"[",
"'workspace_members'",
"]",
"=",
"userid",
"workspaces",
"=",
"[",
"IWorkspace",
"(",
"b",
".",
"_unrestrictedGetObject",
"(",
")",
")",
"for",
"b",
"in",
"catalog",
".",
"unrestrictedSearchResults",
"(",
"query",
")",
"]",
"for",
"workspace",
"in",
"workspaces",
":",
"workspace",
".",
"remove_from_team",
"(",
"userid",
")"
] |
Write the given contents to the specified file . This is not an append all file contents will be replaced by the contents given .
|
def post ( self , filepath ) : try : content = self . get_body_argument ( 'content' ) self . fs . write_file ( filepath , content ) self . write ( { 'msg' : 'Updated file at {}' . format ( filepath ) } ) except OSError : raise tornado . web . HTTPError ( 404 )
| 2,132
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L295-L306
|
[
"def",
"getlist",
"(",
"self",
",",
"key",
":",
"'Entity'",
")",
"->",
"Sequence",
"[",
"object",
"]",
":",
"if",
"not",
"(",
"isinstance",
"(",
"key",
",",
"type",
"(",
"self",
")",
")",
"and",
"key",
".",
"type",
"is",
"EntityType",
".",
"property",
")",
":",
"return",
"[",
"]",
"claims_map",
"=",
"self",
".",
"attributes",
".",
"get",
"(",
"'claims'",
")",
"or",
"{",
"}",
"assert",
"isinstance",
"(",
"claims_map",
",",
"collections",
".",
"abc",
".",
"Mapping",
")",
"claims",
"=",
"claims_map",
".",
"get",
"(",
"key",
".",
"id",
",",
"[",
"]",
")",
"claims",
".",
"sort",
"(",
"key",
"=",
"lambda",
"claim",
":",
"claim",
"[",
"'rank'",
"]",
",",
"# FIXME",
"reverse",
"=",
"True",
")",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
"+",
"'.Entity.getitem'",
")",
"if",
"logger",
".",
"isEnabledFor",
"(",
"logging",
".",
"DEBUG",
")",
":",
"logger",
".",
"debug",
"(",
"'claim data: %s'",
",",
"__import__",
"(",
"'pprint'",
")",
".",
"pformat",
"(",
"claims",
")",
")",
"decode",
"=",
"self",
".",
"client",
".",
"decode_datavalue",
"return",
"[",
"decode",
"(",
"snak",
"[",
"'datatype'",
"]",
",",
"snak",
"[",
"'datavalue'",
"]",
")",
"for",
"snak",
"in",
"(",
"claim",
"[",
"'mainsnak'",
"]",
"for",
"claim",
"in",
"claims",
")",
"]"
] |
Retrieve the content of the requested resource which is located at the given absolute path . This method should either return a byte string or an iterator of byte strings . The latter is preferred for large files as it helps reduce memory fragmentation .
|
def get_content ( self , start = None , end = None ) : with open ( self . filepath , "rb" ) as file : if start is not None : file . seek ( start ) if end is not None : remaining = end - ( start or 0 ) else : remaining = None while True : chunk_size = 64 * 1024 if remaining is not None and remaining < chunk_size : chunk_size = remaining chunk = file . read ( chunk_size ) if chunk : if remaining is not None : remaining -= len ( chunk ) yield chunk else : if remaining is not None : assert remaining == 0 return
| 2,133
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L387-L414
|
[
"def",
"new_cast_status",
"(",
"self",
",",
"status",
")",
":",
"self",
".",
"status",
"=",
"status",
"if",
"status",
":",
"self",
".",
"status_event",
".",
"set",
"(",
")"
] |
Sets the content headers on the response .
|
def set_headers ( self ) : self . set_header ( "Accept-Ranges" , "bytes" ) content_type = self . get_content_type ( ) if content_type : self . set_header ( "Content-Type" , content_type )
| 2,134
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/handlers.py#L416-L424
|
[
"def",
"catalogFactory",
"(",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"fn",
"=",
"lambda",
"member",
":",
"inspect",
".",
"isclass",
"(",
"member",
")",
"and",
"member",
".",
"__module__",
"==",
"__name__",
"catalogs",
"=",
"odict",
"(",
"inspect",
".",
"getmembers",
"(",
"sys",
".",
"modules",
"[",
"__name__",
"]",
",",
"fn",
")",
")",
"if",
"name",
"not",
"in",
"list",
"(",
"catalogs",
".",
"keys",
"(",
")",
")",
":",
"msg",
"=",
"\"%s not found in catalogs:\\n %s\"",
"%",
"(",
"name",
",",
"list",
"(",
"kernels",
".",
"keys",
"(",
")",
")",
")",
"logger",
".",
"error",
"(",
"msg",
")",
"msg",
"=",
"\"Unrecognized catalog: %s\"",
"%",
"name",
"raise",
"Exception",
"(",
"msg",
")",
"return",
"catalogs",
"[",
"name",
"]",
"(",
"*",
"*",
"kwargs",
")"
] |
Callback which gets executed if the signal plugin_deactivate_post was send by the plugin .
|
def __deactivate_shared_objects ( self , plugin , * args , * * kwargs ) : shared_objects = self . get ( ) for shared_object in shared_objects . keys ( ) : self . unregister ( shared_object )
| 2,135
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_shared_objects_pattern.py#L59-L65
|
[
"def",
"get_conflicts",
"(",
"self",
")",
":",
"conflicts",
"=",
"[",
"]",
"if",
"self",
".",
"_array",
"and",
"self",
".",
"_range",
":",
"conflicts",
".",
"append",
"(",
"'cannot use range expressions on arrays'",
")",
"return",
"conflicts"
] |
Returns requested shared objects which were registered by the current plugin .
|
def get ( self , name = None ) : return self . app . shared_objects . get ( name , self . plugin )
| 2,136
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_shared_objects_pattern.py#L89-L100
|
[
"def",
"add_deviation",
"(",
"self",
",",
"dev",
",",
"td",
"=",
"None",
")",
":",
"self",
".",
"deviation",
"=",
"dev",
"try",
":",
"self",
".",
"compute_position_log",
"(",
"td",
"=",
"td",
")",
"except",
":",
"self",
".",
"position",
"=",
"None",
"return"
] |
Returns requested shared objects .
|
def get ( self , name = None , plugin = None ) : if plugin is not None : if name is None : shared_objects_list = { } for key in self . _shared_objects . keys ( ) : if self . _shared_objects [ key ] . plugin == plugin : shared_objects_list [ key ] = self . _shared_objects [ key ] return shared_objects_list else : if name in self . _shared_objects . keys ( ) : if self . _shared_objects [ name ] . plugin == plugin : return self . _shared_objects [ name ] else : return None else : return None else : if name is None : return self . _shared_objects else : if name in self . _shared_objects . keys ( ) : return self . _shared_objects [ name ] else : return None
| 2,137
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_shared_objects_pattern.py#L127-L158
|
[
"def",
"audit_customer_subscription",
"(",
"customer",
",",
"unknown",
"=",
"True",
")",
":",
"if",
"(",
"hasattr",
"(",
"customer",
",",
"'suspended'",
")",
"and",
"customer",
".",
"suspended",
")",
":",
"result",
"=",
"AUDIT_RESULTS",
"[",
"'suspended'",
"]",
"else",
":",
"if",
"hasattr",
"(",
"customer",
",",
"'subscription'",
")",
":",
"try",
":",
"result",
"=",
"AUDIT_RESULTS",
"[",
"customer",
".",
"subscription",
".",
"status",
"]",
"except",
"KeyError",
",",
"err",
":",
"# TODO should this be a more specific exception class?",
"raise",
"Exception",
"(",
"\"Unable to locate a result set for \\\nsubscription status %s in ZEBRA_AUDIT_RESULTS\"",
")",
"%",
"str",
"(",
"err",
")",
"else",
":",
"result",
"=",
"AUDIT_RESULTS",
"[",
"'no_subscription'",
"]",
"return",
"result"
] |
Unregisters an existing shared object so that this shared object is no longer available .
|
def unregister ( self , shared_object ) : if shared_object not in self . _shared_objects . keys ( ) : self . log . warning ( "Can not unregister shared object %s" % shared_object ) else : del ( self . _shared_objects [ shared_object ] ) self . log . debug ( "Shared object %s got unregistered" % shared_object )
| 2,138
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_shared_objects_pattern.py#L193-L205
|
[
"def",
"_filter_by_normal",
"(",
"tumor_counts",
",",
"normal_counts",
",",
"data",
")",
":",
"from",
"bcbio",
".",
"heterogeneity",
"import",
"bubbletree",
"fparams",
"=",
"bubbletree",
".",
"NORMAL_FILTER_PARAMS",
"tumor_out",
"=",
"\"%s-normfilter%s\"",
"%",
"utils",
".",
"splitext_plus",
"(",
"tumor_counts",
")",
"normal_out",
"=",
"\"%s-normfilter%s\"",
"%",
"utils",
".",
"splitext_plus",
"(",
"normal_counts",
")",
"if",
"not",
"utils",
".",
"file_uptodate",
"(",
"tumor_out",
",",
"tumor_counts",
")",
":",
"with",
"file_transaction",
"(",
"data",
",",
"tumor_out",
",",
"normal_out",
")",
"as",
"(",
"tx_tumor_out",
",",
"tx_normal_out",
")",
":",
"median_depth",
"=",
"_get_normal_median_depth",
"(",
"normal_counts",
")",
"min_normal_depth",
"=",
"median_depth",
"*",
"fparams",
"[",
"\"min_depth_percent\"",
"]",
"max_normal_depth",
"=",
"median_depth",
"*",
"fparams",
"[",
"\"max_depth_percent\"",
"]",
"with",
"open",
"(",
"tumor_counts",
")",
"as",
"tumor_handle",
":",
"with",
"open",
"(",
"normal_counts",
")",
"as",
"normal_handle",
":",
"with",
"open",
"(",
"tx_tumor_out",
",",
"\"w\"",
")",
"as",
"tumor_out_handle",
":",
"with",
"open",
"(",
"tx_normal_out",
",",
"\"w\"",
")",
"as",
"normal_out_handle",
":",
"header",
"=",
"None",
"for",
"t",
",",
"n",
"in",
"zip",
"(",
"tumor_handle",
",",
"normal_handle",
")",
":",
"if",
"header",
"is",
"None",
":",
"if",
"not",
"n",
".",
"startswith",
"(",
"\"@\"",
")",
":",
"header",
"=",
"n",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"tumor_out_handle",
".",
"write",
"(",
"t",
")",
"normal_out_handle",
".",
"write",
"(",
"n",
")",
"elif",
"(",
"_normal_passes_depth",
"(",
"header",
",",
"n",
",",
"min_normal_depth",
",",
"max_normal_depth",
")",
"and",
"_normal_passes_freq",
"(",
"header",
",",
"n",
",",
"fparams",
")",
")",
":",
"tumor_out_handle",
".",
"write",
"(",
"t",
")",
"normal_out_handle",
".",
"write",
"(",
"n",
")",
"return",
"tumor_out",
",",
"normal_out"
] |
Prints a list of all registered signals . Including description and plugin name .
|
def list_signals ( self ) : print ( "Signal list" ) print ( "***********\n" ) for key , signal in self . app . signals . signals . items ( ) : print ( "%s (%s)\n %s\n" % ( signal . name , signal . plugin . name , signal . description ) )
| 2,139
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/plugins/gw_signals_info.py#L73-L80
|
[
"def",
"write_header",
"(",
"self",
")",
":",
"self",
".",
"fileobj",
".",
"seek",
"(",
"0",
")",
"header",
"=",
"mar_header",
".",
"build",
"(",
"dict",
"(",
"index_offset",
"=",
"self",
".",
"last_offset",
")",
")",
"self",
".",
"fileobj",
".",
"write",
"(",
"header",
")"
] |
Prints a list of all registered receivers . Including signal plugin name and description .
|
def list_receivers ( self ) : print ( "Receiver list" ) print ( "*************\n" ) for key , receiver in self . app . signals . receivers . items ( ) : print ( "%s <-- %s (%s):\n %s\n" % ( receiver . name , receiver . signal , receiver . plugin . name , receiver . description ) )
| 2,140
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/plugins/gw_signals_info.py#L82-L92
|
[
"def",
"write_header",
"(",
"self",
")",
":",
"self",
".",
"fileobj",
".",
"seek",
"(",
"0",
")",
"header",
"=",
"mar_header",
".",
"build",
"(",
"dict",
"(",
"index_offset",
"=",
"self",
".",
"last_offset",
")",
")",
"self",
".",
"fileobj",
".",
"write",
"(",
"header",
")"
] |
Command util with subcommands for tox environments .
|
def toxcmd_main ( args = None ) : usage = "USAGE: %(prog)s [OPTIONS] COMMAND args..." if args is None : args = sys . argv [ 1 : ] # -- STEP: Build command-line parser. parser = argparse . ArgumentParser ( description = inspect . getdoc ( toxcmd_main ) , formatter_class = FORMATTER_CLASS ) common_parser = parser . add_argument_group ( "Common options" ) common_parser . add_argument ( "--version" , action = "version" , version = VERSION ) subparsers = parser . add_subparsers ( help = "commands" ) for command in discover_commands ( ) : command_parser = subparsers . add_parser ( command . name , usage = command . usage , description = command . description , help = command . short_description , formatter_class = FORMATTER_CLASS ) command_parser . set_defaults ( func = command ) command . setup_parser ( command_parser ) command . parser = command_parser # -- STEP: Process command-line and run command. options = parser . parse_args ( args ) command_function = options . func return command_function ( options )
| 2,141
|
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/bin/toxcmd.py#L220-L245
|
[
"def",
"parse_vtrgb",
"(",
"path",
"=",
"'/etc/vtrgb'",
")",
":",
"palette",
"=",
"(",
")",
"table",
"=",
"[",
"]",
"try",
":",
"with",
"open",
"(",
"path",
")",
"as",
"infile",
":",
"for",
"i",
",",
"line",
"in",
"enumerate",
"(",
"infile",
")",
":",
"row",
"=",
"tuple",
"(",
"int",
"(",
"val",
")",
"for",
"val",
"in",
"line",
".",
"split",
"(",
"','",
")",
")",
"table",
".",
"append",
"(",
"row",
")",
"if",
"i",
"==",
"2",
":",
"# failsafe",
"break",
"palette",
"=",
"tuple",
"(",
"zip",
"(",
"*",
"table",
")",
")",
"# swap rows to columns",
"except",
"IOError",
"as",
"err",
":",
"palette",
"=",
"color_tables",
".",
"vga_palette4",
"return",
"palette"
] |
Creates a macaroon to discharge a third party caveat .
|
def discharge ( ctx , id , caveat , key , checker , locator ) : caveat_id_prefix = [ ] if caveat is None : # The caveat information is encoded in the id itself. caveat = id else : # We've been given an explicit id, so when extra third party # caveats are added, use that id as the prefix # for any more ids. caveat_id_prefix = id cav_info = decode_caveat ( key , caveat ) cav_info = ThirdPartyCaveatInfo ( condition = cav_info . condition , first_party_public_key = cav_info . first_party_public_key , third_party_key_pair = cav_info . third_party_key_pair , root_key = cav_info . root_key , caveat = cav_info . caveat , version = cav_info . version , id = id , namespace = cav_info . namespace ) # Note that we don't check the error - we allow the # third party checker to see even caveats that we can't # understand. try : cond , arg = checkers . parse_caveat ( cav_info . condition ) except ValueError as exc : raise VerificationError ( exc . args [ 0 ] ) if cond == checkers . COND_NEED_DECLARED : cav_info = cav_info . _replace ( condition = arg ) caveats = _check_need_declared ( ctx , cav_info , checker ) else : caveats = checker . check_third_party_caveat ( ctx , cav_info ) # Note that the discharge macaroon does not need to # be stored persistently. Indeed, it would be a problem if # we did, because then the macaroon could potentially be used # for normal authorization with the third party. m = Macaroon ( cav_info . root_key , id , '' , cav_info . version , cav_info . namespace , ) m . _caveat_id_prefix = caveat_id_prefix if caveats is not None : for cav in caveats : m . add_caveat ( cav , key , locator ) return m
| 2,142
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_discharge.py#L116-L187
|
[
"def",
"expand_filename_pattern",
"(",
"self",
",",
"pattern",
",",
"base_dir",
",",
"sourcefile",
"=",
"None",
")",
":",
"# replace vars like ${benchmark_path},",
"# with converting to list and back, we can use the function 'substitute_vars()'",
"expandedPattern",
"=",
"substitute_vars",
"(",
"[",
"pattern",
"]",
",",
"self",
",",
"sourcefile",
")",
"assert",
"len",
"(",
"expandedPattern",
")",
"==",
"1",
"expandedPattern",
"=",
"expandedPattern",
"[",
"0",
"]",
"if",
"expandedPattern",
"!=",
"pattern",
":",
"logging",
".",
"debug",
"(",
"\"Expanded variables in expression %r to %r.\"",
",",
"pattern",
",",
"expandedPattern",
")",
"fileList",
"=",
"util",
".",
"expand_filename_pattern",
"(",
"expandedPattern",
",",
"base_dir",
")",
"# sort alphabetical,",
"fileList",
".",
"sort",
"(",
")",
"if",
"not",
"fileList",
":",
"logging",
".",
"warning",
"(",
"\"No files found matching %r.\"",
",",
"pattern",
")",
"return",
"fileList"
] |
Returns a third - party caveat that when added to a macaroon with add_caveat results in a caveat with the location local encrypted with the given PublicKey . This can be automatically discharged by discharge_all passing a local key .
|
def local_third_party_caveat ( key , version ) : if version >= VERSION_2 : loc = 'local {} {}' . format ( version , key ) else : loc = 'local {}' . format ( key ) return checkers . Caveat ( location = loc , condition = '' )
| 2,143
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_discharge.py#L234-L244
|
[
"def",
"progress",
"(",
"self",
",",
"p",
")",
":",
"self",
".",
"task_stack",
"[",
"-",
"1",
"]",
"=",
"self",
".",
"task_stack",
"[",
"-",
"1",
"]",
".",
"_replace",
"(",
"progress",
"=",
"p",
")",
"self",
".",
"progress_report",
"(",
")"
] |
Deserialize a Namespace object .
|
def deserialize_namespace ( data ) : if isinstance ( data , bytes ) : data = data . decode ( 'utf-8' ) kvs = data . split ( ) uri_to_prefix = { } for kv in kvs : i = kv . rfind ( ':' ) if i == - 1 : raise ValueError ( 'no colon in namespace ' 'field {}' . format ( repr ( kv ) ) ) uri , prefix = kv [ 0 : i ] , kv [ i + 1 : ] if not is_valid_schema_uri ( uri ) : # Currently this can't happen because the only invalid URIs # are those which contain a space raise ValueError ( 'invalid URI {} in namespace ' 'field {}' . format ( repr ( uri ) , repr ( kv ) ) ) if not is_valid_prefix ( prefix ) : raise ValueError ( 'invalid prefix {} in namespace field' ' {}' . format ( repr ( prefix ) , repr ( kv ) ) ) if uri in uri_to_prefix : raise ValueError ( 'duplicate URI {} in ' 'namespace {}' . format ( repr ( uri ) , repr ( data ) ) ) uri_to_prefix [ uri ] = prefix return Namespace ( uri_to_prefix )
| 2,144
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_namespace.py#L134-L165
|
[
"def",
"replace_version",
"(",
"self",
",",
"other",
",",
"logger",
")",
":",
"if",
"other",
".",
"library_name",
"!=",
"self",
".",
"library_name",
":",
"logger",
".",
"debug",
"(",
"'not replacable: {} != {} ()'",
".",
"format",
"(",
"other",
".",
"library_name",
",",
"self",
".",
"library_name",
",",
"other",
".",
"filename",
")",
")",
"return",
"False",
"elif",
"int",
"(",
"other",
".",
"major_version",
")",
"!=",
"int",
"(",
"self",
".",
"major_version",
")",
":",
"logger",
".",
"debug",
"(",
"'not replacable: {} != {} ({})'",
".",
"format",
"(",
"int",
"(",
"self",
".",
"major_version",
")",
",",
"int",
"(",
"other",
".",
"major_version",
")",
",",
"other",
".",
"filename",
",",
")",
")",
"return",
"False",
"elif",
"float",
"(",
"other",
".",
"minor_version",
")",
">=",
"float",
"(",
"self",
".",
"minor_version",
")",
":",
"logger",
".",
"debug",
"(",
"'not replacable: {} >= {} ({})'",
".",
"format",
"(",
"other",
".",
"minor_version",
",",
"self",
".",
"minor_version",
",",
"other",
".",
"filename",
",",
")",
")",
"return",
"False",
"else",
":",
"return",
"True"
] |
Returns a serialized form of the Namepace .
|
def serialize_text ( self ) : if self . _uri_to_prefix is None or len ( self . _uri_to_prefix ) == 0 : return b'' od = collections . OrderedDict ( sorted ( self . _uri_to_prefix . items ( ) ) ) data = [ ] for uri in od : data . append ( uri + ':' + od [ uri ] ) return ' ' . join ( data ) . encode ( 'utf-8' )
| 2,145
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_namespace.py#L33-L47
|
[
"def",
"OnAdjustVolume",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"volume",
"=",
"self",
".",
"player",
".",
"audio_get_volume",
"(",
")",
"if",
"event",
".",
"GetWheelRotation",
"(",
")",
"<",
"0",
":",
"self",
".",
"volume",
"=",
"max",
"(",
"0",
",",
"self",
".",
"volume",
"-",
"10",
")",
"elif",
"event",
".",
"GetWheelRotation",
"(",
")",
">",
"0",
":",
"self",
".",
"volume",
"=",
"min",
"(",
"200",
",",
"self",
".",
"volume",
"+",
"10",
")",
"self",
".",
"player",
".",
"audio_set_volume",
"(",
"self",
".",
"volume",
")"
] |
Registers the given URI and associates it with the given prefix .
|
def register ( self , uri , prefix ) : if not is_valid_schema_uri ( uri ) : raise KeyError ( 'cannot register invalid URI {} (prefix {})' . format ( uri , prefix ) ) if not is_valid_prefix ( prefix ) : raise ValueError ( 'cannot register invalid prefix %q for URI %q' . format ( prefix , uri ) ) if self . _uri_to_prefix . get ( uri ) is None : self . _uri_to_prefix [ uri ] = prefix
| 2,146
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_namespace.py#L49-L66
|
[
"def",
"handle_simulation_end",
"(",
"self",
",",
"data_portal",
")",
":",
"log",
".",
"info",
"(",
"'Simulated {} trading days\\n'",
"'first open: {}\\n'",
"'last close: {}'",
",",
"self",
".",
"_session_count",
",",
"self",
".",
"_trading_calendar",
".",
"session_open",
"(",
"self",
".",
"_first_session",
")",
",",
"self",
".",
"_trading_calendar",
".",
"session_close",
"(",
"self",
".",
"_last_session",
")",
",",
")",
"packet",
"=",
"{",
"}",
"self",
".",
"end_of_simulation",
"(",
"packet",
",",
"self",
".",
"_ledger",
",",
"self",
".",
"_trading_calendar",
",",
"self",
".",
"_sessions",
",",
"data_portal",
",",
"self",
".",
"_benchmark_source",
",",
")",
"return",
"packet"
] |
Return a copy of the AuthContext object with the given key and value added .
|
def with_value ( self , key , val ) : new_dict = dict ( self . _dict ) new_dict [ key ] = val return AuthContext ( new_dict )
| 2,147
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_auth_context.py#L19-L25
|
[
"def",
"libvlc_video_set_crop_geometry",
"(",
"p_mi",
",",
"psz_geometry",
")",
":",
"f",
"=",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_video_set_crop_geometry'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_video_set_crop_geometry'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
",",
")",
",",
"None",
",",
"None",
",",
"MediaPlayer",
",",
"ctypes",
".",
"c_char_p",
")",
"return",
"f",
"(",
"p_mi",
",",
"psz_geometry",
")"
] |
Make pattern for a data type with the specified cardinality .
|
def make_pattern ( self , pattern , listsep = ',' ) : if self is Cardinality . one : return pattern elif self is Cardinality . zero_or_one : return self . schema % pattern else : return self . schema % ( pattern , listsep , pattern )
| 2,148
|
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/parse_type/cardinality.py#L48-L65
|
[
"def",
"userstream_user",
"(",
"self",
",",
"delegate",
",",
"stall_warnings",
"=",
"None",
",",
"with_",
"=",
"'followings'",
",",
"replies",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'stringify_friend_ids'",
":",
"'true'",
"}",
"set_bool_param",
"(",
"params",
",",
"'stall_warnings'",
",",
"stall_warnings",
")",
"set_str_param",
"(",
"params",
",",
"'with'",
",",
"with_",
")",
"set_str_param",
"(",
"params",
",",
"'replies'",
",",
"replies",
")",
"svc",
"=",
"TwitterStreamService",
"(",
"lambda",
":",
"self",
".",
"_get_userstream",
"(",
"'user.json'",
",",
"params",
")",
",",
"delegate",
")",
"return",
"svc"
] |
Creates a type converter for the specified cardinality by using the type converter for T .
|
def with_cardinality ( cls , cardinality , converter , pattern = None , listsep = ',' ) : if cardinality is Cardinality . one : return converter # -- NORMAL-CASE builder_func = getattr ( cls , "with_%s" % cardinality . name ) if cardinality is Cardinality . zero_or_one : return builder_func ( converter , pattern ) else : # -- MANY CASE: 0..*, 1..* return builder_func ( converter , pattern , listsep = listsep )
| 2,149
|
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/parse_type/cardinality.py#L95-L113
|
[
"def",
"remove_environment",
"(",
"environment_var_name",
",",
"system",
"=",
"False",
")",
":",
"config_filename",
"=",
"_SYSTEM_CONFIG_FILE",
"if",
"system",
"is",
"True",
"else",
"_USER_CONFIG_FILE",
"config",
"=",
"_read_config",
"(",
"config_filename",
")",
"section",
"=",
"_ENVIRONMENT_SECTION_NAME",
"config",
".",
"remove_option",
"(",
"section",
",",
"environment_var_name",
")",
"_write_config",
"(",
"config",
",",
"config_filename",
")"
] |
Creates a type converter for a T with 0 .. 1 times by using the type converter for one item of T .
|
def with_zero_or_one ( cls , converter , pattern = None ) : cardinality = Cardinality . zero_or_one if not pattern : pattern = getattr ( converter , "pattern" , cls . default_pattern ) optional_pattern = cardinality . make_pattern ( pattern ) group_count = cardinality . compute_group_count ( pattern ) def convert_optional ( text , m = None ) : if text : text = text . strip ( ) if not text : return None return converter ( text ) convert_optional . pattern = optional_pattern # OLD: convert_optional.group_count = group_count convert_optional . regex_group_count = group_count return convert_optional
| 2,150
|
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/parse_type/cardinality.py#L116-L139
|
[
"def",
"detach_all_classes",
"(",
"self",
")",
":",
"classes",
"=",
"list",
"(",
"self",
".",
"_observers",
".",
"keys",
"(",
")",
")",
"for",
"cls",
"in",
"classes",
":",
"self",
".",
"detach_class",
"(",
"cls",
")"
] |
Handler for serving static files .
|
def server_static ( filepath ) : mimetype = "image/svg+xml" if filepath . endswith ( ".svg" ) else "auto" return bottle . static_file ( filepath , root = conf . StaticPath , mimetype = mimetype )
| 2,151
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L32-L35
|
[
"def",
"connect",
"(",
"dsn",
"=",
"None",
",",
"turbodbc_options",
"=",
"None",
",",
"connection_string",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"turbodbc_options",
"is",
"None",
":",
"turbodbc_options",
"=",
"make_options",
"(",
")",
"if",
"connection_string",
"is",
"not",
"None",
"and",
"(",
"dsn",
"is",
"not",
"None",
"or",
"len",
"(",
"kwargs",
")",
">",
"0",
")",
":",
"raise",
"ParameterError",
"(",
"\"Both connection_string and dsn or kwargs specified\"",
")",
"if",
"connection_string",
"is",
"None",
":",
"connection_string",
"=",
"_make_connection_string",
"(",
"dsn",
",",
"*",
"*",
"kwargs",
")",
"connection",
"=",
"Connection",
"(",
"intern_connect",
"(",
"connection_string",
",",
"turbodbc_options",
")",
")",
"return",
"connection"
] |
Handler for showing mouse statistics for specified type and day .
|
def mouse ( table , day = None ) : where = ( ( "day" , day ) , ) if day else ( ) events = db . fetch ( table , where = where , order = "day" ) for e in events : e [ "dt" ] = datetime . datetime . fromtimestamp ( e [ "stamp" ] ) stats , positions , events = stats_mouse ( events , table ) days , input = db . fetch ( "counts" , order = "day" , type = table ) , "mouse" return bottle . template ( "heatmap.tpl" , locals ( ) , conf = conf )
| 2,152
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L40-L47
|
[
"async",
"def",
"unset_lock",
"(",
"self",
",",
"resource",
",",
"lock_identifier",
")",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"successes",
"=",
"await",
"asyncio",
".",
"gather",
"(",
"*",
"[",
"i",
".",
"unset_lock",
"(",
"resource",
",",
"lock_identifier",
")",
"for",
"i",
"in",
"self",
".",
"instances",
"]",
",",
"return_exceptions",
"=",
"True",
")",
"successful_remvoes",
"=",
"sum",
"(",
"s",
"is",
"None",
"for",
"s",
"in",
"successes",
")",
"elapsed_time",
"=",
"time",
".",
"time",
"(",
")",
"-",
"start_time",
"unlocked",
"=",
"True",
"if",
"successful_remvoes",
">=",
"int",
"(",
"len",
"(",
"self",
".",
"instances",
")",
"/",
"2",
")",
"+",
"1",
"else",
"False",
"self",
".",
"log",
".",
"debug",
"(",
"'Lock \"%s\" is unset on %d/%d instances in %s seconds'",
",",
"resource",
",",
"successful_remvoes",
",",
"len",
"(",
"self",
".",
"instances",
")",
",",
"elapsed_time",
")",
"if",
"not",
"unlocked",
":",
"raise",
"LockError",
"(",
"'Can not release the lock'",
")",
"return",
"elapsed_time"
] |
Handler for showing the keyboard statistics page .
|
def keyboard ( table , day = None ) : cols , group = "realkey AS key, COUNT(*) AS count" , "realkey" where = ( ( "day" , day ) , ) if day else ( ) counts_display = counts = db . fetch ( table , cols , where , group , "count DESC" ) if "combos" == table : counts_display = db . fetch ( table , "key, COUNT(*) AS count" , where , "key" , "count DESC" ) events = db . fetch ( table , where = where , order = "stamp" ) for e in events : e [ "dt" ] = datetime . datetime . fromtimestamp ( e [ "stamp" ] ) stats , collatedevents = stats_keyboard ( events , table ) days , input = db . fetch ( "counts" , order = "day" , type = table ) , "keyboard" return bottle . template ( "heatmap.tpl" , locals ( ) , conf = conf )
| 2,153
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L52-L64
|
[
"def",
"__ConfigureMultipartRequest",
"(",
"self",
",",
"http_request",
")",
":",
"# This is a multipart/related upload.",
"msg_root",
"=",
"mime_multipart",
".",
"MIMEMultipart",
"(",
"'related'",
")",
"# msg_root should not write out its own headers",
"setattr",
"(",
"msg_root",
",",
"'_write_headers'",
",",
"lambda",
"self",
":",
"None",
")",
"# attach the body as one part",
"msg",
"=",
"mime_nonmultipart",
".",
"MIMENonMultipart",
"(",
"*",
"http_request",
".",
"headers",
"[",
"'content-type'",
"]",
".",
"split",
"(",
"'/'",
")",
")",
"msg",
".",
"set_payload",
"(",
"http_request",
".",
"body",
")",
"msg_root",
".",
"attach",
"(",
"msg",
")",
"# attach the media as the second part",
"msg",
"=",
"mime_nonmultipart",
".",
"MIMENonMultipart",
"(",
"*",
"self",
".",
"mime_type",
".",
"split",
"(",
"'/'",
")",
")",
"msg",
"[",
"'Content-Transfer-Encoding'",
"]",
"=",
"'binary'",
"msg",
".",
"set_payload",
"(",
"self",
".",
"stream",
".",
"read",
"(",
")",
")",
"msg_root",
".",
"attach",
"(",
"msg",
")",
"# NOTE: We encode the body, but can't use",
"# `email.message.Message.as_string` because it prepends",
"# `> ` to `From ` lines.",
"fp",
"=",
"six",
".",
"BytesIO",
"(",
")",
"if",
"six",
".",
"PY3",
":",
"generator_class",
"=",
"MultipartBytesGenerator",
"else",
":",
"generator_class",
"=",
"email_generator",
".",
"Generator",
"g",
"=",
"generator_class",
"(",
"fp",
",",
"mangle_from_",
"=",
"False",
")",
"g",
".",
"flatten",
"(",
"msg_root",
",",
"unixfrom",
"=",
"False",
")",
"http_request",
".",
"body",
"=",
"fp",
".",
"getvalue",
"(",
")",
"multipart_boundary",
"=",
"msg_root",
".",
"get_boundary",
"(",
")",
"http_request",
".",
"headers",
"[",
"'content-type'",
"]",
"=",
"(",
"'multipart/related; boundary=%r'",
"%",
"multipart_boundary",
")",
"if",
"isinstance",
"(",
"multipart_boundary",
",",
"six",
".",
"text_type",
")",
":",
"multipart_boundary",
"=",
"multipart_boundary",
".",
"encode",
"(",
"'ascii'",
")",
"body_components",
"=",
"http_request",
".",
"body",
".",
"split",
"(",
"multipart_boundary",
")",
"headers",
",",
"_",
",",
"_",
"=",
"body_components",
"[",
"-",
"2",
"]",
".",
"partition",
"(",
"b'\\n\\n'",
")",
"body_components",
"[",
"-",
"2",
"]",
"=",
"b'\\n\\n'",
".",
"join",
"(",
"[",
"headers",
",",
"b'<media body>\\n\\n--'",
"]",
")",
"http_request",
".",
"loggable_body",
"=",
"multipart_boundary",
".",
"join",
"(",
"body_components",
")"
] |
Handler for showing keyboard or mouse page with day and total links .
|
def inputindex ( input ) : stats = { } countminmax = "SUM(count) AS count, MIN(day) AS first, MAX(day) AS last" tables = ( "moves" , "clicks" , "scrolls" ) if "mouse" == input else ( "keys" , "combos" ) for table in tables : stats [ table ] = db . fetchone ( "counts" , countminmax , type = table ) stats [ table ] [ "days" ] = db . fetch ( "counts" , order = "day DESC" , type = table ) return bottle . template ( "input.tpl" , locals ( ) , conf = conf )
| 2,154
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L68-L76
|
[
"def",
"_temporary_config",
"(",
")",
":",
"global",
"_config_cache",
",",
"_config_file_path",
"old_cc",
"=",
"_config_cache",
"old_cfp",
"=",
"_config_file_path",
"clear_global_config",
"(",
")",
"yield",
"_config_cache",
"=",
"old_cc",
"_config_file_path",
"=",
"old_cfp"
] |
Handler for showing the GUI index page .
|
def index ( ) : stats = dict ( ( k , { "count" : 0 } ) for k , tt in conf . InputTables ) countminmax = "SUM(count) AS count, MIN(day) AS first, MAX(day) AS last" for input , table in [ ( x , t ) for x , tt in conf . InputTables for t in tt ] : row = db . fetchone ( "counts" , countminmax , type = table ) if not row [ "count" ] : continue # for input, table
stats [ input ] [ "count" ] += row [ "count" ] for func , key in [ ( min , "first" ) , ( max , "last" ) ] : stats [ input ] [ key ] = ( row [ key ] if key not in stats [ input ] else func ( stats [ input ] [ key ] , row [ key ] ) ) return bottle . template ( "index.tpl" , locals ( ) , conf = conf )
| 2,155
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L80-L91
|
[
"def",
"ttl",
"(",
"self",
",",
"value",
")",
":",
"# get timer",
"timer",
"=",
"getattr",
"(",
"self",
",",
"Annotation",
".",
"__TIMER",
",",
"None",
")",
"# if timer is running, stop the timer",
"if",
"timer",
"is",
"not",
"None",
":",
"timer",
".",
"cancel",
"(",
")",
"# initialize timestamp",
"timestamp",
"=",
"None",
"# if value is None",
"if",
"value",
"is",
"None",
":",
"# nonify timer",
"timer",
"=",
"None",
"else",
":",
"# else, renew a timer",
"# get timestamp",
"timestamp",
"=",
"time",
"(",
")",
"+",
"value",
"# start a new timer",
"timer",
"=",
"Timer",
"(",
"value",
",",
"self",
".",
"__del__",
")",
"timer",
".",
"start",
"(",
")",
"# set/update attributes",
"setattr",
"(",
"self",
",",
"Annotation",
".",
"__TIMER",
",",
"timer",
")",
"setattr",
"(",
"self",
",",
"Annotation",
".",
"__TS",
",",
"timestamp",
")"
] |
Return statistics and collated events for keyboard events .
|
def stats_keyboard ( events , table ) : if len ( events ) < 2 : return [ ] , [ ] deltas , prev_dt = [ ] , None sessions , session = [ ] , None UNBROKEN_DELTA = datetime . timedelta ( seconds = conf . KeyboardSessionMaxDelta ) blank = collections . defaultdict ( lambda : collections . defaultdict ( int ) ) collated = [ blank . copy ( ) ] # [{dt, keys: {key: count}}]
for e in events : if prev_dt : if ( prev_dt . second != e [ "dt" ] . second or prev_dt . minute != e [ "dt" ] . minute or prev_dt . hour != e [ "dt" ] . hour ) : collated . append ( blank . copy ( ) ) delta = e [ "dt" ] - prev_dt deltas . append ( delta ) if delta > UNBROKEN_DELTA : session = None else : if not session : session = [ ] sessions . append ( session ) session . append ( delta ) collated [ - 1 ] [ "dt" ] = e [ "dt" ] collated [ - 1 ] [ "keys" ] [ e [ "realkey" ] ] += 1 prev_dt = e [ "dt" ] longest_session = max ( sessions + [ [ datetime . timedelta ( ) ] ] , key = lambda x : sum ( x , datetime . timedelta ( ) ) ) stats = [ ( "Average interval between combos" , sum ( deltas , datetime . timedelta ( ) ) / len ( deltas ) ) , ] if "combos" == table else [ ( "Keys per hour" , int ( 3600 * len ( events ) / timedelta_seconds ( events [ - 1 ] [ "dt" ] - events [ 0 ] [ "dt" ] ) ) ) , ( "Average interval between keys" , sum ( deltas , datetime . timedelta ( ) ) / len ( deltas ) ) , ( "Typing sessions (key interval < %ss)" % UNBROKEN_DELTA . seconds , len ( sessions ) ) , ( "Average keys in session" , sum ( len ( x ) + 1 for x in sessions ) / len ( sessions ) ) , ( "Average session duration" , sum ( ( sum ( x , datetime . timedelta ( ) ) for x in sessions ) , datetime . timedelta ( ) ) / len ( sessions ) ) , ( "Longest session duration" , sum ( longest_session , datetime . timedelta ( ) ) ) , ( "Keys in longest session" , len ( longest_session ) + 1 ) , ( "Most keys in session" , max ( len ( x ) + 1 for x in sessions ) ) , ] return stats , collated
| 2,156
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L94-L141
|
[
"def",
"deploy_from_template",
"(",
"self",
",",
"context",
",",
"deploy_action",
",",
"cancellation_context",
")",
":",
"deploy_from_template_model",
"=",
"self",
".",
"resource_model_parser",
".",
"convert_to_resource_model",
"(",
"attributes",
"=",
"deploy_action",
".",
"actionParams",
".",
"deployment",
".",
"attributes",
",",
"resource_model_type",
"=",
"vCenterVMFromTemplateResourceModel",
")",
"data_holder",
"=",
"DeployFromTemplateDetails",
"(",
"deploy_from_template_model",
",",
"deploy_action",
".",
"actionParams",
".",
"appName",
")",
"deploy_result_action",
"=",
"self",
".",
"command_wrapper",
".",
"execute_command_with_connection",
"(",
"context",
",",
"self",
".",
"deploy_command",
".",
"execute_deploy_from_template",
",",
"data_holder",
",",
"cancellation_context",
",",
"self",
".",
"folder_manager",
")",
"deploy_result_action",
".",
"actionId",
"=",
"deploy_action",
".",
"actionId",
"return",
"deploy_result_action"
] |
Returns the total timedelta duration in seconds .
|
def timedelta_seconds ( timedelta ) : return ( timedelta . total_seconds ( ) if hasattr ( timedelta , "total_seconds" ) else timedelta . days * 24 * 3600 + timedelta . seconds + timedelta . microseconds / 1000000. )
| 2,157
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L203-L207
|
[
"def",
"make_app",
"(",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"default_options",
"=",
"[",
"[",
"'content_path'",
",",
"'.'",
"]",
",",
"[",
"'uri_marker'",
",",
"''",
"]",
"]",
"args",
"=",
"list",
"(",
"args",
")",
"options",
"=",
"dict",
"(",
"default_options",
")",
"options",
".",
"update",
"(",
"kw",
")",
"while",
"default_options",
"and",
"args",
":",
"_d",
"=",
"default_options",
".",
"pop",
"(",
"0",
")",
"_a",
"=",
"args",
".",
"pop",
"(",
"0",
")",
"options",
"[",
"_d",
"[",
"0",
"]",
"]",
"=",
"_a",
"options",
"[",
"'content_path'",
"]",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"options",
"[",
"'content_path'",
"]",
".",
"decode",
"(",
"'utf8'",
")",
")",
"options",
"[",
"'uri_marker'",
"]",
"=",
"options",
"[",
"'uri_marker'",
"]",
".",
"decode",
"(",
"'utf8'",
")",
"selector",
"=",
"WSGIHandlerSelector",
"(",
")",
"git_inforefs_handler",
"=",
"GitHTTPBackendInfoRefs",
"(",
"*",
"*",
"options",
")",
"git_rpc_handler",
"=",
"GitHTTPBackendSmartHTTP",
"(",
"*",
"*",
"options",
")",
"static_handler",
"=",
"StaticServer",
"(",
"*",
"*",
"options",
")",
"file_handler",
"=",
"FileServer",
"(",
"*",
"*",
"options",
")",
"json_handler",
"=",
"JSONServer",
"(",
"*",
"*",
"options",
")",
"ui_handler",
"=",
"UIServer",
"(",
"*",
"*",
"options",
")",
"if",
"options",
"[",
"'uri_marker'",
"]",
":",
"marker_regex",
"=",
"r'(?P<decorative_path>.*?)(?:/'",
"+",
"options",
"[",
"'uri_marker'",
"]",
"+",
"')'",
"else",
":",
"marker_regex",
"=",
"''",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'(?P<working_path>.*?)/info/refs\\?.*?service=(?P<git_command>git-[^&]+).*$'",
",",
"GET",
"=",
"git_inforefs_handler",
",",
"HEAD",
"=",
"git_inforefs_handler",
")",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'(?P<working_path>.*)/(?P<git_command>git-[^/]+)$'",
",",
"POST",
"=",
"git_rpc_handler",
")",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'/static/(?P<working_path>.*)$'",
",",
"GET",
"=",
"static_handler",
",",
"HEAD",
"=",
"static_handler",
")",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'(?P<working_path>.*)/file$'",
",",
"GET",
"=",
"file_handler",
",",
"HEAD",
"=",
"file_handler",
")",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'(?P<working_path>.*)$'",
",",
"GET",
"=",
"ui_handler",
",",
"POST",
"=",
"json_handler",
",",
"HEAD",
"=",
"ui_handler",
")",
"return",
"selector"
] |
Initialize configuration and web application .
|
def init ( ) : global app if app : return app conf . init ( ) , db . init ( conf . DbPath , conf . DbStatements ) bottle . TEMPLATE_PATH . insert ( 0 , conf . TemplatePath ) app = bottle . default_app ( ) bottle . BaseTemplate . defaults . update ( get_url = app . get_url ) return app
| 2,158
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L210-L219
|
[
"def",
"deserialize",
"(",
"self",
",",
"node",
":",
"SchemaNode",
",",
"cstruct",
":",
"Union",
"[",
"str",
",",
"ColanderNullType",
"]",
")",
"->",
"Optional",
"[",
"Pendulum",
"]",
":",
"if",
"not",
"cstruct",
":",
"return",
"colander",
".",
"null",
"try",
":",
"result",
"=",
"coerce_to_pendulum",
"(",
"cstruct",
",",
"assume_local",
"=",
"self",
".",
"use_local_tz",
")",
"except",
"(",
"ValueError",
",",
"ParserError",
")",
"as",
"e",
":",
"raise",
"Invalid",
"(",
"node",
",",
"\"Invalid date/time: value={!r}, error=\"",
"\"{!r}\"",
".",
"format",
"(",
"cstruct",
",",
"e",
")",
")",
"return",
"result"
] |
Starts the web server .
|
def start ( ) : global app bottle . run ( app , host = conf . WebHost , port = conf . WebPort , debug = conf . WebAutoReload , reloader = conf . WebAutoReload , quiet = conf . WebQuiet )
| 2,159
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/webui.py#L222-L227
|
[
"def",
"get_correlation_table",
"(",
"self",
",",
"chain",
"=",
"0",
",",
"parameters",
"=",
"None",
",",
"caption",
"=",
"\"Parameter Correlations\"",
",",
"label",
"=",
"\"tab:parameter_correlations\"",
")",
":",
"parameters",
",",
"cor",
"=",
"self",
".",
"get_correlations",
"(",
"chain",
"=",
"chain",
",",
"parameters",
"=",
"parameters",
")",
"return",
"self",
".",
"_get_2d_latex_table",
"(",
"parameters",
",",
"cor",
",",
"caption",
",",
"label",
")"
] |
Download a PDF or DJVU document from a url eventually using proxies .
|
def download ( url , proxies = None ) : # Handle default argument if proxies is None : proxies = [ "" ] # Loop over all available connections for proxy in proxies : # Handle no proxy case if proxy == "" : socket . socket = DEFAULT_SOCKET # Handle SOCKS proxy elif proxy . startswith ( 'socks' ) : if proxy [ 5 ] == '4' : proxy_type = socks . SOCKS4 else : proxy_type = socks . SOCKS5 proxy = proxy [ proxy . find ( '://' ) + 3 : ] try : proxy , port = proxy . split ( ':' ) except ValueError : port = None socks . set_default_proxy ( proxy_type , proxy , port ) socket . socket = socks . socksocket # Handle generic HTTP proxy else : try : proxy , port = proxy . split ( ':' ) except ValueError : port = None socks . set_default_proxy ( socks . HTTP , proxy , port ) socket . socket = socks . socksocket downloaded = _download_helper ( url ) if downloaded is not None : return downloaded # In case of running out of proxies, return (None, None) return ( None , None )
| 2,160
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/fetcher.py#L80-L132
|
[
"def",
"delete_category",
"(",
"self",
",",
"category",
")",
":",
"# Remove mapping of metrics-to-category",
"category_key",
"=",
"self",
".",
"_category_key",
"(",
"category",
")",
"self",
".",
"r",
".",
"delete",
"(",
"category_key",
")",
"# Remove category from Set",
"self",
".",
"r",
".",
"srem",
"(",
"self",
".",
"_categories_key",
",",
"category",
")"
] |
Build format string from a format specification .
|
def make_format ( format_spec ) : fill = '' align = '' zero = '' width = format_spec . width if format_spec . align : align = format_spec . align [ 0 ] if format_spec . fill : fill = format_spec . fill [ 0 ] if format_spec . zero : zero = '0' precision_part = "" if format_spec . precision : precision_part = ".%s" % format_spec . precision # -- FORMAT-SPEC: [[fill]align][0][width][.precision][type] return "%s%s%s%s%s%s" % ( fill , align , zero , width , precision_part , format_spec . type )
| 2,161
|
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/parse_type/parse_util.py#L78-L101
|
[
"def",
"client_connected",
"(",
"self",
",",
"reader",
":",
"asyncio",
".",
"StreamReader",
",",
"writer",
":",
"asyncio",
".",
"StreamWriter",
")",
"->",
"None",
":",
"self",
".",
"reader",
"=",
"reader",
"self",
".",
"writer",
"=",
"writer"
] |
Extract fields in a parse expression schema .
|
def extract_fields ( cls , schema ) : # -- BASED-ON: parse.Parser._generate_expression() for part in parse . PARSE_RE . split ( schema ) : if not part or part == '{{' or part == '}}' : continue elif part [ 0 ] == '{' : # this will be a braces-delimited field to handle yield cls . parse ( part )
| 2,162
|
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/parse_type/parse_util.py#L175-L187
|
[
"def",
"create_supercut_in_batches",
"(",
"composition",
",",
"outputfile",
",",
"padding",
")",
":",
"total_clips",
"=",
"len",
"(",
"composition",
")",
"start_index",
"=",
"0",
"end_index",
"=",
"BATCH_SIZE",
"batch_comp",
"=",
"[",
"]",
"while",
"start_index",
"<",
"total_clips",
":",
"filename",
"=",
"outputfile",
"+",
"'.tmp'",
"+",
"str",
"(",
"start_index",
")",
"+",
"'.mp4'",
"try",
":",
"create_supercut",
"(",
"composition",
"[",
"start_index",
":",
"end_index",
"]",
",",
"filename",
",",
"padding",
")",
"batch_comp",
".",
"append",
"(",
"filename",
")",
"gc",
".",
"collect",
"(",
")",
"start_index",
"+=",
"BATCH_SIZE",
"end_index",
"+=",
"BATCH_SIZE",
"except",
":",
"start_index",
"+=",
"BATCH_SIZE",
"end_index",
"+=",
"BATCH_SIZE",
"next",
"clips",
"=",
"[",
"VideoFileClip",
"(",
"filename",
")",
"for",
"filename",
"in",
"batch_comp",
"]",
"video",
"=",
"concatenate",
"(",
"clips",
")",
"video",
".",
"to_videofile",
"(",
"outputfile",
",",
"codec",
"=",
"\"libx264\"",
",",
"temp_audiofile",
"=",
"'temp-audio.m4a'",
",",
"remove_temp",
"=",
"True",
",",
"audio_codec",
"=",
"'aac'",
")",
"# remove partial video files",
"for",
"filename",
"in",
"batch_comp",
":",
"os",
".",
"remove",
"(",
"filename",
")",
"cleanup_log_files",
"(",
"outputfile",
")"
] |
Registers a handler .
|
def _registerHandler ( self , handler ) : self . _logger . addHandler ( handler ) self . _handlers . append ( handler )
| 2,163
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L80-L87
|
[
"def",
"get_decompressed_waveform",
"(",
"self",
",",
"tempout",
",",
"index",
",",
"f_lower",
"=",
"None",
",",
"approximant",
"=",
"None",
",",
"df",
"=",
"None",
")",
":",
"from",
"pycbc",
".",
"waveform",
".",
"waveform",
"import",
"props",
"from",
"pycbc",
".",
"waveform",
"import",
"get_waveform_filter_length_in_time",
"# Get the template hash corresponding to the template index taken in as argument",
"tmplt_hash",
"=",
"self",
".",
"table",
".",
"template_hash",
"[",
"index",
"]",
"# Read the compressed waveform from the bank file",
"compressed_waveform",
"=",
"pycbc",
".",
"waveform",
".",
"compress",
".",
"CompressedWaveform",
".",
"from_hdf",
"(",
"self",
".",
"filehandler",
",",
"tmplt_hash",
",",
"load_now",
"=",
"True",
")",
"# Get the interpolation method to be used to decompress the waveform",
"if",
"self",
".",
"waveform_decompression_method",
"is",
"not",
"None",
":",
"decompression_method",
"=",
"self",
".",
"waveform_decompression_method",
"else",
":",
"decompression_method",
"=",
"compressed_waveform",
".",
"interpolation",
"logging",
".",
"info",
"(",
"\"Decompressing waveform using %s\"",
",",
"decompression_method",
")",
"if",
"df",
"is",
"not",
"None",
":",
"delta_f",
"=",
"df",
"else",
":",
"delta_f",
"=",
"self",
".",
"delta_f",
"# Create memory space for writing the decompressed waveform",
"decomp_scratch",
"=",
"FrequencySeries",
"(",
"tempout",
"[",
"0",
":",
"self",
".",
"filter_length",
"]",
",",
"delta_f",
"=",
"delta_f",
",",
"copy",
"=",
"False",
")",
"# Get the decompressed waveform",
"hdecomp",
"=",
"compressed_waveform",
".",
"decompress",
"(",
"out",
"=",
"decomp_scratch",
",",
"f_lower",
"=",
"f_lower",
",",
"interpolation",
"=",
"decompression_method",
")",
"p",
"=",
"props",
"(",
"self",
".",
"table",
"[",
"index",
"]",
")",
"p",
".",
"pop",
"(",
"'approximant'",
")",
"try",
":",
"tmpltdur",
"=",
"self",
".",
"table",
"[",
"index",
"]",
".",
"template_duration",
"except",
"AttributeError",
":",
"tmpltdur",
"=",
"None",
"if",
"tmpltdur",
"is",
"None",
"or",
"tmpltdur",
"==",
"0.0",
":",
"tmpltdur",
"=",
"get_waveform_filter_length_in_time",
"(",
"approximant",
",",
"*",
"*",
"p",
")",
"hdecomp",
".",
"chirp_length",
"=",
"tmpltdur",
"hdecomp",
".",
"length_in_time",
"=",
"hdecomp",
".",
"chirp_length",
"return",
"hdecomp"
] |
Unregisters the logging handler .
|
def _unregisterHandler ( self , handler , shutdown = True ) : if handler in self . _handlers : self . _handlers . remove ( handler ) self . _logger . removeHandler ( handler ) if shutdown : try : handler . close ( ) except KeyError : # Depending on the Python version, it's possible for this call # to fail most likely because some logging module objects get # garbage collected before the VSGLogger object is. pass
| 2,164
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L89-L106
|
[
"def",
"get_covariance_table",
"(",
"self",
",",
"chain",
"=",
"0",
",",
"parameters",
"=",
"None",
",",
"caption",
"=",
"\"Parameter Covariance\"",
",",
"label",
"=",
"\"tab:parameter_covariance\"",
")",
":",
"parameters",
",",
"cov",
"=",
"self",
".",
"get_covariance",
"(",
"chain",
"=",
"chain",
",",
"parameters",
"=",
"parameters",
")",
"return",
"self",
".",
"_get_2d_latex_table",
"(",
"parameters",
",",
"cov",
",",
"caption",
",",
"label",
")"
] |
Retrieves the Python native logger
|
def getLogger ( cls , name = None ) : return logging . getLogger ( "{0}.{1}" . format ( cls . BASENAME , name ) if name else cls . BASENAME )
| 2,165
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L109-L116
|
[
"def",
"parse_torrent_properties",
"(",
"table_datas",
")",
":",
"output",
"=",
"{",
"'category'",
":",
"table_datas",
"[",
"0",
"]",
".",
"text",
",",
"'subcategory'",
":",
"None",
",",
"'quality'",
":",
"None",
",",
"'language'",
":",
"None",
"}",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"table_datas",
")",
")",
":",
"td",
"=",
"table_datas",
"[",
"i",
"]",
"url",
"=",
"td",
".",
"get",
"(",
"'href'",
")",
"params",
"=",
"Parser",
".",
"get_params",
"(",
"url",
")",
"if",
"Parser",
".",
"is_subcategory",
"(",
"params",
")",
"and",
"not",
"output",
"[",
"'subcategory'",
"]",
":",
"output",
"[",
"'subcategory'",
"]",
"=",
"td",
".",
"text",
"elif",
"Parser",
".",
"is_quality",
"(",
"params",
")",
"and",
"not",
"output",
"[",
"'quality'",
"]",
":",
"output",
"[",
"'quality'",
"]",
"=",
"td",
".",
"text",
"elif",
"Parser",
".",
"is_language",
"(",
"params",
")",
"and",
"not",
"output",
"[",
"'language'",
"]",
":",
"output",
"[",
"'language'",
"]",
"=",
"td",
".",
"text",
"return",
"output"
] |
Convenience function to log a message at the DEBUG level .
|
def debug ( cls , name , message , * args ) : cls . getLogger ( name ) . debug ( message , * args )
| 2,166
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L119-L128
|
[
"def",
"validate",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"check_crossrefs",
"(",
")",
"for",
"value",
"in",
"self",
".",
"values",
"(",
")",
":",
"value",
".",
"validate",
"(",
"*",
"*",
"kwargs",
")"
] |
Convenience function to log a message at the INFO level .
|
def info ( cls , name , message , * args ) : cls . getLogger ( name ) . info ( message , * args )
| 2,167
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L131-L140
|
[
"def",
"validate",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"check_crossrefs",
"(",
")",
"for",
"value",
"in",
"self",
".",
"values",
"(",
")",
":",
"value",
".",
"validate",
"(",
"*",
"*",
"kwargs",
")"
] |
Convenience function to log a message at the WARNING level .
|
def warning ( cls , name , message , * args ) : cls . getLogger ( name ) . warning ( message , * args )
| 2,168
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L143-L152
|
[
"def",
"update",
"(",
"self",
",",
"other",
")",
":",
"self",
".",
"update_ttl",
"(",
"other",
".",
"ttl",
")",
"super",
"(",
"Rdataset",
",",
"self",
")",
".",
"update",
"(",
"other",
")"
] |
Convenience function to log a message at the ERROR level .
|
def error ( cls , name , message , * args ) : cls . getLogger ( name ) . error ( message , * args )
| 2,169
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L155-L164
|
[
"def",
"execPluginsDialog",
"(",
"self",
")",
":",
"pluginsDialog",
"=",
"PluginsDialog",
"(",
"parent",
"=",
"self",
",",
"inspectorRegistry",
"=",
"self",
".",
"argosApplication",
".",
"inspectorRegistry",
",",
"rtiRegistry",
"=",
"self",
".",
"argosApplication",
".",
"rtiRegistry",
")",
"pluginsDialog",
".",
"exec_",
"(",
")"
] |
Convenience function to log a message at the CRITICAL level .
|
def critical ( cls , name , message , * args ) : cls . getLogger ( name ) . critical ( message , * args )
| 2,170
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L167-L176
|
[
"def",
"write",
"(",
"self",
",",
"filename",
"=",
"None",
",",
"io",
"=",
"None",
",",
"coors",
"=",
"None",
",",
"igs",
"=",
"None",
",",
"out",
"=",
"None",
",",
"float_format",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"filename",
"is",
"None",
":",
"filename",
"=",
"self",
".",
"name",
"+",
"'.mesh'",
"if",
"io",
"is",
"None",
":",
"io",
"=",
"self",
".",
"io",
"if",
"io",
"is",
"None",
":",
"io",
"=",
"'auto'",
"if",
"io",
"==",
"'auto'",
":",
"io",
"=",
"MeshIO",
".",
"any_from_filename",
"(",
"filename",
")",
"if",
"coors",
"is",
"None",
":",
"coors",
"=",
"self",
".",
"coors",
"if",
"igs",
"is",
"None",
":",
"igs",
"=",
"range",
"(",
"len",
"(",
"self",
".",
"conns",
")",
")",
"aux_mesh",
"=",
"Mesh",
".",
"from_data",
"(",
"self",
".",
"name",
",",
"coors",
",",
"self",
".",
"ngroups",
",",
"self",
".",
"conns",
",",
"self",
".",
"mat_ids",
",",
"self",
".",
"descs",
",",
"igs",
")",
"io",
".",
"set_float_format",
"(",
"float_format",
")",
"io",
".",
"write",
"(",
"filename",
",",
"aux_mesh",
",",
"out",
",",
"*",
"*",
"kwargs",
")"
] |
Convenience function to log a message at the ERROR level with additonal exception information .
|
def exception ( cls , name , message , * args ) : cls . getLogger ( name ) . exception ( message , * args )
| 2,171
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/logger.py#L179-L188
|
[
"def",
"echo_headers",
"(",
"headers",
",",
"file",
"=",
"None",
")",
":",
"for",
"k",
",",
"v",
"in",
"sorted",
"(",
"headers",
".",
"items",
"(",
")",
")",
":",
"click",
".",
"echo",
"(",
"\"{0}: {1}\"",
".",
"format",
"(",
"k",
".",
"title",
"(",
")",
",",
"v",
")",
",",
"file",
"=",
"file",
")",
"click",
".",
"echo",
"(",
"file",
"=",
"file",
")"
] |
Checks that the authorizer s request is authorized to perform all the given operations . Note that allow does not check first party caveats - if there is more than one macaroon that may authorize the request it will choose the first one that does regardless .
|
def allow ( self , ctx , ops ) : auth_info , _ = self . allow_any ( ctx , ops ) return auth_info
| 2,172
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_checker.py#L183-L212
|
[
"def",
"extract_zip",
"(",
"zip_path",
",",
"target_folder",
")",
":",
"with",
"zipfile",
".",
"ZipFile",
"(",
"zip_path",
")",
"as",
"archive",
":",
"archive",
".",
"extractall",
"(",
"target_folder",
")"
] |
like allow except that it will authorize as many of the operations as possible without requiring any to be authorized . If all the operations succeeded the array will be nil .
|
def allow_any ( self , ctx , ops ) : authed , used = self . _allow_any ( ctx , ops ) return self . _new_auth_info ( used ) , authed
| 2,173
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_checker.py#L214-L234
|
[
"def",
"receive_data_chunk",
"(",
"self",
",",
"raw_data",
",",
"start",
")",
":",
"self",
".",
"file",
".",
"write",
"(",
"raw_data",
")",
"# CHANGED: This un-hangs us long enough to keep things rolling.",
"eventlet",
".",
"sleep",
"(",
"0",
")"
] |
Checks that the user is allowed to perform all the given operations . If not a discharge error will be raised . If allow_capability succeeds it returns a list of first party caveat conditions that must be applied to any macaroon granting capability to execute the operations . Those caveat conditions will not include any declarations contained in login macaroons - the caller must be careful not to mint a macaroon associated with the LOGIN_OP operation unless they add the expected declaration caveat too - in general clients should not create capabilities that grant LOGIN_OP rights .
|
def allow_capability ( self , ctx , ops ) : nops = 0 for op in ops : if op != LOGIN_OP : nops += 1 if nops == 0 : raise ValueError ( 'no non-login operations required in capability' ) _ , used = self . _allow_any ( ctx , ops ) squasher = _CaveatSquasher ( ) for i , is_used in enumerate ( used ) : if not is_used : continue for cond in self . _conditions [ i ] : squasher . add ( cond ) return squasher . final ( )
| 2,174
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_checker.py#L317-L345
|
[
"def",
"assign_to",
"(",
"self",
",",
"obj",
")",
":",
"obj",
".",
"x",
"=",
"self",
".",
"x",
"obj",
".",
"y",
"=",
"self",
".",
"y"
] |
Registers a new recipe in the context of the current plugin .
|
def register ( self , name , path , description , final_words = None ) : return self . __app . recipes . register ( name , path , self . _plugin , description , final_words )
| 2,175
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_recipes_pattern.py#L63-L72
|
[
"def",
"get_info_by_tail_number",
"(",
"self",
",",
"tail_number",
",",
"page",
"=",
"1",
",",
"limit",
"=",
"100",
")",
":",
"url",
"=",
"REG_BASE",
".",
"format",
"(",
"tail_number",
",",
"str",
"(",
"self",
".",
"AUTH_TOKEN",
")",
",",
"page",
",",
"limit",
")",
"return",
"self",
".",
"_fr24",
".",
"get_aircraft_data",
"(",
"url",
")"
] |
Gets a list of all recipes which are registered by the current plugin . If a name is provided only the requested recipe is returned or None .
|
def get ( self , name = None ) : return self . __app . recipes . get ( name , self . _plugin )
| 2,176
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_recipes_pattern.py#L82-L89
|
[
"def",
"user_agent",
"(",
"self",
",",
"text",
",",
"*",
"*",
"kwargs",
")",
":",
"indicator_obj",
"=",
"UserAgent",
"(",
"text",
",",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"_indicator",
"(",
"indicator_obj",
")"
] |
Builds a recipe
|
def build ( self , recipe ) : return self . __app . recipes . build ( recipe , self . _plugin )
| 2,177
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_recipes_pattern.py#L91-L97
|
[
"def",
"_init_itemid2name",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
".",
"args",
",",
"'id2sym'",
")",
":",
"return",
"None",
"fin_id2sym",
"=",
"self",
".",
"args",
".",
"id2sym",
"if",
"fin_id2sym",
"is",
"not",
"None",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"fin_id2sym",
")",
":",
"id2sym",
"=",
"{",
"}",
"cmpl",
"=",
"re",
".",
"compile",
"(",
"r'^\\s*(\\S+)[\\s,;]+(\\S+)'",
")",
"with",
"open",
"(",
"fin_id2sym",
")",
"as",
"ifstrm",
":",
"for",
"line",
"in",
"ifstrm",
":",
"mtch",
"=",
"cmpl",
".",
"search",
"(",
"line",
")",
"if",
"mtch",
":",
"id2sym",
"[",
"mtch",
".",
"group",
"(",
"1",
")",
"]",
"=",
"mtch",
".",
"group",
"(",
"2",
")",
"return",
"id2sym"
] |
Registers a new recipe .
|
def register ( self , name , path , plugin , description = None , final_words = None ) : if name in self . recipes . keys ( ) : raise RecipeExistsException ( "Recipe %s was already registered by %s" % ( name , self . recipes [ "name" ] . plugin . name ) ) self . recipes [ name ] = Recipe ( name , path , plugin , description , final_words ) self . __log . debug ( "Recipe %s registered by %s" % ( name , plugin . name ) ) return self . recipes [ name ]
| 2,178
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_recipes_pattern.py#L113-L123
|
[
"def",
"volumes_delete",
"(",
"storage_pool",
",",
"logger",
")",
":",
"try",
":",
"for",
"vol_name",
"in",
"storage_pool",
".",
"listVolumes",
"(",
")",
":",
"try",
":",
"vol",
"=",
"storage_pool",
".",
"storageVolLookupByName",
"(",
"vol_name",
")",
"vol",
".",
"delete",
"(",
"0",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volume %s.\"",
",",
"vol_name",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volumes.\"",
")"
] |
Unregisters an existing recipe so that this recipe is no longer available .
|
def unregister ( self , recipe ) : if recipe not in self . recipes . keys ( ) : self . __log . warning ( "Can not unregister recipe %s" % recipe ) else : del ( self . recipes [ recipe ] ) self . __log . debug ( "Recipe %s got unregistered" % recipe )
| 2,179
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_recipes_pattern.py#L125-L137
|
[
"def",
"wr_txt_grouping_gos",
"(",
"self",
")",
":",
"prt_goids",
"=",
"self",
".",
"grprobj",
".",
"gosubdag",
".",
"prt_goids",
"for",
"hdrgo",
",",
"usrgos",
"in",
"self",
".",
"grprobj",
".",
"hdrgo2usrgos",
".",
"items",
"(",
")",
":",
"keygos",
"=",
"usrgos",
".",
"union",
"(",
"[",
"hdrgo",
"]",
")",
"fout_txt",
"=",
"\"{BASE}.txt\"",
".",
"format",
"(",
"BASE",
"=",
"self",
".",
"grprobj",
".",
"get_fout_base",
"(",
"hdrgo",
")",
")",
"with",
"open",
"(",
"fout_txt",
",",
"'w'",
")",
"as",
"prt",
":",
"prt_goids",
"(",
"keygos",
",",
"prt",
"=",
"prt",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"\" {N:5,} GO IDs WROTE: {TXT}\\n\"",
".",
"format",
"(",
"N",
"=",
"len",
"(",
"keygos",
")",
",",
"TXT",
"=",
"fout_txt",
")",
")"
] |
Get one or more recipes .
|
def get ( self , recipe = None , plugin = None ) : if plugin is not None : if recipe is None : recipes_list = { } for key in self . recipes . keys ( ) : if self . recipes [ key ] . plugin == plugin : recipes_list [ key ] = self . recipes [ key ] return recipes_list else : if recipe in self . recipes . keys ( ) : if self . recipes [ recipe ] . plugin == plugin : return self . recipes [ recipe ] else : return None else : return None else : if recipe is None : return self . recipes else : if recipe in self . recipes . keys ( ) : return self . recipes [ recipe ] else : return None
| 2,180
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_recipes_pattern.py#L139-L170
|
[
"def",
"writeGlobalFileStream",
"(",
"self",
",",
"cleanup",
"=",
"False",
")",
":",
"# TODO: Make this work with FileID",
"with",
"self",
".",
"jobStore",
".",
"writeFileStream",
"(",
"None",
"if",
"not",
"cleanup",
"else",
"self",
".",
"jobGraph",
".",
"jobStoreID",
")",
"as",
"(",
"backingStream",
",",
"fileStoreID",
")",
":",
"# We have a string version of the file ID, and the backing stream.",
"# We need to yield a stream the caller can write to, and a FileID",
"# that accurately reflects the size of the data written to the",
"# stream. We assume the stream is not seekable.",
"# Make and keep a reference to the file ID, which is currently empty",
"fileID",
"=",
"FileID",
"(",
"fileStoreID",
",",
"0",
")",
"# Wrap the stream to increment the file ID's size for each byte written",
"wrappedStream",
"=",
"WriteWatchingStream",
"(",
"backingStream",
")",
"# When the stream is written to, count the bytes",
"def",
"handle",
"(",
"numBytes",
")",
":",
"fileID",
".",
"size",
"+=",
"numBytes",
"wrappedStream",
".",
"onWrite",
"(",
"handle",
")",
"yield",
"wrappedStream",
",",
"fileID"
] |
Execute a recipe and creates new folder and files .
|
def build ( self , recipe , plugin = None ) : if recipe not in self . recipes . keys ( ) : raise RecipeMissingException ( "Recipe %s unknown." % recipe ) recipe_obj = self . recipes [ recipe ] if plugin is not None : if recipe_obj . plugin != plugin : raise RecipeWrongPluginException ( "The requested recipe does not belong to the given plugin. Use" "the app object, to retrieve the requested recipe: " "my_app.recipes.get(%s)" % recipe ) recipe_obj . build ( )
| 2,181
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_recipes_pattern.py#L172-L190
|
[
"def",
"pop_no_diff_fields",
"(",
"latest_config",
",",
"current_config",
")",
":",
"for",
"field",
"in",
"[",
"'userIdentity'",
",",
"'principalId'",
",",
"'userAgent'",
",",
"'sourceIpAddress'",
",",
"'requestParameters'",
",",
"'eventName'",
"]",
":",
"latest_config",
".",
"pop",
"(",
"field",
",",
"None",
")",
"current_config",
".",
"pop",
"(",
"field",
",",
"None",
")"
] |
Buildes the recipe and creates needed folder and files . May ask the user for some parameter inputs .
|
def build ( self , output_dir = None , * * kwargs ) : if output_dir is None : output_dir = os . getcwd ( ) target = cookiecutter ( self . path , output_dir = output_dir , * * kwargs ) if self . final_words is not None and len ( self . final_words ) > 0 : print ( "" ) print ( self . final_words ) return target
| 2,182
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_recipes_pattern.py#L213-L229
|
[
"def",
"baseoffset",
"(",
"self",
")",
":",
"try",
":",
"return",
"self",
".",
"parent",
".",
"baseoffset",
"+",
"self",
".",
"offset",
"except",
"AttributeError",
":",
"return",
"self",
".",
"offset"
] |
high level function that can estimate where user is based on predefined setups .
|
def where_am_i ( ) : locations = { 'Work' : 0 , 'Home' : 0 } for ssid in scan_for_ssids ( ) : #print('checking scanned_ssid ', ssid) for l in logged_ssids : #print('checking logged_ssid ', l) if l [ 'name' ] == ssid : locations [ l [ 'location' ] ] += 1 #print('MATCH') print ( 'Where Am I: SSIDS Matching Home = ' , locations [ 'Home' ] , ' SSIDs matching Work = ' , locations [ 'Work' ] ) return max ( locations . keys ( ) , key = lambda k : locations [ k ] )
| 2,183
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_context.py#L54-L69
|
[
"def",
"parse_page",
"(",
"raw_page",
")",
":",
"ret",
"=",
"{",
"\"title\"",
":",
"get_title",
"(",
"raw_page",
")",
",",
"\"id\"",
":",
"get_id",
"(",
"raw_page",
")",
"}",
"if",
"\":\"",
"in",
"ret",
"[",
"\"title\"",
"]",
":",
"return",
"None",
"ret",
"[",
"\"revisions\"",
"]",
"=",
"get_revisions",
"(",
"raw_page",
")",
"return",
"ret"
] |
extrapolate a human readable summary of the contexts
|
def summarise ( self ) : res = '' if self . user == 'Developer' : if self . host == 'Home PC' : res += 'At Home' else : res += 'Away from PC' elif self . user == 'User' and self . host == 'Home PC' : res += 'Remote desktop into home PC' res += '\n' res += self . transport return res
| 2,184
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_context.py#L127-L141
|
[
"def",
"_init_glyph",
"(",
"self",
",",
"plot",
",",
"mapping",
",",
"properties",
")",
":",
"ret",
"=",
"super",
"(",
"ColorbarPlot",
",",
"self",
")",
".",
"_init_glyph",
"(",
"plot",
",",
"mapping",
",",
"properties",
")",
"if",
"self",
".",
"colorbar",
":",
"for",
"k",
",",
"v",
"in",
"list",
"(",
"self",
".",
"handles",
".",
"items",
"(",
")",
")",
":",
"if",
"not",
"k",
".",
"endswith",
"(",
"'color_mapper'",
")",
":",
"continue",
"self",
".",
"_draw_colorbar",
"(",
"plot",
",",
"v",
",",
"k",
"[",
":",
"-",
"12",
"]",
")",
"return",
"ret"
] |
returns the host computer running this program
|
def get_host ( self ) : import socket host_name = socket . gethostname ( ) for h in hosts : if h [ 'name' ] == host_name : return h [ 'type' ] , h [ 'name' ] return dict ( type = 'Unknown' , name = host_name )
| 2,185
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_context.py#L162-L171
|
[
"def",
"create_or_update_secret",
"(",
"self",
",",
"path",
",",
"secret",
",",
"cas",
"=",
"None",
",",
"mount_point",
"=",
"DEFAULT_MOUNT_POINT",
")",
":",
"params",
"=",
"{",
"'options'",
":",
"{",
"}",
",",
"'data'",
":",
"secret",
",",
"}",
"if",
"cas",
"is",
"not",
"None",
":",
"params",
"[",
"'options'",
"]",
"[",
"'cas'",
"]",
"=",
"cas",
"api_path",
"=",
"'/v1/{mount_point}/data/{path}'",
".",
"format",
"(",
"mount_point",
"=",
"mount_point",
",",
"path",
"=",
"path",
")",
"response",
"=",
"self",
".",
"_adapter",
".",
"post",
"(",
"url",
"=",
"api_path",
",",
"json",
"=",
"params",
",",
")",
"return",
"response",
".",
"json",
"(",
")"
] |
returns the username on this computer
|
def get_user ( self ) : for name in ( 'LOGNAME' , 'USER' , 'LNAME' , 'USERNAME' ) : user = os . environ . get ( name ) if user : break for u in users : if u [ 'name' ] == user : return u [ 'type' ] , u [ 'name' ]
| 2,186
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_context.py#L173-L183
|
[
"def",
"get_random",
"(",
"self",
")",
":",
"import",
"random",
"Statement",
"=",
"self",
".",
"get_model",
"(",
"'statement'",
")",
"session",
"=",
"self",
".",
"Session",
"(",
")",
"count",
"=",
"self",
".",
"count",
"(",
")",
"if",
"count",
"<",
"1",
":",
"raise",
"self",
".",
"EmptyDatabaseException",
"(",
")",
"random_index",
"=",
"random",
".",
"randrange",
"(",
"0",
",",
"count",
")",
"random_statement",
"=",
"session",
".",
"query",
"(",
"Statement",
")",
"[",
"random_index",
"]",
"statement",
"=",
"self",
".",
"model_to_object",
"(",
"random_statement",
")",
"session",
".",
"close",
"(",
")",
"return",
"statement"
] |
get details of CPU RAM usage of this PC
|
def get_host_usage ( self ) : import psutil process_names = [ proc . name for proc in psutil . process_iter ( ) ] cpu_pct = psutil . cpu_percent ( interval = 1 ) mem = psutil . virtual_memory ( ) return str ( cpu_pct ) , str ( len ( process_names ) ) , str ( mem . available ) , str ( mem . total )
| 2,187
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_context.py#L229-L237
|
[
"def",
"_read_columns_file",
"(",
"f",
")",
":",
"try",
":",
"columns",
"=",
"json",
".",
"loads",
"(",
"open",
"(",
"f",
",",
"'r'",
")",
".",
"read",
"(",
")",
",",
"object_pairs_hook",
"=",
"collections",
".",
"OrderedDict",
")",
"except",
"Exception",
"as",
"err",
":",
"raise",
"InvalidColumnsFileError",
"(",
"\"There was an error while reading {0}: {1}\"",
".",
"format",
"(",
"f",
",",
"err",
")",
")",
"# Options are not supported yet:",
"if",
"'__options'",
"in",
"columns",
":",
"del",
"columns",
"[",
"'__options'",
"]",
"return",
"columns"
] |
Provide schema for shell configuration .
|
def schema ( ) : return Schema ( { 'script' : And ( Or ( type ( ' ' ) , type ( u' ' ) ) , len ) , Optional ( 'title' , default = '' ) : str , Optional ( 'model' , default = { } ) : { Optional ( And ( str , len ) ) : object } , Optional ( 'env' , default = { } ) : { Optional ( And ( str , len ) ) : And ( str , len ) } , Optional ( 'item' , default = None ) : object , Optional ( 'dry_run' , default = False ) : bool , Optional ( 'debug' , default = False ) : bool , Optional ( 'strict' , default = False ) : bool , Optional ( 'variables' , default = { } ) : { Optional ( And ( Or ( type ( ' ' ) , type ( u' ' ) ) , len , Regex ( r'([a-zA-Z][_a-zA-Z]*)' ) ) ) : Or ( type ( ' ' ) , type ( u' ' ) ) } , Optional ( 'temporary_scripts_path' , default = '' ) : Or ( type ( '' ) , type ( u'' ) ) , Optional ( 'internal' , default = False ) : bool } )
| 2,188
|
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/components/config.py#L29-L46
|
[
"def",
"save",
"(",
"self",
",",
"create_multiple_renditions",
"=",
"True",
",",
"preserve_source_rendition",
"=",
"True",
",",
"encode_to",
"=",
"enums",
".",
"EncodeToEnum",
".",
"FLV",
")",
":",
"if",
"is_ftp_connection",
"(",
"self",
".",
"connection",
")",
"and",
"len",
"(",
"self",
".",
"assets",
")",
">",
"0",
":",
"self",
".",
"connection",
".",
"post",
"(",
"xml",
"=",
"self",
".",
"to_xml",
"(",
")",
",",
"assets",
"=",
"self",
".",
"assets",
")",
"elif",
"not",
"self",
".",
"id",
"and",
"self",
".",
"_filename",
":",
"self",
".",
"id",
"=",
"self",
".",
"connection",
".",
"post",
"(",
"'create_video'",
",",
"self",
".",
"_filename",
",",
"create_multiple_renditions",
"=",
"create_multiple_renditions",
",",
"preserve_source_rendition",
"=",
"preserve_source_rendition",
",",
"encode_to",
"=",
"encode_to",
",",
"video",
"=",
"self",
".",
"_to_dict",
"(",
")",
")",
"elif",
"not",
"self",
".",
"id",
"and",
"len",
"(",
"self",
".",
"renditions",
")",
">",
"0",
":",
"self",
".",
"id",
"=",
"self",
".",
"connection",
".",
"post",
"(",
"'create_video'",
",",
"video",
"=",
"self",
".",
"_to_dict",
"(",
")",
")",
"elif",
"self",
".",
"id",
":",
"data",
"=",
"self",
".",
"connection",
".",
"post",
"(",
"'update_video'",
",",
"video",
"=",
"self",
".",
"_to_dict",
"(",
")",
")",
"if",
"data",
":",
"self",
".",
"_load",
"(",
"data",
")"
] |
returns an object Project which matches name
|
def get_by_name ( self , name ) : for p in self . project_list : if p . nme == name : return p return None
| 2,189
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/project.py#L35-L40
|
[
"def",
"send",
"(",
"self",
",",
"data",
")",
":",
"assert",
"isinstance",
"(",
"data",
",",
"text_type",
")",
"# When data is send back to the client, we should replace the line",
"# endings. (We didn't allocate a real pseudo terminal, and the telnet",
"# connection is raw, so we are responsible for inserting \\r.)",
"self",
".",
"stdout",
".",
"write",
"(",
"data",
".",
"replace",
"(",
"'\\n'",
",",
"'\\r\\n'",
")",
")",
"self",
".",
"stdout",
".",
"flush",
"(",
")"
] |
run execute on all tasks IFF prior task is successful
|
def execute_tasks ( self ) : for t in self . tasks : print ( 'RUNNING ' + str ( t . task_id ) + ' = ' + t . name ) t . execute ( ) if t . success != '__IGNORE__RESULT__' : print ( t ) print ( 'TASK RESULT :' , t . result , ' but success = ' , t . success ) if t . result != t . success : #raise Exception('Project execution failed at task ' + str(t.task_id) + ' = ' + t.name) print ( 'ABORTING TASK EXECUTION SEQUENCE' + str ( t . task_id ) + ' = ' + t . name ) break
| 2,190
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/project.py#L132-L145
|
[
"def",
"send_message",
"(",
"self",
",",
"message",
",",
"max_doc_size",
")",
":",
"if",
"(",
"self",
".",
"max_bson_size",
"is",
"not",
"None",
"and",
"max_doc_size",
">",
"self",
".",
"max_bson_size",
")",
":",
"raise",
"DocumentTooLarge",
"(",
"\"BSON document too large (%d bytes) - the connected server \"",
"\"supports BSON document sizes up to %d bytes.\"",
"%",
"(",
"max_doc_size",
",",
"self",
".",
"max_bson_size",
")",
")",
"try",
":",
"self",
".",
"sock",
".",
"sendall",
"(",
"message",
")",
"except",
"BaseException",
"as",
"error",
":",
"self",
".",
"_raise_connection_failure",
"(",
"error",
")"
] |
create a report showing all project details
|
def build_report ( self , op_file , tpe = 'md' ) : if tpe == 'md' : res = self . get_report_md ( ) elif tpe == 'rst' : res = self . get_report_rst ( ) elif tpe == 'html' : res = self . get_report_html ( ) else : res = 'Unknown report type passed to project.build_report' with open ( op_file , 'w' ) as f : f . write ( res )
| 2,191
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/project.py#L149-L163
|
[
"def",
"_timestamp_regulator",
"(",
"self",
")",
":",
"unified_timestamps",
"=",
"_PrettyDefaultDict",
"(",
"list",
")",
"staged_files",
"=",
"self",
".",
"_list_audio_files",
"(",
"sub_dir",
"=",
"\"staging\"",
")",
"for",
"timestamp_basename",
"in",
"self",
".",
"__timestamps_unregulated",
":",
"if",
"len",
"(",
"self",
".",
"__timestamps_unregulated",
"[",
"timestamp_basename",
"]",
")",
">",
"1",
":",
"# File has been splitted",
"timestamp_name",
"=",
"''",
".",
"join",
"(",
"timestamp_basename",
".",
"split",
"(",
"'.'",
")",
"[",
":",
"-",
"1",
"]",
")",
"staged_splitted_files_of_timestamp",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"staged_file",
":",
"(",
"timestamp_name",
"==",
"staged_file",
"[",
":",
"-",
"3",
"]",
"and",
"all",
"(",
"[",
"(",
"x",
"in",
"set",
"(",
"map",
"(",
"str",
",",
"range",
"(",
"10",
")",
")",
")",
")",
"for",
"x",
"in",
"staged_file",
"[",
"-",
"3",
":",
"]",
"]",
")",
")",
",",
"staged_files",
")",
")",
"if",
"len",
"(",
"staged_splitted_files_of_timestamp",
")",
"==",
"0",
":",
"self",
".",
"__errors",
"[",
"(",
"time",
"(",
")",
",",
"timestamp_basename",
")",
"]",
"=",
"{",
"\"reason\"",
":",
"\"Missing staged file\"",
",",
"\"current_staged_files\"",
":",
"staged_files",
"}",
"continue",
"staged_splitted_files_of_timestamp",
".",
"sort",
"(",
")",
"unified_timestamp",
"=",
"list",
"(",
")",
"for",
"staging_digits",
",",
"splitted_file",
"in",
"enumerate",
"(",
"self",
".",
"__timestamps_unregulated",
"[",
"timestamp_basename",
"]",
")",
":",
"prev_splits_sec",
"=",
"0",
"if",
"int",
"(",
"staging_digits",
")",
"!=",
"0",
":",
"prev_splits_sec",
"=",
"self",
".",
"_get_audio_duration_seconds",
"(",
"\"{}/staging/{}{:03d}\"",
".",
"format",
"(",
"self",
".",
"src_dir",
",",
"timestamp_name",
",",
"staging_digits",
"-",
"1",
")",
")",
"for",
"word_block",
"in",
"splitted_file",
":",
"unified_timestamp",
".",
"append",
"(",
"_WordBlock",
"(",
"word",
"=",
"word_block",
".",
"word",
",",
"start",
"=",
"round",
"(",
"word_block",
".",
"start",
"+",
"prev_splits_sec",
",",
"2",
")",
",",
"end",
"=",
"round",
"(",
"word_block",
".",
"end",
"+",
"prev_splits_sec",
",",
"2",
")",
")",
")",
"unified_timestamps",
"[",
"str",
"(",
"timestamp_basename",
")",
"]",
"+=",
"unified_timestamp",
"else",
":",
"unified_timestamps",
"[",
"timestamp_basename",
"]",
"+=",
"self",
".",
"__timestamps_unregulated",
"[",
"timestamp_basename",
"]",
"[",
"0",
"]",
"self",
".",
"__timestamps",
".",
"update",
"(",
"unified_timestamps",
")",
"self",
".",
"__timestamps_unregulated",
"=",
"_PrettyDefaultDict",
"(",
"list",
")"
] |
formats the project into a report in RST format
|
def get_report_rst ( self ) : res = '' res += '-----------------------------------\n' res += self . nme + '\n' res += '-----------------------------------\n\n' res += self . desc + '\n' res += self . fldr + '\n\n' res += '.. contents:: \n\n\n' res += 'Overview\n' + '===========================================\n\n' res += 'This document contains details on the project ' + self . nme + '\n\n' for d in self . details : res += ' - ' + d [ 0 ] + ' = ' + d [ 1 ] + '\n\n' res += '\nTABLES\n' + '===========================================\n\n' for t in self . datatables : res += t . name + '\n' res += '-------------------------\n\n' res += t . format_rst ( ) + '\n\n' return res
| 2,192
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/project.py#L165-L193
|
[
"def",
"volumes_delete",
"(",
"storage_pool",
",",
"logger",
")",
":",
"try",
":",
"for",
"vol_name",
"in",
"storage_pool",
".",
"listVolumes",
"(",
")",
":",
"try",
":",
"vol",
"=",
"storage_pool",
".",
"storageVolLookupByName",
"(",
"vol_name",
")",
"vol",
".",
"delete",
"(",
"0",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volume %s.\"",
",",
"vol_name",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volumes.\"",
")"
] |
formats the project into a report in MD format - WARNING - tables missing BR
|
def get_report_html ( self ) : res = '<h2>Project:' + self . nme + '</h2>' res += '<p>' + self . desc + '</p>' res += '<p>' + self . fldr + '</p>' res += '<BR><h3>TABLES</h3>' for t in self . datatables : res += '<b>' + t . name + '<b><BR>' res += '<p>' + str ( t ) + '</p>' return res
| 2,193
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/project.py#L213-L226
|
[
"def",
"wait_for_completion",
"(",
"self",
",",
"waiting_func",
"=",
"None",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"waiting to join queuingThread\"",
")",
"self",
".",
"_responsive_join",
"(",
"self",
".",
"queuing_thread",
",",
"waiting_func",
")"
] |
adds parameters as key value pairs
|
def add_param ( self , param_key , param_val ) : self . params . append ( [ param_key , param_val ] ) if param_key == '__success_test' : self . success = param_val
| 2,194
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/project.py#L257-L263
|
[
"def",
"rate_limit",
"(",
"f",
")",
":",
"def",
"new_f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"errors",
"=",
"0",
"while",
"True",
":",
"resp",
"=",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"resp",
".",
"status_code",
"==",
"200",
":",
"errors",
"=",
"0",
"return",
"resp",
"elif",
"resp",
".",
"status_code",
"==",
"401",
":",
"# Hack to retain the original exception, but augment it with",
"# additional context for the user to interpret it. In a Python",
"# 3 only future we can raise a new exception of the same type",
"# with a new message from the old error.",
"try",
":",
"resp",
".",
"raise_for_status",
"(",
")",
"except",
"requests",
".",
"HTTPError",
"as",
"e",
":",
"message",
"=",
"\"\\nThis is a protected or locked account, or\"",
"+",
"\" the credentials provided are no longer valid.\"",
"e",
".",
"args",
"=",
"(",
"e",
".",
"args",
"[",
"0",
"]",
"+",
"message",
",",
")",
"+",
"e",
".",
"args",
"[",
"1",
":",
"]",
"log",
".",
"warning",
"(",
"\"401 Authentication required for %s\"",
",",
"resp",
".",
"url",
")",
"raise",
"elif",
"resp",
".",
"status_code",
"==",
"429",
":",
"reset",
"=",
"int",
"(",
"resp",
".",
"headers",
"[",
"'x-rate-limit-reset'",
"]",
")",
"now",
"=",
"time",
".",
"time",
"(",
")",
"seconds",
"=",
"reset",
"-",
"now",
"+",
"10",
"if",
"seconds",
"<",
"1",
":",
"seconds",
"=",
"10",
"log",
".",
"warning",
"(",
"\"rate limit exceeded: sleeping %s secs\"",
",",
"seconds",
")",
"time",
".",
"sleep",
"(",
"seconds",
")",
"elif",
"resp",
".",
"status_code",
">=",
"500",
":",
"errors",
"+=",
"1",
"if",
"errors",
">",
"30",
":",
"log",
".",
"warning",
"(",
"\"too many errors from Twitter, giving up\"",
")",
"resp",
".",
"raise_for_status",
"(",
")",
"seconds",
"=",
"60",
"*",
"errors",
"log",
".",
"warning",
"(",
"\"%s from Twitter API, sleeping %s\"",
",",
"resp",
".",
"status_code",
",",
"seconds",
")",
"time",
".",
"sleep",
"(",
"seconds",
")",
"else",
":",
"resp",
".",
"raise_for_status",
"(",
")",
"return",
"new_f"
] |
executes all automatic tasks in order of task id
|
def execute ( self ) : func_params = [ ] exec_str = self . func . __name__ + '(' for p in self . params : if p [ 0 ] [ 0 : 2 ] != '__' : # ignore custom param names exec_str += p [ 0 ] + '="' + self . _force_str ( p [ 1 ] ) + '", ' func_params . append ( p [ 1 ] ) exec_str = exec_str [ : - 2 ] exec_str += ') # task' + str ( self . task_id ) + ': ' + self . name self . result = self . func ( * func_params ) print ( exec_str + ' loaded ' , self . result )
| 2,195
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/project.py#L265-L280
|
[
"def",
"_wrap_client_error",
"(",
"e",
")",
":",
"error_code",
"=",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
"message",
"=",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Message'",
"]",
"if",
"error_code",
"==",
"'BadRequestException'",
":",
"if",
"\"Failed to copy S3 object. Access denied:\"",
"in",
"message",
":",
"match",
"=",
"re",
".",
"search",
"(",
"'bucket=(.+?), key=(.+?)$'",
",",
"message",
")",
"if",
"match",
":",
"return",
"S3PermissionsRequired",
"(",
"bucket",
"=",
"match",
".",
"group",
"(",
"1",
")",
",",
"key",
"=",
"match",
".",
"group",
"(",
"2",
")",
")",
"if",
"\"Invalid S3 URI\"",
"in",
"message",
":",
"return",
"InvalidS3UriError",
"(",
"message",
"=",
"message",
")",
"return",
"ServerlessRepoClientError",
"(",
"message",
"=",
"message",
")"
] |
Create a pd . MultiIndex using the column names and any categorical rows . Note that also non - main columns will be assigned a default category .
|
def create_column_index ( annotations ) : _column_index = OrderedDict ( { 'Column Name' : annotations [ 'Column Name' ] } ) categorical_rows = annotation_rows ( 'C:' , annotations ) _column_index . update ( categorical_rows ) numerical_rows = { name : [ float ( x ) if x != '' else float ( 'NaN' ) for x in values ] for name , values in annotation_rows ( 'N:' , annotations ) . items ( ) } # to floats _column_index . update ( numerical_rows ) column_index = pd . MultiIndex . from_tuples ( list ( zip ( * _column_index . values ( ) ) ) , names = list ( _column_index . keys ( ) ) ) if len ( column_index . names ) == 1 : # flatten single-level index name = column_index . names [ 0 ] column_index = column_index . get_level_values ( name ) return column_index
| 2,196
|
https://github.com/cox-labs/perseuspy/blob/3809c1bd46512605f9e7ca7f97e026e4940ed604/perseuspy/io/perseus/matrix.py#L61-L77
|
[
"def",
"wait_until_stale",
"(",
"self",
",",
"locator",
",",
"timeout",
"=",
"None",
")",
":",
"timeout",
"=",
"timeout",
"if",
"timeout",
"is",
"not",
"None",
"else",
"self",
".",
"timeout",
"def",
"wait",
"(",
")",
":",
"'''\n Wait function passed to executor\n '''",
"element",
"=",
"WebDriverWait",
"(",
"self",
".",
"driver",
",",
"timeout",
")",
".",
"until",
"(",
"EC",
".",
"staleness_of",
"(",
"(",
"self",
".",
"locator_handler",
".",
"parse_locator",
"(",
"locator",
")",
".",
"By",
",",
"self",
".",
"locator_handler",
".",
"parse_locator",
"(",
"locator",
")",
".",
"value",
")",
")",
")",
"return",
"WebElementWrapper",
".",
"WebElementWrapper",
"(",
"self",
",",
"locator",
",",
"element",
")",
"return",
"self",
".",
"execute_and_handle_webdriver_exceptions",
"(",
"wait",
",",
"timeout",
",",
"locator",
",",
"'Timeout waiting for element to become stale'",
")"
] |
Read a Perseus - formatted matrix into a pd . DataFrame . Annotation rows will be converted into a multi - index .
|
def read_perseus ( path_or_file , * * kwargs ) : annotations = read_annotations ( path_or_file , separator ) column_index = create_column_index ( annotations ) if 'usecols' in kwargs : usecols = kwargs [ 'usecols' ] if type ( usecols [ 0 ] ) is str : usecols = sorted ( [ list ( column_index ) . index ( x ) for x in usecols ] ) column_index = column_index [ usecols ] kwargs [ 'dtype' ] = dict ( kwargs . get ( 'dtype' , { } ) , * * annotations . get ( 'dtype' , { } ) ) kwargs [ 'converters' ] = dict ( kwargs . get ( 'converters' , { } ) , * * annotations . get ( 'converters' , { } ) ) df = pd . read_csv ( path_or_file , sep = separator , comment = '#' , * * kwargs ) df . columns = column_index return df
| 2,197
|
https://github.com/cox-labs/perseuspy/blob/3809c1bd46512605f9e7ca7f97e026e4940ed604/perseuspy/io/perseus/matrix.py#L79-L102
|
[
"def",
"checkIsReachable",
"(",
"rh",
")",
":",
"rh",
".",
"printSysLog",
"(",
"\"Enter powerVM.checkIsReachable, userid: \"",
"+",
"rh",
".",
"userid",
")",
"strCmd",
"=",
"\"echo 'ping'\"",
"results",
"=",
"execCmdThruIUCV",
"(",
"rh",
",",
"rh",
".",
"userid",
",",
"strCmd",
")",
"if",
"results",
"[",
"'overallRC'",
"]",
"==",
"0",
":",
"rh",
".",
"printLn",
"(",
"\"N\"",
",",
"rh",
".",
"userid",
"+",
"\": reachable\"",
")",
"reachable",
"=",
"1",
"else",
":",
"# A failure from execCmdThruIUCV is acceptable way of determining",
"# that the system is unreachable. We won't pass along the",
"# error message.",
"rh",
".",
"printLn",
"(",
"\"N\"",
",",
"rh",
".",
"userid",
"+",
"\": unreachable\"",
")",
"reachable",
"=",
"0",
"rh",
".",
"updateResults",
"(",
"{",
"\"rs\"",
":",
"reachable",
"}",
")",
"rh",
".",
"printSysLog",
"(",
"\"Exit powerVM.checkIsReachable, rc: 0\"",
")",
"return",
"0"
] |
Save pd . DataFrame to Perseus text format .
|
def to_perseus ( df , path_or_file , main_columns = None , separator = separator , convert_bool_to_category = True , numerical_annotation_rows = set ( [ ] ) ) : _df = df . copy ( ) if not _df . columns . name : _df . columns . name = 'Column Name' column_names = _df . columns . get_level_values ( 'Column Name' ) annotations = { } main_columns = _infer_main_columns ( _df ) if main_columns is None else main_columns annotations [ 'Type' ] = [ 'E' if column_names [ i ] in main_columns else dtype_to_perseus ( dtype ) for i , dtype in enumerate ( _df . dtypes ) ] # detect multi-numeric columns for i , column in enumerate ( _df . columns ) : valid_values = [ value for value in _df [ column ] if value is not None ] if len ( valid_values ) > 0 and all ( type ( value ) is list for value in valid_values ) : annotations [ 'Type' ] [ i ] = 'M' _df [ column ] = _df [ column ] . apply ( lambda xs : ';' . join ( str ( x ) for x in xs ) ) if convert_bool_to_category : for i , column in enumerate ( _df . columns ) : if _df . dtypes [ i ] is np . dtype ( 'bool' ) : values = _df [ column ] . values _df [ column ] [ values ] = '+' _df [ column ] [ ~ values ] = '' annotation_row_names = set ( _df . columns . names ) - { 'Column Name' } for name in annotation_row_names : annotation_type = 'N' if name in numerical_annotation_rows else 'C' annotations [ '{}:{}' . format ( annotation_type , name ) ] = _df . columns . get_level_values ( name ) with PathOrFile ( path_or_file , 'w' ) as f : f . write ( separator . join ( column_names ) + '\n' ) for name , values in annotations . items ( ) : f . write ( '#!{{{name}}}{values}\n' . format ( name = name , values = separator . join ( [ str ( x ) for x in values ] ) ) ) _df . to_csv ( f , header = None , index = False , sep = separator )
| 2,198
|
https://github.com/cox-labs/perseuspy/blob/3809c1bd46512605f9e7ca7f97e026e4940ed604/perseuspy/io/perseus/matrix.py#L105-L147
|
[
"def",
"volumes_delete",
"(",
"storage_pool",
",",
"logger",
")",
":",
"try",
":",
"for",
"vol_name",
"in",
"storage_pool",
".",
"listVolumes",
"(",
")",
":",
"try",
":",
"vol",
"=",
"storage_pool",
".",
"storageVolLookupByName",
"(",
"vol_name",
")",
"vol",
".",
"delete",
"(",
"0",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volume %s.\"",
",",
"vol_name",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volumes.\"",
")"
] |
formats the entire search result in a table output
|
def get_page ( search_text ) : lst = search_aikif ( search_text ) txt = '<table class="as-table as-table-zebra as-table-horizontal">' for result in lst : txt += '<TR><TD>' + result + '</TD></TR>' txt += '</TABLE>\n\n' return txt
| 2,199
|
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/page_search.py#L12-L21
|
[
"def",
"vn_release",
"(",
"call",
"=",
"None",
",",
"kwargs",
"=",
"None",
")",
":",
"if",
"call",
"!=",
"'function'",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The vn_reserve function must be called with -f or --function.'",
")",
"if",
"kwargs",
"is",
"None",
":",
"kwargs",
"=",
"{",
"}",
"vn_id",
"=",
"kwargs",
".",
"get",
"(",
"'vn_id'",
",",
"None",
")",
"vn_name",
"=",
"kwargs",
".",
"get",
"(",
"'vn_name'",
",",
"None",
")",
"path",
"=",
"kwargs",
".",
"get",
"(",
"'path'",
",",
"None",
")",
"data",
"=",
"kwargs",
".",
"get",
"(",
"'data'",
",",
"None",
")",
"if",
"vn_id",
":",
"if",
"vn_name",
":",
"log",
".",
"warning",
"(",
"'Both the \\'vn_id\\' and \\'vn_name\\' arguments were provided. '",
"'\\'vn_id\\' will take precedence.'",
")",
"elif",
"vn_name",
":",
"vn_id",
"=",
"get_vn_id",
"(",
"kwargs",
"=",
"{",
"'name'",
":",
"vn_name",
"}",
")",
"else",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The vn_release function requires a \\'vn_id\\' or a \\'vn_name\\' to '",
"'be provided.'",
")",
"if",
"data",
":",
"if",
"path",
":",
"log",
".",
"warning",
"(",
"'Both the \\'data\\' and \\'path\\' arguments were provided. '",
"'\\'data\\' will take precedence.'",
")",
"elif",
"path",
":",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"path",
",",
"mode",
"=",
"'r'",
")",
"as",
"rfh",
":",
"data",
"=",
"rfh",
".",
"read",
"(",
")",
"else",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The vn_release function requires either \\'data\\' or a \\'path\\' to '",
"'be provided.'",
")",
"server",
",",
"user",
",",
"password",
"=",
"_get_xml_rpc",
"(",
")",
"auth",
"=",
"':'",
".",
"join",
"(",
"[",
"user",
",",
"password",
"]",
")",
"response",
"=",
"server",
".",
"one",
".",
"vn",
".",
"release",
"(",
"auth",
",",
"int",
"(",
"vn_id",
")",
",",
"data",
")",
"ret",
"=",
"{",
"'action'",
":",
"'vn.release'",
",",
"'released'",
":",
"response",
"[",
"0",
"]",
",",
"'resource_id'",
":",
"response",
"[",
"1",
"]",
",",
"'error_code'",
":",
"response",
"[",
"2",
"]",
",",
"}",
"return",
"ret"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.