query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Return a new PileupCollection that includes only pileup elements satisfying the specified criteria .
def filter ( self , drop_duplicates = False , drop_improper_mate_pairs = False , min_mapping_quality = None , min_base_quality = None , filters = None ) : if filters is None : filters = [ ] if drop_duplicates : filters . append ( lambda e : not e . alignment . is_duplicate ) if drop_improper_mate_pairs : filters . append ( lambda e : e . alignment . is_proper_pair ) if min_mapping_quality is not None : filters . append ( lambda e : e . alignment . mapping_quality >= min_mapping_quality ) if min_base_quality is not None : filters . append ( lambda e : e . min_base_quality >= min_base_quality ) pileups = OrderedDict ( ( locus , pileup . filter ( filters ) ) for ( locus , pileup ) in self . pileups . items ( ) ) return PileupCollection ( pileups = pileups , parent = self )
10,400
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup_collection.py#L445-L501
[ "def", "start_vm", "(", "access_token", ",", "subscription_id", ",", "resource_group", ",", "vm_name", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourceGroups/'", ",", "resource_group", ",", "'/providers/Microsoft.Compute/virtualMachines/'", ",", "vm_name", ",", "'/start'", ",", "'?api-version='", ",", "COMP_API", "]", ")", "return", "do_post", "(", "endpoint", ",", "''", ",", "access_token", ")" ]
Return a new PileupCollection that is the union of self and the other specified collections .
def merge ( self , * others ) : new_pileups = { } for collection in ( self , ) + others : for ( locus , pileup ) in collection . pileups . items ( ) : if locus in new_pileups : new_pileups [ locus ] . update ( pileup ) else : new_pileups [ locus ] = Pileup ( locus , pileup . elements ) return PileupCollection ( new_pileups , parent = self )
10,401
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup_collection.py#L503-L515
[ "def", "modules", "(", "self", ")", ":", "def", "_iterate_modules", "(", "bars", ")", ":", "is_bar", "=", "True", "for", "char", "in", "map", "(", "int", ",", "bars", ")", ":", "while", "char", ">", "0", ":", "char", "-=", "1", "yield", "0", "if", "is_bar", "else", "1", "is_bar", "=", "not", "is_bar", "return", "list", "(", "_iterate_modules", "(", "self", ".", "bars", ")", ")" ]
Create a PileupCollection for a set of loci from a BAM file .
def from_bam ( pysam_samfile , loci , normalized_contig_names = True ) : loci = [ to_locus ( obj ) for obj in loci ] close_on_completion = False if typechecks . is_string ( pysam_samfile ) : pysam_samfile = Samfile ( pysam_samfile ) close_on_completion = True try : # Map from pyensembl normalized chromosome names used in Variant to # the names used in the BAM file. if normalized_contig_names : chromosome_name_map = { } for name in pysam_samfile . references : normalized = pyensembl . locus . normalize_chromosome ( name ) chromosome_name_map [ normalized ] = name chromosome_name_map [ name ] = name else : chromosome_name_map = None result = PileupCollection ( { } ) # Optimization: we sort variants so our BAM reads are localized. locus_iterator = itertools . chain . from_iterable ( ( Locus . from_interbase_coordinates ( locus_interval . contig , pos ) for pos in locus_interval . positions ) for locus_interval in sorted ( loci ) ) for locus in locus_iterator : result . pileups [ locus ] = Pileup ( locus , [ ] ) if normalized_contig_names : try : chromosome = chromosome_name_map [ locus . contig ] except KeyError : logging . warn ( "No such contig in bam: %s" % locus . contig ) continue else : chromosome = locus . contig columns = pysam_samfile . pileup ( chromosome , locus . position , locus . position + 1 , # exclusive, 0-indexed truncate = True , stepper = "nofilter" ) try : column = next ( columns ) except StopIteration : # No reads align to this locus. continue # Note that storing the pileups here is necessary, since the # subsequent assertion will invalidate our column. pileups = column . pileups assert list ( columns ) == [ ] # column is invalid after this. for pileup_read in pileups : if not pileup_read . is_refskip : element = PileupElement . from_pysam_alignment ( locus , pileup_read ) result . pileups [ locus ] . append ( element ) return result finally : if close_on_completion : pysam_samfile . close ( )
10,402
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup_collection.py#L518-L603
[ "def", "_login", "(", "self", ",", "username", ",", "password", ")", ":", "data", "=", "{", "'username'", ":", "username", ",", "'password'", ":", "password", ",", "'grant_type'", ":", "'password'", "}", "r", "=", "self", ".", "spark_api", ".", "oauth", ".", "token", ".", "POST", "(", "auth", "=", "(", "'spark'", ",", "'spark'", ")", ",", "data", "=", "data", ",", "timeout", "=", "self", ".", "timeout", ")", "self", ".", "_check_error", "(", "r", ")", "return", "r", ".", "json", "(", ")", "[", "'access_token'", "]" ]
Create a parser returning Elastic Search DSL query instance .
def invenio_query_factory ( parser = None , walkers = None ) : parser = parser or Main walkers = walkers or [ PypegConverter ( ) ] walkers . append ( ElasticSearchDSL ( ) ) def invenio_query ( pattern ) : query = pypeg2 . parse ( pattern , parser , whitespace = "" ) for walker in walkers : query = query . accept ( walker ) return query return invenio_query
10,403
https://github.com/inveniosoftware/invenio-query-parser/blob/21a2c36318003ff52d2e18e7196bb420db8ecb4b/invenio_query_parser/contrib/elasticsearch/__init__.py#L34-L45
[ "def", "reassign_comment_to_book", "(", "self", ",", "comment_id", ",", "from_book_id", ",", "to_book_id", ")", ":", "# Implemented from template for", "# osid.resource.ResourceBinAssignmentSession.reassign_resource_to_bin", "self", ".", "assign_comment_to_book", "(", "comment_id", ",", "to_book_id", ")", "try", ":", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "from_book_id", ")", "except", ":", "# something went wrong, roll back assignment to to_book_id", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "to_book_id", ")", "raise" ]
Checks that the feature types of this dataset are consistent with a timeseries - profile - orthogonal dataset .
def check_dimensions ( self , dataset ) : results = [ ] required_ctx = TestCtx ( BaseCheck . HIGH , 'All geophysical variables are timeseries-profile-orthogonal feature types' ) message = '{} must be a valid profile-orthogonal feature type. It must have dimensions of (station, time, z).' message += ' If it\'s a single station, it must have dimensions (time, z). x and y dimensions must be scalar or have' message += ' dimensions (station). time must be a coordinate variable with dimension (time) and z must be a' message += ' coordinate variabel with dimension (z).' for variable in util . get_geophysical_variables ( dataset ) : is_valid = util . is_timeseries_profile_single_station ( dataset , variable ) is_valid = is_valid or util . is_timeseries_profile_multi_station ( dataset , variable ) required_ctx . assert_true ( is_valid , message . format ( variable ) ) results . append ( required_ctx . to_result ( ) ) return results
10,404
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_timeseries_profile.py#L21-L43
[ "def", "cache_data", "(", "self", ")", ":", "# Set Slug if not set", "if", "not", "self", ".", "slug_name", ":", "self", ".", "slug_name", "=", "slugify", "(", "self", ".", "name", ")", ".", "strip", "(", ")", "if", "len", "(", "self", ".", "slug_name", ")", ">", "255", ":", "self", ".", "slug_name", "=", "self", ".", "slug_name", "[", "0", ":", "254", "]" ]
set name to bootstrap in case you want to use bootstrap . This also requires the template sto be in the main dir
def theme ( name = 'readthedocs' ) : os . environ [ 'SPHINX_THEME' ] = name if os . environ [ 'SPHINX_THEME' ] == 'bootstrap' : local ( 'cp docs/source/_templates/layout_bootstrap.html docs/source/_templates/layout.html' ) elif name is 'readthedocs' : return else : local ( 'cp docs/source/_templates/layout_simple.html docs/source/_templates/layout.html' )
10,405
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/fabfile/doc.py#L57-L67
[ "def", "getOverlayKey", "(", "self", ",", "ulOverlayHandle", ",", "pchValue", ",", "unBufferSize", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayKey", "pError", "=", "EVROverlayError", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "pchValue", ",", "unBufferSize", ",", "byref", "(", "pError", ")", ")", "return", "result", ",", "pError" ]
build the doc locally and view
def html ( theme_name = 'readthedocs' ) : # disable Flask RSTPAGES due to sphinx incompatibility os . environ [ 'RSTPAGES' ] = 'FALSE' theme ( theme_name ) api ( ) man ( ) clean ( ) local ( "cd docs; make html" ) local ( "fab security.check" ) local ( "touch docs/build/html/.nojekyll" )
10,406
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/fabfile/doc.py#L70-L80
[ "def", "checkIsConsistent", "(", "self", ")", ":", "if", "is_an_array", "(", "self", ".", "mask", ")", "and", "self", ".", "mask", ".", "shape", "!=", "self", ".", "data", ".", "shape", ":", "raise", "ConsistencyError", "(", "\"Shape mismatch mask={}, data={}\"", ".", "format", "(", "self", ".", "mask", ".", "shape", "!=", "self", ".", "data", ".", "shape", ")", ")" ]
Compute a message s signature .
def sign_message ( body : ByteString , secret : Text ) -> Text : return 'sha1={}' . format ( hmac . new ( secret . encode ( ) , body , sha1 ) . hexdigest ( ) )
10,407
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L112-L119
[ "def", "_approximate_unkown_bond_lengths", "(", "self", ")", ":", "dataset", "=", "self", ".", "lengths", "[", "BOND_SINGLE", "]", "for", "n1", "in", "periodic", ".", "iter_numbers", "(", ")", ":", "for", "n2", "in", "periodic", ".", "iter_numbers", "(", ")", ":", "if", "n1", "<=", "n2", ":", "pair", "=", "frozenset", "(", "[", "n1", ",", "n2", "]", ")", "atom1", "=", "periodic", "[", "n1", "]", "atom2", "=", "periodic", "[", "n2", "]", "#if (pair not in dataset) and hasattr(atom1, \"covalent_radius\") and hasattr(atom2, \"covalent_radius\"):", "if", "(", "pair", "not", "in", "dataset", ")", "and", "(", "atom1", ".", "covalent_radius", "is", "not", "None", ")", "and", "(", "atom2", ".", "covalent_radius", "is", "not", "None", ")", ":", "dataset", "[", "pair", "]", "=", "(", "atom1", ".", "covalent_radius", "+", "atom2", ".", "covalent_radius", ")" ]
Get the user dict from cache or query it from the platform if missing .
async def _get_user ( self ) : if self . _cache is None : try : self . _cache = await self . facebook . get_user ( self . fbid , self . page_id ) except PlatformOperationError : self . _cache = { } return self . _cache
10,408
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L146-L157
[ "def", "_recompress_archive", "(", "archive", ",", "verbosity", "=", "0", ",", "interactive", "=", "True", ")", ":", "format", ",", "compression", "=", "get_archive_format", "(", "archive", ")", "if", "compression", ":", "# only recompress the compression itself (eg. for .tar.xz)", "format", "=", "compression", "tmpdir", "=", "util", ".", "tmpdir", "(", ")", "tmpdir2", "=", "util", ".", "tmpdir", "(", ")", "base", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "archive", ")", ")", "archive2", "=", "util", ".", "get_single_outfile", "(", "tmpdir2", ",", "base", ",", "extension", "=", "ext", ")", "try", ":", "# extract", "kwargs", "=", "dict", "(", "verbosity", "=", "verbosity", ",", "format", "=", "format", ",", "outdir", "=", "tmpdir", ")", "path", "=", "_extract_archive", "(", "archive", ",", "*", "*", "kwargs", ")", "# compress to new file", "olddir", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "path", ")", "try", ":", "kwargs", "=", "dict", "(", "verbosity", "=", "verbosity", ",", "interactive", "=", "interactive", ",", "format", "=", "format", ")", "files", "=", "tuple", "(", "os", ".", "listdir", "(", "path", ")", ")", "_create_archive", "(", "archive2", ",", "files", ",", "*", "*", "kwargs", ")", "finally", ":", "os", ".", "chdir", "(", "olddir", ")", "# check file sizes and replace if new file is smaller", "filesize", "=", "util", ".", "get_filesize", "(", "archive", ")", "filesize2", "=", "util", ".", "get_filesize", "(", "archive2", ")", "if", "filesize2", "<", "filesize", ":", "# replace file", "os", ".", "remove", "(", "archive", ")", "shutil", ".", "move", "(", "archive2", ",", "archive", ")", "diffsize", "=", "filesize", "-", "filesize2", "return", "\"... recompressed file is now %s smaller.\"", "%", "util", ".", "strsize", "(", "diffsize", ")", "finally", ":", "shutil", ".", "rmtree", "(", "tmpdir", ",", "onerror", "=", "rmtree_log_error", ")", "shutil", ".", "rmtree", "(", "tmpdir2", ",", "onerror", "=", "rmtree_log_error", ")", "return", "\"... recompressed file is not smaller, leaving archive as is.\"" ]
The friendly name is mapped to Facebook s first name . If the first name is missing use the last name .
async def get_friendly_name ( self ) -> Text : u = await self . _get_user ( ) f = u . get ( 'first_name' , '' ) . strip ( ) l = u . get ( 'last_name' , '' ) . strip ( ) return f or l
10,409
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L171-L180
[ "def", "clear", "(", "self", ")", ":", "self", ".", "redis_conn", ".", "delete", "(", "self", ".", "window_key", ")", "self", ".", "redis_conn", ".", "delete", "(", "self", ".", "moderate_key", ")", "self", ".", "queue", ".", "clear", "(", ")" ]
Get the gender from Facebook .
async def get_gender ( self ) -> User . Gender : u = await self . _get_user ( ) try : return User . Gender ( u . get ( 'gender' ) ) except ValueError : return User . Gender . unknown
10,410
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L182-L191
[ "def", "ginterp", "(", "coeff_matrix", ",", "points", ")", ":", "# Dimension", "dim", "=", "len", "(", "points", "[", "0", "]", ")", "# Number of data points", "num_points", "=", "len", "(", "points", ")", "# Solve system of linear equations", "matrix_l", ",", "matrix_u", "=", "linalg", ".", "lu_decomposition", "(", "coeff_matrix", ")", "ctrlpts", "=", "[", "[", "0.0", "for", "_", "in", "range", "(", "dim", ")", "]", "for", "_", "in", "range", "(", "num_points", ")", "]", "for", "i", "in", "range", "(", "dim", ")", ":", "b", "=", "[", "pt", "[", "i", "]", "for", "pt", "in", "points", "]", "y", "=", "linalg", ".", "forward_substitution", "(", "matrix_l", ",", "b", ")", "x", "=", "linalg", ".", "backward_substitution", "(", "matrix_u", ",", "y", ")", "for", "j", "in", "range", "(", "num_points", ")", ":", "ctrlpts", "[", "j", "]", "[", "i", "]", "=", "x", "[", "j", "]", "# Return control points", "return", "ctrlpts" ]
Generate a Facebook user instance
def get_user ( self ) -> FacebookUser : return FacebookUser ( self . _event [ 'sender' ] [ 'id' ] , self . get_page_id ( ) , self . _facebook , self , )
10,411
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L244-L253
[ "def", "levenshtein", "(", "x", ",", "y", ")", ":", "n", "=", "len", "(", "x", ")", "m", "=", "len", "(", "y", ")", "# initializing row 0 and column 0", "A", "=", "[", "[", "i", "+", "j", "for", "j", "in", "range", "(", "m", "+", "1", ")", "]", "for", "i", "in", "range", "(", "n", "+", "1", ")", "]", "for", "i", "in", "range", "(", "n", ")", ":", "for", "j", "in", "range", "(", "m", ")", ":", "A", "[", "i", "+", "1", "]", "[", "j", "+", "1", "]", "=", "min", "(", "A", "[", "i", "]", "[", "j", "+", "1", "]", "+", "1", ",", "# insert", "A", "[", "i", "+", "1", "]", "[", "j", "]", "+", "1", ",", "# delete", "A", "[", "i", "]", "[", "j", "]", "+", "int", "(", "x", "[", "i", "]", "!=", "y", "[", "j", "]", ")", ")", "# subst.", "return", "A", "[", "n", "]", "[", "m", "]" ]
Return all layers that can be found in the message .
def get_layers ( self ) -> List [ BaseLayer ] : out = [ ] msg = self . _event . get ( 'message' , { } ) if 'text' in msg : out . append ( lyr . RawText ( msg [ 'text' ] ) ) for attachment in msg . get ( 'attachments' ) or [ ] : if attachment [ 'type' ] == 'image' : out . append ( lyr . Image ( UrlMedia ( attachment [ 'payload' ] [ 'url' ] ) ) ) elif attachment [ 'type' ] == 'audio' : out . append ( lyr . Audio ( UrlMedia ( attachment [ 'payload' ] [ 'url' ] ) ) ) elif attachment [ 'type' ] == 'file' : out . append ( lyr . File ( UrlMedia ( attachment [ 'payload' ] [ 'url' ] ) ) ) elif attachment [ 'type' ] == 'video' : out . append ( lyr . Video ( UrlMedia ( attachment [ 'payload' ] [ 'url' ] ) ) ) elif attachment [ 'type' ] == 'location' : # noinspection PyArgumentList out . append ( lyr . Location ( lyr . Location . Point ( lat = attachment [ 'payload' ] [ 'coordinates' ] [ 'lat' ] , lon = attachment [ 'payload' ] [ 'coordinates' ] [ 'long' ] , ) ) ) if 'quick_reply' in msg : out . append ( QuickReply ( msg [ 'quick_reply' ] [ 'payload' ] ) ) if 'postback' in self . _event : payload = ujson . loads ( self . _event [ 'postback' ] [ 'payload' ] ) out . append ( lyr . Postback ( payload ) ) if 'optin' in self . _event : out . append ( OptIn ( self . _event [ 'optin' ] [ 'ref' ] ) ) return out
10,412
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L261-L297
[ "def", "calculate_energy", "(", "self", ")", ":", "clibrebound", ".", "reb_tools_energy", ".", "restype", "=", "c_double", "return", "clibrebound", ".", "reb_tools_energy", "(", "byref", "(", "self", ")", ")" ]
Automatically generated secure verify token
def verify_token ( self ) : h = sha256 ( ) h . update ( self . app_access_token . encode ( ) ) return h . hexdigest ( )
10,413
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L385-L392
[ "def", "_broadcast_indexes", "(", "self", ",", "key", ")", ":", "key", "=", "self", ".", "_item_key_to_tuple", "(", "key", ")", "# key is a tuple", "# key is a tuple of full size", "key", "=", "indexing", ".", "expanded_indexer", "(", "key", ",", "self", ".", "ndim", ")", "# Convert a scalar Variable to an integer", "key", "=", "tuple", "(", "k", ".", "data", ".", "item", "(", ")", "if", "isinstance", "(", "k", ",", "Variable", ")", "and", "k", ".", "ndim", "==", "0", "else", "k", "for", "k", "in", "key", ")", "# Convert a 0d-array to an integer", "key", "=", "tuple", "(", "k", ".", "item", "(", ")", "if", "isinstance", "(", "k", ",", "np", ".", "ndarray", ")", "and", "k", ".", "ndim", "==", "0", "else", "k", "for", "k", "in", "key", ")", "if", "all", "(", "isinstance", "(", "k", ",", "BASIC_INDEXING_TYPES", ")", "for", "k", "in", "key", ")", ":", "return", "self", ".", "_broadcast_indexes_basic", "(", "key", ")", "self", ".", "_validate_indexers", "(", "key", ")", "# Detect it can be mapped as an outer indexer", "# If all key is unlabeled, or", "# key can be mapped as an OuterIndexer.", "if", "all", "(", "not", "isinstance", "(", "k", ",", "Variable", ")", "for", "k", "in", "key", ")", ":", "return", "self", ".", "_broadcast_indexes_outer", "(", "key", ")", "# If all key is 1-dimensional and there are no duplicate labels,", "# key can be mapped as an OuterIndexer.", "dims", "=", "[", "]", "for", "k", ",", "d", "in", "zip", "(", "key", ",", "self", ".", "dims", ")", ":", "if", "isinstance", "(", "k", ",", "Variable", ")", ":", "if", "len", "(", "k", ".", "dims", ")", ">", "1", ":", "return", "self", ".", "_broadcast_indexes_vectorized", "(", "key", ")", "dims", ".", "append", "(", "k", ".", "dims", "[", "0", "]", ")", "elif", "not", "isinstance", "(", "k", ",", "integer_types", ")", ":", "dims", ".", "append", "(", "d", ")", "if", "len", "(", "set", "(", "dims", ")", ")", "==", "len", "(", "dims", ")", ":", "return", "self", ".", "_broadcast_indexes_outer", "(", "key", ")", "return", "self", ".", "_broadcast_indexes_vectorized", "(", "key", ")" ]
Dynamically hooks the right webhook paths
def hook_up ( self , router : UrlDispatcher ) : router . add_get ( self . webhook_path , self . check_hook ) router . add_post ( self . webhook_path , self . receive_events )
10,414
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L410-L416
[ "def", "BuildChecks", "(", "self", ",", "request", ")", ":", "result", "=", "[", "]", "if", "request", ".", "HasField", "(", "\"start_time\"", ")", "or", "request", ".", "HasField", "(", "\"end_time\"", ")", ":", "def", "FilterTimestamp", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "HasField", "(", "\"st_mtime\"", ")", "and", "(", "file_stat", ".", "st_mtime", "<", "request", ".", "start_time", "or", "file_stat", ".", "st_mtime", ">", "request", ".", "end_time", ")", "result", ".", "append", "(", "FilterTimestamp", ")", "if", "request", ".", "HasField", "(", "\"min_file_size\"", ")", "or", "request", ".", "HasField", "(", "\"max_file_size\"", ")", ":", "def", "FilterSize", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "HasField", "(", "\"st_size\"", ")", "and", "(", "file_stat", ".", "st_size", "<", "request", ".", "min_file_size", "or", "file_stat", ".", "st_size", ">", "request", ".", "max_file_size", ")", "result", ".", "append", "(", "FilterSize", ")", "if", "request", ".", "HasField", "(", "\"perm_mode\"", ")", ":", "def", "FilterPerms", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "(", "file_stat", ".", "st_mode", "&", "request", ".", "perm_mask", ")", "!=", "request", ".", "perm_mode", "result", ".", "append", "(", "FilterPerms", ")", "if", "request", ".", "HasField", "(", "\"uid\"", ")", ":", "def", "FilterUID", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "st_uid", "!=", "request", ".", "uid", "result", ".", "append", "(", "FilterUID", ")", "if", "request", ".", "HasField", "(", "\"gid\"", ")", ":", "def", "FilterGID", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "st_gid", "!=", "request", ".", "gid", "result", ".", "append", "(", "FilterGID", ")", "if", "request", ".", "HasField", "(", "\"path_regex\"", ")", ":", "regex", "=", "request", ".", "path_regex", "def", "FilterPath", "(", "file_stat", ",", "regex", "=", "regex", ")", ":", "\"\"\"Suppress any filename not matching the regular expression.\"\"\"", "return", "not", "regex", ".", "Search", "(", "file_stat", ".", "pathspec", ".", "Basename", "(", ")", ")", "result", ".", "append", "(", "FilterPath", ")", "if", "request", ".", "HasField", "(", "\"data_regex\"", ")", ":", "def", "FilterData", "(", "file_stat", ",", "*", "*", "_", ")", ":", "\"\"\"Suppress files that do not match the content.\"\"\"", "return", "not", "self", ".", "TestFileContent", "(", "file_stat", ")", "result", ".", "append", "(", "FilterData", ")", "return", "result" ]
Called when Facebook checks the hook
async def check_hook ( self , request : HttpRequest ) : verify_token = request . query . get ( 'hub.verify_token' ) if not verify_token : return json_response ( { 'error' : 'No verification token was provided' , } , status = 400 ) if verify_token == self . verify_token : return Response ( text = request . query . get ( 'hub.challenge' , '' ) ) return json_response ( { 'error' : 'could not find the page token in the configuration' , } )
10,415
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L418-L435
[ "def", "clear_stalled_files", "(", "self", ")", ":", "# FIXME: put lock in directory?", "CLEAR_AFTER", "=", "self", ".", "config", "[", "\"DELETE_STALLED_AFTER\"", "]", "minimum_age", "=", "time", ".", "time", "(", ")", "-", "CLEAR_AFTER", "for", "user_dir", "in", "self", ".", "UPLOAD_DIR", ".", "iterdir", "(", ")", ":", "if", "not", "user_dir", ".", "is_dir", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-directory in upload dir: %r\"", ",", "bytes", "(", "user_dir", ")", ")", "continue", "for", "content", "in", "user_dir", ".", "iterdir", "(", ")", ":", "if", "not", "content", ".", "is_file", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-file in user upload dir: %r\"", ",", "bytes", "(", "content", ")", ")", "continue", "if", "content", ".", "stat", "(", ")", ".", "st_ctime", "<", "minimum_age", ":", "content", ".", "unlink", "(", ")" ]
Events received from Facebook
async def receive_events ( self , request : HttpRequest ) : body = await request . read ( ) s = self . settings ( ) try : content = ujson . loads ( body ) except ValueError : return json_response ( { 'error' : True , 'message' : 'Cannot decode body' } , status = 400 ) secret = s [ 'app_secret' ] actual_sig = request . headers [ 'X-Hub-Signature' ] expected_sig = sign_message ( body , secret ) if not hmac . compare_digest ( actual_sig , expected_sig ) : return json_response ( { 'error' : True , 'message' : 'Invalid signature' , } , status = 401 ) for entry in content [ 'entry' ] : for raw_message in entry . get ( 'messaging' , [ ] ) : message = FacebookMessage ( raw_message , self ) await self . handle_event ( message ) return json_response ( { 'ok' : True , } )
10,416
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L437-L470
[ "def", "filter_model_items", "(", "index_instance", ",", "model_items", ",", "model_name", ",", "start_date", ",", "end_date", ")", ":", "if", "index_instance", ".", "updated_field", "is", "None", ":", "logger", ".", "warning", "(", "\"No updated date field found for {} - not restricting with start and end date\"", ".", "format", "(", "model_name", ")", ")", "else", ":", "if", "start_date", ":", "model_items", "=", "model_items", ".", "filter", "(", "*", "*", "{", "'{}__gte'", ".", "format", "(", "index_instance", ".", "updated_field", ")", ":", "__str_to_tzdate__", "(", "start_date", ")", "}", ")", "if", "end_date", ":", "model_items", "=", "model_items", ".", "filter", "(", "*", "*", "{", "'{}__lte'", ".", "format", "(", "index_instance", ".", "updated_field", ")", ":", "__str_to_tzdate__", "(", "end_date", ")", "}", ")", "return", "model_items" ]
Run those things in a sepearate tasks as they are not required for the bot to work and they take a lot of time to run .
async def _deferred_init ( self ) : await self . _check_subscriptions ( ) await self . _set_whitelist ( ) await self . _set_get_started ( ) await self . _set_greeting_text ( ) await self . _set_persistent_menu ( )
10,417
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L472-L482
[ "def", "upload_cbn_dir", "(", "dir_path", ",", "manager", ")", ":", "t", "=", "time", ".", "time", "(", ")", "for", "jfg_path", "in", "os", ".", "listdir", "(", "dir_path", ")", ":", "if", "not", "jfg_path", ".", "endswith", "(", "'.jgf'", ")", ":", "continue", "path", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "jfg_path", ")", "log", ".", "info", "(", "'opening %s'", ",", "path", ")", "with", "open", "(", "path", ")", "as", "f", ":", "cbn_jgif_dict", "=", "json", ".", "load", "(", "f", ")", "graph", "=", "pybel", ".", "from_cbn_jgif", "(", "cbn_jgif_dict", ")", "out_path", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "jfg_path", ".", "replace", "(", "'.jgf'", ",", "'.bel'", ")", ")", "with", "open", "(", "out_path", ",", "'w'", ")", "as", "o", ":", "pybel", ".", "to_bel", "(", "graph", ",", "o", ")", "strip_annotations", "(", "graph", ")", "enrich_pubmed_citations", "(", "manager", "=", "manager", ",", "graph", "=", "graph", ")", "pybel", ".", "to_database", "(", "graph", ",", "manager", "=", "manager", ")", "log", ".", "info", "(", "''", ")", "log", ".", "info", "(", "'done in %.2f'", ",", "time", ".", "time", "(", ")", "-", "t", ")" ]
The messenger profile API handles all meta - information about the bot like the menu . This allows to submit data to this API endpoint .
async def _send_to_messenger_profile ( self , page , content ) : log_name = ', ' . join ( repr ( x ) for x in content . keys ( ) ) page_id = page [ 'page_id' ] current = await self . _get_messenger_profile ( page , content . keys ( ) ) if dict_is_subset ( content , current ) : logger . info ( 'Page %s: %s is already up to date' , page_id , log_name ) return params = { 'access_token' : page [ 'page_token' ] , } headers = { 'content-type' : 'application/json' , } post = self . session . post ( PROFILE_ENDPOINT , params = params , headers = headers , data = ujson . dumps ( content ) ) # noinspection PyBroadException try : async with post as r : await self . _handle_fb_response ( r ) except Exception : logger . exception ( 'Page %s: %s could not be set' , page_id , log_name ) reporter . report ( ) else : logger . info ( 'Page %s: %s was updated' , page_id , log_name )
10,418
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L507-L548
[ "def", "_get_sync_model_vars_op", "(", "self", ")", ":", "ops", "=", "[", "]", "for", "(", "shadow_v", ",", "local_v", ")", "in", "self", ".", "_shadow_model_vars", ":", "ops", ".", "append", "(", "shadow_v", ".", "assign", "(", "local_v", ".", "read_value", "(", ")", ")", ")", "assert", "len", "(", "ops", ")", "return", "tf", ".", "group", "(", "*", "ops", ",", "name", "=", "'sync_{}_model_variables_to_ps'", ".", "format", "(", "len", "(", "ops", ")", ")", ")" ]
Set the get started action for all configured pages .
async def _set_get_started ( self ) : page = self . settings ( ) if 'get_started' in page : payload = page [ 'get_started' ] else : payload = { 'action' : 'get_started' } await self . _send_to_messenger_profile ( page , { 'get_started' : { 'payload' : ujson . dumps ( payload ) , } , } ) logger . info ( 'Get started set for page %s' , page [ 'page_id' ] )
10,419
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L550-L568
[ "def", "add_dataframe", "(", "df", ",", "name", ",", "pkg", "=", "None", ",", "description", "=", "''", ")", ":", "from", "warnings", "import", "warn", "from", "metapack", ".", "cli", ".", "core", "import", "alt_col_name", ",", "type_map", "import", "numpy", "as", "np", "if", "name", "is", "None", "or", "df", "is", "None", ":", "warn", "(", "\"Did not find dataframe for reference '{}' \"", ".", "format", "(", "ref", ")", ")", "return", "pkg", "=", "pkg", "or", "open_source_package", "(", ")", "resource_ref", "=", "'file:'", "+", "get_notebook_rel_path", "(", "pkg", ")", "+", "'#'", "+", "name", "t", "=", "pkg", ".", "find_first", "(", "'Root.Datafile'", ",", "value", "=", "resource_ref", ")", "col_props", "=", "{", "}", "if", "t", ":", "print", "(", "\"Datafile exists for url '{}', deleting\"", ".", "format", "(", "resource_ref", ")", ")", "if", "t", ".", "schema_term", ":", "col_props", "=", "{", "c", "[", "'name'", "]", ":", "c", "for", "c", "in", "t", ".", "columns", "(", ")", "}", "pkg", ".", "remove_term", "(", "t", ".", "schema_term", ")", "pkg", ".", "remove_term", "(", "t", ")", "t", "=", "pkg", "[", "'Resources'", "]", ".", "new_term", "(", "'Root.Datafile'", ",", "resource_ref", ",", "name", "=", "name", ",", "description", "=", "description", ")", "st", "=", "pkg", "[", "'Schema'", "]", ".", "new_term", "(", "'Table'", ",", "t", ".", "schema_name", ",", "description", "=", "description", ")", "for", "i", ",", "name", "in", "enumerate", "(", "df", ".", "columns", ")", ":", "props", "=", "col_props", ".", "get", "(", "name", ",", "{", "}", ")", "try", ":", "native_type", "=", "type", "(", "np", ".", "asscalar", "(", "df", "[", "name", "]", ".", "dtype", ".", "type", "(", "0", ")", ")", ")", ".", "__name__", "except", "ValueError", ":", "native_type", "=", "df", "[", "name", "]", ".", "dtype", ".", "name", "except", "AttributeError", ":", "native_type", "=", "type", "(", "df", "[", "name", "]", "[", "0", "]", ")", ".", "__name__", "for", "pn", "in", "'datatype name pos header'", ".", "split", "(", ")", ":", "if", "pn", "in", "props", ":", "del", "props", "[", "pn", "]", "if", "'altname'", "in", "props", ":", "altname", "=", "props", "[", "'altname'", "]", "del", "props", "[", "'altname'", "]", "else", ":", "raw_alt_name", "=", "alt_col_name", "(", "name", ",", "i", ")", "altname", "=", "raw_alt_name", "if", "raw_alt_name", "!=", "name", "else", "''", "col", "=", "df", "[", "name", "]", "if", "hasattr", "(", "col", ",", "'description'", ")", ":", "# custom property", "props", "[", "'description'", "]", "=", "col", ".", "description", "t", "=", "st", ".", "new_child", "(", "'Column'", ",", "name", ",", "datatype", "=", "type_map", ".", "get", "(", "native_type", ",", "native_type", ")", ",", "altname", "=", "altname", ",", "*", "*", "props", ")", "pkg", ".", "write_csv", "(", ")" ]
Set the greeting text of the page
async def _set_greeting_text ( self ) : page = self . settings ( ) if 'greeting' in page : await self . _send_to_messenger_profile ( page , { 'greeting' : page [ 'greeting' ] , } ) logger . info ( 'Greeting text set for page %s' , page [ 'page_id' ] )
10,420
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L570-L582
[ "def", "Process", "(", "self", ",", "parser_mediator", ",", "registry_key", ",", "*", "*", "kwargs", ")", ":", "if", "registry_key", "is", "None", ":", "raise", "ValueError", "(", "'Windows Registry key is not set.'", ")", "# This will raise if unhandled keyword arguments are passed.", "super", "(", "WindowsRegistryPlugin", ",", "self", ")", ".", "Process", "(", "parser_mediator", ",", "*", "*", "kwargs", ")", "self", ".", "ExtractEvents", "(", "parser_mediator", ",", "registry_key", ",", "*", "*", "kwargs", ")" ]
Define the persistent menu for all pages
async def _set_persistent_menu ( self ) : page = self . settings ( ) if 'menu' in page : await self . _send_to_messenger_profile ( page , { 'persistent_menu' : page [ 'menu' ] , } ) logger . info ( 'Set menu for page %s' , page [ 'page_id' ] )
10,421
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L584-L596
[ "def", "estimate_sub_second_time", "(", "files", ",", "interval", "=", "0.0", ")", ":", "if", "interval", "<=", "0.0", ":", "return", "[", "exif_time", "(", "f", ")", "for", "f", "in", "tqdm", "(", "files", ",", "desc", "=", "\"Reading image capture time\"", ")", "]", "onesecond", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "1.0", ")", "T", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "interval", ")", "for", "i", ",", "f", "in", "tqdm", "(", "enumerate", "(", "files", ")", ",", "desc", "=", "\"Estimating subsecond time\"", ")", ":", "m", "=", "exif_time", "(", "f", ")", "if", "not", "m", ":", "pass", "if", "i", "==", "0", ":", "smin", "=", "m", "smax", "=", "m", "+", "onesecond", "else", ":", "m0", "=", "m", "-", "T", "*", "i", "smin", "=", "max", "(", "smin", ",", "m0", ")", "smax", "=", "min", "(", "smax", ",", "m0", "+", "onesecond", ")", "if", "not", "smin", "or", "not", "smax", ":", "return", "None", "if", "smin", ">", "smax", ":", "# ERROR LOG", "print", "(", "'Interval not compatible with EXIF times'", ")", "return", "None", "else", ":", "s", "=", "smin", "+", "(", "smax", "-", "smin", ")", "/", "2", "return", "[", "s", "+", "T", "*", "i", "for", "i", "in", "range", "(", "len", "(", "files", ")", ")", "]" ]
Whitelist domains for the messenger extensions
async def _set_whitelist ( self ) : page = self . settings ( ) if 'whitelist' in page : await self . _send_to_messenger_profile ( page , { 'whitelisted_domains' : page [ 'whitelist' ] , } ) logger . info ( 'Whitelisted %s for page %s' , page [ 'whitelist' ] , page [ 'page_id' ] )
10,422
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L598-L612
[ "def", "sub", "(", "a", ",", "b", ")", ":", "if", "a", "is", "None", ":", "if", "b", "is", "None", ":", "return", "None", "else", ":", "return", "-", "1", "*", "b", "elif", "b", "is", "None", ":", "return", "a", "return", "a", "-", "b" ]
Generates the URL and tokens for the subscriptions endpoint
def _get_subscriptions_endpoint ( self ) : s = self . settings ( ) params = { 'access_token' : self . app_access_token , } return ( GRAPH_ENDPOINT . format ( f'{s["app_id"]}/subscriptions' ) , params , )
10,423
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L614-L628
[ "def", "_get_parent_timestamp", "(", "dirname", ",", "mtime", ")", ":", "parent_pathname", "=", "os", ".", "path", ".", "dirname", "(", "dirname", ")", "# max between the parent timestamp the one passed in", "mtime", "=", "_max_timestamps", "(", "parent_pathname", ",", "False", ",", "mtime", ")", "if", "dirname", "!=", "os", ".", "path", ".", "dirname", "(", "parent_pathname", ")", ":", "# this is only called if we're not at the root", "mtime", "=", "_get_parent_timestamp", "(", "parent_pathname", ",", "mtime", ")", "return", "mtime" ]
List the subscriptions currently active
async def _get_subscriptions ( self ) -> Tuple [ Set [ Text ] , Text ] : url , params = self . _get_subscriptions_endpoint ( ) get = self . session . get ( url , params = params ) async with get as r : await self . _handle_fb_response ( r ) data = await r . json ( ) for scope in data [ 'data' ] : if scope [ 'object' ] == 'page' : return ( set ( x [ 'name' ] for x in scope [ 'fields' ] ) , scope [ 'callback_url' ] , ) return set ( ) , ''
10,424
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L630-L650
[ "def", "create_temp_project_avatar", "(", "self", ",", "project", ",", "filename", ",", "size", ",", "avatar_img", ",", "contentType", "=", "None", ",", "auto_confirm", "=", "False", ")", ":", "size_from_file", "=", "os", ".", "path", ".", "getsize", "(", "filename", ")", "if", "size", "!=", "size_from_file", ":", "size", "=", "size_from_file", "params", "=", "{", "'filename'", ":", "filename", ",", "'size'", ":", "size", "}", "headers", "=", "{", "'X-Atlassian-Token'", ":", "'no-check'", "}", "if", "contentType", "is", "not", "None", ":", "headers", "[", "'content-type'", "]", "=", "contentType", "else", ":", "# try to detect content-type, this may return None", "headers", "[", "'content-type'", "]", "=", "self", ".", "_get_mime_type", "(", "avatar_img", ")", "url", "=", "self", ".", "_get_url", "(", "'project/'", "+", "project", "+", "'/avatar/temporary'", ")", "r", "=", "self", ".", "_session", ".", "post", "(", "url", ",", "params", "=", "params", ",", "headers", "=", "headers", ",", "data", "=", "avatar_img", ")", "cropping_properties", "=", "json_loads", "(", "r", ")", "if", "auto_confirm", ":", "return", "self", ".", "confirm_project_avatar", "(", "project", ",", "cropping_properties", ")", "else", ":", "return", "cropping_properties" ]
Set the subscriptions to a specific list of values
async def _set_subscriptions ( self , subscriptions ) : url , params = self . _get_subscriptions_endpoint ( ) data = { 'object' : 'page' , 'callback_url' : self . webhook_url , 'fields' : ', ' . join ( subscriptions ) , 'verify_token' : self . verify_token , } headers = { 'Content-Type' : 'application/json' , } post = self . session . post ( url , params = params , data = ujson . dumps ( data ) , headers = headers , ) async with post as r : await self . _handle_fb_response ( r ) data = await r . json ( )
10,425
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L652-L679
[ "def", "fix_missing_lang_tags", "(", "marc_xml", ",", "dom", ")", ":", "def", "get_lang_tag", "(", "lang", ")", ":", "lang_str", "=", "'\\n <mods:language>\\n'", "lang_str", "+=", "' <mods:languageTerm authority=\"iso639-2b\" type=\"code\">'", "lang_str", "+=", "lang", "lang_str", "+=", "'</mods:languageTerm>\\n'", "lang_str", "+=", "' </mods:language>\\n\\n'", "lang_dom", "=", "dhtmlparser", ".", "parseString", "(", "lang_str", ")", "return", "first", "(", "lang_dom", ".", "find", "(", "\"mods:language\"", ")", ")", "for", "lang", "in", "reversed", "(", "marc_xml", "[", "\"041a0 \"", "]", ")", ":", "lang_tag", "=", "dom", ".", "find", "(", "\"mods:languageTerm\"", ",", "fn", "=", "lambda", "x", ":", "x", ".", "getContent", "(", ")", ".", "strip", "(", ")", ".", "lower", "(", ")", "==", "lang", ".", "lower", "(", ")", ")", "if", "not", "lang_tag", ":", "insert_tag", "(", "get_lang_tag", "(", "lang", ")", ",", "dom", ".", "find", "(", "\"mods:language\"", ")", ",", "get_mods_tag", "(", "dom", ")", ")" ]
Checks that all subscriptions are subscribed
async def _check_subscriptions ( self ) : subscribed , url = await self . _get_subscriptions ( ) expect = set ( settings . FACEBOOK_SUBSCRIPTIONS ) if ( expect - subscribed ) or url != self . webhook_url : await self . _set_subscriptions ( expect | subscribed ) logger . info ( 'Updated webhook subscriptions' ) else : logger . info ( 'No need to update webhook subscriptions' )
10,426
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L681-L693
[ "def", "loadFile", "(", "self", ",", "fileName", ",", "rtiClass", "=", "None", ",", "position", "=", "None", ",", "parentIndex", "=", "QtCore", ".", "QModelIndex", "(", ")", ")", ":", "logger", ".", "info", "(", "\"Loading data from: {!r}\"", ".", "format", "(", "fileName", ")", ")", "if", "rtiClass", "is", "None", ":", "repoTreeItem", "=", "createRtiFromFileName", "(", "fileName", ")", "else", ":", "repoTreeItem", "=", "rtiClass", ".", "createFromFileName", "(", "fileName", ")", "assert", "repoTreeItem", ".", "parentItem", "is", "None", ",", "\"repoTreeItem {!r}\"", ".", "format", "(", "repoTreeItem", ")", "return", "self", ".", "insertItem", "(", "repoTreeItem", ",", "position", "=", "position", ",", "parentIndex", "=", "parentIndex", ")" ]
Handle an incoming message from Facebook .
async def handle_event ( self , event : FacebookMessage ) : responder = FacebookResponder ( self ) await self . _notify ( event , responder )
10,427
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L695-L700
[ "def", "bin_exact_kb_dense", "(", "M", ",", "positions", ",", "length", "=", "10", ")", ":", "unit", "=", "10", "**", "3", "ul", "=", "unit", "*", "length", "units", "=", "positions", "/", "ul", "n", "=", "len", "(", "positions", ")", "idx", "=", "[", "i", "for", "i", "in", "range", "(", "n", "-", "1", ")", "if", "np", ".", "ceil", "(", "units", "[", "i", "]", ")", "<", "np", ".", "ceil", "(", "units", "[", "i", "+", "1", "]", ")", "]", "m", "=", "len", "(", "idx", ")", "-", "1", "N", "=", "np", ".", "zeros", "(", "(", "m", ",", "m", ")", ")", "remainders", "=", "[", "0", "]", "+", "[", "np", ".", "abs", "(", "units", "[", "i", "]", "-", "units", "[", "i", "+", "1", "]", ")", "for", "i", "in", "range", "(", "m", ")", "]", "for", "i", "in", "range", "(", "m", ")", ":", "N", "[", "i", "]", "=", "np", ".", "array", "(", "[", "(", "M", "[", "idx", "[", "j", "]", ":", "idx", "[", "j", "+", "1", "]", ",", "idx", "[", "i", "]", ":", "idx", "[", "i", "+", "1", "]", "]", ".", "sum", "(", ")", "-", "remainders", "[", "j", "]", "*", "M", "[", "i", "]", "[", "j", "]", "+", "remainders", "[", "j", "+", "1", "]", "*", "M", "[", "i", "+", "1", "]", "[", "j", "]", ")", "for", "j", "in", "range", "(", "m", ")", "]", ")", "return", "N" ]
Guess the access token for that specific request .
def _access_token ( self , request : Request = None , page_id : Text = '' ) : if not page_id : msg = request . message # type: FacebookMessage page_id = msg . get_page_id ( ) page = self . settings ( ) if page [ 'page_id' ] == page_id : return page [ 'page_token' ] raise PlatformOperationError ( 'Trying to get access token of the ' 'page "{}", which is not configured.' . format ( page_id ) )
10,428
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L702-L718
[ "def", "symlink_bundles", "(", "self", ",", "app", ",", "bundle_dir", ")", ":", "for", "bundle_counter", ",", "bundle", "in", "enumerate", "(", "app", ".", "bundles", ")", ":", "count", "=", "0", "for", "path", ",", "relpath", "in", "bundle", ".", "filemap", ".", "items", "(", ")", ":", "bundle_path", "=", "os", ".", "path", ".", "join", "(", "bundle_dir", ",", "relpath", ")", "count", "+=", "1", "if", "os", ".", "path", ".", "exists", "(", "bundle_path", ")", ":", "continue", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "safe_mkdir", "(", "os", ".", "path", ".", "dirname", "(", "bundle_path", ")", ")", "os", ".", "symlink", "(", "path", ",", "bundle_path", ")", "elif", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "safe_mkdir", "(", "bundle_path", ")", "if", "count", "==", "0", ":", "raise", "TargetDefinitionException", "(", "app", ".", "target", ",", "'Bundle index {} of \"bundles\" field '", "'does not match any files.'", ".", "format", "(", "bundle_counter", ")", ")" ]
Generate a single quick reply s content .
async def _make_qr ( self , qr : QuickRepliesList . BaseOption , request : Request ) : if isinstance ( qr , QuickRepliesList . TextOption ) : return { 'content_type' : 'text' , 'title' : await render ( qr . text , request ) , 'payload' : qr . slug , } elif isinstance ( qr , QuickRepliesList . LocationOption ) : return { 'content_type' : 'location' , }
10,429
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L720-L736
[ "def", "adjust_privileges", "(", "state", ",", "privileges", ")", ":", "with", "win32", ".", "OpenProcessToken", "(", "win32", ".", "GetCurrentProcess", "(", ")", ",", "win32", ".", "TOKEN_ADJUST_PRIVILEGES", ")", "as", "hToken", ":", "NewState", "=", "(", "(", "priv", ",", "state", ")", "for", "priv", "in", "privileges", ")", "win32", ".", "AdjustTokenPrivileges", "(", "hToken", ",", "NewState", ")" ]
Send text layers to the user . Each layer will go in its own bubble .
async def _send_text ( self , request : Request , stack : Stack ) : parts = [ ] for layer in stack . layers : if isinstance ( layer , lyr . MultiText ) : lines = await render ( layer . text , request , multi_line = True ) for line in lines : for part in wrap ( line , 320 ) : parts . append ( part ) elif isinstance ( layer , ( lyr . Text , lyr . RawText ) ) : text = await render ( layer . text , request ) for part in wrap ( text , 320 ) : parts . append ( part ) for part in parts [ : - 1 ] : await self . _send ( request , { 'text' : part , } , stack ) part = parts [ - 1 ] msg = { 'text' : part , } await self . _add_qr ( stack , msg , request ) await self . _send ( request , msg , stack )
10,430
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L749-L783
[ "def", "update_version_descriptor", "(", "self", ",", "task", ",", "releasetype", ",", "descriptor", ",", "verbrowser", ",", "commentbrowser", ")", ":", "if", "task", "is", "None", ":", "null", "=", "treemodel", ".", "TreeItem", "(", "None", ")", "verbrowser", ".", "set_model", "(", "treemodel", ".", "TreeModel", "(", "null", ")", ")", "return", "m", "=", "self", ".", "create_version_model", "(", "task", ",", "releasetype", ",", "descriptor", ")", "verbrowser", ".", "set_model", "(", "m", ")", "commentbrowser", ".", "set_model", "(", "m", ")" ]
Generates and send a generic template .
async def _send_generic_template ( self , request : Request , stack : Stack ) : gt = stack . get_layer ( GenericTemplate ) payload = await gt . serialize ( request ) msg = { 'attachment' : { 'type' : 'template' , 'payload' : payload } } await self . _add_qr ( stack , msg , request ) await self . _send ( request , msg , stack )
10,431
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L785-L801
[ "def", "enforce_vertical_symmetry", "(", "pixmap", ")", ":", "mirror", "=", "[", "]", "for", "item", "in", "pixmap", ":", "y", "=", "item", "[", "0", "]", "x", "=", "item", "[", "1", "]", "if", "x", "<=", "IMAGE_APEX", ":", "diff_x", "=", "diff", "(", "x", ",", "IMAGE_APEX", ")", "mirror", ".", "append", "(", "(", "y", ",", "x", "+", "(", "2", "*", "diff_x", ")", "-", "1", ")", ")", "if", "x", ">", "IMAGE_APEX", ":", "diff_x", "=", "diff", "(", "x", ",", "IMAGE_APEX", ")", "mirror", ".", "append", "(", "(", "y", ",", "x", "-", "(", "2", "*", "diff_x", ")", "-", "1", ")", ")", "return", "mirror", "+", "pixmap" ]
Generates and send a button template .
async def _send_button_template ( self , request : Request , stack : Stack ) : gt = stack . get_layer ( ButtonTemplate ) payload = { 'template_type' : 'button' , 'text' : await render ( gt . text , request ) , 'buttons' : [ await b . serialize ( request ) for b in gt . buttons ] , } msg = { 'attachment' : { 'type' : 'template' , 'payload' : payload } } await self . _add_qr ( stack , msg , request ) await self . _send ( request , msg , stack )
10,432
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L803-L824
[ "def", "_recursive_reindex_object_security", "(", "self", ",", "obj", ")", ":", "if", "hasattr", "(", "aq_base", "(", "obj", ")", ",", "\"objectValues\"", ")", ":", "for", "obj", "in", "obj", ".", "objectValues", "(", ")", ":", "self", ".", "_recursive_reindex_object_security", "(", "obj", ")", "logger", ".", "debug", "(", "\"Reindexing object security for {}\"", ".", "format", "(", "repr", "(", "obj", ")", ")", ")", "obj", ".", "reindexObjectSecurity", "(", ")" ]
Send to Facebook typing indications
async def _send_typing ( self , request : Request , stack : Stack ) : active = stack . get_layer ( lyr . Typing ) . active msg = ujson . dumps ( { 'recipient' : { 'id' : request . conversation . fbid , } , 'sender_action' : 'typing_on' if active else 'typing_off' , } ) headers = { 'content-type' : 'application/json' , } params = { 'access_token' : self . _access_token ( request ) , } post = self . session . post ( MESSAGES_ENDPOINT , params = params , data = msg , headers = headers , ) logger . debug ( 'Sending: %s' , msg ) async with post as r : await self . _handle_fb_response ( r )
10,433
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L858-L890
[ "def", "update", "(", "self", ")", ":", "with", "self", ".", "lock", ":", "# Increment revision and commit it.", "self", ".", "revision", "+=", "1", "self", ".", "server", ".", "commit", "(", "self", ".", "revision", "+", "1", ")", "# Unblock all waiting clients.", "self", ".", "next_revision_available", ".", "set", "(", ")", "self", ".", "next_revision_available", ".", "clear", "(", ")", "# Check sessions to see which revision can be removed.", "if", "self", ".", "sessions", ":", "lowest_revision", "=", "min", "(", "session", ".", "revision", "for", "session", "in", "self", ".", "sessions", ".", "itervalues", "(", ")", ")", "# Remove all old revision history", "if", "lowest_revision", "==", "self", ".", "revision", ":", "self", ".", "server", ".", "clean", "(", "lowest_revision", ")", "# Invoke hooks", "invoke_hooks", "(", "self", ".", "hooks", ",", "\"updated\"", ",", "self", ".", "revision", ")" ]
Check that Facebook was OK with the API call we just made and raise an exception if it failed .
async def _handle_fb_response ( self , response : aiohttp . ClientResponse ) : ok = response . status == 200 if not ok : # noinspection PyBroadException try : error = ( await response . json ( ) ) [ 'error' ] [ 'message' ] except Exception : error = '(nothing)' raise PlatformOperationError ( 'Facebook says: "{}"' . format ( error ) )
10,434
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L892-L908
[ "def", "_resizeColumnsToContents", "(", "self", ",", "header", ",", "data", ",", "limit_ms", ")", ":", "max_col", "=", "data", ".", "model", "(", ")", ".", "columnCount", "(", ")", "if", "limit_ms", "is", "None", ":", "max_col_ms", "=", "None", "else", ":", "max_col_ms", "=", "limit_ms", "/", "max", "(", "1", ",", "max_col", ")", "for", "col", "in", "range", "(", "max_col", ")", ":", "self", ".", "_resizeColumnToContents", "(", "header", ",", "data", ",", "col", ",", "max_col_ms", ")" ]
Actually proceed to sending the message to the Facebook API .
async def _send ( self , request : Request , content : Dict [ Text , Any ] , stack : Stack ) : msg = { 'recipient' : { 'id' : request . conversation . fbid , } , 'message' : content , } if stack and stack . has_layer ( MessagingType ) : mt = stack . get_layer ( MessagingType ) else : mt = MessagingType ( response = True ) msg . update ( mt . serialize ( ) ) msg_json = ujson . dumps ( msg ) headers = { 'content-type' : 'application/json' , } params = { 'access_token' : self . _access_token ( request ) , } post = self . session . post ( MESSAGES_ENDPOINT , params = params , data = msg_json , headers = headers , ) logger . debug ( 'Sending: %s' , msg_json ) async with post as r : await self . _handle_fb_response ( r )
10,435
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L910-L951
[ "def", "eventFilter", "(", "self", ",", "watchedObject", ",", "event", ")", ":", "if", "self", ".", "comboBox", ".", "isEditable", "(", ")", "and", "event", ".", "type", "(", ")", "==", "QtCore", ".", "QEvent", ".", "KeyPress", ":", "key", "=", "event", ".", "key", "(", ")", "if", "key", "in", "(", "Qt", ".", "Key_Delete", ",", "Qt", ".", "Key_Backspace", ")", ":", "if", "(", "watchedObject", "==", "self", ".", "_comboboxListView", "or", "(", "watchedObject", "==", "self", ".", "comboBox", "and", "event", ".", "modifiers", "(", ")", "==", "Qt", ".", "ControlModifier", ")", ")", ":", "index", "=", "self", ".", "_comboboxListView", ".", "currentIndex", "(", ")", "if", "index", ".", "isValid", "(", ")", ":", "row", "=", "index", ".", "row", "(", ")", "logger", ".", "debug", "(", "\"Removing item {} from the combobox: {}\"", ".", "format", "(", "row", ",", "self", ".", "_comboboxListView", ".", "model", "(", ")", ".", "data", "(", "index", ")", ")", ")", "self", ".", "cti", ".", "removeValueByIndex", "(", "row", ")", "self", ".", "comboBox", ".", "removeItem", "(", "row", ")", "return", "True", "# Calling parent event filter, which may filter out other events.", "return", "super", "(", "ChoiceCtiEditor", ",", "self", ")", ".", "eventFilter", "(", "watchedObject", ",", "event", ")" ]
Query a user from the API and return its JSON
async def get_user ( self , user_id , page_id ) : access_token = self . _access_token ( page_id = page_id ) params = { 'fields' : 'first_name,last_name,profile_pic,locale,timezone' ',gender' , 'access_token' : access_token , } url = GRAPH_ENDPOINT . format ( user_id ) get = self . session . get ( url , params = params ) async with get as r : await self . _handle_fb_response ( r ) return await r . json ( )
10,436
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L953-L971
[ "def", "compare", "(", "left", ":", "Optional", "[", "L", "]", ",", "right", ":", "Optional", "[", "R", "]", ")", "->", "'Comparison[L, R]'", ":", "if", "isinstance", "(", "left", ",", "File", ")", "and", "isinstance", "(", "right", ",", "Directory", ")", ":", "return", "FileDirectoryComparison", "(", "left", ",", "right", ")", "if", "isinstance", "(", "left", ",", "Directory", ")", "and", "isinstance", "(", "right", ",", "File", ")", ":", "return", "DirectoryFileComparison", "(", "left", ",", "right", ")", "if", "isinstance", "(", "left", ",", "File", ")", "or", "isinstance", "(", "right", ",", "File", ")", ":", "return", "FileComparison", "(", "left", ",", "right", ")", "if", "isinstance", "(", "left", ",", "Directory", ")", "or", "isinstance", "(", "right", ",", "Directory", ")", ":", "return", "DirectoryComparison", "(", "left", ",", "right", ")", "raise", "TypeError", "(", "f'Cannot compare entities: {left}, {right}'", ")" ]
So far let s just accept URL media . We ll see in the future how it goes .
async def ensure_usable_media ( self , media : BaseMedia ) -> UrlMedia : if not isinstance ( media , UrlMedia ) : raise ValueError ( 'Facebook platform only accepts URL media' ) return media
10,437
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L973-L982
[ "def", "renew_compose", "(", "self", ",", "compose_id", ")", ":", "logger", ".", "info", "(", "\"Renewing compose %d\"", ",", "compose_id", ")", "response", "=", "self", ".", "session", ".", "patch", "(", "'{}composes/{}'", ".", "format", "(", "self", ".", "url", ",", "compose_id", ")", ")", "response", ".", "raise_for_status", "(", ")", "response_json", "=", "response", ".", "json", "(", ")", "compose_id", "=", "response_json", "[", "'id'", "]", "logger", ".", "info", "(", "\"Renewed compose is %d\"", ",", "compose_id", ")", "return", "response_json" ]
Creates a fake message for the given user_id . It contains a postback with the given payload .
def _make_fake_message ( self , user_id , page_id , payload ) : event = { 'sender' : { 'id' : user_id , } , 'recipient' : { 'id' : page_id , } , 'postback' : { 'payload' : ujson . dumps ( payload ) , } , } return FacebookMessage ( event , self , False )
10,438
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L984-L1002
[ "def", "rename_sectors", "(", "self", ",", "sectors", ")", ":", "if", "type", "(", "sectors", ")", "is", "list", ":", "sectors", "=", "{", "old", ":", "new", "for", "old", ",", "new", "in", "zip", "(", "self", ".", "get_sectors", "(", ")", ",", "sectors", ")", "}", "for", "df", "in", "self", ".", "get_DataFrame", "(", "data", "=", "True", ")", ":", "df", ".", "rename", "(", "index", "=", "sectors", ",", "columns", "=", "sectors", ",", "inplace", "=", "True", ")", "try", ":", "for", "ext", "in", "self", ".", "get_extensions", "(", "data", "=", "True", ")", ":", "for", "df", "in", "ext", ".", "get_DataFrame", "(", "data", "=", "True", ")", ":", "df", ".", "rename", "(", "index", "=", "sectors", ",", "columns", "=", "sectors", ",", "inplace", "=", "True", ")", "except", ":", "pass", "self", ".", "meta", ".", "_add_modify", "(", "\"Changed sector names\"", ")", "return", "self" ]
Tries to verify the signed request
def _message_from_sr ( self , token : Text , payload : Any ) -> Optional [ BaseMessage ] : page = self . settings ( ) secret = page [ 'app_secret' ] try : sr_data = SignedRequest . parse ( token , secret ) except ( TypeError , ValueError , SignedRequestError ) as e : return return self . _make_fake_message ( sr_data [ 'psid' ] , page [ 'page_id' ] , payload , )
10,439
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L1004-L1022
[ "def", "sampleCellsWithinColumns", "(", "numCellPairs", ",", "cellsPerColumn", ",", "numColumns", ",", "seed", "=", "42", ")", ":", "np", ".", "random", ".", "seed", "(", "seed", ")", "cellPairs", "=", "[", "]", "for", "i", "in", "range", "(", "numCellPairs", ")", ":", "randCol", "=", "np", ".", "random", ".", "randint", "(", "numColumns", ")", "randCells", "=", "np", ".", "random", ".", "choice", "(", "np", ".", "arange", "(", "cellsPerColumn", ")", ",", "(", "2", ",", ")", ",", "replace", "=", "False", ")", "cellsPair", "=", "randCol", "*", "cellsPerColumn", "+", "randCells", "cellPairs", ".", "append", "(", "cellsPair", ")", "return", "cellPairs" ]
Analyzes a signed token and generates the matching message
def _message_from_token ( self , token : Text , payload : Any ) -> Optional [ BaseMessage ] : try : tk = jwt . decode ( token , settings . WEBVIEW_SECRET_KEY ) except jwt . InvalidTokenError : return try : user_id = tk [ 'fb_psid' ] assert isinstance ( user_id , Text ) page_id = tk [ 'fb_pid' ] assert isinstance ( page_id , Text ) except ( KeyError , AssertionError ) : return if self . settings ( ) [ 'page_id' ] == page_id : return self . _make_fake_message ( user_id , page_id , payload )
10,440
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/platform.py#L1024-L1044
[ "def", "update", "(", "cls", ",", "id", ",", "bandwidth", ",", "vm", ",", "background", ")", ":", "if", "not", "background", "and", "not", "cls", ".", "intty", "(", ")", ":", "background", "=", "True", "iface_params", "=", "{", "}", "iface_id", "=", "cls", ".", "usable_id", "(", "id", ")", "if", "bandwidth", ":", "iface_params", "[", "'bandwidth'", "]", "=", "bandwidth", "if", "iface_params", ":", "result", "=", "cls", ".", "call", "(", "'hosting.iface.update'", ",", "iface_id", ",", "iface_params", ")", "if", "background", ":", "return", "result", "# interactive mode, run a progress bar", "cls", ".", "echo", "(", "'Updating your iface %s.'", "%", "id", ")", "cls", ".", "display_progress", "(", "result", ")", "if", "not", "vm", ":", "return", "vm_id", "=", "Iaas", ".", "usable_id", "(", "vm", ")", "opers", "=", "cls", ".", "_detach", "(", "iface_id", ")", "if", "opers", ":", "cls", ".", "echo", "(", "'Detaching iface.'", ")", "cls", ".", "display_progress", "(", "opers", ")", "result", "=", "cls", ".", "_attach", "(", "iface_id", ",", "vm_id", ")", "if", "background", ":", "return", "result", "cls", ".", "echo", "(", "'Attaching your iface.'", ")", "cls", ".", "display_progress", "(", "result", ")" ]
Convenience function to access the transition register of a specific kind .
def get_trans_reg ( self , name : Text , default : Any = None ) -> Any : tr = self . register . get ( Register . TRANSITION , { } ) return tr . get ( name , default )
10,441
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/request.py#L189-L199
[ "def", "create_api_call", "(", "func", ",", "settings", ")", ":", "def", "base_caller", "(", "api_call", ",", "_", ",", "*", "args", ")", ":", "\"\"\"Simply call api_call and ignore settings.\"\"\"", "return", "api_call", "(", "*", "args", ")", "def", "inner", "(", "request", ",", "options", "=", "None", ")", ":", "\"\"\"Invoke with the actual settings.\"\"\"", "this_options", "=", "_merge_options_metadata", "(", "options", ",", "settings", ")", "this_settings", "=", "settings", ".", "merge", "(", "this_options", ")", "if", "this_settings", ".", "retry", "and", "this_settings", ".", "retry", ".", "retry_codes", ":", "api_call", "=", "gax", ".", "retry", ".", "retryable", "(", "func", ",", "this_settings", ".", "retry", ",", "*", "*", "this_settings", ".", "kwargs", ")", "else", ":", "api_call", "=", "gax", ".", "retry", ".", "add_timeout_arg", "(", "func", ",", "this_settings", ".", "timeout", ",", "*", "*", "this_settings", ".", "kwargs", ")", "api_call", "=", "_catch_errors", "(", "api_call", ",", "gax", ".", "config", ".", "API_ERRORS", ")", "return", "api_caller", "(", "api_call", ",", "this_settings", ",", "request", ")", "if", "settings", ".", "page_descriptor", ":", "if", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "raise", "ValueError", "(", "'The API call has incompatible settings: '", "'bundling and page streaming'", ")", "api_caller", "=", "_page_streamable", "(", "settings", ".", "page_descriptor", ")", "elif", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "api_caller", "=", "_bundleable", "(", "settings", ".", "bundle_descriptor", ")", "else", ":", "api_caller", "=", "base_caller", "return", "inner" ]
Get the locale to use for this request . It s either the overridden locale or the locale provided by the platform .
async def get_locale ( self ) -> Text : if self . _locale_override : return self . _locale_override else : return await self . user . get_locale ( )
10,442
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/request.py#L229-L240
[ "def", "ready", "(", "self", ",", "source", ")", ":", "from", "hfos", ".", "database", "import", "configschemastore", "configschemastore", "[", "self", ".", "name", "]", "=", "self", ".", "configschema", "self", ".", "_start_server", "(", ")", "if", "not", "self", ".", "insecure", ":", "self", ".", "_drop_privileges", "(", ")", "self", ".", "fireEvent", "(", "cli_register_event", "(", "'components'", ",", "cli_components", ")", ")", "self", ".", "fireEvent", "(", "cli_register_event", "(", "'drop_privileges'", ",", "cli_drop_privileges", ")", ")", "self", ".", "fireEvent", "(", "cli_register_event", "(", "'reload_db'", ",", "cli_reload_db", ")", ")", "self", ".", "fireEvent", "(", "cli_register_event", "(", "'reload'", ",", "cli_reload", ")", ")", "self", ".", "fireEvent", "(", "cli_register_event", "(", "'quit'", ",", "cli_quit", ")", ")", "self", ".", "fireEvent", "(", "cli_register_event", "(", "'info'", ",", "cli_info", ")", ")" ]
Gives a chance to middlewares to make the translation flags
async def get_trans_flags ( self ) -> 'Flags' : from bernard . middleware import MiddlewareManager async def make_flags ( request : Request ) -> 'Flags' : return { } mf = MiddlewareManager . instance ( ) . get ( 'make_trans_flags' , make_flags ) return await mf ( self )
10,443
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/request.py#L242-L253
[ "def", "public_broadcaster", "(", ")", ":", "while", "__websocket_server_running__", ":", "pipein", "=", "open", "(", "PUBLIC_PIPE", ",", "'r'", ")", "line", "=", "pipein", ".", "readline", "(", ")", ".", "replace", "(", "'\\n'", ",", "''", ")", ".", "replace", "(", "'\\r'", ",", "''", ")", "if", "line", "!=", "''", ":", "WebSocketHandler", ".", "broadcast", "(", "line", ")", "print", "line", "remaining_lines", "=", "pipein", ".", "read", "(", ")", "pipein", ".", "close", "(", ")", "pipeout", "=", "open", "(", "PUBLIC_PIPE", ",", "'w'", ")", "pipeout", ".", "write", "(", "remaining_lines", ")", "pipeout", ".", "close", "(", ")", "else", ":", "pipein", ".", "close", "(", ")", "time", ".", "sleep", "(", "0.05", ")" ]
Sign an URL with this request s auth token
async def sign_url ( self , url , method = HASH ) : token = await self . get_token ( ) if method == self . QUERY : return patch_qs ( url , { settings . WEBVIEW_TOKEN_KEY : token , } ) elif method == self . HASH : hash_id = 5 p = list ( urlparse ( url ) ) p [ hash_id ] = quote ( token ) return urlunparse ( p ) else : raise ValueError ( f'Invalid signing method "{method}"' )
10,444
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/request.py#L262-L279
[ "def", "start_processing_handler", "(", "self", ",", "event", ")", ":", "results_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "configuration", "[", "'results_folder'", "]", ",", "\"filesystem.json\"", ")", "self", ".", "logger", ".", "debug", "(", "\"Event %s: start comparing %s with %s.\"", ",", "event", ",", "self", ".", "checkpoints", "[", "0", "]", ",", "self", ".", "checkpoints", "[", "1", "]", ")", "results", "=", "compare_disks", "(", "self", ".", "checkpoints", "[", "0", "]", ",", "self", ".", "checkpoints", "[", "1", "]", ",", "self", ".", "configuration", ")", "with", "open", "(", "results_path", ",", "'w'", ")", "as", "results_file", ":", "json", ".", "dump", "(", "results", ",", "results_file", ")", "self", ".", "processing_done", ".", "set", "(", ")" ]
Perform a copy of the layers list in order to avoid the list changing without updating the index .
def layers ( self , value : List [ 'BaseLayer' ] ) : self . _layers = list ( value ) # type: List[BaseLayer] self . _index = self . _make_index ( ) self . _transformed = { }
10,445
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/layers/stack.py#L66-L75
[ "def", "prt_tsv_hdr", "(", "prt", ",", "data_nts", ",", "*", "*", "kws", ")", ":", "sep", "=", "\"\\t\"", "if", "'sep'", "not", "in", "kws", "else", "kws", "[", "'sep'", "]", "flds_all", "=", "data_nts", "[", "0", "]", ".", "_fields", "hdrs", "=", "get_hdrs", "(", "flds_all", ",", "*", "*", "kws", ")", "prt", ".", "write", "(", "\"# {}\\n\"", ".", "format", "(", "sep", ".", "join", "(", "hdrs", ")", ")", ")" ]
Perform the index computation . It groups layers by type into a dictionary to allow quick access .
def _make_index ( self ) : out = { } for layer in self . _layers : cls = layer . __class__ out [ cls ] = out . get ( cls , [ ] ) + [ layer ] return out
10,446
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/layers/stack.py#L77-L89
[ "def", "add_metadata", "(", "file_name", ",", "title", ",", "artist", ",", "album", ")", ":", "tags", "=", "EasyMP3", "(", "file_name", ")", "if", "title", ":", "tags", "[", "\"title\"", "]", "=", "title", "if", "artist", ":", "tags", "[", "\"artist\"", "]", "=", "artist", "if", "album", ":", "tags", "[", "\"album\"", "]", "=", "album", "tags", ".", "save", "(", ")", "return", "file_name" ]
Test the presence of a given layer type .
def has_layer ( self , class_ : Type [ L ] , became : bool = True ) -> bool : return ( class_ in self . _index or ( became and class_ in self . _transformed ) )
10,447
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/layers/stack.py#L101-L110
[ "def", "_make_headers", "(", "config", ",", "kwargs", ")", ":", "headers", "=", "kwargs", ".", "get", "(", "'headers'", ")", "headers", "=", "headers", ".", "copy", "(", ")", "if", "headers", "is", "not", "None", "else", "{", "}", "headers", "[", "'User-Agent'", "]", "=", "config", ".", "args", ".", "user_agent", "kwargs", "=", "kwargs", ".", "copy", "(", ")", "kwargs", "[", "'headers'", "]", "=", "headers", "return", "kwargs" ]
Return the first layer of a given class . If that layer is not present then raise a KeyError .
def get_layer ( self , class_ : Type [ L ] , became : bool = True ) -> L : try : return self . _index [ class_ ] [ 0 ] except KeyError : if became : return self . _transformed [ class_ ] [ 0 ] else : raise
10,448
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/layers/stack.py#L112-L127
[ "def", "_read_config", "(", "config_location", ")", ":", "global", "LOGGING_CONFIG", "with", "open", "(", "config_location", ",", "\"r\"", ")", "as", "config_loc", ":", "cfg_file", "=", "json", ".", "load", "(", "config_loc", ")", "if", "\"logging\"", "in", "cfg_file", ":", "log_dict", "=", "cfg_file", ".", "get", "(", "\"logging\"", ")", "with", "open", "(", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "__file__", ",", "os", ".", "path", ".", "pardir", ",", "'logging_schema.json'", ")", ")", ")", "as", "schema_file", ":", "logging_schema", "=", "json", ".", "load", "(", "schema_file", ")", "jsonschema", ".", "validate", "(", "log_dict", ",", "logging_schema", ")", "merged", "=", "jsonmerge", ".", "merge", "(", "LOGGING_CONFIG", ",", "log_dict", ")", "LOGGING_CONFIG", "=", "merged" ]
Returns the list of layers of a given class . If no layers are present then the list will be empty .
def get_layers ( self , class_ : Type [ L ] , became : bool = True ) -> List [ L ] : out = self . _index . get ( class_ , [ ] ) if became : out += self . _transformed . get ( class_ , [ ] ) return out
10,449
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/layers/stack.py#L129-L143
[ "def", "user_agent", "(", "self", ",", "text", ",", "*", "*", "kwargs", ")", ":", "indicator_obj", "=", "UserAgent", "(", "text", ",", "*", "*", "kwargs", ")", "return", "self", ".", "_indicator", "(", "indicator_obj", ")" ]
Checks that if a variable exists for the trajectory id it has the appropriate attributes
def check_trajectory_id ( self , dataset ) : results = [ ] exists_ctx = TestCtx ( BaseCheck . MEDIUM , 'Variable defining "trajectory_id" exists' ) trajectory_ids = dataset . get_variables_by_attributes ( cf_role = 'trajectory_id' ) # No need to check exists_ctx . assert_true ( trajectory_ids , 'variable defining cf_role="trajectory_id" exists' ) if not trajectory_ids : return exists_ctx . to_result ( ) results . append ( exists_ctx . to_result ( ) ) test_ctx = TestCtx ( BaseCheck . MEDIUM , 'Recommended attributes for the {} variable' . format ( trajectory_ids [ 0 ] . name ) ) test_ctx . assert_true ( getattr ( trajectory_ids [ 0 ] , 'long_name' , '' ) != "" , "long_name attribute should exist and not be empty" ) results . append ( test_ctx . to_result ( ) ) return results
10,450
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_trajectory.py#L41-L61
[ "def", "Run", "(", "self", ")", ":", "global", "DB", "# pylint: disable=global-statement", "global", "REL_DB", "# pylint: disable=global-statement", "global", "BLOBS", "# pylint: disable=global-statement", "if", "flags", ".", "FLAGS", ".", "list_storage", ":", "self", ".", "_ListStorageOptions", "(", ")", "sys", ".", "exit", "(", "0", ")", "try", ":", "cls", "=", "DataStore", ".", "GetPlugin", "(", "config", ".", "CONFIG", "[", "\"Datastore.implementation\"", "]", ")", "except", "KeyError", ":", "msg", "=", "(", "\"No Storage System %s found.\"", "%", "config", ".", "CONFIG", "[", "\"Datastore.implementation\"", "]", ")", "if", "config", ".", "CONFIG", "[", "\"Datastore.implementation\"", "]", "==", "\"SqliteDataStore\"", ":", "msg", "=", "\"The SQLite datastore is no longer supported.\"", "print", "(", "msg", ")", "print", "(", "\"Available options:\"", ")", "self", ".", "_ListStorageOptions", "(", ")", "raise", "ValueError", "(", "msg", ")", "DB", "=", "cls", "(", ")", "# pylint: disable=g-bad-name", "DB", ".", "Initialize", "(", ")", "atexit", ".", "register", "(", "DB", ".", "Flush", ")", "monitor_port", "=", "config", ".", "CONFIG", "[", "\"Monitoring.http_port\"", "]", "if", "monitor_port", "!=", "0", ":", "DB", ".", "InitializeMonitorThread", "(", ")", "# Initialize the blobstore.", "blobstore_name", "=", "config", ".", "CONFIG", ".", "Get", "(", "\"Blobstore.implementation\"", ")", "try", ":", "cls", "=", "blob_store", ".", "REGISTRY", "[", "blobstore_name", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"No blob store %s found.\"", "%", "blobstore_name", ")", "BLOBS", "=", "blob_store", ".", "BlobStoreValidationWrapper", "(", "cls", "(", ")", ")", "# Initialize a relational DB if configured.", "rel_db_name", "=", "config", ".", "CONFIG", "[", "\"Database.implementation\"", "]", "if", "not", "rel_db_name", ":", "return", "try", ":", "cls", "=", "registry_init", ".", "REGISTRY", "[", "rel_db_name", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"Database %s not found.\"", "%", "rel_db_name", ")", "logging", ".", "info", "(", "\"Using database implementation %s\"", ",", "rel_db_name", ")", "REL_DB", "=", "db", ".", "DatabaseValidationWrapper", "(", "cls", "(", ")", ")" ]
Feature type specific check of global required and highly recommended attributes .
def check_required_attributes ( self , dataset ) : results = [ ] required_ctx = TestCtx ( BaseCheck . HIGH , 'Required Global Attributes for Trajectory dataset' ) required_ctx . assert_true ( getattr ( dataset , 'nodc_template_version' , '' ) . lower ( ) == self . valid_templates [ 0 ] . lower ( ) , 'nodc_template_version attribute must be {}' . format ( self . valid_templates [ 0 ] ) ) required_ctx . assert_true ( getattr ( dataset , 'cdm_data_type' , '' ) == 'Trajectory' , 'cdm_data_type attribute must be set to Trajectory' ) required_ctx . assert_true ( getattr ( dataset , 'featureType' , '' ) == 'trajectory' , 'featureType attribute must be set to trajectory' ) results . append ( required_ctx . to_result ( ) ) return results
10,451
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_trajectory.py#L92-L113
[ "def", "connect_to_ipykernel", "(", "self", ",", "service_name", ",", "timeout", "=", "10", ")", ":", "kernel_json_file", "=", "self", ".", "wait_for_ipykernel", "(", "service_name", ",", "timeout", "=", "10", ")", "self", ".", "start_interactive_mode", "(", ")", "subprocess", ".", "check_call", "(", "[", "sys", ".", "executable", ",", "\"-m\"", ",", "\"IPython\"", ",", "\"console\"", ",", "\"--existing\"", ",", "kernel_json_file", "]", ")", "self", ".", "stop_interactive_mode", "(", ")" ]
Sets the current user UID in the session .
def login ( self , user , remember = True , session = None ) : logger = logging . getLogger ( __name__ ) logger . debug ( u'User `{0}` logged in' . format ( user . login ) ) if session is None : session = self . session session [ 'permanent' ] = remember session [ self . session_key ] = user . get_uhmac ( ) if callable ( getattr ( session , 'save' , None ) ) : session . save ( )
10,452
https://github.com/jpscaletti/authcode/blob/91529b6d0caec07d1452758d937e1e0745826139/authcode/auth_authentication_mixin.py#L130-L146
[ "def", "close", "(", "self", ")", ":", "if", "not", "(", "yield", "from", "super", "(", ")", ".", "close", "(", ")", ")", ":", "return", "False", "for", "adapter", "in", "self", ".", "_ethernet_adapters", ".", "values", "(", ")", ":", "if", "adapter", "is", "not", "None", ":", "for", "nio", "in", "adapter", ".", "ports", ".", "values", "(", ")", ":", "if", "nio", "and", "isinstance", "(", "nio", ",", "NIOUDP", ")", ":", "self", ".", "manager", ".", "port_manager", ".", "release_udp_port", "(", "nio", ".", "lport", ",", "self", ".", "_project", ")", "try", ":", "self", ".", "acpi_shutdown", "=", "False", "yield", "from", "self", ".", "stop", "(", ")", "except", "VMwareError", ":", "pass", "if", "self", ".", "linked_clone", ":", "yield", "from", "self", ".", "manager", ".", "remove_from_vmware_inventory", "(", "self", ".", "_vmx_path", ")" ]
Return the index position of an element in the children of a parent .
def index ( elem ) : parent = elem . getparent ( ) for x in range ( 0 , len ( parent . getchildren ( ) ) ) : if parent . getchildren ( ) [ x ] == elem : return x return - 1
10,453
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/xmlutils.py#L18-L26
[ "def", "wrap_conn", "(", "conn_func", ")", ":", "def", "call", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "conn", "=", "conn_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "cursor_func", "=", "getattr", "(", "conn", ",", "CURSOR_WRAP_METHOD", ")", "wrapped", "=", "wrap_cursor", "(", "cursor_func", ")", "setattr", "(", "conn", ",", "cursor_func", ".", "__name__", ",", "wrapped", ")", "return", "conn", "except", "Exception", ":", "# pragma: NO COVER", "logging", ".", "warning", "(", "'Fail to wrap conn, mysql not traced.'", ")", "return", "conn_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "call" ]
Given a parent element replace oldelem with newelem .
def replaceelement ( oldelem , newelem ) : parent = oldelem . getparent ( ) if parent is not None : size = len ( parent . getchildren ( ) ) for x in range ( 0 , size ) : if parent . getchildren ( ) [ x ] == oldelem : parent . remove ( oldelem ) parent . insert ( x , newelem )
10,454
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/xmlutils.py#L29-L39
[ "def", "restclient_admin_required", "(", "view_func", ")", ":", "def", "wrapper", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "template", "=", "'access_denied.html'", "if", "hasattr", "(", "settings", ",", "'RESTCLIENTS_ADMIN_AUTH_MODULE'", ")", ":", "auth_func", "=", "import_string", "(", "settings", ".", "RESTCLIENTS_ADMIN_AUTH_MODULE", ")", "else", ":", "context", "=", "{", "'error_msg'", ":", "(", "\"Your application must define an authorization function as \"", "\"RESTCLIENTS_ADMIN_AUTH_MODULE in settings.py.\"", ")", "}", "return", "render", "(", "request", ",", "template", ",", "context", "=", "context", ",", "status", "=", "401", ")", "service", "=", "args", "[", "0", "]", "if", "len", "(", "args", ")", ">", "0", "else", "None", "url", "=", "args", "[", "1", "]", "if", "len", "(", "args", ")", ">", "1", "else", "None", "if", "auth_func", "(", "request", ",", "service", ",", "url", ")", ":", "return", "view_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "render", "(", "request", ",", "template", ",", "status", "=", "401", ")", "return", "login_required", "(", "function", "=", "wrapper", ")" ]
Convert the content of an element into more ElementTree structures . We do this because sometimes we want to set xml as the content of an element .
def parseelement ( elem ) : xml = '<%(tag)s>%(content)s</%(tag)s>' % { 'tag' : elem . tag , 'content' : elem . text } et = etree . fromstring ( xml ) replaceelement ( elem , et )
10,455
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/xmlutils.py#L42-L49
[ "def", "compare_hives", "(", "fs0", ",", "fs1", ")", ":", "registries", "=", "[", "]", "for", "path", "in", "chain", "(", "registries_path", "(", "fs0", ".", "fsroot", ")", ",", "user_registries", "(", "fs0", ",", "fs1", ")", ")", ":", "if", "fs0", ".", "checksum", "(", "path", ")", "!=", "fs1", ".", "checksum", "(", "path", ")", ":", "registries", ".", "append", "(", "path", ")", "return", "registries" ]
Checks that either both valid_min and valid_max exist or valid_range exists .
def _check_min_max_range ( self , var , test_ctx ) : if 'valid_range' in var . ncattrs ( ) : test_ctx . assert_true ( var . valid_range . dtype == var . dtype and len ( var . valid_range ) == 2 and var . valid_range [ 0 ] <= var . valid_range [ 1 ] , "valid_range must be a two element vector of min followed by max with the same data type as {}" . format ( var . name ) ) else : for bound in ( 'valid_min' , 'valid_max' ) : v_bound = getattr ( var , bound , '' ) warn_msg = '{} attribute should exist, have the same type as {}, and not be empty or valid_range should be defined' . format ( bound , var . name ) # need to special case str attributes since they aren't directly # comparable to numpy dtypes if isinstance ( v_bound , six . string_types ) : test_ctx . assert_true ( v_bound != '' and var . dtype . char == 'S' , warn_msg ) # otherwise compare the numpy types directly else : test_ctx . assert_true ( v_bound . dtype == var . dtype , warn_msg ) return test_ctx
10,456
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_base.py#L92-L115
[ "def", "rotate_bitmaps_to_roots", "(", "bitmaps", ",", "roots", ")", ":", "abs_bitmaps", "=", "[", "]", "for", "bitmap", ",", "chord_root", "in", "zip", "(", "bitmaps", ",", "roots", ")", ":", "abs_bitmaps", ".", "append", "(", "rotate_bitmap_to_root", "(", "bitmap", ",", "chord_root", ")", ")", "return", "np", ".", "asarray", "(", "abs_bitmaps", ")" ]
Check the global required and highly recommended attributes for 1 . 1 templates . These go an extra step besides just checking that they exist .
def check_base_required_attributes ( self , dataset ) : test_ctx = TestCtx ( BaseCheck . HIGH , 'Required global attributes' ) conventions = getattr ( dataset , 'Conventions' , '' ) metadata_conventions = getattr ( dataset , 'Metadata_Conventions' , '' ) feature_type = getattr ( dataset , 'featureType' , '' ) cdm_data_type = getattr ( dataset , 'cdm_data_type' , '' ) standard_name_vocab = getattr ( dataset , 'standard_name_vocabulary' , '' ) accepted_conventions = 'CF-1.6' test_ctx . assert_true ( conventions == accepted_conventions , 'Conventions attribute is missing or is not equal to CF-1.6: {}' . format ( conventions ) ) test_ctx . assert_true ( metadata_conventions == 'Unidata Dataset Discovery v1.0' , "Metadata_Conventions attribute is required to be 'Unidata Dataset Discovery v1.0': {}" . format ( metadata_conventions ) ) test_ctx . assert_true ( feature_type in [ 'point' , 'timeSeries' , 'trajectory' , 'profile' , 'timeSeriesProfile' , 'trajectoryProfile' ] , 'Feature type must be one of point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile: {}' . format ( feature_type ) ) test_ctx . assert_true ( cdm_data_type . lower ( ) in [ 'grid' , 'image' , 'point' , 'radial' , 'station' , 'swath' , 'trajectory' ] , 'cdm_data_type must be one of Grid, Image, Point, Radial, Station, Swath, Trajectory: {}' . format ( cdm_data_type ) ) regex = re . compile ( r'[sS]tandard [nN]ame [tT]able' ) test_ctx . assert_true ( regex . search ( standard_name_vocab ) , "standard_name_vocabulary doesn't contain 'Standard Name Table': {}" . format ( standard_name_vocab ) ) return test_ctx . to_result ( )
10,457
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_base.py#L419-L457
[ "def", "_decrypt_object", "(", "obj", ",", "translate_newlines", "=", "False", ")", ":", "if", "salt", ".", "utils", ".", "stringio", ".", "is_readable", "(", "obj", ")", ":", "return", "_decrypt_object", "(", "obj", ".", "getvalue", "(", ")", ",", "translate_newlines", ")", "if", "isinstance", "(", "obj", ",", "six", ".", "string_types", ")", ":", "try", ":", "return", "_decrypt_ciphertext", "(", "obj", ",", "translate_newlines", "=", "translate_newlines", ")", "except", "(", "fernet", ".", "InvalidToken", ",", "TypeError", ")", ":", "return", "obj", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "obj", ")", ":", "obj", "[", "key", "]", "=", "_decrypt_object", "(", "value", ",", "translate_newlines", "=", "translate_newlines", ")", "return", "obj", "elif", "isinstance", "(", "obj", ",", "list", ")", ":", "for", "key", ",", "value", "in", "enumerate", "(", "obj", ")", ":", "obj", "[", "key", "]", "=", "_decrypt_object", "(", "value", ",", "translate_newlines", "=", "translate_newlines", ")", "return", "obj", "else", ":", "return", "obj" ]
Check the global required and highly recommended attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
def check_base_required_attributes ( self , dataset ) : test_ctx = TestCtx ( BaseCheck . HIGH , 'Required global attributes' ) conventions = getattr ( dataset , 'Conventions' , '' ) feature_type = getattr ( dataset , 'featureType' , '' ) # Define conventions accepted_conventions = [ 'CF-1.6' , 'ACDD-1.3' ] dataset_conventions = conventions . replace ( ' ' , '' ) . split ( ',' ) for accepted_convention in accepted_conventions : if accepted_convention not in dataset_conventions : test_ctx . assert_true ( False , 'Conventions attribute is missing or is not equal to "CF-1.6, ACDD-1.3": {}' . format ( conventions ) ) break else : test_ctx . assert_true ( True , '' ) # Check feature types test_ctx . assert_true ( feature_type in [ 'point' , 'timeSeries' , 'trajectory' , 'profile' , 'timeSeriesProfile' , 'trajectoryProfile' ] , 'Feature type must be one of point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile: {}' . format ( feature_type ) ) return test_ctx . to_result ( )
10,458
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_base.py#L738-L773
[ "def", "delete_conversations", "(", "self", ",", "ids", ")", ":", "str_ids", "=", "self", ".", "_return_comma_list", "(", "ids", ")", "self", ".", "request", "(", "'ConvAction'", ",", "{", "'action'", ":", "{", "'op'", ":", "'delete'", ",", "'id'", ":", "str_ids", "}", "}", ")" ]
Check the global recommended attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
def check_recommended_global_attributes ( self , dataset ) : recommended_ctx = TestCtx ( BaseCheck . MEDIUM , 'Recommended global attributes' ) sea_names = [ sn . lower ( ) for sn in util . get_sea_names ( ) ] sea_name = getattr ( dataset , 'sea_name' , '' ) sea_name = sea_name . replace ( ', ' , ',' ) sea_name = sea_name . split ( ',' ) if sea_name else [ ] for sea in sea_name : recommended_ctx . assert_true ( sea . lower ( ) in sea_names , 'sea_name attribute should exist and should be from the NODC sea names list: {} is not a valid sea name' . format ( sea ) ) # Parse dates, check for ISO 8601 for attr in [ 'time_coverage_start' , 'time_coverage_end' , 'date_created' , 'date_modified' ] : attr_value = getattr ( dataset , attr , '' ) try : parse_datetime ( attr_value ) recommended_ctx . assert_true ( True , '' ) # Score it True! except ISO8601Error : recommended_ctx . assert_true ( False , '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}' . format ( attr , attr_value ) ) value = getattr ( dataset , 'geospatial_vertical_positive' , '' ) recommended_ctx . assert_true ( value . lower ( ) in [ 'up' , 'down' ] , 'geospatial_vertical_positive attribute should be up or down: {}' . format ( value ) ) # I hate english. ack_exists = any ( ( getattr ( dataset , attr , '' ) != '' for attr in [ 'acknowledgment' , 'acknowledgement' ] ) ) recommended_ctx . assert_true ( ack_exists , 'acknowledgement attribute should exist and not be empty' ) standard_name_vocab = getattr ( dataset , 'standard_name_vocabulary' , '' ) regex = re . compile ( r'[sS]tandard [nN]ame [tT]able' ) recommended_ctx . assert_true ( regex . search ( standard_name_vocab ) , "standard_name_vocabulary doesn't contain 'Standard Name Table': {}" . format ( standard_name_vocab ) ) if hasattr ( dataset , 'comment' ) : recommended_ctx . assert_true ( getattr ( dataset , 'comment' , '' ) != '' , 'comment attribute should not be empty if specified' ) return recommended_ctx . to_result ( )
10,459
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_base.py#L775-L853
[ "def", "close", "(", "self", ")", "->", "None", ":", "self", ".", "__data_channel_buffer", ".", "stop", "(", ")", "self", ".", "__data_channel_buffer", ".", "close", "(", ")", "self", ".", "__data_channel_buffer", "=", "None", "if", "not", "self", ".", "__was_playing", ":", "self", ".", "__hardware_source", ".", "stop_playing", "(", ")" ]
Check the global suggested attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
def check_base_suggested_attributes ( self , dataset ) : suggested_ctx = TestCtx ( BaseCheck . LOW , 'Suggested global attributes' ) # Do any of the variables define platform ? platform_name = getattr ( dataset , 'platform' , '' ) suggested_ctx . assert_true ( platform_name != '' , 'platform should exist and point to a term in :platform_vocabulary.' ) cdm_data_type = getattr ( dataset , 'cdm_data_type' , '' ) suggested_ctx . assert_true ( cdm_data_type . lower ( ) in [ 'grid' , 'image' , 'point' , 'radial' , 'station' , 'swath' , 'trajectory' ] , 'cdm_data_type must be one of Grid, Image, Point, Radial, Station, Swath, Trajectory: {}' . format ( cdm_data_type ) ) # Parse dates, check for ISO 8601 for attr in [ 'date_modified' , 'date_issued' , 'date_metadata_modified' ] : attr_value = getattr ( dataset , attr , '' ) try : parse_datetime ( attr_value ) suggested_ctx . assert_true ( True , '' ) # Score it True! except ISO8601Error : suggested_ctx . assert_true ( False , '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}' . format ( attr , attr_value ) ) units = getattr ( dataset , 'geospatial_lat_units' , '' ) . lower ( ) suggested_ctx . assert_true ( units == 'degrees_north' , 'geospatial_lat_units attribute should be degrees_north: {}' . format ( units ) ) units = getattr ( dataset , 'geospatial_lon_units' , '' ) . lower ( ) suggested_ctx . assert_true ( units == 'degrees_east' , 'geospatial_lon_units attribute should be degrees_east: {}' . format ( units ) ) contributor_name = getattr ( dataset , 'contributor_name' , '' ) contributor_role = getattr ( dataset , 'contributor_role' , '' ) names = contributor_role . split ( ',' ) roles = contributor_role . split ( ',' ) suggested_ctx . assert_true ( contributor_name != '' , 'contributor_name should exist and not be empty.' ) suggested_ctx . assert_true ( len ( names ) == len ( roles ) , 'length of contributor names matches length of roles' ) suggested_ctx . assert_true ( contributor_role != '' , 'contributor_role should exist and not be empty.' ) suggested_ctx . assert_true ( len ( names ) == len ( roles ) , 'length of contributor names matches length of roles' ) return suggested_ctx . to_result ( )
10,460
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_base.py#L855-L919
[ "def", "export_public_keys", "(", "self", ",", "identities", ")", ":", "public_keys", "=", "[", "]", "with", "self", ".", "device", ":", "for", "i", "in", "identities", ":", "pubkey", "=", "self", ".", "device", ".", "pubkey", "(", "identity", "=", "i", ")", "vk", "=", "formats", ".", "decompress_pubkey", "(", "pubkey", "=", "pubkey", ",", "curve_name", "=", "i", ".", "curve_name", ")", "public_key", "=", "formats", ".", "export_public_key", "(", "vk", "=", "vk", ",", "label", "=", "i", ".", "to_string", "(", ")", ")", "public_keys", ".", "append", "(", "public_key", ")", "return", "public_keys" ]
Configure the core of sirbot
def _configure ( self ) : path = os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , 'config.yml' ) with open ( path ) as file : defaultconfig = yaml . load ( file ) self . config = merge_dict ( self . config , defaultconfig ) if 'logging' in self . config : logging . config . dictConfig ( self . config [ 'logging' ] ) else : logging . getLogger ( 'sirbot' ) . setLevel ( 'INFO' )
10,461
https://github.com/pyslackers/sir-bot-a-lot/blob/22dfdd6a14d61dbe29423fd131b7a23e618b68d7/sirbot/core/core.py#L64-L83
[ "def", "list_conferences_groups", "(", "self", ",", "group_id", ")", ":", "path", "=", "{", "}", "data", "=", "{", "}", "params", "=", "{", "}", "# REQUIRED - PATH - group_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"group_id\"", "]", "=", "group_id", "self", ".", "logger", ".", "debug", "(", "\"GET /api/v1/groups/{group_id}/conferences with query params: {params} and form data: {data}\"", ".", "format", "(", "params", "=", "params", ",", "data", "=", "data", ",", "*", "*", "path", ")", ")", "return", "self", ".", "generic_request", "(", "\"GET\"", ",", "\"/api/v1/groups/{group_id}/conferences\"", ".", "format", "(", "*", "*", "path", ")", ",", "data", "=", "data", ",", "params", "=", "params", ",", "all_pages", "=", "True", ")" ]
Import and register plugin in the plugin manager .
def _import_plugins ( self ) -> None : logger . debug ( 'Importing plugins' ) self . _pm = pluggy . PluginManager ( 'sirbot' ) self . _pm . add_hookspecs ( hookspecs ) for plugin in self . config [ 'sirbot' ] [ 'plugins' ] : try : p = importlib . import_module ( plugin ) except ( ModuleNotFoundError , ) : if os . getcwd ( ) not in sys . path : sys . path . append ( os . getcwd ( ) ) p = importlib . import_module ( plugin ) else : raise self . _pm . register ( p )
10,462
https://github.com/pyslackers/sir-bot-a-lot/blob/22dfdd6a14d61dbe29423fd131b7a23e618b68d7/sirbot/core/core.py#L107-L126
[ "def", "dump_text", "(", "self", ",", "filename", ")", ":", "_safe_call", "(", "_LIB", ".", "LGBM_DatasetDumpText", "(", "self", ".", "construct", "(", ")", ".", "handle", ",", "c_str", "(", "filename", ")", ")", ")", "return", "self" ]
Initialize the plugins
def _initialize_plugins ( self ) : logger . debug ( 'Initializing plugins' ) plugins = self . _pm . hook . plugins ( loop = self . _loop ) if plugins : for plugin in plugins : name = plugin . __name__ registry_name = plugin . __registry__ or plugin . __name__ config = self . config . get ( name , { } ) priority = config . get ( 'priority' , 50 ) if priority : self . _plugins [ name ] = { 'plugin' : plugin , 'config' : config , 'priority' : priority , 'factory' : registry_name } self . _start_priority [ priority ] . append ( name ) else : logger . error ( 'No plugins found' )
10,463
https://github.com/pyslackers/sir-bot-a-lot/blob/22dfdd6a14d61dbe29423fd131b7a23e618b68d7/sirbot/core/core.py#L128-L155
[ "def", "ReleaseFileObject", "(", "self", ",", "file_object", ")", ":", "identifier", ",", "cache_value", "=", "self", ".", "_file_object_cache", ".", "GetCacheValueByObject", "(", "file_object", ")", "if", "not", "identifier", ":", "raise", "RuntimeError", "(", "'Object not cached.'", ")", "if", "not", "cache_value", ":", "raise", "RuntimeError", "(", "'Invalid cache value.'", ")", "self", ".", "_file_object_cache", ".", "ReleaseObject", "(", "identifier", ")", "result", "=", "cache_value", ".", "IsDereferenced", "(", ")", "if", "result", ":", "self", ".", "_file_object_cache", ".", "RemoveObject", "(", "identifier", ")", "return", "result" ]
Index the available factories
def _register_factory ( self ) : for name , info in self . _plugins . items ( ) : if info [ 'priority' ] : factory = getattr ( info [ 'plugin' ] , 'factory' , None ) if callable ( factory ) : registry [ info [ 'factory' ] ] = info [ 'plugin' ] . factory registry . freeze ( )
10,464
https://github.com/pyslackers/sir-bot-a-lot/blob/22dfdd6a14d61dbe29423fd131b7a23e618b68d7/sirbot/core/core.py#L157-L168
[ "def", "analyzePython", "(", "code_text", ")", ":", "code", ",", "comment", ",", "docstr", "=", "0", ",", "0", ",", "0", "p1", "=", "r\"\"\"(?<=%s)[\\s\\S]*?(?=%s)\"\"\"", "%", "(", "'\"\"\"'", ",", "'\"\"\"'", ")", "p2", "=", "r\"\"\"(?<=%s)[\\s\\S]*?(?=%s)\"\"\"", "%", "(", "\"'''\"", ",", "\"'''\"", ")", "# count docstr", "for", "pattern", "in", "[", "p1", ",", "p2", "]", ":", "for", "res", "in", "re", ".", "findall", "(", "pattern", ",", "code_text", ")", "[", ":", ":", "2", "]", ":", "lines", "=", "[", "i", ".", "strip", "(", ")", "for", "i", "in", "res", ".", "split", "(", "\"\\n\"", ")", "if", "i", ".", "strip", "(", ")", "]", "docstr", "+=", "len", "(", "lines", ")", "# count comment line and code", "lines", "=", "[", "i", ".", "strip", "(", ")", "for", "i", "in", "code_text", ".", "split", "(", "\"\\n\"", ")", "if", "i", ".", "strip", "(", ")", "]", "for", "line", "in", "lines", ":", "if", "line", ".", "startswith", "(", "\"#\"", ")", ":", "comment", "+=", "1", "else", ":", "code", "+=", "1", "purecode", "=", "code", "-", "docstr", "# pure code = code - docstr", "return", "code", ",", "comment", ",", "docstr", ",", "purecode" ]
Configure the plugins
async def _configure_plugins ( self ) -> None : logger . debug ( 'Configuring plugins' ) funcs = [ info [ 'plugin' ] . configure ( config = info [ 'config' ] , session = self . _session , router = self . app . router ) for info in self . _plugins . values ( ) ] if funcs : await asyncio . gather ( * funcs , loop = self . _loop ) logger . debug ( 'Plugins configured' )
10,465
https://github.com/pyslackers/sir-bot-a-lot/blob/22dfdd6a14d61dbe29423fd131b7a23e618b68d7/sirbot/core/core.py#L170-L189
[ "def", "ReleaseFileObject", "(", "self", ",", "file_object", ")", ":", "identifier", ",", "cache_value", "=", "self", ".", "_file_object_cache", ".", "GetCacheValueByObject", "(", "file_object", ")", "if", "not", "identifier", ":", "raise", "RuntimeError", "(", "'Object not cached.'", ")", "if", "not", "cache_value", ":", "raise", "RuntimeError", "(", "'Invalid cache value.'", ")", "self", ".", "_file_object_cache", ".", "ReleaseObject", "(", "identifier", ")", "result", "=", "cache_value", ".", "IsDereferenced", "(", ")", "if", "result", ":", "self", ".", "_file_object_cache", ".", "RemoveObject", "(", "identifier", ")", "return", "result" ]
Start the plugins by priority
async def _start_plugins ( self ) -> None : logger . debug ( 'Starting plugins' ) for priority in sorted ( self . _start_priority , reverse = True ) : logger . debug ( 'Starting plugins %s' , ', ' . join ( self . _start_priority [ priority ] ) ) for name in self . _start_priority [ priority ] : plugin = self . _plugins [ name ] self . _tasks [ name ] = self . _loop . create_task ( plugin [ 'plugin' ] . start ( ) ) while not all ( self . _plugins [ name ] [ 'plugin' ] . started for name in self . _tasks ) : for task in self . _tasks . values ( ) : if task . done ( ) : task . result ( ) await asyncio . sleep ( 0.2 , loop = self . _loop ) else : logger . debug ( 'Plugins %s started' , ', ' . join ( self . _start_priority [ priority ] ) )
10,466
https://github.com/pyslackers/sir-bot-a-lot/blob/22dfdd6a14d61dbe29423fd131b7a23e618b68d7/sirbot/core/core.py#L191-L222
[ "def", "download_wiod2013", "(", "storage_folder", ",", "years", "=", "None", ",", "overwrite_existing", "=", "False", ",", "satellite_urls", "=", "WIOD_CONFIG", "[", "'satellite_urls'", "]", ")", ":", "try", ":", "os", ".", "makedirs", "(", "storage_folder", ")", "except", "FileExistsError", ":", "pass", "if", "type", "(", "years", ")", "is", "int", "or", "type", "(", "years", ")", "is", "str", ":", "years", "=", "[", "years", "]", "years", "=", "years", "if", "years", "else", "range", "(", "1995", ",", "2012", ")", "years", "=", "[", "str", "(", "yy", ")", ".", "zfill", "(", "2", ")", "[", "-", "2", ":", "]", "for", "yy", "in", "years", "]", "wiod_web_content", "=", "_get_url_datafiles", "(", "url_db_view", "=", "WIOD_CONFIG", "[", "'url_db_view'", "]", ",", "url_db_content", "=", "WIOD_CONFIG", "[", "'url_db_content'", "]", ",", "mrio_regex", "=", "'protected.*?wiot\\d\\d.*?xlsx'", ")", "restricted_wiod_io_urls", "=", "[", "url", "for", "url", "in", "wiod_web_content", ".", "data_urls", "if", "re", ".", "search", "(", "r\"(wiot)(\\d\\d)\"", ",", "os", ".", "path", ".", "basename", "(", "url", ")", ")", ".", "group", "(", "2", ")", "in", "years", "]", "meta", "=", "MRIOMetaData", "(", "location", "=", "storage_folder", ",", "description", "=", "'WIOD metadata file for pymrio'", ",", "name", "=", "'WIOD'", ",", "system", "=", "'ixi'", ",", "version", "=", "'data13'", ")", "meta", "=", "_download_urls", "(", "url_list", "=", "restricted_wiod_io_urls", "+", "satellite_urls", ",", "storage_folder", "=", "storage_folder", ",", "overwrite_existing", "=", "overwrite_existing", ",", "meta_handler", "=", "meta", ")", "meta", ".", "save", "(", ")", "return", "meta" ]
Creates the settings object that will be sent to the frontend vizualization
def _create_settings ( self ) : self . settings = { "columns" : [ { "Header" : s , "accessor" : s } for s in self . settings ] , "port" : self . port , "docs" : construct_trie ( self . docs ) }
10,467
https://github.com/samghelms/mathviz/blob/30fe89537379faea4de8c8b568ac6e52e4d15353/mathviz_hopper/src/table.py#L75-L86
[ "def", "parse_duration_with_start", "(", "start", ",", "duration", ")", ":", "elements", "=", "_parse_duration_string", "(", "_clean", "(", "duration", ")", ")", "year", ",", "month", "=", "_year_month_delta_from_elements", "(", "elements", ")", "end", "=", "start", ".", "replace", "(", "year", "=", "start", ".", "year", "+", "year", ",", "month", "=", "start", ".", "month", "+", "month", ")", "del", "elements", "[", "'years'", "]", "del", "elements", "[", "'months'", "]", "end", "+=", "_timedelta_from_elements", "(", "elements", ")", "return", "start", ",", "end", "-", "start" ]
Runs a server to handle queries to the index without creating the javascript table .
def run_server ( self ) : app = build_app ( ) run ( app , host = 'localhost' , port = self . port )
10,468
https://github.com/samghelms/mathviz/blob/30fe89537379faea4de8c8b568ac6e52e4d15353/mathviz_hopper/src/table.py#L133-L140
[ "def", "wrap_multipart_params", "(", "func", ")", ":", "def", "wrapper", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ctype", ",", "pdict", "=", "parse_header", "(", "request", ".", "headers", ".", "get", "(", "'Content-Type'", ",", "''", ")", ")", "if", "ctype", "==", "\"multipart/form-data\"", ":", "if", "isinstance", "(", "pdict", "[", "'boundary'", "]", ",", "str", ")", ":", "pdict", "[", "'boundary'", "]", "=", "pdict", "[", "'boundary'", "]", ".", "encode", "(", ")", "params", "=", "{", "}", "mp", "=", "MultipartParser", "(", "BytesIO", "(", "request", ".", "body", ")", ",", "pdict", "[", "'boundary'", "]", ")", "for", "part", "in", "mp", ":", "params", "[", "part", ".", "name", "]", "=", "{", "\"filename\"", ":", "part", ".", "filename", ",", "\"file\"", ":", "part", ".", "file", ",", "}", "request", ".", "params", "=", "merge_dicts", "(", "getattr", "(", "request", ",", "\"params\"", ",", "None", ")", ",", "params", ")", "request", ".", "multipart_params", "=", "params", "return", "func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
Cleans trailing whitespaces and replaces also multiple whitespaces with a single space .
def strip_spaces ( value , sep = None , join = True ) : value = value . strip ( ) value = [ v . strip ( ) for v in value . split ( sep ) ] join_sep = sep or ' ' return join_sep . join ( value ) if join else value
10,469
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/string_utils.py#L5-L10
[ "def", "next_blob", "(", "self", ")", ":", "blob_file", "=", "self", ".", "blob_file", "try", ":", "preamble", "=", "DAQPreamble", "(", "file_obj", "=", "blob_file", ")", "except", "struct", ".", "error", ":", "raise", "StopIteration", "try", ":", "data_type", "=", "DATA_TYPES", "[", "preamble", ".", "data_type", "]", "except", "KeyError", ":", "log", ".", "error", "(", "\"Unkown datatype: {0}\"", ".", "format", "(", "preamble", ".", "data_type", ")", ")", "data_type", "=", "'Unknown'", "blob", "=", "Blob", "(", ")", "blob", "[", "data_type", "]", "=", "None", "blob", "[", "'DAQPreamble'", "]", "=", "preamble", "if", "data_type", "==", "'DAQSummaryslice'", ":", "daq_frame", "=", "DAQSummaryslice", "(", "blob_file", ")", "blob", "[", "data_type", "]", "=", "daq_frame", "blob", "[", "'DAQHeader'", "]", "=", "daq_frame", ".", "header", "elif", "data_type", "==", "'DAQEvent'", ":", "daq_frame", "=", "DAQEvent", "(", "blob_file", ")", "blob", "[", "data_type", "]", "=", "daq_frame", "blob", "[", "'DAQHeader'", "]", "=", "daq_frame", ".", "header", "else", ":", "log", ".", "warning", "(", "\"Skipping DAQ frame with data type code '{0}'.\"", ".", "format", "(", "preamble", ".", "data_type", ")", ")", "blob_file", ".", "seek", "(", "preamble", ".", "length", "-", "DAQPreamble", ".", "size", ",", "1", ")", "return", "blob" ]
Computes the rank of this transition for a given request .
async def rank ( self , request , origin : Optional [ Text ] ) -> Tuple [ float , Optional [ BaseTrigger ] , Optional [ type ] , Optional [ bool ] , ] : if self . origin_name == origin : score = 1.0 elif self . origin_name is None : score = settings . JUMPING_TRIGGER_PENALTY else : return 0.0 , None , None , None trigger = self . factory ( request ) rank = await run_or_return ( trigger . rank ( ) ) score *= self . weight * ( rank or 0.0 ) return score , trigger , self . dest , self . do_not_register
10,470
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/transition.py#L72-L100
[ "def", "char_conv", "(", "out", ")", ":", "out_conv", "=", "list", "(", ")", "for", "i", "in", "range", "(", "out", ".", "shape", "[", "0", "]", ")", ":", "tmp_str", "=", "''", "for", "j", "in", "range", "(", "out", ".", "shape", "[", "1", "]", ")", ":", "if", "int", "(", "out", "[", "i", "]", "[", "j", "]", ")", ">=", "0", ":", "tmp_char", "=", "int2char", "(", "int", "(", "out", "[", "i", "]", "[", "j", "]", ")", ")", "if", "int", "(", "out", "[", "i", "]", "[", "j", "]", ")", "==", "27", ":", "tmp_char", "=", "''", "tmp_str", "=", "tmp_str", "+", "tmp_char", "out_conv", ".", "append", "(", "tmp_str", ")", "return", "out_conv" ]
Checks that the feature types of this dataset are consitent with a point dataset
def check_dimensions ( self , dataset ) : required_ctx = TestCtx ( BaseCheck . HIGH , 'All geophysical variables are point feature types' ) t = util . get_time_variable ( dataset ) # Exit prematurely if not t : required_ctx . assert_true ( False , 'A dimension representing time is required for point feature types' ) return required_ctx . to_result ( ) t_dims = dataset . variables [ t ] . dimensions o = None or ( t_dims and t_dims [ 0 ] ) message = '{} must be a valid timeseries feature type. It must have dimensions of ({}), and all coordinates must have dimensions of ({})' for variable in util . get_geophysical_variables ( dataset ) : is_valid = util . is_point ( dataset , variable ) required_ctx . assert_true ( is_valid , message . format ( variable , o , o ) ) return required_ctx . to_result ( )
10,471
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_point.py#L19-L40
[ "def", "sync", "(", "self", ")", ":", "if", "self", ".", "writeback", "and", "self", ".", "cache", ":", "super", "(", "_TimeoutMixin", ",", "self", ")", ".", "__delitem__", "(", "self", ".", "_INDEX", ")", "super", "(", "_TimeoutMixin", ",", "self", ")", ".", "sync", "(", ")", "self", ".", "writeback", "=", "False", "super", "(", "_TimeoutMixin", ",", "self", ")", ".", "__setitem__", "(", "self", ".", "_INDEX", ",", "self", ".", "_index", ")", "self", ".", "writeback", "=", "True", "if", "hasattr", "(", "self", ".", "dict", ",", "'sync'", ")", ":", "self", ".", "dict", ".", "sync", "(", ")" ]
Find the settings for the current class inside the platforms configuration .
def settings ( cls ) : from bernard . platforms . management import get_platform_settings for platform in get_platform_settings ( ) : candidate = import_class ( platform [ 'class' ] ) if candidate == cls : return platform . get ( 'settings' , { } )
10,472
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/platform.py#L72-L83
[ "def", "destroy", "(", "self", ")", ":", "response", "=", "GettRequest", "(", ")", ".", "post", "(", "\"/shares/%s/destroy?accesstoken=%s\"", "%", "(", "self", ".", "sharename", ",", "self", ".", "user", ".", "access_token", "(", ")", ")", ",", "None", ")", "if", "response", ".", "http_status", "==", "200", ":", "return", "True" ]
Notify all callbacks that a message was received .
async def _notify ( self , message : BaseMessage , responder : Responder ) : for cb in self . _listeners : coro = cb ( message , responder , self . fsm_creates_task ) if not self . fsm_creates_task : self . _register = await coro
10,473
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/platform.py#L104-L112
[ "def", "_database", "(", "self", ",", "writable", "=", "False", ")", ":", "if", "self", ".", "path", "==", "MEMORY_DB_NAME", ":", "if", "not", "self", ".", "inmemory_db", ":", "self", ".", "inmemory_db", "=", "xapian", ".", "inmemory_open", "(", ")", "return", "self", ".", "inmemory_db", "if", "writable", ":", "database", "=", "xapian", ".", "WritableDatabase", "(", "self", ".", "path", ",", "xapian", ".", "DB_CREATE_OR_OPEN", ")", "else", ":", "try", ":", "database", "=", "xapian", ".", "Database", "(", "self", ".", "path", ")", "except", "xapian", ".", "DatabaseOpeningError", ":", "raise", "InvalidIndexError", "(", "'Unable to open index at %s'", "%", "self", ".", "path", ")", "return", "database" ]
During async init we just need to create a HTTP session so we can keep outgoing connexions to the platform alive .
async def async_init ( self ) : self . session = aiohttp . ClientSession ( ) asyncio . get_event_loop ( ) . create_task ( self . _deferred_init ( ) )
10,474
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/platform.py#L177-L183
[ "def", "_get_by_index", "(", "self", ",", "index", ")", ":", "volume_or_disk", "=", "self", ".", "parser", ".", "get_by_index", "(", "index", ")", "volume", ",", "disk", "=", "(", "volume_or_disk", ",", "None", ")", "if", "not", "isinstance", "(", "volume_or_disk", ",", "Disk", ")", "else", "(", "None", ",", "volume_or_disk", ")", "return", "volume", ",", "disk" ]
Checks that the stack can be accepted according to the PATTERNS .
def accept ( self , stack : Stack ) : for name , pattern in self . PATTERNS . items ( ) : if stack . match_exp ( pattern ) : stack . annotation = name return True return False
10,475
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/platform.py#L192-L204
[ "def", "WriteBlobs", "(", "self", ",", "blob_id_data_map", ",", "cursor", "=", "None", ")", ":", "chunks", "=", "[", "]", "for", "blob_id", ",", "blob", "in", "iteritems", "(", "blob_id_data_map", ")", ":", "chunks", ".", "extend", "(", "_BlobToChunks", "(", "blob_id", ".", "AsBytes", "(", ")", ",", "blob", ")", ")", "for", "values", "in", "_PartitionChunks", "(", "chunks", ")", ":", "_Insert", "(", "cursor", ",", "\"blobs\"", ",", "values", ")" ]
Send a stack to the platform .
def send ( self , request : Request , stack : Stack ) -> Coroutine : if stack . annotation not in self . PATTERNS : if not self . accept ( stack ) : raise UnacceptableStack ( 'Cannot accept stack {}' . format ( stack ) ) func = getattr ( self , '_send_' + stack . annotation ) return func ( request , stack )
10,476
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/platform.py#L206-L219
[ "def", "get_reddit", "(", ")", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "_download", ".", "LOCAL_CACHE_DIR", ",", "\"reddit.hdf5\"", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "log", ".", "info", "(", "\"Downloading dataset to '%s'\"", ",", "filename", ")", "_download", ".", "download_file", "(", "URL", ",", "filename", ")", "else", ":", "log", ".", "info", "(", "\"Using cached dataset at '%s'\"", ",", "filename", ")", "with", "h5py", ".", "File", "(", "filename", ",", "'r'", ")", "as", "f", ":", "m", "=", "f", ".", "get", "(", "'item_user_ratings'", ")", "return", "csr_matrix", "(", "(", "m", ".", "get", "(", "'data'", ")", ",", "m", ".", "get", "(", "'indices'", ")", ",", "m", ".", "get", "(", "'indptr'", ")", ")", ")" ]
Creates a string representation of memory size given number .
def to_unit_memory ( number ) : kb = 1024 number /= kb if number < 100 : return '{} Kb' . format ( round ( number , 2 ) ) number /= kb if number < 300 : return '{} Mb' . format ( round ( number , 2 ) ) number /= kb return '{} Gb' . format ( round ( number , 2 ) )
10,477
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/units.py#L5-L20
[ "def", "get_enroll", "(", "self", ")", ":", "devices", "=", "[", "DeviceRegistration", ".", "wrap", "(", "device", ")", "for", "device", "in", "self", ".", "__get_u2f_devices", "(", ")", "]", "enroll", "=", "start_register", "(", "self", ".", "__appid", ",", "devices", ")", "enroll", "[", "'status'", "]", "=", "'ok'", "session", "[", "'_u2f_enroll_'", "]", "=", "enroll", ".", "json", "return", "enroll" ]
Creates a percentage string representation from the given number . The number is multiplied by 100 before adding a % character .
def to_percentage ( number , rounding = 2 ) : number = float ( number ) * 100 number_as_int = int ( number ) rounded = round ( number , rounding ) return '{}%' . format ( number_as_int if number_as_int == rounded else rounded )
10,478
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/units.py#L23-L33
[ "def", "boot", "(", "hostname", ",", "boot_port", "=", "consts", ".", "BOOT_PORT", ",", "scamp_binary", "=", "None", ",", "sark_struct", "=", "None", ",", "boot_delay", "=", "0.05", ",", "post_boot_delay", "=", "2.0", ",", "sv_overrides", "=", "dict", "(", ")", ",", "*", "*", "kwargs", ")", ":", "# Get the boot data if not specified.", "scamp_binary", "=", "(", "scamp_binary", "if", "scamp_binary", "is", "not", "None", "else", "pkg_resources", ".", "resource_filename", "(", "\"rig\"", ",", "\"boot/scamp.boot\"", ")", ")", "sark_struct", "=", "(", "sark_struct", "if", "sark_struct", "is", "not", "None", "else", "pkg_resources", ".", "resource_filename", "(", "\"rig\"", ",", "\"boot/sark.struct\"", ")", ")", "with", "open", "(", "scamp_binary", ",", "\"rb\"", ")", "as", "f", ":", "boot_data", "=", "f", ".", "read", "(", ")", "# Read the struct file and modify the \"sv\" struct to contain the", "# configuration values and write this into the boot data.", "with", "open", "(", "sark_struct", ",", "\"rb\"", ")", "as", "f", ":", "struct_data", "=", "f", ".", "read", "(", ")", "structs", "=", "struct_file", ".", "read_struct_file", "(", "struct_data", ")", "sv", "=", "structs", "[", "b\"sv\"", "]", "sv_overrides", ".", "update", "(", "kwargs", ")", "# Allow non-explicit keyword arguments for SV", "sv", ".", "update_default_values", "(", "*", "*", "sv_overrides", ")", "sv", ".", "update_default_values", "(", "unix_time", "=", "int", "(", "time", ".", "time", "(", ")", ")", ",", "boot_sig", "=", "int", "(", "time", ".", "time", "(", ")", ")", ",", "root_chip", "=", "1", ")", "struct_packed", "=", "sv", ".", "pack", "(", ")", "assert", "len", "(", "struct_packed", ")", ">=", "128", "# Otherwise shoving this data in is nasty", "buf", "=", "bytearray", "(", "boot_data", ")", "buf", "[", "BOOT_DATA_OFFSET", ":", "BOOT_DATA_OFFSET", "+", "BOOT_DATA_LENGTH", "]", "=", "struct_packed", "[", ":", "BOOT_DATA_LENGTH", "]", "assert", "len", "(", "buf", ")", "<", "DTCM_SIZE", "# Assert that we fit in DTCM", "boot_data", "=", "bytes", "(", "buf", ")", "# Create a socket to communicate with the board", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "sock", ".", "connect", "(", "(", "hostname", ",", "boot_port", ")", ")", "# Transmit the boot data as a series of SDP packets. First determine", "# how many blocks must be sent and transmit that, then transmit each", "# block.", "n_blocks", "=", "(", "len", "(", "buf", ")", "+", "BOOT_BYTE_SIZE", "-", "1", ")", "//", "BOOT_BYTE_SIZE", "assert", "n_blocks", "<=", "BOOT_MAX_BLOCKS", "boot_packet", "(", "sock", ",", "BootCommand", ".", "start", ",", "arg3", "=", "n_blocks", "-", "1", ")", "time", ".", "sleep", "(", "boot_delay", ")", "block", "=", "0", "while", "len", "(", "boot_data", ")", ">", "0", ":", "# Get the data to transmit", "data", ",", "boot_data", "=", "(", "boot_data", "[", ":", "BOOT_BYTE_SIZE", "]", ",", "boot_data", "[", "BOOT_BYTE_SIZE", ":", "]", ")", "# Transmit, delay and increment the block count", "a1", "=", "(", "(", "BOOT_WORD_SIZE", "-", "1", ")", "<<", "8", ")", "|", "block", "boot_packet", "(", "sock", ",", "BootCommand", ".", "send_block", ",", "a1", ",", "data", "=", "data", ")", "time", ".", "sleep", "(", "boot_delay", ")", "block", "+=", "1", "# Send the END command", "boot_packet", "(", "sock", ",", "BootCommand", ".", "end", ",", "1", ")", "# Close the socket and give time to boot", "sock", ".", "close", "(", ")", "time", ".", "sleep", "(", "post_boot_delay", ")", "return", "structs" ]
Sets the associated editor when the editor s offset calculator mode emit the signal pic_infos_available the table is automatically refreshed .
def set_editor ( self , editor ) : if self . _editor is not None : try : self . _editor . offset_calculator . pic_infos_available . disconnect ( self . _update ) except ( AttributeError , RuntimeError , ReferenceError ) : # see https://github.com/OpenCobolIDE/OpenCobolIDE/issues/89 pass self . _editor = weakref . proxy ( editor ) if editor else editor try : self . _editor . offset_calculator . pic_infos_available . connect ( self . _update ) except AttributeError : pass
10,479
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/widgets/pic_offsets.py#L25-L45
[ "def", "_unique_class_name", "(", "namespace", ":", "Dict", "[", "str", ",", "Any", "]", ",", "uuid", ":", "uuid", ".", "UUID", ")", "->", "str", ":", "count", "=", "0", "name", "=", "original_name", "=", "'f_'", "+", "uuid", ".", "hex", "while", "name", "in", "namespace", ":", "count", "+=", "1", "name", "=", "original_name", "+", "'_'", "+", "str", "(", "count", ")", "return", "name" ]
Reload the configuration form scratch . Only the default config is loaded not the environment - specified config .
def patch_conf ( settings_patch = None , settings_file = None ) : if settings_patch is None : settings_patch = { } reload_config ( ) os . environ [ ENVIRONMENT_VARIABLE ] = settings_file if settings_file else '' from bernard . conf import settings as l_settings # noinspection PyProtectedMember r_settings = l_settings . _settings r_settings . update ( settings_patch ) if 'bernard.i18n' in modules : from bernard . i18n import translate , intents translate . _regenerate_word_dict ( ) intents . _refresh_intents_db ( ) yield
10,480
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/conf/utils.py#L30-L59
[ "def", "delete_types_s", "(", "s", ",", "types", ")", ":", "patt", "=", "'(?s)'", "+", "'|'", ".", "join", "(", "'(?<=\\n)'", "+", "s", "+", "'\\n.+?\\n(?=\\S+|$)'", "for", "s", "in", "types", ")", "return", "re", ".", "sub", "(", "patt", ",", "''", ",", "'\\n'", "+", "s", ".", "strip", "(", ")", "+", "'\\n'", ",", ")", ".", "strip", "(", ")" ]
Resolves the requested key to an object instance raising a KeyError if the key is missing
def resolve ( self , key ) : registration = self . _registrations . get ( key ) if registration is None : raise KeyError ( "Unknown key: '{0}'" . format ( key ) ) return registration . resolve ( self , key )
10,481
https://github.com/giancosta86/Iris/blob/b3d92cca5cce3653519bd032346b211c46a57d05/info/gianlucacosta/iris/ioc.py#L139-L148
[ "def", "hexedit", "(", "pktlist", ")", ":", "f", "=", "get_temp_file", "(", ")", "wrpcap", "(", "f", ",", "pktlist", ")", "with", "ContextManagerSubprocess", "(", "\"hexedit()\"", ",", "conf", ".", "prog", ".", "hexedit", ")", ":", "subprocess", ".", "call", "(", "[", "conf", ".", "prog", ".", "hexedit", ",", "f", "]", ")", "pktlist", "=", "rdpcap", "(", "f", ")", "os", ".", "unlink", "(", "f", ")", "return", "pktlist" ]
Disposes every performed registration ; the container can then be used again
def dispose ( self ) : for registration in self . _registrations . values ( ) : registration . dispose ( ) self . _registrations = { }
10,482
https://github.com/giancosta86/Iris/blob/b3d92cca5cce3653519bd032346b211c46a57d05/info/gianlucacosta/iris/ioc.py#L151-L158
[ "def", "getTotalBulkPrice", "(", "self", ")", ":", "price", "=", "self", ".", "getBulkPrice", "(", ")", "vat", "=", "self", ".", "getVAT", "(", ")", "price", "=", "price", "and", "price", "or", "0", "vat", "=", "vat", "and", "vat", "or", "0", "return", "float", "(", "price", ")", "+", "(", "float", "(", "price", ")", "*", "float", "(", "vat", ")", ")", "/", "100" ]
Build user s workspace relative path .
def build_workspace_path ( user_id , workflow_id = None ) : workspace_path = os . path . join ( 'users' , str ( user_id ) , 'workflows' ) if workflow_id : workspace_path = os . path . join ( workspace_path , str ( workflow_id ) ) return workspace_path
10,483
https://github.com/reanahub/reana-db/blob/4efcb46d23af035689964d8c25a804c5a8f1dfc3/reana_db/utils.py#L14-L27
[ "def", "make_random_models_table", "(", "n_sources", ",", "param_ranges", ",", "random_state", "=", "None", ")", ":", "prng", "=", "check_random_state", "(", "random_state", ")", "sources", "=", "Table", "(", ")", "for", "param_name", ",", "(", "lower", ",", "upper", ")", "in", "param_ranges", ".", "items", "(", ")", ":", "# Generate a column for every item in param_ranges, even if it", "# is not in the model (e.g. flux). However, such columns will", "# be ignored when rendering the image.", "sources", "[", "param_name", "]", "=", "prng", ".", "uniform", "(", "lower", ",", "upper", ",", "n_sources", ")", "return", "sources" ]
Get Workflow from database with uuid or name .
def _get_workflow_with_uuid_or_name ( uuid_or_name , user_uuid ) : from reana_db . models import Workflow # Check existence if not uuid_or_name : raise ValueError ( 'No Workflow was specified.' ) # Check validity try : uuid_or_name . encode ( 'ascii' ) except UnicodeEncodeError : # `workflow_name` contains something else than just ASCII. raise ValueError ( 'Workflow name {} is not valid.' . format ( uuid_or_name ) ) # Check if UUIDv4 try : # is_uuid = UUID(uuid_or_name, version=4) is_uuid = UUID ( '{' + uuid_or_name + '}' , version = 4 ) except ( TypeError , ValueError ) : is_uuid = None if is_uuid : # `uuid_or_name` is an UUIDv4. # Search with it since it is expected to be unique. return _get_workflow_by_uuid ( uuid_or_name ) else : # `uuid_or_name` is not and UUIDv4. Expect it is a name. # Expect name might be in format 'reana.workflow.123' with arbitrary # number of dot-delimited substring, where last substring specifies # the run_number of the workflow this workflow name refers to. # Possible candidates for names are e.g. : # 'workflow_name' -> ValueError # 'workflow.name' -> True, True # 'workflow.name.123' -> True, True # '123.' -> True, False # '' -> ValueError # '.123' -> False, True # '..' -> False, False # '123.12' -> True, True # '123.12.' -> True, False # Try to split the dot-separated string. try : workflow_name , run_number = uuid_or_name . rsplit ( '.' , maxsplit = 1 ) except ValueError : # Couldn't split. Probably not a dot-separated string. # -> Search with `uuid_or_name` return _get_workflow_by_name ( uuid_or_name , user_uuid ) # Check if `run_number` was specified if not run_number : # No `run_number` specified. # -> Search by `workflow_name` return _get_workflow_by_name ( workflow_name , user_uuid ) # `run_number` was specified. # Check `run_number` is valid. if not run_number . isdigit ( ) : # `uuid_or_name` was split, so it is a dot-separated string # but it didn't contain a valid `run_number`. # Assume that this dot-separated string is the name of # the workflow and search with it. return _get_workflow_by_name ( uuid_or_name , user_uuid ) # `run_number` is valid. # Search by `run_number` since it is a primary key. workflow = Workflow . query . filter ( Workflow . name == workflow_name , Workflow . run_number == run_number , Workflow . owner_id == user_uuid ) . one_or_none ( ) if not workflow : raise ValueError ( 'REANA_WORKON is set to {0}, but ' 'that workflow does not exist. ' 'Please set your REANA_WORKON environment ' 'variable appropriately.' . format ( workflow_name , run_number ) ) return workflow
10,484
https://github.com/reanahub/reana-db/blob/4efcb46d23af035689964d8c25a804c5a8f1dfc3/reana_db/utils.py#L30-L127
[ "def", "OnAdjustVolume", "(", "self", ",", "event", ")", ":", "self", ".", "volume", "=", "self", ".", "player", ".", "audio_get_volume", "(", ")", "if", "event", ".", "GetWheelRotation", "(", ")", "<", "0", ":", "self", ".", "volume", "=", "max", "(", "0", ",", "self", ".", "volume", "-", "10", ")", "elif", "event", ".", "GetWheelRotation", "(", ")", ">", "0", ":", "self", ".", "volume", "=", "min", "(", "200", ",", "self", ".", "volume", "+", "10", ")", "self", ".", "player", ".", "audio_set_volume", "(", "self", ".", "volume", ")" ]
From Workflows named as workflow_name the latest run_number .
def _get_workflow_by_name ( workflow_name , user_uuid ) : from reana_db . models import Workflow workflow = Workflow . query . filter ( Workflow . name == workflow_name , Workflow . owner_id == user_uuid ) . order_by ( Workflow . run_number . desc ( ) ) . first ( ) if not workflow : raise ValueError ( 'REANA_WORKON is set to {0}, but ' 'that workflow does not exist. ' 'Please set your REANA_WORKON environment ' 'variable appropriately.' . format ( workflow_name ) ) return workflow
10,485
https://github.com/reanahub/reana-db/blob/4efcb46d23af035689964d8c25a804c5a8f1dfc3/reana_db/utils.py#L130-L149
[ "def", "is_removable", "(", "self", ",", "device", ")", ":", "if", "not", "self", ".", "is_handleable", "(", "device", ")", ":", "return", "False", "if", "device", ".", "is_filesystem", ":", "return", "device", ".", "is_mounted", "if", "device", ".", "is_crypto", ":", "return", "device", ".", "is_unlocked", "if", "device", ".", "is_partition_table", "or", "device", ".", "is_drive", ":", "return", "any", "(", "self", ".", "is_removable", "(", "dev", ")", "for", "dev", "in", "self", ".", "get_all_handleable", "(", ")", "if", "_is_parent_of", "(", "device", ",", "dev", ")", ")", "return", "False" ]
Get Workflow with UUIDv4 .
def _get_workflow_by_uuid ( workflow_uuid ) : from reana_db . models import Workflow workflow = Workflow . query . filter ( Workflow . id_ == workflow_uuid ) . first ( ) if not workflow : raise ValueError ( 'REANA_WORKON is set to {0}, but ' 'that workflow does not exist. ' 'Please set your REANA_WORKON environment ' 'variable appropriately.' . format ( workflow_uuid ) ) return workflow
10,486
https://github.com/reanahub/reana-db/blob/4efcb46d23af035689964d8c25a804c5a8f1dfc3/reana_db/utils.py#L152-L170
[ "def", "decode", "(", "dinfo", ",", "cio", ")", ":", "argtypes", "=", "[", "ctypes", ".", "POINTER", "(", "DecompressionInfoType", ")", ",", "ctypes", ".", "POINTER", "(", "CioType", ")", "]", "OPENJPEG", ".", "opj_decode", ".", "argtypes", "=", "argtypes", "OPENJPEG", ".", "opj_decode", ".", "restype", "=", "ctypes", ".", "POINTER", "(", "ImageType", ")", "image", "=", "OPENJPEG", ".", "opj_decode", "(", "dinfo", ",", "cio", ")", "return", "image" ]
Start the watching loop .
async def _watch ( self ) : file_name = os . path . basename ( self . _file_path ) logger . info ( 'Watching %s "%s"' , self . THING , self . _file_path , ) while self . _running : evt = await self . _watcher . get_event ( ) if evt . name == file_name : await self . _load ( ) logger . info ( 'Reloading changed %s from "%s"' , self . THING , self . _file_path )
10,487
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/loaders.py#L54-L75
[ "def", "to_url", "(", "self", ",", "site", "=", "'amazon'", ",", "country", "=", "'us'", ")", ":", "try", ":", "try", ":", "url", ",", "tlds", "=", "URL_MAP", "[", "site", "]", "except", "ValueError", ":", "tlds", "=", "None", "url", "=", "URL_MAP", "[", "site", "]", "except", "KeyError", ":", "raise", "SiteError", "(", "site", ")", "inject", "=", "{", "'isbn'", ":", "self", ".", "_isbn", "}", "if", "tlds", ":", "if", "country", "not", "in", "tlds", ":", "raise", "CountryError", "(", "country", ")", "tld", "=", "tlds", "[", "country", "]", "if", "not", "tld", ":", "tld", "=", "country", "inject", "[", "'tld'", "]", "=", "tld", "return", "url", "%", "inject" ]
Setup the watching utilities start the loop and load data a first time .
async def start ( self , file_path , locale = None , kwargs = None ) : self . _file_path = os . path . realpath ( file_path ) self . _locale = locale if kwargs : self . _kwargs = kwargs if settings . I18N_LIVE_RELOAD : loop = asyncio . get_event_loop ( ) self . _running = True self . _watcher = aionotify . Watcher ( ) self . _watcher . watch ( path = os . path . dirname ( self . _file_path ) , flags = aionotify . Flags . MOVED_TO | aionotify . Flags . MODIFY , ) await self . _watcher . setup ( loop ) await self . _load ( ) loop . create_task ( self . _watch ( ) ) else : await self . _load ( )
10,488
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/loaders.py#L77-L103
[ "def", "render_cvmfs_pvc", "(", "cvmfs_volume", ")", ":", "name", "=", "CVMFS_REPOSITORIES", "[", "cvmfs_volume", "]", "rendered_template", "=", "dict", "(", "REANA_CVMFS_PVC_TEMPLATE", ")", "rendered_template", "[", "'metadata'", "]", "[", "'name'", "]", "=", "'csi-cvmfs-{}-pvc'", ".", "format", "(", "name", ")", "rendered_template", "[", "'spec'", "]", "[", "'storageClassName'", "]", "=", "\"csi-cvmfs-{}\"", ".", "format", "(", "name", ")", "return", "rendered_template" ]
Propagate updates to listeners
def _update ( self , data : TransDict , * args , * * kwargs ) : for l in self . listeners : l ( data , * args , * * kwargs )
10,489
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/loaders.py#L126-L134
[ "def", "decode_header", "(", "cls", ",", "stream", ")", ":", "read", "=", "stream", ".", "read", "data", "=", "read", "(", "2", ")", "if", "len", "(", "data", ")", "!=", "2", ":", "raise", "WebSocketError", "(", "\"Unexpected EOF while decoding header\"", ")", "first_byte", ",", "second_byte", "=", "struct", ".", "unpack", "(", "'!BB'", ",", "data", ")", "header", "=", "cls", "(", "fin", "=", "first_byte", "&", "cls", ".", "FIN_MASK", "==", "cls", ".", "FIN_MASK", ",", "opcode", "=", "first_byte", "&", "cls", ".", "OPCODE_MASK", ",", "flags", "=", "first_byte", "&", "cls", ".", "HEADER_FLAG_MASK", ",", "length", "=", "second_byte", "&", "cls", ".", "LENGTH_MASK", ")", "has_mask", "=", "second_byte", "&", "cls", ".", "MASK_MASK", "==", "cls", ".", "MASK_MASK", "if", "header", ".", "opcode", ">", "0x07", ":", "if", "not", "header", ".", "fin", ":", "raise", "WebSocketError", "(", "'Received fragmented control frame: {0!r}'", ".", "format", "(", "data", ")", ")", "# Control frames MUST have a payload length of 125 bytes or less", "if", "header", ".", "length", ">", "125", ":", "raise", "FrameTooLargeException", "(", "'Control frame cannot be larger than 125 bytes: {0!r}'", ".", "format", "(", "data", ")", ")", "if", "header", ".", "length", "==", "126", ":", "# 16 bit length", "data", "=", "read", "(", "2", ")", "if", "len", "(", "data", ")", "!=", "2", ":", "raise", "WebSocketError", "(", "'Unexpected EOF while decoding header'", ")", "header", ".", "length", "=", "struct", ".", "unpack", "(", "'!H'", ",", "data", ")", "[", "0", "]", "elif", "header", ".", "length", "==", "127", ":", "# 64 bit length", "data", "=", "read", "(", "8", ")", "if", "len", "(", "data", ")", "!=", "8", ":", "raise", "WebSocketError", "(", "'Unexpected EOF while decoding header'", ")", "header", ".", "length", "=", "struct", ".", "unpack", "(", "'!Q'", ",", "data", ")", "[", "0", "]", "if", "has_mask", ":", "mask", "=", "read", "(", "4", ")", "if", "len", "(", "mask", ")", "!=", "4", ":", "raise", "WebSocketError", "(", "'Unexpected EOF while decoding header'", ")", "header", ".", "mask", "=", "mask", "return", "header" ]
prints some info that the user may find useful
def print_info ( self ) : d = dir ( self ) self . plugins = [ ] for key in d : if key . startswith ( "info_" ) : self . plugins . append ( key ) for key in self . plugins : if self . echo : Console . ok ( "> {0}" . format ( key . replace ( "_" , " " , 1 ) ) ) exec ( "self.%s()" % key )
10,490
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/info.py#L7-L19
[ "def", "boolean_union", "(", "self", ",", "mesh", ",", "inplace", "=", "False", ")", ":", "bfilter", "=", "vtk", ".", "vtkBooleanOperationPolyDataFilter", "(", ")", "bfilter", ".", "SetOperationToUnion", "(", ")", "bfilter", ".", "SetInputData", "(", "1", ",", "mesh", ")", "bfilter", ".", "SetInputData", "(", "0", ",", "self", ")", "bfilter", ".", "ReorientDifferenceCellsOff", "(", ")", "bfilter", ".", "Update", "(", ")", "mesh", "=", "_get_output", "(", "bfilter", ")", "if", "inplace", ":", "self", ".", "overwrite", "(", "mesh", ")", "else", ":", "return", "mesh" ]
Given parsed variant - loading arguments return a pandas DataFrame .
def load_from_args_as_dataframe ( args ) : if not args . variants and not args . single_variant : return None if args . variant_source_name : variant_source_names = util . expand ( args . variant_source_name , 'variant_source_name' , 'variant source' , len ( args . variants ) ) else : variant_source_names = util . drop_prefix ( args . variants ) variant_to_sources = collections . defaultdict ( list ) dfs = [ ] for i in range ( len ( args . variants ) ) : name = variant_source_names [ i ] prefix = ( 'metadata:' if len ( args . variants ) == 1 else "metadata:%s:" % name ) df = load_as_dataframe ( args . variants [ i ] , name = name , genome = args . genome , max_variants = args . max_variants_per_source , only_passing = not args . include_failing_variants , metadata_column_prefix = prefix ) if df . shape [ 0 ] == 0 : logging . warn ( "No variants loaded from: %s" % args . variants [ i ] ) else : for variant in df . variant : variant_to_sources [ variant ] . append ( name ) dfs . append ( df ) if args . single_variant : variants = [ ] extra_args = { } if args . genome : extra_args = { 'ensembl' : varcode . reference . infer_genome ( args . genome ) } for ( locus_str , ref , alt ) in args . single_variant : locus = Locus . parse ( locus_str ) variant = varcode . Variant ( locus . contig , locus . inclusive_start , ref , alt , * * extra_args ) variants . append ( variant ) variant_to_sources [ variant ] . append ( "commandline" ) dfs . append ( variants_to_dataframe ( variants ) ) df = dfs . pop ( 0 ) for other_df in dfs : df = pandas . merge ( df , other_df , how = 'outer' , on = [ "variant" ] + STANDARD_DATAFRAME_COLUMNS ) genomes = df [ "genome" ] . unique ( ) if len ( genomes ) > 1 : raise ValueError ( "Mixing references is not supported. " "Reference genomes: %s" % ( ", " . join ( genomes ) ) ) df [ "sources" ] = [ " " . join ( variant_to_sources [ v ] ) for v in df . variant ] # Apply filters: if args . ref : df = df . ix [ df . ref . isin ( args . ref ) ] if args . alt : df = df . ix [ df . alt . isin ( args . alt ) ] loci = loci_util . load_from_args ( util . remove_prefix_from_parsed_args ( args , "variant" ) ) if loci is not None : df = df . ix [ [ loci . intersects ( pileup_collection . to_locus ( v ) ) for v in df . variant ] ] return df
10,491
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/variants_util.py#L72-L159
[ "def", "move_vobject", "(", "self", ",", "uid", ",", "from_file", ",", "to_file", ")", ":", "if", "from_file", "not", "in", "self", ".", "_reminders", "or", "to_file", "not", "in", "self", ".", "_reminders", ":", "return", "uid", "=", "uid", ".", "split", "(", "'@'", ")", "[", "0", "]", "with", "self", ".", "_lock", ":", "rem", "=", "open", "(", "from_file", ")", ".", "readlines", "(", ")", "for", "(", "index", ",", "line", ")", "in", "enumerate", "(", "rem", ")", ":", "if", "uid", "==", "md5", "(", "line", "[", ":", "-", "1", "]", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", ":", "del", "rem", "[", "index", "]", "open", "(", "from_file", ",", "'w'", ")", ".", "writelines", "(", "rem", ")", "open", "(", "to_file", ",", "'a'", ")", ".", "write", "(", "line", ")", "break" ]
Request data fromo the server .
def request ( self , cmd , * args , * * kwargs ) : params = { 'action' : cmd } #TODO: serialize the kwargs? params . update ( kwargs ) return self . __request ( self . url , params )
10,492
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/proxy.py#L77-L88
[ "def", "n_weekends", "(", "self", ")", "->", "int", ":", "startdate", "=", "self", ".", "start", ".", "date", "(", ")", "enddate", "=", "self", ".", "end", ".", "date", "(", ")", "ndays", "=", "(", "enddate", "-", "startdate", ")", ".", "days", "+", "1", "in_weekend", "=", "False", "n_weekends", "=", "0", "for", "i", "in", "range", "(", "ndays", ")", ":", "date", "=", "startdate", "+", "datetime", ".", "timedelta", "(", "days", "=", "i", ")", "if", "not", "in_weekend", "and", "is_weekend", "(", "date", ")", ":", "in_weekend", "=", "True", "n_weekends", "+=", "1", "elif", "in_weekend", "and", "not", "is_weekend", "(", "date", ")", ":", "in_weekend", "=", "False", "return", "n_weekends" ]
Make an HTTP POST request to the server and return JSON data .
def __request ( self , url , params ) : log . debug ( 'request: %s %s' % ( url , str ( params ) ) ) try : response = urlopen ( url , urlencode ( params ) ) . read ( ) if params . get ( 'action' ) != 'data' : log . debug ( 'response: %s' % response ) if params . get ( 'action' , None ) == 'data' : return response else : return json . loads ( response ) except TypeError , e : log . exception ( 'request error' ) raise ServerError ( e ) except IOError , e : log . error ( 'request error: %s' % str ( e ) ) raise ServerError ( e )
10,493
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/proxy.py#L90-L112
[ "def", "for_me", "(", "conditions", ",", "myself", ")", ":", "if", "not", "conditions", ".", "audience_restriction", ":", "# No audience restriction", "return", "True", "for", "restriction", "in", "conditions", ".", "audience_restriction", ":", "if", "not", "restriction", ".", "audience", ":", "continue", "for", "audience", "in", "restriction", ".", "audience", ":", "if", "audience", ".", "text", ".", "strip", "(", ")", "==", "myself", ":", "return", "True", "else", ":", "# print(\"Not for me: %s != %s\" % (audience.text.strip(),", "# myself))", "pass", "return", "False" ]
If this locus spans a single base this property gives that position . Otherwise raises a ValueError .
def position ( self ) : if self . end != self . start + 1 : raise ValueError ( "Not a single base: %s" % str ( self ) ) return self . start
10,494
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/locus.py#L45-L52
[ "def", "_read_config_file", "(", "args", ")", ":", "stage", "=", "args", ".", "stage", "with", "open", "(", "args", ".", "config", ",", "'rt'", ")", "as", "f", ":", "config", "=", "yaml", ".", "safe_load", "(", "f", ".", "read", "(", ")", ")", "STATE", "[", "'stages'", "]", "=", "config", "[", "'stages'", "]", "config", "[", "'config'", "]", "=", "_decrypt_item", "(", "config", "[", "'config'", "]", ",", "stage", "=", "stage", ",", "key", "=", "''", ",", "render", "=", "True", ")", "return", "config", "[", "'stages'", "]", ",", "config", "[", "'config'", "]" ]
Given coordinates in 0 - based interbase coordinates return a Locus instance .
def from_interbase_coordinates ( contig , start , end = None ) : typechecks . require_string ( contig ) typechecks . require_integer ( start ) if end is None : end = start + 1 typechecks . require_integer ( end ) contig = pyensembl . locus . normalize_chromosome ( contig ) return Locus ( contig , start , end )
10,495
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/locus.py#L71-L82
[ "def", "update_classroom", "(", "self", ",", "course", ",", "classroomid", ",", "new_data", ")", ":", "student_list", ",", "tutor_list", ",", "other_students", ",", "_", "=", "self", ".", "get_user_lists", "(", "course", ",", "classroomid", ")", "# Check tutors", "new_data", "[", "\"tutors\"", "]", "=", "[", "tutor", "for", "tutor", "in", "map", "(", "str", ".", "strip", ",", "new_data", "[", "\"tutors\"", "]", ")", "if", "tutor", "in", "tutor_list", "]", "students", ",", "groups", ",", "errored_students", "=", "[", "]", ",", "[", "]", ",", "[", "]", "new_data", "[", "\"students\"", "]", "=", "map", "(", "str", ".", "strip", ",", "new_data", "[", "\"students\"", "]", ")", "# Check the students", "for", "student", "in", "new_data", "[", "\"students\"", "]", ":", "if", "student", "in", "student_list", ":", "students", ".", "append", "(", "student", ")", "else", ":", "if", "student", "in", "other_students", ":", "# Remove user from the other classroom", "self", ".", "database", ".", "classrooms", ".", "find_one_and_update", "(", "{", "\"courseid\"", ":", "course", ".", "get_id", "(", ")", ",", "\"groups.students\"", ":", "student", "}", ",", "{", "\"$pull\"", ":", "{", "\"groups.$.students\"", ":", "student", ",", "\"students\"", ":", "student", "}", "}", ")", "self", ".", "database", ".", "classrooms", ".", "find_one_and_update", "(", "{", "\"courseid\"", ":", "course", ".", "get_id", "(", ")", ",", "\"students\"", ":", "student", "}", ",", "{", "\"$pull\"", ":", "{", "\"students\"", ":", "student", "}", "}", ")", "students", ".", "append", "(", "student", ")", "else", ":", "# Check if user can be registered", "user_info", "=", "self", ".", "user_manager", ".", "get_user_info", "(", "student", ")", "if", "user_info", "is", "None", "or", "student", "in", "tutor_list", ":", "errored_students", ".", "append", "(", "student", ")", "else", ":", "students", ".", "append", "(", "student", ")", "removed_students", "=", "[", "student", "for", "student", "in", "student_list", "if", "student", "not", "in", "new_data", "[", "\"students\"", "]", "]", "self", ".", "database", ".", "classrooms", ".", "find_one_and_update", "(", "{", "\"courseid\"", ":", "course", ".", "get_id", "(", ")", ",", "\"default\"", ":", "True", "}", ",", "{", "\"$push\"", ":", "{", "\"students\"", ":", "{", "\"$each\"", ":", "removed_students", "}", "}", "}", ")", "new_data", "[", "\"students\"", "]", "=", "students", "# Check the groups", "for", "group", "in", "new_data", "[", "\"groups\"", "]", ":", "group", "[", "\"students\"", "]", "=", "[", "student", "for", "student", "in", "map", "(", "str", ".", "strip", ",", "group", "[", "\"students\"", "]", ")", "if", "student", "in", "new_data", "[", "\"students\"", "]", "]", "if", "len", "(", "group", "[", "\"students\"", "]", ")", "<=", "group", "[", "\"size\"", "]", ":", "groups", ".", "append", "(", "group", ")", "new_data", "[", "\"groups\"", "]", "=", "groups", "classroom", "=", "self", ".", "database", ".", "classrooms", ".", "find_one_and_update", "(", "{", "\"_id\"", ":", "ObjectId", "(", "classroomid", ")", "}", ",", "{", "\"$set\"", ":", "{", "\"description\"", ":", "new_data", "[", "\"description\"", "]", ",", "\"students\"", ":", "students", ",", "\"tutors\"", ":", "new_data", "[", "\"tutors\"", "]", ",", "\"groups\"", ":", "groups", "}", "}", ",", "return_document", "=", "ReturnDocument", ".", "AFTER", ")", "return", "classroom", ",", "errored_students" ]
Retrieve the surronding reference region from a variant .
def variant_context ( reference_fasta , contig , inclusive_start , inclusive_end , alt , context_length ) : # Move from 1-base coorindates to 0-base coordinates start = int ( inclusive_start ) - 1 end = int ( inclusive_end ) full_sequence = reference_fasta [ contig ] left = str ( full_sequence [ start - context_length : start ] . seq ) . upper ( ) middle = str ( full_sequence [ start : end ] . seq ) . upper ( ) right = str ( full_sequence [ end : end + context_length ] . seq ) . upper ( ) # Complement and reverse the context if necessary so the ref base is a # pyrmidine (C/T) if middle [ 0 ] in ( 'A' , 'G' ) : context_5prime = pyfaidx . complement ( right ) [ : : - 1 ] context_3prime = pyfaidx . complement ( left ) [ : : - 1 ] context_mutation = "%s>%s" % ( pyfaidx . complement ( middle ) [ : : - 1 ] , pyfaidx . complement ( alt ) [ : : - 1 ] ) else : context_5prime = left context_3prime = right context_mutation = "%s>%s" % ( middle , alt ) return ( context_5prime , context_mutation , context_3prime )
10,496
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/sequence_context.py#L17-L85
[ "def", "find_username_from_user_id", "(", "session", ",", "user_id", ")", ":", "comments_page", "=", "session", ".", "session", ".", "get", "(", "u'http://myanimelist.net/comments.php?'", "+", "urllib", ".", "urlencode", "(", "{", "'id'", ":", "int", "(", "user_id", ")", "}", ")", ")", ".", "text", "comments_page", "=", "bs4", ".", "BeautifulSoup", "(", "comments_page", ")", "username_elt", "=", "comments_page", ".", "find", "(", "'h1'", ")", "if", "\"'s Comments\"", "not", "in", "username_elt", ".", "text", ":", "raise", "InvalidUserError", "(", "user_id", ",", "message", "=", "\"Invalid user ID given when looking up username\"", ")", "return", "username_elt", ".", "text", ".", "replace", "(", "\"'s Comments\"", ",", "\"\"", ")" ]
Compute the similarity with the provided other trigram .
def similarity ( self , other : 'Trigram' ) -> float : if not len ( self . _trigrams ) or not len ( other . _trigrams ) : return 0 count = float ( len ( self . _trigrams & other . _trigrams ) ) len1 = float ( len ( self . _trigrams ) ) len2 = float ( len ( other . _trigrams ) ) return count / ( len1 + len2 - count )
10,497
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/trigram.py#L89-L100
[ "def", "stations", "(", "self", ",", "*", ",", "generated", "=", "True", ",", "library", "=", "True", ")", ":", "station_list", "=", "[", "]", "for", "chunk", "in", "self", ".", "stations_iter", "(", "page_size", "=", "49995", ")", ":", "for", "station", "in", "chunk", ":", "if", "(", "(", "generated", "and", "not", "station", ".", "get", "(", "'inLibrary'", ")", ")", "or", "(", "library", "and", "station", ".", "get", "(", "'inLibrary'", ")", ")", ")", ":", "station_list", ".", "append", "(", "station", ")", "return", "station_list" ]
Match a trigram with another one . If the negative matching wins returns an inverted matching .
def _match ( self , local : Tuple [ Trigram , ... ] , other : Trigram ) -> float : pos = local [ 0 ] % other neg = max ( ( x % other for x in local [ 1 : ] ) , default = 0 ) if neg > pos : return 0.0 return pos
10,498
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/trigram.py#L120-L132
[ "def", "TrimVariableTable", "(", "self", ",", "new_size", ")", ":", "def", "ProcessBufferFull", "(", "variables", ")", ":", "for", "variable", "in", "variables", ":", "var_index", "=", "variable", ".", "get", "(", "'varTableIndex'", ")", "if", "var_index", "is", "not", "None", "and", "(", "var_index", ">=", "new_size", ")", ":", "variable", "[", "'varTableIndex'", "]", "=", "0", "# Buffer full.", "members", "=", "variable", ".", "get", "(", "'members'", ")", "if", "members", "is", "not", "None", ":", "ProcessBufferFull", "(", "members", ")", "del", "self", ".", "_var_table", "[", "new_size", ":", "]", "ProcessBufferFull", "(", "self", ".", "breakpoint", "[", "'evaluatedExpressions'", "]", ")", "for", "stack_frame", "in", "self", ".", "breakpoint", "[", "'stackFrames'", "]", ":", "ProcessBufferFull", "(", "stack_frame", "[", "'arguments'", "]", ")", "ProcessBufferFull", "(", "stack_frame", "[", "'locals'", "]", ")", "ProcessBufferFull", "(", "self", ".", "_var_table", ")" ]
Find the best similarity within known trigrams .
def similarity ( self , other : Trigram ) -> float : return max ( ( self . _match ( x , other ) for x in self . trigrams ) , default = 0 )
10,499
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/trigram.py#L134-L138
[ "def", "stations", "(", "self", ",", "*", ",", "generated", "=", "True", ",", "library", "=", "True", ")", ":", "station_list", "=", "[", "]", "for", "chunk", "in", "self", ".", "stations_iter", "(", "page_size", "=", "49995", ")", ":", "for", "station", "in", "chunk", ":", "if", "(", "(", "generated", "and", "not", "station", ".", "get", "(", "'inLibrary'", ")", ")", "or", "(", "library", "and", "station", ".", "get", "(", "'inLibrary'", ")", ")", ")", ":", "station_list", ".", "append", "(", "station", ")", "return", "station_list" ]