query
stringlengths 5
1.23k
| positive
stringlengths 53
15.2k
| id_
int64 0
252k
| task_name
stringlengths 87
242
| negative
listlengths 20
553
|
|---|---|---|---|---|
Takes a model instance and converts it into a dict .
|
def model_to_dict ( instance , * * options ) : options = _defaults ( options ) attrs = { } if options [ 'prehook' ] : if isinstance ( options [ 'prehook' ] , collections . Callable ) : instance = options [ 'prehook' ] ( instance ) if instance is None : return attrs # Items in the `fields` list are the output aliases, not the raw # accessors (field, method, property names) for alias in options [ 'fields' ] : # Get the accessor for the object accessor = options [ 'aliases' ] . get ( alias , alias ) # Create the key that will be used in the output dict key = options [ 'prefix' ] + alias # Optionally camelcase the key if options [ 'camelcase' ] : key = convert_to_camel ( key ) # Get the field value. Use the mapped value to the actually property or # method name. `value` may be a number of things, so the various types # are checked below. value = get_field_value ( instance , accessor , allow_missing = options [ 'allow_missing' ] ) # Related objects, perform some checks on their options if isinstance ( value , ( models . Model , QuerySet ) ) : _options = _defaults ( options [ 'related' ] . get ( accessor , { } ) ) # If the `prefix` follows the below template, generate the # `prefix` for the related object if '%(accessor)s' in _options [ 'prefix' ] : _options [ 'prefix' ] = _options [ 'prefix' ] % { 'accessor' : alias } if isinstance ( value , models . Model ) : if len ( _options [ 'fields' ] ) == 1 and _options [ 'flat' ] and not _options [ 'merge' ] : value = list ( serialize ( value , * * _options ) . values ( ) ) [ 0 ] else : # Recurse, get the dict representation _attrs = serialize ( value , * * _options ) # Check if this object should be merged into the parent, # otherwise nest it under the accessor name if _options [ 'merge' ] : attrs . 
update ( _attrs ) continue value = _attrs else : value = serialize ( value , * * _options ) attrs [ key ] = value # Apply post-hook to serialized attributes if options [ 'posthook' ] : attrs = options [ 'posthook' ] ( instance , attrs ) return attrs
| 1,200
|
https://github.com/bruth/django-preserialize/blob/d772c224bd8c2c9e9ff997d82c54fe6ebb9444b6/preserialize/serialize.py#L59-L124
|
[
"def",
"_check",
"(",
"self",
")",
":",
"_logger",
".",
"debug",
"(",
"'Check if timeout.'",
")",
"self",
".",
"_call_later_handle",
"=",
"None",
"if",
"self",
".",
"_touch_time",
"is",
"not",
"None",
":",
"difference",
"=",
"self",
".",
"_event_loop",
".",
"time",
"(",
")",
"-",
"self",
".",
"_touch_time",
"_logger",
".",
"debug",
"(",
"'Time difference %s'",
",",
"difference",
")",
"if",
"difference",
">",
"self",
".",
"_timeout",
":",
"self",
".",
"_connection",
".",
"close",
"(",
")",
"self",
".",
"_timed_out",
"=",
"True",
"if",
"not",
"self",
".",
"_connection",
".",
"closed",
"(",
")",
":",
"self",
".",
"_schedule",
"(",
")"
] |
Sets the root save directory for saving screenshots . Screenshots will be saved in subdirectories under this directory by browser window size .
|
def set_save_directory ( base , source ) : root = os . path . join ( base , source ) if not os . path . isdir ( root ) : os . makedirs ( root ) world . screenshot_root = root
| 1,201
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/screenshot.py#L13-L22
|
[
"def",
"render_unregistered",
"(",
"error",
"=",
"None",
")",
":",
"return",
"template",
"(",
"read_index_template",
"(",
")",
",",
"registered",
"=",
"False",
",",
"error",
"=",
"error",
",",
"seeder_data",
"=",
"None",
",",
"url_id",
"=",
"None",
",",
")"
] |
Change a user s password .
|
def change_password ( self , id , new , old = None , change_token = True ) : # pylint: disable=invalid-name,redefined-builtin schema = UserSchema ( exclude = ( 'password' , 'password_confirm' ) ) resp = self . service . post ( self . base + str ( id ) + '/password/' , params = { 'change_token' : change_token } , json = { 'old' : old , 'new' : new , 'new_confirm' : new } ) return self . service . decode ( schema , resp )
| 1,202
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/users.py#L154-L168
|
[
"def",
"delete_attachments",
"(",
"self",
",",
"volumeID",
",",
"attachmentsID",
")",
":",
"log",
".",
"debug",
"(",
"\"deleting attachments from volume '{}': {}\"",
".",
"format",
"(",
"volumeID",
",",
"attachmentsID",
")",
")",
"rawVolume",
"=",
"self",
".",
"_req_raw_volume",
"(",
"volumeID",
")",
"insID",
"=",
"[",
"a",
"[",
"'id'",
"]",
"for",
"a",
"in",
"rawVolume",
"[",
"'_source'",
"]",
"[",
"'_attachments'",
"]",
"]",
"# check that all requested file are present",
"for",
"id",
"in",
"attachmentsID",
":",
"if",
"id",
"not",
"in",
"insID",
":",
"raise",
"NotFoundException",
"(",
"\"could not found attachment '{}' of the volume '{}'\"",
".",
"format",
"(",
"id",
",",
"volumeID",
")",
")",
"for",
"index",
",",
"id",
"in",
"enumerate",
"(",
"attachmentsID",
")",
":",
"rawVolume",
"[",
"'_source'",
"]",
"[",
"'_attachments'",
"]",
".",
"pop",
"(",
"insID",
".",
"index",
"(",
"id",
")",
")",
"self",
".",
"_db",
".",
"modify_book",
"(",
"volumeID",
",",
"rawVolume",
"[",
"'_source'",
"]",
",",
"version",
"=",
"rawVolume",
"[",
"'_version'",
"]",
")"
] |
Change a user s token .
|
def change_token ( self , id ) : # pylint: disable=invalid-name,redefined-builtin schema = UserSchema ( exclude = ( 'password' , 'password_confirm' ) ) resp = self . service . post ( self . base + str ( id ) + '/token/' ) return self . service . decode ( schema , resp )
| 1,203
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/users.py#L170-L179
|
[
"def",
"removeAllChildrenAtIndex",
"(",
"self",
",",
"parentIndex",
")",
":",
"if",
"not",
"parentIndex",
".",
"isValid",
"(",
")",
":",
"logger",
".",
"debug",
"(",
"\"No valid item selected for deletion (ignored).\"",
")",
"return",
"parentItem",
"=",
"self",
".",
"getItem",
"(",
"parentIndex",
",",
"None",
")",
"logger",
".",
"debug",
"(",
"\"Removing children of {!r}\"",
".",
"format",
"(",
"parentItem",
")",
")",
"assert",
"parentItem",
",",
"\"parentItem not found\"",
"#firstChildRow = self.index(0, 0, parentIndex).row()",
"#lastChildRow = self.index(parentItem.nChildren()-1, 0, parentIndex).row()",
"#logger.debug(\"Removing rows: {} to {}\".format(firstChildRow, lastChildRow))",
"#self.beginRemoveRows(parentIndex, firstChildRow, lastChildRow)",
"self",
".",
"beginRemoveRows",
"(",
"parentIndex",
",",
"0",
",",
"parentItem",
".",
"nChildren",
"(",
")",
"-",
"1",
")",
"try",
":",
"parentItem",
".",
"removeAllChildren",
"(",
")",
"finally",
":",
"self",
".",
"endRemoveRows",
"(",
")",
"logger",
".",
"debug",
"(",
"\"removeAllChildrenAtIndex completed\"",
")"
] |
Bulk copy a set of users .
|
def bulk_copy ( self , ids ) : schema = UserSchema ( ) return self . service . bulk_copy ( self . base , self . RESOURCE , ids , schema )
| 1,204
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/users.py#L188-L195
|
[
"def",
"is_citeable",
"(",
"publication_info",
")",
":",
"def",
"_item_has_pub_info",
"(",
"item",
")",
":",
"return",
"all",
"(",
"key",
"in",
"item",
"for",
"key",
"in",
"(",
"'journal_title'",
",",
"'journal_volume'",
")",
")",
"def",
"_item_has_page_or_artid",
"(",
"item",
")",
":",
"return",
"any",
"(",
"key",
"in",
"item",
"for",
"key",
"in",
"(",
"'page_start'",
",",
"'artid'",
")",
")",
"has_pub_info",
"=",
"any",
"(",
"_item_has_pub_info",
"(",
"item",
")",
"for",
"item",
"in",
"publication_info",
")",
"has_page_or_artid",
"=",
"any",
"(",
"_item_has_page_or_artid",
"(",
"item",
")",
"for",
"item",
"in",
"publication_info",
")",
"return",
"has_pub_info",
"and",
"has_page_or_artid"
] |
Get a list of a device s attachments .
|
def list ( self , id , filter = None , type = None , sort = None , limit = None , page = None ) : # pylint: disable=invalid-name,redefined-builtin schema = AttachmentSchema ( exclude = ( 'path' ) ) resp = self . service . list ( self . _base ( id ) , filter , type , sort , limit , page ) at , l = self . service . decode ( schema , resp , many = True , links = True ) return Page ( at , l )
| 1,205
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/attachments.py#L68-L82
|
[
"def",
"build_catalog_info",
"(",
"self",
",",
"catalog_info",
")",
":",
"cat",
"=",
"SourceFactory",
".",
"build_catalog",
"(",
"*",
"*",
"catalog_info",
")",
"catalog_info",
"[",
"'catalog'",
"]",
"=",
"cat",
"# catalog_info['catalog_table'] =",
"# Table.read(catalog_info['catalog_file'])",
"catalog_info",
"[",
"'catalog_table'",
"]",
"=",
"cat",
".",
"table",
"catalog_info",
"[",
"'roi_model'",
"]",
"=",
"SourceFactory",
".",
"make_fermipy_roi_model_from_catalogs",
"(",
"[",
"cat",
"]",
")",
"catalog_info",
"[",
"'srcmdl_name'",
"]",
"=",
"self",
".",
"_name_factory",
".",
"srcmdl_xml",
"(",
"sourcekey",
"=",
"catalog_info",
"[",
"'catalog_name'",
"]",
")",
"return",
"CatalogInfo",
"(",
"*",
"*",
"catalog_info",
")"
] |
Get a list of attachments . Whereas list fetches a single page of attachments according to its limit and page arguments iter_list returns all attachments by internally making successive calls to list .
|
def iter_list ( self , id , * args , * * kwargs ) : l = partial ( self . list , id ) return self . service . iter_list ( l , * args , * * kwargs )
| 1,206
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/attachments.py#L84-L97
|
[
"def",
"apply_crtomo_cfg",
"(",
"self",
")",
":",
"for",
"key",
"in",
"sorted",
"(",
"self",
".",
"tds",
".",
"keys",
"(",
")",
")",
":",
"self",
".",
"tds",
"[",
"key",
"]",
".",
"crtomo_cfg",
"=",
"self",
".",
"crtomo_cfg",
".",
"copy",
"(",
")"
] |
Get a device s attachment .
|
def get ( self , id , attid ) : # pylint: disable=invalid-name,redefined-builtin schema = AttachmentSchema ( ) resp = self . service . get_id ( self . _base ( id ) , attid ) return self . service . decode ( schema , resp )
| 1,207
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/attachments.py#L99-L109
|
[
"def",
"KL",
"(",
"self",
",",
"other",
")",
":",
"return",
".5",
"*",
"(",
"np",
".",
"sum",
"(",
"self",
".",
"variance",
"/",
"other",
".",
"variance",
")",
"+",
"(",
"(",
"other",
".",
"mean",
"-",
"self",
".",
"mean",
")",
"**",
"2",
"/",
"other",
".",
"variance",
")",
".",
"sum",
"(",
")",
"-",
"self",
".",
"num_data",
"*",
"self",
".",
"input_dim",
"+",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"other",
".",
"variance",
")",
")",
"-",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"self",
".",
"variance",
")",
")",
")"
] |
Add an attachment to a device .
|
def create ( self , id , fd , filename = 'attachment-name' ) : # pylint: disable=invalid-name,redefined-builtin schema = AttachmentSchema ( exclude = ( 'id' , 'created' , 'updated' , 'size' , 'path' , 'device_id' ) ) resp = self . service . post ( self . _base ( id ) , files = { 'file' : ( filename , fd ) } ) return self . service . decode ( schema , resp )
| 1,208
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/attachments.py#L111-L123
|
[
"def",
"Modify",
"(",
"self",
",",
"client_limit",
"=",
"None",
",",
"client_rate",
"=",
"None",
",",
"duration",
"=",
"None",
")",
":",
"args",
"=",
"hunt_pb2",
".",
"ApiModifyHuntArgs",
"(",
"hunt_id",
"=",
"self",
".",
"hunt_id",
")",
"if",
"client_limit",
"is",
"not",
"None",
":",
"args",
".",
"client_limit",
"=",
"client_limit",
"if",
"client_rate",
"is",
"not",
"None",
":",
"args",
".",
"client_rate",
"=",
"client_rate",
"if",
"duration",
"is",
"not",
"None",
":",
"args",
".",
"duration",
"=",
"duration",
"data",
"=",
"self",
".",
"_context",
".",
"SendRequest",
"(",
"\"ModifyHunt\"",
",",
"args",
")",
"return",
"Hunt",
"(",
"data",
"=",
"data",
",",
"context",
"=",
"self",
".",
"_context",
")"
] |
Download a device s attachment .
|
def download ( self , id , attid ) : # pylint: disable=invalid-name,redefined-builtin resp = self . service . get_id ( self . _base ( id ) , attid , params = { 'format' : 'download' } , stream = True ) b = io . BytesIO ( ) stream . stream_response_to_file ( resp , path = b ) resp . close ( ) b . seek ( 0 ) return ( b , self . service . filename ( resp ) )
| 1,209
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/attachments.py#L125-L137
|
[
"def",
"KL",
"(",
"self",
",",
"other",
")",
":",
"return",
".5",
"*",
"(",
"np",
".",
"sum",
"(",
"self",
".",
"variance",
"/",
"other",
".",
"variance",
")",
"+",
"(",
"(",
"other",
".",
"mean",
"-",
"self",
".",
"mean",
")",
"**",
"2",
"/",
"other",
".",
"variance",
")",
".",
"sum",
"(",
")",
"-",
"self",
".",
"num_data",
"*",
"self",
".",
"input_dim",
"+",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"other",
".",
"variance",
")",
")",
"-",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"self",
".",
"variance",
")",
")",
")"
] |
Edit a device s attachment .
|
def edit ( self , resource ) : # pylint: disable=invalid-name,redefined-builtin schema = AttachmentSchema ( exclude = ( 'id' , 'created' , 'updated' , 'size' , 'path' , 'device_id' ) ) json = self . service . encode ( schema , resource ) schema = AttachmentSchema ( ) resp = self . service . edit ( self . _base ( resource . device_id ) , resource . id , json ) return self . service . decode ( schema , resp )
| 1,210
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/attachments.py#L156-L168
|
[
"def",
"KL",
"(",
"self",
",",
"other",
")",
":",
"return",
".5",
"*",
"(",
"np",
".",
"sum",
"(",
"self",
".",
"variance",
"/",
"other",
".",
"variance",
")",
"+",
"(",
"(",
"other",
".",
"mean",
"-",
"self",
".",
"mean",
")",
"**",
"2",
"/",
"other",
".",
"variance",
")",
".",
"sum",
"(",
")",
"-",
"self",
".",
"num_data",
"*",
"self",
".",
"input_dim",
"+",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"other",
".",
"variance",
")",
")",
"-",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"self",
".",
"variance",
")",
")",
")"
] |
Delete a device s attachment .
|
def delete ( self , id , attid ) : # pylint: disable=invalid-name,redefined-builtin return self . service . edit ( self . _base ( id ) , attid )
| 1,211
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/attachments.py#L170-L176
|
[
"def",
"KL",
"(",
"self",
",",
"other",
")",
":",
"return",
".5",
"*",
"(",
"np",
".",
"sum",
"(",
"self",
".",
"variance",
"/",
"other",
".",
"variance",
")",
"+",
"(",
"(",
"other",
".",
"mean",
"-",
"self",
".",
"mean",
")",
"**",
"2",
"/",
"other",
".",
"variance",
")",
".",
"sum",
"(",
")",
"-",
"self",
".",
"num_data",
"*",
"self",
".",
"input_dim",
"+",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"other",
".",
"variance",
")",
")",
"-",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"self",
".",
"variance",
")",
")",
")"
] |
Get a device by name .
|
def get_by_name ( self , name ) : # pylint: disable=invalid-name,redefined-builtin rs , _ = self . list ( filter = field ( 'name' ) . eq ( name ) , limit = 1 ) if len ( rs ) is 0 : raise CDRouterError ( 'no such device' ) return rs [ 0 ]
| 1,212
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/devices.py#L229-L239
|
[
"def",
"_place_rows_and_render_borders",
"(",
"container",
",",
"rendered_rows",
")",
":",
"def",
"draw_cell_border",
"(",
"rendered_cell",
",",
"cell_height",
",",
"container",
")",
":",
"cell_width",
"=",
"rendered_cell",
".",
"width",
"background",
"=",
"TableCellBackground",
"(",
"(",
"0",
",",
"0",
")",
",",
"cell_width",
",",
"cell_height",
",",
"parent",
"=",
"rendered_cell",
".",
"cell",
")",
"background",
".",
"render",
"(",
"container",
")",
"for",
"position",
"in",
"(",
"'top'",
",",
"'right'",
",",
"'bottom'",
",",
"'left'",
")",
":",
"border",
"=",
"TableCellBorder",
"(",
"rendered_cell",
",",
"cell_height",
",",
"position",
")",
"border",
".",
"render",
"(",
"container",
")",
"return",
"background",
"y_cursor",
"=",
"container",
".",
"cursor",
"for",
"r",
",",
"rendered_row",
"in",
"enumerate",
"(",
"rendered_rows",
")",
":",
"container",
".",
"advance",
"(",
"rendered_row",
".",
"height",
")",
"if",
"rendered_row",
".",
"index",
"==",
"0",
":",
"container",
".",
"register_styled",
"(",
"rendered_row",
".",
"row",
".",
"section",
")",
"container",
".",
"register_styled",
"(",
"rendered_row",
".",
"row",
")",
"for",
"c",
",",
"rendered_cell",
"in",
"enumerate",
"(",
"rendered_row",
")",
":",
"cell_height",
"=",
"sum",
"(",
"rendered_row",
".",
"height",
"for",
"rendered_row",
"in",
"rendered_rows",
"[",
"r",
":",
"r",
"+",
"rendered_cell",
".",
"rowspan",
"]",
")",
"x_cursor",
"=",
"rendered_cell",
".",
"x_position",
"y_pos",
"=",
"float",
"(",
"y_cursor",
"+",
"cell_height",
")",
"cell_container",
"=",
"VirtualContainer",
"(",
"container",
")",
"background",
"=",
"draw_cell_border",
"(",
"rendered_cell",
",",
"cell_height",
",",
"cell_container",
")",
"cell_container",
".",
"place_at",
"(",
"container",
",",
"x_cursor",
",",
"y_pos",
")",
"vertical_align",
"=",
"rendered_cell",
".",
"cell",
".",
"get_style",
"(",
"'vertical_align'",
",",
"container",
")",
"if",
"vertical_align",
"==",
"VerticalAlign",
".",
"TOP",
":",
"vertical_offset",
"=",
"0",
"elif",
"vertical_align",
"==",
"VerticalAlign",
".",
"MIDDLE",
":",
"vertical_offset",
"=",
"(",
"cell_height",
"-",
"rendered_cell",
".",
"height",
")",
"/",
"2",
"elif",
"vertical_align",
"==",
"VerticalAlign",
".",
"BOTTOM",
":",
"vertical_offset",
"=",
"(",
"cell_height",
"-",
"rendered_cell",
".",
"height",
")",
"y_offset",
"=",
"float",
"(",
"y_cursor",
"+",
"vertical_offset",
")",
"rendered_cell",
".",
"container",
".",
"place_at",
"(",
"container",
",",
"x_cursor",
",",
"y_offset",
")",
"container",
".",
"register_styled",
"(",
"background",
")",
"y_cursor",
"+=",
"rendered_row",
".",
"height"
] |
Edit a device .
|
def edit ( self , resource ) : schema = DeviceSchema ( exclude = ( 'id' , 'created' , 'updated' , 'result_id' , 'attachments_dir' ) ) json = self . service . encode ( schema , resource ) schema = DeviceSchema ( ) resp = self . service . edit ( self . base , resource . id , json ) return self . service . decode ( schema , resp )
| 1,213
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/devices.py#L255-L267
|
[
"def",
"clean_subject_location",
"(",
"self",
")",
":",
"cleaned_data",
"=",
"super",
"(",
"ImageAdminForm",
",",
"self",
")",
".",
"clean",
"(",
")",
"subject_location",
"=",
"cleaned_data",
"[",
"'subject_location'",
"]",
"if",
"not",
"subject_location",
":",
"# if supplied subject location is empty, do not check it",
"return",
"subject_location",
"# use thumbnail's helper function to check the format",
"coordinates",
"=",
"normalize_subject_location",
"(",
"subject_location",
")",
"if",
"not",
"coordinates",
":",
"err_msg",
"=",
"ugettext_lazy",
"(",
"'Invalid subject location format. '",
")",
"err_code",
"=",
"'invalid_subject_format'",
"elif",
"(",
"coordinates",
"[",
"0",
"]",
">",
"self",
".",
"instance",
".",
"width",
"or",
"coordinates",
"[",
"1",
"]",
">",
"self",
".",
"instance",
".",
"height",
")",
":",
"err_msg",
"=",
"ugettext_lazy",
"(",
"'Subject location is outside of the image. '",
")",
"err_code",
"=",
"'subject_out_of_bounds'",
"else",
":",
"return",
"subject_location",
"self",
".",
"_set_previous_subject_location",
"(",
"cleaned_data",
")",
"raise",
"forms",
".",
"ValidationError",
"(",
"string_concat",
"(",
"err_msg",
",",
"ugettext_lazy",
"(",
"'Your input: \"{subject_location}\". '",
".",
"format",
"(",
"subject_location",
"=",
"subject_location",
")",
")",
",",
"'Previous value is restored.'",
")",
",",
"code",
"=",
"err_code",
")"
] |
Get information on proxy connection to a device s management interface .
|
def get_connection ( self , id ) : # pylint: disable=invalid-name,redefined-builtin schema = ConnectionSchema ( ) resp = self . service . get ( self . base + str ( id ) + '/connect/' ) return self . service . decode ( schema , resp )
| 1,214
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/devices.py#L301-L310
|
[
"def",
"write",
"(",
"filename",
",",
"data",
",",
"extname",
"=",
"None",
",",
"extver",
"=",
"None",
",",
"units",
"=",
"None",
",",
"compress",
"=",
"None",
",",
"table_type",
"=",
"'binary'",
",",
"header",
"=",
"None",
",",
"clobber",
"=",
"False",
",",
"*",
"*",
"keys",
")",
":",
"with",
"FITS",
"(",
"filename",
",",
"'rw'",
",",
"clobber",
"=",
"clobber",
",",
"*",
"*",
"keys",
")",
"as",
"fits",
":",
"fits",
".",
"write",
"(",
"data",
",",
"table_type",
"=",
"table_type",
",",
"units",
"=",
"units",
",",
"extname",
"=",
"extname",
",",
"extver",
"=",
"extver",
",",
"compress",
"=",
"compress",
",",
"header",
"=",
"header",
",",
"*",
"*",
"keys",
")"
] |
Open proxy connection to a device s management interface .
|
def connect ( self , id ) : # pylint: disable=invalid-name,redefined-builtin schema = ConnectionSchema ( ) resp = self . service . post ( self . base + str ( id ) + '/connect/' ) return self . service . decode ( schema , resp )
| 1,215
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/devices.py#L312-L321
|
[
"def",
"write",
"(",
"filename",
",",
"data",
",",
"extname",
"=",
"None",
",",
"extver",
"=",
"None",
",",
"units",
"=",
"None",
",",
"compress",
"=",
"None",
",",
"table_type",
"=",
"'binary'",
",",
"header",
"=",
"None",
",",
"clobber",
"=",
"False",
",",
"*",
"*",
"keys",
")",
":",
"with",
"FITS",
"(",
"filename",
",",
"'rw'",
",",
"clobber",
"=",
"clobber",
",",
"*",
"*",
"keys",
")",
"as",
"fits",
":",
"fits",
".",
"write",
"(",
"data",
",",
"table_type",
"=",
"table_type",
",",
"units",
"=",
"units",
",",
"extname",
"=",
"extname",
",",
"extver",
"=",
"extver",
",",
"compress",
"=",
"compress",
",",
"header",
"=",
"header",
",",
"*",
"*",
"keys",
")"
] |
Close proxy connection to a device s management interface .
|
def disconnect ( self , id ) : # pylint: disable=invalid-name,redefined-builtin return self . service . post ( self . base + str ( id ) + '/disconnect/' )
| 1,216
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/devices.py#L323-L328
|
[
"def",
"write",
"(",
"filename",
",",
"data",
",",
"extname",
"=",
"None",
",",
"extver",
"=",
"None",
",",
"units",
"=",
"None",
",",
"compress",
"=",
"None",
",",
"table_type",
"=",
"'binary'",
",",
"header",
"=",
"None",
",",
"clobber",
"=",
"False",
",",
"*",
"*",
"keys",
")",
":",
"with",
"FITS",
"(",
"filename",
",",
"'rw'",
",",
"clobber",
"=",
"clobber",
",",
"*",
"*",
"keys",
")",
"as",
"fits",
":",
"fits",
".",
"write",
"(",
"data",
",",
"table_type",
"=",
"table_type",
",",
"units",
"=",
"units",
",",
"extname",
"=",
"extname",
",",
"extver",
"=",
"extver",
",",
"compress",
"=",
"compress",
",",
"header",
"=",
"header",
",",
"*",
"*",
"keys",
")"
] |
Power on a device using it s power on command .
|
def power_on ( self , id ) : # pylint: disable=invalid-name,redefined-builtin schema = PowerCmdSchema ( ) resp = self . service . post ( self . base + str ( id ) + '/power/on/' ) return self . service . decode ( schema , resp )
| 1,217
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/devices.py#L330-L339
|
[
"def",
"parse_match",
"(",
"match",
")",
":",
"try",
":",
"upload",
"=",
"FileUpload",
".",
"objects",
".",
"get",
"(",
"slug",
"=",
"match",
".",
"group",
"(",
"1",
")",
")",
"except",
"FileUpload",
".",
"DoesNotExist",
":",
"upload",
"=",
"None",
"options",
"=",
"parse_options",
"(",
"match",
".",
"group",
"(",
"2",
")",
")",
"return",
"(",
"upload",
",",
"options",
")"
] |
Bulk copy a set of devices .
|
def bulk_copy ( self , ids ) : schema = DeviceSchema ( ) return self . service . bulk_copy ( self . base , self . RESOURCE , ids , schema )
| 1,218
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/devices.py#L360-L367
|
[
"def",
"add_table_ends",
"(",
"para",
",",
"oformat",
"=",
"'latex'",
",",
"caption",
"=",
"\"caption-text\"",
",",
"label",
"=",
"\"table\"",
")",
":",
"fpara",
"=",
"\"\"",
"if",
"oformat",
"==",
"'latex'",
":",
"fpara",
"+=",
"\"\\\\begin{table}[H]\\n\"",
"fpara",
"+=",
"\"\\\\centering\\n\"",
"fpara",
"+=",
"\"\\\\begin{tabular}{cc}\\n\"",
"fpara",
"+=",
"\"\\\\toprule\\n\"",
"fpara",
"+=",
"\"Parameter & Value \\\\\\\\\\n\"",
"fpara",
"+=",
"\"\\\\midrule\\n\"",
"fpara",
"+=",
"para",
"fpara",
"+=",
"\"\\\\bottomrule\\n\"",
"fpara",
"+=",
"\"\\\\end{tabular}\\n\"",
"fpara",
"+=",
"\"\\\\caption{%s \\label{tab:%s}}\\n\"",
"%",
"(",
"caption",
",",
"label",
")",
"fpara",
"+=",
"\"\\\\end{table}\\n\\n\"",
"return",
"fpara"
] |
Wrap value in list if it is not one .
|
def ensure_list ( value : Union [ T , Sequence [ T ] ] ) -> Sequence [ T ] : if value is None : return [ ] return value if isinstance ( value , list ) else [ value ]
| 1,219
|
https://github.com/ASMfreaK/yandex_weather_api/blob/d58ad80f7389dc3b58c721bb42c2441e9ff3e351/yandex_weather_api/types.py#L28-L32
|
[
"def",
"import_file",
"(",
"filename",
")",
":",
"#file_path = os.path.relpath(filename)",
"file_path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
"log",
"(",
"DEBUG",
",",
"\"Loading prices from %s\"",
",",
"file_path",
")",
"prices",
"=",
"__read_prices_from_file",
"(",
"file_path",
")",
"with",
"BookAggregate",
"(",
"for_writing",
"=",
"True",
")",
"as",
"svc",
":",
"svc",
".",
"prices",
".",
"import_prices",
"(",
"prices",
")",
"print",
"(",
"\"Saving book...\"",
")",
"svc",
".",
"book",
".",
"save",
"(",
")"
] |
Highlight string for terminal color coding .
|
def color ( out_string , color = 'grn' ) : c = { 'blk' : Fore . BLACK , 'blu' : Fore . BLUE , 'cyn' : Fore . CYAN , 'grn' : Fore . GREEN , 'mag' : Fore . MAGENTA , 'red' : Fore . RED , 'wht' : Fore . WHITE , 'yel' : Fore . YELLOW , } try : init ( ) return ( c [ color ] + Style . BRIGHT + out_string + Fore . RESET + Style . NORMAL ) except AttributeError : return out_string
| 1,220
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/color_utils.py#L6-L38
|
[
"def",
"returnJobReqs",
"(",
"self",
",",
"jobReqs",
")",
":",
"# Since we are only reading this job's specific values from the state file, we don't",
"# need a lock",
"jobState",
"=",
"self",
".",
"_JobState",
"(",
"self",
".",
"_CacheState",
".",
"_load",
"(",
"self",
".",
"cacheStateFile",
")",
".",
"jobState",
"[",
"self",
".",
"jobID",
"]",
")",
"for",
"x",
"in",
"list",
"(",
"jobState",
".",
"jobSpecificFiles",
".",
"keys",
"(",
")",
")",
":",
"self",
".",
"deleteLocalFile",
"(",
"x",
")",
"with",
"self",
".",
"_CacheState",
".",
"open",
"(",
"self",
")",
"as",
"cacheInfo",
":",
"cacheInfo",
".",
"sigmaJob",
"-=",
"jobReqs"
] |
Add color ANSI codes for diff lines .
|
def color_diffs ( string ) : string = string . replace ( '--- ' , color ( '--- ' , 'red' ) ) string = string . replace ( '\n+++ ' , color ( '\n+++ ' ) ) string = string . replace ( '\n-' , color ( '\n-' , 'red' ) ) string = string . replace ( '\n+' , color ( '\n+' ) ) string = string . replace ( '\n@@ ' , color ( '\n@@ ' , 'yel' ) ) return string
| 1,221
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/color_utils.py#L57-L74
|
[
"def",
"unregisterDataItem",
"(",
"self",
",",
"path",
")",
":",
"url",
"=",
"self",
".",
"_url",
"+",
"\"/unregisterItem\"",
"params",
"=",
"{",
"\"f\"",
":",
"\"json\"",
",",
"\"itempath\"",
":",
"path",
",",
"\"force\"",
":",
"\"true\"",
"}",
"return",
"self",
".",
"_post",
"(",
"url",
",",
"param_dict",
"=",
"params",
",",
"securityHandler",
"=",
"self",
".",
"_securityHandler",
",",
"proxy_url",
"=",
"self",
".",
"_proxy_url",
",",
"proxy_port",
"=",
"self",
".",
"_proxy_port",
")"
] |
List linked accounts .
|
def index ( ) : oauth = current_app . extensions [ 'oauthlib.client' ] services = [ ] service_map = { } i = 0 for appid , conf in six . iteritems ( current_app . config [ 'OAUTHCLIENT_REMOTE_APPS' ] ) : if not conf . get ( 'hide' , False ) : services . append ( dict ( appid = appid , title = conf [ 'title' ] , icon = conf . get ( 'icon' , None ) , description = conf . get ( 'description' , None ) , account = None ) ) service_map [ oauth . remote_apps [ appid ] . consumer_key ] = i i += 1 # Fetch already linked accounts accounts = RemoteAccount . query . filter_by ( user_id = current_user . get_id ( ) ) . all ( ) for a in accounts : if a . client_id in service_map : services [ service_map [ a . client_id ] ] [ 'account' ] = a # Sort according to title services . sort ( key = itemgetter ( 'title' ) ) return render_template ( 'invenio_oauthclient/settings/index.html' , services = services )
| 1,222
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/views/settings.py#L47-L83
|
[
"def",
"vn_release",
"(",
"call",
"=",
"None",
",",
"kwargs",
"=",
"None",
")",
":",
"if",
"call",
"!=",
"'function'",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The vn_reserve function must be called with -f or --function.'",
")",
"if",
"kwargs",
"is",
"None",
":",
"kwargs",
"=",
"{",
"}",
"vn_id",
"=",
"kwargs",
".",
"get",
"(",
"'vn_id'",
",",
"None",
")",
"vn_name",
"=",
"kwargs",
".",
"get",
"(",
"'vn_name'",
",",
"None",
")",
"path",
"=",
"kwargs",
".",
"get",
"(",
"'path'",
",",
"None",
")",
"data",
"=",
"kwargs",
".",
"get",
"(",
"'data'",
",",
"None",
")",
"if",
"vn_id",
":",
"if",
"vn_name",
":",
"log",
".",
"warning",
"(",
"'Both the \\'vn_id\\' and \\'vn_name\\' arguments were provided. '",
"'\\'vn_id\\' will take precedence.'",
")",
"elif",
"vn_name",
":",
"vn_id",
"=",
"get_vn_id",
"(",
"kwargs",
"=",
"{",
"'name'",
":",
"vn_name",
"}",
")",
"else",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The vn_release function requires a \\'vn_id\\' or a \\'vn_name\\' to '",
"'be provided.'",
")",
"if",
"data",
":",
"if",
"path",
":",
"log",
".",
"warning",
"(",
"'Both the \\'data\\' and \\'path\\' arguments were provided. '",
"'\\'data\\' will take precedence.'",
")",
"elif",
"path",
":",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"path",
",",
"mode",
"=",
"'r'",
")",
"as",
"rfh",
":",
"data",
"=",
"rfh",
".",
"read",
"(",
")",
"else",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The vn_release function requires either \\'data\\' or a \\'path\\' to '",
"'be provided.'",
")",
"server",
",",
"user",
",",
"password",
"=",
"_get_xml_rpc",
"(",
")",
"auth",
"=",
"':'",
".",
"join",
"(",
"[",
"user",
",",
"password",
"]",
")",
"response",
"=",
"server",
".",
"one",
".",
"vn",
".",
"release",
"(",
"auth",
",",
"int",
"(",
"vn_id",
")",
",",
"data",
")",
"ret",
"=",
"{",
"'action'",
":",
"'vn.release'",
",",
"'released'",
":",
"response",
"[",
"0",
"]",
",",
"'resource_id'",
":",
"response",
"[",
"1",
"]",
",",
"'error_code'",
":",
"response",
"[",
"2",
"]",
",",
"}",
"return",
"ret"
] |
Return the id of a label s for attribute
|
def element_id_by_label ( browser , label ) : label = XPathSelector ( browser , unicode ( '//label[contains(., "%s")]' % label ) ) if not label : return False return label . get_attribute ( 'for' )
| 1,223
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/util.py#L141-L147
|
[
"def",
"load",
"(",
"cls",
",",
"fname",
",",
"args",
")",
":",
"if",
"args",
".",
"type",
"==",
"JSON",
":",
"if",
"fname",
".",
"endswith",
"(",
"'.bz2'",
")",
":",
"open_",
"=",
"bz2",
".",
"open",
"else",
":",
"open_",
"=",
"open",
"if",
"args",
".",
"progress",
":",
"print",
"(",
"'Loading JSON data...'",
")",
"with",
"open_",
"(",
"fname",
",",
"'rt'",
")",
"as",
"fp",
":",
"storage",
"=",
"JsonStorage",
".",
"load",
"(",
"fp",
")",
"else",
":",
"storage",
"=",
"SqliteStorage",
".",
"load",
"(",
"fname",
")",
"if",
"args",
".",
"settings",
"is",
"not",
"None",
":",
"extend",
"(",
"storage",
".",
"settings",
",",
"args",
".",
"settings",
")",
"return",
"cls",
".",
"from_storage",
"(",
"storage",
")"
] |
Locate an input field of a given value
|
def find_field ( browser , field , value ) : return find_field_by_id ( browser , field , value ) + find_field_by_name ( browser , field , value ) + find_field_by_label ( browser , field , value )
| 1,224
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/util.py#L197-L206
|
[
"def",
"add_otp_style",
"(",
"self",
",",
"zip_odp",
",",
"style_file",
")",
":",
"style",
"=",
"zipwrap",
".",
"Zippier",
"(",
"style_file",
")",
"for",
"picture_file",
"in",
"style",
".",
"ls",
"(",
"\"Pictures\"",
")",
":",
"zip_odp",
".",
"write",
"(",
"picture_file",
",",
"style",
".",
"cat",
"(",
"picture_file",
",",
"True",
")",
")",
"xml_data",
"=",
"style",
".",
"cat",
"(",
"\"styles.xml\"",
",",
"False",
")",
"# import pdb;pdb.set_trace()",
"xml_data",
"=",
"self",
".",
"override_styles",
"(",
"xml_data",
")",
"zip_odp",
".",
"write",
"(",
"\"styles.xml\"",
",",
"xml_data",
")"
] |
Find a field of any of the specified types .
|
def find_any_field ( browser , field_types , field_name ) : return reduce ( operator . add , ( find_field ( browser , field_type , field_name ) for field_type in field_types ) )
| 1,225
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/util.py#L209-L218
|
[
"def",
"set_password",
"(",
"self",
",",
"service",
",",
"username",
",",
"password",
")",
":",
"# encrypt the password",
"password_encrypted",
"=",
"_win_crypto",
".",
"encrypt",
"(",
"password",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"# encode with base64",
"password_base64",
"=",
"base64",
".",
"encodestring",
"(",
"password_encrypted",
")",
"# encode again to unicode",
"password_saved",
"=",
"password_base64",
".",
"decode",
"(",
"'ascii'",
")",
"# store the password",
"key_name",
"=",
"self",
".",
"_key_for_service",
"(",
"service",
")",
"hkey",
"=",
"winreg",
".",
"CreateKey",
"(",
"winreg",
".",
"HKEY_CURRENT_USER",
",",
"key_name",
")",
"winreg",
".",
"SetValueEx",
"(",
"hkey",
",",
"username",
",",
"0",
",",
"winreg",
".",
"REG_SZ",
",",
"password_saved",
")"
] |
Locate the control input that has a label pointing to it
|
def find_field_by_label ( browser , field , label ) : return XPathSelector ( browser , field_xpath ( field , 'id' , escape = False ) % u'//label[contains(., "{0}")]/@for' . format ( label ) )
| 1,226
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/util.py#L246-L257
|
[
"def",
"readme_verify",
"(",
")",
":",
"expected",
"=",
"populate_readme",
"(",
"REVISION",
",",
"RTD_VERSION",
")",
"# Actually get the stored contents.",
"with",
"open",
"(",
"README_FILE",
",",
"\"r\"",
")",
"as",
"file_obj",
":",
"contents",
"=",
"file_obj",
".",
"read",
"(",
")",
"if",
"contents",
"!=",
"expected",
":",
"err_msg",
"=",
"\"\\n\"",
"+",
"get_diff",
"(",
"contents",
",",
"expected",
",",
"\"README.rst.actual\"",
",",
"\"README.rst.expected\"",
")",
"raise",
"ValueError",
"(",
"err_msg",
")",
"else",
":",
"print",
"(",
"\"README contents are as expected.\"",
")"
] |
A decorator to invoke a function periodically until it returns a truthy value .
|
def wait_for ( func ) : def wrapped ( * args , * * kwargs ) : timeout = kwargs . pop ( 'timeout' , 15 ) start = time ( ) result = None while time ( ) - start < timeout : result = func ( * args , * * kwargs ) if result : break sleep ( 0.2 ) return result return wrapped
| 1,227
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/util.py#L278-L298
|
[
"def",
"_fix_mates",
"(",
"orig_file",
",",
"out_file",
",",
"ref_file",
",",
"config",
")",
":",
"if",
"not",
"file_exists",
"(",
"out_file",
")",
":",
"with",
"file_transaction",
"(",
"config",
",",
"out_file",
")",
"as",
"tx_out_file",
":",
"samtools",
"=",
"config_utils",
".",
"get_program",
"(",
"\"samtools\"",
",",
"config",
")",
"cmd",
"=",
"\"{samtools} view -bS -h -t {ref_file}.fai -F 8 {orig_file} > {tx_out_file}\"",
"do",
".",
"run",
"(",
"cmd",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
",",
"\"Fix mate pairs in TopHat output\"",
",",
"{",
"}",
")",
"return",
"out_file"
] |
Returns a dictionary of variables and their possibly os - dependent defaults .
|
def get_defaults ( ) : DEFAULTS = { } # Determine the run-time pipe read/write buffer. if 'PC_PIPE_BUF' in os . pathconf_names : # unix x , y = os . pipe ( ) DEFAULTS [ 'PIPE_BUF' ] = os . fpathconf ( x , "PC_PIPE_BUF" ) else : # in Jython 16384 # on windows 512 # in jython in windows 512 DEFAULTS [ 'PIPE_BUF' ] = 512 # Determine the run-time socket buffers. # Note that this number is determine on the papy server # and inherited by the clients. tcp_sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) DEFAULTS [ 'TCP_SNDBUF' ] = tcp_sock . getsockopt ( socket . SOL_SOCKET , socket . SO_SNDBUF ) DEFAULTS [ 'TCP_RCVBUF' ] = tcp_sock . getsockopt ( socket . SOL_SOCKET , socket . SO_RCVBUF ) udp_sock = socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) DEFAULTS [ 'UDP_SNDBUF' ] = udp_sock . getsockopt ( socket . SOL_SOCKET , socket . SO_SNDBUF ) DEFAULTS [ 'UDP_RCVBUF' ] = udp_sock . getsockopt ( socket . SOL_SOCKET , socket . SO_RCVBUF ) # check the ip visible from the world. DEFAULTS [ 'WHATS_MYIP_URL' ] = 'http://www.whatismyip.com/automation/n09230945.asp' return DEFAULTS
| 1,228
|
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/config.py#L16-L50
|
[
"def",
"_extract_html_hex",
"(",
"string",
")",
":",
"try",
":",
"hex_string",
"=",
"string",
"and",
"_hex_regexp",
"(",
")",
".",
"search",
"(",
"string",
")",
".",
"group",
"(",
"0",
")",
"or",
"''",
"except",
"AttributeError",
":",
"return",
"None",
"if",
"len",
"(",
"hex_string",
")",
"==",
"3",
":",
"hex_string",
"=",
"hex_string",
"[",
"0",
"]",
"*",
"2",
"+",
"hex_string",
"[",
"1",
"]",
"*",
"2",
"+",
"hex_string",
"[",
"2",
"]",
"*",
"2",
"return",
"hex_string"
] |
Determine the server URL .
|
def site_url ( url ) : base_url = 'http://%s' % socket . gethostname ( ) if server . port is not 80 : base_url += ':%d' % server . port return urlparse . urljoin ( base_url , url )
| 1,229
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/django.py#L15-L24
|
[
"def",
"thaw",
"(",
"vault_client",
",",
"src_file",
",",
"opt",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"src_file",
")",
":",
"raise",
"aomi",
".",
"exceptions",
".",
"AomiFile",
"(",
"\"%s does not exist\"",
"%",
"src_file",
")",
"tmp_dir",
"=",
"ensure_tmpdir",
"(",
")",
"zip_file",
"=",
"thaw_decrypt",
"(",
"vault_client",
",",
"src_file",
",",
"tmp_dir",
",",
"opt",
")",
"archive",
"=",
"zipfile",
".",
"ZipFile",
"(",
"zip_file",
",",
"'r'",
")",
"for",
"archive_file",
"in",
"archive",
".",
"namelist",
"(",
")",
":",
"archive",
".",
"extract",
"(",
"archive_file",
",",
"tmp_dir",
")",
"os",
".",
"chmod",
"(",
"\"%s/%s\"",
"%",
"(",
"tmp_dir",
",",
"archive_file",
")",
",",
"0o640",
")",
"LOG",
".",
"debug",
"(",
"\"Extracted %s from archive\"",
",",
"archive_file",
")",
"LOG",
".",
"info",
"(",
"\"Thawing secrets into %s\"",
",",
"opt",
".",
"secrets",
")",
"config",
"=",
"get_secretfile",
"(",
"opt",
")",
"Context",
".",
"load",
"(",
"config",
",",
"opt",
")",
".",
"thaw",
"(",
"tmp_dir",
")"
] |
Get external id from account info .
|
def _get_external_id ( account_info ) : if all ( k in account_info for k in ( 'external_id' , 'external_method' ) ) : return dict ( id = account_info [ 'external_id' ] , method = account_info [ 'external_method' ] ) return None
| 1,230
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L40-L45
|
[
"def",
"render",
"(",
"self",
",",
"data",
",",
"accepted_media_type",
"=",
"None",
",",
"renderer_context",
"=",
"None",
")",
":",
"if",
"'SWAGGER_JSON_PATH'",
"in",
"os",
".",
"environ",
":",
"with",
"io",
".",
"open",
"(",
"os",
".",
"environ",
"[",
"'SWAGGER_JSON_PATH'",
"]",
",",
"'rb'",
")",
"as",
"f",
":",
"return",
"f",
".",
"read",
"(",
")",
"else",
":",
"return",
"super",
"(",
"ConditionalOpenAPIRenderer",
",",
"self",
")",
".",
"render",
"(",
"data",
",",
"accepted_media_type",
",",
"renderer_context",
")"
] |
Retrieve user object for the given request .
|
def oauth_get_user ( client_id , account_info = None , access_token = None ) : if access_token : token = RemoteToken . get_by_token ( client_id , access_token ) if token : return token . remote_account . user if account_info : external_id = _get_external_id ( account_info ) if external_id : user_identity = UserIdentity . query . filter_by ( id = external_id [ 'id' ] , method = external_id [ 'method' ] ) . first ( ) if user_identity : return user_identity . user email = account_info . get ( 'user' , { } ) . get ( 'email' ) if email : return User . query . filter_by ( email = email ) . one_or_none ( ) return None
| 1,231
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L48-L75
|
[
"def",
"delete_network_acl",
"(",
"network_acl_id",
"=",
"None",
",",
"network_acl_name",
"=",
"None",
",",
"disassociate",
"=",
"False",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"if",
"disassociate",
":",
"network_acl",
"=",
"_get_resource",
"(",
"'network_acl'",
",",
"name",
"=",
"network_acl_name",
",",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"if",
"network_acl",
"and",
"network_acl",
".",
"associations",
":",
"subnet_id",
"=",
"network_acl",
".",
"associations",
"[",
"0",
"]",
".",
"subnet_id",
"try",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"conn",
".",
"disassociate_network_acl",
"(",
"subnet_id",
")",
"except",
"BotoServerError",
":",
"pass",
"return",
"_delete_resource",
"(",
"resource",
"=",
"'network_acl'",
",",
"name",
"=",
"network_acl_name",
",",
"resource_id",
"=",
"network_acl_id",
",",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")"
] |
Authenticate an oauth authorized callback .
|
def oauth_authenticate ( client_id , user , require_existing_link = False ) : # Authenticate via the access token (access token used to get user_id) if not requires_confirmation ( user ) : after_this_request ( _commit ) if login_user ( user , remember = False ) : if require_existing_link : account = RemoteAccount . get ( user . id , client_id ) if account is None : logout_user ( ) return False return True return False
| 1,232
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L78-L97
|
[
"def",
"_getNearestMappingIndexList",
"(",
"fromValList",
",",
"toValList",
")",
":",
"indexList",
"=",
"[",
"]",
"for",
"fromTimestamp",
"in",
"fromValList",
":",
"smallestDiff",
"=",
"_getSmallestDifference",
"(",
"toValList",
",",
"fromTimestamp",
")",
"i",
"=",
"toValList",
".",
"index",
"(",
"smallestDiff",
")",
"indexList",
".",
"append",
"(",
"i",
")",
"return",
"indexList"
] |
Register user if possible .
|
def oauth_register ( form ) : if form . validate ( ) : data = form . to_dict ( ) if not data . get ( 'password' ) : data [ 'password' ] = '' user = register_user ( * * data ) if not data [ 'password' ] : user . password = None _datastore . commit ( ) return user
| 1,233
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L100-L114
|
[
"async",
"def",
"setup_streamer",
"(",
"self",
")",
":",
"self",
".",
"streamer",
".",
"volume",
"=",
"self",
".",
"volume",
"/",
"100",
"self",
".",
"streamer",
".",
"start",
"(",
")",
"self",
".",
"pause_time",
"=",
"None",
"self",
".",
"vclient_starttime",
"=",
"self",
".",
"vclient",
".",
"loop",
".",
"time",
"(",
")",
"# Cache next song",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Caching next song\"",
")",
"dl_thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"download_next_song_cache",
")",
"dl_thread",
".",
"start",
"(",
")"
] |
Link a user to an external id .
|
def oauth_link_external_id ( user , external_id = None ) : try : with db . session . begin_nested ( ) : db . session . add ( UserIdentity ( id = external_id [ 'id' ] , method = external_id [ 'method' ] , id_user = user . id ) ) except IntegrityError : raise AlreadyLinkedError ( user , external_id )
| 1,234
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L117-L134
|
[
"def",
"min_and",
"(",
"a",
",",
"b",
",",
"c",
",",
"d",
",",
"w",
")",
":",
"m",
"=",
"(",
"1",
"<<",
"(",
"w",
"-",
"1",
")",
")",
"while",
"m",
"!=",
"0",
":",
"if",
"(",
"~",
"a",
"&",
"~",
"c",
"&",
"m",
")",
"!=",
"0",
":",
"temp",
"=",
"(",
"a",
"|",
"m",
")",
"&",
"-",
"m",
"if",
"temp",
"<=",
"b",
":",
"a",
"=",
"temp",
"break",
"temp",
"=",
"(",
"c",
"|",
"m",
")",
"&",
"-",
"m",
"if",
"temp",
"<=",
"d",
":",
"c",
"=",
"temp",
"break",
"m",
">>=",
"1",
"return",
"a",
"&",
"c"
] |
Unlink a user from an external id .
|
def oauth_unlink_external_id ( external_id ) : with db . session . begin_nested ( ) : UserIdentity . query . filter_by ( id = external_id [ 'id' ] , method = external_id [ 'method' ] ) . delete ( )
| 1,235
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L137-L144
|
[
"def",
"LT",
"(",
"classical_reg1",
",",
"classical_reg2",
",",
"classical_reg3",
")",
":",
"classical_reg1",
",",
"classical_reg2",
",",
"classical_reg3",
"=",
"prepare_ternary_operands",
"(",
"classical_reg1",
",",
"classical_reg2",
",",
"classical_reg3",
")",
"return",
"ClassicalLessThan",
"(",
"classical_reg1",
",",
"classical_reg2",
",",
"classical_reg3",
")"
] |
Make a registration form .
|
def create_registrationform ( * args , * * kwargs ) : class RegistrationForm ( _security . confirm_register_form ) : password = None recaptcha = None return RegistrationForm ( * args , * * kwargs )
| 1,236
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L184-L189
|
[
"def",
"trim_trailing_silence",
"(",
"self",
")",
":",
"length",
"=",
"self",
".",
"get_active_length",
"(",
")",
"self",
".",
"pianoroll",
"=",
"self",
".",
"pianoroll",
"[",
":",
"length",
"]"
] |
Prefill form with data .
|
def fill_form ( form , data ) : for ( key , value ) in data . items ( ) : if hasattr ( form , key ) : if isinstance ( value , dict ) : fill_form ( getattr ( form , key ) , value ) else : getattr ( form , key ) . data = value return form
| 1,237
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L197-L210
|
[
"def",
"_set_vibration_nix",
"(",
"self",
",",
"left_motor",
",",
"right_motor",
",",
"duration",
")",
":",
"code",
"=",
"self",
".",
"__get_vibration_code",
"(",
"left_motor",
",",
"right_motor",
",",
"duration",
")",
"secs",
",",
"msecs",
"=",
"convert_timeval",
"(",
"time",
".",
"time",
"(",
")",
")",
"outer_event",
"=",
"struct",
".",
"pack",
"(",
"EVENT_FORMAT",
",",
"secs",
",",
"msecs",
",",
"0x15",
",",
"code",
",",
"1",
")",
"self",
".",
"_write_device",
".",
"write",
"(",
"outer_event",
")",
"self",
".",
"_write_device",
".",
"flush",
"(",
")"
] |
Return the right param to disable CSRF depending on WTF - Form version .
|
def _get_csrf_disabled_param ( ) : import flask_wtf from pkg_resources import parse_version supports_meta = parse_version ( flask_wtf . __version__ ) >= parse_version ( "0.14.0" ) return dict ( meta = { 'csrf' : False } ) if supports_meta else dict ( csrf_enabled = False )
| 1,238
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/utils.py#L224-L235
|
[
"def",
"cartoon",
"(",
"self",
",",
"cmap",
"=",
"None",
")",
":",
"# Parse secondary structure",
"top",
"=",
"self",
".",
"topology",
"geom",
"=",
"gg",
".",
"GeomProteinCartoon",
"(",
"gg",
".",
"Aes",
"(",
"xyz",
"=",
"self",
".",
"coordinates",
",",
"types",
"=",
"top",
"[",
"'atom_names'",
"]",
",",
"secondary_type",
"=",
"top",
"[",
"'secondary_structure'",
"]",
")",
",",
"cmap",
"=",
"cmap",
")",
"primitives",
"=",
"geom",
".",
"produce",
"(",
"gg",
".",
"Aes",
"(",
")",
")",
"ids",
"=",
"[",
"self",
".",
"add_representation",
"(",
"r",
"[",
"'rep_type'",
"]",
",",
"r",
"[",
"'options'",
"]",
")",
"for",
"r",
"in",
"primitives",
"]",
"def",
"update",
"(",
"self",
"=",
"self",
",",
"geom",
"=",
"geom",
",",
"ids",
"=",
"ids",
")",
":",
"primitives",
"=",
"geom",
".",
"produce",
"(",
"gg",
".",
"Aes",
"(",
"xyz",
"=",
"self",
".",
"coordinates",
")",
")",
"[",
"self",
".",
"update_representation",
"(",
"id_",
",",
"rep_options",
")",
"for",
"id_",
",",
"rep_options",
"in",
"zip",
"(",
"ids",
",",
"primitives",
")",
"]",
"self",
".",
"update_callbacks",
".",
"append",
"(",
"update",
")",
"self",
".",
"autozoom",
"(",
"self",
".",
"coordinates",
")"
] |
Find and load step definitions and them find and load features under base_path specified on constructor
|
def run ( self ) : try : self . loader . find_and_load_step_definitions ( ) except StepLoadingError , e : print "Error loading step definitions:\n" , e return results = [ ] if self . explicit_features : features_files = self . explicit_features else : features_files = self . loader . find_feature_files ( ) if self . random : random . shuffle ( features_files ) if not features_files : self . output . print_no_features_found ( self . loader . base_dir ) return processes = Pool ( processes = self . parallelization ) test_results_it = processes . imap_unordered ( worker_process , [ ( self , filename ) for filename in features_files ] ) all_total = ParallelTotalResult ( ) for result in test_results_it : all_total += result [ 'total' ] sys . stdout . write ( result [ 'stdout' ] ) sys . stderr . write ( result [ 'stderr' ] ) return all_total
| 1,239
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/parallel_runner.py#L107-L140
|
[
"def",
"calc_temp",
"(",
"Data_ref",
",",
"Data",
")",
":",
"T",
"=",
"300",
"*",
"(",
"(",
"Data",
".",
"A",
"*",
"Data_ref",
".",
"Gamma",
")",
"/",
"(",
"Data_ref",
".",
"A",
"*",
"Data",
".",
"Gamma",
")",
")",
"Data",
".",
"T",
"=",
"T",
"return",
"T"
] |
Open a Jaide session with the device .
|
def open_connection ( ip , username , password , function , args , write = False , conn_timeout = 5 , sess_timeout = 300 , port = 22 ) : # start with the header line on the output. output = color ( '=' * 50 + '\nResults from device: %s\n' % ip , 'yel' ) try : # create the Jaide session object for the device. conn = Jaide ( ip , username , password , connect_timeout = conn_timeout , session_timeout = sess_timeout , port = port ) if write is not False : return write , output + function ( conn , * args ) else : return output + function ( conn , * args ) except errors . SSHError : output += color ( 'Unable to connect to port %s on device: %s\n' % ( str ( port ) , ip ) , 'red' ) except errors . AuthenticationError : # NCClient auth failure output += color ( 'Authentication failed for device: %s' % ip , 'red' ) except AuthenticationException : # Paramiko auth failure output += color ( 'Authentication failed for device: %s' % ip , 'red' ) except SSHException as e : output += color ( 'Error connecting to device: %s\nError: %s' % ( ip , str ( e ) ) , 'red' ) except socket . timeout : output += color ( 'Timeout exceeded connecting to device: %s' % ip , 'red' ) except socket . gaierror : output += color ( 'No route to host, or invalid hostname: %s' % ip , 'red' ) except socket . error : output += color ( 'The device refused the connection on port %s, or ' 'no route to host.' % port , 'red' ) if write is not False : return write , output else : return output
| 1,240
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/wrap.py#L32-L103
|
[
"def",
"get_covariance_table",
"(",
"self",
",",
"chain",
"=",
"0",
",",
"parameters",
"=",
"None",
",",
"caption",
"=",
"\"Parameter Covariance\"",
",",
"label",
"=",
"\"tab:parameter_covariance\"",
")",
":",
"parameters",
",",
"cov",
"=",
"self",
".",
"get_covariance",
"(",
"chain",
"=",
"chain",
",",
"parameters",
"=",
"parameters",
")",
"return",
"self",
".",
"_get_2d_latex_table",
"(",
"parameters",
",",
"cov",
",",
"caption",
",",
"label",
")"
] |
Run an operational command .
|
def command ( jaide , commands , format = "text" , xpath = False ) : output = "" for cmd in clean_lines ( commands ) : expression = "" output += color ( '> ' + cmd + '\n' , 'yel' ) # Get xpath expression from the command, if it is there. # If there is an xpath expr, the output will be xml, # overriding the req_format parameter # # Example command forcing xpath: show route % //rt-entry if len ( cmd . split ( '%' ) ) == 2 : expression = cmd . split ( '%' ) [ 1 ] . strip ( ) cmd = cmd . split ( '%' ) [ 0 ] + '\n' elif xpath is not False : expression = xpath if expression : try : output += jaide . op_cmd ( command = cmd , req_format = 'xml' , xpath_expr = expression ) + '\n' except lxml . etree . XMLSyntaxError : output += color ( 'Xpath expression resulted in no response.\n' , 'red' ) else : output += jaide . op_cmd ( cmd , req_format = format ) + '\n' return output
| 1,241
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/wrap.py#L106-L146
|
[
"def",
"volumes_delete",
"(",
"storage_pool",
",",
"logger",
")",
":",
"try",
":",
"for",
"vol_name",
"in",
"storage_pool",
".",
"listVolumes",
"(",
")",
":",
"try",
":",
"vol",
"=",
"storage_pool",
".",
"storageVolLookupByName",
"(",
"vol_name",
")",
"vol",
".",
"delete",
"(",
"0",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volume %s.\"",
",",
"vol_name",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volumes.\"",
")"
] |
Send shell commands to a device .
|
def shell ( jaide , commands ) : out = "" for cmd in clean_lines ( commands ) : out += color ( '> %s\n' % cmd , 'yel' ) out += jaide . shell_cmd ( cmd ) + '\n' return out
| 1,242
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/wrap.py#L454-L469
|
[
"def",
"add_copyright",
"(",
"self",
",",
"material",
"=",
"None",
",",
"holder",
"=",
"None",
",",
"statement",
"=",
"None",
",",
"url",
"=",
"None",
",",
"year",
"=",
"None",
")",
":",
"copyright",
"=",
"{",
"}",
"for",
"key",
"in",
"(",
"'holder'",
",",
"'statement'",
",",
"'url'",
")",
":",
"if",
"locals",
"(",
")",
"[",
"key",
"]",
"is",
"not",
"None",
":",
"copyright",
"[",
"key",
"]",
"=",
"locals",
"(",
")",
"[",
"key",
"]",
"if",
"material",
"is",
"not",
"None",
":",
"copyright",
"[",
"'material'",
"]",
"=",
"material",
".",
"lower",
"(",
")",
"if",
"year",
"is",
"not",
"None",
":",
"copyright",
"[",
"'year'",
"]",
"=",
"int",
"(",
"year",
")",
"self",
".",
"_append_to",
"(",
"'copyright'",
",",
"copyright",
")"
] |
Returns all keys from etcd .
|
def get_all_keys ( reactor , key_type , value_type , etcd_address ) : etcd = Client ( reactor , etcd_address ) result = yield etcd . get ( b'\x00' , range_end = b'\x00' ) res = { } for item in result . kvs : if key_type == u'utf8' : key = item . key . decode ( 'utf8' ) elif key_type == u'binary' : key = binascii . b2a_base64 ( item . key ) . decode ( ) . strip ( ) else : raise Exception ( 'logic error' ) if value_type == u'json' : value = json . loads ( item . value . decode ( 'utf8' ) ) elif value_type == u'binary' : value = binascii . b2a_base64 ( item . value ) . decode ( ) . strip ( ) elif value_type == u'utf8' : value = item . value . decode ( 'utf8' ) else : raise Exception ( 'logic error' ) res [ key ] = value returnValue ( res )
| 1,243
|
https://github.com/crossbario/txaio-etcd/blob/c9aebff7f288a0b219bffc9d2579d22cf543baa5/txaioetcd/cli/exporter.py#L49-L81
|
[
"def",
"sql_column_like_drug",
"(",
"self",
",",
"column_name",
":",
"str",
")",
"->",
"str",
":",
"clauses",
"=",
"[",
"\"{col} LIKE {fragment}\"",
".",
"format",
"(",
"col",
"=",
"column_name",
",",
"fragment",
"=",
"sql_string_literal",
"(",
"f",
")",
")",
"for",
"f",
"in",
"self",
".",
"sql_like_fragments",
"]",
"return",
"\"({})\"",
".",
"format",
"(",
"\" OR \"",
".",
"join",
"(",
"clauses",
")",
")"
] |
Send the GET request required to stop the scan
|
def stop ( self , timeout = None ) : assert self . scan_id is not None , 'No scan_id has been set' # # Simple stop # if timeout is None : url = '/scans/%s/stop' % self . scan_id self . conn . send_request ( url , method = 'GET' ) return # # Stop with timeout # self . stop ( ) for _ in xrange ( timeout ) : time . sleep ( 1 ) is_running = self . get_status ( ) [ 'is_running' ] if not is_running : return msg = 'Failed to stop the scan in %s seconds' raise ScanStopTimeoutException ( msg % timeout )
| 1,244
|
https://github.com/andresriancho/w3af-api-client/blob/adeb79bad75264d754de69f0bb981b366da96f32/w3af_api_client/scan.py#L56-L91
|
[
"def",
"_annotate_objects",
"(",
"self",
")",
":",
"self",
".",
"metadata",
"=",
"[",
"]",
"sizer",
"=",
"Asizer",
"(",
")",
"sizes",
"=",
"sizer",
".",
"asizesof",
"(",
"*",
"self",
".",
"objects",
")",
"self",
".",
"total_size",
"=",
"sizer",
".",
"total",
"for",
"obj",
",",
"sz",
"in",
"zip",
"(",
"self",
".",
"objects",
",",
"sizes",
")",
":",
"md",
"=",
"_MetaObject",
"(",
")",
"md",
".",
"size",
"=",
"sz",
"md",
".",
"id",
"=",
"id",
"(",
"obj",
")",
"try",
":",
"md",
".",
"type",
"=",
"obj",
".",
"__class__",
".",
"__name__",
"except",
"(",
"AttributeError",
",",
"ReferenceError",
")",
":",
"# pragma: no cover",
"md",
".",
"type",
"=",
"type",
"(",
"obj",
")",
".",
"__name__",
"md",
".",
"str",
"=",
"safe_repr",
"(",
"obj",
",",
"clip",
"=",
"128",
")",
"self",
".",
"metadata",
".",
"append",
"(",
"md",
")"
] |
Convert an xrb address to public key in bytes
|
def xrb_address_to_public_key ( address ) : address = bytearray ( address , 'ascii' ) if not address . startswith ( b'xrb_' ) : raise ValueError ( 'address does not start with xrb_: %s' % address ) if len ( address ) != 64 : raise ValueError ( 'address must be 64 chars long: %s' % address ) address = bytes ( address ) key_b32xrb = b'1111' + address [ 4 : 56 ] key_bytes = b32xrb_decode ( key_b32xrb ) [ 3 : ] checksum = address [ 56 : ] if b32xrb_encode ( address_checksum ( key_bytes ) ) != checksum : raise ValueError ( 'invalid address, invalid checksum: %s' % address ) return key_bytes
| 1,245
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/accounts.py#L73-L106
|
[
"def",
"get_site_amplification",
"(",
"self",
",",
"C",
",",
"sites",
")",
":",
"# Gets delta normalised z1",
"dz1",
"=",
"sites",
".",
"z1pt0",
"-",
"np",
".",
"exp",
"(",
"self",
".",
"_get_lnmu_z1",
"(",
"sites",
".",
"vs30",
")",
")",
"f_s",
"=",
"C",
"[",
"\"c5\"",
"]",
"*",
"dz1",
"# Calculates site amplification term",
"f_s",
"[",
"dz1",
">",
"self",
".",
"CONSTANTS",
"[",
"\"dz1ref\"",
"]",
"]",
"=",
"(",
"C",
"[",
"\"c5\"",
"]",
"*",
"self",
".",
"CONSTANTS",
"[",
"\"dz1ref\"",
"]",
")",
"idx",
"=",
"sites",
".",
"vs30",
">",
"self",
".",
"CONSTANTS",
"[",
"\"v1\"",
"]",
"f_s",
"[",
"idx",
"]",
"+=",
"(",
"C",
"[",
"\"c4\"",
"]",
"*",
"np",
".",
"log",
"(",
"self",
".",
"CONSTANTS",
"[",
"\"v1\"",
"]",
"/",
"C",
"[",
"\"vref\"",
"]",
")",
")",
"idx",
"=",
"np",
".",
"logical_not",
"(",
"idx",
")",
"f_s",
"[",
"idx",
"]",
"+=",
"(",
"C",
"[",
"\"c4\"",
"]",
"*",
"np",
".",
"log",
"(",
"sites",
".",
"vs30",
"[",
"idx",
"]",
"/",
"C",
"[",
"\"vref\"",
"]",
")",
")",
"return",
"f_s"
] |
Generates an adhoc account and keypair
|
def generate_account ( seed = None , index = 0 ) : if not seed : seed = unhexlify ( '' . join ( random . choice ( '0123456789ABCDEF' ) for i in range ( 64 ) ) ) pair = keypair_from_seed ( seed , index = index ) result = { 'address' : public_key_to_xrb_address ( pair [ 'public' ] ) , 'private_key_bytes' : pair [ 'private' ] , 'public_key_bytes' : pair [ 'public' ] , } result [ 'private_key_hex' ] = hexlify ( pair [ 'private' ] ) result [ 'public_key_hex' ] = hexlify ( pair [ 'public' ] ) return result
| 1,246
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/accounts.py#L109-L143
|
[
"def",
"mw",
"(",
"self",
")",
":",
"counter",
"=",
"collections",
".",
"Counter",
"(",
"self",
".",
"seq",
".",
"lower",
"(",
")",
")",
"mw_a",
"=",
"counter",
"[",
"'a'",
"]",
"*",
"313.2",
"mw_t",
"=",
"counter",
"[",
"'t'",
"]",
"*",
"304.2",
"mw_g",
"=",
"counter",
"[",
"'g'",
"]",
"*",
"289.2",
"mw_c",
"=",
"counter",
"[",
"'c'",
"]",
"*",
"329.2",
"mw_u",
"=",
"counter",
"[",
"'u'",
"]",
"*",
"306.2",
"if",
"self",
".",
"material",
"==",
"'dna'",
":",
"return",
"mw_a",
"+",
"mw_t",
"+",
"mw_g",
"+",
"mw_c",
"+",
"79.0",
"else",
":",
"return",
"mw_a",
"+",
"mw_u",
"+",
"mw_g",
"+",
"mw_c",
"+",
"159.0"
] |
Passes inputs with indecies in s . By default passes the whole inbox .
|
def spasser ( inbox , s = None ) : seq = ( s or range ( len ( inbox ) ) ) return [ input_ for i , input_ in enumerate ( inbox ) if i in seq ]
| 1,247
|
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/func.py#L67-L78
|
[
"def",
"stage",
"(",
"x",
",",
"staging",
")",
":",
"linknames",
"=",
"[",
"]",
"# Objects that don't represent a file shouldn't be staged",
"non_file_objects",
"=",
"(",
"track",
".",
"ViewTrack",
",",
"track",
".",
"CompositeTrack",
",",
"track",
".",
"AggregateTrack",
",",
"track",
".",
"SuperTrack",
",",
"genome",
".",
"Genome",
",",
")",
"if",
"isinstance",
"(",
"x",
",",
"non_file_objects",
")",
":",
"return",
"linknames",
"# If it's an object representing a file, then render it.",
"#",
"# Track objects don't represent files, but their documentation does",
"linknames",
".",
"append",
"(",
"x",
".",
"render",
"(",
"staging",
")",
")",
"if",
"hasattr",
"(",
"x",
",",
"'source'",
")",
"and",
"hasattr",
"(",
"x",
",",
"'filename'",
")",
":",
"def",
"_stg",
"(",
"x",
",",
"ext",
"=",
"''",
")",
":",
"# A remote track hosted elsewhere does not need staging. This is",
"# defined by a track with a url, but no source or filename.",
"if",
"(",
"x",
".",
"source",
"is",
"None",
"and",
"x",
".",
"filename",
"is",
"None",
"and",
"getattr",
"(",
"x",
",",
"'url'",
",",
"None",
")",
"is",
"not",
"None",
")",
":",
"return",
"linknames",
".",
"append",
"(",
"local_link",
"(",
"x",
".",
"source",
"+",
"ext",
",",
"x",
".",
"filename",
"+",
"ext",
",",
"staging",
")",
")",
"_stg",
"(",
"x",
")",
"if",
"isinstance",
"(",
"x",
",",
"track",
".",
"Track",
")",
":",
"if",
"x",
".",
"tracktype",
"==",
"'bam'",
":",
"_stg",
"(",
"x",
",",
"ext",
"=",
"'.bai'",
")",
"if",
"x",
".",
"tracktype",
"==",
"'vcfTabix'",
":",
"_stg",
"(",
"x",
",",
"ext",
"=",
"'.tbi'",
")",
"if",
"isinstance",
"(",
"x",
",",
"track",
".",
"CompositeTrack",
")",
":",
"if",
"x",
".",
"_html",
":",
"_stg",
"(",
"x",
".",
"_html",
")",
"return",
"linknames"
] |
String joins input with indices in s .
|
def sjoiner ( inbox , s = None , join = "" ) : return join . join ( [ input_ for i , input_ in enumerate ( inbox ) if i in s ] )
| 1,248
|
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/func.py#L119-L131
|
[
"def",
"relative_humidity_wet_psychrometric",
"(",
"dry_bulb_temperature",
",",
"web_bulb_temperature",
",",
"pressure",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"(",
"psychrometric_vapor_pressure_wet",
"(",
"dry_bulb_temperature",
",",
"web_bulb_temperature",
",",
"pressure",
",",
"*",
"*",
"kwargs",
")",
"/",
"saturation_vapor_pressure",
"(",
"dry_bulb_temperature",
")",
")"
] |
Loads data from a file . Determines the file type automatically file fifo socket but allows to specify the representation type string or mmap for memory mapped access to the file . Returns the loaded item as a str or mmap object . Internally creates an item from a file .
|
def load_item ( inbox , type = "string" , remove = True , buffer = None ) : is_file , is_fifo , is_socket = False , False , False file = inbox [ 0 ] try : file_type = file [ 0 ] except : raise ValueError ( "invalid inbox item" ) if file_type == "file" : is_file = os . path . exists ( file [ 1 ] ) elif file_type == "fifo" : is_fifo = stat . S_ISFIFO ( os . stat ( file [ 1 ] ) . st_mode ) elif file_type == "socket" : # how to test is valid socket? is_socket = True else : raise ValueError ( "type: %s not undertood" % file_type ) if ( is_fifo or is_socket ) and ( type == 'mmap' ) : raise ValueError ( "mmap is not supported for FIFOs and sockets" ) if ( is_fifo or is_socket ) and not remove : raise ValueError ( "FIFOs and sockets have to be removed" ) # get a fd and start/stop start = 0 if is_fifo or is_file : stop = os . stat ( file [ 1 ] ) . st_size - 1 fd = os . open ( file [ 1 ] , os . O_RDONLY ) BUFFER = ( buffer or PAPY_DEFAULTS [ 'PIPE_BUF' ] ) elif is_socket : host , port = socket . gethostbyname ( file [ 1 ] ) , file [ 2 ] sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) sock . connect ( ( host , port ) ) stop = - 1 fd = sock . fileno ( ) BUFFER = ( buffer or PAPY_DEFAULTS [ 'TCP_RCVBUF' ] ) else : raise ValueError ( "got unknown inbox: %s" % ( repr ( inbox ) ) ) # get the data if type == 'mmap' : offset = start - ( start % ( getattr ( mmap , 'ALLOCATIONGRANULARITY' , None ) or getattr ( mmap , 'PAGESIZE' ) ) ) start = start - offset stop = stop - offset + 1 try : data = mmap . mmap ( fd , stop , access = mmap . ACCESS_READ , offset = offset ) except TypeError : # we're on Python 2.5 data = mmap . mmap ( fd , stop , access = mmap . ACCESS_READ ) data . seek ( start ) elif type == 'string' : data = [ ] if stop == - 1 : while True : buffer_ = os . read ( fd , BUFFER ) if not buffer_ : break data . append ( buffer_ ) data = "" . join ( data ) # data = sock.recv(socket.MSG_WAITALL) # this would read all the data from a socket else : os . 
lseek ( fd , start , 0 ) data = os . read ( fd , stop - start + 1 ) else : raise ValueError ( 'type: %s not understood.' % type ) # remove the file or close the socket if remove : if is_socket : # closes client socket sock . close ( ) else : # pipes and files are just removed os . close ( fd ) os . unlink ( file [ 1 ] ) else : os . close ( fd ) # returns a string or mmap return data
| 1,249
|
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/func.py#L379-L478
|
[
"async",
"def",
"_wait_peers",
"(",
"self",
")",
":",
"# Make sure we remove dead peers.",
"for",
"p",
"in",
"self",
".",
"_peers",
"[",
":",
"]",
":",
"if",
"p",
".",
"dead",
":",
"self",
".",
"_peers",
".",
"remove",
"(",
"p",
")",
"while",
"not",
"self",
".",
"_peers",
":",
"await",
"self",
".",
"_peers",
".",
"wait_not_empty",
"(",
")"
] |
Serializes the first element of the input using the pickle protocol using the fastes binary protocol .
|
def pickle_dumps ( inbox ) : # http://bugs.python.org/issue4074 gc . disable ( ) str_ = cPickle . dumps ( inbox [ 0 ] , cPickle . HIGHEST_PROTOCOL ) gc . enable ( ) return str_
| 1,250
|
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/func.py#L510-L520
|
[
"def",
"update_room_topic",
"(",
"self",
")",
":",
"try",
":",
"response",
"=",
"self",
".",
"client",
".",
"api",
".",
"get_room_topic",
"(",
"self",
".",
"room_id",
")",
"if",
"\"topic\"",
"in",
"response",
"and",
"response",
"[",
"\"topic\"",
"]",
"!=",
"self",
".",
"topic",
":",
"self",
".",
"topic",
"=",
"response",
"[",
"\"topic\"",
"]",
"return",
"True",
"else",
":",
"return",
"False",
"except",
"MatrixRequestError",
":",
"return",
"False"
] |
Deserializes the first element of the input using the pickle protocol .
|
def pickle_loads ( inbox ) : gc . disable ( ) obj = cPickle . loads ( inbox [ 0 ] ) gc . enable ( ) return obj
| 1,251
|
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/func.py#L523-L531
|
[
"def",
"addTextErr",
"(",
"self",
",",
"text",
")",
":",
"self",
".",
"_currentColor",
"=",
"self",
".",
"_red",
"self",
".",
"addText",
"(",
"text",
")"
] |
Serializes the first element of the input using the JSON protocol as implemented by the json Python 2 . 6 library .
|
def json_dumps ( inbox ) : gc . disable ( ) str_ = json . dumps ( inbox [ 0 ] ) gc . enable ( ) return str_
| 1,252
|
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/func.py#L535-L544
|
[
"def",
"store",
"(",
"self",
",",
"mutagen_file",
",",
"pictures",
")",
":",
"mutagen_file",
".",
"clear_pictures",
"(",
")",
"for",
"pic",
"in",
"pictures",
":",
"mutagen_file",
".",
"add_picture",
"(",
"pic",
")"
] |
Deserializes the first element of the input using the JSON protocol as implemented by the json Python 2 . 6 library .
|
def json_loads ( inbox ) : gc . disable ( ) obj = json . loads ( inbox [ 0 ] ) gc . enable ( ) return obj
| 1,253
|
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/func.py#L547-L556
|
[
"def",
"store",
"(",
"self",
",",
"mutagen_file",
",",
"pictures",
")",
":",
"mutagen_file",
".",
"clear_pictures",
"(",
")",
"for",
"pic",
"in",
"pictures",
":",
"mutagen_file",
".",
"add_picture",
"(",
"pic",
")"
] |
Callback validating the at_time commit option .
|
def at_time_validate ( ctx , param , value ) : # if they are doing commit_at, ensure the input is formatted correctly. if value is not None : if ( re . search ( r'([0-2]\d)(:[0-5]\d){1,2}' , value ) is None and re . search ( r'\d{4}-[01]\d-[0-3]\d [0-2]\d:[0-5]\d(:[0-5]\d)?' , value ) is None ) : raise click . BadParameter ( "A commit at time must be in one of the " "two formats: 'hh:mm[:ss]' or " "'yyyy-mm-dd hh:mm[:ss]' (seconds are " "optional)." ) ctx . obj [ 'at_time' ] = value return value
| 1,254
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/cli.py#L54-L86
|
[
"def",
"remove_organization",
"(",
"self",
",",
"service_desk_id",
",",
"organization_id",
")",
":",
"log",
".",
"warning",
"(",
"'Removing organization...'",
")",
"url",
"=",
"'rest/servicedeskapi/servicedesk/{}/organization'",
".",
"format",
"(",
"service_desk_id",
")",
"data",
"=",
"{",
"'organizationId'",
":",
"organization_id",
"}",
"return",
"self",
".",
"delete",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"experimental_headers",
",",
"data",
"=",
"data",
")"
] |
Validate the - w option .
|
def write_validate ( ctx , param , value ) : if value != ( "default" , "default" ) : try : mode , dest_file = ( value [ 0 ] , value [ 1 ] ) except IndexError : raise click . BadParameter ( 'Expecting two arguments, one for how to ' 'output (s, single, m, multiple), and ' 'the second is a filepath where to put' ' the output.' ) if mode . lower ( ) not in [ 's' , 'single' , 'm' , 'multiple' ] : raise click . BadParameter ( 'The first argument of the -w/--write ' 'option must specifies whether to write' ' to one file per device, or all device' ' output to a single file. Valid options' ' are "s", "single", "m", and "multiple"' ) # we've passed the checks, so set the 'out' context variable to our # tuple of the mode, and the destination file. ctx . obj [ 'out' ] = ( mode . lower ( ) , dest_file ) else : # they didn't use -w, so set the context variable accordingly. ctx . obj [ 'out' ] = None
| 1,255
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/cli.py#L90-L136
|
[
"def",
"send_to_kinesis_stream",
"(",
"events",
",",
"stream_name",
",",
"partition_key",
"=",
"None",
",",
"packer",
"=",
"None",
",",
"serializer",
"=",
"json",
".",
"dumps",
")",
":",
"if",
"not",
"events",
":",
"logger",
".",
"info",
"(",
"\"No events provided: nothing delivered to Firehose\"",
")",
"return",
"records",
"=",
"[",
"]",
"for",
"event",
"in",
"events",
":",
"if",
"not",
"partition_key",
":",
"partition_key_value",
"=",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
"elif",
"hasattr",
"(",
"partition_key",
",",
"\"__call__\"",
")",
":",
"partition_key_value",
"=",
"partition_key",
"(",
"event",
")",
"else",
":",
"partition_key_value",
"=",
"partition_key",
"if",
"not",
"isinstance",
"(",
"event",
",",
"str",
")",
":",
"event",
"=",
"serializer",
"(",
"event",
")",
"if",
"packer",
":",
"event",
"=",
"packer",
"(",
"event",
")",
"record",
"=",
"{",
"\"Data\"",
":",
"event",
",",
"\"PartitionKey\"",
":",
"partition_key_value",
"}",
"records",
".",
"append",
"(",
"record",
")",
"kinesis",
"=",
"boto3",
".",
"client",
"(",
"\"kinesis\"",
")",
"resp",
"=",
"kinesis",
".",
"put_records",
"(",
"StreamName",
"=",
"stream_name",
",",
"Records",
"=",
"records",
")",
"return",
"resp"
] |
Callback function to write the output from the script .
|
def write_out ( input ) : # peel off the to_file metadata from the output. to_file , output = input if to_file != "quiet" : try : # split the to_file metadata into it's separate parts. mode , dest_file = to_file except TypeError : # just dump the output if we had an internal problem with getting # the metadata. click . echo ( output ) else : ip = output . split ( 'device: ' ) [ 1 ] . split ( '\n' ) [ 0 ] . strip ( ) if mode in [ 'm' , 'multiple' ] : # put the IP in front of the filename if we're writing each # device to its own file. dest_file = path . join ( path . split ( dest_file ) [ 0 ] , ip + "_" + path . split ( dest_file ) [ 1 ] ) try : out_file = open ( dest_file , 'a+b' ) except IOError as e : print ( color ( "Could not open output file '%s' for writing. " "Output would have been:\n%s" % ( dest_file , output ) , 'red' ) ) print ( color ( 'Here is the error for opening the output file:' + str ( e ) , 'red' ) ) else : click . echo ( output , nl = False , file = out_file ) print ( color ( '%s output appended to: %s' % ( ip , dest_file ) ) ) out_file . close ( )
| 1,256
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/cli.py#L139-L185
|
[
"def",
"_get_optional_attrs",
"(",
"kws",
")",
":",
"vals",
"=",
"OboOptionalAttrs",
".",
"attributes",
".",
"intersection",
"(",
"kws",
".",
"keys",
"(",
")",
")",
"if",
"'sections'",
"in",
"kws",
":",
"vals",
".",
"add",
"(",
"'relationship'",
")",
"if",
"'norel'",
"in",
"kws",
":",
"vals",
".",
"discard",
"(",
"'relationship'",
")",
"return",
"vals"
] |
Manipulate one or more Junos devices .
|
def main ( ctx , host , password , port , quiet , session_timeout , connect_timeout , username ) : # build the list of hosts ctx . obj [ 'hosts' ] = [ ip for ip in clean_lines ( host ) ] # set the connection parameters ctx . obj [ 'conn' ] = { "username" : username , "password" : password , "port" : port , "session_timeout" : session_timeout , "connect_timeout" : connect_timeout } if quiet : ctx . obj [ 'out' ] = "quiet"
| 1,257
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/cli.py#L222-L271
|
[
"def",
"parse_plotCorrelation",
"(",
"self",
")",
":",
"self",
".",
"deeptools_plotCorrelationData",
"=",
"dict",
"(",
")",
"for",
"f",
"in",
"self",
".",
"find_log_files",
"(",
"'deeptools/plotCorrelationData'",
",",
"filehandles",
"=",
"False",
")",
":",
"parsed_data",
",",
"samples",
"=",
"self",
".",
"parsePlotCorrelationData",
"(",
"f",
")",
"for",
"k",
",",
"v",
"in",
"parsed_data",
".",
"items",
"(",
")",
":",
"if",
"k",
"in",
"self",
".",
"deeptools_plotCorrelationData",
":",
"log",
".",
"warning",
"(",
"\"Replacing duplicate sample {}.\"",
".",
"format",
"(",
"k",
")",
")",
"self",
".",
"deeptools_plotCorrelationData",
"[",
"k",
"]",
"=",
"v",
"if",
"len",
"(",
"parsed_data",
")",
">",
"0",
":",
"self",
".",
"add_data_source",
"(",
"f",
",",
"section",
"=",
"'plotCorrelation'",
")",
"if",
"len",
"(",
"self",
".",
"deeptools_plotCorrelationData",
")",
">",
"0",
":",
"config",
"=",
"{",
"'id'",
":",
"'deeptools_correlation_plot'",
",",
"'title'",
":",
"'deeptools: Correlation Plot'",
",",
"}",
"data",
"=",
"[",
"]",
"for",
"s_name",
"in",
"samples",
":",
"try",
":",
"data",
".",
"append",
"(",
"self",
".",
"deeptools_plotCorrelationData",
"[",
"s_name",
"]",
")",
"except",
"KeyError",
":",
"pass",
"if",
"len",
"(",
"data",
")",
"==",
"0",
":",
"log",
".",
"debug",
"(",
"'No valid data for correlation plot'",
")",
"return",
"None",
"self",
".",
"add_section",
"(",
"name",
"=",
"\"Correlation heatmap\"",
",",
"anchor",
"=",
"\"deeptools_correlation\"",
",",
"description",
"=",
"\"Pairwise correlations of samples based on distribution of sequence reads\"",
",",
"plot",
"=",
"heatmap",
".",
"plot",
"(",
"data",
",",
"samples",
",",
"samples",
",",
"config",
")",
")",
"return",
"len",
"(",
"self",
".",
"deeptools_plotCorrelationData",
")"
] |
Run show | compare for set commands .
|
def compare ( ctx , commands ) : mp_pool = multiprocessing . Pool ( multiprocessing . cpu_count ( ) * 2 ) for ip in ctx . obj [ 'hosts' ] : mp_pool . apply_async ( wrap . open_connection , args = ( ip , ctx . obj [ 'conn' ] [ 'username' ] , ctx . obj [ 'conn' ] [ 'password' ] , wrap . compare , [ commands ] , ctx . obj [ 'out' ] , ctx . obj [ 'conn' ] [ 'connect_timeout' ] , ctx . obj [ 'conn' ] [ 'session_timeout' ] , ctx . obj [ 'conn' ] [ 'port' ] ) , callback = write_out ) mp_pool . close ( ) mp_pool . join ( )
| 1,258
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/cli.py#L376-L408
|
[
"def",
"_new_sample",
"(",
"self",
",",
"sink",
")",
":",
"if",
"self",
".",
"running",
":",
"# New data is available from the pipeline! Dump it into our",
"# queue (or possibly block if we're full).",
"buf",
"=",
"sink",
".",
"emit",
"(",
"'pull-sample'",
")",
".",
"get_buffer",
"(",
")",
"# We can't use Gst.Buffer.extract() to read the data as it crashes",
"# when called through PyGObject. We also can't use",
"# Gst.Buffer.extract_dup() because we have no way in Python to free",
"# the memory that it returns. Instead we get access to the actual",
"# data via Gst.Memory.map().",
"mem",
"=",
"buf",
".",
"get_all_memory",
"(",
")",
"success",
",",
"info",
"=",
"mem",
".",
"map",
"(",
"Gst",
".",
"MapFlags",
".",
"READ",
")",
"if",
"success",
":",
"data",
"=",
"info",
".",
"data",
"mem",
".",
"unmap",
"(",
"info",
")",
"self",
".",
"queue",
".",
"put",
"(",
"data",
")",
"else",
":",
"raise",
"GStreamerError",
"(",
"\"Unable to map buffer memory while reading the file.\"",
")",
"return",
"Gst",
".",
"FlowReturn",
".",
"OK"
] |
Config comparison between two devices .
|
def diff_config ( ctx , second_host , mode ) : mp_pool = multiprocessing . Pool ( multiprocessing . cpu_count ( ) * 2 ) for ip in ctx . obj [ 'hosts' ] : mp_pool . apply_async ( wrap . open_connection , args = ( ip , ctx . obj [ 'conn' ] [ 'username' ] , ctx . obj [ 'conn' ] [ 'password' ] , wrap . diff_config , [ second_host , mode ] , ctx . obj [ 'out' ] , ctx . obj [ 'conn' ] [ 'connect_timeout' ] , ctx . obj [ 'conn' ] [ 'session_timeout' ] , ctx . obj [ 'conn' ] [ 'port' ] ) , callback = write_out ) mp_pool . close ( ) mp_pool . join ( )
| 1,259
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/cli.py#L593-L621
|
[
"def",
"publish",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"topic",
"=",
"self",
".",
"_get_required_parameter",
"(",
"'topic'",
",",
"*",
"*",
"kwargs",
")",
"# payload is an optional parameter",
"payload",
"=",
"kwargs",
".",
"get",
"(",
"'payload'",
",",
"b''",
")",
"function_arn",
"=",
"ROUTER_FUNCTION_ARN",
"client_context",
"=",
"{",
"'custom'",
":",
"{",
"'source'",
":",
"MY_FUNCTION_ARN",
",",
"'subject'",
":",
"topic",
"}",
"}",
"customer_logger",
".",
"info",
"(",
"'Publishing message on topic \"{}\" with Payload \"{}\"'",
".",
"format",
"(",
"topic",
",",
"payload",
")",
")",
"self",
".",
"lambda_client",
".",
"_invoke_internal",
"(",
"function_arn",
",",
"payload",
",",
"base64",
".",
"b64encode",
"(",
"json",
".",
"dumps",
"(",
"client_context",
")",
".",
"encode",
"(",
")",
")",
")"
] |
Allow for partial commands .
|
def get_command ( self , ctx , cmd_name ) : rv = click . Group . get_command ( self , ctx , cmd_name ) if rv is not None : return rv matches = [ x for x in self . list_commands ( ctx ) if x . startswith ( cmd_name ) ] if not matches : return None elif len ( matches ) == 1 : return click . Group . get_command ( self , ctx , matches [ 0 ] ) ctx . fail ( 'Command ambiguous, could be: %s' % ', ' . join ( sorted ( matches ) ) )
| 1,260
|
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/cli.py#L39-L51
|
[
"def",
"raise_thread_exception",
"(",
"thread_id",
",",
"exception",
")",
":",
"if",
"current_platform",
"==",
"\"CPython\"",
":",
"_raise_thread_exception_cpython",
"(",
"thread_id",
",",
"exception",
")",
"else",
":",
"message",
"=",
"\"Setting thread exceptions (%s) is not supported for your current platform (%r).\"",
"exctype",
"=",
"(",
"exception",
"if",
"inspect",
".",
"isclass",
"(",
"exception",
")",
"else",
"type",
"(",
"exception",
")",
")",
".",
"__name__",
"logger",
".",
"critical",
"(",
"message",
",",
"exctype",
",",
"current_platform",
")"
] |
Converts a value from from_unit units to to_unit units
|
def convert ( value , from_unit , to_unit ) : if isinstance ( value , float ) : raise ValueError ( "float values can lead to unexpected precision loss, please use a" " Decimal or string eg." " convert('%s', %r, %r)" % ( value , from_unit , to_unit ) ) if from_unit not in UNITS_TO_RAW : raise ValueError ( 'unknown unit: %r' % from_unit ) if to_unit not in UNITS_TO_RAW : raise ValueError ( 'unknown unit: %r' % to_unit ) try : value = Decimal ( value ) except Exception : raise ValueError ( 'not a number: %r' % value ) from_value_in_base = UNITS_TO_RAW [ from_unit ] to_value_in_base = UNITS_TO_RAW [ to_unit ] result = value * ( from_value_in_base / to_value_in_base ) return result . normalize ( )
| 1,261
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/conversion.py#L45-L85
|
[
"def",
"_WriteIfcfg",
"(",
"self",
",",
"interfaces",
",",
"logger",
")",
":",
"for",
"interface",
"in",
"interfaces",
":",
"interface_config",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"network_path",
",",
"'ifcfg-%s'",
"%",
"interface",
")",
"interface_content",
"=",
"[",
"'# Added by Google.'",
",",
"'STARTMODE=hotplug'",
",",
"'BOOTPROTO=dhcp'",
",",
"'DHCLIENT_SET_DEFAULT_ROUTE=yes'",
",",
"'DHCLIENT_ROUTE_PRIORITY=10%s00'",
"%",
"interface",
",",
"''",
",",
"]",
"with",
"open",
"(",
"interface_config",
",",
"'w'",
")",
"as",
"interface_file",
":",
"interface_file",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"interface_content",
")",
")",
"logger",
".",
"info",
"(",
"'Created ifcfg file for interface %s.'",
",",
"interface",
")"
] |
Endpoint that SNS accesses . Includes logic verifying request
|
def endpoint ( request ) : # pylint: disable=too-many-return-statements,too-many-branches # In order to 'hide' the endpoint, all non-POST requests should return # the site's default HTTP404 if request . method != 'POST' : raise Http404 # If necessary, check that the topic is correct if hasattr ( settings , 'BOUNCY_TOPIC_ARN' ) : # Confirm that the proper topic header was sent if 'HTTP_X_AMZ_SNS_TOPIC_ARN' not in request . META : return HttpResponseBadRequest ( 'No TopicArn Header' ) # Check to see if the topic is in the settings # Because you can have bounces and complaints coming from multiple # topics, BOUNCY_TOPIC_ARN is a list if ( not request . META [ 'HTTP_X_AMZ_SNS_TOPIC_ARN' ] in settings . BOUNCY_TOPIC_ARN ) : return HttpResponseBadRequest ( 'Bad Topic' ) # Load the JSON POST Body if isinstance ( request . body , str ) : # requests return str in python 2.7 request_body = request . body else : # and return bytes in python 3.4 request_body = request . body . decode ( ) try : data = json . loads ( request_body ) except ValueError : logger . warning ( 'Notification Not Valid JSON: {}' . format ( request_body ) ) return HttpResponseBadRequest ( 'Not Valid JSON' ) # Ensure that the JSON we're provided contains all the keys we expect # Comparison code from http://stackoverflow.com/questions/1285911/ if not set ( VITAL_NOTIFICATION_FIELDS ) <= set ( data ) : logger . warning ( 'Request Missing Necessary Keys' ) return HttpResponseBadRequest ( 'Request Missing Necessary Keys' ) # Ensure that the type of notification is one we'll accept if not data [ 'Type' ] in ALLOWED_TYPES : logger . 
info ( 'Notification Type Not Known %s' , data [ 'Type' ] ) return HttpResponseBadRequest ( 'Unknown Notification Type' ) # Confirm that the signing certificate is hosted on a correct domain # AWS by default uses sns.{region}.amazonaws.com # On the off chance you need this to be a different domain, allow the # regex to be overridden in settings domain = urlparse ( data [ 'SigningCertURL' ] ) . netloc pattern = getattr ( settings , 'BOUNCY_CERT_DOMAIN_REGEX' , r"sns.[a-z0-9\-]+.amazonaws.com$" ) if not re . search ( pattern , domain ) : logger . warning ( 'Improper Certificate Location %s' , data [ 'SigningCertURL' ] ) return HttpResponseBadRequest ( 'Improper Certificate Location' ) # Verify that the notification is signed by Amazon if ( getattr ( settings , 'BOUNCY_VERIFY_CERTIFICATE' , True ) and not verify_notification ( data ) ) : logger . error ( 'Verification Failure %s' , ) return HttpResponseBadRequest ( 'Improper Signature' ) # Send a signal to say a valid notification has been received signals . notification . send ( sender = 'bouncy_endpoint' , notification = data , request = request ) # Handle subscription-based messages. if data [ 'Type' ] == 'SubscriptionConfirmation' : # Allow the disabling of the auto-subscription feature if not getattr ( settings , 'BOUNCY_AUTO_SUBSCRIBE' , True ) : raise Http404 return approve_subscription ( data ) elif data [ 'Type' ] == 'UnsubscribeConfirmation' : # We won't handle unsubscribe requests here. Return a 200 status code # so Amazon won't redeliver the request. If you want to remove this # endpoint, remove it either via the API or the AWS Console logger . info ( 'UnsubscribeConfirmation Not Handled' ) return HttpResponse ( 'UnsubscribeConfirmation Not Handled' ) try : message = json . loads ( data [ 'Message' ] ) except ValueError : # This message is not JSON. But we need to return a 200 status code # so that Amazon doesn't attempt to deliver the message again logger . 
info ( 'Non-Valid JSON Message Received' ) return HttpResponse ( 'Message is not valid JSON' ) return process_message ( message , data )
| 1,262
|
https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/views.py#L38-L128
|
[
"def",
"write_metadata",
"(",
"self",
",",
"key",
",",
"values",
")",
":",
"values",
"=",
"Series",
"(",
"values",
")",
"self",
".",
"parent",
".",
"put",
"(",
"self",
".",
"_get_metadata_path",
"(",
"key",
")",
",",
"values",
",",
"format",
"=",
"'table'",
",",
"encoding",
"=",
"self",
".",
"encoding",
",",
"errors",
"=",
"self",
".",
"errors",
",",
"nan_rep",
"=",
"self",
".",
"nan_rep",
")"
] |
Function to process a JSON message delivered from Amazon
|
def process_message ( message , notification ) : # Confirm that there are 'notificationType' and 'mail' fields in our # message if not set ( VITAL_MESSAGE_FIELDS ) <= set ( message ) : # At this point we're sure that it's Amazon sending the message # If we don't return a 200 status code, Amazon will attempt to send us # this same message a few seconds later. logger . info ( 'JSON Message Missing Vital Fields' ) return HttpResponse ( 'Missing Vital Fields' ) if message [ 'notificationType' ] == 'Complaint' : return process_complaint ( message , notification ) if message [ 'notificationType' ] == 'Bounce' : return process_bounce ( message , notification ) if message [ 'notificationType' ] == 'Delivery' : return process_delivery ( message , notification ) else : return HttpResponse ( 'Unknown Notification Type' )
| 1,263
|
https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/views.py#L131-L151
|
[
"def",
"isHeld",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"lockPath",
")",
":",
"return",
"False",
"try",
":",
"mtime",
"=",
"os",
".",
"stat",
"(",
"self",
".",
"lockPath",
")",
".",
"st_mtime",
"except",
"FileNotFoundError",
"as",
"e",
":",
"return",
"False",
"if",
"self",
".",
"__checkExpiration",
"(",
"mtime",
")",
":",
"return",
"False",
"return",
"True"
] |
Function to process a bounce notification
|
def process_bounce ( message , notification ) : mail = message [ 'mail' ] bounce = message [ 'bounce' ] bounces = [ ] for recipient in bounce [ 'bouncedRecipients' ] : # Create each bounce record. Add to a list for reference later. bounces += [ Bounce . objects . create ( sns_topic = notification [ 'TopicArn' ] , sns_messageid = notification [ 'MessageId' ] , mail_timestamp = clean_time ( mail [ 'timestamp' ] ) , mail_id = mail [ 'messageId' ] , mail_from = mail [ 'source' ] , address = recipient [ 'emailAddress' ] , feedback_id = bounce [ 'feedbackId' ] , feedback_timestamp = clean_time ( bounce [ 'timestamp' ] ) , hard = bool ( bounce [ 'bounceType' ] == 'Permanent' ) , bounce_type = bounce [ 'bounceType' ] , bounce_subtype = bounce [ 'bounceSubType' ] , reporting_mta = bounce . get ( 'reportingMTA' ) , action = recipient . get ( 'action' ) , status = recipient . get ( 'status' ) , diagnostic_code = recipient . get ( 'diagnosticCode' ) ) ] # Send signals for each bounce. for bounce in bounces : signals . feedback . send ( sender = Bounce , instance = bounce , message = message , notification = notification ) logger . info ( 'Logged %s Bounce(s)' , str ( len ( bounces ) ) ) return HttpResponse ( 'Bounce Processed' )
| 1,264
|
https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/views.py#L154-L191
|
[
"def",
"quantile_1D",
"(",
"data",
",",
"weights",
",",
"quantile",
")",
":",
"# Check the data",
"if",
"not",
"isinstance",
"(",
"data",
",",
"np",
".",
"matrix",
")",
":",
"data",
"=",
"np",
".",
"asarray",
"(",
"data",
")",
"if",
"not",
"isinstance",
"(",
"weights",
",",
"np",
".",
"matrix",
")",
":",
"weights",
"=",
"np",
".",
"asarray",
"(",
"weights",
")",
"nd",
"=",
"data",
".",
"ndim",
"if",
"nd",
"!=",
"1",
":",
"raise",
"TypeError",
"(",
"\"data must be a one dimensional array\"",
")",
"ndw",
"=",
"weights",
".",
"ndim",
"if",
"ndw",
"!=",
"1",
":",
"raise",
"TypeError",
"(",
"\"weights must be a one dimensional array\"",
")",
"if",
"data",
".",
"shape",
"!=",
"weights",
".",
"shape",
":",
"raise",
"TypeError",
"(",
"\"the length of data and weights must be the same\"",
")",
"if",
"(",
"(",
"quantile",
">",
"1.",
")",
"or",
"(",
"quantile",
"<",
"0.",
")",
")",
":",
"raise",
"ValueError",
"(",
"\"quantile must have a value between 0. and 1.\"",
")",
"# Sort the data",
"ind_sorted",
"=",
"np",
".",
"argsort",
"(",
"data",
")",
"sorted_data",
"=",
"data",
"[",
"ind_sorted",
"]",
"sorted_weights",
"=",
"weights",
"[",
"ind_sorted",
"]",
"# Compute the auxiliary arrays",
"Sn",
"=",
"np",
".",
"cumsum",
"(",
"sorted_weights",
")",
"# TODO: Check that the weights do not sum zero",
"#assert Sn != 0, \"The sum of the weights must not be zero\"",
"Pn",
"=",
"(",
"Sn",
"-",
"0.5",
"*",
"sorted_weights",
")",
"/",
"Sn",
"[",
"-",
"1",
"]",
"# Get the value of the weighted median",
"return",
"np",
".",
"interp",
"(",
"quantile",
",",
"Pn",
",",
"sorted_data",
")"
] |
Function to process a complaint notification
|
def process_complaint ( message , notification ) : mail = message [ 'mail' ] complaint = message [ 'complaint' ] if 'arrivalDate' in complaint : arrival_date = clean_time ( complaint [ 'arrivalDate' ] ) else : arrival_date = None complaints = [ ] for recipient in complaint [ 'complainedRecipients' ] : # Create each Complaint. Save in a list for reference later. complaints += [ Complaint . objects . create ( sns_topic = notification [ 'TopicArn' ] , sns_messageid = notification [ 'MessageId' ] , mail_timestamp = clean_time ( mail [ 'timestamp' ] ) , mail_id = mail [ 'messageId' ] , mail_from = mail [ 'source' ] , address = recipient [ 'emailAddress' ] , feedback_id = complaint [ 'feedbackId' ] , feedback_timestamp = clean_time ( complaint [ 'timestamp' ] ) , useragent = complaint . get ( 'userAgent' ) , feedback_type = complaint . get ( 'complaintFeedbackType' ) , arrival_date = arrival_date ) ] # Send signals for each complaint. for complaint in complaints : signals . feedback . send ( sender = Complaint , instance = complaint , message = message , notification = notification ) logger . info ( 'Logged %s Complaint(s)' , str ( len ( complaints ) ) ) return HttpResponse ( 'Complaint Processed' )
| 1,265
|
https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/views.py#L194-L232
|
[
"def",
"dict_to_schema",
"(",
"schema_dict",
",",
"required",
",",
"allow_custom_keys",
"=",
"True",
",",
"modifier",
"=",
"None",
")",
":",
"if",
"modifier",
":",
"modifier",
"=",
"Use",
"(",
"modifier",
")",
"def",
"_to",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"d",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"value",
".",
"iteritems",
"(",
")",
":",
"if",
"isinstance",
"(",
"k",
",",
"basestring",
")",
":",
"k",
"=",
"Required",
"(",
"k",
")",
"if",
"required",
"else",
"Optional",
"(",
"k",
")",
"d",
"[",
"k",
"]",
"=",
"_to",
"(",
"v",
")",
"if",
"allow_custom_keys",
":",
"d",
"[",
"Optional",
"(",
"basestring",
")",
"]",
"=",
"modifier",
"or",
"object",
"schema",
"=",
"Schema",
"(",
"d",
")",
"elif",
"modifier",
":",
"schema",
"=",
"And",
"(",
"value",
",",
"modifier",
")",
"else",
":",
"schema",
"=",
"value",
"return",
"schema",
"return",
"_to",
"(",
"schema_dict",
")"
] |
Function to process a delivery notification
|
def process_delivery(message, notification):
    """Create a Delivery record per recipient and fire signals.

    ``message`` is the decoded SES delivery payload and ``notification``
    the enclosing SNS notification dict.  Returns an ``HttpResponse``
    acknowledging the delivery.
    """
    mail = message['mail']
    delivery = message['delivery']

    # 'timestamp' is optional in the delivery payload.
    delivered_datetime = (
        clean_time(delivery['timestamp'])
        if 'timestamp' in delivery else None
    )

    deliveries = []
    for recipient in delivery['recipients']:
        # Create each delivery record; keep them for the signal loop below.
        deliveries.append(Delivery.objects.create(
            sns_topic=notification['TopicArn'],
            sns_messageid=notification['MessageId'],
            mail_timestamp=clean_time(mail['timestamp']),
            mail_id=mail['messageId'],
            mail_from=mail['source'],
            address=recipient,
            # delivery
            delivered_time=delivered_datetime,
            processing_time=int(delivery['processingTimeMillis']),
            smtp_response=delivery['smtpResponse'],
        ))

    # Notify listeners about every delivery that was logged.
    for logged in deliveries:
        signals.feedback.send(
            sender=Delivery,
            instance=logged,
            message=message,
            notification=notification,
        )

    logger.info('Logged %s Deliveries(s)', str(len(deliveries)))
    return HttpResponse('Delivery Processed')
| 1,266
|
https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/views.py#L235-L272
|
[
"def",
"load_toml_rest_api_config",
"(",
"filename",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"LOGGER",
".",
"info",
"(",
"\"Skipping rest api loading from non-existent config file: %s\"",
",",
"filename",
")",
"return",
"RestApiConfig",
"(",
")",
"LOGGER",
".",
"info",
"(",
"\"Loading rest api information from config: %s\"",
",",
"filename",
")",
"try",
":",
"with",
"open",
"(",
"filename",
")",
"as",
"fd",
":",
"raw_config",
"=",
"fd",
".",
"read",
"(",
")",
"except",
"IOError",
"as",
"e",
":",
"raise",
"RestApiConfigurationError",
"(",
"\"Unable to load rest api configuration file: {}\"",
".",
"format",
"(",
"str",
"(",
"e",
")",
")",
")",
"toml_config",
"=",
"toml",
".",
"loads",
"(",
"raw_config",
")",
"invalid_keys",
"=",
"set",
"(",
"toml_config",
".",
"keys",
"(",
")",
")",
".",
"difference",
"(",
"[",
"'bind'",
",",
"'connect'",
",",
"'timeout'",
",",
"'opentsdb_db'",
",",
"'opentsdb_url'",
",",
"'opentsdb_username'",
",",
"'opentsdb_password'",
",",
"'client_max_size'",
"]",
")",
"if",
"invalid_keys",
":",
"raise",
"RestApiConfigurationError",
"(",
"\"Invalid keys in rest api config: {}\"",
".",
"format",
"(",
"\", \"",
".",
"join",
"(",
"sorted",
"(",
"list",
"(",
"invalid_keys",
")",
")",
")",
")",
")",
"config",
"=",
"RestApiConfig",
"(",
"bind",
"=",
"toml_config",
".",
"get",
"(",
"\"bind\"",
",",
"None",
")",
",",
"connect",
"=",
"toml_config",
".",
"get",
"(",
"'connect'",
",",
"None",
")",
",",
"timeout",
"=",
"toml_config",
".",
"get",
"(",
"'timeout'",
",",
"None",
")",
",",
"opentsdb_url",
"=",
"toml_config",
".",
"get",
"(",
"'opentsdb_url'",
",",
"None",
")",
",",
"opentsdb_db",
"=",
"toml_config",
".",
"get",
"(",
"'opentsdb_db'",
",",
"None",
")",
",",
"opentsdb_username",
"=",
"toml_config",
".",
"get",
"(",
"'opentsdb_username'",
",",
"None",
")",
",",
"opentsdb_password",
"=",
"toml_config",
".",
"get",
"(",
"'opentsdb_password'",
",",
"None",
")",
",",
"client_max_size",
"=",
"toml_config",
".",
"get",
"(",
"'client_max_size'",
",",
"None",
")",
")",
"return",
"config"
] |
Click on a label
|
def click_on_label(step, label):
    """Click the ``<label>`` element whose visible text equals ``label``."""
    with AssertContextManager(step):
        xpath = '//label[normalize-space(text()) = "%s"]' % label
        world.browser.find_element_by_xpath(str(xpath)).click()
| 1,267
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/webdriver.py#L258-L266
|
[
"def",
"readme_verify",
"(",
")",
":",
"expected",
"=",
"populate_readme",
"(",
"REVISION",
",",
"RTD_VERSION",
")",
"# Actually get the stored contents.",
"with",
"open",
"(",
"README_FILE",
",",
"\"r\"",
")",
"as",
"file_obj",
":",
"contents",
"=",
"file_obj",
".",
"read",
"(",
")",
"if",
"contents",
"!=",
"expected",
":",
"err_msg",
"=",
"\"\\n\"",
"+",
"get_diff",
"(",
"contents",
",",
"expected",
",",
"\"README.rst.actual\"",
",",
"\"README.rst.expected\"",
")",
"raise",
"ValueError",
"(",
"err_msg",
")",
"else",
":",
"print",
"(",
"\"README contents are as expected.\"",
")"
] |
Check if the element is focused
|
def element_focused(step, id):
    """Assert that the element with DOM id ``id`` currently has focus."""
    target = world.browser.find_element_by_xpath(
        str('id("{id}")'.format(id=id)))
    active = world.browser.switch_to_active_element()
    assert_true(step, target == active)
| 1,268
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/webdriver.py#L270-L278
|
[
"def",
"load_clients",
"(",
"stream",
",",
"configuration_class",
"=",
"ClientConfiguration",
")",
":",
"client_dict",
"=",
"yaml",
".",
"safe_load",
"(",
"stream",
")",
"if",
"isinstance",
"(",
"client_dict",
",",
"dict",
")",
":",
"return",
"{",
"client_name",
":",
"configuration_class",
"(",
"*",
"*",
"client_config",
")",
"for",
"client_name",
",",
"client_config",
"in",
"six",
".",
"iteritems",
"(",
"client_dict",
")",
"}",
"raise",
"ValueError",
"(",
"\"Valid configuration could not be decoded.\"",
")"
] |
Check if the element is not focused
|
def element_not_focused(step, id):
    """Assert that the element with DOM id ``id`` does NOT have focus."""
    target = world.browser.find_element_by_xpath(
        str('id("{id}")'.format(id=id)))
    active = world.browser.switch_to_active_element()
    assert_false(step, target == active)
| 1,269
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/webdriver.py#L282-L290
|
[
"def",
"populateFromRow",
"(",
"self",
",",
"readGroupSetRecord",
")",
":",
"self",
".",
"_dataUrl",
"=",
"readGroupSetRecord",
".",
"dataurl",
"self",
".",
"_indexFile",
"=",
"readGroupSetRecord",
".",
"indexfile",
"self",
".",
"_programs",
"=",
"[",
"]",
"for",
"jsonDict",
"in",
"json",
".",
"loads",
"(",
"readGroupSetRecord",
".",
"programs",
")",
":",
"program",
"=",
"protocol",
".",
"fromJson",
"(",
"json",
".",
"dumps",
"(",
"jsonDict",
")",
",",
"protocol",
".",
"Program",
")",
"self",
".",
"_programs",
".",
"append",
"(",
"program",
")",
"stats",
"=",
"protocol",
".",
"fromJson",
"(",
"readGroupSetRecord",
".",
"stats",
",",
"protocol",
".",
"ReadStats",
")",
"self",
".",
"_numAlignedReads",
"=",
"stats",
".",
"aligned_read_count",
"self",
".",
"_numUnalignedReads",
"=",
"stats",
".",
"unaligned_read_count"
] |
Check that the form input element has given value .
|
def input_has_value(step, field_name, value):
    """Assert that the date/text input named ``field_name`` holds ``value``."""
    with AssertContextManager(step):
        field = find_any_field(
            world.browser, DATE_FIELDS + TEXT_FIELDS, field_name)
        assert_false(step, field is False,
                     'Can not find a field named "%s"' % field_name)
        assert_equals(field.get_attribute('value'), value)
| 1,270
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/webdriver.py#L294-L304
|
[
"def",
"detail_dict",
"(",
"self",
")",
":",
"d",
"=",
"self",
".",
"dict",
"def",
"aug_col",
"(",
"c",
")",
":",
"d",
"=",
"c",
".",
"dict",
"d",
"[",
"'stats'",
"]",
"=",
"[",
"s",
".",
"dict",
"for",
"s",
"in",
"c",
".",
"stats",
"]",
"return",
"d",
"d",
"[",
"'table'",
"]",
"=",
"self",
".",
"table",
".",
"dict",
"d",
"[",
"'table'",
"]",
"[",
"'columns'",
"]",
"=",
"[",
"aug_col",
"(",
"c",
")",
"for",
"c",
"in",
"self",
".",
"table",
".",
"columns",
"]",
"return",
"d"
] |
Submit the form having given id .
|
def submit_form_id(step, id):
    """Submit the form whose DOM id is ``id``."""
    form = world.browser.find_element_by_xpath(
        str('id("{id}")'.format(id=id)))
    form.submit()
| 1,271
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/webdriver.py#L317-L322
|
[
"def",
"_generate_noise_temporal",
"(",
"stimfunction_tr",
",",
"tr_duration",
",",
"dimensions",
",",
"template",
",",
"mask",
",",
"noise_dict",
")",
":",
"# Set up common parameters",
"# How many TRs are there",
"trs",
"=",
"len",
"(",
"stimfunction_tr",
")",
"# What time points are sampled by a TR?",
"timepoints",
"=",
"list",
"(",
"np",
".",
"linspace",
"(",
"0",
",",
"(",
"trs",
"-",
"1",
")",
"*",
"tr_duration",
",",
"trs",
")",
")",
"# Preset the volume",
"noise_volume",
"=",
"np",
".",
"zeros",
"(",
"(",
"dimensions",
"[",
"0",
"]",
",",
"dimensions",
"[",
"1",
"]",
",",
"dimensions",
"[",
"2",
"]",
",",
"trs",
")",
")",
"# Generate the drift noise",
"if",
"noise_dict",
"[",
"'drift_sigma'",
"]",
"!=",
"0",
":",
"# Calculate the drift time course",
"noise",
"=",
"_generate_noise_temporal_drift",
"(",
"trs",
",",
"tr_duration",
",",
")",
"# Create a volume with the drift properties",
"volume",
"=",
"np",
".",
"ones",
"(",
"dimensions",
")",
"# Combine the volume and noise",
"noise_volume",
"+=",
"np",
".",
"multiply",
".",
"outer",
"(",
"volume",
",",
"noise",
")",
"*",
"noise_dict",
"[",
"'drift_sigma'",
"]",
"# Generate the physiological noise",
"if",
"noise_dict",
"[",
"'physiological_sigma'",
"]",
"!=",
"0",
":",
"# Calculate the physiological time course",
"noise",
"=",
"_generate_noise_temporal_phys",
"(",
"timepoints",
",",
")",
"# Create a brain shaped volume with similar smoothing properties",
"volume",
"=",
"_generate_noise_spatial",
"(",
"dimensions",
"=",
"dimensions",
",",
"mask",
"=",
"mask",
",",
"fwhm",
"=",
"noise_dict",
"[",
"'fwhm'",
"]",
",",
")",
"# Combine the volume and noise",
"noise_volume",
"+=",
"np",
".",
"multiply",
".",
"outer",
"(",
"volume",
",",
"noise",
")",
"*",
"noise_dict",
"[",
"'physiological_sigma'",
"]",
"# Generate the AR noise",
"if",
"noise_dict",
"[",
"'auto_reg_sigma'",
"]",
"!=",
"0",
":",
"# Calculate the AR time course volume",
"noise",
"=",
"_generate_noise_temporal_autoregression",
"(",
"timepoints",
",",
"noise_dict",
",",
"dimensions",
",",
"mask",
",",
")",
"# Combine the volume and noise",
"noise_volume",
"+=",
"noise",
"*",
"noise_dict",
"[",
"'auto_reg_sigma'",
"]",
"# Generate the task related noise",
"if",
"noise_dict",
"[",
"'task_sigma'",
"]",
"!=",
"0",
"and",
"np",
".",
"sum",
"(",
"stimfunction_tr",
")",
">",
"0",
":",
"# Calculate the task based noise time course",
"noise",
"=",
"_generate_noise_temporal_task",
"(",
"stimfunction_tr",
",",
")",
"# Create a brain shaped volume with similar smoothing properties",
"volume",
"=",
"_generate_noise_spatial",
"(",
"dimensions",
"=",
"dimensions",
",",
"mask",
"=",
"mask",
",",
"fwhm",
"=",
"noise_dict",
"[",
"'fwhm'",
"]",
",",
")",
"# Combine the volume and noise",
"noise_volume",
"+=",
"np",
".",
"multiply",
".",
"outer",
"(",
"volume",
",",
"noise",
")",
"*",
"noise_dict",
"[",
"'task_sigma'",
"]",
"# Finally, z score each voxel so things mix nicely",
"noise_volume",
"=",
"stats",
".",
"zscore",
"(",
"noise_volume",
",",
"3",
")",
"# If it is a nan it is because you just divided by zero (since some",
"# voxels are zeros in the template)",
"noise_volume",
"[",
"np",
".",
"isnan",
"(",
"noise_volume",
")",
"]",
"=",
"0",
"return",
"noise_volume"
] |
Submit the form having given action URL .
|
def submit_form_action(step, url):
    """Submit the form whose ``action`` attribute equals ``url``."""
    form = world.browser.find_element_by_xpath(
        str('//form[@action="%s"]' % url))
    form.submit()
| 1,272
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/webdriver.py#L326-L332
|
[
"def",
"prepare_blobs",
"(",
"self",
")",
":",
"self",
".",
"raw_header",
"=",
"self",
".",
"extract_header",
"(",
")",
"if",
"self",
".",
"cache_enabled",
":",
"self",
".",
"_cache_offsets",
"(",
")"
] |
Check the alert text
|
def check_alert(step, text):
    """Assert that the current browser alert shows ``text``.

    A ``WebDriverException`` is tolerated because some drivers
    (e.g. PhantomJS) do not implement alert handling.
    """
    try:
        assert_equals(Alert(world.browser).text, text)
    except WebDriverException:
        # PhantomJS is kinda poor
        pass
| 1,273
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/webdriver.py#L472-L482
|
[
"def",
"delete",
"(",
"self",
",",
"index",
"=",
"None",
")",
":",
"if",
"index",
"is",
"None",
":",
"javabridge",
".",
"call",
"(",
"self",
".",
"jobject",
",",
"\"delete\"",
",",
"\"()V\"",
")",
"else",
":",
"javabridge",
".",
"call",
"(",
"self",
".",
"jobject",
",",
"\"delete\"",
",",
"\"(I)V\"",
",",
"index",
")"
] |
Check that the page title matches the given one .
|
def page_title(step, title):
    """Assert that the current page title equals ``title``."""
    with AssertContextManager(step):
        assert_equals(world.browser.title, title)
| 1,274
|
https://github.com/bbangert/lettuce_webdriver/blob/d11f8531c43bb7150c316e0dc4ccd083617becf7/lettuce_webdriver/webdriver.py#L544-L550
|
[
"def",
"get_constant",
"(",
"self",
",",
"const_name",
",",
"context",
")",
":",
"# check if value is compatible with",
"const",
"=",
"self",
".",
"_constants",
"[",
"const_name",
"]",
"if",
"isinstance",
"(",
"const",
",",
"ast",
".",
"AnnAssign",
")",
":",
"# Handle ByteArrays.",
"if",
"context",
":",
"expr",
"=",
"Expr",
"(",
"const",
".",
"value",
",",
"context",
")",
".",
"lll_node",
"return",
"expr",
"else",
":",
"raise",
"VariableDeclarationException",
"(",
"\"ByteArray: Can not be used outside of a function context: %s\"",
"%",
"const_name",
")",
"# Other types are already unwrapped, no need",
"return",
"self",
".",
"_constants",
"[",
"const_name",
"]"
] |
Get a tag .
|
def get(self, name):
    """Fetch a single tag by ``name`` and decode it with ``TagSchema``."""
    response = self.service.get_id(self.base, name)
    return self.service.decode(TagSchema(), response)
| 1,275
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/tags.py#L93-L102
|
[
"def",
"add_item",
"(",
"self",
",",
"item_url",
",",
"item_metadata",
")",
":",
"c",
"=",
"self",
".",
"conn",
".",
"cursor",
"(",
")",
"c",
".",
"execute",
"(",
"\"DELETE FROM items WHERE url=?\"",
",",
"(",
"str",
"(",
"item_url",
")",
",",
")",
")",
"self",
".",
"conn",
".",
"commit",
"(",
")",
"c",
".",
"execute",
"(",
"\"INSERT INTO items VALUES (?, ?, ?)\"",
",",
"(",
"str",
"(",
"item_url",
")",
",",
"item_metadata",
",",
"self",
".",
"__now_iso_8601",
"(",
")",
")",
")",
"self",
".",
"conn",
".",
"commit",
"(",
")",
"c",
".",
"close",
"(",
")"
] |
Edit a tag .
|
def edit(self, resource):
    """Update an existing tag; only the whitelisted fields are sent."""
    payload_schema = TagSchema(
        only=('name', 'configs', 'devices', 'packages', 'results'))
    body = self.service.encode(payload_schema, resource)
    response = self.service.edit(self.base, resource.name, body)
    return self.service.decode(TagSchema(), response)
| 1,276
|
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/tags.py#L104-L116
|
[
"def",
"deletecols",
"(",
"X",
",",
"cols",
")",
":",
"if",
"isinstance",
"(",
"cols",
",",
"str",
")",
":",
"cols",
"=",
"cols",
".",
"split",
"(",
"','",
")",
"retain",
"=",
"[",
"n",
"for",
"n",
"in",
"X",
".",
"dtype",
".",
"names",
"if",
"n",
"not",
"in",
"cols",
"]",
"if",
"len",
"(",
"retain",
")",
">",
"0",
":",
"return",
"X",
"[",
"retain",
"]",
"else",
":",
"return",
"None"
] |
Get the remaining time - to - live of this lease .
|
def remaining(self):
    """Return the remaining time-to-live (seconds) of this lease.

    Raises ``Expired`` when the lease is already known to be expired,
    or when the server reply no longer carries a TTL for it.
    """
    if self._expired:
        raise Expired()

    request = json.dumps({u'ID': self.lease_id}).encode('utf8')
    url = u'{}/v3alpha/kv/lease/timetolive'.format(
        self._client._url).encode()
    response = yield treq.post(url, request,
                               headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)

    ttl = obj.get(u'TTL', None)
    if not ttl:
        # A missing TTL means the lease is gone on the server side.
        self._expired = True
        raise Expired()

    returnValue(ttl)
| 1,277
|
https://github.com/crossbario/txaio-etcd/blob/c9aebff7f288a0b219bffc9d2579d22cf543baa5/txaioetcd/_lease.py#L85-L113
|
[
"def",
"pass_data_on",
"(",
"self",
",",
"data_setters",
")",
":",
"data_setters",
".",
"init_structure",
"(",
"self",
".",
"num_bonds",
",",
"len",
"(",
"self",
".",
"x_coord_list",
")",
",",
"len",
"(",
"self",
".",
"group_type_list",
")",
",",
"len",
"(",
"self",
".",
"chain_id_list",
")",
",",
"len",
"(",
"self",
".",
"chains_per_model",
")",
",",
"self",
".",
"structure_id",
")",
"decoder_utils",
".",
"add_entity_info",
"(",
"self",
",",
"data_setters",
")",
"decoder_utils",
".",
"add_atomic_information",
"(",
"self",
",",
"data_setters",
")",
"decoder_utils",
".",
"add_header_info",
"(",
"self",
",",
"data_setters",
")",
"decoder_utils",
".",
"add_xtalographic_info",
"(",
"self",
",",
"data_setters",
")",
"decoder_utils",
".",
"generate_bio_assembly",
"(",
"self",
",",
"data_setters",
")",
"decoder_utils",
".",
"add_inter_group_bonds",
"(",
"self",
",",
"data_setters",
")",
"data_setters",
".",
"finalize_structure",
"(",
")"
] |
Revokes a lease . All keys attached to the lease will expire and be deleted .
|
def revoke(self):
    """Revoke this lease; all keys attached to it expire and are deleted.

    Marks this lease object as expired and returns the parsed response
    header (or ``None`` when absent).
    """
    if self._expired:
        raise Expired()

    # ID is the lease ID to revoke. When the ID is revoked, all
    # associated keys will be deleted.
    request = json.dumps({u'ID': self.lease_id}).encode('utf8')
    url = u'{}/v3alpha/kv/lease/revoke'.format(self._client._url).encode()
    response = yield treq.post(url, request,
                               headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)

    header = Header._parse(obj[u'header']) if u'header' in obj else None
    self._expired = True
    returnValue(header)
| 1,278
|
https://github.com/crossbario/txaio-etcd/blob/c9aebff7f288a0b219bffc9d2579d22cf543baa5/txaioetcd/_lease.py#L146-L173
|
[
"def",
"InitializeFD",
"(",
"self",
",",
"Channel",
",",
"BitrateFD",
")",
":",
"try",
":",
"res",
"=",
"self",
".",
"__m_dllBasic",
".",
"CAN_InitializeFD",
"(",
"Channel",
",",
"BitrateFD",
")",
"return",
"TPCANStatus",
"(",
"res",
")",
"except",
":",
"logger",
".",
"error",
"(",
"\"Exception on PCANBasic.InitializeFD\"",
")",
"raise"
] |
Keeps the lease alive by streaming keep alive requests from the client to the server and streaming keep alive responses from the server to the client .
|
def refresh(self):
    """Send a keep-alive for this lease and return the response header.

    Raises ``Expired`` when the lease is already expired, or when the
    server stops reporting a TTL for it.
    """
    if self._expired:
        raise Expired()

    request = json.dumps({u'ID': self.lease_id}).encode('utf8')
    url = u'{}/v3alpha/lease/keepalive'.format(self._client._url).encode()
    response = yield treq.post(url, request,
                               headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)

    if u'result' not in obj:
        raise Exception(
            'bogus lease refresh response (missing "result") in {}'.format(
                obj))

    ttl = obj[u'result'].get(u'TTL', None)
    if not ttl:
        self._expired = True
        raise Expired()

    header = (Header._parse(obj[u'result'][u'header'])
              if u'header' in obj[u'result'] else None)
    self._expired = False
    returnValue(header)
| 1,279
|
https://github.com/crossbario/txaio-etcd/blob/c9aebff7f288a0b219bffc9d2579d22cf543baa5/txaioetcd/_lease.py#L176-L211
|
[
"def",
"median_filter",
"(",
"data",
",",
"mask",
",",
"radius",
",",
"percent",
"=",
"50",
")",
":",
"if",
"mask",
"is",
"None",
":",
"mask",
"=",
"np",
".",
"ones",
"(",
"data",
".",
"shape",
",",
"dtype",
"=",
"bool",
")",
"if",
"np",
".",
"all",
"(",
"~",
"mask",
")",
":",
"return",
"data",
".",
"copy",
"(",
")",
"#",
"# Normalize the ranked data to 0-255",
"#",
"if",
"(",
"not",
"np",
".",
"issubdtype",
"(",
"data",
".",
"dtype",
",",
"np",
".",
"int",
")",
"or",
"np",
".",
"min",
"(",
"data",
")",
"<",
"0",
"or",
"np",
".",
"max",
"(",
"data",
")",
">",
"255",
")",
":",
"ranked_data",
",",
"translation",
"=",
"rank_order",
"(",
"data",
"[",
"mask",
"]",
",",
"nbins",
"=",
"255",
")",
"was_ranked",
"=",
"True",
"else",
":",
"ranked_data",
"=",
"data",
"[",
"mask",
"]",
"was_ranked",
"=",
"False",
"input",
"=",
"np",
".",
"zeros",
"(",
"data",
".",
"shape",
",",
"np",
".",
"uint8",
")",
"input",
"[",
"mask",
"]",
"=",
"ranked_data",
"mmask",
"=",
"np",
".",
"ascontiguousarray",
"(",
"mask",
",",
"np",
".",
"uint8",
")",
"output",
"=",
"np",
".",
"zeros",
"(",
"data",
".",
"shape",
",",
"np",
".",
"uint8",
")",
"_filter",
".",
"median_filter",
"(",
"input",
",",
"mmask",
",",
"output",
",",
"radius",
",",
"percent",
")",
"if",
"was_ranked",
":",
"result",
"=",
"translation",
"[",
"output",
"]",
"else",
":",
"result",
"=",
"output",
"return",
"result"
] |
Read links and associated categories for specified articles in a text file, separated by a space
|
def read_links_file(self, file_path):
    """Read (category, link) pairs for articles from a text file.

    Each non-blank line holds an article link and its category separated
    by whitespace: ``<link> <category>``.

    Args:
        file_path: path to the links file.

    Returns:
        A list of ``(category, link)`` tuples, in file order.

    Raises:
        ValueError: if a non-blank line has no whitespace separator.
    """
    articles = []
    with open(file_path) as f:
        for line in f:
            line = line.strip()
            # Ignore blank lines
            if not line:
                continue
            # Bug fix: the original line.split(' ') raised ValueError on
            # tabs, runs of spaces, or categories containing spaces.
            # Splitting on any whitespace with maxsplit=1 accepts all of
            # those while preserving behavior for well-formed lines.
            link, category = line.split(None, 1)
            articles.append((category.rstrip(), link.strip()))
    return articles
| 1,280
|
https://github.com/skillachie/news-corpus-builder/blob/7ef73c6d6a56e827ad694cdd446901590936baf9/news_corpus_builder/news_corpus_generator.py#L48-L71
|
[
"def",
"on_resize",
"(",
"width",
",",
"height",
")",
":",
"glViewport",
"(",
"0",
",",
"0",
",",
"width",
",",
"height",
")",
"glMatrixMode",
"(",
"GL_PROJECTION",
")",
"glLoadIdentity",
"(",
")",
"gluPerspective",
"(",
"30",
",",
"1.0",
"*",
"width",
"/",
"height",
",",
"0.1",
",",
"1000.0",
")",
"glMatrixMode",
"(",
"GL_MODELVIEW",
")",
"glLoadIdentity",
"(",
")"
] |
Makes an RPC call to the server and returns the json response
|
def call(self, action, params=None):
    """POST an RPC ``action`` (with optional ``params``) and return JSON.

    NOTE: a caller-supplied ``params`` dict is mutated in place (the
    ``action`` key is added), matching the original behavior.

    Raises ``RPCException`` when the response contains an ``error`` field.
    """
    params = params or {}
    params['action'] = action

    response = self.session.post(self.host, json=params,
                                 timeout=self.timeout)
    result = response.json()

    if 'error' in result:
        raise RPCException(result['error'])
    return result
| 1,281
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L57-L89
|
[
"def",
"secret_file",
"(",
"filename",
")",
":",
"filestat",
"=",
"os",
".",
"stat",
"(",
"abspath",
"(",
"filename",
")",
")",
"if",
"stat",
".",
"S_ISREG",
"(",
"filestat",
".",
"st_mode",
")",
"==",
"0",
"and",
"stat",
".",
"S_ISLNK",
"(",
"filestat",
".",
"st_mode",
")",
"==",
"0",
":",
"e_msg",
"=",
"\"Secret file %s must be a real file or symlink\"",
"%",
"filename",
"raise",
"aomi",
".",
"exceptions",
".",
"AomiFile",
"(",
"e_msg",
")",
"if",
"platform",
".",
"system",
"(",
")",
"!=",
"\"Windows\"",
":",
"if",
"filestat",
".",
"st_mode",
"&",
"stat",
".",
"S_IROTH",
"or",
"filestat",
".",
"st_mode",
"&",
"stat",
".",
"S_IWOTH",
"or",
"filestat",
".",
"st_mode",
"&",
"stat",
".",
"S_IWGRP",
":",
"e_msg",
"=",
"\"Secret file %s has too loose permissions\"",
"%",
"filename",
"raise",
"aomi",
".",
"exceptions",
".",
"AomiFile",
"(",
"e_msg",
")"
] |
Process a value that will be sent to backend
|
def _process_value(self, value, type):
    """Normalize a value before sending it to the backend.

    Strings and lists pass through untouched; anything else is
    JSON-encoded.  (``type`` is accepted but not used here.)
    """
    passthrough = six.string_types + (list,)
    if isinstance(value, passthrough):
        return value
    return json.dumps(value)
| 1,282
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L91-L104
|
[
"def",
"read_struct",
"(",
"fstream",
")",
":",
"line",
"=",
"fstream",
".",
"readline",
"(",
")",
".",
"strip",
"(",
")",
"fragments",
"=",
"line",
".",
"split",
"(",
"\",\"",
")",
"fragments",
"=",
"[",
"x",
"for",
"x",
"in",
"fragments",
"if",
"x",
"is",
"not",
"None",
"]",
"partition",
"=",
"dict",
"(",
")",
"if",
"not",
"len",
"(",
"fragments",
")",
">=",
"3",
":",
"return",
"None",
"partition",
"[",
"\"struct\"",
"]",
"=",
"fragments",
"[",
"0",
"]",
"partition",
"[",
"\"info\"",
"]",
"=",
"fragments",
"[",
"1",
"]",
"partition",
"[",
"\"num_lines\"",
"]",
"=",
"fragments",
"[",
"2",
"]",
"struct",
"=",
"None",
"if",
"partition",
"is",
"not",
"None",
"and",
"partition",
"[",
"\"struct\"",
"]",
"==",
"\"STRUCT\"",
":",
"num_lines",
"=",
"int",
"(",
"partition",
"[",
"\"num_lines\"",
"]",
".",
"strip",
"(",
")",
")",
"struct",
"=",
"{",
"}",
"for",
"_",
"in",
"range",
"(",
"num_lines",
")",
":",
"cols",
"=",
"fetch_cols",
"(",
"fstream",
")",
"struct",
".",
"update",
"(",
"{",
"cols",
"[",
"0",
"]",
":",
"cols",
"[",
"1",
":",
"]",
"}",
")",
"return",
"struct"
] |
Returns the account containing block
|
def block_account(self, hash):
    """Return the account containing the block with the given hash."""
    payload = {"hash": self._process_value(hash, 'block')}
    resp = self.call('block_account', payload)
    return resp['account']
| 1,283
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L872-L894
|
[
"def",
"interpoled_resampling",
"(",
"W",
",",
"x",
")",
":",
"N",
"=",
"W",
".",
"shape",
"[",
"0",
"]",
"idx",
"=",
"np",
".",
"argsort",
"(",
"x",
")",
"xs",
"=",
"x",
"[",
"idx",
"]",
"ws",
"=",
"W",
"[",
"idx",
"]",
"cs",
"=",
"np",
".",
"cumsum",
"(",
"avg_n_nplusone",
"(",
"ws",
")",
")",
"u",
"=",
"random",
".",
"rand",
"(",
"N",
")",
"xrs",
"=",
"np",
".",
"empty",
"(",
"N",
")",
"where",
"=",
"np",
".",
"searchsorted",
"(",
"cs",
",",
"u",
")",
"# costs O(N log(N)) but algorithm has O(N log(N)) complexity anyway",
"for",
"n",
"in",
"range",
"(",
"N",
")",
":",
"m",
"=",
"where",
"[",
"n",
"]",
"if",
"m",
"==",
"0",
":",
"xrs",
"[",
"n",
"]",
"=",
"xs",
"[",
"0",
"]",
"elif",
"m",
"==",
"N",
":",
"xrs",
"[",
"n",
"]",
"=",
"xs",
"[",
"-",
"1",
"]",
"else",
":",
"xrs",
"[",
"n",
"]",
"=",
"interpol",
"(",
"cs",
"[",
"m",
"-",
"1",
"]",
",",
"cs",
"[",
"m",
"]",
",",
"xs",
"[",
"m",
"-",
"1",
"]",
",",
"xs",
"[",
"m",
"]",
",",
"u",
"[",
"n",
"]",
")",
"return",
"xrs"
] |
Reports the number of blocks in the ledger and unchecked synchronizing blocks
|
def block_count(self):
    """Return ledger/unchecked block counts with values coerced to int."""
    resp = self.call('block_count')
    return {key: int(count) for key, count in resp.items()}
| 1,284
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L897-L917
|
[
"def",
"_parse_key",
"(",
"key",
")",
":",
"splt",
"=",
"key",
".",
"split",
"(",
"\"\\\\\"",
")",
"hive",
"=",
"splt",
".",
"pop",
"(",
"0",
")",
"key",
"=",
"'\\\\'",
".",
"join",
"(",
"splt",
")",
"return",
"hive",
",",
"key"
] |
Divide a raw amount down by the Mrai ratio .
|
def mrai_from_raw(self, amount):
    """Divide a raw amount down by the Mrai ratio; returns an int."""
    payload = {"amount": self._process_value(amount, 'int')}
    return int(self.call('mrai_from_raw', payload)['amount'])
| 1,285
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1384-L1404
|
[
"def",
"get_bundle_versions",
"(",
")",
":",
"global",
"_cached_versions",
"if",
"not",
"bundles_settings",
".",
"BUNDLES_VERSION_FILE",
":",
"_cached_versions",
"=",
"{",
"}",
"if",
"_cached_versions",
"is",
"None",
":",
"locs",
"=",
"{",
"}",
"try",
":",
"execfile",
"(",
"bundles_settings",
".",
"BUNDLES_VERSION_FILE",
",",
"locs",
")",
"_cached_versions",
"=",
"locs",
"[",
"'BUNDLES_VERSIONS'",
"]",
"except",
"IOError",
":",
"_cached_versions",
"=",
"{",
"}",
"return",
"_cached_versions"
] |
Multiply an Mrai amount by the Mrai ratio .
|
def mrai_to_raw(self, amount):
    """Multiply an Mrai amount by the Mrai ratio; returns an int."""
    payload = {"amount": self._process_value(amount, 'int')}
    return int(self.call('mrai_to_raw', payload)['amount'])
| 1,286
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1407-L1427
|
[
"def",
"insert_system",
"(",
"cur",
",",
"system_name",
",",
"encoded_data",
"=",
"None",
")",
":",
"if",
"encoded_data",
"is",
"None",
":",
"encoded_data",
"=",
"{",
"}",
"if",
"'system_name'",
"not",
"in",
"encoded_data",
":",
"encoded_data",
"[",
"'system_name'",
"]",
"=",
"system_name",
"insert",
"=",
"\"INSERT OR IGNORE INTO system(system_name) VALUES (:system_name);\"",
"cur",
".",
"execute",
"(",
"insert",
",",
"encoded_data",
")"
] |
Divide a raw amount down by the krai ratio .
|
def krai_from_raw(self, amount):
    """Divide a raw amount down by the krai ratio; returns an int."""
    payload = {"amount": self._process_value(amount, 'int')}
    return int(self.call('krai_from_raw', payload)['amount'])
| 1,287
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1430-L1449
|
[
"def",
"get_bundle_versions",
"(",
")",
":",
"global",
"_cached_versions",
"if",
"not",
"bundles_settings",
".",
"BUNDLES_VERSION_FILE",
":",
"_cached_versions",
"=",
"{",
"}",
"if",
"_cached_versions",
"is",
"None",
":",
"locs",
"=",
"{",
"}",
"try",
":",
"execfile",
"(",
"bundles_settings",
".",
"BUNDLES_VERSION_FILE",
",",
"locs",
")",
"_cached_versions",
"=",
"locs",
"[",
"'BUNDLES_VERSIONS'",
"]",
"except",
"IOError",
":",
"_cached_versions",
"=",
"{",
"}",
"return",
"_cached_versions"
] |
Multiply an krai amount by the krai ratio .
|
def krai_to_raw(self, amount):
    """Multiply a krai amount by the krai ratio; returns an int."""
    payload = {"amount": self._process_value(amount, 'int')}
    return int(self.call('krai_to_raw', payload)['amount'])
| 1,288
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1452-L1472
|
[
"def",
"generate",
"(",
"env",
")",
":",
"link",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'FRAMEWORKPATHPREFIX'",
"]",
"=",
"'-F'",
"env",
"[",
"'_FRAMEWORKPATH'",
"]",
"=",
"'${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, \"\", __env__)}'",
"env",
"[",
"'_FRAMEWORKS'",
"]",
"=",
"'${_concat(\"-framework \", FRAMEWORKS, \"\", __env__)}'",
"env",
"[",
"'LINKCOM'",
"]",
"=",
"env",
"[",
"'LINKCOM'",
"]",
"+",
"' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'",
"env",
"[",
"'SHLINKFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$LINKFLAGS -dynamiclib'",
")",
"env",
"[",
"'SHLINKCOM'",
"]",
"=",
"env",
"[",
"'SHLINKCOM'",
"]",
"+",
"' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'",
"# TODO: Work needed to generate versioned shared libraries",
"# Leaving this commented out, and also going to disable versioned library checking for now",
"# see: http://docstore.mik.ua/orelly/unix3/mac/ch05_04.htm for proper naming",
"#link._setup_versioned_lib_variables(env, tool = 'applelink')#, use_soname = use_soname)",
"#env['LINKCALLBACKS'] = link._versioned_lib_callbacks()",
"# override the default for loadable modules, which are different",
"# on OS X than dynamic shared libs. echoing what XCode does for",
"# pre/suffixes:",
"env",
"[",
"'LDMODULEPREFIX'",
"]",
"=",
"''",
"env",
"[",
"'LDMODULESUFFIX'",
"]",
"=",
"''",
"env",
"[",
"'LDMODULEFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$LINKFLAGS -bundle'",
")",
"env",
"[",
"'LDMODULECOM'",
"]",
"=",
"'$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'"
] |
Divide a raw amount down by the rai ratio .
|
def rai_from_raw(self, amount):
    """Divide a raw amount down by the rai ratio; returns an int."""
    payload = {"amount": self._process_value(amount, 'int')}
    return int(self.call('rai_from_raw', payload)['amount'])
| 1,289
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1475-L1495
|
[
"def",
"get_bundle_versions",
"(",
")",
":",
"global",
"_cached_versions",
"if",
"not",
"bundles_settings",
".",
"BUNDLES_VERSION_FILE",
":",
"_cached_versions",
"=",
"{",
"}",
"if",
"_cached_versions",
"is",
"None",
":",
"locs",
"=",
"{",
"}",
"try",
":",
"execfile",
"(",
"bundles_settings",
".",
"BUNDLES_VERSION_FILE",
",",
"locs",
")",
"_cached_versions",
"=",
"locs",
"[",
"'BUNDLES_VERSIONS'",
"]",
"except",
"IOError",
":",
"_cached_versions",
"=",
"{",
"}",
"return",
"_cached_versions"
] |
Multiply an rai amount by the rai ratio .
|
def rai_to_raw(self, amount):
    """Multiply a rai amount by the rai ratio; returns an int."""
    payload = {"amount": self._process_value(amount, 'int')}
    return int(self.call('rai_to_raw', payload)['amount'])
| 1,290
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1498-L1518
|
[
"def",
"_unbind_topics",
"(",
"self",
",",
"topics",
")",
":",
"self",
".",
"client",
".",
"unsubscribe",
"(",
"topics",
".",
"status",
")",
"self",
".",
"client",
".",
"unsubscribe",
"(",
"topics",
".",
"tracing",
")",
"self",
".",
"client",
".",
"unsubscribe",
"(",
"topics",
".",
"streaming",
")",
"self",
".",
"client",
".",
"unsubscribe",
"(",
"topics",
".",
"response",
")"
] |
Begin a new payment session. Searches the wallet for an account that is marked as available and has a 0 balance. If one is found, the account number is returned and it is marked as unavailable. If no account is found, a new account is created, placed in the wallet, and returned.
|
def payment_begin ( self , wallet ) : wallet = self . _process_value ( wallet , 'wallet' ) payload = { "wallet" : wallet } resp = self . call ( 'payment_begin' , payload ) return resp [ 'account' ]
| 1,291
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1683-L1708
|
[
"def",
"max_speed",
"(",
"self",
")",
":",
"(",
"self",
".",
"_max_speed",
",",
"value",
")",
"=",
"self",
".",
"get_cached_attr_int",
"(",
"self",
".",
"_max_speed",
",",
"'max_speed'",
")",
"return",
"value"
] |
Marks all accounts in wallet as available for being used as a payment session .
|
def payment_init ( self , wallet ) : wallet = self . _process_value ( wallet , 'wallet' ) payload = { "wallet" : wallet } resp = self . call ( 'payment_init' , payload ) return resp [ 'status' ] == 'Ready'
| 1,292
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1714-L1736
|
[
"def",
"validate_request_timestamp",
"(",
"req_body",
",",
"max_diff",
"=",
"150",
")",
":",
"time_str",
"=",
"req_body",
".",
"get",
"(",
"'request'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'timestamp'",
")",
"if",
"not",
"time_str",
":",
"log",
".",
"error",
"(",
"'timestamp not present %s'",
",",
"req_body",
")",
"return",
"False",
"req_ts",
"=",
"datetime",
".",
"strptime",
"(",
"time_str",
",",
"\"%Y-%m-%dT%H:%M:%SZ\"",
")",
"diff",
"=",
"(",
"datetime",
".",
"utcnow",
"(",
")",
"-",
"req_ts",
")",
".",
"total_seconds",
"(",
")",
"if",
"abs",
"(",
"diff",
")",
">",
"max_diff",
":",
"log",
".",
"error",
"(",
"'timestamp difference too high: %d sec'",
",",
"diff",
")",
"return",
"False",
"return",
"True"
] |
End a payment session . Marks the account as available for use in a payment session .
|
def payment_end ( self , account , wallet ) : account = self . _process_value ( account , 'account' ) wallet = self . _process_value ( wallet , 'wallet' ) payload = { "account" : account , "wallet" : wallet } resp = self . call ( 'payment_end' , payload ) return resp == { }
| 1,293
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1739-L1766
|
[
"def",
"summarize",
"(",
"objects",
")",
":",
"count",
"=",
"{",
"}",
"total_size",
"=",
"{",
"}",
"for",
"o",
"in",
"objects",
":",
"otype",
"=",
"_repr",
"(",
"o",
")",
"if",
"otype",
"in",
"count",
":",
"count",
"[",
"otype",
"]",
"+=",
"1",
"total_size",
"[",
"otype",
"]",
"+=",
"_getsizeof",
"(",
"o",
")",
"else",
":",
"count",
"[",
"otype",
"]",
"=",
"1",
"total_size",
"[",
"otype",
"]",
"=",
"_getsizeof",
"(",
"o",
")",
"rows",
"=",
"[",
"]",
"for",
"otype",
"in",
"count",
":",
"rows",
".",
"append",
"(",
"[",
"otype",
",",
"count",
"[",
"otype",
"]",
",",
"total_size",
"[",
"otype",
"]",
"]",
")",
"return",
"rows"
] |
Returns a list of pairs of representative and its voting weight
|
def representatives ( self , count = None , sorting = False ) : payload = { } if count is not None : payload [ 'count' ] = self . _process_value ( count , 'int' ) if sorting : payload [ 'sorting' ] = self . _process_value ( sorting , 'strbool' ) resp = self . call ( 'representatives' , payload ) representatives = resp . get ( 'representatives' ) or { } for k , v in representatives . items ( ) : representatives [ k ] = int ( v ) return representatives
| 1,294
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1929-L1968
|
[
"def",
"create_xml_file",
"(",
"self",
",",
"source_file",
",",
"destination",
"=",
"None",
")",
":",
"xml_file",
"=",
"destination",
"# If file specified, remove it to start else create new file name",
"if",
"xml_file",
":",
"utils",
".",
"remove_file_no_raise",
"(",
"xml_file",
",",
"self",
".",
"__config",
")",
"else",
":",
"xml_file",
"=",
"utils",
".",
"create_temp_file_name",
"(",
"suffix",
"=",
"'.xml'",
")",
"ffname",
"=",
"source_file",
"if",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"ffname",
")",
":",
"ffname",
"=",
"self",
".",
"__file_full_name",
"(",
"source_file",
")",
"command_line",
"=",
"self",
".",
"__create_command_line",
"(",
"ffname",
",",
"xml_file",
")",
"process",
"=",
"subprocess",
".",
"Popen",
"(",
"args",
"=",
"command_line",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
"try",
":",
"results",
"=",
"[",
"]",
"while",
"process",
".",
"poll",
"(",
")",
"is",
"None",
":",
"line",
"=",
"process",
".",
"stdout",
".",
"readline",
"(",
")",
"if",
"line",
".",
"strip",
"(",
")",
":",
"results",
".",
"append",
"(",
"line",
".",
"rstrip",
"(",
")",
")",
"for",
"line",
"in",
"process",
".",
"stdout",
".",
"readlines",
"(",
")",
":",
"if",
"line",
".",
"strip",
"(",
")",
":",
"results",
".",
"append",
"(",
"line",
".",
"rstrip",
"(",
")",
")",
"exit_status",
"=",
"process",
".",
"returncode",
"msg",
"=",
"os",
".",
"linesep",
".",
"join",
"(",
"[",
"str",
"(",
"s",
")",
"for",
"s",
"in",
"results",
"]",
")",
"if",
"self",
".",
"__config",
".",
"ignore_gccxml_output",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"xml_file",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Error occurred while running \"",
"+",
"self",
".",
"__config",
".",
"xml_generator",
".",
"upper",
"(",
")",
"+",
"\": %s status:%s\"",
"%",
"(",
"msg",
",",
"exit_status",
")",
")",
"else",
":",
"if",
"msg",
"or",
"exit_status",
"or",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"xml_file",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"xml_file",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Error occurred while running \"",
"+",
"self",
".",
"__config",
".",
"xml_generator",
".",
"upper",
"(",
")",
"+",
"\" xml file does not exist\"",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"\"Error occurred while running \"",
"+",
"self",
".",
"__config",
".",
"xml_generator",
".",
"upper",
"(",
")",
"+",
"\": %s status:%s\"",
"%",
"(",
"msg",
",",
"exit_status",
")",
")",
"except",
"Exception",
":",
"utils",
".",
"remove_file_no_raise",
"(",
"xml_file",
",",
"self",
".",
"__config",
")",
"raise",
"finally",
":",
"process",
".",
"wait",
"(",
")",
"process",
".",
"stdout",
".",
"close",
"(",
")",
"return",
"xml_file"
] |
Returns the node s RPC version
|
def version ( self ) : resp = self . call ( 'version' ) for key in ( 'rpc_version' , 'store_version' ) : resp [ key ] = int ( resp [ key ] ) return resp
| 1,295
|
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L3350-L3370
|
[
"def",
"matchw",
"(",
"string",
",",
"templ",
",",
"wstr",
",",
"wchr",
")",
":",
"# ctypes.c_char(wstr.encode(encoding='UTF-8')",
"string",
"=",
"stypes",
".",
"stringToCharP",
"(",
"string",
")",
"templ",
"=",
"stypes",
".",
"stringToCharP",
"(",
"templ",
")",
"wstr",
"=",
"ctypes",
".",
"c_char",
"(",
"wstr",
".",
"encode",
"(",
"encoding",
"=",
"'UTF-8'",
")",
")",
"wchr",
"=",
"ctypes",
".",
"c_char",
"(",
"wchr",
".",
"encode",
"(",
"encoding",
"=",
"'UTF-8'",
")",
")",
"return",
"bool",
"(",
"libspice",
".",
"matchw_c",
"(",
"string",
",",
"templ",
",",
"wstr",
",",
"wchr",
")",
")"
] |
Get user email from github .
|
def _extract_email ( gh ) : return next ( ( x . email for x in gh . emails ( ) if x . verified and x . primary ) , None )
| 1,296
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/github.py#L110-L113
|
[
"def",
"_set_vibration_nix",
"(",
"self",
",",
"left_motor",
",",
"right_motor",
",",
"duration",
")",
":",
"code",
"=",
"self",
".",
"__get_vibration_code",
"(",
"left_motor",
",",
"right_motor",
",",
"duration",
")",
"secs",
",",
"msecs",
"=",
"convert_timeval",
"(",
"time",
".",
"time",
"(",
")",
")",
"outer_event",
"=",
"struct",
".",
"pack",
"(",
"EVENT_FORMAT",
",",
"secs",
",",
"msecs",
",",
"0x15",
",",
"code",
",",
"1",
")",
"self",
".",
"_write_device",
".",
"write",
"(",
"outer_event",
")",
"self",
".",
"_write_device",
".",
"flush",
"(",
")"
] |
Authorized callback handler for GitHub .
|
def authorized ( resp , remote ) : if resp and 'error' in resp : if resp [ 'error' ] == 'bad_verification_code' : # See https://developer.github.com/v3/oauth/#bad-verification-code # which recommends starting auth flow again. return redirect ( url_for ( 'invenio_oauthclient.login' , remote_app = 'github' ) ) elif resp [ 'error' ] in [ 'incorrect_client_credentials' , 'redirect_uri_mismatch' ] : raise OAuthResponseError ( 'Application mis-configuration in GitHub' , remote , resp ) return authorized_signup_handler ( resp , remote )
| 1,297
|
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/github.py#L180-L198
|
[
"def",
"ToInternal",
"(",
"self",
")",
":",
"self",
".",
"validate_units",
"(",
")",
"savewunits",
"=",
"self",
".",
"waveunits",
"angwave",
"=",
"self",
".",
"waveunits",
".",
"Convert",
"(",
"self",
".",
"_wavetable",
",",
"'angstrom'",
")",
"self",
".",
"_wavetable",
"=",
"angwave",
".",
"copy",
"(",
")",
"self",
".",
"waveunits",
"=",
"savewunits"
] |
Get all unique sequences from reads spanning a variant locus . This will include partial sequences due to reads starting in the middle of the sequence around around a variant .
|
def initial_variant_sequences_from_reads ( variant_reads , max_nucleotides_before_variant = None , max_nucleotides_after_variant = None ) : unique_sequence_groups = group_unique_sequences ( variant_reads , max_prefix_size = max_nucleotides_before_variant , max_suffix_size = max_nucleotides_after_variant ) return [ VariantSequence ( prefix = prefix , alt = alt , suffix = suffix , reads = reads ) for ( ( prefix , alt , suffix ) , reads ) in unique_sequence_groups . items ( ) ]
| 1,298
|
https://github.com/openvax/isovar/blob/b39b684920e3f6b344851d6598a1a1c67bce913b/isovar/variant_sequences.py#L245-L267
|
[
"def",
"cancelHistoricalData",
"(",
"self",
",",
"bars",
":",
"BarDataList",
")",
":",
"self",
".",
"client",
".",
"cancelHistoricalData",
"(",
"bars",
".",
"reqId",
")",
"self",
".",
"wrapper",
".",
"endSubscription",
"(",
"bars",
")"
] |
Trim VariantSequences to desired coverage and then combine any subsequences which get generated .
|
def trim_variant_sequences ( variant_sequences , min_variant_sequence_coverage ) : n_total = len ( variant_sequences ) trimmed_variant_sequences = [ variant_sequence . trim_by_coverage ( min_variant_sequence_coverage ) for variant_sequence in variant_sequences ] collapsed_variant_sequences = collapse_substrings ( trimmed_variant_sequences ) n_after_trimming = len ( collapsed_variant_sequences ) logger . info ( "Kept %d/%d variant sequences after read coverage trimming to >=%dx" , n_after_trimming , n_total , min_variant_sequence_coverage ) return collapsed_variant_sequences
| 1,299
|
https://github.com/openvax/isovar/blob/b39b684920e3f6b344851d6598a1a1c67bce913b/isovar/variant_sequences.py#L319-L336
|
[
"def",
"_enforceDataType",
"(",
"self",
",",
"data",
")",
":",
"idx",
"=",
"int",
"(",
"data",
")",
"if",
"idx",
"<",
"0",
":",
"idx",
"+=",
"len",
"(",
"self",
".",
"_displayValues",
")",
"assert",
"0",
"<=",
"idx",
"<",
"len",
"(",
"self",
".",
"_displayValues",
")",
",",
"\"Index should be >= 0 and < {}. Got {}\"",
".",
"format",
"(",
"len",
"(",
"self",
".",
"_displayValues",
")",
",",
"idx",
")",
"return",
"idx"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.