query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Switch to specific chunk
def _useChunk ( self , index ) -> None : if self . currentChunk is not None : if self . currentChunkIndex == index and not self . currentChunk . closed : return self . currentChunk . close ( ) self . currentChunk = self . _openChunk ( index ) self . currentChunkIndex = index self . itemNum = self . currentChunk . numKeys + 1
5,700
https://github.com/hyperledger-archives/indy-ledger/blob/7210c3b288e07f940eddad09b1dfc6a56be846df/ledger/stores/chunked_file_store.py#L123-L138
[ "def", "_compute_non_linear_term", "(", "self", ",", "C", ",", "pga_only", ",", "sites", ")", ":", "Vref", "=", "self", ".", "CONSTS", "[", "'Vref'", "]", "Vcon", "=", "self", ".", "CONSTS", "[", "'Vcon'", "]", "c", "=", "self", ".", "CONSTS", "[", "'c'", "]", "n", "=", "self", ".", "CONSTS", "[", "'n'", "]", "lnS", "=", "np", ".", "zeros_like", "(", "sites", ".", "vs30", ")", "# equation (6a)\r", "idx", "=", "sites", ".", "vs30", "<", "Vref", "lnS", "[", "idx", "]", "=", "(", "C", "[", "'sb1'", "]", "*", "np", ".", "log", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "+", "C", "[", "'sb2'", "]", "*", "np", ".", "log", "(", "(", "pga_only", "[", "idx", "]", "+", "c", "*", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "**", "n", ")", "/", "(", "(", "pga_only", "[", "idx", "]", "+", "c", ")", "*", "(", "sites", ".", "vs30", "[", "idx", "]", "/", "Vref", ")", "**", "n", ")", ")", ")", "# equation (6b)\r", "idx", "=", "sites", ".", "vs30", ">=", "Vref", "new_sites", "=", "sites", ".", "vs30", "[", "idx", "]", "new_sites", "[", "new_sites", ">", "Vcon", "]", "=", "Vcon", "lnS", "[", "idx", "]", "=", "C", "[", "'sb1'", "]", "*", "np", ".", "log", "(", "new_sites", "/", "Vref", ")", "return", "lnS" ]
This will iterate only over the last chunk since the name of the last chunk indicates how many lines in total exist in all other chunks
def numKeys(self) -> int:
    """Return the total number of keys stored across all chunks.

    Only the last chunk's lines are actually counted: every earlier
    chunk is assumed to hold exactly ``chunkSize`` entries.
    """
    chunk_ids = self._listChunks()
    if not chunk_ids:
        return 0
    # All chunks except the last are full by construction.
    total = self.chunkSize * (len(chunk_ids) - 1)
    tail = self._openChunk(chunk_ids[-1])
    total += sum(1 for _ in tail._lines())
    tail.close()
    return total
5,701
https://github.com/hyperledger-archives/indy-ledger/blob/7210c3b288e07f940eddad09b1dfc6a56be846df/ledger/stores/chunked_file_store.py#L291-L304
[ "def", "get_aggregate", "(", "self", ",", "security", ":", "Commodity", ")", "->", "SecurityAggregate", ":", "assert", "security", "is", "not", "None", "assert", "isinstance", "(", "security", ",", "Commodity", ")", "return", "SecurityAggregate", "(", "self", ".", "book", ",", "security", ")" ]
Register a new subscriber . This method should be invoked by listeners to start receiving messages .
def register(self, subscriber):
    """Register a new subscriber.

    Listeners invoke this to start receiving messages.

    :param subscriber: handler to add; must be a ``RequestHandler``
        (checked with ``assert``, so the check vanishes under
        ``python -O``).
    """
    assert isinstance(subscriber, RequestHandler)
    logger.debug('New subscriber')
    self.subscribers.add(subscriber)
5,702
https://github.com/mivade/tornadose/blob/d220e0e3040d24c98997eee7a8a236602b4c5159/tornadose/stores.py#L35-L42
[ "def", "getOverlayWidthInMeters", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayWidthInMeters", "pfWidthInMeters", "=", "c_float", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "pfWidthInMeters", ")", ")", "return", "result", ",", "pfWidthInMeters", ".", "value" ]
Stop publishing to a subscriber .
def deregister(self, subscriber):
    """Stop publishing to a subscriber.

    Removing an unknown subscriber is logged and otherwise ignored.

    :param subscriber: the handler to remove from the subscriber set.
    """
    try:
        # Keep the try body minimal: only the removal can raise KeyError.
        self.subscribers.remove(subscriber)
    except KeyError:
        logger.debug('Error removing subscriber: ' + str(subscriber))
    else:
        # Log success only after the subscriber was actually removed;
        # the original logged 'Subscriber left' before knowing whether
        # the removal would succeed.
        logger.debug('Subscriber left')
5,703
https://github.com/mivade/tornadose/blob/d220e0e3040d24c98997eee7a8a236602b4c5159/tornadose/stores.py#L44-L52
[ "def", "readout", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "timeout", "=", "kwargs", ".", "pop", "(", "'timeout'", ",", "10.0", ")", "self", ".", "start_readout", "(", "*", "args", ",", "*", "*", "kwargs", ")", "try", ":", "yield", "finally", ":", "try", ":", "self", ".", "stop_readout", "(", "timeout", "=", "timeout", ")", "except", "Exception", ":", "# in case something fails, call this on last resort", "# if run was aborted, immediately stop readout", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "with", "self", ".", "_readout_lock", ":", "if", "self", ".", "fifo_readout", ".", "is_running", ":", "self", ".", "fifo_readout", ".", "stop", "(", "timeout", "=", "0.0", ")" ]
Stop the publishing loop .
def shutdown(self):
    """Stop the publishing loop.

    Sets the internal done event (presumably polled by the publishing
    loop -- confirm against the store's run loop) and shuts the
    executor down without waiting for queued work.
    """
    self._done.set()
    self.executor.shutdown(wait=False)
5,704
https://github.com/mivade/tornadose/blob/d220e0e3040d24c98997eee7a8a236602b4c5159/tornadose/stores.py#L152-L155
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
add_alignment - method for adding the alignment to an annotation
def add_alignment(self, ref_seq, annotation) -> Annotation:
    """Add the reference alignment to an annotation.

    For every feature of the reference sequence: if the annotated
    feature has the same length as the reference, the recorded gap
    positions are re-inserted; if the lengths differ, a global
    pairwise alignment is computed instead; features missing from the
    annotation are filled with all-gap strings.

    :param ref_seq: reference sequence record; its description encodes
        the allele (first comma-separated field), from which the locus
        is derived.
    :param annotation: annotation to extend; its ``aligned`` attribute
        is set in place.
    :return: the same ``annotation`` object.
    """
    seq_features = get_seqs(ref_seq)
    annoated_align = {}
    # description "X, ..." -> allele "X"; locus is the part of the
    # allele between '-' and '*' (e.g. "HLA-A*..." -> "A").
    allele = ref_seq.description.split(",")[0]
    locus = allele.split("*")[0].split("-")[1]
    for feat in seq_features:
        if feat in annotation.annotation:
            # DBSeq objects stringify directly; other records expose ``.seq``.
            if isinstance(annotation.annotation[feat], DBSeq):
                seq_len = len(str(annotation.annotation[feat]))
                ref_len = len(seq_features[feat])
            else:
                seq_len = len(str(annotation.annotation[feat].seq))
                ref_len = len(seq_features[feat])
            if seq_len == ref_len:
                # Same length: replay the reference gap positions.
                seq = list(annotation.annotation[feat].seq)
                gaps = self.refdata.annoated_alignments[locus][allele][feat]['Gaps']
                if self.verbose and self.verbosity > 0:
                    self.logger.info(self.logname + " Lengths match for " + feat)
                    self.logger.info(self.logname + " Gaps at " + feat)
                    self.logger.info(self.logname + "-".join([",".join([str(s) for s in g]) for g in gaps]))
                for i in range(0, len(gaps)):
                    for j in gaps[i]:
                        loc = j
                        seq.insert(loc, '-')
                nseq = ''.join(seq)
                annoated_align.update({feat: nseq})
            else:
                # Length mismatch: realign against the stored reference.
                in_seq = str(annotation.annotation[feat].seq)
                # NOTE(review): this rebinding shadows the ``ref_seq``
                # parameter; harmless here because the parameter is not
                # read again after this point, but fragile.
                ref_seq = self.refdata.annoated_alignments[locus][allele][feat]['Seq']
                alignment = pairwise2.align.globalxx(in_seq, ref_seq)
                if self.verbose and self.verbosity > 0:
                    self.logger.info(self.logname + " Align2 -> in_seq != ref_len " + feat)
                    self.logger.info(self.logname + " " + str(len(in_seq)) + " == " + str(ref_len))
                annoated_align.update({feat: alignment[0][0]})
        else:
            # Feature absent from the annotation: pad with gaps only.
            nseq = ''.join(list(repeat('-', len(seq_features[feat]))))
            annoated_align.update({feat: nseq})
    annotation.aligned = annoated_align
    return annotation
5,705
https://github.com/nmdp-bioinformatics/SeqAnn/blob/5ce91559b0a4fbe4fb7758e034eb258202632463/seqann/sequence_annotation.py#L1129-L1180
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
r Convert python object to xml string .
def object2xml(self, data):
    """Convert a Python object to an XML string.

    :param data: object to serialize; when no root tag is configured
        it must be a single-item dict whose key becomes the root tag.
    :return: the XML text (``unicode``; re-encoded to bytes when the
        configured encoding differs from the internal one).

    NOTE(review): Python-2-only code (``unicode`` builtin and
    ``dict.items()[0]`` subscripting).
    """
    if not self.__options['encoding']:
        # Fall back to the builder's internal encoding.
        self.set_options(encoding=self.__encoding)
    if self.__options['header_declare']:
        self.__tree.append(self.build_xml_header())
    root = self.__options['root']
    if not root:
        # Without an explicit root, derive it from the single top-level key.
        assert (isinstance(data, utils.DictTypes) and len(data) == 1), 'if root not specified, the data that dict object and length must be one required.'
        root, data = data.items()[0]
    self.build_tree(data, root)
    xml = unicode(''.join(self.__tree).strip())
    if self.__options['encoding'] != self.__encoding:
        xml = xml.encode(self.__options['encoding'], errors=self.__options['errors'])
    return xml
5,706
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/builder.py#L60-L85
[ "def", "assignParameters", "(", "self", ",", "solution_next", ",", "IncomeDstn", ",", "LivPrb", ",", "DiscFac", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ",", "BoroCnstArt", ",", "aXtraGrid", ",", "vFuncBool", ",", "CubicBool", ")", ":", "ConsPerfForesightSolver", ".", "assignParameters", "(", "self", ",", "solution_next", ",", "DiscFac", ",", "LivPrb", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ")", "self", ".", "BoroCnstArt", "=", "BoroCnstArt", "self", ".", "IncomeDstn", "=", "IncomeDstn", "self", ".", "aXtraGrid", "=", "aXtraGrid", "self", ".", "vFuncBool", "=", "vFuncBool", "self", ".", "CubicBool", "=", "CubicBool" ]
r Build xml tree .
def build_tree(self, data, tagname, attrs=None, depth=0):
    """Recursively build the XML tree for ``data`` under ``tagname``.

    Dicts become nested elements (with optional attr/value extraction
    when the ``hasattr`` option is enabled), other iterables repeat the
    tag per item, and scalars become a single tag with escaped or
    CDATA-wrapped text.

    :param data: value to serialize (``None`` is treated as ``''``).
    :param tagname: tag name for this level.
    :param attrs: attributes for this tag, or ``None``.
    :param depth: current nesting depth, used for indentation.
    """
    if data is None:
        data = ''
    indent = ('\n%s' % (self.__options['indent'] * depth)) if self.__options['indent'] else ''
    if isinstance(data, utils.DictTypes):
        if self.__options['hasattr'] and self.check_structure(data.keys()):
            attrs, values = self.pickdata(data)
            self.build_tree(values, tagname, attrs, depth)
        else:
            self.__tree.append('%s%s' % (indent, self.tag_start(tagname, attrs)))
            # ``items()`` works on both Python 2 and 3 (``iteritems`` is
            # Python-2-only) and we avoid shadowing the ``iter`` builtin.
            pairs = data.items()
            if self.__options['ksort']:
                pairs = sorted(pairs, key=lambda x: x[0], reverse=self.__options['reverse'])
            for k, v in pairs:
                attrs = {}
                if self.__options['hasattr'] and isinstance(v, utils.DictTypes) and self.check_structure(v.keys()):
                    attrs, v = self.pickdata(v)
                self.build_tree(v, k, attrs, depth + 1)
            self.__tree.append('%s%s' % (indent, self.tag_end(tagname)))
    elif utils.is_iterable(data):
        # Non-dict iterable: repeat the same tag for every item.
        for v in data:
            self.build_tree(v, tagname, attrs, depth)
    else:
        self.__tree.append(indent)
        data = self.safedata(data, self.__options['cdata'])
        self.__tree.append(self.build_tag(tagname, data, attrs))
5,707
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/builder.py#L94-L128
[ "def", "assignParameters", "(", "self", ",", "solution_next", ",", "IncomeDstn", ",", "LivPrb", ",", "DiscFac", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ",", "BoroCnstArt", ",", "aXtraGrid", ",", "vFuncBool", ",", "CubicBool", ")", ":", "ConsPerfForesightSolver", ".", "assignParameters", "(", "self", ",", "solution_next", ",", "DiscFac", ",", "LivPrb", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ")", "self", ".", "BoroCnstArt", "=", "BoroCnstArt", "self", ".", "IncomeDstn", "=", "IncomeDstn", "self", ".", "aXtraGrid", "=", "aXtraGrid", "self", ".", "vFuncBool", "=", "vFuncBool", "self", ".", "CubicBool", "=", "CubicBool" ]
r Check structure availability by attrkey and valuekey option .
def check_structure(self, keys):
    """Return True when ``keys`` contains nothing beyond the configured
    attrkey/valuekey option keys."""
    allowed = {self.__options['attrkey'], self.__options['valuekey']}
    return set(keys).issubset(allowed)
5,708
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/builder.py#L130-L133
[ "def", "render_pdf_file_to_image_files_pdftoppm_pgm", "(", "pdf_file_name", ",", "root_output_file_path", ",", "res_x", "=", "150", ",", "res_y", "=", "150", ")", ":", "comm_output", "=", "render_pdf_file_to_image_files_pdftoppm_ppm", "(", "pdf_file_name", ",", "root_output_file_path", ",", "res_x", ",", "res_y", ",", "[", "\"-gray\"", "]", ")", "return", "comm_output" ]
r Pick data from attrkey and valuekey option .
def pickdata(self, data):
    """Split ``data`` into ``(attrs, values)`` via the attrkey and
    valuekey options.

    Missing or falsy entries fall back to ``{}`` for attrs and ``''``
    for values.
    """
    attr_part = data.get(self.__options['attrkey'])
    value_part = data.get(self.__options['valuekey'])
    return (attr_part or {}, value_part or '')
5,709
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/builder.py#L135-L143
[ "def", "set_recovery_range", "(", "working_dir", ",", "start_block", ",", "end_block", ")", ":", "recovery_range_path", "=", "os", ".", "path", ".", "join", "(", "working_dir", ",", "'.recovery'", ")", "with", "open", "(", "recovery_range_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'{}\\n{}\\n'", ".", "format", "(", "start_block", ",", "end_block", ")", ")", "f", ".", "flush", "(", ")", "os", ".", "fsync", "(", "f", ".", "fileno", "(", ")", ")" ]
r Convert xml special chars to entities .
def safedata(self, data, cdata=True):
    """Wrap ``data`` in a CDATA section, or escape XML special chars
    to entities when ``cdata`` is false."""
    if cdata:
        return '<![CDATA[%s]]>' % data
    # NOTE(review): cgi.escape was removed in Python 3.8; this file
    # targets Python 2. quote=True also escapes double quotes.
    return cgi.escape(str(data), True)
5,710
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/builder.py#L145-L154
[ "def", "get_default_handler", "(", "self", ",", "*", "*", "kw", ")", ":", "gke_cluster_name", "=", "retrieve_metadata_server", "(", "_GKE_CLUSTER_NAME", ")", "if", "(", "_APPENGINE_FLEXIBLE_ENV_VM", "in", "os", ".", "environ", "or", "_APPENGINE_INSTANCE_ID", "in", "os", ".", "environ", ")", ":", "return", "AppEngineHandler", "(", "self", ",", "*", "*", "kw", ")", "elif", "gke_cluster_name", "is", "not", "None", ":", "return", "ContainerEngineHandler", "(", "*", "*", "kw", ")", "else", ":", "return", "CloudLoggingHandler", "(", "self", ",", "*", "*", "kw", ")" ]
r Build tag full info include the attributes .
def build_tag(self, tag, text='', attrs=None):
    """Build a complete tag: start tag (attributes included), text,
    end tag."""
    opening = self.tag_start(tag, attrs)
    closing = self.tag_end(tag)
    return '%s%s%s' % (opening, text, closing)
5,711
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/builder.py#L156-L165
[ "def", "update_sandbox_product", "(", "self", ",", "product_id", ",", "surge_multiplier", "=", "None", ",", "drivers_available", "=", "None", ",", ")", ":", "args", "=", "{", "'surge_multiplier'", ":", "surge_multiplier", ",", "'drivers_available'", ":", "drivers_available", ",", "}", "endpoint", "=", "'v1.2/sandbox/products/{}'", ".", "format", "(", "product_id", ")", "return", "self", ".", "_api_call", "(", "'PUT'", ",", "endpoint", ",", "args", "=", "args", ")" ]
r Build tag attributes .
def build_attr(self, attrs):
    """Render a dict of attributes as a ``key="value"`` string.

    :param attrs: mapping of attribute name -> value.
    :return: space-separated ``name="value"`` pairs, sorted by name
        for deterministic output.
    """
    # ``items()`` works on both Python 2 and 3; the original
    # ``iteritems()`` is Python-2-only and crashes on Python 3.
    pairs = sorted(attrs.items(), key=lambda x: x[0])
    return ' '.join('%s="%s"' % pair for pair in pairs)
5,712
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/builder.py#L167-L175
[ "def", "stop_experiment", "(", "args", ")", ":", "experiment_id_list", "=", "parse_ids", "(", "args", ")", "if", "experiment_id_list", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "for", "experiment_id", "in", "experiment_id_list", ":", "print_normal", "(", "'Stoping experiment %s'", "%", "experiment_id", ")", "nni_config", "=", "Config", "(", "experiment_dict", "[", "experiment_id", "]", "[", "'fileName'", "]", ")", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "rest_pid", ":", "kill_command", "(", "rest_pid", ")", "tensorboard_pid_list", "=", "nni_config", ".", "get_config", "(", "'tensorboardPidList'", ")", "if", "tensorboard_pid_list", ":", "for", "tensorboard_pid", "in", "tensorboard_pid_list", ":", "try", ":", "kill_command", "(", "tensorboard_pid", ")", "except", "Exception", "as", "exception", ":", "print_error", "(", "exception", ")", "nni_config", ".", "set_config", "(", "'tensorboardPidList'", ",", "[", "]", ")", "print_normal", "(", "'Stop experiment success!'", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'status'", ",", "'STOPPED'", ")", "time_now", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'endTime'", ",", "str", "(", "time_now", ")", ")" ]
r Build started tag info .
def tag_start(self, tag, attrs=None):
    """Build the opening tag, rendering attributes when present."""
    if attrs:
        return '<%s %s>' % (tag, self.build_attr(attrs))
    return '<%s>' % tag
5,713
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/builder.py#L177-L185
[ "def", "split", "(", "self", ",", "k", ")", ":", "if", "not", "1", "<=", "k", "<=", "self", ".", "num_rows", "-", "1", ":", "raise", "ValueError", "(", "\"Invalid value of k. k must be between 1 and the\"", "\"number of rows - 1\"", ")", "rows", "=", "np", ".", "random", ".", "permutation", "(", "self", ".", "num_rows", ")", "first", "=", "self", ".", "take", "(", "rows", "[", ":", "k", "]", ")", "rest", "=", "self", ".", "take", "(", "rows", "[", "k", ":", "]", ")", "for", "column_label", "in", "self", ".", "_formats", ":", "first", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "rest", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "return", "first", ",", "rest" ]
Opens a file dialog to get the path to a file and puts that path in the correct textbox
def open_file_dialog(self):
    """Open a directory picker and put the chosen path in the textbox
    paired with the button that emitted the signal.

    When the source button triggered the dialog, the available
    elements are also reloaded from the chosen folder.
    """
    dialog = QtWidgets.QFileDialog
    sender = self.sender()
    # Route the result to the textbox matching the clicked button.
    # NOTE(review): if the signal comes from any other widget,
    # ``textbox`` is unbound and the dialog call below raises NameError.
    if sender == self.btn_open_source:
        textbox = self.source_path
    elif sender == self.btn_open_target:
        textbox = self.target_path
    folder = dialog.getExistingDirectory(self, 'Select a file:', textbox.text(),
                                         options=QtWidgets.QFileDialog.ShowDirsOnly)
    if str(folder) != '':
        textbox.setText(folder)
        # load elements from file and display in tree
        if sender == self.btn_open_source:
            self.reset_avaliable(folder)
5,714
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/gui/windows_and_widgets/export_dialog.py#L69-L84
[ "def", "excluding", "(", "self", ",", "sequence", ")", "->", "Generator", ":", "return", "(", "element", "for", "element", "in", "sequence", "if", "self", ".", "indexer", "(", "element", ")", "not", "in", "self", ".", "predicates", ")" ]
Forces a reset if the class type is changed from instruments to scripts or vice versa
def class_type_changed(self):
    """Force a reset when the class type flips between instruments and
    scripts (or vice versa), provided a source path is set."""
    # Hoist the getter call: the original called ``text()`` twice.
    path = self.source_path.text()
    if path:
        self.reset_avaliable(path)
5,715
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/gui/windows_and_widgets/export_dialog.py#L105-L110
[ "async", "def", "receive", "(", "self", ",", "timeout", ":", "float", "=", "None", ")", "->", "Union", "[", "Message", ",", "None", "]", ":", "if", "timeout", ":", "coro", "=", "self", ".", "queue", ".", "get", "(", ")", "try", ":", "msg", "=", "await", "asyncio", ".", "wait_for", "(", "coro", ",", "timeout", "=", "timeout", ")", "except", "asyncio", ".", "TimeoutError", ":", "msg", "=", "None", "else", ":", "try", ":", "msg", "=", "self", ".", "queue", ".", "get_nowait", "(", ")", "except", "asyncio", ".", "QueueEmpty", ":", "msg", "=", "None", "return", "msg" ]
Finds appropriate UTXOs to include in the raw transaction while being careful to never spend old transactions with a lot of coin age. The amount argument is an integer; returns a list of appropriate UTXOs.
def select_inputs(self, address: str, amount: int) -> dict:
    """Select UTXOs of ``address`` to fund a raw transaction.

    UTXOs are consumed in ascending confirmation order (newest first)
    so that old outputs with a lot of coin age are spent last; outputs
    sitting on the P2TH tag addresses are never selected.

    :param address: address whose UTXOs are considered.
    :param amount: target amount to cover.
    :return: ``{'utxos': [MutableTxIn, ...], 'total': Decimal}`` once
        enough value is gathered.
    :raises InsufficientFunds: when the address cannot cover ``amount``.
    """
    utxos = []
    utxo_sum = Decimal(0)
    # Fewest confirmations first: prefer spending recent outputs.
    for tx in sorted(self.listunspent(address=address), key=itemgetter('confirmations')):
        # Skip outputs on the (test) P2TH addresses.
        if tx["address"] not in (self.pa_parameters.P2TH_addr, self.pa_parameters.test_P2TH_addr):
            utxos.append(MutableTxIn(txid=tx['txid'],
                                     txout=tx['vout'],
                                     sequence=Sequence.max(),
                                     script_sig=ScriptSig.empty()))
            utxo_sum += Decimal(tx["amount"])
            if utxo_sum >= amount:
                return {'utxos': utxos, 'total': utxo_sum}
    if utxo_sum < amount:
        raise InsufficientFunds("Insufficient funds.")
    # Unreachable in practice: the loop either returned or left
    # utxo_sum < amount.
    raise Exception("undefined behavior :.(")
5,716
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/provider/rpcnode.py#L21-L47
[ "def", "extension_sort_key", "(", "extension", ")", ":", "name", "=", "extension", ".", "name", "category", "=", "name", ".", "split", "(", "'_'", ",", "2", ")", "[", "1", "]", "return", "(", "0", ",", "name", ")", "if", "category", "in", "(", "'ARB'", ",", "'KHR'", ",", "'OES'", ")", "else", "(", "1", ",", "name", ")" ]
List UTXOs; a modified version that allows filtering by address.
def listunspent(self, address: str = "", minconf: int = 1, maxconf: int = 999999,) -> list:
    """List UTXOs; modified to allow filtering by a single address.

    :param address: restrict results to this address when non-empty.
    :param minconf: minimum confirmations.
    :param maxconf: maximum confirmations.
    :return: list of unspent transaction outputs from the RPC node.
    """
    params = [minconf, maxconf]
    if address:
        params.append([address])
    return self.req("listunspent", params)
5,717
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/provider/rpcnode.py#L67-L79
[ "async", "def", "_wait_exponentially", "(", "self", ",", "exception", ",", "max_wait_time", "=", "300", ")", ":", "wait_time", "=", "min", "(", "(", "2", "**", "self", ".", "_connection_attempts", ")", "+", "random", ".", "random", "(", ")", ",", "max_wait_time", ")", "try", ":", "wait_time", "=", "exception", ".", "response", "[", "\"headers\"", "]", "[", "\"Retry-After\"", "]", "except", "(", "KeyError", ",", "AttributeError", ")", ":", "pass", "self", ".", "_logger", ".", "debug", "(", "\"Waiting %s seconds before reconnecting.\"", ",", "wait_time", ")", "await", "asyncio", ".", "sleep", "(", "float", "(", "wait_time", ")", ")" ]
Command line tool to convert from Remind to iCalendar
def rem2ics():
    """Command line tool to convert from Remind to iCalendar.

    Parses command line options, runs the Remind input through
    :class:`Remind` and writes the resulting iCalendar to the output
    file (stdout by default). With infile ``-`` the Remind source is
    read from stdin instead of a file.
    """
    # pylint: disable=maybe-no-member
    from argparse import ArgumentParser, FileType
    from dateutil.parser import parse
    from sys import stdin, stdout
    parser = ArgumentParser(description='Converter from Remind to iCalendar syntax.')
    parser.add_argument('-s', '--startdate', type=lambda s: parse(s).date(),
                        default=date.today() - timedelta(weeks=12),
                        help='Start offset for remind call (default: -12 weeks)')
    # NOTE(review): "wit" below is a typo for "with" in runtime help
    # text; left untouched here.
    parser.add_argument('-m', '--month', type=int, default=15,
                        help='Number of month to generate calendar beginning wit startdate (default: 15)')
    parser.add_argument('-a', '--alarm', type=int, default=-10,
                        help='Trigger time for the alarm before the event in minutes (default: -10)')
    parser.add_argument('-z', '--zone',
                        help='Timezone of Remind file (default: local timezone)')
    parser.add_argument('infile', nargs='?', default=expanduser('~/.reminders'),
                        help='The Remind file to process (default: ~/.reminders)')
    parser.add_argument('outfile', nargs='?', type=FileType('w'), default=stdout,
                        help='Output iCalendar file (default: stdout)')
    args = parser.parse_args()
    zone = timezone(args.zone) if args.zone else None
    if args.infile == '-':
        # stdin mode: only write output when parsing produced something.
        remind = Remind(args.infile, zone, args.startdate, args.month, timedelta(minutes=args.alarm))
        vobject = remind.stdin_to_vobject(stdin.read())
        if vobject:
            args.outfile.write(vobject.serialize())
    else:
        remind = Remind(args.infile, zone, args.startdate, args.month, timedelta(minutes=args.alarm))
        args.outfile.write(remind.to_vobject().serialize())
5,718
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L594-L626
[ "def", "setup_contracts_or_exit", "(", "config", ":", "Dict", "[", "str", ",", "Any", "]", ",", "network_id", ":", "int", ",", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "environment_type", "=", "config", "[", "'environment_type'", "]", "not_allowed", "=", "(", "# for now we only disallow mainnet with test configuration", "network_id", "==", "1", "and", "environment_type", "==", "Environment", ".", "DEVELOPMENT", ")", "if", "not_allowed", ":", "click", ".", "secho", "(", "f'The chosen network ({ID_TO_NETWORKNAME[network_id]}) is not a testnet, '", "f'but the \"development\" environment was selected.\\n'", "f'This is not allowed. Please start again with a safe environment setting '", "f'(--environment production).'", ",", "fg", "=", "'red'", ",", ")", "sys", ".", "exit", "(", "1", ")", "contracts", "=", "dict", "(", ")", "contracts_version", "=", "environment_type_to_contracts_version", "(", "environment_type", ")", "config", "[", "'contracts_path'", "]", "=", "contracts_precompiled_path", "(", "contracts_version", ")", "if", "network_id", "in", "ID_TO_NETWORKNAME", "and", "ID_TO_NETWORKNAME", "[", "network_id", "]", "!=", "'smoketest'", ":", "try", ":", "deployment_data", "=", "get_contracts_deployment_info", "(", "chain_id", "=", "network_id", ",", "version", "=", "contracts_version", ",", ")", "except", "ValueError", ":", "return", "contracts", ",", "False", "contracts", "=", "deployment_data", "[", "'contracts'", "]", "return", "contracts" ]
Command line tool to convert from iCalendar to Remind
def ics2rem():
    """Command line tool to convert from iCalendar to Remind.

    Reads an iCalendar file (stdin by default), converts it with
    :class:`Remind` and writes Remind syntax to the output file
    (stdout by default).
    """
    from argparse import ArgumentParser, FileType
    from sys import stdin, stdout
    parser = ArgumentParser(description='Converter from iCalendar to Remind syntax.')
    parser.add_argument('-l', '--label', help='Label for every Remind entry')
    parser.add_argument('-p', '--priority', type=int,
                        help='Priority for every Remind entry (0..9999)')
    parser.add_argument('-t', '--tag', action='append', help='Tag(s) for every Remind entry')
    parser.add_argument('--tail', help='Text to append to every remind summary, following final %%"')
    parser.add_argument('--sep', default=" ",
                        help='String to separate summary (and tail) from description')
    parser.add_argument('--postdate',
                        help='String to follow the date in every Remind entry. '
                        'Useful for entering "back" and "delta" fields (see man remind).')
    parser.add_argument('--posttime',
                        help='String to follow the time in every timed Remind entry. '
                        'Useful for entering "tdelta" and "trepeat" fields (see man remind).')
    parser.add_argument('-z', '--zone', help='Timezone of Remind file (default: local timezone)')
    parser.add_argument('infile', nargs='?', type=FileType('r'), default=stdin,
                        help='Input iCalendar file (default: stdin)')
    parser.add_argument('outfile', nargs='?', type=FileType('w'), default=stdout,
                        help='Output Remind file (default: stdout)')
    args = parser.parse_args()
    zone = timezone(args.zone) if args.zone else None
    vobject = readOne(args.infile.read())
    rem = Remind(localtz=zone).to_reminders(
        vobject, args.label, args.priority, args.tag, args.tail,
        args.sep, args.postdate, args.posttime)
    args.outfile.write(rem)
5,719
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L629-L664
[ "def", "setup_contracts_or_exit", "(", "config", ":", "Dict", "[", "str", ",", "Any", "]", ",", "network_id", ":", "int", ",", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "environment_type", "=", "config", "[", "'environment_type'", "]", "not_allowed", "=", "(", "# for now we only disallow mainnet with test configuration", "network_id", "==", "1", "and", "environment_type", "==", "Environment", ".", "DEVELOPMENT", ")", "if", "not_allowed", ":", "click", ".", "secho", "(", "f'The chosen network ({ID_TO_NETWORKNAME[network_id]}) is not a testnet, '", "f'but the \"development\" environment was selected.\\n'", "f'This is not allowed. Please start again with a safe environment setting '", "f'(--environment production).'", ",", "fg", "=", "'red'", ",", ")", "sys", ".", "exit", "(", "1", ")", "contracts", "=", "dict", "(", ")", "contracts_version", "=", "environment_type_to_contracts_version", "(", "environment_type", ")", "config", "[", "'contracts_path'", "]", "=", "contracts_precompiled_path", "(", "contracts_version", ")", "if", "network_id", "in", "ID_TO_NETWORKNAME", "and", "ID_TO_NETWORKNAME", "[", "network_id", "]", "!=", "'smoketest'", ":", "try", ":", "deployment_data", "=", "get_contracts_deployment_info", "(", "chain_id", "=", "network_id", ",", "version", "=", "contracts_version", ",", ")", "except", "ValueError", ":", "return", "contracts", ",", "False", "contracts", "=", "deployment_data", "[", "'contracts'", "]", "return", "contracts" ]
Calls remind and parses the output into a dict
def _parse_remind(self, filename, lines=''):
    """Call remind(1) and parse its output into a dict.

    :param filename: Remind file to process; replaced by ``'-'`` when
        ``lines`` is given so remind reads from stdin.
    :param lines: optional Remind source to feed via stdin.
    :return: ``{source_filename: {uid: event_dict}}``; also updates
        ``self._mtime`` with the newest mtime of all involved files.
    :raises OSError: when the remind binary cannot be run.
    """
    files = {}
    reminders = {}
    if lines:
        # Explicit input: remind reads from stdin ('-').
        filename = '-'
        files[filename] = lines
        reminders[filename] = {}
    cmd = ['remind', '-l', '-s%d' % self._month, '-b1', '-y', '-r', filename, str(self._startdate)]
    try:
        rem = Popen(cmd, stdin=PIPE, stdout=PIPE).communicate(input=lines.encode('utf-8'))[0].decode('utf-8')
    except OSError:
        raise OSError('Error running: %s' % ' '.join(cmd))
    rem = rem.splitlines()
    # remind -l output comes in pairs: a file/position info line, then
    # the event line itself.
    for (fileinfo, line) in zip(rem[::2], rem[1::2]):
        fileinfo = fileinfo.split()
        src_filename = fileinfo[3]
        if src_filename not in files:
            # There is a race condition with the remind call above here.
            # This could be solved by parsing the remind -de output,
            # but I don't see an easy way to do that.
            files[src_filename] = open(src_filename).readlines()
            reminders[src_filename] = {}
            mtime = getmtime(src_filename)
            if mtime > self._mtime:
                self._mtime = mtime
        # fileinfo[2] is the 1-based line number in the source file.
        text = files[src_filename][int(fileinfo[2]) - 1]
        event = self._parse_remind_line(line, text)
        if event['uid'] in reminders[src_filename]:
            # Recurring event: merge additional start dates into the
            # already-parsed entry.
            reminders[src_filename][event['uid']]['dtstart'] += event['dtstart']
            reminders[src_filename][event['uid']]['line'] += line
        else:
            reminders[src_filename][event['uid']] = event
            reminders[src_filename][event['uid']]['line'] = line
    # Find included files without reminders and add them to the file list
    for source in files.values():
        for line in source:
            if line.startswith('include'):
                new_file = line.split(' ')[1].strip()
                if new_file not in reminders:
                    reminders[new_file] = {}
                    mtime = getmtime(new_file)
                    if mtime > self._mtime:
                        self._mtime = mtime
    return reminders
5,720
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L54-L109
[ "def", "_GetPathSegmentIndexForValueWeights", "(", "self", ",", "value_weights", ")", ":", "largest_weight", "=", "value_weights", ".", "GetLargestWeight", "(", ")", "if", "largest_weight", ">", "0", ":", "value_weight_indexes", "=", "value_weights", ".", "GetIndexesForWeight", "(", "largest_weight", ")", "else", ":", "value_weight_indexes", "=", "[", "]", "if", "value_weight_indexes", ":", "path_segment_index", "=", "value_weight_indexes", "[", "0", "]", "else", ":", "path_segment_index", "=", "value_weights", ".", "GetFirstAvailableIndex", "(", ")", "if", "path_segment_index", "is", "None", ":", "raise", "RuntimeError", "(", "'No path segment index found.'", ")", "return", "path_segment_index" ]
Parse a line of remind output into a dict
def _parse_remind_line ( self , line , text ) : event = { } line = line . split ( None , 6 ) dat = [ int ( f ) for f in line [ 0 ] . split ( '/' ) ] if line [ 4 ] != '*' : start = divmod ( int ( line [ 4 ] ) , 60 ) event [ 'dtstart' ] = [ datetime ( dat [ 0 ] , dat [ 1 ] , dat [ 2 ] , start [ 0 ] , start [ 1 ] , tzinfo = self . _localtz ) ] if line [ 3 ] != '*' : event [ 'duration' ] = timedelta ( minutes = int ( line [ 3 ] ) ) else : event [ 'dtstart' ] = [ date ( dat [ 0 ] , dat [ 1 ] , dat [ 2 ] ) ] msg = ' ' . join ( line [ 5 : ] ) if line [ 4 ] == '*' else line [ 6 ] msg = msg . strip ( ) . replace ( '%_' , '\n' ) . replace ( '["["]' , '[' ) if ' at ' in msg : ( event [ 'msg' ] , event [ 'location' ] ) = msg . rsplit ( ' at ' , 1 ) else : event [ 'msg' ] = msg if '%"' in text : event [ 'description' ] = Remind . _gen_description ( text ) tags = line [ 2 ] . split ( ',' ) classes = [ 'PUBLIC' , 'PRIVATE' , 'CONFIDENTIAL' ] for tag in tags [ : - 1 ] : if tag in classes : event [ 'class' ] = tag event [ 'categories' ] = [ tag for tag in tags [ : - 1 ] if tag not in classes ] event [ 'uid' ] = '%s@%s' % ( tags [ - 1 ] [ 7 : ] , getfqdn ( ) ) return event
5,721
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L118-L158
[ "def", "_GetPathSegmentIndexForSimilarityWeights", "(", "self", ",", "similarity_weights", ",", "occurrence_weights", ",", "value_weights", ")", ":", "largest_weight", "=", "similarity_weights", ".", "GetLargestWeight", "(", ")", "if", "largest_weight", ">", "0", ":", "similarity_weight_indexes", "=", "similarity_weights", ".", "GetIndexesForWeight", "(", "largest_weight", ")", "number_of_similarity_indexes", "=", "len", "(", "similarity_weight_indexes", ")", "else", ":", "number_of_similarity_indexes", "=", "0", "path_segment_index", "=", "None", "if", "number_of_similarity_indexes", "==", "0", ":", "path_segment_index", "=", "self", ".", "_GetPathSegmentIndexForOccurrenceWeights", "(", "occurrence_weights", ",", "value_weights", ")", "elif", "number_of_similarity_indexes", "==", "1", ":", "path_segment_index", "=", "similarity_weight_indexes", "[", "0", "]", "else", ":", "largest_weight", "=", "0", "largest_value_weight", "=", "0", "for", "similarity_index", "in", "similarity_weight_indexes", ":", "occurrence_weight", "=", "occurrence_weights", ".", "GetWeightForIndex", "(", "similarity_index", ")", "if", "largest_weight", ">", "0", "and", "largest_weight", "==", "occurrence_weight", ":", "value_weight", "=", "value_weights", ".", "GetWeightForIndex", "(", "similarity_index", ")", "if", "largest_value_weight", "<", "value_weight", ":", "largest_weight", "=", "0", "if", "not", "path_segment_index", "or", "largest_weight", "<", "occurrence_weight", ":", "largest_weight", "=", "occurrence_weight", "path_segment_index", "=", "similarity_index", "largest_value_weight", "=", "value_weights", ".", "GetWeightForIndex", "(", "similarity_index", ")", "return", "path_segment_index" ]
Return the distance between all dates and 0 if they are different
def _interval ( dates ) : interval = ( dates [ 1 ] - dates [ 0 ] ) . days last = dates [ 0 ] for dat in dates [ 1 : ] : if ( dat - last ) . days != interval : return 0 last = dat return interval
5,722
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L161-L169
[ "def", "cli", "(", "env", ",", "identifier", ",", "crt", ",", "csr", ",", "icc", ",", "key", ",", "notes", ")", ":", "template", "=", "{", "'id'", ":", "identifier", "}", "if", "crt", ":", "template", "[", "'certificate'", "]", "=", "open", "(", "crt", ")", ".", "read", "(", ")", "if", "key", ":", "template", "[", "'privateKey'", "]", "=", "open", "(", "key", ")", ".", "read", "(", ")", "if", "csr", ":", "template", "[", "'certificateSigningRequest'", "]", "=", "open", "(", "csr", ")", ".", "read", "(", ")", "if", "icc", ":", "template", "[", "'intermediateCertificate'", "]", "=", "open", "(", "icc", ")", ".", "read", "(", ")", "if", "notes", ":", "template", "[", "'notes'", "]", "=", "notes", "manager", "=", "SoftLayer", ".", "SSLManager", "(", "env", ".", "client", ")", "manager", ".", "edit_certificate", "(", "template", ")" ]
Generate an rdate or rrule from a list of dates and add it to the vevent
def _gen_dtend_rrule ( dtstarts , vevent ) : interval = Remind . _interval ( dtstarts ) if interval > 0 and interval % 7 == 0 : rset = rrule . rruleset ( ) rset . rrule ( rrule . rrule ( freq = rrule . WEEKLY , interval = interval // 7 , count = len ( dtstarts ) ) ) vevent . rruleset = rset elif interval > 1 : rset = rrule . rruleset ( ) rset . rrule ( rrule . rrule ( freq = rrule . DAILY , interval = interval , count = len ( dtstarts ) ) ) vevent . rruleset = rset elif interval > 0 : if isinstance ( dtstarts [ 0 ] , datetime ) : rset = rrule . rruleset ( ) rset . rrule ( rrule . rrule ( freq = rrule . DAILY , count = len ( dtstarts ) ) ) vevent . rruleset = rset else : vevent . add ( 'dtend' ) . value = dtstarts [ - 1 ] + timedelta ( days = 1 ) else : rset = rrule . rruleset ( ) if isinstance ( dtstarts [ 0 ] , datetime ) : for dat in dtstarts : rset . rdate ( dat ) else : for dat in dtstarts : rset . rdate ( datetime ( dat . year , dat . month , dat . day ) ) # temporary set dtstart to a different date, so it's not # removed from rset by python-vobject works around bug in # Android: # https://github.com/rfc2822/davdroid/issues/340 vevent . dtstart . value = dtstarts [ 0 ] - timedelta ( days = 1 ) vevent . rruleset = rset vevent . dtstart . value = dtstarts [ 0 ] if not isinstance ( dtstarts [ 0 ] , datetime ) : vevent . add ( 'dtend' ) . value = dtstarts [ 0 ] + timedelta ( days = 1 )
5,723
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L172-L206
[ "def", "teardown", "(", "cluster_config_file", ",", "yes", ",", "workers_only", ",", "cluster_name", ")", ":", "teardown_cluster", "(", "cluster_config_file", ",", "yes", ",", "workers_only", ",", "cluster_name", ")" ]
Generate vevent from given event
def _gen_vevent ( self , event , vevent ) : vevent . add ( 'dtstart' ) . value = event [ 'dtstart' ] [ 0 ] vevent . add ( 'dtstamp' ) . value = datetime . fromtimestamp ( self . _mtime ) vevent . add ( 'summary' ) . value = event [ 'msg' ] vevent . add ( 'uid' ) . value = event [ 'uid' ] if 'class' in event : vevent . add ( 'class' ) . value = event [ 'class' ] if 'categories' in event and len ( event [ 'categories' ] ) > 0 : vevent . add ( 'categories' ) . value = event [ 'categories' ] if 'location' in event : vevent . add ( 'location' ) . value = event [ 'location' ] if 'description' in event : vevent . add ( 'description' ) . value = event [ 'description' ] if isinstance ( event [ 'dtstart' ] [ 0 ] , datetime ) : if self . _alarm != timedelta ( ) : valarm = vevent . add ( 'valarm' ) valarm . add ( 'trigger' ) . value = self . _alarm valarm . add ( 'action' ) . value = 'DISPLAY' valarm . add ( 'description' ) . value = event [ 'msg' ] if 'duration' in event : vevent . add ( 'duration' ) . value = event [ 'duration' ] else : vevent . add ( 'dtend' ) . value = event [ 'dtstart' ] [ 0 ] elif len ( event [ 'dtstart' ] ) == 1 : vevent . add ( 'dtend' ) . value = event [ 'dtstart' ] [ 0 ] + timedelta ( days = 1 ) if len ( event [ 'dtstart' ] ) > 1 : Remind . _gen_dtend_rrule ( event [ 'dtstart' ] , vevent )
5,724
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L208-L243
[ "def", "generate_http_manifest", "(", "self", ")", ":", "base_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "translate_path", "(", "self", ".", "path", ")", ")", "self", ".", "dataset", "=", "dtoolcore", ".", "DataSet", ".", "from_uri", "(", "base_path", ")", "admin_metadata_fpath", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\".dtool\"", ",", "\"dtool\"", ")", "with", "open", "(", "admin_metadata_fpath", ")", "as", "fh", ":", "admin_metadata", "=", "json", ".", "load", "(", "fh", ")", "http_manifest", "=", "{", "\"admin_metadata\"", ":", "admin_metadata", ",", "\"manifest_url\"", ":", "self", ".", "generate_url", "(", "\".dtool/manifest.json\"", ")", ",", "\"readme_url\"", ":", "self", ".", "generate_url", "(", "\"README.yml\"", ")", ",", "\"overlays\"", ":", "self", ".", "generate_overlay_urls", "(", ")", ",", "\"item_urls\"", ":", "self", ".", "generate_item_urls", "(", ")", "}", "return", "bytes", "(", "json", ".", "dumps", "(", "http_manifest", ")", ",", "\"utf-8\"", ")" ]
Reload Remind files if the mtime is newer
def _update ( self ) : update = not self . _reminders with self . _lock : for fname in self . _reminders : if getmtime ( fname ) > self . _mtime : update = True break if update : self . _reminders = self . _parse_remind ( self . _filename )
5,725
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L245-L256
[ "def", "reset", "(", "self", ")", ":", "status", "=", "self", ".", "m_objPCANBasic", ".", "Reset", "(", "self", ".", "m_PcanHandle", ")", "return", "status", "==", "PCAN_ERROR_OK" ]
UIDs of all reminders in the file excluding included files If a filename is specified only it s UIDs are return otherwise all .
def get_uids ( self , filename = None ) : self . _update ( ) if filename : if filename not in self . _reminders : return [ ] return self . _reminders [ filename ] . keys ( ) return [ uid for uids in self . _reminders . values ( ) for uid in uids ]
5,726
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L268-L280
[ "def", "color_lerp", "(", "c1", ":", "Tuple", "[", "int", ",", "int", ",", "int", "]", ",", "c2", ":", "Tuple", "[", "int", ",", "int", ",", "int", "]", ",", "a", ":", "float", ")", "->", "Color", ":", "return", "Color", ".", "_new_from_cdata", "(", "lib", ".", "TCOD_color_lerp", "(", "c1", ",", "c2", ",", "a", ")", ")" ]
Return iCal objects and etags of all Remind entries in uids
def to_vobjects ( self , filename , uids = None ) : self . _update ( ) if not uids : uids = self . _reminders [ filename ] items = [ ] for uid in uids : cal = iCalendar ( ) self . _gen_vevent ( self . _reminders [ filename ] [ uid ] , cal . add ( 'vevent' ) ) etag = md5 ( ) etag . update ( self . _reminders [ filename ] [ uid ] [ 'line' ] . encode ( "utf-8" ) ) items . append ( ( uid , cal , '"%s"' % etag . hexdigest ( ) ) ) return items
5,727
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L290-L309
[ "def", "setup_contracts_or_exit", "(", "config", ":", "Dict", "[", "str", ",", "Any", "]", ",", "network_id", ":", "int", ",", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "environment_type", "=", "config", "[", "'environment_type'", "]", "not_allowed", "=", "(", "# for now we only disallow mainnet with test configuration", "network_id", "==", "1", "and", "environment_type", "==", "Environment", ".", "DEVELOPMENT", ")", "if", "not_allowed", ":", "click", ".", "secho", "(", "f'The chosen network ({ID_TO_NETWORKNAME[network_id]}) is not a testnet, '", "f'but the \"development\" environment was selected.\\n'", "f'This is not allowed. Please start again with a safe environment setting '", "f'(--environment production).'", ",", "fg", "=", "'red'", ",", ")", "sys", ".", "exit", "(", "1", ")", "contracts", "=", "dict", "(", ")", "contracts_version", "=", "environment_type_to_contracts_version", "(", "environment_type", ")", "config", "[", "'contracts_path'", "]", "=", "contracts_precompiled_path", "(", "contracts_version", ")", "if", "network_id", "in", "ID_TO_NETWORKNAME", "and", "ID_TO_NETWORKNAME", "[", "network_id", "]", "!=", "'smoketest'", ":", "try", ":", "deployment_data", "=", "get_contracts_deployment_info", "(", "chain_id", "=", "network_id", ",", "version", "=", "contracts_version", ",", ")", "except", "ValueError", ":", "return", "contracts", ",", "False", "contracts", "=", "deployment_data", "[", "'contracts'", "]", "return", "contracts" ]
Return iCal object of Remind lines If filename and UID are specified the vObject only contains that event . If only a filename is specified the vObject contains all events in the file . Otherwise the vObject contains all all objects of all files associated with the Remind object .
def to_vobject ( self , filename = None , uid = None ) : self . _update ( ) cal = iCalendar ( ) if uid : self . _gen_vevent ( self . _reminders [ filename ] [ uid ] , cal . add ( 'vevent' ) ) elif filename : for event in self . _reminders [ filename ] . values ( ) : self . _gen_vevent ( event , cal . add ( 'vevent' ) ) else : for filename in self . _reminders : for event in self . _reminders [ filename ] . values ( ) : self . _gen_vevent ( event , cal . add ( 'vevent' ) ) return cal
5,728
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L311-L332
[ "def", "load_diagonal", "(", "cov", ",", "load", "=", "None", ")", ":", "n", ",", "m", "=", "cov", ".", "shape", "assert", "n", "==", "m", ",", "\"matrix must be square, but found shape {}\"", ".", "format", "(", "(", "n", ",", "m", ")", ")", "if", "load", "is", "None", ":", "load", "=", "np", ".", "sqrt", "(", "np", ".", "finfo", "(", "np", ".", "float64", ")", ".", "eps", ")", "# machine epsilon", "return", "cov", "+", "np", ".", "eye", "(", "n", ")", "*", "load" ]
Return iCal object of the Remind commands in lines
def stdin_to_vobject ( self , lines ) : cal = iCalendar ( ) for event in self . _parse_remind ( '-' , lines ) [ '-' ] . values ( ) : self . _gen_vevent ( event , cal . add ( 'vevent' ) ) return cal
5,729
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L334-L339
[ "def", "handle_transport_fail", "(", "self", ",", "exception", "=", "None", ",", "*", "*", "kwargs", ")", ":", "message", "=", "str", "(", "exception", ")", "logger", ".", "error", "(", "\"Failed to submit message: %r\"", ",", "message", ",", "exc_info", "=", "getattr", "(", "exception", ",", "\"print_trace\"", ",", "True", ")", ")", "self", ".", "state", ".", "set_fail", "(", ")" ]
Convert from iCal rrule to Remind recurrence syntax
def _parse_rruleset ( rruleset ) : # pylint: disable=protected-access if rruleset . _rrule [ 0 ] . _freq == 0 : return [ ] rep = [ ] if rruleset . _rrule [ 0 ] . _byweekday and len ( rruleset . _rrule [ 0 ] . _byweekday ) > 1 : rep . append ( '*1' ) elif rruleset . _rrule [ 0 ] . _freq == rrule . DAILY : rep . append ( '*%d' % rruleset . _rrule [ 0 ] . _interval ) elif rruleset . _rrule [ 0 ] . _freq == rrule . WEEKLY : rep . append ( '*%d' % ( 7 * rruleset . _rrule [ 0 ] . _interval ) ) else : return Remind . _parse_rdate ( rruleset . _rrule [ 0 ] ) if rruleset . _rrule [ 0 ] . _byweekday and len ( rruleset . _rrule [ 0 ] . _byweekday ) > 1 : daynums = set ( range ( 7 ) ) - set ( rruleset . _rrule [ 0 ] . _byweekday ) weekdays = [ 'Mon' , 'Tue' , 'Wed' , 'Thu' , 'Fri' , 'Sat' , 'Sun' ] days = [ weekdays [ day ] for day in daynums ] rep . append ( 'SKIP OMIT %s' % ' ' . join ( days ) ) if rruleset . _rrule [ 0 ] . _until : rep . append ( rruleset . _rrule [ 0 ] . _until . strftime ( 'UNTIL %b %d %Y' ) . replace ( ' 0' , ' ' ) ) elif rruleset . _rrule [ 0 ] . _count : rep . append ( rruleset [ - 1 ] . strftime ( 'UNTIL %b %d %Y' ) . replace ( ' 0' , ' ' ) ) return rep
5,730
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L348-L376
[ "def", "setup_contracts_or_exit", "(", "config", ":", "Dict", "[", "str", ",", "Any", "]", ",", "network_id", ":", "int", ",", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "environment_type", "=", "config", "[", "'environment_type'", "]", "not_allowed", "=", "(", "# for now we only disallow mainnet with test configuration", "network_id", "==", "1", "and", "environment_type", "==", "Environment", ".", "DEVELOPMENT", ")", "if", "not_allowed", ":", "click", ".", "secho", "(", "f'The chosen network ({ID_TO_NETWORKNAME[network_id]}) is not a testnet, '", "f'but the \"development\" environment was selected.\\n'", "f'This is not allowed. Please start again with a safe environment setting '", "f'(--environment production).'", ",", "fg", "=", "'red'", ",", ")", "sys", ".", "exit", "(", "1", ")", "contracts", "=", "dict", "(", ")", "contracts_version", "=", "environment_type_to_contracts_version", "(", "environment_type", ")", "config", "[", "'contracts_path'", "]", "=", "contracts_precompiled_path", "(", "contracts_version", ")", "if", "network_id", "in", "ID_TO_NETWORKNAME", "and", "ID_TO_NETWORKNAME", "[", "network_id", "]", "!=", "'smoketest'", ":", "try", ":", "deployment_data", "=", "get_contracts_deployment_info", "(", "chain_id", "=", "network_id", ",", "version", "=", "contracts_version", ",", ")", "except", "ValueError", ":", "return", "contracts", ",", "False", "contracts", "=", "deployment_data", "[", "'contracts'", "]", "return", "contracts" ]
unify dtend and duration to the duration of the given vevent
def _event_duration ( vevent ) : if hasattr ( vevent , 'dtend' ) : return vevent . dtend . value - vevent . dtstart . value elif hasattr ( vevent , 'duration' ) and vevent . duration . value : return vevent . duration . value return timedelta ( 0 )
5,731
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L379-L385
[ "def", "_client_allowed", "(", "self", ")", ":", "client_ip", "=", "self", ".", "_client_address", "[", "0", "]", "if", "not", "client_ip", "in", "self", ".", "_settings", ".", "allowed_clients", "and", "not", "'ALL'", "in", "self", ".", "_settings", ".", "allowed_clients", ":", "content", "=", "'Access from host {} forbidden.'", ".", "format", "(", "client_ip", ")", ".", "encode", "(", "'utf-8'", ")", "self", ".", "_send_content", "(", "content", ",", "'text/html'", ")", "return", "False", "return", "True" ]
Generate a Remind command from the given vevent
def to_remind ( self , vevent , label = None , priority = None , tags = None , tail = None , sep = " " , postdate = None , posttime = None ) : remind = [ 'REM' ] trigdates = None if hasattr ( vevent , 'rrule' ) : trigdates = Remind . _parse_rruleset ( vevent . rruleset ) dtstart = vevent . dtstart . value # If we don't get timezone information, handle it as a naive datetime. # See https://github.com/jspricke/python-remind/issues/2 for reference. if isinstance ( dtstart , datetime ) and dtstart . tzinfo : dtstart = dtstart . astimezone ( self . _localtz ) dtend = None if hasattr ( vevent , 'dtend' ) : dtend = vevent . dtend . value if isinstance ( dtend , datetime ) and dtend . tzinfo : dtend = dtend . astimezone ( self . _localtz ) if not hasattr ( vevent , 'rdate' ) and not isinstance ( trigdates , str ) : remind . append ( dtstart . strftime ( '%b %d %Y' ) . replace ( ' 0' , ' ' ) ) if postdate : remind . append ( postdate ) if priority : remind . append ( 'PRIORITY %s' % priority ) if isinstance ( trigdates , list ) : remind . extend ( trigdates ) duration = Remind . _event_duration ( vevent ) if type ( dtstart ) is date and duration . days > 1 : remind . append ( '*1' ) if dtend is not None : dtend -= timedelta ( days = 1 ) remind . append ( dtend . strftime ( 'UNTIL %b %d %Y' ) . replace ( ' 0' , ' ' ) ) if isinstance ( dtstart , datetime ) : remind . append ( dtstart . strftime ( 'AT %H:%M' ) . replace ( ' 0' , ' ' ) ) if posttime : remind . append ( posttime ) if duration . total_seconds ( ) > 0 : remind . append ( 'DURATION %d:%02d' % divmod ( duration . total_seconds ( ) / 60 , 60 ) ) if hasattr ( vevent , 'rdate' ) : remind . append ( Remind . _parse_rdate ( vevent . rdate . value ) ) elif isinstance ( trigdates , str ) : remind . append ( trigdates ) if hasattr ( vevent , 'class' ) : remind . append ( 'TAG %s' % Remind . _abbr_tag ( vevent . getChildValue ( 'class' ) ) ) if tags : remind . extend ( [ 'TAG %s' % Remind . 
_abbr_tag ( tag ) for tag in tags ] ) if hasattr ( vevent , 'categories_list' ) : for categories in vevent . categories_list : for category in categories . value : remind . append ( 'TAG %s' % Remind . _abbr_tag ( category ) ) remind . append ( Remind . _gen_msg ( vevent , label , tail , sep ) ) return ' ' . join ( remind ) + '\n'
5,732
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L431-L499
[ "def", "detect_images_and_galleries", "(", "generators", ")", ":", "for", "generator", "in", "generators", ":", "if", "isinstance", "(", "generator", ",", "ArticlesGenerator", ")", ":", "for", "article", "in", "itertools", ".", "chain", "(", "generator", ".", "articles", ",", "generator", ".", "translations", ",", "generator", ".", "drafts", ")", ":", "detect_image", "(", "generator", ",", "article", ")", "detect_gallery", "(", "generator", ",", "article", ")", "elif", "isinstance", "(", "generator", ",", "PagesGenerator", ")", ":", "for", "page", "in", "itertools", ".", "chain", "(", "generator", ".", "pages", ",", "generator", ".", "translations", ",", "generator", ".", "hidden_pages", ")", ":", "detect_image", "(", "generator", ",", "page", ")", "detect_gallery", "(", "generator", ",", "page", ")" ]
Return Remind commands for all events of a iCalendar
def to_reminders ( self , ical , label = None , priority = None , tags = None , tail = None , sep = " " , postdate = None , posttime = None ) : if not hasattr ( ical , 'vevent_list' ) : return '' reminders = [ self . to_remind ( vevent , label , priority , tags , tail , sep , postdate , posttime ) for vevent in ical . vevent_list ] return '' . join ( reminders )
5,733
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L501-L510
[ "def", "save", "(", "self", ")", ":", "data", "=", "self", ".", "get_selected_item", "(", ")", "if", "'saved'", "not", "in", "data", ":", "self", ".", "term", ".", "flash", "(", ")", "elif", "not", "data", "[", "'saved'", "]", ":", "with", "self", ".", "term", ".", "loader", "(", "'Saving'", ")", ":", "data", "[", "'object'", "]", ".", "save", "(", ")", "if", "not", "self", ".", "term", ".", "loader", ".", "exception", ":", "data", "[", "'saved'", "]", "=", "True", "else", ":", "with", "self", ".", "term", ".", "loader", "(", "'Unsaving'", ")", ":", "data", "[", "'object'", "]", ".", "unsave", "(", ")", "if", "not", "self", ".", "term", ".", "loader", ".", "exception", ":", "data", "[", "'saved'", "]", "=", "False" ]
Append a Remind command generated from the iCalendar to the file
def append_vobject ( self , ical , filename = None ) : if not filename : filename = self . _filename elif filename not in self . _reminders : return with self . _lock : outdat = self . to_reminders ( ical ) open ( filename , 'a' ) . write ( outdat ) return Remind . _get_uid ( outdat )
5,734
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L516-L527
[ "def", "setup_contracts_or_exit", "(", "config", ":", "Dict", "[", "str", ",", "Any", "]", ",", "network_id", ":", "int", ",", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "environment_type", "=", "config", "[", "'environment_type'", "]", "not_allowed", "=", "(", "# for now we only disallow mainnet with test configuration", "network_id", "==", "1", "and", "environment_type", "==", "Environment", ".", "DEVELOPMENT", ")", "if", "not_allowed", ":", "click", ".", "secho", "(", "f'The chosen network ({ID_TO_NETWORKNAME[network_id]}) is not a testnet, '", "f'but the \"development\" environment was selected.\\n'", "f'This is not allowed. Please start again with a safe environment setting '", "f'(--environment production).'", ",", "fg", "=", "'red'", ",", ")", "sys", ".", "exit", "(", "1", ")", "contracts", "=", "dict", "(", ")", "contracts_version", "=", "environment_type_to_contracts_version", "(", "environment_type", ")", "config", "[", "'contracts_path'", "]", "=", "contracts_precompiled_path", "(", "contracts_version", ")", "if", "network_id", "in", "ID_TO_NETWORKNAME", "and", "ID_TO_NETWORKNAME", "[", "network_id", "]", "!=", "'smoketest'", ":", "try", ":", "deployment_data", "=", "get_contracts_deployment_info", "(", "chain_id", "=", "network_id", ",", "version", "=", "contracts_version", ",", ")", "except", "ValueError", ":", "return", "contracts", ",", "False", "contracts", "=", "deployment_data", "[", "'contracts'", "]", "return", "contracts" ]
Remove the Remind command with the uid from the file
def remove ( self , uid , filename = None ) : if not filename : filename = self . _filename elif filename not in self . _reminders : return uid = uid . split ( '@' ) [ 0 ] with self . _lock : rem = open ( filename ) . readlines ( ) for ( index , line ) in enumerate ( rem ) : if uid == md5 ( line [ : - 1 ] . encode ( 'utf-8' ) ) . hexdigest ( ) : del rem [ index ] open ( filename , 'w' ) . writelines ( rem ) break
5,735
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L529-L544
[ "def", "setup_contracts_or_exit", "(", "config", ":", "Dict", "[", "str", ",", "Any", "]", ",", "network_id", ":", "int", ",", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "environment_type", "=", "config", "[", "'environment_type'", "]", "not_allowed", "=", "(", "# for now we only disallow mainnet with test configuration", "network_id", "==", "1", "and", "environment_type", "==", "Environment", ".", "DEVELOPMENT", ")", "if", "not_allowed", ":", "click", ".", "secho", "(", "f'The chosen network ({ID_TO_NETWORKNAME[network_id]}) is not a testnet, '", "f'but the \"development\" environment was selected.\\n'", "f'This is not allowed. Please start again with a safe environment setting '", "f'(--environment production).'", ",", "fg", "=", "'red'", ",", ")", "sys", ".", "exit", "(", "1", ")", "contracts", "=", "dict", "(", ")", "contracts_version", "=", "environment_type_to_contracts_version", "(", "environment_type", ")", "config", "[", "'contracts_path'", "]", "=", "contracts_precompiled_path", "(", "contracts_version", ")", "if", "network_id", "in", "ID_TO_NETWORKNAME", "and", "ID_TO_NETWORKNAME", "[", "network_id", "]", "!=", "'smoketest'", ":", "try", ":", "deployment_data", "=", "get_contracts_deployment_info", "(", "chain_id", "=", "network_id", ",", "version", "=", "contracts_version", ",", ")", "except", "ValueError", ":", "return", "contracts", ",", "False", "contracts", "=", "deployment_data", "[", "'contracts'", "]", "return", "contracts" ]
Move the Remind command with the uid from from_file to to_file
def move_vobject ( self , uid , from_file , to_file ) : if from_file not in self . _reminders or to_file not in self . _reminders : return uid = uid . split ( '@' ) [ 0 ] with self . _lock : rem = open ( from_file ) . readlines ( ) for ( index , line ) in enumerate ( rem ) : if uid == md5 ( line [ : - 1 ] . encode ( 'utf-8' ) ) . hexdigest ( ) : del rem [ index ] open ( from_file , 'w' ) . writelines ( rem ) open ( to_file , 'a' ) . write ( line ) break
5,736
https://github.com/jspricke/python-remind/blob/dda2aa8fc20b87b9c9fcbca2b67bce73911d05d1/remind.py#L568-L582
[ "def", "mean", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_is_transposed", ":", "kwargs", "[", "\"axis\"", "]", "=", "kwargs", ".", "get", "(", "\"axis\"", ",", "0", ")", "^", "1", "return", "self", ".", "transpose", "(", ")", ".", "mean", "(", "*", "*", "kwargs", ")", "# Pandas default is 0 (though not mentioned in docs)", "axis", "=", "kwargs", ".", "get", "(", "\"axis\"", ",", "0", ")", "sums", "=", "self", ".", "sum", "(", "*", "*", "kwargs", ")", "counts", "=", "self", ".", "count", "(", "axis", "=", "axis", ",", "numeric_only", "=", "kwargs", ".", "get", "(", "\"numeric_only\"", ",", "None", ")", ")", "if", "sums", ".", "_is_transposed", "and", "counts", ".", "_is_transposed", ":", "sums", "=", "sums", ".", "transpose", "(", ")", "counts", "=", "counts", ".", "transpose", "(", ")", "result", "=", "sums", ".", "binary_op", "(", "\"truediv\"", ",", "counts", ",", "axis", "=", "axis", ")", "return", "result", ".", "transpose", "(", ")", "if", "axis", "==", "0", "else", "result" ]
Expand the content of a file into a string .
def expand_include ( filename ) : open_files = set ( ) def _expand_include_rec ( filename ) : if filename in open_files : raise RuntimeError ( 'Recursive include statement detected for ' 'file: ' + filename ) else : open_files . add ( filename ) with open ( filename ) as open_file : for line in open_file : line_stripped = line . strip ( ) . replace ( "//" , "#" ) if line_stripped . startswith ( '@include ' ) : inc_to_clean = line_stripped . split ( None , 1 ) [ 1 ] inc_filename = inc_to_clean . replace ( '"' , " " ) . strip ( ) for included_line in _expand_include_rec ( inc_filename ) : yield included_line else : yield line open_files . remove ( filename ) try : lines = [ ] for line in _expand_include_rec ( filename ) : lines . append ( line ) return '' . join ( lines ) except RuntimeError : return None
5,737
https://github.com/heinzK1X/pylibconfig2/blob/f3a851ac780da28a42264c24aac51b54fbd63f81/pylibconfig2/conf_types.py#L290-L326
[ "def", "_getNearestMappingIndexList", "(", "fromValList", ",", "toValList", ")", ":", "indexList", "=", "[", "]", "for", "fromTimestamp", "in", "fromValList", ":", "smallestDiff", "=", "_getSmallestDifference", "(", "toValList", ",", "fromTimestamp", ")", "i", "=", "toValList", ".", "index", "(", "smallestDiff", ")", "indexList", ".", "append", "(", "i", ")", "return", "indexList" ]
Return the individual info in a dictionary for json .
def to_json ( self ) : self . logger . debug ( "Returning json info" ) individual_info = { 'family_id' : self . family , 'id' : self . individual_id , 'sex' : str ( self . sex ) , 'phenotype' : str ( self . phenotype ) , 'mother' : self . mother , 'father' : self . father , 'extra_info' : self . extra_info } return individual_info
5,738
https://github.com/moonso/ped_parser/blob/a7393e47139532782ea3c821aabea33d46f94323/ped_parser/individual.py#L122-L136
[ "def", "register_onchain_secret", "(", "channel_state", ":", "NettingChannelState", ",", "secret", ":", "Secret", ",", "secrethash", ":", "SecretHash", ",", "secret_reveal_block_number", ":", "BlockNumber", ",", "delete_lock", ":", "bool", "=", "True", ",", ")", "->", "None", ":", "our_state", "=", "channel_state", ".", "our_state", "partner_state", "=", "channel_state", ".", "partner_state", "register_onchain_secret_endstate", "(", "our_state", ",", "secret", ",", "secrethash", ",", "secret_reveal_block_number", ",", "delete_lock", ",", ")", "register_onchain_secret_endstate", "(", "partner_state", ",", "secret", ",", "secrethash", ",", "secret_reveal_block_number", ",", "delete_lock", ",", ")" ]
Return the individual info in a madeline formated string
def to_madeline ( self ) : #Convert sex to madeleine type self . logger . debug ( "Returning madeline info" ) if self . sex == 1 : madeline_gender = 'M' elif self . sex == 2 : madeline_gender = 'F' else : madeline_gender = '.' #Convert father to madeleine type if self . father == '0' : madeline_father = '.' else : madeline_father = self . father #Convert mother to madeleine type if self . mother == '0' : madeline_mother = '.' else : madeline_mother = self . mother #Convert phenotype to madeleine type if self . phenotype == 1 : madeline_phenotype = 'U' elif self . phenotype == 2 : madeline_phenotype = 'A' else : madeline_phenotype = '.' return "{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}" . format ( self . family , self . individual_id , madeline_gender , madeline_father , madeline_mother , madeline_phenotype , self . proband , self . consultand , self . alive )
5,739
https://github.com/moonso/ped_parser/blob/a7393e47139532782ea3c821aabea33d46f94323/ped_parser/individual.py#L138-L172
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
takes in a path to a folder or file and return the module path and the path to the module
def module_name_from_path ( folder_name , verbose = False ) : # strip off endings folder_name = folder_name . split ( '.pyc' ) [ 0 ] folder_name = folder_name . split ( '.py' ) [ 0 ] folder_name = os . path . normpath ( folder_name ) path = folder_name + '/' package = get_python_package ( path ) # path = folder_name module = [ ] if verbose : print ( ( 'folder_name' , folder_name ) ) # os_sys_path = os.sys.path # # if os.path.normpath(path) in os_sys_path: # if verbose: # print('warning: path in sys.path!') # os_sys_path.remove(os.path.normpath(path)) # # # if verbose: # for elem in os_sys_path: # # print('os.sys.path', elem) while True : path = os . path . dirname ( path ) module . append ( os . path . basename ( path ) ) if os . path . basename ( path ) == package : path = os . path . dirname ( path ) break # failed to identify the module if os . path . dirname ( path ) == path : path , module = None , None break if verbose : print ( ( 'path' , path , os . path . dirname ( path ) ) ) # if path == os.path.dirname(path): # if verbose: # print('break -- os.path.dirname(path)', os.path.dirname(path)) # # path, module = None, None # break # if verbose : print ( ( 'module' , module ) ) # OLD START # while path not in os_sys_path: # path = os.path.dirname(path) # # if verbose: # print('path', path, os.path.dirname(path)) # # if path == os.path.dirname(path): # if verbose: # print('break -- os.path.dirname(path)', os.path.dirname(path)) # # path, module = None, None # break # module.append(os.path.basename(path)) # # if verbose: # print('module', module) # OLD END if verbose : print ( ( 'module' , module ) ) # module = module[:-1] # print('mod', module) # from the list construct the path like b26_toolkit.pylabcontrol.scripts and load it module . reverse ( ) module = '.' . join ( module ) return module , path
5,740
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/helper_functions.py#L23-L127
[ "def", "win32_refresh_window", "(", "cls", ")", ":", "# Get console handle", "handle", "=", "windll", ".", "kernel32", ".", "GetConsoleWindow", "(", ")", "RDW_INVALIDATE", "=", "0x0001", "windll", ".", "user32", ".", "RedrawWindow", "(", "handle", ",", "None", ",", "None", ",", "c_uint", "(", "RDW_INVALIDATE", ")", ")" ]
returns all the packages in the module
def explore_package ( module_name ) : packages = [ ] loader = pkgutil . get_loader ( module_name ) for sub_module in pkgutil . walk_packages ( [ os . path . dirname ( loader . get_filename ( ) ) ] , prefix = module_name + '.' ) : _ , sub_module_name , _ = sub_module packages . append ( sub_module_name ) return packages
5,741
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/helper_functions.py#L201-L219
[ "def", "_verify_options", "(", "options", ")", ":", "# sanity check all vals used for bitwise operations later", "bitwise_args", "=", "[", "(", "'level'", ",", "options", "[", "'level'", "]", ")", ",", "(", "'facility'", ",", "options", "[", "'facility'", "]", ")", "]", "bitwise_args", ".", "extend", "(", "[", "(", "'option'", ",", "x", ")", "for", "x", "in", "options", "[", "'options'", "]", "]", ")", "for", "opt_name", ",", "opt", "in", "bitwise_args", ":", "if", "not", "hasattr", "(", "syslog", ",", "opt", ")", ":", "log", ".", "error", "(", "'syslog has no attribute %s'", ",", "opt", ")", "return", "False", "if", "not", "isinstance", "(", "getattr", "(", "syslog", ",", "opt", ")", ",", "int", ")", ":", "log", ".", "error", "(", "'%s is not a valid syslog %s'", ",", "opt", ",", "opt_name", ")", "return", "False", "# Sanity check tag", "if", "'tag'", "in", "options", ":", "if", "not", "isinstance", "(", "options", "[", "'tag'", "]", ",", "six", ".", "string_types", ")", ":", "log", ".", "error", "(", "'tag must be a string'", ")", "return", "False", "if", "len", "(", "options", "[", "'tag'", "]", ")", ">", "32", ":", "log", ".", "error", "(", "'tag size is limited to 32 characters'", ")", "return", "False", "return", "True" ]
Create a parser by defining which input files it will read from .
def generate_from_directory ( cls , directory ) : files = [ os . path . join ( directory , f ) for f in os . listdir ( directory ) if os . path . isfile ( os . path . join ( directory , f ) ) ] return cls ( files )
5,742
https://github.com/CitrineInformatics/pif-dft/blob/d5411dc1f6c6e8d454b132977ca7ab3bb8131a80/dfttopif/parsers/base.py#L57-L66
[ "async", "def", "renew", "(", "self", ",", "session", ",", "*", ",", "dc", "=", "None", ")", ":", "session_id", "=", "extract_attr", "(", "session", ",", "keys", "=", "[", "\"ID\"", "]", ")", "response", "=", "await", "self", ".", "_api", ".", "put", "(", "\"/v1/session/renew\"", ",", "session_id", ",", "params", "=", "{", "\"dc\"", ":", "dc", "}", ")", "try", ":", "result", "=", "response", ".", "body", "[", "0", "]", "except", "IndexError", ":", "meta", "=", "extract_meta", "(", "response", ".", "headers", ")", "raise", "NotFound", "(", "\"No session for %r\"", "%", "session_id", ",", "meta", "=", "meta", ")", "return", "consul", "(", "result", ",", "meta", "=", "extract_meta", "(", "response", ".", "headers", ")", ")" ]
Compute the density from the output structure
def get_density ( self ) : strc = self . get_output_structure ( ) density = sum ( strc . get_masses ( ) ) / strc . get_volume ( ) * 1.660539040 return Property ( scalars = [ Scalar ( value = density ) ] , units = "g/(cm^3)" )
5,743
https://github.com/CitrineInformatics/pif-dft/blob/d5411dc1f6c6e8d454b132977ca7ab3bb8131a80/dfttopif/parsers/base.py#L148-L152
[ "def", "upload_model", "(", "self", ",", "path", ":", "str", ",", "meta", ":", "dict", ",", "force", ":", "bool", ")", ":", "bucket", "=", "self", ".", "connect", "(", ")", "if", "bucket", "is", "None", ":", "raise", "BackendRequiredError", "blob", "=", "bucket", ".", "blob", "(", "\"models/%s/%s.asdf\"", "%", "(", "meta", "[", "\"model\"", "]", ",", "meta", "[", "\"uuid\"", "]", ")", ")", "if", "blob", ".", "exists", "(", ")", "and", "not", "force", ":", "self", ".", "_log", ".", "error", "(", "\"Model %s already exists, aborted.\"", ",", "meta", "[", "\"uuid\"", "]", ")", "raise", "ModelAlreadyExistsError", "self", ".", "_log", ".", "info", "(", "\"Uploading %s from %s...\"", ",", "meta", "[", "\"model\"", "]", ",", "os", ".", "path", ".", "abspath", "(", "path", ")", ")", "def", "tracker", "(", "data", ")", ":", "return", "self", ".", "_Tracker", "(", "data", ",", "self", ".", "_log", ")", "make_transport", "=", "blob", ".", "_make_transport", "def", "make_transport_with_progress", "(", "client", ")", ":", "transport", "=", "make_transport", "(", "client", ")", "request", "=", "transport", ".", "request", "def", "request_with_progress", "(", "method", ",", "url", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "request", "(", "method", ",", "url", ",", "data", "=", "tracker", "(", "data", ")", ",", "headers", "=", "headers", ",", "*", "*", "kwargs", ")", "transport", ".", "request", "=", "request_with_progress", "return", "transport", "blob", ".", "_make_transport", "=", "make_transport_with_progress", "with", "open", "(", "path", ",", "\"rb\"", ")", "as", "fin", ":", "blob", ".", "upload_from_file", "(", "fin", ",", "content_type", "=", "\"application/x-yaml\"", ")", "blob", ".", "make_public", "(", ")", "return", "blob", ".", "public_url" ]
Get the number of atoms in the calculated structure .
def get_number_of_atoms ( self ) : strc = self . get_output_structure ( ) if not strc : return None return Property ( scalars = [ Scalar ( value = len ( strc ) ) ] , units = "/unit cell" )
5,744
https://github.com/CitrineInformatics/pif-dft/blob/d5411dc1f6c6e8d454b132977ca7ab3bb8131a80/dfttopif/parsers/base.py#L296-L304
[ "def", "sg_train", "(", "*", "*", "kwargs", ")", ":", "opt", "=", "tf", ".", "sg_opt", "(", "kwargs", ")", "assert", "opt", ".", "loss", "is", "not", "None", ",", "'loss is mandatory.'", "# default training options", "opt", "+=", "tf", ".", "sg_opt", "(", "optim", "=", "'MaxProp'", ",", "lr", "=", "0.001", ",", "beta1", "=", "0.9", ",", "beta2", "=", "0.99", ",", "category", "=", "''", ",", "ep_size", "=", "100000", ")", "# get optimizer", "train_op", "=", "sg_optim", "(", "opt", ".", "loss", ",", "optim", "=", "opt", ".", "optim", ",", "lr", "=", "0.001", ",", "beta1", "=", "opt", ".", "beta1", ",", "beta2", "=", "opt", ".", "beta2", ",", "category", "=", "opt", ".", "category", ")", "# for console logging", "loss_", "=", "opt", ".", "loss", "# use only first loss when multiple GPU case", "if", "isinstance", "(", "opt", ".", "loss", ",", "(", "tuple", ",", "list", ")", ")", ":", "loss_", "=", "opt", ".", "loss", "[", "0", "]", "# define train function", "# noinspection PyUnusedLocal", "@", "sg_train_func", "def", "train_func", "(", "sess", ",", "arg", ")", ":", "return", "sess", ".", "run", "(", "[", "loss_", ",", "train_op", "]", ")", "[", "0", "]", "# run train function", "train_func", "(", "*", "*", "opt", ")" ]
loads a . b26 file into a dictionary
def load_b26_file ( file_name ) : # file_name = "Z:\Lab\Cantilever\Measurements\\tmp_\\a" assert os . path . exists ( file_name ) with open ( file_name , 'r' ) as infile : data = yaml . safe_load ( infile ) return data
5,745
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/read_write_functions.py#L76-L92
[ "def", "_project_perturbation", "(", "perturbation", ",", "epsilon", ",", "input_image", ",", "clip_min", "=", "None", ",", "clip_max", "=", "None", ")", ":", "if", "clip_min", "is", "None", "or", "clip_max", "is", "None", ":", "raise", "NotImplementedError", "(", "\"_project_perturbation currently has clipping \"", "\"hard-coded in.\"", ")", "# Ensure inputs are in the correct range", "with", "tf", ".", "control_dependencies", "(", "[", "utils_tf", ".", "assert_less_equal", "(", "input_image", ",", "tf", ".", "cast", "(", "clip_max", ",", "input_image", ".", "dtype", ")", ")", ",", "utils_tf", ".", "assert_greater_equal", "(", "input_image", ",", "tf", ".", "cast", "(", "clip_min", ",", "input_image", ".", "dtype", ")", ")", "]", ")", ":", "clipped_perturbation", "=", "utils_tf", ".", "clip_by_value", "(", "perturbation", ",", "-", "epsilon", ",", "epsilon", ")", "new_image", "=", "utils_tf", ".", "clip_by_value", "(", "input_image", "+", "clipped_perturbation", ",", "clip_min", ",", "clip_max", ")", "return", "new_image", "-", "input_image" ]
Reading socket and receiving message from server . Check the CRC32 .
def recv_message ( self , debug = False ) : if debug : packet = self . sock . recv ( 1024 ) # reads how many bytes to read hexdump ( packet ) packet_length_data = self . sock . recv ( 4 ) # reads how many bytes to read if len ( packet_length_data ) < 4 : raise Exception ( "Nothing in the socket!" ) packet_length = struct . unpack ( "<I" , packet_length_data ) [ 0 ] packet = self . sock . recv ( packet_length - 4 ) # read the rest of bytes from socket # check the CRC32 if not crc32 ( packet_length_data + packet [ 0 : - 4 ] ) == struct . unpack ( '<I' , packet [ - 4 : ] ) [ 0 ] : raise Exception ( "CRC32 was not correct!" ) x = struct . unpack ( "<I" , packet [ : 4 ] ) auth_key_id = packet [ 4 : 12 ] if auth_key_id == b'\x00\x00\x00\x00\x00\x00\x00\x00' : # No encryption - Plain text ( message_id , message_length ) = struct . unpack ( "<QI" , packet [ 12 : 24 ] ) data = packet [ 24 : 24 + message_length ] elif auth_key_id == self . auth_key_id : pass message_key = packet [ 12 : 28 ] encrypted_data = packet [ 28 : - 4 ] aes_key , aes_iv = self . aes_calculate ( message_key , direction = "from server" ) decrypted_data = crypt . ige_decrypt ( encrypted_data , aes_key , aes_iv ) assert decrypted_data [ 0 : 8 ] == self . server_salt assert decrypted_data [ 8 : 16 ] == self . session_id message_id = decrypted_data [ 16 : 24 ] seq_no = struct . unpack ( "<I" , decrypted_data [ 24 : 28 ] ) [ 0 ] message_data_length = struct . unpack ( "<I" , decrypted_data [ 28 : 32 ] ) [ 0 ] data = decrypted_data [ 32 : 32 + message_data_length ] else : raise Exception ( "Got unknown auth_key id" ) return data
5,746
https://github.com/datamachine/twx/blob/d9633f12f3647b1e54ba87b70b39df3b7e02b4eb/twx/mtproto/mtproto.py#L248-L286
[ "def", "_maybe_restore_index_levels", "(", "self", ",", "result", ")", ":", "names_to_restore", "=", "[", "]", "for", "name", ",", "left_key", ",", "right_key", "in", "zip", "(", "self", ".", "join_names", ",", "self", ".", "left_on", ",", "self", ".", "right_on", ")", ":", "if", "(", "self", ".", "orig_left", ".", "_is_level_reference", "(", "left_key", ")", "and", "self", ".", "orig_right", ".", "_is_level_reference", "(", "right_key", ")", "and", "name", "not", "in", "result", ".", "index", ".", "names", ")", ":", "names_to_restore", ".", "append", "(", "name", ")", "if", "names_to_restore", ":", "result", ".", "set_index", "(", "names_to_restore", ",", "inplace", "=", "True", ")" ]
Returns a list of CategoryList objects associated with this model instance .
def get_category_lists ( self , init_kwargs = None , additional_parents_aliases = None ) : if self . _category_editor is not None : # Return editor lists instead of plain lists if it's enabled. return self . _category_editor . get_lists ( ) from . toolbox import get_category_lists init_kwargs = init_kwargs or { } catlist_kwargs = { } if self . _category_lists_init_kwargs is not None : catlist_kwargs . update ( self . _category_lists_init_kwargs ) catlist_kwargs . update ( init_kwargs ) lists = get_category_lists ( catlist_kwargs , additional_parents_aliases , obj = self ) return lists
5,747
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/models.py#L229-L252
[ "def", "get_rsa_key", "(", "self", ",", "username", ")", ":", "try", ":", "resp", "=", "self", ".", "session", ".", "post", "(", "'https://steamcommunity.com/login/getrsakey/'", ",", "timeout", "=", "15", ",", "data", "=", "{", "'username'", ":", "username", ",", "'donotchache'", ":", "int", "(", "time", "(", ")", "*", "1000", ")", ",", "}", ",", ")", ".", "json", "(", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "e", ":", "raise", "HTTPError", "(", "str", "(", "e", ")", ")", "return", "resp" ]
Enables editor functionality for categories of this object .
def enable_category_lists_editor ( self , request , editor_init_kwargs = None , additional_parents_aliases = None , lists_init_kwargs = None , handler_init_kwargs = None ) : from . toolbox import CategoryRequestHandler additional_parents_aliases = additional_parents_aliases or [ ] lists_init_kwargs = lists_init_kwargs or { } editor_init_kwargs = editor_init_kwargs or { } handler_init_kwargs = handler_init_kwargs or { } handler = CategoryRequestHandler ( request , self , * * handler_init_kwargs ) lists = self . get_category_lists ( init_kwargs = lists_init_kwargs , additional_parents_aliases = additional_parents_aliases ) handler . register_lists ( lists , lists_init_kwargs = lists_init_kwargs , editor_init_kwargs = editor_init_kwargs ) self . _category_editor = handler # Set link to handler to mutate get_category_lists() behaviour. return handler . listen ( )
5,748
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/models.py#L254-L277
[ "def", "setOverlayTransformOverlayRelative", "(", "self", ",", "ulOverlayHandle", ",", "ulOverlayHandleParent", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformOverlayRelative", "pmatParentOverlayToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "ulOverlayHandleParent", ",", "byref", "(", "pmatParentOverlayToOverlayTransform", ")", ")", "return", "result", ",", "pmatParentOverlayToOverlayTransform" ]
Add this model instance to a category .
def add_to_category ( self , category , user ) : init_kwargs = { 'category' : category , 'creator' : user , 'linked_object' : self } tie = self . categories . model ( * * init_kwargs ) # That's a model of Tie. tie . save ( ) return tie
5,749
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/models.py#L279-L293
[ "def", "_do_http", "(", "opts", ",", "profile", "=", "'default'", ")", ":", "ret", "=", "{", "}", "url", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:url'", ".", "format", "(", "profile", ")", ",", "''", ")", "user", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:user'", ".", "format", "(", "profile", ")", ",", "''", ")", "passwd", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:pass'", ".", "format", "(", "profile", ")", ",", "''", ")", "realm", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:realm'", ".", "format", "(", "profile", ")", ",", "''", ")", "timeout", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:timeout'", ".", "format", "(", "profile", ")", ",", "''", ")", "if", "not", "url", ":", "raise", "Exception", "(", "'missing url in profile {0}'", ".", "format", "(", "profile", ")", ")", "if", "user", "and", "passwd", ":", "auth", "=", "_auth", "(", "url", "=", "url", ",", "realm", "=", "realm", ",", "user", "=", "user", ",", "passwd", "=", "passwd", ")", "_install_opener", "(", "auth", ")", "url", "+=", "'?{0}'", ".", "format", "(", "_urlencode", "(", "opts", ")", ")", "for", "line", "in", "_urlopen", "(", "url", ",", "timeout", "=", "timeout", ")", ".", "read", "(", ")", ".", "splitlines", "(", ")", ":", "splt", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "if", "splt", "[", "0", "]", "in", "ret", ":", "ret", "[", "splt", "[", "0", "]", "]", "+=", "',{0}'", ".", "format", "(", "splt", "[", "1", "]", ")", "else", ":", "ret", "[", "splt", "[", "0", "]", "]", "=", "splt", "[", "1", "]", "return", "ret" ]
Removes this object from a given category .
def remove_from_category ( self , category ) : ctype = ContentType . objects . get_for_model ( self ) self . categories . model . objects . filter ( category = category , content_type = ctype , object_id = self . id ) . delete ( )
5,750
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/models.py#L295-L302
[ "def", "db_create", "(", "cls", ",", "impl", ",", "working_dir", ")", ":", "global", "VIRTUALCHAIN_DB_SCRIPT", "log", ".", "debug", "(", "\"Setup chain state in {}\"", ".", "format", "(", "working_dir", ")", ")", "path", "=", "config", ".", "get_snapshots_filename", "(", "impl", ",", "working_dir", ")", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "raise", "Exception", "(", "\"Database {} already exists\"", ")", "lines", "=", "[", "l", "+", "\";\"", "for", "l", "in", "VIRTUALCHAIN_DB_SCRIPT", ".", "split", "(", "\";\"", ")", "]", "con", "=", "sqlite3", ".", "connect", "(", "path", ",", "isolation_level", "=", "None", ",", "timeout", "=", "2", "**", "30", ")", "for", "line", "in", "lines", ":", "con", ".", "execute", "(", "line", ")", "con", ".", "row_factory", "=", "StateEngine", ".", "db_row_factory", "return", "con" ]
Returns a QuerySet of Ties for the given categories .
def get_ties_for_categories_qs ( cls , categories , user = None , status = None ) : if not isinstance ( categories , list ) : categories = [ categories ] category_ids = [ ] for category in categories : if isinstance ( category , models . Model ) : category_ids . append ( category . id ) else : category_ids . append ( category ) filter_kwargs = { 'content_type' : ContentType . objects . get_for_model ( cls , for_concrete_model = False ) , 'category_id__in' : category_ids } if user is not None : filter_kwargs [ 'creator' ] = user if status is not None : filter_kwargs [ 'status' ] = status ties = get_tie_model ( ) . objects . filter ( * * filter_kwargs ) return ties
5,751
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/models.py#L305-L331
[ "def", "oauth_register", "(", "form", ")", ":", "if", "form", ".", "validate", "(", ")", ":", "data", "=", "form", ".", "to_dict", "(", ")", "if", "not", "data", ".", "get", "(", "'password'", ")", ":", "data", "[", "'password'", "]", "=", "''", "user", "=", "register_user", "(", "*", "*", "data", ")", "if", "not", "data", "[", "'password'", "]", ":", "user", ".", "password", "=", "None", "_datastore", ".", "commit", "(", ")", "return", "user" ]
Returns a QuerySet of objects of this type associated with the given category .
def get_from_category_qs ( cls , category ) : ids = cls . get_ties_for_categories_qs ( category ) . values_list ( 'object_id' ) . distinct ( ) filter_kwargs = { 'id__in' : [ i [ 0 ] for i in ids ] } return cls . objects . filter ( * * filter_kwargs )
5,752
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/models.py#L334-L343
[ "def", "authenticate", "(", "self", ",", "driver", ")", ":", "# 0 1 2 3", "events", "=", "[", "driver", ".", "username_re", ",", "driver", ".", "password_re", ",", "self", ".", "device", ".", "prompt_re", ",", "driver", ".", "rommon_re", ",", "# 4 5 6 7 8", "driver", ".", "unable_to_connect_re", ",", "driver", ".", "authentication_error_re", ",", "pexpect", ".", "TIMEOUT", ",", "pexpect", ".", "EOF", "]", "transitions", "=", "[", "(", "driver", ".", "username_re", ",", "[", "0", "]", ",", "1", ",", "partial", "(", "a_send_username", ",", "self", ".", "username", ")", ",", "10", ")", ",", "(", "driver", ".", "username_re", ",", "[", "1", "]", ",", "1", ",", "None", ",", "10", ")", ",", "(", "driver", ".", "password_re", ",", "[", "0", ",", "1", "]", ",", "2", ",", "partial", "(", "a_send_password", ",", "self", ".", "_acquire_password", "(", ")", ")", ",", "_C", "[", "'first_prompt_timeout'", "]", ")", ",", "(", "driver", ".", "username_re", ",", "[", "2", "]", ",", "-", "1", ",", "a_authentication_error", ",", "0", ")", ",", "(", "driver", ".", "password_re", ",", "[", "2", "]", ",", "-", "1", ",", "a_authentication_error", ",", "0", ")", ",", "(", "driver", ".", "authentication_error_re", ",", "[", "1", ",", "2", "]", ",", "-", "1", ",", "a_authentication_error", ",", "0", ")", ",", "(", "self", ".", "device", ".", "prompt_re", ",", "[", "0", ",", "1", ",", "2", "]", ",", "-", "1", ",", "None", ",", "0", ")", ",", "(", "driver", ".", "rommon_re", ",", "[", "0", "]", ",", "-", "1", ",", "partial", "(", "a_send", ",", "\"\\r\\n\"", ")", ",", "0", ")", ",", "(", "pexpect", ".", "TIMEOUT", ",", "[", "0", "]", ",", "1", ",", "partial", "(", "a_send", ",", "\"\\r\\n\"", ")", ",", "10", ")", ",", "(", "pexpect", ".", "TIMEOUT", ",", "[", "2", "]", ",", "-", "1", ",", "None", ",", "0", ")", ",", "(", "pexpect", ".", "TIMEOUT", ",", "[", "3", ",", "7", "]", ",", "-", "1", ",", "ConnectionTimeoutError", "(", "\"Connection Timeout\"", ",", "self", ".", "hostname", ")", 
",", "0", ")", ",", "(", "driver", ".", "unable_to_connect_re", ",", "[", "0", ",", "1", ",", "2", "]", ",", "-", "1", ",", "a_unable_to_connect", ",", "0", ")", ",", "]", "self", ".", "log", "(", "\"EXPECTED_PROMPT={}\"", ".", "format", "(", "pattern_to_str", "(", "self", ".", "device", ".", "prompt_re", ")", ")", ")", "fsm", "=", "FSM", "(", "\"CONSOLE-SERVER-AUTH\"", ",", "self", ".", "device", ",", "events", ",", "transitions", ",", "timeout", "=", "_C", "[", "'connect_timeout'", "]", ",", "init_pattern", "=", "self", ".", "last_pattern", ")", "return", "fsm", ".", "run", "(", ")" ]
This is run if file is directly executed but not if imported as module . Having this in a separate function allows importing the file into interactive python and still able to execute the function for testing
def main ( ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( "-f" , "--file" , required = True , help = "input file" , type = str ) parser . add_argument ( "-l" , "--locus" , required = True , help = "Locus" , type = str ) parser . add_argument ( "-k" , "--kir" , help = "Option for running with KIR" , action = 'store_true' ) parser . add_argument ( "-s" , "--server" , help = "Option for running with a server" , action = 'store_true' ) parser . add_argument ( "-v" , "--verbose" , help = "Option for running in verbose" , action = 'store_true' ) args = parser . parse_args ( ) fastafile = args . file locus = args . locus verbose = False if args . verbose : verbose = True verbose = False if args . verbose : verbose = True kir = False if args . kir : kir = True serv = False if args . server : serv = True if verbose : logging . basicConfig ( format = '%(asctime)s - %(name)-35s - %(levelname)-5s - %(message)s' , datefmt = '%m/%d/%Y %I:%M:%S %p' , level = logging . INFO ) server = None if serv : server = BioSeqDatabase . open_database ( driver = "pymysql" , user = "root" , passwd = "" , host = "localhost" , db = "bioseqdb" ) seqann = BioSeqAnn ( verbose = True , kir = kir ) for seq in SeqIO . parse ( fastafile , "fasta" ) : ann = seqann . annotate ( seq , locus = locus ) print ( '{:*^20} {:^20} {:*^20}' . format ( "" , str ( seq . description ) , "" ) ) l = 0 for f in ann . annotation : if isinstance ( ann . annotation [ f ] , DBSeq ) : print ( f , ann . method , str ( ann . annotation [ f ] ) , sep = "\t" ) l += len ( ann . annotation [ f ] ) else : print ( f , ann . method , str ( ann . annotation [ f ] . seq ) , sep = "\t" ) l += len ( ann . annotation [ f ] . seq ) print ( "" ) if serv : server . close ( )
5,753
https://github.com/nmdp-bioinformatics/SeqAnn/blob/5ce91559b0a4fbe4fb7758e034eb258202632463/scripts/annotate_fasta.py#L17-L91
[ "def", "badge_label", "(", "self", ",", "badge", ")", ":", "kind", "=", "badge", ".", "kind", "if", "isinstance", "(", "badge", ",", "Badge", ")", "else", "badge", "return", "self", ".", "__badges__", "[", "kind", "]" ]
Extend with a full subtree < = the current minimum subtree .
def _push_subtree ( self , leaves : List [ bytes ] ) : size = len ( leaves ) if count_bits_set ( size ) != 1 : raise ValueError ( "invalid subtree with size != 2^k: %s" % size ) # in general we want the highest bit, but here it's also the lowest bit # so just reuse that code instead of writing a new highest_bit_set() subtree_h , mintree_h = lowest_bit_set ( size ) , self . __mintree_height if mintree_h > 0 and subtree_h > mintree_h : raise ValueError ( "subtree %s > current smallest subtree %s" % ( subtree_h , mintree_h ) ) root_hash , hashes = self . __hasher . _hash_full ( leaves , 0 , size ) assert hashes == ( root_hash , ) if self . hashStore : for h in hashes : self . hashStore . writeLeaf ( h ) new_node_hashes = self . __push_subtree_hash ( subtree_h , root_hash ) nodes = [ ( self . tree_size , height , h ) for h , height in new_node_hashes ] if self . hashStore : for node in nodes : self . hashStore . writeNode ( node )
5,754
https://github.com/hyperledger-archives/indy-ledger/blob/7210c3b288e07f940eddad09b1dfc6a56be846df/ledger/compact_merkle_tree.py#L91-L132
[ "def", "register_dataframe_method", "(", "method", ")", ":", "def", "inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "class", "AccessorMethod", "(", "object", ")", ":", "def", "__init__", "(", "self", ",", "pandas_obj", ")", ":", "self", ".", "_obj", "=", "pandas_obj", "@", "wraps", "(", "method", ")", "def", "__call__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "method", "(", "self", ".", "_obj", ",", "*", "args", ",", "*", "*", "kwargs", ")", "register_dataframe_accessor", "(", "method", ".", "__name__", ")", "(", "AccessorMethod", ")", "return", "method", "return", "inner", "(", ")" ]
resolve a local object
def resolve ( obj , pointer , registry = None ) : registry = LocalRegistry ( obj , registry or { } ) local = DocumentPointer ( pointer ) if local . document : registry [ local . document ] = obj local . document = '<local>' return registry . resolve ( local )
5,755
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/reference/__init__.py#L24-L62
[ "def", "grep_log", "(", "self", ",", "expr", ",", "filename", "=", "'system.log'", ",", "from_mark", "=", "None", ")", ":", "matchings", "=", "[", "]", "pattern", "=", "re", ".", "compile", "(", "expr", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "get_path", "(", ")", ",", "'logs'", ",", "filename", ")", ")", "as", "f", ":", "if", "from_mark", ":", "f", ".", "seek", "(", "from_mark", ")", "for", "line", "in", "f", ":", "m", "=", "pattern", ".", "search", "(", "line", ")", "if", "m", ":", "matchings", ".", "append", "(", "(", "line", ",", "m", ")", ")", "return", "matchings" ]
Add a url scheme to this endpoint . It takes a url string and create the OEmbedUrlScheme object internally .
def addUrlScheme ( self , url ) : #@TODO: validate invalid url format according to http://oembed.com/ if not isinstance ( url , str ) : raise TypeError ( 'url must be a string value' ) if not url in self . _urlSchemes : self . _urlSchemes [ url ] = OEmbedUrlScheme ( url )
5,756
https://github.com/abarmat/python-oembed/blob/bb3d14213e0ac91aa998af67182826b6f1529fe6/oembed/__init__.py#L240-L254
[ "def", "set_config", "(", "name", ",", "xpath", "=", "None", ",", "value", "=", "None", ",", "commit", "=", "False", ")", ":", "ret", "=", "_default_ret", "(", "name", ")", "result", ",", "msg", "=", "_set_config", "(", "xpath", ",", "value", ")", "ret", ".", "update", "(", "{", "'comment'", ":", "msg", ",", "'result'", ":", "result", "}", ")", "if", "not", "result", ":", "return", "ret", "if", "commit", "is", "True", ":", "ret", ".", "update", "(", "{", "'commit'", ":", "__salt__", "[", "'panos.commit'", "]", "(", ")", ",", "'result'", ":", "True", "}", ")", "return", "ret" ]
Try to find if url matches against any of the schemes within this endpoint .
def match ( self , url ) : try : urlSchemes = self . _urlSchemes . itervalues ( ) # Python 2 except AttributeError : urlSchemes = self . _urlSchemes . values ( ) # Python 3 for urlScheme in urlSchemes : if urlScheme . match ( url ) : return True return False
5,757
https://github.com/abarmat/python-oembed/blob/bb3d14213e0ac91aa998af67182826b6f1529fe6/oembed/__init__.py#L279-L298
[ "def", "read_stb", "(", "library", ",", "session", ")", ":", "status", "=", "ViUInt16", "(", ")", "ret", "=", "library", ".", "viReadSTB", "(", "session", ",", "byref", "(", "status", ")", ")", "return", "status", ".", "value", ",", "ret" ]
Format the input url and optional parameters and provides the final url where to get the given resource .
def request ( self , url , * * opt ) : params = opt params [ 'url' ] = url urlApi = self . _urlApi if 'format' in params and self . _implicitFormat : urlApi = self . _urlApi . replace ( '{format}' , params [ 'format' ] ) del params [ 'format' ] if '?' in urlApi : return "%s&%s" % ( urlApi , urllib . urlencode ( params ) ) else : return "%s?%s" % ( urlApi , urllib . urlencode ( params ) )
5,758
https://github.com/abarmat/python-oembed/blob/bb3d14213e0ac91aa998af67182826b6f1529fe6/oembed/__init__.py#L300-L323
[ "def", "compare", "(", "left", ",", "right", ")", ":", "with", "open_zip", "(", "left", ")", "as", "l", ":", "with", "open_zip", "(", "right", ")", "as", "r", ":", "return", "compare_zips", "(", "l", ",", "r", ")" ]
Convert the resource url to a complete url and then fetch the data from it .
def get ( self , url , * * opt ) : return self . fetch ( self . request ( url , * * opt ) )
5,759
https://github.com/abarmat/python-oembed/blob/bb3d14213e0ac91aa998af67182826b6f1529fe6/oembed/__init__.py#L325-L337
[ "def", "in_same_box", "(", "self", ",", "a", ",", "b", ")", ":", "assert", "a", "in", "self", ".", "micro_indices", "assert", "b", "in", "self", ".", "micro_indices", "for", "part", "in", "self", ".", "partition", ":", "if", "a", "in", "part", "and", "b", "in", "part", ":", "return", "True", "return", "False" ]
Fetch url and create a response object according to the mime - type .
def fetch ( self , url ) : opener = self . _urllib . build_opener ( ) opener . addheaders = self . _requestHeaders . items ( ) response = opener . open ( url ) headers = response . info ( ) raw = response . read ( ) raw = raw . decode ( 'utf8' ) if not 'Content-Type' in headers : raise OEmbedError ( 'Missing mime-type in response' ) if headers [ 'Content-Type' ] . find ( 'application/xml' ) != - 1 or headers [ 'Content-Type' ] . find ( 'text/xml' ) != - 1 : response = OEmbedResponse . newFromXML ( raw ) elif headers [ 'Content-Type' ] . find ( 'application/json' ) != - 1 or headers [ 'Content-Type' ] . find ( 'text/javascript' ) != - 1 or headers [ 'Content-Type' ] . find ( 'text/json' ) != - 1 : response = OEmbedResponse . newFromJSON ( raw ) else : raise OEmbedError ( 'Invalid mime-type in response - %s' % headers [ 'Content-Type' ] ) return response
5,760
https://github.com/abarmat/python-oembed/blob/bb3d14213e0ac91aa998af67182826b6f1529fe6/oembed/__init__.py#L339-L369
[ "def", "get_joystick_axes", "(", "joy", ")", ":", "count_value", "=", "ctypes", ".", "c_int", "(", "0", ")", "count", "=", "ctypes", ".", "pointer", "(", "count_value", ")", "result", "=", "_glfw", ".", "glfwGetJoystickAxes", "(", "joy", ",", "count", ")", "return", "result", ",", "count_value", ".", "value" ]
Get an OEmbedResponse from one of the providers configured in this consumer according to the resource url .
def embed ( self , url , format = 'json' , * * opt ) : if format not in [ 'json' , 'xml' ] : raise OEmbedInvalidRequest ( 'Format must be json or xml' ) opt [ 'format' ] = format return self . _request ( url , * * opt )
5,761
https://github.com/abarmat/python-oembed/blob/bb3d14213e0ac91aa998af67182826b6f1529fe6/oembed/__init__.py#L488-L504
[ "def", "_assert_sframe_equal", "(", "sf1", ",", "sf2", ",", "check_column_names", "=", "True", ",", "check_column_order", "=", "True", ",", "check_row_order", "=", "True", ",", "float_column_delta", "=", "None", ")", ":", "from", ".", ".", "import", "SFrame", "as", "_SFrame", "if", "(", "type", "(", "sf1", ")", "is", "not", "_SFrame", ")", "or", "(", "type", "(", "sf2", ")", "is", "not", "_SFrame", ")", ":", "raise", "TypeError", "(", "\"Cannot function on types other than SFrames.\"", ")", "if", "not", "check_column_order", "and", "not", "check_column_names", ":", "raise", "ValueError", "(", "\"Cannot ignore both column order and column names.\"", ")", "sf1", ".", "__materialize__", "(", ")", "sf2", ".", "__materialize__", "(", ")", "if", "sf1", ".", "num_columns", "(", ")", "!=", "sf2", ".", "num_columns", "(", ")", ":", "raise", "AssertionError", "(", "\"Number of columns mismatched: \"", "+", "str", "(", "sf1", ".", "num_columns", "(", ")", ")", "+", "\" != \"", "+", "str", "(", "sf2", ".", "num_columns", "(", ")", ")", ")", "s1_names", "=", "sf1", ".", "column_names", "(", ")", "s2_names", "=", "sf2", ".", "column_names", "(", ")", "sorted_s1_names", "=", "sorted", "(", "s1_names", ")", "sorted_s2_names", "=", "sorted", "(", "s2_names", ")", "if", "check_column_names", ":", "if", "(", "check_column_order", "and", "(", "s1_names", "!=", "s2_names", ")", ")", "or", "(", "sorted_s1_names", "!=", "sorted_s2_names", ")", ":", "raise", "AssertionError", "(", "\"SFrame does not have same column names: \"", "+", "str", "(", "sf1", ".", "column_names", "(", ")", ")", "+", "\" != \"", "+", "str", "(", "sf2", ".", "column_names", "(", ")", ")", ")", "if", "sf1", ".", "num_rows", "(", ")", "!=", "sf2", ".", "num_rows", "(", ")", ":", "raise", "AssertionError", "(", "\"Number of rows mismatched: \"", "+", "str", "(", "sf1", ".", "num_rows", "(", ")", ")", "+", "\" != \"", "+", "str", "(", "sf2", ".", "num_rows", "(", ")", ")", ")", "if", "not", "check_row_order", "and", 
"(", "sf1", ".", "num_rows", "(", ")", ">", "1", ")", ":", "sf1", "=", "sf1", ".", "sort", "(", "s1_names", ")", "sf2", "=", "sf2", ".", "sort", "(", "s2_names", ")", "names_to_check", "=", "None", "if", "check_column_names", ":", "names_to_check", "=", "list", "(", "zip", "(", "sorted_s1_names", ",", "sorted_s2_names", ")", ")", "else", ":", "names_to_check", "=", "list", "(", "zip", "(", "s1_names", ",", "s2_names", ")", ")", "for", "i", "in", "names_to_check", ":", "col1", "=", "sf1", "[", "i", "[", "0", "]", "]", "col2", "=", "sf2", "[", "i", "[", "1", "]", "]", "if", "col1", ".", "dtype", "!=", "col2", ".", "dtype", ":", "raise", "AssertionError", "(", "\"Columns \"", "+", "str", "(", "i", ")", "+", "\" types mismatched.\"", ")", "compare_ary", "=", "None", "if", "col1", ".", "dtype", "==", "float", "and", "float_column_delta", "is", "not", "None", ":", "dt", "=", "float_column_delta", "compare_ary", "=", "(", "(", "col1", ">", "col2", "-", "dt", ")", "&", "(", "col1", "<", "col2", "+", "dt", ")", ")", "else", ":", "compare_ary", "=", "(", "sf1", "[", "i", "[", "0", "]", "]", "==", "sf2", "[", "i", "[", "1", "]", "]", ")", "if", "not", "compare_ary", ".", "all", "(", ")", ":", "count", "=", "0", "for", "j", "in", "compare_ary", ":", "if", "not", "j", ":", "first_row", "=", "count", "break", "count", "+=", "1", "raise", "AssertionError", "(", "\"Columns \"", "+", "str", "(", "i", ")", "+", "\" are not equal! First differing element is at row \"", "+", "str", "(", "first_row", ")", "+", "\": \"", "+", "str", "(", "(", "col1", "[", "first_row", "]", ",", "col2", "[", "first_row", "]", ")", ")", ")" ]
Connect to the Herkulex bus
def connect ( portname , baudrate ) : global SERPORT try : SERPORT = serial . Serial ( portname , baudrate , timeout = 0.1 ) except : raise HerkulexError ( "could not open the serial port" )
5,762
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L152-L168
[ "def", "rmarkdown_draft", "(", "filename", ",", "template", ",", "package", ")", ":", "if", "file_exists", "(", "filename", ")", ":", "return", "filename", "draft_template", "=", "Template", "(", "'rmarkdown::draft(\"$filename\", template=\"$template\", package=\"$package\", edit=FALSE)'", ")", "draft_string", "=", "draft_template", ".", "substitute", "(", "filename", "=", "filename", ",", "template", "=", "template", ",", "package", "=", "package", ")", "report_dir", "=", "os", ".", "path", ".", "dirname", "(", "filename", ")", "rcmd", "=", "Rscript_cmd", "(", ")", "with", "chdir", "(", "report_dir", ")", ":", "do", ".", "run", "(", "[", "rcmd", ",", "\"--no-environ\"", ",", "\"-e\"", ",", "draft_string", "]", ",", "\"Creating bcbioRNASeq quality control template.\"", ")", "do", ".", "run", "(", "[", "\"sed\"", ",", "\"-i\"", ",", "\"s/YYYY-MM-DD\\///g\"", ",", "filename", "]", ",", "\"Editing bcbioRNAseq quality control template.\"", ")", "return", "filename" ]
Calculate Checksum 1
def checksum1 ( data , stringlength ) : value_buffer = 0 for count in range ( 0 , stringlength ) : value_buffer = value_buffer ^ data [ count ] return value_buffer & 0xFE
5,763
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L184-L199
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_closed", ":", "return", "False", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: is closing\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ")", ")", "if", "self", ".", "_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "self", ".", "_console", "=", "None", "if", "self", ".", "_wrap_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_internal_console_port", ",", "self", ".", "_project", ")", "self", ".", "_internal_console_port", "=", "None", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "self", ".", "_closed", "=", "True", "return", "True" ]
Send data to herkulex
def send_data ( data ) : datalength = len ( data ) csm1 = checksum1 ( data , datalength ) csm2 = checksum2 ( csm1 ) data . insert ( 0 , 0xFF ) data . insert ( 1 , 0xFF ) data . insert ( 5 , csm1 ) data . insert ( 6 , csm2 ) stringtosend = "" for i in range ( len ( data ) ) : byteformat = '%02X' % data [ i ] stringtosend = stringtosend + "\\x" + byteformat try : SERPORT . write ( stringtosend . decode ( 'string-escape' ) ) #print stringtosend except : raise HerkulexError ( "could not communicate with motors" )
5,764
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L216-L245
[ "def", "cursor", "(", "self", ")", ":", "self", ".", "_assert_open", "(", ")", "if", "self", ".", "mars_enabled", ":", "in_tran", "=", "self", ".", "_conn", ".", "tds72_transaction", "if", "in_tran", "and", "self", ".", "_dirty", ":", "try", ":", "return", "_MarsCursor", "(", "self", ",", "self", ".", "_conn", ".", "create_session", "(", "self", ".", "_tzinfo_factory", ")", ",", "self", ".", "_tzinfo_factory", ")", "except", "(", "socket", ".", "error", ",", "OSError", ")", "as", "e", ":", "self", ".", "_conn", ".", "close", "(", ")", "raise", "else", ":", "try", ":", "return", "_MarsCursor", "(", "self", ",", "self", ".", "_conn", ".", "create_session", "(", "self", ".", "_tzinfo_factory", ")", ",", "self", ".", "_tzinfo_factory", ")", "except", "(", "socket", ".", "error", ",", "OSError", ")", "as", "e", ":", "if", "e", ".", "errno", "not", "in", "(", "errno", ".", "EPIPE", ",", "errno", ".", "ECONNRESET", ")", ":", "raise", "self", ".", "_conn", ".", "close", "(", ")", "except", "ClosedConnectionError", ":", "pass", "self", ".", "_assert_open", "(", ")", "return", "_MarsCursor", "(", "self", ",", "self", ".", "_conn", ".", "create_session", "(", "self", ".", "_tzinfo_factory", ")", ",", "self", ".", "_tzinfo_factory", ")", "else", ":", "return", "Cursor", "(", "self", ",", "self", ".", "_conn", ".", "main_session", ",", "self", ".", "_tzinfo_factory", ")" ]
Clears the errors register of all Herkulex servos
def clear_errors ( ) : data = [ ] data . append ( 0x0B ) data . append ( BROADCAST_ID ) data . append ( RAM_WRITE_REQ ) data . append ( STATUS_ERROR_RAM ) data . append ( BYTE2 ) data . append ( 0x00 ) data . append ( 0x00 ) send_data ( data )
5,765
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L248-L263
[ "def", "get_covariance_table", "(", "self", ",", "chain", "=", "0", ",", "parameters", "=", "None", ",", "caption", "=", "\"Parameter Covariance\"", ",", "label", "=", "\"tab:parameter_covariance\"", ")", ":", "parameters", ",", "cov", "=", "self", ".", "get_covariance", "(", "chain", "=", "chain", ",", "parameters", "=", "parameters", ")", "return", "self", ".", "_get_2d_latex_table", "(", "parameters", ",", "cov", ",", "caption", ",", "label", ")" ]
scale a value from one range to another
def scale ( input_value , input_min , input_max , out_min , out_max ) : # Figure out how 'wide' each range is input_span = input_max - input_min output_span = out_max - out_min # Convert the left range into a 0-1 range (float) valuescaled = float ( input_value - input_min ) / float ( input_span ) # Convert the 0-1 range into a value in the right range. return out_min + ( valuescaled * output_span )
5,766
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L265-L274
[ "def", "parse_message", "(", "message", ",", "nodata", "=", "False", ")", ":", "header", "=", "read_machine_header", "(", "message", ")", "h_len", "=", "__get_machine_header_length", "(", "header", ")", "meta_raw", "=", "message", "[", "h_len", ":", "h_len", "+", "header", "[", "'meta_len'", "]", "]", "meta", "=", "__parse_meta", "(", "meta_raw", ",", "header", ")", "data_start", "=", "h_len", "+", "header", "[", "'meta_len'", "]", "data", "=", "b''", "if", "not", "nodata", ":", "data", "=", "__decompress", "(", "meta", ",", "message", "[", "data_start", ":", "data_start", "+", "header", "[", "'data_len'", "]", "]", ")", "return", "header", ",", "meta", ",", "data" ]
Scan for the herkulex servos connected
def scan_servos ( ) : servos = [ ] for servo_id in range ( 0x00 , 0xFE ) : model = get_model ( servo_id ) if model : servos += [ ( servo_id , model ) ] return servos
5,767
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L276-L293
[ "def", "update_attachment", "(", "self", ",", "volumeID", ",", "attachmentID", ",", "metadata", ")", ":", "log", ".", "debug", "(", "'updating metadata of attachment {} from volume {}'", ".", "format", "(", "attachmentID", ",", "volumeID", ")", ")", "modifiable_fields", "=", "[", "'name'", ",", "'mime'", ",", "'notes'", ",", "'download_count'", "]", "for", "k", "in", "metadata", ".", "keys", "(", ")", ":", "if", "k", "not", "in", "modifiable_fields", ":", "raise", "ValueError", "(", "'Not modifiable field given: {}'", ".", "format", "(", "k", ")", ")", "if", "'name'", "in", "metadata", "and", "not", "isinstance", "(", "metadata", "[", "'name'", "]", ",", "basestring", ")", ":", "raise", "ValueError", "(", "\"'name' must be a string\"", ")", "if", "'mime'", "in", "metadata", "and", "not", "isinstance", "(", "metadata", "[", "'mime'", "]", ",", "basestring", ")", ":", "raise", "ValueError", "(", "\"'mime' must be a string\"", ")", "if", "'notes'", "in", "metadata", "and", "not", "isinstance", "(", "metadata", "[", "'notes'", "]", ",", "basestring", ")", ":", "raise", "ValueError", "(", "\"'notes' must be a string\"", ")", "if", "'download_count'", "in", "metadata", "and", "not", "isinstance", "(", "metadata", "[", "'download_count'", "]", ",", "Integral", ")", ":", "raise", "ValueError", "(", "\"'download_count' must be a number\"", ")", "rawVolume", "=", "self", ".", "_req_raw_volume", "(", "volumeID", ")", "for", "attachment", "in", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", ":", "if", "attachment", "[", "'id'", "]", "==", "attachmentID", ":", "attachment", ".", "update", "(", "metadata", ")", "self", ".", "_db", ".", "modify_book", "(", "id", "=", "volumeID", ",", "body", "=", "rawVolume", "[", "'_source'", "]", ",", "version", "=", "rawVolume", "[", "'_version'", "]", ")", "return", "raise", "NotFoundException", "(", "'Could not found attachment with id {} in volume {}'", ".", "format", "(", "attachmentID", ",", "volumeID", ")", ")" ]
Get the servo model
def get_model ( servoid ) : data = [ ] data . append ( 0x09 ) data . append ( servoid ) data . append ( EEP_READ_REQ ) data . append ( MODEL_NO1_EEP ) data . append ( BYTE1 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 12 ) return ord ( rxdata [ 9 ] ) & 0xFF except : raise HerkulexError ( "could not communicate with motors" )
5,768
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L295-L321
[ "def", "state", "(", "self", ",", "time", "=", "None", ")", ":", "if", "time", "is", "None", ":", "return", "max", "(", "self", ".", "infos", "(", "type", "=", "State", ")", ",", "key", "=", "attrgetter", "(", "'creation_time'", ")", ")", "else", ":", "states", "=", "[", "s", "for", "s", "in", "self", ".", "infos", "(", "type", "=", "State", ")", "if", "s", ".", "creation_time", "<", "time", "]", "return", "max", "(", "states", ",", "key", "=", "attrgetter", "(", "'creation_time'", ")", ")" ]
Get the error status of servo
def get_servo_status ( self ) : data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( STATUS_ERROR_RAM ) data . append ( BYTE1 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 12 ) return ord ( rxdata [ 9 ] ) & 0xFF except : raise HerkulexError ( "could not communicate with motors" )
5,769
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L379-L405
[ "def", "index_split", "(", "index", ",", "chunks", ")", ":", "Ntotal", "=", "index", ".", "shape", "[", "0", "]", "Nsections", "=", "int", "(", "chunks", ")", "if", "Nsections", "<=", "0", ":", "raise", "ValueError", "(", "'number sections must be larger than 0.'", ")", "Neach_section", ",", "extras", "=", "divmod", "(", "Ntotal", ",", "Nsections", ")", "section_sizes", "=", "(", "[", "0", "]", "+", "extras", "*", "[", "Neach_section", "+", "1", "]", "+", "(", "Nsections", "-", "extras", ")", "*", "[", "Neach_section", "]", ")", "div_points", "=", "numpy", ".", "array", "(", "section_sizes", ")", ".", "cumsum", "(", ")", "sub_ind", "=", "[", "]", "for", "i", "in", "range", "(", "Nsections", ")", ":", "st", "=", "div_points", "[", "i", "]", "end", "=", "div_points", "[", "i", "+", "1", "]", "sub_ind", ".", "append", "(", "index", "[", "st", ":", "end", "]", ")", "return", "sub_ind" ]
Get the detailed error status of servo
def get_servo_status_detail ( self ) : data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( STATUS_DETAIL_RAM ) data . append ( BYTE1 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 12 ) return ord ( rxdata [ 9 ] ) & 0xFF except HerkulexError : raise HerkulexError ( "could not communicate with motors" )
5,770
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L407-L433
[ "def", "index_split", "(", "index", ",", "chunks", ")", ":", "Ntotal", "=", "index", ".", "shape", "[", "0", "]", "Nsections", "=", "int", "(", "chunks", ")", "if", "Nsections", "<=", "0", ":", "raise", "ValueError", "(", "'number sections must be larger than 0.'", ")", "Neach_section", ",", "extras", "=", "divmod", "(", "Ntotal", ",", "Nsections", ")", "section_sizes", "=", "(", "[", "0", "]", "+", "extras", "*", "[", "Neach_section", "+", "1", "]", "+", "(", "Nsections", "-", "extras", ")", "*", "[", "Neach_section", "]", ")", "div_points", "=", "numpy", ".", "array", "(", "section_sizes", ")", ".", "cumsum", "(", ")", "sub_ind", "=", "[", "]", "for", "i", "in", "range", "(", "Nsections", ")", ":", "st", "=", "div_points", "[", "i", "]", "end", "=", "div_points", "[", "i", "+", "1", "]", "sub_ind", ".", "append", "(", "index", "[", "st", ":", "end", "]", ")", "return", "sub_ind" ]
Set the LED Color of Herkulex
def set_led ( self , colorcode ) : data = [ ] data . append ( 0x0A ) data . append ( self . servoid ) data . append ( RAM_WRITE_REQ ) data . append ( LED_CONTROL_RAM ) data . append ( 0x01 ) data . append ( colorcode ) send_data ( data )
5,771
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L435-L455
[ "def", "process_file", "(", "filename", ":", "str", ",", "filetypes", ":", "List", "[", "str", "]", ",", "move_to", ":", "str", ",", "delete_if_not_specified_file_type", ":", "bool", ",", "show_zip_output", ":", "bool", ")", "->", "None", ":", "# log.critical(\"process_file: start\")", "try", ":", "reader", "=", "CorruptedOpenXmlReader", "(", "filename", ",", "show_zip_output", "=", "show_zip_output", ")", "if", "reader", ".", "file_type", "in", "filetypes", ":", "log", ".", "info", "(", "\"Found {}: {}\"", ",", "reader", ".", "description", ",", "filename", ")", "if", "move_to", ":", "dest_file", "=", "os", ".", "path", ".", "join", "(", "move_to", ",", "os", ".", "path", ".", "basename", "(", "filename", ")", ")", "_", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "dest_file", ")", "if", "ext", "!=", "reader", ".", "suggested_extension", "(", ")", ":", "dest_file", "+=", "reader", ".", "suggested_extension", "(", ")", "reader", ".", "move_to", "(", "destination_filename", "=", "dest_file", ")", "else", ":", "log", ".", "info", "(", "\"Unrecognized or unwanted contents: \"", "+", "filename", ")", "if", "delete_if_not_specified_file_type", ":", "log", ".", "info", "(", "\"Deleting: \"", "+", "filename", ")", "os", ".", "remove", "(", "filename", ")", "except", "Exception", "as", "e", ":", "# Must explicitly catch and report errors, since otherwise they vanish", "# into the ether.", "log", ".", "critical", "(", "\"Uncaught error in subprocess: {!r}\\n{}\"", ",", "e", ",", "traceback", ".", "format_exc", "(", ")", ")", "raise" ]
Set the Brakes of Herkulex
def brake_on ( self ) : data = [ ] data . append ( 0x0A ) data . append ( self . servoid ) data . append ( RAM_WRITE_REQ ) data . append ( TORQUE_CONTROL_RAM ) data . append ( 0x01 ) data . append ( 0x40 ) send_data ( data )
5,772
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L457-L473
[ "def", "_FlushCache", "(", "cls", ",", "format_categories", ")", ":", "if", "definitions", ".", "FORMAT_CATEGORY_ARCHIVE", "in", "format_categories", ":", "cls", ".", "_archive_remainder_list", "=", "None", "cls", ".", "_archive_scanner", "=", "None", "cls", ".", "_archive_store", "=", "None", "if", "definitions", ".", "FORMAT_CATEGORY_COMPRESSED_STREAM", "in", "format_categories", ":", "cls", ".", "_compressed_stream_remainder_list", "=", "None", "cls", ".", "_compressed_stream_scanner", "=", "None", "cls", ".", "_compressed_stream_store", "=", "None", "if", "definitions", ".", "FORMAT_CATEGORY_FILE_SYSTEM", "in", "format_categories", ":", "cls", ".", "_file_system_remainder_list", "=", "None", "cls", ".", "_file_system_scanner", "=", "None", "cls", ".", "_file_system_store", "=", "None", "if", "definitions", ".", "FORMAT_CATEGORY_STORAGE_MEDIA_IMAGE", "in", "format_categories", ":", "cls", ".", "_storage_media_image_remainder_list", "=", "None", "cls", ".", "_storage_media_image_scanner", "=", "None", "cls", ".", "_storage_media_image_store", "=", "None", "if", "definitions", ".", "FORMAT_CATEGORY_VOLUME_SYSTEM", "in", "format_categories", ":", "cls", ".", "_volume_system_remainder_list", "=", "None", "cls", ".", "_volume_system_scanner", "=", "None", "cls", ".", "_volume_system_store", "=", "None" ]
Set the torques of Herkulex to zero
def torque_off ( self ) : data = [ ] data . append ( 0x0A ) data . append ( self . servoid ) data . append ( RAM_WRITE_REQ ) data . append ( TORQUE_CONTROL_RAM ) data . append ( 0x01 ) data . append ( 0x00 ) send_data ( data )
5,773
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L475-L492
[ "def", "get_modifications", "(", "self", ")", ":", "# Get all the specific mod types", "mod_event_types", "=", "list", "(", "ont_to_mod_type", ".", "keys", "(", ")", ")", "# Add ONT::PTMs as a special case", "mod_event_types", "+=", "[", "'ONT::PTM'", "]", "mod_events", "=", "[", "]", "for", "mod_event_type", "in", "mod_event_types", ":", "events", "=", "self", ".", "tree", ".", "findall", "(", "\"EVENT/[type='%s']\"", "%", "mod_event_type", ")", "mod_extracted", "=", "self", ".", "extracted_events", ".", "get", "(", "mod_event_type", ",", "[", "]", ")", "for", "event", "in", "events", ":", "event_id", "=", "event", ".", "attrib", ".", "get", "(", "'id'", ")", "if", "event_id", "not", "in", "mod_extracted", ":", "mod_events", ".", "append", "(", "event", ")", "# Iterate over all modification events", "for", "event", "in", "mod_events", ":", "stmts", "=", "self", ".", "_get_modification_event", "(", "event", ")", "if", "stmts", ":", "for", "stmt", "in", "stmts", ":", "self", ".", "statements", ".", "append", "(", "stmt", ")" ]
Enable the torques of Herkulex
def torque_on ( self ) : data = [ ] data . append ( 0x0A ) data . append ( self . servoid ) data . append ( RAM_WRITE_REQ ) data . append ( TORQUE_CONTROL_RAM ) data . append ( 0x01 ) data . append ( 0x60 ) send_data ( data )
5,774
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L494-L510
[ "def", "parse_date", "(", "datestring", ")", ":", "datestring", "=", "str", "(", "datestring", ")", ".", "strip", "(", ")", "if", "not", "datestring", "[", "0", "]", ".", "isdigit", "(", ")", ":", "raise", "ParseError", "(", ")", "if", "'W'", "in", "datestring", ".", "upper", "(", ")", ":", "try", ":", "datestring", "=", "datestring", "[", ":", "-", "1", "]", "+", "str", "(", "int", "(", "datestring", "[", "-", "1", ":", "]", ")", "-", "1", ")", "except", ":", "pass", "for", "regex", ",", "pattern", "in", "DATE_FORMATS", ":", "if", "regex", ".", "match", "(", "datestring", ")", ":", "found", "=", "regex", ".", "search", "(", "datestring", ")", ".", "groupdict", "(", ")", "dt", "=", "datetime", ".", "utcnow", "(", ")", ".", "strptime", "(", "found", "[", "'matched'", "]", ",", "pattern", ")", "if", "'fraction'", "in", "found", "and", "found", "[", "'fraction'", "]", "is", "not", "None", ":", "dt", "=", "dt", ".", "replace", "(", "microsecond", "=", "int", "(", "found", "[", "'fraction'", "]", "[", "1", ":", "]", ")", ")", "if", "'timezone'", "in", "found", "and", "found", "[", "'timezone'", "]", "is", "not", "None", ":", "dt", "=", "dt", ".", "replace", "(", "tzinfo", "=", "Timezone", "(", "found", ".", "get", "(", "'timezone'", ",", "''", ")", ")", ")", "return", "dt", "return", "parse_time", "(", "datestring", ")" ]
get the torque state of motor
def get_torque_state ( self ) : data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( TORQUE_CONTROL_RAM ) data . append ( BYTE2 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 13 ) return bool ( ord ( rxdata [ 9 ] ) ) except HerkulexError : raise HerkulexError ( "could not communicate with motors" )
5,775
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L512-L530
[ "def", "reset", "(", "self", ")", ":", "self", ".", "idx_annotations", ".", "setText", "(", "'Load Annotation File...'", ")", "self", ".", "idx_rater", ".", "setText", "(", "''", ")", "self", ".", "annot", "=", "None", "self", ".", "dataset_markers", "=", "None", "# remove dataset marker", "self", ".", "idx_marker", ".", "clearContents", "(", ")", "self", ".", "idx_marker", ".", "setRowCount", "(", "0", ")", "# remove summary statistics", "w1", "=", "self", ".", "idx_summary", ".", "takeAt", "(", "1", ")", ".", "widget", "(", ")", "w2", "=", "self", ".", "idx_summary", ".", "takeAt", "(", "1", ")", ".", "widget", "(", ")", "self", ".", "idx_summary", ".", "removeWidget", "(", "w1", ")", "self", ".", "idx_summary", ".", "removeWidget", "(", "w2", ")", "w1", ".", "deleteLater", "(", ")", "w2", ".", "deleteLater", "(", ")", "b1", "=", "QGroupBox", "(", "'Staging'", ")", "b2", "=", "QGroupBox", "(", "'Signal quality'", ")", "self", ".", "idx_summary", ".", "addWidget", "(", "b1", ")", "self", ".", "idx_summary", ".", "addWidget", "(", "b2", ")", "# remove annotations", "self", ".", "display_eventtype", "(", ")", "self", ".", "update_annotations", "(", ")", "self", ".", "parent", ".", "create_menubar", "(", ")" ]
Set the position of Herkulex
def set_servo_position ( self , goalposition , goaltime , led ) : goalposition_msb = int ( goalposition ) >> 8 goalposition_lsb = int ( goalposition ) & 0xff data = [ ] data . append ( 0x0C ) data . append ( self . servoid ) data . append ( I_JOG_REQ ) data . append ( goalposition_lsb ) data . append ( goalposition_msb ) data . append ( led ) data . append ( self . servoid ) data . append ( goaltime ) send_data ( data )
5,776
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L532-L560
[ "def", "reduce_to_cycles", "(", "self", ")", ":", "if", "not", "self", ".", "_reduced", ":", "reduced", "=", "copy", "(", "self", ")", "reduced", ".", "objects", "=", "self", ".", "objects", "[", ":", "]", "reduced", ".", "metadata", "=", "[", "]", "reduced", ".", "edges", "=", "[", "]", "self", ".", "num_in_cycles", "=", "reduced", ".", "_reduce_to_cycles", "(", ")", "reduced", ".", "num_in_cycles", "=", "self", ".", "num_in_cycles", "if", "self", ".", "num_in_cycles", ":", "reduced", ".", "_get_edges", "(", ")", "reduced", ".", "_annotate_objects", "(", ")", "for", "meta", "in", "reduced", ".", "metadata", ":", "meta", ".", "cycle", "=", "True", "else", ":", "reduced", "=", "None", "self", ".", "_reduced", "=", "reduced", "return", "self", ".", "_reduced" ]
Gets the current position of Herkulex
def get_servo_position ( self ) : #global SERPORT data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( CALIBRATED_POSITION_RAM ) data . append ( BYTE2 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 13 ) if ( self . servomodel == 0x06 ) or ( self . servomodel == 0x04 ) : return ( ( ord ( rxdata [ 10 ] ) & 0xff ) << 8 ) | ( ord ( rxdata [ 9 ] ) & 0xFF ) else : #print ord(rxdata[9]),ord(rxdata[10]) return ( ( ord ( rxdata [ 10 ] ) & 0x03 ) << 8 ) | ( ord ( rxdata [ 9 ] ) & 0xFF ) except HerkulexError : print "Could not read from the servos. Check connection"
5,777
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L562-L594
[ "def", "reduce_to_cycles", "(", "self", ")", ":", "if", "not", "self", ".", "_reduced", ":", "reduced", "=", "copy", "(", "self", ")", "reduced", ".", "objects", "=", "self", ".", "objects", "[", ":", "]", "reduced", ".", "metadata", "=", "[", "]", "reduced", ".", "edges", "=", "[", "]", "self", ".", "num_in_cycles", "=", "reduced", ".", "_reduce_to_cycles", "(", ")", "reduced", ".", "num_in_cycles", "=", "self", ".", "num_in_cycles", "if", "self", ".", "num_in_cycles", ":", "reduced", ".", "_get_edges", "(", ")", "reduced", ".", "_annotate_objects", "(", ")", "for", "meta", "in", "reduced", ".", "metadata", ":", "meta", ".", "cycle", "=", "True", "else", ":", "reduced", "=", "None", "self", ".", "_reduced", "=", "reduced", "return", "self", ".", "_reduced" ]
Gets the current temperature of Herkulex
def get_servo_temperature ( self ) : data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( TEMPERATURE_RAM ) data . append ( BYTE2 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 13 ) return ord ( rxdata [ 9 ] ) except HerkulexError : raise HerkulexError ( "Could not communicate with motors" )
5,778
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L596-L621
[ "def", "associate_network_acl_to_subnet", "(", "network_acl_id", "=", "None", ",", "subnet_id", "=", "None", ",", "network_acl_name", "=", "None", ",", "subnet_name", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "network_acl_name", ":", "network_acl_id", "=", "_get_resource_id", "(", "'network_acl'", ",", "network_acl_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "not", "network_acl_id", ":", "return", "{", "'associated'", ":", "False", ",", "'error'", ":", "{", "'message'", ":", "'Network ACL {0} does not exist.'", ".", "format", "(", "network_acl_name", ")", "}", "}", "if", "subnet_name", ":", "subnet_id", "=", "_get_resource_id", "(", "'subnet'", ",", "subnet_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "not", "subnet_id", ":", "return", "{", "'associated'", ":", "False", ",", "'error'", ":", "{", "'message'", ":", "'Subnet {0} does not exist.'", ".", "format", "(", "subnet_name", ")", "}", "}", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "association_id", "=", "conn", ".", "associate_network_acl", "(", "network_acl_id", ",", "subnet_id", ")", "if", "association_id", ":", "log", ".", "info", "(", "'Network ACL with id %s was associated with subnet %s'", ",", "network_acl_id", ",", "subnet_id", ")", "return", "{", "'associated'", ":", "True", ",", "'id'", ":", "association_id", "}", "else", ":", "log", ".", "warning", "(", "'Network ACL with id %s was not associated with subnet %s'", ",", "network_acl_id", ",", "subnet_id", ")", "return", "{", "'associated'", ":", "False", ",", "'error'", ":", "{", "'message'", ":", "'ACL could not be assocaited.'", "}", "}", "except", "BotoServerError", 
"as", "e", ":", "return", "{", "'associated'", ":", "False", ",", "'error'", ":", "__utils__", "[", "'boto.get_error'", "]", "(", "e", ")", "}" ]
Gets the current torque of Herkulex
def get_servo_torque ( self ) : data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( PWM_RAM ) data . append ( BYTE2 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 13 ) if ord ( rxdata [ 10 ] ) <= 127 : return ( ( ord ( rxdata [ 10 ] ) & 0x03 ) << 8 ) | ( ord ( rxdata [ 9 ] ) & 0xFF ) else : return ( ord ( rxdata [ 10 ] ) - 0xFF ) * 0xFF + ( ord ( rxdata [ 9 ] ) & 0xFF ) - 0xFF except HerkulexError : raise HerkulexError ( "could not communicate with motors" )
5,779
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L623-L654
[ "def", "_add_section", "(", "self", ",", "section", ")", ":", "section", ".", "rid", "=", "0", "plen", "=", "0", "while", "self", ".", "_merge", "and", "self", ".", "_sections", "and", "plen", "!=", "len", "(", "self", ".", "_sections", ")", ":", "plen", "=", "len", "(", "self", ".", "_sections", ")", "self", ".", "_sections", "=", "[", "s", "for", "s", "in", "self", ".", "_sections", "if", "not", "section", ".", "join", "(", "s", ")", "]", "self", ".", "_sections", ".", "append", "(", "section", ")" ]
Set the Herkulex in continuous rotation mode
def set_servo_speed ( self , goalspeed , led ) : if goalspeed > 0 : goalspeed_msb = ( int ( goalspeed ) & 0xFF00 ) >> 8 goalspeed_lsb = int ( goalspeed ) & 0xff elif goalspeed < 0 : goalspeed_msb = 64 + ( 255 - ( ( int ( goalspeed ) & 0xFF00 ) >> 8 ) ) goalspeed_lsb = ( abs ( goalspeed ) & 0xff ) #print goalspeed_msb,goalspeed_lsb data = [ ] data . append ( 0x0C ) data . append ( self . servoid ) data . append ( I_JOG_REQ ) data . append ( goalspeed_lsb ) data . append ( goalspeed_msb ) data . append ( 0x02 | led ) data . append ( self . servoid ) data . append ( 0x00 ) send_data ( data )
5,780
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L656-L686
[ "def", "parse_name", "(", "cls", ",", "name", ")", ":", "parse_dict", "=", "dict", ".", "fromkeys", "(", "cls", ".", "PARSABLE", ",", "None", ")", "parse_dict", "[", "'date'", "]", "=", "cls", ".", "get_date", "(", "name", ")", "parse_dict", "[", "'version'", "]", "=", "cls", ".", "get_version", "(", "name", ")", "parse_dict", "[", "'udim'", "]", "=", "cls", ".", "get_udim", "(", "name", ")", "parse_dict", "[", "'side'", "]", "=", "cls", ".", "get_side", "(", "name", ")", "parse_dict", "[", "'basename'", "]", "=", "cls", ".", "get_base_naive", "(", "cls", ".", "_reduce_name", "(", "name", ",", "parse_dict", ")", ")", "return", "parse_dict" ]
Set the P gain of the position PID
def set_position_p ( self , pvalue ) : pvalue_msb = int ( pvalue ) >> 8 pvalue_lsb = int ( pvalue ) & 0xff data = [ ] data . append ( 0x0B ) data . append ( self . servoid ) data . append ( RAM_WRITE_REQ ) data . append ( POSITION_KP_RAM ) data . append ( BYTE2 ) data . append ( pvalue_lsb ) data . append ( pvalue_msb ) send_data ( data )
5,781
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L688-L705
[ "def", "_create_events_writer", "(", "self", ",", "directory", ")", ":", "total_size", "=", "0", "events_files", "=", "self", ".", "_fetch_events_files_on_disk", "(", ")", "for", "file_name", "in", "events_files", ":", "file_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_events_directory", ",", "file_name", ")", "total_size", "+=", "tf", ".", "io", ".", "gfile", ".", "stat", "(", "file_path", ")", ".", "length", "if", "total_size", ">=", "self", ".", "total_file_size_cap_bytes", ":", "# The total size written to disk is too big. Delete events files until", "# the size is below the cap.", "for", "file_name", "in", "events_files", ":", "if", "total_size", "<", "self", ".", "total_file_size_cap_bytes", ":", "break", "file_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_events_directory", ",", "file_name", ")", "file_size", "=", "tf", ".", "io", ".", "gfile", ".", "stat", "(", "file_path", ")", ".", "length", "try", ":", "tf", ".", "io", ".", "gfile", ".", "remove", "(", "file_path", ")", "total_size", "-=", "file_size", "logger", ".", "info", "(", "\"Deleted %s because events files take up over %d bytes\"", ",", "file_path", ",", "self", ".", "total_file_size_cap_bytes", ")", "except", "IOError", "as", "err", ":", "logger", ".", "error", "(", "\"Deleting %s failed: %s\"", ",", "file_path", ",", "err", ")", "# We increment this index because each events writer must differ in prefix.", "self", ".", "_events_file_count", "+=", "1", "file_path", "=", "\"%s.%d.%d\"", "%", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "DEBUGGER_EVENTS_FILE_STARTING_TEXT", ")", ",", "time", ".", "time", "(", ")", ",", "self", ".", "_events_file_count", ")", "logger", ".", "info", "(", "\"Creating events file %s\"", ",", "file_path", ")", "return", "pywrap_tensorflow", ".", "EventsWriter", "(", "tf", ".", "compat", ".", "as_bytes", "(", "file_path", ")", ")" ]
Set the I gain of the position PID
def set_position_i ( self , ivalue ) : ivalue_msb = int ( ivalue ) >> 8 ivalue_lsb = int ( ivalue ) & 0xff data = [ ] data . append ( 0x0B ) data . append ( self . servoid ) data . append ( RAM_WRITE_REQ ) data . append ( POSITION_KI_RAM ) data . append ( BYTE2 ) data . append ( ivalue_lsb ) data . append ( ivalue_msb ) send_data ( data )
5,782
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L707-L724
[ "def", "restore_from_disk", "(", "self", ",", "clean_old_snapshot", "=", "False", ")", ":", "base_filename", "=", "\"%s/%s_%s_*.dat\"", "%", "(", "self", ".", "snapshot_path", ",", "self", ".", "name", ",", "self", ".", "expiration", ")", "availables_snapshots", "=", "glob", ".", "glob", "(", "base_filename", ")", "last_period", "=", "self", ".", "current_period", "-", "dt", ".", "timedelta", "(", "days", "=", "self", ".", "expiration", "-", "1", ")", "for", "filename", "in", "availables_snapshots", ":", "snapshot_period", "=", "dt", ".", "datetime", ".", "strptime", "(", "filename", ".", "split", "(", "'_'", ")", "[", "-", "1", "]", ".", "strip", "(", "'.dat'", ")", ",", "\"%Y-%m-%d\"", ")", "if", "snapshot_period", "<", "last_period", "and", "not", "clean_old_snapshot", ":", "continue", "else", ":", "self", ".", "_union_bf_from_file", "(", "filename", ")", "if", "snapshot_period", "==", "self", ".", "current_period", ":", "self", ".", "_union_bf_from_file", "(", "filename", ",", "current", "=", "True", ")", "if", "snapshot_period", "<", "last_period", "and", "clean_old_snapshot", ":", "os", ".", "remove", "(", "filename", ")", "self", ".", "ready", "=", "True" ]
Set the D gain of the PID
def set_position_d ( self , dvalue ) : dvalue_msb = int ( dvalue ) >> 8 dvalue_lsb = int ( dvalue ) & 0xff data = [ ] data . append ( 0x0B ) data . append ( self . servoid ) data . append ( RAM_WRITE_REQ ) data . append ( POSITION_KD_RAM ) data . append ( BYTE2 ) data . append ( dvalue_lsb ) data . append ( dvalue_msb ) send_data ( data )
5,783
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L726-L742
[ "def", "_getNearestMappingIndexList", "(", "fromValList", ",", "toValList", ")", ":", "indexList", "=", "[", "]", "for", "fromTimestamp", "in", "fromValList", ":", "smallestDiff", "=", "_getSmallestDifference", "(", "toValList", ",", "fromTimestamp", ")", "i", "=", "toValList", ".", "index", "(", "smallestDiff", ")", "indexList", ".", "append", "(", "i", ")", "return", "indexList" ]
Get the P value of the current PID for position
def get_position_p ( self ) : data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( POSITION_KP_RAM ) data . append ( BYTE2 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 13 ) return ( ord ( rxdata [ 10 ] ) * 256 ) + ( ord ( rxdata [ 9 ] ) & 0xff ) except HerkulexError : raise HerkulexError ( "could not communicate with motors" )
5,784
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L744-L760
[ "def", "restore", "(", "archive", ",", "oqdata", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "oqdata", ")", ":", "sys", ".", "exit", "(", "'%s exists already'", "%", "oqdata", ")", "if", "'://'", "in", "archive", ":", "# get the zip archive from an URL", "resp", "=", "requests", ".", "get", "(", "archive", ")", "_", ",", "archive", "=", "archive", ".", "rsplit", "(", "'/'", ",", "1", ")", "with", "open", "(", "archive", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "resp", ".", "content", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "archive", ")", ":", "sys", ".", "exit", "(", "'%s does not exist'", "%", "archive", ")", "t0", "=", "time", ".", "time", "(", ")", "oqdata", "=", "os", ".", "path", ".", "abspath", "(", "oqdata", ")", "assert", "archive", ".", "endswith", "(", "'.zip'", ")", ",", "archive", "os", ".", "mkdir", "(", "oqdata", ")", "zipfile", ".", "ZipFile", "(", "archive", ")", ".", "extractall", "(", "oqdata", ")", "dbpath", "=", "os", ".", "path", ".", "join", "(", "oqdata", ",", "'db.sqlite3'", ")", "db", "=", "Db", "(", "sqlite3", ".", "connect", ",", "dbpath", ",", "isolation_level", "=", "None", ",", "detect_types", "=", "sqlite3", ".", "PARSE_DECLTYPES", ")", "n", "=", "0", "for", "fname", "in", "os", ".", "listdir", "(", "oqdata", ")", ":", "mo", "=", "re", ".", "match", "(", "'calc_(\\d+)\\.hdf5'", ",", "fname", ")", "if", "mo", ":", "job_id", "=", "int", "(", "mo", ".", "group", "(", "1", ")", ")", "fullname", "=", "os", ".", "path", ".", "join", "(", "oqdata", ",", "fname", ")", "[", ":", "-", "5", "]", "# strip .hdf5", "db", "(", "\"UPDATE job SET user_name=?x, ds_calc_dir=?x WHERE id=?x\"", ",", "getpass", ".", "getuser", "(", ")", ",", "fullname", ",", "job_id", ")", "safeprint", "(", "'Restoring '", "+", "fname", ")", "n", "+=", "1", "dt", "=", "time", ".", "time", "(", ")", "-", "t0", "safeprint", "(", "'Extracted %d calculations into %s in %d seconds'", "%", "(", "n", ",", "oqdata", ",", "dt", ")", 
")" ]
Get the I value of the current PID for position
def get_position_i ( self ) : data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( POSITION_KI_RAM ) data . append ( BYTE2 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 13 ) return ( ord ( rxdata [ 10 ] ) * 256 ) + ( ord ( rxdata [ 9 ] ) & 0xff ) except HerkulexError : raise HerkulexError ( "Could not read from motors" )
5,785
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L762-L778
[ "def", "get_editor_query", "(", "sql", ")", ":", "sql", "=", "sql", ".", "strip", "(", ")", "# The reason we can't simply do .strip('\\e') is that it strips characters,", "# not a substring. So it'll strip \"e\" in the end of the sql also!", "# Ex: \"select * from style\\e\" -> \"select * from styl\".", "pattern", "=", "re", ".", "compile", "(", "'(^\\\\\\e|\\\\\\e$)'", ")", "while", "pattern", ".", "search", "(", "sql", ")", ":", "sql", "=", "pattern", ".", "sub", "(", "''", ",", "sql", ")", "return", "sql" ]
Get the D value of the current PID for position
def get_position_d ( self ) : data = [ ] data . append ( 0x09 ) data . append ( self . servoid ) data . append ( RAM_READ_REQ ) data . append ( POSITION_KD_RAM ) data . append ( BYTE2 ) send_data ( data ) rxdata = [ ] try : rxdata = SERPORT . read ( 13 ) return ( ord ( rxdata [ 10 ] ) * 256 ) + ( ord ( rxdata [ 9 ] ) & 0xff ) except HerkulexError : raise HerkulexError ( "could not communicate with motors" )
5,786
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L780-L796
[ "def", "clean", "(", "file_", ",", "imports", ")", ":", "modules_not_imported", "=", "compare_modules", "(", "file_", ",", "imports", ")", "re_remove", "=", "re", ".", "compile", "(", "\"|\"", ".", "join", "(", "modules_not_imported", ")", ")", "to_write", "=", "[", "]", "try", ":", "f", "=", "open_func", "(", "file_", ",", "\"r+\"", ")", "except", "OSError", ":", "logging", ".", "error", "(", "\"Failed on file: {}\"", ".", "format", "(", "file_", ")", ")", "raise", "else", ":", "for", "i", "in", "f", ".", "readlines", "(", ")", ":", "if", "re_remove", ".", "match", "(", "i", ")", "is", "None", ":", "to_write", ".", "append", "(", "i", ")", "f", ".", "seek", "(", "0", ")", "f", ".", "truncate", "(", ")", "for", "i", "in", "to_write", ":", "f", ".", "write", "(", "i", ")", "finally", ":", "f", ".", "close", "(", ")", "logging", ".", "info", "(", "\"Successfully cleaned up requirements in \"", "+", "file_", ")" ]
saves the PID values from RAM to EEPROM
def save_pid_eeprom ( self ) : pval = self . get_position_p ( ) ival = self . get_position_i ( ) dval = self . get_position_d ( ) #write P value pvalue_msb = int ( pval ) >> 8 pvalue_lsb = int ( pval ) & 0xff data_p = [ ] data_p . append ( 0x0B ) data_p . append ( self . servoid ) data_p . append ( EEP_WRITE_REQ ) data_p . append ( POSITION_KP_EEP ) data_p . append ( BYTE2 ) data_p . append ( pvalue_lsb ) data_p . append ( pvalue_msb ) send_data ( data_p ) # write I value ivalue_msb = int ( ival ) >> 8 ivalue_lsb = int ( ival ) & 0xff data_i = [ ] data_i . append ( 0x0B ) data_i . append ( self . servoid ) data_i . append ( EEP_WRITE_REQ ) data_i . append ( POSITION_KI_EEP ) data_i . append ( BYTE2 ) data_i . append ( ivalue_lsb ) data_i . append ( ivalue_msb ) send_data ( data_i ) # write D value dvalue_msb = int ( dval ) >> 8 dvalue_lsb = int ( dval ) & 0xff data_d = [ ] data_d . append ( 0x0B ) data_d . append ( self . servoid ) data_d . append ( EEP_WRITE_REQ ) data_d . append ( POSITION_KD_EEP ) data_d . append ( BYTE2 ) data_d . append ( dvalue_lsb ) data_d . append ( dvalue_msb ) send_data ( data_d )
5,787
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L798-L846
[ "def", "_is_allowed", "(", "input", ")", ":", "gnupg_options", "=", "_get_all_gnupg_options", "(", ")", "allowed", "=", "_get_options_group", "(", "\"allowed\"", ")", "## these are the allowed options we will handle so far, all others should", "## be dropped. this dance is so that when new options are added later, we", "## merely add the to the _allowed list, and the `` _allowed.issubset``", "## assertion will check that GPG will recognise them", "try", ":", "## check that allowed is a subset of all gnupg_options", "assert", "allowed", ".", "issubset", "(", "gnupg_options", ")", "except", "AssertionError", ":", "raise", "UsageError", "(", "\"'allowed' isn't a subset of known options, diff: %s\"", "%", "allowed", ".", "difference", "(", "gnupg_options", ")", ")", "## if we got a list of args, join them", "##", "## see TODO file, tag :cleanup:", "if", "not", "isinstance", "(", "input", ",", "str", ")", ":", "input", "=", "' '", ".", "join", "(", "[", "x", "for", "x", "in", "input", "]", ")", "if", "isinstance", "(", "input", ",", "str", ")", ":", "if", "input", ".", "find", "(", "'_'", ")", ">", "0", ":", "if", "not", "input", ".", "startswith", "(", "'--'", ")", ":", "hyphenated", "=", "_hyphenate", "(", "input", ",", "add_prefix", "=", "True", ")", "else", ":", "hyphenated", "=", "_hyphenate", "(", "input", ")", "else", ":", "hyphenated", "=", "input", "## xxx we probably want to use itertools.dropwhile here", "try", ":", "assert", "hyphenated", "in", "allowed", "except", "AssertionError", "as", "ae", ":", "dropped", "=", "_fix_unsafe", "(", "hyphenated", ")", "log", ".", "warn", "(", "\"_is_allowed(): Dropping option '%s'...\"", "%", "dropped", ")", "raise", "ProtectedOption", "(", "\"Option '%s' not supported.\"", "%", "dropped", ")", "else", ":", "return", "input", "return", "None" ]
Gets the current angle of the servo in degrees
def get_servo_angle ( self ) : servoposition = self . get_servo_position ( ) if ( self . servomodel == 0x06 ) or ( self . servomodel == 0x04 ) : return scale ( servoposition , 10627 , 22129 , - 159.9 , 159.6 ) else : return scale ( servoposition , 21 , 1002 , - 150 , 150 )
5,788
https://github.com/sastrarobotics/pyHerkulex/blob/3a42046cbfea8c7e343a04f42facba5e7bca570e/herkulex.py#L870-L883
[ "def", "network_delete_event", "(", "self", ",", "network_info", ")", ":", "net_id", "=", "network_info", "[", "'network_id'", "]", "if", "net_id", "not", "in", "self", ".", "network", ":", "LOG", ".", "error", "(", "'network_delete_event: net_id %s does not exist.'", ",", "net_id", ")", "return", "segid", "=", "self", ".", "network", "[", "net_id", "]", ".", "get", "(", "'segmentation_id'", ")", "tenant_id", "=", "self", ".", "network", "[", "net_id", "]", ".", "get", "(", "'tenant_id'", ")", "tenant_name", "=", "self", ".", "get_project_name", "(", "tenant_id", ")", "net", "=", "utils", ".", "Dict2Obj", "(", "self", ".", "network", "[", "net_id", "]", ")", "if", "not", "tenant_name", ":", "LOG", ".", "error", "(", "'Project %(tenant_id)s does not exist.'", ",", "{", "'tenant_id'", ":", "tenant_id", "}", ")", "self", ".", "update_network_db", "(", "net", ".", "id", ",", "constants", ".", "DELETE_FAIL", ")", "return", "try", ":", "self", ".", "dcnm_client", ".", "delete_network", "(", "tenant_name", ",", "net", ")", "# Put back the segmentation id into the pool.", "self", ".", "seg_drvr", ".", "release_segmentation_id", "(", "segid", ")", "# Remove entry from database and cache.", "self", ".", "delete_network_db", "(", "net_id", ")", "del", "self", ".", "network", "[", "net_id", "]", "snets", "=", "[", "k", "for", "k", "in", "self", ".", "subnet", "if", "(", "self", ".", "subnet", "[", "k", "]", ".", "get", "(", "'network_id'", ")", "==", "net_id", ")", "]", "[", "self", ".", "subnet", ".", "pop", "(", "s", ")", "for", "s", "in", "snets", "]", "except", "dexc", ".", "DfaClientRequestFailed", ":", "LOG", ".", "error", "(", "'Failed to create network %(net)s.'", ",", "{", "'net'", ":", "net", ".", "name", "}", ")", "self", ".", "update_network_db", "(", "net_id", ",", "constants", ".", "DELETE_FAIL", ")", "# deleting all related VMs", "instances", "=", "self", ".", "get_vms", "(", ")", "instances_related", "=", "[", "k", "for", "k", "in", "instances", "if", "k", 
".", "network_id", "==", "net_id", "]", "for", "vm", "in", "instances_related", ":", "LOG", ".", "debug", "(", "\"deleting vm %s because network is deleted\"", ",", "vm", ".", "name", ")", "self", ".", "delete_vm_function", "(", "vm", ".", "port_id", ",", "vm", ")", "self", ".", "network_del_notif", "(", "tenant_id", ",", "tenant_name", ",", "net_id", ")" ]
Temporary disable logging .
def disable_logging ( func ) : return func handler = logging . NullHandler ( ) @ wraps ( func ) def wrapper ( * args , * * kwargs ) : logger = logging . getLogger ( ) logger . addHandler ( handler ) resp = func ( * args , * * kwargs ) logger . removeHandler ( handler ) return resp return wrapper
5,789
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/cli.py#L28-L42
[ "def", "download_song", "(", "song_url", ",", "song_title", ")", ":", "outtmpl", "=", "song_title", "+", "'.%(ext)s'", "ydl_opts", "=", "{", "'format'", ":", "'bestaudio/best'", ",", "'outtmpl'", ":", "outtmpl", ",", "'postprocessors'", ":", "[", "{", "'key'", ":", "'FFmpegExtractAudio'", ",", "'preferredcodec'", ":", "'mp3'", ",", "'preferredquality'", ":", "'192'", ",", "}", ",", "{", "'key'", ":", "'FFmpegMetadata'", "}", ",", "]", ",", "}", "with", "youtube_dl", ".", "YoutubeDL", "(", "ydl_opts", ")", "as", "ydl", ":", "info_dict", "=", "ydl", ".", "extract_info", "(", "song_url", ",", "download", "=", "True", ")" ]
Format output .
def format_output ( func ) : return func @ wraps ( func ) def wrapper ( * args , * * kwargs ) : try : response = func ( * args , * * kwargs ) except Exception as error : print ( colored ( error , 'red' ) , file = sys . stderr ) sys . exit ( 1 ) else : print ( response ) sys . exit ( 0 ) return wrapper
5,790
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/cli.py#L45-L61
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
start the read_probe thread
def start ( self , * args , * * kwargs ) : self . _stop = False super ( ReadProbes , self ) . start ( * args , * * kwargs )
5,791
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/read_probes.py#L70-L75
[ "def", "to_lal_unit", "(", "aunit", ")", ":", "if", "isinstance", "(", "aunit", ",", "string_types", ")", ":", "aunit", "=", "units", ".", "Unit", "(", "aunit", ")", "aunit", "=", "aunit", ".", "decompose", "(", ")", "lunit", "=", "lal", ".", "Unit", "(", ")", "for", "base", ",", "power", "in", "zip", "(", "aunit", ".", "bases", ",", "aunit", ".", "powers", ")", ":", "# try this base", "try", ":", "lalbase", "=", "LAL_UNIT_FROM_ASTROPY", "[", "base", "]", "except", "KeyError", ":", "lalbase", "=", "None", "# otherwise loop through the equivalent bases", "for", "eqbase", "in", "base", ".", "find_equivalent_units", "(", ")", ":", "try", ":", "lalbase", "=", "LAL_UNIT_FROM_ASTROPY", "[", "eqbase", "]", "except", "KeyError", ":", "continue", "# if we didn't find anything, raise an exception", "if", "lalbase", "is", "None", ":", "raise", "ValueError", "(", "\"LAL has no unit corresponding to %r\"", "%", "base", ")", "lunit", "*=", "lalbase", "**", "power", "return", "lunit" ]
quit the read_probe thread
def quit ( self , * args , * * kwargs ) : # real signature unknown self . _stop = True super ( ReadProbes , self ) . quit ( * args , * * kwargs )
5,792
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/read_probes.py#L78-L83
[ "def", "to_lal_unit", "(", "aunit", ")", ":", "if", "isinstance", "(", "aunit", ",", "string_types", ")", ":", "aunit", "=", "units", ".", "Unit", "(", "aunit", ")", "aunit", "=", "aunit", ".", "decompose", "(", ")", "lunit", "=", "lal", ".", "Unit", "(", ")", "for", "base", ",", "power", "in", "zip", "(", "aunit", ".", "bases", ",", "aunit", ".", "powers", ")", ":", "# try this base", "try", ":", "lalbase", "=", "LAL_UNIT_FROM_ASTROPY", "[", "base", "]", "except", "KeyError", ":", "lalbase", "=", "None", "# otherwise loop through the equivalent bases", "for", "eqbase", "in", "base", ".", "find_equivalent_units", "(", ")", ":", "try", ":", "lalbase", "=", "LAL_UNIT_FROM_ASTROPY", "[", "eqbase", "]", "except", "KeyError", ":", "continue", "# if we didn't find anything, raise an exception", "if", "lalbase", "is", "None", ":", "raise", "ValueError", "(", "\"LAL has no unit corresponding to %r\"", "%", "base", ")", "lunit", "*=", "lalbase", "**", "power", "return", "lunit" ]
sets the current subscript and keeps a counter of how ofter a particular subscript has been executed this information is usefull when implementing a status update or plotting functions that depend on which subscript is being executed
def _set_current_subscript ( self , active ) : current_subscript = self . sender ( ) if active : for subscript_name in list ( self . _current_subscript_stage [ 'subscript_exec_count' ] . keys ( ) ) : if subscript_name == current_subscript . name : self . _current_subscript_stage [ 'subscript_exec_count' ] [ subscript_name ] += 1 self . _current_subscript_stage [ 'current_subscript' ] = current_subscript else : self . _current_subscript_stage [ 'current_subscript' ] = current_subscript for subscript_name in list ( self . _current_subscript_stage [ 'subscript_exec_count' ] . keys ( ) ) : # calculate the average duration to execute the subscript if subscript_name == current_subscript . name : duration = current_subscript . end_time - current_subscript . start_time if subscript_name in self . _current_subscript_stage [ 'subscript_exec_duration' ] : duration_old = self . _current_subscript_stage [ 'subscript_exec_duration' ] [ subscript_name ] else : duration_old = datetime . timedelta ( 0 ) exec_count = self . _current_subscript_stage [ 'subscript_exec_count' ] [ subscript_name ] duration_new = ( duration_old * ( exec_count - 1 ) + duration ) self . _current_subscript_stage [ 'subscript_exec_duration' ] [ subscript_name ] = ( duration_old * ( exec_count - 1 ) + duration ) / exec_count
5,793
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/scripts.py#L147-L185
[ "def", "dry_static_energy", "(", "heights", ",", "temperature", ")", ":", "return", "(", "mpconsts", ".", "g", "*", "heights", "+", "mpconsts", ".", "Cp_d", "*", "temperature", ")", ".", "to", "(", "'kJ/kg'", ")" ]
estimates the time remaining until script is finished
def remaining_time ( self ) : elapsed_time = ( datetime . datetime . now ( ) - self . start_time ) . total_seconds ( ) # safety to avoid devision by zero if self . progress == 0 : self . progress = 1 estimated_total_time = 100. / self . progress * elapsed_time return datetime . timedelta ( seconds = max ( estimated_total_time - elapsed_time , 0 ) )
5,794
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/scripts.py#L340-L351
[ "def", "DiffAnyArrays", "(", "self", ",", "oldObj", ",", "newObj", ",", "isElementLinks", ")", ":", "if", "len", "(", "oldObj", ")", "!=", "len", "(", "newObj", ")", ":", "__Log__", ".", "debug", "(", "'DiffAnyArrays: Array lengths do not match. %d != %d'", "%", "(", "len", "(", "oldObj", ")", ",", "len", "(", "newObj", ")", ")", ")", "return", "False", "for", "i", ",", "j", "in", "zip", "(", "oldObj", ",", "newObj", ")", ":", "if", "not", "self", ".", "DiffAnyObjects", "(", "i", ",", "j", ",", "isElementLinks", ")", ":", "__Log__", ".", "debug", "(", "'DiffAnyArrays: One of the elements do not match.'", ")", "return", "False", "return", "True" ]
stops itself and all the subscript
def stop ( self ) : for subscript in list ( self . scripts . values ( ) ) : subscript . stop ( ) print ( ( '--- stopping: ' , self . name ) ) self . _abort = True
5,795
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/scripts.py#L447-L454
[ "def", "anonymous_login", "(", "self", ")", ":", "self", ".", "_LOG", ".", "debug", "(", "\"Attempting Anonymous login\"", ")", "self", ".", "_pre_login", "(", ")", "self", ".", "username", "=", "None", "self", ".", "login_key", "=", "None", "message", "=", "MsgProto", "(", "EMsg", ".", "ClientLogon", ")", "message", ".", "header", ".", "steamid", "=", "SteamID", "(", "type", "=", "'AnonUser'", ",", "universe", "=", "'Public'", ")", "message", ".", "body", ".", "protocol_version", "=", "65579", "self", ".", "send", "(", "message", ")", "resp", "=", "self", ".", "wait_msg", "(", "EMsg", ".", "ClientLogOnResponse", ",", "timeout", "=", "30", ")", "return", "EResult", "(", "resp", ".", "body", ".", "eresult", ")", "if", "resp", "else", "EResult", ".", "Fail" ]
wrapper to get the module for a script
def get_script_module ( script_information , package = 'pylabcontrol' , verbose = False ) : module , _ , _ , _ , _ , _ , _ = Script . get_script_information ( script_information = script_information , package = package , verbose = verbose ) return module
5,796
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/scripts.py#L1233-L1250
[ "def", "_get_port_speed_price_id", "(", "items", ",", "port_speed", ",", "no_public", ",", "location", ")", ":", "for", "item", "in", "items", ":", "if", "utils", ".", "lookup", "(", "item", ",", "'itemCategory'", ",", "'categoryCode'", ")", "!=", "'port_speed'", ":", "continue", "# Check for correct capacity and if the item matches private only", "if", "any", "(", "[", "int", "(", "utils", ".", "lookup", "(", "item", ",", "'capacity'", ")", ")", "!=", "port_speed", ",", "_is_private_port_speed_item", "(", "item", ")", "!=", "no_public", ",", "not", "_is_bonded", "(", "item", ")", "]", ")", ":", "continue", "for", "price", "in", "item", "[", "'prices'", "]", ":", "if", "not", "_matches_location", "(", "price", ",", "location", ")", ":", "continue", "return", "price", "[", "'id'", "]", "raise", "SoftLayer", ".", "SoftLayerError", "(", "\"Could not find valid price for port speed: '%s'\"", "%", "port_speed", ")" ]
create an copy of the script
def duplicate ( self ) : # get settings of script class_of_script = self . __class__ script_name = self . name script_instruments = self . instruments sub_scripts = self . scripts script_settings = self . settings log_function = self . log_function data_path = self . data_path #create a new instance of same script type class_creation_string = '' if script_instruments != { } : class_creation_string += ', instruments = script_instruments' if sub_scripts != { } : class_creation_string += ', scripts = sub_scripts' if script_settings != { } : class_creation_string += ', settings = script_settings' if log_function is not None : class_creation_string += ', log_function = log_function' if data_path is not None : class_creation_string += ', data_path = data_path' class_creation_string = 'class_of_script(name=script_name{:s})' . format ( class_creation_string ) # create instance script_instance = eval ( class_creation_string ) # copy some other properties that might be checked later for the duplicated script script_instance . data = deepcopy ( self . data ) script_instance . start_time = self . start_time script_instance . end_time = self . end_time script_instance . is_running = self . is_running return script_instance
5,797
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/scripts.py#L1252-L1292
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
plots the data contained in self . data which should be a dictionary or a deque of dictionaries for the latter use the last entry
def plot_validate ( self , figure_list ) : axes_list = self . get_axes_layout_validate ( figure_list ) self . _plot_validate ( axes_list )
5,798
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/scripts.py#L1379-L1386
[ "def", "_unbind_topics", "(", "self", ",", "topics", ")", ":", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "status", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "tracing", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "streaming", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "response", ")" ]
Sample k times out of alphabet how many different?
def uniqueof20 ( k , rep = 10000 ) : alphabet = 'ACDEFGHIKLMNPQRSTVWY' reps = [ len ( set ( random . choice ( alphabet ) for i in range ( k ) ) ) for j in range ( rep ) ] return sum ( reps ) / len ( reps )
5,799
https://github.com/etal/biofrills/blob/36684bb6c7632f96215e8b2b4ebc86640f331bcd/biofrills/stats/__init__.py#L6-L12
[ "def", "assure_volume", "(", "fnc", ")", ":", "@", "wraps", "(", "fnc", ")", "def", "_wrapped", "(", "self", ",", "volume", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "isinstance", "(", "volume", ",", "CloudBlockStorageVolume", ")", ":", "# Must be the ID", "volume", "=", "self", ".", "_manager", ".", "get", "(", "volume", ")", "return", "fnc", "(", "self", ",", "volume", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_wrapped" ]