idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
11,000
def updateJoin ( self ) : text = self . uiJoinSBTN . currentAction ( ) . text ( ) if text == 'AND' : joiner = QueryCompound . Op . And else : joiner = QueryCompound . Op . Or self . _containerWidget . setCurrentJoiner ( self . joiner ( ) )
Updates the joining method used by the system .
72
10
11,001
def is_chinese ( name ) : if not name : return False for ch in name : ordch = ord ( ch ) if not ( 0x3400 <= ordch <= 0x9fff ) and not ( 0x20000 <= ordch <= 0x2ceaf ) and not ( 0xf900 <= ordch <= ordch ) and not ( 0x2f800 <= ordch <= 0x2fa1f ) : return False return True
Check if a symbol is a Chinese character .
97
9
11,002
def pinyin ( char , variant = 'mandarin' , sep = ' ' , out = 'tones' ) : if len ( char ) > 1 : return sep . join ( [ pinyin ( c , variant = variant , sep = sep , out = out ) for c in char ] ) if not is_chinese ( char ) : return char if char in _cd . GBK : char = gbk2big5 ( char ) out_char = _cd . UNIHAN . get ( char , { variant : '?({0}' . format ( char ) } ) . get ( variant , '!({0})' . format ( char ) ) if out != 'tones' : out_char = '' . join ( [ tone_converter . get ( x , x ) for x in out_char ] ) return out_char
Retrieve Pinyin of a character .
185
9
11,003
def parse_baxter ( reading ) : initial = '' medial = '' final = '' tone = '' # determine environments inienv = True medienv = False finenv = False tonenv = False inichars = "pbmrtdnkgnsyhzl'x" chars = list ( reading ) for char in chars : # switch environments if char in 'jw' and not finenv : inienv , medienv , finenv , tonenv = False , True , False , False elif char not in inichars or finenv : if char in 'XH' : inienv , medienv , finenv , tonenv = False , False , False , True else : inienv , medienv , finenv , tonenv = False , False , True , False # fill in slots if inienv : initial += char if medienv : medial += char if finenv : final += char if tonenv : tone += char # post-parse tone if not tone and final [ - 1 ] in 'ptk' : tone = 'R' elif not tone : tone = 'P' # post-parse medial if 'j' not in medial and 'y' in initial : medial += 'j' # post-parse labial if final [ 0 ] in 'u' and 'w' not in medial : medial = 'w' + medial return initial , medial , final , tone
Parse a Baxter string and render it with all its contents namely initial medial final and tone .
302
19
11,004
def chars2gloss ( chars ) : out = [ ] chars = gbk2big5 ( chars ) for char in chars : tmp = [ ] if char in _cd . TLS : for entry in _cd . TLS [ char ] : baxter = _cd . TLS [ char ] [ entry ] [ 'UNIHAN_GLOSS' ] if baxter != '?' : tmp += [ baxter ] out += [ ',' . join ( tmp ) ] return out
Get the TLS basic gloss for a characters .
102
9
11,005
def baxter2ipa ( mch , segmented = False ) : out = mch if out [ - 1 ] in 'ptk' : out += 'R' elif out [ - 1 ] not in 'XHP' : out += 'P' for s , t in _cd . GY [ 'ipa' ] : out = out . replace ( s , t ) if segmented : return parse_chinese_morphemes ( out ) return out
Very simple aber convient - enough conversion from baxter MCH to IPA MCH . this is also more or less already implemented in MiddleChinese
102
29
11,006
def gbk2big5 ( chars ) : out = '' for char in chars : if char in _cd . GBK : out += _cd . BIG5 [ _cd . GBK . index ( char ) ] else : out += char return out
Convert from gbk format to big5 representation of chars .
54
14
11,007
def big52gbk ( chars ) : out = '' for char in chars : if char in _cd . BIG5 : out += _cd . GBK [ _cd . BIG5 . index ( char ) ] else : out += char return out
Convert from long chars to short chars .
52
9
11,008
def add_transform ( self , key , xslt ) : self . _remove_converter ( key ) self . _xsltLibrary [ key ] = xslt self . _add_converter ( key )
Add or update a transform .
49
6
11,009
def _refresh_converters ( self ) : self . _converters . clear ( ) return reduce ( lambda a , b : a and b , [ self . _add_converter ( k ) for k in list ( self . _xsltLibrary . keys ( ) ) ] , True )
Refresh all of the converters in the py4j library
66
13
11,010
def transform ( self , key , xml , * * kwargs ) : if key in self . _xsltLibrary and self . gateway_connected ( ) and key in self . _converters : return self . _converters [ key ] . transform ( xml , self . _parms ( * * kwargs ) ) return None
Transform the supplied XML using the transform identified by key
73
10
11,011
async def items ( self , * , dc = None , watch = None , consistency = None ) : response = await self . _api . get ( "/v1/query" , params = { "dc" : dc } ) return response . body
Provides a listing of all prepared queries
53
8
11,012
async def create ( self , query , * , dc = None ) : if "Token" in query : # in case of a full token object... query [ "Token" ] = extract_attr ( query [ "Token" ] , keys = [ "ID" ] ) response = await self . _api . post ( "/v1/query" , params = { "dc" : dc } , data = query ) return response . body
Creates a new prepared query
93
6
11,013
async def update ( self , query , * , dc = None ) : query_id = extract_attr ( query , keys = [ "ID" ] ) response = await self . _api . put ( "/v1/query" , query_id , params = { "dc" : dc } , data = query ) return response . status == 200
Updates existing prepared query
75
5
11,014
async def delete ( self , query , * , dc = None ) : query_id = extract_attr ( query , keys = [ "ID" ] ) response = await self . _api . delete ( "/v1/query" , query_id , params = { "dc" : dc } ) return response . status == 200
Delete existing prepared query
71
4
11,015
async def execute ( self , query , * , dc = None , near = None , limit = None , consistency = None ) : query_id = extract_attr ( query , keys = [ "ID" ] ) response = await self . _api . get ( "/v1/query/%s/execute" % query_id , params = { "dc" : dc , "near" : near , "limit" : limit } , consistency = consistency ) return response . body
Executes a prepared query
102
5
11,016
def load ( self ) : if self . _loaded : return rset = self . recordSet ( ) QApplication . setOverrideCursor ( Qt . WaitCursor ) self . loadRecords ( rset ) QApplication . restoreOverrideCursor ( )
Loads the records from the query set linked with this item .
54
13
11,017
def _tabulate ( rows , headers , spacing = 5 ) : if len ( rows ) == 0 : return "None\n" assert len ( rows [ 0 ] ) == len ( headers ) count = len ( rows [ 0 ] ) widths = [ 0 for _ in range ( count ) ] rows = [ headers ] + rows for row in rows : for index , field in enumerate ( row ) : if len ( str ( field ) ) > widths [ index ] : widths [ index ] = len ( str ( field ) ) output = "" for row in rows : for index , field in enumerate ( row ) : field = str ( field ) output += field + ( widths [ index ] - len ( field ) + spacing ) * " " output += "\n" return output
Prepare simple table with spacing based on content
167
9
11,018
def add_item ( self , item ) : if not ( isinstance ( item . name , basestring ) and isinstance ( item . description , basestring ) ) : raise TypeError ( "Name and description should be strings, are of type {} and {}" . format ( type ( item . name ) , type ( item . description ) ) ) if not ( isinstance ( item . flag_type , FlagType ) ) : raise TypeError ( "Flag type should be of type FlagType, is of {}" . format ( type ( item . flag_type ) ) ) if item . name not in self . _flags : if item . default is not None : if item . default is not False : item . description = item . description + " (default: %(default)s)" self . _flags [ item . name ] = item else : self . _flags [ item . name ] = item
Add single command line flag
192
5
11,019
def add_multiple ( self , flags ) : if not isinstance ( flags , list ) : raise TypeError ( "Expected list of flags, got object of type{}" . format ( type ( flags ) ) ) for flag in flags : if isinstance ( flag , Flag ) : self . add_item ( flag ) elif isinstance ( flag , tuple ) : try : item = Flag ( * flag ) self . add_item ( item ) except TypeError as e : raise TypeError ( "Invalid arguments to initialize a flag definition, expect ({0} [, {1}]) but got {3}" . format ( ", " . join ( Flag . REQUIRED_FIELDS ) , ", " . join ( Flag . OPTIONAL_FIELDS ) , flag ) )
Add multiple command line flags
167
5
11,020
def gotoNext ( self ) : scene = self . scene ( ) date = scene . currentDate ( ) # go forward a day if ( scene . currentMode ( ) == scene . Mode . Day ) : scene . setCurrentDate ( date . addDays ( 1 ) ) # go forward a week elif ( scene . currentMode ( ) == scene . Mode . Week ) : scene . setCurrentDate ( date . addDays ( 7 ) ) # go forward a month elif ( scene . currentMode ( ) == scene . Mode . Month ) : scene . setCurrentDate ( date . addMonths ( 1 ) )
Goes to the next date based on the current mode and date .
130
14
11,021
def zoomExtents ( self ) : rect = self . scene ( ) . visibleItemsBoundingRect ( ) vrect = self . viewportRect ( ) if rect . width ( ) : changed = False scene_rect = self . scene ( ) . sceneRect ( ) if scene_rect . width ( ) < rect . width ( ) : scene_rect . setWidth ( rect . width ( ) + 150 ) scene_rect . setX ( - scene_rect . width ( ) / 2.0 ) changed = True if scene_rect . height ( ) < rect . height ( ) : scene_rect . setHeight ( rect . height ( ) + 150 ) scene_rect . setY ( - scene_rect . height ( ) / 2.0 ) changed = True if changed : self . scene ( ) . setSceneRect ( scene_rect ) self . fitInView ( rect , Qt . KeepAspectRatio ) if not self . signalsBlocked ( ) : self . zoomAmountChanged . emit ( self . zoomAmount ( ) )
Fits all the nodes in the view .
221
9
11,022
def zipdir ( path , ziph , * * kwargs ) : str_arcroot = "" for k , v in kwargs . items ( ) : if k == 'arcroot' : str_arcroot = v for root , dirs , files in os . walk ( path ) : for file in files : str_arcfile = os . path . join ( root , file ) if len ( str_arcroot ) : str_arcname = str_arcroot . split ( '/' ) [ - 1 ] + str_arcfile . split ( str_arcroot ) [ 1 ] else : str_arcname = str_arcfile try : ziph . write ( str_arcfile , arcname = str_arcname ) except : print ( "Skipping %s" % str_arcfile )
Zip up a directory .
176
5
11,023
def zip_process ( * * kwargs ) : str_localPath = "" str_zipFileName = "" str_action = "zip" str_arcroot = "" for k , v in kwargs . items ( ) : if k == 'path' : str_localPath = v if k == 'action' : str_action = v if k == 'payloadFile' : str_zipFileName = v if k == 'arcroot' : str_arcroot = v if str_action == 'zip' : str_mode = 'w' str_zipFileName = '%s.zip' % uuid . uuid4 ( ) else : str_mode = 'r' try : ziphandler = zipfile . ZipFile ( str_zipFileName , str_mode , zipfile . ZIP_DEFLATED ) if str_mode == 'w' : if os . path . isdir ( str_localPath ) : zipdir ( str_localPath , ziphandler , arcroot = str_arcroot ) else : if len ( str_arcroot ) : str_arcname = str_arcroot . split ( '/' ) [ - 1 ] + str_localPath . split ( str_arcroot ) [ 1 ] else : str_arcname = str_localPath try : ziphandler . write ( str_localPath , arcname = str_arcname ) except : ziphandler . close ( ) os . remove ( str_zipFileName ) return { 'msg' : json . dumps ( { "msg" : "No file or directory found for '%s'" % str_localPath } ) , 'status' : False } if str_mode == 'r' : ziphandler . extractall ( str_localPath ) ziphandler . close ( ) str_msg = '%s operation successful' % str_action b_status = True except : str_msg = '%s operation failed' % str_action b_status = False return { 'msg' : str_msg , 'fileProcessed' : str_zipFileName , 'status' : b_status , 'path' : str_localPath , 'zipmode' : str_mode , 'filesize' : "{:,}" . format ( os . stat ( str_zipFileName ) . st_size ) , 'timestamp' : '%s' % datetime . datetime . now ( ) }
Process zip operations .
520
4
11,024
def base64_process ( * * kwargs ) : str_fileToSave = "" str_fileToRead = "" str_action = "encode" data = None for k , v in kwargs . items ( ) : if k == 'action' : str_action = v if k == 'payloadBytes' : data = v if k == 'payloadFile' : str_fileToRead = v if k == 'saveToFile' : str_fileToSave = v # if k == 'sourcePath': str_sourcePath = v if str_action == "encode" : # Encode the contents of the file at targetPath as ASCII for transmission if len ( str_fileToRead ) : with open ( str_fileToRead , 'rb' ) as f : data = f . read ( ) f . close ( ) data_b64 = base64 . b64encode ( data ) with open ( str_fileToSave , 'wb' ) as f : f . write ( data_b64 ) f . close ( ) return { 'msg' : 'Encode successful' , 'fileProcessed' : str_fileToSave , 'status' : True # 'encodedBytes': data_b64 } if str_action == "decode" : # if len(data) % 4: # not a multiple of 4, add padding: # data += '=' * (4 - len(data) % 4) # adding 3 padding = will never succumb to the TypeError and will always produce the same result. # https://gist.github.com/perrygeo/ee7c65bb1541ff6ac770 bytes_decoded = base64 . b64decode ( data + "===" ) with open ( str_fileToSave , 'wb' ) as f : f . write ( bytes_decoded ) f . close ( ) return { 'msg' : 'Decode successful' , 'fileProcessed' : str_fileToSave , 'status' : True # 'decodedBytes': bytes_decoded }
Process base64 file io
449
5
11,025
def storage_resolveBasedOnKey ( self , * args , * * kwargs ) : global Gd_internalvar d_msg = { 'action' : 'internalctl' , 'meta' : { 'var' : 'key2address' , 'compute' : '<key>' } } str_key = "" b_status = False for k , v in kwargs . items ( ) : if k == 'key' : str_key = v d_msg [ 'meta' ] [ 'key' ] = str_key # d_ret = self . pullPath_core ( d_msg = d_msg ) return { 'status' : b_status , 'path' : str_internalLocation }
Call the remote service and ask for the storage location based on the key .
159
15
11,026
def remoteLocation_resolveSimple ( self , d_remote ) : b_status = False str_remotePath = "" if 'path' in d_remote . keys ( ) : str_remotePath = d_remote [ 'path' ] b_status = True if 'key' in d_remote . keys ( ) : str_remotePath = d_remote [ 'key' ] b_status = True return { 'status' : b_status , 'path' : str_remotePath }
Resolve the remote path location by returning either the path or key parameter in the remote JSON record .
107
20
11,027
def remoteLocation_resolve ( self , d_remote ) : b_status = False str_remotePath = "" if 'path' in d_remote . keys ( ) : str_remotePath = d_remote [ 'path' ] b_status = True if 'key' in d_remote . keys ( ) : d_ret = self . storage_resolveBasedOnKey ( key = d_remote [ 'key' ] ) if d_ret [ 'status' ] : b_status = True str_remotePath = d_ret [ 'path' ] return { 'status' : b_status , 'path' : str_remotePath }
Resolve the remote path location
141
6
11,028
def path_localLocationCheck ( self , d_msg , * * kwargs ) : b_pull = False d_meta = d_msg [ 'meta' ] if 'do' in d_meta : if d_meta [ 'do' ] == 'pull' : b_pull = True if 'local' in d_meta : d_local = d_meta [ 'local' ] if 'to' in d_meta : d_local = d_meta [ 'to' ] str_localPathFull = d_local [ 'path' ] str_localPath , str_unpack = os . path . split ( str_localPathFull ) str_msg = '' str_checkedDir = str_localPathFull b_isFile = os . path . isfile ( str_localPathFull ) b_isDir = os . path . isdir ( str_localPathFull ) b_exists = os . path . exists ( str_localPathFull ) if 'pull' in d_msg [ 'action' ] or b_pull : # If we are "pulling" data to local, then we assume the local # directory does not exist. If it does, and if 'createDir' is 'true', # we remove the localPath and re-create it, thus assuring it will # only contain the info pulled from the remote source. # If 'writeInExisting' is 'true', then execution continues, but # may fail if the pulled target exists in the localPath. str_checkedDir = str_localPath b_isFile = os . path . isfile ( str_localPath ) b_isDir = os . path . isdir ( str_localPath ) b_exists = os . path . exists ( str_localPath ) if 'createDir' in d_local . keys ( ) : if d_local [ 'createDir' ] : if os . path . isdir ( str_localPathFull ) : self . dp . qprint ( 'Removing local path %s...' % str_localPathFull ) shutil . rmtree ( str_localPathFull ) str_msg = 'Removed existing local path... ' self . dp . qprint ( 'Creating empty local path %s...' % str_localPathFull ) os . makedirs ( str_localPathFull ) b_exists = True str_msg += 'Created new local path' else : str_msg = 'local path already exists!' if 'writeInExisting' in d_local . keys ( ) : if not d_local [ 'writeInExisting' ] : if b_isDir : b_exists = False else : if b_isDir : b_exists = False d_ret = { 'action' : d_msg [ 'action' ] , 'dir' : str_checkedDir , 'status' : b_exists , 'isfile' : b_isFile , 'isdir' : b_isDir , 'msg' : str_msg } return { 'check' : d_ret , 'status' : d_ret [ 'status' ] , 'timestamp' : '%s' % datetime . datetime . now ( ) }
Check if a path exists on the local filesystem
698
9
11,029
def find_executable ( name ) : if sys . platform . startswith ( 'win' ) or os . name . startswith ( 'os2' ) : name = name + '.exe' executable_path = find_file ( name , deep = True ) return executable_path
Returns the path of an executable file .
62
8
11,030
def readCorpus ( location ) : print ( "Reading corpus from file(s)..." ) corpus = '' if '.txt' in location : with open ( location ) as fp : corpus = fp . read ( ) else : dirFiles = listdir ( location ) nFiles = len ( dirFiles ) for f in tqdm ( dirFiles ) : with open ( location + "/" + f ) as fp : corpus += fp . read ( ) return corpus
Returns the contents of a file or a group of files as a string .
100
15
11,031
def validate ( data ) : text = data . get ( 'text' ) if not isinstance ( text , _string_types ) or len ( text ) == 0 : raise ValueError ( 'text field is required and should not be empty' ) if 'markdown' in data and not type ( data [ 'markdown' ] ) is bool : raise ValueError ( 'markdown field should be bool' ) if 'attachments' in data : if not isinstance ( data [ 'attachments' ] , ( list , tuple ) ) : raise ValueError ( 'attachments field should be list or tuple' ) for attachment in data [ 'attachments' ] : if 'text' not in attachment and 'title' not in attachment : raise ValueError ( 'text or title is required in attachment' ) return True
Validates incoming data
172
4
11,032
def send ( url , data ) : validate ( data ) return requests . post ( url , json = data )
Sends an incoming message
23
5
11,033
def switch_or_run ( cmd , venv_name = None ) : if cmd : return _run ( venv_name , cmd ) inenv = InenvManager ( ) if not os . getenv ( INENV_ENV_VAR ) : activator_warn ( inenv ) return else : venv = inenv . get_prepped_venv ( venv_name ) inenv . clear_extra_source_file ( ) inenv . write_extra_source_file ( "source {}" . format ( venv . activate_shell_file ) ) inenv . write_extra_source_file ( override_envars_and_deactivate ( inenv . get_envvars ( venv_name ) ) ) if autojump_enabled ( ) : directory = inenv . guess_contents_dir ( venv_name ) inenv . write_extra_source_file ( 'cd {}' . format ( directory ) ) click . secho ( "Jumping to {}" . format ( directory ) , fg = 'green' ) sys . exit ( EVAL_EXIT_CODE )
Switch or run in this env
247
6
11,034
def rm ( venv_name ) : inenv = InenvManager ( ) venv = inenv . get_venv ( venv_name ) click . confirm ( "Delete dir {}" . format ( venv . path ) ) shutil . rmtree ( venv . path )
Removes the venv by name
63
7
11,035
def root ( venv_name ) : inenv = InenvManager ( ) inenv . get_venv ( venv_name ) venv = inenv . registered_venvs [ venv_name ] click . secho ( venv [ 'root' ] )
Print the root directory of a virtualenv
59
8
11,036
def init ( venv_name ) : inenv = InenvManager ( ) inenv . get_prepped_venv ( venv_name , skip_cached = False ) if not os . getenv ( INENV_ENV_VAR ) : activator_warn ( inenv ) click . secho ( "Your venv is ready. Enjoy!" , fg = 'green' )
Initializez a virtualenv
88
6
11,037
def autojump ( ) : currently_enabled = autojump_enabled ( ) toggle_autojump ( ) if not currently_enabled : click . secho ( "Autojump enabled" , fg = 'green' ) else : click . secho ( "Autojump disabled" , fg = 'red' )
Initializes a virtualenv
67
5
11,038
def clear ( self ) : self . _minimum = None self . _maximum = None self . _step = None self . _notches = None self . _format = None self . _formatter = None self . _padEnd = 0 self . _padStart = 0
Clears all the cached information about this ruler .
58
10
11,039
def keyPressEvent ( self , event ) : if event . key ( ) in ( Qt . Key_Enter , Qt . Key_Return ) : self . queryEntered . emit ( self . query ( ) ) super ( XOrbQuickFilterWidget , self ) . keyPressEvent ( event )
Listens for the enter event to check if the query is setup .
63
14
11,040
def rebuild ( self ) : table = self . tableType ( ) form = nativestring ( self . filterFormat ( ) ) if not table and form : if self . layout ( ) . count ( ) == 0 : self . layout ( ) . addWidget ( QLabel ( form , self ) ) else : self . layout ( ) . itemAt ( 0 ) . widget ( ) . setText ( form ) return elif not form : return for child in self . findChildren ( QWidget ) : child . close ( ) child . setParent ( None ) child . deleteLater ( ) self . setUpdatesEnabled ( False ) schema = table . schema ( ) vlayout = self . layout ( ) for i in range ( vlayout . count ( ) ) : vlayout . takeAt ( 0 ) self . _plugins = [ ] for line in form . split ( '\n' ) : row = QHBoxLayout ( ) row . setContentsMargins ( 0 , 0 , 0 , 0 ) row . setSpacing ( 0 ) for label , lookup in FORMAT_SPLITTER . findall ( line ) : # create the label lbl = QLabel ( label , self ) row . addWidget ( lbl ) # create the query plugin opts = lookup . split ( ':' ) if len ( opts ) == 1 : opts . append ( 'is' ) column = schema . column ( opts [ 0 ] ) if not column : continue plugin = self . pluginFactory ( ) . plugin ( column ) if not plugin : continue editor = plugin . createEditor ( self , column , opts [ 1 ] , None ) if editor : editor . setObjectName ( opts [ 0 ] ) row . addWidget ( editor ) self . _plugins . append ( ( opts [ 0 ] , opts [ 1 ] , plugin , editor ) ) row . addStretch ( 1 ) vlayout . addLayout ( row ) self . setUpdatesEnabled ( True ) self . adjustSize ( )
Rebuilds the data associated with this filter widget .
425
11
11,041
def showMenu ( self , point ) : menu = QMenu ( self ) acts = { } acts [ 'edit' ] = menu . addAction ( 'Edit quick filter...' ) trigger = menu . exec_ ( self . mapToGlobal ( point ) ) if trigger == acts [ 'edit' ] : text , accepted = XTextEdit . getText ( self . window ( ) , 'Edit Format' , 'Format:' , self . filterFormat ( ) , wrapped = False ) if accepted : self . setFilterFormat ( text )
Displays the menu for this filter widget .
113
9
11,042
def set_nonblock ( fd ) : # type: (int) -> None fcntl . fcntl ( fd , fcntl . F_SETFL , fcntl . fcntl ( fd , fcntl . F_GETFL ) | os . O_NONBLOCK )
Set the given file descriptor to non - blocking mode .
74
11
11,043
def read ( self ) : # type: () -> str s = self . _buf + self . _raw . read ( ) self . _buf = '' return s
Read data from the stream .
35
6
11,044
def on_open ( self , ws ) : def keep_alive ( interval ) : while True : time . sleep ( interval ) self . ping ( ) start_new_thread ( keep_alive , ( self . keep_alive_interval , ) )
Websocket on_open event handler
58
8
11,045
def on_message ( self , ws , message ) : try : data = json . loads ( message ) except Exception : self . _set_error ( message , "decode message failed" ) else : self . _inbox . put ( RTMMessage ( data ) )
Websocket on_message event handler
59
8
11,046
def send ( self , message ) : if "call_id" not in message : message [ "call_id" ] = self . gen_call_id ( ) self . _ws . send ( message . to_json ( ) )
Sends a RTMMessage Should be called after starting the loop
51
13
11,047
def get_message ( self , block = False , timeout = None ) : try : message = self . _inbox . get ( block = block , timeout = timeout ) return message except Exception : return None
Removes and returns a RTMMessage from self . _inbox
43
14
11,048
def get_error ( self , block = False , timeout = None ) : try : error = self . _errors . get ( block = block , timeout = timeout ) return error except Exception : return None
Removes and returns an error from self . _errors
42
11
11,049
def createEditor ( self , parent , column , operator , value ) : editor = super ( EnumPlugin , self ) . createEditor ( parent , column , operator , value ) editor . setEnum ( column . enum ( ) ) if operator in ( 'contains' , 'does not contain' ) : editor . setCheckable ( True ) editor . setCurrentValue ( value ) return editor
Creates a new editor for the system .
83
9
11,050
def setupQuery ( self , query , op , editor ) : if editor is not None : value = editor . currentRecord ( ) if value is None : return False return super ( ForeignKeyPlugin , self ) . setupQuery ( query , op , editor )
Sets up the query for this editor .
53
9
11,051
def color_gen ( colormap = 'viridis' , key = None , n = 15 ) : if colormap in dir ( bpal ) : palette = getattr ( bpal , colormap ) if isinstance ( palette , dict ) : if key is None : key = list ( palette . keys ( ) ) [ 0 ] palette = palette [ key ] elif callable ( palette ) : palette = palette ( n ) else : raise TypeError ( "pallette must be a bokeh palette name or a sequence of color hex values." ) elif isinstance ( colormap , ( list , tuple ) ) : palette = colormap else : raise TypeError ( "pallette must be a bokeh palette name or a sequence of color hex values." ) yield from itertools . cycle ( palette )
Color generator for Bokeh plots
176
7
11,052
def filters ( filter_directory = None , update = False , fmt = 'table' , * * kwargs ) : if filter_directory is None : filter_directory = resource_filename ( 'svo_filters' , 'data/filters/' ) # Get the pickle path and make sure file exists p_path = os . path . join ( filter_directory , 'filter_list.p' ) updated = False if not os . path . isfile ( p_path ) : os . system ( 'touch {}' . format ( p_path ) ) if update : print ( 'Loading filters into table...' ) # Get all the filters (except the pickle) files = glob ( filter_directory + '*' ) files = [ f for f in files if not f . endswith ( '.p' ) ] bands = [ os . path . basename ( b ) for b in files ] tables = [ ] for band in bands : # Load the filter band = band . replace ( '.txt' , '' ) filt = Filter ( band , * * kwargs ) filt . Band = band # Put metadata into table with correct dtypes info = filt . info ( True ) vals = [ float ( i ) if i . replace ( '.' , '' ) . replace ( '-' , '' ) . replace ( '+' , '' ) . isnumeric ( ) else i for i in info [ 'Values' ] ] dtypes = np . array ( [ type ( i ) for i in vals ] ) table = at . Table ( np . array ( [ vals ] ) , names = info [ 'Attributes' ] , dtype = dtypes ) tables . append ( table ) del filt , info , table # Write to the pickle with open ( p_path , 'wb' ) as file : pickle . dump ( at . vstack ( tables ) , file ) # Load the saved pickle data = { } if os . path . isfile ( p_path ) : with open ( p_path , 'rb' ) as file : data = pickle . load ( file ) # Return the data if data : if fmt == 'dict' : data = { r [ 0 ] : { k : r [ k ] . value if hasattr ( r [ k ] , 'unit' ) else r [ k ] for k in data . keys ( ) [ 1 : ] } for r in data } else : # Add Band as index data . add_index ( 'Band' ) return data # Or try to generate it once else : if not updated : updated = True filters ( update = True ) else : print ( 'No filters found in' , filter_directory )
Get a list of the available filters
578
7
11,053
def rebin_spec ( spec , wavnew , oversamp = 100 , plot = False ) : wave , flux = spec nlam = len ( wave ) x0 = np . arange ( nlam , dtype = float ) x0int = np . arange ( ( nlam - 1. ) * oversamp + 1. , dtype = float ) / oversamp w0int = np . interp ( x0int , x0 , wave ) spec0int = np . interp ( w0int , wave , flux ) / oversamp # Set up the bin edges for down-binning maxdiffw1 = np . diff ( wavnew ) . max ( ) w1bins = np . concatenate ( ( [ wavnew [ 0 ] - maxdiffw1 ] , .5 * ( wavnew [ 1 : : ] + wavnew [ 0 : - 1 ] ) , [ wavnew [ - 1 ] + maxdiffw1 ] ) ) # Bin down the interpolated spectrum: w1bins = np . sort ( w1bins ) nbins = len ( w1bins ) - 1 specnew = np . zeros ( nbins ) inds2 = [ [ w0int . searchsorted ( w1bins [ ii ] , side = 'left' ) , w0int . searchsorted ( w1bins [ ii + 1 ] , side = 'left' ) ] for ii in range ( nbins ) ] for ii in range ( nbins ) : specnew [ ii ] = np . sum ( spec0int [ inds2 [ ii ] [ 0 ] : inds2 [ ii ] [ 1 ] ] ) return specnew
Rebin a spectrum to a new wavelength array while preserving the total flux
372
14
11,054
def bin ( self , n_bins = 1 , pixels_per_bin = None , wave_min = None , wave_max = None ) : # Get wavelength limits if wave_min is not None : self . wave_min = wave_min if wave_max is not None : self . wave_max = wave_max # Trim the wavelength by the given min and max raw_wave = self . raw [ 0 ] whr = np . logical_and ( raw_wave * q . AA >= self . wave_min , raw_wave * q . AA <= self . wave_max ) self . wave = ( raw_wave [ whr ] * q . AA ) . to ( self . wave_units ) self . throughput = self . raw [ 1 ] [ whr ] print ( 'Bandpass trimmed to' , '{} - {}' . format ( self . wave_min , self . wave_max ) ) # Calculate the number of bins and channels pts = len ( self . wave ) if isinstance ( pixels_per_bin , int ) : self . pixels_per_bin = pixels_per_bin self . n_bins = int ( pts / self . pixels_per_bin ) elif isinstance ( n_bins , int ) : self . n_bins = n_bins self . pixels_per_bin = int ( pts / self . n_bins ) else : raise ValueError ( "Please specify 'n_bins' OR 'pixels_per_bin' as integers." ) print ( '{} bins of {} pixels each.' . format ( self . n_bins , self . pixels_per_bin ) ) # Trim throughput edges so that there are an integer number of bins new_len = self . n_bins * self . pixels_per_bin start = ( pts - new_len ) // 2 self . wave = self . wave [ start : new_len + start ] . reshape ( self . n_bins , self . pixels_per_bin ) self . throughput = self . throughput [ start : new_len + start ] . reshape ( self . n_bins , self . pixels_per_bin )
Break the filter up into bins and apply a throughput to each bin useful for G141 G102 and other grisms
476
23
11,055
def centers ( self ) : # Get the bin centers w_cen = np . nanmean ( self . wave . value , axis = 1 ) f_cen = np . nanmean ( self . throughput , axis = 1 ) return np . asarray ( [ w_cen , f_cen ] )
A getter for the wavelength bin centers and average fluxes
67
12
11,056
def flux_units ( self , units ) : # Check that the units are valid dtypes = ( q . core . PrefixUnit , q . quantity . Quantity , q . core . CompositeUnit ) if not isinstance ( units , dtypes ) : raise ValueError ( units , "units not understood." ) # Check that the units changed if units != self . flux_units : # Convert to new units sfd = q . spectral_density ( self . wave_eff ) self . zp = self . zp . to ( units , equivalencies = sfd ) # Store new units self . _flux_units = units
A setter for the flux units
133
7
11,057
def info ( self , fetch = False ) : # Get the info from the class tp = ( int , bytes , bool , str , float , tuple , list , np . ndarray ) info = [ [ k , str ( v ) ] for k , v in vars ( self ) . items ( ) if isinstance ( v , tp ) and k not in [ 'rsr' , 'raw' , 'centers' ] and not k . startswith ( '_' ) ] # Make the table table = at . Table ( np . asarray ( info ) . reshape ( len ( info ) , 2 ) , names = [ 'Attributes' , 'Values' ] ) # Sort and print table . sort ( 'Attributes' ) if fetch : return table else : table . pprint ( max_width = - 1 , max_lines = - 1 , align = [ '>' , '<' ] )
Print a table of info about the current filter
198
9
11,058
def load_TopHat ( self , wave_min , wave_max , pixels_per_bin = 100 ) : # Get min, max, effective wavelengths and width self . pixels_per_bin = pixels_per_bin self . n_bins = 1 self . _wave_units = q . AA wave_min = wave_min . to ( self . wave_units ) wave_max = wave_max . to ( self . wave_units ) # Create the RSR curve self . _wave = np . linspace ( wave_min , wave_max , pixels_per_bin ) self . _throughput = np . ones_like ( self . wave ) self . raw = np . array ( [ self . wave . value , self . throughput ] ) # Calculate the effective wavelength wave_eff = ( ( wave_min + wave_max ) / 2. ) . value width = ( wave_max - wave_min ) . value # Add the attributes self . path = '' self . refs = '' self . Band = 'Top Hat' self . CalibrationReference = '' self . FWHM = width self . Facility = '-' self . FilterProfileService = '-' self . MagSys = '-' self . PhotCalID = '' self . PhotSystem = '' self . ProfileReference = '' self . WavelengthMin = wave_min . value self . WavelengthMax = wave_max . value self . WavelengthCen = wave_eff self . WavelengthEff = wave_eff self . WavelengthMean = wave_eff self . WavelengthPeak = wave_eff self . WavelengthPhot = wave_eff self . WavelengthPivot = wave_eff self . WavelengthUCD = '' self . WidthEff = width self . ZeroPoint = 0 self . ZeroPointType = '' self . ZeroPointUnit = 'Jy' self . filterID = 'Top Hat'
Loads a top hat filter given wavelength min and max values
410
12
11,059
def overlap ( self , spectrum ) : swave = self . wave [ np . where ( self . throughput != 0 ) ] s1 , s2 = swave . min ( ) , swave . max ( ) owave = spectrum [ 0 ] o1 , o2 = owave . min ( ) , owave . max ( ) if ( s1 >= o1 and s2 <= o2 ) : ans = 'full' elif ( s2 < o1 ) or ( o2 < s1 ) : ans = 'none' else : ans = 'partial' return ans
Tests for overlap of this filter with a spectrum
123
10
11,060
def plot ( self , fig = None , draw = True ) : COLORS = color_gen ( 'Category10' ) # Make the figure if fig is None : xlab = 'Wavelength [{}]' . format ( self . wave_units ) ylab = 'Throughput' title = self . filterID fig = figure ( title = title , x_axis_label = xlab , y_axis_label = ylab ) # Plot the raw curve fig . line ( ( self . raw [ 0 ] * q . AA ) . to ( self . wave_units ) , self . raw [ 1 ] , alpha = 0.1 , line_width = 8 , color = 'black' ) # Plot each with bin centers for x , y in self . rsr : fig . line ( x , y , color = next ( COLORS ) , line_width = 2 ) fig . circle ( * self . centers , size = 8 , color = 'black' ) if draw : show ( fig ) else : return fig
Plot the filter
217
3
11,061
def throughput ( self , points ) : # Test shape if not points . shape == self . wave . shape : raise ValueError ( "Throughput and wavelength must be same shape." ) self . _throughput = points
A setter for the throughput
45
6
11,062
def wave ( self , wavelength ) : # Test units if not isinstance ( wavelength , q . quantity . Quantity ) : raise ValueError ( "Wavelength must be in length units." ) self . _wave = wavelength self . wave_units = wavelength . unit
A setter for the wavelength
54
6
11,063
def wave_units ( self , units ) : # Make sure it's length units if not units . is_equivalent ( q . m ) : raise ValueError ( units , ": New wavelength units must be a length." ) # Update the units self . _wave_units = units # Update all the wavelength values self . _wave = self . wave . to ( self . wave_units ) . round ( 5 ) self . wave_min = self . wave_min . to ( self . wave_units ) . round ( 5 ) self . wave_max = self . wave_max . to ( self . wave_units ) . round ( 5 ) self . wave_eff = self . wave_eff . to ( self . wave_units ) . round ( 5 ) self . wave_center = self . wave_center . to ( self . wave_units ) . round ( 5 ) self . wave_mean = self . wave_mean . to ( self . wave_units ) . round ( 5 ) self . wave_peak = self . wave_peak . to ( self . wave_units ) . round ( 5 ) self . wave_phot = self . wave_phot . to ( self . wave_units ) . round ( 5 ) self . wave_pivot = self . wave_pivot . to ( self . wave_units ) . round ( 5 ) self . width_eff = self . width_eff . to ( self . wave_units ) . round ( 5 ) self . fwhm = self . fwhm . to ( self . wave_units ) . round ( 5 )
A setter for the wavelength units
340
7
11,064
def clear ( self ) : # clear the actions from this widget for act in self . actions ( ) : act . setParent ( None ) act . deleteLater ( ) # clear the labels from this widget for lbl in self . actionLabels ( ) : lbl . close ( ) lbl . deleteLater ( )
Clears out all the actions and items from this toolbar .
67
12
11,065
def resizeToMinimum ( self ) : offset = self . padding ( ) min_size = self . minimumPixmapSize ( ) if self . position ( ) in ( XDockToolbar . Position . East , XDockToolbar . Position . West ) : self . resize ( min_size . width ( ) + offset , self . height ( ) ) elif self . position ( ) in ( XDockToolbar . Position . North , XDockToolbar . Position . South ) : self . resize ( self . width ( ) , min_size . height ( ) + offset )
Resizes the dock toolbar to the minimum sizes .
123
10
11,066
def unholdAction ( self ) : self . _actionHeld = False point = self . mapFromGlobal ( QCursor . pos ( ) ) self . setCurrentAction ( self . actionAt ( point ) )
Unholds the action from being blocked on the leave event .
45
13
11,067
def apply ( self ) : font = self . value ( 'font' ) try : font . setPointSize ( self . value ( 'fontSize' ) ) # errors in linux for some reason except TypeError : pass palette = self . value ( 'colorSet' ) . palette ( ) if ( unwrapVariant ( QApplication . instance ( ) . property ( 'useScheme' ) ) ) : QApplication . instance ( ) . setFont ( font ) QApplication . instance ( ) . setPalette ( palette ) # hack to support MDI Areas for widget in QApplication . topLevelWidgets ( ) : for area in widget . findChildren ( QMdiArea ) : area . setPalette ( palette ) else : logger . debug ( 'The application doesnt have the useScheme property.' )
Applies the scheme to the current application .
172
9
11,068
def reset ( self ) : self . setValue ( 'colorSet' , XPaletteColorSet ( ) ) self . setValue ( 'font' , QApplication . font ( ) ) self . setValue ( 'fontSize' , QApplication . font ( ) . pointSize ( ) )
Resets the values to the current application information .
62
10
11,069
def pickAttachment ( self ) : filename = QFileDialog . getOpenFileName ( self . window ( ) , 'Select Attachment' , '' , 'All Files (*.*)' ) if type ( filename ) == tuple : filename = nativestring ( filename [ 0 ] ) filename = nativestring ( filename ) if filename : self . addAttachment ( os . path . basename ( filename ) , filename )
Prompts the user to select an attachment to add to this edit .
90
15
11,070
def resizeToContents ( self ) : if self . _toolbar . isVisible ( ) : doc = self . document ( ) h = doc . documentLayout ( ) . documentSize ( ) . height ( ) offset = 34 # update the attachments edit edit = self . _attachmentsEdit if self . _attachments : edit . move ( 2 , self . height ( ) - edit . height ( ) - 31 ) edit . setTags ( sorted ( self . _attachments . keys ( ) ) ) edit . show ( ) offset = 34 + edit . height ( ) else : edit . hide ( ) offset = 34 self . setFixedHeight ( h + offset ) self . _toolbar . move ( 2 , self . height ( ) - 32 ) else : super ( XCommentEdit , self ) . resizeToContents ( )
Resizes this toolbar based on the contents of its text .
174
12
11,071
async def configuration ( self , * , dc = None , consistency = None ) : response = await self . _api . get ( "/v1/operator/raft/configuration" , params = { "dc" : dc } , consistency = consistency ) return response . body
Inspects the Raft configuration
58
7
11,072
async def peer_delete ( self , * , dc = None , address ) : address = extract_attr ( address , keys = [ "Address" ] ) params = { "dc" : dc , "address" : address } response = await self . _api . delete ( "/v1/operator/raft/peer" , params = params ) return response . status < 400
Remove the server with given address from the Raft configuration
80
11
11,073
def autoLayout ( self ) : try : direction = self . currentSlide ( ) . scene ( ) . direction ( ) except AttributeError : direction = QtGui . QBoxLayout . TopToBottom size = self . size ( ) self . _slideshow . resize ( size ) prev = self . _previousButton next = self . _nextButton if direction == QtGui . QBoxLayout . BottomToTop : y = 9 else : y = size . height ( ) - prev . height ( ) - 9 prev . move ( 9 , y ) next . move ( size . width ( ) - next . width ( ) - 9 , y ) # update the layout for the slides for i in range ( self . _slideshow . count ( ) ) : widget = self . _slideshow . widget ( i ) widget . scene ( ) . autoLayout ( size )
Automatically lays out the contents for this widget .
185
10
11,074
def goForward ( self ) : if self . _slideshow . currentIndex ( ) == self . _slideshow . count ( ) - 1 : self . finished . emit ( ) else : self . _slideshow . slideInNext ( )
Moves to the next slide or finishes the walkthrough .
51
12
11,075
def updateUi ( self ) : index = self . _slideshow . currentIndex ( ) count = self . _slideshow . count ( ) self . _previousButton . setVisible ( index != 0 ) self . _nextButton . setText ( 'Finish' if index == count - 1 else 'Next' ) self . autoLayout ( )
Updates the interface to show the selection buttons .
75
10
11,076
def parse_unit ( prop , dictionary , dt = None ) : # add the observation's time try : dt = timezone . parse_datetime ( dictionary . get ( 'date_time' ) ) except TypeError : dt = None # 'prop' is a stub of the property's attribute key, so search for matches matches = [ k for k in dictionary . keys ( ) if prop in k ] try : value = dictionary [ matches [ 0 ] ] unit = re . search ( r' \(([^)]+)\)' , matches [ 0 ] ) except IndexError : # No matches: fail out return None # Sometimes we get a list of values (e.g. waves) if ';' in value : # Ignore empty values values = [ val for val in value . split ( ';' ) if val != '' ] if unit : return [ Observation ( v , unit . group ( 1 ) , dt ) for v in values ] else : return values # Sometimes there's no value! Sometimes there's no unit! if not value or not unit : return value or None return Observation ( value , unit . group ( 1 ) , dt )
Do a fuzzy match for prop in the dictionary taking into account unit suffix .
247
15
11,077
def clear ( self ) : for i in range ( self . count ( ) ) : widget = self . widget ( i ) if widget is not None : widget . close ( ) widget . setParent ( None ) widget . deleteLater ( )
Clears all the container for this query widget .
50
10
11,078
def cleanupContainers ( self ) : for i in range ( self . count ( ) - 1 , self . currentIndex ( ) , - 1 ) : widget = self . widget ( i ) widget . close ( ) widget . setParent ( None ) widget . deleteLater ( )
Cleans up all containers to the right of the current one .
58
13
11,079
def exitContainer ( self ) : try : entry = self . _compoundStack . pop ( ) except IndexError : return container = self . currentContainer ( ) entry . setQuery ( container . query ( ) ) self . slideInPrev ( )
Removes the current query container .
52
7
11,080
def rebuild ( self ) : self . setUpdatesEnabled ( False ) self . blockSignals ( True ) # clear out all the subwidgets for this widget for child in self . findChildren ( QObject ) : child . setParent ( None ) child . deleteLater ( ) # load up all the interface for this widget schema = self . schema ( ) if ( schema ) : self . setEnabled ( True ) uifile = self . uiFile ( ) # load a user defined file if ( uifile ) : projexui . loadUi ( '' , self , uifile ) for widget in self . findChildren ( XOrbColumnEdit ) : columnName = widget . columnName ( ) column = schema . column ( columnName ) if ( column ) : widget . setColumn ( column ) else : logger . debug ( '%s is not a valid column of %s' % ( columnName , schema . name ( ) ) ) # dynamically load files else : layout = QFormLayout ( ) layout . setContentsMargins ( 0 , 0 , 0 , 0 ) columns = schema . columns ( ) columns . sort ( key = lambda x : x . displayName ( ) ) record = self . record ( ) for column in columns : # ignore protected columns if ( column . name ( ) . startswith ( '_' ) ) : continue label = column . displayName ( ) coltype = column . columnType ( ) name = column . name ( ) # create the column edit widget widget = XOrbColumnEdit ( self ) widget . setObjectName ( 'ui_' + name ) widget . setColumnName ( name ) widget . setColumnType ( coltype ) widget . setColumn ( column ) layout . addRow ( QLabel ( label , self ) , widget ) self . setLayout ( layout ) self . adjustSize ( ) self . setWindowTitle ( 'Edit %s' % schema . name ( ) ) else : self . setEnabled ( False ) self . setUpdatesEnabled ( True ) self . blockSignals ( False )
Rebuilds the interface for this widget based on the current model .
438
14
11,081
def save ( self ) : schema = self . schema ( ) if ( not schema ) : self . saved . emit ( ) return record = self . record ( ) if not record : record = self . _model ( ) # validate the information save_data = [ ] column_edits = self . findChildren ( XOrbColumnEdit ) for widget in column_edits : columnName = widget . columnName ( ) column = schema . column ( columnName ) if ( not column ) : logger . warning ( '%s is not a valid column of %s.' % ( columnName , schema . name ( ) ) ) continue value = widget . value ( ) if ( value == IGNORED ) : continue # check for required columns if ( column . required ( ) and not value ) : name = column . displayName ( ) QMessageBox . information ( self , 'Missing Required Field' , '%s is a required field.' % name ) return # check for unique columns elif ( column . unique ( ) ) : # check for uniqueness query = Q ( column . name ( ) ) == value if ( record . isRecord ( ) ) : query &= Q ( self . _model ) != record columns = self . _model . schema ( ) . primaryColumns ( ) result = self . _model . select ( columns = columns , where = query ) if ( result . total ( ) ) : QMessageBox . information ( self , 'Duplicate Entry' , '%s already exists.' % value ) return save_data . append ( ( column , value ) ) # record the properties for the record for column , value in save_data : record . setRecordValue ( column . name ( ) , value ) self . _record = record self . saved . emit ( )
Saves the values from the editor to the system .
375
11
11,082
def acceptText ( self ) : if not self . signalsBlocked ( ) : self . textEntered . emit ( self . toPlainText ( ) ) self . htmlEntered . emit ( self . toHtml ( ) ) self . returnPressed . emit ( )
Emits the editing finished signals for this widget .
59
10
11,083
def clear ( self ) : super ( XTextEdit , self ) . clear ( ) self . textEntered . emit ( '' ) self . htmlEntered . emit ( '' ) if self . autoResizeToContents ( ) : self . resizeToContents ( )
Clears the text for this edit and resizes the toolbar information .
56
14
11,084
def paste ( self ) : html = QApplication . clipboard ( ) . text ( ) if not self . isRichTextEditEnabled ( ) : self . insertPlainText ( projex . text . toAscii ( html ) ) else : super ( XTextEdit , self ) . paste ( )
Pastes text from the clipboard into this edit .
65
10
11,085
def resizeToContents ( self ) : doc = self . document ( ) h = doc . documentLayout ( ) . documentSize ( ) . height ( ) self . setFixedHeight ( h + 4 )
Resizes this widget to fit the contents of its text .
42
12
11,086
def matchCollapsedState ( self ) : collapsed = not self . isChecked ( ) if self . _inverted : collapsed = not collapsed if ( not self . isCollapsible ( ) or not collapsed ) : for child in self . children ( ) : if ( not isinstance ( child , QWidget ) ) : continue child . show ( ) self . setMaximumHeight ( MAX_INT ) self . adjustSize ( ) if ( self . parent ( ) ) : self . parent ( ) . adjustSize ( ) else : self . setMaximumHeight ( self . collapsedHeight ( ) ) for child in self . children ( ) : if ( not isinstance ( child , QWidget ) ) : continue child . hide ( )
Matches the collapsed state for this groupbox .
152
10
11,087
def import_qt ( glbls ) : if 'QtCore' in glbls : return from projexui . qt import QtCore , QtGui , wrapVariant , uic from projexui . widgets . xloggersplashscreen import XLoggerSplashScreen glbls [ 'QtCore' ] = QtCore glbls [ 'QtGui' ] = QtGui glbls [ 'wrapVariant' ] = wrapVariant glbls [ 'uic' ] = uic glbls [ 'XLoggerSplashScreen' ] = XLoggerSplashScreen
Delayed qt loader .
139
6
11,088
def encode_value ( value , flags = None , base64 = False ) : if flags : # still a no-operation logger . debug ( "Flag %s encoding not implemented yet" % flags ) if not isinstance ( value , bytes ) : raise ValueError ( "value must be bytes" ) return b64encode ( value ) if base64 else value
Mostly used by payloads
76
6
11,089
def _finishAnimation ( self ) : self . setCurrentIndex ( self . _nextIndex ) self . widget ( self . _lastIndex ) . hide ( ) self . widget ( self . _lastIndex ) . move ( self . _lastPoint ) self . _active = False if not self . signalsBlocked ( ) : self . animationFinished . emit ( )
Cleans up post - animation .
79
7
11,090
def clear ( self ) : for i in range ( self . count ( ) - 1 , - 1 , - 1 ) : w = self . widget ( i ) if w : self . removeWidget ( w ) w . close ( ) w . deleteLater ( )
Clears out the widgets from this stack .
55
9
11,091
def list ( conf ) : try : config = init_config ( conf ) conn = get_conn ( config . get ( 'DEFAULT' , 'statusdb' ) ) cur = conn . cursor ( ) sqlstr = '''select * from client_status order by ctime desc ''' cur . execute ( sqlstr ) result = cur . fetchall ( ) conn . commit ( ) conn . close ( ) for r in result : print r except Exception , e : traceback . print_exc ( )
OpenVPN status list method
107
5
11,092
def cli ( conf ) : try : config = init_config ( conf ) debug = config . getboolean ( 'DEFAULT' , 'debug' ) conn = get_conn ( config . get ( 'DEFAULT' , 'statusdb' ) ) cur = conn . cursor ( ) sqlstr = '''create table client_status (session_id text PRIMARY KEY, username text, userip text, realip text, realport int,ctime int, inbytes int, outbytes int, acct_interval int, session_timeout int, uptime int) ''' try : cur . execute ( 'drop table client_status' ) except : pass cur . execute ( sqlstr ) print 'flush client status database' conn . commit ( ) conn . close ( ) except : traceback . print_exc ( )
OpenVPN status initdb method
178
6
11,093
async def register ( self , service ) : response = await self . _api . put ( "/v1/agent/service/register" , data = service ) return response . status == 200
Registers a new local service .
41
7
11,094
async def deregister ( self , service ) : service_id = extract_attr ( service , keys = [ "ServiceID" , "ID" ] ) response = await self . _api . get ( "/v1/agent/service/deregister" , service_id ) return response . status == 200
Deregisters a local service
68
7
11,095
async def disable ( self , service , * , reason = None ) : return await self . maintenance ( service , False , reason = reason )
Enters maintenance mode for service
30
6
11,096
async def enable ( self , service , * , reason = None ) : return await self . maintenance ( service , False , reason = reason )
Resumes normal operation for service
30
6
11,097
def mppe_chap2_gen_keys ( password , nt_response ) : password_hash = mschap . nt_password_hash ( password ) password_hash_hash = mschap . hash_nt_password_hash ( password_hash ) master_key = get_master_key ( password_hash_hash , nt_response ) master_send_key = get_asymetric_start_key ( master_key , 16 , True , True ) master_recv_key = get_asymetric_start_key ( master_key , 16 , False , True ) return master_send_key , master_recv_key
3 . 3 . Generating 128 - bit Session Keys
146
11
11,098
def query ( self , sql , args = None , many = None , as_dict = False ) : con = self . pool . pop ( ) c = None try : c = con . cursor ( as_dict ) LOGGER . debug ( "Query sql: " + sql + " args:" + str ( args ) ) c . execute ( sql , args ) if many and many > 0 : return self . _yield ( con , c , many ) else : return c . fetchall ( ) except Exception as e : LOGGER . error ( "Error Qeury on %s" , str ( e ) ) raise DBError ( e . args [ 0 ] , e . args [ 1 ] ) finally : many or ( c and c . close ( ) ) many or ( con and self . pool . push ( con ) )
The connection raw sql query when select table show table to fetch records it is compatible the dbi execute method .
175
22
11,099
def connection_class ( self , adapter ) : if self . adapters . get ( adapter ) : return self . adapters [ adapter ] try : class_prefix = getattr ( __import__ ( 'db.' + adapter , globals ( ) , locals ( ) , [ '__class_prefix__' ] ) , '__class_prefix__' ) driver = self . _import_class ( 'db.' + adapter + '.connection.' + class_prefix + 'Connection' ) except ImportError : raise DBError ( "Must install adapter `%s` or doesn't support" % ( adapter ) ) self . adapters [ adapter ] = driver return driver
Get connection class by adapter
137
5