Dataset columns (all string-valued; lengths given as min to max characters):

    code             75 to 104k
    code_sememe      47 to 309k
    token_type       215 to 214k
    code_dependency  75 to 155k
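Each record below gives the same Python function in four views: the raw source (code), an AST-style sememe serialization (code_sememe), the token stream tagged as keyword/identifier/literal (token_type), and the source re-annotated with control- and data-dependency comments (code_dependency). As a minimal sketch of how such a dataset could be inspected with the Hugging Face datasets library — the identifier "user/code-views" and the "train" split are placeholders, not the real dataset path:

    # Minimal sketch, assuming the dataset is published on the Hugging Face Hub
    # and has a "train" split. "user/code-views" is a hypothetical identifier;
    # substitute the actual one.
    from datasets import load_dataset

    ds = load_dataset("user/code-views", split="train")

    row = ds[0]
    for column in ("code", "code_sememe", "token_type", "code_dependency"):
        # Cells can run to hundreds of kilobytes, so print only a short prefix.
        print(f"--- {column} ({len(row[column])} chars) ---")
        print(row[column][:200])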
def _absf(ins):
    ''' Absolute value of top of the stack (48 bits)
    '''
    output = _float_oper(ins.quad[2])
    output.append('res 7, e')  # Just resets the sign bit!
    output.extend(_fpush())
    return output
def function[_absf, parameter[ins]]: constant[ Absolute value of top of the stack (48 bits) ] variable[output] assign[=] call[name[_float_oper], parameter[call[name[ins].quad][constant[2]]]] call[name[output].append, parameter[constant[res 7, e]]] call[name[output].extend, parameter[call[name[_fpush], parameter[]]]] return[name[output]]
keyword[def] identifier[_absf] ( identifier[ins] ): literal[string] identifier[output] = identifier[_float_oper] ( identifier[ins] . identifier[quad] [ literal[int] ]) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[extend] ( identifier[_fpush] ()) keyword[return] identifier[output]
def _absf(ins): """ Absolute value of top of the stack (48 bits) """ output = _float_oper(ins.quad[2]) output.append('res 7, e') # Just resets the sign bit! output.extend(_fpush()) return output
def _do_lumping(self):
    """Do the MVCA lumping.
    """
    model = LandmarkAgglomerative(linkage='ward',
                                  n_clusters=self.n_macrostates,
                                  metric=self.metric,
                                  n_landmarks=self.n_landmarks,
                                  landmark_strategy=self.landmark_strategy,
                                  random_state=self.random_state)
    model.fit([self.transmat_])

    if self.fit_only:
        microstate_mapping_ = model.landmark_labels_
    else:
        microstate_mapping_ = model.transform([self.transmat_])[0]

    self.microstate_mapping_ = microstate_mapping_
def function[_do_lumping, parameter[self]]: constant[Do the MVCA lumping. ] variable[model] assign[=] call[name[LandmarkAgglomerative], parameter[]] call[name[model].fit, parameter[list[[<ast.Attribute object at 0x7da1b0785d50>]]]] if name[self].fit_only begin[:] variable[microstate_mapping_] assign[=] name[model].landmark_labels_ name[self].microstate_mapping_ assign[=] name[microstate_mapping_]
keyword[def] identifier[_do_lumping] ( identifier[self] ): literal[string] identifier[model] = identifier[LandmarkAgglomerative] ( identifier[linkage] = literal[string] , identifier[n_clusters] = identifier[self] . identifier[n_macrostates] , identifier[metric] = identifier[self] . identifier[metric] , identifier[n_landmarks] = identifier[self] . identifier[n_landmarks] , identifier[landmark_strategy] = identifier[self] . identifier[landmark_strategy] , identifier[random_state] = identifier[self] . identifier[random_state] ) identifier[model] . identifier[fit] ([ identifier[self] . identifier[transmat_] ]) keyword[if] identifier[self] . identifier[fit_only] : identifier[microstate_mapping_] = identifier[model] . identifier[landmark_labels_] keyword[else] : identifier[microstate_mapping_] = identifier[model] . identifier[transform] ([ identifier[self] . identifier[transmat_] ])[ literal[int] ] identifier[self] . identifier[microstate_mapping_] = identifier[microstate_mapping_]
def _do_lumping(self): """Do the MVCA lumping. """ model = LandmarkAgglomerative(linkage='ward', n_clusters=self.n_macrostates, metric=self.metric, n_landmarks=self.n_landmarks, landmark_strategy=self.landmark_strategy, random_state=self.random_state) model.fit([self.transmat_]) if self.fit_only: microstate_mapping_ = model.landmark_labels_ # depends on [control=['if'], data=[]] else: microstate_mapping_ = model.transform([self.transmat_])[0] self.microstate_mapping_ = microstate_mapping_
def exit(self, status=0, message=None):
    """ Handle general message exits (e.g. version). """
    if message:
        raise HelpBanner(message.strip(), code=status)
def function[exit, parameter[self, status, message]]: constant[ Handle general message exits (e.g. version). ] if name[message] begin[:] <ast.Raise object at 0x7da204564c70>
keyword[def] identifier[exit] ( identifier[self] , identifier[status] = literal[int] , identifier[message] = keyword[None] ): literal[string] keyword[if] identifier[message] : keyword[raise] identifier[HelpBanner] ( identifier[message] . identifier[strip] (), identifier[code] = identifier[status] )
def exit(self, status=0, message=None): """ Handle general message exits (e.g. version). """ if message: raise HelpBanner(message.strip(), code=status) # depends on [control=['if'], data=[]]
def keyPressEvent(self, event):
    """Reimplement Qt method"""
    if event == QKeySequence.Copy:
        self.copy()
    else:
        QTableView.keyPressEvent(self, event)
def function[keyPressEvent, parameter[self, event]]: constant[Reimplement Qt method] if compare[name[event] equal[==] name[QKeySequence].Copy] begin[:] call[name[self].copy, parameter[]]
keyword[def] identifier[keyPressEvent] ( identifier[self] , identifier[event] ): literal[string] keyword[if] identifier[event] == identifier[QKeySequence] . identifier[Copy] : identifier[self] . identifier[copy] () keyword[else] : identifier[QTableView] . identifier[keyPressEvent] ( identifier[self] , identifier[event] )
def keyPressEvent(self, event): """Reimplement Qt method""" if event == QKeySequence.Copy: self.copy() # depends on [control=['if'], data=[]] else: QTableView.keyPressEvent(self, event)
def open_multi(cls, sock, chunk_types, isattys, chunk_eof_type=None, buf_size=None, select_timeout=None):
    """Yields the write sides of pipes that will copy appropriately chunked values to the socket."""
    cls._assert_aligned(chunk_types, isattys)

    # N.B. This is purely to permit safe handling of a dynamic number of contextmanagers.
    with ExitStack() as stack:
        read_fds, write_fds = list(zip(
            # Allocate one pipe pair per chunk type provided.
            *(stack.enter_context(_pipe(isatty)) for isatty in isattys)
        ))
        writer = NailgunStreamWriter(
            read_fds,
            sock,
            chunk_types,
            chunk_eof_type,
            buf_size=buf_size,
            select_timeout=select_timeout
        )
        with writer.running():
            yield write_fds, writer
def function[open_multi, parameter[cls, sock, chunk_types, isattys, chunk_eof_type, buf_size, select_timeout]]: constant[Yields the write sides of pipes that will copy appropriately chunked values to the socket.] call[name[cls]._assert_aligned, parameter[name[chunk_types], name[isattys]]] with call[name[ExitStack], parameter[]] begin[:] <ast.Tuple object at 0x7da1b2279fc0> assign[=] call[name[list], parameter[call[name[zip], parameter[<ast.Starred object at 0x7da1b227baf0>]]]] variable[writer] assign[=] call[name[NailgunStreamWriter], parameter[name[read_fds], name[sock], name[chunk_types], name[chunk_eof_type]]] with call[name[writer].running, parameter[]] begin[:] <ast.Yield object at 0x7da1b1d37df0>
keyword[def] identifier[open_multi] ( identifier[cls] , identifier[sock] , identifier[chunk_types] , identifier[isattys] , identifier[chunk_eof_type] = keyword[None] , identifier[buf_size] = keyword[None] , identifier[select_timeout] = keyword[None] ): literal[string] identifier[cls] . identifier[_assert_aligned] ( identifier[chunk_types] , identifier[isattys] ) keyword[with] identifier[ExitStack] () keyword[as] identifier[stack] : identifier[read_fds] , identifier[write_fds] = identifier[list] ( identifier[zip] ( *( identifier[stack] . identifier[enter_context] ( identifier[_pipe] ( identifier[isatty] )) keyword[for] identifier[isatty] keyword[in] identifier[isattys] ) )) identifier[writer] = identifier[NailgunStreamWriter] ( identifier[read_fds] , identifier[sock] , identifier[chunk_types] , identifier[chunk_eof_type] , identifier[buf_size] = identifier[buf_size] , identifier[select_timeout] = identifier[select_timeout] ) keyword[with] identifier[writer] . identifier[running] (): keyword[yield] identifier[write_fds] , identifier[writer]
def open_multi(cls, sock, chunk_types, isattys, chunk_eof_type=None, buf_size=None, select_timeout=None): """Yields the write sides of pipes that will copy appropriately chunked values to the socket.""" cls._assert_aligned(chunk_types, isattys) # N.B. This is purely to permit safe handling of a dynamic number of contextmanagers. with ExitStack() as stack: # Allocate one pipe pair per chunk type provided. (read_fds, write_fds) = list(zip(*(stack.enter_context(_pipe(isatty)) for isatty in isattys))) writer = NailgunStreamWriter(read_fds, sock, chunk_types, chunk_eof_type, buf_size=buf_size, select_timeout=select_timeout) with writer.running(): yield (write_fds, writer) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['stack']]
def MA_serial(self, days, rev=0):
    """ see make_serial()
        收盤價移動平均 list 化,資料格式請見 def make_serial()
    """
    return self.make_serial(self.raw_data, days, rev)
def function[MA_serial, parameter[self, days, rev]]: constant[ see make_serial() 收盤價移動平均 list 化,資料格式請見 def make_serial() ] return[call[name[self].make_serial, parameter[name[self].raw_data, name[days], name[rev]]]]
keyword[def] identifier[MA_serial] ( identifier[self] , identifier[days] , identifier[rev] = literal[int] ): literal[string] keyword[return] identifier[self] . identifier[make_serial] ( identifier[self] . identifier[raw_data] , identifier[days] , identifier[rev] )
def MA_serial(self, days, rev=0): """ see make_serial() 收盤價移動平均 list 化,資料格式請見 def make_serial() """ return self.make_serial(self.raw_data, days, rev)
def processResponse(self, arg, replytype, **kw):
    """
    Parameters:
        arg -- deferred
        replytype -- typecode
    """
    if self.debug:
        log.msg('--->PROCESS REQUEST\n%s' % arg, debug=1)

    for h in self.handlers:
        arg.addCallback(h.processResponse, **kw)

    arg.addCallback(self.parseResponse, replytype)
def function[processResponse, parameter[self, arg, replytype]]: constant[ Parameters: arg -- deferred replytype -- typecode ] if name[self].debug begin[:] call[name[log].msg, parameter[binary_operation[constant[--->PROCESS REQUEST %s] <ast.Mod object at 0x7da2590d6920> name[arg]]]] for taget[name[h]] in starred[name[self].handlers] begin[:] call[name[arg].addCallback, parameter[name[h].processResponse]] call[name[arg].addCallback, parameter[name[self].parseResponse, name[replytype]]]
keyword[def] identifier[processResponse] ( identifier[self] , identifier[arg] , identifier[replytype] ,** identifier[kw] ): literal[string] keyword[if] identifier[self] . identifier[debug] : identifier[log] . identifier[msg] ( literal[string] % identifier[arg] , identifier[debug] = literal[int] ) keyword[for] identifier[h] keyword[in] identifier[self] . identifier[handlers] : identifier[arg] . identifier[addCallback] ( identifier[h] . identifier[processResponse] ,** identifier[kw] ) identifier[arg] . identifier[addCallback] ( identifier[self] . identifier[parseResponse] , identifier[replytype] )
def processResponse(self, arg, replytype, **kw): """ Parameters: arg -- deferred replytype -- typecode """ if self.debug: log.msg('--->PROCESS REQUEST\n%s' % arg, debug=1) # depends on [control=['if'], data=[]] for h in self.handlers: arg.addCallback(h.processResponse, **kw) # depends on [control=['for'], data=['h']] arg.addCallback(self.parseResponse, replytype)
async def wait(self):
    """Wait until the connection with the server ends.

    Client applications can use this function to block the main thread
    during the life of the connection.

    Note: this method is a coroutine.
    """
    while True:
        await self.eio.wait()
        await self.sleep(1)  # give the reconnect task time to start up
        if not self._reconnect_task:
            break
        await self._reconnect_task
        if self.eio.state != 'connected':
            break
<ast.AsyncFunctionDef object at 0x7da18ede7760>
keyword[async] keyword[def] identifier[wait] ( identifier[self] ): literal[string] keyword[while] keyword[True] : keyword[await] identifier[self] . identifier[eio] . identifier[wait] () keyword[await] identifier[self] . identifier[sleep] ( literal[int] ) keyword[if] keyword[not] identifier[self] . identifier[_reconnect_task] : keyword[break] keyword[await] identifier[self] . identifier[_reconnect_task] keyword[if] identifier[self] . identifier[eio] . identifier[state] != literal[string] : keyword[break]
async def wait(self): """Wait until the connection with the server ends. Client applications can use this function to block the main thread during the life of the connection. Note: this method is a coroutine. """ while True: await self.eio.wait() await self.sleep(1) # give the reconnect task time to start up if not self._reconnect_task: break # depends on [control=['if'], data=[]] await self._reconnect_task if self.eio.state != 'connected': break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
def _IsText(self, bytes_in, encoding=None):
    """Examine the bytes in and determine if they are indicative of text.

    Parsers need quick and at least semi reliable method of discovering
    whether or not a particular byte stream is text or resembles text
    or not. This can be used in text parsers to determine if a file is
    a text file or not for instance.

    The method assumes the byte sequence is either ASCII, UTF-8, UTF-16
    or method supplied character encoding. Otherwise it will make the
    assumption the byte sequence is not text, but a byte sequence.

    Args:
      bytes_in (bytes|str): byte stream to examine.
      encoding (Optional[str]): encoding to test, if not defined ASCII
          and UTF-8 are tried.

    Returns:
      bool: True if the bytes stream contains text.
    """
    # TODO: Improve speed and accuracy of this method.
    # Start with the assumption we are dealing with text.
    is_text = True

    if isinstance(bytes_in, py2to3.UNICODE_TYPE):
        return is_text

    # Check if this is ASCII text string.
    for value in bytes_in:
        if py2to3.PY_2:
            value = ord(value)
        if not 31 < value < 128:
            is_text = False
            break

    # We have an ASCII string.
    if is_text:
        return is_text

    # Check if this is UTF-8
    try:
        bytes_in.decode('utf-8')
        return True
    except UnicodeDecodeError:
        pass

    if encoding:
        try:
            bytes_in.decode(encoding)
            return True
        except LookupError:
            logger.error('Unsupported encoding: {0:s}'.format(encoding))
        except UnicodeDecodeError:
            pass

    return False
def function[_IsText, parameter[self, bytes_in, encoding]]: constant[Examine the bytes in and determine if they are indicative of text. Parsers need quick and at least semi reliable method of discovering whether or not a particular byte stream is text or resembles text or not. This can be used in text parsers to determine if a file is a text file or not for instance. The method assumes the byte sequence is either ASCII, UTF-8, UTF-16 or method supplied character encoding. Otherwise it will make the assumption the byte sequence is not text, but a byte sequence. Args: bytes_in (bytes|str): byte stream to examine. encoding (Optional[str]): encoding to test, if not defined ASCII and UTF-8 are tried. Returns: bool: True if the bytes stream contains text. ] variable[is_text] assign[=] constant[True] if call[name[isinstance], parameter[name[bytes_in], name[py2to3].UNICODE_TYPE]] begin[:] return[name[is_text]] for taget[name[value]] in starred[name[bytes_in]] begin[:] if name[py2to3].PY_2 begin[:] variable[value] assign[=] call[name[ord], parameter[name[value]]] if <ast.UnaryOp object at 0x7da20c7c83d0> begin[:] variable[is_text] assign[=] constant[False] break if name[is_text] begin[:] return[name[is_text]] <ast.Try object at 0x7da20c7cba30> if name[encoding] begin[:] <ast.Try object at 0x7da20c7caec0> return[constant[False]]
keyword[def] identifier[_IsText] ( identifier[self] , identifier[bytes_in] , identifier[encoding] = keyword[None] ): literal[string] identifier[is_text] = keyword[True] keyword[if] identifier[isinstance] ( identifier[bytes_in] , identifier[py2to3] . identifier[UNICODE_TYPE] ): keyword[return] identifier[is_text] keyword[for] identifier[value] keyword[in] identifier[bytes_in] : keyword[if] identifier[py2to3] . identifier[PY_2] : identifier[value] = identifier[ord] ( identifier[value] ) keyword[if] keyword[not] literal[int] < identifier[value] < literal[int] : identifier[is_text] = keyword[False] keyword[break] keyword[if] identifier[is_text] : keyword[return] identifier[is_text] keyword[try] : identifier[bytes_in] . identifier[decode] ( literal[string] ) keyword[return] keyword[True] keyword[except] identifier[UnicodeDecodeError] : keyword[pass] keyword[if] identifier[encoding] : keyword[try] : identifier[bytes_in] . identifier[decode] ( identifier[encoding] ) keyword[return] keyword[True] keyword[except] identifier[LookupError] : identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[encoding] )) keyword[except] identifier[UnicodeDecodeError] : keyword[pass] keyword[return] keyword[False]
def _IsText(self, bytes_in, encoding=None): """Examine the bytes in and determine if they are indicative of text. Parsers need quick and at least semi reliable method of discovering whether or not a particular byte stream is text or resembles text or not. This can be used in text parsers to determine if a file is a text file or not for instance. The method assumes the byte sequence is either ASCII, UTF-8, UTF-16 or method supplied character encoding. Otherwise it will make the assumption the byte sequence is not text, but a byte sequence. Args: bytes_in (bytes|str): byte stream to examine. encoding (Optional[str]): encoding to test, if not defined ASCII and UTF-8 are tried. Returns: bool: True if the bytes stream contains text. """ # TODO: Improve speed and accuracy of this method. # Start with the assumption we are dealing with text. is_text = True if isinstance(bytes_in, py2to3.UNICODE_TYPE): return is_text # depends on [control=['if'], data=[]] # Check if this is ASCII text string. for value in bytes_in: if py2to3.PY_2: value = ord(value) # depends on [control=['if'], data=[]] if not 31 < value < 128: is_text = False break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['value']] # We have an ASCII string. if is_text: return is_text # depends on [control=['if'], data=[]] # Check if this is UTF-8 try: bytes_in.decode('utf-8') return True # depends on [control=['try'], data=[]] except UnicodeDecodeError: pass # depends on [control=['except'], data=[]] if encoding: try: bytes_in.decode(encoding) return True # depends on [control=['try'], data=[]] except LookupError: logger.error('Unsupported encoding: {0:s}'.format(encoding)) # depends on [control=['except'], data=[]] except UnicodeDecodeError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return False
def filter_by_widget(self, widget_type):
    """
    Returns a LayoutSlice pointing to fields with widgets of `widget_type`
    """
    self._check_layout_and_form()
    layout_field_names = self.layout.get_field_names()

    # Let's filter all fields with widgets like widget_type
    filtered_fields = []
    for pointer in layout_field_names:
        if isinstance(self.form.fields[pointer[1]].widget, widget_type):
            filtered_fields.append(pointer)

    return LayoutSlice(self.layout, filtered_fields)
def function[filter_by_widget, parameter[self, widget_type]]: constant[ Returns a LayoutSlice pointing to fields with widgets of `widget_type` ] call[name[self]._check_layout_and_form, parameter[]] variable[layout_field_names] assign[=] call[name[self].layout.get_field_names, parameter[]] variable[filtered_fields] assign[=] list[[]] for taget[name[pointer]] in starred[name[layout_field_names]] begin[:] if call[name[isinstance], parameter[call[name[self].form.fields][call[name[pointer]][constant[1]]].widget, name[widget_type]]] begin[:] call[name[filtered_fields].append, parameter[name[pointer]]] return[call[name[LayoutSlice], parameter[name[self].layout, name[filtered_fields]]]]
keyword[def] identifier[filter_by_widget] ( identifier[self] , identifier[widget_type] ): literal[string] identifier[self] . identifier[_check_layout_and_form] () identifier[layout_field_names] = identifier[self] . identifier[layout] . identifier[get_field_names] () identifier[filtered_fields] =[] keyword[for] identifier[pointer] keyword[in] identifier[layout_field_names] : keyword[if] identifier[isinstance] ( identifier[self] . identifier[form] . identifier[fields] [ identifier[pointer] [ literal[int] ]]. identifier[widget] , identifier[widget_type] ): identifier[filtered_fields] . identifier[append] ( identifier[pointer] ) keyword[return] identifier[LayoutSlice] ( identifier[self] . identifier[layout] , identifier[filtered_fields] )
def filter_by_widget(self, widget_type): """ Returns a LayoutSlice pointing to fields with widgets of `widget_type` """ self._check_layout_and_form() layout_field_names = self.layout.get_field_names() # Let's filter all fields with widgets like widget_type filtered_fields = [] for pointer in layout_field_names: if isinstance(self.form.fields[pointer[1]].widget, widget_type): filtered_fields.append(pointer) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pointer']] return LayoutSlice(self.layout, filtered_fields)
def wheel():
    '''
    Return all inline documentation for wheel modules

    CLI Example:

    .. code-block:: bash

        salt-run doc.wheel
    '''
    client = salt.wheel.Wheel(__opts__)
    ret = client.get_docs()
    return ret
def function[wheel, parameter[]]: constant[ Return all inline documentation for wheel modules CLI Example: .. code-block:: bash salt-run doc.wheel ] variable[client] assign[=] call[name[salt].wheel.Wheel, parameter[name[__opts__]]] variable[ret] assign[=] call[name[client].get_docs, parameter[]] return[name[ret]]
keyword[def] identifier[wheel] (): literal[string] identifier[client] = identifier[salt] . identifier[wheel] . identifier[Wheel] ( identifier[__opts__] ) identifier[ret] = identifier[client] . identifier[get_docs] () keyword[return] identifier[ret]
def wheel(): """ Return all inline documentation for wheel modules CLI Example: .. code-block:: bash salt-run doc.wheel """ client = salt.wheel.Wheel(__opts__) ret = client.get_docs() return ret
def L(s, o, N=5):
    """
    Likelihood
    input:
        s: simulated
        o: observed
    output:
        L: likelihood
    """
    # s,o = filter_nan(s,o)
    return np.exp(-N*np.sum((s-o)**2)/np.sum((o-np.mean(o))**2))
def function[L, parameter[s, o, N]]: constant[ Likelihood input: s: simulated o: observed output: L: likelihood ] return[call[name[np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da204564550> * call[name[np].sum, parameter[binary_operation[binary_operation[name[s] - name[o]] ** constant[2]]]]] / call[name[np].sum, parameter[binary_operation[binary_operation[name[o] - call[name[np].mean, parameter[name[o]]]] ** constant[2]]]]]]]]
keyword[def] identifier[L] ( identifier[s] , identifier[o] , identifier[N] = literal[int] ): literal[string] keyword[return] identifier[np] . identifier[exp] (- identifier[N] * identifier[np] . identifier[sum] (( identifier[s] - identifier[o] )** literal[int] )/ identifier[np] . identifier[sum] (( identifier[o] - identifier[np] . identifier[mean] ( identifier[o] ))** literal[int] ))
def L(s, o, N=5): """ Likelihood input: s: simulated o: observed output: L: likelihood """ # s,o = filter_nan(s,o) return np.exp(-N * np.sum((s - o) ** 2) / np.sum((o - np.mean(o)) ** 2))
def _create_filter_by(self):
    """Transform the json-server filter arguments to model-resource ones."""
    filter_by = []
    for name, values in request.args.copy().lists():  # copy.lists works in py2 and py3
        if name not in _SKIPPED_ARGUMENTS:
            column = _re_column_name.search(name).group(1)
            if column not in self._model_columns:
                continue
            for value in values:
                if name.endswith('_ne'):
                    filter_by.append(name[:-3] + '!=' + value)
                elif name.endswith('_lte'):
                    filter_by.append(name[:-4] + '<=' + value)
                elif name.endswith('_gte'):
                    filter_by.append(name[:-4] + '>=' + value)
                elif name.endswith('_like'):
                    filter_by.append(name[:-5] + '::like::%' + value + '%')
                else:
                    filter_by.append(name.replace('__', '.') + '==' + value)
    filter_by += self._create_fulltext_query()
    return ','.join(filter_by)
def function[_create_filter_by, parameter[self]]: constant[Transform the json-server filter arguments to model-resource ones.] variable[filter_by] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b196e5c0>, <ast.Name object at 0x7da1b196e3b0>]]] in starred[call[call[name[request].args.copy, parameter[]].lists, parameter[]]] begin[:] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[_SKIPPED_ARGUMENTS]] begin[:] variable[column] assign[=] call[call[name[_re_column_name].search, parameter[name[name]]].group, parameter[constant[1]]] if compare[name[column] <ast.NotIn object at 0x7da2590d7190> name[self]._model_columns] begin[:] continue for taget[name[value]] in starred[name[values]] begin[:] if call[name[name].endswith, parameter[constant[_ne]]] begin[:] call[name[filter_by].append, parameter[binary_operation[binary_operation[call[name[name]][<ast.Slice object at 0x7da1b196f790>] + constant[!=]] + name[value]]]] <ast.AugAssign object at 0x7da1b196e9e0> return[call[constant[,].join, parameter[name[filter_by]]]]
keyword[def] identifier[_create_filter_by] ( identifier[self] ): literal[string] identifier[filter_by] =[] keyword[for] identifier[name] , identifier[values] keyword[in] identifier[request] . identifier[args] . identifier[copy] (). identifier[lists] (): keyword[if] identifier[name] keyword[not] keyword[in] identifier[_SKIPPED_ARGUMENTS] : identifier[column] = identifier[_re_column_name] . identifier[search] ( identifier[name] ). identifier[group] ( literal[int] ) keyword[if] identifier[column] keyword[not] keyword[in] identifier[self] . identifier[_model_columns] : keyword[continue] keyword[for] identifier[value] keyword[in] identifier[values] : keyword[if] identifier[name] . identifier[endswith] ( literal[string] ): identifier[filter_by] . identifier[append] ( identifier[name] [:- literal[int] ]+ literal[string] + identifier[value] ) keyword[elif] identifier[name] . identifier[endswith] ( literal[string] ): identifier[filter_by] . identifier[append] ( identifier[name] [:- literal[int] ]+ literal[string] + identifier[value] ) keyword[elif] identifier[name] . identifier[endswith] ( literal[string] ): identifier[filter_by] . identifier[append] ( identifier[name] [:- literal[int] ]+ literal[string] + identifier[value] ) keyword[elif] identifier[name] . identifier[endswith] ( literal[string] ): identifier[filter_by] . identifier[append] ( identifier[name] [:- literal[int] ]+ literal[string] + identifier[value] + literal[string] ) keyword[else] : identifier[filter_by] . identifier[append] ( identifier[name] . identifier[replace] ( literal[string] , literal[string] )+ literal[string] + identifier[value] ) identifier[filter_by] += identifier[self] . identifier[_create_fulltext_query] () keyword[return] literal[string] . identifier[join] ( identifier[filter_by] )
def _create_filter_by(self): """Transform the json-server filter arguments to model-resource ones.""" filter_by = [] for (name, values) in request.args.copy().lists(): # copy.lists works in py2 and py3 if name not in _SKIPPED_ARGUMENTS: column = _re_column_name.search(name).group(1) if column not in self._model_columns: continue # depends on [control=['if'], data=[]] for value in values: if name.endswith('_ne'): filter_by.append(name[:-3] + '!=' + value) # depends on [control=['if'], data=[]] elif name.endswith('_lte'): filter_by.append(name[:-4] + '<=' + value) # depends on [control=['if'], data=[]] elif name.endswith('_gte'): filter_by.append(name[:-4] + '>=' + value) # depends on [control=['if'], data=[]] elif name.endswith('_like'): filter_by.append(name[:-5] + '::like::%' + value + '%') # depends on [control=['if'], data=[]] else: filter_by.append(name.replace('__', '.') + '==' + value) # depends on [control=['for'], data=['value']] # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=[]] filter_by += self._create_fulltext_query() return ','.join(filter_by)
def _transfer_result(fut1, fut2):
    """Helper to transfer result or errors from one Future to another."""
    exc = fut1.get_exception()
    if exc is not None:
        tb = fut1.get_traceback()
        fut2.set_exception(exc, tb)
    else:
        val = fut1.get_result()
        fut2.set_result(val)
def function[_transfer_result, parameter[fut1, fut2]]: constant[Helper to transfer result or errors from one Future to another.] variable[exc] assign[=] call[name[fut1].get_exception, parameter[]] if compare[name[exc] is_not constant[None]] begin[:] variable[tb] assign[=] call[name[fut1].get_traceback, parameter[]] call[name[fut2].set_exception, parameter[name[exc], name[tb]]]
keyword[def] identifier[_transfer_result] ( identifier[fut1] , identifier[fut2] ): literal[string] identifier[exc] = identifier[fut1] . identifier[get_exception] () keyword[if] identifier[exc] keyword[is] keyword[not] keyword[None] : identifier[tb] = identifier[fut1] . identifier[get_traceback] () identifier[fut2] . identifier[set_exception] ( identifier[exc] , identifier[tb] ) keyword[else] : identifier[val] = identifier[fut1] . identifier[get_result] () identifier[fut2] . identifier[set_result] ( identifier[val] )
def _transfer_result(fut1, fut2): """Helper to transfer result or errors from one Future to another.""" exc = fut1.get_exception() if exc is not None: tb = fut1.get_traceback() fut2.set_exception(exc, tb) # depends on [control=['if'], data=['exc']] else: val = fut1.get_result() fut2.set_result(val)
def render_trace(
        self, trace_list=None, file=sys.stdout, render_cls=default_renderer(),
        symbol_len=5, segment_size=5, segment_delim=' ', extra_line=True):
    """ Render the trace to a file using unicode and ASCII escape sequences.

    :param trace_list: A list of signals to be output in the specified order.
    :param file: The place to write output, default to stdout.
    :param render_cls: A class that translates traces into output bytes.
    :param symbol_len: The "length" of each rendered cycle in characters.
    :param segment_size: Traces are broken in the segments of this number of cycles.
    :param segment_delim: The character to be output between segments.
    :param extra_line: A Boolean to determin if we should print a blank line between signals.

    The resulting output can be viewed directly on the terminal or looked at
    with "more" or "less -R" which both should handle the ASCII escape
    sequences used in rendering. render_trace takes the following optional
    arguments.
    """
    if _currently_in_ipython():
        from IPython.display import display, HTML, Javascript  # pylint: disable=import-error
        from .inputoutput import trace_to_html
        htmlstring = trace_to_html(self, trace_list=trace_list, sortkey=_trace_sort_key)
        html_elem = HTML(htmlstring)
        display(html_elem)
        # print(htmlstring)
        js_stuff = """
        $.when(
        $.getScript("https://cdnjs.cloudflare.com/ajax/libs/wavedrom/1.6.2/skins/default.js"),
        $.getScript("https://cdnjs.cloudflare.com/ajax/libs/wavedrom/1.6.2/wavedrom.min.js"),
        $.Deferred(function( deferred ){
            $( deferred.resolve );
        })).done(function(){
            WaveDrom.ProcessAll();
        });"""
        display(Javascript(js_stuff))
    else:
        self.render_trace_to_text(
            trace_list=trace_list, file=file, render_cls=render_cls,
            symbol_len=symbol_len, segment_size=segment_size,
            segment_delim=segment_delim, extra_line=extra_line)
def function[render_trace, parameter[self, trace_list, file, render_cls, symbol_len, segment_size, segment_delim, extra_line]]: constant[ Render the trace to a file using unicode and ASCII escape sequences. :param trace_list: A list of signals to be output in the specified order. :param file: The place to write output, default to stdout. :param render_cls: A class that translates traces into output bytes. :param symbol_len: The "length" of each rendered cycle in characters. :param segment_size: Traces are broken in the segments of this number of cycles. :param segment_delim: The character to be output between segments. :param extra_line: A Boolean to determin if we should print a blank line between signals. The resulting output can be viewed directly on the terminal or looked at with "more" or "less -R" which both should handle the ASCII escape sequences used in rendering. render_trace takes the following optional arguments. ] if call[name[_currently_in_ipython], parameter[]] begin[:] from relative_module[IPython.display] import module[display], module[HTML], module[Javascript] from relative_module[inputoutput] import module[trace_to_html] variable[htmlstring] assign[=] call[name[trace_to_html], parameter[name[self]]] variable[html_elem] assign[=] call[name[HTML], parameter[name[htmlstring]]] call[name[display], parameter[name[html_elem]]] variable[js_stuff] assign[=] constant[ $.when( $.getScript("https://cdnjs.cloudflare.com/ajax/libs/wavedrom/1.6.2/skins/default.js"), $.getScript("https://cdnjs.cloudflare.com/ajax/libs/wavedrom/1.6.2/wavedrom.min.js"), $.Deferred(function( deferred ){ $( deferred.resolve ); })).done(function(){ WaveDrom.ProcessAll(); });] call[name[display], parameter[call[name[Javascript], parameter[name[js_stuff]]]]]
keyword[def] identifier[render_trace] ( identifier[self] , identifier[trace_list] = keyword[None] , identifier[file] = identifier[sys] . identifier[stdout] , identifier[render_cls] = identifier[default_renderer] (), identifier[symbol_len] = literal[int] , identifier[segment_size] = literal[int] , identifier[segment_delim] = literal[string] , identifier[extra_line] = keyword[True] ): literal[string] keyword[if] identifier[_currently_in_ipython] (): keyword[from] identifier[IPython] . identifier[display] keyword[import] identifier[display] , identifier[HTML] , identifier[Javascript] keyword[from] . identifier[inputoutput] keyword[import] identifier[trace_to_html] identifier[htmlstring] = identifier[trace_to_html] ( identifier[self] , identifier[trace_list] = identifier[trace_list] , identifier[sortkey] = identifier[_trace_sort_key] ) identifier[html_elem] = identifier[HTML] ( identifier[htmlstring] ) identifier[display] ( identifier[html_elem] ) identifier[js_stuff] = literal[string] identifier[display] ( identifier[Javascript] ( identifier[js_stuff] )) keyword[else] : identifier[self] . identifier[render_trace_to_text] ( identifier[trace_list] = identifier[trace_list] , identifier[file] = identifier[file] , identifier[render_cls] = identifier[render_cls] , identifier[symbol_len] = identifier[symbol_len] , identifier[segment_size] = identifier[segment_size] , identifier[segment_delim] = identifier[segment_delim] , identifier[extra_line] = identifier[extra_line] )
def render_trace(self, trace_list=None, file=sys.stdout, render_cls=default_renderer(), symbol_len=5, segment_size=5, segment_delim=' ', extra_line=True): """ Render the trace to a file using unicode and ASCII escape sequences. :param trace_list: A list of signals to be output in the specified order. :param file: The place to write output, default to stdout. :param render_cls: A class that translates traces into output bytes. :param symbol_len: The "length" of each rendered cycle in characters. :param segment_size: Traces are broken in the segments of this number of cycles. :param segment_delim: The character to be output between segments. :param extra_line: A Boolean to determin if we should print a blank line between signals. The resulting output can be viewed directly on the terminal or looked at with "more" or "less -R" which both should handle the ASCII escape sequences used in rendering. render_trace takes the following optional arguments. """ if _currently_in_ipython(): from IPython.display import display, HTML, Javascript # pylint: disable=import-error from .inputoutput import trace_to_html htmlstring = trace_to_html(self, trace_list=trace_list, sortkey=_trace_sort_key) html_elem = HTML(htmlstring) display(html_elem) # print(htmlstring) js_stuff = '\n $.when(\n $.getScript("https://cdnjs.cloudflare.com/ajax/libs/wavedrom/1.6.2/skins/default.js"),\n $.getScript("https://cdnjs.cloudflare.com/ajax/libs/wavedrom/1.6.2/wavedrom.min.js"),\n $.Deferred(function( deferred ){\n $( deferred.resolve );\n })).done(function(){\n WaveDrom.ProcessAll();\n });' display(Javascript(js_stuff)) # depends on [control=['if'], data=[]] else: self.render_trace_to_text(trace_list=trace_list, file=file, render_cls=render_cls, symbol_len=symbol_len, segment_size=segment_size, segment_delim=segment_delim, extra_line=extra_line)
def _parse(data: str) -> list:
    """
    Parses the given data string and returns a list of rule objects.
    """
    if isinstance(data, bytes):
        data = data.decode('utf-8')

    lines = (
        item for item in (item.strip() for item in data.split('\n'))
        if len(item) and not item.startswith('#')
    )

    rules = []
    for line in lines:
        rules.append(
            Rule.parse(line)
        )

    return rules
def function[_parse, parameter[data]]: constant[ Parses the given data string and returns a list of rule objects. ] if call[name[isinstance], parameter[name[data], name[bytes]]] begin[:] variable[data] assign[=] call[name[data].decode, parameter[constant[utf-8]]] variable[lines] assign[=] <ast.GeneratorExp object at 0x7da20e9560e0> variable[rules] assign[=] list[[]] for taget[name[line]] in starred[name[lines]] begin[:] call[name[rules].append, parameter[call[name[Rule].parse, parameter[name[line]]]]] return[name[rules]]
keyword[def] identifier[_parse] ( identifier[data] : identifier[str] )-> identifier[list] : literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[bytes] ): identifier[data] = identifier[data] . identifier[decode] ( literal[string] ) identifier[lines] =( identifier[item] keyword[for] identifier[item] keyword[in] ( identifier[item] . identifier[strip] () keyword[for] identifier[item] keyword[in] identifier[data] . identifier[split] ( literal[string] )) keyword[if] identifier[len] ( identifier[item] ) keyword[and] keyword[not] identifier[item] . identifier[startswith] ( literal[string] ) ) identifier[rules] =[] keyword[for] identifier[line] keyword[in] identifier[lines] : identifier[rules] . identifier[append] ( identifier[Rule] . identifier[parse] ( identifier[line] ) ) keyword[return] identifier[rules]
def _parse(data: str) -> list: """ Parses the given data string and returns a list of rule objects. """ if isinstance(data, bytes): data = data.decode('utf-8') # depends on [control=['if'], data=[]] lines = (item for item in (item.strip() for item in data.split('\n')) if len(item) and (not item.startswith('#'))) rules = [] for line in lines: rules.append(Rule.parse(line)) # depends on [control=['for'], data=['line']] return rules
def batch_normalize_with_arguments(x, arguments):
    """Applies batch normalization to x as specified in arguments.

    Args:
      x: A Pretty Tensor.
      arguments: Either a boolean to batch_normalize or a
        BatchNormalizationArguments

    Returns:
      x with batch normalization applied.
    """
    x = prettytensor.wrap(x)
    # Backwards compatibility.
    if isinstance(arguments, bool):
        if arguments:
            return x.batch_normalize()
        else:
            return x

    # pylint: disable=protected-access
    kwargs = arguments._asdict()
    defaults = prettytensor._defaults
    # pylint: enable=protected-access
    for arg in ('learned_moments_update_rate', 'variance_epsilon',
                'scale_after_normalization'):
        if kwargs.get(arg, None) is None:
            if arg in defaults:
                kwargs[arg] = defaults[arg]
            else:
                del kwargs[arg]

    return x.batch_normalize(**kwargs)
def function[batch_normalize_with_arguments, parameter[x, arguments]]: constant[Applies batch normalization to x as specified in arguments. Args: x: A Pretty Tensor. arguments: Either a boolean to batch_normalize or a BatchNormalizationArguments Returns: x with batch normalization applied. ] variable[x] assign[=] call[name[prettytensor].wrap, parameter[name[x]]] if call[name[isinstance], parameter[name[arguments], name[bool]]] begin[:] if name[arguments] begin[:] return[call[name[x].batch_normalize, parameter[]]] variable[kwargs] assign[=] call[name[arguments]._asdict, parameter[]] variable[defaults] assign[=] name[prettytensor]._defaults for taget[name[arg]] in starred[tuple[[<ast.Constant object at 0x7da2047eace0>, <ast.Constant object at 0x7da2047ebac0>, <ast.Constant object at 0x7da2047e9d20>]]] begin[:] if compare[call[name[kwargs].get, parameter[name[arg], constant[None]]] is constant[None]] begin[:] if compare[name[arg] in name[defaults]] begin[:] call[name[kwargs]][name[arg]] assign[=] call[name[defaults]][name[arg]] return[call[name[x].batch_normalize, parameter[]]]
keyword[def] identifier[batch_normalize_with_arguments] ( identifier[x] , identifier[arguments] ): literal[string] identifier[x] = identifier[prettytensor] . identifier[wrap] ( identifier[x] ) keyword[if] identifier[isinstance] ( identifier[arguments] , identifier[bool] ): keyword[if] identifier[arguments] : keyword[return] identifier[x] . identifier[batch_normalize] () keyword[else] : keyword[return] identifier[x] identifier[kwargs] = identifier[arguments] . identifier[_asdict] () identifier[defaults] = identifier[prettytensor] . identifier[_defaults] keyword[for] identifier[arg] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[if] identifier[kwargs] . identifier[get] ( identifier[arg] , keyword[None] ) keyword[is] keyword[None] : keyword[if] identifier[arg] keyword[in] identifier[defaults] : identifier[kwargs] [ identifier[arg] ]= identifier[defaults] [ identifier[arg] ] keyword[else] : keyword[del] identifier[kwargs] [ identifier[arg] ] keyword[return] identifier[x] . identifier[batch_normalize] (** identifier[kwargs] )
def batch_normalize_with_arguments(x, arguments): """Applies batch normalization to x as specified in arguments. Args: x: A Pretty Tensor. arguments: Either a boolean to batch_normalize or a BatchNormalizationArguments Returns: x with batch normalization applied. """ x = prettytensor.wrap(x) # Backwards compatibility. if isinstance(arguments, bool): if arguments: return x.batch_normalize() # depends on [control=['if'], data=[]] else: return x # depends on [control=['if'], data=[]] # pylint: disable=protected-access kwargs = arguments._asdict() defaults = prettytensor._defaults # pylint: enable=protected-access for arg in ('learned_moments_update_rate', 'variance_epsilon', 'scale_after_normalization'): if kwargs.get(arg, None) is None: if arg in defaults: kwargs[arg] = defaults[arg] # depends on [control=['if'], data=['arg', 'defaults']] else: del kwargs[arg] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']] return x.batch_normalize(**kwargs)
def _get_data_files(self):
    """ Override of build_py.get_data_files that includes out of tree configs.

    These are currently hardcoded to include everything in
    ../shared-data/robot-data, which will move to
    opentrons/config/shared-data
    """
    files = super()._get_data_files()
    # We don’t really want to duplicate logic used in the original
    # implementation, but we can back out what it did with commonpath -
    # should be something ending in opentrons
    build_base = os.path.commonpath([f[2] for f in files])
    # We want a list of paths to only files relative to ../shared-data
    to_include = get_shared_data_files()
    destination = os.path.join(build_base, DEST_BASE_PATH)
    # And finally, tell the system about our files
    files.append(('opentrons', SHARED_DATA_PATH, destination, to_include))
    return files
def function[_get_data_files, parameter[self]]: constant[ Override of build_py.get_data_files that includes out of tree configs. These are currently hardcoded to include everything in ../shared-data/robot-data, which will move to opentrons/config/shared-data ] variable[files] assign[=] call[call[name[super], parameter[]]._get_data_files, parameter[]] variable[build_base] assign[=] call[name[os].path.commonpath, parameter[<ast.ListComp object at 0x7da18f09d840>]] variable[to_include] assign[=] call[name[get_shared_data_files], parameter[]] variable[destination] assign[=] call[name[os].path.join, parameter[name[build_base], name[DEST_BASE_PATH]]] call[name[files].append, parameter[tuple[[<ast.Constant object at 0x7da18f09e920>, <ast.Name object at 0x7da18f09d270>, <ast.Name object at 0x7da18f09d6c0>, <ast.Name object at 0x7da18f09e2c0>]]]] return[name[files]]
keyword[def] identifier[_get_data_files] ( identifier[self] ): literal[string] identifier[files] = identifier[super] (). identifier[_get_data_files] () identifier[build_base] = identifier[os] . identifier[path] . identifier[commonpath] ([ identifier[f] [ literal[int] ] keyword[for] identifier[f] keyword[in] identifier[files] ]) identifier[to_include] = identifier[get_shared_data_files] () identifier[destination] = identifier[os] . identifier[path] . identifier[join] ( identifier[build_base] , identifier[DEST_BASE_PATH] ) identifier[files] . identifier[append] (( literal[string] , identifier[SHARED_DATA_PATH] , identifier[destination] , identifier[to_include] )) keyword[return] identifier[files]
def _get_data_files(self): """ Override of build_py.get_data_files that includes out of tree configs. These are currently hardcoded to include everything in ../shared-data/robot-data, which will move to opentrons/config/shared-data """ files = super()._get_data_files() # We don’t really want to duplicate logic used in the original # implementation, but we can back out what it did with commonpath - # should be something ending in opentrons build_base = os.path.commonpath([f[2] for f in files]) # We want a list of paths to only files relative to ../shared-data to_include = get_shared_data_files() destination = os.path.join(build_base, DEST_BASE_PATH) # And finally, tell the system about our files files.append(('opentrons', SHARED_DATA_PATH, destination, to_include)) return files
def get_tile_id(self):
    """Creates ESA tile ID

    :return: ESA tile ID
    :rtype: str
    """
    tree = get_xml(self.get_url(AwsConstants.METADATA))

    tile_id_tag = 'TILE_ID_2A' if self.data_source is DataSource.SENTINEL2_L2A and self.baseline <= '02.06' else \
        'TILE_ID'
    tile_id = tree[0].find(tile_id_tag).text

    if self.safe_type is not EsaSafeType.OLD_TYPE:
        info = tile_id.split('_')
        tile_id = '_'.join([info[3], info[-2], info[-3], self.get_sensing_time()])

    return tile_id
def function[get_tile_id, parameter[self]]: constant[Creates ESA tile ID :return: ESA tile ID :rtype: str ] variable[tree] assign[=] call[name[get_xml], parameter[call[name[self].get_url, parameter[name[AwsConstants].METADATA]]]] variable[tile_id_tag] assign[=] <ast.IfExp object at 0x7da1b18b7130> variable[tile_id] assign[=] call[call[name[tree]][constant[0]].find, parameter[name[tile_id_tag]]].text if compare[name[self].safe_type is_not name[EsaSafeType].OLD_TYPE] begin[:] variable[info] assign[=] call[name[tile_id].split, parameter[constant[_]]] variable[tile_id] assign[=] call[constant[_].join, parameter[list[[<ast.Subscript object at 0x7da1b18b48b0>, <ast.Subscript object at 0x7da1b18b45b0>, <ast.Subscript object at 0x7da1b18b5960>, <ast.Call object at 0x7da1b18b6c80>]]]] return[name[tile_id]]
keyword[def] identifier[get_tile_id] ( identifier[self] ): literal[string] identifier[tree] = identifier[get_xml] ( identifier[self] . identifier[get_url] ( identifier[AwsConstants] . identifier[METADATA] )) identifier[tile_id_tag] = literal[string] keyword[if] identifier[self] . identifier[data_source] keyword[is] identifier[DataSource] . identifier[SENTINEL2_L2A] keyword[and] identifier[self] . identifier[baseline] <= literal[string] keyword[else] literal[string] identifier[tile_id] = identifier[tree] [ literal[int] ]. identifier[find] ( identifier[tile_id_tag] ). identifier[text] keyword[if] identifier[self] . identifier[safe_type] keyword[is] keyword[not] identifier[EsaSafeType] . identifier[OLD_TYPE] : identifier[info] = identifier[tile_id] . identifier[split] ( literal[string] ) identifier[tile_id] = literal[string] . identifier[join] ([ identifier[info] [ literal[int] ], identifier[info] [- literal[int] ], identifier[info] [- literal[int] ], identifier[self] . identifier[get_sensing_time] ()]) keyword[return] identifier[tile_id]
def get_tile_id(self): """Creates ESA tile ID :return: ESA tile ID :rtype: str """ tree = get_xml(self.get_url(AwsConstants.METADATA)) tile_id_tag = 'TILE_ID_2A' if self.data_source is DataSource.SENTINEL2_L2A and self.baseline <= '02.06' else 'TILE_ID' tile_id = tree[0].find(tile_id_tag).text if self.safe_type is not EsaSafeType.OLD_TYPE: info = tile_id.split('_') tile_id = '_'.join([info[3], info[-2], info[-3], self.get_sensing_time()]) # depends on [control=['if'], data=[]] return tile_id
def dedents(self, s, stacklevel=3):
    """
    Dedent a string and substitute with the :attr:`params` attribute

    Parameters
    ----------
    s: str
        string to dedent and insert the sections of the :attr:`params`
        attribute
    stacklevel: int
        The stacklevel for the warning raised in :func:`safe_module` when
        encountering an invalid key in the string"""
    s = dedents(s)
    return safe_modulo(s, self.params, stacklevel=stacklevel)
def function[dedents, parameter[self, s, stacklevel]]: constant[ Dedent a string and substitute with the :attr:`params` attribute Parameters ---------- s: str string to dedent and insert the sections of the :attr:`params` attribute stacklevel: int The stacklevel for the warning raised in :func:`safe_module` when encountering an invalid key in the string] variable[s] assign[=] call[name[dedents], parameter[name[s]]] return[call[name[safe_modulo], parameter[name[s], name[self].params]]]
keyword[def] identifier[dedents] ( identifier[self] , identifier[s] , identifier[stacklevel] = literal[int] ): literal[string] identifier[s] = identifier[dedents] ( identifier[s] ) keyword[return] identifier[safe_modulo] ( identifier[s] , identifier[self] . identifier[params] , identifier[stacklevel] = identifier[stacklevel] )
def dedents(self, s, stacklevel=3): """ Dedent a string and substitute with the :attr:`params` attribute Parameters ---------- s: str string to dedent and insert the sections of the :attr:`params` attribute stacklevel: int The stacklevel for the warning raised in :func:`safe_module` when encountering an invalid key in the string""" s = dedents(s) return safe_modulo(s, self.params, stacklevel=stacklevel)
def parse_address(self, address, line_number=-1):
    """
    Return an Address object from the given address. Passes itself to the
    Address constructor to use all the custom loaded suffixes, cities, etc.
    """
    return Address(address, self, line_number, self.logger)
def function[parse_address, parameter[self, address, line_number]]: constant[ Return an Address object from the given address. Passes itself to the Address constructor to use all the custom loaded suffixes, cities, etc. ] return[call[name[Address], parameter[name[address], name[self], name[line_number], name[self].logger]]]
keyword[def] identifier[parse_address] ( identifier[self] , identifier[address] , identifier[line_number] =- literal[int] ): literal[string] keyword[return] identifier[Address] ( identifier[address] , identifier[self] , identifier[line_number] , identifier[self] . identifier[logger] )
def parse_address(self, address, line_number=-1): """ Return an Address object from the given address. Passes itself to the Address constructor to use all the custom loaded suffixes, cities, etc. """ return Address(address, self, line_number, self.logger)
def from_private_key(path: str) -> SigningKeyType:
    """
    Read authentication file
    Add public key attribute

    :param path: Authentication file path
    """
    key = load_key(path)
    key.pubkey = Base58Encoder.encode(key.vk)
    return key
def function[from_private_key, parameter[path]]: constant[ Read authentication file Add public key attribute :param path: Authentication file path ] variable[key] assign[=] call[name[load_key], parameter[name[path]]] name[key].pubkey assign[=] call[name[Base58Encoder].encode, parameter[name[key].vk]] return[name[key]]
keyword[def] identifier[from_private_key] ( identifier[path] : identifier[str] )-> identifier[SigningKeyType] : literal[string] identifier[key] = identifier[load_key] ( identifier[path] ) identifier[key] . identifier[pubkey] = identifier[Base58Encoder] . identifier[encode] ( identifier[key] . identifier[vk] ) keyword[return] identifier[key]
def from_private_key(path: str) -> SigningKeyType: """ Read authentication file Add public key attribute :param path: Authentication file path """ key = load_key(path) key.pubkey = Base58Encoder.encode(key.vk) return key
def ensembl_to_kegg(organism, kegg_db):
    """
    Looks up KEGG mappings of KEGG ids to ensembl ids

    :param organism: an organisms as listed in organismsKEGG()
    :param kegg_db: a matching KEGG db as reported in databasesKEGG

    :returns: a Pandas dataframe of with 'KEGGid' and 'ENSid'.
    """
    print("KEGG API: http://rest.genome.jp/link/"+kegg_db+"/"+organism)
    sys.stdout.flush()
    kegg_ens = urlopen("http://rest.genome.jp/link/"+kegg_db+"/"+organism).read()
    kegg_ens = kegg_ens.split("\n")
    final = []
    for i in kegg_ens:
        final.append(i.split("\t"))
    df = pd.DataFrame(final[0:len(final)-1])[[0, 1]]
    ens_id = pd.DataFrame(df[1].str.split(":").tolist())[1]
    df = pd.concat([df, ens_id], axis=1)
    df.columns = ['KEGGid', 'ensDB', 'ENSid']
    df = df[['KEGGid', 'ENSid']]
    return df
def function[ensembl_to_kegg, parameter[organism, kegg_db]]: constant[ Looks up KEGG mappings of KEGG ids to ensembl ids :param organism: an organisms as listed in organismsKEGG() :param kegg_db: a matching KEGG db as reported in databasesKEGG :returns: a Pandas dataframe of with 'KEGGid' and 'ENSid'. ] call[name[print], parameter[binary_operation[binary_operation[binary_operation[constant[KEGG API: http://rest.genome.jp/link/] + name[kegg_db]] + constant[/]] + name[organism]]]] call[name[sys].stdout.flush, parameter[]] variable[kegg_ens] assign[=] call[call[name[urlopen], parameter[binary_operation[binary_operation[binary_operation[constant[http://rest.genome.jp/link/] + name[kegg_db]] + constant[/]] + name[organism]]]].read, parameter[]] variable[kegg_ens] assign[=] call[name[kegg_ens].split, parameter[constant[ ]]] variable[final] assign[=] list[[]] for taget[name[i]] in starred[name[kegg_ens]] begin[:] call[name[final].append, parameter[call[name[i].split, parameter[constant[ ]]]]] variable[df] assign[=] call[call[name[pd].DataFrame, parameter[call[name[final]][<ast.Slice object at 0x7da18ede7cd0>]]]][list[[<ast.Constant object at 0x7da18ede46d0>, <ast.Constant object at 0x7da18ede4880>]]] variable[ens_id] assign[=] call[call[name[pd].DataFrame, parameter[call[call[call[name[df]][constant[1]].str.split, parameter[constant[:]]].tolist, parameter[]]]]][constant[1]] variable[df] assign[=] call[name[pd].concat, parameter[list[[<ast.Name object at 0x7da20e9b2800>, <ast.Name object at 0x7da20e9b3fd0>]]]] name[df].columns assign[=] list[[<ast.Constant object at 0x7da20e9b2830>, <ast.Constant object at 0x7da20e9b3a30>, <ast.Constant object at 0x7da20e9b0c10>]] variable[df] assign[=] call[name[df]][list[[<ast.Constant object at 0x7da20e9b3fa0>, <ast.Constant object at 0x7da20e9b0910>]]] return[name[df]]
keyword[def] identifier[ensembl_to_kegg] ( identifier[organism] , identifier[kegg_db] ): literal[string] identifier[print] ( literal[string] + identifier[kegg_db] + literal[string] + identifier[organism] ) identifier[sys] . identifier[stdout] . identifier[flush] () identifier[kegg_ens] = identifier[urlopen] ( literal[string] + identifier[kegg_db] + literal[string] + identifier[organism] ). identifier[read] () identifier[kegg_ens] = identifier[kegg_ens] . identifier[split] ( literal[string] ) identifier[final] =[] keyword[for] identifier[i] keyword[in] identifier[kegg_ens] : identifier[final] . identifier[append] ( identifier[i] . identifier[split] ( literal[string] )) identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[final] [ literal[int] : identifier[len] ( identifier[final] )- literal[int] ])[[ literal[int] , literal[int] ]] identifier[ens_id] = identifier[pd] . identifier[DataFrame] ( identifier[df] [ literal[int] ]. identifier[str] . identifier[split] ( literal[string] ). identifier[tolist] ())[ literal[int] ] identifier[df] = identifier[pd] . identifier[concat] ([ identifier[df] , identifier[ens_id] ], identifier[axis] = literal[int] ) identifier[df] . identifier[columns] =[ literal[string] , literal[string] , literal[string] ] identifier[df] = identifier[df] [[ literal[string] , literal[string] ]] keyword[return] identifier[df]
def ensembl_to_kegg(organism, kegg_db): """ Looks up KEGG mappings of KEGG ids to ensembl ids :param organism: an organisms as listed in organismsKEGG() :param kegg_db: a matching KEGG db as reported in databasesKEGG :returns: a Pandas dataframe of with 'KEGGid' and 'ENSid'. """ print('KEGG API: http://rest.genome.jp/link/' + kegg_db + '/' + organism) sys.stdout.flush() kegg_ens = urlopen('http://rest.genome.jp/link/' + kegg_db + '/' + organism).read() kegg_ens = kegg_ens.split('\n') final = [] for i in kegg_ens: final.append(i.split('\t')) # depends on [control=['for'], data=['i']] df = pd.DataFrame(final[0:len(final) - 1])[[0, 1]] ens_id = pd.DataFrame(df[1].str.split(':').tolist())[1] df = pd.concat([df, ens_id], axis=1) df.columns = ['KEGGid', 'ensDB', 'ENSid'] df = df[['KEGGid', 'ENSid']] return df
def add_missing(self, aesthetics):
    """
    Add missing but required scales.

    Parameters
    ----------
    aesthetics : list | tuple
        Aesthetic names. Typically, ('x', 'y').
    """
    # Keep only aesthetics that don't have scales
    aesthetics = set(aesthetics) - set(self.input())

    for ae in aesthetics:
        scale_name = 'scale_{}_continuous'.format(ae)
        scale_f = Registry[scale_name]
        self.append(scale_f())
def function[add_missing, parameter[self, aesthetics]]: constant[ Add missing but required scales. Parameters ---------- aesthetics : list | tuple Aesthetic names. Typically, ('x', 'y'). ] variable[aesthetics] assign[=] binary_operation[call[name[set], parameter[name[aesthetics]]] - call[name[set], parameter[call[name[self].input, parameter[]]]]] for taget[name[ae]] in starred[name[aesthetics]] begin[:] variable[scale_name] assign[=] call[constant[scale_{}_continuous].format, parameter[name[ae]]] variable[scale_f] assign[=] call[name[Registry]][name[scale_name]] call[name[self].append, parameter[call[name[scale_f], parameter[]]]]
keyword[def] identifier[add_missing] ( identifier[self] , identifier[aesthetics] ): literal[string] identifier[aesthetics] = identifier[set] ( identifier[aesthetics] )- identifier[set] ( identifier[self] . identifier[input] ()) keyword[for] identifier[ae] keyword[in] identifier[aesthetics] : identifier[scale_name] = literal[string] . identifier[format] ( identifier[ae] ) identifier[scale_f] = identifier[Registry] [ identifier[scale_name] ] identifier[self] . identifier[append] ( identifier[scale_f] ())
def add_missing(self, aesthetics): """ Add missing but required scales. Parameters ---------- aesthetics : list | tuple Aesthetic names. Typically, ('x', 'y'). """ # Keep only aesthetics that don't have scales aesthetics = set(aesthetics) - set(self.input()) for ae in aesthetics: scale_name = 'scale_{}_continuous'.format(ae) scale_f = Registry[scale_name] self.append(scale_f()) # depends on [control=['for'], data=['ae']]
def ensure_permissions(path, user, group, permissions, maxdepth=-1): """Ensure permissions for path. If path is a file, apply to file and return. If path is a directory, apply recursively (if required) to directory contents and return. :param user: user name :param group: group name :param permissions: octal permissions :param maxdepth: maximum recursion depth. A negative maxdepth allows infinite recursion and maxdepth=0 means no recursion. :returns: None """ if not os.path.exists(path): log("File '%s' does not exist - cannot set permissions" % (path), level=WARNING) return _user = pwd.getpwnam(user) os.chown(path, _user.pw_uid, grp.getgrnam(group).gr_gid) os.chmod(path, permissions) if maxdepth == 0: log("Max recursion depth reached - skipping further recursion", level=DEBUG) return elif maxdepth > 0: maxdepth -= 1 if os.path.isdir(path): contents = glob.glob("%s/*" % (path)) for c in contents: ensure_permissions(c, user=user, group=group, permissions=permissions, maxdepth=maxdepth)
def function[ensure_permissions, parameter[path, user, group, permissions, maxdepth]]: constant[Ensure permissions for path. If path is a file, apply to file and return. If path is a directory, apply recursively (if required) to directory contents and return. :param user: user name :param group: group name :param permissions: octal permissions :param maxdepth: maximum recursion depth. A negative maxdepth allows infinite recursion and maxdepth=0 means no recursion. :returns: None ] if <ast.UnaryOp object at 0x7da2054a5d80> begin[:] call[name[log], parameter[binary_operation[constant[File '%s' does not exist - cannot set permissions] <ast.Mod object at 0x7da2590d6920> name[path]]]] return[None] variable[_user] assign[=] call[name[pwd].getpwnam, parameter[name[user]]] call[name[os].chown, parameter[name[path], name[_user].pw_uid, call[name[grp].getgrnam, parameter[name[group]]].gr_gid]] call[name[os].chmod, parameter[name[path], name[permissions]]] if compare[name[maxdepth] equal[==] constant[0]] begin[:] call[name[log], parameter[constant[Max recursion depth reached - skipping further recursion]]] return[None] if call[name[os].path.isdir, parameter[name[path]]] begin[:] variable[contents] assign[=] call[name[glob].glob, parameter[binary_operation[constant[%s/*] <ast.Mod object at 0x7da2590d6920> name[path]]]] for taget[name[c]] in starred[name[contents]] begin[:] call[name[ensure_permissions], parameter[name[c]]]
keyword[def] identifier[ensure_permissions] ( identifier[path] , identifier[user] , identifier[group] , identifier[permissions] , identifier[maxdepth] =- literal[int] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ): identifier[log] ( literal[string] %( identifier[path] ), identifier[level] = identifier[WARNING] ) keyword[return] identifier[_user] = identifier[pwd] . identifier[getpwnam] ( identifier[user] ) identifier[os] . identifier[chown] ( identifier[path] , identifier[_user] . identifier[pw_uid] , identifier[grp] . identifier[getgrnam] ( identifier[group] ). identifier[gr_gid] ) identifier[os] . identifier[chmod] ( identifier[path] , identifier[permissions] ) keyword[if] identifier[maxdepth] == literal[int] : identifier[log] ( literal[string] , identifier[level] = identifier[DEBUG] ) keyword[return] keyword[elif] identifier[maxdepth] > literal[int] : identifier[maxdepth] -= literal[int] keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ): identifier[contents] = identifier[glob] . identifier[glob] ( literal[string] %( identifier[path] )) keyword[for] identifier[c] keyword[in] identifier[contents] : identifier[ensure_permissions] ( identifier[c] , identifier[user] = identifier[user] , identifier[group] = identifier[group] , identifier[permissions] = identifier[permissions] , identifier[maxdepth] = identifier[maxdepth] )
def ensure_permissions(path, user, group, permissions, maxdepth=-1): """Ensure permissions for path. If path is a file, apply to file and return. If path is a directory, apply recursively (if required) to directory contents and return. :param user: user name :param group: group name :param permissions: octal permissions :param maxdepth: maximum recursion depth. A negative maxdepth allows infinite recursion and maxdepth=0 means no recursion. :returns: None """ if not os.path.exists(path): log("File '%s' does not exist - cannot set permissions" % path, level=WARNING) return # depends on [control=['if'], data=[]] _user = pwd.getpwnam(user) os.chown(path, _user.pw_uid, grp.getgrnam(group).gr_gid) os.chmod(path, permissions) if maxdepth == 0: log('Max recursion depth reached - skipping further recursion', level=DEBUG) return # depends on [control=['if'], data=[]] elif maxdepth > 0: maxdepth -= 1 # depends on [control=['if'], data=['maxdepth']] if os.path.isdir(path): contents = glob.glob('%s/*' % path) for c in contents: ensure_permissions(c, user=user, group=group, permissions=permissions, maxdepth=maxdepth) # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]]
def reset(self): """ Process everything all over again. """ self.indexCount = 0 indexDir = self.store.newDirectory(self.indexDirectory) if indexDir.exists(): indexDir.remove() for src in self.getSources(): src.removeReliableListener(self) src.addReliableListener(self, style=iaxiom.REMOTE)
def function[reset, parameter[self]]: constant[ Process everything all over again. ] name[self].indexCount assign[=] constant[0] variable[indexDir] assign[=] call[name[self].store.newDirectory, parameter[name[self].indexDirectory]] if call[name[indexDir].exists, parameter[]] begin[:] call[name[indexDir].remove, parameter[]] for taget[name[src]] in starred[call[name[self].getSources, parameter[]]] begin[:] call[name[src].removeReliableListener, parameter[name[self]]] call[name[src].addReliableListener, parameter[name[self]]]
keyword[def] identifier[reset] ( identifier[self] ): literal[string] identifier[self] . identifier[indexCount] = literal[int] identifier[indexDir] = identifier[self] . identifier[store] . identifier[newDirectory] ( identifier[self] . identifier[indexDirectory] ) keyword[if] identifier[indexDir] . identifier[exists] (): identifier[indexDir] . identifier[remove] () keyword[for] identifier[src] keyword[in] identifier[self] . identifier[getSources] (): identifier[src] . identifier[removeReliableListener] ( identifier[self] ) identifier[src] . identifier[addReliableListener] ( identifier[self] , identifier[style] = identifier[iaxiom] . identifier[REMOTE] )
def reset(self): """ Process everything all over again. """ self.indexCount = 0 indexDir = self.store.newDirectory(self.indexDirectory) if indexDir.exists(): indexDir.remove() # depends on [control=['if'], data=[]] for src in self.getSources(): src.removeReliableListener(self) src.addReliableListener(self, style=iaxiom.REMOTE) # depends on [control=['for'], data=['src']]
def SegmentProd(a, ids): """ Segmented prod op. """ func = lambda idxs: reduce(np.multiply, a[idxs]) return seg_map(func, a, ids),
def function[SegmentProd, parameter[a, ids]]: constant[ Segmented prod op. ] variable[func] assign[=] <ast.Lambda object at 0x7da1b0650d00> return[tuple[[<ast.Call object at 0x7da1b0652380>]]]
keyword[def] identifier[SegmentProd] ( identifier[a] , identifier[ids] ): literal[string] identifier[func] = keyword[lambda] identifier[idxs] : identifier[reduce] ( identifier[np] . identifier[multiply] , identifier[a] [ identifier[idxs] ]) keyword[return] identifier[seg_map] ( identifier[func] , identifier[a] , identifier[ids] ),
def SegmentProd(a, ids): """ Segmented prod op. """ func = lambda idxs: reduce(np.multiply, a[idxs]) return (seg_map(func, a, ids),)
def alter(self, function): """ Alters the currently stored value by applying a function on it. :param function: (Function), A stateful serializable object which represents the Function defined on server side. This object must have a serializable Function counter part registered on server side with the actual ``org.hazelcast.core.IFunction`` implementation. """ check_not_none(function, "function can't be None") return self._encode_invoke(atomic_long_alter_codec, function=self._to_data(function))
def function[alter, parameter[self, function]]: constant[ Alters the currently stored value by applying a function on it. :param function: (Function), A stateful serializable object which represents the Function defined on server side. This object must have a serializable Function counter part registered on server side with the actual ``org.hazelcast.core.IFunction`` implementation. ] call[name[check_not_none], parameter[name[function], constant[function can't be None]]] return[call[name[self]._encode_invoke, parameter[name[atomic_long_alter_codec]]]]
keyword[def] identifier[alter] ( identifier[self] , identifier[function] ): literal[string] identifier[check_not_none] ( identifier[function] , literal[string] ) keyword[return] identifier[self] . identifier[_encode_invoke] ( identifier[atomic_long_alter_codec] , identifier[function] = identifier[self] . identifier[_to_data] ( identifier[function] ))
def alter(self, function): """ Alters the currently stored value by applying a function on it. :param function: (Function), A stateful serializable object which represents the Function defined on server side. This object must have a serializable Function counter part registered on server side with the actual ``org.hazelcast.core.IFunction`` implementation. """ check_not_none(function, "function can't be None") return self._encode_invoke(atomic_long_alter_codec, function=self._to_data(function))
def pack(self, value=None): """Pack the value as a binary representation. :attr:`data` is packed before the calling :meth:`.GenericMessage.pack`. After that, :attr:`data`'s value is restored. Returns: bytes: The binary representation. Raises: :exc:`~.exceptions.PackException`: If pack fails. """ if value is None: data_backup = None if self.data is not None and not isinstance(self.data, bytes): data_backup = self.data self.data = self.data.pack() packed = super().pack() if data_backup is not None: self.data = data_backup return packed elif isinstance(value, type(self)): return value.pack() else: msg = "{} is not an instance of {}".format(value, type(self).__name__) raise PackException(msg)
def function[pack, parameter[self, value]]: constant[Pack the value as a binary representation. :attr:`data` is packed before the calling :meth:`.GenericMessage.pack`. After that, :attr:`data`'s value is restored. Returns: bytes: The binary representation. Raises: :exc:`~.exceptions.PackException`: If pack fails. ] if compare[name[value] is constant[None]] begin[:] variable[data_backup] assign[=] constant[None] if <ast.BoolOp object at 0x7da20c6c48b0> begin[:] variable[data_backup] assign[=] name[self].data name[self].data assign[=] call[name[self].data.pack, parameter[]] variable[packed] assign[=] call[call[name[super], parameter[]].pack, parameter[]] if compare[name[data_backup] is_not constant[None]] begin[:] name[self].data assign[=] name[data_backup] return[name[packed]]
keyword[def] identifier[pack] ( identifier[self] , identifier[value] = keyword[None] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[None] : identifier[data_backup] = keyword[None] keyword[if] identifier[self] . identifier[data] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[self] . identifier[data] , identifier[bytes] ): identifier[data_backup] = identifier[self] . identifier[data] identifier[self] . identifier[data] = identifier[self] . identifier[data] . identifier[pack] () identifier[packed] = identifier[super] (). identifier[pack] () keyword[if] identifier[data_backup] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[data] = identifier[data_backup] keyword[return] identifier[packed] keyword[elif] identifier[isinstance] ( identifier[value] , identifier[type] ( identifier[self] )): keyword[return] identifier[value] . identifier[pack] () keyword[else] : identifier[msg] = literal[string] . identifier[format] ( identifier[value] , identifier[type] ( identifier[self] ). identifier[__name__] ) keyword[raise] identifier[PackException] ( identifier[msg] )
def pack(self, value=None): """Pack the value as a binary representation. :attr:`data` is packed before the calling :meth:`.GenericMessage.pack`. After that, :attr:`data`'s value is restored. Returns: bytes: The binary representation. Raises: :exc:`~.exceptions.PackException`: If pack fails. """ if value is None: data_backup = None if self.data is not None and (not isinstance(self.data, bytes)): data_backup = self.data self.data = self.data.pack() # depends on [control=['if'], data=[]] packed = super().pack() if data_backup is not None: self.data = data_backup # depends on [control=['if'], data=['data_backup']] return packed # depends on [control=['if'], data=[]] elif isinstance(value, type(self)): return value.pack() # depends on [control=['if'], data=[]] else: msg = '{} is not an instance of {}'.format(value, type(self).__name__) raise PackException(msg)
def resnet_imagenet_34_td_weight_05_05(): """Set of hyperparameters.""" hp = resnet_imagenet_34() hp.use_td = "weight" hp.targeting_rate = 0.5 hp.keep_prob = 0.5 return hp
def function[resnet_imagenet_34_td_weight_05_05, parameter[]]: constant[Set of hyperparameters.] variable[hp] assign[=] call[name[resnet_imagenet_34], parameter[]] name[hp].use_td assign[=] constant[weight] name[hp].targeting_rate assign[=] constant[0.5] name[hp].keep_prob assign[=] constant[0.5] return[name[hp]]
keyword[def] identifier[resnet_imagenet_34_td_weight_05_05] (): literal[string] identifier[hp] = identifier[resnet_imagenet_34] () identifier[hp] . identifier[use_td] = literal[string] identifier[hp] . identifier[targeting_rate] = literal[int] identifier[hp] . identifier[keep_prob] = literal[int] keyword[return] identifier[hp]
def resnet_imagenet_34_td_weight_05_05(): """Set of hyperparameters.""" hp = resnet_imagenet_34() hp.use_td = 'weight' hp.targeting_rate = 0.5 hp.keep_prob = 0.5 return hp
def display_information_message_bar( title=None, message=None, more_details=None, button_text=tr('Show details ...'), duration=8, iface_object=iface): """ Display an information message bar. :param iface_object: The QGIS IFace instance. Note that we cannot use qgis.utils.iface since it is not available in our test environment. :type iface_object: QgisInterface :param title: The title of the message bar. :type title: basestring :param message: The message inside the message bar. :type message: basestring :param more_details: The message inside the 'Show details' button. :type more_details: basestring :param button_text: The text of the button if 'more_details' is not empty. :type button_text: basestring :param duration: The duration for the display, default is 8 seconds. :type duration: int """ iface_object.messageBar().clearWidgets() widget = iface_object.messageBar().createMessage(title, message) if more_details: button = QPushButton(widget) button.setText(button_text) button.pressed.connect( lambda: display_information_message_box( title=title, message=more_details)) widget.layout().addWidget(button) iface_object.messageBar().pushWidget(widget, Qgis.Info, duration)
def function[display_information_message_bar, parameter[title, message, more_details, button_text, duration, iface_object]]: constant[ Display an information message bar. :param iface_object: The QGIS IFace instance. Note that we cannot use qgis.utils.iface since it is not available in our test environment. :type iface_object: QgisInterface :param title: The title of the message bar. :type title: basestring :param message: The message inside the message bar. :type message: basestring :param more_details: The message inside the 'Show details' button. :type more_details: basestring :param button_text: The text of the button if 'more_details' is not empty. :type button_text: basestring :param duration: The duration for the display, default is 8 seconds. :type duration: int ] call[call[name[iface_object].messageBar, parameter[]].clearWidgets, parameter[]] variable[widget] assign[=] call[call[name[iface_object].messageBar, parameter[]].createMessage, parameter[name[title], name[message]]] if name[more_details] begin[:] variable[button] assign[=] call[name[QPushButton], parameter[name[widget]]] call[name[button].setText, parameter[name[button_text]]] call[name[button].pressed.connect, parameter[<ast.Lambda object at 0x7da1b0c3c8e0>]] call[call[name[widget].layout, parameter[]].addWidget, parameter[name[button]]] call[call[name[iface_object].messageBar, parameter[]].pushWidget, parameter[name[widget], name[Qgis].Info, name[duration]]]
keyword[def] identifier[display_information_message_bar] ( identifier[title] = keyword[None] , identifier[message] = keyword[None] , identifier[more_details] = keyword[None] , identifier[button_text] = identifier[tr] ( literal[string] ), identifier[duration] = literal[int] , identifier[iface_object] = identifier[iface] ): literal[string] identifier[iface_object] . identifier[messageBar] (). identifier[clearWidgets] () identifier[widget] = identifier[iface_object] . identifier[messageBar] (). identifier[createMessage] ( identifier[title] , identifier[message] ) keyword[if] identifier[more_details] : identifier[button] = identifier[QPushButton] ( identifier[widget] ) identifier[button] . identifier[setText] ( identifier[button_text] ) identifier[button] . identifier[pressed] . identifier[connect] ( keyword[lambda] : identifier[display_information_message_box] ( identifier[title] = identifier[title] , identifier[message] = identifier[more_details] )) identifier[widget] . identifier[layout] (). identifier[addWidget] ( identifier[button] ) identifier[iface_object] . identifier[messageBar] (). identifier[pushWidget] ( identifier[widget] , identifier[Qgis] . identifier[Info] , identifier[duration] )
def display_information_message_bar(title=None, message=None, more_details=None, button_text=tr('Show details ...'), duration=8, iface_object=iface): """ Display an information message bar. :param iface_object: The QGIS IFace instance. Note that we cannot use qgis.utils.iface since it is not available in our test environment. :type iface_object: QgisInterface :param title: The title of the message bar. :type title: basestring :param message: The message inside the message bar. :type message: basestring :param more_details: The message inside the 'Show details' button. :type more_details: basestring :param button_text: The text of the button if 'more_details' is not empty. :type button_text: basestring :param duration: The duration for the display, default is 8 seconds. :type duration: int """ iface_object.messageBar().clearWidgets() widget = iface_object.messageBar().createMessage(title, message) if more_details: button = QPushButton(widget) button.setText(button_text) button.pressed.connect(lambda : display_information_message_box(title=title, message=more_details)) widget.layout().addWidget(button) # depends on [control=['if'], data=[]] iface_object.messageBar().pushWidget(widget, Qgis.Info, duration)
def _get_cache_dir(candidate): """Get the current cache directory.""" if candidate: return candidate import distutils.dist # suppress(import-error) import distutils.command.build # suppress(import-error) build_cmd = distutils.command.build.build(distutils.dist.Distribution()) build_cmd.finalize_options() cache_dir = os.path.abspath(build_cmd.build_temp) # Make sure that it is created before anyone tries to use it try: os.makedirs(cache_dir) except OSError as error: if error.errno != errno.EEXIST: raise error return cache_dir
def function[_get_cache_dir, parameter[candidate]]: constant[Get the current cache directory.] if name[candidate] begin[:] return[name[candidate]] import module[distutils.dist] import module[distutils.command.build] variable[build_cmd] assign[=] call[name[distutils].command.build.build, parameter[call[name[distutils].dist.Distribution, parameter[]]]] call[name[build_cmd].finalize_options, parameter[]] variable[cache_dir] assign[=] call[name[os].path.abspath, parameter[name[build_cmd].build_temp]] <ast.Try object at 0x7da18f00e290> return[name[cache_dir]]
keyword[def] identifier[_get_cache_dir] ( identifier[candidate] ): literal[string] keyword[if] identifier[candidate] : keyword[return] identifier[candidate] keyword[import] identifier[distutils] . identifier[dist] keyword[import] identifier[distutils] . identifier[command] . identifier[build] identifier[build_cmd] = identifier[distutils] . identifier[command] . identifier[build] . identifier[build] ( identifier[distutils] . identifier[dist] . identifier[Distribution] ()) identifier[build_cmd] . identifier[finalize_options] () identifier[cache_dir] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[build_cmd] . identifier[build_temp] ) keyword[try] : identifier[os] . identifier[makedirs] ( identifier[cache_dir] ) keyword[except] identifier[OSError] keyword[as] identifier[error] : keyword[if] identifier[error] . identifier[errno] != identifier[errno] . identifier[EEXIST] : keyword[raise] identifier[error] keyword[return] identifier[cache_dir]
def _get_cache_dir(candidate): """Get the current cache directory.""" if candidate: return candidate # depends on [control=['if'], data=[]] import distutils.dist # suppress(import-error) import distutils.command.build # suppress(import-error) build_cmd = distutils.command.build.build(distutils.dist.Distribution()) build_cmd.finalize_options() cache_dir = os.path.abspath(build_cmd.build_temp) # Make sure that it is created before anyone tries to use it try: os.makedirs(cache_dir) # depends on [control=['try'], data=[]] except OSError as error: if error.errno != errno.EEXIST: raise error # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['error']] return cache_dir
def has_pfn(self, url, site=None): """ Wrapper of the pegasus hasPFN function, that allows it to be called outside of specific pegasus functions. """ curr_pfn = dax.PFN(url, site) return self.hasPFN(curr_pfn)
def function[has_pfn, parameter[self, url, site]]: constant[ Wrapper of the pegasus hasPFN function, that allows it to be called outside of specific pegasus functions. ] variable[curr_pfn] assign[=] call[name[dax].PFN, parameter[name[url], name[site]]] return[call[name[self].hasPFN, parameter[name[curr_pfn]]]]
keyword[def] identifier[has_pfn] ( identifier[self] , identifier[url] , identifier[site] = keyword[None] ): literal[string] identifier[curr_pfn] = identifier[dax] . identifier[PFN] ( identifier[url] , identifier[site] ) keyword[return] identifier[self] . identifier[hasPFN] ( identifier[curr_pfn] )
def has_pfn(self, url, site=None): """ Wrapper of the pegasus hasPFN function, that allows it to be called outside of specific pegasus functions. """ curr_pfn = dax.PFN(url, site) return self.hasPFN(curr_pfn)
def wait_for_fun(fun, timeout=900, **kwargs): ''' Wait until a function finishes, or times out ''' start = time.time() log.debug('Attempting function %s', fun) trycount = 0 while True: trycount += 1 try: response = fun(**kwargs) if not isinstance(response, bool): return response except Exception as exc: log.debug('Caught exception in wait_for_fun: %s', exc) time.sleep(1) log.debug('Retrying function %s on (try %s)', fun, trycount) if time.time() - start > timeout: log.error('Function timed out: %s', timeout) return False
def function[wait_for_fun, parameter[fun, timeout]]: constant[ Wait until a function finishes, or times out ] variable[start] assign[=] call[name[time].time, parameter[]] call[name[log].debug, parameter[constant[Attempting function %s], name[fun]]] variable[trycount] assign[=] constant[0] while constant[True] begin[:] <ast.AugAssign object at 0x7da1b1f361a0> <ast.Try object at 0x7da1b1f36230> if compare[binary_operation[call[name[time].time, parameter[]] - name[start]] greater[>] name[timeout]] begin[:] call[name[log].error, parameter[constant[Function timed out: %s], name[timeout]]] return[constant[False]]
keyword[def] identifier[wait_for_fun] ( identifier[fun] , identifier[timeout] = literal[int] ,** identifier[kwargs] ): literal[string] identifier[start] = identifier[time] . identifier[time] () identifier[log] . identifier[debug] ( literal[string] , identifier[fun] ) identifier[trycount] = literal[int] keyword[while] keyword[True] : identifier[trycount] += literal[int] keyword[try] : identifier[response] = identifier[fun] (** identifier[kwargs] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[response] , identifier[bool] ): keyword[return] identifier[response] keyword[except] identifier[Exception] keyword[as] identifier[exc] : identifier[log] . identifier[debug] ( literal[string] , identifier[exc] ) identifier[time] . identifier[sleep] ( literal[int] ) identifier[log] . identifier[debug] ( literal[string] , identifier[fun] , identifier[trycount] ) keyword[if] identifier[time] . identifier[time] ()- identifier[start] > identifier[timeout] : identifier[log] . identifier[error] ( literal[string] , identifier[timeout] ) keyword[return] keyword[False]
def wait_for_fun(fun, timeout=900, **kwargs): """ Wait until a function finishes, or times out """ start = time.time() log.debug('Attempting function %s', fun) trycount = 0 while True: trycount += 1 try: response = fun(**kwargs) if not isinstance(response, bool): return response # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as exc: log.debug('Caught exception in wait_for_fun: %s', exc) time.sleep(1) log.debug('Retrying function %s on (try %s)', fun, trycount) # depends on [control=['except'], data=['exc']] if time.time() - start > timeout: log.error('Function timed out: %s', timeout) return False # depends on [control=['if'], data=['timeout']] # depends on [control=['while'], data=[]]
def reverseGeocode(self, location): """ The reverseGeocode operation determines the address at a particular x/y location. You pass the coordinates of a point location to the geocoding service, and the service returns the address that is closest to the location. Input: location - either an Point object or a list defined as [X,Y] """ params = { "f" : "json" } url = self._url + "/reverseGeocode" if isinstance(location, Point): params['location'] = location.asDictionary elif isinstance(location, list): params['location'] = "%s,%s" % (location[0], location[1]) else: raise Exception("Invalid location") return self._post(url=url, param_dict=params, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)
def function[reverseGeocode, parameter[self, location]]: constant[ The reverseGeocode operation determines the address at a particular x/y location. You pass the coordinates of a point location to the geocoding service, and the service returns the address that is closest to the location. Input: location - either an Point object or a list defined as [X,Y] ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18f09f610>], [<ast.Constant object at 0x7da18f09fdf0>]] variable[url] assign[=] binary_operation[name[self]._url + constant[/reverseGeocode]] if call[name[isinstance], parameter[name[location], name[Point]]] begin[:] call[name[params]][constant[location]] assign[=] name[location].asDictionary return[call[name[self]._post, parameter[]]]
keyword[def] identifier[reverseGeocode] ( identifier[self] , identifier[location] ): literal[string] identifier[params] ={ literal[string] : literal[string] } identifier[url] = identifier[self] . identifier[_url] + literal[string] keyword[if] identifier[isinstance] ( identifier[location] , identifier[Point] ): identifier[params] [ literal[string] ]= identifier[location] . identifier[asDictionary] keyword[elif] identifier[isinstance] ( identifier[location] , identifier[list] ): identifier[params] [ literal[string] ]= literal[string] %( identifier[location] [ literal[int] ], identifier[location] [ literal[int] ]) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[return] identifier[self] . identifier[_post] ( identifier[url] = identifier[url] , identifier[param_dict] = identifier[params] , identifier[securityHandler] = identifier[self] . identifier[_securityHandler] , identifier[proxy_url] = identifier[self] . identifier[_proxy_url] , identifier[proxy_port] = identifier[self] . identifier[_proxy_port] )
def reverseGeocode(self, location): """ The reverseGeocode operation determines the address at a particular x/y location. You pass the coordinates of a point location to the geocoding service, and the service returns the address that is closest to the location. Input: location - either an Point object or a list defined as [X,Y] """ params = {'f': 'json'} url = self._url + '/reverseGeocode' if isinstance(location, Point): params['location'] = location.asDictionary # depends on [control=['if'], data=[]] elif isinstance(location, list): params['location'] = '%s,%s' % (location[0], location[1]) # depends on [control=['if'], data=[]] else: raise Exception('Invalid location') return self._post(url=url, param_dict=params, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)
def validate_config(cls, service_config, target): """ Validate generic options for a particular target """ if service_config.has_option(target, 'only_if_assigned'): die("[%s] has an 'only_if_assigned' option. Should be " "'%s.only_if_assigned'." % (target, cls.CONFIG_PREFIX)) if service_config.has_option(target, 'also_unassigned'): die("[%s] has an 'also_unassigned' option. Should be " "'%s.also_unassigned'." % (target, cls.CONFIG_PREFIX)) if service_config.has_option(target, 'default_priority'): die("[%s] has a 'default_priority' option. Should be " "'%s.default_priority'." % (target, cls.CONFIG_PREFIX)) if service_config.has_option(target, 'add_tags'): die("[%s] has an 'add_tags' option. Should be " "'%s.add_tags'." % (target, cls.CONFIG_PREFIX))
def function[validate_config, parameter[cls, service_config, target]]: constant[ Validate generic options for a particular target ] if call[name[service_config].has_option, parameter[name[target], constant[only_if_assigned]]] begin[:] call[name[die], parameter[binary_operation[constant[[%s] has an 'only_if_assigned' option. Should be '%s.only_if_assigned'.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0241810>, <ast.Attribute object at 0x7da1b02402e0>]]]]] if call[name[service_config].has_option, parameter[name[target], constant[also_unassigned]]] begin[:] call[name[die], parameter[binary_operation[constant[[%s] has an 'also_unassigned' option. Should be '%s.also_unassigned'.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0381030>, <ast.Attribute object at 0x7da1b03831c0>]]]]] if call[name[service_config].has_option, parameter[name[target], constant[default_priority]]] begin[:] call[name[die], parameter[binary_operation[constant[[%s] has a 'default_priority' option. Should be '%s.default_priority'.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0381a20>, <ast.Attribute object at 0x7da1b0383310>]]]]] if call[name[service_config].has_option, parameter[name[target], constant[add_tags]]] begin[:] call[name[die], parameter[binary_operation[constant[[%s] has an 'add_tags' option. Should be '%s.add_tags'.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0381240>, <ast.Attribute object at 0x7da1b0383d90>]]]]]
keyword[def] identifier[validate_config] ( identifier[cls] , identifier[service_config] , identifier[target] ): literal[string] keyword[if] identifier[service_config] . identifier[has_option] ( identifier[target] , literal[string] ): identifier[die] ( literal[string] literal[string] %( identifier[target] , identifier[cls] . identifier[CONFIG_PREFIX] )) keyword[if] identifier[service_config] . identifier[has_option] ( identifier[target] , literal[string] ): identifier[die] ( literal[string] literal[string] %( identifier[target] , identifier[cls] . identifier[CONFIG_PREFIX] )) keyword[if] identifier[service_config] . identifier[has_option] ( identifier[target] , literal[string] ): identifier[die] ( literal[string] literal[string] %( identifier[target] , identifier[cls] . identifier[CONFIG_PREFIX] )) keyword[if] identifier[service_config] . identifier[has_option] ( identifier[target] , literal[string] ): identifier[die] ( literal[string] literal[string] %( identifier[target] , identifier[cls] . identifier[CONFIG_PREFIX] ))
def validate_config(cls, service_config, target): """ Validate generic options for a particular target """ if service_config.has_option(target, 'only_if_assigned'): die("[%s] has an 'only_if_assigned' option. Should be '%s.only_if_assigned'." % (target, cls.CONFIG_PREFIX)) # depends on [control=['if'], data=[]] if service_config.has_option(target, 'also_unassigned'): die("[%s] has an 'also_unassigned' option. Should be '%s.also_unassigned'." % (target, cls.CONFIG_PREFIX)) # depends on [control=['if'], data=[]] if service_config.has_option(target, 'default_priority'): die("[%s] has a 'default_priority' option. Should be '%s.default_priority'." % (target, cls.CONFIG_PREFIX)) # depends on [control=['if'], data=[]] if service_config.has_option(target, 'add_tags'): die("[%s] has an 'add_tags' option. Should be '%s.add_tags'." % (target, cls.CONFIG_PREFIX)) # depends on [control=['if'], data=[]]
def template_overlaps(bank_filters, template, psd, low_frequency_cutoff): """ This functions calculates the overlaps between the template and the bank veto templates. Parameters ---------- bank_filters: List of FrequencySeries template: FrequencySeries psd: FrequencySeries low_frequency_cutoff: float Returns ------- overlaps: List of complex overlap values. """ overlaps = [] template_ow = template / psd for bank_template in bank_filters: overlap = overlap_cplx(template_ow, bank_template, low_frequency_cutoff=low_frequency_cutoff, normalized=False) norm = sqrt(1 / template.sigmasq(psd) / bank_template.sigmasq(psd)) overlaps.append(overlap * norm) if (abs(overlaps[-1]) > 0.99): errMsg = "Overlap > 0.99 between bank template and filter. " errMsg += "This bank template will not be used to calculate " errMsg += "bank chisq for this filter template. The expected " errMsg += "value will be added to the chisq to account for " errMsg += "the removal of this template.\n" errMsg += "Masses of filter template: %e %e\n" \ %(template.params.mass1, template.params.mass2) errMsg += "Masses of bank filter template: %e %e\n" \ %(bank_template.params.mass1, bank_template.params.mass2) errMsg += "Overlap: %e" %(abs(overlaps[-1])) logging.debug(errMsg) return overlaps
def function[template_overlaps, parameter[bank_filters, template, psd, low_frequency_cutoff]]: constant[ This functions calculates the overlaps between the template and the bank veto templates. Parameters ---------- bank_filters: List of FrequencySeries template: FrequencySeries psd: FrequencySeries low_frequency_cutoff: float Returns ------- overlaps: List of complex overlap values. ] variable[overlaps] assign[=] list[[]] variable[template_ow] assign[=] binary_operation[name[template] / name[psd]] for taget[name[bank_template]] in starred[name[bank_filters]] begin[:] variable[overlap] assign[=] call[name[overlap_cplx], parameter[name[template_ow], name[bank_template]]] variable[norm] assign[=] call[name[sqrt], parameter[binary_operation[binary_operation[constant[1] / call[name[template].sigmasq, parameter[name[psd]]]] / call[name[bank_template].sigmasq, parameter[name[psd]]]]]] call[name[overlaps].append, parameter[binary_operation[name[overlap] * name[norm]]]] if compare[call[name[abs], parameter[call[name[overlaps]][<ast.UnaryOp object at 0x7da1b26adf30>]]] greater[>] constant[0.99]] begin[:] variable[errMsg] assign[=] constant[Overlap > 0.99 between bank template and filter. ] <ast.AugAssign object at 0x7da1b26ad780> <ast.AugAssign object at 0x7da1b26add80> <ast.AugAssign object at 0x7da1b26acaf0> <ast.AugAssign object at 0x7da1b26afc70> <ast.AugAssign object at 0x7da1b26af970> <ast.AugAssign object at 0x7da1b26ae2f0> <ast.AugAssign object at 0x7da1b26adf00> call[name[logging].debug, parameter[name[errMsg]]] return[name[overlaps]]
keyword[def] identifier[template_overlaps] ( identifier[bank_filters] , identifier[template] , identifier[psd] , identifier[low_frequency_cutoff] ): literal[string] identifier[overlaps] =[] identifier[template_ow] = identifier[template] / identifier[psd] keyword[for] identifier[bank_template] keyword[in] identifier[bank_filters] : identifier[overlap] = identifier[overlap_cplx] ( identifier[template_ow] , identifier[bank_template] , identifier[low_frequency_cutoff] = identifier[low_frequency_cutoff] , identifier[normalized] = keyword[False] ) identifier[norm] = identifier[sqrt] ( literal[int] / identifier[template] . identifier[sigmasq] ( identifier[psd] )/ identifier[bank_template] . identifier[sigmasq] ( identifier[psd] )) identifier[overlaps] . identifier[append] ( identifier[overlap] * identifier[norm] ) keyword[if] ( identifier[abs] ( identifier[overlaps] [- literal[int] ])> literal[int] ): identifier[errMsg] = literal[string] identifier[errMsg] += literal[string] identifier[errMsg] += literal[string] identifier[errMsg] += literal[string] identifier[errMsg] += literal[string] identifier[errMsg] += literal[string] %( identifier[template] . identifier[params] . identifier[mass1] , identifier[template] . identifier[params] . identifier[mass2] ) identifier[errMsg] += literal[string] %( identifier[bank_template] . identifier[params] . identifier[mass1] , identifier[bank_template] . identifier[params] . identifier[mass2] ) identifier[errMsg] += literal[string] %( identifier[abs] ( identifier[overlaps] [- literal[int] ])) identifier[logging] . identifier[debug] ( identifier[errMsg] ) keyword[return] identifier[overlaps]
def template_overlaps(bank_filters, template, psd, low_frequency_cutoff): """ This functions calculates the overlaps between the template and the bank veto templates. Parameters ---------- bank_filters: List of FrequencySeries template: FrequencySeries psd: FrequencySeries low_frequency_cutoff: float Returns ------- overlaps: List of complex overlap values. """ overlaps = [] template_ow = template / psd for bank_template in bank_filters: overlap = overlap_cplx(template_ow, bank_template, low_frequency_cutoff=low_frequency_cutoff, normalized=False) norm = sqrt(1 / template.sigmasq(psd) / bank_template.sigmasq(psd)) overlaps.append(overlap * norm) if abs(overlaps[-1]) > 0.99: errMsg = 'Overlap > 0.99 between bank template and filter. ' errMsg += 'This bank template will not be used to calculate ' errMsg += 'bank chisq for this filter template. The expected ' errMsg += 'value will be added to the chisq to account for ' errMsg += 'the removal of this template.\n' errMsg += 'Masses of filter template: %e %e\n' % (template.params.mass1, template.params.mass2) errMsg += 'Masses of bank filter template: %e %e\n' % (bank_template.params.mass1, bank_template.params.mass2) errMsg += 'Overlap: %e' % abs(overlaps[-1]) logging.debug(errMsg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bank_template']] return overlaps
def arcs_missing(self): """Returns a sorted list of the arcs in the code not executed.""" possible = self.arc_possibilities() executed = self.arcs_executed() missing = [ p for p in possible if p not in executed and p[0] not in self.no_branch ] return sorted(missing)
def function[arcs_missing, parameter[self]]: constant[Returns a sorted list of the arcs in the code not executed.] variable[possible] assign[=] call[name[self].arc_possibilities, parameter[]] variable[executed] assign[=] call[name[self].arcs_executed, parameter[]] variable[missing] assign[=] <ast.ListComp object at 0x7da18fe93e50> return[call[name[sorted], parameter[name[missing]]]]
keyword[def] identifier[arcs_missing] ( identifier[self] ): literal[string] identifier[possible] = identifier[self] . identifier[arc_possibilities] () identifier[executed] = identifier[self] . identifier[arcs_executed] () identifier[missing] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[possible] keyword[if] identifier[p] keyword[not] keyword[in] identifier[executed] keyword[and] identifier[p] [ literal[int] ] keyword[not] keyword[in] identifier[self] . identifier[no_branch] ] keyword[return] identifier[sorted] ( identifier[missing] )
def arcs_missing(self): """Returns a sorted list of the arcs in the code not executed.""" possible = self.arc_possibilities() executed = self.arcs_executed() missing = [p for p in possible if p not in executed and p[0] not in self.no_branch] return sorted(missing)
def put_file(self, file, object_type, object_id, width, height, mimetype, reproducible): """Puts the ``file`` of the image. :param file: the image file to put :type file: file-like object, :class:`file` :param object_type: the object type of the image to put e.g. ``'comics.cover'`` :type object_type: :class:`str` :param object_id: the object identifier number of the image to put :type object_id: :class:`numbers.Integral` :param width: the width of the image to put :type width: :class:`numbers.Integral` :param height: the height of the image to put :type height: :class:`numbers.Integral` :param mimetype: the mimetype of the image to put e.g. ``'image/jpeg'`` :type mimetype: :class:`str` :param reproducible: :const:`True` only if it's reproducible by computing e.g. resized thumbnails. :const:`False` if it cannot be reproduced e.g. original images :type reproducible: :class:`bool` .. note:: This is an abstract method which has to be implemented (overridden) by subclasses. It's not for consumers but implementations, so consumers should use :meth:`store()` method instead of this. """ raise NotImplementedError('put_file() has to be implemented')
def function[put_file, parameter[self, file, object_type, object_id, width, height, mimetype, reproducible]]: constant[Puts the ``file`` of the image. :param file: the image file to put :type file: file-like object, :class:`file` :param object_type: the object type of the image to put e.g. ``'comics.cover'`` :type object_type: :class:`str` :param object_id: the object identifier number of the image to put :type object_id: :class:`numbers.Integral` :param width: the width of the image to put :type width: :class:`numbers.Integral` :param height: the height of the image to put :type height: :class:`numbers.Integral` :param mimetype: the mimetype of the image to put e.g. ``'image/jpeg'`` :type mimetype: :class:`str` :param reproducible: :const:`True` only if it's reproducible by computing e.g. resized thumbnails. :const:`False` if it cannot be reproduced e.g. original images :type reproducible: :class:`bool` .. note:: This is an abstract method which has to be implemented (overridden) by subclasses. It's not for consumers but implementations, so consumers should use :meth:`store()` method instead of this. ] <ast.Raise object at 0x7da1b0effeb0>
keyword[def] identifier[put_file] ( identifier[self] , identifier[file] , identifier[object_type] , identifier[object_id] , identifier[width] , identifier[height] , identifier[mimetype] , identifier[reproducible] ): literal[string] keyword[raise] identifier[NotImplementedError] ( literal[string] )
def put_file(self, file, object_type, object_id, width, height, mimetype, reproducible): """Puts the ``file`` of the image. :param file: the image file to put :type file: file-like object, :class:`file` :param object_type: the object type of the image to put e.g. ``'comics.cover'`` :type object_type: :class:`str` :param object_id: the object identifier number of the image to put :type object_id: :class:`numbers.Integral` :param width: the width of the image to put :type width: :class:`numbers.Integral` :param height: the height of the image to put :type height: :class:`numbers.Integral` :param mimetype: the mimetype of the image to put e.g. ``'image/jpeg'`` :type mimetype: :class:`str` :param reproducible: :const:`True` only if it's reproducible by computing e.g. resized thumbnails. :const:`False` if it cannot be reproduced e.g. original images :type reproducible: :class:`bool` .. note:: This is an abstract method which has to be implemented (overridden) by subclasses. It's not for consumers but implementations, so consumers should use :meth:`store()` method instead of this. """ raise NotImplementedError('put_file() has to be implemented')
def _check_all_metadata_found(metadata, items): """Print warning if samples in CSV file are missing in folder""" for name in metadata: seen = False for sample in items: check_file = sample["files"][0] if sample.get("files") else sample["vrn_file"] if isinstance(name, (tuple, list)): if check_file.find(name[0]) > -1: seen = True elif check_file.find(name) > -1: seen = True elif "*" in name and fnmatch.fnmatch(check_file, "*/%s" % name): seen = True if not seen: print("WARNING: sample not found %s" % str(name))
def function[_check_all_metadata_found, parameter[metadata, items]]: constant[Print warning if samples in CSV file are missing in folder] for taget[name[name]] in starred[name[metadata]] begin[:] variable[seen] assign[=] constant[False] for taget[name[sample]] in starred[name[items]] begin[:] variable[check_file] assign[=] <ast.IfExp object at 0x7da1b19dbb80> if call[name[isinstance], parameter[name[name], tuple[[<ast.Name object at 0x7da1b19daaa0>, <ast.Name object at 0x7da1b19d8790>]]]] begin[:] if compare[call[name[check_file].find, parameter[call[name[name]][constant[0]]]] greater[>] <ast.UnaryOp object at 0x7da1b19d93f0>] begin[:] variable[seen] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b17080a0> begin[:] call[name[print], parameter[binary_operation[constant[WARNING: sample not found %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[name]]]]]]
keyword[def] identifier[_check_all_metadata_found] ( identifier[metadata] , identifier[items] ): literal[string] keyword[for] identifier[name] keyword[in] identifier[metadata] : identifier[seen] = keyword[False] keyword[for] identifier[sample] keyword[in] identifier[items] : identifier[check_file] = identifier[sample] [ literal[string] ][ literal[int] ] keyword[if] identifier[sample] . identifier[get] ( literal[string] ) keyword[else] identifier[sample] [ literal[string] ] keyword[if] identifier[isinstance] ( identifier[name] ,( identifier[tuple] , identifier[list] )): keyword[if] identifier[check_file] . identifier[find] ( identifier[name] [ literal[int] ])>- literal[int] : identifier[seen] = keyword[True] keyword[elif] identifier[check_file] . identifier[find] ( identifier[name] )>- literal[int] : identifier[seen] = keyword[True] keyword[elif] literal[string] keyword[in] identifier[name] keyword[and] identifier[fnmatch] . identifier[fnmatch] ( identifier[check_file] , literal[string] % identifier[name] ): identifier[seen] = keyword[True] keyword[if] keyword[not] identifier[seen] : identifier[print] ( literal[string] % identifier[str] ( identifier[name] ))
def _check_all_metadata_found(metadata, items): """Print warning if samples in CSV file are missing in folder""" for name in metadata: seen = False for sample in items: check_file = sample['files'][0] if sample.get('files') else sample['vrn_file'] if isinstance(name, (tuple, list)): if check_file.find(name[0]) > -1: seen = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif check_file.find(name) > -1: seen = True # depends on [control=['if'], data=[]] elif '*' in name and fnmatch.fnmatch(check_file, '*/%s' % name): seen = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sample']] if not seen: print('WARNING: sample not found %s' % str(name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
def create_layout(lexer=None, reserve_space_for_menu=8, get_prompt_tokens=None, get_bottom_toolbar_tokens=None, extra_input_processors=None, multiline=False, wrap_lines=True): """ Creates a custom `Layout` for the Crash input REPL This layout includes: * a bottom left-aligned session toolbar container * a bottom right-aligned side-bar container +-------------------------------------------+ | cr> select 1; | | | | | +-------------------------------------------+ | bottom_toolbar_tokens sidebar_tokens | +-------------------------------------------+ """ # Create processors list. input_processors = [ ConditionalProcessor( # Highlight the reverse-i-search buffer HighlightSearchProcessor(preview_search=True), HasFocus(SEARCH_BUFFER)), ] if extra_input_processors: input_processors.extend(extra_input_processors) lexer = PygmentsLexer(lexer, sync_from_start=True) multiline = to_cli_filter(multiline) sidebar_token = [ (Token.Toolbar.Status.Key, "[ctrl+d]"), (Token.Toolbar.Status, " Exit") ] sidebar_width = token_list_width(sidebar_token) get_sidebar_tokens = lambda _: sidebar_token def get_height(cli): # If there is an autocompletion menu to be shown, make sure that our # layout has at least a minimal height in order to display it. if reserve_space_for_menu and not cli.is_done: buff = cli.current_buffer # Reserve the space, either when there are completions, or when # `complete_while_typing` is true and we expect completions very # soon. if buff.complete_while_typing() or buff.complete_state is not None: return LayoutDimension(min=reserve_space_for_menu) return LayoutDimension() # Create and return Container instance. return HSplit([ VSplit([ HSplit([ # The main input, with completion menus floating on top of it. FloatContainer( HSplit([ Window( BufferControl( input_processors=input_processors, lexer=lexer, # enable preview search for reverse-i-search preview_search=True), get_height=get_height, wrap_lines=wrap_lines, left_margins=[ # In multiline mode, use the window margin to display # the prompt and continuation tokens. ConditionalMargin( PromptMargin(get_prompt_tokens), filter=multiline ) ], ), ]), [ # Completion menu Float(xcursor=True, ycursor=True, content=CompletionsMenu( max_height=16, scroll_offset=1, extra_filter=HasFocus(DEFAULT_BUFFER)) ), ] ), # reverse-i-search toolbar (ctrl+r) ConditionalContainer(SearchToolbar(), multiline), ]) ]), ] + [ VSplit([ # Left-Aligned Session Toolbar ConditionalContainer( Window( TokenListControl(get_bottom_toolbar_tokens), height=LayoutDimension.exact(1) ), filter=~IsDone() & RendererHeightIsKnown()), # Right-Aligned Container ConditionalContainer( Window( TokenListControl(get_sidebar_tokens), height=LayoutDimension.exact(1), width=LayoutDimension.exact(sidebar_width) ), filter=~IsDone() & RendererHeightIsKnown()) ]) ])
def function[create_layout, parameter[lexer, reserve_space_for_menu, get_prompt_tokens, get_bottom_toolbar_tokens, extra_input_processors, multiline, wrap_lines]]: constant[ Creates a custom `Layout` for the Crash input REPL This layout includes: * a bottom left-aligned session toolbar container * a bottom right-aligned side-bar container +-------------------------------------------+ | cr> select 1; | | | | | +-------------------------------------------+ | bottom_toolbar_tokens sidebar_tokens | +-------------------------------------------+ ] variable[input_processors] assign[=] list[[<ast.Call object at 0x7da18bcc89a0>]] if name[extra_input_processors] begin[:] call[name[input_processors].extend, parameter[name[extra_input_processors]]] variable[lexer] assign[=] call[name[PygmentsLexer], parameter[name[lexer]]] variable[multiline] assign[=] call[name[to_cli_filter], parameter[name[multiline]]] variable[sidebar_token] assign[=] list[[<ast.Tuple object at 0x7da18bccad10>, <ast.Tuple object at 0x7da18bccac80>]] variable[sidebar_width] assign[=] call[name[token_list_width], parameter[name[sidebar_token]]] variable[get_sidebar_tokens] assign[=] <ast.Lambda object at 0x7da20e9b12a0> def function[get_height, parameter[cli]]: if <ast.BoolOp object at 0x7da20e9b13c0> begin[:] variable[buff] assign[=] name[cli].current_buffer if <ast.BoolOp object at 0x7da20e9b15d0> begin[:] return[call[name[LayoutDimension], parameter[]]] return[call[name[LayoutDimension], parameter[]]] return[call[name[HSplit], parameter[binary_operation[list[[<ast.Call object at 0x7da20e9b0c40>]] + list[[<ast.Call object at 0x7da2044c3e20>]]]]]]
keyword[def] identifier[create_layout] ( identifier[lexer] = keyword[None] , identifier[reserve_space_for_menu] = literal[int] , identifier[get_prompt_tokens] = keyword[None] , identifier[get_bottom_toolbar_tokens] = keyword[None] , identifier[extra_input_processors] = keyword[None] , identifier[multiline] = keyword[False] , identifier[wrap_lines] = keyword[True] ): literal[string] identifier[input_processors] =[ identifier[ConditionalProcessor] ( identifier[HighlightSearchProcessor] ( identifier[preview_search] = keyword[True] ), identifier[HasFocus] ( identifier[SEARCH_BUFFER] )), ] keyword[if] identifier[extra_input_processors] : identifier[input_processors] . identifier[extend] ( identifier[extra_input_processors] ) identifier[lexer] = identifier[PygmentsLexer] ( identifier[lexer] , identifier[sync_from_start] = keyword[True] ) identifier[multiline] = identifier[to_cli_filter] ( identifier[multiline] ) identifier[sidebar_token] =[ ( identifier[Token] . identifier[Toolbar] . identifier[Status] . identifier[Key] , literal[string] ), ( identifier[Token] . identifier[Toolbar] . identifier[Status] , literal[string] ) ] identifier[sidebar_width] = identifier[token_list_width] ( identifier[sidebar_token] ) identifier[get_sidebar_tokens] = keyword[lambda] identifier[_] : identifier[sidebar_token] keyword[def] identifier[get_height] ( identifier[cli] ): keyword[if] identifier[reserve_space_for_menu] keyword[and] keyword[not] identifier[cli] . identifier[is_done] : identifier[buff] = identifier[cli] . identifier[current_buffer] keyword[if] identifier[buff] . identifier[complete_while_typing] () keyword[or] identifier[buff] . identifier[complete_state] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[LayoutDimension] ( identifier[min] = identifier[reserve_space_for_menu] ) keyword[return] identifier[LayoutDimension] () keyword[return] identifier[HSplit] ([ identifier[VSplit] ([ identifier[HSplit] ([ identifier[FloatContainer] ( identifier[HSplit] ([ identifier[Window] ( identifier[BufferControl] ( identifier[input_processors] = identifier[input_processors] , identifier[lexer] = identifier[lexer] , identifier[preview_search] = keyword[True] ), identifier[get_height] = identifier[get_height] , identifier[wrap_lines] = identifier[wrap_lines] , identifier[left_margins] =[ identifier[ConditionalMargin] ( identifier[PromptMargin] ( identifier[get_prompt_tokens] ), identifier[filter] = identifier[multiline] ) ], ), ]), [ identifier[Float] ( identifier[xcursor] = keyword[True] , identifier[ycursor] = keyword[True] , identifier[content] = identifier[CompletionsMenu] ( identifier[max_height] = literal[int] , identifier[scroll_offset] = literal[int] , identifier[extra_filter] = identifier[HasFocus] ( identifier[DEFAULT_BUFFER] )) ), ] ), identifier[ConditionalContainer] ( identifier[SearchToolbar] (), identifier[multiline] ), ]) ]), ]+[ identifier[VSplit] ([ identifier[ConditionalContainer] ( identifier[Window] ( identifier[TokenListControl] ( identifier[get_bottom_toolbar_tokens] ), identifier[height] = identifier[LayoutDimension] . identifier[exact] ( literal[int] ) ), identifier[filter] =~ identifier[IsDone] ()& identifier[RendererHeightIsKnown] ()), identifier[ConditionalContainer] ( identifier[Window] ( identifier[TokenListControl] ( identifier[get_sidebar_tokens] ), identifier[height] = identifier[LayoutDimension] . identifier[exact] ( literal[int] ), identifier[width] = identifier[LayoutDimension] . identifier[exact] ( identifier[sidebar_width] ) ), identifier[filter] =~ identifier[IsDone] ()& identifier[RendererHeightIsKnown] ()) ]) ])
def create_layout(lexer=None, reserve_space_for_menu=8, get_prompt_tokens=None, get_bottom_toolbar_tokens=None, extra_input_processors=None, multiline=False, wrap_lines=True): """ Creates a custom `Layout` for the Crash input REPL This layout includes: * a bottom left-aligned session toolbar container * a bottom right-aligned side-bar container +-------------------------------------------+ | cr> select 1; | | | | | +-------------------------------------------+ | bottom_toolbar_tokens sidebar_tokens | +-------------------------------------------+ """ # Create processors list. # Highlight the reverse-i-search buffer input_processors = [ConditionalProcessor(HighlightSearchProcessor(preview_search=True), HasFocus(SEARCH_BUFFER))] if extra_input_processors: input_processors.extend(extra_input_processors) # depends on [control=['if'], data=[]] lexer = PygmentsLexer(lexer, sync_from_start=True) multiline = to_cli_filter(multiline) sidebar_token = [(Token.Toolbar.Status.Key, '[ctrl+d]'), (Token.Toolbar.Status, ' Exit')] sidebar_width = token_list_width(sidebar_token) get_sidebar_tokens = lambda _: sidebar_token def get_height(cli): # If there is an autocompletion menu to be shown, make sure that our # layout has at least a minimal height in order to display it. if reserve_space_for_menu and (not cli.is_done): buff = cli.current_buffer # Reserve the space, either when there are completions, or when # `complete_while_typing` is true and we expect completions very # soon. if buff.complete_while_typing() or buff.complete_state is not None: return LayoutDimension(min=reserve_space_for_menu) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return LayoutDimension() # Create and return Container instance. # The main input, with completion menus floating on top of it. # enable preview search for reverse-i-search # In multiline mode, use the window margin to display # the prompt and continuation tokens. # Completion menu # reverse-i-search toolbar (ctrl+r) # Left-Aligned Session Toolbar # Right-Aligned Container return HSplit([VSplit([HSplit([FloatContainer(HSplit([Window(BufferControl(input_processors=input_processors, lexer=lexer, preview_search=True), get_height=get_height, wrap_lines=wrap_lines, left_margins=[ConditionalMargin(PromptMargin(get_prompt_tokens), filter=multiline)])]), [Float(xcursor=True, ycursor=True, content=CompletionsMenu(max_height=16, scroll_offset=1, extra_filter=HasFocus(DEFAULT_BUFFER)))]), ConditionalContainer(SearchToolbar(), multiline)])])] + [VSplit([ConditionalContainer(Window(TokenListControl(get_bottom_toolbar_tokens), height=LayoutDimension.exact(1)), filter=~IsDone() & RendererHeightIsKnown()), ConditionalContainer(Window(TokenListControl(get_sidebar_tokens), height=LayoutDimension.exact(1), width=LayoutDimension.exact(sidebar_width)), filter=~IsDone() & RendererHeightIsKnown())])])
async def timeout(source, timeout): """Raise a time-out if an element of the asynchronous sequence takes too long to arrive. Note: the timeout is not global but specific to each step of the iteration. """ async with streamcontext(source) as streamer: while True: try: item = await wait_for(anext(streamer), timeout) except StopAsyncIteration: break else: yield item
<ast.AsyncFunctionDef object at 0x7da1b1b7d1b0>
keyword[async] keyword[def] identifier[timeout] ( identifier[source] , identifier[timeout] ): literal[string] keyword[async] keyword[with] identifier[streamcontext] ( identifier[source] ) keyword[as] identifier[streamer] : keyword[while] keyword[True] : keyword[try] : identifier[item] = keyword[await] identifier[wait_for] ( identifier[anext] ( identifier[streamer] ), identifier[timeout] ) keyword[except] identifier[StopAsyncIteration] : keyword[break] keyword[else] : keyword[yield] identifier[item]
async def timeout(source, timeout): """Raise a time-out if an element of the asynchronous sequence takes too long to arrive. Note: the timeout is not global but specific to each step of the iteration. """ async with streamcontext(source) as streamer: while True: try: item = await wait_for(anext(streamer), timeout) # depends on [control=['try'], data=[]] except StopAsyncIteration: break # depends on [control=['except'], data=[]] else: yield item # depends on [control=['while'], data=[]]
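A self-contained sketch of the same per-step timeout idea using only the standard asyncio module; the ticker generator, the delays, and the 0.5 s budget are illustrative and not part of the original helper or its streamcontext/anext utilities.

import asyncio

async def ticker(n, delay):
    # hypothetical source: yields n integers, one every `delay` seconds
    for i in range(n):
        await asyncio.sleep(delay)
        yield i

async def consume():
    agen = ticker(3, delay=0.1).__aiter__()
    while True:
        try:
            # each step gets its own 0.5 s budget, mirroring timeout() above
            item = await asyncio.wait_for(agen.__anext__(), 0.5)
        except StopAsyncIteration:
            break
        print(item)

asyncio.run(consume())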
def ptmsiReallocationCommand(PTmsiSignature_presence=0):
    """P-TMSI REALLOCATION COMMAND Section 9.4.7"""
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x10)  # 00010000
    c = MobileId()
    d = RoutingAreaIdentification()
    e = ForceToStandbyAndSpareHalfOctets()
    packet = a / b / c / d / e
    if PTmsiSignature_presence == 1:  # value comparison, not identity ('is')
        g = PTmsiSignature(ieiPTS=0x19)
        packet = packet / g
    return packet
def function[ptmsiReallocationCommand, parameter[PTmsiSignature_presence]]: constant[P-TMSI REALLOCATION COMMAND Section 9.4.7] variable[a] assign[=] call[name[TpPd], parameter[]] variable[b] assign[=] call[name[MessageType], parameter[]] variable[c] assign[=] call[name[MobileId], parameter[]] variable[d] assign[=] call[name[RoutingAreaIdentification], parameter[]] variable[e] assign[=] call[name[ForceToStandbyAndSpareHalfOctets], parameter[]] variable[packet] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[a] / name[b]] / name[c]] / name[d]] / name[e]] if compare[name[PTmsiSignature_presence] is constant[1]] begin[:] variable[g] assign[=] call[name[PTmsiSignature], parameter[]] variable[packet] assign[=] binary_operation[name[packet] / name[g]] return[name[packet]]
keyword[def] identifier[ptmsiReallocationCommand] ( identifier[PTmsiSignature_presence] = literal[int] ): literal[string] identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] ) identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] ) identifier[c] = identifier[MobileId] () identifier[d] = identifier[RoutingAreaIdentification] () identifier[e] = identifier[ForceToStandbyAndSpareHalfOctets] () identifier[packet] = identifier[a] / identifier[b] / identifier[c] / identifier[d] / identifier[e] keyword[if] identifier[PTmsiSignature_presence] keyword[is] literal[int] : identifier[g] = identifier[PTmsiSignature] ( identifier[ieiPTS] = literal[int] ) identifier[packet] = identifier[packet] / identifier[g] keyword[return] identifier[packet]
def ptmsiReallocationCommand(PTmsiSignature_presence=0): """P-TMSI REALLOCATION COMMAND Section 9.4.7""" a = TpPd(pd=3) b = MessageType(mesType=16) # 00010000 c = MobileId() d = RoutingAreaIdentification() e = ForceToStandbyAndSpareHalfOctets() packet = a / b / c / d / e if PTmsiSignature_presence is 1: g = PTmsiSignature(ieiPTS=25) packet = packet / g # depends on [control=['if'], data=[]] return packet
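A hypothetical usage sketch, assuming the GSM layer classes used above (TpPd, MessageType, PTmsiSignature, ...) come from a scapy GSM Um module that is importable in scope; show() and bytes() are standard scapy packet operations, the field values shown are only illustrative.

pkt = ptmsiReallocationCommand(PTmsiSignature_presence=1)
pkt.show()              # field-by-field dump of the assembled message
raw_bytes = bytes(pkt)  # serialize the packet for transmission or inspection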
def buffer_list(self): """ The buffer list this instance operates on. Only available in mode != AIOBLOCK_MODE_POLL. Changes on a submitted transfer are not fully applied until its next submission: kernel will still be using original buffer list. """ if self._iocb.aio_lio_opcode == libaio.IO_CMD_POLL: raise AttributeError return self._buffer_list
def function[buffer_list, parameter[self]]: constant[ The buffer list this instance operates on. Only available in mode != AIOBLOCK_MODE_POLL. Changes on a submitted transfer are not fully applied until its next submission: kernel will still be using original buffer list. ] if compare[name[self]._iocb.aio_lio_opcode equal[==] name[libaio].IO_CMD_POLL] begin[:] <ast.Raise object at 0x7da1b0212e00> return[name[self]._buffer_list]
keyword[def] identifier[buffer_list] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_iocb] . identifier[aio_lio_opcode] == identifier[libaio] . identifier[IO_CMD_POLL] : keyword[raise] identifier[AttributeError] keyword[return] identifier[self] . identifier[_buffer_list]
def buffer_list(self): """ The buffer list this instance operates on. Only available in mode != AIOBLOCK_MODE_POLL. Changes on a submitted transfer are not fully applied until its next submission: kernel will still be using original buffer list. """ if self._iocb.aio_lio_opcode == libaio.IO_CMD_POLL: raise AttributeError # depends on [control=['if'], data=[]] return self._buffer_list
def levenshtein(str1, s2):
    ''' Levenshtein (edit) distance between two strings '''
    N1 = len(str1)
    N2 = len(s2)
    # Build the (N2+1) x (N1+1) DP table; wrap range() in list() so the rows are
    # mutable sequences (bare range objects cannot be item-assigned in Python 3).
    stringRange = [list(range(N1 + 1))] * (N2 + 1)
    for i in range(N2 + 1):
        stringRange[i] = list(range(i, i + N1 + 1))
    for i in range(0, N2):
        for j in range(0, N1):
            if str1[j] == s2[i]:
                stringRange[i+1][j+1] = min(stringRange[i+1][j] + 1, stringRange[i][j+1] + 1, stringRange[i][j])
            else:
                stringRange[i+1][j+1] = min(stringRange[i+1][j] + 1, stringRange[i][j+1] + 1, stringRange[i][j] + 1)
    return stringRange[N2][N1]
def function[levenshtein, parameter[str1, s2]]: constant[ Distance between two strings ] variable[N1] assign[=] call[name[len], parameter[name[str1]]] variable[N2] assign[=] call[name[len], parameter[name[s2]]] variable[stringRange] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b212ed70>]] * binary_operation[name[N2] + constant[1]]] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[N2] + constant[1]]]]] begin[:] call[name[stringRange]][name[i]] assign[=] call[name[range], parameter[name[i], binary_operation[binary_operation[name[i] + name[N1]] + constant[1]]]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[N2]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[constant[0], name[N1]]]] begin[:] if compare[call[name[str1]][name[j]] equal[==] call[name[s2]][name[i]]] begin[:] call[call[name[stringRange]][binary_operation[name[i] + constant[1]]]][binary_operation[name[j] + constant[1]]] assign[=] call[name[min], parameter[binary_operation[call[call[name[stringRange]][binary_operation[name[i] + constant[1]]]][name[j]] + constant[1]], binary_operation[call[call[name[stringRange]][name[i]]][binary_operation[name[j] + constant[1]]] + constant[1]], call[call[name[stringRange]][name[i]]][name[j]]]] return[call[call[name[stringRange]][name[N2]]][name[N1]]]
keyword[def] identifier[levenshtein] ( identifier[str1] , identifier[s2] ): literal[string] identifier[N1] = identifier[len] ( identifier[str1] ) identifier[N2] = identifier[len] ( identifier[s2] ) identifier[stringRange] =[ identifier[range] ( identifier[N1] + literal[int] )]*( identifier[N2] + literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[N2] + literal[int] ): identifier[stringRange] [ identifier[i] ]= identifier[range] ( identifier[i] , identifier[i] + identifier[N1] + literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[N2] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[N1] ): keyword[if] identifier[str1] [ identifier[j] ]== identifier[s2] [ identifier[i] ]: identifier[stringRange] [ identifier[i] + literal[int] ][ identifier[j] + literal[int] ]= identifier[min] ( identifier[stringRange] [ identifier[i] + literal[int] ][ identifier[j] ]+ literal[int] , identifier[stringRange] [ identifier[i] ][ identifier[j] + literal[int] ]+ literal[int] , identifier[stringRange] [ identifier[i] ][ identifier[j] ]) keyword[else] : identifier[stringRange] [ identifier[i] + literal[int] ][ identifier[j] + literal[int] ]= identifier[min] ( identifier[stringRange] [ identifier[i] + literal[int] ][ identifier[j] ]+ literal[int] , identifier[stringRange] [ identifier[i] ][ identifier[j] + literal[int] ]+ literal[int] , identifier[stringRange] [ identifier[i] ][ identifier[j] ]+ literal[int] ) keyword[return] identifier[stringRange] [ identifier[N2] ][ identifier[N1] ]
def levenshtein(str1, s2): """ Distance between two strings """ N1 = len(str1) N2 = len(s2) stringRange = [range(N1 + 1)] * (N2 + 1) for i in range(N2 + 1): stringRange[i] = range(i, i + N1 + 1) # depends on [control=['for'], data=['i']] for i in range(0, N2): for j in range(0, N1): if str1[j] == s2[i]: stringRange[i + 1][j + 1] = min(stringRange[i + 1][j] + 1, stringRange[i][j + 1] + 1, stringRange[i][j]) # depends on [control=['if'], data=[]] else: stringRange[i + 1][j + 1] = min(stringRange[i + 1][j] + 1, stringRange[i][j + 1] + 1, stringRange[i][j] + 1) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] return stringRange[N2][N1]
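A minimal usage sketch for the distance function above; the word pairs are illustrative.

if __name__ == '__main__':
    assert levenshtein('kitten', 'sitting') == 3   # substitute k->s, e->i, insert g
    assert levenshtein('flaw', 'lawn') == 2        # delete f, insert n
    print(levenshtein('kitten', 'sitting'))        # -> 3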
def check_requirements(self, reqs): # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]] """Return 2 sets: - conflicting requirements: set of (installed, wanted) reqs tuples - missing requirements: set of reqs """ missing = set() conflicting = set() if reqs: ws = WorkingSet(self._lib_dirs) for req in reqs: try: if ws.find(Requirement.parse(req)) is None: missing.add(req) except VersionConflict as e: conflicting.add((str(e.args[0].as_requirement()), str(e.args[1]))) return conflicting, missing
def function[check_requirements, parameter[self, reqs]]: constant[Return 2 sets: - conflicting requirements: set of (installed, wanted) reqs tuples - missing requirements: set of reqs ] variable[missing] assign[=] call[name[set], parameter[]] variable[conflicting] assign[=] call[name[set], parameter[]] if name[reqs] begin[:] variable[ws] assign[=] call[name[WorkingSet], parameter[name[self]._lib_dirs]] for taget[name[req]] in starred[name[reqs]] begin[:] <ast.Try object at 0x7da1b1e8c730> return[tuple[[<ast.Name object at 0x7da1b1ea11b0>, <ast.Name object at 0x7da1b1ea10f0>]]]
keyword[def] identifier[check_requirements] ( identifier[self] , identifier[reqs] ): literal[string] identifier[missing] = identifier[set] () identifier[conflicting] = identifier[set] () keyword[if] identifier[reqs] : identifier[ws] = identifier[WorkingSet] ( identifier[self] . identifier[_lib_dirs] ) keyword[for] identifier[req] keyword[in] identifier[reqs] : keyword[try] : keyword[if] identifier[ws] . identifier[find] ( identifier[Requirement] . identifier[parse] ( identifier[req] )) keyword[is] keyword[None] : identifier[missing] . identifier[add] ( identifier[req] ) keyword[except] identifier[VersionConflict] keyword[as] identifier[e] : identifier[conflicting] . identifier[add] (( identifier[str] ( identifier[e] . identifier[args] [ literal[int] ]. identifier[as_requirement] ()), identifier[str] ( identifier[e] . identifier[args] [ literal[int] ]))) keyword[return] identifier[conflicting] , identifier[missing]
def check_requirements(self, reqs): # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]] 'Return 2 sets:\n - conflicting requirements: set of (installed, wanted) reqs tuples\n - missing requirements: set of reqs\n ' missing = set() conflicting = set() if reqs: ws = WorkingSet(self._lib_dirs) for req in reqs: try: if ws.find(Requirement.parse(req)) is None: missing.add(req) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except VersionConflict as e: conflicting.add((str(e.args[0].as_requirement()), str(e.args[1]))) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['req']] # depends on [control=['if'], data=[]] return (conflicting, missing)
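An illustrative sketch of the pkg_resources calls the method relies on, run outside the pip class (so the default sys.path stands in for self._lib_dirs); assumes setuptools/pkg_resources is installed and the requirement string is made up.

from pkg_resources import Requirement, VersionConflict, WorkingSet

ws = WorkingSet()                          # defaults to sys.path instead of _lib_dirs
req = Requirement.parse('setuptools>=1.0')
try:
    dist = ws.find(req)                    # a Distribution if satisfied, None if missing
    print('missing' if dist is None else dist)
except VersionConflict as e:
    print('conflict:', e.args[0].as_requirement(), 'wanted', e.args[1])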
def compare_ecp_pots(potential1, potential2, compare_meta=False, rel_tol=0.0): ''' Compare two ecp potentials for approximate equality (exponents/coefficients are within a tolerance) If compare_meta is True, the metadata is also compared for exact equality. ''' if potential1['angular_momentum'] != potential2['angular_momentum']: return False rexponents1 = potential1['r_exponents'] rexponents2 = potential2['r_exponents'] gexponents1 = potential1['gaussian_exponents'] gexponents2 = potential2['gaussian_exponents'] coefficients1 = potential1['coefficients'] coefficients2 = potential2['coefficients'] # integer comparison if rexponents1 != rexponents2: return False if not _compare_vector(gexponents1, gexponents2, rel_tol): return False if not _compare_matrix(coefficients1, coefficients2, rel_tol): return False if compare_meta: if potential1['ecp_type'] != potential2['ecp_type']: return False return True else: return True
def function[compare_ecp_pots, parameter[potential1, potential2, compare_meta, rel_tol]]: constant[ Compare two ecp potentials for approximate equality (exponents/coefficients are within a tolerance) If compare_meta is True, the metadata is also compared for exact equality. ] if compare[call[name[potential1]][constant[angular_momentum]] not_equal[!=] call[name[potential2]][constant[angular_momentum]]] begin[:] return[constant[False]] variable[rexponents1] assign[=] call[name[potential1]][constant[r_exponents]] variable[rexponents2] assign[=] call[name[potential2]][constant[r_exponents]] variable[gexponents1] assign[=] call[name[potential1]][constant[gaussian_exponents]] variable[gexponents2] assign[=] call[name[potential2]][constant[gaussian_exponents]] variable[coefficients1] assign[=] call[name[potential1]][constant[coefficients]] variable[coefficients2] assign[=] call[name[potential2]][constant[coefficients]] if compare[name[rexponents1] not_equal[!=] name[rexponents2]] begin[:] return[constant[False]] if <ast.UnaryOp object at 0x7da20c6aa260> begin[:] return[constant[False]] if <ast.UnaryOp object at 0x7da20c6ab490> begin[:] return[constant[False]] if name[compare_meta] begin[:] if compare[call[name[potential1]][constant[ecp_type]] not_equal[!=] call[name[potential2]][constant[ecp_type]]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[compare_ecp_pots] ( identifier[potential1] , identifier[potential2] , identifier[compare_meta] = keyword[False] , identifier[rel_tol] = literal[int] ): literal[string] keyword[if] identifier[potential1] [ literal[string] ]!= identifier[potential2] [ literal[string] ]: keyword[return] keyword[False] identifier[rexponents1] = identifier[potential1] [ literal[string] ] identifier[rexponents2] = identifier[potential2] [ literal[string] ] identifier[gexponents1] = identifier[potential1] [ literal[string] ] identifier[gexponents2] = identifier[potential2] [ literal[string] ] identifier[coefficients1] = identifier[potential1] [ literal[string] ] identifier[coefficients2] = identifier[potential2] [ literal[string] ] keyword[if] identifier[rexponents1] != identifier[rexponents2] : keyword[return] keyword[False] keyword[if] keyword[not] identifier[_compare_vector] ( identifier[gexponents1] , identifier[gexponents2] , identifier[rel_tol] ): keyword[return] keyword[False] keyword[if] keyword[not] identifier[_compare_matrix] ( identifier[coefficients1] , identifier[coefficients2] , identifier[rel_tol] ): keyword[return] keyword[False] keyword[if] identifier[compare_meta] : keyword[if] identifier[potential1] [ literal[string] ]!= identifier[potential2] [ literal[string] ]: keyword[return] keyword[False] keyword[return] keyword[True] keyword[else] : keyword[return] keyword[True]
def compare_ecp_pots(potential1, potential2, compare_meta=False, rel_tol=0.0): """ Compare two ecp potentials for approximate equality (exponents/coefficients are within a tolerance) If compare_meta is True, the metadata is also compared for exact equality. """ if potential1['angular_momentum'] != potential2['angular_momentum']: return False # depends on [control=['if'], data=[]] rexponents1 = potential1['r_exponents'] rexponents2 = potential2['r_exponents'] gexponents1 = potential1['gaussian_exponents'] gexponents2 = potential2['gaussian_exponents'] coefficients1 = potential1['coefficients'] coefficients2 = potential2['coefficients'] # integer comparison if rexponents1 != rexponents2: return False # depends on [control=['if'], data=[]] if not _compare_vector(gexponents1, gexponents2, rel_tol): return False # depends on [control=['if'], data=[]] if not _compare_matrix(coefficients1, coefficients2, rel_tol): return False # depends on [control=['if'], data=[]] if compare_meta: if potential1['ecp_type'] != potential2['ecp_type']: return False # depends on [control=['if'], data=[]] return True # depends on [control=['if'], data=[]] else: return True
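An illustrative sketch of the potential dictionaries this function expects, with the field names taken from the accesses above; the numeric values are invented and the behaviour of the private _compare_vector/_compare_matrix helpers is an assumption.

pot_a = {
    'angular_momentum': [0],
    'ecp_type': 'scalar_ecp',
    'r_exponents': [2, 2],
    'gaussian_exponents': [10.5, 5.25],
    'coefficients': [[50.0, -10.0]],
}
pot_b = dict(pot_a, gaussian_exponents=[10.5, 5.25 * (1 + 1e-12)])
# With a non-zero rel_tol, compare_ecp_pots(pot_a, pot_b, rel_tol=1e-9) should report
# the two potentials as equivalent, provided the _compare_* helpers implement an
# element-wise relative-tolerance check.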
def generate_about_table(extra_info={}): """ Make a table with information about FlowCal and the current analysis. Parameters ---------- extra_info : dict, optional Additional keyword:value pairs to include in the table. Returns ------- about_table : DataFrame Table with information about FlowCal and the current analysis, as keyword:value pairs. The following keywords are included: FlowCal version, and date and time of analysis. Keywords and values from `extra_info` are also included. """ # Make keyword and value arrays keywords = [] values = [] # FlowCal version keywords.append('FlowCal version') values.append(FlowCal.__version__) # Analysis date and time keywords.append('Date of analysis') values.append(time.strftime("%Y/%m/%d")) keywords.append('Time of analysis') values.append(time.strftime("%I:%M:%S%p")) # Add additional keyword:value pairs for k, v in six.iteritems(extra_info): keywords.append(k) values.append(v) # Make table as data frame about_table = pd.DataFrame(values, index=keywords) # Set column names about_table.columns = ['Value'] about_table.index.name = 'Keyword' return about_table
def function[generate_about_table, parameter[extra_info]]: constant[ Make a table with information about FlowCal and the current analysis. Parameters ---------- extra_info : dict, optional Additional keyword:value pairs to include in the table. Returns ------- about_table : DataFrame Table with information about FlowCal and the current analysis, as keyword:value pairs. The following keywords are included: FlowCal version, and date and time of analysis. Keywords and values from `extra_info` are also included. ] variable[keywords] assign[=] list[[]] variable[values] assign[=] list[[]] call[name[keywords].append, parameter[constant[FlowCal version]]] call[name[values].append, parameter[name[FlowCal].__version__]] call[name[keywords].append, parameter[constant[Date of analysis]]] call[name[values].append, parameter[call[name[time].strftime, parameter[constant[%Y/%m/%d]]]]] call[name[keywords].append, parameter[constant[Time of analysis]]] call[name[values].append, parameter[call[name[time].strftime, parameter[constant[%I:%M:%S%p]]]]] for taget[tuple[[<ast.Name object at 0x7da1b1ca10c0>, <ast.Name object at 0x7da1b1ca0fd0>]]] in starred[call[name[six].iteritems, parameter[name[extra_info]]]] begin[:] call[name[keywords].append, parameter[name[k]]] call[name[values].append, parameter[name[v]]] variable[about_table] assign[=] call[name[pd].DataFrame, parameter[name[values]]] name[about_table].columns assign[=] list[[<ast.Constant object at 0x7da1b1ca1840>]] name[about_table].index.name assign[=] constant[Keyword] return[name[about_table]]
keyword[def] identifier[generate_about_table] ( identifier[extra_info] ={}): literal[string] identifier[keywords] =[] identifier[values] =[] identifier[keywords] . identifier[append] ( literal[string] ) identifier[values] . identifier[append] ( identifier[FlowCal] . identifier[__version__] ) identifier[keywords] . identifier[append] ( literal[string] ) identifier[values] . identifier[append] ( identifier[time] . identifier[strftime] ( literal[string] )) identifier[keywords] . identifier[append] ( literal[string] ) identifier[values] . identifier[append] ( identifier[time] . identifier[strftime] ( literal[string] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[extra_info] ): identifier[keywords] . identifier[append] ( identifier[k] ) identifier[values] . identifier[append] ( identifier[v] ) identifier[about_table] = identifier[pd] . identifier[DataFrame] ( identifier[values] , identifier[index] = identifier[keywords] ) identifier[about_table] . identifier[columns] =[ literal[string] ] identifier[about_table] . identifier[index] . identifier[name] = literal[string] keyword[return] identifier[about_table]
def generate_about_table(extra_info={}): """ Make a table with information about FlowCal and the current analysis. Parameters ---------- extra_info : dict, optional Additional keyword:value pairs to include in the table. Returns ------- about_table : DataFrame Table with information about FlowCal and the current analysis, as keyword:value pairs. The following keywords are included: FlowCal version, and date and time of analysis. Keywords and values from `extra_info` are also included. """ # Make keyword and value arrays keywords = [] values = [] # FlowCal version keywords.append('FlowCal version') values.append(FlowCal.__version__) # Analysis date and time keywords.append('Date of analysis') values.append(time.strftime('%Y/%m/%d')) keywords.append('Time of analysis') values.append(time.strftime('%I:%M:%S%p')) # Add additional keyword:value pairs for (k, v) in six.iteritems(extra_info): keywords.append(k) values.append(v) # depends on [control=['for'], data=[]] # Make table as data frame about_table = pd.DataFrame(values, index=keywords) # Set column names about_table.columns = ['Value'] about_table.index.name = 'Keyword' return about_table
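A hypothetical usage sketch; it assumes the surrounding FlowCal module context (FlowCal, pandas as pd, time, six already imported) and the extra keyword shown is made up.

about = generate_about_table(extra_info={'FCS files analyzed': 3})
print(about)
# Expected shape: a single 'Value' column indexed by 'Keyword', with rows for
# 'FlowCal version', 'Date of analysis', 'Time of analysis', and the extra pair.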
def set_armed_state(self, state): """Set the armed state, also update local state.""" self.set_service_value( self.security_sensor_service, 'Armed', 'newArmedValue', state) self.set_cache_value('Armed', state)
def function[set_armed_state, parameter[self, state]]: constant[Set the armed state, also update local state.] call[name[self].set_service_value, parameter[name[self].security_sensor_service, constant[Armed], constant[newArmedValue], name[state]]] call[name[self].set_cache_value, parameter[constant[Armed], name[state]]]
keyword[def] identifier[set_armed_state] ( identifier[self] , identifier[state] ): literal[string] identifier[self] . identifier[set_service_value] ( identifier[self] . identifier[security_sensor_service] , literal[string] , literal[string] , identifier[state] ) identifier[self] . identifier[set_cache_value] ( literal[string] , identifier[state] )
def set_armed_state(self, state): """Set the armed state, also update local state.""" self.set_service_value(self.security_sensor_service, 'Armed', 'newArmedValue', state) self.set_cache_value('Armed', state)
def AddBlock(self, block): """ Add the given block to the model and also its input/output variables """ if isinstance(block, Block): self.blocks.append(block) self.max_order = max(self.max_order, block.max_input_order-1) self.max_order = max(self.max_order, block.max_output_order) for variable in block.inputs+block.outputs: self._AddVariable(variable) else: print(block) raise TypeError self._utd_graph = False
def function[AddBlock, parameter[self, block]]: constant[ Add the given block to the model and also its input/output variables ] if call[name[isinstance], parameter[name[block], name[Block]]] begin[:] call[name[self].blocks.append, parameter[name[block]]] name[self].max_order assign[=] call[name[max], parameter[name[self].max_order, binary_operation[name[block].max_input_order - constant[1]]]] name[self].max_order assign[=] call[name[max], parameter[name[self].max_order, name[block].max_output_order]] for taget[name[variable]] in starred[binary_operation[name[block].inputs + name[block].outputs]] begin[:] call[name[self]._AddVariable, parameter[name[variable]]] name[self]._utd_graph assign[=] constant[False]
keyword[def] identifier[AddBlock] ( identifier[self] , identifier[block] ): literal[string] keyword[if] identifier[isinstance] ( identifier[block] , identifier[Block] ): identifier[self] . identifier[blocks] . identifier[append] ( identifier[block] ) identifier[self] . identifier[max_order] = identifier[max] ( identifier[self] . identifier[max_order] , identifier[block] . identifier[max_input_order] - literal[int] ) identifier[self] . identifier[max_order] = identifier[max] ( identifier[self] . identifier[max_order] , identifier[block] . identifier[max_output_order] ) keyword[for] identifier[variable] keyword[in] identifier[block] . identifier[inputs] + identifier[block] . identifier[outputs] : identifier[self] . identifier[_AddVariable] ( identifier[variable] ) keyword[else] : identifier[print] ( identifier[block] ) keyword[raise] identifier[TypeError] identifier[self] . identifier[_utd_graph] = keyword[False]
def AddBlock(self, block): """ Add the given block to the model and also its input/output variables """ if isinstance(block, Block): self.blocks.append(block) self.max_order = max(self.max_order, block.max_input_order - 1) self.max_order = max(self.max_order, block.max_output_order) for variable in block.inputs + block.outputs: self._AddVariable(variable) # depends on [control=['for'], data=['variable']] # depends on [control=['if'], data=[]] else: print(block) raise TypeError self._utd_graph = False
def update_track(session, track_id, latitude, longitude, stop_tracking=False): """ Updates the current location by creating a new track point and appending it to the given track """ tracking_data = { 'track_point': { 'latitude': latitude, 'longitude': longitude, }, 'stop_tracking': stop_tracking } # PUT /api/projects/0.1/tracks/{track_id}/ response = make_put_request(session, 'tracks/{}'.format(track_id), json_data=tracking_data) json_data = response.json() if response.status_code == 200: return json_data['result'] else: raise TrackNotUpdatedException(message=json_data['message'], error_code=json_data['error_code'], request_id=json_data['request_id'])
def function[update_track, parameter[session, track_id, latitude, longitude, stop_tracking]]: constant[ Updates the current location by creating a new track point and appending it to the given track ] variable[tracking_data] assign[=] dictionary[[<ast.Constant object at 0x7da18f00c460>, <ast.Constant object at 0x7da18f00d2a0>], [<ast.Dict object at 0x7da18f00f700>, <ast.Name object at 0x7da18f00ded0>]] variable[response] assign[=] call[name[make_put_request], parameter[name[session], call[constant[tracks/{}].format, parameter[name[track_id]]]]] variable[json_data] assign[=] call[name[response].json, parameter[]] if compare[name[response].status_code equal[==] constant[200]] begin[:] return[call[name[json_data]][constant[result]]]
keyword[def] identifier[update_track] ( identifier[session] , identifier[track_id] , identifier[latitude] , identifier[longitude] , identifier[stop_tracking] = keyword[False] ): literal[string] identifier[tracking_data] ={ literal[string] :{ literal[string] : identifier[latitude] , literal[string] : identifier[longitude] , }, literal[string] : identifier[stop_tracking] } identifier[response] = identifier[make_put_request] ( identifier[session] , literal[string] . identifier[format] ( identifier[track_id] ), identifier[json_data] = identifier[tracking_data] ) identifier[json_data] = identifier[response] . identifier[json] () keyword[if] identifier[response] . identifier[status_code] == literal[int] : keyword[return] identifier[json_data] [ literal[string] ] keyword[else] : keyword[raise] identifier[TrackNotUpdatedException] ( identifier[message] = identifier[json_data] [ literal[string] ], identifier[error_code] = identifier[json_data] [ literal[string] ], identifier[request_id] = identifier[json_data] [ literal[string] ])
def update_track(session, track_id, latitude, longitude, stop_tracking=False): """ Updates the current location by creating a new track point and appending it to the given track """ tracking_data = {'track_point': {'latitude': latitude, 'longitude': longitude}, 'stop_tracking': stop_tracking} # PUT /api/projects/0.1/tracks/{track_id}/ response = make_put_request(session, 'tracks/{}'.format(track_id), json_data=tracking_data) json_data = response.json() if response.status_code == 200: return json_data['result'] # depends on [control=['if'], data=[]] else: raise TrackNotUpdatedException(message=json_data['message'], error_code=json_data['error_code'], request_id=json_data['request_id'])
def resizeColumnsToContents(self): """Resize the columns to its contents.""" self._autosized_cols = set() self._resizeColumnsToContents(self.table_level, self.table_index, self._max_autosize_ms) self._update_layout()
def function[resizeColumnsToContents, parameter[self]]: constant[Resize the columns to its contents.] name[self]._autosized_cols assign[=] call[name[set], parameter[]] call[name[self]._resizeColumnsToContents, parameter[name[self].table_level, name[self].table_index, name[self]._max_autosize_ms]] call[name[self]._update_layout, parameter[]]
keyword[def] identifier[resizeColumnsToContents] ( identifier[self] ): literal[string] identifier[self] . identifier[_autosized_cols] = identifier[set] () identifier[self] . identifier[_resizeColumnsToContents] ( identifier[self] . identifier[table_level] , identifier[self] . identifier[table_index] , identifier[self] . identifier[_max_autosize_ms] ) identifier[self] . identifier[_update_layout] ()
def resizeColumnsToContents(self): """Resize the columns to its contents.""" self._autosized_cols = set() self._resizeColumnsToContents(self.table_level, self.table_index, self._max_autosize_ms) self._update_layout()
def bat_to_sh(file_path):
    """Convert honeybee .bat file to .sh file.

    WARNING: This is a very simple function and doesn't handle any edge cases.
    """
    sh_file = file_path[:-4] + '.sh'
    # text mode (Python 3) so the str writes/startswith checks below work;
    # newline='\n' keeps Unix line endings in the generated shell script
    with open(file_path, 'r') as inf, open(sh_file, 'w', newline='\n') as outf:
        outf.write('#!/usr/bin/env bash\n\n')
        for line in inf:
            # pass the path lines, etc to get to the commands
            if line.strip():
                continue
            else:
                break
        for line in inf:
            if line.startswith('echo'):
                continue
            modified_line = line.replace('c:\\radiance\\bin\\', '').replace('\\', '/')
            outf.write(modified_line)

    print('bash file is created at:\n\t%s' % sh_file)
    # Heroku - Make command.sh executable
    st = os.stat(sh_file)
    os.chmod(sh_file, st.st_mode | 0o111)
    return sh_file
def function[bat_to_sh, parameter[file_path]]: constant[Convert honeybee .bat file to .sh file. WARNING: This is a very simple function and doesn't handle any edge cases. ] variable[sh_file] assign[=] binary_operation[call[name[file_path]][<ast.Slice object at 0x7da1b1251750>] + constant[.sh]] with call[name[open], parameter[name[file_path], constant[rb]]] begin[:] call[name[outf].write, parameter[constant[#!/usr/bin/env bash ]]] for taget[name[line]] in starred[name[inf]] begin[:] if call[name[line].strip, parameter[]] begin[:] continue for taget[name[line]] in starred[name[inf]] begin[:] if call[name[line].startswith, parameter[constant[echo]]] begin[:] continue variable[modified_line] assign[=] call[call[name[line].replace, parameter[constant[c:\radiance\bin\], constant[]]].replace, parameter[constant[\], constant[/]]] call[name[outf].write, parameter[name[modified_line]]] call[name[print], parameter[binary_operation[constant[bash file is created at: %s] <ast.Mod object at 0x7da2590d6920> name[sh_file]]]] variable[st] assign[=] call[name[os].stat, parameter[name[sh_file]]] call[name[os].chmod, parameter[name[sh_file], binary_operation[name[st].st_mode <ast.BitOr object at 0x7da2590d6aa0> constant[73]]]] return[name[sh_file]]
keyword[def] identifier[bat_to_sh] ( identifier[file_path] ): literal[string] identifier[sh_file] = identifier[file_path] [:- literal[int] ]+ literal[string] keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[inf] , identifier[open] ( identifier[sh_file] , literal[string] ) keyword[as] identifier[outf] : identifier[outf] . identifier[write] ( literal[string] ) keyword[for] identifier[line] keyword[in] identifier[inf] : keyword[if] identifier[line] . identifier[strip] (): keyword[continue] keyword[else] : keyword[break] keyword[for] identifier[line] keyword[in] identifier[inf] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[continue] identifier[modified_line] = identifier[line] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) identifier[outf] . identifier[write] ( identifier[modified_line] ) identifier[print] ( literal[string] % identifier[sh_file] ) identifier[st] = identifier[os] . identifier[stat] ( identifier[sh_file] ) identifier[os] . identifier[chmod] ( identifier[sh_file] , identifier[st] . identifier[st_mode] | literal[int] ) keyword[return] identifier[sh_file]
def bat_to_sh(file_path): """Convert honeybee .bat file to .sh file. WARNING: This is a very simple function and doesn't handle any edge cases. """ sh_file = file_path[:-4] + '.sh' with open(file_path, 'rb') as inf, open(sh_file, 'wb') as outf: outf.write('#!/usr/bin/env bash\n\n') for line in inf: # pass the path lines, etc to get to the commands if line.strip(): continue # depends on [control=['if'], data=[]] else: break # depends on [control=['for'], data=['line']] for line in inf: if line.startswith('echo'): continue # depends on [control=['if'], data=[]] modified_line = line.replace('c:\\radiance\\bin\\', '').replace('\\', '/') outf.write(modified_line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['inf']] print('bash file is created at:\n\t%s' % sh_file) # Heroku - Make command.sh executable st = os.stat(sh_file) os.chmod(sh_file, st.st_mode | 73) return sh_file
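A hypothetical round-trip sketch: write a tiny .bat file with the blank-line separator the parser above expects, then convert it with the text-mode variant shown; the paths and commands are invented.

import os

bat_path = os.path.join(os.getcwd(), 'example.bat')
with open(bat_path, 'w') as f:
    f.write('PATH=c:\\radiance\\bin;%PATH%\n')
    f.write('\n')                                   # blank line ends the path block
    f.write('echo rendering...\n')                  # echo lines are dropped
    f.write('c:\\radiance\\bin\\rtrace -h scene.oct < rays.txt\n')

sh_path = bat_to_sh(bat_path)                       # -> .../example.sh
print(open(sh_path).read())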
def add_options(self): """ Add program options. """ super(ThemeSwitcher, self).add_options() self.add_bool_option("-l", "--list", help="list available themes") self.add_bool_option("-c", "--current", help="print path to currently selected theme") self.add_bool_option("-n", "--next", help="rotate through selected themes, and print new path") self.add_bool_option("-a", "--all", help="remove any selections, and use all themes") self.add_value_option("-t", "--toggle", "NAME", help="toggle selection of a theme")
def function[add_options, parameter[self]]: constant[ Add program options. ] call[call[name[super], parameter[name[ThemeSwitcher], name[self]]].add_options, parameter[]] call[name[self].add_bool_option, parameter[constant[-l], constant[--list]]] call[name[self].add_bool_option, parameter[constant[-c], constant[--current]]] call[name[self].add_bool_option, parameter[constant[-n], constant[--next]]] call[name[self].add_bool_option, parameter[constant[-a], constant[--all]]] call[name[self].add_value_option, parameter[constant[-t], constant[--toggle], constant[NAME]]]
keyword[def] identifier[add_options] ( identifier[self] ): literal[string] identifier[super] ( identifier[ThemeSwitcher] , identifier[self] ). identifier[add_options] () identifier[self] . identifier[add_bool_option] ( literal[string] , literal[string] , identifier[help] = literal[string] ) identifier[self] . identifier[add_bool_option] ( literal[string] , literal[string] , identifier[help] = literal[string] ) identifier[self] . identifier[add_bool_option] ( literal[string] , literal[string] , identifier[help] = literal[string] ) identifier[self] . identifier[add_bool_option] ( literal[string] , literal[string] , identifier[help] = literal[string] ) identifier[self] . identifier[add_value_option] ( literal[string] , literal[string] , literal[string] , identifier[help] = literal[string] )
def add_options(self): """ Add program options. """ super(ThemeSwitcher, self).add_options() self.add_bool_option('-l', '--list', help='list available themes') self.add_bool_option('-c', '--current', help='print path to currently selected theme') self.add_bool_option('-n', '--next', help='rotate through selected themes, and print new path') self.add_bool_option('-a', '--all', help='remove any selections, and use all themes') self.add_value_option('-t', '--toggle', 'NAME', help='toggle selection of a theme')
def stem(self, word): """ Stem a Romanian word and return the stemmed form. :param word: The word that is stemmed. :type word: str or unicode :return: The stemmed form. :rtype: unicode """ word = word.lower() step1_success = False step2_success = False for i in range(1, len(word)-1): if word[i-1] in self.__vowels and word[i+1] in self.__vowels: if word[i] == "u": word = "".join((word[:i], "U", word[i+1:])) elif word[i] == "i": word = "".join((word[:i], "I", word[i+1:])) r1, r2 = self._r1r2_standard(word, self.__vowels) rv = self._rv_standard(word, self.__vowels) # STEP 0: Removal of plurals and other simplifications for suffix in self.__step0_suffixes: if word.endswith(suffix): if suffix in r1: if suffix in ("ul", "ului"): word = word[:-len(suffix)] if suffix in rv: rv = rv[:-len(suffix)] else: rv = "" elif (suffix == "aua" or suffix == "atei" or (suffix == "ile" and word[-5:-3] != "ab")): word = word[:-2] elif suffix in ("ea", "ele", "elor"): word = "".join((word[:-len(suffix)], "e")) if suffix in rv: rv = "".join((rv[:-len(suffix)], "e")) else: rv = "" elif suffix in ("ii", "iua", "iei", "iile", "iilor", "ilor"): word = "".join((word[:-len(suffix)], "i")) if suffix in rv: rv = "".join((rv[:-len(suffix)], "i")) else: rv = "" elif suffix in ("a\u0163ie", "a\u0163ia"): word = word[:-1] break # STEP 1: Reduction of combining suffixes while True: replacement_done = False for suffix in self.__step1_suffixes: if word.endswith(suffix): if suffix in r1: step1_success = True replacement_done = True if suffix in ("abilitate", "abilitati", "abilit\u0103i", "abilit\u0103\u0163i"): word = "".join((word[:-len(suffix)], "abil")) elif suffix == "ibilitate": word = word[:-5] elif suffix in ("ivitate", "ivitati", "ivit\u0103i", "ivit\u0103\u0163i"): word = "".join((word[:-len(suffix)], "iv")) elif suffix in ("icitate", "icitati", "icit\u0103i", "icit\u0103\u0163i", "icator", "icatori", "iciv", "iciva", "icive", "icivi", "iciv\u0103", "ical", "icala", "icale", "icali", "ical\u0103"): word = "".join((word[:-len(suffix)], "ic")) elif suffix in ("ativ", "ativa", "ative", "ativi", "ativ\u0103", "a\u0163iune", "atoare", "ator", "atori", "\u0103toare", "\u0103tor", "\u0103tori"): word = "".join((word[:-len(suffix)], "at")) if suffix in r2: r2 = "".join((r2[:-len(suffix)], "at")) elif suffix in ("itiv", "itiva", "itive", "itivi", "itiv\u0103", "i\u0163iune", "itoare", "itor", "itori"): word = "".join((word[:-len(suffix)], "it")) if suffix in r2: r2 = "".join((r2[:-len(suffix)], "it")) else: step1_success = False break if not replacement_done: break # STEP 2: Removal of standard suffixes for suffix in self.__step2_suffixes: if word.endswith(suffix): if suffix in r2: step2_success = True if suffix in ("iune", "iuni"): if word[-5] == "\u0163": word = "".join((word[:-5], "t")) elif suffix in ("ism", "isme", "ist", "ista", "iste", "isti", "ist\u0103", "i\u015Fti"): word = "".join((word[:-len(suffix)], "ist")) else: word = word[:-len(suffix)] break # STEP 3: Removal of verb suffixes if not step1_success and not step2_success: for suffix in self.__step3_suffixes: if word.endswith(suffix): if suffix in rv: if suffix in ('seser\u0103\u0163i', 'seser\u0103m', 'ser\u0103\u0163i', 'sese\u015Fi', 'seser\u0103', 'ser\u0103m', 'sesem', 'se\u015Fi', 'ser\u0103', 'sese', 'a\u0163i', 'e\u0163i', 'i\u0163i', '\xE2\u0163i', 'sei', '\u0103m', 'em', 'im', '\xE2m', 'se'): word = word[:-len(suffix)] rv = rv[:-len(suffix)] else: if (not rv.startswith(suffix) and rv[rv.index(suffix)-1] not in "aeio\u0103\xE2\xEE"): word = 
word[:-len(suffix)] break # STEP 4: Removal of final vowel for suffix in ("ie", "a", "e", "i", "\u0103"): if word.endswith(suffix): if suffix in rv: word = word[:-len(suffix)] break word = word.replace("I", "i").replace("U", "u") return word
def function[stem, parameter[self, word]]: constant[ Stem a Romanian word and return the stemmed form. :param word: The word that is stemmed. :type word: str or unicode :return: The stemmed form. :rtype: unicode ] variable[word] assign[=] call[name[word].lower, parameter[]] variable[step1_success] assign[=] constant[False] variable[step2_success] assign[=] constant[False] for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[word]]] - constant[1]]]]] begin[:] if <ast.BoolOp object at 0x7da1b1db3a30> begin[:] if compare[call[name[word]][name[i]] equal[==] constant[u]] begin[:] variable[word] assign[=] call[constant[].join, parameter[tuple[[<ast.Subscript object at 0x7da1b1db34c0>, <ast.Constant object at 0x7da1b1db3400>, <ast.Subscript object at 0x7da1b1db33d0>]]]] <ast.Tuple object at 0x7da1b1db2e30> assign[=] call[name[self]._r1r2_standard, parameter[name[word], name[self].__vowels]] variable[rv] assign[=] call[name[self]._rv_standard, parameter[name[word], name[self].__vowels]] for taget[name[suffix]] in starred[name[self].__step0_suffixes] begin[:] if call[name[word].endswith, parameter[name[suffix]]] begin[:] if compare[name[suffix] in name[r1]] begin[:] if compare[name[suffix] in tuple[[<ast.Constant object at 0x7da1b1db27d0>, <ast.Constant object at 0x7da1b1db27a0>]]] begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b1db26b0>] if compare[name[suffix] in name[rv]] begin[:] variable[rv] assign[=] call[name[rv]][<ast.Slice object at 0x7da1b1db2440>] break while constant[True] begin[:] variable[replacement_done] assign[=] constant[False] for taget[name[suffix]] in starred[name[self].__step1_suffixes] begin[:] if call[name[word].endswith, parameter[name[suffix]]] begin[:] if compare[name[suffix] in name[r1]] begin[:] variable[step1_success] assign[=] constant[True] variable[replacement_done] assign[=] constant[True] if compare[name[suffix] in tuple[[<ast.Constant object at 0x7da1b1d36410>, <ast.Constant object at 0x7da1b1d36440>, <ast.Constant object at 0x7da1b1d36470>, <ast.Constant object at 0x7da1b1d364a0>]]] begin[:] variable[word] assign[=] call[constant[].join, parameter[tuple[[<ast.Subscript object at 0x7da1b1d342b0>, <ast.Constant object at 0x7da1b1d34160>]]]] break if <ast.UnaryOp object at 0x7da1b1d63c70> begin[:] break for taget[name[suffix]] in starred[name[self].__step2_suffixes] begin[:] if call[name[word].endswith, parameter[name[suffix]]] begin[:] if compare[name[suffix] in name[r2]] begin[:] variable[step2_success] assign[=] constant[True] if compare[name[suffix] in tuple[[<ast.Constant object at 0x7da1b1d637f0>, <ast.Constant object at 0x7da1b1d637c0>]]] begin[:] if compare[call[name[word]][<ast.UnaryOp object at 0x7da1b1d636d0>] equal[==] constant[ţ]] begin[:] variable[word] assign[=] call[constant[].join, parameter[tuple[[<ast.Subscript object at 0x7da1b1d63520>, <ast.Constant object at 0x7da1b1d63430>]]]] break if <ast.BoolOp object at 0x7da1b1d62ce0> begin[:] for taget[name[suffix]] in starred[name[self].__step3_suffixes] begin[:] if call[name[word].endswith, parameter[name[suffix]]] begin[:] if compare[name[suffix] in name[rv]] begin[:] if compare[name[suffix] in tuple[[<ast.Constant object at 0x7da1b1d628c0>, <ast.Constant object at 0x7da1b1d62890>, <ast.Constant object at 0x7da1b1d62860>, <ast.Constant object at 0x7da1b1d62830>, <ast.Constant object at 0x7da1b1d62800>, <ast.Constant object at 0x7da1b1d627d0>, <ast.Constant object at 0x7da1b1d627a0>, <ast.Constant object 
at 0x7da1b1d62770>, <ast.Constant object at 0x7da1b1d62740>, <ast.Constant object at 0x7da1b1d62710>, <ast.Constant object at 0x7da1b1d626e0>, <ast.Constant object at 0x7da1b1d626b0>, <ast.Constant object at 0x7da1b1d62680>, <ast.Constant object at 0x7da1b1d62650>, <ast.Constant object at 0x7da1b1d62620>, <ast.Constant object at 0x7da1b1d625f0>, <ast.Constant object at 0x7da1b1d625c0>, <ast.Constant object at 0x7da1b1d62590>, <ast.Constant object at 0x7da1b1d62560>, <ast.Constant object at 0x7da1b1d62530>]]] begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b1d62440>] variable[rv] assign[=] call[name[rv]][<ast.Slice object at 0x7da1b1d62290>] break for taget[name[suffix]] in starred[tuple[[<ast.Constant object at 0x7da1b1d61ba0>, <ast.Constant object at 0x7da1b1d61b70>, <ast.Constant object at 0x7da1b1d61b40>, <ast.Constant object at 0x7da1b1d61b10>, <ast.Constant object at 0x7da1b1d61ae0>]]] begin[:] if call[name[word].endswith, parameter[name[suffix]]] begin[:] if compare[name[suffix] in name[rv]] begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b1d61840>] break variable[word] assign[=] call[call[name[word].replace, parameter[constant[I], constant[i]]].replace, parameter[constant[U], constant[u]]] return[name[word]]
keyword[def] identifier[stem] ( identifier[self] , identifier[word] ): literal[string] identifier[word] = identifier[word] . identifier[lower] () identifier[step1_success] = keyword[False] identifier[step2_success] = keyword[False] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[word] )- literal[int] ): keyword[if] identifier[word] [ identifier[i] - literal[int] ] keyword[in] identifier[self] . identifier[__vowels] keyword[and] identifier[word] [ identifier[i] + literal[int] ] keyword[in] identifier[self] . identifier[__vowels] : keyword[if] identifier[word] [ identifier[i] ]== literal[string] : identifier[word] = literal[string] . identifier[join] (( identifier[word] [: identifier[i] ], literal[string] , identifier[word] [ identifier[i] + literal[int] :])) keyword[elif] identifier[word] [ identifier[i] ]== literal[string] : identifier[word] = literal[string] . identifier[join] (( identifier[word] [: identifier[i] ], literal[string] , identifier[word] [ identifier[i] + literal[int] :])) identifier[r1] , identifier[r2] = identifier[self] . identifier[_r1r2_standard] ( identifier[word] , identifier[self] . identifier[__vowels] ) identifier[rv] = identifier[self] . identifier[_rv_standard] ( identifier[word] , identifier[self] . identifier[__vowels] ) keyword[for] identifier[suffix] keyword[in] identifier[self] . identifier[__step0_suffixes] : keyword[if] identifier[word] . identifier[endswith] ( identifier[suffix] ): keyword[if] identifier[suffix] keyword[in] identifier[r1] : keyword[if] identifier[suffix] keyword[in] ( literal[string] , literal[string] ): identifier[word] = identifier[word] [:- identifier[len] ( identifier[suffix] )] keyword[if] identifier[suffix] keyword[in] identifier[rv] : identifier[rv] = identifier[rv] [:- identifier[len] ( identifier[suffix] )] keyword[else] : identifier[rv] = literal[string] keyword[elif] ( identifier[suffix] == literal[string] keyword[or] identifier[suffix] == literal[string] keyword[or] ( identifier[suffix] == literal[string] keyword[and] identifier[word] [- literal[int] :- literal[int] ]!= literal[string] )): identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[word] = literal[string] . identifier[join] (( identifier[word] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[if] identifier[suffix] keyword[in] identifier[rv] : identifier[rv] = literal[string] . identifier[join] (( identifier[rv] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[else] : identifier[rv] = literal[string] keyword[elif] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): identifier[word] = literal[string] . identifier[join] (( identifier[word] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[if] identifier[suffix] keyword[in] identifier[rv] : identifier[rv] = literal[string] . identifier[join] (( identifier[rv] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[else] : identifier[rv] = literal[string] keyword[elif] identifier[suffix] keyword[in] ( literal[string] , literal[string] ): identifier[word] = identifier[word] [:- literal[int] ] keyword[break] keyword[while] keyword[True] : identifier[replacement_done] = keyword[False] keyword[for] identifier[suffix] keyword[in] identifier[self] . 
identifier[__step1_suffixes] : keyword[if] identifier[word] . identifier[endswith] ( identifier[suffix] ): keyword[if] identifier[suffix] keyword[in] identifier[r1] : identifier[step1_success] = keyword[True] identifier[replacement_done] = keyword[True] keyword[if] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): identifier[word] = literal[string] . identifier[join] (( identifier[word] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[elif] identifier[suffix] == literal[string] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): identifier[word] = literal[string] . identifier[join] (( identifier[word] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[elif] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): identifier[word] = literal[string] . identifier[join] (( identifier[word] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[elif] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): identifier[word] = literal[string] . identifier[join] (( identifier[word] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[if] identifier[suffix] keyword[in] identifier[r2] : identifier[r2] = literal[string] . identifier[join] (( identifier[r2] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[elif] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): identifier[word] = literal[string] . identifier[join] (( identifier[word] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[if] identifier[suffix] keyword[in] identifier[r2] : identifier[r2] = literal[string] . identifier[join] (( identifier[r2] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[else] : identifier[step1_success] = keyword[False] keyword[break] keyword[if] keyword[not] identifier[replacement_done] : keyword[break] keyword[for] identifier[suffix] keyword[in] identifier[self] . identifier[__step2_suffixes] : keyword[if] identifier[word] . identifier[endswith] ( identifier[suffix] ): keyword[if] identifier[suffix] keyword[in] identifier[r2] : identifier[step2_success] = keyword[True] keyword[if] identifier[suffix] keyword[in] ( literal[string] , literal[string] ): keyword[if] identifier[word] [- literal[int] ]== literal[string] : identifier[word] = literal[string] . identifier[join] (( identifier[word] [:- literal[int] ], literal[string] )) keyword[elif] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): identifier[word] = literal[string] . 
identifier[join] (( identifier[word] [:- identifier[len] ( identifier[suffix] )], literal[string] )) keyword[else] : identifier[word] = identifier[word] [:- identifier[len] ( identifier[suffix] )] keyword[break] keyword[if] keyword[not] identifier[step1_success] keyword[and] keyword[not] identifier[step2_success] : keyword[for] identifier[suffix] keyword[in] identifier[self] . identifier[__step3_suffixes] : keyword[if] identifier[word] . identifier[endswith] ( identifier[suffix] ): keyword[if] identifier[suffix] keyword[in] identifier[rv] : keyword[if] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): identifier[word] = identifier[word] [:- identifier[len] ( identifier[suffix] )] identifier[rv] = identifier[rv] [:- identifier[len] ( identifier[suffix] )] keyword[else] : keyword[if] ( keyword[not] identifier[rv] . identifier[startswith] ( identifier[suffix] ) keyword[and] identifier[rv] [ identifier[rv] . identifier[index] ( identifier[suffix] )- literal[int] ] keyword[not] keyword[in] literal[string] ): identifier[word] = identifier[word] [:- identifier[len] ( identifier[suffix] )] keyword[break] keyword[for] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): keyword[if] identifier[word] . identifier[endswith] ( identifier[suffix] ): keyword[if] identifier[suffix] keyword[in] identifier[rv] : identifier[word] = identifier[word] [:- identifier[len] ( identifier[suffix] )] keyword[break] identifier[word] = identifier[word] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) keyword[return] identifier[word]
def stem(self, word): """ Stem a Romanian word and return the stemmed form. :param word: The word that is stemmed. :type word: str or unicode :return: The stemmed form. :rtype: unicode """ word = word.lower() step1_success = False step2_success = False for i in range(1, len(word) - 1): if word[i - 1] in self.__vowels and word[i + 1] in self.__vowels: if word[i] == 'u': word = ''.join((word[:i], 'U', word[i + 1:])) # depends on [control=['if'], data=[]] elif word[i] == 'i': word = ''.join((word[:i], 'I', word[i + 1:])) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] (r1, r2) = self._r1r2_standard(word, self.__vowels) rv = self._rv_standard(word, self.__vowels) # STEP 0: Removal of plurals and other simplifications for suffix in self.__step0_suffixes: if word.endswith(suffix): if suffix in r1: if suffix in ('ul', 'ului'): word = word[:-len(suffix)] if suffix in rv: rv = rv[:-len(suffix)] # depends on [control=['if'], data=['suffix', 'rv']] else: rv = '' # depends on [control=['if'], data=['suffix']] elif suffix == 'aua' or suffix == 'atei' or (suffix == 'ile' and word[-5:-3] != 'ab'): word = word[:-2] # depends on [control=['if'], data=[]] elif suffix in ('ea', 'ele', 'elor'): word = ''.join((word[:-len(suffix)], 'e')) if suffix in rv: rv = ''.join((rv[:-len(suffix)], 'e')) # depends on [control=['if'], data=['suffix', 'rv']] else: rv = '' # depends on [control=['if'], data=['suffix']] elif suffix in ('ii', 'iua', 'iei', 'iile', 'iilor', 'ilor'): word = ''.join((word[:-len(suffix)], 'i')) if suffix in rv: rv = ''.join((rv[:-len(suffix)], 'i')) # depends on [control=['if'], data=['suffix', 'rv']] else: rv = '' # depends on [control=['if'], data=['suffix']] elif suffix in ('aţie', 'aţia'): word = word[:-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['suffix']] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['suffix']] # STEP 1: Reduction of combining suffixes while True: replacement_done = False for suffix in self.__step1_suffixes: if word.endswith(suffix): if suffix in r1: step1_success = True replacement_done = True if suffix in ('abilitate', 'abilitati', 'abilităi', 'abilităţi'): word = ''.join((word[:-len(suffix)], 'abil')) # depends on [control=['if'], data=['suffix']] elif suffix == 'ibilitate': word = word[:-5] # depends on [control=['if'], data=[]] elif suffix in ('ivitate', 'ivitati', 'ivităi', 'ivităţi'): word = ''.join((word[:-len(suffix)], 'iv')) # depends on [control=['if'], data=['suffix']] elif suffix in ('icitate', 'icitati', 'icităi', 'icităţi', 'icator', 'icatori', 'iciv', 'iciva', 'icive', 'icivi', 'icivă', 'ical', 'icala', 'icale', 'icali', 'icală'): word = ''.join((word[:-len(suffix)], 'ic')) # depends on [control=['if'], data=['suffix']] elif suffix in ('ativ', 'ativa', 'ative', 'ativi', 'ativă', 'aţiune', 'atoare', 'ator', 'atori', 'ătoare', 'ător', 'ători'): word = ''.join((word[:-len(suffix)], 'at')) if suffix in r2: r2 = ''.join((r2[:-len(suffix)], 'at')) # depends on [control=['if'], data=['suffix', 'r2']] # depends on [control=['if'], data=['suffix']] elif suffix in ('itiv', 'itiva', 'itive', 'itivi', 'itivă', 'iţiune', 'itoare', 'itor', 'itori'): word = ''.join((word[:-len(suffix)], 'it')) if suffix in r2: r2 = ''.join((r2[:-len(suffix)], 'it')) # depends on [control=['if'], data=['suffix', 'r2']] # depends on [control=['if'], data=['suffix']] # depends on [control=['if'], data=['suffix']] else: step1_success = False break # 
depends on [control=['if'], data=[]] # depends on [control=['for'], data=['suffix']] if not replacement_done: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # STEP 2: Removal of standard suffixes for suffix in self.__step2_suffixes: if word.endswith(suffix): if suffix in r2: step2_success = True if suffix in ('iune', 'iuni'): if word[-5] == 'ţ': word = ''.join((word[:-5], 't')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif suffix in ('ism', 'isme', 'ist', 'ista', 'iste', 'isti', 'istă', 'işti'): word = ''.join((word[:-len(suffix)], 'ist')) # depends on [control=['if'], data=['suffix']] else: word = word[:-len(suffix)] # depends on [control=['if'], data=['suffix']] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['suffix']] # STEP 3: Removal of verb suffixes if not step1_success and (not step2_success): for suffix in self.__step3_suffixes: if word.endswith(suffix): if suffix in rv: if suffix in ('seserăţi', 'seserăm', 'serăţi', 'seseşi', 'seseră', 'serăm', 'sesem', 'seşi', 'seră', 'sese', 'aţi', 'eţi', 'iţi', 'âţi', 'sei', 'ăm', 'em', 'im', 'âm', 'se'): word = word[:-len(suffix)] rv = rv[:-len(suffix)] # depends on [control=['if'], data=['suffix']] elif not rv.startswith(suffix) and rv[rv.index(suffix) - 1] not in 'aeioăâî': word = word[:-len(suffix)] # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=['suffix', 'rv']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['suffix']] # depends on [control=['if'], data=[]] # STEP 4: Removal of final vowel for suffix in ('ie', 'a', 'e', 'i', 'ă'): if word.endswith(suffix): if suffix in rv: word = word[:-len(suffix)] # depends on [control=['if'], data=['suffix']] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['suffix']] word = word.replace('I', 'i').replace('U', 'u') return word
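The stem() method in this entry follows the Romanian Snowball algorithm (region-based suffix stripping over R1/R2/RV). A minimal usage sketch, assuming NLTK is installed and exposes this stemmer through its SnowballStemmer wrapper (the wrapper name and language key are assumptions, not shown in the entry):

# Hedged usage sketch: assumes nltk provides a Romanian Snowball stemmer.
from nltk.stem.snowball import SnowballStemmer

stemmer = SnowballStemmer("romanian")
for word in ["copiilor", "frumoasele", "lucrurile"]:
    # stem() lower-cases, marks intervocalic u/i as U/I, strips plural and
    # verb suffixes by region, then maps I/U back to i/u.
    print(word, "->", stemmer.stem(word))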
def check_api_key(request, key, hproPk):
    """Check if an API key is valid"""

    if settings.PIAPI_STANDALONE:
        return True

    (_, _, hproject) = getPlugItObject(hproPk)

    if not hproject:
        return False

    if hproject.plugItApiKey is None or hproject.plugItApiKey == '':
        return False

    return hproject.plugItApiKey == key
def function[check_api_key, parameter[request, key, hproPk]]: constant[Check if an API key is valid] if name[settings].PIAPI_STANDALONE begin[:] return[constant[True]] <ast.Tuple object at 0x7da18f723c40> assign[=] call[name[getPlugItObject], parameter[name[hproPk]]] if <ast.UnaryOp object at 0x7da18f722440> begin[:] return[constant[False]] if <ast.BoolOp object at 0x7da18f722860> begin[:] return[constant[False]] return[compare[name[hproject].plugItApiKey equal[==] name[key]]]
keyword[def] identifier[check_api_key] ( identifier[request] , identifier[key] , identifier[hproPk] ): literal[string] keyword[if] identifier[settings] . identifier[PIAPI_STANDALONE] : keyword[return] keyword[True] ( identifier[_] , identifier[_] , identifier[hproject] )= identifier[getPlugItObject] ( identifier[hproPk] ) keyword[if] keyword[not] identifier[hproject] : keyword[return] keyword[False] keyword[if] identifier[hproject] . identifier[plugItApiKey] keyword[is] keyword[None] keyword[or] identifier[hproject] . identifier[plugItApiKey] == literal[string] : keyword[return] keyword[False] keyword[return] identifier[hproject] . identifier[plugItApiKey] == identifier[key]
def check_api_key(request, key, hproPk): """Check if an API key is valid""" if settings.PIAPI_STANDALONE: return True # depends on [control=['if'], data=[]] (_, _, hproject) = getPlugItObject(hproPk) if not hproject: return False # depends on [control=['if'], data=[]] if hproject.plugItApiKey is None or hproject.plugItApiKey == '': return False # depends on [control=['if'], data=[]] return hproject.plugItApiKey == key
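check_api_key() ends with a plain == comparison of the stored and presented keys. A small, hedged variant using only the standard library shows how the same check could be made constant-time; keys_match, stored_key, and presented_key are hypothetical stand-ins for hproject.plugItApiKey and the key argument, not part of the project above:

import hmac

def keys_match(stored_key, presented_key):
    # Hypothetical helper: reject empty keys, then compare in constant time
    # so timing does not leak how much of the key prefix matched.
    if not stored_key or not presented_key:
        return False
    return hmac.compare_digest(stored_key, presented_key)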
def compute_retrospective_loss(self):
    '''
    Compute retrospective loss.

    Returns:
        The tuple data.
        - `np.ndarray` of delta.
        - `np.ndarray` of losses of each batch.
        - float of loss of all batch.
    '''
    observed_arr, encoded_arr, decoded_arr, re_encoded_arr = self.__inferenced_tuple
    batch_size = observed_arr.shape[0]
    if self.__input_neuron_count == self.__hidden_neuron_count:
        target_arr = encoded_arr - np.expand_dims(observed_arr.mean(axis=2), axis=2)
        summary_delta_arr = np.sqrt(np.power(decoded_arr - target_arr, 2))
    else:
        # For each batch, draw a samples from the Uniform distribution.
        if self.__input_neuron_count > self.__hidden_neuron_count:
            all_dim_arr = np.arange(self.__input_neuron_count)
            np.random.shuffle(all_dim_arr)
            choiced_dim_arr = all_dim_arr[:self.__hidden_neuron_count]
            target_arr = encoded_arr - np.expand_dims(
                observed_arr[:, :, choiced_dim_arr].mean(axis=2), axis=2
            )
            summary_delta_arr = np.sqrt(np.power(decoded_arr[:, :, choiced_dim_arr] - target_arr, 2))
        else:
            all_dim_arr = np.arange(self.__hidden_neuron_count)
            np.random.shuffle(all_dim_arr)
            choiced_dim_arr = all_dim_arr[:self.__input_neuron_count]
            target_arr = encoded_arr[:, :, choiced_dim_arr] - np.expand_dims(observed_arr.mean(axis=2), axis=2)
            summary_delta_arr = np.sqrt(np.power(decoded_arr - target_arr, 2))

    summary_delta_arr = np.nan_to_num(summary_delta_arr)
    summary_delta_arr = (summary_delta_arr - summary_delta_arr.mean()) / (summary_delta_arr.std() + 1e-08)

    match_delta_arr = np.sqrt(np.power(encoded_arr[:, -1] - re_encoded_arr[:, -1], 2))
    match_delta_arr = np.nan_to_num(match_delta_arr)
    match_delta_arr = (match_delta_arr - match_delta_arr.mean()) / (match_delta_arr.std() + 1e-08)

    other_encoded_delta_arr = np.nansum(
        np.sqrt(
            np.power(
                np.maximum(
                    0,
                    encoded_arr[:, :-1] - re_encoded_arr[:, -1].reshape(
                        re_encoded_arr[:, -1].shape[0],
                        1,
                        re_encoded_arr[:, -1].shape[1]
                    )
                ),
                2
            )
        ) + self.__margin_param,
        axis=1
    )
    other_encoded_delta_arr = np.nan_to_num(other_encoded_delta_arr)
    other_encoded_delta_arr = (other_encoded_delta_arr - other_encoded_delta_arr.mean()) / (other_encoded_delta_arr.std() + 1e-08)

    other_re_encoded_delta_arr = np.nansum(
        np.sqrt(
            np.power(
                np.maximum(
                    0,
                    encoded_arr[:, -1].reshape(
                        encoded_arr[:, -1].shape[0],
                        1,
                        encoded_arr[:, -1].shape[1]
                    ) - re_encoded_arr[:, :-1],
                ),
                2
            )
        ) + self.__margin_param,
        axis=1
    )
    other_encoded_delta_arr = np.nan_to_num(other_encoded_delta_arr)
    other_re_encoded_delta_arr = (other_re_encoded_delta_arr - other_re_encoded_delta_arr.mean()) / (other_re_encoded_delta_arr.std() + 1e-08)

    mismatch_delta_arr = (match_delta_arr - other_encoded_delta_arr) + (match_delta_arr - other_re_encoded_delta_arr)

    delta_arr = summary_delta_arr + np.expand_dims(self.__retrospective_lambda * match_delta_arr, axis=1) + np.expand_dims(self.__retrospective_eta * mismatch_delta_arr, axis=1)

    v = np.linalg.norm(delta_arr)
    if v > self.__grad_clip_threshold:
        delta_arr = delta_arr * self.__grad_clip_threshold / v

    loss = np.square(delta_arr).mean()
    loss_arr = np.square(delta_arr).sum(axis=1).mean(axis=1)

    return delta_arr, loss_arr, loss
def function[compute_retrospective_loss, parameter[self]]: constant[ Compute retrospective loss. Returns: The tuple data. - `np.ndarray` of delta. - `np.ndarray` of losses of each batch. - float of loss of all batch. ] <ast.Tuple object at 0x7da1b0706230> assign[=] name[self].__inferenced_tuple variable[batch_size] assign[=] call[name[observed_arr].shape][constant[0]] if compare[name[self].__input_neuron_count equal[==] name[self].__hidden_neuron_count] begin[:] variable[target_arr] assign[=] binary_operation[name[encoded_arr] - call[name[np].expand_dims, parameter[call[name[observed_arr].mean, parameter[]]]]] variable[summary_delta_arr] assign[=] call[name[np].sqrt, parameter[call[name[np].power, parameter[binary_operation[name[decoded_arr] - name[target_arr]], constant[2]]]]] variable[summary_delta_arr] assign[=] call[name[np].nan_to_num, parameter[name[summary_delta_arr]]] variable[summary_delta_arr] assign[=] binary_operation[binary_operation[name[summary_delta_arr] - call[name[summary_delta_arr].mean, parameter[]]] / binary_operation[call[name[summary_delta_arr].std, parameter[]] + constant[1e-08]]] variable[match_delta_arr] assign[=] call[name[np].sqrt, parameter[call[name[np].power, parameter[binary_operation[call[name[encoded_arr]][tuple[[<ast.Slice object at 0x7da1b07079a0>, <ast.UnaryOp object at 0x7da1b0706ef0>]]] - call[name[re_encoded_arr]][tuple[[<ast.Slice object at 0x7da1b0705de0>, <ast.UnaryOp object at 0x7da1b07072b0>]]]], constant[2]]]]] variable[match_delta_arr] assign[=] call[name[np].nan_to_num, parameter[name[match_delta_arr]]] variable[match_delta_arr] assign[=] binary_operation[binary_operation[name[match_delta_arr] - call[name[match_delta_arr].mean, parameter[]]] / binary_operation[call[name[match_delta_arr].std, parameter[]] + constant[1e-08]]] variable[other_encoded_delta_arr] assign[=] call[name[np].nansum, parameter[binary_operation[call[name[np].sqrt, parameter[call[name[np].power, parameter[call[name[np].maximum, parameter[constant[0], binary_operation[call[name[encoded_arr]][tuple[[<ast.Slice object at 0x7da1b0706b30>, <ast.Slice object at 0x7da1b07073a0>]]] - call[call[name[re_encoded_arr]][tuple[[<ast.Slice object at 0x7da1b0704d90>, <ast.UnaryOp object at 0x7da1b07065f0>]]].reshape, parameter[call[call[name[re_encoded_arr]][tuple[[<ast.Slice object at 0x7da1b0707850>, <ast.UnaryOp object at 0x7da1b0707670>]]].shape][constant[0]], constant[1], call[call[name[re_encoded_arr]][tuple[[<ast.Slice object at 0x7da1b0706560>, <ast.UnaryOp object at 0x7da1b07076a0>]]].shape][constant[1]]]]]]], constant[2]]]]] + name[self].__margin_param]]] variable[other_encoded_delta_arr] assign[=] call[name[np].nan_to_num, parameter[name[other_encoded_delta_arr]]] variable[other_encoded_delta_arr] assign[=] binary_operation[binary_operation[name[other_encoded_delta_arr] - call[name[other_encoded_delta_arr].mean, parameter[]]] / binary_operation[call[name[other_encoded_delta_arr].std, parameter[]] + constant[1e-08]]] variable[other_re_encoded_delta_arr] assign[=] call[name[np].nansum, parameter[binary_operation[call[name[np].sqrt, parameter[call[name[np].power, parameter[call[name[np].maximum, parameter[constant[0], binary_operation[call[call[name[encoded_arr]][tuple[[<ast.Slice object at 0x7da20e9b1b10>, <ast.UnaryOp object at 0x7da20e9b2c80>]]].reshape, parameter[call[call[name[encoded_arr]][tuple[[<ast.Slice object at 0x7da20e9b1570>, <ast.UnaryOp object at 0x7da20e9b23e0>]]].shape][constant[0]], constant[1], call[call[name[encoded_arr]][tuple[[<ast.Slice object at 
0x7da20e9b29e0>, <ast.UnaryOp object at 0x7da20e9b2f20>]]].shape][constant[1]]]] - call[name[re_encoded_arr]][tuple[[<ast.Slice object at 0x7da20e9b3490>, <ast.Slice object at 0x7da20e9b39a0>]]]]]], constant[2]]]]] + name[self].__margin_param]]] variable[other_encoded_delta_arr] assign[=] call[name[np].nan_to_num, parameter[name[other_encoded_delta_arr]]] variable[other_re_encoded_delta_arr] assign[=] binary_operation[binary_operation[name[other_re_encoded_delta_arr] - call[name[other_re_encoded_delta_arr].mean, parameter[]]] / binary_operation[call[name[other_re_encoded_delta_arr].std, parameter[]] + constant[1e-08]]] variable[mismatch_delta_arr] assign[=] binary_operation[binary_operation[name[match_delta_arr] - name[other_encoded_delta_arr]] + binary_operation[name[match_delta_arr] - name[other_re_encoded_delta_arr]]] variable[delta_arr] assign[=] binary_operation[binary_operation[name[summary_delta_arr] + call[name[np].expand_dims, parameter[binary_operation[name[self].__retrospective_lambda * name[match_delta_arr]]]]] + call[name[np].expand_dims, parameter[binary_operation[name[self].__retrospective_eta * name[mismatch_delta_arr]]]]] variable[v] assign[=] call[name[np].linalg.norm, parameter[name[delta_arr]]] if compare[name[v] greater[>] name[self].__grad_clip_threshold] begin[:] variable[delta_arr] assign[=] binary_operation[binary_operation[name[delta_arr] * name[self].__grad_clip_threshold] / name[v]] variable[loss] assign[=] call[call[name[np].square, parameter[name[delta_arr]]].mean, parameter[]] variable[loss_arr] assign[=] call[call[call[name[np].square, parameter[name[delta_arr]]].sum, parameter[]].mean, parameter[]] return[tuple[[<ast.Name object at 0x7da20e9b1fc0>, <ast.Name object at 0x7da20e9b3850>, <ast.Name object at 0x7da20e9b1990>]]]
keyword[def] identifier[compute_retrospective_loss] ( identifier[self] ): literal[string] identifier[observed_arr] , identifier[encoded_arr] , identifier[decoded_arr] , identifier[re_encoded_arr] = identifier[self] . identifier[__inferenced_tuple] identifier[batch_size] = identifier[observed_arr] . identifier[shape] [ literal[int] ] keyword[if] identifier[self] . identifier[__input_neuron_count] == identifier[self] . identifier[__hidden_neuron_count] : identifier[target_arr] = identifier[encoded_arr] - identifier[np] . identifier[expand_dims] ( identifier[observed_arr] . identifier[mean] ( identifier[axis] = literal[int] ), identifier[axis] = literal[int] ) identifier[summary_delta_arr] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[power] ( identifier[decoded_arr] - identifier[target_arr] , literal[int] )) keyword[else] : keyword[if] identifier[self] . identifier[__input_neuron_count] > identifier[self] . identifier[__hidden_neuron_count] : identifier[all_dim_arr] = identifier[np] . identifier[arange] ( identifier[self] . identifier[__input_neuron_count] ) identifier[np] . identifier[random] . identifier[shuffle] ( identifier[all_dim_arr] ) identifier[choiced_dim_arr] = identifier[all_dim_arr] [: identifier[self] . identifier[__hidden_neuron_count] ] identifier[target_arr] = identifier[encoded_arr] - identifier[np] . identifier[expand_dims] ( identifier[observed_arr] [:,:, identifier[choiced_dim_arr] ]. identifier[mean] ( identifier[axis] = literal[int] ), identifier[axis] = literal[int] ) identifier[summary_delta_arr] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[power] ( identifier[decoded_arr] [:,:, identifier[choiced_dim_arr] ]- identifier[target_arr] , literal[int] )) keyword[else] : identifier[all_dim_arr] = identifier[np] . identifier[arange] ( identifier[self] . identifier[__hidden_neuron_count] ) identifier[np] . identifier[random] . identifier[shuffle] ( identifier[all_dim_arr] ) identifier[choiced_dim_arr] = identifier[all_dim_arr] [: identifier[self] . identifier[__input_neuron_count] ] identifier[target_arr] = identifier[encoded_arr] [:,:, identifier[choiced_dim_arr] ]- identifier[np] . identifier[expand_dims] ( identifier[observed_arr] . identifier[mean] ( identifier[axis] = literal[int] ), identifier[axis] = literal[int] ) identifier[summary_delta_arr] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[power] ( identifier[decoded_arr] - identifier[target_arr] , literal[int] )) identifier[summary_delta_arr] = identifier[np] . identifier[nan_to_num] ( identifier[summary_delta_arr] ) identifier[summary_delta_arr] =( identifier[summary_delta_arr] - identifier[summary_delta_arr] . identifier[mean] ())/( identifier[summary_delta_arr] . identifier[std] ()+ literal[int] ) identifier[match_delta_arr] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[power] ( identifier[encoded_arr] [:,- literal[int] ]- identifier[re_encoded_arr] [:,- literal[int] ], literal[int] )) identifier[match_delta_arr] = identifier[np] . identifier[nan_to_num] ( identifier[match_delta_arr] ) identifier[match_delta_arr] =( identifier[match_delta_arr] - identifier[match_delta_arr] . identifier[mean] ())/( identifier[match_delta_arr] . identifier[std] ()+ literal[int] ) identifier[other_encoded_delta_arr] = identifier[np] . identifier[nansum] ( identifier[np] . identifier[sqrt] ( identifier[np] . identifier[power] ( identifier[np] . 
identifier[maximum] ( literal[int] , identifier[encoded_arr] [:,:- literal[int] ]- identifier[re_encoded_arr] [:,- literal[int] ]. identifier[reshape] ( identifier[re_encoded_arr] [:,- literal[int] ]. identifier[shape] [ literal[int] ], literal[int] , identifier[re_encoded_arr] [:,- literal[int] ]. identifier[shape] [ literal[int] ] ) ), literal[int] ) )+ identifier[self] . identifier[__margin_param] , identifier[axis] = literal[int] ) identifier[other_encoded_delta_arr] = identifier[np] . identifier[nan_to_num] ( identifier[other_encoded_delta_arr] ) identifier[other_encoded_delta_arr] =( identifier[other_encoded_delta_arr] - identifier[other_encoded_delta_arr] . identifier[mean] ())/( identifier[other_encoded_delta_arr] . identifier[std] ()+ literal[int] ) identifier[other_re_encoded_delta_arr] = identifier[np] . identifier[nansum] ( identifier[np] . identifier[sqrt] ( identifier[np] . identifier[power] ( identifier[np] . identifier[maximum] ( literal[int] , identifier[encoded_arr] [:,- literal[int] ]. identifier[reshape] ( identifier[encoded_arr] [:,- literal[int] ]. identifier[shape] [ literal[int] ], literal[int] , identifier[encoded_arr] [:,- literal[int] ]. identifier[shape] [ literal[int] ] )- identifier[re_encoded_arr] [:,:- literal[int] ], ), literal[int] ) )+ identifier[self] . identifier[__margin_param] , identifier[axis] = literal[int] ) identifier[other_encoded_delta_arr] = identifier[np] . identifier[nan_to_num] ( identifier[other_encoded_delta_arr] ) identifier[other_re_encoded_delta_arr] =( identifier[other_re_encoded_delta_arr] - identifier[other_re_encoded_delta_arr] . identifier[mean] ())/( identifier[other_re_encoded_delta_arr] . identifier[std] ()+ literal[int] ) identifier[mismatch_delta_arr] =( identifier[match_delta_arr] - identifier[other_encoded_delta_arr] )+( identifier[match_delta_arr] - identifier[other_re_encoded_delta_arr] ) identifier[delta_arr] = identifier[summary_delta_arr] + identifier[np] . identifier[expand_dims] ( identifier[self] . identifier[__retrospective_lambda] * identifier[match_delta_arr] , identifier[axis] = literal[int] )+ identifier[np] . identifier[expand_dims] ( identifier[self] . identifier[__retrospective_eta] * identifier[mismatch_delta_arr] , identifier[axis] = literal[int] ) identifier[v] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[delta_arr] ) keyword[if] identifier[v] > identifier[self] . identifier[__grad_clip_threshold] : identifier[delta_arr] = identifier[delta_arr] * identifier[self] . identifier[__grad_clip_threshold] / identifier[v] identifier[loss] = identifier[np] . identifier[square] ( identifier[delta_arr] ). identifier[mean] () identifier[loss_arr] = identifier[np] . identifier[square] ( identifier[delta_arr] ). identifier[sum] ( identifier[axis] = literal[int] ). identifier[mean] ( identifier[axis] = literal[int] ) keyword[return] identifier[delta_arr] , identifier[loss_arr] , identifier[loss]
def compute_retrospective_loss(self): """ Compute retrospective loss. Returns: The tuple data. - `np.ndarray` of delta. - `np.ndarray` of losses of each batch. - float of loss of all batch. """ (observed_arr, encoded_arr, decoded_arr, re_encoded_arr) = self.__inferenced_tuple batch_size = observed_arr.shape[0] if self.__input_neuron_count == self.__hidden_neuron_count: target_arr = encoded_arr - np.expand_dims(observed_arr.mean(axis=2), axis=2) summary_delta_arr = np.sqrt(np.power(decoded_arr - target_arr, 2)) # depends on [control=['if'], data=[]] # For each batch, draw a samples from the Uniform distribution. elif self.__input_neuron_count > self.__hidden_neuron_count: all_dim_arr = np.arange(self.__input_neuron_count) np.random.shuffle(all_dim_arr) choiced_dim_arr = all_dim_arr[:self.__hidden_neuron_count] target_arr = encoded_arr - np.expand_dims(observed_arr[:, :, choiced_dim_arr].mean(axis=2), axis=2) summary_delta_arr = np.sqrt(np.power(decoded_arr[:, :, choiced_dim_arr] - target_arr, 2)) # depends on [control=['if'], data=[]] else: all_dim_arr = np.arange(self.__hidden_neuron_count) np.random.shuffle(all_dim_arr) choiced_dim_arr = all_dim_arr[:self.__input_neuron_count] target_arr = encoded_arr[:, :, choiced_dim_arr] - np.expand_dims(observed_arr.mean(axis=2), axis=2) summary_delta_arr = np.sqrt(np.power(decoded_arr - target_arr, 2)) summary_delta_arr = np.nan_to_num(summary_delta_arr) summary_delta_arr = (summary_delta_arr - summary_delta_arr.mean()) / (summary_delta_arr.std() + 1e-08) match_delta_arr = np.sqrt(np.power(encoded_arr[:, -1] - re_encoded_arr[:, -1], 2)) match_delta_arr = np.nan_to_num(match_delta_arr) match_delta_arr = (match_delta_arr - match_delta_arr.mean()) / (match_delta_arr.std() + 1e-08) other_encoded_delta_arr = np.nansum(np.sqrt(np.power(np.maximum(0, encoded_arr[:, :-1] - re_encoded_arr[:, -1].reshape(re_encoded_arr[:, -1].shape[0], 1, re_encoded_arr[:, -1].shape[1])), 2)) + self.__margin_param, axis=1) other_encoded_delta_arr = np.nan_to_num(other_encoded_delta_arr) other_encoded_delta_arr = (other_encoded_delta_arr - other_encoded_delta_arr.mean()) / (other_encoded_delta_arr.std() + 1e-08) other_re_encoded_delta_arr = np.nansum(np.sqrt(np.power(np.maximum(0, encoded_arr[:, -1].reshape(encoded_arr[:, -1].shape[0], 1, encoded_arr[:, -1].shape[1]) - re_encoded_arr[:, :-1]), 2)) + self.__margin_param, axis=1) other_encoded_delta_arr = np.nan_to_num(other_encoded_delta_arr) other_re_encoded_delta_arr = (other_re_encoded_delta_arr - other_re_encoded_delta_arr.mean()) / (other_re_encoded_delta_arr.std() + 1e-08) mismatch_delta_arr = match_delta_arr - other_encoded_delta_arr + (match_delta_arr - other_re_encoded_delta_arr) delta_arr = summary_delta_arr + np.expand_dims(self.__retrospective_lambda * match_delta_arr, axis=1) + np.expand_dims(self.__retrospective_eta * mismatch_delta_arr, axis=1) v = np.linalg.norm(delta_arr) if v > self.__grad_clip_threshold: delta_arr = delta_arr * self.__grad_clip_threshold / v # depends on [control=['if'], data=['v']] loss = np.square(delta_arr).mean() loss_arr = np.square(delta_arr).sum(axis=1).mean(axis=1) return (delta_arr, loss_arr, loss)
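Two steps recur throughout compute_retrospective_loss(): z-score normalisation of each delta array (mean/std with a 1e-08 floor) and global L2-norm clipping of the final delta. A self-contained numpy sketch of just those two steps; the array shape is an illustrative assumption:

import numpy as np

def normalize(arr, eps=1e-08):
    # Same pattern as the loss above: zero mean, unit variance, small epsilon.
    return (arr - arr.mean()) / (arr.std() + eps)

def clip_by_norm(delta, threshold):
    # Rescale so the L2 norm never exceeds the threshold.
    v = np.linalg.norm(delta)
    return delta * threshold / v if v > threshold else delta

delta = normalize(np.random.randn(4, 10, 3))  # (batch, sequence, features) assumed
delta = clip_by_norm(delta, threshold=1.0)
loss = np.square(delta).mean()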
def _check_rotated_filename_candidates(self):
    """
    Check for various rotated logfile filename patterns and return the
    first match we find.
    """
    # savelog(8)
    candidate = "%s.0" % self.filename
    if (exists(candidate) and exists("%s.1.gz" % self.filename) and
            (stat(candidate).st_mtime > stat("%s.1.gz" % self.filename).st_mtime)):
        return candidate

    # logrotate(8)
    # with delaycompress
    candidate = "%s.1" % self.filename
    if exists(candidate):
        return candidate

    # without delaycompress
    candidate = "%s.1.gz" % self.filename
    if exists(candidate):
        return candidate

    rotated_filename_patterns = [
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz",
        # for TimedRotatingFileHandler
        "%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]",
    ]
    if self.log_patterns:
        rotated_filename_patterns.extend(self.log_patterns)

    # break into directory and filename components to support cases where the
    # the file is prepended as part of rotation
    file_dir, rel_filename = os.path.split(self.filename)
    for rotated_filename_pattern in rotated_filename_patterns:
        candidates = glob.glob(os.path.join(file_dir, rotated_filename_pattern % rel_filename))
        if candidates:
            candidates.sort()
            return candidates[-1]  # return most recent

    # no match
    return None
def function[_check_rotated_filename_candidates, parameter[self]]: constant[ Check for various rotated logfile filename patterns and return the first match we find. ] variable[candidate] assign[=] binary_operation[constant[%s.0] <ast.Mod object at 0x7da2590d6920> name[self].filename] if <ast.BoolOp object at 0x7da1b1267a60> begin[:] return[name[candidate]] variable[candidate] assign[=] binary_operation[constant[%s.1] <ast.Mod object at 0x7da2590d6920> name[self].filename] if call[name[exists], parameter[name[candidate]]] begin[:] return[name[candidate]] variable[candidate] assign[=] binary_operation[constant[%s.1.gz] <ast.Mod object at 0x7da2590d6920> name[self].filename] if call[name[exists], parameter[name[candidate]]] begin[:] return[name[candidate]] variable[rotated_filename_patterns] assign[=] list[[<ast.Constant object at 0x7da1b1267640>, <ast.Constant object at 0x7da1b1267190>, <ast.Constant object at 0x7da1b1267730>, <ast.Constant object at 0x7da1b1266ad0>, <ast.Constant object at 0x7da1b12648b0>]] if name[self].log_patterns begin[:] call[name[rotated_filename_patterns].extend, parameter[name[self].log_patterns]] <ast.Tuple object at 0x7da1b1265810> assign[=] call[name[os].path.split, parameter[name[self].filename]] for taget[name[rotated_filename_pattern]] in starred[name[rotated_filename_patterns]] begin[:] variable[candidates] assign[=] call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[file_dir], binary_operation[name[rotated_filename_pattern] <ast.Mod object at 0x7da2590d6920> name[rel_filename]]]]]] if name[candidates] begin[:] call[name[candidates].sort, parameter[]] return[call[name[candidates]][<ast.UnaryOp object at 0x7da1b1264370>]] return[constant[None]]
keyword[def] identifier[_check_rotated_filename_candidates] ( identifier[self] ): literal[string] identifier[candidate] = literal[string] % identifier[self] . identifier[filename] keyword[if] ( identifier[exists] ( identifier[candidate] ) keyword[and] identifier[exists] ( literal[string] % identifier[self] . identifier[filename] ) keyword[and] ( identifier[stat] ( identifier[candidate] ). identifier[st_mtime] > identifier[stat] ( literal[string] % identifier[self] . identifier[filename] ). identifier[st_mtime] )): keyword[return] identifier[candidate] identifier[candidate] = literal[string] % identifier[self] . identifier[filename] keyword[if] identifier[exists] ( identifier[candidate] ): keyword[return] identifier[candidate] identifier[candidate] = literal[string] % identifier[self] . identifier[filename] keyword[if] identifier[exists] ( identifier[candidate] ): keyword[return] identifier[candidate] identifier[rotated_filename_patterns] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , ] keyword[if] identifier[self] . identifier[log_patterns] : identifier[rotated_filename_patterns] . identifier[extend] ( identifier[self] . identifier[log_patterns] ) identifier[file_dir] , identifier[rel_filename] = identifier[os] . identifier[path] . identifier[split] ( identifier[self] . identifier[filename] ) keyword[for] identifier[rotated_filename_pattern] keyword[in] identifier[rotated_filename_patterns] : identifier[candidates] = identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[file_dir] , identifier[rotated_filename_pattern] % identifier[rel_filename] )) keyword[if] identifier[candidates] : identifier[candidates] . identifier[sort] () keyword[return] identifier[candidates] [- literal[int] ] keyword[return] keyword[None]
def _check_rotated_filename_candidates(self): """ Check for various rotated logfile filename patterns and return the first match we find. """ # savelog(8) candidate = '%s.0' % self.filename if exists(candidate) and exists('%s.1.gz' % self.filename) and (stat(candidate).st_mtime > stat('%s.1.gz' % self.filename).st_mtime): return candidate # depends on [control=['if'], data=[]] # logrotate(8) # with delaycompress candidate = '%s.1' % self.filename if exists(candidate): return candidate # depends on [control=['if'], data=[]] # without delaycompress candidate = '%s.1.gz' % self.filename if exists(candidate): return candidate # depends on [control=['if'], data=[]] # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress` # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress` # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress` # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress` # for TimedRotatingFileHandler rotated_filename_patterns = ['%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]', '%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz', '%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]', '%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz', '%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]'] if self.log_patterns: rotated_filename_patterns.extend(self.log_patterns) # depends on [control=['if'], data=[]] # break into directory and filename components to support cases where the # the file is prepended as part of rotation (file_dir, rel_filename) = os.path.split(self.filename) for rotated_filename_pattern in rotated_filename_patterns: candidates = glob.glob(os.path.join(file_dir, rotated_filename_pattern % rel_filename)) if candidates: candidates.sort() return candidates[-1] # return most recent # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rotated_filename_pattern']] # no match return None
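To see which rotated filenames the glob patterns above would actually select, a small sketch with fnmatch; the log name app.log and the candidate filenames are made up for illustration:

import fnmatch

names = ["app.log.1", "app.log.1.gz", "app.log-20240131",
         "app.log-20240131.gz", "app.log.2024-01-31"]

patterns = ["%s.1", "%s.1.gz",
            "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
            "%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]"]

for pattern in patterns:
    globbed = pattern % "app.log"
    # glob.glob() uses the same wildcard rules as fnmatch, so this mirrors
    # what the on-disk candidate search would match.
    print(globbed, "->", fnmatch.filter(names, globbed))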
def fill_triangular(x, upper=False, name=None): r"""Creates a (batch of) triangular matrix from a vector of inputs. Created matrix can be lower- or upper-triangular. (It is more efficient to create the matrix as upper or lower, rather than transpose.) Triangular matrix elements are filled in a clockwise spiral. See example, below. If `x.shape` is `[b1, b2, ..., bB, d]` then the output shape is `[b1, b2, ..., bB, n, n]` where `n` is such that `d = n(n+1)/2`, i.e., `n = int(np.sqrt(0.25 + 2. * m) - 0.5)`. Example: ```python fill_triangular([1, 2, 3, 4, 5, 6]) # ==> [[4, 0, 0], # [6, 5, 0], # [3, 2, 1]] fill_triangular([1, 2, 3, 4, 5, 6], upper=True) # ==> [[1, 2, 3], # [0, 5, 6], # [0, 0, 4]] ``` The key trick is to create an upper triangular matrix by concatenating `x` and a tail of itself, then reshaping. Suppose that we are filling the upper triangle of an `n`-by-`n` matrix `M` from a vector `x`. The matrix `M` contains n**2 entries total. The vector `x` contains `n * (n+1) / 2` entries. For concreteness, we'll consider `n = 5` (so `x` has `15` entries and `M` has `25`). We'll concatenate `x` and `x` with the first (`n = 5`) elements removed and reversed: ```python x = np.arange(15) + 1 xc = np.concatenate([x, x[5:][::-1]]) # ==> array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15, 14, 13, # 12, 11, 10, 9, 8, 7, 6]) # (We add one to the arange result to disambiguate the zeros below the # diagonal of our upper-triangular matrix from the first entry in `x`.) # Now, when reshapedlay this out as a matrix: y = np.reshape(xc, [5, 5]) # ==> array([[ 1, 2, 3, 4, 5], # [ 6, 7, 8, 9, 10], # [11, 12, 13, 14, 15], # [15, 14, 13, 12, 11], # [10, 9, 8, 7, 6]]) # Finally, zero the elements below the diagonal: y = np.triu(y, k=0) # ==> array([[ 1, 2, 3, 4, 5], # [ 0, 7, 8, 9, 10], # [ 0, 0, 13, 14, 15], # [ 0, 0, 0, 12, 11], # [ 0, 0, 0, 0, 6]]) ``` From this example we see that the resuting matrix is upper-triangular, and contains all the entries of x, as desired. The rest is details: - If `n` is even, `x` doesn't exactly fill an even number of rows (it fills `n / 2` rows and half of an additional row), but the whole scheme still works. - If we want a lower triangular matrix instead of an upper triangular, we remove the first `n` elements from `x` rather than from the reversed `x`. For additional comparisons, a pure numpy version of this function can be found in `distribution_util_test.py`, function `_fill_triangular`. Args: x: `Tensor` representing lower (or upper) triangular elements. upper: Python `bool` representing whether output matrix should be upper triangular (`True`) or lower triangular (`False`, default). name: Python `str`. The name to give this op. Returns: tril: `Tensor` with lower (or upper) triangular elements filled from `x`. Raises: ValueError: if `x` cannot be mapped to a triangular matrix. """ with tf.name_scope(name or "fill_triangular"): x = tf.convert_to_tensor(value=x, name="x") m = tf.compat.dimension_value( tensorshape_util.with_rank_at_least(x.shape, 1)[-1]) if m is not None: # Formula derived by solving for n: m = n(n+1)/2. m = np.int32(m) n = np.sqrt(0.25 + 2. * m) - 0.5 if n != np.floor(n): raise ValueError("Input right-most shape ({}) does not " "correspond to a triangular matrix.".format(m)) n = np.int32(n) static_final_shape = x.shape[:-1].concatenate([n, n]) else: m = tf.shape(input=x)[-1] # For derivation, see above. Casting automatically lops off the 0.5, so we # omit it. 
We don't validate n is an integer because this has # graph-execution cost; an error will be thrown from the reshape, below. n = tf.cast( tf.sqrt(0.25 + tf.cast(2 * m, dtype=tf.float32)), dtype=tf.int32) static_final_shape = tensorshape_util.with_rank_at_least( x.shape, 1)[:-1].concatenate([None, None]) # Try it out in numpy: # n = 3 # x = np.arange(n * (n + 1) / 2) # m = x.shape[0] # n = np.int32(np.sqrt(.25 + 2 * m) - .5) # x_tail = x[(m - (n**2 - m)):] # np.concatenate([x_tail, x[::-1]], 0).reshape(n, n) # lower # # ==> array([[3, 4, 5], # [5, 4, 3], # [2, 1, 0]]) # np.concatenate([x, x_tail[::-1]], 0).reshape(n, n) # upper # # ==> array([[0, 1, 2], # [3, 4, 5], # [5, 4, 3]]) # # Note that we can't simply do `x[..., -(n**2 - m):]` because this doesn't # correctly handle `m == n == 1`. Hence, we do nonnegative indexing. # Furthermore observe that: # m - (n**2 - m) # = n**2 / 2 + n / 2 - (n**2 - n**2 / 2 + n / 2) # = 2 (n**2 / 2 + n / 2) - n**2 # = n**2 + n - n**2 # = n ndims = prefer_static_rank(x) if upper: x_list = [x, tf.reverse(x[..., n:], axis=[ndims - 1])] else: x_list = [x[..., n:], tf.reverse(x, axis=[ndims - 1])] new_shape = ( tensorshape_util.as_list(static_final_shape) if tensorshape_util.is_fully_defined(static_final_shape) else tf.concat( [tf.shape(input=x)[:-1], [n, n]], axis=0)) x = tf.reshape(tf.concat(x_list, axis=-1), new_shape) x = tf.linalg.band_part( x, num_lower=(0 if upper else -1), num_upper=(-1 if upper else 0)) tensorshape_util.set_shape(x, static_final_shape) return x
def function[fill_triangular, parameter[x, upper, name]]: constant[Creates a (batch of) triangular matrix from a vector of inputs. Created matrix can be lower- or upper-triangular. (It is more efficient to create the matrix as upper or lower, rather than transpose.) Triangular matrix elements are filled in a clockwise spiral. See example, below. If `x.shape` is `[b1, b2, ..., bB, d]` then the output shape is `[b1, b2, ..., bB, n, n]` where `n` is such that `d = n(n+1)/2`, i.e., `n = int(np.sqrt(0.25 + 2. * m) - 0.5)`. Example: ```python fill_triangular([1, 2, 3, 4, 5, 6]) # ==> [[4, 0, 0], # [6, 5, 0], # [3, 2, 1]] fill_triangular([1, 2, 3, 4, 5, 6], upper=True) # ==> [[1, 2, 3], # [0, 5, 6], # [0, 0, 4]] ``` The key trick is to create an upper triangular matrix by concatenating `x` and a tail of itself, then reshaping. Suppose that we are filling the upper triangle of an `n`-by-`n` matrix `M` from a vector `x`. The matrix `M` contains n**2 entries total. The vector `x` contains `n * (n+1) / 2` entries. For concreteness, we'll consider `n = 5` (so `x` has `15` entries and `M` has `25`). We'll concatenate `x` and `x` with the first (`n = 5`) elements removed and reversed: ```python x = np.arange(15) + 1 xc = np.concatenate([x, x[5:][::-1]]) # ==> array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15, 14, 13, # 12, 11, 10, 9, 8, 7, 6]) # (We add one to the arange result to disambiguate the zeros below the # diagonal of our upper-triangular matrix from the first entry in `x`.) # Now, when reshapedlay this out as a matrix: y = np.reshape(xc, [5, 5]) # ==> array([[ 1, 2, 3, 4, 5], # [ 6, 7, 8, 9, 10], # [11, 12, 13, 14, 15], # [15, 14, 13, 12, 11], # [10, 9, 8, 7, 6]]) # Finally, zero the elements below the diagonal: y = np.triu(y, k=0) # ==> array([[ 1, 2, 3, 4, 5], # [ 0, 7, 8, 9, 10], # [ 0, 0, 13, 14, 15], # [ 0, 0, 0, 12, 11], # [ 0, 0, 0, 0, 6]]) ``` From this example we see that the resuting matrix is upper-triangular, and contains all the entries of x, as desired. The rest is details: - If `n` is even, `x` doesn't exactly fill an even number of rows (it fills `n / 2` rows and half of an additional row), but the whole scheme still works. - If we want a lower triangular matrix instead of an upper triangular, we remove the first `n` elements from `x` rather than from the reversed `x`. For additional comparisons, a pure numpy version of this function can be found in `distribution_util_test.py`, function `_fill_triangular`. Args: x: `Tensor` representing lower (or upper) triangular elements. upper: Python `bool` representing whether output matrix should be upper triangular (`True`) or lower triangular (`False`, default). name: Python `str`. The name to give this op. Returns: tril: `Tensor` with lower (or upper) triangular elements filled from `x`. Raises: ValueError: if `x` cannot be mapped to a triangular matrix. 
] with call[name[tf].name_scope, parameter[<ast.BoolOp object at 0x7da1b0229870>]] begin[:] variable[x] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[m] assign[=] call[name[tf].compat.dimension_value, parameter[call[call[name[tensorshape_util].with_rank_at_least, parameter[name[x].shape, constant[1]]]][<ast.UnaryOp object at 0x7da1b02285e0>]]] if compare[name[m] is_not constant[None]] begin[:] variable[m] assign[=] call[name[np].int32, parameter[name[m]]] variable[n] assign[=] binary_operation[call[name[np].sqrt, parameter[binary_operation[constant[0.25] + binary_operation[constant[2.0] * name[m]]]]] - constant[0.5]] if compare[name[n] not_equal[!=] call[name[np].floor, parameter[name[n]]]] begin[:] <ast.Raise object at 0x7da1b02293c0> variable[n] assign[=] call[name[np].int32, parameter[name[n]]] variable[static_final_shape] assign[=] call[call[name[x].shape][<ast.Slice object at 0x7da1b022b4f0>].concatenate, parameter[list[[<ast.Name object at 0x7da1b0229960>, <ast.Name object at 0x7da1b02288e0>]]]] variable[ndims] assign[=] call[name[prefer_static_rank], parameter[name[x]]] if name[upper] begin[:] variable[x_list] assign[=] list[[<ast.Name object at 0x7da1b022ab90>, <ast.Call object at 0x7da1b0228220>]] variable[new_shape] assign[=] <ast.IfExp object at 0x7da1b0229f00> variable[x] assign[=] call[name[tf].reshape, parameter[call[name[tf].concat, parameter[name[x_list]]], name[new_shape]]] variable[x] assign[=] call[name[tf].linalg.band_part, parameter[name[x]]] call[name[tensorshape_util].set_shape, parameter[name[x], name[static_final_shape]]] return[name[x]]
keyword[def] identifier[fill_triangular] ( identifier[x] , identifier[upper] = keyword[False] , identifier[name] = keyword[None] ): literal[string] keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] keyword[or] literal[string] ): identifier[x] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[x] , identifier[name] = literal[string] ) identifier[m] = identifier[tf] . identifier[compat] . identifier[dimension_value] ( identifier[tensorshape_util] . identifier[with_rank_at_least] ( identifier[x] . identifier[shape] , literal[int] )[- literal[int] ]) keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] : identifier[m] = identifier[np] . identifier[int32] ( identifier[m] ) identifier[n] = identifier[np] . identifier[sqrt] ( literal[int] + literal[int] * identifier[m] )- literal[int] keyword[if] identifier[n] != identifier[np] . identifier[floor] ( identifier[n] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[m] )) identifier[n] = identifier[np] . identifier[int32] ( identifier[n] ) identifier[static_final_shape] = identifier[x] . identifier[shape] [:- literal[int] ]. identifier[concatenate] ([ identifier[n] , identifier[n] ]) keyword[else] : identifier[m] = identifier[tf] . identifier[shape] ( identifier[input] = identifier[x] )[- literal[int] ] identifier[n] = identifier[tf] . identifier[cast] ( identifier[tf] . identifier[sqrt] ( literal[int] + identifier[tf] . identifier[cast] ( literal[int] * identifier[m] , identifier[dtype] = identifier[tf] . identifier[float32] )), identifier[dtype] = identifier[tf] . identifier[int32] ) identifier[static_final_shape] = identifier[tensorshape_util] . identifier[with_rank_at_least] ( identifier[x] . identifier[shape] , literal[int] )[:- literal[int] ]. identifier[concatenate] ([ keyword[None] , keyword[None] ]) identifier[ndims] = identifier[prefer_static_rank] ( identifier[x] ) keyword[if] identifier[upper] : identifier[x_list] =[ identifier[x] , identifier[tf] . identifier[reverse] ( identifier[x] [..., identifier[n] :], identifier[axis] =[ identifier[ndims] - literal[int] ])] keyword[else] : identifier[x_list] =[ identifier[x] [..., identifier[n] :], identifier[tf] . identifier[reverse] ( identifier[x] , identifier[axis] =[ identifier[ndims] - literal[int] ])] identifier[new_shape] =( identifier[tensorshape_util] . identifier[as_list] ( identifier[static_final_shape] ) keyword[if] identifier[tensorshape_util] . identifier[is_fully_defined] ( identifier[static_final_shape] ) keyword[else] identifier[tf] . identifier[concat] ( [ identifier[tf] . identifier[shape] ( identifier[input] = identifier[x] )[:- literal[int] ],[ identifier[n] , identifier[n] ]], identifier[axis] = literal[int] )) identifier[x] = identifier[tf] . identifier[reshape] ( identifier[tf] . identifier[concat] ( identifier[x_list] , identifier[axis] =- literal[int] ), identifier[new_shape] ) identifier[x] = identifier[tf] . identifier[linalg] . identifier[band_part] ( identifier[x] , identifier[num_lower] =( literal[int] keyword[if] identifier[upper] keyword[else] - literal[int] ), identifier[num_upper] =(- literal[int] keyword[if] identifier[upper] keyword[else] literal[int] )) identifier[tensorshape_util] . identifier[set_shape] ( identifier[x] , identifier[static_final_shape] ) keyword[return] identifier[x]
def fill_triangular(x, upper=False, name=None): """Creates a (batch of) triangular matrix from a vector of inputs. Created matrix can be lower- or upper-triangular. (It is more efficient to create the matrix as upper or lower, rather than transpose.) Triangular matrix elements are filled in a clockwise spiral. See example, below. If `x.shape` is `[b1, b2, ..., bB, d]` then the output shape is `[b1, b2, ..., bB, n, n]` where `n` is such that `d = n(n+1)/2`, i.e., `n = int(np.sqrt(0.25 + 2. * m) - 0.5)`. Example: ```python fill_triangular([1, 2, 3, 4, 5, 6]) # ==> [[4, 0, 0], # [6, 5, 0], # [3, 2, 1]] fill_triangular([1, 2, 3, 4, 5, 6], upper=True) # ==> [[1, 2, 3], # [0, 5, 6], # [0, 0, 4]] ``` The key trick is to create an upper triangular matrix by concatenating `x` and a tail of itself, then reshaping. Suppose that we are filling the upper triangle of an `n`-by-`n` matrix `M` from a vector `x`. The matrix `M` contains n**2 entries total. The vector `x` contains `n * (n+1) / 2` entries. For concreteness, we'll consider `n = 5` (so `x` has `15` entries and `M` has `25`). We'll concatenate `x` and `x` with the first (`n = 5`) elements removed and reversed: ```python x = np.arange(15) + 1 xc = np.concatenate([x, x[5:][::-1]]) # ==> array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15, 14, 13, # 12, 11, 10, 9, 8, 7, 6]) # (We add one to the arange result to disambiguate the zeros below the # diagonal of our upper-triangular matrix from the first entry in `x`.) # Now, when reshapedlay this out as a matrix: y = np.reshape(xc, [5, 5]) # ==> array([[ 1, 2, 3, 4, 5], # [ 6, 7, 8, 9, 10], # [11, 12, 13, 14, 15], # [15, 14, 13, 12, 11], # [10, 9, 8, 7, 6]]) # Finally, zero the elements below the diagonal: y = np.triu(y, k=0) # ==> array([[ 1, 2, 3, 4, 5], # [ 0, 7, 8, 9, 10], # [ 0, 0, 13, 14, 15], # [ 0, 0, 0, 12, 11], # [ 0, 0, 0, 0, 6]]) ``` From this example we see that the resuting matrix is upper-triangular, and contains all the entries of x, as desired. The rest is details: - If `n` is even, `x` doesn't exactly fill an even number of rows (it fills `n / 2` rows and half of an additional row), but the whole scheme still works. - If we want a lower triangular matrix instead of an upper triangular, we remove the first `n` elements from `x` rather than from the reversed `x`. For additional comparisons, a pure numpy version of this function can be found in `distribution_util_test.py`, function `_fill_triangular`. Args: x: `Tensor` representing lower (or upper) triangular elements. upper: Python `bool` representing whether output matrix should be upper triangular (`True`) or lower triangular (`False`, default). name: Python `str`. The name to give this op. Returns: tril: `Tensor` with lower (or upper) triangular elements filled from `x`. Raises: ValueError: if `x` cannot be mapped to a triangular matrix. """ with tf.name_scope(name or 'fill_triangular'): x = tf.convert_to_tensor(value=x, name='x') m = tf.compat.dimension_value(tensorshape_util.with_rank_at_least(x.shape, 1)[-1]) if m is not None: # Formula derived by solving for n: m = n(n+1)/2. m = np.int32(m) n = np.sqrt(0.25 + 2.0 * m) - 0.5 if n != np.floor(n): raise ValueError('Input right-most shape ({}) does not correspond to a triangular matrix.'.format(m)) # depends on [control=['if'], data=[]] n = np.int32(n) static_final_shape = x.shape[:-1].concatenate([n, n]) # depends on [control=['if'], data=['m']] else: m = tf.shape(input=x)[-1] # For derivation, see above. Casting automatically lops off the 0.5, so we # omit it. 
We don't validate n is an integer because this has # graph-execution cost; an error will be thrown from the reshape, below. n = tf.cast(tf.sqrt(0.25 + tf.cast(2 * m, dtype=tf.float32)), dtype=tf.int32) static_final_shape = tensorshape_util.with_rank_at_least(x.shape, 1)[:-1].concatenate([None, None]) # Try it out in numpy: # n = 3 # x = np.arange(n * (n + 1) / 2) # m = x.shape[0] # n = np.int32(np.sqrt(.25 + 2 * m) - .5) # x_tail = x[(m - (n**2 - m)):] # np.concatenate([x_tail, x[::-1]], 0).reshape(n, n) # lower # # ==> array([[3, 4, 5], # [5, 4, 3], # [2, 1, 0]]) # np.concatenate([x, x_tail[::-1]], 0).reshape(n, n) # upper # # ==> array([[0, 1, 2], # [3, 4, 5], # [5, 4, 3]]) # # Note that we can't simply do `x[..., -(n**2 - m):]` because this doesn't # correctly handle `m == n == 1`. Hence, we do nonnegative indexing. # Furthermore observe that: # m - (n**2 - m) # = n**2 / 2 + n / 2 - (n**2 - n**2 / 2 + n / 2) # = 2 (n**2 / 2 + n / 2) - n**2 # = n**2 + n - n**2 # = n ndims = prefer_static_rank(x) if upper: x_list = [x, tf.reverse(x[..., n:], axis=[ndims - 1])] # depends on [control=['if'], data=[]] else: x_list = [x[..., n:], tf.reverse(x, axis=[ndims - 1])] new_shape = tensorshape_util.as_list(static_final_shape) if tensorshape_util.is_fully_defined(static_final_shape) else tf.concat([tf.shape(input=x)[:-1], [n, n]], axis=0) x = tf.reshape(tf.concat(x_list, axis=-1), new_shape) x = tf.linalg.band_part(x, num_lower=0 if upper else -1, num_upper=-1 if upper else 0) tensorshape_util.set_shape(x, static_final_shape) return x # depends on [control=['with'], data=[]]
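The fill_triangular docstring above explains the concatenate-and-reshape trick in numpy terms. A pure-numpy sketch of the same idea (the tests' _fill_triangular is referenced but not shown, so this reconstruction is an assumption):

import numpy as np

def fill_triangular_np(x, upper=False):
    # x holds n*(n+1)/2 elements along its last axis; solve m = n(n+1)/2 for n.
    m = x.shape[-1]
    n = int(np.sqrt(0.25 + 2.0 * m) - 0.5)
    if upper:
        x_list = [x, x[..., n:][..., ::-1]]
    else:
        x_list = [x[..., n:], x[..., ::-1]]
    y = np.concatenate(x_list, axis=-1).reshape(x.shape[:-1] + (n, n))
    return np.triu(y) if upper else np.tril(y)

print(fill_triangular_np(np.arange(1, 7)))
# [[4 0 0]
#  [6 5 0]
#  [3 2 1]]  -- matches the docstring example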
def tables(self):
    """
    Returns a list of table names.

    Example:
        >>> db.tables
        ["bar", "foo"]

    Returns:
        list of str: One string for each table name.
    """
    select = ("SELECT name FROM sqlite_master",)
    query = self.execute(*select)
    result = query.fetchall()

    # Filter first column from rows.
    return [row[0] for row in result]
def function[tables, parameter[self]]: constant[ Returns a list of table names. Example: >>> db.tables ["bar", "foo"] Returns: list of str: One string for each table name. ] variable[select] assign[=] tuple[[<ast.Constant object at 0x7da1b0838040>]] variable[query] assign[=] call[name[self].execute, parameter[<ast.Starred object at 0x7da1b083af20>]] variable[result] assign[=] call[name[query].fetchall, parameter[]] return[<ast.ListComp object at 0x7da1b083a8f0>]
keyword[def] identifier[tables] ( identifier[self] ): literal[string] identifier[select] =( literal[string] ,) identifier[query] = identifier[self] . identifier[execute] (* identifier[select] ) identifier[result] = identifier[query] . identifier[fetchall] () keyword[return] [ identifier[row] [ literal[int] ] keyword[for] identifier[row] keyword[in] identifier[result] ]
def tables(self): """ Returns a list of table names. Example: >>> db.tables ["bar", "foo"] Returns: list of str: One string for each table name. """ select = ('SELECT name FROM sqlite_master',) query = self.execute(*select) result = query.fetchall() # Filter first column from rows. return [row[0] for row in result]
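For comparison with tables(), the same catalogue lookup written directly against the sqlite3 standard library; the in-memory database and table names are illustrative:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("CREATE TABLE foo (x); CREATE TABLE bar (y);")

# Same idea: read names from sqlite_master and keep the first column of each row.
rows = conn.execute("SELECT name FROM sqlite_master WHERE type = 'table'").fetchall()
print(sorted(row[0] for row in rows))  # ['bar', 'foo']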
def identity(requestContext, name, step=60):
    """
    Identity function:
    Returns datapoints where the value equals the timestamp of the datapoint.
    Useful when you have another series where the value is a timestamp, and
    you want to compare it to the time of the datapoint, to render an age

    Example::

        &target=identity("The.time.series")

    This would create a series named "The.time.series" that contains points
    where x(t) == t.

    Accepts optional second argument as 'step' parameter (default step is
    60 sec)
    """
    start = int(epoch(requestContext["startTime"]))
    end = int(epoch(requestContext["endTime"]))
    values = range(start, end, step)
    series = TimeSeries(name, start, end, step, values)
    series.pathExpression = 'identity("%s")' % name

    return [series]
def function[identity, parameter[requestContext, name, step]]: constant[ Identity function: Returns datapoints where the value equals the timestamp of the datapoint. Useful when you have another series where the value is a timestamp, and you want to compare it to the time of the datapoint, to render an age Example:: &target=identity("The.time.series") This would create a series named "The.time.series" that contains points where x(t) == t. Accepts optional second argument as 'step' parameter (default step is 60 sec) ] variable[start] assign[=] call[name[int], parameter[call[name[epoch], parameter[call[name[requestContext]][constant[startTime]]]]]] variable[end] assign[=] call[name[int], parameter[call[name[epoch], parameter[call[name[requestContext]][constant[endTime]]]]]] variable[values] assign[=] call[name[range], parameter[name[start], name[end], name[step]]] variable[series] assign[=] call[name[TimeSeries], parameter[name[name], name[start], name[end], name[step], name[values]]] name[series].pathExpression assign[=] binary_operation[constant[identity("%s")] <ast.Mod object at 0x7da2590d6920> name[name]] return[list[[<ast.Name object at 0x7da1b065b790>]]]
keyword[def] identifier[identity] ( identifier[requestContext] , identifier[name] , identifier[step] = literal[int] ): literal[string] identifier[start] = identifier[int] ( identifier[epoch] ( identifier[requestContext] [ literal[string] ])) identifier[end] = identifier[int] ( identifier[epoch] ( identifier[requestContext] [ literal[string] ])) identifier[values] = identifier[range] ( identifier[start] , identifier[end] , identifier[step] ) identifier[series] = identifier[TimeSeries] ( identifier[name] , identifier[start] , identifier[end] , identifier[step] , identifier[values] ) identifier[series] . identifier[pathExpression] = literal[string] % identifier[name] keyword[return] [ identifier[series] ]
def identity(requestContext, name, step=60): """ Identity function: Returns datapoints where the value equals the timestamp of the datapoint. Useful when you have another series where the value is a timestamp, and you want to compare it to the time of the datapoint, to render an age Example:: &target=identity("The.time.series") This would create a series named "The.time.series" that contains points where x(t) == t. Accepts optional second argument as 'step' parameter (default step is 60 sec) """ start = int(epoch(requestContext['startTime'])) end = int(epoch(requestContext['endTime'])) values = range(start, end, step) series = TimeSeries(name, start, end, step, values) series.pathExpression = 'identity("%s")' % name return [series]
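A small illustration of the values identity() produces, with epoch() stubbed out and plain datetimes standing in for the Graphite requestContext (both stand-ins are assumptions made for the sketch):

from datetime import datetime, timezone

def epoch(dt):
    # Stand-in for the epoch() helper used by identity().
    return dt.replace(tzinfo=timezone.utc).timestamp()

start = int(epoch(datetime(2024, 1, 1, 0, 0)))
end = int(epoch(datetime(2024, 1, 1, 0, 5)))
step = 60

# Each datapoint's value is its own timestamp, which is what makes the
# series usable for rendering ages.
print(list(range(start, end, step)))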
def get_time(self, **params):
    """https://developers.coinbase.com/api/v2#time"""
    response = self._get('v2', 'time', params=params)
    return self._make_api_object(response, APIObject)
def function[get_time, parameter[self]]: constant[https://developers.coinbase.com/api/v2#time] variable[response] assign[=] call[name[self]._get, parameter[constant[v2], constant[time]]] return[call[name[self]._make_api_object, parameter[name[response], name[APIObject]]]]
keyword[def] identifier[get_time] ( identifier[self] ,** identifier[params] ): literal[string] identifier[response] = identifier[self] . identifier[_get] ( literal[string] , literal[string] , identifier[params] = identifier[params] ) keyword[return] identifier[self] . identifier[_make_api_object] ( identifier[response] , identifier[APIObject] )
def get_time(self, **params): """https://developers.coinbase.com/api/v2#time""" response = self._get('v2', 'time', params=params) return self._make_api_object(response, APIObject)
def add_annotation_comment(self, doc, comment):
    """Sets the annotation comment. Raises CardinalityError if
    already set. OrderError if no annotator defined before.
    Raises SPDXValueError if comment is not free form text.
    """
    if len(doc.annotations) != 0:
        if not self.annotation_comment_set:
            self.annotation_comment_set = True
            if validations.validate_annotation_comment(comment):
                doc.annotations[-1].comment = str_from_text(comment)
                return True
            else:
                raise SPDXValueError('AnnotationComment::Comment')
        else:
            raise CardinalityError('AnnotationComment::Comment')
    else:
        raise OrderError('AnnotationComment::Comment')
def function[add_annotation_comment, parameter[self, doc, comment]]: constant[Sets the annotation comment. Raises CardinalityError if already set. OrderError if no annotator defined before. Raises SPDXValueError if comment is not free form text. ] if compare[call[name[len], parameter[name[doc].annotations]] not_equal[!=] constant[0]] begin[:] if <ast.UnaryOp object at 0x7da207f03340> begin[:] name[self].annotation_comment_set assign[=] constant[True] if call[name[validations].validate_annotation_comment, parameter[name[comment]]] begin[:] call[name[doc].annotations][<ast.UnaryOp object at 0x7da207f00a30>].comment assign[=] call[name[str_from_text], parameter[name[comment]]] return[constant[True]]
keyword[def] identifier[add_annotation_comment] ( identifier[self] , identifier[doc] , identifier[comment] ): literal[string] keyword[if] identifier[len] ( identifier[doc] . identifier[annotations] )!= literal[int] : keyword[if] keyword[not] identifier[self] . identifier[annotation_comment_set] : identifier[self] . identifier[annotation_comment_set] = keyword[True] keyword[if] identifier[validations] . identifier[validate_annotation_comment] ( identifier[comment] ): identifier[doc] . identifier[annotations] [- literal[int] ]. identifier[comment] = identifier[str_from_text] ( identifier[comment] ) keyword[return] keyword[True] keyword[else] : keyword[raise] identifier[SPDXValueError] ( literal[string] ) keyword[else] : keyword[raise] identifier[CardinalityError] ( literal[string] ) keyword[else] : keyword[raise] identifier[OrderError] ( literal[string] )
def add_annotation_comment(self, doc, comment): """Sets the annotation comment. Raises CardinalityError if already set. OrderError if no annotator defined before. Raises SPDXValueError if comment is not free form text. """ if len(doc.annotations) != 0: if not self.annotation_comment_set: self.annotation_comment_set = True if validations.validate_annotation_comment(comment): doc.annotations[-1].comment = str_from_text(comment) return True # depends on [control=['if'], data=[]] else: raise SPDXValueError('AnnotationComment::Comment') # depends on [control=['if'], data=[]] else: raise CardinalityError('AnnotationComment::Comment') # depends on [control=['if'], data=[]] else: raise OrderError('AnnotationComment::Comment')
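A sketch of the call order add_annotation_comment() expects. Only that method comes from the code above; builder, doc and the add_annotator() call are assumptions about the surrounding SPDX builder, and the <text> wrapper is the usual free-form-text format its validator accepts.

builder.add_annotator(doc, 'Person: Jane Doe')                        # skipping this raises OrderError
builder.add_annotation_comment(doc, '<text>Reviewed manually.</text>')
builder.add_annotation_comment(doc, '<text>Second comment.</text>')   # CardinalityError: already set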
def boolean(input): """Convert the given input to a boolean value. Intelligently handles boolean and non-string values, returning as-is and passing to the bool builtin respectively. This process is case-insensitive. Acceptable values: True * yes * y * on * true * t * 1 False * no * n * off * false * f * 0 :param input: the value to convert to a boolean :type input: any :returns: converted boolean value :rtype: bool """ try: input = input.strip().lower() except AttributeError: return bool(input) if input in ('yes', 'y', 'on', 'true', 't', '1'): return True if input in ('no', 'n', 'off', 'false', 'f', '0'): return False raise ValueError("Unable to convert {0!r} to a boolean value.".format(input))
def function[boolean, parameter[input]]: constant[Convert the given input to a boolean value. Intelligently handles boolean and non-string values, returning as-is and passing to the bool builtin respectively. This process is case-insensitive. Acceptable values: True * yes * y * on * true * t * 1 False * no * n * off * false * f * 0 :param input: the value to convert to a boolean :type input: any :returns: converted boolean value :rtype: bool ] <ast.Try object at 0x7da1b1ff19f0> if compare[name[input] in tuple[[<ast.Constant object at 0x7da1b1ff2350>, <ast.Constant object at 0x7da1b1ff11e0>, <ast.Constant object at 0x7da1b1ff0400>, <ast.Constant object at 0x7da1b1ff2560>, <ast.Constant object at 0x7da1b1ff2500>, <ast.Constant object at 0x7da1b1ff1990>]]] begin[:] return[constant[True]] if compare[name[input] in tuple[[<ast.Constant object at 0x7da18f09e860>, <ast.Constant object at 0x7da18f09f550>, <ast.Constant object at 0x7da18f09c760>, <ast.Constant object at 0x7da18f09c940>, <ast.Constant object at 0x7da18f09caf0>, <ast.Constant object at 0x7da18f09ead0>]]] begin[:] return[constant[False]] <ast.Raise object at 0x7da18f09d600>
keyword[def] identifier[boolean] ( identifier[input] ): literal[string] keyword[try] : identifier[input] = identifier[input] . identifier[strip] (). identifier[lower] () keyword[except] identifier[AttributeError] : keyword[return] identifier[bool] ( identifier[input] ) keyword[if] identifier[input] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): keyword[return] keyword[True] keyword[if] identifier[input] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): keyword[return] keyword[False] keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[input] ))
def boolean(input): """Convert the given input to a boolean value. Intelligently handles boolean and non-string values, returning as-is and passing to the bool builtin respectively. This process is case-insensitive. Acceptable values: True * yes * y * on * true * t * 1 False * no * n * off * false * f * 0 :param input: the value to convert to a boolean :type input: any :returns: converted boolean value :rtype: bool """ try: input = input.strip().lower() # depends on [control=['try'], data=[]] except AttributeError: return bool(input) # depends on [control=['except'], data=[]] if input in ('yes', 'y', 'on', 'true', 't', '1'): return True # depends on [control=['if'], data=[]] if input in ('no', 'n', 'off', 'false', 'f', '0'): return False # depends on [control=['if'], data=[]] raise ValueError('Unable to convert {0!r} to a boolean value.'.format(input))
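A few illustrative calls that follow the branches of boolean() directly:

boolean(' YES ')    # True  (case-insensitive, surrounding whitespace stripped)
boolean('off')      # False
boolean(0)          # False (non-strings fall back to bool())
boolean('maybe')    # raises ValueError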
def __get_average_intra_cluster_distance(self, entry): """! @brief Calculates average intra cluster distance between current and specified clusters. @param[in] entry (cfentry): Clustering feature to which distance should be obtained. @return (double) Average intra cluster distance. """ linear_part_first = list_math_addition(self.linear_sum, entry.linear_sum); linear_part_second = linear_part_first; linear_part_distance = sum(list_math_multiplication(linear_part_first, linear_part_second)); general_part_distance = 2.0 * (self.number_points + entry.number_points) * (self.square_sum + entry.square_sum) - 2.0 * linear_part_distance; return (general_part_distance / ( (self.number_points + entry.number_points) * (self.number_points + entry.number_points - 1.0) )) ** 0.5;
def function[__get_average_intra_cluster_distance, parameter[self, entry]]: constant[! @brief Calculates average intra cluster distance between current and specified clusters. @param[in] entry (cfentry): Clustering feature to which distance should be obtained. @return (double) Average intra cluster distance. ] variable[linear_part_first] assign[=] call[name[list_math_addition], parameter[name[self].linear_sum, name[entry].linear_sum]] variable[linear_part_second] assign[=] name[linear_part_first] variable[linear_part_distance] assign[=] call[name[sum], parameter[call[name[list_math_multiplication], parameter[name[linear_part_first], name[linear_part_second]]]]] variable[general_part_distance] assign[=] binary_operation[binary_operation[binary_operation[constant[2.0] * binary_operation[name[self].number_points + name[entry].number_points]] * binary_operation[name[self].square_sum + name[entry].square_sum]] - binary_operation[constant[2.0] * name[linear_part_distance]]] return[binary_operation[binary_operation[name[general_part_distance] / binary_operation[binary_operation[name[self].number_points + name[entry].number_points] * binary_operation[binary_operation[name[self].number_points + name[entry].number_points] - constant[1.0]]]] ** constant[0.5]]]
keyword[def] identifier[__get_average_intra_cluster_distance] ( identifier[self] , identifier[entry] ): literal[string] identifier[linear_part_first] = identifier[list_math_addition] ( identifier[self] . identifier[linear_sum] , identifier[entry] . identifier[linear_sum] ); identifier[linear_part_second] = identifier[linear_part_first] ; identifier[linear_part_distance] = identifier[sum] ( identifier[list_math_multiplication] ( identifier[linear_part_first] , identifier[linear_part_second] )); identifier[general_part_distance] = literal[int] *( identifier[self] . identifier[number_points] + identifier[entry] . identifier[number_points] )*( identifier[self] . identifier[square_sum] + identifier[entry] . identifier[square_sum] )- literal[int] * identifier[linear_part_distance] ; keyword[return] ( identifier[general_part_distance] /(( identifier[self] . identifier[number_points] + identifier[entry] . identifier[number_points] )*( identifier[self] . identifier[number_points] + identifier[entry] . identifier[number_points] - literal[int] )))** literal[int] ;
def __get_average_intra_cluster_distance(self, entry): """! @brief Calculates average intra cluster distance between current and specified clusters. @param[in] entry (cfentry): Clustering feature to which distance should be obtained. @return (double) Average intra cluster distance. """ linear_part_first = list_math_addition(self.linear_sum, entry.linear_sum) linear_part_second = linear_part_first linear_part_distance = sum(list_math_multiplication(linear_part_first, linear_part_second)) general_part_distance = 2.0 * (self.number_points + entry.number_points) * (self.square_sum + entry.square_sum) - 2.0 * linear_part_distance return (general_part_distance / ((self.number_points + entry.number_points) * (self.number_points + entry.number_points - 1.0))) ** 0.5
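A numeric check of the formula above on a made-up one-dimensional merged cluster; it reproduces the square root of the mean squared pairwise distance between the points.

n, ls, ss = 3, 7.0, 21.0                        # assumed merged points {1, 2, 4}
avg_sq = (2.0 * n * ss - 2.0 * ls * ls) / (n * (n - 1.0))
print(avg_sq ** 0.5)                            # ~2.16, i.e. sqrt(mean squared pairwise distance)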
def clean_name(self): "Avoid name clashes between static and dynamic attributes." name = self.cleaned_data['name'] reserved_names = self._meta.model._meta.get_all_field_names() if name not in reserved_names: return name raise ValidationError(_('Attribute name must not clash with reserved names' ' ("%s")') % '", "'.join(reserved_names))
def function[clean_name, parameter[self]]: constant[Avoid name clashes between static and dynamic attributes.] variable[name] assign[=] call[name[self].cleaned_data][constant[name]] variable[reserved_names] assign[=] call[name[self]._meta.model._meta.get_all_field_names, parameter[]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[reserved_names]] begin[:] return[name[name]] <ast.Raise object at 0x7da18bcc93f0>
keyword[def] identifier[clean_name] ( identifier[self] ): literal[string] identifier[name] = identifier[self] . identifier[cleaned_data] [ literal[string] ] identifier[reserved_names] = identifier[self] . identifier[_meta] . identifier[model] . identifier[_meta] . identifier[get_all_field_names] () keyword[if] identifier[name] keyword[not] keyword[in] identifier[reserved_names] : keyword[return] identifier[name] keyword[raise] identifier[ValidationError] ( identifier[_] ( literal[string] literal[string] )% literal[string] . identifier[join] ( identifier[reserved_names] ))
def clean_name(self): """Avoid name clashes between static and dynamic attributes.""" name = self.cleaned_data['name'] reserved_names = self._meta.model._meta.get_all_field_names() if name not in reserved_names: return name # depends on [control=['if'], data=['name']] raise ValidationError(_('Attribute name must not clash with reserved names ("%s")') % '", "'.join(reserved_names))
def _collapse_cursor(self, parts): """ Act on any CursorMoveUp commands by deleting preceding tokens """ final_parts = [] for part in parts: # Throw out empty string tokens ("") if not part: continue # Go back, deleting every token in the last 'line' if part == CursorMoveUp: if final_parts: final_parts.pop() while final_parts and '\n' not in final_parts[-1]: final_parts.pop() continue # Otherwise, just pass this token forward final_parts.append(part) return final_parts
def function[_collapse_cursor, parameter[self, parts]]: constant[ Act on any CursorMoveUp commands by deleting preceding tokens ] variable[final_parts] assign[=] list[[]] for taget[name[part]] in starred[name[parts]] begin[:] if <ast.UnaryOp object at 0x7da1b12920b0> begin[:] continue if compare[name[part] equal[==] name[CursorMoveUp]] begin[:] if name[final_parts] begin[:] call[name[final_parts].pop, parameter[]] while <ast.BoolOp object at 0x7da1b12904c0> begin[:] call[name[final_parts].pop, parameter[]] continue call[name[final_parts].append, parameter[name[part]]] return[name[final_parts]]
keyword[def] identifier[_collapse_cursor] ( identifier[self] , identifier[parts] ): literal[string] identifier[final_parts] =[] keyword[for] identifier[part] keyword[in] identifier[parts] : keyword[if] keyword[not] identifier[part] : keyword[continue] keyword[if] identifier[part] == identifier[CursorMoveUp] : keyword[if] identifier[final_parts] : identifier[final_parts] . identifier[pop] () keyword[while] identifier[final_parts] keyword[and] literal[string] keyword[not] keyword[in] identifier[final_parts] [- literal[int] ]: identifier[final_parts] . identifier[pop] () keyword[continue] identifier[final_parts] . identifier[append] ( identifier[part] ) keyword[return] identifier[final_parts]
def _collapse_cursor(self, parts): """ Act on any CursorMoveUp commands by deleting preceding tokens """ final_parts = [] for part in parts: # Throw out empty string tokens ("") if not part: continue # depends on [control=['if'], data=[]] # Go back, deleting every token in the last 'line' if part == CursorMoveUp: if final_parts: final_parts.pop() # depends on [control=['if'], data=[]] while final_parts and '\n' not in final_parts[-1]: final_parts.pop() # depends on [control=['while'], data=[]] continue # depends on [control=['if'], data=[]] # Otherwise, just pass this token forward final_parts.append(part) # depends on [control=['for'], data=['part']] return final_parts
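An illustration of the collapse rule in _collapse_cursor(); CursorMoveUp stands in for whatever sentinel token the surrounding renderer emits.

parts = ['line 1\n', 'partial output', CursorMoveUp, 'replacement line\n']
# _collapse_cursor(parts) -> ['line 1\n', 'replacement line\n']
# everything emitted since the last newline is dropped when the cursor moves up.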
def convert_batchnorm(net, node, module, builder): """Convert a batchnorm layer from mxnet to coreml. Parameters ---------- network: net A mxnet network object. layer: node Node to convert. module: module An module for MXNet builder: NeuralNetworkBuilder A neural network builder object. """ input_name, output_name = _get_input_output_name(net, node) name = node['name'] inputs = node['inputs'] eps = 1e-3 # Default value of eps for MXNet. use_global_stats = False # Default value of use_global_stats for MXNet. fix_gamma = True # Default value of fix_gamma for MXNet. attrs = _get_attrs(node) if 'eps' in attrs: eps = literal_eval(attrs['eps']) if 'fix_gamma' in attrs: fix_gamma = literal_eval(attrs['fix_gamma']) args, aux = module.get_params() gamma = args[_get_node_name(net, inputs[1][0])].asnumpy() beta = args[_get_node_name(net, inputs[2][0])].asnumpy() mean = aux[_get_node_name(net, inputs[3][0])].asnumpy() variance = aux[_get_node_name(net, inputs[4][0])].asnumpy() nb_channels = gamma.shape[0] if fix_gamma: gamma.fill(1.) builder.add_batchnorm( name=name, channels=nb_channels, gamma=gamma, beta=beta, mean=mean, variance=variance, input_name=input_name, output_name=output_name, epsilon=eps)
def function[convert_batchnorm, parameter[net, node, module, builder]]: constant[Convert a batchnorm layer from mxnet to coreml. Parameters ---------- network: net A mxnet network object. layer: node Node to convert. module: module An module for MXNet builder: NeuralNetworkBuilder A neural network builder object. ] <ast.Tuple object at 0x7da20c991000> assign[=] call[name[_get_input_output_name], parameter[name[net], name[node]]] variable[name] assign[=] call[name[node]][constant[name]] variable[inputs] assign[=] call[name[node]][constant[inputs]] variable[eps] assign[=] constant[0.001] variable[use_global_stats] assign[=] constant[False] variable[fix_gamma] assign[=] constant[True] variable[attrs] assign[=] call[name[_get_attrs], parameter[name[node]]] if compare[constant[eps] in name[attrs]] begin[:] variable[eps] assign[=] call[name[literal_eval], parameter[call[name[attrs]][constant[eps]]]] if compare[constant[fix_gamma] in name[attrs]] begin[:] variable[fix_gamma] assign[=] call[name[literal_eval], parameter[call[name[attrs]][constant[fix_gamma]]]] <ast.Tuple object at 0x7da1b2089660> assign[=] call[name[module].get_params, parameter[]] variable[gamma] assign[=] call[call[name[args]][call[name[_get_node_name], parameter[name[net], call[call[name[inputs]][constant[1]]][constant[0]]]]].asnumpy, parameter[]] variable[beta] assign[=] call[call[name[args]][call[name[_get_node_name], parameter[name[net], call[call[name[inputs]][constant[2]]][constant[0]]]]].asnumpy, parameter[]] variable[mean] assign[=] call[call[name[aux]][call[name[_get_node_name], parameter[name[net], call[call[name[inputs]][constant[3]]][constant[0]]]]].asnumpy, parameter[]] variable[variance] assign[=] call[call[name[aux]][call[name[_get_node_name], parameter[name[net], call[call[name[inputs]][constant[4]]][constant[0]]]]].asnumpy, parameter[]] variable[nb_channels] assign[=] call[name[gamma].shape][constant[0]] if name[fix_gamma] begin[:] call[name[gamma].fill, parameter[constant[1.0]]] call[name[builder].add_batchnorm, parameter[]]
keyword[def] identifier[convert_batchnorm] ( identifier[net] , identifier[node] , identifier[module] , identifier[builder] ): literal[string] identifier[input_name] , identifier[output_name] = identifier[_get_input_output_name] ( identifier[net] , identifier[node] ) identifier[name] = identifier[node] [ literal[string] ] identifier[inputs] = identifier[node] [ literal[string] ] identifier[eps] = literal[int] identifier[use_global_stats] = keyword[False] identifier[fix_gamma] = keyword[True] identifier[attrs] = identifier[_get_attrs] ( identifier[node] ) keyword[if] literal[string] keyword[in] identifier[attrs] : identifier[eps] = identifier[literal_eval] ( identifier[attrs] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[attrs] : identifier[fix_gamma] = identifier[literal_eval] ( identifier[attrs] [ literal[string] ]) identifier[args] , identifier[aux] = identifier[module] . identifier[get_params] () identifier[gamma] = identifier[args] [ identifier[_get_node_name] ( identifier[net] , identifier[inputs] [ literal[int] ][ literal[int] ])]. identifier[asnumpy] () identifier[beta] = identifier[args] [ identifier[_get_node_name] ( identifier[net] , identifier[inputs] [ literal[int] ][ literal[int] ])]. identifier[asnumpy] () identifier[mean] = identifier[aux] [ identifier[_get_node_name] ( identifier[net] , identifier[inputs] [ literal[int] ][ literal[int] ])]. identifier[asnumpy] () identifier[variance] = identifier[aux] [ identifier[_get_node_name] ( identifier[net] , identifier[inputs] [ literal[int] ][ literal[int] ])]. identifier[asnumpy] () identifier[nb_channels] = identifier[gamma] . identifier[shape] [ literal[int] ] keyword[if] identifier[fix_gamma] : identifier[gamma] . identifier[fill] ( literal[int] ) identifier[builder] . identifier[add_batchnorm] ( identifier[name] = identifier[name] , identifier[channels] = identifier[nb_channels] , identifier[gamma] = identifier[gamma] , identifier[beta] = identifier[beta] , identifier[mean] = identifier[mean] , identifier[variance] = identifier[variance] , identifier[input_name] = identifier[input_name] , identifier[output_name] = identifier[output_name] , identifier[epsilon] = identifier[eps] )
def convert_batchnorm(net, node, module, builder): """Convert a batchnorm layer from mxnet to coreml. Parameters ---------- network: net A mxnet network object. layer: node Node to convert. module: module An module for MXNet builder: NeuralNetworkBuilder A neural network builder object. """ (input_name, output_name) = _get_input_output_name(net, node) name = node['name'] inputs = node['inputs'] eps = 0.001 # Default value of eps for MXNet. use_global_stats = False # Default value of use_global_stats for MXNet. fix_gamma = True # Default value of fix_gamma for MXNet. attrs = _get_attrs(node) if 'eps' in attrs: eps = literal_eval(attrs['eps']) # depends on [control=['if'], data=['attrs']] if 'fix_gamma' in attrs: fix_gamma = literal_eval(attrs['fix_gamma']) # depends on [control=['if'], data=['attrs']] (args, aux) = module.get_params() gamma = args[_get_node_name(net, inputs[1][0])].asnumpy() beta = args[_get_node_name(net, inputs[2][0])].asnumpy() mean = aux[_get_node_name(net, inputs[3][0])].asnumpy() variance = aux[_get_node_name(net, inputs[4][0])].asnumpy() nb_channels = gamma.shape[0] if fix_gamma: gamma.fill(1.0) # depends on [control=['if'], data=[]] builder.add_batchnorm(name=name, channels=nb_channels, gamma=gamma, beta=beta, mean=mean, variance=variance, input_name=input_name, output_name=output_name, epsilon=eps)
def _get_xml(xml_str):
    '''
    Interpret the data coming from opennebula and raise if it's not XML.
    '''
    try:
        xml_data = etree.XML(xml_str)
    # XMLSyntaxError seems to be only available from lxml, but that is the xml
    # library loaded by this module
    except etree.XMLSyntaxError as err:
        # opennebula returned invalid XML, which could be an error message, so
        # log it
        raise SaltCloudSystemExit('opennebula returned: {0}'.format(xml_str))
    return xml_data
def function[_get_xml, parameter[xml_str]]:
    constant[
    Interpret the data coming from opennebula and raise if it's not XML.
    ]
    <ast.Try object at 0x7da18bc70550>
    return[name[xml_data]]
keyword[def] identifier[_get_xml] ( identifier[xml_str] ): literal[string] keyword[try] : identifier[xml_data] = identifier[etree] . identifier[XML] ( identifier[xml_str] ) keyword[except] identifier[etree] . identifier[XMLSyntaxError] keyword[as] identifier[err] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[xml_str] )) keyword[return] identifier[xml_data]
def _get_xml(xml_str):
    """
    Interpret the data coming from opennebula and raise if it's not XML.
    """
    try:
        xml_data = etree.XML(xml_str) # depends on [control=['try'], data=[]]
    # XMLSyntaxError seems to be only available from lxml, but that is the xml
    # library loaded by this module
    except etree.XMLSyntaxError as err:
        # opennebula returned invalid XML, which could be an error message, so
        # log it
        raise SaltCloudSystemExit('opennebula returned: {0}'.format(xml_str)) # depends on [control=['except'], data=[]]
    return xml_data
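A hedged usage sketch for _get_xml(); etree here is lxml.etree, as the inline comment notes, and the payloads are invented.

vm = _get_xml('<VM><ID>42</ID></VM>')
vm.findtext('ID')              # -> '42'
_get_xml('error: not xml')     # raises SaltCloudSystemExit carrying the raw response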
def print_tree(root, child_func, prune=0, showtags=0, margin=[0], visited=None): """ Print a tree of nodes. This is like render_tree, except it prints lines directly instead of creating a string representation in memory, so that huge trees can be printed. :Parameters: - `root` - the root node of the tree - `child_func` - the function called to get the children of a node - `prune` - don't visit the same node twice - `showtags` - print status information to the left of each node line - `margin` - the format of the left margin to use for children of root. 1 results in a pipe, and 0 results in no pipe. - `visited` - a dictionary of visited nodes in the current branch if not prune, or in the whole tree if prune. """ rname = str(root) # Initialize 'visited' dict, if required if visited is None: visited = {} if showtags: if showtags == 2: legend = (' E = exists\n' + ' R = exists in repository only\n' + ' b = implicit builder\n' + ' B = explicit builder\n' + ' S = side effect\n' + ' P = precious\n' + ' A = always build\n' + ' C = current\n' + ' N = no clean\n' + ' H = no cache\n' + '\n') sys.stdout.write(legend) tags = ['['] tags.append(' E'[IDX(root.exists())]) tags.append(' R'[IDX(root.rexists() and not root.exists())]) tags.append(' BbB'[[0,1][IDX(root.has_explicit_builder())] + [0,2][IDX(root.has_builder())]]) tags.append(' S'[IDX(root.side_effect)]) tags.append(' P'[IDX(root.precious)]) tags.append(' A'[IDX(root.always_build)]) tags.append(' C'[IDX(root.is_up_to_date())]) tags.append(' N'[IDX(root.noclean)]) tags.append(' H'[IDX(root.nocache)]) tags.append(']') else: tags = [] def MMM(m): return [" ","| "][m] margins = list(map(MMM, margin[:-1])) children = child_func(root) if prune and rname in visited and children: sys.stdout.write(''.join(tags + margins + ['+-[', rname, ']']) + '\n') return sys.stdout.write(''.join(tags + margins + ['+-', rname]) + '\n') visited[rname] = 1 if children: margin.append(1) idx = IDX(showtags) for C in children[:-1]: print_tree(C, child_func, prune, idx, margin, visited) margin[-1] = 0 print_tree(children[-1], child_func, prune, idx, margin, visited) margin.pop()
def function[print_tree, parameter[root, child_func, prune, showtags, margin, visited]]: constant[ Print a tree of nodes. This is like render_tree, except it prints lines directly instead of creating a string representation in memory, so that huge trees can be printed. :Parameters: - `root` - the root node of the tree - `child_func` - the function called to get the children of a node - `prune` - don't visit the same node twice - `showtags` - print status information to the left of each node line - `margin` - the format of the left margin to use for children of root. 1 results in a pipe, and 0 results in no pipe. - `visited` - a dictionary of visited nodes in the current branch if not prune, or in the whole tree if prune. ] variable[rname] assign[=] call[name[str], parameter[name[root]]] if compare[name[visited] is constant[None]] begin[:] variable[visited] assign[=] dictionary[[], []] if name[showtags] begin[:] if compare[name[showtags] equal[==] constant[2]] begin[:] variable[legend] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[ E = exists ] + constant[ R = exists in repository only ]] + constant[ b = implicit builder ]] + constant[ B = explicit builder ]] + constant[ S = side effect ]] + constant[ P = precious ]] + constant[ A = always build ]] + constant[ C = current ]] + constant[ N = no clean ]] + constant[ H = no cache ]] + constant[ ]] call[name[sys].stdout.write, parameter[name[legend]]] variable[tags] assign[=] list[[<ast.Constant object at 0x7da2041da830>]] call[name[tags].append, parameter[call[constant[ E]][call[name[IDX], parameter[call[name[root].exists, parameter[]]]]]]] call[name[tags].append, parameter[call[constant[ R]][call[name[IDX], parameter[<ast.BoolOp object at 0x7da2041d9b40>]]]]] call[name[tags].append, parameter[call[constant[ BbB]][binary_operation[call[list[[<ast.Constant object at 0x7da2041dbd90>, <ast.Constant object at 0x7da2041db8b0>]]][call[name[IDX], parameter[call[name[root].has_explicit_builder, parameter[]]]]] + call[list[[<ast.Constant object at 0x7da2041da1a0>, <ast.Constant object at 0x7da2041db970>]]][call[name[IDX], parameter[call[name[root].has_builder, parameter[]]]]]]]]] call[name[tags].append, parameter[call[constant[ S]][call[name[IDX], parameter[name[root].side_effect]]]]] call[name[tags].append, parameter[call[constant[ P]][call[name[IDX], parameter[name[root].precious]]]]] call[name[tags].append, parameter[call[constant[ A]][call[name[IDX], parameter[name[root].always_build]]]]] call[name[tags].append, parameter[call[constant[ C]][call[name[IDX], parameter[call[name[root].is_up_to_date, parameter[]]]]]]] call[name[tags].append, parameter[call[constant[ N]][call[name[IDX], parameter[name[root].noclean]]]]] call[name[tags].append, parameter[call[constant[ H]][call[name[IDX], parameter[name[root].nocache]]]]] call[name[tags].append, parameter[constant[]]]] def function[MMM, parameter[m]]: return[call[list[[<ast.Constant object at 0x7da2041da920>, <ast.Constant object at 0x7da2041d8a00>]]][name[m]]] variable[margins] assign[=] call[name[list], parameter[call[name[map], parameter[name[MMM], call[name[margin]][<ast.Slice object at 0x7da207f9aa10>]]]]] variable[children] assign[=] call[name[child_func], parameter[name[root]]] if <ast.BoolOp object at 0x7da207f99de0> begin[:] call[name[sys].stdout.write, parameter[binary_operation[call[constant[].join, parameter[binary_operation[binary_operation[name[tags] 
+ name[margins]] + list[[<ast.Constant object at 0x7da207f9b550>, <ast.Name object at 0x7da207f9a680>, <ast.Constant object at 0x7da207f987f0>]]]]] + constant[ ]]]] return[None] call[name[sys].stdout.write, parameter[binary_operation[call[constant[].join, parameter[binary_operation[binary_operation[name[tags] + name[margins]] + list[[<ast.Constant object at 0x7da207f98550>, <ast.Name object at 0x7da207f9a650>]]]]] + constant[ ]]]] call[name[visited]][name[rname]] assign[=] constant[1] if name[children] begin[:] call[name[margin].append, parameter[constant[1]]] variable[idx] assign[=] call[name[IDX], parameter[name[showtags]]] for taget[name[C]] in starred[call[name[children]][<ast.Slice object at 0x7da207f992d0>]] begin[:] call[name[print_tree], parameter[name[C], name[child_func], name[prune], name[idx], name[margin], name[visited]]] call[name[margin]][<ast.UnaryOp object at 0x7da207f9b4f0>] assign[=] constant[0] call[name[print_tree], parameter[call[name[children]][<ast.UnaryOp object at 0x7da207f98f10>], name[child_func], name[prune], name[idx], name[margin], name[visited]]] call[name[margin].pop, parameter[]]
keyword[def] identifier[print_tree] ( identifier[root] , identifier[child_func] , identifier[prune] = literal[int] , identifier[showtags] = literal[int] , identifier[margin] =[ literal[int] ], identifier[visited] = keyword[None] ): literal[string] identifier[rname] = identifier[str] ( identifier[root] ) keyword[if] identifier[visited] keyword[is] keyword[None] : identifier[visited] ={} keyword[if] identifier[showtags] : keyword[if] identifier[showtags] == literal[int] : identifier[legend] =( literal[string] + literal[string] + literal[string] + literal[string] + literal[string] + literal[string] + literal[string] + literal[string] + literal[string] + literal[string] + literal[string] ) identifier[sys] . identifier[stdout] . identifier[write] ( identifier[legend] ) identifier[tags] =[ literal[string] ] identifier[tags] . identifier[append] ( literal[string] [ identifier[IDX] ( identifier[root] . identifier[exists] ())]) identifier[tags] . identifier[append] ( literal[string] [ identifier[IDX] ( identifier[root] . identifier[rexists] () keyword[and] keyword[not] identifier[root] . identifier[exists] ())]) identifier[tags] . identifier[append] ( literal[string] [[ literal[int] , literal[int] ][ identifier[IDX] ( identifier[root] . identifier[has_explicit_builder] ())]+ [ literal[int] , literal[int] ][ identifier[IDX] ( identifier[root] . identifier[has_builder] ())]]) identifier[tags] . identifier[append] ( literal[string] [ identifier[IDX] ( identifier[root] . identifier[side_effect] )]) identifier[tags] . identifier[append] ( literal[string] [ identifier[IDX] ( identifier[root] . identifier[precious] )]) identifier[tags] . identifier[append] ( literal[string] [ identifier[IDX] ( identifier[root] . identifier[always_build] )]) identifier[tags] . identifier[append] ( literal[string] [ identifier[IDX] ( identifier[root] . identifier[is_up_to_date] ())]) identifier[tags] . identifier[append] ( literal[string] [ identifier[IDX] ( identifier[root] . identifier[noclean] )]) identifier[tags] . identifier[append] ( literal[string] [ identifier[IDX] ( identifier[root] . identifier[nocache] )]) identifier[tags] . identifier[append] ( literal[string] ) keyword[else] : identifier[tags] =[] keyword[def] identifier[MMM] ( identifier[m] ): keyword[return] [ literal[string] , literal[string] ][ identifier[m] ] identifier[margins] = identifier[list] ( identifier[map] ( identifier[MMM] , identifier[margin] [:- literal[int] ])) identifier[children] = identifier[child_func] ( identifier[root] ) keyword[if] identifier[prune] keyword[and] identifier[rname] keyword[in] identifier[visited] keyword[and] identifier[children] : identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] . identifier[join] ( identifier[tags] + identifier[margins] +[ literal[string] , identifier[rname] , literal[string] ])+ literal[string] ) keyword[return] identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] . identifier[join] ( identifier[tags] + identifier[margins] +[ literal[string] , identifier[rname] ])+ literal[string] ) identifier[visited] [ identifier[rname] ]= literal[int] keyword[if] identifier[children] : identifier[margin] . 
identifier[append] ( literal[int] ) identifier[idx] = identifier[IDX] ( identifier[showtags] ) keyword[for] identifier[C] keyword[in] identifier[children] [:- literal[int] ]: identifier[print_tree] ( identifier[C] , identifier[child_func] , identifier[prune] , identifier[idx] , identifier[margin] , identifier[visited] ) identifier[margin] [- literal[int] ]= literal[int] identifier[print_tree] ( identifier[children] [- literal[int] ], identifier[child_func] , identifier[prune] , identifier[idx] , identifier[margin] , identifier[visited] ) identifier[margin] . identifier[pop] ()
def print_tree(root, child_func, prune=0, showtags=0, margin=[0], visited=None): """ Print a tree of nodes. This is like render_tree, except it prints lines directly instead of creating a string representation in memory, so that huge trees can be printed. :Parameters: - `root` - the root node of the tree - `child_func` - the function called to get the children of a node - `prune` - don't visit the same node twice - `showtags` - print status information to the left of each node line - `margin` - the format of the left margin to use for children of root. 1 results in a pipe, and 0 results in no pipe. - `visited` - a dictionary of visited nodes in the current branch if not prune, or in the whole tree if prune. """ rname = str(root) # Initialize 'visited' dict, if required if visited is None: visited = {} # depends on [control=['if'], data=['visited']] if showtags: if showtags == 2: legend = ' E = exists\n' + ' R = exists in repository only\n' + ' b = implicit builder\n' + ' B = explicit builder\n' + ' S = side effect\n' + ' P = precious\n' + ' A = always build\n' + ' C = current\n' + ' N = no clean\n' + ' H = no cache\n' + '\n' sys.stdout.write(legend) # depends on [control=['if'], data=[]] tags = ['['] tags.append(' E'[IDX(root.exists())]) tags.append(' R'[IDX(root.rexists() and (not root.exists()))]) tags.append(' BbB'[[0, 1][IDX(root.has_explicit_builder())] + [0, 2][IDX(root.has_builder())]]) tags.append(' S'[IDX(root.side_effect)]) tags.append(' P'[IDX(root.precious)]) tags.append(' A'[IDX(root.always_build)]) tags.append(' C'[IDX(root.is_up_to_date())]) tags.append(' N'[IDX(root.noclean)]) tags.append(' H'[IDX(root.nocache)]) tags.append(']') # depends on [control=['if'], data=[]] else: tags = [] def MMM(m): return [' ', '| '][m] margins = list(map(MMM, margin[:-1])) children = child_func(root) if prune and rname in visited and children: sys.stdout.write(''.join(tags + margins + ['+-[', rname, ']']) + '\n') return # depends on [control=['if'], data=[]] sys.stdout.write(''.join(tags + margins + ['+-', rname]) + '\n') visited[rname] = 1 if children: margin.append(1) idx = IDX(showtags) for C in children[:-1]: print_tree(C, child_func, prune, idx, margin, visited) # depends on [control=['for'], data=['C']] margin[-1] = 0 print_tree(children[-1], child_func, prune, idx, margin, visited) margin.pop() # depends on [control=['if'], data=[]]
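A minimal sketch of print_tree() with a plain dict standing in for real node objects. showtags is left at its default so no node attributes are touched, and the function (together with its module-level IDX helper) is assumed to be importable as-is.

deps = {'prog': ['main.o', 'util.o'], 'main.o': ['main.c'],
        'util.o': ['util.c'], 'main.c': [], 'util.c': []}
print_tree('prog', lambda node: deps[node])
# +-prog
#   +-main.o
#   | +-main.c
#   +-util.o
#     +-util.c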
def qteGetVariableDoc(self, varName: str, module=None): """ Retrieve documentation for ``varName`` defined in ``module``. If ``module`` is **None** then ``qte_global`` will be used. |Args| * ``varName`` (**str**): variable name. * ``module`` (**Python module**): the module in which the variable should be defined. |Returns| **str**: documentation string for ``varName``. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ # Use the global name space per default. if module is None: module = qte_global # No documentation for the variable can exists if the doc # string dictionary is undefined. if not hasattr(module, '_qte__variable__docstring__dictionary__'): return None # If the variable is undefined then return **None**. if varName not in module._qte__variable__docstring__dictionary__: return None # Return the requested value. return module._qte__variable__docstring__dictionary__[varName]
def function[qteGetVariableDoc, parameter[self, varName, module]]: constant[ Retrieve documentation for ``varName`` defined in ``module``. If ``module`` is **None** then ``qte_global`` will be used. |Args| * ``varName`` (**str**): variable name. * ``module`` (**Python module**): the module in which the variable should be defined. |Returns| **str**: documentation string for ``varName``. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. ] if compare[name[module] is constant[None]] begin[:] variable[module] assign[=] name[qte_global] if <ast.UnaryOp object at 0x7da20e9b31c0> begin[:] return[constant[None]] if compare[name[varName] <ast.NotIn object at 0x7da2590d7190> name[module]._qte__variable__docstring__dictionary__] begin[:] return[constant[None]] return[call[name[module]._qte__variable__docstring__dictionary__][name[varName]]]
keyword[def] identifier[qteGetVariableDoc] ( identifier[self] , identifier[varName] : identifier[str] , identifier[module] = keyword[None] ): literal[string] keyword[if] identifier[module] keyword[is] keyword[None] : identifier[module] = identifier[qte_global] keyword[if] keyword[not] identifier[hasattr] ( identifier[module] , literal[string] ): keyword[return] keyword[None] keyword[if] identifier[varName] keyword[not] keyword[in] identifier[module] . identifier[_qte__variable__docstring__dictionary__] : keyword[return] keyword[None] keyword[return] identifier[module] . identifier[_qte__variable__docstring__dictionary__] [ identifier[varName] ]
def qteGetVariableDoc(self, varName: str, module=None): """ Retrieve documentation for ``varName`` defined in ``module``. If ``module`` is **None** then ``qte_global`` will be used. |Args| * ``varName`` (**str**): variable name. * ``module`` (**Python module**): the module in which the variable should be defined. |Returns| **str**: documentation string for ``varName``. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ # Use the global name space per default. if module is None: module = qte_global # depends on [control=['if'], data=['module']] # No documentation for the variable can exists if the doc # string dictionary is undefined. if not hasattr(module, '_qte__variable__docstring__dictionary__'): return None # depends on [control=['if'], data=[]] # If the variable is undefined then return **None**. if varName not in module._qte__variable__docstring__dictionary__: return None # depends on [control=['if'], data=[]] # Return the requested value. return module._qte__variable__docstring__dictionary__[varName]
def setup_prjs_page(self, ): """Create and set the model on the projects page :returns: None :rtype: None :raises: None """ self.prjs_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) log.debug("Loading projects for projects page.") rootdata = treemodel.ListItemData(['Name', 'Short', 'Path', 'Created', 'Semester', 'Status', 'Resolution', 'FPS', 'Scale']) rootitem = treemodel.TreeItem(rootdata) prjs = djadapter.projects.all() for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) treemodel.TreeItem(prjdata, rootitem) self.prjs_model = treemodel.TreeModel(rootitem) self.prjs_tablev.setModel(self.prjs_model)
def function[setup_prjs_page, parameter[self]]: constant[Create and set the model on the projects page :returns: None :rtype: None :raises: None ] call[call[name[self].prjs_tablev.horizontalHeader, parameter[]].setResizeMode, parameter[name[QtGui].QHeaderView.ResizeToContents]] call[name[log].debug, parameter[constant[Loading projects for projects page.]]] variable[rootdata] assign[=] call[name[treemodel].ListItemData, parameter[list[[<ast.Constant object at 0x7da1b1627d60>, <ast.Constant object at 0x7da1b16261a0>, <ast.Constant object at 0x7da1b1627b50>, <ast.Constant object at 0x7da1b1624790>, <ast.Constant object at 0x7da1b1625cc0>, <ast.Constant object at 0x7da1b1627ca0>, <ast.Constant object at 0x7da1b1626d70>, <ast.Constant object at 0x7da1b1627b20>, <ast.Constant object at 0x7da1b1624460>]]]] variable[rootitem] assign[=] call[name[treemodel].TreeItem, parameter[name[rootdata]]] variable[prjs] assign[=] call[name[djadapter].projects.all, parameter[]] for taget[name[prj]] in starred[name[prjs]] begin[:] variable[prjdata] assign[=] call[name[djitemdata].ProjectItemData, parameter[name[prj]]] call[name[treemodel].TreeItem, parameter[name[prjdata], name[rootitem]]] name[self].prjs_model assign[=] call[name[treemodel].TreeModel, parameter[name[rootitem]]] call[name[self].prjs_tablev.setModel, parameter[name[self].prjs_model]]
keyword[def] identifier[setup_prjs_page] ( identifier[self] ,): literal[string] identifier[self] . identifier[prjs_tablev] . identifier[horizontalHeader] (). identifier[setResizeMode] ( identifier[QtGui] . identifier[QHeaderView] . identifier[ResizeToContents] ) identifier[log] . identifier[debug] ( literal[string] ) identifier[rootdata] = identifier[treemodel] . identifier[ListItemData] ([ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[rootitem] = identifier[treemodel] . identifier[TreeItem] ( identifier[rootdata] ) identifier[prjs] = identifier[djadapter] . identifier[projects] . identifier[all] () keyword[for] identifier[prj] keyword[in] identifier[prjs] : identifier[prjdata] = identifier[djitemdata] . identifier[ProjectItemData] ( identifier[prj] ) identifier[treemodel] . identifier[TreeItem] ( identifier[prjdata] , identifier[rootitem] ) identifier[self] . identifier[prjs_model] = identifier[treemodel] . identifier[TreeModel] ( identifier[rootitem] ) identifier[self] . identifier[prjs_tablev] . identifier[setModel] ( identifier[self] . identifier[prjs_model] )
def setup_prjs_page(self): """Create and set the model on the projects page :returns: None :rtype: None :raises: None """ self.prjs_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) log.debug('Loading projects for projects page.') rootdata = treemodel.ListItemData(['Name', 'Short', 'Path', 'Created', 'Semester', 'Status', 'Resolution', 'FPS', 'Scale']) rootitem = treemodel.TreeItem(rootdata) prjs = djadapter.projects.all() for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) treemodel.TreeItem(prjdata, rootitem) # depends on [control=['for'], data=['prj']] self.prjs_model = treemodel.TreeModel(rootitem) self.prjs_tablev.setModel(self.prjs_model)
def preprocess_ubuntu_corpus(df): """Split all strings in df.Context and df.Utterance on __eot__ (turn) markers """ statements = [] replies = [] for i, record in tqdm(df.iterrows()): turns = list(split_turns(record.Context)) statement = turns[-1] if len(turns) else '\n' # <1> statements.append(statement) turns = list(split_turns(record.Utterance)) reply = turns[-1] if len(turns) else '\n' replies.append(reply) df['statement'] = statements df['reply'] = replies return df
def function[preprocess_ubuntu_corpus, parameter[df]]: constant[Split all strings in df.Context and df.Utterance on __eot__ (turn) markers ] variable[statements] assign[=] list[[]] variable[replies] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18f8130d0>, <ast.Name object at 0x7da18f8106a0>]]] in starred[call[name[tqdm], parameter[call[name[df].iterrows, parameter[]]]]] begin[:] variable[turns] assign[=] call[name[list], parameter[call[name[split_turns], parameter[name[record].Context]]]] variable[statement] assign[=] <ast.IfExp object at 0x7da18f812770> call[name[statements].append, parameter[name[statement]]] variable[turns] assign[=] call[name[list], parameter[call[name[split_turns], parameter[name[record].Utterance]]]] variable[reply] assign[=] <ast.IfExp object at 0x7da18f812bc0> call[name[replies].append, parameter[name[reply]]] call[name[df]][constant[statement]] assign[=] name[statements] call[name[df]][constant[reply]] assign[=] name[replies] return[name[df]]
keyword[def] identifier[preprocess_ubuntu_corpus] ( identifier[df] ): literal[string] identifier[statements] =[] identifier[replies] =[] keyword[for] identifier[i] , identifier[record] keyword[in] identifier[tqdm] ( identifier[df] . identifier[iterrows] ()): identifier[turns] = identifier[list] ( identifier[split_turns] ( identifier[record] . identifier[Context] )) identifier[statement] = identifier[turns] [- literal[int] ] keyword[if] identifier[len] ( identifier[turns] ) keyword[else] literal[string] identifier[statements] . identifier[append] ( identifier[statement] ) identifier[turns] = identifier[list] ( identifier[split_turns] ( identifier[record] . identifier[Utterance] )) identifier[reply] = identifier[turns] [- literal[int] ] keyword[if] identifier[len] ( identifier[turns] ) keyword[else] literal[string] identifier[replies] . identifier[append] ( identifier[reply] ) identifier[df] [ literal[string] ]= identifier[statements] identifier[df] [ literal[string] ]= identifier[replies] keyword[return] identifier[df]
def preprocess_ubuntu_corpus(df): """Split all strings in df.Context and df.Utterance on __eot__ (turn) markers """ statements = [] replies = [] for (i, record) in tqdm(df.iterrows()): turns = list(split_turns(record.Context)) statement = turns[-1] if len(turns) else '\n' # <1> statements.append(statement) turns = list(split_turns(record.Utterance)) reply = turns[-1] if len(turns) else '\n' replies.append(reply) # depends on [control=['for'], data=[]] df['statement'] = statements df['reply'] = replies return df
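A small sketch of what preprocess_ubuntu_corpus() adds to a frame. split_turns() is not shown above and is assumed to yield the __eot__-delimited turns; pandas and tqdm are assumed to be installed.

import pandas as pd

df = pd.DataFrame({'Context': ['hi __eot__ how are you __eot__'],
                   'Utterance': ['fine, thanks __eot__']})
df = preprocess_ubuntu_corpus(df)
# df['statement'] holds the last turn of each Context,
# df['reply'] holds the last turn of each Utterance.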
def mount(self, app, script_path): ''' Mount a Bottle application to a specific URL prefix ''' if not isinstance(app, Bottle): raise TypeError('Only Bottle instances are supported for now.') script_path = '/'.join(filter(None, script_path.split('/'))) path_depth = script_path.count('/') + 1 if not script_path: raise TypeError('Empty script_path. Perhaps you want a merge()?') for other in self.mounts: if other.startswith(script_path): raise TypeError('Conflict with existing mount: %s' % other) @self.route('/%s/:#.*#' % script_path, method="ANY") def mountpoint(): request.path_shift(path_depth) return app.handle(request.path, request.method) self.mounts[script_path] = app
def function[mount, parameter[self, app, script_path]]: constant[ Mount a Bottle application to a specific URL prefix ] if <ast.UnaryOp object at 0x7da20c6e6980> begin[:] <ast.Raise object at 0x7da20c6e5750> variable[script_path] assign[=] call[constant[/].join, parameter[call[name[filter], parameter[constant[None], call[name[script_path].split, parameter[constant[/]]]]]]] variable[path_depth] assign[=] binary_operation[call[name[script_path].count, parameter[constant[/]]] + constant[1]] if <ast.UnaryOp object at 0x7da20c6e56c0> begin[:] <ast.Raise object at 0x7da20c6e6f80> for taget[name[other]] in starred[name[self].mounts] begin[:] if call[name[other].startswith, parameter[name[script_path]]] begin[:] <ast.Raise object at 0x7da20c6e6e60> def function[mountpoint, parameter[]]: call[name[request].path_shift, parameter[name[path_depth]]] return[call[name[app].handle, parameter[name[request].path, name[request].method]]] call[name[self].mounts][name[script_path]] assign[=] name[app]
keyword[def] identifier[mount] ( identifier[self] , identifier[app] , identifier[script_path] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[app] , identifier[Bottle] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[script_path] = literal[string] . identifier[join] ( identifier[filter] ( keyword[None] , identifier[script_path] . identifier[split] ( literal[string] ))) identifier[path_depth] = identifier[script_path] . identifier[count] ( literal[string] )+ literal[int] keyword[if] keyword[not] identifier[script_path] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[for] identifier[other] keyword[in] identifier[self] . identifier[mounts] : keyword[if] identifier[other] . identifier[startswith] ( identifier[script_path] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[other] ) @ identifier[self] . identifier[route] ( literal[string] % identifier[script_path] , identifier[method] = literal[string] ) keyword[def] identifier[mountpoint] (): identifier[request] . identifier[path_shift] ( identifier[path_depth] ) keyword[return] identifier[app] . identifier[handle] ( identifier[request] . identifier[path] , identifier[request] . identifier[method] ) identifier[self] . identifier[mounts] [ identifier[script_path] ]= identifier[app]
def mount(self, app, script_path): """ Mount a Bottle application to a specific URL prefix """ if not isinstance(app, Bottle): raise TypeError('Only Bottle instances are supported for now.') # depends on [control=['if'], data=[]] script_path = '/'.join(filter(None, script_path.split('/'))) path_depth = script_path.count('/') + 1 if not script_path: raise TypeError('Empty script_path. Perhaps you want a merge()?') # depends on [control=['if'], data=[]] for other in self.mounts: if other.startswith(script_path): raise TypeError('Conflict with existing mount: %s' % other) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['other']] @self.route('/%s/:#.*#' % script_path, method='ANY') def mountpoint(): request.path_shift(path_depth) return app.handle(request.path, request.method) self.mounts[script_path] = app
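A usage sketch matching the (older) signature shown above, where the sub-application comes first and the URL prefix second; later Bottle releases take the prefix first, so this only illustrates the method as written.

admin = Bottle()

@admin.route('/status')
def status():
    return 'ok'

root = Bottle()
root.mount(admin, '/admin')    # requests to /admin/status are now handled by the admin app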
def add_virtual_columns_equatorial_to_galactic_cartesian(self, alpha, delta, distance, xname, yname, zname, radians=True, alpha_gp=np.radians(192.85948), delta_gp=np.radians(27.12825), l_omega=np.radians(32.93192)): """From http://arxiv.org/pdf/1306.2945v2.pdf""" if not radians: alpha = "pi/180.*%s" % alpha delta = "pi/180.*%s" % delta self.virtual_columns[zname] = "{distance} * (cos({delta}) * cos({delta_gp}) * cos({alpha} - {alpha_gp}) + sin({delta}) * sin({delta_gp}))".format(**locals()) self.virtual_columns[xname] = "{distance} * (cos({delta}) * sin({alpha} - {alpha_gp}))".format(**locals()) self.virtual_columns[yname] = "{distance} * (sin({delta}) * cos({delta_gp}) - cos({delta}) * sin({delta_gp}) * cos({alpha} - {alpha_gp}))".format(**locals())
def function[add_virtual_columns_equatorial_to_galactic_cartesian, parameter[self, alpha, delta, distance, xname, yname, zname, radians, alpha_gp, delta_gp, l_omega]]: constant[From http://arxiv.org/pdf/1306.2945v2.pdf] if <ast.UnaryOp object at 0x7da1b2345ab0> begin[:] variable[alpha] assign[=] binary_operation[constant[pi/180.*%s] <ast.Mod object at 0x7da2590d6920> name[alpha]] variable[delta] assign[=] binary_operation[constant[pi/180.*%s] <ast.Mod object at 0x7da2590d6920> name[delta]] call[name[self].virtual_columns][name[zname]] assign[=] call[constant[{distance} * (cos({delta}) * cos({delta_gp}) * cos({alpha} - {alpha_gp}) + sin({delta}) * sin({delta_gp}))].format, parameter[]] call[name[self].virtual_columns][name[xname]] assign[=] call[constant[{distance} * (cos({delta}) * sin({alpha} - {alpha_gp}))].format, parameter[]] call[name[self].virtual_columns][name[yname]] assign[=] call[constant[{distance} * (sin({delta}) * cos({delta_gp}) - cos({delta}) * sin({delta_gp}) * cos({alpha} - {alpha_gp}))].format, parameter[]]
keyword[def] identifier[add_virtual_columns_equatorial_to_galactic_cartesian] ( identifier[self] , identifier[alpha] , identifier[delta] , identifier[distance] , identifier[xname] , identifier[yname] , identifier[zname] , identifier[radians] = keyword[True] , identifier[alpha_gp] = identifier[np] . identifier[radians] ( literal[int] ), identifier[delta_gp] = identifier[np] . identifier[radians] ( literal[int] ), identifier[l_omega] = identifier[np] . identifier[radians] ( literal[int] )): literal[string] keyword[if] keyword[not] identifier[radians] : identifier[alpha] = literal[string] % identifier[alpha] identifier[delta] = literal[string] % identifier[delta] identifier[self] . identifier[virtual_columns] [ identifier[zname] ]= literal[string] . identifier[format] (** identifier[locals] ()) identifier[self] . identifier[virtual_columns] [ identifier[xname] ]= literal[string] . identifier[format] (** identifier[locals] ()) identifier[self] . identifier[virtual_columns] [ identifier[yname] ]= literal[string] . identifier[format] (** identifier[locals] ())
def add_virtual_columns_equatorial_to_galactic_cartesian(self, alpha, delta, distance, xname, yname, zname, radians=True, alpha_gp=np.radians(192.85948), delta_gp=np.radians(27.12825), l_omega=np.radians(32.93192)): """From http://arxiv.org/pdf/1306.2945v2.pdf""" if not radians: alpha = 'pi/180.*%s' % alpha delta = 'pi/180.*%s' % delta # depends on [control=['if'], data=[]] self.virtual_columns[zname] = '{distance} * (cos({delta}) * cos({delta_gp}) * cos({alpha} - {alpha_gp}) + sin({delta}) * sin({delta_gp}))'.format(**locals()) self.virtual_columns[xname] = '{distance} * (cos({delta}) * sin({alpha} - {alpha_gp}))'.format(**locals()) self.virtual_columns[yname] = '{distance} * (sin({delta}) * cos({delta_gp}) - cos({delta}) * sin({delta_gp}) * cos({alpha} - {alpha_gp}))'.format(**locals())
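A quick numerical sanity check of the same expressions, evaluated directly with NumPy for a single point. The test coordinates are roughly the J2000 Galactic centre, so the z expression (distance times the sine of Galactic latitude) should come out near zero.

import numpy as np

alpha_gp, delta_gp = np.radians(192.85948), np.radians(27.12825)
alpha, delta, distance = np.radians(266.405), np.radians(-28.936), 1.0
z = distance * (np.cos(delta) * np.cos(delta_gp) * np.cos(alpha - alpha_gp)
                + np.sin(delta) * np.sin(delta_gp))
print(abs(z) < 1e-2)   # True: the point sits in the Galactic plane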
def get_input_files(self): """ Return list of input files for this DAG node and its job. """ input_files = list(self.__input_files) if isinstance(self.job(), CondorDAGJob): input_files = input_files + self.job().get_input_files() return input_files
def function[get_input_files, parameter[self]]: constant[ Return list of input files for this DAG node and its job. ] variable[input_files] assign[=] call[name[list], parameter[name[self].__input_files]] if call[name[isinstance], parameter[call[name[self].job, parameter[]], name[CondorDAGJob]]] begin[:] variable[input_files] assign[=] binary_operation[name[input_files] + call[call[name[self].job, parameter[]].get_input_files, parameter[]]] return[name[input_files]]
keyword[def] identifier[get_input_files] ( identifier[self] ): literal[string] identifier[input_files] = identifier[list] ( identifier[self] . identifier[__input_files] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[job] (), identifier[CondorDAGJob] ): identifier[input_files] = identifier[input_files] + identifier[self] . identifier[job] (). identifier[get_input_files] () keyword[return] identifier[input_files]
def get_input_files(self): """ Return list of input files for this DAG node and its job. """ input_files = list(self.__input_files) if isinstance(self.job(), CondorDAGJob): input_files = input_files + self.job().get_input_files() # depends on [control=['if'], data=[]] return input_files
def mdr_mutual_information(X, Y, labels, base=2): """Calculates the MDR mutual information, I(XY;labels), in the given base MDR mutual information is calculated by combining variables X and Y into a single MDR model then calculating the mutual information between the resulting model's predictions and the labels. Parameters ---------- X: array-like (# samples) An array of values corresponding to one feature in the MDR model Y: array-like (# samples) An array of values corresponding to one feature in the MDR model labels: array-like (# samples) The class labels corresponding to features X and Y base: integer (default: 2) The base in which to calculate MDR mutual information Returns ---------- mdr_mutual_information: float The MDR mutual information calculated according to the equation I(XY;labels) = H(labels) - H(labels|XY) """ return mutual_information(_mdr_predict(X, Y, labels), labels, base=base)
def function[mdr_mutual_information, parameter[X, Y, labels, base]]: constant[Calculates the MDR mutual information, I(XY;labels), in the given base MDR mutual information is calculated by combining variables X and Y into a single MDR model then calculating the mutual information between the resulting model's predictions and the labels. Parameters ---------- X: array-like (# samples) An array of values corresponding to one feature in the MDR model Y: array-like (# samples) An array of values corresponding to one feature in the MDR model labels: array-like (# samples) The class labels corresponding to features X and Y base: integer (default: 2) The base in which to calculate MDR mutual information Returns ---------- mdr_mutual_information: float The MDR mutual information calculated according to the equation I(XY;labels) = H(labels) - H(labels|XY) ] return[call[name[mutual_information], parameter[call[name[_mdr_predict], parameter[name[X], name[Y], name[labels]]], name[labels]]]]
keyword[def] identifier[mdr_mutual_information] ( identifier[X] , identifier[Y] , identifier[labels] , identifier[base] = literal[int] ): literal[string] keyword[return] identifier[mutual_information] ( identifier[_mdr_predict] ( identifier[X] , identifier[Y] , identifier[labels] ), identifier[labels] , identifier[base] = identifier[base] )
def mdr_mutual_information(X, Y, labels, base=2): """Calculates the MDR mutual information, I(XY;labels), in the given base MDR mutual information is calculated by combining variables X and Y into a single MDR model then calculating the mutual information between the resulting model's predictions and the labels. Parameters ---------- X: array-like (# samples) An array of values corresponding to one feature in the MDR model Y: array-like (# samples) An array of values corresponding to one feature in the MDR model labels: array-like (# samples) The class labels corresponding to features X and Y base: integer (default: 2) The base in which to calculate MDR mutual information Returns ---------- mdr_mutual_information: float The MDR mutual information calculated according to the equation I(XY;labels) = H(labels) - H(labels|XY) """ return mutual_information(_mdr_predict(X, Y, labels), labels, base=base)
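A toy check for mdr_mutual_information() on an XOR-style pattern where neither feature alone predicts the label but the pair does. Assuming the underlying MDR model resolves the four cells cleanly, the result should be close to H(labels) = 1 bit; exact tie handling inside _mdr_predict may nudge it.

import numpy as np

X = np.array([0, 0, 1, 1])
Y = np.array([0, 1, 0, 1])
labels = np.array([0, 1, 1, 0])
print(mdr_mutual_information(X, Y, labels))   # expected ~1.0 with the default base of 2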
def submit(self, executor, task, tag=None): """Submits a task to a provided executor :type executor: s3transfer.futures.BoundedExecutor :param executor: The executor to submit the callable to :type task: s3transfer.tasks.Task :param task: The task to submit to the executor :type tag: s3transfer.futures.TaskTag :param tag: A tag to associate to the submitted task :rtype: concurrent.futures.Future :returns: A future representing the submitted task """ logger.debug( "Submitting task %s to executor %s for transfer request: %s." % ( task, executor, self.transfer_id) ) future = executor.submit(task, tag=tag) # Add this created future to the list of associated future just # in case it is needed during cleanups. self.add_associated_future(future) future.add_done_callback( FunctionContainer(self.remove_associated_future, future)) return future
def function[submit, parameter[self, executor, task, tag]]: constant[Submits a task to a provided executor :type executor: s3transfer.futures.BoundedExecutor :param executor: The executor to submit the callable to :type task: s3transfer.tasks.Task :param task: The task to submit to the executor :type tag: s3transfer.futures.TaskTag :param tag: A tag to associate to the submitted task :rtype: concurrent.futures.Future :returns: A future representing the submitted task ] call[name[logger].debug, parameter[binary_operation[constant[Submitting task %s to executor %s for transfer request: %s.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cd750>, <ast.Name object at 0x7da18c4cfd30>, <ast.Attribute object at 0x7da18c4cfaf0>]]]]] variable[future] assign[=] call[name[executor].submit, parameter[name[task]]] call[name[self].add_associated_future, parameter[name[future]]] call[name[future].add_done_callback, parameter[call[name[FunctionContainer], parameter[name[self].remove_associated_future, name[future]]]]] return[name[future]]
keyword[def] identifier[submit] ( identifier[self] , identifier[executor] , identifier[task] , identifier[tag] = keyword[None] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] %( identifier[task] , identifier[executor] , identifier[self] . identifier[transfer_id] ) ) identifier[future] = identifier[executor] . identifier[submit] ( identifier[task] , identifier[tag] = identifier[tag] ) identifier[self] . identifier[add_associated_future] ( identifier[future] ) identifier[future] . identifier[add_done_callback] ( identifier[FunctionContainer] ( identifier[self] . identifier[remove_associated_future] , identifier[future] )) keyword[return] identifier[future]
def submit(self, executor, task, tag=None): """Submits a task to a provided executor :type executor: s3transfer.futures.BoundedExecutor :param executor: The executor to submit the callable to :type task: s3transfer.tasks.Task :param task: The task to submit to the executor :type tag: s3transfer.futures.TaskTag :param tag: A tag to associate to the submitted task :rtype: concurrent.futures.Future :returns: A future representing the submitted task """ logger.debug('Submitting task %s to executor %s for transfer request: %s.' % (task, executor, self.transfer_id)) future = executor.submit(task, tag=tag) # Add this created future to the list of associated future just # in case it is needed during cleanups. self.add_associated_future(future) future.add_done_callback(FunctionContainer(self.remove_associated_future, future)) return future
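The `submit` record follows a common pattern: hand work to an executor, remember the future for cleanup, and detach it once the task finishes. A hedged standard-library sketch of the same pattern is below; `associated_futures` and `task` are illustrative names, and none of this is the s3transfer API itself.

# Illustrative future-tracking pattern with concurrent.futures, not s3transfer.
from concurrent.futures import ThreadPoolExecutor

associated_futures = set()

def task():
    return "transferred"

with ThreadPoolExecutor(max_workers=2) as executor:
    future = executor.submit(task)
    associated_futures.add(future)
    # drop the bookkeeping entry as soon as the task completes
    future.add_done_callback(lambda f: associated_futures.discard(f))
    print(future.result())        # "transferred"

print(associated_futures)         # set(): the done callback has removed the future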
def register_actor(name, actor_handle): """Register a named actor under a string key. Args: name: The name of the named actor. actor_handle: The actor object to be associated with this name """ if not isinstance(name, str): raise TypeError("The name argument must be a string.") if not isinstance(actor_handle, ray.actor.ActorHandle): raise TypeError("The actor_handle argument must be an ActorHandle " "object.") actor_name = _calculate_key(name) pickled_state = pickle.dumps(actor_handle) # Add the actor to Redis if it does not already exist. already_exists = _internal_kv_put(actor_name, pickled_state) if already_exists: # If the registration fails, then erase the new actor handle that # was added when pickling the actor handle. actor_handle._ray_new_actor_handles.pop() raise ValueError( "Error: the actor with name={} already exists".format(name))
def function[register_actor, parameter[name, actor_handle]]: constant[Register a named actor under a string key. Args: name: The name of the named actor. actor_handle: The actor object to be associated with this name ] if <ast.UnaryOp object at 0x7da20e9b1c90> begin[:] <ast.Raise object at 0x7da20e9b18d0> if <ast.UnaryOp object at 0x7da20e9b19f0> begin[:] <ast.Raise object at 0x7da20e9b3e80> variable[actor_name] assign[=] call[name[_calculate_key], parameter[name[name]]] variable[pickled_state] assign[=] call[name[pickle].dumps, parameter[name[actor_handle]]] variable[already_exists] assign[=] call[name[_internal_kv_put], parameter[name[actor_name], name[pickled_state]]] if name[already_exists] begin[:] call[name[actor_handle]._ray_new_actor_handles.pop, parameter[]] <ast.Raise object at 0x7da20e9b22f0>
keyword[def] identifier[register_actor] ( identifier[name] , identifier[actor_handle] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[name] , identifier[str] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[actor_handle] , identifier[ray] . identifier[actor] . identifier[ActorHandle] ): keyword[raise] identifier[TypeError] ( literal[string] literal[string] ) identifier[actor_name] = identifier[_calculate_key] ( identifier[name] ) identifier[pickled_state] = identifier[pickle] . identifier[dumps] ( identifier[actor_handle] ) identifier[already_exists] = identifier[_internal_kv_put] ( identifier[actor_name] , identifier[pickled_state] ) keyword[if] identifier[already_exists] : identifier[actor_handle] . identifier[_ray_new_actor_handles] . identifier[pop] () keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] ))
def register_actor(name, actor_handle): """Register a named actor under a string key. Args: name: The name of the named actor. actor_handle: The actor object to be associated with this name """ if not isinstance(name, str): raise TypeError('The name argument must be a string.') # depends on [control=['if'], data=[]] if not isinstance(actor_handle, ray.actor.ActorHandle): raise TypeError('The actor_handle argument must be an ActorHandle object.') # depends on [control=['if'], data=[]] actor_name = _calculate_key(name) pickled_state = pickle.dumps(actor_handle) # Add the actor to Redis if it does not already exist. already_exists = _internal_kv_put(actor_name, pickled_state) if already_exists: # If the registration fails, then erase the new actor handle that # was added when pickling the actor handle. actor_handle._ray_new_actor_handles.pop() raise ValueError('Error: the actor with name={} already exists'.format(name)) # depends on [control=['if'], data=[]]
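The essence of `register_actor` is a check-and-set registration of a pickled handle under a string key. The sketch below shows that general pattern with an in-memory dict standing in for Ray's internal key-value store; `register_once` and `_registry` are made-up illustrative names, not Ray's API.

# "Register once under a string key" pattern; the dict stands in for a KV store.
import pickle

_registry = {}

def register_once(name, obj):
    if not isinstance(name, str):
        raise TypeError("The name argument must be a string.")
    key = "named_object:" + name
    if key in _registry:                       # the "already exists" check
        raise ValueError("Error: the object with name={} already exists".format(name))
    _registry[key] = pickle.dumps(obj)

register_once("worker", {"role": "chief"})
try:
    register_once("worker", {"role": "other"})
except ValueError as exc:
    print(exc)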
def build_cpp(build_context, target, compiler_config, workspace_dir): """Compile and link a C++ binary for `target`.""" rmtree(workspace_dir) binary = join(*split(target.name)) objects = link_cpp_artifacts(build_context, target, workspace_dir, True) buildenv_workspace = build_context.conf.host_to_buildenv_path( workspace_dir) objects.extend(compile_cc( build_context, compiler_config, target.props.in_buildenv, get_source_files(target, build_context), workspace_dir, buildenv_workspace, target.props.cmd_env)) bin_file = join(buildenv_workspace, binary) link_cmd = ( [compiler_config.linker, '-o', bin_file] + objects + compiler_config.link_flags) build_context.run_in_buildenv( target.props.in_buildenv, link_cmd, target.props.cmd_env) target.artifacts.add(AT.binary, relpath(join(workspace_dir, binary), build_context.conf.project_root), binary)
def function[build_cpp, parameter[build_context, target, compiler_config, workspace_dir]]: constant[Compile and link a C++ binary for `target`.] call[name[rmtree], parameter[name[workspace_dir]]] variable[binary] assign[=] call[name[join], parameter[<ast.Starred object at 0x7da1b1b68460>]] variable[objects] assign[=] call[name[link_cpp_artifacts], parameter[name[build_context], name[target], name[workspace_dir], constant[True]]] variable[buildenv_workspace] assign[=] call[name[build_context].conf.host_to_buildenv_path, parameter[name[workspace_dir]]] call[name[objects].extend, parameter[call[name[compile_cc], parameter[name[build_context], name[compiler_config], name[target].props.in_buildenv, call[name[get_source_files], parameter[name[target], name[build_context]]], name[workspace_dir], name[buildenv_workspace], name[target].props.cmd_env]]]] variable[bin_file] assign[=] call[name[join], parameter[name[buildenv_workspace], name[binary]]] variable[link_cmd] assign[=] binary_operation[binary_operation[list[[<ast.Attribute object at 0x7da1b1b6b820>, <ast.Constant object at 0x7da1b1b68130>, <ast.Name object at 0x7da1b1b68610>]] + name[objects]] + name[compiler_config].link_flags] call[name[build_context].run_in_buildenv, parameter[name[target].props.in_buildenv, name[link_cmd], name[target].props.cmd_env]] call[name[target].artifacts.add, parameter[name[AT].binary, call[name[relpath], parameter[call[name[join], parameter[name[workspace_dir], name[binary]]], name[build_context].conf.project_root]], name[binary]]]
keyword[def] identifier[build_cpp] ( identifier[build_context] , identifier[target] , identifier[compiler_config] , identifier[workspace_dir] ): literal[string] identifier[rmtree] ( identifier[workspace_dir] ) identifier[binary] = identifier[join] (* identifier[split] ( identifier[target] . identifier[name] )) identifier[objects] = identifier[link_cpp_artifacts] ( identifier[build_context] , identifier[target] , identifier[workspace_dir] , keyword[True] ) identifier[buildenv_workspace] = identifier[build_context] . identifier[conf] . identifier[host_to_buildenv_path] ( identifier[workspace_dir] ) identifier[objects] . identifier[extend] ( identifier[compile_cc] ( identifier[build_context] , identifier[compiler_config] , identifier[target] . identifier[props] . identifier[in_buildenv] , identifier[get_source_files] ( identifier[target] , identifier[build_context] ), identifier[workspace_dir] , identifier[buildenv_workspace] , identifier[target] . identifier[props] . identifier[cmd_env] )) identifier[bin_file] = identifier[join] ( identifier[buildenv_workspace] , identifier[binary] ) identifier[link_cmd] =( [ identifier[compiler_config] . identifier[linker] , literal[string] , identifier[bin_file] ]+ identifier[objects] + identifier[compiler_config] . identifier[link_flags] ) identifier[build_context] . identifier[run_in_buildenv] ( identifier[target] . identifier[props] . identifier[in_buildenv] , identifier[link_cmd] , identifier[target] . identifier[props] . identifier[cmd_env] ) identifier[target] . identifier[artifacts] . identifier[add] ( identifier[AT] . identifier[binary] , identifier[relpath] ( identifier[join] ( identifier[workspace_dir] , identifier[binary] ), identifier[build_context] . identifier[conf] . identifier[project_root] ), identifier[binary] )
def build_cpp(build_context, target, compiler_config, workspace_dir): """Compile and link a C++ binary for `target`.""" rmtree(workspace_dir) binary = join(*split(target.name)) objects = link_cpp_artifacts(build_context, target, workspace_dir, True) buildenv_workspace = build_context.conf.host_to_buildenv_path(workspace_dir) objects.extend(compile_cc(build_context, compiler_config, target.props.in_buildenv, get_source_files(target, build_context), workspace_dir, buildenv_workspace, target.props.cmd_env)) bin_file = join(buildenv_workspace, binary) link_cmd = [compiler_config.linker, '-o', bin_file] + objects + compiler_config.link_flags build_context.run_in_buildenv(target.props.in_buildenv, link_cmd, target.props.cmd_env) target.artifacts.add(AT.binary, relpath(join(workspace_dir, binary), build_context.conf.project_root), binary)
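The central step in `build_cpp` is assembling the link command line from the linker, the output path, the object files, and the link flags. A small sketch of just that assembly is below; the paths and flags are placeholders, nothing is executed, and the build-context machinery is omitted.

# Sketch of assembling a C++ link command line; placeholders only, not executed.
linker = "g++"
bin_file = "/build/workspace/app/mybinary"
objects = ["/build/workspace/app/main.o", "/build/workspace/app/util.o"]
link_flags = ["-pthread"]

link_cmd = [linker, "-o", bin_file] + objects + link_flags
print(" ".join(link_cmd))
# if desired, run it with: subprocess.run(link_cmd, check=True)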
def get_version(version=None): """Returns a PEP 386-compliant version number from VERSION. :param version: A tuple that represent a version. :type version: tuple :returns: a PEP 386-compliant version number. :rtype: str """ if version is None: version_list = inasafe_version.split('.') version = tuple(version_list + [inasafe_release_status] + ['0']) if len(version) != 5: msg = 'Version must be a tuple of length 5. I got %s' % (version,) raise RuntimeError(msg) if version[3] not in ('alpha', 'beta', 'rc', 'final'): msg = 'Version tuple not as expected. I got %s' % (version,) raise RuntimeError(msg) # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|c}N - for alpha, beta and rc releases parts = 2 if version[2] == 0 else 3 main = '.'.join(str(x) for x in version[:parts]) sub = '' # This crashes on windows if version[3] == 'alpha' and version[4] == '0': # Currently failed on windows and mac if 'win32' in sys.platform or 'darwin' in sys.platform: sub = '.dev-master' else: try: git_hash = current_git_hash() if git_hash: sub = '.dev-%s' % git_hash except WindowsError: sub = '.dev-master' elif version[3] != 'final': mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} sub = mapping[version[3]] + str(version[4]) return main + sub
def function[get_version, parameter[version]]: constant[Returns a PEP 386-compliant version number from VERSION. :param version: A tuple that represent a version. :type version: tuple :returns: a PEP 386-compliant version number. :rtype: str ] if compare[name[version] is constant[None]] begin[:] variable[version_list] assign[=] call[name[inasafe_version].split, parameter[constant[.]]] variable[version] assign[=] call[name[tuple], parameter[binary_operation[binary_operation[name[version_list] + list[[<ast.Name object at 0x7da1b0c89b10>]]] + list[[<ast.Constant object at 0x7da1b0c885b0>]]]]] if compare[call[name[len], parameter[name[version]]] not_equal[!=] constant[5]] begin[:] variable[msg] assign[=] binary_operation[constant[Version must be a tuple of length 5. I got %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0c897e0>]]] <ast.Raise object at 0x7da1b0c898d0> if compare[call[name[version]][constant[3]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b0c89de0>, <ast.Constant object at 0x7da1b0c8bd90>, <ast.Constant object at 0x7da1b0c8a980>, <ast.Constant object at 0x7da1b0c8a020>]]] begin[:] variable[msg] assign[=] binary_operation[constant[Version tuple not as expected. I got %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0c8bc70>]]] <ast.Raise object at 0x7da1b0c894b0> variable[parts] assign[=] <ast.IfExp object at 0x7da1b0c88610> variable[main] assign[=] call[constant[.].join, parameter[<ast.GeneratorExp object at 0x7da1b0c8a500>]] variable[sub] assign[=] constant[] if <ast.BoolOp object at 0x7da1b0c88be0> begin[:] if <ast.BoolOp object at 0x7da1b0c8b400> begin[:] variable[sub] assign[=] constant[.dev-master] return[binary_operation[name[main] + name[sub]]]
keyword[def] identifier[get_version] ( identifier[version] = keyword[None] ): literal[string] keyword[if] identifier[version] keyword[is] keyword[None] : identifier[version_list] = identifier[inasafe_version] . identifier[split] ( literal[string] ) identifier[version] = identifier[tuple] ( identifier[version_list] +[ identifier[inasafe_release_status] ]+[ literal[string] ]) keyword[if] identifier[len] ( identifier[version] )!= literal[int] : identifier[msg] = literal[string] %( identifier[version] ,) keyword[raise] identifier[RuntimeError] ( identifier[msg] ) keyword[if] identifier[version] [ literal[int] ] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): identifier[msg] = literal[string] %( identifier[version] ,) keyword[raise] identifier[RuntimeError] ( identifier[msg] ) identifier[parts] = literal[int] keyword[if] identifier[version] [ literal[int] ]== literal[int] keyword[else] literal[int] identifier[main] = literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[version] [: identifier[parts] ]) identifier[sub] = literal[string] keyword[if] identifier[version] [ literal[int] ]== literal[string] keyword[and] identifier[version] [ literal[int] ]== literal[string] : keyword[if] literal[string] keyword[in] identifier[sys] . identifier[platform] keyword[or] literal[string] keyword[in] identifier[sys] . identifier[platform] : identifier[sub] = literal[string] keyword[else] : keyword[try] : identifier[git_hash] = identifier[current_git_hash] () keyword[if] identifier[git_hash] : identifier[sub] = literal[string] % identifier[git_hash] keyword[except] identifier[WindowsError] : identifier[sub] = literal[string] keyword[elif] identifier[version] [ literal[int] ]!= literal[string] : identifier[mapping] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } identifier[sub] = identifier[mapping] [ identifier[version] [ literal[int] ]]+ identifier[str] ( identifier[version] [ literal[int] ]) keyword[return] identifier[main] + identifier[sub]
def get_version(version=None): """Returns a PEP 386-compliant version number from VERSION. :param version: A tuple that represent a version. :type version: tuple :returns: a PEP 386-compliant version number. :rtype: str """ if version is None: version_list = inasafe_version.split('.') version = tuple(version_list + [inasafe_release_status] + ['0']) # depends on [control=['if'], data=['version']] if len(version) != 5: msg = 'Version must be a tuple of length 5. I got %s' % (version,) raise RuntimeError(msg) # depends on [control=['if'], data=[]] if version[3] not in ('alpha', 'beta', 'rc', 'final'): msg = 'Version tuple not as expected. I got %s' % (version,) raise RuntimeError(msg) # depends on [control=['if'], data=[]] # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|c}N - for alpha, beta and rc releases parts = 2 if version[2] == 0 else 3 main = '.'.join((str(x) for x in version[:parts])) sub = '' # This crashes on windows if version[3] == 'alpha' and version[4] == '0': # Currently failed on windows and mac if 'win32' in sys.platform or 'darwin' in sys.platform: sub = '.dev-master' # depends on [control=['if'], data=[]] else: try: git_hash = current_git_hash() if git_hash: sub = '.dev-%s' % git_hash # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except WindowsError: sub = '.dev-master' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif version[3] != 'final': mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} sub = mapping[version[3]] + str(version[4]) # depends on [control=['if'], data=[]] return main + sub
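`get_version` builds the version string as a main part X.Y[.Z] plus an a/b/c suffix for pre-releases. A self-contained sketch of that assembly (without the git-hash dev suffix branch) follows; the tuples are example inputs, not real InaSAFE versions.

# Minimal "main + sub" assembly from a 5-part version tuple, mirroring the
# pre-release suffix logic above. Example tuples only.
def simple_version(version):
    assert len(version) == 5 and version[3] in ('alpha', 'beta', 'rc', 'final')
    parts = 2 if version[2] == 0 else 3
    main = '.'.join(str(x) for x in version[:parts])
    sub = ''
    if version[3] != 'final':
        sub = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}[version[3]] + str(version[4])
    return main + sub

print(simple_version((4, 0, 0, 'final', 0)))   # -> 4.0
print(simple_version((4, 1, 2, 'rc', 1)))      # -> 4.1.2c1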
def get_latex_expression(self, pos, strict_braces=None): """ Parses the latex content given to the constructor (and stored in `self.s`), starting at position `pos`, to parse a single LaTeX expression. Reads a latex expression, e.g. macro argument. This may be a single char, an escape sequence, or a expression placed in braces. This is what TeX calls a "token" (and not what we call a token... anyway). Returns a tuple `(node, pos, len)`, where `pos` is the position of the first char of the expression and `len` the length of the expression. """ with _PushPropOverride(self, 'strict_braces', strict_braces): tok = self.get_token(pos, environments=False, keep_inline_math=False) if (tok.tok == 'macro'): if (tok.arg == 'end'): if not self.tolerant_parsing: # error, this should be an \end{environment}, not an argument in itself raise LatexWalkerParseError("Expected expression, got \end", self.s, pos) else: return (LatexCharsNode(chars=''), tok.pos, 0) return (LatexMacroNode(macroname=tok.arg, nodeoptarg=None, nodeargs=[], macro_post_space=tok.post_space), tok.pos, tok.len) if (tok.tok == 'comment'): return self.get_latex_expression(pos+tok.len) if (tok.tok == 'brace_open'): return self.get_latex_braced_group(tok.pos) if (tok.tok == 'brace_close'): if (self.strict_braces and not self.tolerant_parsing): raise LatexWalkerParseError("Expected expression, got closing brace!", self.s, pos) return (LatexCharsNode(chars=''), tok.pos, 0) if (tok.tok == 'char'): return (LatexCharsNode(chars=tok.arg), tok.pos, tok.len) raise LatexWalkerParseError("Unknown token type: %s" %(tok.tok), self.s, pos)
def function[get_latex_expression, parameter[self, pos, strict_braces]]: constant[ Parses the latex content given to the constructor (and stored in `self.s`), starting at position `pos`, to parse a single LaTeX expression. Reads a latex expression, e.g. macro argument. This may be a single char, an escape sequence, or a expression placed in braces. This is what TeX calls a "token" (and not what we call a token... anyway). Returns a tuple `(node, pos, len)`, where `pos` is the position of the first char of the expression and `len` the length of the expression. ] with call[name[_PushPropOverride], parameter[name[self], constant[strict_braces], name[strict_braces]]] begin[:] variable[tok] assign[=] call[name[self].get_token, parameter[name[pos]]] if compare[name[tok].tok equal[==] constant[macro]] begin[:] if compare[name[tok].arg equal[==] constant[end]] begin[:] if <ast.UnaryOp object at 0x7da1b026ddb0> begin[:] <ast.Raise object at 0x7da1b026ccd0> return[tuple[[<ast.Call object at 0x7da1b026c640>, <ast.Attribute object at 0x7da1b026e4d0>, <ast.Attribute object at 0x7da1b026ce20>]]] if compare[name[tok].tok equal[==] constant[comment]] begin[:] return[call[name[self].get_latex_expression, parameter[binary_operation[name[pos] + name[tok].len]]]] if compare[name[tok].tok equal[==] constant[brace_open]] begin[:] return[call[name[self].get_latex_braced_group, parameter[name[tok].pos]]] if compare[name[tok].tok equal[==] constant[brace_close]] begin[:] if <ast.BoolOp object at 0x7da1b026d4e0> begin[:] <ast.Raise object at 0x7da1b026ceb0> return[tuple[[<ast.Call object at 0x7da1b026e770>, <ast.Attribute object at 0x7da1b026fe50>, <ast.Constant object at 0x7da1b026d060>]]] if compare[name[tok].tok equal[==] constant[char]] begin[:] return[tuple[[<ast.Call object at 0x7da1b026d150>, <ast.Attribute object at 0x7da1b026d600>, <ast.Attribute object at 0x7da1b026da80>]]] <ast.Raise object at 0x7da1b026ece0>
keyword[def] identifier[get_latex_expression] ( identifier[self] , identifier[pos] , identifier[strict_braces] = keyword[None] ): literal[string] keyword[with] identifier[_PushPropOverride] ( identifier[self] , literal[string] , identifier[strict_braces] ): identifier[tok] = identifier[self] . identifier[get_token] ( identifier[pos] , identifier[environments] = keyword[False] , identifier[keep_inline_math] = keyword[False] ) keyword[if] ( identifier[tok] . identifier[tok] == literal[string] ): keyword[if] ( identifier[tok] . identifier[arg] == literal[string] ): keyword[if] keyword[not] identifier[self] . identifier[tolerant_parsing] : keyword[raise] identifier[LatexWalkerParseError] ( literal[string] , identifier[self] . identifier[s] , identifier[pos] ) keyword[else] : keyword[return] ( identifier[LatexCharsNode] ( identifier[chars] = literal[string] ), identifier[tok] . identifier[pos] , literal[int] ) keyword[return] ( identifier[LatexMacroNode] ( identifier[macroname] = identifier[tok] . identifier[arg] , identifier[nodeoptarg] = keyword[None] , identifier[nodeargs] =[], identifier[macro_post_space] = identifier[tok] . identifier[post_space] ), identifier[tok] . identifier[pos] , identifier[tok] . identifier[len] ) keyword[if] ( identifier[tok] . identifier[tok] == literal[string] ): keyword[return] identifier[self] . identifier[get_latex_expression] ( identifier[pos] + identifier[tok] . identifier[len] ) keyword[if] ( identifier[tok] . identifier[tok] == literal[string] ): keyword[return] identifier[self] . identifier[get_latex_braced_group] ( identifier[tok] . identifier[pos] ) keyword[if] ( identifier[tok] . identifier[tok] == literal[string] ): keyword[if] ( identifier[self] . identifier[strict_braces] keyword[and] keyword[not] identifier[self] . identifier[tolerant_parsing] ): keyword[raise] identifier[LatexWalkerParseError] ( literal[string] , identifier[self] . identifier[s] , identifier[pos] ) keyword[return] ( identifier[LatexCharsNode] ( identifier[chars] = literal[string] ), identifier[tok] . identifier[pos] , literal[int] ) keyword[if] ( identifier[tok] . identifier[tok] == literal[string] ): keyword[return] ( identifier[LatexCharsNode] ( identifier[chars] = identifier[tok] . identifier[arg] ), identifier[tok] . identifier[pos] , identifier[tok] . identifier[len] ) keyword[raise] identifier[LatexWalkerParseError] ( literal[string] %( identifier[tok] . identifier[tok] ), identifier[self] . identifier[s] , identifier[pos] )
def get_latex_expression(self, pos, strict_braces=None): """ Parses the latex content given to the constructor (and stored in `self.s`), starting at position `pos`, to parse a single LaTeX expression. Reads a latex expression, e.g. macro argument. This may be a single char, an escape sequence, or a expression placed in braces. This is what TeX calls a "token" (and not what we call a token... anyway). Returns a tuple `(node, pos, len)`, where `pos` is the position of the first char of the expression and `len` the length of the expression. """ with _PushPropOverride(self, 'strict_braces', strict_braces): tok = self.get_token(pos, environments=False, keep_inline_math=False) if tok.tok == 'macro': if tok.arg == 'end': if not self.tolerant_parsing: # error, this should be an \end{environment}, not an argument in itself raise LatexWalkerParseError('Expected expression, got \\end', self.s, pos) # depends on [control=['if'], data=[]] else: return (LatexCharsNode(chars=''), tok.pos, 0) # depends on [control=['if'], data=[]] return (LatexMacroNode(macroname=tok.arg, nodeoptarg=None, nodeargs=[], macro_post_space=tok.post_space), tok.pos, tok.len) # depends on [control=['if'], data=[]] if tok.tok == 'comment': return self.get_latex_expression(pos + tok.len) # depends on [control=['if'], data=[]] if tok.tok == 'brace_open': return self.get_latex_braced_group(tok.pos) # depends on [control=['if'], data=[]] if tok.tok == 'brace_close': if self.strict_braces and (not self.tolerant_parsing): raise LatexWalkerParseError('Expected expression, got closing brace!', self.s, pos) # depends on [control=['if'], data=[]] return (LatexCharsNode(chars=''), tok.pos, 0) # depends on [control=['if'], data=[]] if tok.tok == 'char': return (LatexCharsNode(chars=tok.arg), tok.pos, tok.len) # depends on [control=['if'], data=[]] raise LatexWalkerParseError('Unknown token type: %s' % tok.tok, self.s, pos) # depends on [control=['with'], data=[]]
def is_chief(task: backend.Task, run_name: str): """Returns True if task is chief task in the corresponding run""" global run_task_dict if run_name not in run_task_dict: return True task_list = run_task_dict[run_name] assert task in task_list, f"Task {task.name} doesn't belong to run {run_name}" return task_list[0] == task
def function[is_chief, parameter[task, run_name]]: constant[Returns True if task is chief task in the corresponding run] <ast.Global object at 0x7da2054a4d90> if compare[name[run_name] <ast.NotIn object at 0x7da2590d7190> name[run_task_dict]] begin[:] return[constant[True]] variable[task_list] assign[=] call[name[run_task_dict]][name[run_name]] assert[compare[name[task] in name[task_list]]] return[compare[call[name[task_list]][constant[0]] equal[==] name[task]]]
keyword[def] identifier[is_chief] ( identifier[task] : identifier[backend] . identifier[Task] , identifier[run_name] : identifier[str] ): literal[string] keyword[global] identifier[run_task_dict] keyword[if] identifier[run_name] keyword[not] keyword[in] identifier[run_task_dict] : keyword[return] keyword[True] identifier[task_list] = identifier[run_task_dict] [ identifier[run_name] ] keyword[assert] identifier[task] keyword[in] identifier[task_list] , literal[string] keyword[return] identifier[task_list] [ literal[int] ]== identifier[task]
def is_chief(task: backend.Task, run_name: str): """Returns True if task is chief task in the corresponding run""" global run_task_dict if run_name not in run_task_dict: return True # depends on [control=['if'], data=[]] task_list = run_task_dict[run_name] assert task in task_list, f"Task {task.name} doesn't belong to run {run_name}" return task_list[0] == task
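`is_chief` encodes the convention that the first task registered for a run is the chief, and that tasks from unregistered runs default to chief. A self-contained sketch with plain strings standing in for task objects is below; it is not the real backend API.

# "First task registered for a run is the chief" rule, with strings as tasks.
run_task_dict = {"resnet-run": ["task0", "task1", "task2"]}

def is_chief(task, run_name):
    if run_name not in run_task_dict:
        return True
    task_list = run_task_dict[run_name]
    assert task in task_list
    return task_list[0] == task

print(is_chief("task0", "resnet-run"))   # True
print(is_chief("task2", "resnet-run"))   # False
print(is_chief("task0", "unknown-run"))  # True: unregistered runs default to chief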
def on_pytoml_dumps(self, pytoml, config, dictionary, **kwargs): """ The `pytoml <https://pypi.org/project/pytoml/>`_ dumps method. :param module pytoml: The ``pytoml`` module :param class config: The instance's config class :param dict dictionary: The dictionary to serialize :returns: The TOML serialization :rtype: str """ inline_tables = set(kwargs.get("inline_tables", [])) if len(inline_tables) > 0: warnings.warn("pytoml does not support 'inline_tables' argument") return pytoml.dumps(dictionary)
def function[on_pytoml_dumps, parameter[self, pytoml, config, dictionary]]: constant[ The `pytoml <https://pypi.org/project/pytoml/>`_ dumps method. :param module pytoml: The ``pytoml`` module :param class config: The instance's config class :param dict dictionary: The dictionary to serialize :returns: The TOML serialization :rtype: str ] variable[inline_tables] assign[=] call[name[set], parameter[call[name[kwargs].get, parameter[constant[inline_tables], list[[]]]]]] if compare[call[name[len], parameter[name[inline_tables]]] greater[>] constant[0]] begin[:] call[name[warnings].warn, parameter[constant[pytoml does not support 'inline_tables' argument]]] return[call[name[pytoml].dumps, parameter[name[dictionary]]]]
keyword[def] identifier[on_pytoml_dumps] ( identifier[self] , identifier[pytoml] , identifier[config] , identifier[dictionary] ,** identifier[kwargs] ): literal[string] identifier[inline_tables] = identifier[set] ( identifier[kwargs] . identifier[get] ( literal[string] ,[])) keyword[if] identifier[len] ( identifier[inline_tables] )> literal[int] : identifier[warnings] . identifier[warn] ( literal[string] ) keyword[return] identifier[pytoml] . identifier[dumps] ( identifier[dictionary] )
def on_pytoml_dumps(self, pytoml, config, dictionary, **kwargs): """ The `pytoml <https://pypi.org/project/pytoml/>`_ dumps method. :param module pytoml: The ``pytoml`` module :param class config: The instance's config class :param dict dictionary: The dictionary to serialize :returns: The TOML serialization :rtype: str """ inline_tables = set(kwargs.get('inline_tables', [])) if len(inline_tables) > 0: warnings.warn("pytoml does not support 'inline_tables' argument") # depends on [control=['if'], data=[]] return pytoml.dumps(dictionary)
def compare_token(expected: Union[str, bytes], actual: Union[str, bytes]) -> bool: """ Compares the given tokens. :param expected: The expected token. :type expected: Union[str, bytes] :param actual: The actual token. :type actual: Union[str, bytes] :return: Do the tokens match? :rtype: bool """ expected = util.to_bytes(expected) actual = util.to_bytes(actual) _, expected_sig_seg = expected.rsplit(b'.', 1) _, actual_sig_seg = actual.rsplit(b'.', 1) expected_sig = util.b64_decode(expected_sig_seg) actual_sig = util.b64_decode(actual_sig_seg) return compare_signature(expected_sig, actual_sig)
def function[compare_token, parameter[expected, actual]]: constant[ Compares the given tokens. :param expected: The expected token. :type expected: Union[str, bytes] :param actual: The actual token. :type actual: Union[str, bytes] :return: Do the tokens match? :rtype: bool ] variable[expected] assign[=] call[name[util].to_bytes, parameter[name[expected]]] variable[actual] assign[=] call[name[util].to_bytes, parameter[name[actual]]] <ast.Tuple object at 0x7da20c6ab340> assign[=] call[name[expected].rsplit, parameter[constant[b'.'], constant[1]]] <ast.Tuple object at 0x7da20c6ab6d0> assign[=] call[name[actual].rsplit, parameter[constant[b'.'], constant[1]]] variable[expected_sig] assign[=] call[name[util].b64_decode, parameter[name[expected_sig_seg]]] variable[actual_sig] assign[=] call[name[util].b64_decode, parameter[name[actual_sig_seg]]] return[call[name[compare_signature], parameter[name[expected_sig], name[actual_sig]]]]
keyword[def] identifier[compare_token] ( identifier[expected] : identifier[Union] [ identifier[str] , identifier[bytes] ], identifier[actual] : identifier[Union] [ identifier[str] , identifier[bytes] ])-> identifier[bool] : literal[string] identifier[expected] = identifier[util] . identifier[to_bytes] ( identifier[expected] ) identifier[actual] = identifier[util] . identifier[to_bytes] ( identifier[actual] ) identifier[_] , identifier[expected_sig_seg] = identifier[expected] . identifier[rsplit] ( literal[string] , literal[int] ) identifier[_] , identifier[actual_sig_seg] = identifier[actual] . identifier[rsplit] ( literal[string] , literal[int] ) identifier[expected_sig] = identifier[util] . identifier[b64_decode] ( identifier[expected_sig_seg] ) identifier[actual_sig] = identifier[util] . identifier[b64_decode] ( identifier[actual_sig_seg] ) keyword[return] identifier[compare_signature] ( identifier[expected_sig] , identifier[actual_sig] )
def compare_token(expected: Union[str, bytes], actual: Union[str, bytes]) -> bool: """ Compares the given tokens. :param expected: The expected token. :type expected: Union[str, bytes] :param actual: The actual token. :type actual: Union[str, bytes] :return: Do the tokens match? :rtype: bool """ expected = util.to_bytes(expected) actual = util.to_bytes(actual) (_, expected_sig_seg) = expected.rsplit(b'.', 1) (_, actual_sig_seg) = actual.rsplit(b'.', 1) expected_sig = util.b64_decode(expected_sig_seg) actual_sig = util.b64_decode(actual_sig_seg) return compare_signature(expected_sig, actual_sig)
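`compare_token` splits off the last dot-separated segment of each token, decodes it, and compares only the signatures. The sketch below shows that idea with `hmac.compare_digest` for a constant-time comparison; the tokens are toy values and `urlsafe_b64decode` stands in for the package's own padding-tolerant decoder.

# Compare only the signature segment of "header.payload.signature" style tokens.
import hmac
from base64 import urlsafe_b64encode, urlsafe_b64decode

def compare_sig_segment(expected: bytes, actual: bytes) -> bool:
    _, expected_sig_seg = expected.rsplit(b'.', 1)
    _, actual_sig_seg = actual.rsplit(b'.', 1)
    return hmac.compare_digest(urlsafe_b64decode(expected_sig_seg),
                               urlsafe_b64decode(actual_sig_seg))

sig = urlsafe_b64encode(b"raw-signature-bytes")
other = urlsafe_b64encode(b"other")
print(compare_sig_segment(b"head.payload1." + sig, b"head.payload2." + sig))    # True
print(compare_sig_segment(b"head.payload." + sig, b"head.payload." + other))    # False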
def _recursive_import(package): """ Args: package(py:term:`package`): Package to walk Import all modules from a package recursively """ prefix = '%s.' % (package.__name__) path = getattr(package, '__path__', None) if path: for submod in pkgutil.walk_packages(path, prefix=prefix): _import_module(submod[1], submod[0].path)
def function[_recursive_import, parameter[package]]: constant[ Args: package(py:term:`package`): Package to walk Import all modules from a package recursively ] variable[prefix] assign[=] binary_operation[constant[%s.] <ast.Mod object at 0x7da2590d6920> name[package].__name__] variable[path] assign[=] call[name[getattr], parameter[name[package], constant[__path__], constant[None]]] if name[path] begin[:] for taget[name[submod]] in starred[call[name[pkgutil].walk_packages, parameter[name[path]]]] begin[:] call[name[_import_module], parameter[call[name[submod]][constant[1]], call[name[submod]][constant[0]].path]]
keyword[def] identifier[_recursive_import] ( identifier[package] ): literal[string] identifier[prefix] = literal[string] %( identifier[package] . identifier[__name__] ) identifier[path] = identifier[getattr] ( identifier[package] , literal[string] , keyword[None] ) keyword[if] identifier[path] : keyword[for] identifier[submod] keyword[in] identifier[pkgutil] . identifier[walk_packages] ( identifier[path] , identifier[prefix] = identifier[prefix] ): identifier[_import_module] ( identifier[submod] [ literal[int] ], identifier[submod] [ literal[int] ]. identifier[path] )
def _recursive_import(package): """ Args: package(py:term:`package`): Package to walk Import all modules from a package recursively """ prefix = '%s.' % package.__name__ path = getattr(package, '__path__', None) if path: for submod in pkgutil.walk_packages(path, prefix=prefix): _import_module(submod[1], submod[0].path) # depends on [control=['for'], data=['submod']] # depends on [control=['if'], data=[]]
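`_recursive_import` walks a package with `pkgutil.walk_packages` and imports each submodule. The same pattern applied to a standard-library package is shown below; `importlib.import_module` stands in for the module's private `_import_module` helper.

# Walk-and-import pattern over the stdlib json package.
import importlib
import json
import pkgutil

prefix = '%s.' % json.__name__
for submod in pkgutil.walk_packages(json.__path__, prefix=prefix):
    print(submod.name)                       # e.g. json.decoder, json.encoder, ...
    importlib.import_module(submod.name)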
def get(self, id): """Retrieves the job with the selected ID. :param str id: The ID of the job :returns: The dictionary of the job if found, None otherwise """ self.cur.execute("SELECT * FROM jobs WHERE hash=?", (id,)) item = self.cur.fetchone() if item: return dict(zip( ("id", "description", "last-run", "next-run", "last-run-result"), item)) return None
def function[get, parameter[self, id]]: constant[Retrieves the job with the selected ID. :param str id: The ID of the job :returns: The dictionary of the job if found, None otherwise ] call[name[self].cur.execute, parameter[constant[SELECT * FROM jobs WHERE hash=?], tuple[[<ast.Name object at 0x7da1b1e19ab0>]]]] variable[item] assign[=] call[name[self].cur.fetchone, parameter[]] if name[item] begin[:] return[call[name[dict], parameter[call[name[zip], parameter[tuple[[<ast.Constant object at 0x7da1b2047100>, <ast.Constant object at 0x7da1b20475e0>, <ast.Constant object at 0x7da1b2046c80>, <ast.Constant object at 0x7da1b2045330>, <ast.Constant object at 0x7da1b2044490>]], name[item]]]]]] return[constant[None]]
keyword[def] identifier[get] ( identifier[self] , identifier[id] ): literal[string] identifier[self] . identifier[cur] . identifier[execute] ( literal[string] ,( identifier[id] ,)) identifier[item] = identifier[self] . identifier[cur] . identifier[fetchone] () keyword[if] identifier[item] : keyword[return] identifier[dict] ( identifier[zip] ( ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ), identifier[item] )) keyword[return] keyword[None]
def get(self, id): """Retrieves the job with the selected ID. :param str id: The ID of the job :returns: The dictionary of the job if found, None otherwise """ self.cur.execute('SELECT * FROM jobs WHERE hash=?', (id,)) item = self.cur.fetchone() if item: return dict(zip(('id', 'description', 'last-run', 'next-run', 'last-run-result'), item)) # depends on [control=['if'], data=[]] return None
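The job lookup above is a parameterised SELECT whose row is zipped into a dict. A runnable end-to-end sketch against an in-memory SQLite database follows; the table layout is invented only to mirror the keys used in the zip.

# In-memory SQLite version of the same lookup; schema invented for the example.
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE jobs (hash TEXT, description TEXT, last_run TEXT, "
            "next_run TEXT, last_run_result INTEGER)")
cur.execute("INSERT INTO jobs VALUES (?, ?, ?, ?, ?)",
            ("abc123", "nightly backup", "2019-01-01", "2019-01-02", 0))

cur.execute("SELECT * FROM jobs WHERE hash=?", ("abc123",))
item = cur.fetchone()
job = dict(zip(("id", "description", "last-run", "next-run", "last-run-result"),
               item)) if item else None
print(job)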
def __PrintMessageDocstringLines(self, message_type): """Print the docstring for this message.""" description = message_type.description or '%s message type.' % ( message_type.name) short_description = ( _EmptyMessage(message_type) and len(description) < (self.__printer.CalculateWidth() - 6)) with self.__printer.CommentContext(): if short_description: # Note that we use explicit string interpolation here since # we're in comment context. self.__printer('r"""%s"""' % description) return for line in textwrap.wrap('r"""%s' % description, self.__printer.CalculateWidth()): self.__printer(line) PrintIndentedDescriptions(self.__printer, message_type.enum_types, 'Enums') PrintIndentedDescriptions( self.__printer, message_type.message_types, 'Messages') PrintIndentedDescriptions( self.__printer, message_type.fields, 'Fields') self.__printer('"""') self.__printer()
def function[__PrintMessageDocstringLines, parameter[self, message_type]]: constant[Print the docstring for this message.] variable[description] assign[=] <ast.BoolOp object at 0x7da1b07f5630> variable[short_description] assign[=] <ast.BoolOp object at 0x7da1b07f4e80> with call[name[self].__printer.CommentContext, parameter[]] begin[:] if name[short_description] begin[:] call[name[self].__printer, parameter[binary_operation[constant[r"""%s"""] <ast.Mod object at 0x7da2590d6920> name[description]]]] return[None] for taget[name[line]] in starred[call[name[textwrap].wrap, parameter[binary_operation[constant[r"""%s] <ast.Mod object at 0x7da2590d6920> name[description]], call[name[self].__printer.CalculateWidth, parameter[]]]]] begin[:] call[name[self].__printer, parameter[name[line]]] call[name[PrintIndentedDescriptions], parameter[name[self].__printer, name[message_type].enum_types, constant[Enums]]] call[name[PrintIndentedDescriptions], parameter[name[self].__printer, name[message_type].message_types, constant[Messages]]] call[name[PrintIndentedDescriptions], parameter[name[self].__printer, name[message_type].fields, constant[Fields]]] call[name[self].__printer, parameter[constant["""]]] call[name[self].__printer, parameter[]]
keyword[def] identifier[__PrintMessageDocstringLines] ( identifier[self] , identifier[message_type] ): literal[string] identifier[description] = identifier[message_type] . identifier[description] keyword[or] literal[string] %( identifier[message_type] . identifier[name] ) identifier[short_description] =( identifier[_EmptyMessage] ( identifier[message_type] ) keyword[and] identifier[len] ( identifier[description] )<( identifier[self] . identifier[__printer] . identifier[CalculateWidth] ()- literal[int] )) keyword[with] identifier[self] . identifier[__printer] . identifier[CommentContext] (): keyword[if] identifier[short_description] : identifier[self] . identifier[__printer] ( literal[string] % identifier[description] ) keyword[return] keyword[for] identifier[line] keyword[in] identifier[textwrap] . identifier[wrap] ( literal[string] % identifier[description] , identifier[self] . identifier[__printer] . identifier[CalculateWidth] ()): identifier[self] . identifier[__printer] ( identifier[line] ) identifier[PrintIndentedDescriptions] ( identifier[self] . identifier[__printer] , identifier[message_type] . identifier[enum_types] , literal[string] ) identifier[PrintIndentedDescriptions] ( identifier[self] . identifier[__printer] , identifier[message_type] . identifier[message_types] , literal[string] ) identifier[PrintIndentedDescriptions] ( identifier[self] . identifier[__printer] , identifier[message_type] . identifier[fields] , literal[string] ) identifier[self] . identifier[__printer] ( literal[string] ) identifier[self] . identifier[__printer] ()
def __PrintMessageDocstringLines(self, message_type): """Print the docstring for this message.""" description = message_type.description or '%s message type.' % message_type.name short_description = _EmptyMessage(message_type) and len(description) < self.__printer.CalculateWidth() - 6 with self.__printer.CommentContext(): if short_description: # Note that we use explicit string interpolation here since # we're in comment context. self.__printer('r"""%s"""' % description) return # depends on [control=['if'], data=[]] for line in textwrap.wrap('r"""%s' % description, self.__printer.CalculateWidth()): self.__printer(line) # depends on [control=['for'], data=['line']] PrintIndentedDescriptions(self.__printer, message_type.enum_types, 'Enums') PrintIndentedDescriptions(self.__printer, message_type.message_types, 'Messages') PrintIndentedDescriptions(self.__printer, message_type.fields, 'Fields') self.__printer('"""') self.__printer() # depends on [control=['with'], data=[]]
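`__PrintMessageDocstringLines` either emits a one-line raw docstring or wraps a long description with `textwrap.wrap`. The self-contained illustration below shows that decision with a fixed width standing in for the printer's `CalculateWidth()`; it is not the apitools printer itself.

# Short one-liner vs. wrapped docstring, with a fixed width of 79 columns.
import textwrap

def print_docstring(description, width=79):
    if len(description) < width - 6:
        print('r"""%s"""' % description)
        return
    for line in textwrap.wrap('r"""%s' % description, width):
        print(line)
    print('"""')

print_docstring("A short message type.")
print_docstring("A message type whose description is long enough that it has to be "
                "wrapped across several lines before the closing quotes are emitted.")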
def flow_pipemajor(Diam, HeadLossFric, Length, Nu, PipeRough): """Return the flow rate with only major losses. This function applies to both laminar and turbulent flows. """ #Inputs do not need to be checked here because they are checked by #functions this function calls. FlowHagen = flow_hagen(Diam, HeadLossFric, Length, Nu).magnitude if FlowHagen < flow_transition(Diam, Nu).magnitude: return FlowHagen else: return flow_swamee(Diam, HeadLossFric, Length, Nu, PipeRough).magnitude
def function[flow_pipemajor, parameter[Diam, HeadLossFric, Length, Nu, PipeRough]]: constant[Return the flow rate with only major losses. This function applies to both laminar and turbulent flows. ] variable[FlowHagen] assign[=] call[name[flow_hagen], parameter[name[Diam], name[HeadLossFric], name[Length], name[Nu]]].magnitude if compare[name[FlowHagen] less[<] call[name[flow_transition], parameter[name[Diam], name[Nu]]].magnitude] begin[:] return[name[FlowHagen]]
keyword[def] identifier[flow_pipemajor] ( identifier[Diam] , identifier[HeadLossFric] , identifier[Length] , identifier[Nu] , identifier[PipeRough] ): literal[string] identifier[FlowHagen] = identifier[flow_hagen] ( identifier[Diam] , identifier[HeadLossFric] , identifier[Length] , identifier[Nu] ). identifier[magnitude] keyword[if] identifier[FlowHagen] < identifier[flow_transition] ( identifier[Diam] , identifier[Nu] ). identifier[magnitude] : keyword[return] identifier[FlowHagen] keyword[else] : keyword[return] identifier[flow_swamee] ( identifier[Diam] , identifier[HeadLossFric] , identifier[Length] , identifier[Nu] , identifier[PipeRough] ). identifier[magnitude]
def flow_pipemajor(Diam, HeadLossFric, Length, Nu, PipeRough): """Return the flow rate with only major losses. This function applies to both laminar and turbulent flows. """ #Inputs do not need to be checked here because they are checked by #functions this function calls. FlowHagen = flow_hagen(Diam, HeadLossFric, Length, Nu).magnitude if FlowHagen < flow_transition(Diam, Nu).magnitude: return FlowHagen # depends on [control=['if'], data=['FlowHagen']] else: return flow_swamee(Diam, HeadLossFric, Length, Nu, PipeRough).magnitude
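`flow_pipemajor` first computes the laminar Hagen-Poiseuille flow and only switches to the Swamee-Jain form above the transition flow. The sketch below covers the laminar branch only, using Q = pi * g * hf * D^4 / (128 * nu * L) with plain SI floats; the numbers are examples and this is not aguaclara's unit-aware API.

# Laminar branch only: Hagen-Poiseuille flow from head loss, SI units.
import math

def flow_hagen_si(diam, headloss, length, nu, g=9.81):
    return math.pi * g * headloss * diam**4 / (128 * nu * length)

Q = flow_hagen_si(diam=0.01, headloss=0.01, length=10.0, nu=1e-6)   # m^3/s
Re = 4 * Q / (math.pi * 0.01 * 1e-6)   # ~300, below the usual laminar limit (~2100)
print(Q, Re)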
def validate_boundary(reference_intervals, estimated_intervals, trim): """Checks that the input annotations to a segment boundary estimation metric (i.e. one that only takes in segment intervals) look like valid segment times, and throws helpful errors if not. Parameters ---------- reference_intervals : np.ndarray, shape=(n, 2) reference segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. estimated_intervals : np.ndarray, shape=(m, 2) estimated segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. trim : bool will the start and end events be trimmed? """ if trim: # If we're trimming, then we need at least 2 intervals min_size = 2 else: # If we're not trimming, then we only need one interval min_size = 1 if len(reference_intervals) < min_size: warnings.warn("Reference intervals are empty.") if len(estimated_intervals) < min_size: warnings.warn("Estimated intervals are empty.") for intervals in [reference_intervals, estimated_intervals]: util.validate_intervals(intervals)
def function[validate_boundary, parameter[reference_intervals, estimated_intervals, trim]]: constant[Checks that the input annotations to a segment boundary estimation metric (i.e. one that only takes in segment intervals) look like valid segment times, and throws helpful errors if not. Parameters ---------- reference_intervals : np.ndarray, shape=(n, 2) reference segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. estimated_intervals : np.ndarray, shape=(m, 2) estimated segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. trim : bool will the start and end events be trimmed? ] if name[trim] begin[:] variable[min_size] assign[=] constant[2] if compare[call[name[len], parameter[name[reference_intervals]]] less[<] name[min_size]] begin[:] call[name[warnings].warn, parameter[constant[Reference intervals are empty.]]] if compare[call[name[len], parameter[name[estimated_intervals]]] less[<] name[min_size]] begin[:] call[name[warnings].warn, parameter[constant[Estimated intervals are empty.]]] for taget[name[intervals]] in starred[list[[<ast.Name object at 0x7da1b0f18d90>, <ast.Name object at 0x7da1b0f18b50>]]] begin[:] call[name[util].validate_intervals, parameter[name[intervals]]]
keyword[def] identifier[validate_boundary] ( identifier[reference_intervals] , identifier[estimated_intervals] , identifier[trim] ): literal[string] keyword[if] identifier[trim] : identifier[min_size] = literal[int] keyword[else] : identifier[min_size] = literal[int] keyword[if] identifier[len] ( identifier[reference_intervals] )< identifier[min_size] : identifier[warnings] . identifier[warn] ( literal[string] ) keyword[if] identifier[len] ( identifier[estimated_intervals] )< identifier[min_size] : identifier[warnings] . identifier[warn] ( literal[string] ) keyword[for] identifier[intervals] keyword[in] [ identifier[reference_intervals] , identifier[estimated_intervals] ]: identifier[util] . identifier[validate_intervals] ( identifier[intervals] )
def validate_boundary(reference_intervals, estimated_intervals, trim): """Checks that the input annotations to a segment boundary estimation metric (i.e. one that only takes in segment intervals) look like valid segment times, and throws helpful errors if not. Parameters ---------- reference_intervals : np.ndarray, shape=(n, 2) reference segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. estimated_intervals : np.ndarray, shape=(m, 2) estimated segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. trim : bool will the start and end events be trimmed? """ if trim: # If we're trimming, then we need at least 2 intervals min_size = 2 # depends on [control=['if'], data=[]] else: # If we're not trimming, then we only need one interval min_size = 1 if len(reference_intervals) < min_size: warnings.warn('Reference intervals are empty.') # depends on [control=['if'], data=[]] if len(estimated_intervals) < min_size: warnings.warn('Estimated intervals are empty.') # depends on [control=['if'], data=[]] for intervals in [reference_intervals, estimated_intervals]: util.validate_intervals(intervals) # depends on [control=['for'], data=['intervals']]
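`validate_boundary` warns on too-few intervals and then delegates to `util.validate_intervals`. Below is a minimal numpy-only stand-in for that combination (shape (n, 2), non-negative times, start <= end); it is an illustration, not mir_eval's implementation.

# Minimal interval check plus the min_size warning; not mir_eval itself.
import warnings
import numpy as np

def check_intervals(intervals, min_size=1, name="intervals"):
    intervals = np.asarray(intervals)
    if len(intervals) < min_size:
        warnings.warn("%s are empty." % name.capitalize())
    if intervals.ndim != 2 or intervals.shape[1] != 2:
        raise ValueError("Intervals must have shape (n, 2).")
    if np.any(intervals[:, 1] < intervals[:, 0]) or np.any(intervals < 0):
        raise ValueError("Interval times must be non-negative with start <= end.")

check_intervals([[0.0, 10.0], [10.0, 25.5]], min_size=2)   # passes silently
try:
    check_intervals([[5.0, 3.0]])
except ValueError as exc:
    print(exc)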
def to_seconds(value, strict=True, force_int=True): """ converts duration value to integer seconds strict=True (by default) raises StrictnessError if either hours, minutes or seconds in duration value exceed allowed values """ if isinstance(value, int): return value # assuming it's seconds elif isinstance(value, timedelta): seconds = value.total_seconds() if force_int: seconds = int(round(seconds)) return seconds elif isinstance(value, str): hours, minutes, seconds = _parse(value, strict) elif isinstance(value, tuple): check_tuple(value, strict) hours, minutes, seconds = value else: raise TypeError( 'Value %s (type %s) not supported' % ( value, type(value).__name__ ) ) if not (hours or minutes or seconds): raise ValueError('No hours, minutes or seconds found') result = hours*3600 + minutes*60 + seconds return result
def function[to_seconds, parameter[value, strict, force_int]]: constant[ converts duration value to integer seconds strict=True (by default) raises StrictnessError if either hours, minutes or seconds in duration value exceed allowed values ] if call[name[isinstance], parameter[name[value], name[int]]] begin[:] return[name[value]] if <ast.UnaryOp object at 0x7da1b0bd9fc0> begin[:] <ast.Raise object at 0x7da1b0bd81c0> variable[result] assign[=] binary_operation[binary_operation[binary_operation[name[hours] * constant[3600]] + binary_operation[name[minutes] * constant[60]]] + name[seconds]] return[name[result]]
keyword[def] identifier[to_seconds] ( identifier[value] , identifier[strict] = keyword[True] , identifier[force_int] = keyword[True] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[int] ): keyword[return] identifier[value] keyword[elif] identifier[isinstance] ( identifier[value] , identifier[timedelta] ): identifier[seconds] = identifier[value] . identifier[total_seconds] () keyword[if] identifier[force_int] : identifier[seconds] = identifier[int] ( identifier[round] ( identifier[seconds] )) keyword[return] identifier[seconds] keyword[elif] identifier[isinstance] ( identifier[value] , identifier[str] ): identifier[hours] , identifier[minutes] , identifier[seconds] = identifier[_parse] ( identifier[value] , identifier[strict] ) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[tuple] ): identifier[check_tuple] ( identifier[value] , identifier[strict] ) identifier[hours] , identifier[minutes] , identifier[seconds] = identifier[value] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] %( identifier[value] , identifier[type] ( identifier[value] ). identifier[__name__] ) ) keyword[if] keyword[not] ( identifier[hours] keyword[or] identifier[minutes] keyword[or] identifier[seconds] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[result] = identifier[hours] * literal[int] + identifier[minutes] * literal[int] + identifier[seconds] keyword[return] identifier[result]
def to_seconds(value, strict=True, force_int=True): """ converts duration value to integer seconds strict=True (by default) raises StrictnessError if either hours, minutes or seconds in duration value exceed allowed values """ if isinstance(value, int): return value # assuming it's seconds # depends on [control=['if'], data=[]] elif isinstance(value, timedelta): seconds = value.total_seconds() if force_int: seconds = int(round(seconds)) # depends on [control=['if'], data=[]] return seconds # depends on [control=['if'], data=[]] elif isinstance(value, str): (hours, minutes, seconds) = _parse(value, strict) # depends on [control=['if'], data=[]] elif isinstance(value, tuple): check_tuple(value, strict) (hours, minutes, seconds) = value # depends on [control=['if'], data=[]] else: raise TypeError('Value %s (type %s) not supported' % (value, type(value).__name__)) if not (hours or minutes or seconds): raise ValueError('No hours, minutes or seconds found') # depends on [control=['if'], data=[]] result = hours * 3600 + minutes * 60 + seconds return result
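The core of `to_seconds` is hours*3600 + minutes*60 + seconds. The sketch below shows that conversion for the tuple and timedelta branches; the string branch is omitted because it relies on the module's private `_parse` helper.

# Tuple and timedelta branches of the duration-to-seconds conversion.
from datetime import timedelta

def tuple_to_seconds(value):
    hours, minutes, seconds = value
    return hours * 3600 + minutes * 60 + seconds

print(tuple_to_seconds((1, 30, 15)))                                            # 5415
print(int(round(timedelta(hours=1, minutes=30, seconds=15).total_seconds())))   # 5415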
def sample(self, image):
    """sample(image) -> bounding_box

    Yields an iterator over all bounding boxes in different scales that are sampled for the given image.

    **Parameters:**

    ``image`` : array_like(2D or 3D)
      The image, for which the bounding boxes should be generated

    **Yields:**

    ``bounding_box`` : :py:class:`BoundingBox`
      An iterator iterating over all bounding boxes for the given ``image``
    """
    for scale, scaled_image_shape in self.scales(image):
      # prepare the feature extractor to extract features from the given image
      for bb in self.sample_scaled(scaled_image_shape):
        # scale the sampled bounding box back to the original image resolution and yield it
        yield bb.scale(1./scale)
def function[sample, parameter[self, image]]: constant[sample(image) -> bounding_box Yields an iterator over all bounding boxes in different scales that are sampled for the given image. **Parameters:** ``image`` : array_like(2D or 3D) The image, for which the bounding boxes should be generated **Yields:** ``bounding_box`` : :py:class:`BoundingBox` An iterator iterating over all bounding boxes for the given ``image`` ] for taget[tuple[[<ast.Name object at 0x7da20c993100>, <ast.Name object at 0x7da20c992680>]]] in starred[call[name[self].scales, parameter[name[image]]]] begin[:] for taget[name[bb]] in starred[call[name[self].sample_scaled, parameter[name[scaled_image_shape]]]] begin[:] <ast.Yield object at 0x7da20c993490>
keyword[def] identifier[sample] ( identifier[self] , identifier[image] ): literal[string] keyword[for] identifier[scale] , identifier[scaled_image_shape] keyword[in] identifier[self] . identifier[scales] ( identifier[image] ): keyword[for] identifier[bb] keyword[in] identifier[self] . identifier[sample_scaled] ( identifier[scaled_image_shape] ): keyword[yield] identifier[bb] . identifier[scale] ( literal[int] / identifier[scale] )
def sample(self, image): """sample(image) -> bounding_box Yields an iterator over all bounding boxes in different scales that are sampled for the given image. **Parameters:** ``image`` : array_like(2D or 3D) The image, for which the bounding boxes should be generated **Yields:** ``bounding_box`` : :py:class:`BoundingBox` An iterator iterating over all bounding boxes for the given ``image`` """ for (scale, scaled_image_shape) in self.scales(image): # prepare the feature extractor to extract features from the given image for bb in self.sample_scaled(scaled_image_shape): # extract features for yield bb.scale(1.0 / scale) # depends on [control=['for'], data=['bb']] # depends on [control=['for'], data=[]]
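The `sample` record illustrates multi-scale sampling: boxes generated on a downscaled grid are divided by the scale to map them back to the original image. A self-contained sliding-window sketch of that idea is below; `BoundingBox` here is a plain namedtuple and the window and step sizes are arbitrary, so this is not the bob.ip.facedetect sampler.

# Multi-scale sliding-window sampling; boxes are mapped back by dividing by scale.
from collections import namedtuple

BoundingBox = namedtuple("BoundingBox", "top left height width")

def sample(image_shape, scales=(1.0, 0.5), window=24, step=12):
    for scale in scales:
        h, w = int(image_shape[0] * scale), int(image_shape[1] * scale)
        for top in range(0, h - window + 1, step):
            for left in range(0, w - window + 1, step):
                # back to original-image coordinates (cf. bb.scale(1./scale) above)
                yield BoundingBox(top / scale, left / scale, window / scale, window / scale)

boxes = list(sample((48, 48)))
print(len(boxes), boxes[0], boxes[-1])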
def wait_until_invisibility_of(self, locator, timeout=None): """ Waits for an element to be invisible @type locator: webdriverwrapper.support.locator.Locator @param locator: the locator or css string to search for the element @type timeout: int @param timeout: the maximum number of seconds the driver will wait before timing out @rtype: webdriverwrapper.WebElementWrapper @return: Returns the element found """ timeout = timeout if timeout is not None else self.timeout def wait(): ''' Wait function passed to executor ''' element = WebDriverWait(self.driver, timeout).until(EC.invisibility_of_element_located( (self.locator_handler.parse_locator(locator).By, self.locator_handler.parse_locator(locator).value))) return WebElementWrapper.WebElementWrapper(self, locator, element) return self.execute_and_handle_webdriver_exceptions( wait, timeout, locator, 'Timeout waiting for element to be invisible')
def function[wait_until_invisibility_of, parameter[self, locator, timeout]]: constant[ Waits for an element to be invisible @type locator: webdriverwrapper.support.locator.Locator @param locator: the locator or css string to search for the element @type timeout: int @param timeout: the maximum number of seconds the driver will wait before timing out @rtype: webdriverwrapper.WebElementWrapper @return: Returns the element found ] variable[timeout] assign[=] <ast.IfExp object at 0x7da1b10d6860> def function[wait, parameter[]]: constant[ Wait function passed to executor ] variable[element] assign[=] call[call[name[WebDriverWait], parameter[name[self].driver, name[timeout]]].until, parameter[call[name[EC].invisibility_of_element_located, parameter[tuple[[<ast.Attribute object at 0x7da1b10d7760>, <ast.Attribute object at 0x7da1b10d4a30>]]]]]] return[call[name[WebElementWrapper].WebElementWrapper, parameter[name[self], name[locator], name[element]]]] return[call[name[self].execute_and_handle_webdriver_exceptions, parameter[name[wait], name[timeout], name[locator], constant[Timeout waiting for element to be invisible]]]]
keyword[def] identifier[wait_until_invisibility_of] ( identifier[self] , identifier[locator] , identifier[timeout] = keyword[None] ): literal[string] identifier[timeout] = identifier[timeout] keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[timeout] keyword[def] identifier[wait] (): literal[string] identifier[element] = identifier[WebDriverWait] ( identifier[self] . identifier[driver] , identifier[timeout] ). identifier[until] ( identifier[EC] . identifier[invisibility_of_element_located] ( ( identifier[self] . identifier[locator_handler] . identifier[parse_locator] ( identifier[locator] ). identifier[By] , identifier[self] . identifier[locator_handler] . identifier[parse_locator] ( identifier[locator] ). identifier[value] ))) keyword[return] identifier[WebElementWrapper] . identifier[WebElementWrapper] ( identifier[self] , identifier[locator] , identifier[element] ) keyword[return] identifier[self] . identifier[execute_and_handle_webdriver_exceptions] ( identifier[wait] , identifier[timeout] , identifier[locator] , literal[string] )
def wait_until_invisibility_of(self, locator, timeout=None): """ Waits for an element to be invisible @type locator: webdriverwrapper.support.locator.Locator @param locator: the locator or css string to search for the element @type timeout: int @param timeout: the maximum number of seconds the driver will wait before timing out @rtype: webdriverwrapper.WebElementWrapper @return: Returns the element found """ timeout = timeout if timeout is not None else self.timeout def wait(): """ Wait function passed to executor """ element = WebDriverWait(self.driver, timeout).until(EC.invisibility_of_element_located((self.locator_handler.parse_locator(locator).By, self.locator_handler.parse_locator(locator).value))) return WebElementWrapper.WebElementWrapper(self, locator, element) return self.execute_and_handle_webdriver_exceptions(wait, timeout, locator, 'Timeout waiting for element to be invisible')
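A hedged usage sketch for the invisibility wait above; `browser` and the CSS selector are assumptions standing in for a wrapper instance and a real locator.

# Hypothetical usage: `browser` is assumed to be an instance of the wrapper
# class that defines wait_until_invisibility_of(); the selector is illustrative.
spinner = browser.wait_until_invisibility_of('#loading-spinner', timeout=10)
# On success a WebElementWrapper is returned; on timeout the wrapper raises via
# execute_and_handle_webdriver_exceptions() with the message shown above.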
def default_mean_field_normal_fn( is_singular=False, loc_initializer=tf.compat.v1.initializers.random_normal(stddev=0.1), untransformed_scale_initializer=tf.compat.v1.initializers.random_normal( mean=-3., stddev=0.1), loc_regularizer=None, untransformed_scale_regularizer=None, loc_constraint=None, untransformed_scale_constraint=None): """Creates a function to build Normal distributions with trainable params. This function produces a closure which produces `tfd.Normal` parameterized by a loc` and `scale` each created using `tf.get_variable`. Args: is_singular: Python `bool` if `True`, forces the special case limit of `scale->0`, i.e., a `Deterministic` distribution. loc_initializer: Initializer function for the `loc` parameters. The default is `tf.random_normal_initializer(mean=0., stddev=0.1)`. untransformed_scale_initializer: Initializer function for the `scale` parameters. Default value: `tf.random_normal_initializer(mean=-3., stddev=0.1)`. This implies the softplus transformed result is initialized near `0`. It allows a `Normal` distribution with `scale` parameter set to this value to approximately act like a point mass. loc_regularizer: Regularizer function for the `loc` parameters. untransformed_scale_regularizer: Regularizer function for the `scale` parameters. loc_constraint: An optional projection function to be applied to the loc after being updated by an `Optimizer`. The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. untransformed_scale_constraint: An optional projection function to be applied to the `scale` parameters after being updated by an `Optimizer` (e.g. used to implement norm constraints or value constraints). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. Returns: make_normal_fn: Python `callable` which creates a `tfd.Normal` using from args: `dtype, shape, name, trainable, add_variable_fn`. """ loc_scale_fn = default_loc_scale_fn( is_singular=is_singular, loc_initializer=loc_initializer, untransformed_scale_initializer=untransformed_scale_initializer, loc_regularizer=loc_regularizer, untransformed_scale_regularizer=untransformed_scale_regularizer, loc_constraint=loc_constraint, untransformed_scale_constraint=untransformed_scale_constraint) def _fn(dtype, shape, name, trainable, add_variable_fn): """Creates multivariate `Deterministic` or `Normal` distribution. Args: dtype: Type of parameter's event. shape: Python `list`-like representing the parameter's event shape. name: Python `str` name prepended to any created (or existing) `tf.Variable`s. trainable: Python `bool` indicating all created `tf.Variable`s should be added to the graph collection `GraphKeys.TRAINABLE_VARIABLES`. add_variable_fn: `tf.get_variable`-like `callable` used to create (or access existing) `tf.Variable`s. Returns: Multivariate `Deterministic` or `Normal` distribution. """ loc, scale = loc_scale_fn(dtype, shape, name, trainable, add_variable_fn) if scale is None: dist = tfd.Deterministic(loc=loc) else: dist = tfd.Normal(loc=loc, scale=scale) batch_ndims = tf.size(input=dist.batch_shape_tensor()) return tfd.Independent(dist, reinterpreted_batch_ndims=batch_ndims) return _fn
def function[default_mean_field_normal_fn, parameter[is_singular, loc_initializer, untransformed_scale_initializer, loc_regularizer, untransformed_scale_regularizer, loc_constraint, untransformed_scale_constraint]]: constant[Creates a function to build Normal distributions with trainable params. This function produces a closure which produces `tfd.Normal` parameterized by a loc` and `scale` each created using `tf.get_variable`. Args: is_singular: Python `bool` if `True`, forces the special case limit of `scale->0`, i.e., a `Deterministic` distribution. loc_initializer: Initializer function for the `loc` parameters. The default is `tf.random_normal_initializer(mean=0., stddev=0.1)`. untransformed_scale_initializer: Initializer function for the `scale` parameters. Default value: `tf.random_normal_initializer(mean=-3., stddev=0.1)`. This implies the softplus transformed result is initialized near `0`. It allows a `Normal` distribution with `scale` parameter set to this value to approximately act like a point mass. loc_regularizer: Regularizer function for the `loc` parameters. untransformed_scale_regularizer: Regularizer function for the `scale` parameters. loc_constraint: An optional projection function to be applied to the loc after being updated by an `Optimizer`. The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. untransformed_scale_constraint: An optional projection function to be applied to the `scale` parameters after being updated by an `Optimizer` (e.g. used to implement norm constraints or value constraints). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. Returns: make_normal_fn: Python `callable` which creates a `tfd.Normal` using from args: `dtype, shape, name, trainable, add_variable_fn`. ] variable[loc_scale_fn] assign[=] call[name[default_loc_scale_fn], parameter[]] def function[_fn, parameter[dtype, shape, name, trainable, add_variable_fn]]: constant[Creates multivariate `Deterministic` or `Normal` distribution. Args: dtype: Type of parameter's event. shape: Python `list`-like representing the parameter's event shape. name: Python `str` name prepended to any created (or existing) `tf.Variable`s. trainable: Python `bool` indicating all created `tf.Variable`s should be added to the graph collection `GraphKeys.TRAINABLE_VARIABLES`. add_variable_fn: `tf.get_variable`-like `callable` used to create (or access existing) `tf.Variable`s. Returns: Multivariate `Deterministic` or `Normal` distribution. ] <ast.Tuple object at 0x7da1b0229c00> assign[=] call[name[loc_scale_fn], parameter[name[dtype], name[shape], name[name], name[trainable], name[add_variable_fn]]] if compare[name[scale] is constant[None]] begin[:] variable[dist] assign[=] call[name[tfd].Deterministic, parameter[]] variable[batch_ndims] assign[=] call[name[tf].size, parameter[]] return[call[name[tfd].Independent, parameter[name[dist]]]] return[name[_fn]]
keyword[def] identifier[default_mean_field_normal_fn] ( identifier[is_singular] = keyword[False] , identifier[loc_initializer] = identifier[tf] . identifier[compat] . identifier[v1] . identifier[initializers] . identifier[random_normal] ( identifier[stddev] = literal[int] ), identifier[untransformed_scale_initializer] = identifier[tf] . identifier[compat] . identifier[v1] . identifier[initializers] . identifier[random_normal] ( identifier[mean] =- literal[int] , identifier[stddev] = literal[int] ), identifier[loc_regularizer] = keyword[None] , identifier[untransformed_scale_regularizer] = keyword[None] , identifier[loc_constraint] = keyword[None] , identifier[untransformed_scale_constraint] = keyword[None] ): literal[string] identifier[loc_scale_fn] = identifier[default_loc_scale_fn] ( identifier[is_singular] = identifier[is_singular] , identifier[loc_initializer] = identifier[loc_initializer] , identifier[untransformed_scale_initializer] = identifier[untransformed_scale_initializer] , identifier[loc_regularizer] = identifier[loc_regularizer] , identifier[untransformed_scale_regularizer] = identifier[untransformed_scale_regularizer] , identifier[loc_constraint] = identifier[loc_constraint] , identifier[untransformed_scale_constraint] = identifier[untransformed_scale_constraint] ) keyword[def] identifier[_fn] ( identifier[dtype] , identifier[shape] , identifier[name] , identifier[trainable] , identifier[add_variable_fn] ): literal[string] identifier[loc] , identifier[scale] = identifier[loc_scale_fn] ( identifier[dtype] , identifier[shape] , identifier[name] , identifier[trainable] , identifier[add_variable_fn] ) keyword[if] identifier[scale] keyword[is] keyword[None] : identifier[dist] = identifier[tfd] . identifier[Deterministic] ( identifier[loc] = identifier[loc] ) keyword[else] : identifier[dist] = identifier[tfd] . identifier[Normal] ( identifier[loc] = identifier[loc] , identifier[scale] = identifier[scale] ) identifier[batch_ndims] = identifier[tf] . identifier[size] ( identifier[input] = identifier[dist] . identifier[batch_shape_tensor] ()) keyword[return] identifier[tfd] . identifier[Independent] ( identifier[dist] , identifier[reinterpreted_batch_ndims] = identifier[batch_ndims] ) keyword[return] identifier[_fn]
def default_mean_field_normal_fn(is_singular=False, loc_initializer=tf.compat.v1.initializers.random_normal(stddev=0.1), untransformed_scale_initializer=tf.compat.v1.initializers.random_normal(mean=-3.0, stddev=0.1), loc_regularizer=None, untransformed_scale_regularizer=None, loc_constraint=None, untransformed_scale_constraint=None): """Creates a function to build Normal distributions with trainable params. This function produces a closure which produces `tfd.Normal` parameterized by a loc` and `scale` each created using `tf.get_variable`. Args: is_singular: Python `bool` if `True`, forces the special case limit of `scale->0`, i.e., a `Deterministic` distribution. loc_initializer: Initializer function for the `loc` parameters. The default is `tf.random_normal_initializer(mean=0., stddev=0.1)`. untransformed_scale_initializer: Initializer function for the `scale` parameters. Default value: `tf.random_normal_initializer(mean=-3., stddev=0.1)`. This implies the softplus transformed result is initialized near `0`. It allows a `Normal` distribution with `scale` parameter set to this value to approximately act like a point mass. loc_regularizer: Regularizer function for the `loc` parameters. untransformed_scale_regularizer: Regularizer function for the `scale` parameters. loc_constraint: An optional projection function to be applied to the loc after being updated by an `Optimizer`. The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. untransformed_scale_constraint: An optional projection function to be applied to the `scale` parameters after being updated by an `Optimizer` (e.g. used to implement norm constraints or value constraints). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. Returns: make_normal_fn: Python `callable` which creates a `tfd.Normal` using from args: `dtype, shape, name, trainable, add_variable_fn`. """ loc_scale_fn = default_loc_scale_fn(is_singular=is_singular, loc_initializer=loc_initializer, untransformed_scale_initializer=untransformed_scale_initializer, loc_regularizer=loc_regularizer, untransformed_scale_regularizer=untransformed_scale_regularizer, loc_constraint=loc_constraint, untransformed_scale_constraint=untransformed_scale_constraint) def _fn(dtype, shape, name, trainable, add_variable_fn): """Creates multivariate `Deterministic` or `Normal` distribution. Args: dtype: Type of parameter's event. shape: Python `list`-like representing the parameter's event shape. name: Python `str` name prepended to any created (or existing) `tf.Variable`s. trainable: Python `bool` indicating all created `tf.Variable`s should be added to the graph collection `GraphKeys.TRAINABLE_VARIABLES`. add_variable_fn: `tf.get_variable`-like `callable` used to create (or access existing) `tf.Variable`s. Returns: Multivariate `Deterministic` or `Normal` distribution. """ (loc, scale) = loc_scale_fn(dtype, shape, name, trainable, add_variable_fn) if scale is None: dist = tfd.Deterministic(loc=loc) # depends on [control=['if'], data=[]] else: dist = tfd.Normal(loc=loc, scale=scale) batch_ndims = tf.size(input=dist.batch_shape_tensor()) return tfd.Independent(dist, reinterpreted_batch_ndims=batch_ndims) return _fn
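A sketch of how a posterior factory like the one above is typically consumed. Wiring it into tfp.layers.DenseFlipout via kernel_posterior_fn is an assumption about the surrounding library, not something stated in this function.

import tensorflow as tf
import tensorflow_probability as tfp

# Sketch only: build the factory with a custom scale initializer and hand it to
# a variational layer. The DenseFlipout layer and its kernel_posterior_fn
# argument are assumptions about how the returned closure is consumed.
posterior_fn = default_mean_field_normal_fn(
    untransformed_scale_initializer=tf.compat.v1.initializers.random_normal(
        mean=-3., stddev=0.1))
layer = tfp.layers.DenseFlipout(10, kernel_posterior_fn=posterior_fn)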
def device_is_attached_to_network(device, network_name):
        """
        Checks if the device has a backing with the right network name
        :param <vim.vm.Device> device: instance of adapter
        :param <str> network_name: network name
        :return: True if the device is attached to the given network, False otherwise
        """
        try:
            backing = device.backing
        except:
            return False

        if hasattr(backing, 'network') and hasattr(backing.network, 'name'):
            return network_name == backing.network.name
        elif hasattr(backing, 'port') and hasattr(backing.port, 'portgroupKey'):
            return network_name == backing.port.portgroupKey

        return False
def function[device_is_attached_to_network, parameter[device, network_name]]: constant[ Checks if the device has a backing with of the right network name :param <vim.vm.Device> device: instance of adapter :param <str> network_name: network name :return: ] <ast.Try object at 0x7da207f99ba0> if <ast.BoolOp object at 0x7da18eb55c60> begin[:] return[compare[name[network_name] equal[==] name[backing].network.name]] return[constant[False]]
keyword[def] identifier[device_is_attached_to_network] ( identifier[device] , identifier[network_name] ): literal[string] keyword[try] : identifier[backing] = identifier[device] . identifier[backing] keyword[except] : keyword[return] keyword[False] keyword[if] identifier[hasattr] ( identifier[backing] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[backing] . identifier[network] , literal[string] ): keyword[return] identifier[network_name] == identifier[backing] . identifier[network] . identifier[name] keyword[elif] identifier[hasattr] ( identifier[backing] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[backing] . identifier[port] , literal[string] ): keyword[return] identifier[network_name] == identifier[backing] . identifier[port] . identifier[portgroupKey] keyword[return] keyword[False]
def device_is_attached_to_network(device, network_name): """ Checks if the device has a backing with of the right network name :param <vim.vm.Device> device: instance of adapter :param <str> network_name: network name :return: """ try: backing = device.backing # depends on [control=['try'], data=[]] except: return False # depends on [control=['except'], data=[]] if hasattr(backing, 'network') and hasattr(backing.network, 'name'): return network_name == backing.network.name # depends on [control=['if'], data=[]] elif hasattr(backing, 'port') and hasattr(backing.port, 'portgroupKey'): return network_name == backing.port.portgroupKey # depends on [control=['if'], data=[]] return False
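A hedged pyVmomi usage sketch for the check above; the `vm` object and the network name are assumptions, only device_is_attached_to_network() comes from this row.

from pyVmomi import vim

# Hypothetical usage: `vm` is assumed to be a pyVmomi VirtualMachine obtained
# elsewhere (e.g. from a SmartConnect session and a container view).
nics = [dev for dev in vm.config.hardware.device
        if isinstance(dev, vim.vm.device.VirtualEthernetCard)]
attached = [dev for dev in nics
            if device_is_attached_to_network(dev, 'VM Network')]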
def parse_options(): """ Parses command-line options. """ try: opts, args = getopt.getopt(sys.argv[1:], 'hms:v', ['help', 'model', 'solver=', 'verbose']) except getopt.GetoptError as err: sys.stderr.write(str(err).capitalize()) print_usage() sys.exit(1) solver = 'g4' verbose = 1 print_model = False for opt, arg in opts: if opt in ('-h', '--help'): print_usage() sys.exit(0) elif opt in ('-m', '--model'): print_model = True elif opt in ('-s', '--solver'): solver = str(arg) elif opt in ('-v', '--verbose'): verbose += 1 else: assert False, 'Unhandled option: {0} {1}'.format(opt, arg) return print_model, solver, verbose, args
def function[parse_options, parameter[]]: constant[ Parses command-line options. ] <ast.Try object at 0x7da1b119fe50> variable[solver] assign[=] constant[g4] variable[verbose] assign[=] constant[1] variable[print_model] assign[=] constant[False] for taget[tuple[[<ast.Name object at 0x7da1b11d4d90>, <ast.Name object at 0x7da1b11d42e0>]]] in starred[name[opts]] begin[:] if compare[name[opt] in tuple[[<ast.Constant object at 0x7da1b11d4580>, <ast.Constant object at 0x7da1b11d7580>]]] begin[:] call[name[print_usage], parameter[]] call[name[sys].exit, parameter[constant[0]]] return[tuple[[<ast.Name object at 0x7da1b124c9a0>, <ast.Name object at 0x7da1b124ca00>, <ast.Name object at 0x7da1b1152410>, <ast.Name object at 0x7da1b1152f50>]]]
keyword[def] identifier[parse_options] (): literal[string] keyword[try] : identifier[opts] , identifier[args] = identifier[getopt] . identifier[getopt] ( identifier[sys] . identifier[argv] [ literal[int] :], literal[string] ,[ literal[string] , literal[string] , literal[string] , literal[string] ]) keyword[except] identifier[getopt] . identifier[GetoptError] keyword[as] identifier[err] : identifier[sys] . identifier[stderr] . identifier[write] ( identifier[str] ( identifier[err] ). identifier[capitalize] ()) identifier[print_usage] () identifier[sys] . identifier[exit] ( literal[int] ) identifier[solver] = literal[string] identifier[verbose] = literal[int] identifier[print_model] = keyword[False] keyword[for] identifier[opt] , identifier[arg] keyword[in] identifier[opts] : keyword[if] identifier[opt] keyword[in] ( literal[string] , literal[string] ): identifier[print_usage] () identifier[sys] . identifier[exit] ( literal[int] ) keyword[elif] identifier[opt] keyword[in] ( literal[string] , literal[string] ): identifier[print_model] = keyword[True] keyword[elif] identifier[opt] keyword[in] ( literal[string] , literal[string] ): identifier[solver] = identifier[str] ( identifier[arg] ) keyword[elif] identifier[opt] keyword[in] ( literal[string] , literal[string] ): identifier[verbose] += literal[int] keyword[else] : keyword[assert] keyword[False] , literal[string] . identifier[format] ( identifier[opt] , identifier[arg] ) keyword[return] identifier[print_model] , identifier[solver] , identifier[verbose] , identifier[args]
def parse_options(): """ Parses command-line options. """ try: (opts, args) = getopt.getopt(sys.argv[1:], 'hms:v', ['help', 'model', 'solver=', 'verbose']) # depends on [control=['try'], data=[]] except getopt.GetoptError as err: sys.stderr.write(str(err).capitalize()) print_usage() sys.exit(1) # depends on [control=['except'], data=['err']] solver = 'g4' verbose = 1 print_model = False for (opt, arg) in opts: if opt in ('-h', '--help'): print_usage() sys.exit(0) # depends on [control=['if'], data=[]] elif opt in ('-m', '--model'): print_model = True # depends on [control=['if'], data=[]] elif opt in ('-s', '--solver'): solver = str(arg) # depends on [control=['if'], data=[]] elif opt in ('-v', '--verbose'): verbose += 1 # depends on [control=['if'], data=[]] else: assert False, 'Unhandled option: {0} {1}'.format(opt, arg) # depends on [control=['for'], data=[]] return (print_model, solver, verbose, args)
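A sketch of a typical entry point consuming the tuple returned above; the per-file handler is hypothetical and only the unpacking order comes from the function.

# Hypothetical entry point; process_file() is a stand-in for whatever the
# script does with the remaining positional arguments.
if __name__ == '__main__':
    print_model, solver_name, verbose, files = parse_options()
    if verbose > 1:
        print('using solver:', solver_name)
    for path in files:
        process_file(path, solver_name, print_model)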
def make_serviceitem_name(name, condition='is', negate=False, preserve_case=False):
    """
    Create a node for ServiceItem/name
    :return: An IndicatorItem represented as an Element node
    """
    document = 'ServiceItem'
    search = 'ServiceItem/name'
    content_type = 'string'
    content = name

    ii_node = ioc_api.make_indicatoritem_node(condition, document, search, content_type, content,
                                              negate=negate, preserve_case=preserve_case)
    return ii_node
def function[make_serviceitem_name, parameter[name, condition, negate, preserve_case]]: constant[ Create a node for ServiceItem/name :return: A IndicatorItem represented as an Element node ] variable[document] assign[=] constant[ServiceItem] variable[search] assign[=] constant[ServiceItem/name] variable[content_type] assign[=] constant[string] variable[content] assign[=] name[name] variable[ii_node] assign[=] call[name[ioc_api].make_indicatoritem_node, parameter[name[condition], name[document], name[search], name[content_type], name[content]]] return[name[ii_node]]
keyword[def] identifier[make_serviceitem_name] ( identifier[name] , identifier[condition] = literal[string] , identifier[negate] = keyword[False] , identifier[preserve_case] = keyword[False] ): literal[string] identifier[document] = literal[string] identifier[search] = literal[string] identifier[content_type] = literal[string] identifier[content] = identifier[name] identifier[ii_node] = identifier[ioc_api] . identifier[make_indicatoritem_node] ( identifier[condition] , identifier[document] , identifier[search] , identifier[content_type] , identifier[content] , identifier[negate] = identifier[negate] , identifier[preserve_case] = identifier[preserve_case] ) keyword[return] identifier[ii_node]
def make_serviceitem_name(name, condition='is', negate=False, preserve_case=False): """ Create a node for ServiceItem/name :return: A IndicatorItem represented as an Element node """ document = 'ServiceItem' search = 'ServiceItem/name' content_type = 'string' content = name ii_node = ioc_api.make_indicatoritem_node(condition, document, search, content_type, content, negate=negate, preserve_case=preserve_case) return ii_node
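A hedged sketch of building and attaching the node above; the `ioc_doc` object and its top_level_indicator attribute are assumptions about the surrounding ioc_api-style document object.

# Hypothetical usage: match a Windows service named 'EvilSvc'. How the node is
# attached to an IOC document (ioc_doc, top_level_indicator) is an assumption.
node = make_serviceitem_name('EvilSvc', condition='is', negate=False)
ioc_doc.top_level_indicator.append(node)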
def login(): """ This route has two purposes. First, it is used by the user to login. Second, it is used by the CAS to respond with the `ticket` after the user logs in successfully. When the user accesses this url, they are redirected to the CAS to login. If the login was successful, the CAS will respond to this route with the ticket in the url. The ticket is then validated. If validation was successful the logged in username is saved in the user's session under the key `CAS_USERNAME_SESSION_KEY` and the user's attributes are saved under the key 'CAS_USERNAME_ATTRIBUTE_KEY' """ cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY'] redirect_url = create_cas_login_url( current_app.config['CAS_SERVER'], current_app.config['CAS_LOGIN_ROUTE'], flask.url_for('.login', origin=flask.session.get('CAS_AFTER_LOGIN_SESSION_URL'), _external=True)) if 'ticket' in flask.request.args: flask.session[cas_token_session_key] = flask.request.args['ticket'] if cas_token_session_key in flask.session: if validate(flask.session[cas_token_session_key]): if 'CAS_AFTER_LOGIN_SESSION_URL' in flask.session: redirect_url = flask.session.pop('CAS_AFTER_LOGIN_SESSION_URL') elif flask.request.args.get('origin'): redirect_url = flask.request.args['origin'] else: redirect_url = flask.url_for( current_app.config['CAS_AFTER_LOGIN']) else: del flask.session[cas_token_session_key] current_app.logger.debug('Redirecting to: {0}'.format(redirect_url)) return flask.redirect(redirect_url)
def function[login, parameter[]]: constant[ This route has two purposes. First, it is used by the user to login. Second, it is used by the CAS to respond with the `ticket` after the user logs in successfully. When the user accesses this url, they are redirected to the CAS to login. If the login was successful, the CAS will respond to this route with the ticket in the url. The ticket is then validated. If validation was successful the logged in username is saved in the user's session under the key `CAS_USERNAME_SESSION_KEY` and the user's attributes are saved under the key 'CAS_USERNAME_ATTRIBUTE_KEY' ] variable[cas_token_session_key] assign[=] call[name[current_app].config][constant[CAS_TOKEN_SESSION_KEY]] variable[redirect_url] assign[=] call[name[create_cas_login_url], parameter[call[name[current_app].config][constant[CAS_SERVER]], call[name[current_app].config][constant[CAS_LOGIN_ROUTE]], call[name[flask].url_for, parameter[constant[.login]]]]] if compare[constant[ticket] in name[flask].request.args] begin[:] call[name[flask].session][name[cas_token_session_key]] assign[=] call[name[flask].request.args][constant[ticket]] if compare[name[cas_token_session_key] in name[flask].session] begin[:] if call[name[validate], parameter[call[name[flask].session][name[cas_token_session_key]]]] begin[:] if compare[constant[CAS_AFTER_LOGIN_SESSION_URL] in name[flask].session] begin[:] variable[redirect_url] assign[=] call[name[flask].session.pop, parameter[constant[CAS_AFTER_LOGIN_SESSION_URL]]] call[name[current_app].logger.debug, parameter[call[constant[Redirecting to: {0}].format, parameter[name[redirect_url]]]]] return[call[name[flask].redirect, parameter[name[redirect_url]]]]
keyword[def] identifier[login] (): literal[string] identifier[cas_token_session_key] = identifier[current_app] . identifier[config] [ literal[string] ] identifier[redirect_url] = identifier[create_cas_login_url] ( identifier[current_app] . identifier[config] [ literal[string] ], identifier[current_app] . identifier[config] [ literal[string] ], identifier[flask] . identifier[url_for] ( literal[string] , identifier[origin] = identifier[flask] . identifier[session] . identifier[get] ( literal[string] ), identifier[_external] = keyword[True] )) keyword[if] literal[string] keyword[in] identifier[flask] . identifier[request] . identifier[args] : identifier[flask] . identifier[session] [ identifier[cas_token_session_key] ]= identifier[flask] . identifier[request] . identifier[args] [ literal[string] ] keyword[if] identifier[cas_token_session_key] keyword[in] identifier[flask] . identifier[session] : keyword[if] identifier[validate] ( identifier[flask] . identifier[session] [ identifier[cas_token_session_key] ]): keyword[if] literal[string] keyword[in] identifier[flask] . identifier[session] : identifier[redirect_url] = identifier[flask] . identifier[session] . identifier[pop] ( literal[string] ) keyword[elif] identifier[flask] . identifier[request] . identifier[args] . identifier[get] ( literal[string] ): identifier[redirect_url] = identifier[flask] . identifier[request] . identifier[args] [ literal[string] ] keyword[else] : identifier[redirect_url] = identifier[flask] . identifier[url_for] ( identifier[current_app] . identifier[config] [ literal[string] ]) keyword[else] : keyword[del] identifier[flask] . identifier[session] [ identifier[cas_token_session_key] ] identifier[current_app] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[redirect_url] )) keyword[return] identifier[flask] . identifier[redirect] ( identifier[redirect_url] )
def login(): """ This route has two purposes. First, it is used by the user to login. Second, it is used by the CAS to respond with the `ticket` after the user logs in successfully. When the user accesses this url, they are redirected to the CAS to login. If the login was successful, the CAS will respond to this route with the ticket in the url. The ticket is then validated. If validation was successful the logged in username is saved in the user's session under the key `CAS_USERNAME_SESSION_KEY` and the user's attributes are saved under the key 'CAS_USERNAME_ATTRIBUTE_KEY' """ cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY'] redirect_url = create_cas_login_url(current_app.config['CAS_SERVER'], current_app.config['CAS_LOGIN_ROUTE'], flask.url_for('.login', origin=flask.session.get('CAS_AFTER_LOGIN_SESSION_URL'), _external=True)) if 'ticket' in flask.request.args: flask.session[cas_token_session_key] = flask.request.args['ticket'] # depends on [control=['if'], data=[]] if cas_token_session_key in flask.session: if validate(flask.session[cas_token_session_key]): if 'CAS_AFTER_LOGIN_SESSION_URL' in flask.session: redirect_url = flask.session.pop('CAS_AFTER_LOGIN_SESSION_URL') # depends on [control=['if'], data=[]] elif flask.request.args.get('origin'): redirect_url = flask.request.args['origin'] # depends on [control=['if'], data=[]] else: redirect_url = flask.url_for(current_app.config['CAS_AFTER_LOGIN']) # depends on [control=['if'], data=[]] else: del flask.session[cas_token_session_key] # depends on [control=['if'], data=['cas_token_session_key']] current_app.logger.debug('Redirecting to: {0}'.format(redirect_url)) return flask.redirect(redirect_url)
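A sketch of the Flask configuration this route reads. The config keys are the ones used verbatim in the function body; the blueprint object and its registration call are assumptions.

# The keys below are read directly by the login() route above; the blueprint
# object and its registration are assumptions about the surrounding extension.
app.config['CAS_SERVER'] = 'https://cas.example.com'
app.config['CAS_LOGIN_ROUTE'] = '/cas/login'
app.config['CAS_TOKEN_SESSION_KEY'] = '_CAS_TOKEN'
app.config['CAS_AFTER_LOGIN'] = 'index'
app.register_blueprint(cas_blueprint)   # hypothetical blueprint exposing '.login'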
def stop_capture(self, port_number): """ Stops a packet capture. :param port_number: port number """ if not self._ethernet_adapter.port_exists(port_number): raise VPCSError("Port {port_number} doesn't exist in adapter {adapter}".format(adapter=self._ethernet_adapter, port_number=port_number)) nio = self._ethernet_adapter.get_nio(0) if not nio: raise VPCSError("Port {} is not connected".format(port_number)) nio.stopPacketCapture() if self.ubridge: yield from self._ubridge_send('bridge stop_capture {name}'.format(name="VPCS-{}".format(self._id))) log.info("VPCS '{name}' [{id}]: stopping packet capture on port {port_number}".format(name=self.name, id=self.id, port_number=port_number))
def function[stop_capture, parameter[self, port_number]]: constant[ Stops a packet capture. :param port_number: port number ] if <ast.UnaryOp object at 0x7da2044c1240> begin[:] <ast.Raise object at 0x7da20e955570> variable[nio] assign[=] call[name[self]._ethernet_adapter.get_nio, parameter[constant[0]]] if <ast.UnaryOp object at 0x7da20cabda50> begin[:] <ast.Raise object at 0x7da20cabe9b0> call[name[nio].stopPacketCapture, parameter[]] if name[self].ubridge begin[:] <ast.YieldFrom object at 0x7da20cabf370> call[name[log].info, parameter[call[constant[VPCS '{name}' [{id}]: stopping packet capture on port {port_number}].format, parameter[]]]]
keyword[def] identifier[stop_capture] ( identifier[self] , identifier[port_number] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_ethernet_adapter] . identifier[port_exists] ( identifier[port_number] ): keyword[raise] identifier[VPCSError] ( literal[string] . identifier[format] ( identifier[adapter] = identifier[self] . identifier[_ethernet_adapter] , identifier[port_number] = identifier[port_number] )) identifier[nio] = identifier[self] . identifier[_ethernet_adapter] . identifier[get_nio] ( literal[int] ) keyword[if] keyword[not] identifier[nio] : keyword[raise] identifier[VPCSError] ( literal[string] . identifier[format] ( identifier[port_number] )) identifier[nio] . identifier[stopPacketCapture] () keyword[if] identifier[self] . identifier[ubridge] : keyword[yield] keyword[from] identifier[self] . identifier[_ubridge_send] ( literal[string] . identifier[format] ( identifier[name] = literal[string] . identifier[format] ( identifier[self] . identifier[_id] ))) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[name] , identifier[id] = identifier[self] . identifier[id] , identifier[port_number] = identifier[port_number] ))
def stop_capture(self, port_number): """ Stops a packet capture. :param port_number: port number """ if not self._ethernet_adapter.port_exists(port_number): raise VPCSError("Port {port_number} doesn't exist in adapter {adapter}".format(adapter=self._ethernet_adapter, port_number=port_number)) # depends on [control=['if'], data=[]] nio = self._ethernet_adapter.get_nio(0) if not nio: raise VPCSError('Port {} is not connected'.format(port_number)) # depends on [control=['if'], data=[]] nio.stopPacketCapture() if self.ubridge: yield from self._ubridge_send('bridge stop_capture {name}'.format(name='VPCS-{}'.format(self._id))) # depends on [control=['if'], data=[]] log.info("VPCS '{name}' [{id}]: stopping packet capture on port {port_number}".format(name=self.name, id=self.id, port_number=port_number))
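Because stop_capture() above uses `yield from`, it has to be driven from another coroutine; a minimal hedged caller is sketched below, with the VM instance assumed to exist.

import asyncio

# Hypothetical caller: `vm` is assumed to be an instance of the VPCS VM class
# that defines stop_capture(); port 0 matches the single-adapter layout above.
@asyncio.coroutine
def teardown(vm, port_number=0):
    yield from vm.stop_capture(port_number)

# driven from an event loop elsewhere, e.g.:
# asyncio.get_event_loop().run_until_complete(teardown(vm))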
def update(self, pbar): 'Updates the widget to show the ETA or total time when finished.' if pbar.currval == 0: return 'ETA: --:--:--' elif pbar.finished: return 'Time: %s' % self.format_time(pbar.seconds_elapsed) else: return 'ETA: %s' % self.format_time(self._eta(pbar))
def function[update, parameter[self, pbar]]: constant[Updates the widget to show the ETA or total time when finished.] if compare[name[pbar].currval equal[==] constant[0]] begin[:] return[constant[ETA: --:--:--]]
keyword[def] identifier[update] ( identifier[self] , identifier[pbar] ): literal[string] keyword[if] identifier[pbar] . identifier[currval] == literal[int] : keyword[return] literal[string] keyword[elif] identifier[pbar] . identifier[finished] : keyword[return] literal[string] % identifier[self] . identifier[format_time] ( identifier[pbar] . identifier[seconds_elapsed] ) keyword[else] : keyword[return] literal[string] % identifier[self] . identifier[format_time] ( identifier[self] . identifier[_eta] ( identifier[pbar] ))
def update(self, pbar): """Updates the widget to show the ETA or total time when finished.""" if pbar.currval == 0: return 'ETA: --:--:--' # depends on [control=['if'], data=[]] elif pbar.finished: return 'Time: %s' % self.format_time(pbar.seconds_elapsed) # depends on [control=['if'], data=[]] else: return 'ETA: %s' % self.format_time(self._eta(pbar))
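A hedged sketch of the widget above inside a progressbar-style bar; the ProgressBar/Percentage/Bar/ETA names follow the classic progressbar package and are an assumption about where this widget lives.

from progressbar import ProgressBar, Percentage, Bar, ETA   # assumed package layout

# The bar calls each widget's update(pbar) on every render; the ETA widget
# above switches from 'ETA: --:--:--' to the total elapsed time once finished.
pbar = ProgressBar(widgets=[Percentage(), ' ', Bar(), ' ', ETA()], maxval=100).start()
for i in range(100):
    pbar.update(i + 1)
pbar.finish()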