code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def get_resource(url): """ Issue a GET request to SWS with the given url and return a response in json format. :returns: http response with content in json """ response = DAO.getURL(url, {'Accept': 'application/json', 'Connection': 'keep-alive'}) if response.status != 200: raise DataFailureException(url, response.status, response.data) return json.loads(response.data)
def function[get_resource, parameter[url]]: constant[ Issue a GET request to SWS with the given url and return a response in json format. :returns: http response with content in json ] variable[response] assign[=] call[name[DAO].getURL, parameter[name[url], dictionary[[<ast.Constant object at 0x7da1b2368070>, <ast.Constant object at 0x7da1b236b6a0>], [<ast.Constant object at 0x7da1b229be20>, <ast.Constant object at 0x7da1b2298100>]]]] if compare[name[response].status not_equal[!=] constant[200]] begin[:] <ast.Raise object at 0x7da1b229b850> return[call[name[json].loads, parameter[name[response].data]]]
keyword[def] identifier[get_resource] ( identifier[url] ): literal[string] identifier[response] = identifier[DAO] . identifier[getURL] ( identifier[url] ,{ literal[string] : literal[string] , literal[string] : literal[string] }) keyword[if] identifier[response] . identifier[status] != literal[int] : keyword[raise] identifier[DataFailureException] ( identifier[url] , identifier[response] . identifier[status] , identifier[response] . identifier[data] ) keyword[return] identifier[json] . identifier[loads] ( identifier[response] . identifier[data] )
def get_resource(url): """ Issue a GET request to SWS with the given url and return a response in json format. :returns: http response with content in json """ response = DAO.getURL(url, {'Accept': 'application/json', 'Connection': 'keep-alive'}) if response.status != 200: raise DataFailureException(url, response.status, response.data) # depends on [control=['if'], data=[]] return json.loads(response.data)
def p_expression(self, p): """expression : jsonpath | jsonpath FILTER_OP ID | jsonpath FILTER_OP FLOAT | jsonpath FILTER_OP NUMBER | jsonpath FILTER_OP BOOL """ if len(p) == 2: left, op, right = p[1], None, None else: __, left, op, right = p p[0] = _filter.Expression(left, op, right)
def function[p_expression, parameter[self, p]]: constant[expression : jsonpath | jsonpath FILTER_OP ID | jsonpath FILTER_OP FLOAT | jsonpath FILTER_OP NUMBER | jsonpath FILTER_OP BOOL ] if compare[call[name[len], parameter[name[p]]] equal[==] constant[2]] begin[:] <ast.Tuple object at 0x7da2041d99c0> assign[=] tuple[[<ast.Subscript object at 0x7da2041d9c60>, <ast.Constant object at 0x7da18f811a50>, <ast.Constant object at 0x7da18f810400>]] call[name[p]][constant[0]] assign[=] call[name[_filter].Expression, parameter[name[left], name[op], name[right]]]
keyword[def] identifier[p_expression] ( identifier[self] , identifier[p] ): literal[string] keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[left] , identifier[op] , identifier[right] = identifier[p] [ literal[int] ], keyword[None] , keyword[None] keyword[else] : identifier[__] , identifier[left] , identifier[op] , identifier[right] = identifier[p] identifier[p] [ literal[int] ]= identifier[_filter] . identifier[Expression] ( identifier[left] , identifier[op] , identifier[right] )
def p_expression(self, p): """expression : jsonpath | jsonpath FILTER_OP ID | jsonpath FILTER_OP FLOAT | jsonpath FILTER_OP NUMBER | jsonpath FILTER_OP BOOL """ if len(p) == 2: (left, op, right) = (p[1], None, None) # depends on [control=['if'], data=[]] else: (__, left, op, right) = p p[0] = _filter.Expression(left, op, right)
def calculate_energy(self, energies): """ Calculates the energy of the reaction. Args: energies ({Composition: float}): Energy for each composition. E.g ., {comp1: energy1, comp2: energy2}. Returns: reaction energy as a float. """ return sum([amt * energies[c] for amt, c in zip(self._coeffs, self._all_comp)])
def function[calculate_energy, parameter[self, energies]]: constant[ Calculates the energy of the reaction. Args: energies ({Composition: float}): Energy for each composition. E.g ., {comp1: energy1, comp2: energy2}. Returns: reaction energy as a float. ] return[call[name[sum], parameter[<ast.ListComp object at 0x7da20c990040>]]]
keyword[def] identifier[calculate_energy] ( identifier[self] , identifier[energies] ): literal[string] keyword[return] identifier[sum] ([ identifier[amt] * identifier[energies] [ identifier[c] ] keyword[for] identifier[amt] , identifier[c] keyword[in] identifier[zip] ( identifier[self] . identifier[_coeffs] , identifier[self] . identifier[_all_comp] )])
def calculate_energy(self, energies): """ Calculates the energy of the reaction. Args: energies ({Composition: float}): Energy for each composition. E.g ., {comp1: energy1, comp2: energy2}. Returns: reaction energy as a float. """ return sum([amt * energies[c] for (amt, c) in zip(self._coeffs, self._all_comp)])
def set_target_temperature_by_id(self, zone_id, target_temperature): """ Set the target temperature for a zone by id """ if not self._do_auth(): raise RuntimeError("Unable to login") data = { "ZoneId": zone_id, "TargetTemperature": target_temperature } headers = { "Accept": "application/json", "Content-Type": "application/json", 'Authorization': 'Bearer ' + self.login_data['token']['accessToken'] } url = self.api_base_url + "Home/ZoneTargetTemperature" response = requests.post(url, data=json.dumps( data), headers=headers, timeout=10) if response.status_code != 200: return False zone_change_data = response.json() return zone_change_data.get("isSuccess", False)
def function[set_target_temperature_by_id, parameter[self, zone_id, target_temperature]]: constant[ Set the target temperature for a zone by id ] if <ast.UnaryOp object at 0x7da18f810c10> begin[:] <ast.Raise object at 0x7da18f8126e0> variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18f810610>, <ast.Constant object at 0x7da18f812950>], [<ast.Name object at 0x7da18f812c80>, <ast.Name object at 0x7da18f810790>]] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18f813550>, <ast.Constant object at 0x7da18f813520>, <ast.Constant object at 0x7da18f810e20>], [<ast.Constant object at 0x7da18f8124d0>, <ast.Constant object at 0x7da18f810a60>, <ast.BinOp object at 0x7da18f811fc0>]] variable[url] assign[=] binary_operation[name[self].api_base_url + constant[Home/ZoneTargetTemperature]] variable[response] assign[=] call[name[requests].post, parameter[name[url]]] if compare[name[response].status_code not_equal[!=] constant[200]] begin[:] return[constant[False]] variable[zone_change_data] assign[=] call[name[response].json, parameter[]] return[call[name[zone_change_data].get, parameter[constant[isSuccess], constant[False]]]]
keyword[def] identifier[set_target_temperature_by_id] ( identifier[self] , identifier[zone_id] , identifier[target_temperature] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_do_auth] (): keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[data] ={ literal[string] : identifier[zone_id] , literal[string] : identifier[target_temperature] } identifier[headers] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] + identifier[self] . identifier[login_data] [ literal[string] ][ literal[string] ] } identifier[url] = identifier[self] . identifier[api_base_url] + literal[string] identifier[response] = identifier[requests] . identifier[post] ( identifier[url] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ), identifier[headers] = identifier[headers] , identifier[timeout] = literal[int] ) keyword[if] identifier[response] . identifier[status_code] != literal[int] : keyword[return] keyword[False] identifier[zone_change_data] = identifier[response] . identifier[json] () keyword[return] identifier[zone_change_data] . identifier[get] ( literal[string] , keyword[False] )
def set_target_temperature_by_id(self, zone_id, target_temperature): """ Set the target temperature for a zone by id """ if not self._do_auth(): raise RuntimeError('Unable to login') # depends on [control=['if'], data=[]] data = {'ZoneId': zone_id, 'TargetTemperature': target_temperature} headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer ' + self.login_data['token']['accessToken']} url = self.api_base_url + 'Home/ZoneTargetTemperature' response = requests.post(url, data=json.dumps(data), headers=headers, timeout=10) if response.status_code != 200: return False # depends on [control=['if'], data=[]] zone_change_data = response.json() return zone_change_data.get('isSuccess', False)
def xarray_derivative_wrap(func): """Decorate the derivative functions to make them work nicely with DataArrays. This will automatically determine if the coordinates can be pulled directly from the DataArray, or if a call to lat_lon_grid_deltas is needed. """ @functools.wraps(func) def wrapper(f, **kwargs): if 'x' in kwargs or 'delta' in kwargs: # Use the usual DataArray to pint.Quantity preprocessing wrapper return preprocess_xarray(func)(f, **kwargs) elif isinstance(f, xr.DataArray): # Get axis argument, defaulting to first dimension axis = f.metpy.find_axis_name(kwargs.get('axis', 0)) # Initialize new kwargs with the axis number new_kwargs = {'axis': f.get_axis_num(axis)} if f[axis].attrs.get('_metpy_axis') == 'T': # Time coordinate, need to convert to seconds from datetimes new_kwargs['x'] = f[axis].metpy.as_timestamp().metpy.unit_array elif CFConventionHandler.check_axis(f[axis], 'lon'): # Longitude coordinate, need to get grid deltas new_kwargs['delta'], _ = grid_deltas_from_dataarray(f) elif CFConventionHandler.check_axis(f[axis], 'lat'): # Latitude coordinate, need to get grid deltas _, new_kwargs['delta'] = grid_deltas_from_dataarray(f) else: # General coordinate, use as is new_kwargs['x'] = f[axis].metpy.unit_array # Calculate and return result as a DataArray result = func(f.metpy.unit_array, **new_kwargs) return xr.DataArray(result.magnitude, coords=f.coords, dims=f.dims, attrs={'units': str(result.units)}) else: # Error raise ValueError('Must specify either "x" or "delta" for value positions when "f" ' 'is not a DataArray.') return wrapper
def function[xarray_derivative_wrap, parameter[func]]: constant[Decorate the derivative functions to make them work nicely with DataArrays. This will automatically determine if the coordinates can be pulled directly from the DataArray, or if a call to lat_lon_grid_deltas is needed. ] def function[wrapper, parameter[f]]: if <ast.BoolOp object at 0x7da1b22aead0> begin[:] return[call[call[name[preprocess_xarray], parameter[name[func]]], parameter[name[f]]]] return[name[wrapper]]
keyword[def] identifier[xarray_derivative_wrap] ( identifier[func] ): literal[string] @ identifier[functools] . identifier[wraps] ( identifier[func] ) keyword[def] identifier[wrapper] ( identifier[f] ,** identifier[kwargs] ): keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[or] literal[string] keyword[in] identifier[kwargs] : keyword[return] identifier[preprocess_xarray] ( identifier[func] )( identifier[f] ,** identifier[kwargs] ) keyword[elif] identifier[isinstance] ( identifier[f] , identifier[xr] . identifier[DataArray] ): identifier[axis] = identifier[f] . identifier[metpy] . identifier[find_axis_name] ( identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )) identifier[new_kwargs] ={ literal[string] : identifier[f] . identifier[get_axis_num] ( identifier[axis] )} keyword[if] identifier[f] [ identifier[axis] ]. identifier[attrs] . identifier[get] ( literal[string] )== literal[string] : identifier[new_kwargs] [ literal[string] ]= identifier[f] [ identifier[axis] ]. identifier[metpy] . identifier[as_timestamp] (). identifier[metpy] . identifier[unit_array] keyword[elif] identifier[CFConventionHandler] . identifier[check_axis] ( identifier[f] [ identifier[axis] ], literal[string] ): identifier[new_kwargs] [ literal[string] ], identifier[_] = identifier[grid_deltas_from_dataarray] ( identifier[f] ) keyword[elif] identifier[CFConventionHandler] . identifier[check_axis] ( identifier[f] [ identifier[axis] ], literal[string] ): identifier[_] , identifier[new_kwargs] [ literal[string] ]= identifier[grid_deltas_from_dataarray] ( identifier[f] ) keyword[else] : identifier[new_kwargs] [ literal[string] ]= identifier[f] [ identifier[axis] ]. identifier[metpy] . identifier[unit_array] identifier[result] = identifier[func] ( identifier[f] . identifier[metpy] . identifier[unit_array] ,** identifier[new_kwargs] ) keyword[return] identifier[xr] . identifier[DataArray] ( identifier[result] . identifier[magnitude] , identifier[coords] = identifier[f] . identifier[coords] , identifier[dims] = identifier[f] . identifier[dims] , identifier[attrs] ={ literal[string] : identifier[str] ( identifier[result] . identifier[units] )}) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[return] identifier[wrapper]
def xarray_derivative_wrap(func): """Decorate the derivative functions to make them work nicely with DataArrays. This will automatically determine if the coordinates can be pulled directly from the DataArray, or if a call to lat_lon_grid_deltas is needed. """ @functools.wraps(func) def wrapper(f, **kwargs): if 'x' in kwargs or 'delta' in kwargs: # Use the usual DataArray to pint.Quantity preprocessing wrapper return preprocess_xarray(func)(f, **kwargs) # depends on [control=['if'], data=[]] elif isinstance(f, xr.DataArray): # Get axis argument, defaulting to first dimension axis = f.metpy.find_axis_name(kwargs.get('axis', 0)) # Initialize new kwargs with the axis number new_kwargs = {'axis': f.get_axis_num(axis)} if f[axis].attrs.get('_metpy_axis') == 'T': # Time coordinate, need to convert to seconds from datetimes new_kwargs['x'] = f[axis].metpy.as_timestamp().metpy.unit_array # depends on [control=['if'], data=[]] elif CFConventionHandler.check_axis(f[axis], 'lon'): # Longitude coordinate, need to get grid deltas (new_kwargs['delta'], _) = grid_deltas_from_dataarray(f) # depends on [control=['if'], data=[]] elif CFConventionHandler.check_axis(f[axis], 'lat'): # Latitude coordinate, need to get grid deltas (_, new_kwargs['delta']) = grid_deltas_from_dataarray(f) # depends on [control=['if'], data=[]] else: # General coordinate, use as is new_kwargs['x'] = f[axis].metpy.unit_array # Calculate and return result as a DataArray result = func(f.metpy.unit_array, **new_kwargs) return xr.DataArray(result.magnitude, coords=f.coords, dims=f.dims, attrs={'units': str(result.units)}) # depends on [control=['if'], data=[]] else: # Error raise ValueError('Must specify either "x" or "delta" for value positions when "f" is not a DataArray.') return wrapper
def set_close_function(self, func): """Setting Tabs close function None -> tabs are not closable""" state = func is not None if state: self.sig_close_tab.connect(func) try: # Assuming Qt >= 4.5 QTabWidget.setTabsClosable(self, state) self.tabCloseRequested.connect(func) except AttributeError: # Workaround for Qt < 4.5 close_button = create_toolbutton(self, triggered=func, icon=ima.icon('fileclose'), tip=_("Close current tab")) self.setCornerWidget(close_button if state else None)
def function[set_close_function, parameter[self, func]]: constant[Setting Tabs close function None -> tabs are not closable] variable[state] assign[=] compare[name[func] is_not constant[None]] if name[state] begin[:] call[name[self].sig_close_tab.connect, parameter[name[func]]] <ast.Try object at 0x7da18f723f10>
keyword[def] identifier[set_close_function] ( identifier[self] , identifier[func] ): literal[string] identifier[state] = identifier[func] keyword[is] keyword[not] keyword[None] keyword[if] identifier[state] : identifier[self] . identifier[sig_close_tab] . identifier[connect] ( identifier[func] ) keyword[try] : identifier[QTabWidget] . identifier[setTabsClosable] ( identifier[self] , identifier[state] ) identifier[self] . identifier[tabCloseRequested] . identifier[connect] ( identifier[func] ) keyword[except] identifier[AttributeError] : identifier[close_button] = identifier[create_toolbutton] ( identifier[self] , identifier[triggered] = identifier[func] , identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[tip] = identifier[_] ( literal[string] )) identifier[self] . identifier[setCornerWidget] ( identifier[close_button] keyword[if] identifier[state] keyword[else] keyword[None] )
def set_close_function(self, func): """Setting Tabs close function None -> tabs are not closable""" state = func is not None if state: self.sig_close_tab.connect(func) # depends on [control=['if'], data=[]] try: # Assuming Qt >= 4.5 QTabWidget.setTabsClosable(self, state) self.tabCloseRequested.connect(func) # depends on [control=['try'], data=[]] except AttributeError: # Workaround for Qt < 4.5 close_button = create_toolbutton(self, triggered=func, icon=ima.icon('fileclose'), tip=_('Close current tab')) self.setCornerWidget(close_button if state else None) # depends on [control=['except'], data=[]]
def writeline(self, fmt, *args): """ Write `line` (list of objects) with given `fmt` to file. The `line` will be chained if object is iterable (except for basestrings). """ fmt = self.endian + fmt size = struct.calcsize(fmt) fix = struct.pack(self.endian + 'i', size) line = struct.pack(fmt, *args) self.write(fix) self.write(line) self.write(fix)
def function[writeline, parameter[self, fmt]]: constant[ Write `line` (list of objects) with given `fmt` to file. The `line` will be chained if object is iterable (except for basestrings). ] variable[fmt] assign[=] binary_operation[name[self].endian + name[fmt]] variable[size] assign[=] call[name[struct].calcsize, parameter[name[fmt]]] variable[fix] assign[=] call[name[struct].pack, parameter[binary_operation[name[self].endian + constant[i]], name[size]]] variable[line] assign[=] call[name[struct].pack, parameter[name[fmt], <ast.Starred object at 0x7da1b0ffae00>]] call[name[self].write, parameter[name[fix]]] call[name[self].write, parameter[name[line]]] call[name[self].write, parameter[name[fix]]]
keyword[def] identifier[writeline] ( identifier[self] , identifier[fmt] ,* identifier[args] ): literal[string] identifier[fmt] = identifier[self] . identifier[endian] + identifier[fmt] identifier[size] = identifier[struct] . identifier[calcsize] ( identifier[fmt] ) identifier[fix] = identifier[struct] . identifier[pack] ( identifier[self] . identifier[endian] + literal[string] , identifier[size] ) identifier[line] = identifier[struct] . identifier[pack] ( identifier[fmt] ,* identifier[args] ) identifier[self] . identifier[write] ( identifier[fix] ) identifier[self] . identifier[write] ( identifier[line] ) identifier[self] . identifier[write] ( identifier[fix] )
def writeline(self, fmt, *args): """ Write `line` (list of objects) with given `fmt` to file. The `line` will be chained if object is iterable (except for basestrings). """ fmt = self.endian + fmt size = struct.calcsize(fmt) fix = struct.pack(self.endian + 'i', size) line = struct.pack(fmt, *args) self.write(fix) self.write(line) self.write(fix)
def remove_children(self, id_): """Removes all childrenfrom an ``Id``. arg: id (osid.id.Id): the ``Id`` of the node raise: NotFound - an node identified by the given ``Id`` was not found raise: NullArgument - ``id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ results = self._rls.get_relationships_by_genus_type_for_source(id_, self._relationship_type) if results.available() == 0: raise errors.NotFound() for r in results: self._ras.delete_relationship(r.get_id())
def function[remove_children, parameter[self, id_]]: constant[Removes all childrenfrom an ``Id``. arg: id (osid.id.Id): the ``Id`` of the node raise: NotFound - an node identified by the given ``Id`` was not found raise: NullArgument - ``id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* ] variable[results] assign[=] call[name[self]._rls.get_relationships_by_genus_type_for_source, parameter[name[id_], name[self]._relationship_type]] if compare[call[name[results].available, parameter[]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18ede4a30> for taget[name[r]] in starred[name[results]] begin[:] call[name[self]._ras.delete_relationship, parameter[call[name[r].get_id, parameter[]]]]
keyword[def] identifier[remove_children] ( identifier[self] , identifier[id_] ): literal[string] identifier[results] = identifier[self] . identifier[_rls] . identifier[get_relationships_by_genus_type_for_source] ( identifier[id_] , identifier[self] . identifier[_relationship_type] ) keyword[if] identifier[results] . identifier[available] ()== literal[int] : keyword[raise] identifier[errors] . identifier[NotFound] () keyword[for] identifier[r] keyword[in] identifier[results] : identifier[self] . identifier[_ras] . identifier[delete_relationship] ( identifier[r] . identifier[get_id] ())
def remove_children(self, id_): """Removes all childrenfrom an ``Id``. arg: id (osid.id.Id): the ``Id`` of the node raise: NotFound - an node identified by the given ``Id`` was not found raise: NullArgument - ``id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ results = self._rls.get_relationships_by_genus_type_for_source(id_, self._relationship_type) if results.available() == 0: raise errors.NotFound() # depends on [control=['if'], data=[]] for r in results: self._ras.delete_relationship(r.get_id()) # depends on [control=['for'], data=['r']]
def del_stream(self, bucket, label): """ Will fail if the bucket or label don't exist """ bucket = self._require_bucket(bucket) key = self._require_key(bucket, label) key.delete()
def function[del_stream, parameter[self, bucket, label]]: constant[ Will fail if the bucket or label don't exist ] variable[bucket] assign[=] call[name[self]._require_bucket, parameter[name[bucket]]] variable[key] assign[=] call[name[self]._require_key, parameter[name[bucket], name[label]]] call[name[key].delete, parameter[]]
keyword[def] identifier[del_stream] ( identifier[self] , identifier[bucket] , identifier[label] ): literal[string] identifier[bucket] = identifier[self] . identifier[_require_bucket] ( identifier[bucket] ) identifier[key] = identifier[self] . identifier[_require_key] ( identifier[bucket] , identifier[label] ) identifier[key] . identifier[delete] ()
def del_stream(self, bucket, label): """ Will fail if the bucket or label don't exist """ bucket = self._require_bucket(bucket) key = self._require_key(bucket, label) key.delete()
def decode_source(source_bytes): """Decode bytes representing source code and return the string. Universal newline support is used in the decoding. """ # source_bytes_readline = io.BytesIO(source_bytes).readline # encoding, _ = detect_encoding(source_bytes_readline) newline_decoder = io.IncrementalNewlineDecoder(None, True) return newline_decoder.decode(source_to_unicode(source_bytes))
def function[decode_source, parameter[source_bytes]]: constant[Decode bytes representing source code and return the string. Universal newline support is used in the decoding. ] variable[newline_decoder] assign[=] call[name[io].IncrementalNewlineDecoder, parameter[constant[None], constant[True]]] return[call[name[newline_decoder].decode, parameter[call[name[source_to_unicode], parameter[name[source_bytes]]]]]]
keyword[def] identifier[decode_source] ( identifier[source_bytes] ): literal[string] identifier[newline_decoder] = identifier[io] . identifier[IncrementalNewlineDecoder] ( keyword[None] , keyword[True] ) keyword[return] identifier[newline_decoder] . identifier[decode] ( identifier[source_to_unicode] ( identifier[source_bytes] ))
def decode_source(source_bytes): """Decode bytes representing source code and return the string. Universal newline support is used in the decoding. """ # source_bytes_readline = io.BytesIO(source_bytes).readline # encoding, _ = detect_encoding(source_bytes_readline) newline_decoder = io.IncrementalNewlineDecoder(None, True) return newline_decoder.decode(source_to_unicode(source_bytes))
def science_object_create(self, pid, path, format_id=None): """Create a new Science Object on a Member Node.""" self._queue_science_object_create(pid, path, format_id)
def function[science_object_create, parameter[self, pid, path, format_id]]: constant[Create a new Science Object on a Member Node.] call[name[self]._queue_science_object_create, parameter[name[pid], name[path], name[format_id]]]
keyword[def] identifier[science_object_create] ( identifier[self] , identifier[pid] , identifier[path] , identifier[format_id] = keyword[None] ): literal[string] identifier[self] . identifier[_queue_science_object_create] ( identifier[pid] , identifier[path] , identifier[format_id] )
def science_object_create(self, pid, path, format_id=None): """Create a new Science Object on a Member Node.""" self._queue_science_object_create(pid, path, format_id)
def main(self): """The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit """ try: while self.running: # Trigger the processing of finished processes by the ProcessHandler. # If there are finished processes we write the log to keep it up to date. if self.process_handler.check_finished(): self.logger.write(self.queue) if self.reset and self.process_handler.all_finished(): # Rotate log and reset queue self.logger.rotate(self.queue) self.queue.reset() self.reset = False # Check if the ProcessHandler has any free slots to spawn a new process if not self.paused and not self.reset and self.running: self.process_handler.check_for_new() # This is the communication section of the daemon. # 1. Receive message from the client # 2. Check payload and call respective function with payload as parameter. # 3. Execute logic # 4. Return payload with response to client # Create list for waitable objects readable, writable, failed = select.select(self.read_list, [], [], 1) for waiting_socket in readable: if waiting_socket is self.socket: # Listening for clients to connect. # Client sockets are added to readlist to be processed. try: client_socket, client_address = self.socket.accept() self.read_list.append(client_socket) except Exception: self.logger.warning('Daemon rejected client') else: # Trying to receive instruction from client socket try: instruction = waiting_socket.recv(1048576) except (EOFError, OSError): self.logger.warning('Client died while sending message, dropping received data.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None # Check for valid instruction if instruction is not None: # Check if received data can be unpickled. try: payload = pickle.loads(instruction) except EOFError: # Instruction is ignored if it can't be unpickled self.logger.error('Received message is incomplete, dropping received data.') self.read_list.remove(waiting_socket) waiting_socket.close() # Set invalid payload payload = {'mode': ''} functions = { 'add': self.add, 'remove': self.remove, 'edit': self.edit_command, 'switch': self.switch, 'send': self.pipe_to_process, 'status': self.send_status, 'start': self.start, 'pause': self.pause, 'stash': self.stash, 'enqueue': self.enqueue, 'restart': self.restart, 'kill': self.kill_process, 'reset': self.reset_everything, 'clear': self.clear, 'config': self.set_config, 'STOPDAEMON': self.stop_daemon, } if payload['mode'] in functions.keys(): self.logger.debug('Payload received:') self.logger.debug(payload) response = functions[payload['mode']](payload) self.logger.debug('Sending payload:') self.logger.debug(response) try: self.respond_client(response, waiting_socket) except (BrokenPipeError): self.logger.warning('Client disconnected during message dispatching. Function successfully executed anyway.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None else: self.respond_client({'message': 'Unknown Command', 'status': 'error'}, waiting_socket) except Exception: self.logger.exception() # Wait for killed or stopped processes to finish (cleanup) self.process_handler.wait_for_finish() # Close socket, clean everything up and exit self.socket.close() cleanup(self.config_dir) sys.exit(0)
def function[main, parameter[self]]: constant[The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit ] <ast.Try object at 0x7da1b0ebe7a0> call[name[self].process_handler.wait_for_finish, parameter[]] call[name[self].socket.close, parameter[]] call[name[cleanup], parameter[name[self].config_dir]] call[name[sys].exit, parameter[constant[0]]]
keyword[def] identifier[main] ( identifier[self] ): literal[string] keyword[try] : keyword[while] identifier[self] . identifier[running] : keyword[if] identifier[self] . identifier[process_handler] . identifier[check_finished] (): identifier[self] . identifier[logger] . identifier[write] ( identifier[self] . identifier[queue] ) keyword[if] identifier[self] . identifier[reset] keyword[and] identifier[self] . identifier[process_handler] . identifier[all_finished] (): identifier[self] . identifier[logger] . identifier[rotate] ( identifier[self] . identifier[queue] ) identifier[self] . identifier[queue] . identifier[reset] () identifier[self] . identifier[reset] = keyword[False] keyword[if] keyword[not] identifier[self] . identifier[paused] keyword[and] keyword[not] identifier[self] . identifier[reset] keyword[and] identifier[self] . identifier[running] : identifier[self] . identifier[process_handler] . identifier[check_for_new] () identifier[readable] , identifier[writable] , identifier[failed] = identifier[select] . identifier[select] ( identifier[self] . identifier[read_list] ,[],[], literal[int] ) keyword[for] identifier[waiting_socket] keyword[in] identifier[readable] : keyword[if] identifier[waiting_socket] keyword[is] identifier[self] . identifier[socket] : keyword[try] : identifier[client_socket] , identifier[client_address] = identifier[self] . identifier[socket] . identifier[accept] () identifier[self] . identifier[read_list] . identifier[append] ( identifier[client_socket] ) keyword[except] identifier[Exception] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] ) keyword[else] : keyword[try] : identifier[instruction] = identifier[waiting_socket] . identifier[recv] ( literal[int] ) keyword[except] ( identifier[EOFError] , identifier[OSError] ): identifier[self] . identifier[logger] . identifier[warning] ( literal[string] ) identifier[self] . identifier[read_list] . identifier[remove] ( identifier[waiting_socket] ) identifier[waiting_socket] . identifier[close] () identifier[instruction] = keyword[None] keyword[if] identifier[instruction] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[payload] = identifier[pickle] . identifier[loads] ( identifier[instruction] ) keyword[except] identifier[EOFError] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] ) identifier[self] . identifier[read_list] . identifier[remove] ( identifier[waiting_socket] ) identifier[waiting_socket] . identifier[close] () identifier[payload] ={ literal[string] : literal[string] } identifier[functions] ={ literal[string] : identifier[self] . identifier[add] , literal[string] : identifier[self] . identifier[remove] , literal[string] : identifier[self] . identifier[edit_command] , literal[string] : identifier[self] . identifier[switch] , literal[string] : identifier[self] . identifier[pipe_to_process] , literal[string] : identifier[self] . identifier[send_status] , literal[string] : identifier[self] . identifier[start] , literal[string] : identifier[self] . identifier[pause] , literal[string] : identifier[self] . identifier[stash] , literal[string] : identifier[self] . identifier[enqueue] , literal[string] : identifier[self] . identifier[restart] , literal[string] : identifier[self] . identifier[kill_process] , literal[string] : identifier[self] . identifier[reset_everything] , literal[string] : identifier[self] . identifier[clear] , literal[string] : identifier[self] . identifier[set_config] , literal[string] : identifier[self] . identifier[stop_daemon] , } keyword[if] identifier[payload] [ literal[string] ] keyword[in] identifier[functions] . identifier[keys] (): identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[self] . identifier[logger] . identifier[debug] ( identifier[payload] ) identifier[response] = identifier[functions] [ identifier[payload] [ literal[string] ]]( identifier[payload] ) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[self] . identifier[logger] . identifier[debug] ( identifier[response] ) keyword[try] : identifier[self] . identifier[respond_client] ( identifier[response] , identifier[waiting_socket] ) keyword[except] ( identifier[BrokenPipeError] ): identifier[self] . identifier[logger] . identifier[warning] ( literal[string] ) identifier[self] . identifier[read_list] . identifier[remove] ( identifier[waiting_socket] ) identifier[waiting_socket] . identifier[close] () identifier[instruction] = keyword[None] keyword[else] : identifier[self] . identifier[respond_client] ({ literal[string] : literal[string] , literal[string] : literal[string] }, identifier[waiting_socket] ) keyword[except] identifier[Exception] : identifier[self] . identifier[logger] . identifier[exception] () identifier[self] . identifier[process_handler] . identifier[wait_for_finish] () identifier[self] . identifier[socket] . identifier[close] () identifier[cleanup] ( identifier[self] . identifier[config_dir] ) identifier[sys] . identifier[exit] ( literal[int] )
def main(self): """The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit """ try: while self.running: # Trigger the processing of finished processes by the ProcessHandler. # If there are finished processes we write the log to keep it up to date. if self.process_handler.check_finished(): self.logger.write(self.queue) # depends on [control=['if'], data=[]] if self.reset and self.process_handler.all_finished(): # Rotate log and reset queue self.logger.rotate(self.queue) self.queue.reset() self.reset = False # depends on [control=['if'], data=[]] # Check if the ProcessHandler has any free slots to spawn a new process if not self.paused and (not self.reset) and self.running: self.process_handler.check_for_new() # depends on [control=['if'], data=[]] # This is the communication section of the daemon. # 1. Receive message from the client # 2. Check payload and call respective function with payload as parameter. # 3. Execute logic # 4. Return payload with response to client # Create list for waitable objects (readable, writable, failed) = select.select(self.read_list, [], [], 1) for waiting_socket in readable: if waiting_socket is self.socket: # Listening for clients to connect. # Client sockets are added to readlist to be processed. try: (client_socket, client_address) = self.socket.accept() self.read_list.append(client_socket) # depends on [control=['try'], data=[]] except Exception: self.logger.warning('Daemon rejected client') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: # Trying to receive instruction from client socket try: instruction = waiting_socket.recv(1048576) # depends on [control=['try'], data=[]] except (EOFError, OSError): self.logger.warning('Client died while sending message, dropping received data.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None # depends on [control=['except'], data=[]] # Check for valid instruction if instruction is not None: # Check if received data can be unpickled. try: payload = pickle.loads(instruction) # depends on [control=['try'], data=[]] except EOFError: # Instruction is ignored if it can't be unpickled self.logger.error('Received message is incomplete, dropping received data.') self.read_list.remove(waiting_socket) waiting_socket.close() # Set invalid payload payload = {'mode': ''} # depends on [control=['except'], data=[]] functions = {'add': self.add, 'remove': self.remove, 'edit': self.edit_command, 'switch': self.switch, 'send': self.pipe_to_process, 'status': self.send_status, 'start': self.start, 'pause': self.pause, 'stash': self.stash, 'enqueue': self.enqueue, 'restart': self.restart, 'kill': self.kill_process, 'reset': self.reset_everything, 'clear': self.clear, 'config': self.set_config, 'STOPDAEMON': self.stop_daemon} if payload['mode'] in functions.keys(): self.logger.debug('Payload received:') self.logger.debug(payload) response = functions[payload['mode']](payload) self.logger.debug('Sending payload:') self.logger.debug(response) try: self.respond_client(response, waiting_socket) # depends on [control=['try'], data=[]] except BrokenPipeError: self.logger.warning('Client disconnected during message dispatching. Function successfully executed anyway.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: self.respond_client({'message': 'Unknown Command', 'status': 'error'}, waiting_socket) # depends on [control=['if'], data=['instruction']] # depends on [control=['for'], data=['waiting_socket']] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except Exception: self.logger.exception() # depends on [control=['except'], data=[]] # Wait for killed or stopped processes to finish (cleanup) self.process_handler.wait_for_finish() # Close socket, clean everything up and exit self.socket.close() cleanup(self.config_dir) sys.exit(0)
def new_address(self, label=None): """Generate a new address and add it to the wallet. :param str label: label to attach to this address (optional) :return: an instance of :class:`Address` class """ params = self.build_basic_request() if label is not None: params['label'] = label response = util.call_api("merchant/{0}/new_address".format(self.identifier), params, base_url=self.service_url) json_response = json.loads(response) self.parse_error(json_response) return Address(0, json_response['address'], json_response.get('label'), 0)
def function[new_address, parameter[self, label]]: constant[Generate a new address and add it to the wallet. :param str label: label to attach to this address (optional) :return: an instance of :class:`Address` class ] variable[params] assign[=] call[name[self].build_basic_request, parameter[]] if compare[name[label] is_not constant[None]] begin[:] call[name[params]][constant[label]] assign[=] name[label] variable[response] assign[=] call[name[util].call_api, parameter[call[constant[merchant/{0}/new_address].format, parameter[name[self].identifier]], name[params]]] variable[json_response] assign[=] call[name[json].loads, parameter[name[response]]] call[name[self].parse_error, parameter[name[json_response]]] return[call[name[Address], parameter[constant[0], call[name[json_response]][constant[address]], call[name[json_response].get, parameter[constant[label]]], constant[0]]]]
keyword[def] identifier[new_address] ( identifier[self] , identifier[label] = keyword[None] ): literal[string] identifier[params] = identifier[self] . identifier[build_basic_request] () keyword[if] identifier[label] keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[label] identifier[response] = identifier[util] . identifier[call_api] ( literal[string] . identifier[format] ( identifier[self] . identifier[identifier] ), identifier[params] , identifier[base_url] = identifier[self] . identifier[service_url] ) identifier[json_response] = identifier[json] . identifier[loads] ( identifier[response] ) identifier[self] . identifier[parse_error] ( identifier[json_response] ) keyword[return] identifier[Address] ( literal[int] , identifier[json_response] [ literal[string] ], identifier[json_response] . identifier[get] ( literal[string] ), literal[int] )
def new_address(self, label=None): """Generate a new address and add it to the wallet. :param str label: label to attach to this address (optional) :return: an instance of :class:`Address` class """ params = self.build_basic_request() if label is not None: params['label'] = label # depends on [control=['if'], data=['label']] response = util.call_api('merchant/{0}/new_address'.format(self.identifier), params, base_url=self.service_url) json_response = json.loads(response) self.parse_error(json_response) return Address(0, json_response['address'], json_response.get('label'), 0)
def is_all_field_none(self): """ :rtype: bool """ if self._id_ is not None: return False if self._created is not None: return False if self._updated is not None: return False if self._contract_date_start is not None: return False if self._contract_date_end is not None: return False if self._contract_version is not None: return False if self._subscription_type is not None: return False if self._subscription_type_downgrade is not None: return False if self._status is not None: return False if self._sub_status is not None: return False return True
def function[is_all_field_none, parameter[self]]: constant[ :rtype: bool ] if compare[name[self]._id_ is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._created is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._updated is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._contract_date_start is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._contract_date_end is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._contract_version is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._subscription_type is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._subscription_type_downgrade is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._status is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._sub_status is_not constant[None]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[is_all_field_none] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_id_] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_created] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_updated] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_contract_date_start] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_contract_date_end] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_contract_version] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_subscription_type] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_subscription_type_downgrade] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_status] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_sub_status] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[return] keyword[True]
def is_all_field_none(self): """ :rtype: bool """ if self._id_ is not None: return False # depends on [control=['if'], data=[]] if self._created is not None: return False # depends on [control=['if'], data=[]] if self._updated is not None: return False # depends on [control=['if'], data=[]] if self._contract_date_start is not None: return False # depends on [control=['if'], data=[]] if self._contract_date_end is not None: return False # depends on [control=['if'], data=[]] if self._contract_version is not None: return False # depends on [control=['if'], data=[]] if self._subscription_type is not None: return False # depends on [control=['if'], data=[]] if self._subscription_type_downgrade is not None: return False # depends on [control=['if'], data=[]] if self._status is not None: return False # depends on [control=['if'], data=[]] if self._sub_status is not None: return False # depends on [control=['if'], data=[]] return True
def signedDistance(actor, maxradius=0.5, bounds=(0, 1, 0, 1, 0, 1), dims=(10, 10, 10)): """ ``vtkSignedDistance`` filter. :param float maxradius: how far out to propagate distance calculation :param list bounds: volume bounds. """ dist = vtk.vtkSignedDistance() dist.SetInputData(actor.polydata(True)) dist.SetRadius(maxradius) dist.SetBounds(bounds) dist.SetDimensions(dims) dist.Update() return Volume(dist.GetOutput())
def function[signedDistance, parameter[actor, maxradius, bounds, dims]]: constant[ ``vtkSignedDistance`` filter. :param float maxradius: how far out to propagate distance calculation :param list bounds: volume bounds. ] variable[dist] assign[=] call[name[vtk].vtkSignedDistance, parameter[]] call[name[dist].SetInputData, parameter[call[name[actor].polydata, parameter[constant[True]]]]] call[name[dist].SetRadius, parameter[name[maxradius]]] call[name[dist].SetBounds, parameter[name[bounds]]] call[name[dist].SetDimensions, parameter[name[dims]]] call[name[dist].Update, parameter[]] return[call[name[Volume], parameter[call[name[dist].GetOutput, parameter[]]]]]
keyword[def] identifier[signedDistance] ( identifier[actor] , identifier[maxradius] = literal[int] , identifier[bounds] =( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ), identifier[dims] =( literal[int] , literal[int] , literal[int] )): literal[string] identifier[dist] = identifier[vtk] . identifier[vtkSignedDistance] () identifier[dist] . identifier[SetInputData] ( identifier[actor] . identifier[polydata] ( keyword[True] )) identifier[dist] . identifier[SetRadius] ( identifier[maxradius] ) identifier[dist] . identifier[SetBounds] ( identifier[bounds] ) identifier[dist] . identifier[SetDimensions] ( identifier[dims] ) identifier[dist] . identifier[Update] () keyword[return] identifier[Volume] ( identifier[dist] . identifier[GetOutput] ())
def signedDistance(actor, maxradius=0.5, bounds=(0, 1, 0, 1, 0, 1), dims=(10, 10, 10)): """ ``vtkSignedDistance`` filter. :param float maxradius: how far out to propagate distance calculation :param list bounds: volume bounds. """ dist = vtk.vtkSignedDistance() dist.SetInputData(actor.polydata(True)) dist.SetRadius(maxradius) dist.SetBounds(bounds) dist.SetDimensions(dims) dist.Update() return Volume(dist.GetOutput())
def parse(parser, argv=None, settings_key='settings', no_args_func=None): """ parser cliez app :param argparse.ArgumentParser parser: an instance of argparse.ArgumentParser :param argv: argument list,default is `sys.argv` :type argv: list or tuple :param str settings: settings option name, default is settings. :param object no_args_func: a callable object.if no sub-parser matched, parser will call it. :return: an instance of `cliez.component.Component` or its subclass """ argv = argv or sys.argv commands = command_list() if type(argv) not in [list, tuple]: raise TypeError("argv only can be list or tuple") # match sub-parser if len(argv) >= 2 and argv[1] in commands: sub_parsers = parser.add_subparsers() class_name = argv[1].capitalize() + 'Component' from cliez.conf import (COMPONENT_ROOT, LOGGING_CONFIG, EPILOG, GENERAL_ARGUMENTS) sys.path.insert(0, os.path.dirname(COMPONENT_ROOT)) mod = importlib.import_module( '{}.components.{}'.format(os.path.basename(COMPONENT_ROOT), argv[1])) # dynamic load component klass = getattr(mod, class_name) sub_parser = append_arguments(klass, sub_parsers, EPILOG, GENERAL_ARGUMENTS) options = parser.parse_args(argv[1:]) settings = Settings.bind( getattr(options, settings_key) ) if settings_key and hasattr(options, settings_key) else None obj = klass(parser, sub_parser, options, settings) # init logger logger_level = logging.CRITICAL if hasattr(options, 'verbose'): if options.verbose == 1: logger_level = logging.ERROR elif options.verbose == 2: logger_level = logging.WARNING elif options.verbose == 3: logger_level = logging.INFO obj.logger.setLevel(logging.INFO) pass if hasattr(options, 'debug') and options.debug: logger_level = logging.DEBUG # http lib use a strange way to logging try: import http.client as http_client http_client.HTTPConnection.debuglevel = 1 except Exception: # do nothing pass pass loggers = LOGGING_CONFIG['loggers'] for k, v in loggers.items(): v.setdefault('level', logger_level) if logger_level in [logging.INFO, logging.DEBUG]: v['handlers'] = ['stdout'] pass logging_config.dictConfig(LOGGING_CONFIG) # this may not necessary # obj.logger.setLevel(logger_level) obj.run(options) # return object to make unit test easy return obj # print all sub commands when user set. if not parser.description and len(commands): sub_parsers = parser.add_subparsers() [sub_parsers.add_parser(v) for v in commands] pass pass options = parser.parse_args(argv[1:]) if no_args_func and callable(no_args_func): return no_args_func(options) else: parser._print_message("nothing to do...\n") pass
def function[parse, parameter[parser, argv, settings_key, no_args_func]]: constant[ parser cliez app :param argparse.ArgumentParser parser: an instance of argparse.ArgumentParser :param argv: argument list,default is `sys.argv` :type argv: list or tuple :param str settings: settings option name, default is settings. :param object no_args_func: a callable object.if no sub-parser matched, parser will call it. :return: an instance of `cliez.component.Component` or its subclass ] variable[argv] assign[=] <ast.BoolOp object at 0x7da1afe895d0> variable[commands] assign[=] call[name[command_list], parameter[]] if compare[call[name[type], parameter[name[argv]]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Name object at 0x7da1afe8a1d0>, <ast.Name object at 0x7da1afe8a200>]]] begin[:] <ast.Raise object at 0x7da1afe89870> if <ast.BoolOp object at 0x7da1afe88310> begin[:] variable[sub_parsers] assign[=] call[name[parser].add_subparsers, parameter[]] variable[class_name] assign[=] binary_operation[call[call[name[argv]][constant[1]].capitalize, parameter[]] + constant[Component]] from relative_module[cliez.conf] import module[COMPONENT_ROOT], module[LOGGING_CONFIG], module[EPILOG], module[GENERAL_ARGUMENTS] call[name[sys].path.insert, parameter[constant[0], call[name[os].path.dirname, parameter[name[COMPONENT_ROOT]]]]] variable[mod] assign[=] call[name[importlib].import_module, parameter[call[constant[{}.components.{}].format, parameter[call[name[os].path.basename, parameter[name[COMPONENT_ROOT]]], call[name[argv]][constant[1]]]]]] variable[klass] assign[=] call[name[getattr], parameter[name[mod], name[class_name]]] variable[sub_parser] assign[=] call[name[append_arguments], parameter[name[klass], name[sub_parsers], name[EPILOG], name[GENERAL_ARGUMENTS]]] variable[options] assign[=] call[name[parser].parse_args, parameter[call[name[argv]][<ast.Slice object at 0x7da1afea5c60>]]] variable[settings] assign[=] <ast.IfExp object at 0x7da1afea7df0> variable[obj] assign[=] call[name[klass], parameter[name[parser], name[sub_parser], name[options], name[settings]]] variable[logger_level] assign[=] name[logging].CRITICAL if call[name[hasattr], parameter[name[options], constant[verbose]]] begin[:] if compare[name[options].verbose equal[==] constant[1]] begin[:] variable[logger_level] assign[=] name[logging].ERROR pass if <ast.BoolOp object at 0x7da1afea5180> begin[:] variable[logger_level] assign[=] name[logging].DEBUG <ast.Try object at 0x7da1afea5690> pass variable[loggers] assign[=] call[name[LOGGING_CONFIG]][constant[loggers]] for taget[tuple[[<ast.Name object at 0x7da1afea4f40>, <ast.Name object at 0x7da1afea5840>]]] in starred[call[name[loggers].items, parameter[]]] begin[:] call[name[v].setdefault, parameter[constant[level], name[logger_level]]] if compare[name[logger_level] in list[[<ast.Attribute object at 0x7da1afea6d10>, <ast.Attribute object at 0x7da1afea52a0>]]] begin[:] call[name[v]][constant[handlers]] assign[=] list[[<ast.Constant object at 0x7da1afea6230>]] pass call[name[logging_config].dictConfig, parameter[name[LOGGING_CONFIG]]] call[name[obj].run, parameter[name[options]]] return[name[obj]] if <ast.BoolOp object at 0x7da1afea5030> begin[:] variable[sub_parsers] assign[=] call[name[parser].add_subparsers, parameter[]] <ast.ListComp object at 0x7da1afea5b10> pass pass variable[options] assign[=] call[name[parser].parse_args, parameter[call[name[argv]][<ast.Slice object at 0x7da1afea4d30>]]] if <ast.BoolOp object at 0x7da1afea7c70> begin[:] return[call[name[no_args_func], parameter[name[options]]]] pass
keyword[def] identifier[parse] ( identifier[parser] , identifier[argv] = keyword[None] , identifier[settings_key] = literal[string] , identifier[no_args_func] = keyword[None] ): literal[string] identifier[argv] = identifier[argv] keyword[or] identifier[sys] . identifier[argv] identifier[commands] = identifier[command_list] () keyword[if] identifier[type] ( identifier[argv] ) keyword[not] keyword[in] [ identifier[list] , identifier[tuple] ]: keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] identifier[len] ( identifier[argv] )>= literal[int] keyword[and] identifier[argv] [ literal[int] ] keyword[in] identifier[commands] : identifier[sub_parsers] = identifier[parser] . identifier[add_subparsers] () identifier[class_name] = identifier[argv] [ literal[int] ]. identifier[capitalize] ()+ literal[string] keyword[from] identifier[cliez] . identifier[conf] keyword[import] ( identifier[COMPONENT_ROOT] , identifier[LOGGING_CONFIG] , identifier[EPILOG] , identifier[GENERAL_ARGUMENTS] ) identifier[sys] . identifier[path] . identifier[insert] ( literal[int] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[COMPONENT_ROOT] )) identifier[mod] = identifier[importlib] . identifier[import_module] ( literal[string] . identifier[format] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[COMPONENT_ROOT] ), identifier[argv] [ literal[int] ])) identifier[klass] = identifier[getattr] ( identifier[mod] , identifier[class_name] ) identifier[sub_parser] = identifier[append_arguments] ( identifier[klass] , identifier[sub_parsers] , identifier[EPILOG] , identifier[GENERAL_ARGUMENTS] ) identifier[options] = identifier[parser] . identifier[parse_args] ( identifier[argv] [ literal[int] :]) identifier[settings] = identifier[Settings] . identifier[bind] ( identifier[getattr] ( identifier[options] , identifier[settings_key] ) ) keyword[if] identifier[settings_key] keyword[and] identifier[hasattr] ( identifier[options] , identifier[settings_key] ) keyword[else] keyword[None] identifier[obj] = identifier[klass] ( identifier[parser] , identifier[sub_parser] , identifier[options] , identifier[settings] ) identifier[logger_level] = identifier[logging] . identifier[CRITICAL] keyword[if] identifier[hasattr] ( identifier[options] , literal[string] ): keyword[if] identifier[options] . identifier[verbose] == literal[int] : identifier[logger_level] = identifier[logging] . identifier[ERROR] keyword[elif] identifier[options] . identifier[verbose] == literal[int] : identifier[logger_level] = identifier[logging] . identifier[WARNING] keyword[elif] identifier[options] . identifier[verbose] == literal[int] : identifier[logger_level] = identifier[logging] . identifier[INFO] identifier[obj] . identifier[logger] . identifier[setLevel] ( identifier[logging] . identifier[INFO] ) keyword[pass] keyword[if] identifier[hasattr] ( identifier[options] , literal[string] ) keyword[and] identifier[options] . identifier[debug] : identifier[logger_level] = identifier[logging] . identifier[DEBUG] keyword[try] : keyword[import] identifier[http] . identifier[client] keyword[as] identifier[http_client] identifier[http_client] . identifier[HTTPConnection] . identifier[debuglevel] = literal[int] keyword[except] identifier[Exception] : keyword[pass] keyword[pass] identifier[loggers] = identifier[LOGGING_CONFIG] [ literal[string] ] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[loggers] . identifier[items] (): identifier[v] . identifier[setdefault] ( literal[string] , identifier[logger_level] ) keyword[if] identifier[logger_level] keyword[in] [ identifier[logging] . identifier[INFO] , identifier[logging] . identifier[DEBUG] ]: identifier[v] [ literal[string] ]=[ literal[string] ] keyword[pass] identifier[logging_config] . identifier[dictConfig] ( identifier[LOGGING_CONFIG] ) identifier[obj] . identifier[run] ( identifier[options] ) keyword[return] identifier[obj] keyword[if] keyword[not] identifier[parser] . identifier[description] keyword[and] identifier[len] ( identifier[commands] ): identifier[sub_parsers] = identifier[parser] . identifier[add_subparsers] () [ identifier[sub_parsers] . identifier[add_parser] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[commands] ] keyword[pass] keyword[pass] identifier[options] = identifier[parser] . identifier[parse_args] ( identifier[argv] [ literal[int] :]) keyword[if] identifier[no_args_func] keyword[and] identifier[callable] ( identifier[no_args_func] ): keyword[return] identifier[no_args_func] ( identifier[options] ) keyword[else] : identifier[parser] . identifier[_print_message] ( literal[string] ) keyword[pass]
def parse(parser, argv=None, settings_key='settings', no_args_func=None): """ parser cliez app :param argparse.ArgumentParser parser: an instance of argparse.ArgumentParser :param argv: argument list,default is `sys.argv` :type argv: list or tuple :param str settings: settings option name, default is settings. :param object no_args_func: a callable object.if no sub-parser matched, parser will call it. :return: an instance of `cliez.component.Component` or its subclass """ argv = argv or sys.argv commands = command_list() if type(argv) not in [list, tuple]: raise TypeError('argv only can be list or tuple') # depends on [control=['if'], data=[]] # match sub-parser if len(argv) >= 2 and argv[1] in commands: sub_parsers = parser.add_subparsers() class_name = argv[1].capitalize() + 'Component' from cliez.conf import COMPONENT_ROOT, LOGGING_CONFIG, EPILOG, GENERAL_ARGUMENTS sys.path.insert(0, os.path.dirname(COMPONENT_ROOT)) mod = importlib.import_module('{}.components.{}'.format(os.path.basename(COMPONENT_ROOT), argv[1])) # dynamic load component klass = getattr(mod, class_name) sub_parser = append_arguments(klass, sub_parsers, EPILOG, GENERAL_ARGUMENTS) options = parser.parse_args(argv[1:]) settings = Settings.bind(getattr(options, settings_key)) if settings_key and hasattr(options, settings_key) else None obj = klass(parser, sub_parser, options, settings) # init logger logger_level = logging.CRITICAL if hasattr(options, 'verbose'): if options.verbose == 1: logger_level = logging.ERROR # depends on [control=['if'], data=[]] elif options.verbose == 2: logger_level = logging.WARNING # depends on [control=['if'], data=[]] elif options.verbose == 3: logger_level = logging.INFO obj.logger.setLevel(logging.INFO) # depends on [control=['if'], data=[]] pass # depends on [control=['if'], data=[]] if hasattr(options, 'debug') and options.debug: logger_level = logging.DEBUG # http lib use a strange way to logging try: import http.client as http_client http_client.HTTPConnection.debuglevel = 1 # depends on [control=['try'], data=[]] except Exception: # do nothing pass # depends on [control=['except'], data=[]] pass # depends on [control=['if'], data=[]] loggers = LOGGING_CONFIG['loggers'] for (k, v) in loggers.items(): v.setdefault('level', logger_level) if logger_level in [logging.INFO, logging.DEBUG]: v['handlers'] = ['stdout'] # depends on [control=['if'], data=[]] pass # depends on [control=['for'], data=[]] logging_config.dictConfig(LOGGING_CONFIG) # this may not necessary # obj.logger.setLevel(logger_level) obj.run(options) # return object to make unit test easy return obj # depends on [control=['if'], data=[]] # print all sub commands when user set. if not parser.description and len(commands): sub_parsers = parser.add_subparsers() [sub_parsers.add_parser(v) for v in commands] pass # depends on [control=['if'], data=[]] pass options = parser.parse_args(argv[1:]) if no_args_func and callable(no_args_func): return no_args_func(options) # depends on [control=['if'], data=[]] else: parser._print_message('nothing to do...\n') pass
async def xpending(self, name: str, group: str, start='-', end='+', count=None, consumer=None) -> list: """ Available since 5.0.0. Time complexity: O(log(N)+M) with N being the number of elements in the consumer group pending entries list, and M the number of elements being returned. When the command returns just the summary it runs in O(1) time assuming the list of consumers is small, otherwise there is additional O(N) time needed to iterate every consumer. Fetching data from a stream via a consumer group, and not acknowledging such data, has the effect of creating pending entries. The XPENDING command is the interface to inspect the list of pending messages. :param name: name of the stream :param group: name of the consumer group :param start: first stream ID. defaults to '-', meaning the earliest available. :param end: last stream ID. defaults to '+', meaning the latest available. :param count: int, number of entries [NOTICE] only when count is set to int, start & end options will have effect and detail of pending entries will be returned :param consumer: str, consumer of the stream in the group [NOTICE] only when count is set to int, this option can be appended to query pending entries of given consumer """ pieces = [name, group] if count is not None: pieces.extend([start, end, count]) if consumer is not None: pieces.append(str(consumer)) # todo: may there be a parse function return await self.execute_command('XPENDING', *pieces)
<ast.AsyncFunctionDef object at 0x7da1b0832fe0>
keyword[async] keyword[def] identifier[xpending] ( identifier[self] , identifier[name] : identifier[str] , identifier[group] : identifier[str] , identifier[start] = literal[string] , identifier[end] = literal[string] , identifier[count] = keyword[None] , identifier[consumer] = keyword[None] )-> identifier[list] : literal[string] identifier[pieces] =[ identifier[name] , identifier[group] ] keyword[if] identifier[count] keyword[is] keyword[not] keyword[None] : identifier[pieces] . identifier[extend] ([ identifier[start] , identifier[end] , identifier[count] ]) keyword[if] identifier[consumer] keyword[is] keyword[not] keyword[None] : identifier[pieces] . identifier[append] ( identifier[str] ( identifier[consumer] )) keyword[return] keyword[await] identifier[self] . identifier[execute_command] ( literal[string] ,* identifier[pieces] )
async def xpending(self, name: str, group: str, start='-', end='+', count=None, consumer=None) -> list: """ Available since 5.0.0. Time complexity: O(log(N)+M) with N being the number of elements in the consumer group pending entries list, and M the number of elements being returned. When the command returns just the summary it runs in O(1) time assuming the list of consumers is small, otherwise there is additional O(N) time needed to iterate every consumer. Fetching data from a stream via a consumer group, and not acknowledging such data, has the effect of creating pending entries. The XPENDING command is the interface to inspect the list of pending messages. :param name: name of the stream :param group: name of the consumer group :param start: first stream ID. defaults to '-', meaning the earliest available. :param end: last stream ID. defaults to '+', meaning the latest available. :param count: int, number of entries [NOTICE] only when count is set to int, start & end options will have effect and detail of pending entries will be returned :param consumer: str, consumer of the stream in the group [NOTICE] only when count is set to int, this option can be appended to query pending entries of given consumer """ pieces = [name, group] if count is not None: pieces.extend([start, end, count]) if consumer is not None: pieces.append(str(consumer)) # depends on [control=['if'], data=['consumer']] # depends on [control=['if'], data=['count']] # todo: may there be a parse function return await self.execute_command('XPENDING', *pieces)
def InitLocCheck(self): """make an interactive grid in which users can edit specimen names as well as which sample a specimen belongs to""" self.panel = wx.Panel(self, style=wx.SIMPLE_BORDER) text = """Step 5: Check that locations are correctly named. Fill in any blank cells using controlled vocabularies. (See Help button for details) ** Denotes controlled vocabulary""" label = wx.StaticText(self.panel, label=text) #self.Data_hierarchy = self.ErMagic.Data_hierarchy self.locations = self.er_magic_data.locations # if not self.er_magic_data.locations: msg = "You have no data in er_locations, so we are skipping step 5.\n Note that location names must be entered at the measurements level,so you may need to re-import your data, or you can add a location in step 3" dlg = wx.MessageDialog(None, caption="Message:", message=msg, style=wx.OK|wx.ICON_INFORMATION) dlg.ShowModal() dlg.Destroy() self.panel.Destroy() self.InitAgeCheck() return self.grid_builder = grid_frame2.GridBuilder(self.er_magic_data, 'location', self.er_magic_data.headers, self.panel) self.loc_grid = self.grid_builder.make_grid(incl_pmag=False) self.loc_grid.InitUI() self.grid_builder.add_data_to_grid(self.loc_grid, 'location', incl_pmag=False) self.grid = self.loc_grid # initialize all needed drop-down menus self.drop_down_menu = drop_down_menus.Menus("location", self, self.loc_grid, None) # need to find max/min lat/lon here IF they were added in the previous grid sites = self.er_magic_data.sites location_lat_lon = self.er_magic_data.get_min_max_lat_lon(self.er_magic_data.locations) col_names = ('location_begin_lat', 'location_end_lat', 'location_begin_lon', 'location_end_lon') col_inds = [self.grid.col_labels.index(name) for name in col_names] col_info = list(zip(col_names, col_inds)) for loc in self.er_magic_data.locations: row_ind = self.grid.row_labels.index(loc.name) for col_name, col_ind in col_info: info = location_lat_lon[loc.name][col_name] self.grid.SetCellValue(row_ind, col_ind, str(info)) ### Create Buttons ### hbox_one = wx.BoxSizer(wx.HORIZONTAL) self.helpButton = wx.Button(self.panel, label="Help") self.Bind(wx.EVT_BUTTON, lambda event: self.on_helpButton(event, "ErMagicLocationHelp.html"), self.helpButton) hbox_one.Add(self.helpButton) hboxok = wx.BoxSizer(wx.HORIZONTAL) self.saveButton = wx.Button(self.panel, id=-1, label='Save') self.Bind(wx.EVT_BUTTON, lambda event: self.on_saveButton(event, self.loc_grid), self.saveButton) self.cancelButton = wx.Button(self.panel, wx.ID_CANCEL, '&Cancel') self.Bind(wx.EVT_BUTTON, self.on_cancelButton, self.cancelButton) self.continueButton = wx.Button(self.panel, id=-1, label='Save and continue') self.Bind(wx.EVT_BUTTON, lambda event: self.on_continueButton(event, self.loc_grid, next_dia=self.InitAgeCheck), self.continueButton) self.backButton = wx.Button(self.panel, wx.ID_ANY, "&Back") previous_dia = self.InitSampCheck self.Bind(wx.EVT_BUTTON, lambda event: self.on_backButton(event, previous_dia, current_dia=self.InitLocCheck), self.backButton) hboxok.Add(self.saveButton, flag=wx.RIGHT, border=10) hboxok.Add(self.cancelButton, flag=wx.RIGHT, border=10) hboxok.Add(self.continueButton, flag=wx.RIGHT, border=10) hboxok.Add(self.backButton) # hboxgrid = pw.hbox_grid(self.panel, self.onDeleteRow, 'location', self.grid) self.deleteRowButton = hboxgrid.deleteRowButton self.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK, self.onLeftClickLabel, self.grid) ### Make Containers ### vbox = wx.BoxSizer(wx.VERTICAL) vbox.Add(label, flag=wx.ALIGN_CENTER|wx.TOP|wx.BOTTOM, border=20) vbox.Add(hbox_one, flag=wx.BOTTOM|wx.ALIGN_LEFT, border=10) vbox.Add(hboxok, flag=wx.BOTTOM|wx.ALIGN_LEFT, border=10) vbox.Add(hboxgrid, flag=wx.BOTTOM|wx.ALIGN_LEFT, border=10) vbox.Add(self.loc_grid, flag=wx.TOP|wx.BOTTOM, border=10) vbox.AddSpacer(20) self.hbox_all = wx.BoxSizer(wx.HORIZONTAL) self.hbox_all.AddSpacer(20) self.hbox_all.Add(vbox) self.hbox_all.AddSpacer(20) self.panel.SetSizer(self.hbox_all) #if sys.platform in ['win32', 'win64']: # self.panel.SetScrollbars(20, 20, 50, 50) self.hbox_all.Fit(self) self.Centre() self.Show() self.Hide() self.Show()
def function[InitLocCheck, parameter[self]]: constant[make an interactive grid in which users can edit specimen names as well as which sample a specimen belongs to] name[self].panel assign[=] call[name[wx].Panel, parameter[name[self]]] variable[text] assign[=] constant[Step 5: Check that locations are correctly named. Fill in any blank cells using controlled vocabularies. (See Help button for details) ** Denotes controlled vocabulary] variable[label] assign[=] call[name[wx].StaticText, parameter[name[self].panel]] name[self].locations assign[=] name[self].er_magic_data.locations if <ast.UnaryOp object at 0x7da18dc9a050> begin[:] variable[msg] assign[=] constant[You have no data in er_locations, so we are skipping step 5. Note that location names must be entered at the measurements level,so you may need to re-import your data, or you can add a location in step 3] variable[dlg] assign[=] call[name[wx].MessageDialog, parameter[constant[None]]] call[name[dlg].ShowModal, parameter[]] call[name[dlg].Destroy, parameter[]] call[name[self].panel.Destroy, parameter[]] call[name[self].InitAgeCheck, parameter[]] return[None] name[self].grid_builder assign[=] call[name[grid_frame2].GridBuilder, parameter[name[self].er_magic_data, constant[location], name[self].er_magic_data.headers, name[self].panel]] name[self].loc_grid assign[=] call[name[self].grid_builder.make_grid, parameter[]] call[name[self].loc_grid.InitUI, parameter[]] call[name[self].grid_builder.add_data_to_grid, parameter[name[self].loc_grid, constant[location]]] name[self].grid assign[=] name[self].loc_grid name[self].drop_down_menu assign[=] call[name[drop_down_menus].Menus, parameter[constant[location], name[self], name[self].loc_grid, constant[None]]] variable[sites] assign[=] name[self].er_magic_data.sites variable[location_lat_lon] assign[=] call[name[self].er_magic_data.get_min_max_lat_lon, parameter[name[self].er_magic_data.locations]] variable[col_names] assign[=] tuple[[<ast.Constant object at 0x7da18dc99e70>, <ast.Constant object at 0x7da18dc9bac0>, <ast.Constant object at 0x7da18dc98ac0>, <ast.Constant object at 0x7da18dc98e80>]] variable[col_inds] assign[=] <ast.ListComp object at 0x7da18dc9ab00> variable[col_info] assign[=] call[name[list], parameter[call[name[zip], parameter[name[col_names], name[col_inds]]]]] for taget[name[loc]] in starred[name[self].er_magic_data.locations] begin[:] variable[row_ind] assign[=] call[name[self].grid.row_labels.index, parameter[name[loc].name]] for taget[tuple[[<ast.Name object at 0x7da18dc98d30>, <ast.Name object at 0x7da18dc983a0>]]] in starred[name[col_info]] begin[:] variable[info] assign[=] call[call[name[location_lat_lon]][name[loc].name]][name[col_name]] call[name[self].grid.SetCellValue, parameter[name[row_ind], name[col_ind], call[name[str], parameter[name[info]]]]] variable[hbox_one] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]] name[self].helpButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, <ast.Lambda object at 0x7da18dc9bdf0>, name[self].helpButton]] call[name[hbox_one].Add, parameter[name[self].helpButton]] variable[hboxok] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]] name[self].saveButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, <ast.Lambda object at 0x7da1b01530a0>, name[self].saveButton]] name[self].cancelButton assign[=] call[name[wx].Button, parameter[name[self].panel, name[wx].ID_CANCEL, constant[&Cancel]]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].on_cancelButton, name[self].cancelButton]] name[self].continueButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, <ast.Lambda object at 0x7da1b01537f0>, name[self].continueButton]] name[self].backButton assign[=] call[name[wx].Button, parameter[name[self].panel, name[wx].ID_ANY, constant[&Back]]] variable[previous_dia] assign[=] name[self].InitSampCheck call[name[self].Bind, parameter[name[wx].EVT_BUTTON, <ast.Lambda object at 0x7da1b0153bb0>, name[self].backButton]] call[name[hboxok].Add, parameter[name[self].saveButton]] call[name[hboxok].Add, parameter[name[self].cancelButton]] call[name[hboxok].Add, parameter[name[self].continueButton]] call[name[hboxok].Add, parameter[name[self].backButton]] variable[hboxgrid] assign[=] call[name[pw].hbox_grid, parameter[name[self].panel, name[self].onDeleteRow, constant[location], name[self].grid]] name[self].deleteRowButton assign[=] name[hboxgrid].deleteRowButton call[name[self].Bind, parameter[name[wx].grid.EVT_GRID_LABEL_LEFT_CLICK, name[self].onLeftClickLabel, name[self].grid]] variable[vbox] assign[=] call[name[wx].BoxSizer, parameter[name[wx].VERTICAL]] call[name[vbox].Add, parameter[name[label]]] call[name[vbox].Add, parameter[name[hbox_one]]] call[name[vbox].Add, parameter[name[hboxok]]] call[name[vbox].Add, parameter[name[hboxgrid]]] call[name[vbox].Add, parameter[name[self].loc_grid]] call[name[vbox].AddSpacer, parameter[constant[20]]] name[self].hbox_all assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]] call[name[self].hbox_all.AddSpacer, parameter[constant[20]]] call[name[self].hbox_all.Add, parameter[name[vbox]]] call[name[self].hbox_all.AddSpacer, parameter[constant[20]]] call[name[self].panel.SetSizer, parameter[name[self].hbox_all]] call[name[self].hbox_all.Fit, parameter[name[self]]] call[name[self].Centre, parameter[]] call[name[self].Show, parameter[]] call[name[self].Hide, parameter[]] call[name[self].Show, parameter[]]
keyword[def] identifier[InitLocCheck] ( identifier[self] ): literal[string] identifier[self] . identifier[panel] = identifier[wx] . identifier[Panel] ( identifier[self] , identifier[style] = identifier[wx] . identifier[SIMPLE_BORDER] ) identifier[text] = literal[string] identifier[label] = identifier[wx] . identifier[StaticText] ( identifier[self] . identifier[panel] , identifier[label] = identifier[text] ) identifier[self] . identifier[locations] = identifier[self] . identifier[er_magic_data] . identifier[locations] keyword[if] keyword[not] identifier[self] . identifier[er_magic_data] . identifier[locations] : identifier[msg] = literal[string] identifier[dlg] = identifier[wx] . identifier[MessageDialog] ( keyword[None] , identifier[caption] = literal[string] , identifier[message] = identifier[msg] , identifier[style] = identifier[wx] . identifier[OK] | identifier[wx] . identifier[ICON_INFORMATION] ) identifier[dlg] . identifier[ShowModal] () identifier[dlg] . identifier[Destroy] () identifier[self] . identifier[panel] . identifier[Destroy] () identifier[self] . identifier[InitAgeCheck] () keyword[return] identifier[self] . identifier[grid_builder] = identifier[grid_frame2] . identifier[GridBuilder] ( identifier[self] . identifier[er_magic_data] , literal[string] , identifier[self] . identifier[er_magic_data] . identifier[headers] , identifier[self] . identifier[panel] ) identifier[self] . identifier[loc_grid] = identifier[self] . identifier[grid_builder] . identifier[make_grid] ( identifier[incl_pmag] = keyword[False] ) identifier[self] . identifier[loc_grid] . identifier[InitUI] () identifier[self] . identifier[grid_builder] . identifier[add_data_to_grid] ( identifier[self] . identifier[loc_grid] , literal[string] , identifier[incl_pmag] = keyword[False] ) identifier[self] . identifier[grid] = identifier[self] . identifier[loc_grid] identifier[self] . identifier[drop_down_menu] = identifier[drop_down_menus] . identifier[Menus] ( literal[string] , identifier[self] , identifier[self] . identifier[loc_grid] , keyword[None] ) identifier[sites] = identifier[self] . identifier[er_magic_data] . identifier[sites] identifier[location_lat_lon] = identifier[self] . identifier[er_magic_data] . identifier[get_min_max_lat_lon] ( identifier[self] . identifier[er_magic_data] . identifier[locations] ) identifier[col_names] =( literal[string] , literal[string] , literal[string] , literal[string] ) identifier[col_inds] =[ identifier[self] . identifier[grid] . identifier[col_labels] . identifier[index] ( identifier[name] ) keyword[for] identifier[name] keyword[in] identifier[col_names] ] identifier[col_info] = identifier[list] ( identifier[zip] ( identifier[col_names] , identifier[col_inds] )) keyword[for] identifier[loc] keyword[in] identifier[self] . identifier[er_magic_data] . identifier[locations] : identifier[row_ind] = identifier[self] . identifier[grid] . identifier[row_labels] . identifier[index] ( identifier[loc] . identifier[name] ) keyword[for] identifier[col_name] , identifier[col_ind] keyword[in] identifier[col_info] : identifier[info] = identifier[location_lat_lon] [ identifier[loc] . identifier[name] ][ identifier[col_name] ] identifier[self] . identifier[grid] . identifier[SetCellValue] ( identifier[row_ind] , identifier[col_ind] , identifier[str] ( identifier[info] )) identifier[hbox_one] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] ) identifier[self] . identifier[helpButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[label] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , keyword[lambda] identifier[event] : identifier[self] . identifier[on_helpButton] ( identifier[event] , literal[string] ), identifier[self] . identifier[helpButton] ) identifier[hbox_one] . identifier[Add] ( identifier[self] . identifier[helpButton] ) identifier[hboxok] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] ) identifier[self] . identifier[saveButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , keyword[lambda] identifier[event] : identifier[self] . identifier[on_saveButton] ( identifier[event] , identifier[self] . identifier[loc_grid] ), identifier[self] . identifier[saveButton] ) identifier[self] . identifier[cancelButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[wx] . identifier[ID_CANCEL] , literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[on_cancelButton] , identifier[self] . identifier[cancelButton] ) identifier[self] . identifier[continueButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , keyword[lambda] identifier[event] : identifier[self] . identifier[on_continueButton] ( identifier[event] , identifier[self] . identifier[loc_grid] , identifier[next_dia] = identifier[self] . identifier[InitAgeCheck] ), identifier[self] . identifier[continueButton] ) identifier[self] . identifier[backButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[wx] . identifier[ID_ANY] , literal[string] ) identifier[previous_dia] = identifier[self] . identifier[InitSampCheck] identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , keyword[lambda] identifier[event] : identifier[self] . identifier[on_backButton] ( identifier[event] , identifier[previous_dia] , identifier[current_dia] = identifier[self] . identifier[InitLocCheck] ), identifier[self] . identifier[backButton] ) identifier[hboxok] . identifier[Add] ( identifier[self] . identifier[saveButton] , identifier[flag] = identifier[wx] . identifier[RIGHT] , identifier[border] = literal[int] ) identifier[hboxok] . identifier[Add] ( identifier[self] . identifier[cancelButton] , identifier[flag] = identifier[wx] . identifier[RIGHT] , identifier[border] = literal[int] ) identifier[hboxok] . identifier[Add] ( identifier[self] . identifier[continueButton] , identifier[flag] = identifier[wx] . identifier[RIGHT] , identifier[border] = literal[int] ) identifier[hboxok] . identifier[Add] ( identifier[self] . identifier[backButton] ) identifier[hboxgrid] = identifier[pw] . identifier[hbox_grid] ( identifier[self] . identifier[panel] , identifier[self] . identifier[onDeleteRow] , literal[string] , identifier[self] . identifier[grid] ) identifier[self] . identifier[deleteRowButton] = identifier[hboxgrid] . identifier[deleteRowButton] identifier[self] . identifier[Bind] ( identifier[wx] . identifier[grid] . identifier[EVT_GRID_LABEL_LEFT_CLICK] , identifier[self] . identifier[onLeftClickLabel] , identifier[self] . identifier[grid] ) identifier[vbox] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[VERTICAL] ) identifier[vbox] . identifier[Add] ( identifier[label] , identifier[flag] = identifier[wx] . identifier[ALIGN_CENTER] | identifier[wx] . identifier[TOP] | identifier[wx] . identifier[BOTTOM] , identifier[border] = literal[int] ) identifier[vbox] . identifier[Add] ( identifier[hbox_one] , identifier[flag] = identifier[wx] . identifier[BOTTOM] | identifier[wx] . identifier[ALIGN_LEFT] , identifier[border] = literal[int] ) identifier[vbox] . identifier[Add] ( identifier[hboxok] , identifier[flag] = identifier[wx] . identifier[BOTTOM] | identifier[wx] . identifier[ALIGN_LEFT] , identifier[border] = literal[int] ) identifier[vbox] . identifier[Add] ( identifier[hboxgrid] , identifier[flag] = identifier[wx] . identifier[BOTTOM] | identifier[wx] . identifier[ALIGN_LEFT] , identifier[border] = literal[int] ) identifier[vbox] . identifier[Add] ( identifier[self] . identifier[loc_grid] , identifier[flag] = identifier[wx] . identifier[TOP] | identifier[wx] . identifier[BOTTOM] , identifier[border] = literal[int] ) identifier[vbox] . identifier[AddSpacer] ( literal[int] ) identifier[self] . identifier[hbox_all] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] ) identifier[self] . identifier[hbox_all] . identifier[AddSpacer] ( literal[int] ) identifier[self] . identifier[hbox_all] . identifier[Add] ( identifier[vbox] ) identifier[self] . identifier[hbox_all] . identifier[AddSpacer] ( literal[int] ) identifier[self] . identifier[panel] . identifier[SetSizer] ( identifier[self] . identifier[hbox_all] ) identifier[self] . identifier[hbox_all] . identifier[Fit] ( identifier[self] ) identifier[self] . identifier[Centre] () identifier[self] . identifier[Show] () identifier[self] . identifier[Hide] () identifier[self] . identifier[Show] ()
def InitLocCheck(self): """make an interactive grid in which users can edit specimen names as well as which sample a specimen belongs to""" self.panel = wx.Panel(self, style=wx.SIMPLE_BORDER) text = 'Step 5:\nCheck that locations are correctly named.\nFill in any blank cells using controlled vocabularies.\n(See Help button for details)\n\n** Denotes controlled vocabulary' label = wx.StaticText(self.panel, label=text) #self.Data_hierarchy = self.ErMagic.Data_hierarchy self.locations = self.er_magic_data.locations # if not self.er_magic_data.locations: msg = 'You have no data in er_locations, so we are skipping step 5.\n Note that location names must be entered at the measurements level,so you may need to re-import your data, or you can add a location in step 3' dlg = wx.MessageDialog(None, caption='Message:', message=msg, style=wx.OK | wx.ICON_INFORMATION) dlg.ShowModal() dlg.Destroy() self.panel.Destroy() self.InitAgeCheck() return # depends on [control=['if'], data=[]] self.grid_builder = grid_frame2.GridBuilder(self.er_magic_data, 'location', self.er_magic_data.headers, self.panel) self.loc_grid = self.grid_builder.make_grid(incl_pmag=False) self.loc_grid.InitUI() self.grid_builder.add_data_to_grid(self.loc_grid, 'location', incl_pmag=False) self.grid = self.loc_grid # initialize all needed drop-down menus self.drop_down_menu = drop_down_menus.Menus('location', self, self.loc_grid, None) # need to find max/min lat/lon here IF they were added in the previous grid sites = self.er_magic_data.sites location_lat_lon = self.er_magic_data.get_min_max_lat_lon(self.er_magic_data.locations) col_names = ('location_begin_lat', 'location_end_lat', 'location_begin_lon', 'location_end_lon') col_inds = [self.grid.col_labels.index(name) for name in col_names] col_info = list(zip(col_names, col_inds)) for loc in self.er_magic_data.locations: row_ind = self.grid.row_labels.index(loc.name) for (col_name, col_ind) in col_info: info = location_lat_lon[loc.name][col_name] self.grid.SetCellValue(row_ind, col_ind, str(info)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['loc']] ### Create Buttons ### hbox_one = wx.BoxSizer(wx.HORIZONTAL) self.helpButton = wx.Button(self.panel, label='Help') self.Bind(wx.EVT_BUTTON, lambda event: self.on_helpButton(event, 'ErMagicLocationHelp.html'), self.helpButton) hbox_one.Add(self.helpButton) hboxok = wx.BoxSizer(wx.HORIZONTAL) self.saveButton = wx.Button(self.panel, id=-1, label='Save') self.Bind(wx.EVT_BUTTON, lambda event: self.on_saveButton(event, self.loc_grid), self.saveButton) self.cancelButton = wx.Button(self.panel, wx.ID_CANCEL, '&Cancel') self.Bind(wx.EVT_BUTTON, self.on_cancelButton, self.cancelButton) self.continueButton = wx.Button(self.panel, id=-1, label='Save and continue') self.Bind(wx.EVT_BUTTON, lambda event: self.on_continueButton(event, self.loc_grid, next_dia=self.InitAgeCheck), self.continueButton) self.backButton = wx.Button(self.panel, wx.ID_ANY, '&Back') previous_dia = self.InitSampCheck self.Bind(wx.EVT_BUTTON, lambda event: self.on_backButton(event, previous_dia, current_dia=self.InitLocCheck), self.backButton) hboxok.Add(self.saveButton, flag=wx.RIGHT, border=10) hboxok.Add(self.cancelButton, flag=wx.RIGHT, border=10) hboxok.Add(self.continueButton, flag=wx.RIGHT, border=10) hboxok.Add(self.backButton) # hboxgrid = pw.hbox_grid(self.panel, self.onDeleteRow, 'location', self.grid) self.deleteRowButton = hboxgrid.deleteRowButton self.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK, self.onLeftClickLabel, self.grid) ### Make Containers ### vbox = wx.BoxSizer(wx.VERTICAL) vbox.Add(label, flag=wx.ALIGN_CENTER | wx.TOP | wx.BOTTOM, border=20) vbox.Add(hbox_one, flag=wx.BOTTOM | wx.ALIGN_LEFT, border=10) vbox.Add(hboxok, flag=wx.BOTTOM | wx.ALIGN_LEFT, border=10) vbox.Add(hboxgrid, flag=wx.BOTTOM | wx.ALIGN_LEFT, border=10) vbox.Add(self.loc_grid, flag=wx.TOP | wx.BOTTOM, border=10) vbox.AddSpacer(20) self.hbox_all = wx.BoxSizer(wx.HORIZONTAL) self.hbox_all.AddSpacer(20) self.hbox_all.Add(vbox) self.hbox_all.AddSpacer(20) self.panel.SetSizer(self.hbox_all) #if sys.platform in ['win32', 'win64']: # self.panel.SetScrollbars(20, 20, 50, 50) self.hbox_all.Fit(self) self.Centre() self.Show() self.Hide() self.Show()
def save_user_cmd(email, name=None, groups=None, locale='en_US', timezone='US/Eastern'): """ Command to save a user :param email: user email :param name: user name :param groups: user permission groups :return: A command that validate date and save the user """ if name is None: name = email if groups is None: groups = [] return SaveUserCmd(name=name, email=email, groups=groups, locale=locale, timezone=timezone)
def function[save_user_cmd, parameter[email, name, groups, locale, timezone]]: constant[ Command to save a user :param email: user email :param name: user name :param groups: user permission groups :return: A command that validate date and save the user ] if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] name[email] if compare[name[groups] is constant[None]] begin[:] variable[groups] assign[=] list[[]] return[call[name[SaveUserCmd], parameter[]]]
keyword[def] identifier[save_user_cmd] ( identifier[email] , identifier[name] = keyword[None] , identifier[groups] = keyword[None] , identifier[locale] = literal[string] , identifier[timezone] = literal[string] ): literal[string] keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = identifier[email] keyword[if] identifier[groups] keyword[is] keyword[None] : identifier[groups] =[] keyword[return] identifier[SaveUserCmd] ( identifier[name] = identifier[name] , identifier[email] = identifier[email] , identifier[groups] = identifier[groups] , identifier[locale] = identifier[locale] , identifier[timezone] = identifier[timezone] )
def save_user_cmd(email, name=None, groups=None, locale='en_US', timezone='US/Eastern'): """ Command to save a user :param email: user email :param name: user name :param groups: user permission groups :return: A command that validate date and save the user """ if name is None: name = email # depends on [control=['if'], data=['name']] if groups is None: groups = [] # depends on [control=['if'], data=['groups']] return SaveUserCmd(name=name, email=email, groups=groups, locale=locale, timezone=timezone)
def by_filter(cls, session, opts, **kwargs): """ Get packages from given filters. :param session: SQLAlchemy session :type session: :class:`sqlalchemy.Session` :param opts: filtering options :type opts: `dict :return: package instances :rtype: generator of :class:`pyshop.models.Package` """ where = [] if opts.get('local_only'): where.append(cls.local == True) if opts.get('names'): where.append(cls.name.in_(opts['names'])) if opts.get('classifiers'): ids = [c.id for c in opts.get('classifiers')] cls_pkg = classifier__package qry = session.query(cls_pkg.c.package_id, func.count('*')) qry = qry.filter(cls_pkg.c.classifier_id.in_(ids)) qry = qry.group_by(cls_pkg.c.package_id) qry = qry.having(func.count('*') >= len(ids)) where.append(cls.id.in_([r[0] for r in qry.all()])) return cls.find(session, where=where, **kwargs)
def function[by_filter, parameter[cls, session, opts]]: constant[ Get packages from given filters. :param session: SQLAlchemy session :type session: :class:`sqlalchemy.Session` :param opts: filtering options :type opts: `dict :return: package instances :rtype: generator of :class:`pyshop.models.Package` ] variable[where] assign[=] list[[]] if call[name[opts].get, parameter[constant[local_only]]] begin[:] call[name[where].append, parameter[compare[name[cls].local equal[==] constant[True]]]] if call[name[opts].get, parameter[constant[names]]] begin[:] call[name[where].append, parameter[call[name[cls].name.in_, parameter[call[name[opts]][constant[names]]]]]] if call[name[opts].get, parameter[constant[classifiers]]] begin[:] variable[ids] assign[=] <ast.ListComp object at 0x7da2054a7490> variable[cls_pkg] assign[=] name[classifier__package] variable[qry] assign[=] call[name[session].query, parameter[name[cls_pkg].c.package_id, call[name[func].count, parameter[constant[*]]]]] variable[qry] assign[=] call[name[qry].filter, parameter[call[name[cls_pkg].c.classifier_id.in_, parameter[name[ids]]]]] variable[qry] assign[=] call[name[qry].group_by, parameter[name[cls_pkg].c.package_id]] variable[qry] assign[=] call[name[qry].having, parameter[compare[call[name[func].count, parameter[constant[*]]] greater_or_equal[>=] call[name[len], parameter[name[ids]]]]]] call[name[where].append, parameter[call[name[cls].id.in_, parameter[<ast.ListComp object at 0x7da20c76fa00>]]]] return[call[name[cls].find, parameter[name[session]]]]
keyword[def] identifier[by_filter] ( identifier[cls] , identifier[session] , identifier[opts] ,** identifier[kwargs] ): literal[string] identifier[where] =[] keyword[if] identifier[opts] . identifier[get] ( literal[string] ): identifier[where] . identifier[append] ( identifier[cls] . identifier[local] == keyword[True] ) keyword[if] identifier[opts] . identifier[get] ( literal[string] ): identifier[where] . identifier[append] ( identifier[cls] . identifier[name] . identifier[in_] ( identifier[opts] [ literal[string] ])) keyword[if] identifier[opts] . identifier[get] ( literal[string] ): identifier[ids] =[ identifier[c] . identifier[id] keyword[for] identifier[c] keyword[in] identifier[opts] . identifier[get] ( literal[string] )] identifier[cls_pkg] = identifier[classifier__package] identifier[qry] = identifier[session] . identifier[query] ( identifier[cls_pkg] . identifier[c] . identifier[package_id] , identifier[func] . identifier[count] ( literal[string] )) identifier[qry] = identifier[qry] . identifier[filter] ( identifier[cls_pkg] . identifier[c] . identifier[classifier_id] . identifier[in_] ( identifier[ids] )) identifier[qry] = identifier[qry] . identifier[group_by] ( identifier[cls_pkg] . identifier[c] . identifier[package_id] ) identifier[qry] = identifier[qry] . identifier[having] ( identifier[func] . identifier[count] ( literal[string] )>= identifier[len] ( identifier[ids] )) identifier[where] . identifier[append] ( identifier[cls] . identifier[id] . identifier[in_] ([ identifier[r] [ literal[int] ] keyword[for] identifier[r] keyword[in] identifier[qry] . identifier[all] ()])) keyword[return] identifier[cls] . identifier[find] ( identifier[session] , identifier[where] = identifier[where] ,** identifier[kwargs] )
def by_filter(cls, session, opts, **kwargs): """ Get packages from given filters. :param session: SQLAlchemy session :type session: :class:`sqlalchemy.Session` :param opts: filtering options :type opts: `dict :return: package instances :rtype: generator of :class:`pyshop.models.Package` """ where = [] if opts.get('local_only'): where.append(cls.local == True) # depends on [control=['if'], data=[]] if opts.get('names'): where.append(cls.name.in_(opts['names'])) # depends on [control=['if'], data=[]] if opts.get('classifiers'): ids = [c.id for c in opts.get('classifiers')] cls_pkg = classifier__package qry = session.query(cls_pkg.c.package_id, func.count('*')) qry = qry.filter(cls_pkg.c.classifier_id.in_(ids)) qry = qry.group_by(cls_pkg.c.package_id) qry = qry.having(func.count('*') >= len(ids)) where.append(cls.id.in_([r[0] for r in qry.all()])) # depends on [control=['if'], data=[]] return cls.find(session, where=where, **kwargs)
def sig2(method, endpoint, params, provider, aws_api_version): ''' Sign a query against AWS services using Signature Version 2 Signing Process. This is documented at: http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html ''' timenow = datetime.utcnow() timestamp = timenow.strftime('%Y-%m-%dT%H:%M:%SZ') # Retrieve access credentials from meta-data, or use provided access_key_id, secret_access_key, token = creds(provider) params_with_headers = params.copy() params_with_headers['AWSAccessKeyId'] = access_key_id params_with_headers['SignatureVersion'] = '2' params_with_headers['SignatureMethod'] = 'HmacSHA256' params_with_headers['Timestamp'] = '{0}'.format(timestamp) params_with_headers['Version'] = aws_api_version keys = sorted(params_with_headers.keys()) values = list(list(map(params_with_headers.get, keys))) querystring = urlencode(list(zip(keys, values))) canonical = '{0}\n{1}\n/\n{2}'.format( method.encode('utf-8'), endpoint.encode('utf-8'), querystring.encode('utf-8'), ) hashed = hmac.new(secret_access_key, canonical, hashlib.sha256) sig = binascii.b2a_base64(hashed.digest()) params_with_headers['Signature'] = sig.strip() # Add in security token if we have one if token != '': params_with_headers['SecurityToken'] = token return params_with_headers
def function[sig2, parameter[method, endpoint, params, provider, aws_api_version]]: constant[ Sign a query against AWS services using Signature Version 2 Signing Process. This is documented at: http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html ] variable[timenow] assign[=] call[name[datetime].utcnow, parameter[]] variable[timestamp] assign[=] call[name[timenow].strftime, parameter[constant[%Y-%m-%dT%H:%M:%SZ]]] <ast.Tuple object at 0x7da18bc73eb0> assign[=] call[name[creds], parameter[name[provider]]] variable[params_with_headers] assign[=] call[name[params].copy, parameter[]] call[name[params_with_headers]][constant[AWSAccessKeyId]] assign[=] name[access_key_id] call[name[params_with_headers]][constant[SignatureVersion]] assign[=] constant[2] call[name[params_with_headers]][constant[SignatureMethod]] assign[=] constant[HmacSHA256] call[name[params_with_headers]][constant[Timestamp]] assign[=] call[constant[{0}].format, parameter[name[timestamp]]] call[name[params_with_headers]][constant[Version]] assign[=] name[aws_api_version] variable[keys] assign[=] call[name[sorted], parameter[call[name[params_with_headers].keys, parameter[]]]] variable[values] assign[=] call[name[list], parameter[call[name[list], parameter[call[name[map], parameter[name[params_with_headers].get, name[keys]]]]]]] variable[querystring] assign[=] call[name[urlencode], parameter[call[name[list], parameter[call[name[zip], parameter[name[keys], name[values]]]]]]] variable[canonical] assign[=] call[constant[{0} {1} / {2}].format, parameter[call[name[method].encode, parameter[constant[utf-8]]], call[name[endpoint].encode, parameter[constant[utf-8]]], call[name[querystring].encode, parameter[constant[utf-8]]]]] variable[hashed] assign[=] call[name[hmac].new, parameter[name[secret_access_key], name[canonical], name[hashlib].sha256]] variable[sig] assign[=] call[name[binascii].b2a_base64, parameter[call[name[hashed].digest, parameter[]]]] call[name[params_with_headers]][constant[Signature]] assign[=] call[name[sig].strip, parameter[]] if compare[name[token] not_equal[!=] constant[]] begin[:] call[name[params_with_headers]][constant[SecurityToken]] assign[=] name[token] return[name[params_with_headers]]
keyword[def] identifier[sig2] ( identifier[method] , identifier[endpoint] , identifier[params] , identifier[provider] , identifier[aws_api_version] ): literal[string] identifier[timenow] = identifier[datetime] . identifier[utcnow] () identifier[timestamp] = identifier[timenow] . identifier[strftime] ( literal[string] ) identifier[access_key_id] , identifier[secret_access_key] , identifier[token] = identifier[creds] ( identifier[provider] ) identifier[params_with_headers] = identifier[params] . identifier[copy] () identifier[params_with_headers] [ literal[string] ]= identifier[access_key_id] identifier[params_with_headers] [ literal[string] ]= literal[string] identifier[params_with_headers] [ literal[string] ]= literal[string] identifier[params_with_headers] [ literal[string] ]= literal[string] . identifier[format] ( identifier[timestamp] ) identifier[params_with_headers] [ literal[string] ]= identifier[aws_api_version] identifier[keys] = identifier[sorted] ( identifier[params_with_headers] . identifier[keys] ()) identifier[values] = identifier[list] ( identifier[list] ( identifier[map] ( identifier[params_with_headers] . identifier[get] , identifier[keys] ))) identifier[querystring] = identifier[urlencode] ( identifier[list] ( identifier[zip] ( identifier[keys] , identifier[values] ))) identifier[canonical] = literal[string] . identifier[format] ( identifier[method] . identifier[encode] ( literal[string] ), identifier[endpoint] . identifier[encode] ( literal[string] ), identifier[querystring] . identifier[encode] ( literal[string] ), ) identifier[hashed] = identifier[hmac] . identifier[new] ( identifier[secret_access_key] , identifier[canonical] , identifier[hashlib] . identifier[sha256] ) identifier[sig] = identifier[binascii] . identifier[b2a_base64] ( identifier[hashed] . identifier[digest] ()) identifier[params_with_headers] [ literal[string] ]= identifier[sig] . identifier[strip] () keyword[if] identifier[token] != literal[string] : identifier[params_with_headers] [ literal[string] ]= identifier[token] keyword[return] identifier[params_with_headers]
def sig2(method, endpoint, params, provider, aws_api_version): """ Sign a query against AWS services using Signature Version 2 Signing Process. This is documented at: http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html """ timenow = datetime.utcnow() timestamp = timenow.strftime('%Y-%m-%dT%H:%M:%SZ') # Retrieve access credentials from meta-data, or use provided (access_key_id, secret_access_key, token) = creds(provider) params_with_headers = params.copy() params_with_headers['AWSAccessKeyId'] = access_key_id params_with_headers['SignatureVersion'] = '2' params_with_headers['SignatureMethod'] = 'HmacSHA256' params_with_headers['Timestamp'] = '{0}'.format(timestamp) params_with_headers['Version'] = aws_api_version keys = sorted(params_with_headers.keys()) values = list(list(map(params_with_headers.get, keys))) querystring = urlencode(list(zip(keys, values))) canonical = '{0}\n{1}\n/\n{2}'.format(method.encode('utf-8'), endpoint.encode('utf-8'), querystring.encode('utf-8')) hashed = hmac.new(secret_access_key, canonical, hashlib.sha256) sig = binascii.b2a_base64(hashed.digest()) params_with_headers['Signature'] = sig.strip() # Add in security token if we have one if token != '': params_with_headers['SecurityToken'] = token # depends on [control=['if'], data=['token']] return params_with_headers
def generate_url(self, remote_file): """Sign a remote file to distribute. The azure url format is https://myaccount.blob.core.windows.net/mycontainer/myblob. Args: remote_file(str): The blob that we want to sign. """ parse_url = _parse_url(remote_file) key = self.storage_client.storage_accounts.list_keys(self.resource_group_name, parse_url.account).keys[0].value if parse_url.file_type == 'blob': bs = BlockBlobService(account_name=parse_url.account, account_key=key) sas_token = bs.generate_blob_shared_access_signature(parse_url.container_or_share_name, parse_url.file, permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=24), ) source_blob_url = bs.make_blob_url(container_name=parse_url.container_or_share_name, blob_name=parse_url.file, sas_token=sas_token) return source_blob_url elif parse_url.file_type == 'file': fs = FileService(account_name=parse_url.account, account_key=key) sas_token = fs.generate_file_shared_access_signature(share_name=parse_url.container_or_share_name, directory_name=parse_url.path, file_name=parse_url.file, permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=24), ) source_file_url = fs.make_file_url(share_name=parse_url.container_or_share_name, directory_name=parse_url.path, file_name=parse_url.file, sas_token=sas_token) return source_file_url else: raise ValueError("This azure storage type is not valid. It should be blob or file.")
def function[generate_url, parameter[self, remote_file]]: constant[Sign a remote file to distribute. The azure url format is https://myaccount.blob.core.windows.net/mycontainer/myblob. Args: remote_file(str): The blob that we want to sign. ] variable[parse_url] assign[=] call[name[_parse_url], parameter[name[remote_file]]] variable[key] assign[=] call[call[name[self].storage_client.storage_accounts.list_keys, parameter[name[self].resource_group_name, name[parse_url].account]].keys][constant[0]].value if compare[name[parse_url].file_type equal[==] constant[blob]] begin[:] variable[bs] assign[=] call[name[BlockBlobService], parameter[]] variable[sas_token] assign[=] call[name[bs].generate_blob_shared_access_signature, parameter[name[parse_url].container_or_share_name, name[parse_url].file]] variable[source_blob_url] assign[=] call[name[bs].make_blob_url, parameter[]] return[name[source_blob_url]]
keyword[def] identifier[generate_url] ( identifier[self] , identifier[remote_file] ): literal[string] identifier[parse_url] = identifier[_parse_url] ( identifier[remote_file] ) identifier[key] = identifier[self] . identifier[storage_client] . identifier[storage_accounts] . identifier[list_keys] ( identifier[self] . identifier[resource_group_name] , identifier[parse_url] . identifier[account] ). identifier[keys] [ literal[int] ]. identifier[value] keyword[if] identifier[parse_url] . identifier[file_type] == literal[string] : identifier[bs] = identifier[BlockBlobService] ( identifier[account_name] = identifier[parse_url] . identifier[account] , identifier[account_key] = identifier[key] ) identifier[sas_token] = identifier[bs] . identifier[generate_blob_shared_access_signature] ( identifier[parse_url] . identifier[container_or_share_name] , identifier[parse_url] . identifier[file] , identifier[permission] = identifier[BlobPermissions] . identifier[READ] , identifier[expiry] = identifier[datetime] . identifier[utcnow] ()+ identifier[timedelta] ( identifier[hours] = literal[int] ), ) identifier[source_blob_url] = identifier[bs] . identifier[make_blob_url] ( identifier[container_name] = identifier[parse_url] . identifier[container_or_share_name] , identifier[blob_name] = identifier[parse_url] . identifier[file] , identifier[sas_token] = identifier[sas_token] ) keyword[return] identifier[source_blob_url] keyword[elif] identifier[parse_url] . identifier[file_type] == literal[string] : identifier[fs] = identifier[FileService] ( identifier[account_name] = identifier[parse_url] . identifier[account] , identifier[account_key] = identifier[key] ) identifier[sas_token] = identifier[fs] . identifier[generate_file_shared_access_signature] ( identifier[share_name] = identifier[parse_url] . identifier[container_or_share_name] , identifier[directory_name] = identifier[parse_url] . identifier[path] , identifier[file_name] = identifier[parse_url] . identifier[file] , identifier[permission] = identifier[BlobPermissions] . identifier[READ] , identifier[expiry] = identifier[datetime] . identifier[utcnow] ()+ identifier[timedelta] ( identifier[hours] = literal[int] ), ) identifier[source_file_url] = identifier[fs] . identifier[make_file_url] ( identifier[share_name] = identifier[parse_url] . identifier[container_or_share_name] , identifier[directory_name] = identifier[parse_url] . identifier[path] , identifier[file_name] = identifier[parse_url] . identifier[file] , identifier[sas_token] = identifier[sas_token] ) keyword[return] identifier[source_file_url] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] )
def generate_url(self, remote_file): """Sign a remote file to distribute. The azure url format is https://myaccount.blob.core.windows.net/mycontainer/myblob. Args: remote_file(str): The blob that we want to sign. """ parse_url = _parse_url(remote_file) key = self.storage_client.storage_accounts.list_keys(self.resource_group_name, parse_url.account).keys[0].value if parse_url.file_type == 'blob': bs = BlockBlobService(account_name=parse_url.account, account_key=key) sas_token = bs.generate_blob_shared_access_signature(parse_url.container_or_share_name, parse_url.file, permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=24)) source_blob_url = bs.make_blob_url(container_name=parse_url.container_or_share_name, blob_name=parse_url.file, sas_token=sas_token) return source_blob_url # depends on [control=['if'], data=[]] elif parse_url.file_type == 'file': fs = FileService(account_name=parse_url.account, account_key=key) sas_token = fs.generate_file_shared_access_signature(share_name=parse_url.container_or_share_name, directory_name=parse_url.path, file_name=parse_url.file, permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=24)) source_file_url = fs.make_file_url(share_name=parse_url.container_or_share_name, directory_name=parse_url.path, file_name=parse_url.file, sas_token=sas_token) return source_file_url # depends on [control=['if'], data=[]] else: raise ValueError('This azure storage type is not valid. It should be blob or file.')
def get_version_manifest(name, data=None, required=False): """Retrieve a version from the currently installed manifest. """ manifest_dir = _get_manifest_dir(data, name) manifest_vs = _get_versions_manifest(manifest_dir) or [] for x in manifest_vs: if x["program"] == name: v = x.get("version", "") if v: return v if required: raise ValueError("Did not find %s in install manifest. Could not check version." % name) return ""
def function[get_version_manifest, parameter[name, data, required]]: constant[Retrieve a version from the currently installed manifest. ] variable[manifest_dir] assign[=] call[name[_get_manifest_dir], parameter[name[data], name[name]]] variable[manifest_vs] assign[=] <ast.BoolOp object at 0x7da20c76ebc0> for taget[name[x]] in starred[name[manifest_vs]] begin[:] if compare[call[name[x]][constant[program]] equal[==] name[name]] begin[:] variable[v] assign[=] call[name[x].get, parameter[constant[version], constant[]]] if name[v] begin[:] return[name[v]] if name[required] begin[:] <ast.Raise object at 0x7da20c76d840> return[constant[]]
keyword[def] identifier[get_version_manifest] ( identifier[name] , identifier[data] = keyword[None] , identifier[required] = keyword[False] ): literal[string] identifier[manifest_dir] = identifier[_get_manifest_dir] ( identifier[data] , identifier[name] ) identifier[manifest_vs] = identifier[_get_versions_manifest] ( identifier[manifest_dir] ) keyword[or] [] keyword[for] identifier[x] keyword[in] identifier[manifest_vs] : keyword[if] identifier[x] [ literal[string] ]== identifier[name] : identifier[v] = identifier[x] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[v] : keyword[return] identifier[v] keyword[if] identifier[required] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[name] ) keyword[return] literal[string]
def get_version_manifest(name, data=None, required=False): """Retrieve a version from the currently installed manifest. """ manifest_dir = _get_manifest_dir(data, name) manifest_vs = _get_versions_manifest(manifest_dir) or [] for x in manifest_vs: if x['program'] == name: v = x.get('version', '') if v: return v # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] if required: raise ValueError('Did not find %s in install manifest. Could not check version.' % name) # depends on [control=['if'], data=[]] return ''
def _add_process(self, proc, priority) -> None: """ Schedule process on actual time with specified priority """ self._events.push(self.now, priority, proc)
def function[_add_process, parameter[self, proc, priority]]: constant[ Schedule process on actual time with specified priority ] call[name[self]._events.push, parameter[name[self].now, name[priority], name[proc]]]
keyword[def] identifier[_add_process] ( identifier[self] , identifier[proc] , identifier[priority] )-> keyword[None] : literal[string] identifier[self] . identifier[_events] . identifier[push] ( identifier[self] . identifier[now] , identifier[priority] , identifier[proc] )
def _add_process(self, proc, priority) -> None: """ Schedule process on actual time with specified priority """ self._events.push(self.now, priority, proc)
def calcparams_pvsyst(effective_irradiance, temp_cell, alpha_sc, gamma_ref, mu_gamma, I_L_ref, I_o_ref, R_sh_ref, R_sh_0, R_s, cells_in_series, R_sh_exp=5.5, EgRef=1.121, irrad_ref=1000, temp_ref=25): ''' Calculates five parameter values for the single diode equation at effective irradiance and cell temperature using the PVsyst v6 model described in [1,2,3]. The five values returned by calcparams_pvsyst can be used by singlediode to calculate an IV curve. Parameters ---------- effective_irradiance : numeric The irradiance (W/m2) that is converted to photocurrent. temp_cell : numeric The average cell temperature of cells within a module in C. alpha_sc : float The short-circuit current temperature coefficient of the module in units of A/C. gamma_ref : float The diode ideality factor mu_gamma : float The temperature coefficient for the diode ideality factor, 1/K I_L_ref : float The light-generated current (or photocurrent) at reference conditions, in amperes. I_o_ref : float The dark or diode reverse saturation current at reference conditions, in amperes. R_sh_ref : float The shunt resistance at reference conditions, in ohms. R_sh_0 : float The shunt resistance at zero irradiance conditions, in ohms. R_s : float The series resistance at reference conditions, in ohms. cells_in_series : integer The number of cells connected in series. R_sh_exp : float The exponent in the equation for shunt resistance, unitless. Defaults to 5.5. EgRef : float The energy bandgap at reference temperature in units of eV. 1.121 eV for crystalline silicon. EgRef must be >0. irrad_ref : float (optional, default=1000) Reference irradiance in W/m^2. temp_ref : float (optional, default=25) Reference cell temperature in C. Returns ------- Tuple of the following results: photocurrent : numeric Light-generated current in amperes saturation_current : numeric Diode saturation current in amperes resistance_series : float Series resistance in ohms resistance_shunt : numeric Shunt resistance in ohms nNsVth : numeric The product of the usual diode ideality factor (n, unitless), number of cells in series (Ns), and cell thermal voltage at specified effective irradiance and cell temperature. References ---------- [1] K. Sauer, T. Roessler, C. W. Hansen, Modeling the Irradiance and Temperature Dependence of Photovoltaic Modules in PVsyst, IEEE Journal of Photovoltaics v5(1), January 2015. [2] A. Mermoud, PV modules modelling, Presentation at the 2nd PV Performance Modeling Workshop, Santa Clara, CA, May 2013 [3] A. Mermoud, T. Lejeune, Performance Assessment of a Simulation Model for PV modules of any available technology, 25th European Photovoltaic Solar Energy Conference, Valencia, Spain, Sept. 2010 See Also -------- calcparams_desoto singlediode ''' # Boltzmann constant in J/K k = 1.38064852e-23 # elementary charge in coulomb q = 1.6021766e-19 # reference temperature Tref_K = temp_ref + 273.15 Tcell_K = temp_cell + 273.15 gamma = gamma_ref + mu_gamma * (Tcell_K - Tref_K) nNsVth = gamma * k / q * cells_in_series * Tcell_K IL = effective_irradiance / irrad_ref * \ (I_L_ref + alpha_sc * (Tcell_K - Tref_K)) I0 = I_o_ref * ((Tcell_K / Tref_K) ** 3) * \ (np.exp((q * EgRef) / (k * gamma) * (1 / Tref_K - 1 / Tcell_K))) Rsh_tmp = \ (R_sh_ref - R_sh_0 * np.exp(-R_sh_exp)) / (1.0 - np.exp(-R_sh_exp)) Rsh_base = np.maximum(0.0, Rsh_tmp) Rsh = Rsh_base + (R_sh_0 - Rsh_base) * \ np.exp(-R_sh_exp * effective_irradiance / irrad_ref) Rs = R_s return IL, I0, Rs, Rsh, nNsVth
def function[calcparams_pvsyst, parameter[effective_irradiance, temp_cell, alpha_sc, gamma_ref, mu_gamma, I_L_ref, I_o_ref, R_sh_ref, R_sh_0, R_s, cells_in_series, R_sh_exp, EgRef, irrad_ref, temp_ref]]: constant[ Calculates five parameter values for the single diode equation at effective irradiance and cell temperature using the PVsyst v6 model described in [1,2,3]. The five values returned by calcparams_pvsyst can be used by singlediode to calculate an IV curve. Parameters ---------- effective_irradiance : numeric The irradiance (W/m2) that is converted to photocurrent. temp_cell : numeric The average cell temperature of cells within a module in C. alpha_sc : float The short-circuit current temperature coefficient of the module in units of A/C. gamma_ref : float The diode ideality factor mu_gamma : float The temperature coefficient for the diode ideality factor, 1/K I_L_ref : float The light-generated current (or photocurrent) at reference conditions, in amperes. I_o_ref : float The dark or diode reverse saturation current at reference conditions, in amperes. R_sh_ref : float The shunt resistance at reference conditions, in ohms. R_sh_0 : float The shunt resistance at zero irradiance conditions, in ohms. R_s : float The series resistance at reference conditions, in ohms. cells_in_series : integer The number of cells connected in series. R_sh_exp : float The exponent in the equation for shunt resistance, unitless. Defaults to 5.5. EgRef : float The energy bandgap at reference temperature in units of eV. 1.121 eV for crystalline silicon. EgRef must be >0. irrad_ref : float (optional, default=1000) Reference irradiance in W/m^2. temp_ref : float (optional, default=25) Reference cell temperature in C. Returns ------- Tuple of the following results: photocurrent : numeric Light-generated current in amperes saturation_current : numeric Diode saturation current in amperes resistance_series : float Series resistance in ohms resistance_shunt : numeric Shunt resistance in ohms nNsVth : numeric The product of the usual diode ideality factor (n, unitless), number of cells in series (Ns), and cell thermal voltage at specified effective irradiance and cell temperature. References ---------- [1] K. Sauer, T. Roessler, C. W. Hansen, Modeling the Irradiance and Temperature Dependence of Photovoltaic Modules in PVsyst, IEEE Journal of Photovoltaics v5(1), January 2015. [2] A. Mermoud, PV modules modelling, Presentation at the 2nd PV Performance Modeling Workshop, Santa Clara, CA, May 2013 [3] A. Mermoud, T. Lejeune, Performance Assessment of a Simulation Model for PV modules of any available technology, 25th European Photovoltaic Solar Energy Conference, Valencia, Spain, Sept. 2010 See Also -------- calcparams_desoto singlediode ] variable[k] assign[=] constant[1.38064852e-23] variable[q] assign[=] constant[1.6021766e-19] variable[Tref_K] assign[=] binary_operation[name[temp_ref] + constant[273.15]] variable[Tcell_K] assign[=] binary_operation[name[temp_cell] + constant[273.15]] variable[gamma] assign[=] binary_operation[name[gamma_ref] + binary_operation[name[mu_gamma] * binary_operation[name[Tcell_K] - name[Tref_K]]]] variable[nNsVth] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[gamma] * name[k]] / name[q]] * name[cells_in_series]] * name[Tcell_K]] variable[IL] assign[=] binary_operation[binary_operation[name[effective_irradiance] / name[irrad_ref]] * binary_operation[name[I_L_ref] + binary_operation[name[alpha_sc] * binary_operation[name[Tcell_K] - name[Tref_K]]]]] variable[I0] assign[=] binary_operation[binary_operation[name[I_o_ref] * binary_operation[binary_operation[name[Tcell_K] / name[Tref_K]] ** constant[3]]] * call[name[np].exp, parameter[binary_operation[binary_operation[binary_operation[name[q] * name[EgRef]] / binary_operation[name[k] * name[gamma]]] * binary_operation[binary_operation[constant[1] / name[Tref_K]] - binary_operation[constant[1] / name[Tcell_K]]]]]]] variable[Rsh_tmp] assign[=] binary_operation[binary_operation[name[R_sh_ref] - binary_operation[name[R_sh_0] * call[name[np].exp, parameter[<ast.UnaryOp object at 0x7da1b1b0f280>]]]] / binary_operation[constant[1.0] - call[name[np].exp, parameter[<ast.UnaryOp object at 0x7da1b1b0dd80>]]]] variable[Rsh_base] assign[=] call[name[np].maximum, parameter[constant[0.0], name[Rsh_tmp]]] variable[Rsh] assign[=] binary_operation[name[Rsh_base] + binary_operation[binary_operation[name[R_sh_0] - name[Rsh_base]] * call[name[np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1baf640> * name[effective_irradiance]] / name[irrad_ref]]]]]] variable[Rs] assign[=] name[R_s] return[tuple[[<ast.Name object at 0x7da1b1badf90>, <ast.Name object at 0x7da1b1baeb00>, <ast.Name object at 0x7da1b1bafca0>, <ast.Name object at 0x7da1b1bafc70>, <ast.Name object at 0x7da1b1baf160>]]]
keyword[def] identifier[calcparams_pvsyst] ( identifier[effective_irradiance] , identifier[temp_cell] , identifier[alpha_sc] , identifier[gamma_ref] , identifier[mu_gamma] , identifier[I_L_ref] , identifier[I_o_ref] , identifier[R_sh_ref] , identifier[R_sh_0] , identifier[R_s] , identifier[cells_in_series] , identifier[R_sh_exp] = literal[int] , identifier[EgRef] = literal[int] , identifier[irrad_ref] = literal[int] , identifier[temp_ref] = literal[int] ): literal[string] identifier[k] = literal[int] identifier[q] = literal[int] identifier[Tref_K] = identifier[temp_ref] + literal[int] identifier[Tcell_K] = identifier[temp_cell] + literal[int] identifier[gamma] = identifier[gamma_ref] + identifier[mu_gamma] *( identifier[Tcell_K] - identifier[Tref_K] ) identifier[nNsVth] = identifier[gamma] * identifier[k] / identifier[q] * identifier[cells_in_series] * identifier[Tcell_K] identifier[IL] = identifier[effective_irradiance] / identifier[irrad_ref] *( identifier[I_L_ref] + identifier[alpha_sc] *( identifier[Tcell_K] - identifier[Tref_K] )) identifier[I0] = identifier[I_o_ref] *(( identifier[Tcell_K] / identifier[Tref_K] )** literal[int] )*( identifier[np] . identifier[exp] (( identifier[q] * identifier[EgRef] )/( identifier[k] * identifier[gamma] )*( literal[int] / identifier[Tref_K] - literal[int] / identifier[Tcell_K] ))) identifier[Rsh_tmp] =( identifier[R_sh_ref] - identifier[R_sh_0] * identifier[np] . identifier[exp] (- identifier[R_sh_exp] ))/( literal[int] - identifier[np] . identifier[exp] (- identifier[R_sh_exp] )) identifier[Rsh_base] = identifier[np] . identifier[maximum] ( literal[int] , identifier[Rsh_tmp] ) identifier[Rsh] = identifier[Rsh_base] +( identifier[R_sh_0] - identifier[Rsh_base] )* identifier[np] . identifier[exp] (- identifier[R_sh_exp] * identifier[effective_irradiance] / identifier[irrad_ref] ) identifier[Rs] = identifier[R_s] keyword[return] identifier[IL] , identifier[I0] , identifier[Rs] , identifier[Rsh] , identifier[nNsVth]
def calcparams_pvsyst(effective_irradiance, temp_cell, alpha_sc, gamma_ref, mu_gamma, I_L_ref, I_o_ref, R_sh_ref, R_sh_0, R_s, cells_in_series, R_sh_exp=5.5, EgRef=1.121, irrad_ref=1000, temp_ref=25): """ Calculates five parameter values for the single diode equation at effective irradiance and cell temperature using the PVsyst v6 model described in [1,2,3]. The five values returned by calcparams_pvsyst can be used by singlediode to calculate an IV curve. Parameters ---------- effective_irradiance : numeric The irradiance (W/m2) that is converted to photocurrent. temp_cell : numeric The average cell temperature of cells within a module in C. alpha_sc : float The short-circuit current temperature coefficient of the module in units of A/C. gamma_ref : float The diode ideality factor mu_gamma : float The temperature coefficient for the diode ideality factor, 1/K I_L_ref : float The light-generated current (or photocurrent) at reference conditions, in amperes. I_o_ref : float The dark or diode reverse saturation current at reference conditions, in amperes. R_sh_ref : float The shunt resistance at reference conditions, in ohms. R_sh_0 : float The shunt resistance at zero irradiance conditions, in ohms. R_s : float The series resistance at reference conditions, in ohms. cells_in_series : integer The number of cells connected in series. R_sh_exp : float The exponent in the equation for shunt resistance, unitless. Defaults to 5.5. EgRef : float The energy bandgap at reference temperature in units of eV. 1.121 eV for crystalline silicon. EgRef must be >0. irrad_ref : float (optional, default=1000) Reference irradiance in W/m^2. temp_ref : float (optional, default=25) Reference cell temperature in C. Returns ------- Tuple of the following results: photocurrent : numeric Light-generated current in amperes saturation_current : numeric Diode saturation current in amperes resistance_series : float Series resistance in ohms resistance_shunt : numeric Shunt resistance in ohms nNsVth : numeric The product of the usual diode ideality factor (n, unitless), number of cells in series (Ns), and cell thermal voltage at specified effective irradiance and cell temperature. References ---------- [1] K. Sauer, T. Roessler, C. W. Hansen, Modeling the Irradiance and Temperature Dependence of Photovoltaic Modules in PVsyst, IEEE Journal of Photovoltaics v5(1), January 2015. [2] A. Mermoud, PV modules modelling, Presentation at the 2nd PV Performance Modeling Workshop, Santa Clara, CA, May 2013 [3] A. Mermoud, T. Lejeune, Performance Assessment of a Simulation Model for PV modules of any available technology, 25th European Photovoltaic Solar Energy Conference, Valencia, Spain, Sept. 2010 See Also -------- calcparams_desoto singlediode """ # Boltzmann constant in J/K k = 1.38064852e-23 # elementary charge in coulomb q = 1.6021766e-19 # reference temperature Tref_K = temp_ref + 273.15 Tcell_K = temp_cell + 273.15 gamma = gamma_ref + mu_gamma * (Tcell_K - Tref_K) nNsVth = gamma * k / q * cells_in_series * Tcell_K IL = effective_irradiance / irrad_ref * (I_L_ref + alpha_sc * (Tcell_K - Tref_K)) I0 = I_o_ref * (Tcell_K / Tref_K) ** 3 * np.exp(q * EgRef / (k * gamma) * (1 / Tref_K - 1 / Tcell_K)) Rsh_tmp = (R_sh_ref - R_sh_0 * np.exp(-R_sh_exp)) / (1.0 - np.exp(-R_sh_exp)) Rsh_base = np.maximum(0.0, Rsh_tmp) Rsh = Rsh_base + (R_sh_0 - Rsh_base) * np.exp(-R_sh_exp * effective_irradiance / irrad_ref) Rs = R_s return (IL, I0, Rs, Rsh, nNsVth)
def _prepare_defaults(self): """Trigger assignment of default values.""" for name, field in self.__fields__.items(): if field.assign: getattr(self, name)
def function[_prepare_defaults, parameter[self]]: constant[Trigger assignment of default values.] for taget[tuple[[<ast.Name object at 0x7da2054a4820>, <ast.Name object at 0x7da2054a45b0>]]] in starred[call[name[self].__fields__.items, parameter[]]] begin[:] if name[field].assign begin[:] call[name[getattr], parameter[name[self], name[name]]]
keyword[def] identifier[_prepare_defaults] ( identifier[self] ): literal[string] keyword[for] identifier[name] , identifier[field] keyword[in] identifier[self] . identifier[__fields__] . identifier[items] (): keyword[if] identifier[field] . identifier[assign] : identifier[getattr] ( identifier[self] , identifier[name] )
def _prepare_defaults(self): """Trigger assignment of default values.""" for (name, field) in self.__fields__.items(): if field.assign: getattr(self, name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def invoke( self, cli, args=None, prog_name=None, input=None, terminate_input=False, env=None, _output_lines=None, **extra ): """Like :meth:`CliRunner.invoke` but displays what the user would enter in the terminal for env vars, command args, and prompts. :param terminate_input: Whether to display "^D" after a list of input. :param _output_lines: A list used internally to collect lines to be displayed. """ output_lines = _output_lines if _output_lines is not None else [] if env: for key, value in sorted(env.items()): value = shlex.quote(value) output_lines.append("$ export {}={}".format(key, value)) args = args or [] if prog_name is None: prog_name = cli.name.replace("_", "-") output_lines.append( "$ {} {}".format(prog_name, " ".join(shlex.quote(x) for x in args)).rstrip() ) # remove "python" from command prog_name = prog_name.rsplit(" ", 1)[-1] if isinstance(input, (tuple, list)): input = "\n".join(input) + "\n" if terminate_input: input += "\x04" result = super(ExampleRunner, self).invoke( cli=cli, args=args, input=input, env=env, prog_name=prog_name, **extra ) output_lines.extend(result.output.splitlines()) return result
def function[invoke, parameter[self, cli, args, prog_name, input, terminate_input, env, _output_lines]]: constant[Like :meth:`CliRunner.invoke` but displays what the user would enter in the terminal for env vars, command args, and prompts. :param terminate_input: Whether to display "^D" after a list of input. :param _output_lines: A list used internally to collect lines to be displayed. ] variable[output_lines] assign[=] <ast.IfExp object at 0x7da18fe91000> if name[env] begin[:] for taget[tuple[[<ast.Name object at 0x7da18fe912a0>, <ast.Name object at 0x7da18fe93d30>]]] in starred[call[name[sorted], parameter[call[name[env].items, parameter[]]]]] begin[:] variable[value] assign[=] call[name[shlex].quote, parameter[name[value]]] call[name[output_lines].append, parameter[call[constant[$ export {}={}].format, parameter[name[key], name[value]]]]] variable[args] assign[=] <ast.BoolOp object at 0x7da20c7cbb80> if compare[name[prog_name] is constant[None]] begin[:] variable[prog_name] assign[=] call[name[cli].name.replace, parameter[constant[_], constant[-]]] call[name[output_lines].append, parameter[call[call[constant[$ {} {}].format, parameter[name[prog_name], call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b26ad9c0>]]]].rstrip, parameter[]]]] variable[prog_name] assign[=] call[call[name[prog_name].rsplit, parameter[constant[ ], constant[1]]]][<ast.UnaryOp object at 0x7da18dc9bfd0>] if call[name[isinstance], parameter[name[input], tuple[[<ast.Name object at 0x7da18dc9b5e0>, <ast.Name object at 0x7da18dc9a500>]]]] begin[:] variable[input] assign[=] binary_operation[call[constant[ ].join, parameter[name[input]]] + constant[ ]] if name[terminate_input] begin[:] <ast.AugAssign object at 0x7da18dc9af50> variable[result] assign[=] call[call[name[super], parameter[name[ExampleRunner], name[self]]].invoke, parameter[]] call[name[output_lines].extend, parameter[call[name[result].output.splitlines, parameter[]]]] return[name[result]]
keyword[def] identifier[invoke] ( identifier[self] , identifier[cli] , identifier[args] = keyword[None] , identifier[prog_name] = keyword[None] , identifier[input] = keyword[None] , identifier[terminate_input] = keyword[False] , identifier[env] = keyword[None] , identifier[_output_lines] = keyword[None] , ** identifier[extra] ): literal[string] identifier[output_lines] = identifier[_output_lines] keyword[if] identifier[_output_lines] keyword[is] keyword[not] keyword[None] keyword[else] [] keyword[if] identifier[env] : keyword[for] identifier[key] , identifier[value] keyword[in] identifier[sorted] ( identifier[env] . identifier[items] ()): identifier[value] = identifier[shlex] . identifier[quote] ( identifier[value] ) identifier[output_lines] . identifier[append] ( literal[string] . identifier[format] ( identifier[key] , identifier[value] )) identifier[args] = identifier[args] keyword[or] [] keyword[if] identifier[prog_name] keyword[is] keyword[None] : identifier[prog_name] = identifier[cli] . identifier[name] . identifier[replace] ( literal[string] , literal[string] ) identifier[output_lines] . identifier[append] ( literal[string] . identifier[format] ( identifier[prog_name] , literal[string] . identifier[join] ( identifier[shlex] . identifier[quote] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[args] )). identifier[rstrip] () ) identifier[prog_name] = identifier[prog_name] . identifier[rsplit] ( literal[string] , literal[int] )[- literal[int] ] keyword[if] identifier[isinstance] ( identifier[input] ,( identifier[tuple] , identifier[list] )): identifier[input] = literal[string] . identifier[join] ( identifier[input] )+ literal[string] keyword[if] identifier[terminate_input] : identifier[input] += literal[string] identifier[result] = identifier[super] ( identifier[ExampleRunner] , identifier[self] ). identifier[invoke] ( identifier[cli] = identifier[cli] , identifier[args] = identifier[args] , identifier[input] = identifier[input] , identifier[env] = identifier[env] , identifier[prog_name] = identifier[prog_name] ,** identifier[extra] ) identifier[output_lines] . identifier[extend] ( identifier[result] . identifier[output] . identifier[splitlines] ()) keyword[return] identifier[result]
def invoke(self, cli, args=None, prog_name=None, input=None, terminate_input=False, env=None, _output_lines=None, **extra): """Like :meth:`CliRunner.invoke` but displays what the user would enter in the terminal for env vars, command args, and prompts. :param terminate_input: Whether to display "^D" after a list of input. :param _output_lines: A list used internally to collect lines to be displayed. """ output_lines = _output_lines if _output_lines is not None else [] if env: for (key, value) in sorted(env.items()): value = shlex.quote(value) output_lines.append('$ export {}={}'.format(key, value)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] args = args or [] if prog_name is None: prog_name = cli.name.replace('_', '-') # depends on [control=['if'], data=['prog_name']] output_lines.append('$ {} {}'.format(prog_name, ' '.join((shlex.quote(x) for x in args))).rstrip()) # remove "python" from command prog_name = prog_name.rsplit(' ', 1)[-1] if isinstance(input, (tuple, list)): input = '\n'.join(input) + '\n' if terminate_input: input += '\x04' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] result = super(ExampleRunner, self).invoke(cli=cli, args=args, input=input, env=env, prog_name=prog_name, **extra) output_lines.extend(result.output.splitlines()) return result
def get_sds_by_ip(self,ip): """ Get ScaleIO SDS object by its ip address :param name: IP address of SDS :return: ScaleIO SDS object :raise KeyError: No SDS with specified ip found :rtype: SDS object """ if self.conn.is_ip_addr(ip): for sds in self.sds: for sdsIp in sds.ipList: if sdsIp == ip: return sds raise KeyError("SDS of that name not found") else: raise ValueError("Malformed IP address - get_sds_by_ip()")
def function[get_sds_by_ip, parameter[self, ip]]: constant[ Get ScaleIO SDS object by its ip address :param name: IP address of SDS :return: ScaleIO SDS object :raise KeyError: No SDS with specified ip found :rtype: SDS object ] if call[name[self].conn.is_ip_addr, parameter[name[ip]]] begin[:] for taget[name[sds]] in starred[name[self].sds] begin[:] for taget[name[sdsIp]] in starred[name[sds].ipList] begin[:] if compare[name[sdsIp] equal[==] name[ip]] begin[:] return[name[sds]] <ast.Raise object at 0x7da1b2595db0>
keyword[def] identifier[get_sds_by_ip] ( identifier[self] , identifier[ip] ): literal[string] keyword[if] identifier[self] . identifier[conn] . identifier[is_ip_addr] ( identifier[ip] ): keyword[for] identifier[sds] keyword[in] identifier[self] . identifier[sds] : keyword[for] identifier[sdsIp] keyword[in] identifier[sds] . identifier[ipList] : keyword[if] identifier[sdsIp] == identifier[ip] : keyword[return] identifier[sds] keyword[raise] identifier[KeyError] ( literal[string] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] )
def get_sds_by_ip(self, ip): """ Get ScaleIO SDS object by its ip address :param name: IP address of SDS :return: ScaleIO SDS object :raise KeyError: No SDS with specified ip found :rtype: SDS object """ if self.conn.is_ip_addr(ip): for sds in self.sds: for sdsIp in sds.ipList: if sdsIp == ip: return sds # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sdsIp']] # depends on [control=['for'], data=['sds']] raise KeyError('SDS of that name not found') # depends on [control=['if'], data=[]] else: raise ValueError('Malformed IP address - get_sds_by_ip()')
def status_printer(): """Manage the printing and in-place updating of a line of characters .. note:: If the string is longer than a line, then in-place updating may not work (it will print a new line at each refresh). """ last_len = [0] def p(s): s = next(spinner) + ' ' + s len_s = len(s) output = '\r' + s + (' ' * max(last_len[0] - len_s, 0)) sys.stdout.write(output) sys.stdout.flush() last_len[0] = len_s return p
def function[status_printer, parameter[]]: constant[Manage the printing and in-place updating of a line of characters .. note:: If the string is longer than a line, then in-place updating may not work (it will print a new line at each refresh). ] variable[last_len] assign[=] list[[<ast.Constant object at 0x7da18fe93ca0>]] def function[p, parameter[s]]: variable[s] assign[=] binary_operation[binary_operation[call[name[next], parameter[name[spinner]]] + constant[ ]] + name[s]] variable[len_s] assign[=] call[name[len], parameter[name[s]]] variable[output] assign[=] binary_operation[binary_operation[constant[ ] + name[s]] + binary_operation[constant[ ] * call[name[max], parameter[binary_operation[call[name[last_len]][constant[0]] - name[len_s]], constant[0]]]]] call[name[sys].stdout.write, parameter[name[output]]] call[name[sys].stdout.flush, parameter[]] call[name[last_len]][constant[0]] assign[=] name[len_s] return[name[p]]
keyword[def] identifier[status_printer] (): literal[string] identifier[last_len] =[ literal[int] ] keyword[def] identifier[p] ( identifier[s] ): identifier[s] = identifier[next] ( identifier[spinner] )+ literal[string] + identifier[s] identifier[len_s] = identifier[len] ( identifier[s] ) identifier[output] = literal[string] + identifier[s] +( literal[string] * identifier[max] ( identifier[last_len] [ literal[int] ]- identifier[len_s] , literal[int] )) identifier[sys] . identifier[stdout] . identifier[write] ( identifier[output] ) identifier[sys] . identifier[stdout] . identifier[flush] () identifier[last_len] [ literal[int] ]= identifier[len_s] keyword[return] identifier[p]
def status_printer(): """Manage the printing and in-place updating of a line of characters .. note:: If the string is longer than a line, then in-place updating may not work (it will print a new line at each refresh). """ last_len = [0] def p(s): s = next(spinner) + ' ' + s len_s = len(s) output = '\r' + s + ' ' * max(last_len[0] - len_s, 0) sys.stdout.write(output) sys.stdout.flush() last_len[0] = len_s return p
def _cancel_outstanding(self): """Cancel all of our outstanding requests""" for d in list(self._outstanding): d.addErrback(lambda _: None) # Eat any uncaught errors d.cancel()
def function[_cancel_outstanding, parameter[self]]: constant[Cancel all of our outstanding requests] for taget[name[d]] in starred[call[name[list], parameter[name[self]._outstanding]]] begin[:] call[name[d].addErrback, parameter[<ast.Lambda object at 0x7da1b04dba60>]] call[name[d].cancel, parameter[]]
keyword[def] identifier[_cancel_outstanding] ( identifier[self] ): literal[string] keyword[for] identifier[d] keyword[in] identifier[list] ( identifier[self] . identifier[_outstanding] ): identifier[d] . identifier[addErrback] ( keyword[lambda] identifier[_] : keyword[None] ) identifier[d] . identifier[cancel] ()
def _cancel_outstanding(self): """Cancel all of our outstanding requests""" for d in list(self._outstanding): d.addErrback(lambda _: None) # Eat any uncaught errors d.cancel() # depends on [control=['for'], data=['d']]
def _allLocations(self): """ Return a list of all locations of all objects. """ l = [] for locationTuple in self.keys(): l.append(Location(locationTuple)) return l
def function[_allLocations, parameter[self]]: constant[ Return a list of all locations of all objects. ] variable[l] assign[=] list[[]] for taget[name[locationTuple]] in starred[call[name[self].keys, parameter[]]] begin[:] call[name[l].append, parameter[call[name[Location], parameter[name[locationTuple]]]]] return[name[l]]
keyword[def] identifier[_allLocations] ( identifier[self] ): literal[string] identifier[l] =[] keyword[for] identifier[locationTuple] keyword[in] identifier[self] . identifier[keys] (): identifier[l] . identifier[append] ( identifier[Location] ( identifier[locationTuple] )) keyword[return] identifier[l]
def _allLocations(self): """ Return a list of all locations of all objects. """ l = [] for locationTuple in self.keys(): l.append(Location(locationTuple)) # depends on [control=['for'], data=['locationTuple']] return l
def center_loss(embedding, label, num_classes, alpha=0.1, scope="center_loss"): r"""Center-Loss as described in the paper `A Discriminative Feature Learning Approach for Deep Face Recognition` <http://ydwen.github.io/papers/WenECCV16.pdf> by Wen et al. Args: embedding (tf.Tensor): features produced by the network label (tf.Tensor): ground-truth label for each feature num_classes (int): number of different classes alpha (float): learning rate for updating the centers Returns: tf.Tensor: center loss """ nrof_features = embedding.get_shape()[1] centers = tf.get_variable('centers', [num_classes, nrof_features], dtype=tf.float32, initializer=tf.constant_initializer(0), trainable=False) label = tf.reshape(label, [-1]) centers_batch = tf.gather(centers, label) diff = (1 - alpha) * (centers_batch - embedding) centers = tf.scatter_sub(centers, label, diff) loss = tf.reduce_mean(tf.square(embedding - centers_batch), name=scope) return loss
def function[center_loss, parameter[embedding, label, num_classes, alpha, scope]]: constant[Center-Loss as described in the paper `A Discriminative Feature Learning Approach for Deep Face Recognition` <http://ydwen.github.io/papers/WenECCV16.pdf> by Wen et al. Args: embedding (tf.Tensor): features produced by the network label (tf.Tensor): ground-truth label for each feature num_classes (int): number of different classes alpha (float): learning rate for updating the centers Returns: tf.Tensor: center loss ] variable[nrof_features] assign[=] call[call[name[embedding].get_shape, parameter[]]][constant[1]] variable[centers] assign[=] call[name[tf].get_variable, parameter[constant[centers], list[[<ast.Name object at 0x7da18ede7580>, <ast.Name object at 0x7da18ede5480>]]]] variable[label] assign[=] call[name[tf].reshape, parameter[name[label], list[[<ast.UnaryOp object at 0x7da18ede5f90>]]]] variable[centers_batch] assign[=] call[name[tf].gather, parameter[name[centers], name[label]]] variable[diff] assign[=] binary_operation[binary_operation[constant[1] - name[alpha]] * binary_operation[name[centers_batch] - name[embedding]]] variable[centers] assign[=] call[name[tf].scatter_sub, parameter[name[centers], name[label], name[diff]]] variable[loss] assign[=] call[name[tf].reduce_mean, parameter[call[name[tf].square, parameter[binary_operation[name[embedding] - name[centers_batch]]]]]] return[name[loss]]
keyword[def] identifier[center_loss] ( identifier[embedding] , identifier[label] , identifier[num_classes] , identifier[alpha] = literal[int] , identifier[scope] = literal[string] ): literal[string] identifier[nrof_features] = identifier[embedding] . identifier[get_shape] ()[ literal[int] ] identifier[centers] = identifier[tf] . identifier[get_variable] ( literal[string] ,[ identifier[num_classes] , identifier[nrof_features] ], identifier[dtype] = identifier[tf] . identifier[float32] , identifier[initializer] = identifier[tf] . identifier[constant_initializer] ( literal[int] ), identifier[trainable] = keyword[False] ) identifier[label] = identifier[tf] . identifier[reshape] ( identifier[label] ,[- literal[int] ]) identifier[centers_batch] = identifier[tf] . identifier[gather] ( identifier[centers] , identifier[label] ) identifier[diff] =( literal[int] - identifier[alpha] )*( identifier[centers_batch] - identifier[embedding] ) identifier[centers] = identifier[tf] . identifier[scatter_sub] ( identifier[centers] , identifier[label] , identifier[diff] ) identifier[loss] = identifier[tf] . identifier[reduce_mean] ( identifier[tf] . identifier[square] ( identifier[embedding] - identifier[centers_batch] ), identifier[name] = identifier[scope] ) keyword[return] identifier[loss]
def center_loss(embedding, label, num_classes, alpha=0.1, scope='center_loss'): """Center-Loss as described in the paper `A Discriminative Feature Learning Approach for Deep Face Recognition` <http://ydwen.github.io/papers/WenECCV16.pdf> by Wen et al. Args: embedding (tf.Tensor): features produced by the network label (tf.Tensor): ground-truth label for each feature num_classes (int): number of different classes alpha (float): learning rate for updating the centers Returns: tf.Tensor: center loss """ nrof_features = embedding.get_shape()[1] centers = tf.get_variable('centers', [num_classes, nrof_features], dtype=tf.float32, initializer=tf.constant_initializer(0), trainable=False) label = tf.reshape(label, [-1]) centers_batch = tf.gather(centers, label) diff = (1 - alpha) * (centers_batch - embedding) centers = tf.scatter_sub(centers, label, diff) loss = tf.reduce_mean(tf.square(embedding - centers_batch), name=scope) return loss
def mixin_params(self, params): """ Add the mdsol:LastUpdateTime attribute :return: """ super(LastUpdateMixin, self).mixin_params(params) if self.last_update_time is not None: params.update({"mdsol:LastUpdateTime": self.last_update_time.isoformat()})
def function[mixin_params, parameter[self, params]]: constant[ Add the mdsol:LastUpdateTime attribute :return: ] call[call[name[super], parameter[name[LastUpdateMixin], name[self]]].mixin_params, parameter[name[params]]] if compare[name[self].last_update_time is_not constant[None]] begin[:] call[name[params].update, parameter[dictionary[[<ast.Constant object at 0x7da18f58dc30>], [<ast.Call object at 0x7da18f58cb50>]]]]
keyword[def] identifier[mixin_params] ( identifier[self] , identifier[params] ): literal[string] identifier[super] ( identifier[LastUpdateMixin] , identifier[self] ). identifier[mixin_params] ( identifier[params] ) keyword[if] identifier[self] . identifier[last_update_time] keyword[is] keyword[not] keyword[None] : identifier[params] . identifier[update] ({ literal[string] : identifier[self] . identifier[last_update_time] . identifier[isoformat] ()})
def mixin_params(self, params): """ Add the mdsol:LastUpdateTime attribute :return: """ super(LastUpdateMixin, self).mixin_params(params) if self.last_update_time is not None: params.update({'mdsol:LastUpdateTime': self.last_update_time.isoformat()}) # depends on [control=['if'], data=[]]
def runm(): """ This is super minimal and pretty hacky, but it counts as a first pass. """ signal.signal(signal.SIGINT, signal_handler) count = int(sys.argv.pop(1)) processes = [Process(target=run, args=()) for x in range(count)] try: for p in processes: p.start() except KeyError: # Not sure why we see a keyerror here. Weird. pass finally: for p in processes: p.join()
def function[runm, parameter[]]: constant[ This is super minimal and pretty hacky, but it counts as a first pass. ] call[name[signal].signal, parameter[name[signal].SIGINT, name[signal_handler]]] variable[count] assign[=] call[name[int], parameter[call[name[sys].argv.pop, parameter[constant[1]]]]] variable[processes] assign[=] <ast.ListComp object at 0x7da1b24e7730> <ast.Try object at 0x7da1b24e6590>
keyword[def] identifier[runm] (): literal[string] identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[signal_handler] ) identifier[count] = identifier[int] ( identifier[sys] . identifier[argv] . identifier[pop] ( literal[int] )) identifier[processes] =[ identifier[Process] ( identifier[target] = identifier[run] , identifier[args] =()) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[count] )] keyword[try] : keyword[for] identifier[p] keyword[in] identifier[processes] : identifier[p] . identifier[start] () keyword[except] identifier[KeyError] : keyword[pass] keyword[finally] : keyword[for] identifier[p] keyword[in] identifier[processes] : identifier[p] . identifier[join] ()
def runm(): """ This is super minimal and pretty hacky, but it counts as a first pass. """ signal.signal(signal.SIGINT, signal_handler) count = int(sys.argv.pop(1)) processes = [Process(target=run, args=()) for x in range(count)] try: for p in processes: p.start() # depends on [control=['for'], data=['p']] # depends on [control=['try'], data=[]] except KeyError: # Not sure why we see a keyerror here. Weird. pass # depends on [control=['except'], data=[]] finally: for p in processes: p.join() # depends on [control=['for'], data=['p']]
def auth_error(self,stanza): """Handle legacy authentication error. [client only]""" self.lock.acquire() try: err=stanza.get_error() ae=err.xpath_eval("e:*",{"e":"jabber:iq:auth:error"}) if ae: ae=ae[0].name else: ae=err.get_condition().name raise LegacyAuthenticationError("Authentication error condition: %s" % (ae,)) finally: self.lock.release()
def function[auth_error, parameter[self, stanza]]: constant[Handle legacy authentication error. [client only]] call[name[self].lock.acquire, parameter[]] <ast.Try object at 0x7da204960d30>
keyword[def] identifier[auth_error] ( identifier[self] , identifier[stanza] ): literal[string] identifier[self] . identifier[lock] . identifier[acquire] () keyword[try] : identifier[err] = identifier[stanza] . identifier[get_error] () identifier[ae] = identifier[err] . identifier[xpath_eval] ( literal[string] ,{ literal[string] : literal[string] }) keyword[if] identifier[ae] : identifier[ae] = identifier[ae] [ literal[int] ]. identifier[name] keyword[else] : identifier[ae] = identifier[err] . identifier[get_condition] (). identifier[name] keyword[raise] identifier[LegacyAuthenticationError] ( literal[string] %( identifier[ae] ,)) keyword[finally] : identifier[self] . identifier[lock] . identifier[release] ()
def auth_error(self, stanza): """Handle legacy authentication error. [client only]""" self.lock.acquire() try: err = stanza.get_error() ae = err.xpath_eval('e:*', {'e': 'jabber:iq:auth:error'}) if ae: ae = ae[0].name # depends on [control=['if'], data=[]] else: ae = err.get_condition().name raise LegacyAuthenticationError('Authentication error condition: %s' % (ae,)) # depends on [control=['try'], data=[]] finally: self.lock.release()
def aliases(self): """List of (namespace, name) pairs, as 2-tuples""" return {alias.namespace: alias.name for alias in self._proto.alias}.items()
def function[aliases, parameter[self]]: constant[List of (namespace, name) pairs, as 2-tuples] return[call[<ast.DictComp object at 0x7da1b1da2440>.items, parameter[]]]
keyword[def] identifier[aliases] ( identifier[self] ): literal[string] keyword[return] { identifier[alias] . identifier[namespace] : identifier[alias] . identifier[name] keyword[for] identifier[alias] keyword[in] identifier[self] . identifier[_proto] . identifier[alias] }. identifier[items] ()
def aliases(self): """List of (namespace, name) pairs, as 2-tuples""" return {alias.namespace: alias.name for alias in self._proto.alias}.items()
def _map_block_index_to_location(ir_blocks): """Associate each IR block with its corresponding location, by index.""" block_index_to_location = {} # MarkLocation blocks occur after the blocks related to that location. # The core approach here is to buffer blocks until their MarkLocation is encountered # after which all buffered blocks can be associated with the encountered MarkLocation.location. current_block_ixs = [] for num, ir_block in enumerate(ir_blocks): if isinstance(ir_block, blocks.GlobalOperationsStart): if len(current_block_ixs) > 0: unassociated_blocks = [ir_blocks[ix] for ix in current_block_ixs] raise AssertionError( u'Unexpectedly encountered global operations before mapping blocks ' u'{} to their respective locations.'.format(unassociated_blocks)) break current_block_ixs.append(num) if isinstance(ir_block, blocks.MarkLocation): for ix in current_block_ixs: block_index_to_location[ix] = ir_block.location current_block_ixs = [] return block_index_to_location
def function[_map_block_index_to_location, parameter[ir_blocks]]: constant[Associate each IR block with its corresponding location, by index.] variable[block_index_to_location] assign[=] dictionary[[], []] variable[current_block_ixs] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b1644610>, <ast.Name object at 0x7da1b1645c90>]]] in starred[call[name[enumerate], parameter[name[ir_blocks]]]] begin[:] if call[name[isinstance], parameter[name[ir_block], name[blocks].GlobalOperationsStart]] begin[:] if compare[call[name[len], parameter[name[current_block_ixs]]] greater[>] constant[0]] begin[:] variable[unassociated_blocks] assign[=] <ast.ListComp object at 0x7da1b1646170> <ast.Raise object at 0x7da1b16460b0> break call[name[current_block_ixs].append, parameter[name[num]]] if call[name[isinstance], parameter[name[ir_block], name[blocks].MarkLocation]] begin[:] for taget[name[ix]] in starred[name[current_block_ixs]] begin[:] call[name[block_index_to_location]][name[ix]] assign[=] name[ir_block].location variable[current_block_ixs] assign[=] list[[]] return[name[block_index_to_location]]
keyword[def] identifier[_map_block_index_to_location] ( identifier[ir_blocks] ): literal[string] identifier[block_index_to_location] ={} identifier[current_block_ixs] =[] keyword[for] identifier[num] , identifier[ir_block] keyword[in] identifier[enumerate] ( identifier[ir_blocks] ): keyword[if] identifier[isinstance] ( identifier[ir_block] , identifier[blocks] . identifier[GlobalOperationsStart] ): keyword[if] identifier[len] ( identifier[current_block_ixs] )> literal[int] : identifier[unassociated_blocks] =[ identifier[ir_blocks] [ identifier[ix] ] keyword[for] identifier[ix] keyword[in] identifier[current_block_ixs] ] keyword[raise] identifier[AssertionError] ( literal[string] literal[string] . identifier[format] ( identifier[unassociated_blocks] )) keyword[break] identifier[current_block_ixs] . identifier[append] ( identifier[num] ) keyword[if] identifier[isinstance] ( identifier[ir_block] , identifier[blocks] . identifier[MarkLocation] ): keyword[for] identifier[ix] keyword[in] identifier[current_block_ixs] : identifier[block_index_to_location] [ identifier[ix] ]= identifier[ir_block] . identifier[location] identifier[current_block_ixs] =[] keyword[return] identifier[block_index_to_location]
def _map_block_index_to_location(ir_blocks): """Associate each IR block with its corresponding location, by index.""" block_index_to_location = {} # MarkLocation blocks occur after the blocks related to that location. # The core approach here is to buffer blocks until their MarkLocation is encountered # after which all buffered blocks can be associated with the encountered MarkLocation.location. current_block_ixs = [] for (num, ir_block) in enumerate(ir_blocks): if isinstance(ir_block, blocks.GlobalOperationsStart): if len(current_block_ixs) > 0: unassociated_blocks = [ir_blocks[ix] for ix in current_block_ixs] raise AssertionError(u'Unexpectedly encountered global operations before mapping blocks {} to their respective locations.'.format(unassociated_blocks)) # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=[]] current_block_ixs.append(num) if isinstance(ir_block, blocks.MarkLocation): for ix in current_block_ixs: block_index_to_location[ix] = ir_block.location # depends on [control=['for'], data=['ix']] current_block_ixs = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return block_index_to_location
def get(self, stype, flags, filters, options=None): """ Send a request to the API to return results related to Visual Novels. :param str stype: What are we searching for? One of: vn, release, producer, character, votelist, vnlist, wishlist :param flags: See the D11 docs. A comma separated list of flags for what data to return. Can be list or str. :param str filters: A string with the one filter to search by (apparently you only get one). This is kind of special. You need to pass them in the form <filter><op>"<term>" for strings or <filter><op><number> for numbers. This is counter intuitive. Also, per the docs, <filter>=<number> doesn't do what we think, use >, >= or < and <=. I will attempt to properly format this if not done so when called. :param dict options: A dictionary of options to customize the search by. Optional, defaults to None. :return dict: A dictionary containing a pages and data key. data contains a list of dictionaries with data on your results. If pages is true, you can call this command again with the same parameters and pass a page option to get more data. Otherwise no further results exist for this query. :raises ServerError: Raises a ServerError if an error is returned. """ if not isinstance(flags, str): if isinstance(flags, list): finflags = ",".join(flags) else: raise SyntaxError("Flags should be a list or comma separated string") else: finflags = flags if not isinstance(filters, str): raise SyntaxError("Filters needs to be a string in the format Filter<op>Value. The simplest form is search=\"<Term>\".") if stype not in self.stypes: raise SyntaxError("{} not a valid Search type.".format(stype)) if '"' not in filters or "'" not in filters: newfilters = self.helperpat.split(filters) newfilters = [x.strip() for x in newfilters] newfilters[1] = '"' + newfilters[1] + '"' op = self.helperpat.search(filters) newfilters = op.group(0).join(newfilters) command = '{} {} ({}){}'.format(stype, finflags, newfilters, ' ' + ujson.dumps(options) if options is not None else '') else: command = '{} {} ({}){}'.format(stype, finflags, filters, ' ' + ujson.dumps(options) if options is not None else '') data = self.connection.send_command('get', command) if 'id' in data: raise ServerError(data['msg'], data['id']) else: return {'pages': data.get('more', default=False), 'data': data['items']}
def function[get, parameter[self, stype, flags, filters, options]]: constant[ Send a request to the API to return results related to Visual Novels. :param str stype: What are we searching for? One of: vn, release, producer, character, votelist, vnlist, wishlist :param flags: See the D11 docs. A comma separated list of flags for what data to return. Can be list or str. :param str filters: A string with the one filter to search by (apparently you only get one). This is kind of special. You need to pass them in the form <filter><op>"<term>" for strings or <filter><op><number> for numbers. This is counter intuitive. Also, per the docs, <filter>=<number> doesn't do what we think, use >, >= or < and <=. I will attempt to properly format this if not done so when called. :param dict options: A dictionary of options to customize the search by. Optional, defaults to None. :return dict: A dictionary containing a pages and data key. data contains a list of dictionaries with data on your results. If pages is true, you can call this command again with the same parameters and pass a page option to get more data. Otherwise no further results exist for this query. :raises ServerError: Raises a ServerError if an error is returned. ] if <ast.UnaryOp object at 0x7da1b05dbe50> begin[:] if call[name[isinstance], parameter[name[flags], name[list]]] begin[:] variable[finflags] assign[=] call[constant[,].join, parameter[name[flags]]] if <ast.UnaryOp object at 0x7da1b05db220> begin[:] <ast.Raise object at 0x7da1b05d8b80> if compare[name[stype] <ast.NotIn object at 0x7da2590d7190> name[self].stypes] begin[:] <ast.Raise object at 0x7da1b05d92a0> if <ast.BoolOp object at 0x7da1b04efe50> begin[:] variable[newfilters] assign[=] call[name[self].helperpat.split, parameter[name[filters]]] variable[newfilters] assign[=] <ast.ListComp object at 0x7da1b04ec5b0> call[name[newfilters]][constant[1]] assign[=] binary_operation[binary_operation[constant["] + call[name[newfilters]][constant[1]]] + constant["]] variable[op] assign[=] call[name[self].helperpat.search, parameter[name[filters]]] variable[newfilters] assign[=] call[call[name[op].group, parameter[constant[0]]].join, parameter[name[newfilters]]] variable[command] assign[=] call[constant[{} {} ({}){}].format, parameter[name[stype], name[finflags], name[newfilters], <ast.IfExp object at 0x7da1b05bf130>]] variable[data] assign[=] call[name[self].connection.send_command, parameter[constant[get], name[command]]] if compare[constant[id] in name[data]] begin[:] <ast.Raise object at 0x7da1b05dba60>
keyword[def] identifier[get] ( identifier[self] , identifier[stype] , identifier[flags] , identifier[filters] , identifier[options] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[flags] , identifier[str] ): keyword[if] identifier[isinstance] ( identifier[flags] , identifier[list] ): identifier[finflags] = literal[string] . identifier[join] ( identifier[flags] ) keyword[else] : keyword[raise] identifier[SyntaxError] ( literal[string] ) keyword[else] : identifier[finflags] = identifier[flags] keyword[if] keyword[not] identifier[isinstance] ( identifier[filters] , identifier[str] ): keyword[raise] identifier[SyntaxError] ( literal[string] ) keyword[if] identifier[stype] keyword[not] keyword[in] identifier[self] . identifier[stypes] : keyword[raise] identifier[SyntaxError] ( literal[string] . identifier[format] ( identifier[stype] )) keyword[if] literal[string] keyword[not] keyword[in] identifier[filters] keyword[or] literal[string] keyword[not] keyword[in] identifier[filters] : identifier[newfilters] = identifier[self] . identifier[helperpat] . identifier[split] ( identifier[filters] ) identifier[newfilters] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[newfilters] ] identifier[newfilters] [ literal[int] ]= literal[string] + identifier[newfilters] [ literal[int] ]+ literal[string] identifier[op] = identifier[self] . identifier[helperpat] . identifier[search] ( identifier[filters] ) identifier[newfilters] = identifier[op] . identifier[group] ( literal[int] ). identifier[join] ( identifier[newfilters] ) identifier[command] = literal[string] . identifier[format] ( identifier[stype] , identifier[finflags] , identifier[newfilters] , literal[string] + identifier[ujson] . identifier[dumps] ( identifier[options] ) keyword[if] identifier[options] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] ) keyword[else] : identifier[command] = literal[string] . identifier[format] ( identifier[stype] , identifier[finflags] , identifier[filters] , literal[string] + identifier[ujson] . identifier[dumps] ( identifier[options] ) keyword[if] identifier[options] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] ) identifier[data] = identifier[self] . identifier[connection] . identifier[send_command] ( literal[string] , identifier[command] ) keyword[if] literal[string] keyword[in] identifier[data] : keyword[raise] identifier[ServerError] ( identifier[data] [ literal[string] ], identifier[data] [ literal[string] ]) keyword[else] : keyword[return] { literal[string] : identifier[data] . identifier[get] ( literal[string] , identifier[default] = keyword[False] ), literal[string] : identifier[data] [ literal[string] ]}
def get(self, stype, flags, filters, options=None): """ Send a request to the API to return results related to Visual Novels. :param str stype: What are we searching for? One of: vn, release, producer, character, votelist, vnlist, wishlist :param flags: See the D11 docs. A comma separated list of flags for what data to return. Can be list or str. :param str filters: A string with the one filter to search by (apparently you only get one). This is kind of special. You need to pass them in the form <filter><op>"<term>" for strings or <filter><op><number> for numbers. This is counter intuitive. Also, per the docs, <filter>=<number> doesn't do what we think, use >, >= or < and <=. I will attempt to properly format this if not done so when called. :param dict options: A dictionary of options to customize the search by. Optional, defaults to None. :return dict: A dictionary containing a pages and data key. data contains a list of dictionaries with data on your results. If pages is true, you can call this command again with the same parameters and pass a page option to get more data. Otherwise no further results exist for this query. :raises ServerError: Raises a ServerError if an error is returned. """ if not isinstance(flags, str): if isinstance(flags, list): finflags = ','.join(flags) # depends on [control=['if'], data=[]] else: raise SyntaxError('Flags should be a list or comma separated string') # depends on [control=['if'], data=[]] else: finflags = flags if not isinstance(filters, str): raise SyntaxError('Filters needs to be a string in the format Filter<op>Value. The simplest form is search="<Term>".') # depends on [control=['if'], data=[]] if stype not in self.stypes: raise SyntaxError('{} not a valid Search type.'.format(stype)) # depends on [control=['if'], data=['stype']] if '"' not in filters or "'" not in filters: newfilters = self.helperpat.split(filters) newfilters = [x.strip() for x in newfilters] newfilters[1] = '"' + newfilters[1] + '"' op = self.helperpat.search(filters) newfilters = op.group(0).join(newfilters) command = '{} {} ({}){}'.format(stype, finflags, newfilters, ' ' + ujson.dumps(options) if options is not None else '') # depends on [control=['if'], data=[]] else: command = '{} {} ({}){}'.format(stype, finflags, filters, ' ' + ujson.dumps(options) if options is not None else '') data = self.connection.send_command('get', command) if 'id' in data: raise ServerError(data['msg'], data['id']) # depends on [control=['if'], data=['data']] else: return {'pages': data.get('more', default=False), 'data': data['items']}
def sendFragmentStart(self, data): """ Send the start of a data fragment stream to a websocket client. Subsequent data should be sent using sendFragment(). A fragment stream is completed when sendFragmentEnd() is called. If data is a unicode object then the frame is sent as Text. If the data is a bytearray object then the frame is sent as Binary. """ opcode = BINARY if _check_unicode(data): opcode = TEXT self._sendMessage(True, opcode, data)
def function[sendFragmentStart, parameter[self, data]]: constant[ Send the start of a data fragment stream to a websocket client. Subsequent data should be sent using sendFragment(). A fragment stream is completed when sendFragmentEnd() is called. If data is a unicode object then the frame is sent as Text. If the data is a bytearray object then the frame is sent as Binary. ] variable[opcode] assign[=] name[BINARY] if call[name[_check_unicode], parameter[name[data]]] begin[:] variable[opcode] assign[=] name[TEXT] call[name[self]._sendMessage, parameter[constant[True], name[opcode], name[data]]]
keyword[def] identifier[sendFragmentStart] ( identifier[self] , identifier[data] ): literal[string] identifier[opcode] = identifier[BINARY] keyword[if] identifier[_check_unicode] ( identifier[data] ): identifier[opcode] = identifier[TEXT] identifier[self] . identifier[_sendMessage] ( keyword[True] , identifier[opcode] , identifier[data] )
def sendFragmentStart(self, data): """ Send the start of a data fragment stream to a websocket client. Subsequent data should be sent using sendFragment(). A fragment stream is completed when sendFragmentEnd() is called. If data is a unicode object then the frame is sent as Text. If the data is a bytearray object then the frame is sent as Binary. """ opcode = BINARY if _check_unicode(data): opcode = TEXT # depends on [control=['if'], data=[]] self._sendMessage(True, opcode, data)
def set(self, key, value, ex=None, px=None, nx=False, xx=False): """ Set the ``value`` for the ``key`` in the context of the provided kwargs. As per the behavior of the redis-py lib: If nx and xx are both set, the function does nothing and None is returned. If px and ex are both set, the preference is given to px. If the key is not set for some reason, the lib function returns None. """ key = self._encode(key) value = self._encode(value) if nx and xx: return None mode = "nx" if nx else "xx" if xx else None if self._should_set(key, mode): expire = None if ex is not None: expire = ex if isinstance(ex, timedelta) else timedelta(seconds=ex) if px is not None: expire = px if isinstance(px, timedelta) else timedelta(milliseconds=px) if expire is not None and expire.total_seconds() <= 0: raise ResponseError("invalid expire time in SETEX") result = self._set(key, value) if expire: self._expire(key, expire) return result
def function[set, parameter[self, key, value, ex, px, nx, xx]]: constant[ Set the ``value`` for the ``key`` in the context of the provided kwargs. As per the behavior of the redis-py lib: If nx and xx are both set, the function does nothing and None is returned. If px and ex are both set, the preference is given to px. If the key is not set for some reason, the lib function returns None. ] variable[key] assign[=] call[name[self]._encode, parameter[name[key]]] variable[value] assign[=] call[name[self]._encode, parameter[name[value]]] if <ast.BoolOp object at 0x7da1b2345780> begin[:] return[constant[None]] variable[mode] assign[=] <ast.IfExp object at 0x7da1b2346a40> if call[name[self]._should_set, parameter[name[key], name[mode]]] begin[:] variable[expire] assign[=] constant[None] if compare[name[ex] is_not constant[None]] begin[:] variable[expire] assign[=] <ast.IfExp object at 0x7da1b2347310> if compare[name[px] is_not constant[None]] begin[:] variable[expire] assign[=] <ast.IfExp object at 0x7da1b2346920> if <ast.BoolOp object at 0x7da1b2346410> begin[:] <ast.Raise object at 0x7da1b2344ee0> variable[result] assign[=] call[name[self]._set, parameter[name[key], name[value]]] if name[expire] begin[:] call[name[self]._expire, parameter[name[key], name[expire]]] return[name[result]]
keyword[def] identifier[set] ( identifier[self] , identifier[key] , identifier[value] , identifier[ex] = keyword[None] , identifier[px] = keyword[None] , identifier[nx] = keyword[False] , identifier[xx] = keyword[False] ): literal[string] identifier[key] = identifier[self] . identifier[_encode] ( identifier[key] ) identifier[value] = identifier[self] . identifier[_encode] ( identifier[value] ) keyword[if] identifier[nx] keyword[and] identifier[xx] : keyword[return] keyword[None] identifier[mode] = literal[string] keyword[if] identifier[nx] keyword[else] literal[string] keyword[if] identifier[xx] keyword[else] keyword[None] keyword[if] identifier[self] . identifier[_should_set] ( identifier[key] , identifier[mode] ): identifier[expire] = keyword[None] keyword[if] identifier[ex] keyword[is] keyword[not] keyword[None] : identifier[expire] = identifier[ex] keyword[if] identifier[isinstance] ( identifier[ex] , identifier[timedelta] ) keyword[else] identifier[timedelta] ( identifier[seconds] = identifier[ex] ) keyword[if] identifier[px] keyword[is] keyword[not] keyword[None] : identifier[expire] = identifier[px] keyword[if] identifier[isinstance] ( identifier[px] , identifier[timedelta] ) keyword[else] identifier[timedelta] ( identifier[milliseconds] = identifier[px] ) keyword[if] identifier[expire] keyword[is] keyword[not] keyword[None] keyword[and] identifier[expire] . identifier[total_seconds] ()<= literal[int] : keyword[raise] identifier[ResponseError] ( literal[string] ) identifier[result] = identifier[self] . identifier[_set] ( identifier[key] , identifier[value] ) keyword[if] identifier[expire] : identifier[self] . identifier[_expire] ( identifier[key] , identifier[expire] ) keyword[return] identifier[result]
def set(self, key, value, ex=None, px=None, nx=False, xx=False): """ Set the ``value`` for the ``key`` in the context of the provided kwargs. As per the behavior of the redis-py lib: If nx and xx are both set, the function does nothing and None is returned. If px and ex are both set, the preference is given to px. If the key is not set for some reason, the lib function returns None. """ key = self._encode(key) value = self._encode(value) if nx and xx: return None # depends on [control=['if'], data=[]] mode = 'nx' if nx else 'xx' if xx else None if self._should_set(key, mode): expire = None if ex is not None: expire = ex if isinstance(ex, timedelta) else timedelta(seconds=ex) # depends on [control=['if'], data=['ex']] if px is not None: expire = px if isinstance(px, timedelta) else timedelta(milliseconds=px) # depends on [control=['if'], data=['px']] if expire is not None and expire.total_seconds() <= 0: raise ResponseError('invalid expire time in SETEX') # depends on [control=['if'], data=[]] result = self._set(key, value) if expire: self._expire(key, expire) # depends on [control=['if'], data=[]] return result # depends on [control=['if'], data=[]]
def get_revision_of_build_configuration(revision_id, id=None, name=None): """ Get a specific audited revision of a BuildConfiguration """ data = get_revision_of_build_configuration_raw(revision_id, id, name) if data: return utils.format_json_list(data)
def function[get_revision_of_build_configuration, parameter[revision_id, id, name]]: constant[ Get a specific audited revision of a BuildConfiguration ] variable[data] assign[=] call[name[get_revision_of_build_configuration_raw], parameter[name[revision_id], name[id], name[name]]] if name[data] begin[:] return[call[name[utils].format_json_list, parameter[name[data]]]]
keyword[def] identifier[get_revision_of_build_configuration] ( identifier[revision_id] , identifier[id] = keyword[None] , identifier[name] = keyword[None] ): literal[string] identifier[data] = identifier[get_revision_of_build_configuration_raw] ( identifier[revision_id] , identifier[id] , identifier[name] ) keyword[if] identifier[data] : keyword[return] identifier[utils] . identifier[format_json_list] ( identifier[data] )
def get_revision_of_build_configuration(revision_id, id=None, name=None): """ Get a specific audited revision of a BuildConfiguration """ data = get_revision_of_build_configuration_raw(revision_id, id, name) if data: return utils.format_json_list(data) # depends on [control=['if'], data=[]]
def get_design_run_status(self, data_view_id, run_uuid): """ Retrieves the status of an in progress or completed design run :param data_view_id: The ID number of the data view to which the run belongs, as a string :type data_view_id: str :param run_uuid: The UUID of the design run to retrieve status for :type run_uuid: str :return: A :class:`ProcessStatus` object """ url = routes.get_data_view_design_status(data_view_id, run_uuid) response = self._get(url).json() status = response["data"] return ProcessStatus( result=status.get("result"), progress=status.get("progress"), status=status.get("status"), messages=status.get("messages") )
def function[get_design_run_status, parameter[self, data_view_id, run_uuid]]: constant[ Retrieves the status of an in progress or completed design run :param data_view_id: The ID number of the data view to which the run belongs, as a string :type data_view_id: str :param run_uuid: The UUID of the design run to retrieve status for :type run_uuid: str :return: A :class:`ProcessStatus` object ] variable[url] assign[=] call[name[routes].get_data_view_design_status, parameter[name[data_view_id], name[run_uuid]]] variable[response] assign[=] call[call[name[self]._get, parameter[name[url]]].json, parameter[]] variable[status] assign[=] call[name[response]][constant[data]] return[call[name[ProcessStatus], parameter[]]]
keyword[def] identifier[get_design_run_status] ( identifier[self] , identifier[data_view_id] , identifier[run_uuid] ): literal[string] identifier[url] = identifier[routes] . identifier[get_data_view_design_status] ( identifier[data_view_id] , identifier[run_uuid] ) identifier[response] = identifier[self] . identifier[_get] ( identifier[url] ). identifier[json] () identifier[status] = identifier[response] [ literal[string] ] keyword[return] identifier[ProcessStatus] ( identifier[result] = identifier[status] . identifier[get] ( literal[string] ), identifier[progress] = identifier[status] . identifier[get] ( literal[string] ), identifier[status] = identifier[status] . identifier[get] ( literal[string] ), identifier[messages] = identifier[status] . identifier[get] ( literal[string] ) )
def get_design_run_status(self, data_view_id, run_uuid): """ Retrieves the status of an in progress or completed design run :param data_view_id: The ID number of the data view to which the run belongs, as a string :type data_view_id: str :param run_uuid: The UUID of the design run to retrieve status for :type run_uuid: str :return: A :class:`ProcessStatus` object """ url = routes.get_data_view_design_status(data_view_id, run_uuid) response = self._get(url).json() status = response['data'] return ProcessStatus(result=status.get('result'), progress=status.get('progress'), status=status.get('status'), messages=status.get('messages'))
def remove_task_db(self, fs_id): '''将任务从数据库中删除''' sql = 'DELETE FROM tasks WHERE fsid=?' self.cursor.execute(sql, [fs_id, ]) self.check_commit()
def function[remove_task_db, parameter[self, fs_id]]: constant[将任务从数据库中删除] variable[sql] assign[=] constant[DELETE FROM tasks WHERE fsid=?] call[name[self].cursor.execute, parameter[name[sql], list[[<ast.Name object at 0x7da1b1d52e60>]]]] call[name[self].check_commit, parameter[]]
keyword[def] identifier[remove_task_db] ( identifier[self] , identifier[fs_id] ): literal[string] identifier[sql] = literal[string] identifier[self] . identifier[cursor] . identifier[execute] ( identifier[sql] ,[ identifier[fs_id] ,]) identifier[self] . identifier[check_commit] ()
def remove_task_db(self, fs_id): """将任务从数据库中删除""" sql = 'DELETE FROM tasks WHERE fsid=?' self.cursor.execute(sql, [fs_id]) self.check_commit()
def _check_descendant(self, item): """Check the boxes of item's descendants.""" children = self.get_children(item) for iid in children: self.change_state(iid, "checked") self._check_descendant(iid)
def function[_check_descendant, parameter[self, item]]: constant[Check the boxes of item's descendants.] variable[children] assign[=] call[name[self].get_children, parameter[name[item]]] for taget[name[iid]] in starred[name[children]] begin[:] call[name[self].change_state, parameter[name[iid], constant[checked]]] call[name[self]._check_descendant, parameter[name[iid]]]
keyword[def] identifier[_check_descendant] ( identifier[self] , identifier[item] ): literal[string] identifier[children] = identifier[self] . identifier[get_children] ( identifier[item] ) keyword[for] identifier[iid] keyword[in] identifier[children] : identifier[self] . identifier[change_state] ( identifier[iid] , literal[string] ) identifier[self] . identifier[_check_descendant] ( identifier[iid] )
def _check_descendant(self, item): """Check the boxes of item's descendants.""" children = self.get_children(item) for iid in children: self.change_state(iid, 'checked') self._check_descendant(iid) # depends on [control=['for'], data=['iid']]
def format_style(number: int) -> str: """ Return an escape code for a style, by number. This handles invalid style numbers. """ if str(number) not in _stylenums: raise InvalidStyle(number) return codeformat(number)
def function[format_style, parameter[number]]: constant[ Return an escape code for a style, by number. This handles invalid style numbers. ] if compare[call[name[str], parameter[name[number]]] <ast.NotIn object at 0x7da2590d7190> name[_stylenums]] begin[:] <ast.Raise object at 0x7da1b03da440> return[call[name[codeformat], parameter[name[number]]]]
keyword[def] identifier[format_style] ( identifier[number] : identifier[int] )-> identifier[str] : literal[string] keyword[if] identifier[str] ( identifier[number] ) keyword[not] keyword[in] identifier[_stylenums] : keyword[raise] identifier[InvalidStyle] ( identifier[number] ) keyword[return] identifier[codeformat] ( identifier[number] )
def format_style(number: int) -> str: """ Return an escape code for a style, by number. This handles invalid style numbers. """ if str(number) not in _stylenums: raise InvalidStyle(number) # depends on [control=['if'], data=[]] return codeformat(number)
def create_divisao_dc(self): """Get an instance of divisao_dc services facade.""" return DivisaoDc( self.networkapi_url, self.user, self.password, self.user_ldap)
def function[create_divisao_dc, parameter[self]]: constant[Get an instance of divisao_dc services facade.] return[call[name[DivisaoDc], parameter[name[self].networkapi_url, name[self].user, name[self].password, name[self].user_ldap]]]
keyword[def] identifier[create_divisao_dc] ( identifier[self] ): literal[string] keyword[return] identifier[DivisaoDc] ( identifier[self] . identifier[networkapi_url] , identifier[self] . identifier[user] , identifier[self] . identifier[password] , identifier[self] . identifier[user_ldap] )
def create_divisao_dc(self): """Get an instance of divisao_dc services facade.""" return DivisaoDc(self.networkapi_url, self.user, self.password, self.user_ldap)
def _calc_bg_dimensions(self, scale_x, scale_y, pan_x, pan_y, win_wd, win_ht): """ Parameters ---------- scale_x, scale_y : float desired scale of viewer in each axis. pan_x, pan_y : float pan position in data coordinates. win_wd, win_ht : int window dimensions in pixels """ # Sanity check on the scale sx = float(win_wd) / scale_x sy = float(win_ht) / scale_y if (sx < 1.0) or (sy < 1.0): #self.logger.warning("new scale would exceed max/min; scale unchanged") raise ImageViewError("new scale would exceed pixel max; scale unchanged") # It is necessary to store these so that the get_pan_rect() # (below) calculation can proceed self._org_x, self._org_y = pan_x - self.data_off, pan_y - self.data_off self._org_scale_x, self._org_scale_y = scale_x, scale_y self._org_scale_z = (scale_x + scale_y) / 2.0 # calc minimum size of pixel image we will generate # necessary to fit the window in the desired size # get the data points in the four corners a, b = trcalc.get_bounds(self.get_pan_rect()) # determine bounding box a1, b1 = a[:2] a2, b2 = b[:2] # constrain to integer indexes x1, y1, x2, y2 = int(a1), int(b1), int(np.round(a2)), int(np.round(b2)) x1 = max(0, x1) y1 = max(0, y1) self.logger.debug("approx area covered is %dx%d to %dx%d" % ( x1, y1, x2, y2)) self._org_x1 = x1 self._org_y1 = y1 self._org_x2 = x2 self._org_y2 = y2 # Make a square from the scaled cutout, with room to rotate slop = 20 side = int(math.sqrt(win_wd**2 + win_ht**2) + slop) wd = ht = side # Find center of new array ncx, ncy = wd // 2, ht // 2 self._org_xoff, self._org_yoff = ncx, ncy return (wd, ht)
def function[_calc_bg_dimensions, parameter[self, scale_x, scale_y, pan_x, pan_y, win_wd, win_ht]]: constant[ Parameters ---------- scale_x, scale_y : float desired scale of viewer in each axis. pan_x, pan_y : float pan position in data coordinates. win_wd, win_ht : int window dimensions in pixels ] variable[sx] assign[=] binary_operation[call[name[float], parameter[name[win_wd]]] / name[scale_x]] variable[sy] assign[=] binary_operation[call[name[float], parameter[name[win_ht]]] / name[scale_y]] if <ast.BoolOp object at 0x7da2046211b0> begin[:] <ast.Raise object at 0x7da2041d9a20> <ast.Tuple object at 0x7da2041da3b0> assign[=] tuple[[<ast.BinOp object at 0x7da2041d9e70>, <ast.BinOp object at 0x7da2041d9540>]] <ast.Tuple object at 0x7da2041d8880> assign[=] tuple[[<ast.Name object at 0x7da2041dbd00>, <ast.Name object at 0x7da2041d92d0>]] name[self]._org_scale_z assign[=] binary_operation[binary_operation[name[scale_x] + name[scale_y]] / constant[2.0]] <ast.Tuple object at 0x7da2041dbc70> assign[=] call[name[trcalc].get_bounds, parameter[call[name[self].get_pan_rect, parameter[]]]] <ast.Tuple object at 0x7da2041db3a0> assign[=] call[name[a]][<ast.Slice object at 0x7da2041db100>] <ast.Tuple object at 0x7da2041da5c0> assign[=] call[name[b]][<ast.Slice object at 0x7da2041da8f0>] <ast.Tuple object at 0x7da2041db370> assign[=] tuple[[<ast.Call object at 0x7da2041da860>, <ast.Call object at 0x7da2041da350>, <ast.Call object at 0x7da2041d9bd0>, <ast.Call object at 0x7da2041d8250>]] variable[x1] assign[=] call[name[max], parameter[constant[0], name[x1]]] variable[y1] assign[=] call[name[max], parameter[constant[0], name[y1]]] call[name[self].logger.debug, parameter[binary_operation[constant[approx area covered is %dx%d to %dx%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2041da7a0>, <ast.Name object at 0x7da2041d9fc0>, <ast.Name object at 0x7da2041d8d90>, <ast.Name object at 0x7da2041d9210>]]]]] name[self]._org_x1 assign[=] name[x1] name[self]._org_y1 assign[=] name[y1] name[self]._org_x2 assign[=] name[x2] name[self]._org_y2 assign[=] name[y2] variable[slop] assign[=] constant[20] variable[side] assign[=] call[name[int], parameter[binary_operation[call[name[math].sqrt, parameter[binary_operation[binary_operation[name[win_wd] ** constant[2]] + binary_operation[name[win_ht] ** constant[2]]]]] + name[slop]]]] variable[wd] assign[=] name[side] <ast.Tuple object at 0x7da20e954f40> assign[=] tuple[[<ast.BinOp object at 0x7da20e954340>, <ast.BinOp object at 0x7da20e955000>]] <ast.Tuple object at 0x7da20e957f10> assign[=] tuple[[<ast.Name object at 0x7da20e956500>, <ast.Name object at 0x7da20e956da0>]] return[tuple[[<ast.Name object at 0x7da20e954670>, <ast.Name object at 0x7da20e955f00>]]]
keyword[def] identifier[_calc_bg_dimensions] ( identifier[self] , identifier[scale_x] , identifier[scale_y] , identifier[pan_x] , identifier[pan_y] , identifier[win_wd] , identifier[win_ht] ): literal[string] identifier[sx] = identifier[float] ( identifier[win_wd] )/ identifier[scale_x] identifier[sy] = identifier[float] ( identifier[win_ht] )/ identifier[scale_y] keyword[if] ( identifier[sx] < literal[int] ) keyword[or] ( identifier[sy] < literal[int] ): keyword[raise] identifier[ImageViewError] ( literal[string] ) identifier[self] . identifier[_org_x] , identifier[self] . identifier[_org_y] = identifier[pan_x] - identifier[self] . identifier[data_off] , identifier[pan_y] - identifier[self] . identifier[data_off] identifier[self] . identifier[_org_scale_x] , identifier[self] . identifier[_org_scale_y] = identifier[scale_x] , identifier[scale_y] identifier[self] . identifier[_org_scale_z] =( identifier[scale_x] + identifier[scale_y] )/ literal[int] identifier[a] , identifier[b] = identifier[trcalc] . identifier[get_bounds] ( identifier[self] . identifier[get_pan_rect] ()) identifier[a1] , identifier[b1] = identifier[a] [: literal[int] ] identifier[a2] , identifier[b2] = identifier[b] [: literal[int] ] identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] = identifier[int] ( identifier[a1] ), identifier[int] ( identifier[b1] ), identifier[int] ( identifier[np] . identifier[round] ( identifier[a2] )), identifier[int] ( identifier[np] . identifier[round] ( identifier[b2] )) identifier[x1] = identifier[max] ( literal[int] , identifier[x1] ) identifier[y1] = identifier[max] ( literal[int] , identifier[y1] ) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] %( identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] )) identifier[self] . identifier[_org_x1] = identifier[x1] identifier[self] . identifier[_org_y1] = identifier[y1] identifier[self] . identifier[_org_x2] = identifier[x2] identifier[self] . identifier[_org_y2] = identifier[y2] identifier[slop] = literal[int] identifier[side] = identifier[int] ( identifier[math] . identifier[sqrt] ( identifier[win_wd] ** literal[int] + identifier[win_ht] ** literal[int] )+ identifier[slop] ) identifier[wd] = identifier[ht] = identifier[side] identifier[ncx] , identifier[ncy] = identifier[wd] // literal[int] , identifier[ht] // literal[int] identifier[self] . identifier[_org_xoff] , identifier[self] . identifier[_org_yoff] = identifier[ncx] , identifier[ncy] keyword[return] ( identifier[wd] , identifier[ht] )
def _calc_bg_dimensions(self, scale_x, scale_y, pan_x, pan_y, win_wd, win_ht): """ Parameters ---------- scale_x, scale_y : float desired scale of viewer in each axis. pan_x, pan_y : float pan position in data coordinates. win_wd, win_ht : int window dimensions in pixels """ # Sanity check on the scale sx = float(win_wd) / scale_x sy = float(win_ht) / scale_y if sx < 1.0 or sy < 1.0: #self.logger.warning("new scale would exceed max/min; scale unchanged") raise ImageViewError('new scale would exceed pixel max; scale unchanged') # depends on [control=['if'], data=[]] # It is necessary to store these so that the get_pan_rect() # (below) calculation can proceed (self._org_x, self._org_y) = (pan_x - self.data_off, pan_y - self.data_off) (self._org_scale_x, self._org_scale_y) = (scale_x, scale_y) self._org_scale_z = (scale_x + scale_y) / 2.0 # calc minimum size of pixel image we will generate # necessary to fit the window in the desired size # get the data points in the four corners (a, b) = trcalc.get_bounds(self.get_pan_rect()) # determine bounding box (a1, b1) = a[:2] (a2, b2) = b[:2] # constrain to integer indexes (x1, y1, x2, y2) = (int(a1), int(b1), int(np.round(a2)), int(np.round(b2))) x1 = max(0, x1) y1 = max(0, y1) self.logger.debug('approx area covered is %dx%d to %dx%d' % (x1, y1, x2, y2)) self._org_x1 = x1 self._org_y1 = y1 self._org_x2 = x2 self._org_y2 = y2 # Make a square from the scaled cutout, with room to rotate slop = 20 side = int(math.sqrt(win_wd ** 2 + win_ht ** 2) + slop) wd = ht = side # Find center of new array (ncx, ncy) = (wd // 2, ht // 2) (self._org_xoff, self._org_yoff) = (ncx, ncy) return (wd, ht)
def is_stable_version(version): """ Return true if version is stable, i.e. with letters in the final component. Stable version examples: ``1.2``, ``1.3.4``, ``1.0.5``. Non-stable version examples: ``1.3.4beta``, ``0.1.0rc1``, ``3.0.0dev0``. """ if not isinstance(version, tuple): version = version.split('.') last_part = version[-1] if not re.search(r'[a-zA-Z]', last_part): return True else: return False
def function[is_stable_version, parameter[version]]: constant[ Return true if version is stable, i.e. with letters in the final component. Stable version examples: ``1.2``, ``1.3.4``, ``1.0.5``. Non-stable version examples: ``1.3.4beta``, ``0.1.0rc1``, ``3.0.0dev0``. ] if <ast.UnaryOp object at 0x7da18eb55060> begin[:] variable[version] assign[=] call[name[version].split, parameter[constant[.]]] variable[last_part] assign[=] call[name[version]][<ast.UnaryOp object at 0x7da18eb568c0>] if <ast.UnaryOp object at 0x7da18eb56950> begin[:] return[constant[True]]
keyword[def] identifier[is_stable_version] ( identifier[version] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[version] , identifier[tuple] ): identifier[version] = identifier[version] . identifier[split] ( literal[string] ) identifier[last_part] = identifier[version] [- literal[int] ] keyword[if] keyword[not] identifier[re] . identifier[search] ( literal[string] , identifier[last_part] ): keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def is_stable_version(version): """ Return true if version is stable, i.e. with letters in the final component. Stable version examples: ``1.2``, ``1.3.4``, ``1.0.5``. Non-stable version examples: ``1.3.4beta``, ``0.1.0rc1``, ``3.0.0dev0``. """ if not isinstance(version, tuple): version = version.split('.') # depends on [control=['if'], data=[]] last_part = version[-1] if not re.search('[a-zA-Z]', last_part): return True # depends on [control=['if'], data=[]] else: return False
def _finish(self): """ Closes and waits for subprocess to exit. """ if self._process.returncode is None: self._process.stdin.flush() self._process.stdin.close() self._process.wait() self.closed = True
def function[_finish, parameter[self]]: constant[ Closes and waits for subprocess to exit. ] if compare[name[self]._process.returncode is constant[None]] begin[:] call[name[self]._process.stdin.flush, parameter[]] call[name[self]._process.stdin.close, parameter[]] call[name[self]._process.wait, parameter[]] name[self].closed assign[=] constant[True]
keyword[def] identifier[_finish] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_process] . identifier[returncode] keyword[is] keyword[None] : identifier[self] . identifier[_process] . identifier[stdin] . identifier[flush] () identifier[self] . identifier[_process] . identifier[stdin] . identifier[close] () identifier[self] . identifier[_process] . identifier[wait] () identifier[self] . identifier[closed] = keyword[True]
def _finish(self): """ Closes and waits for subprocess to exit. """ if self._process.returncode is None: self._process.stdin.flush() self._process.stdin.close() self._process.wait() self.closed = True # depends on [control=['if'], data=[]]
def _create_json(self): """ JSON Documentation: https://www.jfrog.com/confluence/display/RTF/Security+Configuration+JSON """ data_json = { "name": self.name, "includesPattern": self.includesPattern, "excludesPattern": self.excludesPattern, "repositories": self._repositories, "principals": { 'users': self._users, 'groups': self._groups, } } return data_json
def function[_create_json, parameter[self]]: constant[ JSON Documentation: https://www.jfrog.com/confluence/display/RTF/Security+Configuration+JSON ] variable[data_json] assign[=] dictionary[[<ast.Constant object at 0x7da2047e97b0>, <ast.Constant object at 0x7da2047eb070>, <ast.Constant object at 0x7da2047e84f0>, <ast.Constant object at 0x7da18f09f6d0>, <ast.Constant object at 0x7da18f09ffd0>], [<ast.Attribute object at 0x7da18f09d330>, <ast.Attribute object at 0x7da18f09f1f0>, <ast.Attribute object at 0x7da18f09fca0>, <ast.Attribute object at 0x7da18f09e5c0>, <ast.Dict object at 0x7da18f09df60>]] return[name[data_json]]
keyword[def] identifier[_create_json] ( identifier[self] ): literal[string] identifier[data_json] ={ literal[string] : identifier[self] . identifier[name] , literal[string] : identifier[self] . identifier[includesPattern] , literal[string] : identifier[self] . identifier[excludesPattern] , literal[string] : identifier[self] . identifier[_repositories] , literal[string] :{ literal[string] : identifier[self] . identifier[_users] , literal[string] : identifier[self] . identifier[_groups] , } } keyword[return] identifier[data_json]
def _create_json(self): """ JSON Documentation: https://www.jfrog.com/confluence/display/RTF/Security+Configuration+JSON """ data_json = {'name': self.name, 'includesPattern': self.includesPattern, 'excludesPattern': self.excludesPattern, 'repositories': self._repositories, 'principals': {'users': self._users, 'groups': self._groups}} return data_json
def _validate_organization_data(organization_data): """ Validation helper """ if not validators.organization_data_is_valid(organization_data): exceptions.raise_exception( "Organization", organization_data, exceptions.InvalidOrganizationException )
def function[_validate_organization_data, parameter[organization_data]]: constant[ Validation helper ] if <ast.UnaryOp object at 0x7da2044c0970> begin[:] call[name[exceptions].raise_exception, parameter[constant[Organization], name[organization_data], name[exceptions].InvalidOrganizationException]]
keyword[def] identifier[_validate_organization_data] ( identifier[organization_data] ): literal[string] keyword[if] keyword[not] identifier[validators] . identifier[organization_data_is_valid] ( identifier[organization_data] ): identifier[exceptions] . identifier[raise_exception] ( literal[string] , identifier[organization_data] , identifier[exceptions] . identifier[InvalidOrganizationException] )
def _validate_organization_data(organization_data): """ Validation helper """ if not validators.organization_data_is_valid(organization_data): exceptions.raise_exception('Organization', organization_data, exceptions.InvalidOrganizationException) # depends on [control=['if'], data=[]]
def _query(self, action, qobj): """ returns WPToolsQuery string """ title = self.params.get('title') pageid = self.params.get('pageid') wikibase = self.params.get('wikibase') qstr = None if action == 'random': qstr = qobj.random() elif action == 'query': qstr = qobj.query(title, pageid, self._continue_params()) elif action == 'querymore': qstr = qobj.querymore(title, pageid, self._continue_params()) elif action == 'parse': qstr = qobj.parse(title, pageid) elif action == 'imageinfo': qstr = qobj.imageinfo(self._missing_imageinfo()) elif action == 'labels': qstr = qobj.labels(self._pop_entities()) elif action == 'wikidata': qstr = qobj.wikidata(title, wikibase) elif action == 'restbase': qstr = qobj.restbase(self.params.get('rest_endpoint'), title) if qstr is None: raise ValueError("Unknown action: %s" % action) return qstr
def function[_query, parameter[self, action, qobj]]: constant[ returns WPToolsQuery string ] variable[title] assign[=] call[name[self].params.get, parameter[constant[title]]] variable[pageid] assign[=] call[name[self].params.get, parameter[constant[pageid]]] variable[wikibase] assign[=] call[name[self].params.get, parameter[constant[wikibase]]] variable[qstr] assign[=] constant[None] if compare[name[action] equal[==] constant[random]] begin[:] variable[qstr] assign[=] call[name[qobj].random, parameter[]] if compare[name[qstr] is constant[None]] begin[:] <ast.Raise object at 0x7da1b12042e0> return[name[qstr]]
keyword[def] identifier[_query] ( identifier[self] , identifier[action] , identifier[qobj] ): literal[string] identifier[title] = identifier[self] . identifier[params] . identifier[get] ( literal[string] ) identifier[pageid] = identifier[self] . identifier[params] . identifier[get] ( literal[string] ) identifier[wikibase] = identifier[self] . identifier[params] . identifier[get] ( literal[string] ) identifier[qstr] = keyword[None] keyword[if] identifier[action] == literal[string] : identifier[qstr] = identifier[qobj] . identifier[random] () keyword[elif] identifier[action] == literal[string] : identifier[qstr] = identifier[qobj] . identifier[query] ( identifier[title] , identifier[pageid] , identifier[self] . identifier[_continue_params] ()) keyword[elif] identifier[action] == literal[string] : identifier[qstr] = identifier[qobj] . identifier[querymore] ( identifier[title] , identifier[pageid] , identifier[self] . identifier[_continue_params] ()) keyword[elif] identifier[action] == literal[string] : identifier[qstr] = identifier[qobj] . identifier[parse] ( identifier[title] , identifier[pageid] ) keyword[elif] identifier[action] == literal[string] : identifier[qstr] = identifier[qobj] . identifier[imageinfo] ( identifier[self] . identifier[_missing_imageinfo] ()) keyword[elif] identifier[action] == literal[string] : identifier[qstr] = identifier[qobj] . identifier[labels] ( identifier[self] . identifier[_pop_entities] ()) keyword[elif] identifier[action] == literal[string] : identifier[qstr] = identifier[qobj] . identifier[wikidata] ( identifier[title] , identifier[wikibase] ) keyword[elif] identifier[action] == literal[string] : identifier[qstr] = identifier[qobj] . identifier[restbase] ( identifier[self] . identifier[params] . identifier[get] ( literal[string] ), identifier[title] ) keyword[if] identifier[qstr] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[action] ) keyword[return] identifier[qstr]
def _query(self, action, qobj): """ returns WPToolsQuery string """ title = self.params.get('title') pageid = self.params.get('pageid') wikibase = self.params.get('wikibase') qstr = None if action == 'random': qstr = qobj.random() # depends on [control=['if'], data=[]] elif action == 'query': qstr = qobj.query(title, pageid, self._continue_params()) # depends on [control=['if'], data=[]] elif action == 'querymore': qstr = qobj.querymore(title, pageid, self._continue_params()) # depends on [control=['if'], data=[]] elif action == 'parse': qstr = qobj.parse(title, pageid) # depends on [control=['if'], data=[]] elif action == 'imageinfo': qstr = qobj.imageinfo(self._missing_imageinfo()) # depends on [control=['if'], data=[]] elif action == 'labels': qstr = qobj.labels(self._pop_entities()) # depends on [control=['if'], data=[]] elif action == 'wikidata': qstr = qobj.wikidata(title, wikibase) # depends on [control=['if'], data=[]] elif action == 'restbase': qstr = qobj.restbase(self.params.get('rest_endpoint'), title) # depends on [control=['if'], data=[]] if qstr is None: raise ValueError('Unknown action: %s' % action) # depends on [control=['if'], data=[]] return qstr
def _initialize_background(self): """Set up background state (zonal flow and PV gradients).""" # Background zonal flow (m/s): self.H = self.Hi.sum() self.set_U1U2(self.U1, self.U2) self.U = self.U1 - self.U2 # the F parameters self.F1 = self.rd**-2 / (1.+self.delta) self.F2 = self.delta*self.F1 # the meridional PV gradients in each layer # need to calculate actual PV gradient self.Qy1 = self.beta + self.F1*(self.U1 - self.U2) + np.gradient(np.gradient(self.U1,self.dy),self.dy) self.Qy2 = self.beta - self.F2*(self.U1 - self.U2) + np.gradient(np.gradient(self.U2,self.dy),self.dy) self.Qy = np.array([self.Qy1, self.Qy2]) # complex versions, multiplied by k, speeds up computations to precompute self.ikQy1 = self.Qy1[:,np.newaxis] * 1j * self.k self.ikQy2 = self.Qy2[:,np.newaxis] * 1j * self.k # vector version self.ikQy = np.vstack([self.ikQy1[np.newaxis,...], self.ikQy2[np.newaxis,...]]) self.ilQx = 0. # layer spacing self.del1 = self.delta/(self.delta+1.) self.del2 = (self.delta+1.)**-1
def function[_initialize_background, parameter[self]]: constant[Set up background state (zonal flow and PV gradients).] name[self].H assign[=] call[name[self].Hi.sum, parameter[]] call[name[self].set_U1U2, parameter[name[self].U1, name[self].U2]] name[self].U assign[=] binary_operation[name[self].U1 - name[self].U2] name[self].F1 assign[=] binary_operation[binary_operation[name[self].rd ** <ast.UnaryOp object at 0x7da18dc98970>] / binary_operation[constant[1.0] + name[self].delta]] name[self].F2 assign[=] binary_operation[name[self].delta * name[self].F1] name[self].Qy1 assign[=] binary_operation[binary_operation[name[self].beta + binary_operation[name[self].F1 * binary_operation[name[self].U1 - name[self].U2]]] + call[name[np].gradient, parameter[call[name[np].gradient, parameter[name[self].U1, name[self].dy]], name[self].dy]]] name[self].Qy2 assign[=] binary_operation[binary_operation[name[self].beta - binary_operation[name[self].F2 * binary_operation[name[self].U1 - name[self].U2]]] + call[name[np].gradient, parameter[call[name[np].gradient, parameter[name[self].U2, name[self].dy]], name[self].dy]]] name[self].Qy assign[=] call[name[np].array, parameter[list[[<ast.Attribute object at 0x7da20c6c6dd0>, <ast.Attribute object at 0x7da20c6c6d70>]]]] name[self].ikQy1 assign[=] binary_operation[binary_operation[call[name[self].Qy1][tuple[[<ast.Slice object at 0x7da20c6c4970>, <ast.Attribute object at 0x7da20c6c68c0>]]] * constant[1j]] * name[self].k] name[self].ikQy2 assign[=] binary_operation[binary_operation[call[name[self].Qy2][tuple[[<ast.Slice object at 0x7da20c6c5990>, <ast.Attribute object at 0x7da20c6c6bc0>]]] * constant[1j]] * name[self].k] name[self].ikQy assign[=] call[name[np].vstack, parameter[list[[<ast.Subscript object at 0x7da20c6c74c0>, <ast.Subscript object at 0x7da20c6c49a0>]]]] name[self].ilQx assign[=] constant[0.0] name[self].del1 assign[=] binary_operation[name[self].delta / binary_operation[name[self].delta + constant[1.0]]] name[self].del2 assign[=] binary_operation[binary_operation[name[self].delta + constant[1.0]] ** <ast.UnaryOp object at 0x7da20c991b70>]
keyword[def] identifier[_initialize_background] ( identifier[self] ): literal[string] identifier[self] . identifier[H] = identifier[self] . identifier[Hi] . identifier[sum] () identifier[self] . identifier[set_U1U2] ( identifier[self] . identifier[U1] , identifier[self] . identifier[U2] ) identifier[self] . identifier[U] = identifier[self] . identifier[U1] - identifier[self] . identifier[U2] identifier[self] . identifier[F1] = identifier[self] . identifier[rd] **- literal[int] /( literal[int] + identifier[self] . identifier[delta] ) identifier[self] . identifier[F2] = identifier[self] . identifier[delta] * identifier[self] . identifier[F1] identifier[self] . identifier[Qy1] = identifier[self] . identifier[beta] + identifier[self] . identifier[F1] *( identifier[self] . identifier[U1] - identifier[self] . identifier[U2] )+ identifier[np] . identifier[gradient] ( identifier[np] . identifier[gradient] ( identifier[self] . identifier[U1] , identifier[self] . identifier[dy] ), identifier[self] . identifier[dy] ) identifier[self] . identifier[Qy2] = identifier[self] . identifier[beta] - identifier[self] . identifier[F2] *( identifier[self] . identifier[U1] - identifier[self] . identifier[U2] )+ identifier[np] . identifier[gradient] ( identifier[np] . identifier[gradient] ( identifier[self] . identifier[U2] , identifier[self] . identifier[dy] ), identifier[self] . identifier[dy] ) identifier[self] . identifier[Qy] = identifier[np] . identifier[array] ([ identifier[self] . identifier[Qy1] , identifier[self] . identifier[Qy2] ]) identifier[self] . identifier[ikQy1] = identifier[self] . identifier[Qy1] [:, identifier[np] . identifier[newaxis] ]* literal[int] * identifier[self] . identifier[k] identifier[self] . identifier[ikQy2] = identifier[self] . identifier[Qy2] [:, identifier[np] . identifier[newaxis] ]* literal[int] * identifier[self] . identifier[k] identifier[self] . identifier[ikQy] = identifier[np] . identifier[vstack] ([ identifier[self] . identifier[ikQy1] [ identifier[np] . identifier[newaxis] ,...], identifier[self] . identifier[ikQy2] [ identifier[np] . identifier[newaxis] ,...]]) identifier[self] . identifier[ilQx] = literal[int] identifier[self] . identifier[del1] = identifier[self] . identifier[delta] /( identifier[self] . identifier[delta] + literal[int] ) identifier[self] . identifier[del2] =( identifier[self] . identifier[delta] + literal[int] )**- literal[int]
def _initialize_background(self): """Set up background state (zonal flow and PV gradients).""" # Background zonal flow (m/s): self.H = self.Hi.sum() self.set_U1U2(self.U1, self.U2) self.U = self.U1 - self.U2 # the F parameters self.F1 = self.rd ** (-2) / (1.0 + self.delta) self.F2 = self.delta * self.F1 # the meridional PV gradients in each layer # need to calculate actual PV gradient self.Qy1 = self.beta + self.F1 * (self.U1 - self.U2) + np.gradient(np.gradient(self.U1, self.dy), self.dy) self.Qy2 = self.beta - self.F2 * (self.U1 - self.U2) + np.gradient(np.gradient(self.U2, self.dy), self.dy) self.Qy = np.array([self.Qy1, self.Qy2]) # complex versions, multiplied by k, speeds up computations to precompute self.ikQy1 = self.Qy1[:, np.newaxis] * 1j * self.k self.ikQy2 = self.Qy2[:, np.newaxis] * 1j * self.k # vector version self.ikQy = np.vstack([self.ikQy1[np.newaxis, ...], self.ikQy2[np.newaxis, ...]]) self.ilQx = 0.0 # layer spacing self.del1 = self.delta / (self.delta + 1.0) self.del2 = (self.delta + 1.0) ** (-1)
def tablespace_remove(name, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None): ''' Removes a tablespace from the Postgres server. CLI Example: .. code-block:: bash salt '*' postgres.tablespace_remove tsname .. versionadded:: 2015.8.0 ''' query = 'DROP TABLESPACE "{0}"'.format(name) ret = _psql_prepare_and_run(['-c', query], user=user, host=host, port=port, runas=runas, maintenance_db=maintenance_db, password=password) return ret['retcode'] == 0
def function[tablespace_remove, parameter[name, user, host, port, maintenance_db, password, runas]]: constant[ Removes a tablespace from the Postgres server. CLI Example: .. code-block:: bash salt '*' postgres.tablespace_remove tsname .. versionadded:: 2015.8.0 ] variable[query] assign[=] call[constant[DROP TABLESPACE "{0}"].format, parameter[name[name]]] variable[ret] assign[=] call[name[_psql_prepare_and_run], parameter[list[[<ast.Constant object at 0x7da20c7ca860>, <ast.Name object at 0x7da20c7c9b40>]]]] return[compare[call[name[ret]][constant[retcode]] equal[==] constant[0]]]
keyword[def] identifier[tablespace_remove] ( identifier[name] , identifier[user] = keyword[None] , identifier[host] = keyword[None] , identifier[port] = keyword[None] , identifier[maintenance_db] = keyword[None] , identifier[password] = keyword[None] , identifier[runas] = keyword[None] ): literal[string] identifier[query] = literal[string] . identifier[format] ( identifier[name] ) identifier[ret] = identifier[_psql_prepare_and_run] ([ literal[string] , identifier[query] ], identifier[user] = identifier[user] , identifier[host] = identifier[host] , identifier[port] = identifier[port] , identifier[runas] = identifier[runas] , identifier[maintenance_db] = identifier[maintenance_db] , identifier[password] = identifier[password] ) keyword[return] identifier[ret] [ literal[string] ]== literal[int]
def tablespace_remove(name, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None): """ Removes a tablespace from the Postgres server. CLI Example: .. code-block:: bash salt '*' postgres.tablespace_remove tsname .. versionadded:: 2015.8.0 """ query = 'DROP TABLESPACE "{0}"'.format(name) ret = _psql_prepare_and_run(['-c', query], user=user, host=host, port=port, runas=runas, maintenance_db=maintenance_db, password=password) return ret['retcode'] == 0
def _setRTSDTR(port, RTS, DTR): """Set RTS and DTR to the requested state.""" port.setRTS(RTS) port.setDTR(DTR)
def function[_setRTSDTR, parameter[port, RTS, DTR]]: constant[Set RTS and DTR to the requested state.] call[name[port].setRTS, parameter[name[RTS]]] call[name[port].setDTR, parameter[name[DTR]]]
keyword[def] identifier[_setRTSDTR] ( identifier[port] , identifier[RTS] , identifier[DTR] ): literal[string] identifier[port] . identifier[setRTS] ( identifier[RTS] ) identifier[port] . identifier[setDTR] ( identifier[DTR] )
def _setRTSDTR(port, RTS, DTR): """Set RTS and DTR to the requested state.""" port.setRTS(RTS) port.setDTR(DTR)
def get_order(self, codes): """Return evidence codes in order shown in code2name.""" return sorted(codes, key=lambda e: [self.ev2idx.get(e)])
def function[get_order, parameter[self, codes]]: constant[Return evidence codes in order shown in code2name.] return[call[name[sorted], parameter[name[codes]]]]
keyword[def] identifier[get_order] ( identifier[self] , identifier[codes] ): literal[string] keyword[return] identifier[sorted] ( identifier[codes] , identifier[key] = keyword[lambda] identifier[e] :[ identifier[self] . identifier[ev2idx] . identifier[get] ( identifier[e] )])
def get_order(self, codes): """Return evidence codes in order shown in code2name.""" return sorted(codes, key=lambda e: [self.ev2idx.get(e)])
def init(self, s): """ Initialize the text interface """ # Hide cursor curses.curs_set(0) self.s = s self.s.keypad(1) self.set_screen_size() self.pads = {} self.offsets = {} self.init_help() self.init_streams_pad() self.current_pad = 'streams' self.set_title(TITLE_STRING) self.got_g = False signal.signal(28, self.resize) if self.config.CHECK_ONLINE_ON_START: self.check_online_streams() self.set_status('Ready')
def function[init, parameter[self, s]]: constant[ Initialize the text interface ] call[name[curses].curs_set, parameter[constant[0]]] name[self].s assign[=] name[s] call[name[self].s.keypad, parameter[constant[1]]] call[name[self].set_screen_size, parameter[]] name[self].pads assign[=] dictionary[[], []] name[self].offsets assign[=] dictionary[[], []] call[name[self].init_help, parameter[]] call[name[self].init_streams_pad, parameter[]] name[self].current_pad assign[=] constant[streams] call[name[self].set_title, parameter[name[TITLE_STRING]]] name[self].got_g assign[=] constant[False] call[name[signal].signal, parameter[constant[28], name[self].resize]] if name[self].config.CHECK_ONLINE_ON_START begin[:] call[name[self].check_online_streams, parameter[]] call[name[self].set_status, parameter[constant[Ready]]]
keyword[def] identifier[init] ( identifier[self] , identifier[s] ): literal[string] identifier[curses] . identifier[curs_set] ( literal[int] ) identifier[self] . identifier[s] = identifier[s] identifier[self] . identifier[s] . identifier[keypad] ( literal[int] ) identifier[self] . identifier[set_screen_size] () identifier[self] . identifier[pads] ={} identifier[self] . identifier[offsets] ={} identifier[self] . identifier[init_help] () identifier[self] . identifier[init_streams_pad] () identifier[self] . identifier[current_pad] = literal[string] identifier[self] . identifier[set_title] ( identifier[TITLE_STRING] ) identifier[self] . identifier[got_g] = keyword[False] identifier[signal] . identifier[signal] ( literal[int] , identifier[self] . identifier[resize] ) keyword[if] identifier[self] . identifier[config] . identifier[CHECK_ONLINE_ON_START] : identifier[self] . identifier[check_online_streams] () identifier[self] . identifier[set_status] ( literal[string] )
def init(self, s): """ Initialize the text interface """ # Hide cursor curses.curs_set(0) self.s = s self.s.keypad(1) self.set_screen_size() self.pads = {} self.offsets = {} self.init_help() self.init_streams_pad() self.current_pad = 'streams' self.set_title(TITLE_STRING) self.got_g = False signal.signal(28, self.resize) if self.config.CHECK_ONLINE_ON_START: self.check_online_streams() # depends on [control=['if'], data=[]] self.set_status('Ready')
def hardware_custom_profile_kap_custom_profile_bfd_vxlan_bfd_vxlan_hello_interval(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") hardware = ET.SubElement(config, "hardware", xmlns="urn:brocade.com:mgmt:brocade-hardware") custom_profile = ET.SubElement(hardware, "custom-profile") kap_custom_profile = ET.SubElement(custom_profile, "kap-custom-profile") name_key = ET.SubElement(kap_custom_profile, "name") name_key.text = kwargs.pop('name') bfd_vxlan = ET.SubElement(kap_custom_profile, "bfd-vxlan") bfd_vxlan_hello_interval = ET.SubElement(bfd_vxlan, "bfd_vxlan_hello_interval") bfd_vxlan_hello_interval.text = kwargs.pop('bfd_vxlan_hello_interval') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[hardware_custom_profile_kap_custom_profile_bfd_vxlan_bfd_vxlan_hello_interval, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[hardware] assign[=] call[name[ET].SubElement, parameter[name[config], constant[hardware]]] variable[custom_profile] assign[=] call[name[ET].SubElement, parameter[name[hardware], constant[custom-profile]]] variable[kap_custom_profile] assign[=] call[name[ET].SubElement, parameter[name[custom_profile], constant[kap-custom-profile]]] variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[kap_custom_profile], constant[name]]] name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]] variable[bfd_vxlan] assign[=] call[name[ET].SubElement, parameter[name[kap_custom_profile], constant[bfd-vxlan]]] variable[bfd_vxlan_hello_interval] assign[=] call[name[ET].SubElement, parameter[name[bfd_vxlan], constant[bfd_vxlan_hello_interval]]] name[bfd_vxlan_hello_interval].text assign[=] call[name[kwargs].pop, parameter[constant[bfd_vxlan_hello_interval]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[hardware_custom_profile_kap_custom_profile_bfd_vxlan_bfd_vxlan_hello_interval] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[hardware] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[custom_profile] = identifier[ET] . identifier[SubElement] ( identifier[hardware] , literal[string] ) identifier[kap_custom_profile] = identifier[ET] . identifier[SubElement] ( identifier[custom_profile] , literal[string] ) identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[kap_custom_profile] , literal[string] ) identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[bfd_vxlan] = identifier[ET] . identifier[SubElement] ( identifier[kap_custom_profile] , literal[string] ) identifier[bfd_vxlan_hello_interval] = identifier[ET] . identifier[SubElement] ( identifier[bfd_vxlan] , literal[string] ) identifier[bfd_vxlan_hello_interval] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def hardware_custom_profile_kap_custom_profile_bfd_vxlan_bfd_vxlan_hello_interval(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') hardware = ET.SubElement(config, 'hardware', xmlns='urn:brocade.com:mgmt:brocade-hardware') custom_profile = ET.SubElement(hardware, 'custom-profile') kap_custom_profile = ET.SubElement(custom_profile, 'kap-custom-profile') name_key = ET.SubElement(kap_custom_profile, 'name') name_key.text = kwargs.pop('name') bfd_vxlan = ET.SubElement(kap_custom_profile, 'bfd-vxlan') bfd_vxlan_hello_interval = ET.SubElement(bfd_vxlan, 'bfd_vxlan_hello_interval') bfd_vxlan_hello_interval.text = kwargs.pop('bfd_vxlan_hello_interval') callback = kwargs.pop('callback', self._callback) return callback(config)
def slot_add_nio_binding(self, slot_number, port_number, nio): """ Adds a slot NIO binding. :param slot_number: slot number :param port_number: port number :param nio: NIO instance to add to the slot/port """ try: adapter = self._slots[slot_number] except IndexError: raise DynamipsError('Slot {slot_number} does not exist on router "{name}"'.format(name=self._name, slot_number=slot_number)) if adapter is None: raise DynamipsError("Adapter is missing in slot {slot_number}".format(slot_number=slot_number)) if not adapter.port_exists(port_number): raise DynamipsError("Port {port_number} does not exist in adapter {adapter}".format(adapter=adapter, port_number=port_number)) try: yield from self._hypervisor.send('vm slot_add_nio_binding "{name}" {slot_number} {port_number} {nio}'.format(name=self._name, slot_number=slot_number, port_number=port_number, nio=nio)) except DynamipsError: # in case of error try to remove and add the nio binding yield from self._hypervisor.send('vm slot_remove_nio_binding "{name}" {slot_number} {port_number}'.format(name=self._name, slot_number=slot_number, port_number=port_number)) yield from self._hypervisor.send('vm slot_add_nio_binding "{name}" {slot_number} {port_number} {nio}'.format(name=self._name, slot_number=slot_number, port_number=port_number, nio=nio)) log.info('Router "{name}" [{id}]: NIO {nio_name} bound to port {slot_number}/{port_number}'.format(name=self._name, id=self._id, nio_name=nio.name, slot_number=slot_number, port_number=port_number)) yield from self.slot_enable_nio(slot_number, port_number) adapter.add_nio(port_number, nio)
def function[slot_add_nio_binding, parameter[self, slot_number, port_number, nio]]: constant[ Adds a slot NIO binding. :param slot_number: slot number :param port_number: port number :param nio: NIO instance to add to the slot/port ] <ast.Try object at 0x7da18eb55c30> if compare[name[adapter] is constant[None]] begin[:] <ast.Raise object at 0x7da20e7497b0> if <ast.UnaryOp object at 0x7da18f813250> begin[:] <ast.Raise object at 0x7da18f811d20> <ast.Try object at 0x7da18f813970> call[name[log].info, parameter[call[constant[Router "{name}" [{id}]: NIO {nio_name} bound to port {slot_number}/{port_number}].format, parameter[]]]] <ast.YieldFrom object at 0x7da20cabeb30> call[name[adapter].add_nio, parameter[name[port_number], name[nio]]]
keyword[def] identifier[slot_add_nio_binding] ( identifier[self] , identifier[slot_number] , identifier[port_number] , identifier[nio] ): literal[string] keyword[try] : identifier[adapter] = identifier[self] . identifier[_slots] [ identifier[slot_number] ] keyword[except] identifier[IndexError] : keyword[raise] identifier[DynamipsError] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[slot_number] = identifier[slot_number] )) keyword[if] identifier[adapter] keyword[is] keyword[None] : keyword[raise] identifier[DynamipsError] ( literal[string] . identifier[format] ( identifier[slot_number] = identifier[slot_number] )) keyword[if] keyword[not] identifier[adapter] . identifier[port_exists] ( identifier[port_number] ): keyword[raise] identifier[DynamipsError] ( literal[string] . identifier[format] ( identifier[adapter] = identifier[adapter] , identifier[port_number] = identifier[port_number] )) keyword[try] : keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[slot_number] = identifier[slot_number] , identifier[port_number] = identifier[port_number] , identifier[nio] = identifier[nio] )) keyword[except] identifier[DynamipsError] : keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[slot_number] = identifier[slot_number] , identifier[port_number] = identifier[port_number] )) keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[slot_number] = identifier[slot_number] , identifier[port_number] = identifier[port_number] , identifier[nio] = identifier[nio] )) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[id] = identifier[self] . identifier[_id] , identifier[nio_name] = identifier[nio] . identifier[name] , identifier[slot_number] = identifier[slot_number] , identifier[port_number] = identifier[port_number] )) keyword[yield] keyword[from] identifier[self] . identifier[slot_enable_nio] ( identifier[slot_number] , identifier[port_number] ) identifier[adapter] . identifier[add_nio] ( identifier[port_number] , identifier[nio] )
def slot_add_nio_binding(self, slot_number, port_number, nio): """ Adds a slot NIO binding. :param slot_number: slot number :param port_number: port number :param nio: NIO instance to add to the slot/port """ try: adapter = self._slots[slot_number] # depends on [control=['try'], data=[]] except IndexError: raise DynamipsError('Slot {slot_number} does not exist on router "{name}"'.format(name=self._name, slot_number=slot_number)) # depends on [control=['except'], data=[]] if adapter is None: raise DynamipsError('Adapter is missing in slot {slot_number}'.format(slot_number=slot_number)) # depends on [control=['if'], data=[]] if not adapter.port_exists(port_number): raise DynamipsError('Port {port_number} does not exist in adapter {adapter}'.format(adapter=adapter, port_number=port_number)) # depends on [control=['if'], data=[]] try: yield from self._hypervisor.send('vm slot_add_nio_binding "{name}" {slot_number} {port_number} {nio}'.format(name=self._name, slot_number=slot_number, port_number=port_number, nio=nio)) # depends on [control=['try'], data=[]] except DynamipsError: # in case of error try to remove and add the nio binding yield from self._hypervisor.send('vm slot_remove_nio_binding "{name}" {slot_number} {port_number}'.format(name=self._name, slot_number=slot_number, port_number=port_number)) yield from self._hypervisor.send('vm slot_add_nio_binding "{name}" {slot_number} {port_number} {nio}'.format(name=self._name, slot_number=slot_number, port_number=port_number, nio=nio)) # depends on [control=['except'], data=[]] log.info('Router "{name}" [{id}]: NIO {nio_name} bound to port {slot_number}/{port_number}'.format(name=self._name, id=self._id, nio_name=nio.name, slot_number=slot_number, port_number=port_number)) yield from self.slot_enable_nio(slot_number, port_number) adapter.add_nio(port_number, nio)
def decode(self, id, seq, intf, filter=None, frame=None, inline=False): # pylint: disable=invalid-name,redefined-builtin """Get a capture's decode. :param id: Result ID as an int. :param seq: TestResult sequence ID as an int. :param intf: Interface name as string. :param filter: (optional) PCAP filter to apply as string. :param frame: (optional) Frame number to decode. :param inline: (optional) Use inline version of capture file. :return: :class:`captures.Decode <captures.Decode>` object :rtype: captures.Decode """ schema = DecodeSchema() resp = self.service.get(self._base(id, seq)+str(intf)+'/decode/', params={'filter': filter, 'frame': frame, 'inline': inline}) return self.service.decode(schema, resp)
def function[decode, parameter[self, id, seq, intf, filter, frame, inline]]: constant[Get a capture's decode. :param id: Result ID as an int. :param seq: TestResult sequence ID as an int. :param intf: Interface name as string. :param filter: (optional) PCAP filter to apply as string. :param frame: (optional) Frame number to decode. :param inline: (optional) Use inline version of capture file. :return: :class:`captures.Decode <captures.Decode>` object :rtype: captures.Decode ] variable[schema] assign[=] call[name[DecodeSchema], parameter[]] variable[resp] assign[=] call[name[self].service.get, parameter[binary_operation[binary_operation[call[name[self]._base, parameter[name[id], name[seq]]] + call[name[str], parameter[name[intf]]]] + constant[/decode/]]]] return[call[name[self].service.decode, parameter[name[schema], name[resp]]]]
keyword[def] identifier[decode] ( identifier[self] , identifier[id] , identifier[seq] , identifier[intf] , identifier[filter] = keyword[None] , identifier[frame] = keyword[None] , identifier[inline] = keyword[False] ): literal[string] identifier[schema] = identifier[DecodeSchema] () identifier[resp] = identifier[self] . identifier[service] . identifier[get] ( identifier[self] . identifier[_base] ( identifier[id] , identifier[seq] )+ identifier[str] ( identifier[intf] )+ literal[string] , identifier[params] ={ literal[string] : identifier[filter] , literal[string] : identifier[frame] , literal[string] : identifier[inline] }) keyword[return] identifier[self] . identifier[service] . identifier[decode] ( identifier[schema] , identifier[resp] )
def decode(self, id, seq, intf, filter=None, frame=None, inline=False): # pylint: disable=invalid-name,redefined-builtin "Get a capture's decode.\n\n :param id: Result ID as an int.\n :param seq: TestResult sequence ID as an int.\n :param intf: Interface name as string.\n :param filter: (optional) PCAP filter to apply as string.\n :param frame: (optional) Frame number to decode.\n :param inline: (optional) Use inline version of capture file.\n :return: :class:`captures.Decode <captures.Decode>` object\n :rtype: captures.Decode\n " schema = DecodeSchema() resp = self.service.get(self._base(id, seq) + str(intf) + '/decode/', params={'filter': filter, 'frame': frame, 'inline': inline}) return self.service.decode(schema, resp)
def get_datetime_sorted_rows(dbconn, table_name, uuid=None, column=None): """ Get a list of datetime sorted rows from a table in the database :param dbconn: database connection :param table_name: name of table in the database :param uuid: optional uuid to pull from :param column: optional column/field in the table to pull instead of rows :returns: a list of tuples containing (datetime, row) pairs or (datetime, column) pairs if columns is specified. """ rows = get_rows(dbconn, table_name, uuid=uuid) data = [] for r in rows: dt = datetime.datetime.strptime(r['Time'], "%d/%m/%Y %H:%M:%S") if column is None: data.append((dt, r)) else: data.append((dt, r[column])) data.sort() return data
def function[get_datetime_sorted_rows, parameter[dbconn, table_name, uuid, column]]: constant[ Get a list of datetime sorted rows from a table in the database :param dbconn: database connection :param table_name: name of table in the database :param uuid: optional uuid to pull from :param column: optional column/field in the table to pull instead of rows :returns: a list of tuples containing (datetime, row) pairs or (datetime, column) pairs if columns is specified. ] variable[rows] assign[=] call[name[get_rows], parameter[name[dbconn], name[table_name]]] variable[data] assign[=] list[[]] for taget[name[r]] in starred[name[rows]] begin[:] variable[dt] assign[=] call[name[datetime].datetime.strptime, parameter[call[name[r]][constant[Time]], constant[%d/%m/%Y %H:%M:%S]]] if compare[name[column] is constant[None]] begin[:] call[name[data].append, parameter[tuple[[<ast.Name object at 0x7da1b16bf640>, <ast.Name object at 0x7da1b16bcee0>]]]] call[name[data].sort, parameter[]] return[name[data]]
keyword[def] identifier[get_datetime_sorted_rows] ( identifier[dbconn] , identifier[table_name] , identifier[uuid] = keyword[None] , identifier[column] = keyword[None] ): literal[string] identifier[rows] = identifier[get_rows] ( identifier[dbconn] , identifier[table_name] , identifier[uuid] = identifier[uuid] ) identifier[data] =[] keyword[for] identifier[r] keyword[in] identifier[rows] : identifier[dt] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[r] [ literal[string] ], literal[string] ) keyword[if] identifier[column] keyword[is] keyword[None] : identifier[data] . identifier[append] (( identifier[dt] , identifier[r] )) keyword[else] : identifier[data] . identifier[append] (( identifier[dt] , identifier[r] [ identifier[column] ])) identifier[data] . identifier[sort] () keyword[return] identifier[data]
def get_datetime_sorted_rows(dbconn, table_name, uuid=None, column=None): """ Get a list of datetime sorted rows from a table in the database :param dbconn: database connection :param table_name: name of table in the database :param uuid: optional uuid to pull from :param column: optional column/field in the table to pull instead of rows :returns: a list of tuples containing (datetime, row) pairs or (datetime, column) pairs if columns is specified. """ rows = get_rows(dbconn, table_name, uuid=uuid) data = [] for r in rows: dt = datetime.datetime.strptime(r['Time'], '%d/%m/%Y %H:%M:%S') if column is None: data.append((dt, r)) # depends on [control=['if'], data=[]] else: data.append((dt, r[column])) # depends on [control=['for'], data=['r']] data.sort() return data
def generate_and_write_ecdsa_keypair(filepath=None, password=None): """ <Purpose> Generate an ECDSA keypair, where the encrypted key (using 'password' as the passphrase) is saved to <'filepath'>. The public key portion of the generated ECDSA key is saved to <'filepath'>.pub. If the filepath is not given, the KEYID is used as the filename and the keypair saved to the current working directory. The 'cryptography' library is currently supported. The private key is encrypted according to 'cryptography's approach: "Encrypt using the best available encryption for a given key's backend. This is a curated encryption choice and the algorithm may change over time." <Arguments> filepath: The public and private key files are saved to <filepath>.pub and <filepath>, respectively. If the filepath is not given, the public and private keys are saved to the current working directory as <KEYID>.pub and <KEYID>. KEYID is the generated key's KEYID. password: The password, or passphrase, to encrypt the private portion of the generated ECDSA key. A symmetric encryption key is derived from 'password', so it is not directly used. <Exceptions> securesystemslib.exceptions.FormatError, if the arguments are improperly formatted. securesystemslib.exceptions.CryptoError, if 'filepath' cannot be encrypted. <Side Effects> Writes key files to '<filepath>' and '<filepath>.pub'. <Returns> The 'filepath' of the written key. """ # Generate a new ECDSA key object. The 'cryptography' library is currently # supported and performs the actual cryptographic operations. ecdsa_key = securesystemslib.keys.generate_ecdsa_key() if not filepath: filepath = os.path.join(os.getcwd(), ecdsa_key['keyid']) else: logger.debug('The filepath has been specified. Not using the key\'s' ' KEYID as the default filepath.') # Does 'filepath' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. securesystemslib.formats.PATH_SCHEMA.check_match(filepath) # If the caller does not provide a password argument, prompt for one. if password is None: # pragma: no cover # It is safe to specify the full path of 'filepath' in the prompt and not # worry about leaking sensitive information about the key's location. # However, care should be taken when including the full path in exceptions # and log files. password = get_password('Enter a password for the ECDSA' ' key (' + Fore.RED + filepath + Fore.RESET + '): ', confirm=True) else: logger.debug('The password has been specified. Not prompting for one') # Does 'password' have the correct format? securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) # If the parent directory of filepath does not exist, # create it (and all its parent directories, if necessary). securesystemslib.util.ensure_parent_dir(filepath) # Create a temporary file, write the contents of the public key, and move # to final destination. file_object = securesystemslib.util.TempFile() # Generate the ECDSA public key file contents in metadata format (i.e., does # not include the keyid portion). keytype = ecdsa_key['keytype'] keyval = ecdsa_key['keyval'] scheme = ecdsa_key['scheme'] ecdsakey_metadata_format = securesystemslib.keys.format_keyval_to_metadata( keytype, scheme, keyval, private=False) file_object.write(json.dumps(ecdsakey_metadata_format).encode('utf-8')) # Write the public key (i.e., 'public', which is in PEM format) to # '<filepath>.pub'. (1) Create a temporary file, (2) write the contents of # the public key, and (3) move to final destination. file_object.move(filepath + '.pub') # Write the encrypted key string, conformant to # 'securesystemslib.formats.ENCRYPTEDKEY_SCHEMA', to '<filepath>'. file_object = securesystemslib.util.TempFile() # Raise 'securesystemslib.exceptions.CryptoError' if 'ecdsa_key' cannot be # encrypted. encrypted_key = securesystemslib.keys.encrypt_key(ecdsa_key, password) file_object.write(encrypted_key.encode('utf-8')) file_object.move(filepath) return filepath
def function[generate_and_write_ecdsa_keypair, parameter[filepath, password]]: constant[ <Purpose> Generate an ECDSA keypair, where the encrypted key (using 'password' as the passphrase) is saved to <'filepath'>. The public key portion of the generated ECDSA key is saved to <'filepath'>.pub. If the filepath is not given, the KEYID is used as the filename and the keypair saved to the current working directory. The 'cryptography' library is currently supported. The private key is encrypted according to 'cryptography's approach: "Encrypt using the best available encryption for a given key's backend. This is a curated encryption choice and the algorithm may change over time." <Arguments> filepath: The public and private key files are saved to <filepath>.pub and <filepath>, respectively. If the filepath is not given, the public and private keys are saved to the current working directory as <KEYID>.pub and <KEYID>. KEYID is the generated key's KEYID. password: The password, or passphrase, to encrypt the private portion of the generated ECDSA key. A symmetric encryption key is derived from 'password', so it is not directly used. <Exceptions> securesystemslib.exceptions.FormatError, if the arguments are improperly formatted. securesystemslib.exceptions.CryptoError, if 'filepath' cannot be encrypted. <Side Effects> Writes key files to '<filepath>' and '<filepath>.pub'. <Returns> The 'filepath' of the written key. ] variable[ecdsa_key] assign[=] call[name[securesystemslib].keys.generate_ecdsa_key, parameter[]] if <ast.UnaryOp object at 0x7da20e9b3f10> begin[:] variable[filepath] assign[=] call[name[os].path.join, parameter[call[name[os].getcwd, parameter[]], call[name[ecdsa_key]][constant[keyid]]]] call[name[securesystemslib].formats.PATH_SCHEMA.check_match, parameter[name[filepath]]] if compare[name[password] is constant[None]] begin[:] variable[password] assign[=] call[name[get_password], parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[Enter a password for the ECDSA key (] + name[Fore].RED] + name[filepath]] + name[Fore].RESET] + constant[): ]]]] call[name[securesystemslib].formats.PASSWORD_SCHEMA.check_match, parameter[name[password]]] call[name[securesystemslib].util.ensure_parent_dir, parameter[name[filepath]]] variable[file_object] assign[=] call[name[securesystemslib].util.TempFile, parameter[]] variable[keytype] assign[=] call[name[ecdsa_key]][constant[keytype]] variable[keyval] assign[=] call[name[ecdsa_key]][constant[keyval]] variable[scheme] assign[=] call[name[ecdsa_key]][constant[scheme]] variable[ecdsakey_metadata_format] assign[=] call[name[securesystemslib].keys.format_keyval_to_metadata, parameter[name[keytype], name[scheme], name[keyval]]] call[name[file_object].write, parameter[call[call[name[json].dumps, parameter[name[ecdsakey_metadata_format]]].encode, parameter[constant[utf-8]]]]] call[name[file_object].move, parameter[binary_operation[name[filepath] + constant[.pub]]]] variable[file_object] assign[=] call[name[securesystemslib].util.TempFile, parameter[]] variable[encrypted_key] assign[=] call[name[securesystemslib].keys.encrypt_key, parameter[name[ecdsa_key], name[password]]] call[name[file_object].write, parameter[call[name[encrypted_key].encode, parameter[constant[utf-8]]]]] call[name[file_object].move, parameter[name[filepath]]] return[name[filepath]]
keyword[def] identifier[generate_and_write_ecdsa_keypair] ( identifier[filepath] = keyword[None] , identifier[password] = keyword[None] ): literal[string] identifier[ecdsa_key] = identifier[securesystemslib] . identifier[keys] . identifier[generate_ecdsa_key] () keyword[if] keyword[not] identifier[filepath] : identifier[filepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getcwd] (), identifier[ecdsa_key] [ literal[string] ]) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] literal[string] ) identifier[securesystemslib] . identifier[formats] . identifier[PATH_SCHEMA] . identifier[check_match] ( identifier[filepath] ) keyword[if] identifier[password] keyword[is] keyword[None] : identifier[password] = identifier[get_password] ( literal[string] literal[string] + identifier[Fore] . identifier[RED] + identifier[filepath] + identifier[Fore] . identifier[RESET] + literal[string] , identifier[confirm] = keyword[True] ) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] ) identifier[securesystemslib] . identifier[formats] . identifier[PASSWORD_SCHEMA] . identifier[check_match] ( identifier[password] ) identifier[securesystemslib] . identifier[util] . identifier[ensure_parent_dir] ( identifier[filepath] ) identifier[file_object] = identifier[securesystemslib] . identifier[util] . identifier[TempFile] () identifier[keytype] = identifier[ecdsa_key] [ literal[string] ] identifier[keyval] = identifier[ecdsa_key] [ literal[string] ] identifier[scheme] = identifier[ecdsa_key] [ literal[string] ] identifier[ecdsakey_metadata_format] = identifier[securesystemslib] . identifier[keys] . identifier[format_keyval_to_metadata] ( identifier[keytype] , identifier[scheme] , identifier[keyval] , identifier[private] = keyword[False] ) identifier[file_object] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[ecdsakey_metadata_format] ). identifier[encode] ( literal[string] )) identifier[file_object] . identifier[move] ( identifier[filepath] + literal[string] ) identifier[file_object] = identifier[securesystemslib] . identifier[util] . identifier[TempFile] () identifier[encrypted_key] = identifier[securesystemslib] . identifier[keys] . identifier[encrypt_key] ( identifier[ecdsa_key] , identifier[password] ) identifier[file_object] . identifier[write] ( identifier[encrypted_key] . identifier[encode] ( literal[string] )) identifier[file_object] . identifier[move] ( identifier[filepath] ) keyword[return] identifier[filepath]
def generate_and_write_ecdsa_keypair(filepath=None, password=None): """ <Purpose> Generate an ECDSA keypair, where the encrypted key (using 'password' as the passphrase) is saved to <'filepath'>. The public key portion of the generated ECDSA key is saved to <'filepath'>.pub. If the filepath is not given, the KEYID is used as the filename and the keypair saved to the current working directory. The 'cryptography' library is currently supported. The private key is encrypted according to 'cryptography's approach: "Encrypt using the best available encryption for a given key's backend. This is a curated encryption choice and the algorithm may change over time." <Arguments> filepath: The public and private key files are saved to <filepath>.pub and <filepath>, respectively. If the filepath is not given, the public and private keys are saved to the current working directory as <KEYID>.pub and <KEYID>. KEYID is the generated key's KEYID. password: The password, or passphrase, to encrypt the private portion of the generated ECDSA key. A symmetric encryption key is derived from 'password', so it is not directly used. <Exceptions> securesystemslib.exceptions.FormatError, if the arguments are improperly formatted. securesystemslib.exceptions.CryptoError, if 'filepath' cannot be encrypted. <Side Effects> Writes key files to '<filepath>' and '<filepath>.pub'. <Returns> The 'filepath' of the written key. """ # Generate a new ECDSA key object. The 'cryptography' library is currently # supported and performs the actual cryptographic operations. ecdsa_key = securesystemslib.keys.generate_ecdsa_key() if not filepath: filepath = os.path.join(os.getcwd(), ecdsa_key['keyid']) # depends on [control=['if'], data=[]] else: logger.debug("The filepath has been specified. Not using the key's KEYID as the default filepath.") # Does 'filepath' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. securesystemslib.formats.PATH_SCHEMA.check_match(filepath) # If the caller does not provide a password argument, prompt for one. if password is None: # pragma: no cover # It is safe to specify the full path of 'filepath' in the prompt and not # worry about leaking sensitive information about the key's location. # However, care should be taken when including the full path in exceptions # and log files. password = get_password('Enter a password for the ECDSA key (' + Fore.RED + filepath + Fore.RESET + '): ', confirm=True) # depends on [control=['if'], data=['password']] else: logger.debug('The password has been specified. Not prompting for one') # Does 'password' have the correct format? securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) # If the parent directory of filepath does not exist, # create it (and all its parent directories, if necessary). securesystemslib.util.ensure_parent_dir(filepath) # Create a temporary file, write the contents of the public key, and move # to final destination. file_object = securesystemslib.util.TempFile() # Generate the ECDSA public key file contents in metadata format (i.e., does # not include the keyid portion). keytype = ecdsa_key['keytype'] keyval = ecdsa_key['keyval'] scheme = ecdsa_key['scheme'] ecdsakey_metadata_format = securesystemslib.keys.format_keyval_to_metadata(keytype, scheme, keyval, private=False) file_object.write(json.dumps(ecdsakey_metadata_format).encode('utf-8')) # Write the public key (i.e., 'public', which is in PEM format) to # '<filepath>.pub'. (1) Create a temporary file, (2) write the contents of # the public key, and (3) move to final destination. file_object.move(filepath + '.pub') # Write the encrypted key string, conformant to # 'securesystemslib.formats.ENCRYPTEDKEY_SCHEMA', to '<filepath>'. file_object = securesystemslib.util.TempFile() # Raise 'securesystemslib.exceptions.CryptoError' if 'ecdsa_key' cannot be # encrypted. encrypted_key = securesystemslib.keys.encrypt_key(ecdsa_key, password) file_object.write(encrypted_key.encode('utf-8')) file_object.move(filepath) return filepath
def _cache_get_last_in_slice(url_dict, start_int, total_int, authn_subj_list): """Return None if cache entry does not exist.""" key_str = _gen_cache_key_for_slice(url_dict, start_int, total_int, authn_subj_list) # TODO: Django docs state that cache.get() should return None on unknown key. try: last_ts_tup = django.core.cache.cache.get(key_str) except KeyError: last_ts_tup = None logging.debug('Cache get. key="{}" -> last_ts_tup={}'.format(key_str, last_ts_tup)) return last_ts_tup
def function[_cache_get_last_in_slice, parameter[url_dict, start_int, total_int, authn_subj_list]]: constant[Return None if cache entry does not exist.] variable[key_str] assign[=] call[name[_gen_cache_key_for_slice], parameter[name[url_dict], name[start_int], name[total_int], name[authn_subj_list]]] <ast.Try object at 0x7da1b1b6bbb0> call[name[logging].debug, parameter[call[constant[Cache get. key="{}" -> last_ts_tup={}].format, parameter[name[key_str], name[last_ts_tup]]]]] return[name[last_ts_tup]]
keyword[def] identifier[_cache_get_last_in_slice] ( identifier[url_dict] , identifier[start_int] , identifier[total_int] , identifier[authn_subj_list] ): literal[string] identifier[key_str] = identifier[_gen_cache_key_for_slice] ( identifier[url_dict] , identifier[start_int] , identifier[total_int] , identifier[authn_subj_list] ) keyword[try] : identifier[last_ts_tup] = identifier[django] . identifier[core] . identifier[cache] . identifier[cache] . identifier[get] ( identifier[key_str] ) keyword[except] identifier[KeyError] : identifier[last_ts_tup] = keyword[None] identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[key_str] , identifier[last_ts_tup] )) keyword[return] identifier[last_ts_tup]
def _cache_get_last_in_slice(url_dict, start_int, total_int, authn_subj_list): """Return None if cache entry does not exist.""" key_str = _gen_cache_key_for_slice(url_dict, start_int, total_int, authn_subj_list) # TODO: Django docs state that cache.get() should return None on unknown key. try: last_ts_tup = django.core.cache.cache.get(key_str) # depends on [control=['try'], data=[]] except KeyError: last_ts_tup = None # depends on [control=['except'], data=[]] logging.debug('Cache get. key="{}" -> last_ts_tup={}'.format(key_str, last_ts_tup)) return last_ts_tup
def inline(root_text, base_dir="", replacer=None, ifexists_replacer=None): """Inline all input latex files. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- root_txt : unicode Text to process (and include in-lined files). base_dir : str Base directory of file containing ``root_text``. Defaults to the current working directory. replacer : function Function called by :func:`re.sub` to replace ``\input`` expressions with a latex document. Changeable only for testing purposes. ifexists_replacer : function Function called by :func:`re.sub` to replace ``\InputIfExists`` expressions with a latex document. Changeable only for testing purposes. Returns ------- txt : unicode Text with referenced files included. """ def _sub_line(match): """Function to be used with re.sub to inline files for each match.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname full_path = os.path.abspath(os.path.join(base_dir, full_fname)) try: with codecs.open(full_path, 'r', encoding='utf-8') as f: included_text = f.read() except IOError: # TODO actually do logging here print("Cannot open {0} for in-lining".format(full_path)) return u"" else: # Recursively inline files included_text = inline(included_text, base_dir=base_dir) return included_text def _sub_line_ifexists(match): """Function to be used with re.sub for the input_ifexists_pattern.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname full_path = os.path.abspath(os.path.join(base_dir, full_fname)) if os.path.exists(full_path): with codecs.open(full_path, 'r', encoding='utf-8') as f: included_text = f.read() # Append extra info after input included_text = "\n".join((included_text, match.group(2))) else: # Use the fall-back clause in InputIfExists included_text = match.group(3) # Recursively inline files included_text = inline(included_text, base_dir=base_dir) return included_text # Text processing pipline result = remove_comments(root_text) result = input_pattern.sub(_sub_line, result) result = include_pattern.sub(_sub_line, result) result = input_ifexists_pattern.sub(_sub_line_ifexists, result) return result
def function[inline, parameter[root_text, base_dir, replacer, ifexists_replacer]]: constant[Inline all input latex files. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- root_txt : unicode Text to process (and include in-lined files). base_dir : str Base directory of file containing ``root_text``. Defaults to the current working directory. replacer : function Function called by :func:`re.sub` to replace ``\input`` expressions with a latex document. Changeable only for testing purposes. ifexists_replacer : function Function called by :func:`re.sub` to replace ``\InputIfExists`` expressions with a latex document. Changeable only for testing purposes. Returns ------- txt : unicode Text with referenced files included. ] def function[_sub_line, parameter[match]]: constant[Function to be used with re.sub to inline files for each match.] variable[fname] assign[=] call[name[match].group, parameter[constant[1]]] if <ast.UnaryOp object at 0x7da207f9bf10> begin[:] variable[full_fname] assign[=] call[constant[.].join, parameter[tuple[[<ast.Name object at 0x7da207f9ae90>, <ast.Constant object at 0x7da207f9b910>]]]] variable[full_path] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[name[base_dir], name[full_fname]]]]] <ast.Try object at 0x7da207f98790> def function[_sub_line_ifexists, parameter[match]]: constant[Function to be used with re.sub for the input_ifexists_pattern.] variable[fname] assign[=] call[name[match].group, parameter[constant[1]]] if <ast.UnaryOp object at 0x7da207f9a2c0> begin[:] variable[full_fname] assign[=] call[constant[.].join, parameter[tuple[[<ast.Name object at 0x7da1b2345ae0>, <ast.Constant object at 0x7da1b2347670>]]]] variable[full_path] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[name[base_dir], name[full_fname]]]]] if call[name[os].path.exists, parameter[name[full_path]]] begin[:] with call[name[codecs].open, parameter[name[full_path], constant[r]]] begin[:] variable[included_text] assign[=] call[name[f].read, parameter[]] variable[included_text] assign[=] call[constant[ ].join, parameter[tuple[[<ast.Name object at 0x7da1b2347cd0>, <ast.Call object at 0x7da1b2347ee0>]]]] variable[included_text] assign[=] call[name[inline], parameter[name[included_text]]] return[name[included_text]] variable[result] assign[=] call[name[remove_comments], parameter[name[root_text]]] variable[result] assign[=] call[name[input_pattern].sub, parameter[name[_sub_line], name[result]]] variable[result] assign[=] call[name[include_pattern].sub, parameter[name[_sub_line], name[result]]] variable[result] assign[=] call[name[input_ifexists_pattern].sub, parameter[name[_sub_line_ifexists], name[result]]] return[name[result]]
keyword[def] identifier[inline] ( identifier[root_text] , identifier[base_dir] = literal[string] , identifier[replacer] = keyword[None] , identifier[ifexists_replacer] = keyword[None] ): literal[string] keyword[def] identifier[_sub_line] ( identifier[match] ): literal[string] identifier[fname] = identifier[match] . identifier[group] ( literal[int] ) keyword[if] keyword[not] identifier[fname] . identifier[endswith] ( literal[string] ): identifier[full_fname] = literal[string] . identifier[join] (( identifier[fname] , literal[string] )) keyword[else] : identifier[full_fname] = identifier[fname] identifier[full_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , identifier[full_fname] )) keyword[try] : keyword[with] identifier[codecs] . identifier[open] ( identifier[full_path] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] : identifier[included_text] = identifier[f] . identifier[read] () keyword[except] identifier[IOError] : identifier[print] ( literal[string] . identifier[format] ( identifier[full_path] )) keyword[return] literal[string] keyword[else] : identifier[included_text] = identifier[inline] ( identifier[included_text] , identifier[base_dir] = identifier[base_dir] ) keyword[return] identifier[included_text] keyword[def] identifier[_sub_line_ifexists] ( identifier[match] ): literal[string] identifier[fname] = identifier[match] . identifier[group] ( literal[int] ) keyword[if] keyword[not] identifier[fname] . identifier[endswith] ( literal[string] ): identifier[full_fname] = literal[string] . identifier[join] (( identifier[fname] , literal[string] )) keyword[else] : identifier[full_fname] = identifier[fname] identifier[full_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , identifier[full_fname] )) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[full_path] ): keyword[with] identifier[codecs] . identifier[open] ( identifier[full_path] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] : identifier[included_text] = identifier[f] . identifier[read] () identifier[included_text] = literal[string] . identifier[join] (( identifier[included_text] , identifier[match] . identifier[group] ( literal[int] ))) keyword[else] : identifier[included_text] = identifier[match] . identifier[group] ( literal[int] ) identifier[included_text] = identifier[inline] ( identifier[included_text] , identifier[base_dir] = identifier[base_dir] ) keyword[return] identifier[included_text] identifier[result] = identifier[remove_comments] ( identifier[root_text] ) identifier[result] = identifier[input_pattern] . identifier[sub] ( identifier[_sub_line] , identifier[result] ) identifier[result] = identifier[include_pattern] . identifier[sub] ( identifier[_sub_line] , identifier[result] ) identifier[result] = identifier[input_ifexists_pattern] . identifier[sub] ( identifier[_sub_line_ifexists] , identifier[result] ) keyword[return] identifier[result]
def inline(root_text, base_dir='', replacer=None, ifexists_replacer=None): """Inline all input latex files. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- root_txt : unicode Text to process (and include in-lined files). base_dir : str Base directory of file containing ``root_text``. Defaults to the current working directory. replacer : function Function called by :func:`re.sub` to replace ``\\input`` expressions with a latex document. Changeable only for testing purposes. ifexists_replacer : function Function called by :func:`re.sub` to replace ``\\InputIfExists`` expressions with a latex document. Changeable only for testing purposes. Returns ------- txt : unicode Text with referenced files included. """ def _sub_line(match): """Function to be used with re.sub to inline files for each match.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = '.'.join((fname, 'tex')) # depends on [control=['if'], data=[]] else: full_fname = fname full_path = os.path.abspath(os.path.join(base_dir, full_fname)) try: with codecs.open(full_path, 'r', encoding='utf-8') as f: included_text = f.read() # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]] except IOError: # TODO actually do logging here print('Cannot open {0} for in-lining'.format(full_path)) return u'' # depends on [control=['except'], data=[]] else: # Recursively inline files included_text = inline(included_text, base_dir=base_dir) return included_text def _sub_line_ifexists(match): """Function to be used with re.sub for the input_ifexists_pattern.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = '.'.join((fname, 'tex')) # depends on [control=['if'], data=[]] else: full_fname = fname full_path = os.path.abspath(os.path.join(base_dir, full_fname)) if os.path.exists(full_path): with codecs.open(full_path, 'r', encoding='utf-8') as f: included_text = f.read() # depends on [control=['with'], data=['f']] # Append extra info after input included_text = '\n'.join((included_text, match.group(2))) # depends on [control=['if'], data=[]] else: # Use the fall-back clause in InputIfExists included_text = match.group(3) # Recursively inline files included_text = inline(included_text, base_dir=base_dir) return included_text # Text processing pipline result = remove_comments(root_text) result = input_pattern.sub(_sub_line, result) result = include_pattern.sub(_sub_line, result) result = input_ifexists_pattern.sub(_sub_line_ifexists, result) return result
def is_attr_private(attrname: str) -> Optional[Match[str]]: """Check that attribute name is private (at least two leading underscores, at most one trailing underscore) """ regex = re.compile("^_{2,}.*[^_]+_?$") return regex.match(attrname)
def function[is_attr_private, parameter[attrname]]: constant[Check that attribute name is private (at least two leading underscores, at most one trailing underscore) ] variable[regex] assign[=] call[name[re].compile, parameter[constant[^_{2,}.*[^_]+_?$]]] return[call[name[regex].match, parameter[name[attrname]]]]
keyword[def] identifier[is_attr_private] ( identifier[attrname] : identifier[str] )-> identifier[Optional] [ identifier[Match] [ identifier[str] ]]: literal[string] identifier[regex] = identifier[re] . identifier[compile] ( literal[string] ) keyword[return] identifier[regex] . identifier[match] ( identifier[attrname] )
def is_attr_private(attrname: str) -> Optional[Match[str]]: """Check that attribute name is private (at least two leading underscores, at most one trailing underscore) """ regex = re.compile('^_{2,}.*[^_]+_?$') return regex.match(attrname)
def train(self, dataset): r"""Train model with given feature. Parameters ---------- X : array-like, shape=(n_samples, n_features) Train feature vector. Y : array-like, shape=(n_samples, n_labels) Target labels. Attributes ---------- clfs\_ : list of :py:mod:`libact.models` object instances Classifier instances. Returns ------- self : object Retuen self. """ X, Y = dataset.format_sklearn() X = np.array(X) Y = np.array(Y) self.n_labels_ = np.shape(Y)[1] self.n_features_ = np.shape(X)[1] self.clfs_ = [] for i in range(self.n_labels_): # TODO should we handle it here or we should handle it before calling if len(np.unique(Y[:, i])) == 1: clf = DummyClf() else: clf = copy.deepcopy(self.base_clf) self.clfs_.append(clf) Parallel(n_jobs=self.n_jobs, backend='threading')( delayed(_fit_model)(self.clfs_[i], X, Y[:, i]) for i in range(self.n_labels_)) #clf.train(Dataset(X, Y[:, i])) return self
def function[train, parameter[self, dataset]]: constant[Train model with given feature. Parameters ---------- X : array-like, shape=(n_samples, n_features) Train feature vector. Y : array-like, shape=(n_samples, n_labels) Target labels. Attributes ---------- clfs\_ : list of :py:mod:`libact.models` object instances Classifier instances. Returns ------- self : object Retuen self. ] <ast.Tuple object at 0x7da18eb551e0> assign[=] call[name[dataset].format_sklearn, parameter[]] variable[X] assign[=] call[name[np].array, parameter[name[X]]] variable[Y] assign[=] call[name[np].array, parameter[name[Y]]] name[self].n_labels_ assign[=] call[call[name[np].shape, parameter[name[Y]]]][constant[1]] name[self].n_features_ assign[=] call[call[name[np].shape, parameter[name[X]]]][constant[1]] name[self].clfs_ assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[name[self].n_labels_]]] begin[:] if compare[call[name[len], parameter[call[name[np].unique, parameter[call[name[Y]][tuple[[<ast.Slice object at 0x7da18eb55300>, <ast.Name object at 0x7da18eb54d90>]]]]]]] equal[==] constant[1]] begin[:] variable[clf] assign[=] call[name[DummyClf], parameter[]] call[name[self].clfs_.append, parameter[name[clf]]] call[call[name[Parallel], parameter[]], parameter[<ast.GeneratorExp object at 0x7da18eb579d0>]] return[name[self]]
keyword[def] identifier[train] ( identifier[self] , identifier[dataset] ): literal[string] identifier[X] , identifier[Y] = identifier[dataset] . identifier[format_sklearn] () identifier[X] = identifier[np] . identifier[array] ( identifier[X] ) identifier[Y] = identifier[np] . identifier[array] ( identifier[Y] ) identifier[self] . identifier[n_labels_] = identifier[np] . identifier[shape] ( identifier[Y] )[ literal[int] ] identifier[self] . identifier[n_features_] = identifier[np] . identifier[shape] ( identifier[X] )[ literal[int] ] identifier[self] . identifier[clfs_] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[n_labels_] ): keyword[if] identifier[len] ( identifier[np] . identifier[unique] ( identifier[Y] [:, identifier[i] ]))== literal[int] : identifier[clf] = identifier[DummyClf] () keyword[else] : identifier[clf] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[base_clf] ) identifier[self] . identifier[clfs_] . identifier[append] ( identifier[clf] ) identifier[Parallel] ( identifier[n_jobs] = identifier[self] . identifier[n_jobs] , identifier[backend] = literal[string] )( identifier[delayed] ( identifier[_fit_model] )( identifier[self] . identifier[clfs_] [ identifier[i] ], identifier[X] , identifier[Y] [:, identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[n_labels_] )) keyword[return] identifier[self]
def train(self, dataset): """Train model with given feature. Parameters ---------- X : array-like, shape=(n_samples, n_features) Train feature vector. Y : array-like, shape=(n_samples, n_labels) Target labels. Attributes ---------- clfs\\_ : list of :py:mod:`libact.models` object instances Classifier instances. Returns ------- self : object Retuen self. """ (X, Y) = dataset.format_sklearn() X = np.array(X) Y = np.array(Y) self.n_labels_ = np.shape(Y)[1] self.n_features_ = np.shape(X)[1] self.clfs_ = [] for i in range(self.n_labels_): # TODO should we handle it here or we should handle it before calling if len(np.unique(Y[:, i])) == 1: clf = DummyClf() # depends on [control=['if'], data=[]] else: clf = copy.deepcopy(self.base_clf) self.clfs_.append(clf) # depends on [control=['for'], data=['i']] Parallel(n_jobs=self.n_jobs, backend='threading')((delayed(_fit_model)(self.clfs_[i], X, Y[:, i]) for i in range(self.n_labels_))) #clf.train(Dataset(X, Y[:, i])) return self
def split(cls, entry): """Split a declaration name into a (declaration, subpath) tuple. Examples: >>> DeclarationSet.split('foo__bar') ('foo', 'bar') >>> DeclarationSet.split('foo') ('foo', None) >>> DeclarationSet.split('foo__bar__baz') ('foo', 'bar__baz') """ if enums.SPLITTER in entry: return entry.split(enums.SPLITTER, 1) else: return (entry, None)
def function[split, parameter[cls, entry]]: constant[Split a declaration name into a (declaration, subpath) tuple. Examples: >>> DeclarationSet.split('foo__bar') ('foo', 'bar') >>> DeclarationSet.split('foo') ('foo', None) >>> DeclarationSet.split('foo__bar__baz') ('foo', 'bar__baz') ] if compare[name[enums].SPLITTER in name[entry]] begin[:] return[call[name[entry].split, parameter[name[enums].SPLITTER, constant[1]]]]
keyword[def] identifier[split] ( identifier[cls] , identifier[entry] ): literal[string] keyword[if] identifier[enums] . identifier[SPLITTER] keyword[in] identifier[entry] : keyword[return] identifier[entry] . identifier[split] ( identifier[enums] . identifier[SPLITTER] , literal[int] ) keyword[else] : keyword[return] ( identifier[entry] , keyword[None] )
def split(cls, entry): """Split a declaration name into a (declaration, subpath) tuple. Examples: >>> DeclarationSet.split('foo__bar') ('foo', 'bar') >>> DeclarationSet.split('foo') ('foo', None) >>> DeclarationSet.split('foo__bar__baz') ('foo', 'bar__baz') """ if enums.SPLITTER in entry: return entry.split(enums.SPLITTER, 1) # depends on [control=['if'], data=['entry']] else: return (entry, None)
def log(self, name, val, **tags): """Log metric name with value val. You must include at least one tag as a kwarg""" global _last_timestamp, _last_metrics # do not allow .log after closing assert not self.done.is_set(), "worker thread has been closed" # check if valid metric name assert all(c in _valid_metric_chars for c in name), "invalid metric name " + name val = float(val) #Duck type to float/int, if possible. if int(val) == val: val = int(val) if self.host_tag and 'host' not in tags: tags['host'] = self.host_tag # get timestamp from system time, unless it's supplied as a tag timestamp = int(tags.pop('timestamp', time.time())) assert not self.done.is_set(), "tsdb object has been closed" assert tags != {}, "Need at least one tag" tagvals = ' '.join(['%s=%s' % (k, v) for k, v in tags.items()]) # OpenTSDB has major problems if you insert a data point with the same # metric, timestamp and tags. So we keep a temporary set of what points # we have sent for the last timestamp value. If we encounter a duplicate, # it is dropped. unique_str = "%s, %s, %s, %s, %s" % (name, timestamp, tagvals, self.host, self.port) if timestamp == _last_timestamp or _last_timestamp == None: if unique_str in _last_metrics: return # discard duplicate metrics else: _last_metrics.add(unique_str) else: _last_timestamp = timestamp _last_metrics.clear() line = "put %s %d %s %s\n" % (name, timestamp, val, tagvals) try: self.q.put(line, False) self.queued += 1 except queue.Full: print("potsdb - Warning: dropping oldest metric because Queue is full. Size: %s" % self.q.qsize(), file=sys.stderr) self.q.get() #Drop the oldest metric to make room self.q.put(line, False) return line
def function[log, parameter[self, name, val]]: constant[Log metric name with value val. You must include at least one tag as a kwarg] <ast.Global object at 0x7da18f58f460> assert[<ast.UnaryOp object at 0x7da18f58f220>] assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da18f58ee30>]]] variable[val] assign[=] call[name[float], parameter[name[val]]] if compare[call[name[int], parameter[name[val]]] equal[==] name[val]] begin[:] variable[val] assign[=] call[name[int], parameter[name[val]]] if <ast.BoolOp object at 0x7da18f58eb00> begin[:] call[name[tags]][constant[host]] assign[=] name[self].host_tag variable[timestamp] assign[=] call[name[int], parameter[call[name[tags].pop, parameter[constant[timestamp], call[name[time].time, parameter[]]]]]] assert[<ast.UnaryOp object at 0x7da18f58ca60>] assert[compare[name[tags] not_equal[!=] dictionary[[], []]]] variable[tagvals] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da18f58cd90>]] variable[unique_str] assign[=] binary_operation[constant[%s, %s, %s, %s, %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f811cf0>, <ast.Name object at 0x7da18f8104c0>, <ast.Name object at 0x7da18f810040>, <ast.Attribute object at 0x7da18f8119c0>, <ast.Attribute object at 0x7da18c4ce950>]]] if <ast.BoolOp object at 0x7da18c4cc820> begin[:] if compare[name[unique_str] in name[_last_metrics]] begin[:] return[None] variable[line] assign[=] binary_operation[constant[put %s %d %s %s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cdf00>, <ast.Name object at 0x7da18c4cd5d0>, <ast.Name object at 0x7da18c4ce350>, <ast.Name object at 0x7da18c4cfca0>]]] <ast.Try object at 0x7da18c4ccf10> return[name[line]]
keyword[def] identifier[log] ( identifier[self] , identifier[name] , identifier[val] ,** identifier[tags] ): literal[string] keyword[global] identifier[_last_timestamp] , identifier[_last_metrics] keyword[assert] keyword[not] identifier[self] . identifier[done] . identifier[is_set] (), literal[string] keyword[assert] identifier[all] ( identifier[c] keyword[in] identifier[_valid_metric_chars] keyword[for] identifier[c] keyword[in] identifier[name] ), literal[string] + identifier[name] identifier[val] = identifier[float] ( identifier[val] ) keyword[if] identifier[int] ( identifier[val] )== identifier[val] : identifier[val] = identifier[int] ( identifier[val] ) keyword[if] identifier[self] . identifier[host_tag] keyword[and] literal[string] keyword[not] keyword[in] identifier[tags] : identifier[tags] [ literal[string] ]= identifier[self] . identifier[host_tag] identifier[timestamp] = identifier[int] ( identifier[tags] . identifier[pop] ( literal[string] , identifier[time] . identifier[time] ())) keyword[assert] keyword[not] identifier[self] . identifier[done] . identifier[is_set] (), literal[string] keyword[assert] identifier[tags] !={}, literal[string] identifier[tagvals] = literal[string] . identifier[join] ([ literal[string] %( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[tags] . identifier[items] ()]) identifier[unique_str] = literal[string] %( identifier[name] , identifier[timestamp] , identifier[tagvals] , identifier[self] . identifier[host] , identifier[self] . identifier[port] ) keyword[if] identifier[timestamp] == identifier[_last_timestamp] keyword[or] identifier[_last_timestamp] == keyword[None] : keyword[if] identifier[unique_str] keyword[in] identifier[_last_metrics] : keyword[return] keyword[else] : identifier[_last_metrics] . identifier[add] ( identifier[unique_str] ) keyword[else] : identifier[_last_timestamp] = identifier[timestamp] identifier[_last_metrics] . identifier[clear] () identifier[line] = literal[string] %( identifier[name] , identifier[timestamp] , identifier[val] , identifier[tagvals] ) keyword[try] : identifier[self] . identifier[q] . identifier[put] ( identifier[line] , keyword[False] ) identifier[self] . identifier[queued] += literal[int] keyword[except] identifier[queue] . identifier[Full] : identifier[print] ( literal[string] % identifier[self] . identifier[q] . identifier[qsize] (), identifier[file] = identifier[sys] . identifier[stderr] ) identifier[self] . identifier[q] . identifier[get] () identifier[self] . identifier[q] . identifier[put] ( identifier[line] , keyword[False] ) keyword[return] identifier[line]
def log(self, name, val, **tags): """Log metric name with value val. You must include at least one tag as a kwarg""" global _last_timestamp, _last_metrics # do not allow .log after closing assert not self.done.is_set(), 'worker thread has been closed' # check if valid metric name assert all((c in _valid_metric_chars for c in name)), 'invalid metric name ' + name val = float(val) #Duck type to float/int, if possible. if int(val) == val: val = int(val) # depends on [control=['if'], data=['val']] if self.host_tag and 'host' not in tags: tags['host'] = self.host_tag # depends on [control=['if'], data=[]] # get timestamp from system time, unless it's supplied as a tag timestamp = int(tags.pop('timestamp', time.time())) assert not self.done.is_set(), 'tsdb object has been closed' assert tags != {}, 'Need at least one tag' tagvals = ' '.join(['%s=%s' % (k, v) for (k, v) in tags.items()]) # OpenTSDB has major problems if you insert a data point with the same # metric, timestamp and tags. So we keep a temporary set of what points # we have sent for the last timestamp value. If we encounter a duplicate, # it is dropped. unique_str = '%s, %s, %s, %s, %s' % (name, timestamp, tagvals, self.host, self.port) if timestamp == _last_timestamp or _last_timestamp == None: if unique_str in _last_metrics: return # discard duplicate metrics # depends on [control=['if'], data=[]] else: _last_metrics.add(unique_str) # depends on [control=['if'], data=[]] else: _last_timestamp = timestamp _last_metrics.clear() line = 'put %s %d %s %s\n' % (name, timestamp, val, tagvals) try: self.q.put(line, False) self.queued += 1 # depends on [control=['try'], data=[]] except queue.Full: print('potsdb - Warning: dropping oldest metric because Queue is full. Size: %s' % self.q.qsize(), file=sys.stderr) self.q.get() #Drop the oldest metric to make room self.q.put(line, False) # depends on [control=['except'], data=[]] return line
def list_ikepolicies(self, retrieve_all=True, **_params): """Fetches a list of all configured IKEPolicies for a project.""" return self.list('ikepolicies', self.ikepolicies_path, retrieve_all, **_params)
def function[list_ikepolicies, parameter[self, retrieve_all]]: constant[Fetches a list of all configured IKEPolicies for a project.] return[call[name[self].list, parameter[constant[ikepolicies], name[self].ikepolicies_path, name[retrieve_all]]]]
keyword[def] identifier[list_ikepolicies] ( identifier[self] , identifier[retrieve_all] = keyword[True] ,** identifier[_params] ): literal[string] keyword[return] identifier[self] . identifier[list] ( literal[string] , identifier[self] . identifier[ikepolicies_path] , identifier[retrieve_all] , ** identifier[_params] )
def list_ikepolicies(self, retrieve_all=True, **_params): """Fetches a list of all configured IKEPolicies for a project.""" return self.list('ikepolicies', self.ikepolicies_path, retrieve_all, **_params)
def segs2flags(seg_start, seg_end, seg_label, win_size): ''' This function converts segment endpoints and respective segment labels to fix-sized class labels. ARGUMENTS: - seg_start: segment start points (in seconds) - seg_end: segment endpoints (in seconds) - seg_label: segment labels - win_size: fix-sized window (in seconds) RETURNS: - flags: numpy array of class indices - class_names: list of classnames (strings) ''' flags = [] class_names = list(set(seg_label)) curPos = win_size / 2.0 while curPos < seg_end[-1]: for i in range(len(seg_start)): if curPos > seg_start[i] and curPos <= seg_end[i]: break flags.append(class_names.index(seg_label[i])) curPos += win_size return numpy.array(flags), class_names
def function[segs2flags, parameter[seg_start, seg_end, seg_label, win_size]]: constant[ This function converts segment endpoints and respective segment labels to fix-sized class labels. ARGUMENTS: - seg_start: segment start points (in seconds) - seg_end: segment endpoints (in seconds) - seg_label: segment labels - win_size: fix-sized window (in seconds) RETURNS: - flags: numpy array of class indices - class_names: list of classnames (strings) ] variable[flags] assign[=] list[[]] variable[class_names] assign[=] call[name[list], parameter[call[name[set], parameter[name[seg_label]]]]] variable[curPos] assign[=] binary_operation[name[win_size] / constant[2.0]] while compare[name[curPos] less[<] call[name[seg_end]][<ast.UnaryOp object at 0x7da18dc06830>]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[seg_start]]]]]] begin[:] if <ast.BoolOp object at 0x7da1b21a7a30> begin[:] break call[name[flags].append, parameter[call[name[class_names].index, parameter[call[name[seg_label]][name[i]]]]]] <ast.AugAssign object at 0x7da18f7215d0> return[tuple[[<ast.Call object at 0x7da1b21a7850>, <ast.Name object at 0x7da1b21a7b20>]]]
keyword[def] identifier[segs2flags] ( identifier[seg_start] , identifier[seg_end] , identifier[seg_label] , identifier[win_size] ): literal[string] identifier[flags] =[] identifier[class_names] = identifier[list] ( identifier[set] ( identifier[seg_label] )) identifier[curPos] = identifier[win_size] / literal[int] keyword[while] identifier[curPos] < identifier[seg_end] [- literal[int] ]: keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[seg_start] )): keyword[if] identifier[curPos] > identifier[seg_start] [ identifier[i] ] keyword[and] identifier[curPos] <= identifier[seg_end] [ identifier[i] ]: keyword[break] identifier[flags] . identifier[append] ( identifier[class_names] . identifier[index] ( identifier[seg_label] [ identifier[i] ])) identifier[curPos] += identifier[win_size] keyword[return] identifier[numpy] . identifier[array] ( identifier[flags] ), identifier[class_names]
def segs2flags(seg_start, seg_end, seg_label, win_size): """ This function converts segment endpoints and respective segment labels to fix-sized class labels. ARGUMENTS: - seg_start: segment start points (in seconds) - seg_end: segment endpoints (in seconds) - seg_label: segment labels - win_size: fix-sized window (in seconds) RETURNS: - flags: numpy array of class indices - class_names: list of classnames (strings) """ flags = [] class_names = list(set(seg_label)) curPos = win_size / 2.0 while curPos < seg_end[-1]: for i in range(len(seg_start)): if curPos > seg_start[i] and curPos <= seg_end[i]: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] flags.append(class_names.index(seg_label[i])) curPos += win_size # depends on [control=['while'], data=['curPos']] return (numpy.array(flags), class_names)
def get_time(self, instance): """ Return the current mission time for the specified instance. :rtype: ~datetime.datetime """ url = '/instances/{}'.format(instance) response = self.get_proto(url) message = yamcsManagement_pb2.YamcsInstance() message.ParseFromString(response.content) if message.HasField('missionTime'): return parse_isostring(message.missionTime) return None
def function[get_time, parameter[self, instance]]: constant[ Return the current mission time for the specified instance. :rtype: ~datetime.datetime ] variable[url] assign[=] call[constant[/instances/{}].format, parameter[name[instance]]] variable[response] assign[=] call[name[self].get_proto, parameter[name[url]]] variable[message] assign[=] call[name[yamcsManagement_pb2].YamcsInstance, parameter[]] call[name[message].ParseFromString, parameter[name[response].content]] if call[name[message].HasField, parameter[constant[missionTime]]] begin[:] return[call[name[parse_isostring], parameter[name[message].missionTime]]] return[constant[None]]
keyword[def] identifier[get_time] ( identifier[self] , identifier[instance] ): literal[string] identifier[url] = literal[string] . identifier[format] ( identifier[instance] ) identifier[response] = identifier[self] . identifier[get_proto] ( identifier[url] ) identifier[message] = identifier[yamcsManagement_pb2] . identifier[YamcsInstance] () identifier[message] . identifier[ParseFromString] ( identifier[response] . identifier[content] ) keyword[if] identifier[message] . identifier[HasField] ( literal[string] ): keyword[return] identifier[parse_isostring] ( identifier[message] . identifier[missionTime] ) keyword[return] keyword[None]
def get_time(self, instance): """ Return the current mission time for the specified instance. :rtype: ~datetime.datetime """ url = '/instances/{}'.format(instance) response = self.get_proto(url) message = yamcsManagement_pb2.YamcsInstance() message.ParseFromString(response.content) if message.HasField('missionTime'): return parse_isostring(message.missionTime) # depends on [control=['if'], data=[]] return None
def _get_maximal_adaptation(self, previous_weights): """! @brief Calculates maximum changes of weight in line with comparison between previous weights and current weights. @param[in] previous_weights (list): Weights from the previous step of learning process. @return (double) Value that represents maximum changes of weight after adaptation process. """ dimension = len(self._data[0]) maximal_adaptation = 0.0 for neuron_index in range(self._size): for dim in range(dimension): current_adaptation = previous_weights[neuron_index][dim] - self._weights[neuron_index][dim] if current_adaptation < 0: current_adaptation = -current_adaptation if maximal_adaptation < current_adaptation: maximal_adaptation = current_adaptation return maximal_adaptation
def function[_get_maximal_adaptation, parameter[self, previous_weights]]: constant[! @brief Calculates maximum changes of weight in line with comparison between previous weights and current weights. @param[in] previous_weights (list): Weights from the previous step of learning process. @return (double) Value that represents maximum changes of weight after adaptation process. ] variable[dimension] assign[=] call[name[len], parameter[call[name[self]._data][constant[0]]]] variable[maximal_adaptation] assign[=] constant[0.0] for taget[name[neuron_index]] in starred[call[name[range], parameter[name[self]._size]]] begin[:] for taget[name[dim]] in starred[call[name[range], parameter[name[dimension]]]] begin[:] variable[current_adaptation] assign[=] binary_operation[call[call[name[previous_weights]][name[neuron_index]]][name[dim]] - call[call[name[self]._weights][name[neuron_index]]][name[dim]]] if compare[name[current_adaptation] less[<] constant[0]] begin[:] variable[current_adaptation] assign[=] <ast.UnaryOp object at 0x7da1b0192200> if compare[name[maximal_adaptation] less[<] name[current_adaptation]] begin[:] variable[maximal_adaptation] assign[=] name[current_adaptation] return[name[maximal_adaptation]]
keyword[def] identifier[_get_maximal_adaptation] ( identifier[self] , identifier[previous_weights] ): literal[string] identifier[dimension] = identifier[len] ( identifier[self] . identifier[_data] [ literal[int] ]) identifier[maximal_adaptation] = literal[int] keyword[for] identifier[neuron_index] keyword[in] identifier[range] ( identifier[self] . identifier[_size] ): keyword[for] identifier[dim] keyword[in] identifier[range] ( identifier[dimension] ): identifier[current_adaptation] = identifier[previous_weights] [ identifier[neuron_index] ][ identifier[dim] ]- identifier[self] . identifier[_weights] [ identifier[neuron_index] ][ identifier[dim] ] keyword[if] identifier[current_adaptation] < literal[int] : identifier[current_adaptation] =- identifier[current_adaptation] keyword[if] identifier[maximal_adaptation] < identifier[current_adaptation] : identifier[maximal_adaptation] = identifier[current_adaptation] keyword[return] identifier[maximal_adaptation]
def _get_maximal_adaptation(self, previous_weights): """! @brief Calculates maximum changes of weight in line with comparison between previous weights and current weights. @param[in] previous_weights (list): Weights from the previous step of learning process. @return (double) Value that represents maximum changes of weight after adaptation process. """ dimension = len(self._data[0]) maximal_adaptation = 0.0 for neuron_index in range(self._size): for dim in range(dimension): current_adaptation = previous_weights[neuron_index][dim] - self._weights[neuron_index][dim] if current_adaptation < 0: current_adaptation = -current_adaptation # depends on [control=['if'], data=['current_adaptation']] if maximal_adaptation < current_adaptation: maximal_adaptation = current_adaptation # depends on [control=['if'], data=['maximal_adaptation', 'current_adaptation']] # depends on [control=['for'], data=['dim']] # depends on [control=['for'], data=['neuron_index']] return maximal_adaptation
def select_parser(self, request, resource): """ Select appropriate parser based on the request. :param request: the HTTP request :param resource: the invoked resource """ # 1. get from resource if resource.mapper: return resource.mapper # 2. get from content type mapper_name = self._get_name_from_content_type(request) if mapper_name: return self._get_mapper(mapper_name) # 3. get from url mapper_name = self._get_name_from_url(request) if mapper_name: return self._get_mapper(mapper_name) # 4. use resource's default if resource.default_mapper: return resource.default_mapper # 5. use manager's default return self._get_default_mapper()
def function[select_parser, parameter[self, request, resource]]: constant[ Select appropriate parser based on the request. :param request: the HTTP request :param resource: the invoked resource ] if name[resource].mapper begin[:] return[name[resource].mapper] variable[mapper_name] assign[=] call[name[self]._get_name_from_content_type, parameter[name[request]]] if name[mapper_name] begin[:] return[call[name[self]._get_mapper, parameter[name[mapper_name]]]] variable[mapper_name] assign[=] call[name[self]._get_name_from_url, parameter[name[request]]] if name[mapper_name] begin[:] return[call[name[self]._get_mapper, parameter[name[mapper_name]]]] if name[resource].default_mapper begin[:] return[name[resource].default_mapper] return[call[name[self]._get_default_mapper, parameter[]]]
keyword[def] identifier[select_parser] ( identifier[self] , identifier[request] , identifier[resource] ): literal[string] keyword[if] identifier[resource] . identifier[mapper] : keyword[return] identifier[resource] . identifier[mapper] identifier[mapper_name] = identifier[self] . identifier[_get_name_from_content_type] ( identifier[request] ) keyword[if] identifier[mapper_name] : keyword[return] identifier[self] . identifier[_get_mapper] ( identifier[mapper_name] ) identifier[mapper_name] = identifier[self] . identifier[_get_name_from_url] ( identifier[request] ) keyword[if] identifier[mapper_name] : keyword[return] identifier[self] . identifier[_get_mapper] ( identifier[mapper_name] ) keyword[if] identifier[resource] . identifier[default_mapper] : keyword[return] identifier[resource] . identifier[default_mapper] keyword[return] identifier[self] . identifier[_get_default_mapper] ()
def select_parser(self, request, resource): """ Select appropriate parser based on the request. :param request: the HTTP request :param resource: the invoked resource """ # 1. get from resource if resource.mapper: return resource.mapper # depends on [control=['if'], data=[]] # 2. get from content type mapper_name = self._get_name_from_content_type(request) if mapper_name: return self._get_mapper(mapper_name) # depends on [control=['if'], data=[]] # 3. get from url mapper_name = self._get_name_from_url(request) if mapper_name: return self._get_mapper(mapper_name) # depends on [control=['if'], data=[]] # 4. use resource's default if resource.default_mapper: return resource.default_mapper # depends on [control=['if'], data=[]] # 5. use manager's default return self._get_default_mapper()
def menu_daily(self, building_id): """Get a menu object corresponding to the daily menu for the venue with building_id. :param building_id: A string representing the id of a building, e.g. "abc". >>> commons_today = din.menu_daily("593") """ today = str(datetime.date.today()) v2_response = DiningV2(self.bearer, self.token).menu(building_id, today) response = {'result_data': {'Document': {}}} response["result_data"]["Document"]["menudate"] = datetime.datetime.strptime(today, '%Y-%m-%d').strftime('%-m/%d/%Y') if building_id in VENUE_NAMES: response["result_data"]["Document"]["location"] = VENUE_NAMES[building_id] else: response["result_data"]["Document"]["location"] = v2_response["result_data"]["days"][0]["cafes"][building_id]["name"] response["result_data"]["Document"]["tblMenu"] = {"tblDayPart": get_meals(v2_response, building_id)} return response
def function[menu_daily, parameter[self, building_id]]: constant[Get a menu object corresponding to the daily menu for the venue with building_id. :param building_id: A string representing the id of a building, e.g. "abc". >>> commons_today = din.menu_daily("593") ] variable[today] assign[=] call[name[str], parameter[call[name[datetime].date.today, parameter[]]]] variable[v2_response] assign[=] call[call[name[DiningV2], parameter[name[self].bearer, name[self].token]].menu, parameter[name[building_id], name[today]]] variable[response] assign[=] dictionary[[<ast.Constant object at 0x7da2044c2320>], [<ast.Dict object at 0x7da2044c12a0>]] call[call[call[name[response]][constant[result_data]]][constant[Document]]][constant[menudate]] assign[=] call[call[name[datetime].datetime.strptime, parameter[name[today], constant[%Y-%m-%d]]].strftime, parameter[constant[%-m/%d/%Y]]] if compare[name[building_id] in name[VENUE_NAMES]] begin[:] call[call[call[name[response]][constant[result_data]]][constant[Document]]][constant[location]] assign[=] call[name[VENUE_NAMES]][name[building_id]] call[call[call[name[response]][constant[result_data]]][constant[Document]]][constant[tblMenu]] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c77f0>], [<ast.Call object at 0x7da20c6c7700>]] return[name[response]]
keyword[def] identifier[menu_daily] ( identifier[self] , identifier[building_id] ): literal[string] identifier[today] = identifier[str] ( identifier[datetime] . identifier[date] . identifier[today] ()) identifier[v2_response] = identifier[DiningV2] ( identifier[self] . identifier[bearer] , identifier[self] . identifier[token] ). identifier[menu] ( identifier[building_id] , identifier[today] ) identifier[response] ={ literal[string] :{ literal[string] :{}}} identifier[response] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[today] , literal[string] ). identifier[strftime] ( literal[string] ) keyword[if] identifier[building_id] keyword[in] identifier[VENUE_NAMES] : identifier[response] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[VENUE_NAMES] [ identifier[building_id] ] keyword[else] : identifier[response] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[v2_response] [ literal[string] ][ literal[string] ][ literal[int] ][ literal[string] ][ identifier[building_id] ][ literal[string] ] identifier[response] [ literal[string] ][ literal[string] ][ literal[string] ]={ literal[string] : identifier[get_meals] ( identifier[v2_response] , identifier[building_id] )} keyword[return] identifier[response]
def menu_daily(self, building_id): """Get a menu object corresponding to the daily menu for the venue with building_id. :param building_id: A string representing the id of a building, e.g. "abc". >>> commons_today = din.menu_daily("593") """ today = str(datetime.date.today()) v2_response = DiningV2(self.bearer, self.token).menu(building_id, today) response = {'result_data': {'Document': {}}} response['result_data']['Document']['menudate'] = datetime.datetime.strptime(today, '%Y-%m-%d').strftime('%-m/%d/%Y') if building_id in VENUE_NAMES: response['result_data']['Document']['location'] = VENUE_NAMES[building_id] # depends on [control=['if'], data=['building_id', 'VENUE_NAMES']] else: response['result_data']['Document']['location'] = v2_response['result_data']['days'][0]['cafes'][building_id]['name'] response['result_data']['Document']['tblMenu'] = {'tblDayPart': get_meals(v2_response, building_id)} return response
def ip_rtm_config_router_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def") rtm_config = ET.SubElement(ip, "rtm-config", xmlns="urn:brocade.com:mgmt:brocade-rtm") router_id = ET.SubElement(rtm_config, "router-id") router_id.text = kwargs.pop('router_id') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[ip_rtm_config_router_id, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[ip] assign[=] call[name[ET].SubElement, parameter[name[config], constant[ip]]] variable[rtm_config] assign[=] call[name[ET].SubElement, parameter[name[ip], constant[rtm-config]]] variable[router_id] assign[=] call[name[ET].SubElement, parameter[name[rtm_config], constant[router-id]]] name[router_id].text assign[=] call[name[kwargs].pop, parameter[constant[router_id]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[ip_rtm_config_router_id] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[ip] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[rtm_config] = identifier[ET] . identifier[SubElement] ( identifier[ip] , literal[string] , identifier[xmlns] = literal[string] ) identifier[router_id] = identifier[ET] . identifier[SubElement] ( identifier[rtm_config] , literal[string] ) identifier[router_id] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def ip_rtm_config_router_id(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') ip = ET.SubElement(config, 'ip', xmlns='urn:brocade.com:mgmt:brocade-common-def') rtm_config = ET.SubElement(ip, 'rtm-config', xmlns='urn:brocade.com:mgmt:brocade-rtm') router_id = ET.SubElement(rtm_config, 'router-id') router_id.text = kwargs.pop('router_id') callback = kwargs.pop('callback', self._callback) return callback(config)
def write_outro (self): """Write outro comments.""" self.stoptime = time.time() duration = self.stoptime - self.starttime self.comment(_("Stopped checking at %(time)s (%(duration)s)") % {"time": strformat.strtime(self.stoptime), "duration": strformat.strduration_long(duration)})
def function[write_outro, parameter[self]]: constant[Write outro comments.] name[self].stoptime assign[=] call[name[time].time, parameter[]] variable[duration] assign[=] binary_operation[name[self].stoptime - name[self].starttime] call[name[self].comment, parameter[binary_operation[call[name[_], parameter[constant[Stopped checking at %(time)s (%(duration)s)]]] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b0ab9f00>, <ast.Constant object at 0x7da1b0ab9f60>], [<ast.Call object at 0x7da1b0ab80a0>, <ast.Call object at 0x7da1b0ab8820>]]]]]
keyword[def] identifier[write_outro] ( identifier[self] ): literal[string] identifier[self] . identifier[stoptime] = identifier[time] . identifier[time] () identifier[duration] = identifier[self] . identifier[stoptime] - identifier[self] . identifier[starttime] identifier[self] . identifier[comment] ( identifier[_] ( literal[string] )% { literal[string] : identifier[strformat] . identifier[strtime] ( identifier[self] . identifier[stoptime] ), literal[string] : identifier[strformat] . identifier[strduration_long] ( identifier[duration] )})
def write_outro(self): """Write outro comments.""" self.stoptime = time.time() duration = self.stoptime - self.starttime self.comment(_('Stopped checking at %(time)s (%(duration)s)') % {'time': strformat.strtime(self.stoptime), 'duration': strformat.strduration_long(duration)})
def key(username, key, all): """Create an admin API key.""" if username and username not in current_app.config['ADMIN_USERS']: raise click.UsageError('User {} not an admin'.format(username)) def create_key(admin, key): key = ApiKey( user=admin, key=key, scopes=[Scope.admin, Scope.write, Scope.read], text='Admin key created by alertad script', expire_time=None ) try: db.get_db() # init db on global app context key = key.create() except Exception as e: click.echo('ERROR: {}'.format(e)) else: click.echo('{} {}'.format(key.key, key.user)) if all: for admin in current_app.config['ADMIN_USERS']: create_key(admin, key) elif username: create_key(username, key) else: raise click.UsageError("Must set '--username' or use '--all'")
def function[key, parameter[username, key, all]]: constant[Create an admin API key.] if <ast.BoolOp object at 0x7da2054a5720> begin[:] <ast.Raise object at 0x7da2054a44c0> def function[create_key, parameter[admin, key]]: variable[key] assign[=] call[name[ApiKey], parameter[]] <ast.Try object at 0x7da2054a79a0> if name[all] begin[:] for taget[name[admin]] in starred[call[name[current_app].config][constant[ADMIN_USERS]]] begin[:] call[name[create_key], parameter[name[admin], name[key]]]
keyword[def] identifier[key] ( identifier[username] , identifier[key] , identifier[all] ): literal[string] keyword[if] identifier[username] keyword[and] identifier[username] keyword[not] keyword[in] identifier[current_app] . identifier[config] [ literal[string] ]: keyword[raise] identifier[click] . identifier[UsageError] ( literal[string] . identifier[format] ( identifier[username] )) keyword[def] identifier[create_key] ( identifier[admin] , identifier[key] ): identifier[key] = identifier[ApiKey] ( identifier[user] = identifier[admin] , identifier[key] = identifier[key] , identifier[scopes] =[ identifier[Scope] . identifier[admin] , identifier[Scope] . identifier[write] , identifier[Scope] . identifier[read] ], identifier[text] = literal[string] , identifier[expire_time] = keyword[None] ) keyword[try] : identifier[db] . identifier[get_db] () identifier[key] = identifier[key] . identifier[create] () keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[e] )) keyword[else] : identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[key] . identifier[key] , identifier[key] . identifier[user] )) keyword[if] identifier[all] : keyword[for] identifier[admin] keyword[in] identifier[current_app] . identifier[config] [ literal[string] ]: identifier[create_key] ( identifier[admin] , identifier[key] ) keyword[elif] identifier[username] : identifier[create_key] ( identifier[username] , identifier[key] ) keyword[else] : keyword[raise] identifier[click] . identifier[UsageError] ( literal[string] )
def key(username, key, all): """Create an admin API key.""" if username and username not in current_app.config['ADMIN_USERS']: raise click.UsageError('User {} not an admin'.format(username)) # depends on [control=['if'], data=[]] def create_key(admin, key): key = ApiKey(user=admin, key=key, scopes=[Scope.admin, Scope.write, Scope.read], text='Admin key created by alertad script', expire_time=None) try: db.get_db() # init db on global app context key = key.create() # depends on [control=['try'], data=[]] except Exception as e: click.echo('ERROR: {}'.format(e)) # depends on [control=['except'], data=['e']] else: click.echo('{} {}'.format(key.key, key.user)) if all: for admin in current_app.config['ADMIN_USERS']: create_key(admin, key) # depends on [control=['for'], data=['admin']] # depends on [control=['if'], data=[]] elif username: create_key(username, key) # depends on [control=['if'], data=[]] else: raise click.UsageError("Must set '--username' or use '--all'")
def double_ell_distance (mjr0, mnr0, pa0, mjr1, mnr1, pa1, dx, dy): """Given two ellipses separated by *dx* and *dy*, compute their separation in terms of σ. Based on Pineau et al (2011A&A...527A.126P). The "0" ellipse is taken to be centered at (0, 0), while the "1" ellipse is centered at (dx, dy). """ # 1. We need to rotate the frame so that ellipse 1 lies on the X axis. theta = -np.arctan2 (dy, dx) # 2. We also need to express these rotated ellipses in "biv" format. sx0, sy0, cxy0 = ellbiv (mjr0, mnr0, pa0 + theta) sx1, sy1, cxy1 = ellbiv (mjr1, mnr1, pa1 + theta) # 3. Their convolution is: sx, sy, cxy = bivconvolve (sx0, sy0, cxy0, sx1, sy1, cxy1) # 4. The separation between the centers is still just: d = np.sqrt (dx**2 + dy**2) # 5. The effective sigma in the purely X direction, taking into account # the covariance term, is: sigma_eff = sx * np.sqrt (1 - (cxy / (sx * sy))**2) # 6. Therefore the answer is: return d / sigma_eff
def function[double_ell_distance, parameter[mjr0, mnr0, pa0, mjr1, mnr1, pa1, dx, dy]]: constant[Given two ellipses separated by *dx* and *dy*, compute their separation in terms of σ. Based on Pineau et al (2011A&A...527A.126P). The "0" ellipse is taken to be centered at (0, 0), while the "1" ellipse is centered at (dx, dy). ] variable[theta] assign[=] <ast.UnaryOp object at 0x7da1b2644970> <ast.Tuple object at 0x7da1b2647520> assign[=] call[name[ellbiv], parameter[name[mjr0], name[mnr0], binary_operation[name[pa0] + name[theta]]]] <ast.Tuple object at 0x7da1b2644df0> assign[=] call[name[ellbiv], parameter[name[mjr1], name[mnr1], binary_operation[name[pa1] + name[theta]]]] <ast.Tuple object at 0x7da1b2646ce0> assign[=] call[name[bivconvolve], parameter[name[sx0], name[sy0], name[cxy0], name[sx1], name[sy1], name[cxy1]]] variable[d] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[dx] ** constant[2]] + binary_operation[name[dy] ** constant[2]]]]] variable[sigma_eff] assign[=] binary_operation[name[sx] * call[name[np].sqrt, parameter[binary_operation[constant[1] - binary_operation[binary_operation[name[cxy] / binary_operation[name[sx] * name[sy]]] ** constant[2]]]]]] return[binary_operation[name[d] / name[sigma_eff]]]
keyword[def] identifier[double_ell_distance] ( identifier[mjr0] , identifier[mnr0] , identifier[pa0] , identifier[mjr1] , identifier[mnr1] , identifier[pa1] , identifier[dx] , identifier[dy] ): literal[string] identifier[theta] =- identifier[np] . identifier[arctan2] ( identifier[dy] , identifier[dx] ) identifier[sx0] , identifier[sy0] , identifier[cxy0] = identifier[ellbiv] ( identifier[mjr0] , identifier[mnr0] , identifier[pa0] + identifier[theta] ) identifier[sx1] , identifier[sy1] , identifier[cxy1] = identifier[ellbiv] ( identifier[mjr1] , identifier[mnr1] , identifier[pa1] + identifier[theta] ) identifier[sx] , identifier[sy] , identifier[cxy] = identifier[bivconvolve] ( identifier[sx0] , identifier[sy0] , identifier[cxy0] , identifier[sx1] , identifier[sy1] , identifier[cxy1] ) identifier[d] = identifier[np] . identifier[sqrt] ( identifier[dx] ** literal[int] + identifier[dy] ** literal[int] ) identifier[sigma_eff] = identifier[sx] * identifier[np] . identifier[sqrt] ( literal[int] -( identifier[cxy] /( identifier[sx] * identifier[sy] ))** literal[int] ) keyword[return] identifier[d] / identifier[sigma_eff]
def double_ell_distance(mjr0, mnr0, pa0, mjr1, mnr1, pa1, dx, dy): """Given two ellipses separated by *dx* and *dy*, compute their separation in terms of σ. Based on Pineau et al (2011A&A...527A.126P). The "0" ellipse is taken to be centered at (0, 0), while the "1" ellipse is centered at (dx, dy). """ # 1. We need to rotate the frame so that ellipse 1 lies on the X axis. theta = -np.arctan2(dy, dx) # 2. We also need to express these rotated ellipses in "biv" format. (sx0, sy0, cxy0) = ellbiv(mjr0, mnr0, pa0 + theta) (sx1, sy1, cxy1) = ellbiv(mjr1, mnr1, pa1 + theta) # 3. Their convolution is: (sx, sy, cxy) = bivconvolve(sx0, sy0, cxy0, sx1, sy1, cxy1) # 4. The separation between the centers is still just: d = np.sqrt(dx ** 2 + dy ** 2) # 5. The effective sigma in the purely X direction, taking into account # the covariance term, is: sigma_eff = sx * np.sqrt(1 - (cxy / (sx * sy)) ** 2) # 6. Therefore the answer is: return d / sigma_eff
def remove(self, word): """ Remove a word from the word frequency list Args: word (str): The word to remove """ self._dictionary.pop(word.lower()) self._update_dictionary()
def function[remove, parameter[self, word]]: constant[ Remove a word from the word frequency list Args: word (str): The word to remove ] call[name[self]._dictionary.pop, parameter[call[name[word].lower, parameter[]]]] call[name[self]._update_dictionary, parameter[]]
keyword[def] identifier[remove] ( identifier[self] , identifier[word] ): literal[string] identifier[self] . identifier[_dictionary] . identifier[pop] ( identifier[word] . identifier[lower] ()) identifier[self] . identifier[_update_dictionary] ()
def remove(self, word): """ Remove a word from the word frequency list Args: word (str): The word to remove """ self._dictionary.pop(word.lower()) self._update_dictionary()
def argmax(self, axis=None, skipna=True): """ Return an ndarray of the maximum argument indexer. Parameters ---------- axis : {None} Dummy argument for consistency with Series skipna : bool, default True See Also -------- numpy.ndarray.argmax """ nv.validate_minmax_axis(axis) return nanops.nanargmax(self._values, skipna=skipna)
def function[argmax, parameter[self, axis, skipna]]: constant[ Return an ndarray of the maximum argument indexer. Parameters ---------- axis : {None} Dummy argument for consistency with Series skipna : bool, default True See Also -------- numpy.ndarray.argmax ] call[name[nv].validate_minmax_axis, parameter[name[axis]]] return[call[name[nanops].nanargmax, parameter[name[self]._values]]]
keyword[def] identifier[argmax] ( identifier[self] , identifier[axis] = keyword[None] , identifier[skipna] = keyword[True] ): literal[string] identifier[nv] . identifier[validate_minmax_axis] ( identifier[axis] ) keyword[return] identifier[nanops] . identifier[nanargmax] ( identifier[self] . identifier[_values] , identifier[skipna] = identifier[skipna] )
def argmax(self, axis=None, skipna=True): """ Return an ndarray of the maximum argument indexer. Parameters ---------- axis : {None} Dummy argument for consistency with Series skipna : bool, default True See Also -------- numpy.ndarray.argmax """ nv.validate_minmax_axis(axis) return nanops.nanargmax(self._values, skipna=skipna)
def _load_package_config(reload_=False): """Loads the package configurations from the global `acorn.cfg` file. """ global _packages from acorn.config import settings packset = settings("acorn", reload_) if packset.has_section("acorn.packages"): for package, value in packset.items("acorn.packages"): _packages[package] = value.strip() == "1"
def function[_load_package_config, parameter[reload_]]: constant[Loads the package configurations from the global `acorn.cfg` file. ] <ast.Global object at 0x7da1b15a12d0> from relative_module[acorn.config] import module[settings] variable[packset] assign[=] call[name[settings], parameter[constant[acorn], name[reload_]]] if call[name[packset].has_section, parameter[constant[acorn.packages]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b15a3dc0>, <ast.Name object at 0x7da1b15a1c00>]]] in starred[call[name[packset].items, parameter[constant[acorn.packages]]]] begin[:] call[name[_packages]][name[package]] assign[=] compare[call[name[value].strip, parameter[]] equal[==] constant[1]]
keyword[def] identifier[_load_package_config] ( identifier[reload_] = keyword[False] ): literal[string] keyword[global] identifier[_packages] keyword[from] identifier[acorn] . identifier[config] keyword[import] identifier[settings] identifier[packset] = identifier[settings] ( literal[string] , identifier[reload_] ) keyword[if] identifier[packset] . identifier[has_section] ( literal[string] ): keyword[for] identifier[package] , identifier[value] keyword[in] identifier[packset] . identifier[items] ( literal[string] ): identifier[_packages] [ identifier[package] ]= identifier[value] . identifier[strip] ()== literal[string]
def _load_package_config(reload_=False): """Loads the package configurations from the global `acorn.cfg` file. """ global _packages from acorn.config import settings packset = settings('acorn', reload_) if packset.has_section('acorn.packages'): for (package, value) in packset.items('acorn.packages'): _packages[package] = value.strip() == '1' # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
def assemble_experiment_temp_dir(config): """Create a temp directory from which to run an experiment. The new directory will include: - Copies of custom experiment files which don't match the exclusion policy - Templates and static resources from Dallinger - An export of the loaded configuration - Heroku-specific files (Procile, runtime.txt) from Dallinger Assumes the experiment root directory is the current working directory. Returns the absolute path of the new directory. """ app_id = config.get("id") dst = os.path.join(tempfile.mkdtemp(), app_id) # Copy local experiment files, minus some shutil.copytree(os.getcwd(), dst, ignore=exclusion_policy()) # Export the loaded configuration config.write(filter_sensitive=True, directory=dst) # Save the experiment id with open(os.path.join(dst, "experiment_id.txt"), "w") as file: file.write(app_id) # Copy Dallinger files dallinger_root = dallinger_package_path() ensure_directory(os.path.join(dst, "static", "scripts")) ensure_directory(os.path.join(dst, "static", "css")) frontend_files = [ os.path.join("static", "css", "dallinger.css"), os.path.join("static", "scripts", "dallinger2.js"), os.path.join("static", "scripts", "reqwest.min.js"), os.path.join("static", "scripts", "require.js"), os.path.join("static", "scripts", "reconnecting-websocket.js"), os.path.join("static", "scripts", "spin.min.js"), os.path.join("static", "scripts", "tracker.js"), os.path.join("static", "scripts", "store+json2.min.js"), os.path.join("templates", "error.html"), os.path.join("templates", "error-complete.html"), os.path.join("templates", "launch.html"), os.path.join("templates", "complete.html"), os.path.join("templates", "questionnaire.html"), os.path.join("templates", "thanks.html"), os.path.join("templates", "waiting.html"), os.path.join("static", "robots.txt"), ] frontend_dirs = [os.path.join("templates", "base")] for filename in frontend_files: src = os.path.join(dallinger_root, "frontend", filename) dst_filepath = os.path.join(dst, filename) if not os.path.exists(dst_filepath): shutil.copy(src, dst_filepath) for filename in frontend_dirs: src = os.path.join(dallinger_root, "frontend", filename) dst_filepath = os.path.join(dst, filename) if not os.path.exists(dst_filepath): shutil.copytree(src, dst_filepath) # Copy Heroku files heroku_files = ["Procfile", "runtime.txt"] for filename in heroku_files: src = os.path.join(dallinger_root, "heroku", filename) shutil.copy(src, os.path.join(dst, filename)) if not config.get("clock_on"): # If the clock process has been disabled, overwrite the Procfile: src = os.path.join(dallinger_root, "heroku", "Procfile_no_clock") shutil.copy(src, os.path.join(dst, "Procfile")) return dst
def function[assemble_experiment_temp_dir, parameter[config]]: constant[Create a temp directory from which to run an experiment. The new directory will include: - Copies of custom experiment files which don't match the exclusion policy - Templates and static resources from Dallinger - An export of the loaded configuration - Heroku-specific files (Procile, runtime.txt) from Dallinger Assumes the experiment root directory is the current working directory. Returns the absolute path of the new directory. ] variable[app_id] assign[=] call[name[config].get, parameter[constant[id]]] variable[dst] assign[=] call[name[os].path.join, parameter[call[name[tempfile].mkdtemp, parameter[]], name[app_id]]] call[name[shutil].copytree, parameter[call[name[os].getcwd, parameter[]], name[dst]]] call[name[config].write, parameter[]] with call[name[open], parameter[call[name[os].path.join, parameter[name[dst], constant[experiment_id.txt]]], constant[w]]] begin[:] call[name[file].write, parameter[name[app_id]]] variable[dallinger_root] assign[=] call[name[dallinger_package_path], parameter[]] call[name[ensure_directory], parameter[call[name[os].path.join, parameter[name[dst], constant[static], constant[scripts]]]]] call[name[ensure_directory], parameter[call[name[os].path.join, parameter[name[dst], constant[static], constant[css]]]]] variable[frontend_files] assign[=] list[[<ast.Call object at 0x7da1b03801c0>, <ast.Call object at 0x7da1b0382890>, <ast.Call object at 0x7da1b0383f70>, <ast.Call object at 0x7da1b0381660>, <ast.Call object at 0x7da1b03812d0>, <ast.Call object at 0x7da1b0380910>, <ast.Call object at 0x7da1b0380fa0>, <ast.Call object at 0x7da1b03805b0>, <ast.Call object at 0x7da1b0383970>, <ast.Call object at 0x7da1b03811e0>, <ast.Call object at 0x7da1b0380fd0>, <ast.Call object at 0x7da1b0381f90>, <ast.Call object at 0x7da1b0383d90>, <ast.Call object at 0x7da1b03820e0>, <ast.Call object at 0x7da1b0383d00>, <ast.Call object at 0x7da1b03832e0>]] variable[frontend_dirs] assign[=] list[[<ast.Call object at 0x7da1b0381690>]] for taget[name[filename]] in starred[name[frontend_files]] begin[:] variable[src] assign[=] call[name[os].path.join, parameter[name[dallinger_root], constant[frontend], name[filename]]] variable[dst_filepath] assign[=] call[name[os].path.join, parameter[name[dst], name[filename]]] if <ast.UnaryOp object at 0x7da1b0380040> begin[:] call[name[shutil].copy, parameter[name[src], name[dst_filepath]]] for taget[name[filename]] in starred[name[frontend_dirs]] begin[:] variable[src] assign[=] call[name[os].path.join, parameter[name[dallinger_root], constant[frontend], name[filename]]] variable[dst_filepath] assign[=] call[name[os].path.join, parameter[name[dst], name[filename]]] if <ast.UnaryOp object at 0x7da1b03825f0> begin[:] call[name[shutil].copytree, parameter[name[src], name[dst_filepath]]] variable[heroku_files] assign[=] list[[<ast.Constant object at 0x7da1b0380ac0>, <ast.Constant object at 0x7da1b03808b0>]] for taget[name[filename]] in starred[name[heroku_files]] begin[:] variable[src] assign[=] call[name[os].path.join, parameter[name[dallinger_root], constant[heroku], name[filename]]] call[name[shutil].copy, parameter[name[src], call[name[os].path.join, parameter[name[dst], name[filename]]]]] if <ast.UnaryOp object at 0x7da1b0383730> begin[:] variable[src] assign[=] call[name[os].path.join, parameter[name[dallinger_root], constant[heroku], constant[Procfile_no_clock]]] call[name[shutil].copy, parameter[name[src], call[name[os].path.join, parameter[name[dst], constant[Procfile]]]]] return[name[dst]]
keyword[def] identifier[assemble_experiment_temp_dir] ( identifier[config] ): literal[string] identifier[app_id] = identifier[config] . identifier[get] ( literal[string] ) identifier[dst] = identifier[os] . identifier[path] . identifier[join] ( identifier[tempfile] . identifier[mkdtemp] (), identifier[app_id] ) identifier[shutil] . identifier[copytree] ( identifier[os] . identifier[getcwd] (), identifier[dst] , identifier[ignore] = identifier[exclusion_policy] ()) identifier[config] . identifier[write] ( identifier[filter_sensitive] = keyword[True] , identifier[directory] = identifier[dst] ) keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dst] , literal[string] ), literal[string] ) keyword[as] identifier[file] : identifier[file] . identifier[write] ( identifier[app_id] ) identifier[dallinger_root] = identifier[dallinger_package_path] () identifier[ensure_directory] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dst] , literal[string] , literal[string] )) identifier[ensure_directory] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dst] , literal[string] , literal[string] )) identifier[frontend_files] =[ identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), ] identifier[frontend_dirs] =[ identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] )] keyword[for] identifier[filename] keyword[in] identifier[frontend_files] : identifier[src] = identifier[os] . identifier[path] . identifier[join] ( identifier[dallinger_root] , literal[string] , identifier[filename] ) identifier[dst_filepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[dst] , identifier[filename] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dst_filepath] ): identifier[shutil] . identifier[copy] ( identifier[src] , identifier[dst_filepath] ) keyword[for] identifier[filename] keyword[in] identifier[frontend_dirs] : identifier[src] = identifier[os] . identifier[path] . identifier[join] ( identifier[dallinger_root] , literal[string] , identifier[filename] ) identifier[dst_filepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[dst] , identifier[filename] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dst_filepath] ): identifier[shutil] . identifier[copytree] ( identifier[src] , identifier[dst_filepath] ) identifier[heroku_files] =[ literal[string] , literal[string] ] keyword[for] identifier[filename] keyword[in] identifier[heroku_files] : identifier[src] = identifier[os] . identifier[path] . identifier[join] ( identifier[dallinger_root] , literal[string] , identifier[filename] ) identifier[shutil] . identifier[copy] ( identifier[src] , identifier[os] . identifier[path] . identifier[join] ( identifier[dst] , identifier[filename] )) keyword[if] keyword[not] identifier[config] . identifier[get] ( literal[string] ): identifier[src] = identifier[os] . identifier[path] . identifier[join] ( identifier[dallinger_root] , literal[string] , literal[string] ) identifier[shutil] . identifier[copy] ( identifier[src] , identifier[os] . identifier[path] . identifier[join] ( identifier[dst] , literal[string] )) keyword[return] identifier[dst]
def assemble_experiment_temp_dir(config): """Create a temp directory from which to run an experiment. The new directory will include: - Copies of custom experiment files which don't match the exclusion policy - Templates and static resources from Dallinger - An export of the loaded configuration - Heroku-specific files (Procile, runtime.txt) from Dallinger Assumes the experiment root directory is the current working directory. Returns the absolute path of the new directory. """ app_id = config.get('id') dst = os.path.join(tempfile.mkdtemp(), app_id) # Copy local experiment files, minus some shutil.copytree(os.getcwd(), dst, ignore=exclusion_policy()) # Export the loaded configuration config.write(filter_sensitive=True, directory=dst) # Save the experiment id with open(os.path.join(dst, 'experiment_id.txt'), 'w') as file: file.write(app_id) # depends on [control=['with'], data=['file']] # Copy Dallinger files dallinger_root = dallinger_package_path() ensure_directory(os.path.join(dst, 'static', 'scripts')) ensure_directory(os.path.join(dst, 'static', 'css')) frontend_files = [os.path.join('static', 'css', 'dallinger.css'), os.path.join('static', 'scripts', 'dallinger2.js'), os.path.join('static', 'scripts', 'reqwest.min.js'), os.path.join('static', 'scripts', 'require.js'), os.path.join('static', 'scripts', 'reconnecting-websocket.js'), os.path.join('static', 'scripts', 'spin.min.js'), os.path.join('static', 'scripts', 'tracker.js'), os.path.join('static', 'scripts', 'store+json2.min.js'), os.path.join('templates', 'error.html'), os.path.join('templates', 'error-complete.html'), os.path.join('templates', 'launch.html'), os.path.join('templates', 'complete.html'), os.path.join('templates', 'questionnaire.html'), os.path.join('templates', 'thanks.html'), os.path.join('templates', 'waiting.html'), os.path.join('static', 'robots.txt')] frontend_dirs = [os.path.join('templates', 'base')] for filename in frontend_files: src = os.path.join(dallinger_root, 'frontend', filename) dst_filepath = os.path.join(dst, filename) if not os.path.exists(dst_filepath): shutil.copy(src, dst_filepath) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] for filename in frontend_dirs: src = os.path.join(dallinger_root, 'frontend', filename) dst_filepath = os.path.join(dst, filename) if not os.path.exists(dst_filepath): shutil.copytree(src, dst_filepath) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # Copy Heroku files heroku_files = ['Procfile', 'runtime.txt'] for filename in heroku_files: src = os.path.join(dallinger_root, 'heroku', filename) shutil.copy(src, os.path.join(dst, filename)) # depends on [control=['for'], data=['filename']] if not config.get('clock_on'): # If the clock process has been disabled, overwrite the Procfile: src = os.path.join(dallinger_root, 'heroku', 'Procfile_no_clock') shutil.copy(src, os.path.join(dst, 'Procfile')) # depends on [control=['if'], data=[]] return dst
def seqToKV(seq, strict=False): """Represent a sequence of pairs of strings as newline-terminated key:value pairs. The pairs are generated in the order given. @param seq: The pairs @type seq: [(str, (unicode|str))] @return: A string representation of the sequence @rtype: str """ def err(msg): formatted = 'seqToKV warning: %s: %r' % (msg, seq) if strict: raise KVFormError(formatted) else: logging.warn(formatted) lines = [] for k, v in seq: if isinstance(k, types.StringType): k = k.decode('UTF8') elif not isinstance(k, types.UnicodeType): err('Converting key to string: %r' % k) k = str(k) if '\n' in k: raise KVFormError( 'Invalid input for seqToKV: key contains newline: %r' % (k,)) if ':' in k: raise KVFormError( 'Invalid input for seqToKV: key contains colon: %r' % (k,)) if k.strip() != k: err('Key has whitespace at beginning or end: %r' % (k,)) if isinstance(v, types.StringType): v = v.decode('UTF8') elif not isinstance(v, types.UnicodeType): err('Converting value to string: %r' % (v,)) v = str(v) if '\n' in v: raise KVFormError( 'Invalid input for seqToKV: value contains newline: %r' % (v,)) if v.strip() != v: err('Value has whitespace at beginning or end: %r' % (v,)) lines.append(k + ':' + v + '\n') return ''.join(lines).encode('UTF8')
def function[seqToKV, parameter[seq, strict]]: constant[Represent a sequence of pairs of strings as newline-terminated key:value pairs. The pairs are generated in the order given. @param seq: The pairs @type seq: [(str, (unicode|str))] @return: A string representation of the sequence @rtype: str ] def function[err, parameter[msg]]: variable[formatted] assign[=] binary_operation[constant[seqToKV warning: %s: %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4ce140>, <ast.Name object at 0x7da18c4cd2a0>]]] if name[strict] begin[:] <ast.Raise object at 0x7da18c4ccb50> variable[lines] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18c4cd030>, <ast.Name object at 0x7da18c4ced40>]]] in starred[name[seq]] begin[:] if call[name[isinstance], parameter[name[k], name[types].StringType]] begin[:] variable[k] assign[=] call[name[k].decode, parameter[constant[UTF8]]] if compare[constant[ ] in name[k]] begin[:] <ast.Raise object at 0x7da18c4cdc00> if compare[constant[:] in name[k]] begin[:] <ast.Raise object at 0x7da18c4cd810> if compare[call[name[k].strip, parameter[]] not_equal[!=] name[k]] begin[:] call[name[err], parameter[binary_operation[constant[Key has whitespace at beginning or end: %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cc610>]]]]] if call[name[isinstance], parameter[name[v], name[types].StringType]] begin[:] variable[v] assign[=] call[name[v].decode, parameter[constant[UTF8]]] if compare[constant[ ] in name[v]] begin[:] <ast.Raise object at 0x7da20c6c7f70> if compare[call[name[v].strip, parameter[]] not_equal[!=] name[v]] begin[:] call[name[err], parameter[binary_operation[constant[Value has whitespace at beginning or end: %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18ede65f0>]]]]] call[name[lines].append, parameter[binary_operation[binary_operation[binary_operation[name[k] + constant[:]] + name[v]] + constant[ ]]]] return[call[call[constant[].join, parameter[name[lines]]].encode, parameter[constant[UTF8]]]]
keyword[def] identifier[seqToKV] ( identifier[seq] , identifier[strict] = keyword[False] ): literal[string] keyword[def] identifier[err] ( identifier[msg] ): identifier[formatted] = literal[string] %( identifier[msg] , identifier[seq] ) keyword[if] identifier[strict] : keyword[raise] identifier[KVFormError] ( identifier[formatted] ) keyword[else] : identifier[logging] . identifier[warn] ( identifier[formatted] ) identifier[lines] =[] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[seq] : keyword[if] identifier[isinstance] ( identifier[k] , identifier[types] . identifier[StringType] ): identifier[k] = identifier[k] . identifier[decode] ( literal[string] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[k] , identifier[types] . identifier[UnicodeType] ): identifier[err] ( literal[string] % identifier[k] ) identifier[k] = identifier[str] ( identifier[k] ) keyword[if] literal[string] keyword[in] identifier[k] : keyword[raise] identifier[KVFormError] ( literal[string] %( identifier[k] ,)) keyword[if] literal[string] keyword[in] identifier[k] : keyword[raise] identifier[KVFormError] ( literal[string] %( identifier[k] ,)) keyword[if] identifier[k] . identifier[strip] ()!= identifier[k] : identifier[err] ( literal[string] %( identifier[k] ,)) keyword[if] identifier[isinstance] ( identifier[v] , identifier[types] . identifier[StringType] ): identifier[v] = identifier[v] . identifier[decode] ( literal[string] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[v] , identifier[types] . identifier[UnicodeType] ): identifier[err] ( literal[string] %( identifier[v] ,)) identifier[v] = identifier[str] ( identifier[v] ) keyword[if] literal[string] keyword[in] identifier[v] : keyword[raise] identifier[KVFormError] ( literal[string] %( identifier[v] ,)) keyword[if] identifier[v] . identifier[strip] ()!= identifier[v] : identifier[err] ( literal[string] %( identifier[v] ,)) identifier[lines] . identifier[append] ( identifier[k] + literal[string] + identifier[v] + literal[string] ) keyword[return] literal[string] . identifier[join] ( identifier[lines] ). identifier[encode] ( literal[string] )
def seqToKV(seq, strict=False): """Represent a sequence of pairs of strings as newline-terminated key:value pairs. The pairs are generated in the order given. @param seq: The pairs @type seq: [(str, (unicode|str))] @return: A string representation of the sequence @rtype: str """ def err(msg): formatted = 'seqToKV warning: %s: %r' % (msg, seq) if strict: raise KVFormError(formatted) # depends on [control=['if'], data=[]] else: logging.warn(formatted) lines = [] for (k, v) in seq: if isinstance(k, types.StringType): k = k.decode('UTF8') # depends on [control=['if'], data=[]] elif not isinstance(k, types.UnicodeType): err('Converting key to string: %r' % k) k = str(k) # depends on [control=['if'], data=[]] if '\n' in k: raise KVFormError('Invalid input for seqToKV: key contains newline: %r' % (k,)) # depends on [control=['if'], data=['k']] if ':' in k: raise KVFormError('Invalid input for seqToKV: key contains colon: %r' % (k,)) # depends on [control=['if'], data=['k']] if k.strip() != k: err('Key has whitespace at beginning or end: %r' % (k,)) # depends on [control=['if'], data=['k']] if isinstance(v, types.StringType): v = v.decode('UTF8') # depends on [control=['if'], data=[]] elif not isinstance(v, types.UnicodeType): err('Converting value to string: %r' % (v,)) v = str(v) # depends on [control=['if'], data=[]] if '\n' in v: raise KVFormError('Invalid input for seqToKV: value contains newline: %r' % (v,)) # depends on [control=['if'], data=['v']] if v.strip() != v: err('Value has whitespace at beginning or end: %r' % (v,)) # depends on [control=['if'], data=['v']] lines.append(k + ':' + v + '\n') # depends on [control=['for'], data=[]] return ''.join(lines).encode('UTF8')
def _set_mac_learning(self, v, load=False): """ Setter method for mac_learning, mapped from YANG variable /overlay_gateway/site/mac_learning (container) If this variable is read-only (config: false) in the source YANG file, then _set_mac_learning is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_mac_learning() directly. YANG Description: This configuration allows to specify MAC learning mode for layer2-extension tunnels. By default dynamic MAC learning is used. It can be changed to control plane learning via protocols like BGP-EVPN. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=mac_learning.mac_learning, is_container='container', presence=False, yang_name="mac-learning", rest_name="mac-learning", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAC learning mode', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-tunnels', defining_module='brocade-tunnels', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """mac_learning must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=mac_learning.mac_learning, is_container='container', presence=False, yang_name="mac-learning", rest_name="mac-learning", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAC learning mode', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-tunnels', defining_module='brocade-tunnels', yang_type='container', is_config=True)""", }) self.__mac_learning = t if hasattr(self, '_set'): self._set()
def function[_set_mac_learning, parameter[self, v, load]]: constant[ Setter method for mac_learning, mapped from YANG variable /overlay_gateway/site/mac_learning (container) If this variable is read-only (config: false) in the source YANG file, then _set_mac_learning is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_mac_learning() directly. YANG Description: This configuration allows to specify MAC learning mode for layer2-extension tunnels. By default dynamic MAC learning is used. It can be changed to control plane learning via protocols like BGP-EVPN. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da2054a7700> name[self].__mac_learning assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_mac_learning] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[mac_learning] . identifier[mac_learning] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__mac_learning] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_mac_learning(self, v, load=False): """ Setter method for mac_learning, mapped from YANG variable /overlay_gateway/site/mac_learning (container) If this variable is read-only (config: false) in the source YANG file, then _set_mac_learning is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_mac_learning() directly. YANG Description: This configuration allows to specify MAC learning mode for layer2-extension tunnels. By default dynamic MAC learning is used. It can be changed to control plane learning via protocols like BGP-EVPN. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=mac_learning.mac_learning, is_container='container', presence=False, yang_name='mac-learning', rest_name='mac-learning', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAC learning mode', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-tunnels', defining_module='brocade-tunnels', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'mac_learning must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=mac_learning.mac_learning, is_container=\'container\', presence=False, yang_name="mac-learning", rest_name="mac-learning", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure MAC learning mode\', u\'cli-incomplete-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-tunnels\', defining_module=\'brocade-tunnels\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__mac_learning = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def restart_complete(self, state, new_address): ''' Called when we get notified that the restart has been completed by some agent who has volontureed to do so. ''' if state.timeout_call_id: state.agent.cancel_delayed_call(state.timeout_call_id) state.timeout_call_id = None return self._send_restarted_notifications(new_address)
def function[restart_complete, parameter[self, state, new_address]]: constant[ Called when we get notified that the restart has been completed by some agent who has volontureed to do so. ] if name[state].timeout_call_id begin[:] call[name[state].agent.cancel_delayed_call, parameter[name[state].timeout_call_id]] name[state].timeout_call_id assign[=] constant[None] return[call[name[self]._send_restarted_notifications, parameter[name[new_address]]]]
keyword[def] identifier[restart_complete] ( identifier[self] , identifier[state] , identifier[new_address] ): literal[string] keyword[if] identifier[state] . identifier[timeout_call_id] : identifier[state] . identifier[agent] . identifier[cancel_delayed_call] ( identifier[state] . identifier[timeout_call_id] ) identifier[state] . identifier[timeout_call_id] = keyword[None] keyword[return] identifier[self] . identifier[_send_restarted_notifications] ( identifier[new_address] )
def restart_complete(self, state, new_address): """ Called when we get notified that the restart has been completed by some agent who has volontureed to do so. """ if state.timeout_call_id: state.agent.cancel_delayed_call(state.timeout_call_id) state.timeout_call_id = None # depends on [control=['if'], data=[]] return self._send_restarted_notifications(new_address)
def get_new_oids(self): ''' Returns a list of unique oids that have not been extracted yet. Essentially, a diff of distinct oids in the source database compared to cube. ''' table = self.lconfig.get('table') _oid = self.lconfig.get('_oid') if is_array(_oid): _oid = _oid[0] # get the db column, not the field alias last_id = self.container.get_last_field(field='_oid') ids = [] if last_id: try: # try to convert to integer... if not, assume unicode value last_id = float(last_id) where = "%s.%s > %s" % (table, _oid, last_id) except (TypeError, ValueError): where = "%s.%s > '%s'" % (table, _oid, last_id) ids = self.sql_get_oids(where) return ids
def function[get_new_oids, parameter[self]]: constant[ Returns a list of unique oids that have not been extracted yet. Essentially, a diff of distinct oids in the source database compared to cube. ] variable[table] assign[=] call[name[self].lconfig.get, parameter[constant[table]]] variable[_oid] assign[=] call[name[self].lconfig.get, parameter[constant[_oid]]] if call[name[is_array], parameter[name[_oid]]] begin[:] variable[_oid] assign[=] call[name[_oid]][constant[0]] variable[last_id] assign[=] call[name[self].container.get_last_field, parameter[]] variable[ids] assign[=] list[[]] if name[last_id] begin[:] <ast.Try object at 0x7da1b0b81de0> variable[ids] assign[=] call[name[self].sql_get_oids, parameter[name[where]]] return[name[ids]]
keyword[def] identifier[get_new_oids] ( identifier[self] ): literal[string] identifier[table] = identifier[self] . identifier[lconfig] . identifier[get] ( literal[string] ) identifier[_oid] = identifier[self] . identifier[lconfig] . identifier[get] ( literal[string] ) keyword[if] identifier[is_array] ( identifier[_oid] ): identifier[_oid] = identifier[_oid] [ literal[int] ] identifier[last_id] = identifier[self] . identifier[container] . identifier[get_last_field] ( identifier[field] = literal[string] ) identifier[ids] =[] keyword[if] identifier[last_id] : keyword[try] : identifier[last_id] = identifier[float] ( identifier[last_id] ) identifier[where] = literal[string] %( identifier[table] , identifier[_oid] , identifier[last_id] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): identifier[where] = literal[string] %( identifier[table] , identifier[_oid] , identifier[last_id] ) identifier[ids] = identifier[self] . identifier[sql_get_oids] ( identifier[where] ) keyword[return] identifier[ids]
def get_new_oids(self): """ Returns a list of unique oids that have not been extracted yet. Essentially, a diff of distinct oids in the source database compared to cube. """ table = self.lconfig.get('table') _oid = self.lconfig.get('_oid') if is_array(_oid): _oid = _oid[0] # get the db column, not the field alias # depends on [control=['if'], data=[]] last_id = self.container.get_last_field(field='_oid') ids = [] if last_id: try: # try to convert to integer... if not, assume unicode value last_id = float(last_id) where = '%s.%s > %s' % (table, _oid, last_id) # depends on [control=['try'], data=[]] except (TypeError, ValueError): where = "%s.%s > '%s'" % (table, _oid, last_id) # depends on [control=['except'], data=[]] ids = self.sql_get_oids(where) # depends on [control=['if'], data=[]] return ids
def orthonormal_VanillaLSTMBuilder(lstm_layers, input_dims, lstm_hiddens, dropout_x=0., dropout_h=0., debug=False): """Build a standard LSTM cell, with variational dropout, with weights initialized to be orthonormal (https://arxiv.org/abs/1312.6120) Parameters ---------- lstm_layers : int Currently only support one layer input_dims : int word vector dimensions lstm_hiddens : int hidden size dropout_x : float dropout on inputs, not used in this implementation, see `biLSTM` below dropout_h : float dropout on hidden states debug : bool set to True to skip orthonormal initialization Returns ------- lstm_cell : VariationalDropoutCell A LSTM cell """ assert lstm_layers == 1, 'only accept one layer lstm' W = orthonormal_initializer(lstm_hiddens, lstm_hiddens + input_dims, debug) W_h, W_x = W[:, :lstm_hiddens], W[:, lstm_hiddens:] b = nd.zeros((4 * lstm_hiddens,)) b[lstm_hiddens:2 * lstm_hiddens] = -1.0 lstm_cell = rnn.LSTMCell(input_size=input_dims, hidden_size=lstm_hiddens, i2h_weight_initializer=mx.init.Constant(np.concatenate([W_x] * 4, 0)), h2h_weight_initializer=mx.init.Constant(np.concatenate([W_h] * 4, 0)), h2h_bias_initializer=mx.init.Constant(b)) wrapper = VariationalDropoutCell(lstm_cell, drop_states=dropout_h) return wrapper
def function[orthonormal_VanillaLSTMBuilder, parameter[lstm_layers, input_dims, lstm_hiddens, dropout_x, dropout_h, debug]]: constant[Build a standard LSTM cell, with variational dropout, with weights initialized to be orthonormal (https://arxiv.org/abs/1312.6120) Parameters ---------- lstm_layers : int Currently only support one layer input_dims : int word vector dimensions lstm_hiddens : int hidden size dropout_x : float dropout on inputs, not used in this implementation, see `biLSTM` below dropout_h : float dropout on hidden states debug : bool set to True to skip orthonormal initialization Returns ------- lstm_cell : VariationalDropoutCell A LSTM cell ] assert[compare[name[lstm_layers] equal[==] constant[1]]] variable[W] assign[=] call[name[orthonormal_initializer], parameter[name[lstm_hiddens], binary_operation[name[lstm_hiddens] + name[input_dims]], name[debug]]] <ast.Tuple object at 0x7da1b212f910> assign[=] tuple[[<ast.Subscript object at 0x7da1b212d060>, <ast.Subscript object at 0x7da1b212c220>]] variable[b] assign[=] call[name[nd].zeros, parameter[tuple[[<ast.BinOp object at 0x7da1b212dd20>]]]] call[name[b]][<ast.Slice object at 0x7da1b212c760>] assign[=] <ast.UnaryOp object at 0x7da1b212ea70> variable[lstm_cell] assign[=] call[name[rnn].LSTMCell, parameter[]] variable[wrapper] assign[=] call[name[VariationalDropoutCell], parameter[name[lstm_cell]]] return[name[wrapper]]
keyword[def] identifier[orthonormal_VanillaLSTMBuilder] ( identifier[lstm_layers] , identifier[input_dims] , identifier[lstm_hiddens] , identifier[dropout_x] = literal[int] , identifier[dropout_h] = literal[int] , identifier[debug] = keyword[False] ): literal[string] keyword[assert] identifier[lstm_layers] == literal[int] , literal[string] identifier[W] = identifier[orthonormal_initializer] ( identifier[lstm_hiddens] , identifier[lstm_hiddens] + identifier[input_dims] , identifier[debug] ) identifier[W_h] , identifier[W_x] = identifier[W] [:,: identifier[lstm_hiddens] ], identifier[W] [:, identifier[lstm_hiddens] :] identifier[b] = identifier[nd] . identifier[zeros] (( literal[int] * identifier[lstm_hiddens] ,)) identifier[b] [ identifier[lstm_hiddens] : literal[int] * identifier[lstm_hiddens] ]=- literal[int] identifier[lstm_cell] = identifier[rnn] . identifier[LSTMCell] ( identifier[input_size] = identifier[input_dims] , identifier[hidden_size] = identifier[lstm_hiddens] , identifier[i2h_weight_initializer] = identifier[mx] . identifier[init] . identifier[Constant] ( identifier[np] . identifier[concatenate] ([ identifier[W_x] ]* literal[int] , literal[int] )), identifier[h2h_weight_initializer] = identifier[mx] . identifier[init] . identifier[Constant] ( identifier[np] . identifier[concatenate] ([ identifier[W_h] ]* literal[int] , literal[int] )), identifier[h2h_bias_initializer] = identifier[mx] . identifier[init] . identifier[Constant] ( identifier[b] )) identifier[wrapper] = identifier[VariationalDropoutCell] ( identifier[lstm_cell] , identifier[drop_states] = identifier[dropout_h] ) keyword[return] identifier[wrapper]
def orthonormal_VanillaLSTMBuilder(lstm_layers, input_dims, lstm_hiddens, dropout_x=0.0, dropout_h=0.0, debug=False): """Build a standard LSTM cell, with variational dropout, with weights initialized to be orthonormal (https://arxiv.org/abs/1312.6120) Parameters ---------- lstm_layers : int Currently only support one layer input_dims : int word vector dimensions lstm_hiddens : int hidden size dropout_x : float dropout on inputs, not used in this implementation, see `biLSTM` below dropout_h : float dropout on hidden states debug : bool set to True to skip orthonormal initialization Returns ------- lstm_cell : VariationalDropoutCell A LSTM cell """ assert lstm_layers == 1, 'only accept one layer lstm' W = orthonormal_initializer(lstm_hiddens, lstm_hiddens + input_dims, debug) (W_h, W_x) = (W[:, :lstm_hiddens], W[:, lstm_hiddens:]) b = nd.zeros((4 * lstm_hiddens,)) b[lstm_hiddens:2 * lstm_hiddens] = -1.0 lstm_cell = rnn.LSTMCell(input_size=input_dims, hidden_size=lstm_hiddens, i2h_weight_initializer=mx.init.Constant(np.concatenate([W_x] * 4, 0)), h2h_weight_initializer=mx.init.Constant(np.concatenate([W_h] * 4, 0)), h2h_bias_initializer=mx.init.Constant(b)) wrapper = VariationalDropoutCell(lstm_cell, drop_states=dropout_h) return wrapper
def _read_config_file(config_file, verbose): """Read configuration file options into a dictionary.""" config_file = os.path.abspath(config_file) if not os.path.exists(config_file): raise RuntimeError("Couldn't open configuration file '{}'.".format(config_file)) if config_file.endswith(".json"): with io.open(config_file, mode="r", encoding="utf-8") as json_file: # Minify the JSON file to strip embedded comments minified = jsmin(json_file.read()) conf = json.loads(minified) elif config_file.endswith(".yaml"): with io.open(config_file, mode="r", encoding="utf-8") as yaml_file: conf = yaml.safe_load(yaml_file) else: try: import imp conf = {} configmodule = imp.load_source("configuration_module", config_file) for k, v in vars(configmodule).items(): if k.startswith("__"): continue elif isfunction(v): continue conf[k] = v except Exception: exc_type, exc_value = sys.exc_info()[:2] exc_info_list = traceback.format_exception_only(exc_type, exc_value) exc_text = "\n".join(exc_info_list) print( "Failed to read configuration file: " + config_file + "\nDue to " + exc_text, file=sys.stderr, ) raise conf["_config_file"] = config_file return conf
def function[_read_config_file, parameter[config_file, verbose]]: constant[Read configuration file options into a dictionary.] variable[config_file] assign[=] call[name[os].path.abspath, parameter[name[config_file]]] if <ast.UnaryOp object at 0x7da1b006c610> begin[:] <ast.Raise object at 0x7da1b006e6e0> if call[name[config_file].endswith, parameter[constant[.json]]] begin[:] with call[name[io].open, parameter[name[config_file]]] begin[:] variable[minified] assign[=] call[name[jsmin], parameter[call[name[json_file].read, parameter[]]]] variable[conf] assign[=] call[name[json].loads, parameter[name[minified]]] call[name[conf]][constant[_config_file]] assign[=] name[config_file] return[name[conf]]
keyword[def] identifier[_read_config_file] ( identifier[config_file] , identifier[verbose] ): literal[string] identifier[config_file] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[config_file] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[config_file] ): keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[config_file] )) keyword[if] identifier[config_file] . identifier[endswith] ( literal[string] ): keyword[with] identifier[io] . identifier[open] ( identifier[config_file] , identifier[mode] = literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[json_file] : identifier[minified] = identifier[jsmin] ( identifier[json_file] . identifier[read] ()) identifier[conf] = identifier[json] . identifier[loads] ( identifier[minified] ) keyword[elif] identifier[config_file] . identifier[endswith] ( literal[string] ): keyword[with] identifier[io] . identifier[open] ( identifier[config_file] , identifier[mode] = literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[yaml_file] : identifier[conf] = identifier[yaml] . identifier[safe_load] ( identifier[yaml_file] ) keyword[else] : keyword[try] : keyword[import] identifier[imp] identifier[conf] ={} identifier[configmodule] = identifier[imp] . identifier[load_source] ( literal[string] , identifier[config_file] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[vars] ( identifier[configmodule] ). identifier[items] (): keyword[if] identifier[k] . identifier[startswith] ( literal[string] ): keyword[continue] keyword[elif] identifier[isfunction] ( identifier[v] ): keyword[continue] identifier[conf] [ identifier[k] ]= identifier[v] keyword[except] identifier[Exception] : identifier[exc_type] , identifier[exc_value] = identifier[sys] . identifier[exc_info] ()[: literal[int] ] identifier[exc_info_list] = identifier[traceback] . identifier[format_exception_only] ( identifier[exc_type] , identifier[exc_value] ) identifier[exc_text] = literal[string] . identifier[join] ( identifier[exc_info_list] ) identifier[print] ( literal[string] + identifier[config_file] + literal[string] + identifier[exc_text] , identifier[file] = identifier[sys] . identifier[stderr] , ) keyword[raise] identifier[conf] [ literal[string] ]= identifier[config_file] keyword[return] identifier[conf]
def _read_config_file(config_file, verbose): """Read configuration file options into a dictionary.""" config_file = os.path.abspath(config_file) if not os.path.exists(config_file): raise RuntimeError("Couldn't open configuration file '{}'.".format(config_file)) # depends on [control=['if'], data=[]] if config_file.endswith('.json'): with io.open(config_file, mode='r', encoding='utf-8') as json_file: # Minify the JSON file to strip embedded comments minified = jsmin(json_file.read()) # depends on [control=['with'], data=['json_file']] conf = json.loads(minified) # depends on [control=['if'], data=[]] elif config_file.endswith('.yaml'): with io.open(config_file, mode='r', encoding='utf-8') as yaml_file: conf = yaml.safe_load(yaml_file) # depends on [control=['with'], data=['yaml_file']] # depends on [control=['if'], data=[]] else: try: import imp conf = {} configmodule = imp.load_source('configuration_module', config_file) for (k, v) in vars(configmodule).items(): if k.startswith('__'): continue # depends on [control=['if'], data=[]] elif isfunction(v): continue # depends on [control=['if'], data=[]] conf[k] = v # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]] except Exception: (exc_type, exc_value) = sys.exc_info()[:2] exc_info_list = traceback.format_exception_only(exc_type, exc_value) exc_text = '\n'.join(exc_info_list) print('Failed to read configuration file: ' + config_file + '\nDue to ' + exc_text, file=sys.stderr) raise # depends on [control=['except'], data=[]] conf['_config_file'] = config_file return conf
def add(self, *args, **kargs): """Ex: add(dst="2001:db8:cafe:f000::/56") add(dst="2001:db8:cafe:f000::/56", gw="2001:db8:cafe::1") add(dst="2001:db8:cafe:f000::/64", gw="2001:db8:cafe::1", dev="eth0") """ self.invalidate_cache() self.routes.append(self.make_route(*args, **kargs))
def function[add, parameter[self]]: constant[Ex: add(dst="2001:db8:cafe:f000::/56") add(dst="2001:db8:cafe:f000::/56", gw="2001:db8:cafe::1") add(dst="2001:db8:cafe:f000::/64", gw="2001:db8:cafe::1", dev="eth0") ] call[name[self].invalidate_cache, parameter[]] call[name[self].routes.append, parameter[call[name[self].make_route, parameter[<ast.Starred object at 0x7da1b2111c60>]]]]
keyword[def] identifier[add] ( identifier[self] ,* identifier[args] ,** identifier[kargs] ): literal[string] identifier[self] . identifier[invalidate_cache] () identifier[self] . identifier[routes] . identifier[append] ( identifier[self] . identifier[make_route] (* identifier[args] ,** identifier[kargs] ))
def add(self, *args, **kargs): """Ex: add(dst="2001:db8:cafe:f000::/56") add(dst="2001:db8:cafe:f000::/56", gw="2001:db8:cafe::1") add(dst="2001:db8:cafe:f000::/64", gw="2001:db8:cafe::1", dev="eth0") """ self.invalidate_cache() self.routes.append(self.make_route(*args, **kargs))
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'entity') and self.entity is not None: _dict['entity'] = self.entity if hasattr(self, 'description') and self.description is not None: _dict['description'] = self.description if hasattr(self, 'metadata') and self.metadata is not None: _dict['metadata'] = self.metadata if hasattr(self, 'fuzzy_match') and self.fuzzy_match is not None: _dict['fuzzy_match'] = self.fuzzy_match if hasattr(self, 'created') and self.created is not None: _dict['created'] = datetime_to_string(self.created) if hasattr(self, 'updated') and self.updated is not None: _dict['updated'] = datetime_to_string(self.updated) if hasattr(self, 'values') and self.values is not None: _dict['values'] = [x._to_dict() for x in self.values] return _dict
def function[_to_dict, parameter[self]]: constant[Return a json dictionary representing this model.] variable[_dict] assign[=] dictionary[[], []] if <ast.BoolOp object at 0x7da20c76ca60> begin[:] call[name[_dict]][constant[entity]] assign[=] name[self].entity if <ast.BoolOp object at 0x7da20c76c970> begin[:] call[name[_dict]][constant[description]] assign[=] name[self].description if <ast.BoolOp object at 0x7da20c76c460> begin[:] call[name[_dict]][constant[metadata]] assign[=] name[self].metadata if <ast.BoolOp object at 0x7da2054a5f60> begin[:] call[name[_dict]][constant[fuzzy_match]] assign[=] name[self].fuzzy_match if <ast.BoolOp object at 0x7da2054a53f0> begin[:] call[name[_dict]][constant[created]] assign[=] call[name[datetime_to_string], parameter[name[self].created]] if <ast.BoolOp object at 0x7da2054a6e30> begin[:] call[name[_dict]][constant[updated]] assign[=] call[name[datetime_to_string], parameter[name[self].updated]] if <ast.BoolOp object at 0x7da2054a7670> begin[:] call[name[_dict]][constant[values]] assign[=] <ast.ListComp object at 0x7da1b23456c0> return[name[_dict]]
keyword[def] identifier[_to_dict] ( identifier[self] ): literal[string] identifier[_dict] ={} keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[entity] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[entity] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[description] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[description] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[metadata] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[metadata] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[fuzzy_match] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[fuzzy_match] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[created] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[datetime_to_string] ( identifier[self] . identifier[created] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[updated] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[datetime_to_string] ( identifier[self] . identifier[updated] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[values] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]=[ identifier[x] . identifier[_to_dict] () keyword[for] identifier[x] keyword[in] identifier[self] . identifier[values] ] keyword[return] identifier[_dict]
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'entity') and self.entity is not None: _dict['entity'] = self.entity # depends on [control=['if'], data=[]] if hasattr(self, 'description') and self.description is not None: _dict['description'] = self.description # depends on [control=['if'], data=[]] if hasattr(self, 'metadata') and self.metadata is not None: _dict['metadata'] = self.metadata # depends on [control=['if'], data=[]] if hasattr(self, 'fuzzy_match') and self.fuzzy_match is not None: _dict['fuzzy_match'] = self.fuzzy_match # depends on [control=['if'], data=[]] if hasattr(self, 'created') and self.created is not None: _dict['created'] = datetime_to_string(self.created) # depends on [control=['if'], data=[]] if hasattr(self, 'updated') and self.updated is not None: _dict['updated'] = datetime_to_string(self.updated) # depends on [control=['if'], data=[]] if hasattr(self, 'values') and self.values is not None: _dict['values'] = [x._to_dict() for x in self.values] # depends on [control=['if'], data=[]] return _dict
def show_version(self): """ Print version information to the out file. """ version_info = self.get_cli_version() version_info += self.get_runtime_version() print(version_info, file=self.out_file)
def function[show_version, parameter[self]]: constant[ Print version information to the out file. ] variable[version_info] assign[=] call[name[self].get_cli_version, parameter[]] <ast.AugAssign object at 0x7da18f813010> call[name[print], parameter[name[version_info]]]
keyword[def] identifier[show_version] ( identifier[self] ): literal[string] identifier[version_info] = identifier[self] . identifier[get_cli_version] () identifier[version_info] += identifier[self] . identifier[get_runtime_version] () identifier[print] ( identifier[version_info] , identifier[file] = identifier[self] . identifier[out_file] )
def show_version(self): """ Print version information to the out file. """ version_info = self.get_cli_version() version_info += self.get_runtime_version() print(version_info, file=self.out_file)
def verify_classification(self, classification): """ Mark the given ClassifiedFailure as verified. Handles the classification not currently being related to this TextLogError and no Metadata existing. """ if classification not in self.classified_failures.all(): self.create_match("ManualDetector", classification) # create a TextLogErrorMetadata instance for this TextLogError if it # doesn't exist. We can't use update_or_create here since OneToOne # relations don't use an object manager so a missing relation is simply # None as opposed to RelatedManager. if self.metadata is None: TextLogErrorMetadata.objects.create(text_log_error=self, best_classification=classification, best_is_verified=True) else: self.metadata.best_classification = classification self.metadata.best_is_verified = True self.metadata.save(update_fields=['best_classification', 'best_is_verified']) self.metadata.failure_line.elastic_search_insert() # Send event to NewRelic when a verifing an autoclassified failure. match = self.matches.filter(classified_failure=classification).first() if not match: return newrelic.agent.record_custom_event('user_verified_classification', { 'matcher': match.matcher_name, 'job_id': self.id, })
def function[verify_classification, parameter[self, classification]]: constant[ Mark the given ClassifiedFailure as verified. Handles the classification not currently being related to this TextLogError and no Metadata existing. ] if compare[name[classification] <ast.NotIn object at 0x7da2590d7190> call[name[self].classified_failures.all, parameter[]]] begin[:] call[name[self].create_match, parameter[constant[ManualDetector], name[classification]]] if compare[name[self].metadata is constant[None]] begin[:] call[name[TextLogErrorMetadata].objects.create, parameter[]] call[name[self].metadata.failure_line.elastic_search_insert, parameter[]] variable[match] assign[=] call[call[name[self].matches.filter, parameter[]].first, parameter[]] if <ast.UnaryOp object at 0x7da1b0632560> begin[:] return[None] call[name[newrelic].agent.record_custom_event, parameter[constant[user_verified_classification], dictionary[[<ast.Constant object at 0x7da1b0630190>, <ast.Constant object at 0x7da1b06324a0>], [<ast.Attribute object at 0x7da1b0633fa0>, <ast.Attribute object at 0x7da1b0632f20>]]]]
keyword[def] identifier[verify_classification] ( identifier[self] , identifier[classification] ): literal[string] keyword[if] identifier[classification] keyword[not] keyword[in] identifier[self] . identifier[classified_failures] . identifier[all] (): identifier[self] . identifier[create_match] ( literal[string] , identifier[classification] ) keyword[if] identifier[self] . identifier[metadata] keyword[is] keyword[None] : identifier[TextLogErrorMetadata] . identifier[objects] . identifier[create] ( identifier[text_log_error] = identifier[self] , identifier[best_classification] = identifier[classification] , identifier[best_is_verified] = keyword[True] ) keyword[else] : identifier[self] . identifier[metadata] . identifier[best_classification] = identifier[classification] identifier[self] . identifier[metadata] . identifier[best_is_verified] = keyword[True] identifier[self] . identifier[metadata] . identifier[save] ( identifier[update_fields] =[ literal[string] , literal[string] ]) identifier[self] . identifier[metadata] . identifier[failure_line] . identifier[elastic_search_insert] () identifier[match] = identifier[self] . identifier[matches] . identifier[filter] ( identifier[classified_failure] = identifier[classification] ). identifier[first] () keyword[if] keyword[not] identifier[match] : keyword[return] identifier[newrelic] . identifier[agent] . identifier[record_custom_event] ( literal[string] ,{ literal[string] : identifier[match] . identifier[matcher_name] , literal[string] : identifier[self] . identifier[id] , })
def verify_classification(self, classification): """ Mark the given ClassifiedFailure as verified. Handles the classification not currently being related to this TextLogError and no Metadata existing. """ if classification not in self.classified_failures.all(): self.create_match('ManualDetector', classification) # depends on [control=['if'], data=['classification']] # create a TextLogErrorMetadata instance for this TextLogError if it # doesn't exist. We can't use update_or_create here since OneToOne # relations don't use an object manager so a missing relation is simply # None as opposed to RelatedManager. if self.metadata is None: TextLogErrorMetadata.objects.create(text_log_error=self, best_classification=classification, best_is_verified=True) # depends on [control=['if'], data=[]] else: self.metadata.best_classification = classification self.metadata.best_is_verified = True self.metadata.save(update_fields=['best_classification', 'best_is_verified']) self.metadata.failure_line.elastic_search_insert() # Send event to NewRelic when a verifing an autoclassified failure. match = self.matches.filter(classified_failure=classification).first() if not match: return # depends on [control=['if'], data=[]] newrelic.agent.record_custom_event('user_verified_classification', {'matcher': match.matcher_name, 'job_id': self.id})
def prior_bayes_information(self, expparams, n_samples=None): """ Evaluates the local Bayesian Information Matrix (BIM) for a set of samples from the SMC particle set, with uniform weights. :param expparams: Parameters describing the experiment that was performed. :type expparams: :class:`~numpy.ndarray` of dtype given by the :attr:`~qinfer.abstract_model.Model.expparams_dtype` property of the underlying model :param n_samples int: Number of samples to draw from particle distribution, to evaluate BIM over. """ if n_samples is None: n_samples = self.particle_locations.shape[0] return self._bim(self.prior.sample(n_samples), expparams)
def function[prior_bayes_information, parameter[self, expparams, n_samples]]: constant[ Evaluates the local Bayesian Information Matrix (BIM) for a set of samples from the SMC particle set, with uniform weights. :param expparams: Parameters describing the experiment that was performed. :type expparams: :class:`~numpy.ndarray` of dtype given by the :attr:`~qinfer.abstract_model.Model.expparams_dtype` property of the underlying model :param n_samples int: Number of samples to draw from particle distribution, to evaluate BIM over. ] if compare[name[n_samples] is constant[None]] begin[:] variable[n_samples] assign[=] call[name[self].particle_locations.shape][constant[0]] return[call[name[self]._bim, parameter[call[name[self].prior.sample, parameter[name[n_samples]]], name[expparams]]]]
keyword[def] identifier[prior_bayes_information] ( identifier[self] , identifier[expparams] , identifier[n_samples] = keyword[None] ): literal[string] keyword[if] identifier[n_samples] keyword[is] keyword[None] : identifier[n_samples] = identifier[self] . identifier[particle_locations] . identifier[shape] [ literal[int] ] keyword[return] identifier[self] . identifier[_bim] ( identifier[self] . identifier[prior] . identifier[sample] ( identifier[n_samples] ), identifier[expparams] )
def prior_bayes_information(self, expparams, n_samples=None): """ Evaluates the local Bayesian Information Matrix (BIM) for a set of samples from the SMC particle set, with uniform weights. :param expparams: Parameters describing the experiment that was performed. :type expparams: :class:`~numpy.ndarray` of dtype given by the :attr:`~qinfer.abstract_model.Model.expparams_dtype` property of the underlying model :param n_samples int: Number of samples to draw from particle distribution, to evaluate BIM over. """ if n_samples is None: n_samples = self.particle_locations.shape[0] # depends on [control=['if'], data=['n_samples']] return self._bim(self.prior.sample(n_samples), expparams)
def detect_intent_with_sentiment_analysis(project_id, session_id, texts, language_code): """Returns the result of detect intent with texts as inputs and analyzes the sentiment of the query text. Using the same `session_id` between requests allows continuation of the conversaion.""" import dialogflow_v2beta1 as dialogflow session_client = dialogflow.SessionsClient() session_path = session_client.session_path(project_id, session_id) print('Session path: {}\n'.format(session_path)) for text in texts: text_input = dialogflow.types.TextInput( text=text, language_code=language_code) query_input = dialogflow.types.QueryInput(text=text_input) # Enable sentiment analysis sentiment_config = dialogflow.types.SentimentAnalysisRequestConfig( analyze_query_text_sentiment=True) # Set the query parameters with sentiment analysis query_params = dialogflow.types.QueryParameters( sentiment_analysis_request_config=sentiment_config) response = session_client.detect_intent( session=session_path, query_input=query_input, query_params=query_params) print('=' * 20) print('Query text: {}'.format(response.query_result.query_text)) print('Detected intent: {} (confidence: {})\n'.format( response.query_result.intent.display_name, response.query_result.intent_detection_confidence)) print('Fulfillment text: {}\n'.format( response.query_result.fulfillment_text)) # Score between -1.0 (negative sentiment) and 1.0 (positive sentiment). print('Query Text Sentiment Score: {}\n'.format( response.query_result.sentiment_analysis_result .query_text_sentiment.score)) print('Query Text Sentiment Magnitude: {}\n'.format( response.query_result.sentiment_analysis_result .query_text_sentiment.magnitude))
def function[detect_intent_with_sentiment_analysis, parameter[project_id, session_id, texts, language_code]]: constant[Returns the result of detect intent with texts as inputs and analyzes the sentiment of the query text. Using the same `session_id` between requests allows continuation of the conversaion.] import module[dialogflow_v2beta1] as alias[dialogflow] variable[session_client] assign[=] call[name[dialogflow].SessionsClient, parameter[]] variable[session_path] assign[=] call[name[session_client].session_path, parameter[name[project_id], name[session_id]]] call[name[print], parameter[call[constant[Session path: {} ].format, parameter[name[session_path]]]]] for taget[name[text]] in starred[name[texts]] begin[:] variable[text_input] assign[=] call[name[dialogflow].types.TextInput, parameter[]] variable[query_input] assign[=] call[name[dialogflow].types.QueryInput, parameter[]] variable[sentiment_config] assign[=] call[name[dialogflow].types.SentimentAnalysisRequestConfig, parameter[]] variable[query_params] assign[=] call[name[dialogflow].types.QueryParameters, parameter[]] variable[response] assign[=] call[name[session_client].detect_intent, parameter[]] call[name[print], parameter[binary_operation[constant[=] * constant[20]]]] call[name[print], parameter[call[constant[Query text: {}].format, parameter[name[response].query_result.query_text]]]] call[name[print], parameter[call[constant[Detected intent: {} (confidence: {}) ].format, parameter[name[response].query_result.intent.display_name, name[response].query_result.intent_detection_confidence]]]] call[name[print], parameter[call[constant[Fulfillment text: {} ].format, parameter[name[response].query_result.fulfillment_text]]]] call[name[print], parameter[call[constant[Query Text Sentiment Score: {} ].format, parameter[name[response].query_result.sentiment_analysis_result.query_text_sentiment.score]]]] call[name[print], parameter[call[constant[Query Text Sentiment Magnitude: {} ].format, parameter[name[response].query_result.sentiment_analysis_result.query_text_sentiment.magnitude]]]]
keyword[def] identifier[detect_intent_with_sentiment_analysis] ( identifier[project_id] , identifier[session_id] , identifier[texts] , identifier[language_code] ): literal[string] keyword[import] identifier[dialogflow_v2beta1] keyword[as] identifier[dialogflow] identifier[session_client] = identifier[dialogflow] . identifier[SessionsClient] () identifier[session_path] = identifier[session_client] . identifier[session_path] ( identifier[project_id] , identifier[session_id] ) identifier[print] ( literal[string] . identifier[format] ( identifier[session_path] )) keyword[for] identifier[text] keyword[in] identifier[texts] : identifier[text_input] = identifier[dialogflow] . identifier[types] . identifier[TextInput] ( identifier[text] = identifier[text] , identifier[language_code] = identifier[language_code] ) identifier[query_input] = identifier[dialogflow] . identifier[types] . identifier[QueryInput] ( identifier[text] = identifier[text_input] ) identifier[sentiment_config] = identifier[dialogflow] . identifier[types] . identifier[SentimentAnalysisRequestConfig] ( identifier[analyze_query_text_sentiment] = keyword[True] ) identifier[query_params] = identifier[dialogflow] . identifier[types] . identifier[QueryParameters] ( identifier[sentiment_analysis_request_config] = identifier[sentiment_config] ) identifier[response] = identifier[session_client] . identifier[detect_intent] ( identifier[session] = identifier[session_path] , identifier[query_input] = identifier[query_input] , identifier[query_params] = identifier[query_params] ) identifier[print] ( literal[string] * literal[int] ) identifier[print] ( literal[string] . identifier[format] ( identifier[response] . identifier[query_result] . identifier[query_text] )) identifier[print] ( literal[string] . identifier[format] ( identifier[response] . identifier[query_result] . identifier[intent] . identifier[display_name] , identifier[response] . identifier[query_result] . identifier[intent_detection_confidence] )) identifier[print] ( literal[string] . identifier[format] ( identifier[response] . identifier[query_result] . identifier[fulfillment_text] )) identifier[print] ( literal[string] . identifier[format] ( identifier[response] . identifier[query_result] . identifier[sentiment_analysis_result] . identifier[query_text_sentiment] . identifier[score] )) identifier[print] ( literal[string] . identifier[format] ( identifier[response] . identifier[query_result] . identifier[sentiment_analysis_result] . identifier[query_text_sentiment] . identifier[magnitude] ))
def detect_intent_with_sentiment_analysis(project_id, session_id, texts, language_code): """Returns the result of detect intent with texts as inputs and analyzes the sentiment of the query text. Using the same `session_id` between requests allows continuation of the conversaion.""" import dialogflow_v2beta1 as dialogflow session_client = dialogflow.SessionsClient() session_path = session_client.session_path(project_id, session_id) print('Session path: {}\n'.format(session_path)) for text in texts: text_input = dialogflow.types.TextInput(text=text, language_code=language_code) query_input = dialogflow.types.QueryInput(text=text_input) # Enable sentiment analysis sentiment_config = dialogflow.types.SentimentAnalysisRequestConfig(analyze_query_text_sentiment=True) # Set the query parameters with sentiment analysis query_params = dialogflow.types.QueryParameters(sentiment_analysis_request_config=sentiment_config) response = session_client.detect_intent(session=session_path, query_input=query_input, query_params=query_params) print('=' * 20) print('Query text: {}'.format(response.query_result.query_text)) print('Detected intent: {} (confidence: {})\n'.format(response.query_result.intent.display_name, response.query_result.intent_detection_confidence)) print('Fulfillment text: {}\n'.format(response.query_result.fulfillment_text)) # Score between -1.0 (negative sentiment) and 1.0 (positive sentiment). print('Query Text Sentiment Score: {}\n'.format(response.query_result.sentiment_analysis_result.query_text_sentiment.score)) print('Query Text Sentiment Magnitude: {}\n'.format(response.query_result.sentiment_analysis_result.query_text_sentiment.magnitude)) # depends on [control=['for'], data=['text']]
def _check_for_life_signs(self): """Check Connection for life signs. First check if any data has been sent, if not send a heartbeat to the remote server. If we have not received any data what so ever within two intervals, we need to raise an exception so that we can close the connection. :rtype: bool """ if not self._running.is_set(): return False if self._writes_since_check == 0: self.send_heartbeat_impl() self._lock.acquire() try: if self._reads_since_check == 0: self._threshold += 1 if self._threshold >= 2: self._running.clear() self._raise_or_append_exception() return False else: self._threshold = 0 finally: self._reads_since_check = 0 self._writes_since_check = 0 self._lock.release() return self._start_new_timer()
def function[_check_for_life_signs, parameter[self]]: constant[Check Connection for life signs. First check if any data has been sent, if not send a heartbeat to the remote server. If we have not received any data what so ever within two intervals, we need to raise an exception so that we can close the connection. :rtype: bool ] if <ast.UnaryOp object at 0x7da18ede72e0> begin[:] return[constant[False]] if compare[name[self]._writes_since_check equal[==] constant[0]] begin[:] call[name[self].send_heartbeat_impl, parameter[]] call[name[self]._lock.acquire, parameter[]] <ast.Try object at 0x7da18ede64d0> return[call[name[self]._start_new_timer, parameter[]]]
keyword[def] identifier[_check_for_life_signs] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_running] . identifier[is_set] (): keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_writes_since_check] == literal[int] : identifier[self] . identifier[send_heartbeat_impl] () identifier[self] . identifier[_lock] . identifier[acquire] () keyword[try] : keyword[if] identifier[self] . identifier[_reads_since_check] == literal[int] : identifier[self] . identifier[_threshold] += literal[int] keyword[if] identifier[self] . identifier[_threshold] >= literal[int] : identifier[self] . identifier[_running] . identifier[clear] () identifier[self] . identifier[_raise_or_append_exception] () keyword[return] keyword[False] keyword[else] : identifier[self] . identifier[_threshold] = literal[int] keyword[finally] : identifier[self] . identifier[_reads_since_check] = literal[int] identifier[self] . identifier[_writes_since_check] = literal[int] identifier[self] . identifier[_lock] . identifier[release] () keyword[return] identifier[self] . identifier[_start_new_timer] ()
def _check_for_life_signs(self): """Check Connection for life signs. First check if any data has been sent, if not send a heartbeat to the remote server. If we have not received any data what so ever within two intervals, we need to raise an exception so that we can close the connection. :rtype: bool """ if not self._running.is_set(): return False # depends on [control=['if'], data=[]] if self._writes_since_check == 0: self.send_heartbeat_impl() # depends on [control=['if'], data=[]] self._lock.acquire() try: if self._reads_since_check == 0: self._threshold += 1 if self._threshold >= 2: self._running.clear() self._raise_or_append_exception() return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: self._threshold = 0 # depends on [control=['try'], data=[]] finally: self._reads_since_check = 0 self._writes_since_check = 0 self._lock.release() return self._start_new_timer()
def attr_triple(value): """ Check that interprets the value as `urwid.AttrSpec` triple for the colour modes 1,16 and 256. It assumes a <6 tuple of attribute strings for mono foreground, mono background, 16c fg, 16c bg, 256 fg and 256 bg respectively. If any of these are missing, we downgrade to the next lower available pair, defaulting to 'default'. :raises: VdtValueTooLongError, VdtTypeError :rtype: triple of `urwid.AttrSpec` """ keys = ['dfg', 'dbg', '1fg', '1bg', '16fg', '16bg', '256fg', '256bg'] acc = {} if not isinstance(value, (list, tuple)): value = value, if len(value) > 6: raise VdtValueTooLongError(value) # ensure we have exactly 6 attribute strings attrstrings = (value + (6 - len(value)) * [None])[:6] # add fallbacks for the empty list attrstrings = (2 * ['default']) + attrstrings for i, value in enumerate(attrstrings): if value: acc[keys[i]] = value else: acc[keys[i]] = acc[keys[i - 2]] try: mono = AttrSpec(acc['1fg'], acc['1bg'], 1) normal = AttrSpec(acc['16fg'], acc['16bg'], 16) high = AttrSpec(acc['256fg'], acc['256bg'], 256) except AttrSpecError as e: raise ValidateError(str(e)) return mono, normal, high
def function[attr_triple, parameter[value]]: constant[ Check that interprets the value as `urwid.AttrSpec` triple for the colour modes 1,16 and 256. It assumes a <6 tuple of attribute strings for mono foreground, mono background, 16c fg, 16c bg, 256 fg and 256 bg respectively. If any of these are missing, we downgrade to the next lower available pair, defaulting to 'default'. :raises: VdtValueTooLongError, VdtTypeError :rtype: triple of `urwid.AttrSpec` ] variable[keys] assign[=] list[[<ast.Constant object at 0x7da1b07ce920>, <ast.Constant object at 0x7da1b07ce440>, <ast.Constant object at 0x7da1b07cc1c0>, <ast.Constant object at 0x7da1b07cdfc0>, <ast.Constant object at 0x7da1b07cc670>, <ast.Constant object at 0x7da1b07cd750>, <ast.Constant object at 0x7da1b07cd720>, <ast.Constant object at 0x7da1b07cd990>]] variable[acc] assign[=] dictionary[[], []] if <ast.UnaryOp object at 0x7da1b07ce2f0> begin[:] variable[value] assign[=] tuple[[<ast.Name object at 0x7da1b07cf640>]] if compare[call[name[len], parameter[name[value]]] greater[>] constant[6]] begin[:] <ast.Raise object at 0x7da1b07ce170> variable[attrstrings] assign[=] call[binary_operation[name[value] + binary_operation[binary_operation[constant[6] - call[name[len], parameter[name[value]]]] * list[[<ast.Constant object at 0x7da1b07cd480>]]]]][<ast.Slice object at 0x7da1b07cdcf0>] variable[attrstrings] assign[=] binary_operation[binary_operation[constant[2] * list[[<ast.Constant object at 0x7da1b07ccfa0>]]] + name[attrstrings]] for taget[tuple[[<ast.Name object at 0x7da1b07cdd80>, <ast.Name object at 0x7da1b07cd660>]]] in starred[call[name[enumerate], parameter[name[attrstrings]]]] begin[:] if name[value] begin[:] call[name[acc]][call[name[keys]][name[i]]] assign[=] name[value] <ast.Try object at 0x7da1b07ce5c0> return[tuple[[<ast.Name object at 0x7da1b0796e60>, <ast.Name object at 0x7da1b07947f0>, <ast.Name object at 0x7da1b0797820>]]]
keyword[def] identifier[attr_triple] ( identifier[value] ): literal[string] identifier[keys] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[acc] ={} keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )): identifier[value] = identifier[value] , keyword[if] identifier[len] ( identifier[value] )> literal[int] : keyword[raise] identifier[VdtValueTooLongError] ( identifier[value] ) identifier[attrstrings] =( identifier[value] +( literal[int] - identifier[len] ( identifier[value] ))*[ keyword[None] ])[: literal[int] ] identifier[attrstrings] =( literal[int] *[ literal[string] ])+ identifier[attrstrings] keyword[for] identifier[i] , identifier[value] keyword[in] identifier[enumerate] ( identifier[attrstrings] ): keyword[if] identifier[value] : identifier[acc] [ identifier[keys] [ identifier[i] ]]= identifier[value] keyword[else] : identifier[acc] [ identifier[keys] [ identifier[i] ]]= identifier[acc] [ identifier[keys] [ identifier[i] - literal[int] ]] keyword[try] : identifier[mono] = identifier[AttrSpec] ( identifier[acc] [ literal[string] ], identifier[acc] [ literal[string] ], literal[int] ) identifier[normal] = identifier[AttrSpec] ( identifier[acc] [ literal[string] ], identifier[acc] [ literal[string] ], literal[int] ) identifier[high] = identifier[AttrSpec] ( identifier[acc] [ literal[string] ], identifier[acc] [ literal[string] ], literal[int] ) keyword[except] identifier[AttrSpecError] keyword[as] identifier[e] : keyword[raise] identifier[ValidateError] ( identifier[str] ( identifier[e] )) keyword[return] identifier[mono] , identifier[normal] , identifier[high]
def attr_triple(value): """ Check that interprets the value as `urwid.AttrSpec` triple for the colour modes 1,16 and 256. It assumes a <6 tuple of attribute strings for mono foreground, mono background, 16c fg, 16c bg, 256 fg and 256 bg respectively. If any of these are missing, we downgrade to the next lower available pair, defaulting to 'default'. :raises: VdtValueTooLongError, VdtTypeError :rtype: triple of `urwid.AttrSpec` """ keys = ['dfg', 'dbg', '1fg', '1bg', '16fg', '16bg', '256fg', '256bg'] acc = {} if not isinstance(value, (list, tuple)): value = (value,) # depends on [control=['if'], data=[]] if len(value) > 6: raise VdtValueTooLongError(value) # depends on [control=['if'], data=[]] # ensure we have exactly 6 attribute strings attrstrings = (value + (6 - len(value)) * [None])[:6] # add fallbacks for the empty list attrstrings = 2 * ['default'] + attrstrings for (i, value) in enumerate(attrstrings): if value: acc[keys[i]] = value # depends on [control=['if'], data=[]] else: acc[keys[i]] = acc[keys[i - 2]] # depends on [control=['for'], data=[]] try: mono = AttrSpec(acc['1fg'], acc['1bg'], 1) normal = AttrSpec(acc['16fg'], acc['16bg'], 16) high = AttrSpec(acc['256fg'], acc['256bg'], 256) # depends on [control=['try'], data=[]] except AttrSpecError as e: raise ValidateError(str(e)) # depends on [control=['except'], data=['e']] return (mono, normal, high)
def users_for_perm( cls, instance, perm_name, user_ids=None, group_ids=None, limit_group_permissions=False, skip_group_perms=False, db_session=None, ): """ return PermissionTuples for users AND groups that have given permission for the resource, perm_name is __any_permission__ then users with any permission will be listed :param instance: :param perm_name: :param user_ids: limits the permissions to specific user ids :param group_ids: limits the permissions to specific group ids :param limit_group_permissions: should be used if we do not want to have user objects returned for group permissions, this might cause performance issues for big groups :param skip_group_perms: do not attach group permissions to the resultset :param db_session: :return: """ # noqa db_session = get_db_session(db_session, instance) users_perms = resource_permissions_for_users( cls.models_proxy, [perm_name], [instance.resource_id], user_ids=user_ids, group_ids=group_ids, limit_group_permissions=limit_group_permissions, skip_group_perms=skip_group_perms, db_session=db_session, ) if instance.owner_user_id: users_perms.append( PermissionTuple( instance.owner, ALL_PERMISSIONS, "user", None, instance, True, True ) ) if instance.owner_group_id and not skip_group_perms: for user in instance.owner_group.users: users_perms.append( PermissionTuple( user, ALL_PERMISSIONS, "group", instance.owner_group, instance, True, True, ) ) return users_perms
def function[users_for_perm, parameter[cls, instance, perm_name, user_ids, group_ids, limit_group_permissions, skip_group_perms, db_session]]: constant[ return PermissionTuples for users AND groups that have given permission for the resource, perm_name is __any_permission__ then users with any permission will be listed :param instance: :param perm_name: :param user_ids: limits the permissions to specific user ids :param group_ids: limits the permissions to specific group ids :param limit_group_permissions: should be used if we do not want to have user objects returned for group permissions, this might cause performance issues for big groups :param skip_group_perms: do not attach group permissions to the resultset :param db_session: :return: ] variable[db_session] assign[=] call[name[get_db_session], parameter[name[db_session], name[instance]]] variable[users_perms] assign[=] call[name[resource_permissions_for_users], parameter[name[cls].models_proxy, list[[<ast.Name object at 0x7da1b0faf370>]], list[[<ast.Attribute object at 0x7da1b0fae410>]]]] if name[instance].owner_user_id begin[:] call[name[users_perms].append, parameter[call[name[PermissionTuple], parameter[name[instance].owner, name[ALL_PERMISSIONS], constant[user], constant[None], name[instance], constant[True], constant[True]]]]] if <ast.BoolOp object at 0x7da1b0fe43a0> begin[:] for taget[name[user]] in starred[name[instance].owner_group.users] begin[:] call[name[users_perms].append, parameter[call[name[PermissionTuple], parameter[name[user], name[ALL_PERMISSIONS], constant[group], name[instance].owner_group, name[instance], constant[True], constant[True]]]]] return[name[users_perms]]
keyword[def] identifier[users_for_perm] ( identifier[cls] , identifier[instance] , identifier[perm_name] , identifier[user_ids] = keyword[None] , identifier[group_ids] = keyword[None] , identifier[limit_group_permissions] = keyword[False] , identifier[skip_group_perms] = keyword[False] , identifier[db_session] = keyword[None] , ): literal[string] identifier[db_session] = identifier[get_db_session] ( identifier[db_session] , identifier[instance] ) identifier[users_perms] = identifier[resource_permissions_for_users] ( identifier[cls] . identifier[models_proxy] , [ identifier[perm_name] ], [ identifier[instance] . identifier[resource_id] ], identifier[user_ids] = identifier[user_ids] , identifier[group_ids] = identifier[group_ids] , identifier[limit_group_permissions] = identifier[limit_group_permissions] , identifier[skip_group_perms] = identifier[skip_group_perms] , identifier[db_session] = identifier[db_session] , ) keyword[if] identifier[instance] . identifier[owner_user_id] : identifier[users_perms] . identifier[append] ( identifier[PermissionTuple] ( identifier[instance] . identifier[owner] , identifier[ALL_PERMISSIONS] , literal[string] , keyword[None] , identifier[instance] , keyword[True] , keyword[True] ) ) keyword[if] identifier[instance] . identifier[owner_group_id] keyword[and] keyword[not] identifier[skip_group_perms] : keyword[for] identifier[user] keyword[in] identifier[instance] . identifier[owner_group] . identifier[users] : identifier[users_perms] . identifier[append] ( identifier[PermissionTuple] ( identifier[user] , identifier[ALL_PERMISSIONS] , literal[string] , identifier[instance] . identifier[owner_group] , identifier[instance] , keyword[True] , keyword[True] , ) ) keyword[return] identifier[users_perms]
def users_for_perm(cls, instance, perm_name, user_ids=None, group_ids=None, limit_group_permissions=False, skip_group_perms=False, db_session=None): """ return PermissionTuples for users AND groups that have given permission for the resource, perm_name is __any_permission__ then users with any permission will be listed :param instance: :param perm_name: :param user_ids: limits the permissions to specific user ids :param group_ids: limits the permissions to specific group ids :param limit_group_permissions: should be used if we do not want to have user objects returned for group permissions, this might cause performance issues for big groups :param skip_group_perms: do not attach group permissions to the resultset :param db_session: :return: """ # noqa db_session = get_db_session(db_session, instance) users_perms = resource_permissions_for_users(cls.models_proxy, [perm_name], [instance.resource_id], user_ids=user_ids, group_ids=group_ids, limit_group_permissions=limit_group_permissions, skip_group_perms=skip_group_perms, db_session=db_session) if instance.owner_user_id: users_perms.append(PermissionTuple(instance.owner, ALL_PERMISSIONS, 'user', None, instance, True, True)) # depends on [control=['if'], data=[]] if instance.owner_group_id and (not skip_group_perms): for user in instance.owner_group.users: users_perms.append(PermissionTuple(user, ALL_PERMISSIONS, 'group', instance.owner_group, instance, True, True)) # depends on [control=['for'], data=['user']] # depends on [control=['if'], data=[]] return users_perms
def get(self, name, default=None): """Get the value at ``name`` for this :class:`Config` container The returned value is obtained from: * the value at ``name`` in the :attr:`settings` dictionary if available. * the value at ``name`` in the :attr:`params` dictionary if available. * the ``default`` value. """ try: return self._get(name, default) except KeyError: return default
def function[get, parameter[self, name, default]]: constant[Get the value at ``name`` for this :class:`Config` container The returned value is obtained from: * the value at ``name`` in the :attr:`settings` dictionary if available. * the value at ``name`` in the :attr:`params` dictionary if available. * the ``default`` value. ] <ast.Try object at 0x7da18bc734f0>
keyword[def] identifier[get] ( identifier[self] , identifier[name] , identifier[default] = keyword[None] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[_get] ( identifier[name] , identifier[default] ) keyword[except] identifier[KeyError] : keyword[return] identifier[default]
def get(self, name, default=None): """Get the value at ``name`` for this :class:`Config` container The returned value is obtained from: * the value at ``name`` in the :attr:`settings` dictionary if available. * the value at ``name`` in the :attr:`params` dictionary if available. * the ``default`` value. """ try: return self._get(name, default) # depends on [control=['try'], data=[]] except KeyError: return default # depends on [control=['except'], data=[]]
def next(self): """Next that shows progress in statusbar for each <freq> cells""" self.progress_status() # Check abortes state and raise StopIteration if aborted if self.aborted: statustext = _("File loading aborted.") post_command_event(self.main_window, self.main_window.StatusBarMsg, text=statustext) raise StopIteration return self.parent_cls.next(self)
def function[next, parameter[self]]: constant[Next that shows progress in statusbar for each <freq> cells] call[name[self].progress_status, parameter[]] if name[self].aborted begin[:] variable[statustext] assign[=] call[name[_], parameter[constant[File loading aborted.]]] call[name[post_command_event], parameter[name[self].main_window, name[self].main_window.StatusBarMsg]] <ast.Raise object at 0x7da1b26ae3e0> return[call[name[self].parent_cls.next, parameter[name[self]]]]
keyword[def] identifier[next] ( identifier[self] ): literal[string] identifier[self] . identifier[progress_status] () keyword[if] identifier[self] . identifier[aborted] : identifier[statustext] = identifier[_] ( literal[string] ) identifier[post_command_event] ( identifier[self] . identifier[main_window] , identifier[self] . identifier[main_window] . identifier[StatusBarMsg] , identifier[text] = identifier[statustext] ) keyword[raise] identifier[StopIteration] keyword[return] identifier[self] . identifier[parent_cls] . identifier[next] ( identifier[self] )
def next(self): """Next that shows progress in statusbar for each <freq> cells""" self.progress_status() # Check abortes state and raise StopIteration if aborted if self.aborted: statustext = _('File loading aborted.') post_command_event(self.main_window, self.main_window.StatusBarMsg, text=statustext) raise StopIteration # depends on [control=['if'], data=[]] return self.parent_cls.next(self)
def _rr_line(self, section): """Process one line from the text format answer, authority, or additional data sections. """ deleting = None # Name token = self.tok.get(want_leading = True) if not token.is_whitespace(): self.last_name = dns.name.from_text(token.value, None) name = self.last_name token = self.tok.get() if not token.is_identifier(): raise dns.exception.SyntaxError # TTL try: ttl = int(token.value, 0) token = self.tok.get() if not token.is_identifier(): raise dns.exception.SyntaxError except dns.exception.SyntaxError: raise dns.exception.SyntaxError except Exception: ttl = 0 # Class try: rdclass = dns.rdataclass.from_text(token.value) token = self.tok.get() if not token.is_identifier(): raise dns.exception.SyntaxError if rdclass == dns.rdataclass.ANY or rdclass == dns.rdataclass.NONE: deleting = rdclass rdclass = self.zone_rdclass except dns.exception.SyntaxError: raise dns.exception.SyntaxError except Exception: rdclass = dns.rdataclass.IN # Type rdtype = dns.rdatatype.from_text(token.value) token = self.tok.get() if not token.is_eol_or_eof(): self.tok.unget(token) rd = dns.rdata.from_text(rdclass, rdtype, self.tok, None) covers = rd.covers() else: rd = None covers = dns.rdatatype.NONE rrset = self.message.find_rrset(section, name, rdclass, rdtype, covers, deleting, True, self.updating) if not rd is None: rrset.add(rd, ttl)
def function[_rr_line, parameter[self, section]]: constant[Process one line from the text format answer, authority, or additional data sections. ] variable[deleting] assign[=] constant[None] variable[token] assign[=] call[name[self].tok.get, parameter[]] if <ast.UnaryOp object at 0x7da18fe931f0> begin[:] name[self].last_name assign[=] call[name[dns].name.from_text, parameter[name[token].value, constant[None]]] variable[name] assign[=] name[self].last_name variable[token] assign[=] call[name[self].tok.get, parameter[]] if <ast.UnaryOp object at 0x7da18fe91b10> begin[:] <ast.Raise object at 0x7da18fe91030> <ast.Try object at 0x7da18fe92f80> <ast.Try object at 0x7da18fe90cd0> variable[rdtype] assign[=] call[name[dns].rdatatype.from_text, parameter[name[token].value]] variable[token] assign[=] call[name[self].tok.get, parameter[]] if <ast.UnaryOp object at 0x7da18fe91ff0> begin[:] call[name[self].tok.unget, parameter[name[token]]] variable[rd] assign[=] call[name[dns].rdata.from_text, parameter[name[rdclass], name[rdtype], name[self].tok, constant[None]]] variable[covers] assign[=] call[name[rd].covers, parameter[]] variable[rrset] assign[=] call[name[self].message.find_rrset, parameter[name[section], name[name], name[rdclass], name[rdtype], name[covers], name[deleting], constant[True], name[self].updating]] if <ast.UnaryOp object at 0x7da18dc07250> begin[:] call[name[rrset].add, parameter[name[rd], name[ttl]]]
keyword[def] identifier[_rr_line] ( identifier[self] , identifier[section] ): literal[string] identifier[deleting] = keyword[None] identifier[token] = identifier[self] . identifier[tok] . identifier[get] ( identifier[want_leading] = keyword[True] ) keyword[if] keyword[not] identifier[token] . identifier[is_whitespace] (): identifier[self] . identifier[last_name] = identifier[dns] . identifier[name] . identifier[from_text] ( identifier[token] . identifier[value] , keyword[None] ) identifier[name] = identifier[self] . identifier[last_name] identifier[token] = identifier[self] . identifier[tok] . identifier[get] () keyword[if] keyword[not] identifier[token] . identifier[is_identifier] (): keyword[raise] identifier[dns] . identifier[exception] . identifier[SyntaxError] keyword[try] : identifier[ttl] = identifier[int] ( identifier[token] . identifier[value] , literal[int] ) identifier[token] = identifier[self] . identifier[tok] . identifier[get] () keyword[if] keyword[not] identifier[token] . identifier[is_identifier] (): keyword[raise] identifier[dns] . identifier[exception] . identifier[SyntaxError] keyword[except] identifier[dns] . identifier[exception] . identifier[SyntaxError] : keyword[raise] identifier[dns] . identifier[exception] . identifier[SyntaxError] keyword[except] identifier[Exception] : identifier[ttl] = literal[int] keyword[try] : identifier[rdclass] = identifier[dns] . identifier[rdataclass] . identifier[from_text] ( identifier[token] . identifier[value] ) identifier[token] = identifier[self] . identifier[tok] . identifier[get] () keyword[if] keyword[not] identifier[token] . identifier[is_identifier] (): keyword[raise] identifier[dns] . identifier[exception] . identifier[SyntaxError] keyword[if] identifier[rdclass] == identifier[dns] . identifier[rdataclass] . identifier[ANY] keyword[or] identifier[rdclass] == identifier[dns] . identifier[rdataclass] . identifier[NONE] : identifier[deleting] = identifier[rdclass] identifier[rdclass] = identifier[self] . identifier[zone_rdclass] keyword[except] identifier[dns] . identifier[exception] . identifier[SyntaxError] : keyword[raise] identifier[dns] . identifier[exception] . identifier[SyntaxError] keyword[except] identifier[Exception] : identifier[rdclass] = identifier[dns] . identifier[rdataclass] . identifier[IN] identifier[rdtype] = identifier[dns] . identifier[rdatatype] . identifier[from_text] ( identifier[token] . identifier[value] ) identifier[token] = identifier[self] . identifier[tok] . identifier[get] () keyword[if] keyword[not] identifier[token] . identifier[is_eol_or_eof] (): identifier[self] . identifier[tok] . identifier[unget] ( identifier[token] ) identifier[rd] = identifier[dns] . identifier[rdata] . identifier[from_text] ( identifier[rdclass] , identifier[rdtype] , identifier[self] . identifier[tok] , keyword[None] ) identifier[covers] = identifier[rd] . identifier[covers] () keyword[else] : identifier[rd] = keyword[None] identifier[covers] = identifier[dns] . identifier[rdatatype] . identifier[NONE] identifier[rrset] = identifier[self] . identifier[message] . identifier[find_rrset] ( identifier[section] , identifier[name] , identifier[rdclass] , identifier[rdtype] , identifier[covers] , identifier[deleting] , keyword[True] , identifier[self] . identifier[updating] ) keyword[if] keyword[not] identifier[rd] keyword[is] keyword[None] : identifier[rrset] . identifier[add] ( identifier[rd] , identifier[ttl] )
def _rr_line(self, section): """Process one line from the text format answer, authority, or additional data sections. """ deleting = None # Name token = self.tok.get(want_leading=True) if not token.is_whitespace(): self.last_name = dns.name.from_text(token.value, None) # depends on [control=['if'], data=[]] name = self.last_name token = self.tok.get() if not token.is_identifier(): raise dns.exception.SyntaxError # depends on [control=['if'], data=[]] # TTL try: ttl = int(token.value, 0) token = self.tok.get() if not token.is_identifier(): raise dns.exception.SyntaxError # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except dns.exception.SyntaxError: raise dns.exception.SyntaxError # depends on [control=['except'], data=[]] except Exception: ttl = 0 # depends on [control=['except'], data=[]] # Class try: rdclass = dns.rdataclass.from_text(token.value) token = self.tok.get() if not token.is_identifier(): raise dns.exception.SyntaxError # depends on [control=['if'], data=[]] if rdclass == dns.rdataclass.ANY or rdclass == dns.rdataclass.NONE: deleting = rdclass rdclass = self.zone_rdclass # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except dns.exception.SyntaxError: raise dns.exception.SyntaxError # depends on [control=['except'], data=[]] except Exception: rdclass = dns.rdataclass.IN # depends on [control=['except'], data=[]] # Type rdtype = dns.rdatatype.from_text(token.value) token = self.tok.get() if not token.is_eol_or_eof(): self.tok.unget(token) rd = dns.rdata.from_text(rdclass, rdtype, self.tok, None) covers = rd.covers() # depends on [control=['if'], data=[]] else: rd = None covers = dns.rdatatype.NONE rrset = self.message.find_rrset(section, name, rdclass, rdtype, covers, deleting, True, self.updating) if not rd is None: rrset.add(rd, ttl) # depends on [control=['if'], data=[]]