code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _kernel_shape(self, input_shape): kernel_size_iter = iter(self._kernel_size) return [self._filters if c == 'O' else input_shape[self._lhs_spec.index('C')] if c == 'I' else next(kernel_size_iter) for c in self._rhs_spec]
Helper to calculate the kernel shape.
def _process_state_final_run(self, job_record):
    """Process a job record that is in STATE_FINAL_RUN.

    Transitions the job according to the state of its related unit of work:
    processed -> STATE_PROCESSED, noop -> STATE_NOOP, canceled ->
    STATE_SKIPPED. Invalid UOWs are only logged (the Garbage Collector is
    expected to recycle or cancel them); any other UOW state is logged as a
    suppressed UOW creation. Finally the process timetable tree is rebuilt.
    """
    uow = self.uow_dao.get_one(job_record.related_unit_of_work)
    if uow.is_processed:
        self.update_job(job_record, uow, job.STATE_PROCESSED)
    elif uow.is_noop:
        self.update_job(job_record, uow, job.STATE_NOOP)
    elif uow.is_canceled:
        self.update_job(job_record, uow, job.STATE_SKIPPED)
    elif uow.is_invalid:
        msg = 'Job {0}: UOW for {1}@{2} is in {3}; ' \
              'relying on the Garbage Collector to either recycle or cancel the UOW.' \
              .format(job_record.db_id, job_record.process_name, job_record.timeperiod, uow.state)
        self._log_message(INFO, job_record.process_name, job_record.timeperiod, msg)
    else:
        # Neither terminal nor invalid: do not create a new UOW, just record why.
        msg = 'Suppressed creating UOW for {0}@{1}; Job is in {2}; uow is in {3}' \
              .format(job_record.process_name, job_record.timeperiod, job_record.state, uow.state)
        self._log_message(INFO, job_record.process_name, job_record.timeperiod, msg)
    # Rebuild the tree regardless of which branch ran above.
    timetable_tree = self.timetable.get_tree(job_record.process_name)
    timetable_tree.build_tree()
method takes care of processing job records in STATE_FINAL_RUN state
def prepare(self): headers = self.prepare_headers(self.http_headers, self.soap_action) soap_header = self.prepare_soap_header(self.soap_header) soap_body = self.prepare_soap_body( self.method, self.parameters, self.namespace ) data = self.prepare_soap_envelope(soap_header, soap_body) return (headers, data)
Prepare the SOAP message for sending to the server.
def _add_monitor(monitors, handle, callback, devices, events): for conn_string in devices: data = monitors.get(conn_string) if data is None: data = dict() monitors[conn_string] = data for event in events: event_dict = data.get(event) if event_dict is None: event_dict = dict() data[event] = event_dict event_dict[handle] = callback
Add the given monitor to the listed devices and events.
def count_records(self, record_counter, file):
    """Count the number of viewed records.

    Increments ``record_counter`` per record id (element 2 of each record),
    stores the total number of events seen in self.stat['user_record_events']
    and returns the (mutated) counter.
    """
    counter = record_counter
    total_events = 0
    for record in file.get_records():
        record_id = record[2]
        counter[record_id] = counter.get(record_id, 0) + 1
        total_events += 1
    self.stat['user_record_events'] = total_events
    return counter
Count the number of viewed records.
def cssify(css_dict): css = '' for key, value in dict_items(css_dict): css += '{key} {{ '.format(key=key) for field, field_value in dict_items(value): css += ' {field}: {field_value};'.format(field=field, field_value=field_value) css += '} ' return css.strip()
Function to get CartoCSS from Python dicts
def min_percent(self, value: float) -> 'Size':
    """Set the minimum percentage of free space to use; returns self (fluent API)."""
    raise_not_number(value)  # validation helper defined elsewhere in this module
    self.minimum = '{}%'.format(value)
    return self
Set the minimum percentage of free space to use.
def size_of_type(abi_type):
    """Return the size in bits of *abi_type*, or None for dynamic/array types."""
    # Dynamic-length or array types have no fixed bit size.
    if 'string' in abi_type or 'byte' in abi_type or '[' in abi_type:
        return None
    fixed_sizes = {'bool': 8, 'address': 160}
    if abi_type in fixed_sizes:
        return fixed_sizes[abi_type]
    # Remaining types encode their width numerically, e.g. uint256 -> 256.
    return int(re.sub(r"\D", "", abi_type))
Returns size in bits of abi_type
def geometry_from_json(obj):
    """Try to find a geometry in the provided (Geo)JSON object.

    Accepts a FeatureCollection (uses its first feature), a Feature, or a
    bare geometry. Returns the geometry dict, or None when no geometry with
    coordinates can be located.
    """
    kind = obj.get('type', None)
    if not kind:
        return None
    if kind == 'FeatureCollection':
        features = obj.get('features', [])
        if not features:
            return None
        obj = features[0]
        kind = obj.get('type', None)
    geom = obj['geometry'] if kind == 'Feature' else obj
    if 'coordinates' in geom:
        return geom
try to find a geometry in the provided JSON object
def register_sigma_task(self, *args, **kwargs):
    """Register a sigma task: delegates to register_task with task_class=SigmaTask."""
    kwargs["task_class"] = SigmaTask
    return self.register_task(*args, **kwargs)
Register a sigma task.
def close(self):
    """Close all pooled connections and disable the pool."""
    # Swap the pool out first so no new connections can be handed out.
    old_pool, self.pool = self.pool, None
    while True:
        try:
            conn = old_pool.get(block=False)
        except queue.Empty:
            return
        if conn:
            conn.close()
Close all pooled connections and disable the pool.
def _calendar_classes_for_occurrence(self, occurrence): classes = [slugify(occurrence.event.polymorphic_ctype.name)] if occurrence.is_all_day: classes.append('is-all-day') if occurrence.is_protected_from_regeneration: classes.append('is-user-modified') if occurrence.is_cancelled: classes.append('is-cancelled') if not occurrence.event.show_in_calendar: classes.append('do-not-show-in-calendar') classes = ['fcc-%s' % class_ for class_ in classes] return classes
Return css classes to be used in admin calendar JSON
def start_output (self): super(DOTLogger, self).start_output() if self.has_part("intro"): self.write_intro() self.writeln() self.writeln(u"digraph G {") self.writeln(u" graph [") self.writeln(u" charset=\"%s\"," % self.get_charset_encoding()) self.writeln(u" ];") self.flush()
Write start of checking info as DOT comment.
def parse(self, parser): lineno = next(parser.stream).lineno context = nodes.ContextReference() kwargs = [] while parser.stream.look().type == lexer.TOKEN_ASSIGN: key = parser.stream.expect(lexer.TOKEN_NAME) next(parser.stream) kwargs.append( nodes.Keyword(key.value, parser.parse_expression()), ) parser.stream.skip_if('comma') body = parser.parse_statements(['name:endactiveurl'], drop_needle=True) args = [context] call_method = self.call_method( 'render_tag', args=args, kwargs=kwargs, ) return nodes.CallBlock(call_method, [], [], body).set_lineno(lineno)
parse content of extension
def _download_pastebin(self): paste_id = self.url.split("/")[-1] url = "https://pastebin.com/raw/" + paste_id return self._download_raw(url)
Download content from Pastebin itself.
def process_formdata(self, valuelist):
    """Process form data normally, then replace empty/falsy values by None."""
    super(EmptyNone, self).process_formdata(valuelist)
    # '' (and other falsy values) become None so the field stores NULL.
    self.data = self.data or None
Replace empty values by None
def upload(remote_location, remotes=None, ignores=None, static_root="/static/", prefix="", dry_run=False): if remotes is None: remotes, ignores = _resources_files( abs_paths=remote_location.startswith('s3://')) if remote_location.startswith('s3://'): from deployutils.s3 import S3Backend backend = S3Backend(remote_location, static_root=static_root, dry_run=dry_run) backend.upload(list_local(remotes, prefix), prefix) else: excludes = [] if ignores: for ignore in ignores: excludes += ['--exclude', ignore] shell_command(['/usr/bin/rsync'] + excludes + ['-pOthrRvz', '--rsync-path', '/usr/bin/rsync'] + remotes + [remote_location], dry_run=dry_run)
Upload resources to a stage server.
def _load_apis(self): helpscout = __import__('helpscout.apis') for class_name in helpscout.apis.__all__: if not class_name.startswith('_'): cls = getattr(helpscout.apis, class_name) api = AuthProxy(self.session, cls) setattr(self, class_name, api) self.__apis__[class_name] = api
Find available APIs and set instances property auth proxies.
def write_template_file(source, target, content):
    """Write a new file from a given pystache template file and content.

    Renders *source* with *content* via format_template_file and writes the
    resulting lines to *target*.
    """
    print(target)
    data = format_template_file(source, content)
    with open(target, 'w') as f:
        for line in data:
            # NOTE(review): encoding non-str lines looks like Python 2 unicode
            # handling; under Python 3 writing bytes to a text-mode file raises
            # TypeError — confirm the target interpreter.
            if type(line) != str:
                line = line.encode('utf-8')
            f.write(line)
Write a new file from a given pystache template file and content
def extract_program_summary(data): from bs4 import BeautifulSoup soup = BeautifulSoup(data, 'html.parser') try: return soup.find( 'div', {'class': 'episode-synopsis'} ).find_all('div')[-1].text.strip() except Exception: _LOGGER.info('No summary found for program: %s', soup.find('a', {'class': 'prog_name'})) return "No summary"
Extract the summary data from a program's detail page
def sync(self):
    """Retrieve zones from the ElkM1.

    Sends the full set of zone queries (az/zd/zp/zs encoders — presumably
    alarm, definition, partition and status requests; confirm against the
    encoder definitions) and then requests the zone text descriptions.
    """
    self.elk.send(az_encode())
    self.elk.send(zd_encode())
    self.elk.send(zp_encode())
    self.elk.send(zs_encode())
    self.get_descriptions(TextDescriptions.ZONE.value)
Retrieve zones from ElkM1
def com_daltonmaag_check_ufolint(font): import subprocess ufolint_cmd = ["ufolint", font] try: subprocess.check_output(ufolint_cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: yield FAIL, ("ufolint failed the UFO source. Output follows :" "\n\n{}\n").format(e.output.decode()) except OSError: yield ERROR, "ufolint is not available!" else: yield PASS, "ufolint passed the UFO source."
Run ufolint on UFO source directory.
def run(self):
    """Run validation over all submissions found in self.source_dir.

    Lists the source directory via gsutil, validates and copies every .zip /
    .tar / .tar.gz submission, logs statistics, saves the id->path mapping and
    optionally writes the sorted container list.
    """
    cmd = ['gsutil', 'ls', os.path.join(self.source_dir, '**')]
    try:
        # Bug fix: check_output returns bytes on Python 3 — decode before
        # splitting into lines.
        files_list = subprocess.check_output(cmd).decode().split('\n')
    except subprocess.CalledProcessError:
        # Bug fixes: the original "Can''t" rendered as "Cant" (implicit string
        # concatenation), and execution fell through with files_list undefined,
        # raising NameError; bail out instead.
        logging.error("Can't read source directory")
        return
    all_submissions = [
        s for s in files_list
        if s.endswith('.zip') or s.endswith('.tar') or s.endswith('.tar.gz')
    ]
    for submission_path in all_submissions:
        self.validate_and_copy_one_submission(submission_path)
    self.stats.log_stats()
    self.save_id_to_path_mapping()
    if self.containers_file:
        with open(self.containers_file, 'w') as f:
            f.write('\n'.join(sorted(self.list_of_containers)))
Runs validation of all submissions.
def view_links(obj):
    """Link to the duplicate overview, submissions page and submission archive.

    Returns a safe HTML fragment with three links for the given assignment.
    """
    # Bug fix: the original pre-interpolated the URLs with '%' and then passed
    # the finished string through format_html(), which marks it safe WITHOUT
    # escaping the interpolated values. Pass the URLs as format_html()
    # arguments so they are escaped properly.
    result = format_html(
        '<a href="{}" style="white-space: nowrap">Show duplicates</a><br/>',
        reverse('duplicates', args=(obj.pk,)))
    result += format_html(
        '<a href="{}" style="white-space: nowrap">Show submissions</a><br/>',
        obj.grading_url())
    result += format_html(
        '<a href="{}" style="white-space: nowrap">Download submissions</a>',
        reverse('assarchive', args=(obj.pk,)))
    return result
Link to performance data and duplicate overview.
def get(self, path): uri = self.config.get_target() + path headers = self._get_headers() logging.debug("URI=GET " + str(uri)) logging.debug("HEADERS=" + str(headers)) response = self.session.get(uri, headers=headers) if response.status_code == 200: return response.json() elif response.status_code == 401: raise predix.admin.cf.config.CloudFoundryLoginError('token invalid') else: response.raise_for_status()
Generic GET with headers
async def execute_fetchall( self, sql: str, parameters: Iterable[Any] = None ) -> Iterable[sqlite3.Row]: if parameters is None: parameters = [] return await self._execute(self._execute_fetchall, sql, parameters)
Helper to execute a query and return all the data.
def _on_process_error(self, error): if self is None: return err = PROCESS_ERROR_STRING[error] self._formatter.append_message(err + '\r\n', output_format=OutputFormat.ErrorMessageFormat)
Display child process error in the text edit.
def dimension_type(self):
    """Return the member of DIMENSION_TYPE appropriate to this dimension.

    Categorical and enum.variable dimensions are resolved by dedicated
    helpers; the remaining enum kinds map directly to DT members.
    """
    kind = self._base_type
    if kind == "categorical":
        return self._resolve_categorical()
    if kind == "enum.variable":
        return self._resolve_array_type()
    # DT members are referenced lazily, branch by branch, on purpose.
    if kind == "enum.datetime":
        return DT.DATETIME
    if kind == "enum.numeric":
        return DT.BINNED_NUMERIC
    if kind == "enum.text":
        return DT.TEXT
    raise NotImplementedError("unrecognized dimension type %s" % kind)
Return member of DIMENSION_TYPE appropriate to dimension_dict.
def created_by_column(self, obj): try: first_addition_logentry = admin.models.LogEntry.objects.filter( object_id=obj.pk, content_type_id=self._get_obj_ct(obj).pk, action_flag=admin.models.ADDITION, ).get() return first_addition_logentry.user except admin.models.LogEntry.DoesNotExist: return None
Return user who first created an item in Django admin
def _send_accum_trace(self, device_uuid): if device_uuid not in self._connections: self._logger.debug("Dropping trace data for device without an active connection, uuid=0x%X", device_uuid) return conn_data = self._connections[device_uuid] trace = conn_data['trace_accum'] if len(trace) > 0: slug = self._build_device_slug(device_uuid) tracing_topic = self.topics.prefix + 'devices/{}/data/tracing'.format(slug) data = {'type': 'notification', 'operation': 'trace'} data['trace'] = binascii.hexlify(trace) data['trace_origin'] = device_uuid self._logger.debug('Publishing trace: (topic=%s)', tracing_topic) self.client.publish(tracing_topic, data) conn_data['trace_scheduled'] = False conn_data['last_trace'] = monotonic() conn_data['trace_accum'] = bytes()
Send whatever accumulated tracing data we have for the device.
async def ping(self, reconnect=True): if self._writer is None and self._reader is None: if reconnect: await self._connect() reconnect = False else: raise Error("Already closed") try: await self._execute_command(COMMAND.COM_PING, "") await self._read_ok_packet() except Exception: if reconnect: await self._connect() await self.ping(False) else: raise
Check if the server is alive
def connect(self, slot):
    """Connect the signal to any callable object.

    Raises ValueError for non-callables. Storage depends on the slot kind:
    partials and lambdas (detected by '<' in __name__) are stored strongly,
    since a weakref to them would die immediately; bound methods are stored
    as a WeakKeyDictionary mapping the instance to the unbound function, so
    the connection dies with the instance; plain functions are stored as
    weak references.
    """
    if not callable(slot):
        raise ValueError("Connection to non-callable '%s' object failed" % slot.__class__.__name__)
    if (isinstance(slot, partial) or '<' in slot.__name__):
        # Strong reference: partial/lambda objects usually have no other owner.
        if slot not in self._slots:
            self._slots.append(slot)
    elif inspect.ismethod(slot):
        # Bound method: keep only a weak link to the receiver instance.
        slotSelf = slot.__self__
        slotDict = weakref.WeakKeyDictionary()
        slotDict[slotSelf] = slot.__func__
        if slotDict not in self._slots:
            self._slots.append(slotDict)
    else:
        # Plain function: weakref so the signal does not keep it alive.
        newSlotRef = weakref.ref(slot)
        if newSlotRef not in self._slots:
            self._slots.append(newSlotRef)
Connects the signal to any callable object
def _update_view(self, p_data): view = self._viewdata_to_view(p_data) if self.column_mode == _APPEND_COLUMN or self.column_mode == _COPY_COLUMN: self._add_column(view) elif self.column_mode == _INSERT_COLUMN: self._add_column(view, self.columns.focus_position) elif self.column_mode == _EDIT_COLUMN: current_column = self.columns.focus current_column.title = p_data['title'] current_column.view = view self._viewwidget_visible = False self._blur_commandline()
Creates a view from the data entered in the view widget.
def emulate_abs(self, x_val, y_val, timeval):
    """Emulate the absolute coordinates of the mouse cursor.

    Returns a pair of "Absolute" events: axis 0x00 carries x, axis 0x01
    carries y.
    """
    make = self.create_event_object
    x_event = make("Absolute", 0x00, x_val, timeval)
    y_event = make("Absolute", 0x01, y_val, timeval)
    return x_event, y_event
Emulate the absolute co-ordinates of the mouse cursor.
def update_color(string, name, style='normal', when='auto'):
    """Replace the existing line with the given colored string.

    Clears the current line, then rewrites it via write_color (both defined
    elsewhere in this module).
    """
    clear()
    write_color(string, name, style, when)
Replace the existing line with the given colored string.
def setup_columns(self): tv = self.view['tv_categories'] tv.set_model(self.model) cell = gtk.CellRendererText() tvcol = gtk.TreeViewColumn('Name', cell) def cell_data_func(col, cell, mod, it): if mod[it][0]: cell.set_property('text', mod[it][0].name) return tvcol.set_cell_data_func(cell, cell_data_func) tv.append_column(tvcol) return
Creates the treeview stuff
def __get_doc_block_parts_wrapper(self): self.__get_doc_block_parts_source() helper = self._get_data_type_helper() parameters = list() for parameter_info in self._parameters: parameters.append( {'parameter_name': parameter_info['name'], 'python_type': helper.column_type_to_python_type(parameter_info), 'data_type_descriptor': parameter_info['data_type_descriptor'], 'description': self.__get_parameter_doc_description(parameter_info['name'])}) self._doc_block_parts_wrapper['description'] = self._doc_block_parts_source['description'] self._doc_block_parts_wrapper['parameters'] = parameters
Generates the DocBlock parts to be used by the wrapper generator.
def getAllElementsOfHirarchy(self): allElements=[] for element in self.getAllElements(): allElements.append(element) if isinstance(element, BaseElement): allElements.extend(element.getAllElementsOfHirarchy()) return allElements
returns ALL elements of the complete hierarchy as a flat list
def relworkdir(self):
    """Return a relative version of the workdir.

    Returns None when no workdir is set; falls back to the absolute path when
    os.path.relpath fails (e.g. different drive on Windows).
    """
    workdir = getattr(self, "workdir", None)
    if workdir is None:
        return None
    try:
        return os.path.relpath(workdir)
    except OSError:
        return workdir
Return a relative version of the workdir
def _do_two_gaussian_fit(freqs, signal, bounds=None): initial = _two_func_initializer(freqs, signal) initial = (initial[0], initial[1], initial[6], initial[7], initial[2], initial[3], initial[10], initial[11]) w = (ut.gaussian(freqs, initial[0], 0.075, 1, 0, 0) + ut.gaussian(freqs, initial[1], 0.075, 1, 0, 0)) func_list = [[ut.gaussian, [0,2,4,6,7], ut.gaussian(freqs, initial[0], 0.075, 1, 0, 0)], [ut.gaussian, [1,3,5,6,7], ut.gaussian(freqs, initial[1], 0.075, 1, 0, 0)]] params, _ = lsq.leastsqbound(mopt.err_func, initial, args=(freqs, np.real(signal), ut.two_gaussian, w, func_list), bounds=bounds) return params
Helper function for the two gaussian fit
def _get_dstk_intersections(self, address, dstk_address): normalized_address = self._normalize(address) normalized_dstk_address = self._normalize(dstk_address) address_uniques = set(normalized_address) - set(normalized_dstk_address) dstk_address_uniques = set(normalized_dstk_address) - set(normalized_address) if self.logger: self.logger.debug("Address Uniques {0}".format(address_uniques)) if self.logger: self.logger.debug("DSTK Address Uniques {0}".format(dstk_address_uniques)) return (len(address_uniques), len(dstk_address_uniques))
Find the unique tokens in the original address and the returned address.
def extract_cosponsors(bill):
    """Return a list of [thomas_id, bill_id, district, state] rows linking
    each cosponsor to the given piece of legislation."""
    logger.debug("Extracting Cosponsors")
    cosponsor_map = []
    bill_id = bill.get('bill_id', None)
    for co in bill.get('cosponsors', []):
        cosponsor_map.append([
            co.get('thomas_id'),
            bill_id,
            co.get('district'),
            co.get('state'),
        ])
    # Bug fix: log-message typo ("Extractioning" -> "Extracting").
    logger.debug("End Extracting Cosponsors")
    return cosponsor_map
Return a list of list relating cosponsors to legislation.
def group_pop(name, app, **kwargs): ctx = Context(**kwargs) ctx.execute_action('group:app:remove', **{ 'storage': ctx.repo.create_secure_service('storage'), 'name': name, 'app': app, })
Remove application from the specified routing group.
def attrget(self, groupname, attrname, rownr):
    """Get the value of an attribute in the given row in a group.

    Thin wrapper delegating to the underlying _attrget implementation.
    """
    return self._attrget(groupname, attrname, rownr)
Get the value of an attribute in the given row in a group.
def load_configuration(self, **kwargs): for key in settings.ACTIVE_URL_KWARGS: kwargs.setdefault(key, settings.ACTIVE_URL_KWARGS[key]) self.css_class = kwargs['css_class'] self.parent_tag = kwargs['parent_tag'] self.menu = kwargs['menu'] self.ignore_params = kwargs['ignore_params']
load configuration, merge with default settings
def DeleteAttribute(self, attribute): if "w" not in self.mode: raise IOError("Deleting attribute %s from read only object." % attribute) if self.mode != "w" and attribute.lock_protected and not self.transaction: raise IOError("Object must be locked to delete attribute %s." % attribute) if attribute in self.synced_attributes: self._to_delete.add(attribute) del self.synced_attributes[attribute] if attribute in self.new_attributes: del self.new_attributes[attribute] if attribute.versioned and attribute.creates_new_object_version: self._new_version = True self._dirty = True
Clears the attribute from this object.
def _execute_commands_from_dir(self, directory): commands = get_commands_from_dir(directory) print('\tAttempting to execute {0} failed commands'.format(len(commands))) return self.execute(commands, ignored_commands=None, execute_fails=True)
Re-attempt to split and execute the failed commands
def show_lbaas_healthmonitor(self, lbaas_healthmonitor, **_params):
    """Fetch information for a single lbaas_healthmonitor via a REST GET."""
    return self.get(self.lbaas_healthmonitor_path % (lbaas_healthmonitor),
                    params=_params)
Fetches information for a lbaas_healthmonitor.
async def await_all():
    """Drain all pending asyncio tasks on the running loop.

    Awaits every scheduled task, ignoring the expected errors raised when a
    task (including the current one) attempts to await itself or an
    un-yielded future.
    """
    # Bug fix: asyncio.Task.all_tasks() was deprecated in 3.7 and removed in
    # 3.9; the module-level asyncio.all_tasks() is the supported replacement.
    tasks = asyncio.all_tasks()
    for task in tasks:
        try:
            await task
        except RuntimeError as e:
            if 'Task cannot await on itself' not in str(e):
                raise e
        except AssertionError as e:
            if 'yield from wasn\'t used with future' not in str(e):
                raise e
Simple utility function that drains all pending tasks
def create(args): from bob.db.utils import session_try_nolock dbfile = args.files[0] if args.recreate: if args.verbose and os.path.exists(dbfile): print('unlinking %s...' % dbfile) if os.path.exists(dbfile): os.unlink(dbfile) if not os.path.exists(os.path.dirname(dbfile)): os.makedirs(os.path.dirname(dbfile)) create_tables(args) s = session_try_nolock(args.type, dbfile, echo=(args.verbose > 2)) add_clients(s, args.verbose) add_files(s, args.imagedir, args.verbose) add_protocols(s, args.verbose) s.commit() s.close()
Creates or re-creates this database
def send_message(self, message, room_id, **kwargs): return SendMessage(settings=self.settings, **kwargs).call( message=message, room_id=room_id, **kwargs )
Send a message to a given room
def _get_value(self, exc_type, exc_value, exc_traceback): stack_info = get_stack_info( iter_traceback_frames(exc_traceback), transformer=self.transform, capture_locals=self.client.capture_locals, ) exc_module = getattr(exc_type, '__module__', None) if exc_module: exc_module = str(exc_module) exc_type = getattr(exc_type, '__name__', '<unknown>') return { 'value': to_unicode(exc_value), 'type': str(exc_type), 'module': to_unicode(exc_module), 'stacktrace': stack_info, }
Convert exception info to a value for the values list.
def update_id_list(self, update_ids_finished_cb): if not self._update_ids_finished_cb: self._update_ids_finished_cb = update_ids_finished_cb self.anchor_ids = [] self.active_anchor_ids = [] self.anchor_data = {} self.nr_of_anchors = 0 self.ids_valid = False self.data_valid = False logger.debug('Updating ids of memory {}'.format(self.id)) self.mem_handler.read(self, LocoMemory2.ADR_ID_LIST, LocoMemory2.ID_LIST_LEN)
Request an update of the id list
def cut(self, buffer): from_, to = self.operator_range(buffer.document) from_ += buffer.cursor_position to += buffer.cursor_position to -= 1 document = Document(buffer.text, to, SelectionState( original_cursor_position=from_, type=self.selection_type)) new_document, clipboard_data = document.cut_selection() return new_document, clipboard_data
Turn text object into `ClipboardData` instance.
def commit_signal(data_id): if not getattr(settings, 'FLOW_MANAGER_DISABLE_AUTO_CALLS', False): immediate = getattr(settings, 'FLOW_MANAGER_SYNC_AUTO_CALLS', False) async_to_sync(manager.communicate)(data_id=data_id, save_settings=False, run_sync=immediate)
Nudge manager at the end of every Data object save event.
def reverse(args): from .query import Database db = Database() output = sys.stdout if args.selftest: from bob.db.utils import null output = null() r = db.reverse(args.path) for f in r: output.write('%d\n' % f.id) if not r: return 1 return 0
Returns a list of file database identifiers given the path stems
def keyevent2tuple(event):
    """Convert a QKeyEvent instance into a tuple of its observable fields."""
    fields = (event.type(), event.key(), event.modifiers(),
              event.text(), event.isAutoRepeat(), event.count())
    return fields
Convert QKeyEvent instance into a tuple
def num_elements(self):
    """Return the total number of elements, or None for incomplete shapes."""
    if not self.is_fully_defined():
        return None
    size = 1
    for dim in self._dims:
        size *= dim.value
    return size
Returns the total number of elements, or none for incomplete shapes.
def getDefaultApplicationForMimeType(self, pchMimeType, pchAppKeyBuffer, unAppKeyBufferLen): fn = self.function_table.getDefaultApplicationForMimeType result = fn(pchMimeType, pchAppKeyBuffer, unAppKeyBufferLen) return result
return the app key that will open this mime type
def _reset_internal(self): super()._reset_internal() self.sim.data.qpos[self._ref_joint_pos_indexes] = self.mujoco_robot.init_qpos if self.has_gripper: self.sim.data.qpos[ self._ref_joint_gripper_actuator_indexes ] = self.gripper.init_qpos
Sets initial pose of arm and grippers.
def finalize(self): ddb_files = list(filter(None, [work.outdir.has_abiext("DDB") for work in self])) out_ddb = self.outdir.path_in("out_DDB") desc = "DDB file merged by %s on %s" % (self.__class__.__name__, time.asctime()) mrgddb = wrappers.Mrgddb(manager=self.manager, verbose=0) mrgddb.merge(self.outdir.path, ddb_files, out_ddb=out_ddb, description=desc) print("Final DDB file available at %s" % out_ddb) retcode = super().finalize() return retcode
This method is called when the flow is completed.
def abort(self, err): if _debug: IOQController._debug("abort %r", err) if (self.state == CTRL_IDLE): if _debug: IOQController._debug(" - idle") return while True: iocb = self.ioQueue.get(block=0) if not iocb: break if _debug: IOQController._debug(" - iocb: %r", iocb) iocb.ioState = ABORTED iocb.ioError = err iocb.trigger() if (self.state != CTRL_IDLE): if _debug: IOQController._debug(" - busy after aborts")
Abort all pending requests.
def oid2name(self, oid):
    """Look up the parameter name for a given OID.

    Populates the OID->Name cache lazily on first use (mutating the existing
    dict in place), then serves lookups from it.
    """
    if not self._oid_lookup:
        for data in self._parameters.values():
            self._oid_lookup[data['OID']] = data['Name']
    return self._oid_lookup[oid]
Look up the parameter name for a given OID
def until(method, timeout=30, message=''):
    """Call *method* repeatedly (about once per second) until it returns a
    truthy value.

    Returns the first truthy value produced. Raises Exception(message) when
    *timeout* seconds elapse without one. Exceptions raised by *method* are
    treated as "not yet" and retried.
    """
    end_time = time.time() + timeout
    while True:
        try:
            value = method()
            if value:
                return value
        except Exception:
            # Bug fix: the original bare `except:` also swallowed SystemExit
            # and KeyboardInterrupt; only ordinary exceptions should be
            # retried.
            pass
        time.sleep(1)
        if time.time() > end_time:
            break
    raise Exception(message)
Calls the method until the return value is not False.
def _check_if_tag_already_exists(self): version = self.data['new_version'] if self.vcs.tag_exists(version): return True else: return False
Check if tag already exists and show the difference if so
def _sample_template(sample, out_dir): bam_fn = dd.get_work_bam(sample) genome = dd.get_genome_build(sample) if genome in supported: peaks = sample.get("peaks_files", []).get("main") if peaks: r_code = ("library(ChIPQC);\n" "sample = ChIPQCsample(\"{bam_fn}\"," "\"{peaks}\", " "annotation = \"{genome}\"," ");\n" "ChIPQCreport(sample);\n") r_code_fn = os.path.join(out_dir, "chipqc.r") with open(r_code_fn, 'w') as inh: inh.write(r_code.format(**locals())) return r_code_fn
R code to get QC for one sample
def service_action(self, service, action): "Perform given action on service for the selected cluster" try: service = api.get_cluster(self.cluster).get_service(service) except ApiException: print("Service not found") return None if action == "start": service.start() if action == "restart": service.restart() if action == "stop": service.stop() return True
Perform given action on service for the selected cluster
def _strip_counters(self, sub_line): try: end = sub_line.rindex('}') except ValueError: return sub_line else: return sub_line[:(end + 1)]
Find the codeline end by taking out the counters and durations.
def _download_args(options): return dict( version=options.version, download_base=options.download_base, downloader_factory=options.downloader_factory, to_dir=options.to_dir, )
Return args for download_setuptools function from cmdline args.
def getDriverName(self, nDriver, pchValue, unBufferSize): fn = self.function_table.getDriverName result = fn(nDriver, pchValue, unBufferSize) return result
Returns the length of the number of bytes necessary to hold this string including the trailing null.
def destructuring_stmt_handle(self, original, loc, tokens): internal_assert(len(tokens) == 2, "invalid destructuring assignment tokens", tokens) matches, item = tokens out = match_handle(loc, [matches, "in", item, None]) out += self.pattern_error(original, loc, match_to_var, match_check_var) return out
Process match assign blocks.
def parse_md(self):
    """Parse self.raw_src as Markdown and populate this post object.

    Sets one attribute per Markdown-Meta key ('pubdate' is converted from a
    Unix timestamp to datetime), then sets content, stub, and a default
    pubdate when none was supplied.
    """
    post_content = _MARKDOWN.convert(self.raw_src)
    # _MARKDOWN only grows a Meta attribute when the meta extension is active.
    if hasattr(_MARKDOWN, 'Meta'):
        for key in _MARKDOWN.Meta:
            # Python 2 print statement — this module targets Python 2.
            print "\t meta: %s: %s (%s)" % (key, _MARKDOWN.Meta[key][0], type(_MARKDOWN.Meta[key][0]))
            if key == 'pubdate':
                setattr(self, key, datetime.datetime.fromtimestamp(float(_MARKDOWN.Meta[key][0])))
            else:
                # Meta values arrive as lists; only the first entry is used.
                setattr(self, key, _MARKDOWN.Meta[key][0])
    self.content = post_content
    self.stub = self.__key__
    if not hasattr(self, 'pubdate'):
        print '\t Notice: setting default pubdate'
        setattr(self, 'pubdate', datetime.datetime.now())
Parse the raw Markdown source, setting metadata attributes, content, stub and pubdate on this post
def filter(self, query: str) -> Optional['CompatNodeIterator']:
    """Further filter the results using this iterator's last node as base.

    Returns None when nothing has been iterated yet.
    """
    if not self._last_node:
        return None
    # NOTE(review): this calls the module-level `filter` (shadowed by this
    # method name) — presumably a project helper, since the builtin filter()
    # would treat _last_node as a predicate; confirm the module imports.
    return filter(self._last_node, query)
Further filter the results using this iterator as base.
def pop_event(self):
    """Pop and return the oldest event from event_list, or None when empty."""
    if not self.event_list:
        return None
    return self.event_list.pop(0)
Pop an event from event_list.
def roc_auc(y_true, y_score):
    """Return the area under the ROC curve, ignoring NaN entries in y_true."""
    notnull = ~np.isnan(y_true)
    # NOTE(review): only NaNs in y_true are masked — NaNs in y_score would
    # still reach roc_curve; confirm whether that can occur upstream.
    fpr, tpr, thresholds = sklearn.metrics.roc_curve(y_true[notnull], y_score[notnull])
    return sklearn.metrics.auc(fpr, tpr)
Return the area under the ROC curve
def run_sim(morphology='patdemo/cells/j4a.hoc', cell_rotation=dict(x=4.99, y=-4.33, z=3.14), closest_idx=dict(x=-200., y=0., z=800.)): cell = LFPy.Cell(morphology=morphology, **cell_parameters) cell.set_rotation(**cell_rotation) synapse_parameters = { 'idx' : cell.get_closest_idx(**closest_idx), 'e' : 0., 'syntype' : 'ExpSynI', 'tau' : 0.5, 'weight' : 0.0878, 'record_current' : True, } synapse = LFPy.Synapse(cell, **synapse_parameters) synapse.set_spike_times(np.array([1.])) print "running simulation..." cell.simulate(rec_imem=True,rec_isyn=True) grid_electrode = LFPy.RecExtElectrode(cell,**grid_electrode_parameters) point_electrode = LFPy.RecExtElectrode(cell,**point_electrode_parameters) grid_electrode.calc_lfp() point_electrode.calc_lfp() print "done" return cell, synapse, grid_electrode, point_electrode
set up simple cell simulation with LFPs in the plane
def saved_groups(self): if self._saved_groups is None: self._saved_groups = False fqfn_saved = os.path.join(self.tcex.args.tc_temp_path, 'groups-saved') if ( self.enable_saved_file and os.path.isfile(fqfn_saved) and os.access(fqfn_saved, os.R_OK) ): self._saved_groups = True self.tcex.log.debug('groups-saved file found') return self._saved_groups
Return True if saved group files exist, else False.
def hash_file(filepath: str) -> str: md5 = hashlib.md5() acc_hash(filepath, md5) return md5.hexdigest()
Return the hexdigest MD5 hash of content of file at `filepath`.
def safe_makedir(dname):
    """Make a directory if it doesn't exist, handling concurrent race conditions.

    Falsy input is returned untouched. On OSError (another process may be
    creating the same path) the attempt is retried a few times with a pause
    before the error is re-raised.
    """
    if not dname:
        return dname
    attempts = 0
    while not os.path.exists(dname):
        try:
            os.makedirs(dname)
        except OSError:
            if attempts > 5:
                raise
            attempts += 1
            time.sleep(2)
    return dname
Make a directory if it doesn't exist, handling concurrent race conditions.
def write(self, more):
    """Append the upper-cased string form of *more*, then a newline, to
    self.output; falsy input is skipped."""
    # NOTE(review): reconstructed from a collapsed one-liner; the newline
    # append is assumed to sit inside the `if more:` guard — confirm against
    # the original formatting.
    if more:
        self.output += str(more).upper()
        self.output += '\n'
Append the upper-cased string representation of `more` to our output.
def pub_dates(soup): pub_dates = [] tags = raw_parser.pub_date(soup) for tag in tags: pub_date = OrderedDict() copy_attribute(tag.attrs, 'publication-format', pub_date) copy_attribute(tag.attrs, 'date-type', pub_date) copy_attribute(tag.attrs, 'pub-type', pub_date) for tag_attr in ["date-type", "pub-type"]: if tag_attr in tag.attrs: (day, month, year) = ymd(tag) pub_date['day'] = day pub_date['month'] = month pub_date['year'] = year pub_date['date'] = date_struct_nn(year, month, day) pub_dates.append(pub_date) return pub_dates
return a list of all the pub dates
def validate_max_pods(namespace): minimum_pods_required = ceil((namespace.node_count * 2 + 6 + 1) / namespace.node_count) if namespace.max_pods != 0 and namespace.max_pods < minimum_pods_required: raise CLIError('--max-pods must be at least {} for a managed Kubernetes cluster to function.' .format(minimum_pods_required))
Validates that max_pods is set to a reasonable minimum number.
def sum_coefs(self):
    """Return the sum of all AR and MA coefficients."""
    ar_total = numpy.sum(self.ar_coefs)
    ma_total = numpy.sum(self.ma_coefs)
    return ar_total + ma_total
The sum of all AR and MA coefficients
def _do_leave(self, leave, in_port, msg): datapath = msg.datapath parser = datapath.ofproto_parser self._mcast.setdefault(leave.address, {}) if in_port in self._mcast[leave.address]: self._del_flow_entry( datapath, in_port, leave.address) del self._mcast[leave.address][in_port] actions = [] for port in self._mcast[leave.address]: actions.append(parser.OFPActionOutput(port)) if len(actions): self._set_flow_entry( datapath, actions, self.server_port, leave.address) else: self._del_flow_entry( datapath, self.server_port, leave.address)
the process when the querier received a LEAVE message.
def update(self, o):
    """Update this index from another index (via its _db) or a plain dict."""
    self.open()
    # Indexes expose their mapping as ._db; anything else is used directly.
    self._db.update(getattr(o, '_db', o))
Update from another index or index dict
def _get_filtered_stmts(self, _, node, _stmts, mystmt): if self.statement() is mystmt: return [node], True return _stmts, False
method used in _filter_stmts to get statements and trigger break
def unparse_qsl(qsl, sort=False, reverse=False):
    """Reverse conversion for parse_qsl: join (key, value) pairs into a
    percent-encoded query string, optionally sorting by key."""
    items = sorted(qsl, key=lambda pair: pair[0], reverse=reverse) if sort else qsl
    return "&".join(quote(k) + "=" + quote(v) for k, v in items)
Reverse conversion for parse_qsl
def strip_tags(self, content, smart=False):
    """Strip tags from the HTML content, returning its text via get_node_text."""
    from lxml.html import fromstring
    return get_node_text(fromstring(content), smart=smart)
Strip tags from the HTML content.
def _delete_vdev_info(self, vdev): vdev = vdev.lower() rules_file_name = '/etc/udev/rules.d/51-qeth-0.0.%s.rules' % vdev cmd = 'rm -f %s\n' % rules_file_name address = '0.0.%s' % str(vdev).zfill(4) udev_file_name = '/etc/udev/rules.d/70-persistent-net.rules' cmd += "sed -i '/%s/d' %s\n" % (address, udev_file_name) cmd += "sed -i '/%s/d' %s\n" % (address, '/boot/zipl/active_devices.txt') return cmd
Build the shell commands that remove the udev rules entries for a vdev.
def compare_ordereddict(self, X, Y):
    """Compare two OrderedDict instances, checking key order as well as content."""
    outcome = self.compare_dicts(X, Y)
    if isinstance(outcome, DeepExplanation):
        return outcome
    # Contents match; now verify that keys appear in the same order.
    for (x_key, _), (y_key, _) in zip(X.items(), Y.items()):
        if x_key != y_key:
            ctx = self.get_context()
            msg = "X{0} and Y{1} are in a different order".format(
                red(ctx.current_X_keys), green(ctx.current_Y_keys)
            )
            return DeepExplanation(msg)
    return True
Compares two instances of an OrderedDict.
def rotate_point(xorigin, yorigin, x, y, angle):
    """Rotate the point (x, y) by `angle` radians about (xorigin, yorigin).

    Returns the rotated (x, y) offsets relative to the origin; note the
    origin is NOT added back to the result (unchanged from the original
    interface).
    """
    dx = x - xorigin
    dy = y - yorigin
    rotx = dx * np.cos(angle) - dy * np.sin(angle)
    # BUG FIX: the original computed roty from (x - yorigin), mixing the
    # x coordinate with the y origin; the rotation formula requires
    # (x - xorigin) here, so results were wrong whenever xorigin != yorigin.
    roty = dx * np.sin(angle) + dy * np.cos(angle)
    return rotx, roty
Rotate the given point by angle
def check_types(func):
    """Decorator: validate annotated arguments against their annotated types.

    At call time, each annotated parameter's value is checked with
    isinstance against its annotation; a mismatch raises TypeError.
    """
    call = PythonCall(func)

    @wraps(func)
    def decorator(*args, **kwargs):
        parameters = call.bind(args, kwargs)
        for arg_name, expected_type in func.__annotations__.items():
            # BUG FIX: __annotations__ also contains the 'return' key for a
            # function with a return annotation; it is not a parameter, and
            # the original raised KeyError on parameters['return'].
            if arg_name == 'return':
                continue
            if not isinstance(parameters[arg_name], expected_type):
                raise TypeError("{} must be a {}".format(
                    arg_name, expected_type))
        return call.apply(args, kwargs)
    return decorator
Check if annotated function arguments are of the correct type
def fetch(self, station: str) -> str:
    """Fetch a report string for `station` from the remote service."""
    valid_station(station)
    # The HTTP verb is configured on the instance, e.g. 'GET' -> requests.get.
    request_func = getattr(requests, self.method.lower())
    try:
        resp = request_func(self.url.format(self.rtype, station))
        if resp.status_code != 200:
            raise SourceError(f'{self.__class__.__name__} server returned {resp.status_code}')
    except requests.exceptions.ConnectionError:
        raise ConnectionError(f'Unable to connect to {self.__class__.__name__} server')
    report = self._extract(resp.text, station)
    # Collapse all runs of whitespace to single spaces.
    return ' '.join(report.split())
Fetches a report string from the service
def valid_options(kwargs, allowed_options):
    """Check that every kwarg is an allowed API option.

    Prints the offending names and returns False when any are invalid;
    returns True otherwise.
    """
    unknown = set(kwargs) - set(allowed_options)
    if not unknown:
        return True
    print("Invalid option(s): ", ', '.join(unknown))
    return False
Checks that kwargs are valid API options
def update_metadata_filters(metadata, jupyter_md, cell_metadata):
    """Update or set the notebook and cell metadata filters.

    Mutates `metadata` in place. `cell_metadata` is the list of cell
    metadata keys observed in the document; 'language' and 'magic_args'
    are excluded up front since they are handled separately.
    """
    cell_metadata = [m for m in cell_metadata if m not in ['language', 'magic_args']]
    # An existing cell metadata filter is reconciled with the observed keys:
    if 'cell_metadata_filter' in metadata.get('jupytext', {}):
        metadata_filter = metadata_filter_as_dict(metadata.get('jupytext', {})['cell_metadata_filter'])
        # Observed keys must not stay excluded...
        if isinstance(metadata_filter.get('excluded'), list):
            metadata_filter['excluded'] = [key for key in metadata_filter['excluded'] if key not in cell_metadata]
        metadata_filter.setdefault('additional', [])
        # ...and each observed key is added to the 'additional' list once.
        # (skipped when 'additional' is not a list, e.g. the 'all' marker)
        if isinstance(metadata_filter.get('additional'), list):
            for key in cell_metadata:
                if key not in metadata_filter['additional']:
                    metadata_filter['additional'].append(key)
        metadata.setdefault('jupytext', {})['cell_metadata_filter'] = metadata_filter_as_string(metadata_filter)
    # Without a jupyter_md section, restrict the filters to exactly the
    # observed cell keys and drop all notebook metadata.
    if not jupyter_md:
        cell_metadata = {'additional': cell_metadata, 'excluded': 'all'}
        metadata.setdefault('jupytext', {})['notebook_metadata_filter'] = '-all'
        metadata.setdefault('jupytext', {})['cell_metadata_filter'] = metadata_filter_as_string(cell_metadata)
Update or set the notebook and cell metadata filters
def serialize_numeric(self, tag):
    """Return the literal representation of a numeric tag (value plus suffix)."""
    # Call the base type's __str__ directly so that subclass overrides of
    # __str__ cannot alter the literal form.
    if isinstance(tag, int):
        literal = int.__str__(tag)
    else:
        literal = float.__str__(tag)
    return literal + tag.suffix
Return the literal representation of a numeric tag.
def branches(self):
    """Return every branch (local and remote) as a list of names."""
    output = self.git(self.default + ['branch', '-a', '--no-color'])
    # Strip the current-branch marker and surrounding whitespace,
    # dropping any lines that end up empty.
    stripped = (line.strip(' *\n') for line in output.split('\n'))
    return [name for name in stripped if name]
Return all branches as a list.
def update_user(self, username, profile, owner_privkey):
    """Update a user's profile hash on the blockchain.

    Requests an unsigned transaction from the server, writes the profile
    to the DHT, then signs and broadcasts the transaction. Returns the
    server's response unchanged when no unsigned transaction is provided,
    or an error dict when the DHT write fails.
    """
    url = self.base_url + "/users/" + username + "/update"
    owner_pubkey = get_pubkey_from_privkey(owner_privkey)
    payload = {
        'profile': profile,
        'owner_pubkey': owner_pubkey
    }
    resp = self._post_request(url, payload)
    try:
        unsigned_tx = resp['unsigned_tx']
    # BUG FIX: the original bare `except:` swallowed every exception,
    # including KeyboardInterrupt; only the lookup failures are expected.
    except (KeyError, TypeError):
        # No transaction to sign — surface the server response (likely an
        # error payload) to the caller as-is.
        return resp
    dht_resp = write_dht_profile(profile)
    dht_resp = dht_resp[0]
    if dht_resp['status'] != 'success':
        return {"error": "DHT write failed"}
    signed_tx = sign_all_unsigned_inputs(owner_privkey, unsigned_tx)
    return self.broadcast_transaction(signed_tx)
Update profile_hash on blockchain
def base_definition_post_delete(sender, instance, **kwargs):
    """Delete fields inherited from an abstract model base after instance deletion."""
    # Only act when the pre-delete handler stashed the model on the state.
    if not hasattr(instance._state, '_deletion'):
        return
    model = popattr(instance._state, '_deletion')
    for field in instance.base._meta.fields:
        perform_ddl('remove_field', model, field)
Make sure to delete fields inherited from an abstract model base.
def _function_handler(function, args, kwargs, pipe):
    """Run `function(*args, **kwargs)` in the child process and send its result back over `pipe`.

    SIGINT is ignored in the child so that a keyboard interrupt is handled
    by the parent process alone.
    """
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    result = process_execute(function, *args, **kwargs)
    send_result(pipe, result)
Runs the actual function in separate process and returns its result.