code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def gen_source(self, ast, name, customize, is_lambda=False, returnNone=False):
    """Convert SyntaxTree to Python source code.

    Temporarily swaps self.name / self.return_none while traversing,
    restoring them before returning.
    """
    rn = self.return_none
    self.return_none = returnNone
    old_name = self.name
    self.name = name
    # An empty tree still has to produce syntactically valid code.
    if len(ast) == 0:
        self.println(self.indent, 'pass')
    else:
        self.customize(customize)
        if is_lambda:
            self.write(self.traverse(ast, is_lambda=is_lambda))
        else:
            self.text = self.traverse(ast, is_lambda=is_lambda)
            self.println(self.text)
    self.name = old_name
    self.return_none = rn
convert SyntaxTree to Python source code
def OnMouse(self, event):
    """Handle mouse-move event by highlighting the element under the cursor."""
    node = HotMapNavigator.findNodeAtPosition(self.hot_map, event.GetPosition())
    self.SetHighlight(node, event.GetPosition())
Handle mouse-move event by selecting a given element
def check_sim_out(self):
    """Check if we should send new servos to flightgear."""
    now = time.time()
    # Rate-limit to 50 Hz and require scaled RC data to be present.
    if now - self.last_sim_send_time < 0.02 or self.rc_channels_scaled is None:
        return
    self.last_sim_send_time = now
    servos = []
    for ch in range(1, 9):
        servos.append(self.scale_channel(ch, getattr(self.rc_channels_scaled, 'chan%u_scaled' % ch)))
    # Pad to the 14 channels expected by the '<14H' packet layout.
    servos.extend([0, 0, 0, 0, 0, 0])
    buf = struct.pack('<14H', *servos)
    try:
        self.sim_out.send(buf)
    except socket.error as e:
        # Connection-refused is expected when the simulator isn't running.
        if not e.errno in [errno.ECONNREFUSED]:
            raise
        return
check if we should send new servos to flightgear
def pip_remove(self, name=None, prefix=None, pkgs=None):
    """Remove a pip package in given environment by `name` or `prefix`."""
    logger.debug(str((prefix, pkgs)))
    # Accept either a single package name or a list/tuple of names.
    if isinstance(pkgs, (list, tuple)):
        pkg = ' '.join(pkgs)
    else:
        pkg = pkgs
    extra_args = ['uninstall', '--yes', pkg]
    return self._call_pip(name=name, prefix=prefix, extra_args=extra_args)
Remove a pip package in given environment by `name` or `prefix`.
def _ir_calibrate(self, data):
    """Calibrate IR channels to brightness temperature (inverse Planck)."""
    fk1 = float(self["planck_fk1"])
    fk2 = float(self["planck_fk2"])
    bc1 = float(self["planck_bc1"])
    bc2 = float(self["planck_bc2"])
    res = (fk2 / xu.log(fk1 / data + 1) - bc1) / bc2
    # Preserve the input attributes, then mark the result as BT in kelvin.
    res.attrs = data.attrs
    res.attrs['units'] = 'K'
    res.attrs['standard_name'] = 'toa_brightness_temperature'
    return res
Calibrate IR channels to BT.
def mouse_press_event(self, x, y, button):
    """Report left (button 1) and right (button 2) presses with position."""
    label = {1: "Left", 2: "Right"}.get(button)
    if label is not None:
        print("%s mouse button pressed @" % label, x, y)
Reports left and right mouse button presses + position
def _file_local_list(self, dest):
    """Helper util returning the set of file paths under dest's directory.

    Note: despite the historical name, the return value is a set, not a list.
    """
    if os.path.isdir(dest):
        destdir = dest
    else:
        destdir = os.path.dirname(dest)
    filelist = set()
    for root, dirs, files in salt.utils.path.os_walk(destdir, followlinks=True):
        for name in files:
            path = os.path.join(root, name)
            filelist.add(path)
    return filelist
Helper util to return a list of files in a directory
def start(self):
    """Start listening from the audio stream in a daemon thread."""
    if self.stream is None:
        # Lazy import: pyaudio is only needed once listening actually starts.
        from pyaudio import PyAudio, paInt16
        self.pa = PyAudio()
        self.stream = self.pa.open(
            16000, 1, paInt16, True, frames_per_buffer=self.chunk_size
        )
        self._wrap_stream_read(self.stream)
    self.engine.start()
    self.running = True
    self.is_paused = False
    self.thread = Thread(target=self._handle_predictions)
    # Daemon thread so the process can exit without joining it.
    self.thread.daemon = True
    self.thread.start()
Start listening from stream
def str2listtuple(self, string_message):
    """Convert a string that is ready to be sent to graphite into a pickled
    (header + payload) message of (path, (timestamp, metric)) tuples."""
    if type(string_message).__name__ not in ('str', 'unicode'):
        raise TypeError("Must provide a string or unicode")
    if not string_message.endswith('\n'):
        string_message += "\n"
    tpl_list = []
    for line in string_message.split('\n'):
        line = line.strip()
        if not line:
            continue
        path, metric, timestamp = (None, None, None)
        try:
            (path, metric, timestamp) = line.split()
        except ValueError:
            raise ValueError(
                "message must contain - metric_name, value and timestamp '%s'" % line)
        try:
            timestamp = float(timestamp)
        except ValueError:
            raise ValueError("Timestamp must be float or int")
        tpl_list.append((path, (timestamp, metric)))
    if len(tpl_list) == 0:
        raise GraphiteSendException("No messages to send")
    # Graphite's pickle protocol: 4-byte big-endian length header + payload.
    payload = pickle.dumps(tpl_list)
    header = struct.pack("!L", len(payload))
    message = header + payload
    return message
Convert a string that is ready to be sent to graphite into a tuple
def append(self, item):
    """Append a validated item to the internal list; items whose validation
    yields None are silently dropped."""
    validated_value = self.get_validated_object(item)
    if validated_value is not None:
        self.__modified_data__.append(validated_value)
Appending elements to our list
def trace(msg):
    """Print a trace message to stderr when JARN_TRACE=1 in the environment."""
    if os.environ.get('JARN_TRACE') != '1':
        return
    print('TRACE:', msg, file=sys.stderr)
Print a trace message to stderr if environment variable is set.
def agents():
    """Lists the currently active LiveSync agents.

    NOTE: Python 2 code (print statements, ``unicode``).
    """
    print 'The following LiveSync agents are active:'
    agent_list = LiveSyncAgent.find().order_by(LiveSyncAgent.backend_name, db.func.lower(LiveSyncAgent.name)).all()
    table_data = [['ID', 'Name', 'Backend', 'Initial Export', 'Queue']]
    for agent in agent_list:
        initial = (cformat('%{green!}done%{reset}') if agent.initial_data_exported else
                   cformat('%{yellow!}pending%{reset}'))
        if agent.backend is None:
            backend_title = cformat('%{red!}invalid backend ({})%{reset}').format(agent.backend_name)
        else:
            backend_title = agent.backend.title
        table_data.append([unicode(agent.id), agent.name, backend_title, initial,
                           unicode(agent.queue.filter_by(processed=False).count())])
    table = AsciiTable(table_data)
    # Right-align the queue-size column.
    table.justify_columns[4] = 'right'
    print table.table
    if not all(a.initial_data_exported for a in agent_list):
        print
        print "You need to perform the initial data export for some agents."
        print cformat("To do so, run " "%{yellow!}indico livesync initial_export %{reset}%{yellow}<agent_id>%{reset} for those agents.")
Lists the currently active agents
def print_unfinished_line(self):
    """The unfinished line stayed long enough in the buffer to be printed."""
    if self.state is STATE_RUNNING:
        # Callbacks get first crack at the buffer; print only if unhandled.
        if not callbacks.process(self.read_buffer):
            self.print_lines(self.read_buffer)
        self.read_buffer = b''
The unfinished line stayed long enough in the buffer to be printed
def keys(self):
    """List of reader's keys (field names collected across all scales)."""
    keys = []
    for val in self.form.scales.values():
        # val.dtype.fields maps field name -> dtype info; we want the names.
        keys += val.dtype.fields.keys()
    return keys
List of reader's keys.
def match(self, chars):
    """Return all next characters that are listed in `chars` as a string.

    Consumes matching characters by advancing the iterator; on the first
    non-matching character the read position is stepped back so that
    character will be re-read by the next operation.
    """
    start_index = self.next_index
    for char in self:
        if char not in chars:
            # Un-consume the non-matching character.
            self.next_index -= 1
            break
    return self[start_index:self.next_index]
Return all next characters that are listed in `chars` as a string
async def async_init(self) -> None:
    """Create a Tile session (registering the client first if needed)."""
    if not self._client_established:
        await self.request(
            'put', 'clients/{0}'.format(self.client_uuid),
            data={
                'app_id': DEFAULT_APP_ID,
                'app_version': DEFAULT_APP_VERSION,
                'locale': self._locale
            })
        self._client_established = True
    resp = await self.request(
        'post', 'clients/{0}/sessions'.format(self.client_uuid),
        data={
            'email': self._email,
            'password': self._password
        })
    if not self.user_uuid:
        self.user_uuid = resp['result']['user']['user_uuid']
    self._session_expiry = resp['result']['session_expiration_timestamp']
    self.tiles = Tile(self.request, self.user_uuid)
Create a Tile session.
def _getLPA( self ): " Provides line, pos and absPosition line as string " return str( self.line ) + ":" + \ str( self.pos ) + ":" + \ str( self.absPosition )
Provides line, pos and absPosition line as string
def sshpull(host, maildir, localmaildir, noop=False, verbose=False, filterfile=None):
    """Pull a remote maildir to the local one over SSH."""
    store = _SSHStore(host, maildir)
    _pull(store, localmaildir, noop, verbose, filterfile)
Pull a remote maildir to the local one.
def derive_resource_name(name):
    """A stable, human-readable name and identifier for a resource.

    Strips a leading "Anon" and a trailing "Handler", and special-cases
    "Maas" -> "MAAS".
    """
    prefix, suffix = "Anon", "Handler"
    if name.startswith(prefix):
        name = name[len(prefix):]
    if name.endswith(suffix):
        name = name[:len(name) - len(suffix)]
    return "MAAS" if name == "Maas" else name
A stable, human-readable name and identifier for a resource.
def _bsecurate_cli_get_reader_formats(args):
    """Handles the get-reader-formats subcommand: list readable formats,
    optionally without their descriptions."""
    all_formats = curate.get_reader_formats()
    if args.no_description:
        liststr = all_formats.keys()
    else:
        liststr = format_columns(all_formats.items())
    return '\n'.join(liststr)
Handles the get-reader-formats subcommand
def update_topic_counter(sender, topic, user, request, response, **kwargs):
    """Handle the update of the views counter associated with topics.

    Uses an F() expression so the increment happens atomically in the DB.
    """
    topic.__class__._default_manager.filter(id=topic.id).update(views_count=F('views_count') + 1)
Handles the update of the views counter associated with topics.
def _onInstanceAttribute(self, name, line, pos, absPosition, level):
    """Memorize a class instance attribute, deduplicated by name."""
    attributes = self.objectsStack[level - 1].instanceAttributes
    for item in attributes:
        if item.name == name:
            return
    attributes.append(InstanceAttribute(name, line, pos, absPosition))
Memorizes a class instance attribute
def to_dict(self):
    """Save this target port into a dictionary."""
    d = super(TargetPort, self).to_dict()
    d['portName'] = self.port_name
    return d
Save this target port into a dictionary.
def take_column(self, keys, *extra_keys):
    """Return a new instance of this class containing only the requested
    subset of columns."""
    import utool as ut
    keys = ut.ensure_iterable(keys) + list(extra_keys)
    key_to_list = ut.dict_subset(self._key_to_list, keys)
    newself = self.__class__(key_to_list, self._meta.copy())
    return newself
Takes a subset of columns
def restore_checkpoint(filename):
    """Resume the simulation from a previously saved checkpoint file."""
    with gzip.open(filename) as f:
        generation, config, population, species_set, rndstate = pickle.load(f)
        # Restore the RNG so the run continues deterministically.
        random.setstate(rndstate)
        return Population(config, (population, species_set, generation))
Resumes the simulation from a previous saved point.
def start_stress(self, stress_cmd):
    """Start a new stress process with a given cmd; failures are logged,
    not raised (best effort)."""
    with open(os.devnull, 'w') as dev_null:
        try:
            stress_proc = subprocess.Popen(stress_cmd, stdout=dev_null, stderr=dev_null)
            self.set_stress_process(psutil.Process(stress_proc.pid))
        except OSError:
            logging.debug("Unable to start stress")
Starts a new stress process with a given cmd
def _build_server_url(server_host, server_path) -> str: server_url = urljoin(server_host, server_path) if server_url[-1] == '/': return server_url return '{}/'.format(server_url)
Build the server url making sure it ends in a trailing slash.
def x10_housecode(self):
    """Emit the X10 house code, or None for non-X10 devices."""
    housecode = None
    if self.is_x10:
        housecode = insteonplm.utils.byte_to_housecode(self.addr[1])
    return housecode
Emit the X10 house code.
def add_to_path(p):
    """Add a path to sys.path and restore the old path after the 'with'
    block ends.

    NOTE(review): generator body — presumably wrapped with @contextmanager
    at the definition site; the decorator is not visible here.
    """
    old_path = sys.path
    if p not in sys.path:
        # Copy before mutating so the original list object is untouched.
        sys.path = sys.path[:]
        sys.path.insert(0, p)
    try:
        yield
    finally:
        sys.path = old_path
Adds a path to python paths and removes it after the 'with' block ends
def backup(self):
    """Back up the file named self.filename, picking the first free
    .bkp / .bkpN suffix."""
    count = 0
    name = "{}.bkp".format(self.filename)
    backup = os.path.join(self.cwd, name)
    while os.path.exists(backup):
        count += 1
        name = "{}.bkp{}".format(self.filename, count)
        backup = os.path.join(self.cwd, name)
    self.hey("Moving existing {} to {}".format(self.filename, name))
    os.rename(os.path.join(self.cwd, self.filename), backup)
Backups files with the same name of the instance filename
def on_change(self, value):
    """Call the modifier on the instance with the passed value."""
    self._modifier(self.inst, self.prop, value)
Calls the modifier on the instance with the passed value
def labels(self, text=None, coordinates=None, colorlist=None, sizes=None, fonts=None, opacity=1.0):
    """Display atomic labels for the system.

    Defaults: coordinates fall back to self.coordinates; labels fall back
    to "<atom_type><index>" when atom types match the coordinate count,
    else plain 1-based indices.
    """
    if coordinates is None:
        coordinates = self.coordinates
    l = len(coordinates)
    if text is None:
        if len(self.topology.get('atom_types')) == l:
            text = [self.topology['atom_types'][i] + str(i + 1) for i in range(l)]
        else:
            text = [str(i + 1) for i in range(l)]
    text_representation = self.add_representation('text', {'coordinates': coordinates, 'text': text, 'colors': colorlist, 'sizes': sizes, 'fonts': fonts, 'opacity': opacity})
    # Closure keeps the representation's coordinates in sync on updates.
    def update(self=self, text_representation=text_representation):
        self.update_representation(text_representation, {'coordinates': coordinates})
    self.update_callbacks.append(update)
Display atomic labels for the system
def declare(queues):
    """Initialize (declare) the given queues, or all configured queues
    when none are given."""
    current_queues.declare(queues=queues)
    click.secho(
        'Queues {} have been declared.'.format(
            queues or current_queues.queues.keys()),
        fg='green'
    )
Initialize the given queues.
def _validated(self, data):
    """Convert data or die trying: conversion failures are re-raised as
    NotValid with the original arguments."""
    try:
        return self.convert(data)
    except (TypeError, ValueError) as ex:
        raise NotValid(*ex.args)
Convert data or die trying.
def gui():
    """Remove libraries via a GUI multi-choice dialog with confirmation."""
    sel = psidialogs.multi_choice(libraries(), 'select libraries to remove from %s!' % libraries_dir(), title='remove boards')
    print('%s selected' % sel)
    if sel:
        if psidialogs.ask_yes_no('Do you really want to remove selected libraries?\n' + '\n'.join(sel)):
            for x in sel:
                remove_lib(x)
                print('%s was removed' % x)
remove libraries by GUI.
def merge_from(self, other):
    """Merge information from another PhoneNumber object into this one.

    Only fields set on ``other`` overwrite the corresponding fields here.
    """
    if other.country_code is not None:
        self.country_code = other.country_code
    if other.national_number is not None:
        self.national_number = other.national_number
    if other.extension is not None:
        self.extension = other.extension
    if other.italian_leading_zero is not None:
        self.italian_leading_zero = other.italian_leading_zero
    if other.number_of_leading_zeros is not None:
        self.number_of_leading_zeros = other.number_of_leading_zeros
    if other.raw_input is not None:
        self.raw_input = other.raw_input
    # country_code_source uses UNSPECIFIED (not None) as its "unset" marker.
    if other.country_code_source is not CountryCodeSource.UNSPECIFIED:
        self.country_code_source = other.country_code_source
    if other.preferred_domestic_carrier_code is not None:
        self.preferred_domestic_carrier_code = other.preferred_domestic_carrier_code
Merge information from another PhoneNumber object into this one.
def jinja_extensions_feature(app):
    """Enable custom templating extensions (moment.js, date/humanize
    filters, asset helpers) on the app's Jinja environment."""
    app.jinja_env.globals['momentjs'] = MomentJsFilters
    app.jinja_env.filters.update(MomentJsFilters().get_filters())
    app.jinja_env.filters.update(DateFilters().get_filters())
    app.jinja_env.filters.update(HumanizeFilters().get_filters())
    app.jinja_env.globals.update(dict(
        asset=functions.asset,
        dev_proxy=functions.dev_proxy
    ))
Enables custom templating extensions
def folder2db(folder_name, debug, energy_limit, skip_folders, goto_reaction):
    """Read folder and collect data in local sqlite3 database."""
    folder_name = folder_name.rstrip('/')
    skip = []
    # Accept both ", " and "," as separators in skip_folders.
    for s in skip_folders.split(', '):
        for sk in s.split(','):
            skip.append(sk)
    pub_id = _folder2db.main(folder_name, debug, energy_limit, skip, goto_reaction)
    if pub_id:
        print('')
        print('')
        print('Ready to release the data?')
        print(" Send it to the Catalysis-Hub server with 'cathub db2server {folder_name}/{pub_id}.db'.".format(**locals()))
        print(" Then log in at www.catalysis-hub.org/upload/ to verify and release. ")
Read folder and collect data in local sqlite3 database
def atlas_node_stop(atlas_state):
    """Stop the atlas node threads: ask each component to join, then wait."""
    for component in atlas_state.keys():
        log.debug("Stopping Atlas component '%s'" % component)
        atlas_state[component].ask_join()
        atlas_state[component].join()
    return True
Stop the atlas node threads
def upsert_result(client_site_url, apikey, resource_id, result):
    """Post the given link check result to the client site."""
    url = client_site_url + u"deadoralive/upsert"
    params = result.copy()
    params["resource_id"] = resource_id
    requests.post(url, headers=dict(Authorization=apikey), params=params)
Post the given link check result to the client site.
def gen_sites(path):
    """Seek sites by path: yield a Site for each directory up to depth 2,
    skipping directories that fail Site's assertions."""
    for root, _, _ in walklevel(path, 2):
        try:
            yield Site(root)
        except AssertionError:
            continue
Seek sites by path.
def _map_query_path_to_outputs(construct_result, query_path_to_location_info):
    """Assign the output fields of a ConstructResult block to their
    respective query_path, returning {query_path: [SqlOutput, ...]}."""
    query_path_to_output_fields = {}
    for output_name, field in six.iteritems(construct_result.fields):
        field_name = field.location.field
        output_query_path = field.location.query_path
        output_field_info = constants.SqlOutput(
            field_name=field_name,
            output_name=output_name,
            graphql_type=query_path_to_location_info[output_query_path].type)
        output_field_mapping = query_path_to_output_fields.setdefault(output_query_path, [])
        output_field_mapping.append(output_field_info)
    return query_path_to_output_fields
Assign the output fields of a ConstructResult block to their respective query_path.
def table_ensure(cls, rr):
    """Create the rethinkdb database and table if they don't exist."""
    dbs = rr.db_list().run()
    if not rr.dbname in dbs:
        logging.info('creating rethinkdb database %s', repr(rr.dbname))
        rr.db_create(rr.dbname).run()
    tables = rr.table_list().run()
    if not cls.table in tables:
        logging.info(
            'creating rethinkdb table %s in database %s',
            repr(cls.table), repr(rr.dbname))
        cls.table_create(rr)
Creates the table if it doesn't exist.
def update_admin_object_resource(name, server=None, **kwargs):
    """Update a JMS destination.

    The jndiName attribute is dropped before the update since it
    identifies the resource and is not updatable.
    """
    if 'jndiName' in kwargs:
        del kwargs['jndiName']
    return _update_element(name, 'resources/admin-object-resource', kwargs, server)
Update a JMS destination
def init1(self, dae):
    """Set initial voltage for time domain simulation."""
    self.v0 = matrix(dae.y[self.v])
Set initial voltage for time domain simulation
def _iter_over_selections(obj, dim, values):
    """Iterate over selections of an xarray object in the provided order,
    yielding a lazily-created dummy copy for missing values."""
    from .groupby import _dummy_copy
    dummy = None
    for value in values:
        try:
            obj_sel = obj.sel(**{dim: value})
        except (KeyError, IndexError):
            # Create the dummy only once, on first miss.
            if dummy is None:
                dummy = _dummy_copy(obj)
            obj_sel = dummy
        yield obj_sel
Iterate over selections of an xarray object in the provided order.
def create_lzma(archive, compression, cmd, verbosity, interactive, filenames):
    """Create an LZMA archive with the lzma Python module ('alone' format)."""
    return _create(archive, compression, cmd, 'alone', verbosity, filenames)
Create an LZMA archive with the lzma Python module.
def _setup(self):
    """Initial setup, run once: resolve the settings module from the
    configured environment variable and wrap it."""
    default_settings.reload()
    environment_variable = self._kwargs.get(
        "ENVVAR_FOR_DYNACONF", default_settings.ENVVAR_FOR_DYNACONF
    )
    settings_module = os.environ.get(environment_variable)
    self._wrapped = Settings(
        settings_module=settings_module, **self._kwargs
    )
    self.logger.debug("Lazy Settings _setup ...")
Initial setup, run once.
def delete(self, for_update=False):
    """Delete the given model instance and all of its index entries.

    When for_update is True, container-field cleanup is skipped (the
    caller is about to rewrite them).
    """
    hash_key = self.get_hash_id()
    try:
        original_instance = self.load(hash_key, convert_key=False)
    except KeyError:
        # Nothing stored under this key; nothing to delete.
        return
    all_index = self._query.all_index()
    all_index.remove(hash_key)
    for field in self._indexes:
        for index in field.get_indexes():
            index.remove(original_instance)
    if not for_update:
        for field in self._fields.values():
            if isinstance(field, _ContainerField):
                field._delete(self)
    self.__database__.delete(hash_key)
Delete the given model instance.
def extract_images_generic(pike, root, log, options):
    """Extract any >=2bpp image we think we can improve; return the xref
    lists (jpegs, pngs)."""
    jpegs = []
    pngs = []
    for _, xref, ext in extract_images(pike, root, log, options, extract_image_generic):
        log.debug('xref = %s ext = %s', xref, ext)
        if ext == '.png':
            pngs.append(xref)
        elif ext == '.jpg':
            jpegs.append(xref)
    log.debug("Optimizable images: JPEGs: %s PNGs: %s", len(jpegs), len(pngs))
    return jpegs, pngs
Extract any >=2bpp image we think we can improve
def spacings(self):
    """Compute the distances between neighboring crystal planes.

    Axes whose squared reciprocal-vector sum is not positive yield 0.
    """
    inv_sq = (self.reciprocal ** 2).sum(axis=0)
    spacing = np.zeros(3, float)
    positive = inv_sq > 0
    spacing[positive] = inv_sq[positive] ** (-0.5)
    return spacing
Computes the distances between neighboring crystal planes
def remove_class(self, ioclass):
    """Remove VNXIOClass instance from policy (matched by name)."""
    current_ioclasses = self.ioclasses
    new_ioclasses = filter(lambda x: x.name != ioclass.name, current_ioclasses)
    self.modify(new_ioclasses=new_ioclasses)
Remove VNXIOClass instance from policy.
def count_channels(self):
    """If more than one channel is selected, activate the merge checkbox;
    otherwise uncheck and disable it."""
    merge = self.index['merge']
    if len(self.idx_chan.selectedItems()) > 1:
        if merge.isEnabled():
            return
        else:
            merge.setEnabled(True)
    else:
        self.index['merge'].setCheckState(Qt.Unchecked)
        self.index['merge'].setEnabled(False)
If more than one channel selected, activate merge checkbox.
def _dict_increment(self, dictionary, key): if key in dictionary: dictionary[key] += 1 else: dictionary[key] = 1
Increments the value of the dictionary at the specified key.
def adj_nodes_ali(ali_nodes):
    """Adjust node details specific to AliCloud (cloud tags, IPs, zone,
    and instance size without the 'ecs.' prefix)."""
    for node in ali_nodes:
        node.cloud = "alicloud"
        node.cloud_disp = "AliCloud"
        node.private_ips = ip_to_str(node.extra['vpc_attributes']['private_ip_address'])
        node.public_ips = ip_to_str(node.public_ips)
        node.zone = node.extra['zone_id']
        node.size = node.extra['instance_type']
        if node.size.startswith('ecs.'):
            node.size = node.size[len('ecs.'):]
    return ali_nodes
Adjust details specific to AliCloud.
def move(self, dst):
    """Close the file, then move it to dst."""
    self.close()
    shutil.move(self.path, dst)
Closes then moves the file to dst.
def NotifyAboutEnd(self):
    """Send out a final notification about the end of this flow."""
    flow_ref = None
    if self.runner_args.client_id:
        flow_ref = rdf_objects.FlowReference(
            client_id=self.client_id, flow_id=self.urn.Basename())
    num_results = len(self.ResultCollection())
    notification_lib.Notify(
        self.creator,
        rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_COMPLETED,
        # Pluralize "result(s)" based on the count.
        "Flow %s completed with %d %s" % (self.__class__.__name__, num_results, num_results == 1 and "result" or "results"),
        rdf_objects.ObjectReference(
            reference_type=rdf_objects.ObjectReference.Type.FLOW,
            flow=flow_ref))
Send out a final notification about the end of this flow.
def use_comparative_objective_view(self):
    """Pass through to provider ObjectiveLookupSession.use_comparative_objective_view;
    sessions without the method are skipped."""
    self._object_views['objective'] = COMPARATIVE
    for session in self._get_provider_sessions():
        try:
            session.use_comparative_objective_view()
        except AttributeError:
            pass
Pass through to provider ObjectiveLookupSession.use_comparative_objective_view
def word_under_cursor_pos(self):
    """Return (begin, end) cursor positions of the word under the cursor,
    using vim's 'e' (end of word) and 'b' (begin of word) motions."""
    self._vim.command('normal e')
    end = self.cursor()
    self._vim.command('normal b')
    beg = self.cursor()
    return beg, end
Return start and end positions of the cursor respectively.
def log_assist_request_without_audio(assist_request):
    """Log AssistRequest fields without audio data (audio is replaced by
    its byte count). Only active at DEBUG level."""
    if logging.getLogger().isEnabledFor(logging.DEBUG):
        resp_copy = embedded_assistant_pb2.AssistRequest()
        resp_copy.CopyFrom(assist_request)
        if len(resp_copy.audio_in) > 0:
            size = len(resp_copy.audio_in)
            resp_copy.ClearField('audio_in')
            logging.debug('AssistRequest: audio_in (%d bytes)', size)
            return
        logging.debug('AssistRequest: %s', resp_copy)
Log AssistRequest fields without audio data.
def dismissWorkers(self, num_workers):
    """Tell up to num_workers worker threads to quit after their current task."""
    dismissals = min(num_workers, len(self.workers))
    for _ in range(dismissals):
        self.workers.pop().dismiss()
Tell num_workers worker threads to quit after their current task.
def start(self):
    """Start the logging for this entry (registering the block on first use)."""
    if (self.cf.link is not None):
        if (self._added is False):
            self.create()
            logger.debug('First time block is started, add block')
        else:
            logger.debug('Block already registered, starting logging'
                         ' for id=%d', self.id)
            pk = CRTPPacket()
            pk.set_header(5, CHAN_SETTINGS)
            pk.data = (CMD_START_LOGGING, self.id, self.period)
            self.cf.send_packet(pk, expected_reply=(CMD_START_LOGGING, self.id))
Start the logging for this entry
async def _connect(self):
    """Connect the client to self.ip asynchronously.

    Raises IOError when no protocol was established after start().
    """
    self.waiting = True
    await self.client.start(self.ip)
    self.waiting = False
    if self.client.protocol is None:
        raise IOError("Could not connect to '{}'.".format(self.ip))
    self.open = True
Start asynchronous reconnect loop.
def rastrigin(self, x):
    """Rastrigin test objective function.

    Accepts a single vector, or a list of vectors (scored element-wise).
    """
    if isscalar(x[0]):
        dim = len(x)
        return 10 * dim + sum(x ** 2 - 10 * np.cos(2 * np.pi * x))
    dim = len(x[0])
    return [10 * dim + sum(xi ** 2 - 10 * np.cos(2 * np.pi * xi)) for xi in x]
Rastrigin test objective function
def _resolve(self):
    """Query the DNS server for the service SRV record and return the
    endpoints as http://ip:port URLs.

    Addresses come from the ADDITIONAL section, ports from the ANSWER
    section, joined by record name.
    """
    endpoints = {}
    r = self.resolver.query(self.service, 'SRV')
    for rec in r.response.additional:
        name = rec.name.to_text()
        addr = rec.items[0].address
        endpoints[name] = {'addr': addr}
    for rec in r.response.answer[0].items:
        name = '.'.join(rec.target.labels)
        endpoints[name]['port'] = rec.port
    return [
        'http://{ip}:{port}'.format(
            ip=v['addr'], port=v['port']
        ) for v in endpoints.values()
    ]
Query the consul DNS server for the service IP and port
def _http_date(_date_str: str) -> Optional[datetime.datetime]: if _date_str is not None: timetuple = parsedate(_date_str) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
Process a date string, return a datetime object
def makeControlModePacket(ID, mode):
    """Build the write packet that sets the xl-320 to either servo or
    wheel mode."""
    pkt = makeWritePacket(ID, xl320.XL320_CONTROL_MODE, le(mode))
    return pkt
Sets the xl-320 to either servo or wheel mode
def to_attrs(args, nocreate_if_none=['id', 'for', 'class']):
    """Make a python dict into HTML attribute k="v" format.

    None values emit a bare attribute name unless the key is in
    nocreate_if_none; values are escaped unless the key is listed in the
    module-level __noescape_attrs__.
    NOTE(review): mutable default argument is read-only here, so harmless.
    """
    if not args:
        return ''
    s = ['']
    for k, v in sorted(args.items()):
        k = u_str(k)
        v = u_str(v)
        # Leading underscore lets callers pass reserved words like _class.
        if k.startswith('_'):
            k = k[1:]
        if v is None:
            if k not in nocreate_if_none:
                s.append(k)
        else:
            if k.lower() in __noescape_attrs__:
                t = u_str(v)
            else:
                t = cgi.escape(u_str(v))
            t = '"%s"' % t.replace('"', '&quot;')
            s.append('%s=%s' % (k, t))
    return ' '.join(s)
Make python dict to k="v" format
def versions(self):
    """Return all version changes, collapsing consecutive duplicates."""
    changes = []
    previous = object()  # sentinel never equal to a real version
    for version, _ in self.restarts:
        if version != previous:
            changes.append(version)
            previous = version
    return changes
Return all version changes.
def sqrt(n):
    """Return the square root of n in an exact (Constructible) representation."""
    if isinstance(n, Rational):
        n = Constructible(n)
    elif not isinstance(n, Constructible):
        raise ValueError('the square root is not implemented for the type %s' % type(n))
    # Try to take the root within the current field first.
    r = n._try_sqrt()
    if r is not None:
        return r
    # Otherwise extend the field: represent sqrt(n) as 0 + 1*sqrt(n).
    return Constructible(Constructible.lift_rational_field(0, n.field),
                         Constructible.lift_rational_field(1, n.field),
                         (n, n.field))
return the square root of n in an exact representation
def callback(request):
    """Callback URL for Nexmo delivery receipts: log and acknowledge."""
    message_id = request.GET.get('messageId')
    status_id = request.GET.get('status')
    status_msg = NEXMO_STATUSES.get(status_id, UNKNOWN_STATUS)
    # NOTE(review): int(None) raises TypeError if 'err-code' is absent --
    # presumably Nexmo always sends it; confirm before hardening.
    error_id = int(request.GET.get('err-code'))
    error_msg = NEXMO_MESSAGES.get(error_id, UNKNOWN_MESSAGE)
    logger.info(u'Nexmo callback: Sms = %s, Status = %s, message = %s' % (
        message_id, status_msg, error_msg
    ))
    return HttpResponse('')
Callback URL for Nexmo.
def form_valid(self, form):
    """The request from the ulogin service is correct: authenticate the
    user, emit the assign signal, and redirect."""
    response = self.ulogin_response(form.cleaned_data['token'], self.request.get_host())
    if 'error' in response:
        return render(self.request, self.error_template_name, {'json': response})
    if user_is_authenticated(get_user(self.request)):
        user, identity, registered = \
            self.handle_authenticated_user(response)
    else:
        user, identity, registered = \
            self.handle_anonymous_user(response)
    assign.send(sender=ULoginUser,
                user=get_user(self.request),
                request=self.request,
                registered=registered,
                ulogin_user=identity,
                ulogin_data=response)
    return redirect(self.request.GET.get(REDIRECT_FIELD_NAME) or '/')
The request from ulogin service is correct
def find(self, entry_id, query=None):
    """Get a single entry by ID, scoping the query to this proxy's
    content type when one is set."""
    if query is None:
        query = {}
    if self.content_type_id is not None:
        query['content_type'] = self.content_type_id
    normalize_select(query)
    return super(EntriesProxy, self).find(entry_id, query=query)
Gets a single entry by ID.
def incrementSub(self, amount=1):
    """Increment the sub-progress bar by amount and let Qt repaint."""
    self._subProgressBar.setValue(self.subValue() + amount)
    QApplication.instance().processEvents()
Increments the sub-progress bar by amount.
def parse(cls, key):
    """Parse a key represented by a string and return its character code.

    Supports ints (passed through), '<KEY_...>' curses names,
    '<...>' curses.ascii names, '0x..' hex codes, two-char sequences
    (parsed recursively into a tuple), and single ascii characters.
    Raises ConfigError for anything else.
    """
    try:
        if isinstance(key, int):
            return key
        elif re.match('[<]KEY_.*[>]', key):
            return getattr(curses, key[1:-1])
        elif re.match('[<].*[>]', key):
            return getattr(curses.ascii, key[1:-1])
        elif key.startswith('0x'):
            return int(key, 16)
        elif len(key) == 2:
            return tuple(cls.parse(k) for k in key)
        else:
            code = ord(key)
            if 0 <= code <= 255:
                return code
            raise exceptions.ConfigError('Invalid configuration! `%s` is '
                                         'not in the ascii range' % key)
    except (AttributeError, ValueError, TypeError):
        raise exceptions.ConfigError('Invalid configuration! "%s" is not a '
                                     'valid key' % key)
Parse a key represented by a string and return its character code.
def connect(self, **kwargs):
    """Connect to Google Compute Engine.

    Extra keyword arguments are forwarded to the GCE driver constructor.
    Raises ComputeEngineManagerException when the driver cannot be created.
    """
    try:
        self.gce = get_driver(Provider.GCE)(
            self.user_id, self.key, project=self.project, **kwargs)
    except Exception as exc:
        # BUG FIX: the original bare ``except:`` also swallowed
        # SystemExit/KeyboardInterrupt and discarded the root cause.
        # Catch only real errors and chain the original exception.
        raise ComputeEngineManagerException(
            "Unable to connect to Google Compute Engine.") from exc
Connect to Google Compute Engine.
def get(cls, name, definition, output_dir):
    """Dispatch to the right Resource subclass based on keys present in
    the definition ('url', 'pypi', or plain)."""
    if 'url' in definition:
        return URLResource(name, definition, output_dir)
    elif 'pypi' in definition:
        return PyPIResource(name, definition, output_dir)
    else:
        return Resource(name, definition, output_dir)
Dispatch to the right subclass based on the definition.
def done(self):
    """Create an empty marker file to flag the task as done."""
    logger.info('Marking %s as done', self)
    fn = self.get_path()
    try:
        os.makedirs(os.path.dirname(fn))
    except OSError:
        # Directory already exists (or cannot be created) -- best effort.
        pass
    open(fn, 'w').close()
Creates temporary file to mark the task as `done`
def avl_release_kids(node):
    """Split a node from its kids, clearing the kids' parent pointers and
    resetting the node's balance and child links.

    Returns (node, left, right).
    """
    left = node.left
    right = node.right
    for child in (left, right):
        if child is not None:
            child.parent = None
    node.balance = 0
    node.left = None
    node.right = None
    return node, left, right
splits a node from its kids maintaining parent pointers
def match(self, name):
    """Return True if name matches one of the glob patterns in self.pats."""
    return any(fnmatch.fnmatch(name, pat) for pat in self.pats)
Returns True if name matches one of the patterns.
def find_gui_and_backend():
    """Return the (gui, backend) pair for the active matplotlib backend.

    gui is None when the backend has no known GUI mapping.
    """
    matplotlib = sys.modules['matplotlib']
    backend = matplotlib.rcParams['backend']
    gui = backend2gui.get(backend, None)
    return gui, backend
Return the gui and mpl backend.
def joined(self, a, b):
    """Return True if a and b are members of the same set.

    Elements not present in the mapping are never joined.
    """
    mapping = self._mapping
    if a in mapping and b in mapping:
        return mapping[a] is mapping[b]
    return False
Returns True if a and b are members of the same set.
def _deduce_security(kwargs) -> nmcli.SECURITY_TYPES:
    """Make sure that the security type is known, or throw ConfigureArgsError.

    When securityType is absent it is deduced from which of psk/eapConfig
    is present (both present is ambiguous and rejected).
    """
    sec_translation = {
        'wpa-psk': nmcli.SECURITY_TYPES.WPA_PSK,
        'none': nmcli.SECURITY_TYPES.NONE,
        'wpa-eap': nmcli.SECURITY_TYPES.WPA_EAP,
    }
    if not kwargs.get('securityType'):
        if kwargs.get('psk') and kwargs.get('eapConfig'):
            raise ConfigureArgsError(
                'Cannot deduce security type: psk and eap both passed')
        elif kwargs.get('psk'):
            kwargs['securityType'] = 'wpa-psk'
        elif kwargs.get('eapConfig'):
            kwargs['securityType'] = 'wpa-eap'
        else:
            kwargs['securityType'] = 'none'
    try:
        return sec_translation[kwargs['securityType']]
    except KeyError:
        raise ConfigureArgsError('securityType must be one of {}'
                                 .format(','.join(sec_translation.keys())))
Make sure that the security_type is known, or throw.
def output_file_name(self):
    """Name of the file where plugin's output should be written to.

    Colons and slashes in the URN path are replaced by underscores.
    """
    safe_path = re.sub(r":|/", "_", self.source_urn.Path().lstrip("/"))
    return "results_%s%s" % (safe_path, self.output_file_extension)
Name of the file where plugin's output should be written to.
def main():
    """Print checksum and file name for all files in the directory."""
    p = argparse.ArgumentParser(add_help="Recursively list interesting files.")
    p.add_argument(
        'directory', nargs="?", default="",
        help="The directory to process (current dir if omitted)."
    )
    p.add_argument(
        '--verbose', '-v', action='store_true',
        help="Increase verbosity."
    )
    args = p.parse_args()
    args.curdir = os.getcwd()
    if not args.directory:
        # BUG FIX: was ``args.direcotry`` (typo), so the current-directory
        # default was assigned to a dead attribute and list_files() got "".
        args.directory = args.curdir
    if args.verbose:
        print(args)
    for chsm, fname in list_files(args.directory):
        print(chsm, fname)
Print checksum and file name for all files in the directory.
def xor(*variables):
    """XOR over any number of variables: True iff an odd number are truthy."""
    truthy_count = sum(bool(value) for value in variables)
    return truthy_count % 2 == 1
XOR definition for multiple variables
def job_file(self):
    """The path to the submit description file representing this job.

    Also caches the computed path on self._job_file.
    """
    self._job_file = os.path.join(self.initial_dir, '%s.job' % (self.name))
    return self._job_file
The path to the submit description file representing this job.
def collection(self, attribute):
    """Return the collection corresponding to the attribute name.

    Raises KeyError for unknown attribute names.
    """
    return {
        "dependencies": self.dependencies,
        "publics": self.publics,
        "members": self.members,
        "types": self.types,
        "executables": self.executables,
        "interfaces": self.interfaces
    }[attribute]
Returns the collection corresponding the attribute name.
def save_config(self):
    """Save the current in-memory settings to the config file."""
    self.config_section['match_fuzzy'] = self.model_completer.match_fuzzy
    self.config_section['enable_vi_bindings'] = self.enable_vi_bindings
    self.config_section['show_completion_columns'] = \
        self.show_completion_columns
    self.config_section['show_help'] = self.show_help
    self.config_section['theme'] = self.theme
    self.config_obj.write()
Save the config to the config file.
def setTopRight(self, loc):
    """Move this region so its top right corner is on ``loc``."""
    offset = self.getTopRight().getOffset(loc)
    return self.setLocation(self.getTopLeft().offset(offset))
Move this region so its top right corner is on ``loc``
def itermonthdays2(cls, year, month):
    """Similar to calendar's itermonthdays2: yield (day, weekday) tuples,
    with day 0 for padding dates belonging to neighboring months."""
    for day in NepCal.itermonthdates(year, month):
        if day.month == month:
            yield (day.day, day.weekday())
        else:
            yield (0, day.weekday())
Similar to itermonthdays2 but returns tuples of day and weekday.
def send_rgb(dev, red, green, blue, dimmer):
    """Send a set of RGB values (channels 0-2) and dimmer (channel 6)
    to the light, starting at DMX channel 1."""
    channel_values = [0] * 512
    channel_values[0] = red
    channel_values[1] = green
    channel_values[2] = blue
    channel_values[6] = dimmer
    return dev.send_multi_value(1, channel_values)
Send a set of RGB values to the light
def update(self, dist):
    """Add the given DDist's counts and total to the current distribution."""
    assert isinstance(dist, DDist)
    for k, c in iteritems(dist.counts):
        self.counts[k] += c
    self.total += dist.total
Adds the given distribution's counts to the current distribution.
def _getSectionIds(self, server, sections):
    """Convert a list of section objects or names to the sectionIds
    needed for library sharing.

    Sections may be matched by id, title, or key (case-insensitive).
    """
    if not sections:
        return []
    allSectionIds = {}
    machineIdentifier = server.machineIdentifier if isinstance(server, PlexServer) else server
    url = self.PLEXSERVERS.replace('{machineId}', machineIdentifier)
    data = self.query(url, self._session.get)
    for elem in data[0]:
        # Index each section under all three lookup keys.
        allSectionIds[elem.attrib.get('id', '').lower()] = elem.attrib.get('id')
        allSectionIds[elem.attrib.get('title', '').lower()] = elem.attrib.get('id')
        allSectionIds[elem.attrib.get('key', '').lower()] = elem.attrib.get('id')
    log.debug(allSectionIds)
    sectionIds = []
    for section in sections:
        sectionKey = section.key if isinstance(section, LibrarySection) else section
        sectionIds.append(allSectionIds[sectionKey.lower()])
    return sectionIds
Converts a list of section objects or names to sectionIds needed for library sharing.
def _construct_module(info, target):
    """Build a module from templates and user supplied information.

    Uses the module-level ``paths`` and ``templates`` definitions.
    """
    for path in paths:
        real_path = os.path.abspath(os.path.join(target, path.format(**info)))
        log("Making directory '%s'" % real_path)
        os.makedirs(real_path)
    for item in templates.values():
        source = os.path.join('dev/templates', item[0])
        filename = os.path.abspath(
            os.path.join(target, item[1].format(**info)))
        log("Creating file from template '%s'" % filename, emitter='MANAGE')
        write_template_file(source, filename, info)
Build a module from templates and user supplied information
def CreateUser(username, password=None, is_admin=False):
    """Create a new GRR user.

    Raises UserAlreadyExistsError when the username is taken.
    """
    grr_api = maintenance_utils.InitGRRRootAPI()
    try:
        user_exists = grr_api.GrrUser(username).Get() is not None
    except api_errors.ResourceNotFoundError:
        user_exists = False
    if user_exists:
        raise UserAlreadyExistsError("User '%s' already exists." % username)
    user_type, password = _GetUserTypeAndPassword(
        username, password=password, is_admin=is_admin)
    grr_api.CreateGrrUser(
        username=username, user_type=user_type, password=password)
Creates a new GRR user.
def send_up(self, count):
    """Send the given number of UP key presses."""
    for i in range(count):
        self.interface.send_key(Key.UP)
Sends the given number of up key presses.
def thumb(self, obj):
    """Generate html and thumbnails for the admin site, creating the
    'newman_thumb' format on first use."""
    format, created = Format.objects.get_or_create(name='newman_thumb', defaults={
        'max_width': 100,
        'max_height': 100,
        'flexible_height': False,
        'stretch': False,
        'nocrop': True,
    })
    if created:
        format.sites = Site.objects.all()
    info = obj.get_formated_photo(format)
    return '<a href="%(href)s"><img src="%(src)s"></a>' % {
        'href': '%s/' % obj.pk,
        'src': info['url']
    }
Generates html and thumbnails for admin site.
def unplug(self):
    """Remove the actor's methods from the global callback registry."""
    if not self.__plugged:
        return
    members = set([method for _, method in inspect.getmembers(self, predicate=inspect.ismethod)])
    for message in global_callbacks:
        # Set difference drops this actor's handlers from each message.
        global_callbacks[message] -= members
    self.__plugged = False
Remove the actor's methods from the callback registry.
def error_retry_codes(self, value):
    """Set value for error_retry_codes.

    Accepts either a comma-separated string of codes or a list of ints.
    """
    if isinstance(value, six.string_types):
        value = [int(x) for x in value.split(",")]
    self._set_option("error_retry_codes", value)
Set value for error_retry_codes.