code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def topics_in(self, d, topn=5):
    """List the top ``topn`` topics in document ``d``."""
    doc_features = self.theta.features[d]
    return doc_features.top(topn)
def _validate_prepostloop_callable(cls, func: Callable[[None], None]) -> None:
cls._validate_callable_param_count(func, 0)
signature = inspect.signature(func)
if signature.return_annotation is not None:
raise TypeError("{} must declare return a return type of 'None'".format(
... | Check parameter and return types for preloop and postloop hooks. |
def _base_environ(self, **request):
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('local... | Override the default values for the wsgi environment variables. |
def _get_script_args(cls, type_, name, header, script_text):
"For Windows, add a .py extension"
ext = dict(console='.pya', gui='.pyw')[type_]
if ext not in os.environ['PATHEXT'].lower().split(';'):
msg = (
"{ext} not listed in PATHEXT; scripts will not be "
... | For Windows, add a .py extension |
def probes_used_generate_vector(probe_files_full, probe_files_model):
    """Generates boolean matrices indicating which are the probes for each model

    Fixes the Python-2-only ``dict.has_key`` call (removed in Python 3) by
    using the ``in`` operator, and replaces the manual fill loop with a
    comprehension. Returns a 1-D boolean array aligned with the sorted keys
    of ``probe_files_full``.
    """
    import numpy as np
    sorted_keys = sorted(probe_files_full.keys())
    # True where the probe key also appears in the model's probe files.
    return np.array([key in probe_files_model for key in sorted_keys],
                    dtype=bool)
def _flush(self, commit=True, using=None):
if commit:
if self._uses_savepoints():
self._commit_all_savepoints(using)
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
for key, value in c.items():
... | Flushes the internal cache, either to the memcache or rolls back |
def execute(self, input_data):
raw_bytes = input_data['sample']['raw_bytes']
matches = self.rules.match_data(raw_bytes)
flat_data = collections.defaultdict(list)
for filename, match_list in matches.iteritems():
for match in match_list:
if 'description' in matc... | yara worker execute method |
def pages(self):
    """Get pages, reloading the site if needed."""
    stored_rev = self.db.get('site:rev')
    # A revision mismatch means the cached pages are stale.
    if int(stored_rev) != self.revision:
        self.reload_site()
    return self._pages
def NegateQueryFilter(es_query):
    """Return a filter removing the contents of the provided query."""
    query_dict = es_query.to_dict().get("query", {})
    current_filter = query_dict.get("filtered", {}).get("filter", {})
    return Not(**current_filter)
def push_images():
    """Push all project docker images to a remote registry."""
    registry = conf.get('docker.registry')
    images = conf.get('docker.images', [])
    # A registry is mandatory; bail out with a non-zero exit otherwise.
    if registry is None:
        log.err("You must define docker.registry conf variable to push images")
        sys.exit(-1)
    for image in images:
        push_image(registry, image)
def _response(self, pdu):
    """Incoming datagrams are routed through an actor."""
    if _debug: UDPDirector._debug("_response %r", pdu)
    source_addr = pdu.pduSource
    # Reuse the actor for this peer, creating one on first contact.
    actor = self.peers.get(source_addr, None)
    if not actor:
        actor = self.actorClass(self, source_addr)
    actor.response(pdu)
def com_google_fonts_check_name_no_copyright_on_description(ttFont):
failed = False
for name in ttFont['name'].names:
if 'opyright' in name.string.decode(name.getEncoding())\
and name.nameID == NameID.DESCRIPTION:
failed = True
if failed:
yield FAIL, ("Namerecords with ID={} (NameID.DESCRIPTI... | Description strings in the name table must not contain copyright info. |
def do_status(self):
pid = self.pid.get()
status_color = 'green' if pid else 'red'
status_dot = self._colorize(UNICODE['dot'], status_color, encode=True)
active_txt = {
'active': '{} since {}'.format(self._colorize('active (running)', 'green'), self.pid.birthday()... | Get the status of the service. |
def author_display(author, *args):
    """Returns either the linked or not-linked profile name."""
    # Fall back gracefully when the author object lacks either accessor.
    url = getattr(author, 'get_absolute_url', lambda: None)()
    display_name = getattr(author, 'get_short_name',
                           lambda: six.text_type(author))()
    if not url:
        return display_name
    return mark_safe('<a href="{}">{}</a>'.format(url, display_name))
def _relative(self, uri):
if uri.startswith("http:") or \
uri.startswith("https:") or \
uri.startswith("file:") or \
uri.startswith("/"):
return uri
elif exists(uri):
return relpath(uri, self.basedir)
else:
return uri | if uri is relative, re-relate it to our basedir |
def __learn_oneself(self):
if not self.__parent_path or not self.__text_nodes:
raise Exception("This error occurred because the step constructor\
had insufficient textnodes or it had empty string\
for its parent xpath")
self.tnodes... | calculate cardinality, total and average string length |
def _add_global_counter(self):
    """Adds a global counter, called once for setup by @property global_step."""
    assert self._global_step is None
    with self.g.as_default():
        with self.g.name_scope(None):
            # Reuse an existing global_step tensor if the graph has one,
            # otherwise create a fresh non-trainable counter variable.
            try:
                self._global_step = self.g.get_tensor_by_name('global_step:0')
            except KeyError:
                self._global_step = tf.Variable(0, name='global_step', trainable=False)
def run(self):
    """Engage contained function with optional keyword arguments."""
    func = self._function
    if func and self._kwargs:
        return func(**self._kwargs)
    if func:
        return func()
    # No function configured: implicitly return None.
def shipping_options(request, country):
    """Get the shipping options for a given country"""
    rates = models.ShippingRate.objects.filter(countries__in=[country])
    serialized = serializers.ShippingRateSerializer(rates, many=True)
    return Response(data=serialized.data, status=status.HTTP_200_OK)
def _initialize_variables(self):
self._global_vars = {}
for mtf_dimension_name in (
self._layout_validator.splittable_mtf_dimension_names):
for mesh_dimension_name in (
self._layout_validator.mesh_dimension_name_to_size):
name = _global_var_name(mtf_dimension_name, mesh_dimension... | Initializing the variables of the IP. |
def worker_errordown(self, node, error):
self.config.hook.pytest_testnodedown(node=node, error=error)
try:
crashitem = self.sched.remove_node(node)
except KeyError:
pass
else:
if crashitem:
self.handle_crashitem(crashitem, node)
... | Emitted by the WorkerController when a node dies. |
def apply_lsadmin(fn):
    """apply fn to each line of lsadmin, returning the result

    Returns None when the ``lsadmin`` command is missing or exits non-zero.
    """
    cmd = ["lsadmin", "showconf", "lim"]
    try:
        output = subprocess.check_output(cmd).decode('utf-8')
    except (OSError, subprocess.CalledProcessError):
        # Previously a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to command-failure conditions only.
        return None
    return fn(output.split("\n"))
def remove_index(self):
    """Remove Elasticsearch index associated to the campaign"""
    client = self.index_client
    # The index must be closed before it can be deleted.
    client.close(self.index_name)
    client.delete(self.index_name)
def who(self, *args):
if len(self._users) == 0:
self.log('No users connected')
if len(self._clients) == 0:
self.log('No clients connected')
return
Row = namedtuple("Row", ['User', 'Client', 'IP'])
rows = []
for user in self._users.v... | Display a table of connected users and clients |
def parse_wait_time(text: str) -> int:
val = RATELIMIT.findall(text)
if len(val) > 0:
try:
res = val[0]
if res[1] == 'minutes':
return int(res[0]) * 60
if res[1] == 'seconds':
return int(res[0])
except Exception as e:
... | Parse the waiting time from the exception |
async def on_isupport_excepts(self, value):
    """Server allows ban exceptions."""
    # An empty ISUPPORT value means the default ban-exception mode letter.
    mode = value or BAN_EXCEPT_MODE
    self._channel_modes.add(mode)
    self._channel_modes_behaviour[rfc1459.protocol.BEHAVIOUR_LIST].add(mode)
def nodes_minimum_distance_validation(self):
if self.layer and self.layer.nodes_minimum_distance:
minimum_distance = self.layer.nodes_minimum_distance
near_nodes = Node.objects.exclude(pk=self.id).filter(geometry__distance_lte=(self.geometry, D(m=minimum_distance))).count()
if near_nodes > 0... | if minimum distance is specified, ensure node is not too close to other nodes; |
def _lstree(files, dirs):
for f, sha1 in files:
yield "100644 blob {}\t{}\0".format(sha1, f)
for d, sha1 in dirs:
yield "040000 tree {}\t{}\0".format(sha1, d) | Make git ls-tree like output. |
def on_task_status(self, task):
if not self.interactive:
super(OneScheduler, self).on_task_status(task)
try:
procesok = task['track']['process']['ok']
except KeyError as e:
logger.error("Bad status pack: %s", e)
return None
if procesok:
... | Ignore not processing error in interactive mode |
def clear(self):
while self.current or self.idlers or self.queue or self.rpcs:
current = self.current
idlers = self.idlers
queue = self.queue
rpcs = self.rpcs
_logging_debug('Clearing stale EventLoop instance...')
if current:
_logging_debug(' current = %s', current)
... | Remove all pending events without running any. |
def wrap_tuple_streams(unwrapped, kdims, streams):
param_groups = [(s.contents.keys(), s) for s in streams]
pairs = [(name,s) for (group, s) in param_groups for name in group]
substituted = []
for pos,el in enumerate(wrap_tuple(unwrapped)):
if el is None and pos < len(kdims):
matche... | Fills in tuple keys with dimensioned stream values as appropriate. |
def to_decimal(value, ctx):
if isinstance(value, bool):
return Decimal(1) if value else Decimal(0)
elif isinstance(value, int):
return Decimal(value)
elif isinstance(value, Decimal):
return value
elif isinstance(value, str):
try:
return Decimal(value)
... | Tries conversion of any value to a decimal |
def get(self, request, *args, **kwargs):
    """The queryset returns all measurement objects"""
    queryset = Measurement.objects.all()
    return data_csv(self.request, queryset)
def compile(self):
    """Return Hip string if already compiled else compile it."""
    # Compile lazily on first access, then serve the cached buffer.
    if self.buffer is None:
        self.buffer = self._compile_value(self.data, 0)
    return self.buffer.strip()
def _get_object_key(self, p_object):
matched_key = None
matched_index = None
if hasattr(p_object, self._searchNames[0]):
return getattr(p_object, self._searchNames[0])
for x in xrange(len(self._searchNames)):
key = self._searchNames[x]
if hasattr(p_obj... | Get key from object |
def deobfuscate_email(text):
text = unescape(text)
text = _deobfuscate_dot1_re.sub('.', text)
text = _deobfuscate_dot2_re.sub(r'\1.\2', text)
text = _deobfuscate_dot3_re.sub(r'\1.\2', text)
text = _deobfuscate_at1_re.sub('@', text)
text = _deobfuscate_at2_re.sub(r'\1@\2', text)
text = _deobf... | Deobfuscate email addresses in provided text |
def cbow_fasttext_batch(centers, contexts, num_tokens, subword_lookup, dtype,
index_dtype):
_, contexts_row, contexts_col = contexts
data, row, col = subword_lookup(contexts_row, contexts_col)
centers = mx.nd.array(centers, dtype=index_dtype)
contexts = mx.nd.sparse.csr_matrix(
... | Create a batch for CBOW training objective with subwords. |
def _first(self, **spec):
    """Get the earliest entry in this category, optionally including subcategories"""
    ordered = self._entries(spec).order_by(model.Entry.local_date,
                                           model.Entry.id)
    # Only the first record (if any) is wrapped and returned.
    for record in ordered[:1]:
        return entry.Entry(record)
    return None
def write(self, directory, name=None, session=None, replaceParamFile=None):
if name != None:
filename = '%s.%s' % (name, self.fileExtension)
filePath = os.path.join(directory, filename)
else:
filePath = os.path.join(directory, self.filename)
if type(self.raste... | Index Map Write to File Method |
def ensure_ec_params(jwk_dict, private):
    """Ensure all required EC parameters are present in dictionary"""
    provided = frozenset(jwk_dict.keys())
    # Private keys need the private parameters on top of the public ones.
    if private is not None and private:
        required = EC_PUBLIC_REQUIRED | EC_PRIVATE_REQUIRED
        return ensure_params('EC', provided, required)
    return ensure_params('EC', provided, EC_PUBLIC_REQUIRED)
def inactive(self):
    """Return inactive staff members"""
    base_queryset = super(StaffMemberManager, self).get_queryset()
    return base_queryset.filter(is_active=False)
def workers(self):
worker_keys = self.redis_client.keys("Worker*")
workers_data = {}
for worker_key in worker_keys:
worker_info = self.redis_client.hgetall(worker_key)
worker_id = binary_to_hex(worker_key[len("Workers:"):])
workers_data[worker_id] = {
... | Get a dictionary mapping worker ID to worker information. |
def list_manga_series(self, filter=None, content_type='jp_manga'):
    """Get a list of manga series"""
    # Thin delegation to the underlying manga API client.
    return self._manga_api.list_series(filter, content_type)
def ep(self, exc: Exception) -> bool:
    """Return False if the exception had not been handled gracefully"""
    # Only a ConnectionAbortedError carrying exactly (origin, reason)
    # counts as a graceful exit.
    if not isinstance(exc, ConnectionAbortedError):
        return False
    if len(exc.args) != 2:
        return False
    _origin, _reason = exc.args
    logging.getLogger(__name__).warning('Exited')
    return True
def validate(schema_file, config_file, deprecation):
result = validator_from_config_file(config_file, schema_file)
result.validate(error_on_deprecated=deprecation)
for error in result.errors():
click.secho('Error : %s' % error, err=True, fg='red')
for warning in result.warnings():
clic... | Validate a configuration file against a confirm schema. |
def Set(self, interface_name, property_name, value, *args, **kwargs):
self.log('Set %s.%s%s' % (interface_name,
property_name,
self.format_args((value,))))
try:
iface_props = self.props[interface_name]
except KeyErro... | Standard D-Bus API for setting a property value |
def _process_messages(self):
try:
for message in self.consumer:
try:
if message is None:
self.logger.debug("no message")
break
loaded_dict = json.loads(message.value)
self.logg... | Processes messages received from kafka |
def dumps(self):
    """Return the Exception data in a format for JSON-RPC."""
    payload = {'code': self.code, 'message': str(self.message)}
    # `data` is an optional member of a JSON-RPC error object.
    if self.data is not None:
        payload['data'] = self.data
    return payload
def stamp(self, **kwargs):
    """Return the path for a stamp file for a scatter gather job"""
    # Overlay caller-supplied values onto a copy of the base dictionary.
    merged = self.base_dict.copy()
    merged.update(**kwargs)
    return NameFactory.stamp_format.format(**merged)
def _init_state(self, initial_state: Union[int, np.ndarray]):
state = np.reshape(
sim.to_valid_state_vector(initial_state, self._num_qubits),
(self._num_shards, self._shard_size))
state_handle = mem_manager.SharedMemManager.create_array(
state.view(dtype=np.float32))
... | Initializes a the shard wavefunction and sets the initial state. |
def Compile(self, filter_implemention):
    """Returns the data_store filter implementation from the attribute."""
    op = self.operator_method
    return op(self.attribute_obj, filter_implemention, *self.args)
def view_indexes(self, done=None):
ret = []
if done is None:
done = set()
idx = 0
while idx < self.count():
if not idx in done:
break
idx += 1
while idx < self.count():
w = self.wp(idx)
if idx in done:
... | return a list waypoint indexes in view order |
def verbosedump(value, fn, compress=None):
    """Verbose wrapper around dump"""
    message = 'Saving "%s"... (%s)' % (fn, type(value))
    print(message)
    dump(value, fn, compress=compress)
def sync_groups_from_ad(self):
ad_list = ADGroupMapping.objects.values_list('ad_group', 'group')
mappings = {ad_group: group for ad_group, group in ad_list}
user_ad_groups = set(self.ad_groups.filter(groups__isnull=False).values_list(flat=True))
all_mapped_groups = set(mappings.values())... | Determine which Django groups to add or remove based on AD groups. |
def format_exception(e):
    """Returns a string containing the type and text of the exception."""
    from .utils.printing import fill
    lines = traceback.format_exception_only(type(e), e)
    return '\n'.join(fill(line) for line in lines)
def _check(self):
_logger.debug('Check if timeout.')
self._call_later_handle = None
if self._touch_time is not None:
difference = self._event_loop.time() - self._touch_time
_logger.debug('Time difference %s', difference)
if difference > self._timeout:
... | Check and close connection if needed. |
def _get_conn():
try:
conn = psycopg2.connect(
host=__opts__['master_job_cache.postgres.host'],
user=__opts__['master_job_cache.postgres.user'],
password=__opts__['master_job_cache.postgres.passwd'],
database=__opts__['master_job_cache.postgres.db'... | Return a postgres connection. |
def count(self):
    """A count based on `count_field` and `format_args`."""
    args = self.format_args
    # A count is required: either a bare value or a dict containing the field.
    missing = args is None or (isinstance(args, dict)
                               and self.count_field not in args)
    if missing:
        raise TypeError("count is required")
    if isinstance(args, dict):
        return args[self.count_field]
    return args
def roll(cls, num, sides, add):
    """Rolls a die of sides sides, num times, sums them, and adds add"""
    # One random draw per die, then the modifier is appended last.
    results = [random.randint(1, sides) for _ in range(num)]
    results.append(add)
    return results
def tabulate(lol, headers, eol='\n'):
    """Use the pypi tabulate package instead!"""
    # Markdown-style table: header row, separator row, then data rows.
    yield '| %s |' % ' | '.join(headers) + eol
    dashes = ['-' * len(h) for h in headers]
    yield '| %s:|' % ':| '.join(dashes) + eol
    for row in lol:
        cells = ' | '.join(str(cell) for cell in row)
        yield '| %s |' % cells + eol
def list_wordpressess(self, service_id, version_number):
    """Get all of the wordpresses for a specified service and version."""
    path = "/service/%s/version/%d/wordpress" % (service_id, version_number)
    content = self._fetch(path)
    # Preserve the lazy map() return type of the original API.
    return map(lambda x: FastlyWordpress(self, x), content)
def draw_edges(self):
if self.backend == "matplotlib":
for i, (n1, n2) in enumerate(self.edges):
x1, y1 = self.locs[n1]
x2, y2 = self.locs[n2]
color = self.edge_colors[i]
line = Line2D(
xdata=[x1, x2],
... | Draws edges to screen. |
def persist(self):
    """a private method that persists an estimator object to the filesystem"""
    if self.object_hash:
        payload = dill.dumps(self.object_property)
        content = ContentFile(payload)
        # save=False defers persisting the parent model record.
        self.object_file.save(self.object_hash, content, save=False)
        content.close()
        self._persisted = True
    return self._persisted
def wind_shear(shear: str, unit_alt: str = 'ft', unit_wind: str = 'kt') -> str:
    """Format wind shear string into a spoken word string"""
    spoken_alt = SPOKEN_UNITS.get(unit_alt, unit_alt)
    spoken_wind = SPOKEN_UNITS.get(unit_wind, unit_wind)
    result = translate.wind_shear(shear, spoken_alt, spoken_wind, spoken=True)
    return result or 'Wind shear unknown'
def intersection(self, other):
    """Returns the intersection of the taxon sets of two Trees"""
    return self.labels & other.labels
def full_path(base, fmt):
ext = fmt['extension']
suffix = fmt.get('suffix')
prefix = fmt.get('prefix')
full = base
if prefix:
prefix_dir, prefix_file_name = os.path.split(prefix)
notebook_dir, notebook_file_name = os.path.split(base)
sep = base[len(notebook_dir):-len(notebook... | Return the full path for the notebook, given the base path |
def aligner_from_header(in_bam):
    """Identify aligner from the BAM header; handling pre-aligned inputs."""
    from bcbio.pipeline.alignment import TOOLS
    with pysam.Samfile(in_bam, "rb") as bamfile:
        for program in bamfile.header.get("PG", []):
            program_name = program.get("PN", "").lower()
            # Return the first known tool mentioned in a @PG record.
            for tool in TOOLS.keys():
                if program_name.find(tool) >= 0:
                    return tool
def ikev2scan(ip, **kwargs):
    """Send a IKEv2 SA to an IP and wait for answers."""
    proposal = IKEv2_payload_SA(prop=IKEv2_payload_Proposal())
    probe = IP(dst=ip) / UDP() / IKEv2(init_SPI=RandString(8),
                                       exch_type=34) / proposal
    return sr(probe, **kwargs)
def load_config_from_file(app, filepath):
    """Helper function to load config from a specified file"""
    try:
        app.config.from_pyfile(filepath)
    except IOError:
        # Missing settings files are optional extras, not fatal.
        print("Did not find settings file %s for additional settings, skipping it" % filepath, file=sys.stderr)
        return False
    return True
def _get_dispatches_for_update(filter_kwargs):
dispatches = Dispatch.objects.prefetch_related('message').filter(
**filter_kwargs
).select_for_update(
**GET_DISPATCHES_ARGS[1]
).order_by('-message__time_created')
try:
dispatches = list(dispatches)
except NotSupportedError:
... | Distributed friendly version using ``select for update``. |
def discovery(self, compute_resource):
if compute_resource is None:
cr_list = ComputeResource.all(self.client)
print("ERROR: You must specify a ComputeResource.")
print("Available ComputeResource's:")
for cr in cr_list:
print(cr.name)
s... | An example that discovers hosts and VMs in the inventory. |
def log_message(self, msg, *args):
    """Hook to log a message.

    Defers %-interpolation to the logging framework (lazy formatting),
    which is the stdlib idiom and skips formatting entirely when the
    INFO level is disabled. Rendered output is identical to the previous
    eager ``msg % args``.
    """
    self.logger.info(msg, *args)
def ConsultarLocalidades(self, cod_provincia, sep="||"):
"Consulta las localidades habilitadas"
ret = self.client.consultarLocalidadesPorProvincia(
auth={
'token': self.Token, 'sign': self.Sign,
'cuit': self.Cuit, },
... | Consulta las localidades habilitadas |
def write_eof(self):
self._check_status()
if not self._writable:
raise TransportError('transport is not writable')
if self._closing:
raise TransportError('transport is closing')
try:
self._handle.shutdown(self._on_write_complete)
except pyuv.er... | Shut down the write direction of the transport. |
def POST(self, id):
    """Delete based on ID"""
    todo_id = int(id)
    model.del_todo(todo_id)
    # web.py idiom: redirect back to the index after the mutation.
    raise web.seeother('/')
def run(app, appbuilder, host, port, debug):
    """Runs Flask dev web server."""
    builder = import_application(app, appbuilder)
    flask_app = builder.get_app
    flask_app.run(host=host, port=port, debug=debug)
def volume_detach(self,
name,
timeout=300):
try:
volume = self.volume_show(name)
except KeyError as exc:
raise SaltCloudSystemExit('Unable to find {0} volume: {1}'.format(name, exc))
if not volume['attachments']:
ret... | Detach a block device |
def read(file_or_stream, fmt, as_version=4, **kwargs):
fmt = long_form_one_format(fmt)
if fmt['extension'] == '.ipynb':
notebook = nbformat.read(file_or_stream, as_version, **kwargs)
rearrange_jupytext_metadata(notebook.metadata)
return notebook
return reads(file_or_stream.read(), fm... | Read a notebook from a file |
def report_pyflakes(document):
reporter = _FlakesReporter()
pyflakes.api.check(document.text, '', reporter=reporter)
def format_flake_message(message):
return [
('class:flakemessage.prefix', 'pyflakes:'),
('', ' '),
('class:flakemessage', message.message % message... | Run pyflakes on document and return list of ReporterError instances. |
def clean(self, *args, **kwargs):
if not self.pk:
node = self.node
if node.participation_settings.comments_allowed is False:
raise ValidationError("Comments not allowed for this node")
if 'nodeshot.core.layers' in settings.INSTALLED_APPS:
layer... | Check if comments can be inserted for parent node or parent layer |
def replace(self, source, dest):
    """Replace source broker with destination broker in replica set if found."""
    # Only the first occurrence is replaced, matching list.index semantics.
    try:
        position = self.replicas.index(source)
    except ValueError:
        return
    self.replicas[position] = dest
def CallLoggedAndAccounted(f):
@functools.wraps(f)
def Decorator(*args, **kwargs):
try:
start_time = time.time()
result = f(*args, **kwargs)
latency = time.time() - start_time
stats_collector_instance.Get().RecordEvent(
"db_request_latency", latency, fields=[f.__name__])
... | Decorator to log and account for a DB call. |
def _parse_key(key):
splt = key.split("\\")
hive = splt.pop(0)
key = '\\'.join(splt)
return hive, key | split the hive from the key |
def push(**kwargs):
output, err = cli_syncthing_adapter.refresh(**kwargs)
if output:
click.echo("%s" % output, err=err)
if kwargs['verbose'] and not err:
with click.progressbar(
iterable=None,
length=100,
label='Synchronizing') as bar:
device_num = 0
max_devices = 1
pre... | Force synchronization of directory. |
def choice(anon, obj, field, val):
    """Randomly chooses one of the choices set on the field."""
    faker = anon.faker
    return faker.choice(field=field)
def login(self, username, password, application, application_url):
    """Login to anaconda cloud."""
    logger.debug(str((username, application, application_url)))
    auth_method = self._anaconda_client_api.authenticate
    # Authentication runs in a worker so the UI stays responsive.
    return self._create_worker(auth_method, username, password, application,
                               application_url)
def textContent(self, text: str) -> None:
    """Set text content to inner node.

    Writes to ``self._inner_element`` when one is present; otherwise
    falls back to the parent class behavior.
    """
    if self._inner_element:
        self._inner_element.textContent = text
    else:
        # NOTE(review): attribute assignment through super() raises
        # AttributeError in CPython (super objects do not support setattr)
        # unless the parent exposes `textContent` as a data descriptor whose
        # __set__ is reached another way — confirm this branch is exercised
        # and works against the (unseen) parent class.
        super().textContent = text
def in_reply_to(self) -> Optional[UnstructuredHeader]:
    """The ``In-Reply-To`` header."""
    # Missing header (KeyError) or empty header list (IndexError) → None.
    try:
        first = self[b'in-reply-to'][0]
    except (KeyError, IndexError):
        return None
    return cast(UnstructuredHeader, first)
def randbetween(ctx, bottom, top):
    """Returns a random integer number between the numbers you specify"""
    low = conversions.to_integer(bottom, ctx)
    high = conversions.to_integer(top, ctx)
    return random.randint(low, high)
def _save_traceback_history(self, status, trace, job_exc):
failure_date = datetime.datetime.utcnow()
new_history = {
"date": failure_date,
"status": status,
"exceptiontype": job_exc.__name__
}
traces = trace.split("---- Original exception: -----")
... | Create traceback history or add a new traceback to history. |
def check_webhook_validation(app_configs=None, **kwargs):
from . import settings as djstripe_settings
messages = []
validation_options = ("verify_signature", "retrieve_event")
if djstripe_settings.WEBHOOK_VALIDATION is None:
messages.append(
checks.Warning(
"Webhook validation is disabled, this is a securi... | Check that DJSTRIPE_WEBHOOK_VALIDATION is valid |
def _summarize_peaks(peaks):
previous = peaks[0]
new_peaks = [previous]
for pos in peaks:
if pos > previous + 10:
new_peaks.add(pos)
previous = pos
return new_peaks | merge peaks position if closer than 10 |
async def load_user(self, request):
    """Load user from request."""
    # Resolve the user from the session only on first access per request.
    if USER_KEY not in request:
        session = await self.load(request)
        if 'id' not in session:
            return None
        loaded = await self._user_loader(session['id'])
        request[USER_KEY] = request.user = loaded
    return request[USER_KEY]
def _iter_restrict(self, zeros, ones):
inputs = list(self.inputs)
unmapped = dict()
for i, v in enumerate(self.inputs):
if v in zeros:
inputs[i] = 0
elif v in ones:
inputs[i] = 1
else:
unmapped[v] = i
vs ... | Iterate through indices of all table entries that vary. |
def _setup_authentication(self, username, password):
if self.version < 1.1:
if not username:
username = self._key
self._key = None
if not username:
return
if not password:
password = '12345'
password_mgr = urllib2.HTTPPa... | Create the authentication object with the given credentials. |
def main(cls):
    """Hook to run this `Chain` from the command line"""
    chain = cls.create()
    parsed = chain._run_argparser(sys.argv[1:])
    dry = parsed.dry_run
    chain._run_chain(sys.stdout, dry)
    chain._finalize(dry)
def add_seq(self, seq):
    """Use this method to add a SeqRecord object to this fasta."""
    self.buffer.append(seq)
    self.buf_count += 1
    # Flush whenever a full buffer's worth has accumulated.
    if self.buf_count % self.buffer_size == 0:
        self.flush()
def add_product_version_to_build_configuration(id=None, name=None, product_version_id=None):
    """Associate an existing ProductVersion with a BuildConfiguration"""
    # BUG FIX: the original called remove_product_version_from_build_configuration_raw,
    # which would DISASSOCIATE the version instead of associating it
    # (copy-paste from the matching `remove_` function).
    data = add_product_version_to_build_configuration_raw(id, name, product_version_id)
    if data:
        return utils.format_json_list(data)
def authorize_url(self, state=''):
    """return user authorize url"""
    base = 'https://openapi.youku.com/v2/oauth2/authorize?'
    # Parameter order matters for byte-identical URLs (dicts keep insertion order).
    query = urlencode({
        'client_id': self.client_id,
        'response_type': 'code',
        'state': state,
        'redirect_uri': self.redirect_uri
    })
    return base + query
def opls_notation(atom_key):
conflicts = ['ne', 'he', 'na']
if atom_key in conflicts:
raise _AtomKeyConflict((
"One of the OPLS conflicting "
"atom_keys has occured '{0}'. "
"For how to solve this issue see the manual or "
"MolecularSystem._atom_key_swap()... | Return element for OPLS forcefield atom key. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.