code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def _get_signature(self, entry):
if self._sha1_sigs:
if not os.path.exists(entry.filename):
return None
try:
with open(entry.filename, "r") as f:
data = f.read()
return hashlib.sha1(data.encode("utf-8")).digest()
... | Return the signature of the file stored in entry. |
def length(self):
return sum([shot.length for shot in self.shots if not shot.is_splay]) | Total surveyed cave length, not including splays. |
def close_connection (self):
if self.url_connection is not None:
try:
self.url_connection.quit()
except Exception:
pass
self.url_connection = None | Release the open connection from the connection pool. |
def run_actor(self, actor):
set_actor(actor)
if not actor.mailbox.address:
address = ('127.0.0.1', 0)
actor._loop.create_task(
actor.mailbox.start_serving(address=address)
)
actor._loop.run_forever() | Start running the ``actor``. |
def as_dict(self):
"Get the WorkResult as a dict."
return {
'output': self.output,
'test_outcome': self.test_outcome,
'worker_outcome': self.worker_outcome,
'diff': self.diff,
} | Get the WorkResult as a dict. |
def encode(value, encoding='utf-8', encoding_errors='strict'):
if isinstance(value, bytes):
return value
if not isinstance(value, basestring):
value = str(value)
if isinstance(value, unicode):
value = value.encode(encoding, encoding_errors)
return value | Return a bytestring representation of the value. |
def function_end(self):
self.newline_label(self.shared.function_name + "_exit", True, True)
self.move("%14", "%15")
self.pop("%14")
self.newline_text("RET", True) | Inserts an exit label and function return instructions |
def param_dict_to_list(dict,skeys=None):
RV = SP.concatenate([dict[key].flatten() for key in skeys])
return RV
pass | convert from param dictionary to list |
def render_registration(self):
surface = self.get_surface()
if self.canvas is None or self.df_canvas_corners.shape[0] == 0:
return surface
corners = self.df_canvas_corners.copy()
corners['w'] = 1
transform = self.canvas.shapes_to_canvas_transform
canvas_corner... | Render pinned points on video frame as red rectangle. |
def fileserver(opts, backends):
return LazyLoader(_module_dirs(opts, 'fileserver'),
opts,
tag='fileserver',
whitelist=backends,
pack={'__utils__': utils(opts)}) | Returns the file server modules |
def on_batch_begin(self, train, **kwargs:Any)->None:
"Record learning rate and momentum at beginning of batch."
if train:
self.lrs.append(self.opt.lr)
self.moms.append(self.opt.mom) | Record learning rate and momentum at beginning of batch. |
def precheck():
cfg_path = local_config['PROJECT']['CONFIG_PATH']
logging = set_logging(local_config)
if os.path.exists(cfg_path):
logger.info('%s: config_path parameter: %s' % (inspect.stack()[0][3], cfg_path))
logger.info(
'%s: Existing configuration file found. precheck pass.'... | Verify project runtime dependencies |
def add_dependency(self, value):
if value.name in self.dependencies:
self.dependencies[value.name.lower()].append(value)
else:
self.dependencies[value.name.lower()] = [ value ] | Adds the specified executable dependency to the list for this executable. |
def NoCache(self, *targets):
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
for t in tlist:
t.set_nocache()
return tlist | Tags a target so that it will not be cached |
def parse_field(source, loc, tokens):
name = tokens[0].lower()
value = normalize_value(tokens[2])
if name == 'author' and ' and ' in value:
value = [field.strip() for field in value.split(' and ')]
return (name, value) | Returns the tokens of a field as key-value pair. |
def writeln(self, text, fg='black', bg='white'):
if not isinstance(text, str):
text = str(text)
self.write(text + '\n', fg=fg, bg=bg) | write to the console with linefeed |
def send_keys(self, keyserver, *keyids):
result = self._result_map['list'](self)
log.debug('send_keys: %r', keyids)
data = _util._make_binary_stream("", self._encoding)
args = ['--keyserver', keyserver, '--send-keys']
args.extend(keyids)
self._handle_io(args, data, result... | Send keys to a keyserver. |
def to_det_id(self, det_id_or_det_oid):
try:
int(det_id_or_det_oid)
except ValueError:
return self.get_det_id(det_id_or_det_oid)
else:
return det_id_or_det_oid | Convert det ID or OID to det ID |
def pack_value(self, val):
if isinstance(val, bytes):
val = list(iterbytes(val))
slen = len(val)
if self.pad:
pad = b'\0\0' * (slen % 2)
else:
pad = b''
return struct.pack('>' + 'H' * slen, *val) + pad, slen, None | Convert 8-byte string into 16-byte list |
def builds(self, confs):
self._named_builds = {}
self._builds = []
for values in confs:
if len(values) == 2:
self._builds.append(BuildConf(values[0], values[1], {}, {}, self.reference))
elif len(values) == 4:
self._builds.append(BuildConf(v... | For retro compatibility directly assigning builds |
def _svg_path(self, pathcodes, data):
def gen_path_elements(pathcodes, data):
counts = {'M': 1, 'L': 1, 'C': 3, 'Z': 0}
it = iter(data)
for code in pathcodes:
yield code
for _ in range(counts[code]):
p = next(it)
... | Return the SVG path's 'd' element. |
def getStats(jobStore):
def aggregateStats(fileHandle,aggregateObject):
try:
stats = json.load(fileHandle, object_hook=Expando)
for key in list(stats.keys()):
if key in aggregateObject:
aggregateObject[key].append(stats[key])
else:
... | Collect and return the stats and config data. |
def _register_data_plane_account_arguments(self, command_name):
from azure.cli.core.commands.parameters import get_resource_name_completion_list
from ._validators import validate_client_parameters
command = self.command_loader.command_table.get(command_name, None)
if not command:
... | Add parameters required to create a storage client |
def _new_sample(self, sink):
if self.running:
buf = sink.emit('pull-sample').get_buffer()
mem = buf.get_all_memory()
success, info = mem.map(Gst.MapFlags.READ)
if success:
data = info.data
mem.unmap(info)
self.queue.... | The callback for appsink's "new-sample" signal. |
def pid_from_context(_, context):
pid = (context or {}).get('pid')
return pid.pid_value if pid else missing | Get PID from marshmallow context. |
def path(self):
if (self.source_file.startswith('a/') and
self.target_file.startswith('b/')):
filepath = self.source_file[2:]
elif (self.source_file.startswith('a/') and
self.target_file == '/dev/null'):
filepath = self.source_file[2:]
elif (... | Return the file path abstracted from VCS. |
def DualDBFlow(cls):
if issubclass(cls, flow.GRRFlow):
raise ValueError("Mixin class shouldn't inherit from GRRFlow.")
if cls.__name__[-5:] != "Mixin":
raise ValueError("Flow mixin should have a name that ends in 'Mixin'.")
flow_name = cls.__name__[:-5]
aff4_cls = type(flow_name, (cls, flow.GRRFlow), {}... | Decorator that creates AFF4 and RELDB flows from a given mixin. |
def _get_ratings_page(self):
self._build_url()
soup = get_soup(self.url)
if soup:
return soup
self._build_url(shorten=False)
soup = get_soup(self.url)
if soup:
return soup
return SearchDaily(self.category, date=self.date).fetch_result() | Do a limited search for the correct url. |
def save(cls, filename, config):
mode = os.O_WRONLY | os.O_TRUNC | os.O_CREAT
with os.fdopen(os.open(filename, mode, 0o600), 'w') as fname:
yaml.safe_dump(config, fname, indent=4, default_flow_style=False) | Save configuration to yaml file. |
def read_calibration(
detx=None, det_id=None, from_file=False, det_id_table=None
):
from km3pipe.calib import Calibration
if not (detx or det_id or from_file):
return None
if detx is not None:
return Calibration(filename=detx)
if from_file:
det_ids = np.unique(det_id_tabl... | Retrive calibration from file, the DB. |
def resolve(self,size, distribution):
if not self.variablesize():
raise Exception("Can only resize patterns with * wildcards")
nrofwildcards = 0
for x in self.sequence:
if x == '*':
nrofwildcards += 1
assert (len(distribution) == nrofwildcards)
... | Resolve a variable sized pattern to all patterns of a certain fixed size |
def calculate_quality_metrics(metrics, designs, verbose=False):
for metric in metrics:
if metric.progress_update:
print metric.progress_update
metric.load(designs, verbose) | Have each metric calculate all the information it needs. |
def make_compound_ctype(self, varname):
compoundname = self.get_var_type(varname)
nfields = self.inq_compound(compoundname)
fields = []
for i in range(nfields):
(fieldname, fieldtype,
fieldrank, fieldshape) = self.inq_compound_field(compoundname, i)
a... | Create a ctypes type that corresponds to a compound type in memory. |
def _fetch_metric(self, metric_name):
request = {
'Namespace': self.CLOUDWATCH_NAMESPACE,
'MetricName': metric_name,
'Dimensions': [
{
'Name': 'TrainingJobName',
'Value': self.name
}
],
... | Fetch all the values of a named metric, and add them to _data |
def token(self):
if AUTH_TOKEN_HEADER in self.request.headers:
return self.request.headers[AUTH_TOKEN_HEADER]
else:
return self.get_cookie(AUTH_COOKIE_NAME) | The token used for the request |
def simplify_recursive(typ):
if isinstance(typ, UnionType):
return combine_types(typ.items)
elif isinstance(typ, ClassType):
simplified = ClassType(typ.name, [simplify_recursive(arg) for arg in typ.args])
args = simplified.args
if (simplified.name == 'Dict' and len(args) == 2
... | Simplify all components of a type. |
def _remove_deprecated_options(self, old_version):
old_defaults = self._load_old_defaults(old_version)
for section in old_defaults.sections():
for option, _ in old_defaults.items(section, raw=self.raw):
if self.get_default(section, option) is NoDefault:
... | Remove options which are present in the .ini file but not in defaults |
def _lab_to_rgb(labs):
labs, n_dim = _check_color_dim(labs)
y = (labs[:, 0] + 16.) / 116.
x = (labs[:, 1] / 500.) + y
z = y - (labs[:, 2] / 200.)
xyz = np.concatenate(([x], [y], [z]))
over = xyz > 0.2068966
xyz[over] = xyz[over] ** 3.
xyz[~over] = (xyz[~over] - 0.13793103448275862) / 7.7... | Convert Nx3 or Nx4 lab to rgb |
def __init_yaml():
global _yaml_initialized
if not _yaml_initialized:
_yaml_initialized = True
yaml.add_constructor(u'tag:yaml.org,2002:Frame', _frame_constructor)
yaml.add_constructor(u'tag:yaml.org,2002:Signal', _signal_constructor)
yaml.add_representer(canmatrix.Frame, _frame_... | Lazy init yaml because canmatrix might not be fully loaded when loading this format. |
def copy_from(self, other_state):
for prop in self.properties().values():
setattr(self, prop.name, getattr(other_state, prop.name)) | Copy data from another shard state entity to self. |
def _get_config(self, rel_path, view, api_version=1):
self._require_min_api_version(api_version)
params = view and dict(view=view) or None
resp = self._get_resource_root().get(self._path() + '/' + rel_path,
params=params)
return json_to_config(resp, view == 'full') | Retrieves an ApiConfig list from the given relative path. |
def _convert_value(value):
try:
return restore(ast.literal_eval(value))
except (ValueError, SyntaxError):
if SETTINGS.COMMAND_LINE.STRICT_PARSING:
raise
return value | Parse string as python literal if possible and fallback to string. |
def isRealmUser(self, realmname, username, environ):
try:
course = self.course_factory.get_course(realmname)
ok = self.user_manager.has_admin_rights_on_course(course, username=username)
return ok
except:
return False | Returns True if this username is valid for the realm, False otherwise. |
def gen_procfile(ctx, wsgi, dev):
if wsgi is None:
if os.path.exists("wsgi.py"):
wsgi = "wsgi.py"
elif os.path.exists("app.py"):
wsgi = "app.py"
else:
wsgi = "app.py"
ctx.invoke(gen_apppy)
def write_procfile(filename, server_process, debug)... | Generates Procfiles which can be used with honcho or foreman. |
def indexables(self):
if self._indexables is None:
d = self.description
self._indexables = [GenericIndexCol(name='index', axis=0)]
for i, n in enumerate(d._v_names):
dc = GenericDataIndexableCol(
name=n, pos=i, values=[n], version=self.vers... | create the indexables from the table description |
def textbetween(variable,
firstnum=None,
secondnum=None,
locationoftext='regular'):
if locationoftext == 'regular':
return variable[firstnum:secondnum]
elif locationoftext == 'toend':
return variable[firstnum:]
elif locationoftext == 'tostart':... | Get The Text Between Two Parts |
def complex_out_dtype(self):
if self.__complex_out_dtype is None:
raise AttributeError(
'no complex variant of output dtype {} defined'
''.format(dtype_repr(self.scalar_out_dtype)))
else:
return self.__complex_out_dtype | The complex dtype corresponding to this space's `out_dtype`. |
def date(ctx, year, month, day):
return _date(conversions.to_integer(year, ctx), conversions.to_integer(month, ctx), conversions.to_integer(day, ctx)) | Defines a date value |
def npd_to_pmf(nodal_plane_dist, use_default=False):
if isinstance(nodal_plane_dist, PMF):
return nodal_plane_dist
else:
if use_default:
return PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))])
else:
raise ValueError('Nodal Plane distribution not defined') | Returns the nodal plane distribution as an instance of the PMF class |
def _from_dict(cls, _dict):
args = {}
if 'grammars' in _dict:
args['grammars'] = [
Grammar._from_dict(x) for x in (_dict.get('grammars'))
]
else:
raise ValueError(
'Required property \'grammars\' not present in Grammars JSON')
... | Initialize a Grammars object from a json dictionary. |
def run(self):
for msg in self.messages:
col = getattr(msg, 'col', 0)
yield msg.lineno, col, (msg.tpl % msg.message_args), msg.__class__ | Yield the error messages. |
def kdists(matrix, k=7, ix=None):
ix = ix or kindex(matrix, k)
return matrix[ix][np.newaxis].T | Returns the k-th nearest distances, row-wise, as a column vector |
def use_plenary_composition_view(self):
self._object_views['composition'] = PLENARY
for session in self._get_provider_sessions():
try:
session.use_plenary_composition_view()
except AttributeError:
pass | Pass through to provider CompositionLookupSession.use_plenary_composition_view |
def _get_opts_seaborn(self, opts, style):
if opts is None:
if self.chart_opts is None:
opts = {}
else:
opts = self.chart_opts
if style is None:
if self.chart_style is None:
style = self.chart_style
else:
... | Initialialize for chart rendering |
def main():
actions = []
try:
options, arguments = getopt.getopt(sys.argv[1:], 'cdh', [
'convert', 'to-html', 'demo', 'help',
])
for option, value in options:
if option in ('-c', '--convert', '--to-html'):
actions.append(functools.partial(convert_c... | Command line interface for the ``coloredlogs`` program. |
def page_prev(self):
window_start = (self.parent.value('window_start') -
self.parent.value('window_length'))
if window_start < 0:
return
self.parent.overview.update_position(window_start) | Go to the previous page. |
def close(self):
for channel in self._listening_to:
self.toredis.unsubscribe(channel)
self.toredis.unsubscribe(self.group_pubsub) | Unsubscribe the group and all jobs being listened too |
def stop(self):
if self._thread is not None and self._thread.isAlive():
self._done.set() | Stop the running task |
def _setup_http_session(self):
headers = {"Content-type": "application/json"}
if (self._id_token):
headers.update({"authorization": "Bearer {}".format(
self._id_token)})
self._session.headers.update(headers)
self._session.verify = False | Sets up the common HTTP session parameters used by requests. |
def field_from_json(self, key_and_type, json_value):
assert ':' in key_and_type
key, type_code = key_and_type.split(':', 1)
from_json = self.field_function(type_code, 'from_json')
value = from_json(json_value)
return key, value | Convert a JSON-serializable representation back to a field. |
def convert_table(app, docname, source):
num_tables = 0
for i,j in enumerate(source):
table = []
output = ''
in_table = False
for l in j.split('\n'):
r = l.strip()
if r.startswith('|'):
table.append(r)
in_table = True
... | Find tables in a markdown and then convert them into the rst format |
def distance(a, b):
R = 3963
lat1, lon1 = math.radians(a[0]), math.radians(a[1])
lat2, lon2 = math.radians(b[0]), math.radians(b[1])
return math.acos(math.sin(lat1) * math.sin(lat2) +
math.cos(lat1) * math.cos(lat2) * math.cos(lon1 - lon2)) * R | Calculates distance between two latitude-longitude coordinates. |
def create_cookie(name, value, domain, httponly=None, **kwargs):
if domain == 'localhost':
domain = ''
config = dict(
name=name,
value=value,
version=0,
port=None,
domain=domain,
path='/',
secure=False,
expires=None,
discard=True,
... | Creates `cookielib.Cookie` instance |
def must_contain(self, value, q, strict=False):
if value is not None:
if value.find(q) != -1:
return
self.shout('Value %r does not contain %r', strict, value, q) | if value must contain q |
def _raise_on_error(data: Union[str, dict]) -> None:
if isinstance(data, str):
raise_error(data)
elif 'status' in data and data['status'] != 'success':
raise_error(data['data']['message']) | Raise the appropriate exception on error. |
def d(msg, *args, **kwargs):
return logging.log(DEBUG, msg, *args, **kwargs) | log a message at debug level; |
def _serialize(self, include_run_logs=False, strict_json=False):
result = {'command': self.command,
'name': self.name,
'started_at': self.started_at,
'completed_at': self.completed_at,
'success': self.successful,
'soft_tim... | Serialize a representation of this Task to a Python dict. |
def _build_graph(self):
if not self.colors:
self.palette = self.env.get_template('palette.js')
self.template_vars.update({'palette': self.palette.render()})
self.colors = {x['name']: 'palette.color()' for x in self.json_data}
template_vars = []
for index, data... | Build Rickshaw graph syntax with all data |
def exclude(self, d, item):
try:
md = d.__metadata__
pmd = getattr(md, '__print__', None)
if pmd is None:
return False
excludes = getattr(pmd, 'excludes', [])
return ( item[0] in excludes )
except:
pass
retu... | check metadata for excluded items |
def munge_source(v):
lines = v.split('\n')
if not lines:
return tuple(), ''
firstline = lines[0].lstrip()
while firstline == '' or firstline[0] == '@':
del lines[0]
firstline = lines[0].lstrip()
if not lines:
return tuple(), ''
params = tuple(
parm.strip()... | Take Python source code, return a pair of its parameters and the rest of it dedented |
def rm(venv_name):
inenv = InenvManager()
venv = inenv.get_venv(venv_name)
click.confirm("Delete dir {}".format(venv.path))
shutil.rmtree(venv.path) | Removes the venv by name |
def bool_str(string):
if string not in BOOL_STRS:
raise ValueError('Invalid boolean string: "{}"'.format(string))
return True if string == 'true' else False | Returns a boolean from a string imput of 'true' or 'false |
def describe(self):
description = {
'description': self._description,
'type': self.name,
}
description.update(self.extra_params)
return description | Provide a dictionary with information describing itself. |
def initialize(self):
if self._pooler is None:
params = {
"inputWidth": self.inputWidth,
"lateralInputWidths": [self.cellCount] * self.numOtherCorticalColumns,
"cellCount": self.cellCount,
"sdrSize": self.sdrSize,
"onlineLearning": self.onlineLearning,
"maxSdrSi... | Initialize the internal objects. |
def read_ncstream_err(fobj):
err = read_proto_object(fobj, stream.Error)
raise RuntimeError(err.message) | Handle reading an NcStream error from a file-like object and raise as error. |
def makeref2namesdct(name2refdct):
ref2namesdct = {}
for key, values in name2refdct.items():
for value in values:
ref2namesdct.setdefault(value, set()).add(key)
return ref2namesdct | make the ref2namesdct in the idd_index |
def _rank_qr(self, choices):
from bernard.platforms.facebook import layers as fbl
try:
qr = self.request.get_layer(fbl.QuickReply)
self.chosen = choices[qr.slug]
self.slug = qr.slug
if self.when is None or self.when == qr.slug:
return 1.0
... | Look for the QuickReply layer's slug into available choices. |
def sortable_title(instance):
title = plone_sortable_title(instance)
if safe_callable(title):
title = title()
return title.lower() | Uses the default Plone sortable_text index lower-case |
def dnd_endSnooze(self, **kwargs) -> SlackResponse:
self._validate_xoxp_token()
return self.api_call("dnd.endSnooze", json=kwargs) | Ends the current user's snooze mode immediately. |
def data_filler_customer(self, number_of_rows, pipe):
try:
for i in range(number_of_rows):
pipe.hmset('customer:%s' % i, {
'id': rnd_id_generator(self),
'name': self.faker.first_name(),
'lastname': self.faker.last_name(),
... | creates keys with customer data |
def _split_str(s, n):
length = len(s)
return [s[i:i + n] for i in range(0, length, n)] | split string into list of strings by specified number. |
def refresh_all_state_machines(self):
self.refresh_state_machines(list(self.model.state_machine_manager.state_machines.keys())) | Refreshes all state machine tabs |
def _update_staticmethod(self, oldsm, newsm):
self._update(None, None, oldsm.__get__(0), newsm.__get__(0)) | Update a staticmethod update. |
def remove(self):
self._multivol.deallocate(self.id)
ARRAY_CACHE.pop(self.id, None)
PIXEL_CACHE.pop(self.id, None) | Remove the layer artist for good |
def pick_summary_data(key, summary_df, selected_summaries):
selected_summaries_dict = create_selected_summaries_dict(selected_summaries)
value = selected_summaries_dict[key]
return summary_df.iloc[:, summary_df.columns.get_level_values(1) == value] | picks the selected pandas.DataFrame |
def wandb_pty(resize=True):
master_fd, slave_fd = pty.openpty()
try:
tty.setraw(master_fd)
except termios.error:
pass
if resize:
if SIGWINCH_HANDLER is not None:
SIGWINCH_HANDLER.add_fd(master_fd)
return master_fd, slave_fd | Get a PTY set to raw mode and registered to hear about window size changes. |
def _sign(self,params):
for k, v in params.iteritems():
if type(v) == int: v = str(v)
elif type(v) == float: v = '%.2f'%v
elif type(v) in (list, set):
v = ','.join([str(i) for i in v])
elif type(v) == bool: v = 'true' if v else 'false'
... | Generate API sign code |
def _save_to(self, im, path, format=None):
format = format or im.format
if not format:
_, format = splitext(path)
format = format[1:]
im.format = format.lower()
im.save(filename=path) | Save the image for testing. |
def _wf_to_dict(wf):
inputs, outputs = _get_wf_inout(wf)
out = {"name": _id_to_name(wf.tool["id"]).replace("-", "_"), "inputs": inputs,
"outputs": outputs, "steps": [], "subworkflows": [],
"requirements": []}
for step in wf.steps:
inputs, outputs = _get_step_inout(step)
... | Parse a workflow into cwl2wdl style dictionary. |
def send_root_file(self, filename):
cache_timeout = self.get_send_file_max_age(filename)
return send_from_directory(self.config['ROOT_FOLDER'], filename,
cache_timeout=cache_timeout) | Function used to send static files from the root of the domain. |
def quantile(image, q, nonzero=True):
img_arr = image.numpy()
if isinstance(q, (list,tuple)):
q = [qq*100. if qq <= 1. else qq for qq in q]
if nonzero:
img_arr = img_arr[img_arr>0]
vals = [np.percentile(img_arr, qq) for qq in q]
return tuple(vals)
elif isinstance(... | Get the quantile values from an ANTsImage |
def _affine_inv_mult(c, m):
"Applies the inverse affine transform described in `m` to `c`."
size = c.flow.size()
h,w = c.size
m[0,1] *= h/w
m[1,0] *= w/h
c.flow = c.flow.view(-1,2)
a = torch.inverse(m[:2,:2].t())
c.flow = torch.mm(c.flow - m[:2,2], a).view(size)
return c | Applies the inverse affine transform described in `m` to `c`. |
def extern_store_f64(self, context_handle, f64):
c = self._ffi.from_handle(context_handle)
return c.to_value(f64) | Given a context and double, return a new Handle to represent the double. |
def lastChild(self):
ret = libxml2mod.xmlGetLastChild(self._o)
if ret is None:raise treeError('xmlGetLastChild() failed')
__tmp = xmlNode(_obj=ret)
return __tmp | Search the last child of a node. |
def duration(self):
self._duration = self.lib.iperf_get_test_duration(self._test)
return self._duration | The test duration in seconds. |
def generate_url(self, suffix):
url_base_path = os.path.dirname(self.path)
netloc = "{}:{}".format(*self.server.server_address)
return urlunparse((
"http",
netloc,
url_base_path + "/" + suffix,
"", "", "")) | Return URL by combining server details with a path suffix. |
def _solve(self):
while len(self._remove_constr) > 0:
self._remove_constr.pop().delete()
try:
self._prob.solve(lp.ObjectiveSense.Maximize)
except lp.SolverError as e:
raise_from(FluxBalanceError('Failed to solve: {}'.format(
e), result=self._pr... | Solve the problem with the current objective. |
def delete_token():
username = get_admin()[0]
admins = get_couchdb_admins()
if username in admins:
print 'I delete {} CouchDB user'.format(username)
delete_couchdb_admin(username)
if os.path.isfile(LOGIN_FILENAME):
print 'I delete {} token file'.format(LOGIN_FILENAME)
os.... | Delete current token, file & CouchDB admin user |
def plotnoise(noisepkl, mergepkl, plot_width=950, plot_height=400):
d = pickle.load(open(mergepkl))
ndist, imstd, flagfrac = plotnoisedist(noisepkl, plot_width=plot_width/2, plot_height=plot_height)
fluxscale = calcfluxscale(d, imstd, flagfrac)
logger.info('Median image noise is {0:.3} Jy.'.format(fluxs... | Make two panel plot to summary noise analysis with estimated flux scale |
def _Members(self, group):
group.members = set(group.members).union(self.gids.get(group.gid, []))
return group | Unify members of a group and accounts with the group as primary gid. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.