language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def resolve_dependencies(nodes):
    """Yield the graph's nodes in an execution order that satisfies every
    node's ``requires`` set.

    Each node must expose ``name`` and ``requires`` (a collection of names).
    Raises ConfigException when no progress can be made (missing or cyclic
    requirements).
    """
    completed = set()
    while len(completed) < len(nodes):
        for candidate in nodes:
            if candidate.name in completed:
                continue
            # Runnable once every requirement has already been yielded.
            if completed.issuperset(candidate.requires):
                completed.add(candidate.name)
                yield candidate
                break
        else:
            # A full pass made no progress: requirements are unsatisfiable.
            raise ConfigException('Invalid requirements in pipeline!')
|
java
|
/**
 * Re-reads this model's row from the database, overwriting the in-memory
 * attributes and clearing dirty-attribute tracking.
 *
 * @throws StaleModelException if the record no longer exists in the database
 */
public void refresh() {
    // Invalidate any cached query results for this model's table first.
    QueryCache.instance().purgeTableCache(metaModelLocal);
    Model reloaded = ModelDelegate.findById(this.getClass(), getId());
    if (reloaded == null) {
        throw new StaleModelException("Failed to refresh self because probably record with " +
                "this ID does not exist anymore. Stale model: " + this);
    }
    // Copy fresh attribute values onto this instance and reset dirty state.
    reloaded.copyTo(this);
    dirtyAttributeNames.clear();
}
|
java
|
/**
 * Fetches a single page of task files given a continuation link.
 *
 * @param nextPageLink the NextLink from a previous list call; must not be null
 * @param fileListFromTaskNextOptions optional per-request headers; may be null
 * @return an Observable emitting the page of NodeFile results with headers
 */
public Observable<ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders>> listFromTaskNextSinglePageAsync(final String nextPageLink, final FileListFromTaskNextOptions fileListFromTaskNextOptions) {
    if (nextPageLink == null) {
        throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
    }
    Validator.validate(fileListFromTaskNextOptions);
    // Extract the optional header values from the options bag, if supplied.
    UUID clientRequestId = null;
    Boolean returnClientRequestId = null;
    DateTime ocpDate = null;
    if (fileListFromTaskNextOptions != null) {
        clientRequestId = fileListFromTaskNextOptions.clientRequestId();
        returnClientRequestId = fileListFromTaskNextOptions.returnClientRequestId();
        ocpDate = fileListFromTaskNextOptions.ocpDate();
    }
    final DateTimeRfc1123 ocpDateConverted = (ocpDate == null) ? null : new DateTimeRfc1123(ocpDate);
    String nextUrl = String.format("%s", nextPageLink);
    return service.listFromTaskNext(nextUrl, this.client.acceptLanguage(), clientRequestId, returnClientRequestId, ocpDateConverted, this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders>>>() {
            @Override
            public Observable<ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the page and rewrap it with the typed headers.
                    ServiceResponseWithHeaders<PageImpl<NodeFile>, FileListFromTaskHeaders> result = listFromTaskNextDelegate(response);
                    return Observable.just(new ServiceResponseWithHeaders<Page<NodeFile>, FileListFromTaskHeaders>(result.body(), result.headers(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
|
java
|
/**
 * Reads the payload of the node at the given path and wraps it in a
 * ChildData (using the shared EMPTY_STAT placeholder).
 *
 * @param path node path to read
 * @return the node's data
 * @throws NiubiException wrapping any failure from the client
 */
protected ChildData getData(String path) {
    try {
        final byte[] payload = client.getData().forPath(path);
        return new ChildData(path, EMPTY_STAT, payload);
    } catch (Exception e) {
        throw new NiubiException(e);
    }
}
|
java
|
/**
 * Adds a TYPE_INVALID status message reporting an unrecognised create
 * request type, listing every identifier the caller may use instead.
 *
 * @param type the invalid type string received from the client
 */
public void typeInvalid(final String type) {
    final String message = "\"" + type + "\" is not a valid create request type. Use \""
            + CreateRequestType.USER.getIdentifier()
            + "\" to create a user, \""
            + CreateRequestType.FOLLOW.getIdentifier()
            + "\" to create a follow edge or \""
            + CreateRequestType.STATUS_UPDATE.getIdentifier()
            + "\" to create a status update.";
    this.addStatusMessage(ProtocolConstants.StatusCodes.Create.TYPE_INVALID, message);
}
|
java
|
/**
 * Sets the device manufacturer targeting for this targeting set.
 *
 * @param deviceManufacturerTargeting the targeting to store; a null value
 *     clears the current targeting
 */
public void setDeviceManufacturerTargeting(com.google.api.ads.admanager.axis.v201811.DeviceManufacturerTargeting deviceManufacturerTargeting) {
    this.deviceManufacturerTargeting = deviceManufacturerTargeting;
}
|
java
|
/**
 * Releases a temporary buffer back to the shared free list.
 * Only buffers whose backing array has the standard SIZE are pooled;
 * any other capacity is simply dropped and left to the garbage collector.
 *
 * @param buf the buffer being returned by its borrower
 */
public static void free(TempCharBuffer buf)
{
    // Unlink from any chain so the pooled buffer does not pin other buffers.
    buf._next = null;
    if (buf._buf.length == SIZE)
        _freeList.free(buf);
}
|
java
|
/**
 * Computes the Levenshtein edit distance between {@code source} and
 * {@code target}, optionally ignoring ASCII case. Empty or
 * whitespace-only inputs are treated as length zero.
 *
 * @param source first string (may be null/empty per isEmptyOrWhitespace)
 * @param target second string (may be null/empty per isEmptyOrWhitespace)
 * @param caseSensitive whether character comparison is case sensitive
 * @return the minimum number of single-character edits between the strings
 */
public static int getEditDistance(String source, String target, boolean caseSensitive) {
    int m = isEmptyOrWhitespace(source) ? 0 : source.length();
    int n = isEmptyOrWhitespace(target) ? 0 : target.length();
    if (m == 0) {
        return n;
    }
    if (n == 0) {
        return m;
    }
    if (!caseSensitive) {
        source = Ascii.toLowerCase(source);
        target = Ascii.toLowerCase(target);
    }
    // dist[i][j] = edit distance between the first i chars of source and
    // the first j chars of target.
    int[][] dist = new int[m + 1][n + 1];
    for (int i = 0; i <= m; i++) {
        dist[i][0] = i;
    }
    for (int j = 0; j <= n; j++) {
        dist[0][j] = j;
    }
    for (int i = 1; i <= m; i++) {
        final char sourceChar = source.charAt(i - 1);
        for (int j = 1; j <= n; j++) {
            final int substitutionCost = (sourceChar == target.charAt(j - 1)) ? 0 : 1;
            dist[i][j] = Math.min(
                dist[i - 1][j - 1] + substitutionCost,
                Math.min(dist[i - 1][j] + 1, dist[i][j - 1] + 1));
        }
    }
    return dist[m][n];
}
|
java
|
/**
 * Generates a fresh secret key for the given JCA algorithm name
 * (for example "AES" or "DES").
 *
 * @param algorithm the KeyGenerator algorithm name
 * @return a newly generated secret key, never {@code null}
 * @throws IllegalArgumentException if no provider supports the algorithm
 */
public static SecretKey createSecretKey(String algorithm) {
    try {
        // Obtain a generator for the requested algorithm and produce one key.
        KeyGenerator keygen = KeyGenerator.getInstance(algorithm);
        return keygen.generateKey();
    } catch (NoSuchAlgorithmException e) {
        // Previously this was swallowed (printStackTrace + null return),
        // which deferred the failure to an NPE at the call site; fail fast
        // with the cause preserved instead.
        throw new IllegalArgumentException("Unsupported key algorithm: " + algorithm, e);
    }
}
|
java
|
/**
 * Removes and returns the next queued event, blocking while the queue is
 * empty according to mIdleTimeout: negative waits indefinitely, positive
 * waits up to that many milliseconds, zero does not wait at all.
 *
 * @return the next event, or null if none arrived within the idle timeout
 * @throws InterruptedException if interrupted while waiting
 */
synchronized TransactionQueueEvent nextTransactionEvent()
    throws InterruptedException {
    if (mQueue.isEmpty()) {
        if (mIdleTimeout != 0) {
            if (mIdleTimeout < 0) {
                // Negative timeout: block until notified.
                // NOTE(review): wait() is not re-checked in a loop, so a
                // spurious wakeup would fall through and return null even in
                // the "wait forever" case — confirm callers tolerate null here.
                wait();
            }
            else {
                wait(mIdleTimeout);
            }
        }
    }
    // Re-check after waking: an empty queue (timeout expired, or woken
    // without an enqueue) yields null rather than an event.
    if (mQueue.isEmpty()) {
        return null;
    }
    return (TransactionQueueEvent)mQueue.removeFirst();
}
|
java
|
/**
 * Reads the binary content of the Java resource (esjp) with the given
 * name from the eFaps database.
 *
 * @param _resourceName fully qualified name of the resource to read
 * @return the resource content, or null if it does not exist or reading fails
 */
public byte[] read(final String _resourceName)
{
    byte[] ret = null;
    // Bug fix: the original format string lacked the "{}" placeholder, so
    // the resource name was never rendered into the debug message.
    EFapsClassLoader.LOG.debug("read '{}'", _resourceName);
    try {
        final QueryBuilder queryBuilder = new QueryBuilder(this.classType);
        queryBuilder.addWhereAttrEqValue("Name", _resourceName);
        final InstanceQuery query = queryBuilder.getCachedQuery("esjp");
        query.execute();
        if (query.next()) {
            final Checkout checkout = new Checkout(query.getCurrentValue());
            final InputStream is = checkout.executeWithoutAccessCheck();
            try {
                // Read the stream to exhaustion: available() is only an
                // estimate and a single read() need not fill the buffer,
                // so the previous available()/read() pair could truncate.
                final java.io.ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
                final byte[] buffer = new byte[4096];
                int len;
                while ((len = is.read(buffer)) != -1) {
                    bos.write(buffer, 0, len);
                }
                ret = bos.toByteArray();
            } finally {
                // Close even on failure; the original leaked the stream
                // when read() threw.
                is.close();
            }
        }
    } catch (final EFapsException e) {
        EFapsClassLoader.LOG.error("could not access the Database for reading '{}' - {}", _resourceName, e);
    } catch (final IOException e) {
        EFapsClassLoader.LOG.error("could not read the Javaclass '{}' - {}", _resourceName, e);
    }
    return ret;
}
|
python
|
def dirsplit(path):
    r"""Split ``path`` into its directory components.

    Repeatedly applies ``split`` (os.path.split), returning the pieces
    root-first and keeping a leading root such as ``/`` or a drive prefix.

    Args:
        path (str): path to decompose
    Returns:
        list: components of the path

    CommandLine:
        python -m utool.util_path --exec-dirsplit
    """
    pieces = []
    head = path
    while head:
        head, tail = split(head)
        if tail:
            pieces.append(tail)
        else:
            # split() made no progress (head is a root like '/'); stop here.
            break
    if head:
        pieces.append(head)
    pieces.reverse()
    return pieces
|
python
|
def __get_is_revertible(self):
    """Return a boolean representing whether this Action is revertible.

    Populates ``self.__undo_errors`` with human-readable reasons when the
    action cannot be undone, and returns True only when no errors were
    collected.
    """
    # Already-reverted actions can never be reverted again.
    if self.reverted:
        return False
    errors = []
    inst = self.timemachine
    # A schema change (fields or foreign keys differ between the recorded
    # snapshot and the present model) makes any undo unsafe.
    if inst.fields != inst.presently.fields or \
            inst.foreignkeys != inst.presently.foreignkeys:
        self.__undo_errors = [
            "Cannot undo action %s. The database schema"
            " for %s has changed"
            % (self.id,
               inst.content_type.name,)]
        return False
    if self.action_type in ["dl", "md"]:
        # If undoing deletion, make sure it actually doesn't exist
        if self.action_type == "dl" and inst.presently.exists:
            errors.append(
                "Cannot undo action %d: the %s you are trying to"
                " recreate already exists"
                % (self.id,
                   inst.content_type.name,))
        # The only problem we can have by reversing this action
        # is that some of its foreignkeys could be pointing to
        # objects that have since been deleted.
        check_here = inst.at_previous_action
        for field in inst.foreignkeys:
            fk = check_here.get_timemachine_instance(field)
            # If the ForeignKey doesn't have a value
            if not fk: continue
            if not fk.exists:
                errors.append(
                    "Cannot undo action %s: the %s used to link to"
                    " a %s that has since been deleted"
                    % (self.id,
                       inst.content_type.name,
                       fk.content_type.name,))
    else: # self.action_type == "cr"
        # Make sure it actually exists
        if not self.timemachine.presently.exists:
            errors.append(
                "Cannot undo action %s: the %s you are trying"
                " to delete doesn't currently exist"
                % (self.id, inst.content_type.name,))
        # The only problem we can have by undoing this action is
        # that it could have foreignkeys pointed to it, so deleting
        # it will cause deletion of other objects
        else:
            # Collect every reverse-relation accessor and report any object
            # that still points at the instance we would delete.
            links = [rel.get_accessor_name()
                     for rel in \
                     inst.get_object()._meta.get_all_related_objects()]
            for link in links:
                objects = getattr(inst.get_object(), link).all()
                for rel in objects:
                    errors.append(
                        "Cannot undo action %s: you are trying to"
                        " delete a %s that has a %s pointing to it" %
                        (self.id,
                         inst.content_type.name,
                         ContentType.objects.get_for_model(rel.__class__),))
    self.__undo_errors = errors
    return (len(errors) == 0)
|
python
|
def negate(self, topv=None):
    r"""
    Given a CNF formula :math:`\mathcal{F}`, this method creates a CNF
    formula :math:`\neg{\mathcal{F}}`. The negation of the formula is
    encoded to CNF with the use of *auxiliary* Tseitin variables [1]_.
    A new CNF formula is returned keeping all the newly introduced
    variables that can be accessed through the ``auxvars`` variable.

    **Note** that the negation of each clause is encoded with one
    auxiliary variable if it is not unit size. Otherwise, no auxiliary
    variable is introduced (the negated literal itself is recorded in
    ``auxvars``).

    :param topv: top variable identifier if any.
    :type topv: int
    :return: an object of class :class:`CNF`.

    .. [1] G. S. Tseitin. *On the complexity of derivations in the
        propositional calculus*. Studies in Mathematics and
        Mathematical Logic, Part II. pp. 115-125, 1968

    .. code-block:: python

        >>> from pysat.formula import CNF
        >>> pos = CNF(from_clauses=[[-1, 2], [3]])
        >>> neg = pos.negate()
        >>> print neg.clauses
        [[1, -4], [-2, -4], [-1, 2, 4], [4, -3]]
        >>> print neg.auxvars
        [4, -3]
    """
    negated = CNF()
    # Start numbering fresh variables after topv (or after our own top id).
    negated.nv = topv
    if not negated.nv:
        negated.nv = self.nv
    negated.clauses = []
    negated.auxvars = []
    for cl in self.clauses:
        # Unit clause: its negation is just the negated literal, no
        # auxiliary variable needed.
        auxv = -cl[0]
        if len(cl) > 1:
            negated.nv += 1
            auxv = negated.nv
            # direct implication
            for l in cl:
                negated.clauses.append([-l, -auxv])
            # opposite implication
            negated.clauses.append(cl + [auxv])
        # keeping all Tseitin variables
        negated.auxvars.append(auxv)
    # At least one clause of the original formula must be falsified.
    negated.clauses.append(negated.auxvars)
    return negated
|
python
|
def gotResolverError(self, failure, protocol, message, address):
    '''
    Copied from twisted.names.
    Removes logging the whole failure traceback.
    '''
    # Domain-level failures map to NXDOMAIN; everything else is a server
    # error, for which we log only the short message (not the traceback).
    if failure.check(dns.DomainError, dns.AuthoritativeDomainError):
        rcode = dns.ENAME
    else:
        rcode = dns.ESERVER
        log.msg(failure.getErrorMessage())
    message.rCode = rcode
    self.sendReply(protocol, message, address)
    if self.verbose:
        log.msg("Lookup failed")
|
java
|
/**
 * Hands out the next bucket from the batch currently in flight, or null
 * when no batch is active. Drops the batch reference once it has been
 * fully consumed.
 *
 * @return the next bucket, or null if no batch is in progress
 */
private synchronized TableBucket getNextBucketFromExistingBatch() {
    if (this.currentBatch == null) {
        return null;
    }
    TableBucket bucket = this.currentBatch.next();
    // Release the exhausted batch so a new one can be fetched next time.
    if (!this.currentBatch.hasNext()) {
        this.currentBatch = null;
    }
    return bucket;
}
|
python
|
def CheckSchema(self, database):
    """Checks the schema of a database with that defined in the plugin.

    Args:
      database (SQLiteDatabase): database.

    Returns:
      bool: True if the schema of the database matches that defined by
          the plugin, or False if the schemas do not match or no schema
          is defined by the plugin.
    """
    if not self.SCHEMAS:
        return False
    # A match on any one of the plugin's schemas is sufficient.
    return any(
        database and database.schema == schema for schema in self.SCHEMAS)
|
java
|
/**
 * Atomically raises the stored value to newValue, but only when the change
 * is "significant": newValue must exceed the current value, and when a
 * significant-update interval is configured, enough time must have passed
 * since the last accepted update.
 *
 * @param newValue candidate value
 * @return true if the value was updated, false if newValue was not greater
 *     than the current value or the update interval has not yet elapsed
 */
public boolean setIfSignificantToUser(final int newValue) {
    // Classic CAS retry loop: re-read and retry if another thread raced us.
    while (true) {
        final int currentValue = _value.get();
        if (newValue > currentValue) {
            final long currentTimestamp;
            if (_significantUpdateIntervalMillis > 0) {
                currentTimestamp = System.currentTimeMillis();
                // Throttle: reject updates arriving before the interval elapses.
                if (currentTimestamp - _lastUpdate < _significantUpdateIntervalMillis) {
                    return false;
                }
            } else {
                // Sentinel: no throttling configured.
                currentTimestamp = -1;
            }
            if (_value.compareAndSet(currentValue, newValue)) {
                // NOTE(review): _lastUpdate is written outside any lock after
                // the CAS; concurrent winners could interleave writes — confirm
                // this race is acceptable for a throttling timestamp.
                _lastUpdate = currentTimestamp;
                return true;
            }
        } else {
            return false;
        }
    }
}
|
python
|
def _pad(expr, width, side='left', fillchar=' '):
    """
    Pad strings in the sequence or scalar with an additional character to specified side.

    :param expr: sequence or scalar expression to pad
    :param width: Minimum width of resulting string; additional characters will be filled with spaces
    :param side: {'left', 'right', 'both'}, default 'left'
    :param fillchar: Additional character for filling, default is whitespace
    :return: sequence or scalar
    """
    # fillchar must be a string of exactly one character.
    if not isinstance(fillchar, six.string_types):
        raise TypeError(
            'fillchar must be a character, not {0}'.format(type(fillchar).__name__))
    if len(fillchar) != 1:
        raise TypeError('fillchar must be a character, not str')
    if side not in ('left', 'right', 'both'):
        raise ValueError('Invalid side')
    return _string_op(expr, Pad, _width=width, _side=side, _fillchar=fillchar)
|
python
|
def same(*values):
    """
    Check if all values in a sequence are equal.
    Returns True on empty sequences.

    Examples
    --------
    >>> same(1, 1, 1, 1)
    True
    >>> same(1, 2, 1)
    False
    >>> same()
    True
    """
    iterator = iter(values)
    try:
        reference = next(iterator)
    except StopIteration:
        # Vacuously true for an empty argument list.
        return True
    return all(candidate == reference for candidate in iterator)
|
java
|
/**
 * Stores an action result. A plain actionId keeps the value as-is; an id
 * containing the separator is tokenized, and the value is wrapped in
 * nested ResultValueMaps keyed by the intermediate tokens.
 *
 * @param actionId the (possibly dotted/qualified) action id; may be null
 * @param resultValue the value to store
 */
public void setResultValue(String actionId, Object resultValue) {
    if (actionId == null || !actionId.contains(ActivityContext.ID_SEPARATOR)) {
        // Simple case: no qualification, store directly.
        this.actionId = actionId;
        this.resultValue = resultValue;
    } else {
        String[] ids = StringUtils.tokenize(actionId, ActivityContext.ID_SEPARATOR, true);
        if (ids.length == 1) {
            // Separator present but only one token survives tokenizing.
            this.actionId = null;
            this.resultValue = resultValue;
        } else if (ids.length == 2) {
            // One level of nesting: outer id plus a single-entry map.
            ResultValueMap resultValueMap = new ResultValueMap();
            resultValueMap.put(ids[1], resultValue);
            this.actionId = ids[0];
            this.resultValue = resultValueMap;
        } else {
            // Deep nesting: build a chain of maps for the middle tokens and
            // place the value under the last token.
            ResultValueMap resultValueMap = new ResultValueMap();
            for (int i = 1; i < ids.length - 1; i++) {
                ResultValueMap resultValueMap2 = new ResultValueMap();
                resultValueMap.put(ids[i], resultValueMap2);
                resultValueMap = resultValueMap2;
            }
            resultValueMap.put(ids[ids.length - 1], resultValue);
            // NOTE(review): here the full actionId is stored, while the
            // two-token branch stores only ids[0], and the stored map is the
            // innermost one rather than the chain head — confirm intended.
            this.actionId = actionId;
            this.resultValue = resultValueMap;
        }
    }
}
|
java
|
/**
 * Collects every filter-change listener that should be notified: the
 * job-level listeners (via the deprecated accessor) followed by the
 * listeners registered locally on this component.
 *
 * @return a new list containing all listeners, global first
 */
private List<FilterChangeListener> getAllListeners() {
    @SuppressWarnings("deprecation") final List<FilterChangeListener> globalChangeListeners =
            getAnalysisJobBuilder().getFilterChangeListeners();
    // Pre-size to avoid resizing while copying both listener sets.
    final List<FilterChangeListener> allListeners =
            new ArrayList<>(globalChangeListeners.size() + _localChangeListeners.size());
    allListeners.addAll(globalChangeListeners);
    allListeners.addAll(_localChangeListeners);
    return allListeners;
}
|
python
|
def schunk(string, size):
    """Splits string into n sized chunks (the last chunk may be shorter)."""
    chunks = []
    for start in range(0, len(string), size):
        chunks.append(string[start:start + size])
    return chunks
|
java
|
/**
 * Initializes this restore manager with its configuration and the job
 * manager used for snapshot work. Both references are stored as-is.
 *
 * @param config the restore manager configuration
 * @param jobManager the snapshot job manager
 */
@Override
public void init(RestoreManagerConfig config, SnapshotJobManager jobManager) {
    this.config = config;
    this.jobManager = jobManager;
}
|
java
|
/**
 * Determines the MIME type of the given resource's parent: the parent's
 * explicit MIME-type property is preferred, falling back to a guess from
 * the parent's name.
 *
 * @param resource the child resource; may be null
 * @return the parent's MIME type, or null if there is no parent or no type
 */
private static MimeType getParentMimeType(Resource resource) {
    if (resource == null) {
        return null;
    }
    final Resource parent = resource.getParent();
    if (parent == null) {
        return null;
    }
    final ResourceHandle handle = ResourceHandle.use(parent);
    MimeType mimeType = getMimeType(handle.getProperty(ResourceUtil.PROP_MIME_TYPE, ""));
    if (mimeType == null) {
        // No usable property: fall back to a name-based lookup.
        mimeType = getMimeType(getResourceName(parent));
    }
    return mimeType;
}
|
python
|
def get_collections(self, unit, names=None, merge=False,
                    sampling_rate=None, **entities):
    ''' Retrieve variable data for a specified level in the Dataset.

    Args:
        unit (str): The unit of analysis to return variables for. Must be
            one of 'run', 'session', 'subject', or 'dataset'.
        names (list): Optional list of variables names to return. If
            None, all available variables are returned.
        merge (bool): If True, variables are merged across all observations
            of the current unit. E.g., if unit='subject' and return_type=
            'collection', variables from all subjects will be merged into a
            single collection. If False, each observation is handled
            separately, and the result is returned as a list.
        sampling_rate (int, str): If unit='run', the sampling rate to
            pass onto the returned BIDSRunVariableCollection.
        entities: Optional constraints used to limit what gets returned.

    Returns:
        If merge is True, a single collection (or None when nothing
        matched); otherwise a list of collections, one per node with
        matching variables.
    '''
    nodes = self.get_nodes(unit, entities)
    var_sets = []
    for n in nodes:
        var_set = list(n.variables.values())
        var_set = [v for v in var_set if v.matches_entities(entities)]
        if names is not None:
            var_set = [v for v in var_set if v.name in names]
        # Additional filtering on Variables past run level, because their
        # contents are extracted from TSV files containing rows from
        # multiple observations
        if unit != 'run':
            var_set = [v.filter(entities) for v in var_set]
        var_sets.append(var_set)
    if merge:
        # Flatten all per-node sets into a single variable list.
        var_sets = [list(chain(*var_sets))]
    results = []
    for vs in var_sets:
        if not vs:
            continue
        # Run-level data gets the sampling-rate-aware collection type.
        if unit == 'run':
            vs = clc.BIDSRunVariableCollection(vs, sampling_rate)
        else:
            vs = clc.BIDSVariableCollection(vs)
        results.append(vs)
    if merge:
        return results[0] if results else None
    return results
|
java
|
/**
 * Opens a gap of {@code length} uninitialized slots at {@code index},
 * growing the backing array if needed and shifting the tail right.
 * A non-positive length is a no-op.
 *
 * @param index position at which to open the gap (0..size inclusive)
 * @param length number of slots to insert
 * @throws IndexOutOfBoundsException if index is out of range
 */
protected void beforeInsertDummies(int index, int length) {
    if (index < 0 || index > size)
        throw new IndexOutOfBoundsException("Index: "+index+", Size: "+size);
    if (length <= 0) {
        return;
    }
    ensureCapacity(size + length);
    // Shift [index, size) right by `length` to create the gap.
    System.arraycopy(elements, index, elements, index + length, size - index);
    size += length;
}
|
python
|
def _replace_fields(self, json_dict):
    """
    Delete this object's attributes derived from the previous payload,
    then replace them with those in json_dict.
    """
    # Snapshot the public keys first so we never mutate while iterating.
    stale_keys = [k for k in self._json_dict.keys() if not k.startswith("_")]
    for key in stale_keys:
        delattr(self, key)
    # Remember the new payload and re-create attributes from it.
    self._json_dict = json_dict
    self._set_fields(json_dict)
|
python
|
def set_source_filter(self, source):
    """ Only search for tweets entered via given source

    :param source: String. Name of the source to search for. An example \
    would be ``source=twitterfeed`` for tweets submitted via TwitterFeed
    :raises: TwitterSearchException
    """
    # Accept only real strings of at least two characters
    # (basestring on Python 2, str on Python 3).
    string_type = str if py3k else basestring
    if not isinstance(source, string_type) or len(source) < 2:
        raise TwitterSearchException(1009)
    self.source_filter = source
|
java
|
/**
 * Deletes this storage policy assignment by sending a DELETE request to
 * its URL on the Box API. Any API failure surfaces from request.send().
 */
public void delete() {
    URL url = STORAGE_POLICY_ASSIGNMENT_WITH_ID_URL_TEMPLATE.build(this.getAPI().getBaseURL(), this.getID());
    BoxAPIRequest request = new BoxAPIRequest(this.getAPI(), url, HttpMethod.DELETE);
    request.send();
}
|
java
|
/**
 * Resolves a resource URL by delegating to the first loader that supports
 * the given path.
 *
 * @param path resource path; must not be null
 * @return the resource URL, or empty if no loader supports the path or
 *     the supporting loader cannot resolve it
 */
public @Nonnull Optional<URL> getResource(@Nonnull String path) {
    ArgumentUtils.requireNonNull("path", path);
    // flatMap collapses "no supporting loader" and "loader found nothing"
    // into a single empty result.
    return getSupportingLoader(path).flatMap(loader -> loader.getResource(path));
}
|
java
|
/**
 * Loads the structure for the given PDP domain name by resolving the
 * domain and fetching it through the provided atom cache.
 *
 * @param pdpDomainName the PDP domain name to resolve
 * @param cache cache used to fetch the structure
 * @return the structure for the domain
 * @throws IOException on I/O problems while fetching
 * @throws StructureException if the structure cannot be built
 */
@Override
public Structure getDomain(String pdpDomainName, AtomCache cache) throws IOException, StructureException {
    return cache.getStructure(getPDPDomain(pdpDomainName));
}
|
python
|
def gumbel_softmax(x,
                   z_size,
                   mode,
                   softmax_k=0,
                   temperature_warmup_steps=150000,
                   summary=True,
                   name=None):
  """Gumbel softmax discretization bottleneck.

  Args:
    x: Input to the discretization bottleneck.
    z_size: Number of bits, where discrete codes range from 1 to 2**z_size.
    mode: tf.estimator.ModeKeys.
    softmax_k: If > 0 then do top-k softmax.
    temperature_warmup_steps: Number of steps it takes to decay temperature to
      0.
    summary: Whether to write summaries.
    name: Name for the bottleneck scope.

  Returns:
    Embedding function, discrete code, and loss.
  """
  with tf.variable_scope(name, default_name="gumbel_softmax"):
    # Project input onto 2**z_size logits, one per discrete code.
    m = tf.layers.dense(x, 2**z_size, name="mask")
    if softmax_k > 0:
      # Top-k variant returns early with its own KL-based loss.
      m, kl = top_k_softmax(m, softmax_k)
      return m, m, 1.0 - tf.reduce_mean(kl)
    logsm = tf.nn.log_softmax(m)
    # Gumbel-softmax sample.
    gumbel_samples = gumbel_sample(common_layers.shape_list(m))
    steps = temperature_warmup_steps
    # Ramp the Gumbel noise up over the first fifth of the warmup.
    gumbel_samples *= common_layers.inverse_exp_decay(steps // 5) * 0.5
    # NOTE(review): inverse_lin_decay presumably reads the global step
    # internally, annealing temperature from ~1.2 towards 0.2 — confirm.
    temperature = 1.2 - common_layers.inverse_lin_decay(steps)
    # 10% of the time keep reasonably high temperature to keep learning.
    temperature = tf.cond(
        tf.less(tf.random_uniform([]), 0.9), lambda: temperature,
        lambda: tf.random_uniform([], minval=0.5, maxval=1.0))
    s = tf.nn.softmax((logsm + gumbel_samples) / temperature)
    m = tf.nn.softmax(m)
    kl = -tf.reduce_max(logsm, axis=-1)
    if summary:
      tf.summary.histogram("max-log", tf.reshape(kl, [-1]))
    # Calculate the argmax and construct hot vectors.
    maxvec = tf.reshape(tf.argmax(m, axis=-1), [-1])
    maxvhot = tf.stop_gradient(tf.one_hot(maxvec, 2**z_size))
    # Add losses that prevent too few being used.
    distrib = tf.reshape(logsm, [-1, 2**z_size]) * maxvhot
    d_mean = tf.reduce_mean(distrib, axis=[0], keep_dims=True)
    d_variance = tf.reduce_mean(
        tf.squared_difference(distrib, d_mean), axis=[0])
    d_dev = -tf.reduce_mean(d_variance)
    ret = s
    if mode != tf.estimator.ModeKeys.TRAIN:
      ret = tf.reshape(maxvhot, common_layers.shape_list(s))  # Just hot @eval.
    return m, ret, d_dev * 5.0 + tf.reduce_mean(kl) * 0.002
|
python
|
def parse(self):
    """Parse our .PLT file and return :class:`Plot` object or raise :exc:`ParseException`.

    Each line starts with a one-character control code followed by its
    payload; unknown codes raise in strict mode and are logged otherwise.

    NOTE(review): the file is opened in 'rb' but control codes are compared
    against str literals ('Z', 'S', ...), which only matches under Python 2
    — confirm this module targets Python 2 semantics.
    """
    plt = Plot(name_from_filename(self.pltfilename))
    with open(self.pltfilename, 'rb') as pltfile:
        segment = None
        for line in pltfile:
            if not line:
                continue
            # First character is the control code, the rest its payload.
            c, val = line[:1], line[1:]
            if c == 'Z':
                # Plot bounds; the extended form appends an error distance.
                edist = None
                try:
                    ymin, ymax, xmin, xmax, zmin, zmax = (float(v) for v in val.split())
                except ValueError:
                    ymin, ymax, xmin, xmax, zmin, zmax, _, edist = val.split()
                    ymin, ymax, xmin, xmax, zmin, zmax, edist = \
                        (float(v) for v in (ymin, ymax, xmin, xmax, zmin, zmax, edist))
                plt.set_bounds(ymin, ymax, xmin, xmax, zmin, zmax, edist)
            elif c == 'S':
                # Survey name; keep only the first one encountered.
                if not plt.name:
                    plt.name = val.strip()
            elif c == 'G':
                plt.utm_zone = int(val)
            elif c == 'O':
                plt.datum = val
            elif c == 'N':
                # Segment header: name plus optional date and comment.
                date, comment = None, ''  # both date and comment are optional
                try:
                    name, _, m, d, y, comment = val.split(None, 5)
                    date = datetime.date(int(y), int(m), int(d))
                except ValueError:
                    try:
                        name, _, m, d, y = val.split()
                        date = datetime.date(int(y), int(m), int(d))
                    except ValueError:
                        name = val
                comment = comment[1:].strip()
                segment = Segment(name, date, comment)
            elif c == 'M':
                # Move command (pen up); flags are optional.
                flags = None
                try:
                    y, x, z, name, _, l, u, d, r, _, edist = val.split()
                except ValueError:
                    y, x, z, name, _, l, u, d, r, _, edist, flags = val.split()
                cmd = MoveCommand(float(y), float(x), float(z), name[1:],
                                  float(l), float(r), float(u), float(d), float(edist), flags)
                segment.add_command(cmd)
            elif c in ('D', 'd'):
                # 'D' for normal stations, 'd' for "hidden" stations with the 'P' flag
                flags = None
                try:
                    y, x, z, name, _, l, u, d, r, _, edist = val.split()
                except ValueError:
                    y, x, z, name, _, l, u, d, r, _, edist, flags = val.split()
                cmd = DrawCommand(float(y), float(x), float(z), name[1:],
                                  float(l), float(r), float(u), float(d), float(edist), flags)
                cmd.cmd = c
                segment.add_command(cmd)
            elif c == 'X':
                segment.set_bounds(*(float(v) for v in val.split()))
                # An X-bounds command signifies end of segment
                plt.add_segment(segment)
                segment = None
            elif c == 'P':
                # Fixed reference point.
                name, y, x, z = val.split()
                plt.add_fixed_point(name, (float(y), float(x), float(z)))
            elif c == 'C':
                plt.loop_count = int(val)
            elif c == 'R':
                # Loop record: counts, common station, endpoints, members.
                count, common, from_sta, to_sta, stations = val.split(None, 4)
                plt.add_loop(int(count), common, from_sta, to_sta, stations.split())
            elif c == '\x1A':
                continue  # "soft EOF" ascii SUB ^Z
            else:
                msg = "Unknown PLT control code '%s': %s" % (c, val)
                if self.strict_mode:
                    raise ParseException(msg)
                else:
                    log.warning(msg)
    return plt
|
java
|
/**
 * Strips the alignment patterns from a Data Matrix bit matrix, copying
 * only the data-region modules into a new, smaller matrix. Each data
 * region in the symbol is bordered by one alignment row/column on every
 * side, hence the "+ 2" stride and "+ 1" offset when reading.
 *
 * @param bitMatrix the full symbol's bit matrix
 * @return a matrix containing only the concatenated data regions
 * @throws IllegalArgumentException if the matrix height does not match the
 *     version's symbol size (NOTE(review): width is not checked — confirm
 *     callers guarantee a matching width)
 */
private BitMatrix extractDataRegion(BitMatrix bitMatrix) {
    int symbolSizeRows = version.getSymbolSizeRows();
    int symbolSizeColumns = version.getSymbolSizeColumns();
    if (bitMatrix.getHeight() != symbolSizeRows) {
        throw new IllegalArgumentException("Dimension of bitMatrix must match the version size");
    }
    int dataRegionSizeRows = version.getDataRegionSizeRows();
    int dataRegionSizeColumns = version.getDataRegionSizeColumns();
    // How many data regions the symbol is divided into, per axis.
    int numDataRegionsRow = symbolSizeRows / dataRegionSizeRows;
    int numDataRegionsColumn = symbolSizeColumns / dataRegionSizeColumns;
    // Total size of the output (alignment-free) matrix.
    int sizeDataRegionRow = numDataRegionsRow * dataRegionSizeRows;
    int sizeDataRegionColumn = numDataRegionsColumn * dataRegionSizeColumns;
    BitMatrix bitMatrixWithoutAlignment = new BitMatrix(sizeDataRegionColumn, sizeDataRegionRow);
    for (int dataRegionRow = 0; dataRegionRow < numDataRegionsRow; ++dataRegionRow) {
        int dataRegionRowOffset = dataRegionRow * dataRegionSizeRows;
        for (int dataRegionColumn = 0; dataRegionColumn < numDataRegionsColumn; ++dataRegionColumn) {
            int dataRegionColumnOffset = dataRegionColumn * dataRegionSizeColumns;
            for (int i = 0; i < dataRegionSizeRows; ++i) {
                // "+ 2" skips the alignment border around each region;
                // "+ 1" skips the leading alignment row of this region.
                int readRowOffset = dataRegionRow * (dataRegionSizeRows + 2) + 1 + i;
                int writeRowOffset = dataRegionRowOffset + i;
                for (int j = 0; j < dataRegionSizeColumns; ++j) {
                    int readColumnOffset = dataRegionColumn * (dataRegionSizeColumns + 2) + 1 + j;
                    if (bitMatrix.get(readColumnOffset, readRowOffset)) {
                        int writeColumnOffset = dataRegionColumnOffset + j;
                        bitMatrixWithoutAlignment.set(writeColumnOffset, writeRowOffset);
                    }
                }
            }
        }
    }
    return bitMatrixWithoutAlignment;
}
|
java
|
/**
 * Creates a new user tag with the given name via the WeChat API and
 * returns the tag the server assigned (including its generated id).
 *
 * @param name display name of the tag to create
 * @return the created tag as returned by the API
 */
public Tag create(String name) {
    String url = WxEndpoint.get("url.tag.create");
    String payload = JsonMapper.nonEmptyMapper().toJson(new TagWrapper(name));
    logger.debug("create tag: {}", payload);
    String response = wxClient.post(url, payload);
    TagWrapper wrapper = JsonMapper.defaultMapper().fromJson(response, TagWrapper.class);
    return wrapper.getTag();
}
|
python
|
def findFirstHref(link_attrs_list, target_rel):
    """Return the value of the href attribute for the first link tag
    in the list that has target_rel as a relationship."""
    # XXX: TESTME
    matches = findLinksRel(link_attrs_list, target_rel)
    # No matching link tag -> no href to report.
    return matches[0].get('href') if matches else None
|
java
|
/**
 * Builds an object graph from the given module instances using the
 * supplied loader and no parent graph.
 *
 * @param loader loader used to resolve generated code
 * @param modules module instances contributing bindings
 * @return the assembled object graph
 */
static ObjectGraph createWith(Loader loader, Object... modules) {
    return DaggerObjectGraph.makeGraph(null, loader, modules);
}
|
python
|
def validate_min_items(value, minimum, **kwargs):
    """
    Validator for ARRAY types to enforce a minimum number of items allowed for
    the ARRAY to be valid.

    :param value: the sequence being validated (anything supporting ``len``)
    :param minimum: the smallest number of items ``value`` may contain
    :raises ValidationError: if ``value`` has fewer than ``minimum`` items
    """
    if len(value) < minimum:
        raise ValidationError(
            MESSAGES['min_items']['invalid'].format(
                minimum, len(value),
            ),
        )
|
python
|
def get_instance(self, payload):
    """
    Build an instance of IpAccessControlListInstance

    :param dict payload: Payload response from the API
    :returns: twilio.rest.api.v2010.account.sip.ip_access_control_list.IpAccessControlListInstance
    :rtype: twilio.rest.api.v2010.account.sip.ip_access_control_list.IpAccessControlListInstance
    """
    # Bind the instance to this list's account context so further
    # requests reuse the same account_sid.
    return IpAccessControlListInstance(
        self._version,
        payload,
        account_sid=self._solution['account_sid'],
    )
|
java
|
/**
 * Checks whether the given year/month/day is a valid date in the
 * specified Hijri calendar variant (era ANNO_HEGIRAE).
 *
 * @param variant calendar variant key
 * @param yearOfEra year of era
 * @param month month of year
 * @param dayOfMonth day of month
 * @return true if the variant is known and the date is valid in it
 */
public static boolean isValid(
    String variant,
    int yearOfEra,
    int month,
    int dayOfMonth
) {
    EraYearMonthDaySystem<HijriCalendar> calsys = CALSYS.get(variant);
    if (calsys == null) {
        // Unknown variant: no calendar system to validate against.
        return false;
    }
    return calsys.isValid(HijriEra.ANNO_HEGIRAE, yearOfEra, month, dayOfMonth);
}
|
python
|
def load(self, env=None):
    """ Load a section values of given environment.

    If no environment is specified, it is taken from the
    ``RUNNING_MODE_ENVKEY`` environment variable, falling back to
    ``DEFAULT_RUNNING_MODE``. If the environment is unknown, a warning is
    logged and ``None`` is returned.

    :param env: environment key to load in a coercive manner
    :type env: string
    :rtype: dict
    """
    self._load()
    e = env or \
        os.environ.get(RUNNING_MODE_ENVKEY, DEFAULT_RUNNING_MODE)
    if e in self.config:
        return self.config[e]
    # logging.warn() is a deprecated alias; use logging.warning().
    logging.warning("Environment '%s' was not found.", e)
|
java
|
/**
 * Appends the given input configurations to this request, lazily creating
 * the backing list on first use.
 *
 * @param inputConfigurations configurations to append
 * @return this request, for call chaining
 */
public StartApplicationRequest withInputConfigurations(InputConfiguration... inputConfigurations) {
    if (this.inputConfigurations == null) {
        // Pre-size to the incoming count to avoid growth during the copy.
        setInputConfigurations(new java.util.ArrayList<InputConfiguration>(inputConfigurations.length));
    }
    java.util.Collections.addAll(this.inputConfigurations, inputConfigurations);
    return this;
}
|
java
|
/**
 * Returns the annotation property map for the given element, delegating
 * to the control bean context when one is available.
 *
 * @param cbc control bean context; may be null
 * @param annotElem element whose annotations are requested
 * @return the context's map, or a plain AnnotatedElementMap when cbc is null
 */
public static PropertyMap getAnnotationMap(ControlBeanContext cbc, AnnotatedElement annotElem)
{
    return (cbc == null)
            ? new AnnotatedElementMap(annotElem)
            : cbc.getAnnotationMap(annotElem);
}
|
java
|
/**
 * Returns the boolean at the given index. Accepts Boolean values as well
 * as the strings "true"/"false" in any letter case.
 *
 * @param index array index
 * @return the boolean value
 * @throws JSONException if the element cannot be interpreted as a boolean
 */
public boolean getBoolean(int index) throws JSONException {
    Object value = get(index);
    boolean isString = value instanceof String;
    if (value.equals(Boolean.TRUE) || (isString && ((String) value).equalsIgnoreCase("true"))) {
        return true;
    }
    if (value.equals(Boolean.FALSE) || (isString && ((String) value).equalsIgnoreCase("false"))) {
        return false;
    }
    throw new JSONException("JSONArray[" + index + "] is not a Boolean.");
}
|
python
|
def sample_vMF(mu, kappa, num_samples):
    r"""Generate num_samples N-dimensional samples from von Mises Fisher
    distribution around center mu \in R^N with concentration kappa.

    mu is assumed to be a unit vector of length N; the result is an
    (num_samples, N) array of unit vectors.
    """
    dim = len(mu)
    result = np.zeros((num_samples, dim))
    for nn in range(num_samples):
        # sample offset from center (on sphere) with spread kappa
        w = _sample_weight(kappa, dim)
        # sample a point v on the unit sphere that's orthogonal to mu
        v = _sample_orthonormal_to(mu)
        # compute new point: component w along mu, sqrt(1-w^2) along v.
        result[nn, :] = v * np.sqrt(1. - w ** 2) + w * mu
    return result
|
java
|
/**
 * Schedules DELETE index works for the given cache key across every
 * indexed type known to the search factory, within the supplied
 * transaction context.
 *
 * @param transactionContext transaction the works are attached to
 * @param key cache key whose index entries should be removed
 */
void removeFromIndexes(TransactionContext transactionContext, Object key) {
    Set<Work> deleteWorks = getKnownClasses().stream()
        .filter(searchFactoryHandler::hasIndex)
        .map(PojoIndexedTypeIdentifier::new)
        .map(typeId -> searchWorkCreator.createEntityWork(keyToString(key), typeId, WorkType.DELETE))
        .collect(Collectors.toSet());
    performSearchWorks(deleteWorks, transactionContext);
}
|
java
|
/**
 * Depth-first expansion of a namespace's dependency closure: adds
 * dependencyId and all of its transitive dependencies to
 * extendedDependencies, skipping ids already collected and rejecting
 * cycles that lead back to the namespace being resolved.
 *
 * @param namespace the namespace whose dependencies are being expanded
 * @param namespaceList all known namespaces to resolve ids against
 * @param dependencyId id of the dependency to add
 * @param extendedDependencies accumulator of already-collected ids
 * @throws XmlException on a circular or unresolvable dependency
 */
protected void addDependenciesRecursively(Namespace namespace, List<Namespace> namespaceList, String dependencyId, List<String> extendedDependencies) throws XmlException {
    if(extendedDependencies.contains(dependencyId)) {
        return;
    }
    if(namespace.getId().equals(dependencyId)) {
        throw new XmlException("Circular dependency found in " + namespace.getId());
    }
    Namespace dependency = find(namespaceList, dependencyId);
    // Fail with a descriptive error instead of a NullPointerException when
    // the id cannot be resolved against the known namespaces.
    if(dependency == null) {
        throw new XmlException("Unresolvable dependency '" + dependencyId + "' referenced from " + namespace.getId());
    }
    extendedDependencies.add(dependency.getId());
    for(String indirectDependencyId: dependency.getDirectDependencyIds()) {
        addDependenciesRecursively(namespace, namespaceList, indirectDependencyId, extendedDependencies);
    }
}
|
python
|
def weekday(cls, year, month, day):
    """Returns the weekday of the date. 0 = aaitabar

    Converts the given Bikram Sambat (year, month, day) to a NepDate and
    delegates to its weekday().
    """
    return NepDate.from_bs_date(year, month, day).weekday()
|
java
|
/**
 * Returns a stream that performs the given action on each element as it
 * is consumed, leaving the elements themselves unchanged. The stream is
 * wrapped via DoublePeek around this stream's iterator.
 *
 * @param action action to invoke for each element
 * @return a new DoubleStream wrapping this one
 */
@NotNull
public DoubleStream peek(@NotNull final DoubleConsumer action) {
    return new DoubleStream(params, new DoublePeek(iterator, action));
}
|
python
|
def get_longest_non_repeat_v2(string):
    """
    Find the length of the longest substring
    without repeating characters, using a sliding window over the
    last-seen index of each character.
    Return max_len and the substring as a tuple
    """
    if string is None:
        return 0, ''
    best_len = 0
    best_sub = ''
    window_start = 0
    last_seen = {}
    for pos, ch in enumerate(string):
        previous = last_seen.get(ch)
        if previous is not None and previous >= window_start:
            # Repeat inside the window: advance past the old occurrence.
            window_start = previous + 1
        elif pos - window_start + 1 > best_len:
            best_len = pos - window_start + 1
            best_sub = string[window_start:pos + 1]
        last_seen[ch] = pos
    return best_len, best_sub
|
java
|
/**
 * Renews the nonce stored under "state:value", provided it exists and has
 * not yet expired.
 *
 * @param value nonce value
 * @param state state key the nonce was stored under
 * @param time new lifetime; non-positive values fall back to the default TTL
 * @return true if the nonce was found alive and renewed, false otherwise
 */
public boolean renew(String value, String state, long time) {
    Nonce nonce = _map.get(state + ':' + value);
    if (nonce == null || nonce.hasExpired()) {
        return false;
    }
    nonce.renew(time > 0 ? time : TTL);
    return true;
}
|
python
|
def export(outfile):
    """Export image anchore data to a JSON file.

    Writes one record per image (id plus its stored image data) to the
    given path, or to stdout when outfile is '-'. Exits the process with
    status 1 on any failure. (Python 2 code: uses a print statement.)
    """
    if not nav:
        sys.exit(1)
    ecode = 0
    savelist = list()
    for imageId in imagelist:
        try:
            record = {}
            record['image'] = {}
            record['image']['imageId'] = imageId
            record['image']['imagedata'] = contexts['anchore_db'].load_image_new(imageId)
            savelist.append(record)
        except Exception as err:
            # Keep going so all missing images are reported, but remember
            # the failure for the exit code.
            anchore_print_err("could not find record for image ("+str(imageId)+")")
            ecode = 1
    if ecode == 0:
        try:
            if outfile == '-':
                print json.dumps(savelist, indent=4)
            else:
                with open(outfile, 'w') as OFH:
                    OFH.write(json.dumps(savelist))
        except Exception as err:
            anchore_print_err("operation failed: " + str(err))
            ecode = 1
    sys.exit(ecode)
|
java
|
/**
 * Replaces HTML entities in the given string with the characters they
 * denote. Named entities are resolved through the HTML-to-Java
 * translation table; numeric entities of the form &amp;#NNN; and
 * &amp;#xNNN; are decoded directly. Unknown entities are left verbatim.
 *
 * <p>NOTE(review): the entity pattern only admits [0-9] digits, so hex
 * entities containing a-f are never matched — confirm this is intended.
 *
 * @param html the HTML text; may be null
 * @return the decoded text, or null if html was null
 */
@Pure
@SuppressWarnings("checkstyle:magicnumber")
public static String parseHTML(String html) {
    if (html == null) {
        return null;
    }
    final Map<String, Integer> transTbl = getHtmlToJavaTranslationTable();
    assert transTbl != null;
    if (transTbl.isEmpty()) {
        return html;
    }
    final Pattern pattern = Pattern.compile("[&](([a-zA-Z]+)|(#x?[0-9]+))[;]"); //$NON-NLS-1$
    final Matcher matcher = pattern.matcher(html);
    final StringBuilder result = new StringBuilder();
    String entity;
    Integer isoCode;
    int lastIndex = 0;
    while (matcher.find()) {
        // Copy the plain text between the previous entity and this one.
        result.append(html, lastIndex, matcher.start());
        lastIndex = matcher.end();
        entity = matcher.group(1);
        if (entity.startsWith("#x")) { //$NON-NLS-1$
            // Hexadecimal numeric entity. Only NumberFormatException can
            // occur here; the previous catch (Throwable) would also have
            // hidden genuine errors such as OutOfMemoryError.
            try {
                isoCode = Integer.valueOf(entity.substring(2), 16);
            } catch (NumberFormatException exception) {
                isoCode = null;
            }
        } else if (entity.startsWith("#")) { //$NON-NLS-1$
            // Decimal numeric entity.
            try {
                isoCode = Integer.valueOf(entity.substring(1));
            } catch (NumberFormatException exception) {
                isoCode = null;
            }
        } else {
            // Named entity: resolve via the translation table.
            isoCode = transTbl.get(entity);
        }
        if (isoCode == null) {
            // Unknown or unparsable entity: emit it unchanged.
            result.append(matcher.group());
        } else {
            result.append((char) isoCode.intValue());
        }
    }
    if (lastIndex < html.length()) {
        result.append(html.substring(lastIndex));
    }
    return result.toString();
}
|
java
|
/**
 * Loads the named class, preferring (in order): classes already loaded by
 * this loader, classes registered in {@code customClasses}, classes found
 * via {@code oldFindClass}, and finally the parent loader.
 *
 * NOTE(review): synchronized on this loader instance; presumably predates
 * parallel-capable classloaders -- confirm before changing the locking.
 */
protected synchronized Class loadClass(final String name, boolean resolve) throws ClassNotFoundException {
    Class c = this.findLoadedClass(name);
    if (c != null) return c;
    // Custom (e.g. generated/injected) classes take precedence over lookup.
    c = (Class) customClasses.get(name);
    if (c != null) return c;
    try {
        c = oldFindClass(name);
    } catch (ClassNotFoundException cnfe) {
        // IGNORE -- fall through to the parent loader below.
    }
    if (c == null) c = super.loadClass(name, resolve);
    if (resolve) resolveClass(c);
    return c;
}
|
java
|
/**
 * Restarts the given virtual machine asynchronously.
 *
 * @param resourceGroupName the resource group containing the VM
 * @param vmName the name of the virtual machine
 * @param serviceCallback the callback invoked on success or failure
 * @return a {@link ServiceFuture} tracking the long-running restart operation
 */
public ServiceFuture<OperationStatusResponseInner> restartAsync(String resourceGroupName, String vmName, final ServiceCallback<OperationStatusResponseInner> serviceCallback) {
    return ServiceFuture.fromResponse(restartWithServiceResponseAsync(resourceGroupName, vmName), serviceCallback);
}
|
python
|
def _extract_country_code(number):
    """Extracts country calling code from number.
    Returns a 2-tuple of (country_calling_code, rest_of_number). It assumes
    that the leading plus sign or IDD has already been removed. Returns (0,
    number) if number doesn't start with a valid country calling code.
    """
    if not number or number[0] == U_ZERO:
        # Country codes do not begin with a '0'.
        return (0, number)
    max_digits = min(len(number), _MAX_LENGTH_COUNTRY_CODE)
    for num_digits in range(1, max_digits + 1):
        try:
            candidate = int(number[:num_digits])
        except Exception:
            # Non-numeric prefix: try a longer slice (matches prior behavior).
            continue
        if candidate in COUNTRY_CODE_TO_REGION_CODE:
            return (candidate, number[num_digits:])
    return (0, number)
|
java
|
/**
 * Convenience overload: parses {@code uriString} (null-safe) and delegates
 * to {@link #setImageURI(Uri, Object)}.
 */
public void setImageURI(@Nullable String uriString, @Nullable Object callerContext) {
    if (uriString == null) {
        setImageURI((Uri) null, callerContext);
    } else {
        setImageURI(Uri.parse(uriString), callerContext);
    }
}
|
python
|
def _validate(dns_proto, dns_servers, ip_proto, ip_addrs, gateway):
'''
Ensure that the configuration passed is formatted correctly and contains
valid IP addresses, etc.
'''
errors = []
# Validate DNS configuration
if dns_proto == 'dhcp':
if dns_servers is not None:
errors.append(
'The dns_servers param cannot be set if unless dns_proto is '
'set to \'static\''
)
else:
if str(dns_servers).lower() in ['none', '[]']:
pass
elif not isinstance(dns_servers, list):
errors.append(
'The dns_servers param must be formatted as a list'
)
else:
bad_ips = [x for x in dns_servers
if not salt.utils.validate.net.ipv4_addr(x)]
if bad_ips:
errors.append('Invalid DNS server IPs: {0}'
.format(', '.join(bad_ips)))
# Validate IP configuration
if ip_proto == 'dhcp':
if ip_addrs is not None:
errors.append(
'The ip_addrs param cannot be set if unless ip_proto is set '
'to \'static\''
)
if gateway is not None:
errors.append(
'A gateway IP cannot be set if unless ip_proto is set to '
'\'static\''
)
else:
if not ip_addrs:
errors.append(
'The ip_addrs param is required to set static IPs'
)
elif not isinstance(ip_addrs, list):
errors.append(
'The ip_addrs param must be formatted as a list'
)
else:
bad_ips = [x for x in ip_addrs
if not salt.utils.validate.net.ipv4_addr(x)]
if bad_ips:
errors.append('The following static IPs are invalid: '
'{0}'.format(', '.join(bad_ips)))
# Validate default gateway
if gateway is not None:
if not salt.utils.validate.net.ipv4_addr(gateway):
errors.append('Gateway IP {0} is invalid'.format(gateway))
return errors
|
java
|
/**
 * Resolves the current user, challenging with HTTP Basic authentication
 * when no user is attached to the application request yet.
 *
 * On a missing or failed Authorization header this sends a 401 with a
 * WWW-Authenticate challenge and aborts via
 * ServletRequestAlreadyRedirectedException.
 *
 * NOTE(review): the credentials are taken as header.substring(6), i.e. the
 * header is assumed to start with exactly "Basic " -- no scheme check is
 * performed; confirm callers only see Basic auth.
 * NOTE(review): logging goes through System.out.println rather than a
 * logger -- presumably legacy; confirm before cleaning up.
 */
public static User getUserByBasicAuthentication(Request applicationRequest, HttpServletRequest request, HttpServletResponse response, String realmDescription) throws IOException, ServletRequestAlreadyRedirectedException
{
	// Request applicationRequest = application.getCurrentRequest();
	// String realmId = applicationRequest.getRealm().getId();
	if (applicationRequest == null)
	{
		throw new IllegalStateException("application request is missing; is entry point configured?");
	}
	User user = applicationRequest.getUser();
	if (user == null)
	{
		String header = request.getHeader("Authorization");
		if (header == null)
		{
			// No credentials supplied yet: issue the Basic challenge.
			response.addHeader("WWW-Authenticate", "Basic realm=\"" + (realmDescription != null ? realmDescription : "access to realm IGLU") + '\"');
			response.sendError(401);
			throw new ServletRequestAlreadyRedirectedException("user must be authenticated first");
		}
		else
		{
			// Strip the "Basic " prefix; the remainder is base64 user:pass.
			String credentials = header.substring(6);
			System.out.println(new LogEntry("about to login"));
			try
			{
				user = applicationRequest.login(new Base64EncodedCredentials(credentials));
			}
			catch(AuthenticationException ae)
			{
				// Bad credentials: re-challenge instead of propagating.
				System.out.println(new LogEntry(ae.getMessage()));
				response.addHeader("WWW-Authenticate", "Basic realm=\"" + (realmDescription != null ? realmDescription : "access to realm IGLU") + '\"');
				response.sendError(401);
				throw new ServletRequestAlreadyRedirectedException("user must be authenticated first");
			}
			System.out.println(new LogEntry("logged in:" + user.getId()));
		}
	}
	return user;
}
|
python
|
def translate(self):
    """Run CheckTranslator over the document and collect its checkers."""
    checker_visitor = CheckTranslator(
        self.document,
        contents=self.contents,
        filename=self.filename,
        ignore=self.ignore,
    )
    self.document.walkabout(checker_visitor)
    self.checkers += checker_visitor.checkers
|
python
|
def run(self, node, client):
    """
    Upload the file, retaining permissions.
    See also L{Deployment.run}

    FIX: the source file is now opened with a context manager; previously
    ``open(self.source, 'rb').read()`` leaked the file handle (relied on
    GC to close it, which is not guaranteed on all interpreters).
    """
    # Mirror the source file's mode bits onto the uploaded copy.
    perms = os.stat(self.source).st_mode
    with open(self.source, 'rb') as source_file:
        client.put(path=self.target, chmod=perms,
                   contents=source_file.read())
    return node
|
python
|
def has_coverage(self):
    """
    Returns a boolean for is a parseable .coverage file can be found in the repository
    :return: bool
    """
    coverage_path = self.git_dir + os.sep + '.coverage'
    if not os.path.exists(coverage_path):
        return False
    try:
        with open(coverage_path, 'r') as handle:
            raw = handle.read()
        # The legacy .coverage format is '!'-separated with JSON as the
        # third segment; any parse failure means "not parseable".
        json.loads(raw.split('!')[2])
        return True
    except Exception:
        return False
|
java
|
/**
 * Converts a non-negative number to its bijective base-26 alphabetic
 * representation: 0 -> "A"/"a", 25 -> "Z"/"z", 26 -> "AA"/"aa", ...
 *
 * @param i the number to convert; must be in [0, Long.MAX_VALUE)
 * @param uppercase whether to produce upper-case letters
 * @return the alphabetic representation
 * @throws IllegalArgumentException if {@code i} is negative or Long.MAX_VALUE
 */
public static String toAlpha(long i, boolean uppercase) {
    if (i < 0)
        throw new IllegalArgumentException(
                "Argument must be non-negative, was " + i);
    if (i == Long.MAX_VALUE)
        throw new IllegalArgumentException(
                "Argument must be smaller than Long.MAX_VALUE");
    final int base = (uppercase ? 'A' : 'a') - 1;
    final StringBuilder out = new StringBuilder();
    long remaining = i + 1;
    while (remaining > 0) {
        long digit = remaining % 26;
        if (digit == 0) {
            digit = 26; // bijective numeration has no zero digit
        }
        out.append((char) (base + digit));
        remaining = (remaining - digit) / 26;
    }
    return out.reverse().toString();
}
|
java
|
/**
 * Returns x / y in this finite field via the precomputed division table.
 * Preconditions (assert-only, not enforced in production):
 * 0 &lt;= x &lt; field size and 0 &lt; y &lt; field size (division by zero excluded).
 */
public int divide(int x, int y) {
    assert(x >= 0 && x < getFieldSize() && y > 0 && y < getFieldSize());
    return divTable[x][y];
}
|
python
|
def set_ttl(self, key, ttl):
    """ Sets time to live for @key to @ttl seconds
    -> #bool True if the timeout was set
    """
    full_key = self.get_key(key)
    return self._client.expire(full_key, ttl)
|
python
|
def collectstatic(force=False):
    """
    collect static files for production httpd
    If run with ``settings.DEBUG==True``, this is a no-op
    unless ``force`` is set to ``True``
    """
    from django.conf import settings
    # noise reduction: only collectstatic if not in debug mode
    if settings.DEBUG and not force:
        print('... skipping collectstatic as settings.DEBUG=True; If you want to generate staticfiles anyway, run ape collectstatic instead;')
    else:
        tasks.manage('collectstatic', '--noinput')
        print('... finished collectstatic')
        print('')
|
python
|
async def disconnect(self, conn_id):
    """Asynchronously disconnect from a connected device.

    Marks the connection's device object as disconnected and tears down
    the state tracked for ``conn_id``.

    Args:
        conn_id (int): A unique identifier that refers to an existing,
            currently-connected connection.
    """
    # NOTE(review): the old docstring documented a ``callback`` parameter
    # that this coroutine does not take; removed from the docs.
    # Raises if conn_id is not a live connection (behavior defined by
    # _ensure_connection elsewhere in this class -- confirm).
    self._ensure_connection(conn_id, True)
    dev = self._get_property(conn_id, 'device')
    dev.connected = False
    self._teardown_connection(conn_id)
|
java
|
/**
 * Advances and returns the next value of the current delta range.
 *
 * FIX: {@code mutex.unlock()} is now in a finally block; previously an
 * exception thrown by {@code d.update(value)} would leave the lock held
 * forever, deadlocking every subsequent caller.
 */
public long nextLong() {
    Delta d = ranges.get();
    if (d.start <= d.stop) {
        mutex.lock();
        try {
            value = d.update(value);
        } finally {
            mutex.unlock();
        }
    }
    // Post-increment: return the current start, then advance it.
    return d.start++;
}
|
java
|
/**
 * Synchronously begins replacing the content of the given runbook by
 * blocking on the async service call.
 *
 * @param resourceGroupName the resource group of the automation account
 * @param automationAccountName the automation account name
 * @param runbookName the runbook whose content is replaced
 * @param runbookContent the new runbook content
 * @return the service response body
 */
public String beginReplaceContent(String resourceGroupName, String automationAccountName, String runbookName, String runbookContent) {
    return beginReplaceContentWithServiceResponseAsync(resourceGroupName, automationAccountName, runbookName, runbookContent).toBlocking().single().body();
}
|
python
|
def create_imagefile(options, filename, latlon, ground_width, path_objs, mission_obj, fence_obj, width=600, height=600, used_flightmodes=None, mav_type=None):
    '''create path and mission as an image file

    Renders map tiles for the area around ``latlon``, draws the supplied
    path/mission/fence objects on top, optionally adds a flight-mode
    legend, and writes the result to ``filename`` via OpenCV.
    '''
    # FIX: used_flightmodes previously defaulted to a mutable list ([]),
    # which is a shared-default hazard -- and the legend code below calls
    # .keys() on it, which lists do not have. Default is now a fresh dict.
    if used_flightmodes is None:
        used_flightmodes = {}
    mt = mp_tile.MPTile(service=options.service)
    map_img = mt.area_to_image(latlon[0], latlon[1],
                               width, height, ground_width)
    # Re-render until every tile has been downloaded.
    while mt.tiles_pending() > 0:
        print("Waiting on %u tiles" % mt.tiles_pending())
        time.sleep(1)
        map_img = mt.area_to_image(latlon[0], latlon[1],
                                   width, height, ground_width)
    # a function to convert from (lat,lon) to (px,py) on the map
    pixmapper = functools.partial(pixel_coords, ground_width=ground_width, mt=mt, topleft=latlon, width=width)
    for path_obj in path_objs:
        path_obj.draw(map_img, pixmapper, None)
    if mission_obj is not None:
        for m in mission_obj:
            m.draw(map_img, pixmapper, None)
    if fence_obj is not None:
        fence_obj.draw(map_img, pixmapper, None)
    if (options is not None and
        mav_type is not None and
        options.colour_source == "flightmode"):
        # Build a legend of (mode, colour) entries for the modes seen.
        tuples = [ (mode, colour_for_flightmode(mav_type, mode))
                   for mode in used_flightmodes.keys() ]
        legend = mp_slipmap.SlipFlightModeLegend("legend", tuples)
        legend.draw(map_img, pixmapper, None)
    # OpenCV expects BGR ordering on write; the renderer produced RGB.
    map_img = cv2.cvtColor(map_img, cv2.COLOR_BGR2RGB)
    cv2.imwrite(filename, map_img)
|
python
|
def venn3_unweighted(subsets, set_labels=('A', 'B', 'C'), set_colors=('r', 'g', 'b'), alpha=0.4, normalize_to=1.0, subset_areas=(1, 1, 1, 1, 1, 1, 1), ax=None, subset_label_formatter=None):
    '''
    The version of venn3 without area-weighting.
    It is implemented as a wrapper around venn3. Namely, venn3 is invoked as usual, but with all subset areas
    set to 1. The subset labels are then replaced in the resulting diagram with the provided subset sizes.
    The parameters are all the same as that of venn3 (the old docstring said
    "venn2", but this function wraps and mirrors venn3).
    In addition there is a subset_areas parameter, which specifies the actual subset areas.
    (it is (1, 1, 1, 1, 1, 1, 1) by default. You are free to change it, within reason).
    '''
    # Draw with the fixed (usually equal) areas ...
    v = venn3(subset_areas, set_labels, set_colors, alpha, normalize_to, ax)
    # Now rename the labels
    if subset_label_formatter is None:
        subset_label_formatter = str
    # Region ids in venn3's binary-membership encoding (A,B,C).
    subset_ids = ['100', '010', '110', '001', '101', '011', '111']
    if isinstance(subsets, dict):
        subsets = [subsets.get(t, 0) for t in subset_ids]
    elif len(subsets) == 3:
        # Three whole-set collections: derive the 7 region sizes.
        subsets = compute_venn3_subsets(*subsets)
    for n, id in enumerate(subset_ids):
        lbl = v.get_label_by_id(id)
        if lbl is not None:
            lbl.set_text(subset_label_formatter(subsets[n]))
    return v
|
java
|
/**
 * Returns the first CPDefinitionSpecificationOptionValue for the given
 * CP definition id, in the given order.
 *
 * @throws NoSuchCPDefinitionSpecificationOptionValueException if no
 *         matching row exists
 */
@Override
public CPDefinitionSpecificationOptionValue findByCPDefinitionId_First(
    long CPDefinitionId,
    OrderByComparator<CPDefinitionSpecificationOptionValue> orderByComparator)
    throws NoSuchCPDefinitionSpecificationOptionValueException {
    CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue =
        fetchByCPDefinitionId_First(CPDefinitionId, orderByComparator);
    if (cpDefinitionSpecificationOptionValue != null) {
        return cpDefinitionSpecificationOptionValue;
    }
    // Build the "no such entity" message; _NO_SUCH_ENTITY_WITH_KEY
    // presumably supplies the opening brace matched by the "}" below.
    StringBundler msg = new StringBundler(4);
    msg.append(_NO_SUCH_ENTITY_WITH_KEY);
    msg.append("CPDefinitionId=");
    msg.append(CPDefinitionId);
    msg.append("}");
    throw new NoSuchCPDefinitionSpecificationOptionValueException(msg.toString());
}
|
java
|
/**
 * Builds the icon path for the given name and size under the workplace
 * skin's icon folder.
 *
 * NOTE(review): the file name is derived from {@code name.hashCode()},
 * so two names with colliding hash codes would map to the same icon
 * file -- confirm this is acceptable upstream.
 */
private String toPath(String name, IconSize size) {
    return CmsStringUtil.joinPaths(CmsWorkplace.getSkinUri(), ICON_FOLDER, "" + name.hashCode()) + size.getSuffix();
}
|
python
|
def getVATAmount(self):
    """ Compute AnalysisProfileVATAmount: price times VAT percentage.
    """
    price = self.getAnalysisProfilePrice()
    vat = self.getAnalysisProfileVAT()
    return (float(price) * float(vat)) / 100
|
python
|
def run_in_transaction(self, func, *args, **kw):
    """Perform a unit of work in a transaction, retrying on abort.
    :type func: callable
    :param func: takes a required positional argument, the transaction,
                 and additional positional / keyword arguments as supplied
                 by the caller.
    :type args: tuple
    :param args: additional positional arguments to be passed to ``func``.
    :type kw: dict
    :param kw: optional keyword arguments to be passed to ``func``.
               If passed, "timeout_secs" will be removed and used to
               override the default timeout.
    :rtype: :class:`datetime.datetime`
    :returns: timestamp of committed transaction
    :raises RuntimeError: if called while a transaction is already
        running on this thread (nesting is not supported).
    """
    # Sanity check: Is there a transaction already running?
    # If there is, then raise a red flag. Otherwise, mark that this one
    # is running. The flag lives in thread-local storage, so the guard
    # applies per-thread.
    if getattr(self._local, "transaction_running", False):
        raise RuntimeError("Spanner does not support nested transactions.")
    self._local.transaction_running = True
    # Check out a session and run the function in a transaction; once
    # done, flip the sanity check bit back (even on failure, so the
    # thread is not permanently locked out).
    try:
        with SessionCheckout(self._pool) as session:
            return session.run_in_transaction(func, *args, **kw)
    finally:
        self._local.transaction_running = False
|
java
|
/**
 * Creates a DataNode from HTML-encoded (escaped) data.
 *
 * @param encodedData HTML-escaped data; entities are unescaped here
 * @param baseUri base URI -- NOTE(review): currently unused by this
 *     implementation, presumably retained for API compatibility with
 *     sibling factory methods; confirm before removing.
 * @return a new DataNode holding the unescaped data
 */
public static DataNode createFromEncoded(String encodedData, String baseUri) {
    String data = Entities.unescape(encodedData);
    return new DataNode(data);
}
|
python
|
def location_path(self):
    """
    Return the Location-Path of the response.
    :rtype : String
    :return: the Location-Path option
    """
    target_number = defines.OptionRegistry.LOCATION_PATH.number
    segments = [str(option.value) for option in self.options
                if option.number == target_number]
    return "/".join(segments)
|
python
|
def get_profile_names_and_default() -> (
        typing.Tuple[typing.Sequence[str], typing.Optional[Profile]]):
    """Return the list of profile names and the default profile object.
    The list of names is sorted.
    """
    with ProfileStore.open() as config:
        names = sorted(config)
        default = config.default
    return names, default
|
java
|
/**
 * EMF-generated reflective getter: returns the value of the feature with
 * the given id for this GCPARC object, delegating unknown ids to the
 * superclass.
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
	switch (featureID) {
		case AfplibPackage.GCPARC__XCENT:
			return getXCENT();
		case AfplibPackage.GCPARC__YCENT:
			return getYCENT();
		case AfplibPackage.GCPARC__MH:
			return getMH();
		case AfplibPackage.GCPARC__MFR:
			return getMFR();
		case AfplibPackage.GCPARC__START:
			return getSTART();
		case AfplibPackage.GCPARC__SWEEP:
			return getSWEEP();
	}
	return super.eGet(featureID, resolve, coreType);
}
|
python
|
def single_device(cl_device_type='GPU', platform=None, fallback_to_any_device_type=False):
    """Get a list containing a single device environment, for a device of the given type on the given platform.
    This will only fetch devices that support double (possibly only double with a pragma
    defined, but still, it should support double).
    Args:
        cl_device_type (cl.device_type.* or string): The type of the device we want,
            can be a opencl device type or a string matching 'GPU', 'CPU' or 'ALL'.
        platform (opencl platform): The opencl platform to select the devices from
        fallback_to_any_device_type (boolean): If True, try to fallback to any possible device in the system.
    Returns:
        list of CLEnvironment: List with one element, the CL runtime environment requested.
    Raises:
        ValueError: if no matching device could be found.
    """
    # FIX: the old implementation declared `device = None`, never assigned
    # it, and guarded the fallback/raise with `if not device:` -- which was
    # always true, leaving a final `raise` unreachable. The dead variable
    # and unreachable branch are removed; observable behavior is unchanged.
    if isinstance(cl_device_type, str):
        cl_device_type = device_type_from_string(cl_device_type)
    if platform is None:
        platforms = cl.get_platforms()
    else:
        platforms = [platform]
    for platform in platforms:
        for dev in platform.get_devices(device_type=cl_device_type):
            if device_supports_double(dev):
                try:
                    # Return the first double-capable device we can wrap.
                    return [CLEnvironment(platform, dev)]
                except cl.RuntimeError:
                    # Environment creation failed for this device; try next.
                    pass
    if fallback_to_any_device_type:
        return cl.get_platforms()[0].get_devices()
    raise ValueError('No devices of the specified type ({}) found.'.format(
        cl.device_type.to_string(cl_device_type)))
|
python
|
def get_or_create_environment(self, repo: str, branch: str, git_repo: Repo, repo_path: Path) -> str:
    """ Handles the requirements in the target repository, returns a path to a executable of the virtualenv.
    """
    venv_root = self.get_or_create_venv(repo_path).resolve()
    return str(venv_root / "bin" / "python")
|
python
|
def _parse_line_section(self, line):
"""
Parse a line containing a group definition. Returns a tuple:
(group_type, group_name), where group_type is in the set ('hosts',
'children', 'vars').
For example:
[prod]
Returns:
('hosts', 'prod')
For example:
[prod:children]
Returns:
('children', 'prod')
"""
m = re.match("\[(.*)\]", line)
group_def = m.groups()[0]
if ':' in group_def:
group_name, group_type = group_def.split(':')
else:
group_name = group_def
group_type = 'hosts'
return (group_type, group_name)
|
java
|
/**
 * Applies a single @define value to the compiler options. String values
 * are interpreted as boolean literals ("true"/"false"), then as doubles,
 * and otherwise as string literals; Boolean/Integer/Double are set
 * directly.
 *
 * @return true if the value type was recognized and applied
 */
private boolean setDefine(CompilerOptions options,
    String key, Object value) {
    if (value instanceof String) {
        final String str = (String) value;
        if ("true".equals(str) || "false".equals(str)) {
            options.setDefineToBooleanLiteral(key, "true".equals(str));
        } else {
            try {
                options.setDefineToDoubleLiteral(key, Double.parseDouble(str));
            } catch (NumberFormatException nfe) {
                // Not a number, assume string
                options.setDefineToStringLiteral(key, str);
            }
        }
        return true;
    }
    if (value instanceof Boolean) {
        options.setDefineToBooleanLiteral(key, (Boolean) value);
        return true;
    }
    if (value instanceof Integer) {
        options.setDefineToNumberLiteral(key, (Integer) value);
        return true;
    }
    if (value instanceof Double) {
        options.setDefineToDoubleLiteral(key, (Double) value);
        return true;
    }
    return false;
}
|
java
|
/**
 * Adds the given conditional-forwarder IP addresses to this request
 * (varargs variant). Lazily initializes the backing list on first use.
 *
 * @param conditionalForwarderIpAddrs addresses to append
 * @return this request, for call chaining
 */
public CreateTrustRequest withConditionalForwarderIpAddrs(String... conditionalForwarderIpAddrs) {
    if (this.conditionalForwarderIpAddrs == null) {
        setConditionalForwarderIpAddrs(new com.amazonaws.internal.SdkInternalList<String>(conditionalForwarderIpAddrs.length));
    }
    for (String ele : conditionalForwarderIpAddrs) {
        this.conditionalForwarderIpAddrs.add(ele);
    }
    return this;
}
|
python
|
def get_link_name (self, tag, attrs, attr):
    """Parse attrs for link name. Return name of link."""
    if tag == 'a' and attr == 'href':
        # Look for name only up to MAX_NAMELEN characters
        raw = self.parser.peek(MAX_NAMELEN)
        decoded = raw.decode(self.parser.encoding, "ignore")
        # Prefer the anchor text; fall back to the title attribute.
        name = linkname.href_name(decoded) or attrs.get_true('title', u'')
    elif tag == 'img':
        # Prefer alt text; fall back to the title attribute.
        name = attrs.get_true('alt', u'') or attrs.get_true('title', u'')
    else:
        name = u""
    return name
|
python
|
def get_containing(self, name, depth = 0):
    """Return the n-th (n = ``depth``) context containing attribute named ``name``."""
    # Bypass any __getattribute__ overrides: read the raw instance dict.
    own_attrs = object.__getattribute__(self, '__dict__')
    if name in own_attrs:
        if depth <= 0:
            return self
        # This context counts toward the requested depth; keep climbing.
        depth -= 1
    parent = own_attrs.get('_parent')
    return None if parent is None else parent.get_containing(name, depth = depth)
|
python
|
def register (self, cmd):
    """Register a new command with the tool. 'cmd' is expected to be an instance
    of `Command`, although here only the `cmd.name` attribute is
    investigated. Multiple commands with the same name are not allowed to
    be registered. Returns 'self'.
    """
    name = cmd.name
    if name is None:
        raise ValueError ('no name set for Command object %r' % cmd)
    if name in self.commands:
        raise ValueError ('a command named "%s" has already been '
                          'registered' % name)
    self.commands[name] = cmd
    return self
|
java
|
/**
 * Returns a range of CommerceVirtualOrderItems with no ordering applied
 * (delegates to the three-argument overload with a null comparator).
 *
 * @param start lower bound of the range (inclusive)
 * @param end upper bound of the range (exclusive)
 */
@Override
public List<CommerceVirtualOrderItem> findAll(int start, int end) {
    return findAll(start, end, null);
}
|
python
|
def bunzip2(filename):
    """Uncompress `filename` in place.

    The file is renamed aside, decompressed back to the original name in
    512 KiB chunks, and the original stat/mode bits are restored.

    FIX: the source and destination files are now opened with context
    managers; previously an exception mid-copy leaked both handles and
    left the temp file behind without closing anything.
    """
    log.debug("Uncompressing %s", filename)
    tmpfile = "%s.tmp" % filename
    os.rename(filename, tmpfile)
    with bz2.BZ2File(tmpfile) as compressed, open(filename, "wb") as out:
        while True:
            block = compressed.read(512 * 1024)
            if not block:
                break
            out.write(block)
    # Preserve timestamps and permissions from the original file.
    shutil.copystat(tmpfile, filename)
    shutil.copymode(tmpfile, filename)
    os.unlink(tmpfile)
|
java
|
/**
 * Attempts to reconfigure this member to the given type, retrying on an
 * election-timeout schedule until the cluster accepts or definitively
 * rejects the change.
 *
 * NOTE(review): retry/backoff timing is tied to the election timeout
 * (doubled on retriable errors); exact statement order matters here --
 * the timer is armed before the request is submitted so a lost response
 * still retries.
 */
private void configure(RaftMember.Type type, CompletableFuture<Void> future) {
    // Set a timer to retry the attempt to leave the cluster.
    configureTimeout = cluster.getContext().getThreadContext().schedule(cluster.getContext().getElectionTimeout(), () -> {
        configure(type, future);
    });
    // Attempt to leave the cluster by submitting a LeaveRequest directly to the server state.
    // Non-leader states should forward the request to the leader if there is one. Leader states
    // will log, replicate, and commit the reconfiguration.
    cluster.getContext().getRaftRole().onReconfigure(ReconfigureRequest.builder()
        .withIndex(cluster.getConfiguration().index())
        .withTerm(cluster.getConfiguration().term())
        .withMember(new DefaultRaftMember(id, type, updated))
        .build()).whenComplete((response, error) -> {
        if (error == null) {
            if (response.status() == RaftResponse.Status.OK) {
                // Accepted: adopt the new configuration and resolve.
                cancelConfigureTimer();
                cluster.configure(new Configuration(response.index(), response.term(), response.timestamp(), response.members()));
                future.complete(null);
            } else if (response.error() == null
                || response.error().type() == RaftError.Type.UNAVAILABLE
                || response.error().type() == RaftError.Type.PROTOCOL_ERROR
                || response.error().type() == RaftError.Type.NO_LEADER) {
                // Retriable failure: back off to 2x the election timeout.
                cancelConfigureTimer();
                configureTimeout = cluster.getContext().getThreadContext().schedule(cluster.getContext().getElectionTimeout().multipliedBy(2), () -> configure(type, future));
            } else {
                // Non-retriable error: surface it to the caller.
                cancelConfigureTimer();
                future.completeExceptionally(response.error().createException());
            }
        } else {
            future.completeExceptionally(error);
        }
    });
}
|
python
|
def from_arrow_schema(arrow_schema):
    """ Convert schema from Arrow to Spark.
    """
    fields = []
    for field in arrow_schema:
        spark_type = from_arrow_type(field.type)
        fields.append(StructField(field.name, spark_type, nullable=field.nullable))
    return StructType(fields)
|
python
|
def _filtered_list(self, selector):
"""Iterate over `self.obj` list, extracting `selector` from each
element. The `selector` can be a simple integer index, or any valid
key (hashable object).
"""
res = []
for elem in self.obj:
self._append(elem, selector, res)
return res
|
java
|
/**
 * Resolves an integer configuration value, preferring the EL expression
 * over the direct value. Returns null when {@code immediateOnly} is set
 * and the expression is deferred (non-immediate).
 *
 * FIX: the "evaluated to null" error message was missing a space after
 * the closing quote ("...'evaluated to null.").
 *
 * @throws IllegalArgumentException if the expression evaluates to null
 *         or to a non-numeric value
 */
protected Integer processInt(String name, String expression, int value, boolean immediateOnly) {
    Integer result = null;
    boolean immediate = false;
    /*
     * The expression language value takes precedence over the direct setting.
     */
    if (expression.isEmpty()) {
        /*
         * Direct setting.
         */
        result = value;
    } else {
        /*
         * Evaluate the EL expression to get the value.
         */
        Object obj = evaluateElExpression(expression);
        if (obj == null) {
            throw new IllegalArgumentException("EL expression '" + expression + "' for '" + name + "' evaluated to null.");
        } else if (obj instanceof Number) {
            result = ((Number) obj).intValue();
            immediate = isImmediateExpression(expression);
        } else {
            throw new IllegalArgumentException("Expected '" + name + "' to evaluate to an integer value.");
        }
    }
    return (immediateOnly && !immediate) ? null : result;
}
|
java
|
/**
 * Builds the initial population: generates {@code populationSize} diverse
 * solutions and stores each after local improvement.
 */
public void initializationPhase() {
    population = new ArrayList<>(populationSize);
    for (int count = 0; count < populationSize; count++) {
        population.add(improvement(diversificationGeneration()));
    }
}
|
python
|
def removeLogger(self, logger):
    """
    Removes the inputed logger from the set for this widget.
    :param logger | <str> || <logging.Logger>
    """
    # Accept either a logger object or its name.
    name = logger.name if isinstance(logger, logging.Logger) else logger
    if name not in self._loggers:
        return
    self._loggers.remove(name)
    # 'root' maps to the root logger; anything else is looked up by name.
    target = logging.getLogger() if name == 'root' else logging.getLogger(name)
    target.removeHandler(self.handler())
|
python
|
def snapshot_peek_sigb64( fd, off, bytelen ):
    """
    Read the last :bytelen bytes of
    fd and interpret it as a base64-encoded
    string.

    Returns the raw base64 bytes on success, or None if the read came up
    short or the bytes are not valid base64.

    FIX: the bare ``except:`` around b64decode also swallowed
    KeyboardInterrupt/SystemExit; narrowed to the decode errors base64
    actually raises (binascii.Error is a ValueError subclass).
    """
    fd.seek( off - bytelen, os.SEEK_SET )
    sigb64 = fd.read(bytelen)
    if len(sigb64) != bytelen:
        # Short read: the file is smaller than expected.
        return None
    try:
        # Decode only to validate; the encoded form is what gets returned.
        base64.b64decode(sigb64)
    except (TypeError, ValueError):
        return None
    return sigb64
|
java
|
/**
 * Executes the given fax command as an external process and validates its
 * output for the given action type.
 *
 * @param command the command line to run
 * @param faxActionType the fax action the command implements
 * @return the validated process output
 */
protected ProcessOutput executeProcess(String command,FaxActionType faxActionType)
{
	//execute process
	ProcessOutput processOutput=ProcessExecutorHelper.executeProcess(this,command);
	//validate process output (throws on invalid output -- behavior defined
	//by the configured processOutputValidator)
	this.processOutputValidator.validateProcessOutput(this,processOutput,faxActionType);
	return processOutput;
}
|
python
|
def get(key, service=None, profile=None):  # pylint: disable=W0613
    '''
    Get a value from the etcd service
    '''
    client = _get_conn(profile)
    result = client.get(key)
    value = result.value
    return value
|
python
|
def clean(self):
    """Return a copy of this Text instance with invalid characters removed."""
    cleaned_text = self.__text_cleaner.clean(self[TEXT])
    return Text(cleaned_text, **self.__kwargs)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.