language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
/**
 * Applies the given page updates to a fresh {@link UpdateablePageCollection}.
 *
 * Updates are consumed in iteration order; consecutive updates that resolve to
 * the same page are accumulated and committed with a single
 * {@code Page.update} call once the iteration moves on to a different page.
 * The final batch is flushed after the loop completes.
 *
 * @param updates iterator of key/value updates; assumes updates for the same
 *                page arrive adjacently so batching is effective -- TODO
 *                confirm ordering guarantee with callers
 * @param timer   timeout budget propagated to page lookups
 * @return a future completing with the populated page collection
 */
private CompletableFuture<UpdateablePageCollection> applyUpdates(Iterator<PageEntry> updates, TimeoutTimer timer) {
    UpdateablePageCollection pageCollection = new UpdateablePageCollection(this.state.get().length);
    // The page currently being accumulated into, and its pending updates.
    AtomicReference<PageWrapper> lastPage = new AtomicReference<>(null);
    val lastPageUpdates = new ArrayList<PageEntry>();
    return Futures.loop(
            updates::hasNext,
            () -> {
                // Locate the page where the update is to be executed. Do not apply it yet as it is more efficient
                // to bulk-apply multiple at once. Collect all updates for each Page, and only apply them once we have
                // "moved on" to another page.
                PageEntry next = updates.next();
                return locatePage(next.getKey(), pageCollection, timer)
                        .thenAccept(page -> {
                            PageWrapper last = lastPage.get();
                            if (page != last) {
                                // This key goes to a different page than the one we were looking at.
                                if (last != null) {
                                    // Commit the outstanding updates.
                                    last.getPage().update(lastPageUpdates);
                                }
                                // Update the pointers.
                                lastPage.set(page);
                                lastPageUpdates.clear();
                            }
                            // Record the current update.
                            lastPageUpdates.add(next);
                        });
            },
            this.executor)
            .thenApplyAsync(v -> {
                // We must not forget to apply the last batch of updates from the last page.
                if (lastPage.get() != null) {
                    lastPage.get().getPage().update(lastPageUpdates);
                }
                return pageCollection;
            }, this.executor);
}
|
java
|
/**
 * Computes the q-gram bucket table and initializes {@code sufPtrMap} so every
 * suffix position holds a pointer into its q-gram bucket (this looks like the
 * setup phase of bucket-pointer-refinement suffix sorting -- confirm against
 * the enclosing class).
 *
 * @param q the q-gram length used for the initial bucketing
 * @return the bucket boundary array produced by {@code determineAll_Buckets_Sarray}
 */
private int[] determineAll_Buckets_Sarray_Sptrmap(int q) {
    int[] buckets = determineAll_Buckets_Sarray(q);
    int strLen = length;
    // 2*q+1 extra slots hold negative sentinels for positions past the end.
    sufPtrMap = new int[strLen + 2 * q + 1];
    /* computation of first hashvalue */
    // Rank of the first q-gram, read as a base-alphabetSize number.
    int alphabetSize = alphabet.size;
    int mappedUcharArray = 0;
    int tempPower = 1;
    int hashCode = 0;
    int i;
    for (i = q - 1; i >= 0; i--) {
        hashCode += seq[start + mappedUcharArray + i] * tempPower;
        tempPower *= alphabetSize;
    }
    // alphabetSize^(q-1): weight of the character rolling out of the window.
    int tempModulo = kbs_power_Ulong(alphabetSize, q - 1);
    mappedUcharArray += q;
    int j;
    for (j = 0; j < strLen - 1; j++) {
        // Point suffix j just below its q-gram bucket's upper boundary.
        sufPtrMap[j] = (buckets[hashCode + 1]) - 1;
        // Roll the q-gram hash forward by one character.
        hashCode -= (seq[start + mappedUcharArray - q]) * tempModulo;
        hashCode *= alphabetSize;
        hashCode += seq[start + mappedUcharArray];
        mappedUcharArray++;
    }
    sufPtrMap[j] = buckets[hashCode];
    /* set the values in sufPtrMap[strLen..strLen+2*d] to [-1, -2, ..., -2*d] */
    int beginPtr = -1;
    for (j = strLen; j <= strLen + 2 * q; j++) {
        sufPtrMap[j] = beginPtr--;
    }
    return buckets;
}
|
python
|
def init_node(cls, *args, **kwargs):
    """Initializes an ast node with the provided attributes.

    Python 2.6+ supports this in the node class initializers, but Python 2.5
    does not, so this is intended to be an equivalent.

    Args:
        cls: the ast node class to instantiate.
        *args: positional attribute values, matched to ``cls._fields`` in order.
        **kwargs: additional attributes to set on the node by name.

    Returns:
        The newly created node.
    """
    node = cls()
    for name, value in zip(cls._fields, args):
        setattr(node, name, value)
    # Bug fix: iterating a dict directly yields only its keys; .items() is
    # required to get (name, value) pairs.
    for name, value in kwargs.items():
        setattr(node, name, value)
    return node
|
java
|
/**
 * Builds the client-side channel pipeline: the shared message encoder, a
 * message decoder, and the client handler matching the credit-based
 * flow-control setting.
 *
 * @return the handlers in pipeline order
 */
public ChannelHandler[] getClientChannelHandlers() {
    final NetworkClientHandler clientHandler;
    if (creditBasedEnabled) {
        clientHandler = new CreditBasedPartitionRequestClientHandler();
    } else {
        clientHandler = new PartitionRequestClientHandler();
    }
    return new ChannelHandler[] {
            messageEncoder,
            new NettyMessage.NettyMessageDecoder(!creditBasedEnabled),
            clientHandler};
}
|
java
|
/**
 * Streams the classes from {@code classes} that descend from
 * {@code baseClass}, narrowed to the matching static type.
 *
 * @param classes   candidate classes to filter
 * @param baseClass the required ancestor type
 * @param <T>       the base type to narrow to
 * @return a stream of the descendant classes
 */
@SuppressWarnings("unchecked")
public static <T> Stream<Class<? extends T>> streamClasses(Collection<Class<?>> classes,
        Class<? extends T> baseClass) {
    // Filter first, then narrow; the cast is safe because the predicate only
    // admits descendants of baseClass.
    Stream<Class<?>> descendants = classes.stream()
            .filter(ClassPredicates.classIsDescendantOf(baseClass));
    return descendants.map(clazz -> (Class<T>) clazz);
}
|
python
|
def get_field(self, field, idx):
    """
    Return the field ``field`` of elements ``idx`` in the group

    :param field: field name
    :param idx: element idx (a scalar, or a sequence of indices)
    :return: value of the requested field for a scalar ``idx``, otherwise a
        list of values
    """
    # TODO: ensure idx is unique in this Group
    # Wrap a bare scalar index so the lookup below can treat both cases
    # uniformly; the single result is unwrapped again before returning.
    is_scalar = isinstance(idx, (int, float, str))
    indices = [idx] if is_scalar else idx
    values = [
        self.system.__dict__[self._idx_model[i]].get_field(field, idx=i)
        for i in indices
    ]
    return values[0] if is_scalar else values
|
python
|
def pop(self):
    """
    Removes the last traversal path node from this traversal path.
    """
    last_node = self.nodes.pop()
    # Keep the key index in sync with the node list.
    self.__keys.remove(last_node.key)
|
python
|
def _clean_options(method, provided_options):
    """Clean the given input options.

    This will make sure that all options are present, either with their default values or with the given values,
    and that no other options are present than those supported.

    Args:
        method (str): the method name
        provided_options (dict): the given options

    Returns:
        dict: the resulting options dictionary
    """
    provided_options = provided_options or {}
    default_options = get_minimizer_options(method)
    # Keep exactly the supported option names: the caller's value when given,
    # the default otherwise. Unsupported keys are silently dropped. (The
    # original else-branch re-read default_options[name], ignoring the loop's
    # `default` value -- equivalent but redundant.)
    return {name: provided_options.get(name, default)
            for name, default in default_options.items()}
|
python
|
def run_experiments(experiments,
                    search_alg=None,
                    scheduler=None,
                    with_server=False,
                    server_port=TuneServer.DEFAULT_PORT,
                    verbose=2,
                    resume=False,
                    queue_trials=False,
                    reuse_actors=False,
                    trial_executor=None,
                    raise_on_failed_trial=True):
    """Runs and blocks until all trials finish.

    Examples:
        >>> experiment_spec = Experiment("experiment", my_func)
        >>> run_experiments(experiments=experiment_spec)
        >>> experiment_spec = {"experiment": {"run": my_func}}
        >>> run_experiments(experiments=experiment_spec)
        >>> run_experiments(
        >>>     experiments=experiment_spec,
        >>>     scheduler=MedianStoppingRule(...))
        >>> run_experiments(
        >>>     experiments=experiment_spec,
        >>>     search_alg=SearchAlgorithm(),
        >>>     scheduler=MedianStoppingRule(...))

    Returns:
        List of Trial objects, holding data for each executed trial.
    """
    # Normalizing here is important: it schematizes the experiments and
    # performs the implicit registration.
    experiment_list = convert_to_experiment_list(experiments)
    all_trials = []
    for experiment in experiment_list:
        all_trials.extend(
            run(experiment,
                search_alg=search_alg,
                scheduler=scheduler,
                with_server=with_server,
                server_port=server_port,
                verbose=verbose,
                resume=resume,
                queue_trials=queue_trials,
                reuse_actors=reuse_actors,
                trial_executor=trial_executor,
                raise_on_failed_trial=raise_on_failed_trial))
    return all_trials
|
python
|
def remove_epoch(self, epoch_name):
    '''This function removes an epoch from your recording extractor.

    Parameters
    ----------
    epoch_name: str
        The name of the epoch to be removed

    Raises
    ------
    ValueError
        If ``epoch_name`` is not a string, or no epoch with that name exists.
    '''
    # Guard clauses replace the original nested conditionals; the redundant
    # `list(self._epochs.keys())` materialization is dropped (membership tests
    # work directly on the dict).
    if not isinstance(epoch_name, str):
        raise ValueError("epoch_name must be a string")
    if epoch_name not in self._epochs:
        raise ValueError("This epoch has not been added")
    del self._epochs[epoch_name]
|
java
|
/**
 * Renders the full stack trace of the given throwable as a string.
 *
 * @param t the throwable to format
 * @return the stack trace text, as produced by {@link Throwable#printStackTrace}
 */
public static String throwableToString(Throwable t)
{
    StringWriter buffer = new StringWriter();
    t.printStackTrace(new PrintWriter(buffer));
    return buffer.toString();
}
|
python
|
def create_resource(self, path, transaction):
    """
    Render a POST request.

    If a resource already exists at ``path`` it is edited instead; otherwise
    the new resource is attached under its longest existing ancestor, provided
    that ancestor allows children.

    :param path: the path of the request
    :param transaction: the transaction
    :return: the response
    """
    # All existing resource paths sharing the requested prefix.
    t = self._parent.root.with_prefix(path)
    max_len = 0
    imax = None
    for i in t:
        if i == path:
            # Resource already present
            return self.edit_resource(transaction, path)
        elif len(i) > max_len:
            # Track the longest (deepest) existing ancestor path.
            imax = i
            max_len = len(i)
    lp = path
    # NOTE(review): if no path matched the prefix, imax is still None here and
    # the lookup would fail -- presumably with_prefix always yields at least
    # the root; confirm against the tree implementation.
    parent_resource = self._parent.root[imax]
    if parent_resource.allow_children:
        return self.add_resource(transaction, parent_resource, lp)
    else:
        transaction.response.code = defines.Codes.METHOD_NOT_ALLOWED.number
        return transaction
|
java
|
/**
 * Retrieves all servers whose name matches the given prefix, optionally
 * restricted to one location, wrapped in the domain {@code Server} type.
 *
 * @param prefix       name prefix to search for
 * @param locationUUID optional location filter (may be null)
 * @return the matching servers as a set
 * @throws FlexiantException if the underlying resource query fails
 */
public Set<de.uniulm.omi.cloudiator.flexiant.client.domain.Server> getServers(
    final String prefix, @Nullable String locationUUID) throws FlexiantException {
    // Query raw server resources, then wrap each one in the domain type.
    return this.getResources(prefix, "resourceName", ResourceType.SERVER, Server.class, locationUUID)
        .stream()
        .map(resource -> new de.uniulm.omi.cloudiator.flexiant.client.domain.Server((Server) resource))
        .collect(Collectors.toSet());
}
|
java
|
/**
 * Looks up the {@link MsgPhrase} with the given name, lazily loading it into
 * the Infinispan name cache on a miss.
 *
 * @param _name name of the message phrase
 * @return the cached phrase; may be {@code null} if loading did not populate
 *         the entry -- TODO confirm loadMsgPhrase always caches
 * @throws EFapsException if loading the phrase fails
 */
public static MsgPhrase get(final String _name)
    throws EFapsException
{
    final Cache<String, MsgPhrase> cache = InfinispanCache.get().<String, MsgPhrase>getCache(MsgPhrase.NAMECACHE);
    // Populate the cache on first access of this name.
    if (!cache.containsKey(_name)) {
        MsgPhrase.loadMsgPhrase(_name);
    }
    return cache.get(_name);
}
|
java
|
/**
 * Serializes the given clustering service creation configuration to its XML
 * element representation.
 *
 * @param serviceCreationConfiguration the configuration to unparse
 * @return the resulting XML element
 */
@Override
public Element unparseServiceCreationConfiguration(final ServiceCreationConfiguration<ClusteringService> serviceCreationConfiguration) {
    // Delegate straight to the generic config unparser.
    return unparseConfig(serviceCreationConfiguration);
}
|
java
|
/**
 * Appends a human-readable report for one instrumented method to
 * {@code output}: the method's signature details followed by one section per
 * continuation point, including its saved locals and operands.
 *
 * @param methodNode ASM node of the method being described
 * @param attrs      instrumentation attributes for that method
 * @param output     builder the report is appended to
 * @throws NullPointerException if any argument is null
 */
public void detail(MethodNode methodNode, MethodAttributes attrs, StringBuilder output) {
    Validate.notNull(methodNode);
    Validate.notNull(attrs);
    Validate.notNull(output);
    int methodId = attrs.getSignature().getMethodId();
    // Header: identifying information for the method.
    output.append("Class Name: ").append(attrs.getSignature().getClassName().replace('/', '.')).append('\n');
    output.append("Method Name: ").append(attrs.getSignature().getMethodName()).append('\n');
    output.append("Method Params: ").append(attrs.getSignature().getMethodDescriptor()).append('\n');
    output.append("Method Return: ").append(attrs.getSignature().getReturnType()).append('\n');
    output.append("Method ID: ").append(methodId).append('\n');
    output.append("------------------------------------\n");
    UnmodifiableList<ContinuationPoint> cps = attrs.getContinuationPoints();
    for (int i = 0; i < cps.size(); i++) {
        ContinuationPoint cp = cps.get(i);
        // -1 marks a continuation point with no recorded source line.
        int line = cp.getLineNumber() == null ? -1 : cp.getLineNumber();
        String header = String.format("Continuation Point ID: %-4d Line: %-4d Type: %s",
                i,
                line,
                cp.getClass().getSimpleName());
        output.append(header).append('\n');
        // Check out PackStateGenerators class for how things are organized. Brief overview follows...
        // container[0] has local variables that are bytes/shorts/ints
        // container[1] has local variables that are floats
        // container[2] has local variables that are longs
        // container[3] has local variables that are doubles
        // container[4] has local variables that are Objects
        // container[5] has operands that are bytes/shorts/ints
        // container[6] has operands that are floats
        // container[7] has operands that are longs
        // container[8] has operands that are doubles
        // container[9] has operands that are Objects
        detailLocals(cp, methodNode, output);
        detailOperands(cp, output);
        output.append('\n');
    }
    output.append('\n');
}
|
java
|
/**
 * Builds a BioJava sequence object from the SEQRES sequence of this entity.
 *
 * @return the protein sequence, or {@code null} if the SEQRES string contains
 *         a compound that cannot be parsed (the error is logged)
 */
@Override
public Sequence<?> getBJSequence() {
    String seq = getSeqResSequence();
    AminoAcidCompound? s = null; // (placeholder -- see below)
    ...
|
java
|
/**
 * Removes the given entry from this linked list.
 *
 * @param removePointer the entry to remove; must currently be linked into
 *                      this list
 * @return the entry that was removed
 * @throws SIErrorException if the entry is not a member of this list; the
 *         error is FFDC-logged and traced before being thrown
 */
public Entry remove(Entry removePointer)
{
    if (tc.isEntryEnabled())
        SibTr.entry(tc, "remove", new Object[] { removePointer });
    Entry removedEntry = null;
    //check that the entry to be removed is not null and is in this list
    if(contains(removePointer))
    {
        //call the internal unsynchronized remove method on the entry to be removed.
        removedEntry = removePointer.remove();
    }
    else //if the entry is not found in this list, throw a runtime exception
    {
        SIErrorException e = new SIErrorException(
            nls.getFormattedMessage(
                "INTERNAL_MESSAGING_ERROR_CWSIP0001",
                new Object[] {
                    "com.ibm.ws.sib.processor.utils.linkedlist.LinkedList",
                    "1:291:1.3" },
                null));
        FFDCFilter.processException(
            e,
            "com.ibm.ws.sib.processor.utils.linkedlist.LinkedList.remove",
            "1:297:1.3",
            this);
        SibTr.exception(tc, e);
        SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0001",
            new Object[] {
                "com.ibm.ws.sib.processor.utils.linkedlist.LinkedList",
                "1:304:1.3" });
        if (tc.isEntryEnabled())
            SibTr.exit(tc, "remove", e);
        throw e;
    }
    if (tc.isEntryEnabled())
        SibTr.exit(tc, "remove", removedEntry);
    //return the object which was removed
    return removedEntry;
}
|
java
|
/**
 * Serializes this template as JSON, wrapped in a top-level {@code "template"}
 * field, and flushes the writer.
 *
 * @param writer destination writer; flushed but not closed by this method
 * @throws IOException if writing fails
 */
public void writeTo(Writer writer) throws IOException {
    JsonSerializer.write(Json.jObject("template", asJson()), writer);
    writer.flush();
}
|
java
|
/**
 * Invokes {@code method} on {@code object} reflectively, unwrapping any
 * {@link InvocationTargetException} so the target method's original exception
 * propagates to the caller.
 *
 * @param object receiver instance ({@code null} for static methods)
 * @param method the method to invoke
 * @param args   invocation arguments
 * @return the invoked method's return value
 * @throws Throwable whatever the invoked method itself threw
 */
public static Object invokeMethod(Object object, Method method, Object... args) throws Throwable {
    try {
        return method.invoke(object, args);
    } catch (InvocationTargetException wrapped) {
        // Re-throw the underlying cause rather than the reflective wrapper.
        throw wrapped.getTargetException();
    }
}
|
python
|
def parameters(self):
    """
    Returns the list of currently set search parameters.

    :return: the list of AbstractSearchParameter objects
    :rtype: list
    """
    raw_array = JavaArray(javabridge.call(
        self.jobject, "getParameters",
        "()[Lweka/core/setupgenerator/AbstractParameter;"))
    # Wrap each underlying Java object in its Python-side wrapper class.
    return [AbstractParameter(jobject=item.jobject) for item in raw_array]
|
java
|
/**
 * Invokes {@code fun} as a constructor, i.e. {@code new fun(args...)}.
 *
 * @param fun   the value to construct with; must be a {@link Function}
 * @param cx    the current context
 * @param scope the scope in which construction happens
 * @param args  constructor arguments
 * @return the newly constructed object
 */
public static Scriptable newObject(Object fun, Context cx,
                                   Scriptable scope, Object[] args)
{
    if (fun instanceof Function) {
        return ((Function) fun).construct(cx, scope, args);
    }
    // Not callable as a constructor.
    throw notFunctionError(fun);
}
|
java
|
/**
 * Returns the {@code respect-binding} child element, creating it first if it
 * does not exist yet.
 *
 * @return the wrapper for the (possibly newly created) child node
 */
public RespectBindingType<PortComponentRefType<T>> getOrCreateRespectBinding()
{
    // Reuse the existing child node when present; otherwise create it.
    final Node respectBindingNode = childNode.getOrCreate("respect-binding");
    return new RespectBindingTypeImpl<PortComponentRefType<T>>(
            this, "respect-binding", childNode, respectBindingNode);
}
|
java
|
/**
 * Banded semi-local alignment of {@code seq2} against {@code seq1}, anchored
 * at the left edge (both subsequences start at their offsets).
 *
 * Phase 1 fills a banded dynamic-programming matrix of width {@code width},
 * stopping early once a row's best score drops more than {@code stopPenalty}
 * below the score of a perfect match of the same length. Phase 2 traces back
 * from the best-scoring cell, appending the corresponding substitution,
 * deletion and insertion events to {@code mutations}.
 *
 * @param scoring     linear-gap scoring scheme
 * @param seq1        first (reference) sequence
 * @param seq2        second (query) sequence
 * @param offset1     start position in seq1
 * @param length1     number of seq1 letters considered
 * @param offset2     start position in seq2
 * @param length2     number of seq2 letters considered
 * @param width       band width of the DP matrix
 * @param stopPenalty early-termination score drop threshold (negative)
 * @param mutations   builder receiving the alignment's mutation events
 * @param cachedArray scratch storage backing the banded matrix
 * @return end coordinates (inclusive) of the alignment in each sequence and
 *         its score
 * @throws IllegalArgumentException if any offset/length is negative
 * @throws RuntimeException if the traceback cannot reproduce a matrix step
 *         (indicates an internal inconsistency)
 */
public static BandedSemiLocalResult alignSemiLocalLeft0(LinearGapAlignmentScoring scoring, NucleotideSequence seq1, NucleotideSequence seq2,
                                                        int offset1, int length1, int offset2, int length2,
                                                        int width, int stopPenalty, MutationsBuilder<NucleotideSequence> mutations,
                                                        CachedIntArray cachedArray) {
    if(offset1 < 0 || length1 < 0 || offset2 < 0 || length2 < 0)
        throw new IllegalArgumentException();
    int size1 = length1 + 1,
            size2 = length2 + 1;
    // Score of one exact match; used for the early-stop criterion below.
    int matchReward = scoring.getScore((byte) 0, (byte) 0);
    BandedMatrix matrix = new BandedMatrix(cachedArray, size1, size2, width);
    int i, j;
    // Initialize the first row and column inside the band with gap penalties.
    for (i = matrix.getRowFactor() - matrix.getColumnDelta(); i > 0; --i)
        matrix.set(0, i, scoring.getGapPenalty() * i);
    for (i = matrix.getColumnDelta(); i > 0; --i)
        matrix.set(i, 0, scoring.getGapPenalty() * i);
    matrix.set(0, 0, 0);
    int match, delete, insert, to;
    int max = 0;
    int iStop = 0, jStop = 0;
    int rowMax;
    // Forward DP fill, restricted to the band.
    for (i = 0; i < length1; ++i) {
        to = Math.min(i + matrix.getRowFactor() - matrix.getColumnDelta() + 1, size2 - 1);
        rowMax = Integer.MIN_VALUE;
        for (j = Math.max(0, i - matrix.getColumnDelta()); j < to; ++j) {
            match = matrix.get(i, j) +
                    scoring.getScore(seq1.codeAt(offset1 + i), seq2.codeAt(offset2 + j));
            delete = matrix.get(i, j + 1) + scoring.getGapPenalty();
            insert = matrix.get(i + 1, j) + scoring.getGapPenalty();
            matrix.set(i + 1, j + 1, match = Math.max(match, Math.max(delete, insert)));
            // Track the global best cell for the traceback start.
            if (max < match) {
                iStop = i + 1;
                jStop = j + 1;
                max = match;
            }
            rowMax = Math.max(rowMax, match);
        }
        // Stop early once this row's best falls too far behind a perfect match.
        if (rowMax - i * matchReward < stopPenalty)
            break;
    }
    int fromL = mutations.size();
    // Traceback from the best cell, emitting mutations in reverse order.
    i = iStop - 1;
    j = jStop - 1;
    byte c1, c2;
    while (i >= 0 || j >= 0) {
        if (i >= 0 && j >= 0 &&
                matrix.get(i + 1, j + 1) == matrix.get(i, j) +
                        scoring.getScore(c1 = seq1.codeAt(offset1 + i),
                                c2 = seq2.codeAt(offset2 + j))) {
            // Diagonal step: match or substitution.
            if (c1 != c2)
                mutations.appendSubstitution(offset1 + i, c1, c2);
            --i;
            --j;
        } else if (i >= 0 &&
                matrix.get(i + 1, j + 1) ==
                        matrix.get(i, j + 1) + scoring.getGapPenalty()) {
            // Vertical step: deletion from seq1.
            mutations.appendDeletion(offset1 + i, seq1.codeAt(offset1 + i));
            --i;
        } else if (j >= 0 &&
                matrix.get(i + 1, j + 1) ==
                        matrix.get(i + 1, j) + scoring.getGapPenalty()) {
            // Horizontal step: insertion of a seq2 letter.
            mutations.appendInsertion(offset1 + i + 1, seq2.codeAt(offset2 + j));
            --j;
        } else
            throw new RuntimeException();
    }
    // Mutations were appended backwards; restore forward order.
    mutations.reverseRange(fromL, mutations.size());
    return new BandedSemiLocalResult(offset1 + iStop - 1, offset2 + jStop - 1, max);
}
|
java
|
/**
 * Reads the serialized objects in {@code file} as a stream, using the default
 * buffer size.
 *
 * @param file source file containing serialized objects
 * @param <T>  element type
 * @return a Flowable emitting each deserialized object
 */
public static <T extends Serializable> Flowable<T> read(final File file) {
    return read(file, DEFAULT_BUFFER_SIZE);
}
|
java
|
/**
 * Emits a code mining (inline annotation) with the inferred return type for
 * every action in the resource that has no explicit return type.
 *
 * The annotation is placed after the action's closing parameter parenthesis
 * when found, otherwise after the action's name.
 *
 * @param resource the Xtext resource being mined
 * @param acceptor sink receiving the created code minings
 */
@SuppressWarnings("checkstyle:npathcomplexity")
private void createImplicitActionReturnType(XtextResource resource, IAcceptor<? super ICodeMining> acceptor) {
    final List<XtendFunction> actions = EcoreUtil2.eAllOfType(resource.getContents().get(0), XtendFunction.class);
    for (final XtendFunction action : actions) {
        // inline annotation only for methods with no return type
        if (action.getReturnType() != null) {
            continue;
        }
        // get return type name from operation
        final JvmOperation inferredOperation = (JvmOperation) this.jvmModelAssocitions.getPrimaryJvmElement(action);
        if (inferredOperation == null || inferredOperation.getReturnType() == null) {
            continue;
        }
        // find document offset for inline annotation
        final ICompositeNode node = NodeModelUtils.findActualNodeFor(action);
        final Keyword parenthesis = this.grammar.getAOPMemberAccess().getRightParenthesisKeyword_2_5_6_2();
        final Assignment fctname = this.grammar.getAOPMemberAccess().getNameAssignment_2_5_5();
        int offsetFctname = -1;
        int offsetParenthesis = -1;
        // Walk the parse tree for the end offsets of the name and the ')'.
        for (Iterator<INode> it = node.getAsTreeIterable().iterator(); it.hasNext();) {
            final INode child = it.next();
            if (child != node) {
                final EObject grammarElement = child.getGrammarElement();
                if (grammarElement instanceof RuleCall) {
                    if (fctname.equals(grammarElement.eContainer())) {
                        offsetFctname = child.getTotalEndOffset();
                    }
                } else if (parenthesis.equals(grammarElement)) {
                    offsetParenthesis = child.getTotalEndOffset();
                    break;
                }
            }
        }
        // Prefer the parenthesis position; fall back to the name position.
        int offset = -1;
        if (offsetParenthesis >= 0) {
            offset = offsetParenthesis;
        } else if (offsetFctname >= 0) {
            offset = offsetFctname;
        }
        if (offset >= 0) {
            final String returnType = inferredOperation.getReturnType().getSimpleName();
            final String text = " " + this.keywords.getColonKeyword() + " " + returnType; //$NON-NLS-1$ //$NON-NLS-2$
            acceptor.accept(createNewLineContentCodeMining(offset, text));
        }
    }
}
|
java
|
/**
 * Loads the meta bean with the given id from {@code tableName}.
 *
 * @param tableName table to query; concatenated into the SQL, so it must
 *                  never come from untrusted input (the id itself is bound
 *                  as a prepared-statement parameter)
 * @param id        primary key of the row to load
 * @return the populated {@link MicroMetaBean}
 */
public MicroMetaBean getMetaBeanById(String tableName, String id) {
    JdbcTemplate jdbcTemplate = getMicroJdbcTemplate();
    String sql = "select * from " + tableName + " where id=?";
    logger.debug(sql);
    logger.debug("["+id+"]");
    // Typed map replaces the original raw Map; queryForMap returns
    // Map<String, Object>.
    Map<String, Object> retMap = jdbcTemplate.queryForMap(sql, id);
    MicroMetaBean metaBean = new MicroMetaBean();
    metaBean.setId((String) retMap.get("id"));
    metaBean.setMeta_content((String) retMap.get("meta_content"));
    metaBean.setMeta_key((String) retMap.get("meta_key"));
    metaBean.setMeta_name((String) retMap.get("meta_name"));
    metaBean.setMeta_type((String) retMap.get("meta_type"));
    metaBean.setRemark((String) retMap.get("remark"));
    metaBean.setCreate_time((Date) retMap.get("create_time"));
    metaBean.setUpdate_time((Date) retMap.get("update_time"));
    return metaBean;
}
|
python
|
def function_table(self, function_id=None):
    """Fetch and parse the function table.

    Returns:
        A dictionary that maps function IDs to information about the
        function.
    """
    self._check_connected()
    # Every registered function lives under the GCS function prefix.
    keys = self.redis_client.keys(ray.gcs_utils.FUNCTION_PREFIX + "*")
    results = {}
    for key in keys:
        info = self.redis_client.hgetall(key)
        function_id_hex = binary_to_hex(info[b"function_id"])
        results[function_id_hex] = {
            "DriverID": binary_to_hex(info[b"driver_id"]),
            "Module": decode(info[b"module"]),
            "Name": decode(info[b"name"]),
        }
    return results
|
java
|
/**
 * Resolves a remote object instance for the given JNDI name by consulting
 * each registered {@code RemoteJavaColonNamingHelper} in turn, under the
 * component metadata context of the named application/module/component.
 *
 * @return the first non-null instance a helper resolves, or {@code null} if
 *         none does
 * @throws NamingException if the namespace name is invalid or lookup fails
 * @throws RemoteException on remote communication failures
 */
@Override
public RemoteObjectInstance getRemoteObjectInstance(String appName, String moduleName, String compName, String namespaceString,
                                                    String jndiName) throws NamingException, RemoteException {
    NamingConstants.JavaColonNamespace namespace = NamingConstants.JavaColonNamespace.fromName(namespaceString);
    ComponentMetaData cmd = getCMD(appName, moduleName, compName, namespace);
    RemoteObjectInstance roi = null;
    try {
        // Lookups must run within the target component's metadata context.
        ComponentMetaDataAccessorImpl.getComponentMetaDataAccessor().beginContext(cmd);
        Iterator<RemoteJavaColonNamingHelper> remoteJCNHelpers = remoteJavaColonNamingHelpers.getServices();
        while (remoteJCNHelpers.hasNext()) {
            RemoteJavaColonNamingHelper helper = remoteJCNHelpers.next();
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "getRemoteObjectInstance - checking " + helper);
            }
            roi = helper.getRemoteObjectInstance(namespace, jndiName);
            // First helper that resolves the name wins.
            if (roi != null)
                break;
        }
    } finally {
        ComponentMetaDataAccessorImpl.getComponentMetaDataAccessor().endContext();
    }
    return roi;
}
|
python
|
def StringV(value, length=None, **kwargs):
    """
    Create a new Concrete string (analogous to z3.StringVal())

    :param value: The constant value of the concrete string
    :param length: Optional total length; defaults to ``len(value)`` and must
        not be smaller than it
    :returns: The String object representing the concrete string
    """
    effective_length = len(value) if length is None else length
    if effective_length < len(value):
        raise ValueError("Can't make a concrete string value longer than the specified length!")
    return String("StringV", (value, len(value)), length=effective_length, **kwargs)
|
python
|
def norm(A):
    """computes the L2-norm along axis 1 (e.g. genes or embedding dimensions) equivalent to np.linalg.norm(A, axis=1)
    """
    # Sparse matrices lack einsum support; square element-wise and reduce.
    if issparse(A):
        return np.sqrt(A.multiply(A).sum(1).A1)
    # Dense case: einsum computes the per-row dot product without a copy.
    return np.sqrt(np.einsum('ij, ij -> i', A, A))
|
python
|
def running_objects(self):
    """Return the objects associated with this workflow."""
    running = []
    for obj in self.database_objects:
        # Keep only objects whose status is RUNNING.
        if obj.status in [obj.known_statuses.RUNNING]:
            running.append(obj)
    return running
|
python
|
def write(self, chunk):
    """WSGI callable to write unbuffered data to the client.

    This method is also used internally by start_response (to write
    data from the iterable returned by the WSGI application).
    """
    if not self.started_response:
        raise AssertionError("WSGI write called before start_response.")
    request = self.req
    # Response headers are emitted lazily, ahead of the first body chunk.
    if not request.sent_headers:
        request.sent_headers = True
        request.send_headers()
    request.write(chunk)
|
python
|
def set_mode(self, mode, custom_mode=0, custom_sub_mode=0):
    '''set arbitrary flight mode'''
    # PX4 autopilots use their own mode encoding; everything else goes
    # through the APM-style mode change.
    autopilot = self.field('HEARTBEAT', 'autopilot', None)
    if autopilot == mavlink.MAV_AUTOPILOT_PX4:
        self.set_mode_px4(mode, custom_mode, custom_sub_mode)
    else:
        self.set_mode_apm(mode)
|
python
|
def get(self, url, params=None):
    """
    Initiate a GET request
    """
    response = self.session.get(url, params=params)
    # Responses from this endpoint are not expected to be JSON.
    return self._response_parser(response, expect_json=False)
|
java
|
/**
 * Boxes {@code value} and stores it under {@code name} in {@code map} via
 * {@link #setValueForIn}.
 *
 * @param value the primitive double value to store
 * @param name  the key to store the value under
 * @param map   the destination map
 */
public void setDoubleValueForIn(double value, String name, Map<String, Object> map) {
    setValueForIn(Double.valueOf(value), name, map);
}
|
java
|
/**
 * Parses an Excel-style custom format pattern and builds a
 * {@link CustomFormatter} with one {@link ConditionFormatter} per
 * {@code ';'}-separated section.
 *
 * Sections without an explicit condition get the Excel-implied operator
 * based on how many (non-text) sections exist: positive / negative / zero /
 * everything else.
 *
 * @param pattern the custom format pattern (at most four sections)
 * @return the formatter; the shared default formatter for an empty pattern
 *         or the literal {@code "General"}
 * @throws CustomFormatterParseException if the pattern has more than four sections
 */
public CustomFormatter create(final String pattern) {
    final CustomFormatTokenizer tokenizer = new CustomFormatTokenizer();
    final TokenStore allStore = tokenizer.parse(pattern);
    if(allStore.getTokens().isEmpty()) {
        // Empty pattern: use the standard formatter.
        return CustomFormatter.DEFAULT_FORMATTER;
    } else if(pattern.equalsIgnoreCase("General")) {
        return CustomFormatter.DEFAULT_FORMATTER;
    }
    // Split the pattern into sections and build one formatter per section.
    final List<TokenStore> sections = allStore.split(Token.SYMBOL_SEMI_COLON);
    if(sections.size() > 4) {
        // (Typo "secitions" in the original message fixed.)
        throw new CustomFormatterParseException(pattern,
                String.format("section size over 4. but '%s' number of %d sections.", pattern, sections.size()));
    }
    final CustomFormatter formatter = new CustomFormatter(pattern);
    boolean containsTextFormatter = false;
    for(TokenStore section : sections) {
        final ConditionFormatter conditionFormatter;
        if(textFormatterFactory.isTextPattern(section)) {
            conditionFormatter = textFormatterFactory.create(section);
            containsTextFormatter = true;
        } else if(dateFormatterFactory.isDatePattern(section)) {
            conditionFormatter = dateFormatterFactory.create(section);
        } else {
            // Number patterns and unrecognized sections are both handled by
            // the number formatter; the original explicit isNumberPattern
            // branch produced the same result as this fallback.
            conditionFormatter = numberFormatterFactory.create(section);
        }
        formatter.addConditionFormatter(conditionFormatter);
    }
    // Assign the implied condition operators.
    int sectionSize = sections.size();
    if(containsTextFormatter) {
        // A text section does not count toward the numeric section total.
        sectionSize--;
    }
    // Tracks whether the first section carries an explicit condition.
    boolean hasConditionFirst = false;
    for(int i=0; i < sectionSize; i++) {
        final ConditionFormatter conditionFormatter = formatter.getConditionFormatters().get(i);
        if(conditionFormatter.getOperator() != null) {
            if(i == 0) {
                hasConditionFirst = true;
            }
            continue;
        }
        if(sectionSize <= 1) {
            // A single section applies to every value.
            conditionFormatter.setOperator(ConditionOperator.ALL);
        } else if(sectionSize == 2) {
            if(i==0) {
                // First section: zero and positive numbers.
                conditionFormatter.setOperator(ConditionOperator.NON_NEGATIVE);
            } else if(i==1) {
                // Second section: the remaining values.
                if(hasConditionFirst) {
                    conditionFormatter.setOperator(ConditionOperator.ALL);
                } else {
                    conditionFormatter.setOperator(ConditionOperator.NEGATIVE);
                }
            }
        } else if(sectionSize == 3) {
            if(i==0) {
                // Positive numbers.
                conditionFormatter.setOperator(ConditionOperator.POSITIVE);
            } else if(i==1) {
                // Negative numbers.
                conditionFormatter.setOperator(ConditionOperator.NEGATIVE);
            } else {
                // Everything else.
                conditionFormatter.setOperator(ConditionOperator.ALL);
            }
        } else if(sectionSize == 4) {
            if(i==0) {
                // Positive numbers.
                conditionFormatter.setOperator(ConditionOperator.POSITIVE);
            } else if(i==1) {
                // Negative numbers.
                conditionFormatter.setOperator(ConditionOperator.NEGATIVE);
            } else if(i==2) {
                // Zero.
                conditionFormatter.setOperator(ConditionOperator.ZERO);
            } else {
                // Everything else.
                conditionFormatter.setOperator(ConditionOperator.ALL);
            }
        } else {
            // Defensive: unreachable given the size check above.
            throw new CustomFormatterParseException(pattern,
                    String.format("section size over 4. but '%s' number of %d sections.", pattern, sections.size()));
        }
    }
    return formatter;
}
|
java
|
/**
 * Demonstrates mapping an Order onto its DTO purely via ModelMapper's
 * implicit property matching, then verifies the result.
 */
static void mapAutomatically() {
    Order sourceOrder = createOrder();
    OrderDTO mapped = new ModelMapper().map(sourceOrder, OrderDTO.class);
    assertOrdersEqual(sourceOrder, mapped);
}
|
java
|
/**
 * Converts an external annotation definition to the common model type.
 * Null-in, null-out.
 *
 * @param t the external definition, or {@code null}
 * @return the converted definition, or {@code null} if {@code t} was null
 */
@Override
public AnnotationDefinition convert(XBELExternalAnnotationDefinition t) {
    if (t == null) {
        return null;
    }
    AnnotationDefinition definition = CommonModelFactory.getInstance()
            .createAnnotationDefinition(t.getId());
    definition.setURL(t.getUrl());
    return definition;
}
|
python
|
def corethreads(self):
    """
    Create a .cds file consisting of fasta records of CDS features for each strain
    """
    printtime('Creating CDS files and finding core genes', self.start)
    # Create and start one daemon worker thread per CPU
    for i in range(self.cpus):
        # Send the threads to the appropriate destination function
        threads = Thread(target=self.coregroups, args=())
        # Daemonize so worker threads die with the main thread.
        # NOTE(review): Thread.setDaemon is deprecated; `threads.daemon = True`
        # is the modern spelling.
        threads.setDaemon(True)
        # Start the threading
        threads.start()
    for sample in self.runmetadata.samples:
        # Define the name of the file to store the CDS nucleotide sequences
        sample.prokka.cds = os.path.join(sample.prokka.outputdir, '{}.cds'.format(sample.name))
        # Queue the sample for processing by the worker threads.
        self.corequeue.put(sample)
    # Block until every queued sample has been processed.
    self.corequeue.join()
    # Write the core .fasta files for each gene
    self.corewriter()
|
python
|
def _clean(self, t, capitalize=None):
"""Convert to normalized unicode and strip trailing full stops."""
if self._from_bibtex:
t = latex_to_unicode(t, capitalize=capitalize)
t = ' '.join([el.rstrip('.') if el.count('.') == 1 else el for el in t.split()])
return t
|
python
|
def order_quote(self, quote_id, extra):
    """Places an order using a quote

    ::

        extras = {
            'hardware': {'hostname': 'test', 'domain': 'testing.com'},
            'quantity': 2
        }
        manager = ordering.OrderingManager(env.client)
        result = manager.order_quote(12345, extras)

    :param int quote_id: ID for the target quote
    :param dictionary extra: Overrides for the defaults of SoftLayer_Container_Product_Order
    :param int quantity: Quantity to override default
    """
    # Build the order container from the quote plus caller overrides, then
    # place the order against that quote.
    order_container = self.generate_order_template(quote_id, extra)
    return self.client.call(
        'SoftLayer_Billing_Order_Quote', 'placeOrder',
        order_container, id=quote_id)
|
java
|
/**
 * Registers a factory that produces validator tag handlers of {@code type}
 * for the given tag {@code name}.
 *
 * @param name        tag name the validator is registered under
 * @param validatorId JSF validator id passed to the created handler factory
 * @param type        handler class instantiated for the tag
 */
protected final void addValidator(String name, String validatorId, Class<? extends TagHandler> type)
{
    _factories.put(name, new UserValidatorHandlerFactory(validatorId, type));
}
|
python
|
def prune_directory(self):
    """Delete any objects that can be loaded and are expired according to
    the current lifetime setting.

    A file will be deleted if the following conditions are met:

    - The file extension matches :py:meth:`bucketcache.backends.Backend.file_extension`
    - The object can be loaded by the configured backend.
    - The object's expiration date has passed.

    Returns:
        File size and number of files deleted.
    :rtype: :py:class:`~bucketcache.utilities.PrunedFilesInfo`

    .. note::

        For any buckets that share directories, ``prune_directory`` will
        affect files saved with both, if they use the same backend class.
        This is not destructive, because only files that have expired
        according to the lifetime of the original bucket are deleted.
    """
    glob = '*.{ext}'.format(ext=self.backend.file_extension)
    totalsize = 0
    totalnum = 0
    for f in self._path.glob(glob):
        filesize = f.stat().st_size
        # The file stem is the cache key hash.
        key_hash = f.stem
        in_cache = key_hash in self._cache
        try:
            self._get_obj_from_hash(key_hash)
        except KeyExpirationError:
            # File has been deleted by `_get_obj_from_hash`
            totalsize += filesize
            totalnum += 1
        except KeyInvalidError:
            # Not a cache object this backend understands; leave it alone.
            pass
        except Exception:
            # Deliberate re-raise: anything unexpected must surface.
            raise
        else:
            # Loaded fine and not expired. If the object was not already in
            # the in-memory cache before the load, evict the entry that
            # `_get_obj_from_hash` just added so pruning does not grow the
            # cache -- NOTE(review): presumed intent of `if not in_cache`;
            # confirm against `_get_obj_from_hash`.
            if not in_cache:
                del self._cache[key_hash]
    return PrunedFilesInfo(size=totalsize, num=totalnum)
|
java
|
/**
 * Deletes the given table index row and, beforehand, any geometry index
 * rows that reference its table.
 *
 * @param tableIndex the row to delete; a null argument is a no-op
 * @return the number of TableIndex rows deleted (0 or 1)
 * @throws SQLException on database errors
 */
public int deleteCascade(TableIndex tableIndex) throws SQLException {
    int count = 0;
    if (tableIndex != null) {
        // Delete Geometry Indices
        GeometryIndexDao geometryIndexDao = getGeometryIndexDao();
        if (geometryIndexDao.isTableExists()) {
            DeleteBuilder<GeometryIndex, GeometryIndexKey> db = geometryIndexDao
                    .deleteBuilder();
            db.where().eq(GeometryIndex.COLUMN_TABLE_NAME,
                    tableIndex.getTableName());
            PreparedDelete<GeometryIndex> deleteQuery = db.prepare();
            geometryIndexDao.delete(deleteQuery);
        }
        count = delete(tableIndex);
    }
    return count;
}
|
java
|
/**
 * Swaps the contents of two rows in the backing items matrix, cell by cell.
 *
 * @param rowIndex   index of the first row
 * @param rowToIndex index of the second row
 */
void switchTwoRows(int rowIndex, int rowToIndex) {
    // Iterate over the columns of the row being swapped. The original code
    // used getItems().length (the number of rows) as the bound, which is only
    // correct for a square matrix.
    for (int col = 0; col < getItems()[rowIndex].length; col++) {
        Object cellData = getItems()[rowToIndex][col];
        getItems()[rowToIndex][col] = getItems()[rowIndex][col];
        getItems()[rowIndex][col] = cellData;
    }
}
|
python
|
def damping_maintain_sign(x, step, damping=1.0, factor=0.5):
    '''Damping function which will maintain the sign of the variable being
    manipulated. If the step puts it at the other sign, the step is replaced
    by one that moves `x` toward zero by the multiple `factor` of `x`; i.e.
    if `factor` is 1, the new value of `x` will be 0 exactly.
    The provided `damping` is applied as well.

    Parameters
    ----------
    x : float
        Previous value in iteration, [-]
    step : float
        Change in `x`, [-]
    damping : float, optional
        The damping factor to be applied always, [-]
    factor : float, optional
        If the calculated step changes sign, this factor will be used instead
        of the step, [-]

    Returns
    -------
    x_new : float
        The new value in the iteration, [-]

    Notes
    -----

    Examples
    --------
    >>> damping_maintain_sign(100, -200, factor=.5)
    50.0
    '''
    is_positive = x > 0.0
    candidate = x + step
    # Would the raw step carry x across zero?
    crosses_zero = candidate < 0 if is_positive else candidate > 0.0
    if crosses_zero:
        # Replace the step so x moves toward zero without changing sign.
        step = -factor * x
    return x + step * damping
|
java
|
/**
 * Marshalls the fields of the given {@link BillingRecord} to the protocol
 * stream.
 *
 * @param billingRecord      the record to marshall; must not be null
 * @param protocolMarshaller destination marshaller
 * @throws SdkClientException if the record is null or marshalling fails
 *         (the original exception is wrapped as the cause)
 */
public void marshall(BillingRecord billingRecord, ProtocolMarshaller protocolMarshaller) {
    if (billingRecord == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(billingRecord.getDomainName(), DOMAINNAME_BINDING);
        protocolMarshaller.marshall(billingRecord.getOperation(), OPERATION_BINDING);
        protocolMarshaller.marshall(billingRecord.getInvoiceId(), INVOICEID_BINDING);
        protocolMarshaller.marshall(billingRecord.getBillDate(), BILLDATE_BINDING);
        protocolMarshaller.marshall(billingRecord.getPrice(), PRICE_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Returns the palette index for the given packed RGB color by descending the
 * octree from the root, consuming one bit of each color channel per level.
 *
 * @param rgb color packed as 0xRRGGBB
 * @return the index stored at the deepest matching node; 0 if the walk runs
 *         past MAX_LEVEL without hitting a leaf or missing child
 */
public int getIndexForColor(int rgb) {
    int red = (rgb >> 16) & 0xff;
    int green = (rgb >> 8) & 0xff;
    int blue = rgb & 0xff;
    OctTreeNode node = root;
    for (int level = 0; level <= MAX_LEVEL; level++) {
        OctTreeNode child;
        // Child slot is chosen from bit `level` of each channel (r,g,b -> 4,2,1).
        int bit = 0x80 >> level;
        int index = 0;
        if ((red & bit) != 0)
            index += 4;
        if ((green & bit) != 0)
            index += 2;
        if ((blue & bit) != 0)
            index += 1;
        child = node.leaf[index];
        if (child == null)
            return node.index;
        else if (child.isLeaf)
            return child.index;
        else
            node = child;
    }
    // NOTE(review): reaching this point indicates a malformed tree; it is
    // reported to stdout rather than through a logger.
    System.out.println("getIndexForColor failed");
    return 0;
}
|
java
|
/**
 * Builds a {@link ProteinSequence} from the given amino-acid string.
 *
 * @param str the sequence string
 * @return the sequence, or {@code null} if the string contains an unknown
 *         compound (the failure is logged)
 */
private static ProteinSequence getProteinSequence(String str) {
    try {
        return new ProteinSequence(str);
    } catch (CompoundNotFoundException e) {
        logger.error("Unexpected error when creating ProteinSequence",e);
        return null;
    }
}
|
python
|
def write(self, outfile, rows):
    """
    Write a PNG image to the output file.

    `rows` should be an iterable that yields each row
    (each row is a sequence of values).
    The rows should be the rows of the original image,
    so there should be ``self.height`` rows of
    ``self.width * self.planes`` values.
    If `interlace` is specified (when creating the instance),
    then an interlaced PNG file will be written.
    Supply the rows in the normal image order;
    the interlacing is carried out internally.

    .. note ::

        Interlacing requires the entire image to be in working memory.
    """
    # Values per row
    vpr = self.width * self.planes

    def check_rows(rows):
        """
        Yield each row in rows,
        but check each row first (for correct width).
        """
        for i, row in enumerate(rows):
            try:
                wrong_length = len(row) != vpr
            except TypeError:
                # When using an itertools.ichain object or
                # other generator not supporting __len__,
                # we set this to False to skip the check.
                wrong_length = False
            if wrong_length:
                # Note: row numbers start at 0.
                raise ProtocolError(
                    "Expected %d values but got %d value, in row %d" %
                    (vpr, len(row), i))
            yield row
    if self.interlace:
        # Interlacing buffers the whole image: pick an array typecode wide
        # enough for the bit depth ('B' for <=8 bits, 'H' otherwise).
        fmt = 'BH'[self.bitdepth > 8]
        a = array(fmt, itertools.chain(*check_rows(rows)))
        return self.write_array(outfile, a)
    # Non-interlaced: stream rows through, then verify the row count matched
    # the declared image height.
    nrows = self.write_passes(outfile, check_rows(rows))
    if nrows != self.height:
        raise ProtocolError(
            "rows supplied (%d) does not match height (%d)" %
            (nrows, self.height))
|
python
|
def open(self, file, mode='r', perm=0o0644):
    """
    Opens a file on the node
    :param file: file path to open
    :param mode: open mode
    :param perm: file permission in octet form
    mode:
    'r' read only
    'w' write only (truncate)
    '+' read/write
    'x' create if not exist
    'a' append
    :return: a file descriptor
    """
    # Delegate to the node client; the returned value is the remote descriptor.
    return self._client.json('filesystem.open', {
        'file': file,
        'mode': mode,
        'perm': perm,
    })
|
python
|
def _hexencode(bytestring, insert_spaces = False):
    """Convert a byte string to a hex encoded string.
    For example 'J' will return '4A', and ``'\\x04'`` will return '04'.
    Args:
        bytestring (str): Can be for example ``'A\\x01B\\x45'``.
        insert_spaces (bool): Insert space characters between pair of characters to increase readability.
    Returns:
        A string of twice the length, with characters in the range '0' to '9' and 'A' to 'F'.
        The string will be longer if spaces are inserted.
    Raises:
        TypeError, ValueError
    """
    _checkString(bytestring, description='byte string')
    separator = ' ' if insert_spaces else ''
    # Plain string formatting (instead of binhex.hexlify) keeps this
    # compatible with both Python 2.x and 3.x.
    return separator.join('{0:02X}'.format(ord(char)) for char in bytestring).strip()
|
python
|
def list(self):
    """List Reserved Capacities"""
    # Object mask limiting the fields returned by the SoftLayer API call,
    # including per-instance billing info and the backend router's datacenter.
    mask = """mask[availableInstanceCount, occupiedInstanceCount,
        instances[id, billingItem[description, hourlyRecurringFee]], instanceCount, backendRouter[datacenter]]"""
    results = self.client.call('Account', 'getReservedCapacityGroups', mask=mask)
    return results
|
python
|
def write_ast(patched_ast_node):
    """Extract source form a patched AST node with `sorted_children` field
    If the node is patched with sorted_children turned off you can use
    `node_region` function for obtaining code using module source code.
    """
    # sorted_children mixes plain strings and child AST nodes; recurse into
    # the nodes and keep the strings verbatim.
    pieces = [
        write_ast(child) if isinstance(child, ast.AST) else child
        for child in patched_ast_node.sorted_children
    ]
    return ''.join(pieces)
|
java
|
/**
 * Guesses the ganglia metric type for a value.
 *
 * <p>Known numeric wrapper types map directly; anything else is classified
 * by parsing its string form. The int parse is attempted BEFORE the double
 * parse: every string {@code Integer.parseInt} accepts is also accepted by
 * {@code Double.parseDouble}, so with the double check first the UINT32
 * branch was unreachable.</p>
 *
 * @param obj value to classify (must not be null)
 * @return the metric type to report
 */
private static GMetricType getType(final Object obj) {
    // FIXME This is far from covering all cases.
    // FIXME Wasteful use of high capacity types (eg Short => INT32)
    // Direct mapping when possible
    if (obj instanceof Long || obj instanceof Integer || obj instanceof Byte || obj instanceof Short)
        return GMetricType.INT32;
    if (obj instanceof Float)
        return GMetricType.FLOAT;
    if (obj instanceof Double)
        return GMetricType.DOUBLE;
    // Convert to int or double if possible (int first — see javadoc)
    final String text = obj.toString();
    try {
        Integer.parseInt(text);
        return GMetricType.UINT32;
    } catch (NumberFormatException e) {
        // Not an int
    }
    try {
        Double.parseDouble(text);
        return GMetricType.DOUBLE;
    } catch (NumberFormatException e) {
        // Not a double
    }
    return GMetricType.STRING;
}
|
python
|
def _print_map_dict(self, argkey, filename, append):
    """Prints a dictionary that has variable => value mappings."""
    mapping = self.curargs[argkey]
    # One "'key' => value" line per entry, in sorted key order.
    lines = ["'{}' => {}".format(key, mapping[key]) for key in sorted(mapping)]
    self._redirect_output('\n'.join(lines), filename, append, msg.info)
|
python
|
def _pop(line, key, use_rest):
'''
Helper for the line parser.
If key is a prefix of line, will remove ir from the line and will
extract the value (space separation), and the rest of the line.
If use_rest is True, the value will be the rest of the line.
Return a tuple with the value and the rest of the line.
'''
value = None
if line.startswith(key):
line = line[len(key):].strip()
if use_rest:
value = line
line = ''
else:
value, line = line.split(' ', 1)
return value, line.strip()
|
python
|
def make_logger(name, stream_type, jobs):
    """Create a logger component.
    :param name: name of logger child, i.e. logger will be named
    `noodles.<name>`.
    :type name: str
    :param stream_type: type of the stream that this logger will
    be inserted into, should be |pull_map| or |push_map|.
    :type stream_type: function
    :param jobs: job-keeper instance.
    :type jobs: dict, |JobKeeper| or |JobDB|.
    :return: a stream.
    The resulting stream receives messages and sends them on after
    sending an INFO message to the logger. In the case of a |JobMessage|
    or |ResultMessage| a meaningful message is composed otherwise the
    string representation of the object is passed."""
    logger = logging.getLogger('noodles').getChild(name)
    # logger.setLevel(logging.DEBUG)
    @stream_type
    def log_message(message):
        # Pass-through tap: log one INFO line describing the message, then
        # return the message unchanged so the stream continues downstream.
        if message is EndOfQueue:
            logger.info("-end-of-queue-")
        elif isinstance(message, JobMessage):
            logger.info(
                "job %10s: %s", message.key, message.node)
        elif isinstance(message, ResultMessage):
            # Look up the originating job so the line can include its node.
            job = jobs[message.key]
            if is_workflow(message.value):
                # Workflow results are logged by identity, not value.
                logger.info(
                    "result %10s [%s]: %s -> workflow %x", message.key,
                    job.node, message.status, id(message.value))
            else:
                value_string = repr(message.value)
                logger.info(
                    "result %10s [%s]: %s -> %s", message.key, job.node,
                    message.status, _sugar(value_string))
        else:
            logger.info(
                "unknown message: %s", message)
        return message
    return log_message
|
java
|
/**
 * Creates a SourceLineAnnotation for the given class and source file with
 * unknown line information (start and end line of -1).
 *
 * @param className dotted class name
 * @param sourceFile source file name
 * @return annotation whose line range is marked unknown
 */
public static SourceLineAnnotation createUnknown(@DottedClassName String className, String sourceFile) {
    return createUnknown(className, sourceFile, -1, -1);
}
|
java
|
/**
 * Recursively prints the task tree below {@code task}, one line per task,
 * adding one space of indentation per level.
 *
 * @param task parent task whose children are printed
 * @param indent current indentation prefix
 */
private static void listHierarchy(Task task, String indent)
{
    for (Task child : task.getChildTasks())
    {
        String line = indent + "Task: " + child.getName() + "\t" + child.getStart() + "\t" + child.getFinish();
        System.out.println(line);
        listHierarchy(child, indent + " ");
    }
}
|
python
|
def restore_artifact(src_path: str, artifact_hash: str, conf: Config):
    """Restore the artifact whose hash is `artifact_hash` to `src_path`.

    Return True if cached artifact is found, valid, and restored successfully.
    Otherwise return False.
    """
    cache_dir = conf.get_artifacts_cache_dir()
    if not isdir(cache_dir):
        return False
    cached_artifact_path = join(cache_dir, artifact_hash)
    if isfile(cached_artifact_path) or isdir(cached_artifact_path):
        # verify cached item hash matches expected hash
        actual_hash = hash_tree(cached_artifact_path)
        if actual_hash != artifact_hash:
            logger.warning(
                'Cached artifact {} expected hash {} != actual hash {}',
                src_path, artifact_hash, actual_hash)
            rmnode(cached_artifact_path)
            return False
        # if something exists in src_path, check if it matches the cached item
        abs_src_path = join(conf.project_root, src_path)
        if isfile(abs_src_path) or isdir(abs_src_path):
            # NOTE(review): other calls here use abs_src_path — confirm
            # hash_tree is really meant to get the relative src_path.
            existing_hash = hash_tree(src_path)
            if existing_hash == artifact_hash:
                logger.debug('Existing artifact {} matches cached hash {}',
                             src_path, artifact_hash)
                return True
            logger.debug('Replacing existing artifact {} with cached one',
                         src_path)
            rmnode(abs_src_path)
        logger.debug('Restoring cached artifact {} to {}',
                     artifact_hash, src_path)
        if isdir(cached_artifact_path):
            # shutil.copy raises on directories, but directory artifacts are
            # explicitly allowed above — copy the whole tree instead.
            shutil.copytree(cached_artifact_path, abs_src_path)
        else:
            shutil.copy(cached_artifact_path, abs_src_path)
        return True
    logger.debug('No cached artifact for {} with hash {}',
                 src_path, artifact_hash)
    return False
|
java
|
/**
 * Processes the test instance's annotations and injects mocks into it.
 *
 * @param clazz test class — NOTE(review): currently unused; the instance's
 *        runtime class is passed instead. Confirm whether {@code clazz}
 *        was meant to be forwarded to processIndependentAnnotations.
 * @param testInstance instance whose annotated members are processed
 */
public void process(Class<?> clazz, Object testInstance) {
    processIndependentAnnotations(testInstance.getClass(), testInstance);
    injectMocks(testInstance);
}
|
python
|
def exists(self):
    """Check whether the directory exists on the camera.

    The filesystem root ("" or "/") with no parent always exists; any other
    directory exists iff its parent lists it among its directories.
    """
    is_root = self.name in ("", "/") and self.parent is None
    return True if is_root else self in self.parent.directories
|
java
|
/**
 * Initialises this module's code and output streams.
 *
 * @param mtLv method trace level of the caller; not used by this method
 * @param superiorModuleCode code of the enclosing module — this module's
 *        code becomes {@code superiorModuleCode + "." + baseModuleCode}
 * @param out stream for normal output
 * @param err stream for error output
 */
public void initValues(
    int mtLv,
    String superiorModuleCode,
    PrintStream out,
    PrintStream err
) {
    // NOTE(review): the original incremented mtLv here, but the value was
    // never read afterwards; the no-op increment has been removed.
    moduleCode = superiorModuleCode + "." + baseModuleCode;
    this.out = out;
    this.err = err;
}
|
python
|
def open(self):
    '''
    Open the channel for communication.
    '''
    frame_args = Writer()
    frame_args.write_shortstr('')
    # Presumably AMQP channel.open (class id 20, method id 10) — confirm
    # against the protocol constants used elsewhere.
    self.send_frame(MethodFrame(self.channel_id, 20, 10, frame_args))
    self.channel.add_synchronous_cb(self._recv_open_ok)
|
python
|
def leftcontext(self, size, placeholder=None, scope=None):
    """Returns the left context for an element, as a list. This method crosses sentence/paragraph boundaries by default, which can be restricted by setting scope"""
    if size == 0:
        return []  # for efficiency
    collected = []
    current = self
    # Walk backwards through preceding elements until enough were gathered
    # or the start of the (scoped) document is reached.
    while len(collected) < size:
        current = current.previous(True, scope)
        if not current:
            break
        collected.append(current)
    if placeholder:
        # Pad so the result always contains exactly `size` items.
        while len(collected) < size:
            collected.append(placeholder)
    collected.reverse()
    return collected
|
java
|
/**
 * Sets the recurrence rule, avoiding lazy property creation when both the
 * current rule and the new value are null.
 *
 * @param rec the new recurrence rule, may be null
 */
public final void setRecurrenceRule(String rec) {
    boolean bothNull = recurrenceRule == null && rec == null;
    if (!bothNull) {
        recurrenceRuleProperty().set(rec);
    }
}
|
java
|
/**
 * Creates a copy of this metadata. Scalar attribute maps are copied
 * entry-for-entry; each OWL object list is copied into a fresh list so the
 * copy shares no mutable lists with this instance.
 *
 * @return an independent copy of this metadata
 */
public BinaryOWLMetadata createCopy() {
    BinaryOWLMetadata result = new BinaryOWLMetadata();
    result.stringAttributes.putAll(stringAttributes);
    result.intAttributes.putAll(intAttributes);
    result.longAttributes.putAll(longAttributes);
    result.doubleAttributes.putAll(doubleAttributes);
    result.booleanAttributes.putAll(booleanAttributes);
    result.byteArrayAttributes.putAll(byteArrayAttributes);
    // Lists are mutable: copy each one rather than sharing references.
    for (String attributeName : owlObjectAttributes.keySet()) {
        result.owlObjectAttributes.put(attributeName,
                new ArrayList<OWLObject>(owlObjectAttributes.get(attributeName)));
    }
    return result;
}
|
java
|
/**
 * Returns the response body stream for this connection, following a single
 * 302 redirect manually so the custom headers survive the hop.
 *
 * <p>NOTE(review): any failure (including from getResponseCode) makes this
 * method return {@code null}; callers must null-check. The redirect's
 * stream is closed immediately — presumably only its side effects (login
 * cookies) matter; confirm.</p>
 */
public InputStream getInputStream() {
    try
    {
        int responseCode = this.urlConnection.getResponseCode();
        try
        {
            // HACK: manually follow redirects, for the login to work
            // HTTPUrlConnection auto redirect doesn't respect the provided headers
            if(responseCode ==302)
            {
                HttpClient redirectClient =
                    new HttpClient(proxyHost,proxyPort, urlConnection.getHeaderField("Location"),
                        headers, urlConnection.getRequestMethod(), callback, authHeader);
                redirectClient.getInputStream().close();
            }
        }
        catch (Throwable e)
        {
            // Redirect failures are deliberately non-fatal.
            System.out.println("Following redirect failed");
        }
        // Remember any session cookie issued by the server.
        setCookieHeader = this.urlConnection.getHeaderField("Set-Cookie");
        // Non-200 responses expose the error stream instead of the body stream.
        InputStream in = responseCode != HttpURLConnection.HTTP_OK ?
            this.urlConnection.getErrorStream() : this.urlConnection.getInputStream();
        return in;
    } catch (Exception e) {
        return null;
    }
}
|
python
|
def loop(bot, config, interval, settings):
    """Schedule a BOT (by label) to run on an interval, e.g. 'MyBot -i 60'"""
    print_options(bot, config, settings)
    click.echo(f'- Interval: {interval}s')
    click.echo()
    # Run the bot forever, waiting `interval` seconds between runs.
    task = BotTask(bot, config)
    task.run_loop(interval)
|
python
|
def memberships(self, group, include=None):
    """
    Return the GroupMemberships for this group.

    :param include: list of objects to sideload. `Side-loading API Docs
        <https://developer.zendesk.com/rest_api/docs/core/side_loading>`__.
    :param group: Group object or id
    """
    endpoint_path = self.endpoint.memberships(id=group, include=include)
    return self._get(self._build_url(endpoint_path))
|
java
|
/**
 * Logs a debug message, building the decorated message only when debug
 * logging is actually enabled.
 *
 * @param message the raw message to log
 */
@Override
public void debug(String message) {
    if (!this.logger.isDebugEnabled()) {
        return;
    }
    this.logger.debug(buildMessage(message));
}
|
python
|
def get_throttled_by_provisioned_write_event_percent(
        table_name, gsi_name, lookback_window_start=15, lookback_period=5):
    """ Returns the number of throttled write events during a given time frame

    :type table_name: str
    :param table_name: Name of the DynamoDB table
    :type gsi_name: str
    :param gsi_name: Name of the GSI
    :type lookback_window_start: int
    :param lookback_window_start: Relative start time for the CloudWatch metric
    :type lookback_period: int
    :param lookback_period: Number of minutes to look at
    :returns: float -- Percent of throttled write events by provisioning
    """
    # The original wrapped the next call in "except BotoServerError: raise",
    # a no-op handler; it propagates unchanged without the try/except.
    metrics = __get_aws_metric(
        table_name,
        gsi_name,
        lookback_window_start,
        lookback_period,
        'WriteThrottleEvents')
    if metrics:
        # Convert the summed event count into events per second.
        lookback_seconds = lookback_period * 60
        throttled_write_events = float(metrics[0]['Sum']) / float(
            lookback_seconds)
    else:
        throttled_write_events = 0
    # JSONResponseError likewise propagates unchanged (the re-raise-only
    # handler was removed).
    gsi_write_units = dynamodb.get_provisioned_gsi_write_units(
        table_name, gsi_name)
    # NOTE(review): raises ZeroDivisionError if the GSI reports 0 provisioned
    # write units — confirm upstream guarantees a positive value.
    throttled_by_provisioned_write_percent = (
        float(throttled_write_events) /
        float(gsi_write_units) * 100)
    logger.info(
        '{0} - GSI: {1} - Throttled write percent '
        'by provision: {2:.2f}%'.format(
            table_name, gsi_name, throttled_by_provisioned_write_percent))
    return throttled_by_provisioned_write_percent
|
java
|
/**
 * Returns this replacer as a rule pattern of the form
 * {@code &<translit-id>( <inner-pattern> )}.
 *
 * @param escapeUnprintable whether unprintable characters are escaped in
 *        the nested replacer's pattern
 */
@Override
public String toReplacerPattern(boolean escapeUnprintable) {
    return new StringBuilder("&")
            .append(translit.getID())
            .append("( ")
            .append(replacer.toReplacerPattern(escapeUnprintable))
            .append(" )")
            .toString();
}
|
java
|
/**
 * Sends a control message to a remote messaging engine and blocks the
 * calling thread until a reply is delivered into the {@code _requestMap}
 * slot registered for {@code requestID}.
 *
 * @return the reply object, or {@code null} if the remote ME is unreachable
 */
public static Object issueRequest(
    MessageProcessor MP,
    ControlMessage msg,
    SIBUuid8 remoteUuid,
    long retry,
    int tries,
    long requestID)
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "issueRequest", new Object[] {MP, msg, remoteUuid, new Long(retry), new Integer(tries), new Long(requestID)});
    // Short circuit ME rechability test
    if (!MP.getMPIO().isMEReachable(remoteUuid))
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "issueRequest", null);
        return null;
    }
    // Prepare the request map
    // awaitResult[0] is filled in by the reply path; the array itself also
    // serves as the monitor this thread waits on.
    Object[] awaitResult = new Object[1];
    synchronized (_requestMap)
    {
        _requestMap.put(new Long(requestID), awaitResult);
    }
    synchronized (awaitResult)
    {
        // Now send the request, setup the retry alarm, and wait for a result
        MP.getMPIO().sendToMe(remoteUuid, SIMPConstants.CONTROL_MESSAGE_PRIORITY, msg);
        ResendRecord retryRecord = new ResendRecord(MP, msg, remoteUuid, retry, tries, requestID);
        MP.getAlarmManager().create(retry, _alarmHandler, retryRecord);
        // NOTE(review): the loop breaks on ANY return from wait(), including
        // spurious wakeups, so awaitResult[0] may still be null below —
        // confirm notifiers always store the result before notifying.
        while (true)
            try
            {
                awaitResult.wait();
                break;
            }
            catch (InterruptedException e)
            {
                // No FFDC code needed
                // We shouldn't be interrupted, but if we are loop around and try again
            }
    }
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "issueRequest", awaitResult[0]);
    return awaitResult[0];
}
|
python
|
def _get_completions(self):
    """Return a list of possible completions for the string ending at the point.
    Also set begidx and endidx in the process."""
    completions = []
    self.begidx = self.l_buffer.point
    self.endidx = self.l_buffer.point
    buf=self.l_buffer.line_buffer
    if self.completer:
        # get the string to complete
        # Scan backwards from the point to the nearest completer delimiter;
        # begidx..endidx then brackets the word being completed.
        while self.begidx > 0:
            self.begidx -= 1
            if buf[self.begidx] in self.completer_delims:
                self.begidx += 1
                break
        text = ensure_str(u''.join(buf[self.begidx:self.endidx]))
        log(u'complete text="%s"' % ensure_unicode(text))
        # readline-style protocol: call completer(text, i) for i = 0, 1, ...
        # until it returns None or raises IndexError; collect unique matches.
        i = 0
        while 1:
            try:
                r = ensure_unicode(self.completer(text, i))
            except IndexError:
                break
            i += 1
            if r is None:
                break
            elif r and r not in completions:
                completions.append(r)
            else:
                pass
        log(u'text completions=<%s>' % map(ensure_unicode, completions))
    if (self.complete_filesystem == "on") and not completions:
        # get the filename to complete
        # Fallback filename completion: the word here is delimited by
        # whitespace instead of completer_delims.
        while self.begidx > 0:
            self.begidx -= 1
            if buf[self.begidx] in u' \t\n':
                self.begidx += 1
                break
        text = ensure_str(u''.join(buf[self.begidx:self.endidx]))
        log(u'file complete text="%s"' % ensure_unicode(text))
        # NOTE(review): map() result is used as a list — Python 2 semantics;
        # under Python 3 this would be a lazy iterator. Confirm target version.
        completions = map(ensure_unicode, glob.glob(os.path.expanduser(text) + '*'))
        if self.mark_directories == u'on':
            # Append a path separator to directory matches.
            mc = []
            for f in completions:
                if os.path.isdir(f):
                    mc.append(f + os.sep)
                else:
                    mc.append(f)
            completions = mc
        log(u'fnames=<%s>' % map(ensure_unicode, completions))
    return completions
|
java
|
/**
 * Returns the first character of the POSITION_KEY parameter, or a space
 * when the parameter is missing or empty.
 */
public char getPositionKey() {
    String raw = getParameter(SignalParameters.POSITION_KEY.symbol());
    if (raw == null || raw.isEmpty()) {
        return ' ';
    }
    return raw.charAt(0);
}
|
java
|
/**
 * Starts this instance if (and only if) it is fully stopped.
 *
 * @return true when the instance was started by this call; false when it
 *         was already running, booting, stopping or terminated
 */
public final boolean start() {
    boolean stopped = !(running || booting || stopping || terminated);
    if (!stopped) {
        return false;
    }
    // Mark this instance started before notifying listeners.
    booting = true;
    running = true;
    onStarted();
    return true;
}
|
python
|
def args_update(self):
    """Update the argparser namespace with any data from configuration file."""
    # Configuration values override (or add to) the parsed defaults.
    for name in self._config_data:
        setattr(self._default_args, name, self._config_data[name])
|
python
|
def add_files(self, common_name, x509s, files=None, parent_ca='',
              is_ca=False, signees=None, serial=0, overwrite=False):
    """Add a set files comprising a certificate to Certipy
    Used with all the defaults, Certipy will manage creation of file paths
    to be used to store these files to disk and automatically calls save
    on all TLSFiles that it creates (and where it makes sense to).
    """
    if common_name in self.store and not overwrite:
        raise CertExistsError(
            "Certificate {name} already exists!"
            " Set overwrite=True to force add."
            .format(name=common_name))
    elif common_name in self.store and overwrite:
        # Overwrite: bump the serial on the existing record and rewrite the
        # x509 files in place.
        # NOTE(review): unlike the create branch below, this branch neither
        # updates self.store nor calls self.save() — confirm the bumped
        # serial is meant to be persisted.
        record = self.get_record(common_name)
        serial = int(record['serial'])
        record['serial'] = serial + 1
        TLSFileBundle(common_name).from_record(record).save_x509s(x509s)
    else:
        # Fresh certificate: derive key/cert paths under the store directory.
        file_base_tmpl = "{prefix}/{cn}/{cn}"
        file_base = file_base_tmpl.format(
            prefix=self.containing_dir, cn=common_name
        )
        # The CA cert path comes from the parent CA's record when one exists.
        try:
            ca_record = self.get_record(parent_ca)
            ca_file = ca_record['files']['cert']
        except CertNotFoundError:
            ca_file = ''
        files = files or {
            'key': file_base + '.key',
            'cert': file_base + '.crt',
            'ca': ca_file,
        }
        bundle = TLSFileBundle(
            common_name, files=files, x509s=x509s, is_ca=is_ca,
            serial=serial, parent_ca=parent_ca, signees=signees)
        self.store[common_name] = bundle.to_record()
        self.save()
|
java
|
/**
 * Finds (or lazily creates) the taxed line for the given tax-debt line ID,
 * then appends the given tax with its rounded percentage to that line's tax
 * category and accumulates the aggregate-only percent.
 *
 * @param pTxdLns existing taxed lines; a new line is appended when no match
 * @param pTdlId tax-debt line ID to match on (if several match, the last wins)
 * @param pCatId tax category ID used when a new line is created
 * @param pTax tax to add
 * @param pPercent tax percentage, rounded half-up to the tax precision
 * @param pAs accounting settings supplying the tax precision
 * @return the matched or newly created taxed line
 */
public final CartLn makeTxdLine(final List<CartLn> pTxdLns, final Long pTdlId,
    final Long pCatId, final Tax pTax, final Double pPercent,
    final AccSettings pAs) {
    // Scan every line; when several share the ID, the last one is used.
    CartLn taxedLine = null;
    for (CartLn candidate : pTxdLns) {
        if (candidate.getItsId().equals(pTdlId)) {
            taxedLine = candidate;
        }
    }
    if (taxedLine == null) {
        taxedLine = new CartLn();
        taxedLine.setItsId(pTdlId);
        InvItemTaxCategory category = new InvItemTaxCategory();
        category.setItsId(pCatId);
        category.setTaxes(new ArrayList<InvItemTaxCategoryLine>());
        taxedLine.setTxCat(category);
        pTxdLns.add(taxedLine);
    }
    InvItemTaxCategoryLine categoryLine = new InvItemTaxCategoryLine();
    categoryLine.setTax(pTax);
    categoryLine.setItsPercentage(BigDecimal.valueOf(pPercent)
        .setScale(pAs.getTaxPrecision(), RoundingMode.HALF_UP));
    taxedLine.getTxCat().getTaxes().add(categoryLine);
    taxedLine.getTxCat().setAggrOnlyPercent(taxedLine.getTxCat()
        .getAggrOnlyPercent().add(categoryLine.getItsPercentage()));
    return taxedLine;
}
|
python
|
def push_all(collector, **kwargs):
    """Push all the images"""
    # Force push mode on the harpoon options, then build everything.
    harpoon_options = collector.configuration["harpoon"]
    harpoon_options.do_push = True
    harpoon_options.only_pushable = True
    make_all(collector, **kwargs)
|
python
|
def find_genus(files, database, threads=12):
    """
    Uses MASH to find the genus of fasta files.

    :param files: File dictionary returned by filer method.
    :param database: Path to reduced refseq database sketch.
    :param threads: Number of threads to run mash with.
    :return: genus_dict: Dictionary of genus for each sample. Will return NA if genus could not be found.
    """
    import tempfile
    genus_dict = dict()
    # Use a real temporary directory (the original derived one from the
    # clock, which can collide) and guarantee cleanup even when mash or
    # parsing raises.
    tmpdir = tempfile.mkdtemp()
    try:
        for file_name, fasta in files.items():
            screen_tab = os.path.join(tmpdir, 'screen.tab')
            mash.screen(database, fasta,
                        threads=threads,
                        w='',
                        i=0.95,
                        output_file=screen_tab)
            screen_output = mash.read_mash_screen(screen_tab)
            try:
                os.remove(screen_tab)
            except OSError:
                # os.remove raises OSError (IOError is its py3 alias);
                # failure to tidy up the intermediate file is harmless.
                pass
            try:
                genus = screen_output[0].query_id.split('/')[-3]
                # Shigella is genomically Escherichia; report it as such.
                if genus == 'Shigella':
                    genus = 'Escherichia'
                genus_dict[file_name] = genus
            except IndexError:
                # No screen hits for this sample.
                genus_dict[file_name] = 'NA'
    finally:
        shutil.rmtree(tmpdir)
    return genus_dict
|
java
|
/**
 * Registers every WsLogHandler service already present in the OSGi service
 * registry at startup.
 *
 * @throws InvalidSyntaxException never in practice (no filter is used)
 */
@SuppressWarnings("unchecked")
protected void processInitialWsLogHandlerServices() throws InvalidSyntaxException {
    ServiceReference<WsLogHandler>[] references = (ServiceReference<WsLogHandler>[])
        bundleContext.getServiceReferences(WsLogHandler.class.getName(), null);
    if (references == null) {
        return;
    }
    for (ServiceReference<WsLogHandler> reference : references) {
        setWsLogHandler(reference);
    }
}
|
python
|
def media(soup):
    """
    All media tags and some associated data about the related component doi
    and the parent of that doi (not always present)
    """
    media = []
    media_tags = raw_parser.media(soup)
    # Detail fields copied from each ancestor level, in output order.
    parent_fields = ('type', 'ordinal', 'asset', 'sibling_ordinal', 'component_doi')
    position = 1
    for tag in media_tags:
        media_item = {}
        copy_attribute(tag.attrs, 'mime-subtype', media_item)
        copy_attribute(tag.attrs, 'mimetype', media_item)
        copy_attribute(tag.attrs, 'xlink:href', media_item, 'xlink_href')
        copy_attribute(tag.attrs, 'content-type', media_item)
        nodenames = ["sub-article", "media", "fig-group", "fig", "supplementary-material"]
        details = tag_details(tag, nodenames)
        copy_attribute(details, 'component_doi', media_item)
        copy_attribute(details, 'type', media_item)
        copy_attribute(details, 'sibling_ordinal', media_item)
        # Walk up to three ancestor levels (the original repeated this block
        # verbatim three times), prefixing the copied fields with parent_,
        # p_parent_ and p_p_parent_ respectively.
        child = tag
        parent = first_parent(child, nodenames)
        for prefix in ('parent_', 'p_parent_', 'p_p_parent_'):
            if not parent:
                break
            acting_parent = component_acting_parent_tag(parent, child)
            if acting_parent:
                details = tag_details(acting_parent, nodenames)
                for field in parent_fields:
                    copy_attribute(details, field, media_item, prefix + field)
            child, parent = parent, first_parent(parent, nodenames)
        # Increment the position
        media_item['position'] = position
        # Ordinal should be the same as position in this case but set it anyway
        media_item['ordinal'] = tag_ordinal(tag)
        media.append(media_item)
        position += 1
    return media
|
java
|
/**
 * Returns true if the given field is currently set (Thrift-style accessor
 * dispatch to the per-field isSet methods).
 *
 * @param field the field to test; must not be null
 * @throws IllegalArgumentException if {@code field} is null
 * @throws IllegalStateException if {@code field} is not a known member
 */
public boolean isSet(_Fields field) {
    if (field == null) {
        throw new IllegalArgumentException();
    }
    switch (field) {
    case KEY:
        return isSetKey();
    case COLUMN_PARENT:
        return isSetColumn_parent();
    case COLUMN_SLICES:
        return isSetColumn_slices();
    case REVERSED:
        return isSetReversed();
    case COUNT:
        return isSetCount();
    case CONSISTENCY_LEVEL:
        return isSetConsistency_level();
    }
    throw new IllegalStateException();
}
|
python
|
def _bowtie_major_version(stdout):
"""
bowtie --version returns strings like this:
bowtie version 0.12.7
32-bit
Built on Franklin.local
Tue Sep 7 14:25:02 PDT 2010
"""
version_line = stdout.split("\n")[0]
version_string = version_line.strip().split()[2]
major_version = int(version_string.split(".")[0])
# bowtie version 1 has a leading character of 0 or 1
if major_version == 0 or major_version == 1:
major_version = 1
return major_version
|
python
|
def encode(self, envelope, session, **kwargs):
    """ :meth:`.WMessengerOnionCoderLayerProto.encode` method implementation.
    :param envelope: original envelope
    :param session: original session
    :param kwargs: additional arguments
    :return: WMessengerBytesEnvelope
    """
    # Base64-encode the payload; the original envelope rides along as meta.
    encoded_payload = b64encode(envelope.message())
    return WMessengerBytesEnvelope(encoded_payload, meta=envelope)
|
java
|
/**
 * Reconstructs the most likely state sequence by following back pointers
 * from the most likely final state, then reversing into forward order.
 */
private List<SequenceState<S, O, D>> retrieveMostLikelySequence() {
    // Otherwise an HMM break would have occurred and message would be null.
    assert !message.isEmpty();
    final List<SequenceState<S, O, D>> sequence = new ArrayList<>();
    for (ExtendedState<S, O, D> current = lastExtendedStates.get(mostLikelyState());
            current != null; current = current.backPointer) {
        sequence.add(new SequenceState<>(current.state, current.observation,
                current.transitionDescriptor));
    }
    Collections.reverse(sequence);
    return sequence;
}
|
java
|
/**
 * Handles a target-deleted repository event by forwarding a delete message.
 *
 * <p>NOTE(review): the guard skips events for which isNotFromSelf(...) is
 * true, i.e. only events originating from this node are forwarded —
 * confirm that is the intended direction.</p>
 *
 * @param deleteEvent the event describing the deleted target
 */
@EventListener(classes = TargetDeletedEvent.class)
protected void targetDelete(final TargetDeletedEvent deleteEvent) {
    if (isNotFromSelf(deleteEvent)) {
        return;
    }
    sendDeleteMessage(deleteEvent.getTenant(), deleteEvent.getControllerId(), deleteEvent.getTargetAddress());
}
|
java
|
/**
 * Returns a collector that keeps only the last {@code n} elements of the
 * stream, in encounter order, as a {@code List}.
 *
 * @param n maximum number of trailing elements to keep; non-positive
 *          yields the empty collector
 */
public static <T> Collector<T, ?, List<T>> tail(int n) {
    if (n <= 0) {
        return empty();
    }
    return Collector.<T, Deque<T>, List<T>> of(
            ArrayDeque::new,
            (buffer, element) -> {
                // Drop the oldest element once the buffer is full.
                if (buffer.size() == n) {
                    buffer.pollFirst();
                }
                buffer.addLast(element);
            },
            (left, right) -> {
                // Prepend from the left buffer until `right` holds n elements.
                while (right.size() < n && !left.isEmpty()) {
                    right.addFirst(left.pollLast());
                }
                return right;
            },
            ArrayList::new);
}
|
python
|
def relativeAreaSTE(self):
    '''
    return STE area - relative to image area
    '''
    # Ratio of STE mask pixels to total pixels of the reference image.
    height, width = self.noSTE.shape[0], self.noSTE.shape[1]
    return np.sum(self.mask_STE) / (height * width)
|
java
|
/**
 * Returns the lazily created map from width strings to scaled image beans.
 *
 * @return the lazy scale-width map
 */
public Map<String, CmsJspImageBean> getScaleWidth() {
    if (m_scaleWidth != null) {
        return m_scaleWidth;
    }
    m_scaleWidth = CmsCollectionsGenericWrapper.createLazyMap(new CmsScaleWidthTransformer());
    return m_scaleWidth;
}
|
java
|
/**
 * Connects labels along the image's left and right borders. The right
 * border column is always processed; the left column only needs processing
 * for 8-connectivity rules. The original duplicated the per-pixel logic
 * verbatim for both columns; it is extracted into a helper.
 *
 * @param input labeled input image
 * @param output output image whose region labels are created and merged
 */
protected void connectLeftRight(GrayS32 input, GrayS32 output) {
    for (int y = 0; y < input.height; y++) {
        // check right first
        connectBorderPixel(input, output, input.width - 1, y);
        // skip check of left for 4-connect
        if (connectRule != ConnectRule.EIGHT)
            continue;
        connectBorderPixel(input, output, 0, y);
    }
}

/**
 * Labels the pixel at (x,y) in the output — creating a new region if it has
 * none — and merges it with every in-bounds neighbor that shares the same
 * input label.
 */
private void connectBorderPixel(GrayS32 input, GrayS32 output, int x, int y) {
    int inputLabel = input.unsafe_get(x, y);
    int outputLabel = output.unsafe_get(x, y);
    if (outputLabel == -1) { // see if it needs to create a new output segment
        outputLabel = regionMemberCount.size;
        output.unsafe_set(x, y, outputLabel);
        regionMemberCount.add(1);
        mergeList.add(outputLabel);
    }
    for (int i = 0; i < edges.length; i++) {
        Point2D_I32 offset = edges[i];
        // make sure it is inside the image
        if (!input.isInBounds(x + offset.x, y + offset.y))
            continue;
        if (inputLabel == input.unsafe_get(x + offset.x, y + offset.y)) {
            int outputAdj = output.unsafe_get(x + offset.x, y + offset.y);
            if (outputAdj == -1) { // neighbor unassigned: absorb into this region
                regionMemberCount.data[outputLabel]++;
                output.unsafe_set(x + offset.x, y + offset.y, outputLabel);
            } else if (outputLabel != outputAdj) { // different regions: record merge
                markMerge(outputLabel, outputAdj);
            } // do nothing, same input and output labels
        }
    }
}
|
python
|
def get_xml_parser(encoding=None):
    """Returns an ``etree.ETCompatXMLParser`` instance."""
    # Configured for large documents: comments stripped, CDATA preserved,
    # blank text removed, and external entities deliberately NOT resolved.
    return etree.ETCompatXMLParser(
        huge_tree=True,
        remove_comments=True,
        strip_cdata=False,
        remove_blank_text=True,
        resolve_entities=False,
        encoding=encoding,
    )
|
java
|
/**
 * Converts a depth image (millimeters, 0 = unknown) into a 3D point cloud
 * using the camera's intrinsic parameters.
 *
 * @param param intrinsic camera model
 * @param depth depth image; each value is depth in mm, 0 means no reading
 * @param cloud output point cloud; reset before being filled
 */
public static void depthTo3D(CameraPinholeBrown param, GrayU16 depth, FastQueue<Point3D_F64> cloud) {
    cloud.reset();
    Point2Transform2_F64 pixelToNorm = LensDistortionFactory.narrow(param).undistort_F64(true, false);
    Point2D_F64 norm = new Point2D_F64();
    for (int y = 0; y < depth.height; y++) {
        int index = depth.startIndex + y * depth.stride;
        for (int x = 0; x < depth.width; x++) {
            int depthMM = depth.data[index++] & 0xFFFF;
            // skip pixels with no depth information
            if (depthMM == 0)
                continue;
            // this could all be precomputed to speed it up
            pixelToNorm.compute(x, y, norm);
            // Scale the normalized image coordinate by the depth.
            Point3D_F64 point = cloud.grow();
            point.z = depthMM;
            point.x = norm.x * point.z;
            point.y = norm.y * point.z;
        }
    }
}
|
python
|
def getbalance(self, user_id="", as_decimal=True):
    """Calculate the total balance in all addresses belonging to this user.
    Args:
        user_id (str): this user's unique identifier
        as_decimal (bool): balance is returned as a Decimal if True (default)
            or a string if False
    Returns:
        str or Decimal: this account's total coin balance
    """
    # NOTE(review): `unicode` exists only in Python 2 — this module appears
    # to target Python 2; confirm before running under Python 3.
    balance = unicode(self.rpc.call("getbalance", user_id))
    # NOTE(review): multi-argument debug() call suggests a custom logger,
    # not stdlib logging (which would treat extra args as %-format args).
    self.logger.debug("\"" + user_id + "\"", self.coin, "balance:", balance)
    if as_decimal:
        return Decimal(balance)
    else:
        return balance
|
java
|
/**
 * Builds the view: a centered VBox hosted in a fixed 600x600 scroll pane,
 * populated with three embedded FXML views.
 */
@Override
protected void initView() {
    super.initView();
    this.box = new VBox();
    this.box.setAlignment(Pos.CENTER);
    final ScrollPane scrollPane = new ScrollPane();
    scrollPane.setPrefSize(600, 600);
    scrollPane.setContent(this.box);
    node().setCenter(scrollPane);
    // First embedded view reuses the shared component instance.
    addNode(EmbeddedView.ROOT_EMBEDDED_FXML.get().node());
    // Load 2 more instance
    addNode(EmbeddedView.ROOT_EMBEDDED_FXML.getNew().node());
    addNode(EmbeddedView.ROOT_EMBEDDED_FXML.getNew().node());
}
|
java
|
/**
 * Lists deny assignments for the given resource as a lazily paging list:
 * the first page is fetched synchronously here, and subsequent pages are
 * fetched on demand via {@code nextPage}.
 *
 * @param resourceGroupName resource group containing the resource
 * @param resourceProviderNamespace namespace of the resource provider
 * @param parentResourcePath parent resource identity (may be empty)
 * @param resourceType resource type of the resource
 * @param resourceName name of the resource
 * @return a paged list of deny assignments
 */
public PagedList<DenyAssignmentInner> listForResource(final String resourceGroupName, final String resourceProviderNamespace, final String parentResourcePath, final String resourceType, final String resourceName) {
    ServiceResponse<Page<DenyAssignmentInner>> response = listForResourceSinglePageAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName).toBlocking().single();
    return new PagedList<DenyAssignmentInner>(response.body()) {
        @Override
        public Page<DenyAssignmentInner> nextPage(String nextPageLink) {
            return listForResourceNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.