language: string (2 classes) | func_code_string: string (lengths 63 to 466k)
|
|---|---|
java
|
/**
 * Appends a WHERE clause to the SQL being built.
 *
 * @param where the condition text; ignored when blank
 * @return this builder, for chaining
 */
public SqlBuilder where(String where) {
    if (!StrUtil.isNotBlank(where)) {
        return this;
    }
    sql.append(" WHERE ").append(where);
    return this;
}
|
java
|
/**
 * Strips the surrounding curly braces from a vendor extension string and
 * parses the remaining content.
 *
 * @param vendorExtension raw extension text, possibly wrapped in '{' ... '}'
 * @return the parsed Property, or null when the input is null or effectively empty
 */
protected final Property parseVendorExtension( String vendorExtension ) {
    if (vendorExtension == null) {
        return null;
    }
    // Drop a single leading '{' and a single trailing '}' ...
    String content = vendorExtension.replaceFirst("^[{]", "").replaceAll("[}]$", "");
    return content.trim().isEmpty() ? null : parseVendorExtensionContent(content);
}
|
python
|
def add_service_subnet(self, context_id, subnet_id):
    """Add a service subnet to a tunnel context.

    :param int context_id: The id-value representing the context instance.
    :param int subnet_id: The id-value representing the service subnet.
    :return bool: True if service subnet addition was successful.
    """
    tunnel_service = self.context
    return tunnel_service.addServiceSubnetToNetworkTunnel(subnet_id,
                                                          id=context_id)
|
python
|
def _loss_lr_subject(self, data, labels, w, theta, bias):
    """Compute the Loss MLR for a single subject (without regularization)

    Parameters
    ----------
    data : array, shape=[voxels, samples]
        The fMRI data of subject i for the classification task.
    labels : array of int, shape=[samples]
        The labels for the data samples in data.
    w : array, shape=[voxels, features]
        The orthogonal transform (mapping) :math:`W_i` for subject i.
    theta : array, shape=[classes, features]
        The MLR class plane parameters.
    bias : array, shape=[classes]
        The MLR class biases.

    Returns
    -------
    loss : float
        The loss MLR for the subject
    """
    # Subjects with no data contribute nothing to the loss.
    if data is None:
        return 0.0
    samples = data.shape[1]

    # Per-class scores for every sample: theta^T (w^T data) plus the per-class
    # bias (classes-by-samples, going by the variable name — shapes per the
    # docstring above).
    thetaT_wi_zi_plus_bias = theta.T.dot(w.T.dot(data)) + bias
    # Numerically stable log-sum-exp over classes: sumexp_stable returns the
    # sum of max-shifted exponentials together with the max used for shifting.
    sum_exp, max_value, _ = utils.sumexp_stable(thetaT_wi_zi_plus_bias)
    sum_exp_values = np.log(sum_exp) + max_value

    # Accumulate the score of the true class for each sample.
    aux = 0.0
    for sample in range(samples):
        label = labels[sample]
        aux += thetaT_wi_zi_plus_bias[label, sample]

    # Scaled multinomial-logistic loss:
    # alpha / (samples * gamma) * (sum(logsumexp) - sum(true-class scores)).
    return self.alpha / samples / self.gamma * (sum_exp_values.sum() - aux)
|
java
|
/**
 * Removes every stripe of a weak counter from the cache.
 *
 * <p>A weak counter is split into {@code concurrencyLevel()} entries, one per
 * stripe index, so each stripe key is removed individually.
 *
 * @param cache         cache holding the counter entries
 * @param configuration counter configuration (supplies the stripe count)
 * @param counterName   name of the counter to remove
 */
public static void removeWeakCounter(Cache<WeakCounterKey, CounterValue> cache, CounterConfiguration configuration,
      String counterName) {
   ByteString name = ByteString.fromString(counterName);
   int stripes = configuration.concurrencyLevel();
   for (int index = 0; index < stripes; index++) {
      cache.remove(new WeakCounterKey(name, index));
   }
}
|
java
|
/**
 * Opens a connection via the real ("proxied") JDBC driver and wraps it in a
 * JavaMelody monitoring proxy.
 *
 * <p>The real driver class must be named either in the {@code driver} property
 * of {@code info} or as a trailing {@code driver=<class>} parameter on the URL;
 * otherwise this driver declines the URL by returning {@code null}, as the
 * JDBC {@code Driver.connect} contract allows.
 *
 * @param url  the JDBC url, possibly ending with {@code driver=<class name>}
 * @param info connection properties; {@code javamelody=false} disables proxying
 * @return a monitored connection proxy, or null when this driver does not apply
 * @throws SQLException if the driver class cannot be loaded or the connection fails
 */
@Override
public Connection connect(String url, Properties info) throws SQLException {
    if ("false".equals(info.get("javamelody"))) {
        // if property javamelody=false then it's not for us
        // (we pass here from the DriverManager.getConnection below)
        return null;
    }
    String myUrl = url;
    // we load first the driver class from the info or the url, to be sure that it will be found
    String proxiedDriver = info.getProperty("driver");
    if (proxiedDriver == null && myUrl != null) {
        // if not in the info, the driver class could also be passed at the end of the url, for example ...?driver=org.h2.Driver
        final int index = myUrl.indexOf("driver=");
        if (index != -1) {
            proxiedDriver = myUrl.substring(index + "driver=".length());
            // strip "driver=<class>" and the single separator character just before it
            myUrl = myUrl.substring(0, index - 1);
        }
    }
    if (proxiedDriver == null) {
        // if the driver is not defined in the info or in the url
        // it could still be found automatically if the driver is in the classpath
        // or (in WEB-INF/lib and if the jdbc drivers are not loaded by the JDK before this webapp)
        // but we don't want to create proxies and increment counts for the connections inside datasources
        // so we only accept and go further if driver is defined in the info or in the url
        return null;
    }
    try {
        // we use Thread.currentThread().getContextClassLoader() because the driver
        // may not live in the same classLoader as the javamelody classes
        // Class driverClass =
        Class.forName(proxiedDriver, true, Thread.currentThread().getContextClassLoader());
        // and not Class.forName(proxiedDriver);
    } catch (final ClassNotFoundException e) {
        throw new SQLException(e.getMessage(), e);
    }
    final Properties myInfo = (Properties) info.clone();
    myInfo.remove("driver");
    // mark the nested DriverManager.getConnection call below so this driver ignores it
    myInfo.put("javamelody", "false");
    Parameters.initJdbcDriverParameters(myUrl, myInfo);
    // we could call driverClass.newInstance().connect(myUrl, myInfo)
    // possibly by looking the driver which accepts the url in DriverManager.getDrivers()
    // but we prefer calling the standard DriverManager.getConnection(myUrl, myInfo)
    return JdbcWrapper.SINGLETON
            .createConnectionProxy(DriverManager.getConnection(myUrl, myInfo));
}
|
java
|
/**
 * Returns whether {@code from} contains at least one element of {@code target}.
 *
 * <p>An empty {@code target} is treated as always intersecting, while an empty
 * {@code from} never intersects a non-empty {@code target}.
 *
 * <p>NOTE(review): elements are compared with {@code ==} (reference identity),
 * not {@code equals()}; equal-but-distinct objects are NOT considered a match —
 * confirm callers rely on identity semantics before changing this.
 *
 * @param from   array to search in
 * @param target elements to look for
 * @return true if any element of target is (identically) present in from
 */
public static <T> boolean hasIntersection(T[] from, T[] target) {
    if (isEmpty(target)) {
        return true;
    }
    if (isEmpty(from)) {
        return false;
    }
    for (int i = 0; i < from.length; i++) {
        for (int j = 0; j < target.length; j++) {
            // reference-identity comparison (see note above)
            if (from[i] == target[j]) {
                return true;
            }
        }
    }
    return false;
}
|
python
|
def __join_connections(self):
    """Wait for all connections to close. There are no side-effects here.
    We just want to try and leave -after- everything has closed, in
    general.
    """
    # Sleep interval between audits and the total time we are willing to
    # wait for a graceful shutdown (both come from client configuration).
    interval_s = nsq.config.client.CONNECTION_CLOSE_AUDIT_WAIT_S
    graceful_wait_s = nsq.config.client.CONNECTION_QUIT_CLOSE_TIMEOUT_S

    graceful = False
    while graceful_wait_s > 0:
        # Nothing registered (or already cleared) -- nothing to wait for.
        if not self.__connections:
            break

        # self.__connections holds 3-tuples; the middle element is the
        # connection object exposing `is_connected`.
        connected_list = [c.is_connected for (n, c, g) in self.__connections]

        # Every connection has reported itself disconnected.
        if any(connected_list) is False:
            graceful = True
            break

        # We need to give the greenlets periodic control, in order to finish
        # up.
        gevent.sleep(interval_s)
        graceful_wait_s -= interval_s

    if graceful is False:
        # Timed out: report which connections are still up.
        connected_list = [c for (n, c, g) in self.__connections if c.is_connected]
        _logger.error("We were told to terminate, but not all "
                      "connections were stopped: [%s]", connected_list)
|
python
|
def get_checks(
        target_type=None,
        tags=None,
        ruleset_name=None,
        ruleset_file=None,
        ruleset=None,
        logging_level=logging.WARNING,
        checks_paths=None,
        skips=None,
):
    """
    Get the sanity checks for the target.

    :param skips: name of checks to skip
    :param target_type: TargetType enum
    :param tags: list of str (if not None, the checks will be filtered by tags.)
    :param ruleset_name: str (e.g. fedora; if None, default would be used)
    :param ruleset_file: fileobj instance holding ruleset configuration
    :param ruleset: dict, content of a ruleset file
    :param logging_level: logging level (default logging.WARNING)
    :param checks_paths: list of str, directories where the checks are present
    :return: list of check instances
    """
    _set_logging(level=logging_level)
    logger.debug("Finding checks started.")
    # Everything except the logging level is forwarded verbatim.
    selection = dict(
        target_type=target_type,
        tags=tags,
        ruleset_name=ruleset_name,
        ruleset_file=ruleset_file,
        ruleset=ruleset,
        checks_paths=checks_paths,
        skips=skips,
    )
    return _get_checks(**selection)
|
python
|
def _invoke(cls, cmd):
"""Invoke the given command, and return a tuple of process and raw binary output.
stderr flows to wherever its currently mapped for the parent process - generally to
the terminal where the user can see the error.
:param list cmd: The command in the form of a list of strings
:returns: The completed process object and its standard output.
:raises: Scm.LocalException if there was a problem exec'ing the command at all.
"""
try:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
except OSError as e:
# Binary DNE or is not executable
raise cls.LocalException('Failed to execute command {}: {}'.format(' '.join(cmd), e))
out, _ = process.communicate()
return process, out
|
python
|
def find(self, objects):
    """Find exactly one match in the list of objects.

    :param objects: objects to filter
    :type objects: :class:`list`
    :return: the one matching object
    :raises groupy.exceptions.NoMatchesError: if no objects match
    :raises groupy.exceptions.MultipleMatchesError: if multiple objects match
    """
    matches = list(self(objects))
    if len(matches) == 0:
        raise exceptions.NoMatchesError(objects, self.tests)
    if len(matches) > 1:
        raise exceptions.MultipleMatchesError(objects, self.tests,
                                              matches=matches)
    return matches[0]
|
python
|
def extract_date(cls, date_str):
    """
    Tries to extract a `datetime` object from the given string, expecting
    date information only.

    Raises `DateTimeFormatterException` if the extraction fails.
    """
    # Reject None/empty input up front with a clear message.
    # (Fixed grammar: "must a valid" -> "must be a valid".)
    if not date_str:
        raise DateTimeFormatterException('date_str must be a valid string {}.'.format(date_str))
    try:
        return cls._extract_timestamp(date_str, cls.DATE_FORMAT)
    except (TypeError, ValueError):
        raise DateTimeFormatterException('Invalid date string {}.'.format(date_str))
|
python
|
def diff(self, container):
    """
    Inspect changes on a container's filesystem.

    Args:
        container (str): The container to diff

    Returns:
        (str)

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    url = self._url("/containers/{0}/changes", container)
    return self._result(self._get(url), True)
|
java
|
/**
 * Advances this node's current term.
 *
 * <p>Only strictly larger terms are accepted; moving to a new term clears the
 * known leader and the recorded vote, then persists the term and the cleared
 * vote through {@code meta}.
 *
 * @param term candidate term value
 */
public void setTerm(long term) {
    if (term <= this.term) {
        return;
    }
    this.term = term;
    this.leader = null;
    this.lastVotedFor = null;
    meta.storeTerm(this.term);
    meta.storeVote(this.lastVotedFor);
    log.debug("Set term {}", term);
}
|
python
|
def set_transfer_spec(self):
    ''' run the function to set the transfer spec on error set associated exception '''
    try:
        self._args.transfer_spec_func(self._args)
    except Exception as ex:
        # Record the failure as an AsperaTransferSpecError and report it.
        self.notify_exception(AsperaTransferSpecError(ex), False)
        return False
    return True
|
java
|
/**
 * Returns the Unicode code point at the given index of the char array,
 * combining a valid high/low surrogate pair into a supplementary code point.
 *
 * <p>If the char at {@code index} is an unpaired surrogate (or a high
 * surrogate that is the last element), that char value itself is returned.
 *
 * @param text  the character array
 * @param index position of the (first) code unit
 * @return the code point starting at {@code index}
 * @throws ArrayIndexOutOfBoundsException if {@code index} is out of range
 */
public static final int codePointAt(char[] text, int index) {
    // Identical semantics to the hand-rolled surrogate-pair check, via the JDK.
    return Character.codePointAt(text, index);
}
|
java
|
/**
 * Creates a new buffer whose content is a copy of the readable bytes of
 * {@code buffer}; the source buffer's reader and writer indexes are untouched.
 *
 * @param buffer source buffer
 * @return a copy of the readable region, or the shared empty buffer when
 *         there is nothing to read
 */
public static ByteBuf copiedBuffer(ByteBuf buffer) {
    int length = buffer.readableBytes();
    if (length == 0) {
        return EMPTY_BUFFER;
    }
    ByteBuf copy = buffer(length);
    copy.writeBytes(buffer, buffer.readerIndex(), length);
    return copy;
}
|
java
|
/**
 * Creates a system property via the REST API.
 *
 * @param property the property to create
 * @return the REST response
 */
public Response createSystemProperty(SystemProperty property) {
    final HashMap<String, String> noHeaders = new HashMap<String, String>();
    return restClient.post("system/properties", property, noHeaders);
}
|
java
|
/**
 * Restores the saved values of nested variables into the invoking JSP context.
 *
 * <p>For each saved nested variable name (after alias resolution), the
 * original value is put back; names that had no saved value are removed from
 * page scope.
 */
private void restoreNestedVariables() {
    if (nestedVars == null) {
        return;
    }
    // nestedVars is a raw collection of String names; an enhanced loop over
    // Object replaces the raw Iterator idiom.
    for (Object name : nestedVars) {
        String varName = findAlias((String) name);
        Object obj = originalNestedVars.get(varName);
        if (obj != null) {
            invokingJspCtxt.setAttribute(varName, obj);
        } else {
            invokingJspCtxt.removeAttribute(varName, PAGE_SCOPE);
        }
    }
}
|
java
|
/**
 * Lists the endpoint groups for the given request.
 *
 * @param request the list request, pre-processed before execution
 * @return the service result
 */
@Override
public ListEndpointGroupsResult listEndpointGroups(ListEndpointGroupsRequest request) {
    final ListEndpointGroupsRequest prepared = beforeClientExecution(request);
    return executeListEndpointGroups(prepared);
}
|
java
|
/**
 * Writes an X.509 certificate to the given stream in the requested format and
 * then closes the stream.
 *
 * <p>For {@code PEM} the DER bytes are base64-encoded (chunked) between the
 * BEGIN/END certificate markers; any other format writes the raw DER encoding.
 *
 * <p>NOTE(review): the caller-supplied stream is closed here, and it is NOT
 * closed if encoding or writing throws — confirm callers expect both behaviors.
 *
 * @param certificate  the certificate to write
 * @param outputStream destination stream (closed on success)
 * @param fileFormat   output format selector
 * @throws IOException                  on write failure
 * @throws CertificateEncodingException if the certificate cannot be encoded
 */
public static void write(final X509Certificate certificate,
    final @NonNull OutputStream outputStream, KeyFileFormat fileFormat)
    throws IOException, CertificateEncodingException
{
    final byte[] certificateBytes = certificate.getEncoded();
    switch (fileFormat)
    {
        case PEM :
            outputStream.write(
                CertificateReader.BEGIN_CERTIFICATE_PREFIX.getBytes(StandardCharsets.US_ASCII));
            // second argument 'true' requests chunked base64 output
            outputStream.write(Base64.encodeBase64(certificateBytes, true));
            outputStream.write(
                CertificateReader.END_CERTIFICATE_SUFFIX.getBytes(StandardCharsets.US_ASCII));
            break;
        default :
            // DER (raw encoded bytes)
            outputStream.write(certificateBytes);
            break;
    }
    outputStream.close();
}
|
java
|
/**
 * Returns the ViewHolder for the item with the given stable id, checking the
 * attached children first and falling back to the recycler's bookkeeping.
 *
 * @param id stable item id
 * @return the matching ViewHolder, or the recycler's lookup result
 */
public ViewHolder findViewHolderForItemId(long id) {
    for (int index = 0, count = getChildCount(); index < count; index++) {
        final ViewHolder holder = getChildViewHolderInt(getChildAt(index));
        if (holder != null && holder.getItemId() == id) {
            return holder;
        }
    }
    // Not among the attached children; ask the recycler.
    return mRecycler.findViewHolderForItemId(id);
}
|
python
|
def del_variables(self, variables):
    """
    Deletes variables from the NoisyOrModel.

    Parameters
    ----------
    variables: list, tuple, dict (array like)
        list of variables to be deleted.

    Examples
    --------
    >>> from pgmpy.models import NoisyOrModel
    >>> model = NoisyOrModel(['x1', 'x2', 'x3'], [2, 3, 2], [[0.6, 0.4],
    ...                                                      [0.2, 0.4, 0.7],
    ...                                                      [0.1, 0.4]])
    >>> model.del_variables(['x1'])
    """
    # A lone string names one variable; anything else is a collection.
    doomed = ([variables] if isinstance(variables, six.string_types)
              else set(variables))
    indices = [pos for pos, name in enumerate(self.variables) if name in doomed]
    self.variables = np.delete(self.variables, indices, 0)
    self.cardinality = np.delete(self.cardinality, indices, 0)
    self.inhibitor_probability = [
        probs for pos, probs in enumerate(self.inhibitor_probability)
        if pos not in indices
    ]
|
python
|
def remove_layer(svg_source, layer_name):
    '''
    Remove layer(s) from SVG document.

    Arguments
    ---------
    svg_source : str or file-like
        A file path, URI, or file-like object.
    layer_name : str or list
        Layer name or list of layer names to remove from SVG document.

    Returns
    -------
    StringIO.StringIO
        File-like object containing XML document with layer(s) removed.
    '''
    # Parse input file.
    xml_root = lxml.etree.parse(svg_source)
    svg_root = xml_root.xpath('/svg:svg', namespaces=INKSCAPE_NSMAP)[0]

    # Normalise a single name to a one-element list.
    if isinstance(layer_name, str):
        layer_name = [layer_name]

    for layer_name_i in layer_name:
        # Remove existing layer from source, in-memory XML (source file remains
        # unmodified).  Inkscape layers are <g> elements carrying an
        # inkscape:label attribute.
        layer_xpath = '//svg:g[@inkscape:label="%s"]' % layer_name_i
        layer_groups = svg_root.xpath(layer_xpath, namespaces=INKSCAPE_NSMAP)

        if layer_groups:
            for g in layer_groups:
                g.getparent().remove(g)

    # Write result to `StringIO`.
    output = StringIO.StringIO()
    xml_root.write(output)
    output.seek(0)
    return output
|
java
|
/**
 * CDI producer for a {@code RemoteCache}, resolved from the injection point.
 *
 * <p>The cache manager is selected by the injection point's qualifiers; when a
 * {@code @Remote} annotation with a non-empty value is present, that named
 * cache is returned, otherwise the manager's default cache.
 *
 * @param injectionPoint the CDI injection point being satisfied
 * @return the remote cache to inject
 */
@Remote
@Produces
public <K, V> RemoteCache<K, V> getRemoteCache(InjectionPoint injectionPoint) {
    final Set<Annotation> qualifiers = injectionPoint.getQualifiers();
    final Annotation[] qualifierArray = qualifiers.toArray(new Annotation[0]);
    final RemoteCacheManager cacheManager = getRemoteCacheManager(qualifierArray);
    final Remote remote = getRemoteAnnotation(injectionPoint.getAnnotated());
    final boolean named = remote != null && !remote.value().isEmpty();
    return named ? cacheManager.getCache(remote.value()) : cacheManager.getCache();
}
|
python
|
def add_widget(self, widget):
    """
    Add a Widget as a managed child of this Widget.

    The child is re-parented to this widget and the child layout is
    refreshed, which by default positions and sizes it to fill the entire
    space inside this Widget (unless _update_child_widgets is redefined).

    Parameters
    ----------
    widget : instance of Widget
        The widget to add.

    Returns
    -------
    widget : instance of Widget
        The widget.
    """
    child = widget
    self._widgets.append(child)
    child.parent = self
    self._update_child_widgets()
    return child
|
python
|
def close(self):
    """Close the connection to memcached, if it is open. The next call to a
    method that requires a connection will re-open it."""
    sock, self.sock = self.sock, None
    if sock is None:
        return
    try:
        sock.close()
    except Exception:
        # Best effort: the socket may already be dead.
        pass
|
java
|
/**
 * Pulls the next packet out of the stream, reading in additional pages as
 * needed.
 *
 * @return true once a packet is available, false when the stream runs out of
 *         pages before a complete packet could be read
 * @throws IOException on read failure
 */
protected boolean readPacket ()
    throws IOException
{
    for (;;) {
        int result = _stream.packetout(_packet);
        if (result == 1) {
            return true;
        }
        // result == 0 means more data is needed; any other non-1 result
        // simply retries the extraction.
        if (result == 0 && !readPage()) {
            return false;
        }
    }
}
|
python
|
def reverse_shortlex(end, other, excludeend=False):
    """Yield all intersections of end with other in reverse shortlex order.

    >>> ['{:03b}'.format(s) for s in reverse_shortlex(0b111, [0b011, 0b101, 0b110])]
    ['111', '011', '101', '110', '001', '010', '100', '000']

    >>> assert list(reverse_shortlex({1, 2}, [{1}, {2}], excludeend=True)) == \
        [{1}, {2}, set()]
    """
    if not excludeend:
        yield end
    # Breadth-first over (base, remaining) states: results appear grouped by
    # how many elements of `other` have been folded into the intersection.
    pending = collections.deque([(end, other)])
    while pending:
        base, rest = pending.popleft()
        for pos, item in enumerate(rest):
            meet = base & item
            yield meet
            tail = rest[pos + 1:]
            if tail:
                pending.append((meet, tail))
|
python
|
def get_config_section(self, name):
    """
    Return the (key, value) items of section ``name`` from the parsed
    configuration, or an empty list when the section does not exist.
    """
    if not self.config.has_section(name):
        return []
    return self.config.items(name)
|
python
|
def convert_dict_to_params(src_dict):
    """ convert dict to params string

    Args:
        src_dict (dict): source mapping data structure

    Returns:
        str: string params data

    Examples:
        >>> src_dict = {
            "a": 1,
            "b": 2
        }
        >>> convert_dict_to_params(src_dict)
        >>> "a=1&b=2"

    """
    # Join key=value pairs in the dict's iteration order.
    return "&".join("{}={}".format(key, value)
                    for key, value in src_dict.items())
|
python
|
def fqn(self):
    ''' Returns a fully qualified name for this object '''
    # <class key>:<object id>
    return '{}:{}'.format(type(self).cls_key(), self.id)
|
java
|
/**
 * Removes every classification (trait) edge attached to the given entity
 * vertex.
 *
 * @param instanceVertex entity vertex whose traits are deleted
 * @throws AtlasBaseException if an edge reference cannot be deleted
 */
private void deleteAllTraits(AtlasVertex instanceVertex) throws AtlasBaseException {
    List<String> traitNames = GraphHelper.getTraitNames(instanceVertex);
    LOG.debug("Deleting traits {} for {}", traitNames, string(instanceVertex));

    String typeName = GraphHelper.getTypeName(instanceVertex);
    for (String traitName : traitNames) {
        // Each trait edge is labelled from the entity type plus trait name.
        String relationshipLabel = GraphHelper.getTraitLabel(typeName, traitName);
        deleteEdgeReference(instanceVertex, relationshipLabel, TypeCategory.CLASSIFICATION, false);
    }
}
|
java
|
/**
 * Returns the deprecation reason attached to the given type, walking up the
 * implicit prototype chain until a reason is found.
 *
 * @param type the type to inspect (may be null)
 * @return the deprecation reason, or null if the type is not deprecated
 */
private static String getTypeDeprecationInfo(JSType type) {
    // Iterative walk instead of recursion over the prototype chain.
    for (JSType current = type; current != null; ) {
        String reason = getDeprecationReason(current.getJSDocInfo());
        if (reason != null) {
            return reason;
        }
        ObjectType objType = castToObject(current);
        current = (objType != null) ? objType.getImplicitPrototype() : null;
    }
    return null;
}
|
python
|
def array_convert_function(sshape_one, sshape_two, variables):
    """ Return a function defining the conversion process between two NumPy
    arrays of different shapes """
    # Normalise scalar shape specs to 1-tuples.
    if not isinstance(sshape_one, tuple): sshape_one = (sshape_one,)
    if not isinstance(sshape_two, tuple): sshape_two = (sshape_two,)

    # Expand each dimension spec: strings are symbolic expressions that are
    # broken into their constituent names/numbers, then the specs flattened.
    s_one = flatten([eval_expr_names_and_nrs(d) if isinstance(d,str) else d
        for d in sshape_one])
    s_two = flatten([eval_expr_names_and_nrs(d) if isinstance(d,str) else d
        for d in sshape_two])

    # Both flattened shapes must describe the same number of axes.
    # NOTE: Python 2 raise syntax -- this module targets Python 2.
    if len(s_one) != len(s_two):
        raise ValueError, ('Flattened shapes %s and %s '\
            'do not have the same length. '
            'Original shapes were %s and %s') % \
                (s_one, s_two, sshape_one, sshape_two)

    # Reason about the transpose
    # (position of each target axis within the source axis list).
    t_idx = tuple([s_one.index(v) for v in s_two])
    # Figure out the actual numeric shape values to use
    n_one = shape_from_str_tuple(s_one, variables)
    n_two = [eval_expr(d,variables)
        if isinstance(d,str) else d for d in sshape_two]

    # Reshape to the flattened source shape, transpose axes into target
    # order, then reshape to the target shape.
    def f(ary): return np.reshape(ary, n_one).transpose(t_idx).reshape(n_two)

    return f
|
python
|
def find_library(series_path):
    """Search for the location of a series within the library.

    :param str series_path: name of the relative path of the series
    :returns: library path
    :rtype: str
    """
    # First look for the full relative path under each configured location.
    for location in cfg.CONF.libraries:
        if os.path.isdir(os.path.join(location, series_path)):
            return location
    # already tried the full path; now walk down the path, checking every
    # configured location at each step (the original checked only the last
    # loop variable, and raised NameError when no libraries were configured)
    segments = series_path.split(os.sep)[:-1]
    while segments:
        seg_path = os.path.join(*segments)
        for location in cfg.CONF.libraries:
            # if the directory exists then we found our location
            if os.path.isdir(os.path.join(location, seg_path)):
                return location
        # remove the last element and try again
        segments = segments[:-1]
    return cfg.CONF.default_library
|
python
|
def _value_to_python(value):
    """Converts a google.protobuf.Value to a native Python object."""
    assert isinstance(value, struct_pb2.Value)
    kind = value.WhichOneof('kind')
    if kind == 'number_value':
        return value.number_value
    if kind == 'string_value':
        return value.string_value
    if kind == 'bool_value':
        return value.bool_value
    # struct/list/null (or unset) kinds are not supported here.
    raise ValueError('Unknown struct_pb2.Value oneof field set: %s' % kind)
|
java
|
/**
 * Resets the given feature of this object to its default value; features not
 * handled here are delegated to the superclass (EMF generated-style accessor).
 *
 * @param featureID identifier of the feature to unset
 */
@Override
public void eUnset(int featureID) {
    switch (featureID) {
        case AfplibPackage.ERS__RS_NAME:
            // Restore the RS name to its generated default.
            setRSName(RS_NAME_EDEFAULT);
            return;
    }
    super.eUnset(featureID);
}
|
java
|
/**
 * Returns the requested annotation from the control's public interface, or
 * null when the annotation is not present.
 *
 * @param annotationClass annotation type to look up
 * @return the annotation instance, or null if absent
 */
public <T extends Annotation> T
getControlAnnotation(Class<T> annotationClass)
{
    // Wildcard instead of a raw Class; getAnnotation(Class<T>) then returns T
    // directly, so the unchecked cast disappears.
    Class<?> controlInterface = getControlType();
    return controlInterface.getAnnotation(annotationClass);
}
|
python
|
def do_gen(argdict):
    '''Generate the whole site.'''
    # Build the site object from the parsed command-line arguments.
    site = make_site_obj(argdict)
    try:
        # Time the full generation run.  (Python 2 print statements.)
        st = time.time()
        site.generate()
        et = time.time()
        print "Generated Site in %f seconds."% (et-st)
    except ValueError as e:  # pragma: no cover
        print "Cannot generate. You are not within a simplystatic \
tree and you didn't specify a directory."
|
python
|
def mixin_params(self, params):
    """
    Merge in the MdsolAttribute for the passed parameter

    :param dict params: dictionary of object parameters
    :raises AttributeError: when ``params`` is not a dict
    """
    if not isinstance(params, dict):
        raise AttributeError("Cannot mixin to object of type {}".format(type(params)))
    # Each attribute contributes one tag -> value entry.
    for attribute in self.attributes:
        params[attribute.tag] = attribute.value
|
java
|
/**
 * Switches control of input between the human player and the AI agent.
 *
 * <p>Updates the mission command handler's overriding flag, the mouse hook,
 * the FML mouse-grab property, grabs/ungrabs the Minecraft mouse cursor to
 * match, and reports the new mode on the in-game overlay.
 *
 * @param input the controlling party (AI or HUMAN)
 */
public void setInputType(InputType input)
{
    // AI control means our command handler overrides normal input handling.
    if (this.stateMachine.currentMissionBehaviour() != null && this.stateMachine.currentMissionBehaviour().commandHandler != null)
        this.stateMachine.currentMissionBehaviour().commandHandler.setOverriding(input == InputType.AI);

    if (this.mouseHook != null)
        this.mouseHook.isOverriding = (input == InputType.AI);

    // This stops Minecraft from doing the annoying thing of stealing your mouse.
    System.setProperty("fml.noGrab", input == InputType.AI ? "true" : "false");

    inputType = input;
    if (input == InputType.HUMAN)
    {
        Minecraft.getMinecraft().mouseHelper.grabMouseCursor();
    }
    else
    {
        Minecraft.getMinecraft().mouseHelper.ungrabMouseCursor();
    }

    // Show the current mouse owner on the HUD.
    this.stateMachine.getScreenHelper().addFragment("Mouse: " + input, TextCategory.TXT_INFO, INFO_MOUSE_CONTROL);
}
|
python
|
def _parse_memory_embedded_health(self, data):
    """Parse the get_host_health_data() for essential properties

    :param data: the output returned by get_host_health_data()
    :returns: memory size in MB.
    :raises IloError, if unable to get the memory details.
    """
    memory = self._get_memory_details_value_based_on_model(data)
    if memory is None:
        raise exception.IloError("Unable to get memory data. Error: Data missing")

    total_memory_size = 0
    for memory_item in memory:
        memsize = memory_item[self.MEMORY_SIZE_TAG]["VALUE"]
        # Skip slots reporting "not present".
        if memsize == self.MEMORY_SIZE_NOT_PRESENT_TAG:
            continue
        # e.g. "8192 MB" -> bytes -> whole megabytes.
        memory_bytes = strutils.string_to_bytes(
            memsize.replace(' ', ''), return_int=True)
        total_memory_size += int(memory_bytes / (1024 * 1024))
    return total_memory_size
|
python
|
def _redact_secret(
data: Union[Dict, List],
) -> Union[Dict, List]:
""" Modify `data` in-place and replace keys named `secret`. """
if isinstance(data, dict):
stack = [data]
else:
stack = []
while stack:
current = stack.pop()
if 'secret' in current:
current['secret'] = '<redacted>'
else:
stack.extend(
value
for value in current.values()
if isinstance(value, dict)
)
return data
|
java
|
/**
 * Resolves the project (folder) name for a component.
 *
 * @param componentType numeric component type, mapped via ComponentEnum
 * @param groupId       maven groupId of the component
 * @param artifactId    maven artifactId of the component
 * @return the project folder name, or null for unmapped component types
 */
public static String getComponentProjectName(int componentType, String groupId, String artifactId) {
    IModel model = ModelFactory.newModel(groupId, artifactId, null, null, MuleVersionEnum.MAIN_MULE_VERSION, null, null);
    switch (ComponentEnum.get(componentType)) {
    case INTEGRATION_COMPONENT:
        return model.getIntegrationComponentProject();
    case INTEGRATION_TESTSTUBS_COMPONENT:
        return model.getTeststubStandaloneProject();
    case SD_SCHEMA_COMPONENT:
        return model.getSchemaProject();
    default:
        // Unhandled component types yield no project name.
        return null;
    }
}
|
java
|
/**
 * Adds a single property value to the given resource.
 *
 * <p>Convenience overload delegating to the five-argument variant with a
 * {@code false} final flag (presumably the non-relaxed/default mode — confirm
 * against the full overload's definition).
 *
 * @param resource   the Fedora resource to update
 * @param predicate  RDF predicate of the property
 * @param value      RDF value to add
 * @param namespaces prefix-to-URI map used during the update
 * @throws RepositoryException if the underlying repository update fails
 */
public void addProperty(final FedoraResource resource,
                        final org.apache.jena.rdf.model.Property predicate,
                        final RDFNode value,
                        final Map<String, String> namespaces) throws RepositoryException {
    addProperty(resource, predicate, value, namespaces, false);
}
|
java
|
/**
 * Reports whether the given feature currently differs from its default value
 * (EMF generated-style accessor); unknown features are delegated to the
 * superclass.
 *
 * @param featureID identifier of the feature to test
 * @return true when the feature holds a non-default value
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case AfplibPackage.CPIRG__GCGID:
            return GCGID_EDEFAULT == null ? gcgid != null : !GCGID_EDEFAULT.equals(gcgid);
        case AfplibPackage.CPIRG__PRT_FLAGS:
            return PRT_FLAGS_EDEFAULT == null ? prtFlags != null : !PRT_FLAGS_EDEFAULT.equals(prtFlags);
        case AfplibPackage.CPIRG__CODE_POINT:
            return CODE_POINT_EDEFAULT == null ? codePoint != null : !CODE_POINT_EDEFAULT.equals(codePoint);
        case AfplibPackage.CPIRG__COUNT:
            return COUNT_EDEFAULT == null ? count != null : !COUNT_EDEFAULT.equals(count);
    }
    return super.eIsSet(featureID);
}
|
python
|
def send_sticker(self, sticker: str, reply: Message=None, on_success: callable=None,
                 reply_markup: botapi.ReplyMarkup=None):
    """
    Send sticker to this peer.

    :param sticker: File path to sticker to send.
    :param reply: Message object.
    :param on_success: Callback to call when call is complete.
    :param reply_markup: Optional reply markup forwarded to the API call.
    :type reply: int or Message

    .. note:: NOTE(review): ``reply`` is passed straight through as
        ``reply_to_message_id`` without extracting an id — presumably the
        underlying ``twx.send_sticker`` accepts either form; confirm.
    """
    self.twx.send_sticker(peer=self, sticker=sticker, reply_to_message_id=reply, on_success=on_success,
                          reply_markup=reply_markup)
|
java
|
/**
 * Sets the typeahead header from an HC node by serializing it to HTML
 * (without namespaces); a null node clears the header.
 *
 * @param aHeader header node, may be null
 * @return this dataset
 */
@Nonnull
public TypeaheadDataset setHeader (@Nullable final IHCNode aHeader)
{
    if (aHeader == null)
        return setHeader ((String) null);
    return setHeader (HCRenderer.getAsHTMLStringWithoutNamespaces (aHeader));
}
|
java
|
/**
 * Parses a four-digit "HHmm" time string into a Date carrying only the
 * time-of-day fields (seconds and milliseconds zeroed).
 *
 * @param value time string, e.g. "0930"
 * @return the parsed time as a Date
 * @throws MPXJException if the string cannot be parsed
 */
private Date getTime(String value) throws MPXJException
{
    try
    {
        // First two digits are hours, next two are minutes.
        Number hours = m_twoDigitFormat.parse(value.substring(0, 2));
        Number minutes = m_twoDigitFormat.parse(value.substring(2, 4));

        // popCalendar() hands out a Calendar which is returned to DateHelper
        // via pushCalendar() once the Date has been extracted.
        Calendar cal = DateHelper.popCalendar();
        cal.set(Calendar.HOUR_OF_DAY, hours.intValue());
        cal.set(Calendar.MINUTE, minutes.intValue());
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        Date result = cal.getTime();
        DateHelper.pushCalendar(cal);

        return result;
    }
    catch (ParseException ex)
    {
        throw new MPXJException("Failed to parse time " + value, ex);
    }
}
|
python
|
def add_coupon(self, coupon, idempotency_key=None):
    """
    Add a coupon to a Customer.

    The coupon can be a Coupon object, or a valid Stripe Coupon ID.
    """
    # Accept either a model instance or a raw Stripe id.
    coupon_id = coupon.id if isinstance(coupon, StripeModel) else coupon
    stripe_customer = self.api_retrieve()
    stripe_customer["coupon"] = coupon_id
    stripe_customer.save(idempotency_key=idempotency_key)
    return self.__class__.sync_from_stripe_data(stripe_customer)
|
python
|
def genlmsg_valid_hdr(nlh, hdrlen):
    """Validate Generic Netlink message headers.

    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/genl.c#L117

    Verifies the integrity of the Netlink and Generic Netlink headers by enforcing the following requirements:
    - Valid Netlink message header (`nlmsg_valid_hdr()`)
    - Presence of a complete Generic Netlink header
    - At least `hdrlen` bytes of payload included after the generic Netlink header.

    Positional arguments:
    nlh -- Netlink message header (nlmsghdr class instance).
    hdrlen -- length of user header (integer).

    Returns:
    True if the headers are valid or False if not.
    """
    # The Netlink header must be valid and large enough to hold a complete
    # Generic Netlink header.
    if not nlmsg_valid_hdr(nlh, GENL_HDRLEN):
        return False
    # The Generic Netlink payload must cover at least `hdrlen` user bytes.
    ghdr = genlmsghdr(nlmsg_data(nlh))
    return genlmsg_len(ghdr) >= NLMSG_ALIGN(hdrlen)
|
python
|
def composition_prediction(self, composition, to_this_composition=True):
    """
    Returns charged balanced substitutions from a starting or ending
    composition.

    Args:
        composition:
            starting or ending composition
        to_this_composition:
            If true, substitutions with this as a final composition
            will be found. If false, substitutions with this as a
            starting composition will be found (these are slightly
            different)

    Returns:
        List of predictions in the form of dictionaries.
        If to_this_composition is true, the values of the dictionary
        will be from the list species. If false, the keys will be
        from that list.
    """
    # Predict substitutions for the species present in the composition.
    preds = self.list_prediction(list(composition.keys()),
                                 to_this_composition)
    output = []
    for p in preds:
        # Orient the mapping so its keys are this composition's species.
        if to_this_composition:
            subs = {v: k for k, v in p['substitutions'].items()}
        else:
            subs = p['substitutions']
        # Keep only charge-balanced candidates: total oxidation-state
        # weighted charge must be numerically zero.
        charge = 0
        for k, v in composition.items():
            charge += subs[k].oxi_state * v
        if abs(charge) < 1e-8:
            output.append(p)
    logging.info('{} charge balanced substitutions found'
                 .format(len(output)))
    return output
|
python
|
def __get_request_auth(username, password):
    '''
    Get libvirt.openAuth callback with username, password values overriding
    the configuration ones.
    '''

    # pylint: disable=unused-argument
    def __request_auth(credentials, user_data):
        '''Callback method passed to libvirt.openAuth().

        The credentials argument is a list of credentials that libvirt
        would like to request. An element of this list is a list containing
        5 items (4 inputs, 1 output):
          - the credential type, e.g. libvirt.VIR_CRED_AUTHNAME
          - a prompt to be displayed to the user
          - a challenge
          - a default result for the request
          - a place to store the actual result for the request

        The user_data argument is currently not set in the openAuth call.
        '''
        for credential in credentials:
            if credential[0] == libvirt.VIR_CRED_AUTHNAME:
                # Explicit username wins; otherwise fall back to config,
                # then to libvirt's own default (credential[3]).
                credential[4] = username if username else \
                    __salt__['config.get']('virt:connection:auth:username', credential[3])
            elif credential[0] == libvirt.VIR_CRED_NOECHOPROMPT:
                credential[4] = password if password else \
                    __salt__['config.get']('virt:connection:auth:password', credential[3])
            else:
                log.info('Unhandled credential type: %s', credential[0])
        return 0

    # The callback must be handed back to the caller; without this return the
    # function silently produced None and openAuth had nothing to invoke.
    return __request_auth
|
python
|
def imbtree(ntips, treeheight=1.0):
    """
    Return an imbalanced (comb-like) tree topology.
    """
    # Start with a cherry holding the first two tips.
    rtree = toytree.tree()
    rtree.treenode.add_child(name="0")
    rtree.treenode.add_child(name="1")

    # Repeatedly nest the current tree under a fresh root beside the next tip,
    # producing the comb shape.
    for tip in range(2, ntips):
        scaffold = toytree.tree()
        scaffold.treenode.add_child(name=str(tip))
        scaffold.treenode.add_child(rtree.treenode)
        rtree = scaffold

    # Rebuild from newick, make ultrametric, then scale to the target height.
    tre = toytree.tree(rtree.write(tree_format=9))
    tre = tre.mod.make_ultrametric()
    scaled = tre.mod.node_scale_root_height(treeheight)
    scaled._coords.update()
    return scaled
|
python
|
def setVisible(self, state):
    """
    Shows or hides this overlay widget; hiding it also resets the stored
    result to 0.
    """
    super(XOverlayWidget, self).setVisible(state)
    if state:
        return
    self.setResult(0)
|
python
|
def get_calendar_entries(context, year=None, month=None,
                         template='zinnia/tags/entries_calendar.html'):
    """
    Return an HTML calendar of entries.

    When ``year``/``month`` are not given, the displayed month is inferred
    from the template context (current day/week/month, the shown object's
    publication date, or today); previous/next months that actually contain
    entries are computed for navigation links.
    """
    if not (year and month):
        # Prefer an explicit day/week/month already present in the context.
        day_week_month = (context.get('day') or
                          context.get('week') or
                          context.get('month'))
        publication_date = getattr(context.get('object'),
                                   'publication_date', None)
        if day_week_month:
            current_month = day_week_month
        elif publication_date:
            if settings.USE_TZ:
                publication_date = timezone.localtime(publication_date)
            current_month = publication_date.date()
        else:
            # Fall back to today's date.
            today = timezone.now()
            if settings.USE_TZ:
                today = timezone.localtime(today)
            current_month = today.date()
        current_month = current_month.replace(day=1)
    else:
        current_month = date(year, month, 1)

    # First-of-month dates of every month that has published entries.
    dates = list(map(
        lambda x: settings.USE_TZ and timezone.localtime(x).date() or x.date(),
        Entry.published.datetimes('publication_date', 'month')))

    if current_month not in dates:
        dates.append(current_month)
        dates.sort()
    index = dates.index(current_month)

    # Neighbouring months with entries, for prev/next navigation.
    previous_month = index > 0 and dates[index - 1] or None
    next_month = index != len(dates) - 1 and dates[index + 1] or None
    calendar = Calendar()

    return {'template': template,
            'next_month': next_month,
            'previous_month': previous_month,
            'calendar': calendar.formatmonth(
                current_month.year,
                current_month.month,
                previous_month=previous_month,
                next_month=next_month)}
|
java
|
/**
 * Concatenates an array of MaybeSource instances into a single Flowable,
 * running them one after the other.
 *
 * <p>Fast paths: an empty array yields an empty Flowable, and a single
 * element is converted directly without the concat machinery.
 *
 * @param <T> the common value type
 * @param sources the array of MaybeSource instances, not null
 * @return the new Flowable instance
 * @throws NullPointerException if {@code sources} is null
 */
@BackpressureSupport(BackpressureKind.FULL)
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@SuppressWarnings("unchecked")
public static <T> Flowable<T> concatArray(MaybeSource<? extends T>... sources) {
    ObjectHelper.requireNonNull(sources, "sources is null");
    if (sources.length == 0) {
        return Flowable.empty();
    }
    if (sources.length == 1) {
        // Single source: just convert, no concatenation needed.
        return RxJavaPlugins.onAssembly(new MaybeToFlowable<T>((MaybeSource<T>)sources[0]));
    }
    return RxJavaPlugins.onAssembly(new MaybeConcatArray<T>(sources));
}
|
python
|
def call_spellchecker(cmd, input_text=None, encoding=None):
    """Call spell checker with arguments.

    :param cmd: command list used to start the spell checker process
    :param input_text: optional bytes buffer piped line-by-line to stdin
    :param encoding: encoding forwarded to the output helper
    :return: the spell checker process output
    """
    process = get_process(cmd)

    # A buffer has been provided
    if input_text is not None:
        for line in input_text.splitlines():
            # Hunspell truncates lines at `0x1fff` (at least on Windows this has been observed)
            # Avoid truncation by chunking the line on white space and inserting a new line to break it.
            offset = 0
            end = len(line)
            while True:
                chunk_end = offset + 0x1fff
                # Look for the last whitespace inside the chunk window; skip
                # the search when the rest of the line already fits.
                m = None if chunk_end >= end else RE_LAST_SPACE_IN_CHUNK.search(line, offset, chunk_end)
                if m:
                    # Break at that whitespace and resume just after it.
                    chunk_end = m.start(1)
                    chunk = line[offset:m.start(1)]
                    offset = m.end(1)
                else:
                    # No break point (or tail fits): take the window as-is.
                    chunk = line[offset:chunk_end]
                    offset = chunk_end
                # Avoid wasted calls to empty strings
                if chunk and not chunk.isspace():
                    process.stdin.write(chunk + b'\n')
                if offset >= end:
                    break

    return get_process_output(process, encoding)
|
java
|
/**
 * Returns a new DerInputStream over the next {@code len} bytes of this
 * stream's buffer, optionally consuming those bytes from this stream.
 *
 * @param len     number of bytes in the substream
 * @param do_skip whether to advance this stream past those bytes
 * @return a DerInputStream over the duplicated, truncated buffer
 * @throws IOException if duplicating, truncating or skipping fails
 */
public DerInputStream subStream(int len, boolean do_skip)
        throws IOException {
    final DerInputBuffer slice = buffer.dup();
    slice.truncate(len);
    if (do_skip) {
        // Also consume the bytes from the parent stream.
        buffer.skip(len);
    }
    return new DerInputStream(slice);
}
|
java
|
/**
 * Builds the Elasticsearch "top hits" aggregation used to return matching
 * documents alongside other aggregations.
 *
 * @param selectStatement the originating select statement (currently unused here)
 * @param size optional result size; null means "use the Elasticsearch default"
 * @param entityMetadata metadata of the queried entity (currently unused here)
 * @return the configured top-hits aggregation builder
 */
private TopHitsBuilder getTopHitsAggregation(SelectStatement selectStatement, Integer size,
    EntityMetadata entityMetadata)
{
    TopHitsBuilder builder = AggregationBuilders.topHits(ESConstants.TOP_HITS);
    // Only override the size when the caller supplied one.
    if (size != null)
    {
        builder.setSize(size);
    }
    return builder;
}
|
python
|
def get(self, request, pk=None):
    """Handle GET requests, resolving the optional forum primary key."""
    if pk:
        self.top_level_forum = get_object_or_404(Forum, pk=pk)
    else:
        self.top_level_forum = None
    return super().get(request, pk)
|
java
|
/**
 * Returns the download response for a JNLP resource, expanding the JNLP
 * template and caching the result keyed by request URL plus query string.
 * <p>
 * Fix: the template reader (and the underlying connection stream) was never
 * closed; it is now released in a finally block on every path.
 *
 * @param jnlpres the JNLP resource to serve
 * @param dreq the originating download request
 * @return the (possibly cached) download response
 * @throws IOException on any I/O failure while reading the template
 */
public synchronized DownloadResponse getJnlpFileEx( JnlpResource jnlpres, DownloadRequest dreq )
    throws IOException
{
    String path = jnlpres.getPath();
    URL resource = jnlpres.getResource();
    long lastModified = jnlpres.getLastModified();
    _log.addDebug( "lastModified: " + lastModified + " " + new Date( lastModified ) );
    if ( lastModified == 0 )
    {
        _log.addWarning( "servlet.log.warning.nolastmodified", path );
    }
    // fix for 4474854: use the request URL as key to look up jnlp file
    // in hash map
    String reqUrl = HttpUtils.getRequestURL( dreq.getHttpRequest() ).toString();
    // SQE: To support query string, we changed the hash key from Request URL to (Request URL + query string)
    if ( dreq.getQuery() != null )
    {
        reqUrl += dreq.getQuery();
    }
    // Check if entry already exist in HashMap
    JnlpFileEntry jnlpFile = (JnlpFileEntry) _jnlpFiles.get( reqUrl );
    if ( jnlpFile != null && jnlpFile.getLastModified() == lastModified )
    {
        // Entry found in cache, so return it
        return jnlpFile.getResponse();
    }
    // Read information from WAR file
    long timeStamp = lastModified;
    String mimeType = _servletContext.getMimeType( path );
    if ( mimeType == null )
    {
        mimeType = JNLP_MIME_TYPE;
    }
    StringBuilder jnlpFileTemplate = new StringBuilder();
    URLConnection conn = resource.openConnection();
    // Bug fix: close the reader (and therefore the connection stream) even
    // when reading throws; it previously leaked on every call.
    BufferedReader br = new BufferedReader( new InputStreamReader( conn.getInputStream(), "UTF-8" ) );
    try
    {
        String line = br.readLine();
        // Optional "TS:" first line carries an explicit template timestamp
        if ( line != null && line.startsWith( "TS:" ) )
        {
            timeStamp = parseTimeStamp( line.substring( 3 ) );
            _log.addDebug( "Timestamp: " + timeStamp + " " + new Date( timeStamp ) );
            if ( timeStamp == 0 )
            {
                _log.addWarning( "servlet.log.warning.notimestamp", path );
                timeStamp = lastModified;
            }
            line = br.readLine();
        }
        while ( line != null )
        {
            jnlpFileTemplate.append( line );
            line = br.readLine();
        }
    }
    finally
    {
        br.close();
    }
    String jnlpFileContent = specializeJnlpTemplate( dreq.getHttpRequest(), path, jnlpFileTemplate.toString() );
    /* SQE: We need to add query string back to href in jnlp file. We also need to handle JRE requirement for
     * the test. We reconstruct the xml DOM object, modify the value, then regenerate the jnlpFileContent.
     */
    String query = dreq.getQuery();
    String testJRE = dreq.getTestJRE();
    _log.addDebug( "Double check query string: " + query );
    // For backward compatibility: Always check if the href value exists.
    // Bug 4939273: We will retain the jnlp template structure and will NOT add href value. Above old
    // approach to always check href value caused some test case not run.
    if ( query != null )
    {
        byte[] cb = jnlpFileContent.getBytes( "UTF-8" );
        ByteArrayInputStream bis = new ByteArrayInputStream( cb );
        try
        {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            DocumentBuilder builder = factory.newDocumentBuilder();
            Document document = builder.parse( bis );
            if ( document != null && document.getNodeType() == Node.DOCUMENT_NODE )
            {
                boolean modified = false;
                Element root = document.getDocumentElement();
                if ( root.hasAttribute( "href" ) )
                {
                    String href = root.getAttribute( "href" );
                    root.setAttribute( "href", href + "?" + query );
                    modified = true;
                }
                // Update version value for j2se tag
                if ( testJRE != null )
                {
                    NodeList j2seNL = root.getElementsByTagName( "j2se" );
                    if ( j2seNL != null )
                    {
                        Element j2se = (Element) j2seNL.item( 0 );
                        String ver = j2se.getAttribute( "version" );
                        if ( ver.length() > 0 )
                        {
                            j2se.setAttribute( "version", testJRE );
                            modified = true;
                        }
                    }
                }
                _hook.preCommit( dreq, document );
                TransformerFactory tFactory = TransformerFactory.newInstance();
                Transformer transformer = tFactory.newTransformer();
                DOMSource source = new DOMSource( document );
                StringWriter sw = new StringWriter();
                StreamResult result = new StreamResult( sw );
                transformer.transform( source, result );
                jnlpFileContent = sw.toString();
                _log.addDebug( "Converted jnlpFileContent: " + jnlpFileContent );
                // Since we modified the file on the fly, we always update the timestamp value with current time
                if ( modified )
                {
                    timeStamp = new java.util.Date().getTime();
                    _log.addDebug( "Last modified on the fly: " + timeStamp );
                }
            }
        }
        catch ( Exception e )
        {
            // Best effort: on any parse/transform failure serve the
            // unmodified template content.
            _log.addDebug( e.toString(), e );
        }
    }
    // Convert to bytes as a UTF-8 encoding
    byte[] byteContent = jnlpFileContent.getBytes( "UTF-8" );
    // Create entry
    DownloadResponse resp =
        DownloadResponse.getFileDownloadResponse( byteContent, mimeType, timeStamp, jnlpres.getReturnVersionId() );
    jnlpFile = new JnlpFileEntry( resp, lastModified );
    _jnlpFiles.put( reqUrl, jnlpFile );
    return resp;
}
|
python
|
def from_meta(cls, meta: "DocstringMeta") -> T.Any:
    """Build a new instance carrying over another DocstringMeta's fields."""
    args = meta.args
    description = meta.description
    return cls(args=args, description=description)
|
java
|
/**
 * Returns every configuration key that starts with the given prefix.
 *
 * @param prefix the key prefix to filter on
 * @return the matching keys, collected into a list
 */
public List<String> getKeys(String prefix) {
    return IteratorUtils.toList(configuration.getKeys(prefix));
}
|
java
|
/**
 * Creates a bare graph node for a template item and stores the item's name
 * on it; no relationships are added here.
 *
 * @param graphDatabase the database to create the node in
 * @param itemInfo the template item whose name is persisted
 * @return the newly created node
 */
protected static org.neo4j.graphdb.Node createTemplateItemNode(
    final AbstractGraphDatabase graphDatabase,
    final TemplateItemInfo itemInfo) {
    final org.neo4j.graphdb.Node node = graphDatabase.createNode();
    node.setProperty(TemplateXMLFields.ITEM_NAME, itemInfo.getName());
    return node;
}
|
python
|
def create_router(self, context, router):
    """Creates a router on Arista Switch.

    Deals with multiple configurations - such as Router per VRF,
    a router in default VRF, Virtual Router in MLAG configurations.

    :param context: neutron request context (not used directly here)
    :param router: dict with at least 'id' and 'name' keys
    :raises AristaServicePluginRpcError: when creation fails on a switch
        (in MLAG setups only when it fails on both peers)
    """
    if router:
        router_name = self._arista_router_name(router['id'],
                                               router['name'])

        # Derive a deterministic route distinguisher modifier from the
        # router name (sha256 reduced to the 16-bit range 0..65535).
        hashed = hashlib.sha256(router_name.encode('utf-8'))
        rdm = str(int(hashed.hexdigest(), 16) % 65536)
        mlag_peer_failed = False
        for s in self._servers:
            try:
                self.create_router_on_eos(router_name, rdm, s)
                mlag_peer_failed = False
            except Exception:
                if self._mlag_configured and not mlag_peer_failed:
                    # In paired switch setups, it is OK to fail on one switch
                    mlag_peer_failed = True
                else:
                    msg = (_('Failed to create router %s on EOS') %
                           router_name)
                    LOG.exception(msg)
                    raise arista_exc.AristaServicePluginRpcError(msg=msg)
|
python
|
def join(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[None]:
    """Block until all items in the queue are processed.

    Returns an awaitable, which raises `tornado.util.TimeoutError` after a
    timeout.

    :param timeout: optional deadline passed through to the underlying
        event's ``wait``; ``None`` waits indefinitely
    """
    # _finished is set whenever the count of unfinished tasks drops to zero.
    return self._finished.wait(timeout)
|
java
|
/**
 * Builds a context menu item widget with the given id, caption and
 * description, wires all mouse/keyboard handlers and attaches it to this
 * root component.
 *
 * @param id the item id (also used to resolve the icon resource)
 * @param caption the visible caption
 * @param description shown as the widget title (tooltip)
 * @param contextMenuConnector connector supplying the icon and handler context
 * @return the fully wired widget
 */
private CmsContextMenuItemWidget createEmptyItemWidget(
    String id,
    String caption,
    String description,
    CmsContextMenuConnector contextMenuConnector) {

    CmsContextMenuItemWidget itemWidget = GWT.create(CmsContextMenuItemWidget.class);
    itemWidget.setId(id);
    itemWidget.setCaption(caption);
    itemWidget.setTitle(description);
    itemWidget.setIcon(contextMenuConnector.getConnection().getIcon(contextMenuConnector.getResourceUrl(id)));
    // A single handler instance serves every event type of the widget.
    CmsContextMenuItemWidgetHandler eventHandler = new CmsContextMenuItemWidgetHandler(itemWidget, contextMenuConnector);
    itemWidget.addClickHandler(eventHandler);
    itemWidget.addMouseOutHandler(eventHandler);
    itemWidget.addMouseOverHandler(eventHandler);
    itemWidget.addKeyUpHandler(eventHandler);
    itemWidget.setRootComponent(this);
    return itemWidget;
}
|
python
|
def optimize(self, x0, target):
    """Calculate an optimum argument of an objective function.

    Starting from ``x0``, repeatedly solves ``self.h(x, target) * step ==
    -self.g(x, target)`` (a Newton step) and applies it, stopping once the
    step norm drops below ``self.tol`` or after ``self.maxiter`` steps.
    """
    x = x0
    iteration = 0
    while iteration < self.maxiter:
        step = np.linalg.solve(self.h(x, target), -self.g(x, target))
        x = x + step
        if np.linalg.norm(step) < self.tol:
            break
        iteration += 1
    return x
|
python
|
def wms_vrt(wms_file, bounds=None, resolution=None):
    """Make a VRT XML document from a wms file.

    Parameters
    ----------
    wms_file : str
        The source wms file
    bounds : GeoVector, optional
        The requested footprint of the generated VRT; defaults to the
        source data window when omitted
    resolution : float, optional
        The requested resolution of the generated VRT

    Returns
    -------
    bytes
        An ascii-encoded string (an ElementTree detail)
    """
    from telluric import rasterization, constants
    wms_tree = ET.parse(wms_file)
    service = wms_tree.find(".//Service")
    if service is not None:
        service_name = service.attrib.get("name")
    else:
        raise ValueError("Service tag is required")
    # definition is based on https://www.gdal.org/frmt_wms.html
    if service_name == "VirtualEarth":
        left = find_and_convert_to_type(float, wms_tree, ".//DataWindow/UpperLeftX", -20037508.34)
        up = find_and_convert_to_type(float, wms_tree, ".//DataWindow/UpperLeftY", 20037508.34)
        right = find_and_convert_to_type(float, wms_tree, ".//DataWindow/LowerRightX", 20037508.34)
        bottom = find_and_convert_to_type(float, wms_tree, ".//DataWindow/LowerRightY", -20037508.34)
        upper_bound_zoom = find_and_convert_to_type(int, wms_tree, ".//DataWindow/TileLevel", 19)
        projection = find_and_convert_to_type(str, wms_tree, ".//Projection", "EPSG: 3857")
        blockx = find_and_convert_to_type(str, wms_tree, ".//BlockSizeX", 256)
        blocky = find_and_convert_to_type(str, wms_tree, ".//BlockSizeY", 256)
    else:
        left = find_and_convert_to_type(float, wms_tree, ".//DataWindow/UpperLeftX", -180.0)
        up = find_and_convert_to_type(float, wms_tree, ".//DataWindow/UpperLeftY", 90.0)
        right = find_and_convert_to_type(float, wms_tree, ".//DataWindow/LowerRightX", 180.0)
        bottom = find_and_convert_to_type(float, wms_tree, ".//DataWindow/LowerRightY", -90.0)
        upper_bound_zoom = find_and_convert_to_type(int, wms_tree, ".//DataWindow/TileLevel", 0)
        projection = find_and_convert_to_type(str, wms_tree, ".//Projection", "EPSG:4326")
        blockx = find_and_convert_to_type(str, wms_tree, ".//BlockSizeX", 1024)
        blocky = find_and_convert_to_type(str, wms_tree, ".//BlockSizeY", 1024)
    # Bug fix: convert the projection string exactly once here. The
    # VirtualEarth branch previously converted it a first time and the
    # resulting CRS object was then passed to CRS(init=...) again.
    projection = CRS(init=projection)
    bands_count = find_and_convert_to_type(int, wms_tree, ".//BandsCount", 3)
    data_type = find_and_convert_to_type(str, wms_tree, ".//DataType", "Byte")
    src_bounds = (left, bottom, right, up)
    # Bug fix: bounds is documented as optional, but the previous code
    # unconditionally called bounds.get_bounds(...) and crashed on None.
    if bounds is not None:
        bounds = bounds.get_bounds(crs=projection) or src_bounds
    else:
        bounds = src_bounds
    src_resolution = constants.MERCATOR_RESOLUTION_MAPPING[upper_bound_zoom]
    resolution = resolution or constants.MERCATOR_RESOLUTION_MAPPING[upper_bound_zoom]
    dst_width, dst_height, transform = rasterization.raster_data(bounds=bounds, dest_resolution=resolution)
    orig_width, orig_height, orig_transform = rasterization.raster_data(
        bounds=src_bounds, dest_resolution=src_resolution)
    src_window = from_bounds(*bounds, transform=orig_transform)
    vrt = BaseVRT(dst_width, dst_height, projection, transform)
    vrt.add_metadata(domain="IMAGE_STRUCTURE", items={"INTERLEAVE": "PIXEL"})
    if bands_count != 3:
        raise ValueError("We support currently on 3 bands WMS")
    for idx, band in enumerate(["RED", "GREEN", "BLUE"]):
        bidx = idx + 1
        band_element = vrt.add_band(data_type, bidx, band)
        dst_window = Window(0, 0, dst_width, dst_height)
        vrt.add_band_simplesource(band_element, bidx, data_type, False, os.path.abspath(wms_file),
                                  orig_width, orig_height, blockx, blocky,
                                  src_window, dst_window)
    return vrt
|
java
|
/**
 * Sets the XFT units value and, when notification is required, emits an
 * EMF SET notification carrying the old and new values.
 * (Standard EMF-generated accessor pattern; logic intentionally untouched.)
 *
 * @param newXftUnits the new value; may be {@code null}
 */
public void setXftUnits(Integer newXftUnits) {
    Integer oldXftUnits = xftUnits;
    xftUnits = newXftUnits;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FNC__XFT_UNITS, oldXftUnits, xftUnits));
}
|
java
|
/**
 * Entry point: writes all LOV entries to the file given as the first
 * argument, or to the default schemaLOV.csv location when no argument is
 * supplied.
 *
 * @param args optional; {@code args[0]} is the target file path
 */
public static void main(String[] args) {
    LOVEndpoint lov = new LOVEndpoint();
    // Bug fix: the guard checked args.length > 0 but then read args[1],
    // throwing ArrayIndexOutOfBoundsException when exactly one argument
    // was given; the intended element is args[0].
    String file = args.length > 0 ? args[0].trim() : "rdfunit-model/src/main/resources/org/aksw/rdfunit/configuration/schemaLOV.csv";
    lov.writeAllLOVEntriesToFile(file);
}
|
python
|
def almost_equal_elem(self,other,tol,relative=True):
    """
    Compare whether two array types are almost equal, element
    by element.

    If the 'relative' parameter is 'True' (the default) then the
    'tol' parameter (which must be positive) is interpreted as a
    relative tolerance, and the comparison returns 'True' only if
    abs(self[i]-other[i]) <= tol*abs(self[i])
    for all elements of the array.

    If 'relative' is 'False', then 'tol' is an absolute tolerance,
    and the comparison is true only if
    abs(self[i]-other[i]) <= tol
    for all elements of the array.

    Other meta-data (type, dtype, and length) must be exactly equal.
    If either object's memory lives on the GPU it will be copied to
    the CPU for the comparison, which may be slow. But the original
    object itself will not have its memory relocated nor scheme
    changed.

    Parameters
    ----------
    other
        Another Python object, that should be tested for
        almost-equality with 'self', element-by-element.
    tol
        A non-negative number, the tolerance, which is interpreted
        as either a relative tolerance (the default) or an absolute
        tolerance.
    relative
        A boolean, indicating whether 'tol' should be interpreted
        as a relative tolerance (if True, the default if this argument
        is omitted) or as an absolute tolerance (if tol is False).

    Returns
    -------
    boolean
        'True' if the data agree within the tolerance, as
        interpreted by the 'relative' keyword, and if the types,
        lengths, and dtypes are exactly the same.
    """
    # Check that the tolerance is non-negative and raise an
    # exception otherwise.
    if (tol<0):
        raise ValueError("Tolerance cannot be negative")
    # Check that the meta-data agree; the type check is written in
    # this way so that this method may be safely called from
    # subclasses as well.
    if type(other) != type(self):
        return False
    if self.dtype != other.dtype:
        return False
    if len(self) != len(other):
        return False
    # The numpy() method will move any GPU memory onto the CPU.
    # Slow, but the user was warned. Perf fix: call it once per object
    # instead of up to three times as before.
    self_np = self.numpy()
    other_np = other.numpy()
    diff = abs(self_np - other_np)
    if relative:
        cmpary = tol*abs(self_np)
    else:
        cmpary = tol*ones(len(self),dtype=self.dtype)
    return (diff<=cmpary).all()
|
python
|
def values_for_column(self, column_name, limit=10000):
    """Runs query against sqla to retrieve some
    sample values for the given column.

    :param column_name: name of a column belonging to this datasource
    :param limit: maximum number of distinct values to return
        (falsy disables the limit)
    :return: list of raw values, one per distinct row
    """
    cols = {col.column_name: col for col in self.columns}
    target_col = cols[column_name]
    tp = self.get_template_processor()

    qry = (
        select([target_col.get_sqla_col()])
        .select_from(self.get_from_clause(tp))
        .distinct()
    )
    if limit:
        qry = qry.limit(limit)

    if self.fetch_values_predicate:
        # Reuse the template processor created above; the original code
        # constructed a second, identical one here.
        qry = qry.where(tp.process_template(self.fetch_values_predicate))

    engine = self.database.get_sqla_engine()
    sql = '{}'.format(
        qry.compile(engine, compile_kwargs={'literal_binds': True}),
    )
    sql = self.mutate_query_from_config(sql)

    df = pd.read_sql_query(sql=sql, con=engine)
    return [row[0] for row in df.to_records(index=False)]
|
java
|
/**
 * Builds an optimized copy that keeps only plain {@code Condition}
 * instances (subclasses are dropped); if nothing remains, a single
 * {@code NullCondition} placeholder is added so the result is never empty.
 *
 * @return the optimized condition conjunction
 */
public AndCondition optimize() {
    AndCondition optimized = new AndCondition();
    for (Condition candidate : conditions) {
        // Keep exactly Condition instances; any subclass is filtered out.
        if (Condition.class.equals(candidate.getClass())) {
            optimized.getConditions().add(candidate);
        }
    }
    if (optimized.getConditions().isEmpty()) {
        optimized.getConditions().add(new NullCondition());
    }
    return optimized;
}
|
python
|
def map(cls, latitudes, longitudes, labels=None, colors=None, areas=None, **kwargs):
    """Return markers from columns of coordinates, labels, & colors.

    The areas column is not applicable to markers, but sets circle areas.
    All provided columns must have the same length as ``latitudes``.
    """
    assert len(latitudes) == len(longitudes)
    assert areas is None or hasattr(cls, '_has_radius'), "A " + cls.__name__ + " has no radius"
    inputs = [latitudes, longitudes]
    if labels is not None:
        assert len(labels) == len(latitudes)
        inputs.append(labels)
    else:
        # No labels supplied: pad with empty strings so zip() stays aligned.
        inputs.append(("",) * len(latitudes))
    if colors is not None:
        assert len(colors) == len(latitudes)
        inputs.append(colors)
    if areas is not None:
        assert len(areas) == len(latitudes)
        # Convert areas to radii. NOTE(review): sqrt(area)/pi is not the
        # geometric inverse of area = pi*r**2 (that would be sqrt(area/pi));
        # confirm whether this scaling is intentional before changing it.
        inputs.append(np.array(areas) ** 0.5 / math.pi)
    # One marker per row of the aligned columns.
    ms = [cls(*args, **kwargs) for args in zip(*inputs)]
    return Map(ms)
|
java
|
/**
 * Checks whether the file system backing the storage directory at the
 * given index actually enforces file locks.
 * <p>
 * Probe: take a first lock (reusing the directory's already-held lock if
 * present), then try a second lock on the same file. If either attempt
 * fails the OS is enforcing locks and {@code true} is returned; if the
 * second lock can be acquired while the first is held, locking is not
 * enforced and {@code false} is returned.
 *
 * @param idx index of the storage directory to probe
 * @return true if file locking is enforced on this directory
 * @throws IOException on I/O failure while locking or releasing
 */
public boolean isLockSupported(int idx) throws IOException {
    StorageDirectory sd = storageDirs.get(idx);
    FileLock firstLock = null;
    FileLock secondLock = null;
    try {
        firstLock = sd.lock;
        if(firstLock == null) {
            firstLock = sd.tryLock();
            if(firstLock == null)
                return true;
        }
        secondLock = sd.tryLock();
        if(secondLock == null)
            return true;
    } finally {
        // Release only locks acquired by this probe; the directory's own
        // pre-existing lock (sd.lock) must remain held.
        if(firstLock != null && firstLock != sd.lock) {
            firstLock.release();
            firstLock.channel().close();
        }
        if(secondLock != null) {
            secondLock.release();
            secondLock.channel().close();
        }
    }
    return false;
}
|
java
|
/**
 * Creates a user account on behalf of an administrator (POST only).
 * On success the admin is redirected back to the user listing page.
 *
 * @param req the Stapler request carrying the signup form fields
 * @param rsp the Stapler response used for rendering or redirect
 */
@RequirePOST
public void doCreateAccountByAdmin(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
    createAccountByAdmin(req, rsp, "addUser.jelly", "."); // send the user back to the listing page on success
}
|
java
|
/**
 * Updates the locally cached activity and pushes the new presence over the
 * websocket. A {@code null} name clears the activity.
 *
 * @param type the activity type
 * @param name the activity name, or {@code null} to clear the activity
 * @param streamingUrl optional streaming url; may be {@code null}
 */
private void updateActivity(ActivityType type, String name, String streamingUrl) {
    if (name == null) {
        activity = null;
    } else {
        // Simplification: the original had a redundant branch that passed an
        // explicit null streamingUrl, which is identical to passing it through.
        activity = new ActivityImpl(type, name, streamingUrl);
    }
    websocketAdapter.updateStatus();
}
|
java
|
/**
 * Prints the monetary amount by delegating to the formatter selected by
 * the amount's sign: zero, positive or negative.
 */
@Override
public void print(MoneyPrintContext context, Appendable appendable, BigMoney money) throws IOException {
    final MoneyFormatter selected;
    if (money.isZero()) {
        selected = whenZero;
    } else if (money.isPositive()) {
        selected = whenPositive;
    } else {
        selected = whenNegative;
    }
    selected.getPrinterParser().print(context, appendable, money);
}
|
java
|
/**
 * Processes a gossip response packet: merges the sender's view of online
 * nodes (info blocks and CPU usage) and offline nodes (sequence numbers)
 * into the local node registry.
 * <p>
 * Fixes: the "online" validation warning previously logged "offline"; an
 * unknown nodeID in the "offline" block previously returned from the whole
 * method, silently dropping all remaining offline entries.
 *
 * @param data gossip response containing optional "online"/"offline" blocks
 *             keyed by nodeID
 * @throws Exception on any processing failure
 */
protected void processGossipResponse(Tree data) throws Exception {

    // Debug
    if (debugHeartbeats) {
        String sender = data.get("sender", (String) null);
        logger.info("Gossip response received from \"" + sender + "\" node:\r\n" + data);
    }

    // Online / offline nodes in responnse
    Tree online = data.get("online");
    Tree offline = data.get("offline");

    // Process "online" block
    if (online != null) {
        for (Tree row : online) {

            // Get nodeID; our own entry is skipped (we know our own state)
            String nodeID = row.getName();
            if (this.nodeID.equals(nodeID)) {
                continue;
            }
            int size = row.size();
            if (!row.isEnumeration() || size < 1 || size > 3) {
                // Fixed log text: this is the "online" block (the original
                // message said "offline").
                logger.warn("Invalid \"online\" block: " + row);
                continue;
            }

            // Get parameters from input; valid layouts are
            // [info], [cpuSeq, cpu] or [info, cpuSeq, cpu]
            Tree info = null;
            long cpuSeq = 0;
            int cpu = 0;
            if (row.size() == 1) {
                info = row.get(0);
            } else if (row.size() == 2) {
                cpuSeq = row.get(0).asLong();
                cpu = row.get(1).asInteger();
            } else if (row.size() == 3) {
                info = row.get(0);
                cpuSeq = row.get(1).asLong();
                cpu = row.get(2).asInteger();
            } else {
                logger.warn("Invalid \"online\" block: " + row.toString(false));
                continue;
            }

            if (info != null) {
                // Update "info" block,
                // send updated, connected or reconnected event
                updateNodeInfo(nodeID, info);
            }
            if (cpuSeq > 0) {
                // We update our CPU info
                NodeDescriptor node = nodes.get(nodeID);
                if (node != null) {
                    node.writeLock.lock();
                    try {
                        node.updateCpu(cpuSeq, cpu);
                    } finally {
                        node.writeLock.unlock();
                    }
                }
            }
        }
    }

    // Process "offline" block
    if (offline != null) {
        for (Tree row : offline) {
            String nodeID = row.getName();
            NodeDescriptor node;
            if (this.nodeID.equals(nodeID)) {

                // Somebody thinks we are offline: bump our own sequence
                // number above theirs so the next gossip proves us alive.
                long seq = row.asLong();
                node = getDescriptor();
                node.writeLock.lock();
                try {
                    long newSeq = Math.max(node.seq, seq + 1);
                    if (node.seq < newSeq) {
                        node.seq = newSeq;
                        node.info.put("seq", newSeq);
                    }
                } finally {
                    node.writeLock.unlock();
                }
                continue;
            }
            node = nodes.get(nodeID);
            if (node == null) {
                // Unknown node: nothing to mark offline. Bug fix: the
                // original returned here and silently skipped all
                // remaining "offline" entries.
                continue;
            }
            if (!row.isPrimitive()) {
                logger.warn("Invalid \"offline\" block: " + row);
                continue;
            }

            // Mark node offline when the remote sequence number is newer
            boolean disconnected = false;
            node.writeLock.lock();
            try {
                long seq = row.asLong();
                if (node.seq < seq && node.markAsOffline(seq)) {

                    // We know it is online, so we change it to offline
                    // Remove remote actions and listeners
                    registry.removeActions(node.nodeID);
                    eventbus.removeListeners(node.nodeID);
                    writer.close(node.nodeID);
                    disconnected = true;
                }
            } finally {
                node.writeLock.unlock();
            }
            if (disconnected) {

                // Notify listeners (not unexpected disconnection)
                logger.info("Node \"" + node.nodeID + "\" disconnected.");
                broadcastNodeDisconnected(node.info, false);
            }
        }
    }
}
|
python
|
def _finalize_block_blob(self, sd, metadata, digest):
    # type: (SyncCopy, blobxfer.models.synccopy.Descriptor, dict,
    #        str) -> None
    """Finalize Block blob
    :param SyncCopy self: this
    :param blobxfer.models.synccopy.Descriptor sd: synccopy descriptor
    :param dict metadata: metadata dict
    :param str digest: md5 digest
    """
    # Commit the block list on the primary destination and then on every
    # replica target, if any.
    targets = [sd.dst_entity]
    if blobxfer.util.is_not_empty(sd.dst_entity.replica_targets):
        targets.extend(sd.dst_entity.replica_targets)
    for ase in targets:
        blobxfer.operations.azure.blob.block.put_block_list(
            ase, sd.last_block_num, digest, metadata)
|
python
|
def get_conditions(self):
    """
    get conditions through which the pod has passed
    :return: list of PodCondition enum or empty list
    """
    matched = []
    # keep only conditions that currently hold (status is the string 'True')
    for condition in self.get_status().conditions:
        if condition.status == 'True':
            matched.append(PodCondition.get_from_string(condition.type))
    return matched
|
python
|
def prefix(tokens, operator_table):
    """Match a prefix of an operator.

    Returns a TokenMatch on success, and None (same as the original's
    implicit fall-through) when no prefix operator matches.
    """
    operator, matched_tokens = operator_table.prefix.match(tokens)
    if not operator:
        return None
    return TokenMatch(operator, None, matched_tokens)
|
java
|
/**
 * Escapes the given URL, using the strict escaper when requested and the
 * lenient one otherwise.
 *
 * @param url the URL to escape
 * @param strict whether to apply strict escaping rules
 * @return the escaped URL
 */
public static String escape(final String url, final boolean strict) {
    if (strict) {
        return STRICT_ESCAPER.escape(url);
    }
    return ESCAPER.escape(url);
}
|
python
|
def pbkdf2_hex(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS,
               keylen=None, hashfunc=None):
    """Like :func:`pbkdf2_bin` but returns a hex encoded string.

    .. versionadded:: 0.9

    :param data: the data to derive.
    :param salt: the salt for the derivation.
    :param iterations: the number of iterations.
    :param keylen: the length of the resulting key. If not provided
                   the digest size will be used.
    :param hashfunc: the hash function to use. This can either be the
                     string name of a known hash function or a function
                     from the hashlib module. Defaults to sha1.
    """
    raw_key = pbkdf2_bin(data, salt, iterations, keylen, hashfunc)
    # Hex-encode the binary key and normalize to the native string type.
    return to_native(codecs.encode(raw_key, 'hex_codec'))
|
python
|
def pretty_list(rtlst, header, sortBy=0, borders=False):
    """Pretty list to fit the terminal, and add header

    NOTE: sorts ``rtlst`` in place before prepending the header.

    :param rtlst: rows as a list of tuples of strings
    :param header: heading row(s) as a list of tuples of strings
    :param sortBy: index of the column used for sorting the rows
    :param borders: render "|" separators between columns instead of spaces
    :return: the formatted table as a single string
    """
    if borders:
        _space = "|"
    else:
        _space = " "
    # Windows has a fat terminal border
    _spacelen = len(_space) * (len(header) - 1) + (10 if WINDOWS else 0)
    _croped = False
    # Sort correctly
    rtlst.sort(key=lambda x: x[sortBy])
    # Append tag
    rtlst = header + rtlst
    # Detect column's width
    colwidth = [max([len(y) for y in x]) for x in zip(*rtlst)]
    # Make text fit in box (if required)
    width = get_terminal_width()
    if conf.auto_crop_tables and width:
        width = width - _spacelen
        while sum(colwidth) > width:
            _croped = True
            # Needs to be cropped
            # i = index of the widest column
            i = colwidth.index(max(colwidth))
            # Lengths of every cell in that column
            row = [len(x[i]) for x in rtlst]
            # j = index of the longest cell in that column
            j = row.index(max(row))
            # Re-build column tuple with the edited element
            # (drop two characters and append "_" to mark truncation)
            t = list(rtlst[j])
            t[i] = t[i][:-2] + "_"
            rtlst[j] = tuple(t)
            # Update max size
            row[j] = len(t[i])
            colwidth[i] = max(row)
    if _croped:
        log_runtime.info("Table cropped to fit the terminal (conf.auto_crop_tables==True)")  # noqa: E501
    # Generate padding scheme
    fmt = _space.join(["%%-%ds" % x for x in colwidth])
    # Append separation line if needed
    if borders:
        rtlst.insert(1, tuple("-" * x for x in colwidth))
    # Compile
    rt = "\n".join(((fmt % x).strip() for x in rtlst))
    return rt
|
python
|
def show_graph(self, format='svg'):
    """
    Render this Pipeline as a DAG.

    Parameters
    ----------
    format : {'svg', 'png', 'jpeg'}
        Image format to render with. Default is 'svg'.
    """
    graph = self.to_simple_graph(AssetExists())
    if format == 'svg':
        return graph.svg
    if format == 'png':
        return graph.png
    if format == 'jpeg':
        return graph.jpeg
    # Unreachable in practice: the expect_element decorator on this method
    # validates `format` before we get here.
    raise AssertionError("Unknown graph format %r." % format)
|
java
|
/**
 * Records the number-of-distinct-values (NDV) statistic for a column,
 * creating the column's statistics map on first use.
 *
 * @param columnName the column to annotate
 * @param ndv the distinct value count; stored as a string
 * @return this instance, for call chaining
 */
public Statistics columnDistinctCount(String columnName, Long ndv) {
    this.columnStats
        .computeIfAbsent(columnName, column -> new HashMap<>())
        .put(DISTINCT_COUNT, String.valueOf(ndv));
    return this;
}
|
java
|
/**
 * Disables Swing double buffering on the RepaintManager responsible for
 * the given component (note: this affects the whole repaint manager, not
 * just {@code c}).
 *
 * @param c the component whose repaint manager is reconfigured
 */
public static void disableDoubleBuffering(Component c)
{
    RepaintManager.currentManager(c).setDoubleBufferingEnabled(false);
}
|
java
|
/**
 * Wraps the given set in a view that casts its elements to {@code T} on
 * access; a {@code null} input yields {@code null}.
 *
 * @param set the set to wrap; may be {@code null}
 * @return a casting view of the set, or {@code null}
 */
public static <T> Set<T> cast(Set<?> set) {
    if (set == null) {
        return null;
    }
    return new CastingSet<T>(set);
}
|
python
|
def form_invalid(self, form):
    """Override of CreateView method, logs invalid email form submissions.

    Adds a user-facing error message and delegates re-rendering of the
    invalid form back to the parent class.
    """
    LOGGER.debug("Invalid Email Form Submitted")
    messages.add_message(self.request, messages.ERROR, _("Invalid Email Address."))
    return super(EmailTermsView, self).form_invalid(form)
|
java
|
/**
 * Creates the toolbar shown on the "create new object" form, carrying the
 * hidden action/object/subaction fields plus save and cancel buttons.
 * The order of field/button additions determines their rendering order.
 *
 * @param aWPEC current web page execution context
 * @param aForm the surrounding form (available for overriding subclasses)
 * @param aSelectedObject the object being copied or edited, or null when
 *        creating from scratch
 * @return the populated toolbar
 */
@Nonnull
@OverrideOnDemand
protected TOOLBAR_TYPE createCreateToolbar (@Nonnull final WPECTYPE aWPEC,
                                            @Nonnull final FORM_TYPE aForm,
                                            @Nullable final DATATYPE aSelectedObject)
{
    final Locale aDisplayLocale = aWPEC.getDisplayLocale ();
    final TOOLBAR_TYPE aToolbar = createNewCreateToolbar (aWPEC);
    aToolbar.addHiddenField (CPageParam.PARAM_ACTION, CPageParam.ACTION_CREATE);
    if (aSelectedObject != null)
        aToolbar.addHiddenField (CPageParam.PARAM_OBJECT, aSelectedObject.getID ());
    aToolbar.addHiddenField (CPageParam.PARAM_SUBACTION, CPageParam.ACTION_SAVE);
    // Save button
    aToolbar.addSubmitButton (getCreateToolbarSubmitButtonText (aDisplayLocale), getCreateToolbarSubmitButtonIcon ());
    // Cancel button
    aToolbar.addButtonCancel (aDisplayLocale);
    // Callback for subclasses to customize the toolbar
    modifyCreateToolbar (aWPEC, aToolbar);
    return aToolbar;
}
|
python
|
async def get_version(self, tp, params):
    """
    Loads version from the stream / version database

    On a version-database miss the (tr, ver) record is consumed from the
    underlying stream at its current position and cached, so calls must
    happen in stream order.

    # TODO: instance vs. tp.
    :param tp: type to resolve the serialization version for
    :param params: type parameters used to build the TypeWrapper key
    :return: the version number, or TypeWrapper.ELEMENTARY_RES for
        unversioned types
    """
    tw = TypeWrapper(tp, params)
    if not tw.is_versioned():
        # self.registry.set_tr()
        return TypeWrapper.ELEMENTARY_RES

    # If not in the DB, load from archive at current position
    if not self.version_db.is_versioned(tw):
        tr = await load_uvarint(self.iobj)
        ver = await load_uvarint(self.iobj)
        self.version_db.set_version(tw, tr, ver)
    else:
        tr, ver = self.version_db.get_version(tw)

    # tr == 0 means no object id follows in the stream.
    # NOTE(review): presumably tr is a tracking flag -- confirm semantics.
    obj_id = None if tr == 0 else await load_uvarint(self.iobj)
    self.registry.set_tr(obj_id)
    return ver
|
python
|
def primary_dimensions(self):
    """Iterate over the primary dimension columns: dimension-role columns
    that have no parent column.
    """
    from ambry.valuetype.core import ROLE

    for column in self.columns:
        # Skip child columns and anything that is not a dimension.
        if column.parent or column.role != ROLE.DIMENSION:
            continue
        yield column
|
python
|
def _get_export_mgr(self):
    """
    Builds one export manager per collected disk.

    Note: returns a lazy generator -- no manager is instantiated (and
    ``self._collect()`` is not consumed) until it is iterated.

    Returns:
        (DiskExportManager): Handler for each disk
    """
    return (
        DiskExportManager.get_instance_by_type(
            dst=self._dst,
            disk=disk,
            do_compress=self._compress,
            *self._args,
            **self._kwargs
        ) for disk in self._collect()
    )
|
python
|
def _count_files_to_amber(tumor_counts, normal_counts, work_dir, data):
    """Converts tumor and normal counts from GATK CollectAllelicCounts into Amber format.

    :param tumor_counts: path to the tumor CollectAllelicCounts output
    :param normal_counts: path to the matched normal output; assumed to
        cover the same loci in the same order as the tumor file, since the
        two are zipped together line by line -- TODO confirm upstream
    :param work_dir: working directory; an "amber" subdirectory is created
    :param data: bcbio sample data dict (used for naming and transactions)
    :return: path to the generated <sample>.amber.baf file
    """
    amber_dir = utils.safe_makedir(os.path.join(work_dir, "amber"))
    out_file = os.path.join(amber_dir, "%s.amber.baf" % dd.get_sample_name(data))

    # Only regenerate when the output is older than the tumor input.
    if not utils.file_uptodate(out_file, tumor_counts):
        with file_transaction(data, out_file) as tx_out_file:
            with open(tumor_counts) as tumor_handle:
                with open(normal_counts) as normal_handle:
                    with open(tx_out_file, "w") as out_handle:
                        writer = csv.writer(out_handle, delimiter="\t")
                        writer.writerow(["Chromosome", "Position", "TumorBAF", "TumorModifiedBAF", "TumorDepth",
                                         "NormalBAF", "NormalModifiedBAF", "NormalDepth"])
                        # Lines before the "CONTIG..." header row are skipped;
                        # data rows are parsed into dicts keyed by that header.
                        header = None
                        for t, n in zip(tumor_handle, normal_handle):
                            if header is None and t.startswith("CONTIG"):
                                header = t.strip().split()
                            elif header is not None:
                                t_vals = dict(zip(header, t.strip().split()))
                                n_vals = dict(zip(header, n.strip().split()))
                                amber_line = _counts_to_amber(t_vals, n_vals)
                                if amber_line:
                                    writer.writerow(amber_line)
    return out_file
|
java
|
/**
 * Recursively converts an ADS tree into an equivalent ADT tree: leaves map
 * to hypothesis-state leaf nodes, inner nodes to symbol nodes whose
 * children are re-attached under the same outputs.
 *
 * @param node the ADS node to convert
 * @return the root of the converted ADT subtree (parent left unset)
 */
public static <S, I, O> ADTNode<S, I, O> buildFromADS(final ADSNode<S, I, O> node) {

    if (node.isLeaf()) {
        return new ADTLeafNode<>(null, node.getHypothesisState());
    }

    final ADTNode<S, I, O> symbolNode = new ADTSymbolNode<>(null, node.getSymbol());

    for (final Map.Entry<O, ADSNode<S, I, O>> childEntry : node.getChildren().entrySet()) {
        final ADTNode<S, I, O> convertedChild = buildFromADS(childEntry.getValue());
        convertedChild.setParent(symbolNode);
        symbolNode.getChildren().put(childEntry.getKey(), convertedChild);
    }

    return symbolNode;
}
|
java
|
/**
 * Returns the alert violations matching the given query parameters.
 *
 * @param queryParams filter parameters appended to the request
 * @return the collection of matching alert violations
 */
public Collection<AlertViolation> list(List<String> queryParams)
{
    return HTTP.GET("/v2/alerts_violations.json", null, queryParams, ALERT_VIOLATIONS).get();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.