language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def gradients_X(self, dL_dK, X, X2, target):
    """Derivative of the covariance matrix with respect to X.

    When ``X2`` is absent (``None``) or is literally the same array as
    ``X``, the kernel is evaluated symmetrically, so the diagonal terms of
    ``dL_dK`` contribute through ``dKdiag_dX``.

    :param dL_dK: gradient of the objective w.r.t. the covariance matrix.
    :param X: input locations.
    :param X2: optional second set of input locations (or None / X itself).
    :param target: gradient accumulator, updated by ``dKdiag_dX``.
    """
    # Use `is None` rather than `== None`: comparing an ndarray with `==`
    # broadcasts element-wise and does not produce a single boolean.
    if X2 is None or X2 is X:
        # Strided view of the diagonal: stepping through the flat buffer
        # with stride ncols+1 visits K[i, i] without copying the matrix.
        dL_dKdiag = dL_dK.flat[::dL_dK.shape[0] + 1]
        self.dKdiag_dX(dL_dKdiag, X, target)
|
java
|
public void setResizable(boolean resize) {
    // Toggle the drag-to-resize handle on this widget; no-op if unchanged.
    if (resize != m_isResize) {
        if (resize) {
            // Attach the handle element to our DOM and adopt it into the
            // GWT widget tree so events are wired up.
            getElement().appendChild(m_resize.getElement());
            adopt(m_resize);
            m_resize.addMouseDownHandler(new MouseDownHandler() {

                public void onMouseDown(MouseDownEvent event) {
                    // Capture the drag start coordinates/state.
                    setStartParameters(event);
                    CmsDebugLog.getInstance().printLine("Registering preview handler");
                    // Replace any stale preview handler before registering a
                    // fresh one that will track the drag globally.
                    if (m_previewHandlerRegistration != null) {
                        m_previewHandlerRegistration.removeHandler();
                    }
                    m_previewHandlerRegistration = Event.addNativePreviewHandler(new ResizeEventPreviewHandler());
                }
            });
        } else {
            // Disabling: detach the handle widget (also removes its element).
            m_resize.removeFromParent();
        }
        m_isResize = resize;
    }
}
|
java
|
public Observable<DatabaseInner> updateAsync(String resourceGroupName, String serverName, String databaseName, DatabaseUpdate parameters) {
    // Issue the update call and strip the ServiceResponse wrapper,
    // emitting only the response body to subscribers.
    final Func1<ServiceResponse<DatabaseInner>, DatabaseInner> extractBody =
            new Func1<ServiceResponse<DatabaseInner>, DatabaseInner>() {
                @Override
                public DatabaseInner call(ServiceResponse<DatabaseInner> response) {
                    return response.body();
                }
            };
    return updateWithServiceResponseAsync(resourceGroupName, serverName, databaseName, parameters).map(extractBody);
}
|
java
|
public static <T> Collector<T, ?, List<T>> last(final int n) {
    // Collector that retains only the last `n` elements seen, in encounter order.
    N.checkArgNotNegative(n, "n");
    // Supplier: bounded ArrayDeque for modest n, LinkedList otherwise to
    // avoid pre-allocating a very large backing array.
    final Supplier<Deque<T>> supplier = new Supplier<Deque<T>>() {
        @Override
        public Deque<T> get() {
            return n <= 1024 ? new ArrayDeque<T>(n) : new LinkedList<T>();
        }
    };
    // Accumulator: evict from the front once the deque is full, then append,
    // so the deque always holds the most recent <= n elements.
    final BiConsumer<Deque<T>, T> accumulator = new BiConsumer<Deque<T>, T>() {
        @Override
        public void accept(Deque<T> dqueue, T t) {
            if (n > 0) {
                if (dqueue.size() >= n) {
                    dqueue.pollFirst();
                }
                dqueue.offerLast(t);
            }
        }
    };
    // Combiner: two non-empty partials only arise in parallel streams, which
    // this collector does not support; otherwise top up `b` from `a`'s tail.
    final BinaryOperator<Deque<T>> combiner = new BinaryOperator<Deque<T>>() {
        @Override
        public Deque<T> apply(Deque<T> a, Deque<T> b) {
            if (N.notNullOrEmpty(a) && N.notNullOrEmpty(b)) {
                throw new UnsupportedOperationException("The 'first' and 'last' Collector only can be used in sequential stream");
            }
            while (b.size() < n && !a.isEmpty()) {
                b.addFirst(a.pollLast());
            }
            return b;
        }
    };
    // Finisher: snapshot the deque into a plain List.
    final Function<Deque<T>, List<T>> finisher = new Function<Deque<T>, List<T>>() {
        @Override
        public List<T> apply(Deque<T> dqueue) {
            return new ArrayList<>(dqueue);
        }
    };
    return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID);
}
|
java
|
public SortedSet<TypeElement> allSubClasses(TypeElement typeElement, boolean isEnum) {
    // Breadth-first transitive closure over directSubClasses: newly
    // discovered types are appended to the list and expanded in turn, so
    // the list doubles as a work queue.
    List<TypeElement> list = new ArrayList<>(directSubClasses(typeElement, isEnum));
    // Track membership in a hash set: the original list.contains() scan made
    // the closure quadratic in the number of subclasses.  Fully-qualified to
    // avoid touching the file's import block.
    java.util.Set<TypeElement> seen = new java.util.HashSet<>(list);
    for (int i = 0; i < list.size(); i++) {
        TypeElement te = list.get(i);
        for (TypeElement tte : directSubClasses0(te, isEnum)) {
            // add() returns false for elements already seen.
            if (seen.add(tte)) {
                list.add(tte);
            }
        }
    }
    // Return the result sorted with the instance comparator.
    SortedSet<TypeElement> out = new TreeSet<>(comparator);
    out.addAll(list);
    return out;
}
|
python
|
def get_cell_boundary_variables(ds):
    '''
    Return a list of variable names for variables that represent cell
    boundaries through the `bounds` attribute.

    :param netCDF4.Dataset ds: netCDF dataset
    :return: list of names of boundary variables
    '''
    # Variables that declare a `bounds` attribute at all.
    has_bounds = ds.get_variables_by_attributes(bounds=lambda x: x is not None)
    # Keep only `bounds` references that resolve to an actual variable in the
    # dataset; dangling references are silently skipped (as before).
    return [var.bounds for var in has_bounds if var.bounds in ds.variables]
|
python
|
def _initialize_indices(model_class, name, bases, attrs):
    """Store the list of indexed attribute names on ``model_class``.

    :param model_class: the model class being constructed.
    :param name: the class name (unused; part of the metaclass hook signature).
    :param bases: base classes (unused; part of the metaclass hook signature).
    :param attrs: mapping of attribute name -> value from the class body.
    """
    model_class._indices = []
    # dict.items() works on both Python 2 and 3; the original iteritems()
    # is Python-2 only and raises AttributeError on Python 3.
    for attr_name, attr_value in attrs.items():
        # Only Attribute/ListField descriptors explicitly marked as indexed
        # participate in the index.
        if isinstance(attr_value, (Attribute, ListField)) and attr_value.indexed:
            model_class._indices.append(attr_name)
    # Additional indices may be declared through the model's meta options.
    if model_class._meta['indices']:
        model_class._indices.extend(model_class._meta['indices'])
|
python
|
def destroy(name, call=None):
    '''
    To destroy a VM from the VMware environment

    CLI Example:

    .. code-block:: bash

        salt-cloud -d vmname
        salt-cloud --destroy vmname
        salt-cloud -a destroy vmname
    '''
    # This is a per-VM "action" entry point; reject function-style invocation.
    if call == 'function':
        raise SaltCloudSystemExit(
            'The destroy action must be called with -d, --destroy, '
            '-a or --action.'
        )
    # Announce the impending destruction on the salt event bus.
    __utils__['cloud.fire_event'](
        'event',
        'destroying instance',
        'salt/cloud/{0}/destroying'.format(name),
        args={'name': name},
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )
    # Fetch only the properties needed to locate the VM and check its power
    # state, keeping the vSphere property collection cheap.
    vm_properties = [
        "name",
        "summary.runtime.powerState"
    ]
    vm_list = salt.utils.vmware.get_mors_with_properties(_get_si(), vim.VirtualMachine, vm_properties)
    for vm in vm_list:
        if vm["name"] == name:
            if vm["summary.runtime.powerState"] != "poweredOff":
                # Power off the vm first
                try:
                    log.info('Powering Off VM %s', name)
                    task = vm["object"].PowerOff()
                    salt.utils.vmware.wait_for_task(task, name, 'power off')
                except Exception as exc:
                    log.error(
                        'Error while powering off VM %s: %s',
                        name, exc,
                        # Show the traceback if the debug logging level is enabled
                        exc_info_on_loglevel=logging.DEBUG
                    )
                    return 'failed to destroy'
            try:
                log.info('Destroying VM %s', name)
                task = vm["object"].Destroy_Task()
                salt.utils.vmware.wait_for_task(task, name, 'destroy')
            except Exception as exc:
                log.error(
                    'Error while destroying VM %s: %s',
                    name, exc,
                    # Show the traceback if the debug logging level is enabled
                    exc_info_on_loglevel=logging.DEBUG
                )
                return 'failed to destroy'
    # Announce successful destruction on the event bus.
    __utils__['cloud.fire_event'](
        'event',
        'destroyed instance',
        'salt/cloud/{0}/destroyed'.format(name),
        args={'name': name},
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )
    # Drop the minion's cached data when cache maintenance is enabled.
    if __opts__.get('update_cachedir', False) is True:
        __utils__['cloud.delete_minion_cachedir'](name, __active_provider_name__.split(':')[0], __opts__)
    return True
|
java
|
protected boolean doesLevelMatch(final Level level, final CSNodeWrapper node, boolean matchContent) {
    // Only level nodes can match a Level.
    if (!EntityUtilities.isNodeALevel(node)) return false;
    // If the unique id is not from the parser (i.e. it is numeric, a database
    // id), then use the unique id to compare.
    if (level.getUniqueId() != null && level.getUniqueId().matches("^\\d.*")) {
        return level.getUniqueId().equals(Integer.toString(node.getId()));
    } else {
        // If the target ids match then the level should be the same.
        // Use equals() rather than ==: the original reference comparison only
        // succeeded when both sides were the identical object instance.
        if (level.getTargetId() != null && level.getTargetId().equals(node.getTargetId())) {
            return true;
        }
        if (matchContent) {
            // Make sure the level type matches before comparing titles exactly.
            if (node.getNodeType() != level.getLevelType().getId()) return false;
            return level.getTitle().equals(node.getTitle());
        } else {
            // Fuzzy title comparison: accept near-matches above the threshold.
            return StringUtilities.similarDamerauLevenshtein(level.getTitle(),
                    node.getTitle()) >= ProcessorConstants.MIN_MATCH_SIMILARITY;
        }
    }
}
|
java
|
@Deprecated
@Override
public void remove(final WComponent item) {
    // Deprecated entry point kept for backward compatibility: delegate to
    // removeMenuItem when the component is a MenuItem, otherwise ignore it.
    if (!(item instanceof MenuItem)) {
        return;
    }
    removeMenuItem((MenuItem) item);
}
|
python
|
def area(self):
    r"""The area of the current curved polygon.

    This assumes, but does not check, that the current curved polygon
    is valid (i.e. it is bounded by the edges).

    The area is computed via Green's theorem with the vector field
    :math:`\mathbf{F} = \left[-y, x\right]^T`; since
    :math:`\partial_x(x) - \partial_y(-y) = 2`,

    .. math::

        \int_{\mathcal{P}} 2 \, d\textbf{x} =
            \int_{\partial \mathcal{P}} -y \, dx + x \, dy

    where :math:`\mathcal{P}` is the current curved polygon.  For an edge
    :math:`C(r)` with control points :math:`x_j, y_j` the line integral
    reduces to

    .. math::

        \int_C -y \, dx + x \, dy = \int_0^1 (x y' - y x') \, dr
            = \sum_{i < j} (x_i y_j - y_i x_j) \int_0^1 b_{i, d}
              b'_{j, d} \, dr

    with :math:`b_{i, d}, b_{j, d}` Bernstein basis polynomials.

    Returns:
        float: The area of the current curved polygon.
    """
    # Collect each edge's control-point nodes and hand them to the helper.
    edge_nodes = []
    for edge in self._edges:
        edge_nodes.append(edge._nodes)
    return _surface_helpers.compute_area(tuple(edge_nodes))
|
java
|
public static Object invokeMethod(Object bean, Method method, Object... args) throws Exception {
    // Reflectively invoke `method` on `bean`, coercing each argument to the
    // declared parameter type first.  NOTE: arguments are converted in place,
    // so the caller's `args` array is mutated.
    Class<?>[] types = method.getParameterTypes();
    int argCount = args == null ? 0 : args.length;
    // Argument count does not match the method's parameter count.
    if (argCount != types.length) {
        throw new IllegalStateException(String.format("%s in %s", method.getName(), bean));
    }
    // Coerce each argument to the corresponding declared parameter type.
    for (int i = 0; i < argCount; i++) {
        args[i] = cast(args[i], types[i]);
    }
    return method.invoke(bean, args);
}
|
java
|
public Signer build() {
    // Assemble a Signer, filling in defaults for anything not configured.
    // The signing service is the only mandatory dependency.
    Util.notNull(signingService, "KSI signing service");
    // Default hash algorithm: SHA2-256.
    if (defaultHashAlgorithm == null) {
        this.defaultHashAlgorithm = HashAlgorithm.SHA2_256;
    }
    // Refuse algorithms past their expiration.
    defaultHashAlgorithm.checkExpiration();
    // Default verification policy: the internal policy.
    if (policy == null) {
        this.policy = ContextAwarePolicyAdapter.createInternalPolicy();
    }
    KSISignatureComponentFactory signatureComponentFactory = new InMemoryKsiSignatureComponentFactory();
    KSISignatureFactory uniSignatureFactory = new InMemoryKsiSignatureFactory(policy, signatureComponentFactory);
    return new SignerImpl(signingService, uniSignatureFactory, defaultHashAlgorithm);
}
|
java
|
@Exported
@Restricted(DoNotUse.class)
@CheckForNull
public String getAbsoluteRemotePath() {
    // Expose the remote FS root only to callers holding CONNECT permission;
    // everyone else gets null (hence @CheckForNull).
    return hasPermission(CONNECT) ? getAbsoluteRemoteFs() : null;
}
|
java
|
public boolean onTouchEvent(MotionEvent event) {
    // Minimal tap detector: a gesture counts as a click only if the pointer
    // stayed within the tap slop from DOWN to UP and the press was shorter
    // than the long-press timeout.  Always consumes the event (returns true).
    switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN:
            mIsCapturingGesture = true;
            mIsClickCandidate = true;
            // Remember where/when the gesture started for slop/timing checks.
            mActionDownTime = event.getEventTime();
            mActionDownX = event.getX();
            mActionDownY = event.getY();
            break;
        case MotionEvent.ACTION_MOVE:
            // Moving beyond the slop radius disqualifies the tap.
            if (Math.abs(event.getX() - mActionDownX) > mSingleTapSlopPx ||
                    Math.abs(event.getY() - mActionDownY) > mSingleTapSlopPx) {
                mIsClickCandidate = false;
            }
            break;
        case MotionEvent.ACTION_CANCEL:
            mIsCapturingGesture = false;
            mIsClickCandidate = false;
            break;
        case MotionEvent.ACTION_UP:
            mIsCapturingGesture = false;
            // Re-check the slop against the final UP position as well.
            if (Math.abs(event.getX() - mActionDownX) > mSingleTapSlopPx ||
                    Math.abs(event.getY() - mActionDownY) > mSingleTapSlopPx) {
                mIsClickCandidate = false;
            }
            if (mIsClickCandidate) {
                if (event.getEventTime() - mActionDownTime <= ViewConfiguration.getLongPressTimeout()) {
                    if (mClickListener != null) {
                        mClickListener.onClick();
                    }
                } else {
                    // long click, not handled
                }
            }
            mIsClickCandidate = false;
            break;
    }
    return true;
}
|
python
|
def python_sidebar_navigation(python_input):
    """
    Create the `Layout` showing the navigation information for the sidebar.
    """
    def get_text_fragments():
        # One (style, text) fragment per visual element, in display order.
        key_style = 'class:sidebar.key'
        description_style = 'class:sidebar.description'
        separator = ('class:sidebar', ' ')
        return [
            separator,
            (key_style, '[Arrows]'),
            separator,
            (description_style, 'Navigate'),
            separator,
            (key_style, '[Enter]'),
            separator,
            (description_style, 'Hide menu'),
        ]

    # Fixed-size one-line window at the bottom of the sidebar.
    return Window(
        FormattedTextControl(get_text_fragments),
        style='class:sidebar',
        width=Dimension.exact(43),
        height=Dimension.exact(1))
|
java
|
private boolean builtInEdit(String initialText,
        Consumer<String> saveHandler, Consumer<String> errorHandler) {
    // Launch the built-in snippet editor, if a provider is available.
    // Returns true when an editor was opened, false otherwise.
    try {
        ServiceLoader<BuildInEditorProvider> sl
                = ServiceLoader.load(BuildInEditorProvider.class);
        // Find the highest ranking provider
        BuildInEditorProvider provider = null;
        for (BuildInEditorProvider p : sl) {
            if (provider == null || p.rank() > provider.rank()) {
                provider = p;
            }
        }
        if (provider != null) {
            provider.edit(getResourceString("jshell.label.editpad"),
                    initialText, saveHandler, errorHandler);
            return true;
        } else {
            errormsg("jshell.err.no.builtin.editor");
        }
    } catch (RuntimeException ex) {
        errormsg("jshell.err.cant.launch.editor", ex);
    }
    // Fall-through: no editor could be launched; hint at configuring one.
    fluffmsg("jshell.msg.try.set.editor");
    return false;
}
|
python
|
def instance(self, root, baseurl, loaded_schemata, options):
    """
    Build a new schema object from the specified I{root} node and base
    I{URL}.

    @param root: A schema root node.
    @type root: L{sax.element.Element}
    @param baseurl: A base URL.
    @type baseurl: str
    @param loaded_schemata: Already loaded schemata cache (URL --> Schema).
    @type loaded_schemata: dict
    @param options: An options dictionary.
    @type options: L{options.Options}
    @return: The newly created schema object.
    @rtype: L{Schema}
    @note: This is only used by Import children.
    """
    # Note the argument order Schema expects: options precedes the cache.
    return Schema(root, baseurl, options, loaded_schemata)
|
python
|
def json(self):
    """
    Serialize the security rules as a pretty-printed JSON string.

    Return:
        str
    """
    # Stable output: sorted keys, two-space indent, no trailing spaces
    # after separators.
    dump_options = {
        'sort_keys': True,
        'indent': 2,
        'separators': (',', ': '),
    }
    return json.dumps(self.dict_rules, **dump_options)
|
python
|
def update(self, cookies):
    """Add the given cookies to our cookie jar and persist it.

    :param cookies: Any iterable that yields http.cookiejar.Cookie
                    instances, such as a CookieJar.
    """
    jar = self.get_cookie_jar()
    for item in cookies:
        jar.set_cookie(item)
    # Persist under the lock so concurrent writers do not interleave saves.
    with self._lock:
        jar.save()
|
python
|
def get_redirect_args(self, request, callback):
    """Build the query parameters for the OAuth2 authorization redirect URL.

    :param request: current request; used to resolve the absolute callback
        URL, the scope, the CSRF state and any extra auth parameters.
    :param callback: callback path/URL, made absolute against the request.
    :return: dict of query-string arguments for the provider authorize URL.
    """
    callback = request.build_absolute_uri(callback)
    args = {
        'client_id': self.consumer_key,
        'redirect_uri': callback,
        'response_type': 'code',
    }
    # Fetch the scope once and reuse it; the original called get_scope()
    # a second time when joining.
    scope = self.get_scope(request)
    if scope:
        args['scope'] = self.scope_separator.join(scope)
    state = self.get_application_state(request, callback)
    if state is not None:
        args['state'] = state
        # Remember the state so the callback view can validate it later.
        request.session[self.session_key] = state
    auth_params = self.get_auth_params(request)
    if auth_params:
        args.update(auth_params)
    return args
|
java
|
@SuppressWarnings("WeakerAccess")
public SnackbarBuilder actionDismissCallback(final SnackbarActionDismissCallback callback) {
    // Adapt the single-purpose action-dismiss callback onto the internal
    // SnackbarCallback interface and register it.  Builder-style return.
    callbacks.add(new SnackbarCallback() {

        public void onSnackbarActionPressed(Snackbar snackbar) {
            callback.onSnackbarActionPressed(snackbar);
        }
    });
    return this;
}
|
java
|
private static String getUPNStringFromSequence(final ASN1Sequence seq) {
    // Extract the UserPrincipalName from an otherName ASN.1 sequence:
    // element 0 is the type OID, element 1 a tagged object wrapping the value.
    if (seq == null) {
        return null;
    }
    val id = ASN1ObjectIdentifier.getInstance(seq.getObjectAt(0));
    // Only proceed when the OID marks this otherName as a UPN.
    if (id != null && UPN_OBJECTID.equals(id.getId())) {
        val obj = (ASN1TaggedObject) seq.getObjectAt(1);
        val primitiveObj = obj.getObject();
        // Some encoders double-wrap the value in another tagged object;
        // unwrap one more level in that case, otherwise use it as-is.
        val func = FunctionUtils.doIf(Predicates.instanceOf(ASN1TaggedObject.class),
            () -> ASN1TaggedObject.getInstance(primitiveObj).getObject(),
            () -> primitiveObj);
        val prim = func.apply(primitiveObj);
        // The UPN may be an octet string (UTF-8 bytes) or an ASN.1 string.
        if (prim instanceof ASN1OctetString) {
            return new String(((ASN1OctetString) prim).getOctets(), StandardCharsets.UTF_8);
        }
        if (prim instanceof ASN1String) {
            return ((ASN1String) prim).getString();
        }
    }
    return null;
}
|
python
|
def load_module_from_modpath(parts, path=None, use_sys=1):
    """Load a python module from its split name.

    NOTE: built on the deprecated ``imp`` module; kept as-is for
    compatibility with existing callers.

    :type parts: list(str) or tuple(str)
    :param parts:
      python name of a module or package split on '.'

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be used or not

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    # Fast path: the fully qualified module may already be imported.
    if use_sys:
        try:
            return sys.modules[".".join(parts)]
        except KeyError:
            pass
    modpath = []
    prevmodule = None
    # Import each dotted component in turn so package __init__ side effects
    # run in order and each child can be attached to its parent package.
    for part in parts:
        modpath.append(part)
        curname = ".".join(modpath)
        module = None
        if len(modpath) != len(parts):
            # even with use_sys=False, should try to get outer packages from sys.modules
            module = sys.modules.get(curname)
        elif use_sys:
            # because it may have been indirectly loaded through a parent
            module = sys.modules.get(curname)
        if module is None:
            # Locate and load the component relative to the current search path.
            mp_file, mp_filename, mp_desc = imp.find_module(part, path)
            module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
            # mp_file still needs to be closed.
            if mp_file:
                mp_file.close()
        if prevmodule:
            # Make the child reachable as an attribute of its parent package.
            setattr(prevmodule, part, module)
        _file = getattr(module, "__file__", "")
        prevmodule = module
        # Namespace packages have no __file__; keep descending without
        # narrowing the search path.
        if not _file and util.is_namespace(curname):
            continue
        if not _file and len(modpath) != len(parts):
            raise ImportError("no module in %s" % ".".join(parts[len(modpath) :]))
        # Restrict the next component's lookup to this package's directory.
        path = [os.path.dirname(_file)]
    return module
|
java
|
public void marshall(OrganizationAggregationSource source, ProtocolMarshaller marshaller) {
    // Reject a null model object before touching the protocol marshaller.
    if (source == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each member under its protocol binding, in declaration order.
        marshaller.marshall(source.getRoleArn(), ROLEARN_BINDING);
        marshaller.marshall(source.getAwsRegions(), AWSREGIONS_BINDING);
        marshaller.marshall(source.getAllAwsRegions(), ALLAWSREGIONS_BINDING);
    } catch (Exception e) {
        // Normalize any failure into the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
public void addGSV( GSVSentence gsv ) {
    // Pull satellite details from the sentence, but only when it passes
    // its own validity check.
    try {
        if (gsv.isValid()) {
            satelliteInfo = gsv.getSatelliteInfo();
        }
    } catch (Exception e) {
        // Deliberately swallowed: validity should have been established by
        // isValid(); if an exception still escapes, nothing can be done here.
    }
}
|
java
|
/**
 * Gets the budget.
 *
 * @return the budget associated with this object
 */
public com.google.api.ads.adwords.axis.v201809.cm.Budget getBudget() {
    return budget;
}
|
python
|
def logfile(filename, formatter=None, mode='a', maxBytes=0, backupCount=0, encoding=None, loglevel=None, disableStderrLogger=False):
    """
    Setup logging to file (using a `RotatingFileHandler <https://docs.python.org/2/library/logging.handlers.html#rotatingfilehandler>`_ internally).

    By default, the file grows indefinitely (no rotation). You can use the ``maxBytes`` and
    ``backupCount`` values to allow the file to rollover at a predetermined size. When the
    size is about to be exceeded, the file is closed and a new file is silently opened
    for output. Rollover occurs whenever the current log file is nearly ``maxBytes`` in length;
    if either of ``maxBytes`` or ``backupCount`` is zero, rollover never occurs.

    If ``backupCount`` is non-zero, the system will save old log files by appending the
    extensions '.1', '.2' etc., to the filename. For example, with a ``backupCount`` of 5
    and a base file name of app.log, you would get app.log, app.log.1, app.log.2, up to
    app.log.5. The file being written to is always app.log. When this file is filled,
    it is closed and renamed to app.log.1, and if files app.log.1, app.log.2, etc. exist,
    then they are renamed to app.log.2, app.log.3 etc. respectively.

    NOTE: relies on the module-level ``logger``, ``_loglevel`` and
    ``_formatter`` globals maintained elsewhere in this module.

    :arg string filename: Filename of the logfile. Set to `None` to disable logging to the logfile.
    :arg Formatter formatter: `Python logging Formatter object <https://docs.python.org/2/library/logging.html#formatter-objects>`_ (by default uses the internal LogFormatter).
    :arg string mode: mode to open the file with. Defaults to ``a``
    :arg int maxBytes: Size of the logfile when rollover should occur. Defaults to 0, rollover never occurs.
    :arg int backupCount: Number of backups to keep. Defaults to 0, rollover never occurs.
    :arg string encoding: Used to open the file with that encoding.
    :arg int loglevel: Set a custom loglevel for the file logger, else uses the current global loglevel.
    :arg bool disableStderrLogger: Should the default stderr logger be disabled. Defaults to False.
    """
    # Step 1: If an internal RotatingFileHandler already exists, remove it
    __remove_internal_loggers(logger, disableStderrLogger)
    # Step 2: If wanted, add the RotatingFileHandler now
    if filename:
        rotating_filehandler = RotatingFileHandler(filename, mode=mode, maxBytes=maxBytes, backupCount=backupCount, encoding=encoding)
        # Set internal attributes on this handler
        setattr(rotating_filehandler, LOGZERO_INTERNAL_LOGGER_ATTR, True)
        if loglevel:
            # Mark that this handler carries its own level, distinct from the
            # module-wide one.
            setattr(rotating_filehandler, LOGZERO_INTERNAL_HANDLER_IS_CUSTOM_LOGLEVEL, True)
        # Configure the handler and add it to the logger
        rotating_filehandler.setLevel(loglevel or _loglevel)
        rotating_filehandler.setFormatter(formatter or _formatter or LogFormatter(color=False))
        logger.addHandler(rotating_filehandler)
|
python
|
def take_at_most_n_seconds(time_s, func, *args, **kwargs):
    """Report whether calling ``func`` finished within ``time_s`` seconds.

    NOTE: The function call is not killed and will run indefinitely if hung.

    Args:
        time_s: Maximum amount of time to take.
        func: Function to call.
        *args: Arguments to call the function with.
        **kwargs: Keyword arguments to call the function with.

    Returns:
        True if the function finished in less than time_s seconds.
    """
    worker = threading.Thread(target=func, args=args, kwargs=kwargs)
    worker.start()
    worker.join(time_s)
    # join() returning with the thread still alive means the deadline passed.
    return not worker.is_alive()
|
python
|
def data(self, column, role):
    """Return the data for the specified column and role.

    The column index selects one attribute accessor for the wrapped shot.

    :param column: the data column
    :type column: int
    :param role: the data role
    :type role: QtCore.Qt.ItemDataRole
    :returns: data depending on the role
    :rtype:
    :raises: None
    """
    accessor = self.columns[column]
    return accessor(self._shot, role)
|
java
|
@Override
public ZKData<byte[]> getZKByteData(String path) throws InterruptedException, KeeperException
{
    // Convenience overload: fetch raw byte data without registering a watcher.
    return getZKByteData(path, null);
}
|
python
|
def createRole(self, *args, **kwargs):
    """
    Create Role

    Create a new role.  The caller's scopes must satisfy the new role's
    scopes.  If a role with the same `roleId` already exists this
    operation will fail; use `updateRole` to modify an existing role.
    Creation of a role that would generate an infinite expansion results
    in an error response.

    This method takes input: ``v1/create-role-request.json#``

    This method gives output: ``v1/get-role-response.json#``

    This method is ``stable``
    """
    # Thin wrapper: resolve the endpoint metadata and forward everything.
    endpoint_info = self.funcinfo["createRole"]
    return self._makeApiCall(endpoint_info, *args, **kwargs)
|
java
|
public void marshall(CreateFileSystemRequest request, ProtocolMarshaller marshaller) {
    // Reject a null request before touching the protocol marshaller.
    if (request == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each request member under its protocol binding.
        marshaller.marshall(request.getCreationToken(), CREATIONTOKEN_BINDING);
        marshaller.marshall(request.getPerformanceMode(), PERFORMANCEMODE_BINDING);
        marshaller.marshall(request.getEncrypted(), ENCRYPTED_BINDING);
        marshaller.marshall(request.getKmsKeyId(), KMSKEYID_BINDING);
        marshaller.marshall(request.getThroughputMode(), THROUGHPUTMODE_BINDING);
        marshaller.marshall(request.getProvisionedThroughputInMibps(), PROVISIONEDTHROUGHPUTINMIBPS_BINDING);
        marshaller.marshall(request.getTags(), TAGS_BINDING);
    } catch (Exception e) {
        // Normalize any failure into the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
protected Statement createStatement(Connection conn, OutputHandler outputHandler, String sql)
        throws SQLException {
    // Create a Statement, choosing result-set type/concurrency based on the
    // capabilities the output handler needs.
    Statement result = null;
    Integer resultSetType = null;
    Integer resultSetConcurrency = null;
    if (outputHandler instanceof LazyScrollOutputHandler) {
        if (overrider.hasOverride(MjdbcConstants.OVERRIDE_LAZY_SCROLL_CHANGE_SENSITIVE) == true) {
            // read value
            // NOTE(review): the override's value is discarded; the call
            // appears to consume/clear a one-shot override flag — confirm.
            overrider.getOverride(MjdbcConstants.OVERRIDE_LAZY_SCROLL_CHANGE_SENSITIVE);
            resultSetType = ResultSet.TYPE_SCROLL_SENSITIVE;
        } else {
            resultSetType = ResultSet.TYPE_SCROLL_INSENSITIVE;
        }
    }
    if (outputHandler instanceof LazyUpdateOutputHandler) {
        // Updatable handlers need an updatable result set.
        resultSetConcurrency = ResultSet.CONCUR_UPDATABLE;
    }
    if (resultSetType == null && resultSetConcurrency == null) {
        // No special requirements: use the driver's defaults.
        result = conn.createStatement();
    } else {
        // Fill in JDBC defaults for whichever dimension was not requested.
        resultSetType = (resultSetType == null ? ResultSet.TYPE_FORWARD_ONLY : resultSetType);
        resultSetConcurrency = (resultSetConcurrency == null ? ResultSet.CONCUR_READ_ONLY : resultSetConcurrency);
        result = conn.createStatement(resultSetType, resultSetConcurrency);
    }
    return result;
}
|
python
|
def channel_info(self, chan):
    """Return (soundfont id, bank, program, preset name) for a MIDI channel."""
    info = fluid_synth_channel_info_t()
    # The C call fills `info` in place through the byref pointer.
    fluid_synth_get_channel_info(self.synth, chan, byref(info))
    return info.sfont_id, info.bank, info.program, info.name
|
python
|
def _nodes(e):
    """
    A helper for ordered() which returns the node count of ``e``: for Basic
    objects this is the number of Basic nodes in the expression tree; for
    dicts it is 1 plus the node counts of all keys and values; for other
    iterables it is 1 plus the node counts of the items; anything else
    counts as 1.
    """
    from .basic import Basic

    if isinstance(e, Basic):
        return e.count(Basic)
    elif isinstance(e, dict):
        # Check dict BEFORE the generic iterable test: dicts are iterable,
        # so with the original branch order this case was unreachable and
        # dicts were counted by their keys only, contradicting the docs.
        return 1 + sum(_nodes(k) + _nodes(v) for k, v in e.items())
    elif iterable(e):
        return 1 + sum(_nodes(ei) for ei in e)
    else:
        return 1
|
python
|
def get_descendants_group_count(cls, parent=None):
    """Return siblings annotated with their total descendant counts.

    Helper for a very common case: get a group of siblings and the number
    of *descendants* (not only children) in every sibling.

    :param parent:
        The parent of the siblings to return. If no parent is given, the
        root nodes will be returned.
    :returns:
        A `list` (**NOT** a Queryset) of node objects with an extra
        attribute: `descendants_count`.
    """
    siblings = cls.get_root_nodes() if parent is None else parent.get_children()
    result = list(siblings)
    for sibling in result:
        # Annotate each node in place with its full descendant count.
        sibling.descendants_count = sibling.get_descendant_count()
    return result
|
python
|
def match_validator(expression):
    """Return a validator function that checks values against ``expression``.

    Args:
        expression: if a string, it is compiled to a regular expression
            using ``re.compile``.  Otherwise it may be any object exposing
            a ``match()`` method, such as an already-compiled regular
            expression or a custom matching object/class.

    Raises:
        TypeError: if ``expression`` is neither a string nor an object
            with a ``match()`` method.
    """
    if isinstance(expression, str):
        compiled = re.compile(expression)
    elif hasattr(expression, 'match'):
        # check it early so we could say something is wrong early
        compiled = expression
    else:
        # Fixed grammar of the original message ("is nor a string nor").
        raise TypeError(
            'Provided match is neither a string nor has a match method '
            '(like re expressions)'
        )

    def validator(value):
        if not compiled.match(value):
            raise ValidationError(
                "{} does not match pattern: {}".format(
                    value,
                    # Custom matchers may lack .pattern; fall back to repr
                    # of the matcher itself.
                    compiled.pattern
                    if hasattr(compiled, 'pattern')
                    else compiled
                )
            )
    return validator
|
python
|
def has_generic_permission(self, request, permission_type):
    """
    Return True if the current user has permission on this image;
    return the string 'ALL' if the user has all rights.
    """
    user = request.user
    # Anonymous users never have permission.
    if not user.is_authenticated():
        return False
    # Superusers and the image owner always do.
    if user.is_superuser or user == self.owner:
        return True
    # Otherwise defer to the containing folder's permissions, if any.
    if self.folder:
        return self.folder.has_generic_permission(request, permission_type)
    return False
|
python
|
def get_used_fields(payload):
    """
    List the names of the fields that are populated on the given
    xso.Query instance.

    :param payload: the Query object to inspect
    :type payload: :class:`~aioxmpp.ibr.Query`
    :return: :attr:`list` of tags whose descriptor holds a non-None value
    """
    used = []
    for tag, descriptor in payload.CHILD_MAP.items():
        # Resolve the descriptor against the instance; None means unset.
        if descriptor.__get__(payload, type(payload)) is not None:
            used.append(tag)
    return used
|
java
|
public void ensureHasSpace(int numBytesToAdd) {
    // Grow the buffer just enough that `numBytesToAdd` more bytes fit.
    if (numBytesToAdd < 0) {
        throw new IllegalArgumentException("Number of bytes can't be negative");
    }
    final int deficit = numBytesToAdd - getCapacityLeft();
    if (deficit > 0) {
        grow(deficit, true);
    }
}
|
java
|
/**
 * Sets the table options, replacing any previously configured map.
 *
 * @param opts option key/value selectors; copied defensively
 * @return this builder, for chaining
 */
@TimerJ
public TableMetadataBuilder withOptions(Map<Selector, Selector> opts) {
    // Copy defensively so later mutation of the caller's map has no effect.
    options = new HashMap<Selector, Selector>(opts);
    return this;
}
|
python
|
def format_datetime(value):
    """
    Format a datetime as an ISO-8601-style string (``YYYY-MM-DDTHH:MM:SS``).

    Non-datetime values are returned unchanged.
    """
    # %S (seconds) fixes the original format string, which used %I
    # (12-hour-clock hour) in the seconds position.
    dt_format = '%Y-%m-%dT%H:%M:%S'
    if isinstance(value, datetime):
        return value.strftime(dt_format)
    return value
|
java
|
public boolean isTransitionApplied (Class<?> clazz, final String name)
    throws PersistenceException
{
    // Check the TRANSITIONS audit table for a (class, name) row, which marks
    // the named transition as already applied for that class.
    final String cname = clazz.getName();
    return execute(new Operation<Boolean>() {
        public Boolean invoke (Connection conn, DatabaseLiaison liaison)
            throws SQLException, PersistenceException
        {
            PreparedStatement stmt = null;
            try {
                stmt = conn.prepareStatement(
                    " select " + liaison.columnSQL("NAME") +
                    " from " + liaison.tableSQL("TRANSITIONS") +
                    " where " + liaison.columnSQL("CLASS") + "=?" +
                    " and " + liaison.columnSQL("NAME") + "=?");
                stmt.setString(1, cname);
                stmt.setString(2, name);
                ResultSet rs = stmt.executeQuery();
                // Any matching row means the transition was applied.
                // (The ResultSet is released when the statement closes.)
                if (rs.next()) {
                    return true;
                }
            } finally {
                JDBCUtil.close(stmt);
            }
            return false;
        }
    });
}
|
java
|
private void delete(Node<K, V> n) {
    // Remove an arbitrary node from the heap via its handle.
    if (root == n) {
        // Deleting the root is exactly a delete-min; then clear the handle's
        // links so it cannot be used again.
        deleteMin();
        n.o_c = null;
        n.y_s = null;
        n.o_s = null;
        return;
    }
    // A node with no older-sibling/parent link is not attached to this heap.
    if (n.o_s == null) {
        throw new IllegalArgumentException("Invalid handle!");
    }
    // unlink from parent
    if (n.y_s != null) {
        n.y_s.o_s = n.o_s;
    }
    if (n.o_s.o_c == n) { // I am the oldest :(
        n.o_s.o_c = n.y_s;
    } else { // I have an older sibling!
        n.o_s.y_s = n.y_s;
    }
    n.y_s = null;
    n.o_s = null;
    // perform delete-min at tree rooted at this
    Node<K, V> t = combine(cutChildren(n));
    // and merge with other cut tree
    if (comparator == null) {
        root = link(root, t);
    } else {
        root = linkWithComparator(root, t);
    }
    size--;
}
|
java
|
protected void purgeZeroObject(Long bucketId, Long objectId, AllocationPoint point, boolean copyback) {
    // Drop the allocation from the tracking map, release it through the
    // memory handler, and hand the point's recorded read/write events back
    // to the events provider for reuse.
    allocationsMap.remove(objectId);
    memoryHandler.purgeZeroObject(bucketId, objectId, point, copyback);
    getFlowController().getEventsProvider().storeEvent(point.getLastWriteEvent());
    getFlowController().getEventsProvider().storeEvent(point.getLastReadEvent());
}
|
python
|
def compute_payments(self, precision=None):
    '''
    Return the total amount of payments made to this invoice.

    @param precision:int Number of decimal places
    @return: Decimal
    '''
    # Sum the individual payment amounts, then round to the requested
    # precision via quantize().
    total = sum(payment.amount for payment in self.__payments)
    return quantize(total, precision)
|
python
|
def get_mosaic_by_name(self, name):
    '''Get the API representation of a mosaic, looked up by name.

    :param name str: The name of the mosaic
    :returns: :py:Class:`planet.api.models.Mosaics`
    :raises planet.api.exceptions.APIException: On API error.
    '''
    # Exact-name filter understood by the basemaps endpoint.
    query = {'name__is': name}
    endpoint = self._url('basemaps/v1/mosaics')
    response = self._get(endpoint, models.Mosaics, params=query)
    return response.get_body()
|
java
|
void addInvokes(MethodDescriptor methodDescriptor, final Integer lineNumber, MethodDescriptor invokedMethodDescriptor) {
    // Record an INVOKES relation between the two methods in the store and
    // annotate it with the source line number (boxed, so it may be null).
    InvokesDescriptor invokesDescriptor = scannerContext.getStore().create(methodDescriptor, InvokesDescriptor.class, invokedMethodDescriptor);
    invokesDescriptor.setLineNumber(lineNumber);
}
|
java
|
public void evaluateAndSet(Object target, Object value) {
    // Apply the pre-compiled MVEL set-expression to write `value` into `target`.
    MVEL.executeSetExpression(getCompiledSetExpression(), target, value);
}
|
python
|
def handle_presence(self, old_present):
    '''
    Fire presence events if enabled.

    :param old_present: set of minion ids seen as connected on the
        previous run; mutated in place to the current set before returning.
    '''
    # On the first run it may need more time for the EventPublisher
    # to come up and be ready. Set the timeout to account for this.
    if self.presence_events and self.event.connect_pull(timeout=3):
        present = self.ckminions.connected_ids()
        # Diff against the previous snapshot to find joins and departures.
        new = present.difference(old_present)
        lost = old_present.difference(present)
        if new or lost:
            # Fire new minions present event
            data = {'new': list(new),
                    'lost': list(lost)}
            self.event.fire_event(data, tagify('change', 'presence'))
        # Always publish the full current presence list.
        data = {'present': list(present)}
        self.event.fire_event(data, tagify('present', 'presence'))
        # Update the caller's set in place so the next call diffs correctly.
        old_present.clear()
        old_present.update(present)
|
java
|
public NotificationChain basicSetFinallyExpression(XExpression newFinallyExpression, NotificationChain msgs)
{
    // EMF-style basic setter: swap the finally-expression and, when
    // notification is required, queue a SET notification on the chain.
    XExpression oldFinallyExpression = finallyExpression;
    finallyExpression = newFinallyExpression;
    if (eNotificationRequired())
    {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, XbasePackage.XTRY_CATCH_FINALLY_EXPRESSION__FINALLY_EXPRESSION, oldFinallyExpression, newFinallyExpression);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
|
python
|
def _create_flags(self, kw):
"""
this clones the kw dict, adding a lower-case version of every key
(duplicated in circuit.py; consider putting in util?)
"""
flags = {}
for k in kw.keys():
flags[k] = kw[k]
flags[k.lower()] = flags[k]
return flags
|
java
|
private boolean nextSegment() {
    // Advance to the next FCD segment in forward iteration.  Text that
    // passes the FCD check is exposed directly from the iterator; text that
    // fails is normalized into the side buffer `s` and iterated from there.
    assert(state == State.ITER_CHECK_FWD);
    // The input text [start..(iter index)[ passes the FCD check.
    pos = iter.getIndex();
    // Collect the characters being checked, in case they need to be normalized.
    if(s == null) {
        s = new StringBuilder();
    } else {
        s.setLength(0);
    }
    int prevCC = 0;
    for(;;) {
        // Fetch the next character and its fcd16 value.
        int c = iter.nextCodePoint();
        if(c < 0) { break; }
        int fcd16 = nfcImpl.getFCD16(c);
        int leadCC = fcd16 >> 8;
        if(leadCC == 0 && s.length() != 0) {
            // FCD boundary before this character.
            iter.previousCodePoint();
            break;
        }
        s.appendCodePoint(c);
        if(leadCC != 0 && (prevCC > leadCC || CollationFCD.isFCD16OfTibetanCompositeVowel(fcd16))) {
            // Fails FCD check. Find the next FCD boundary and normalize.
            for(;;) {
                c = iter.nextCodePoint();
                if(c < 0) { break; }
                if(nfcImpl.getFCD16(c) <= 0xff) {
                    iter.previousCodePoint();
                    break;
                }
                s.appendCodePoint(c);
            }
            normalize(s);
            // Expose the normalized buffer: [start..limit[ maps back to the
            // original text, while `pos` restarts at 0 inside the buffer.
            start = pos;
            limit = pos + s.length();
            state = State.IN_NORM_ITER_AT_LIMIT;
            pos = 0;
            return true;
        }
        prevCC = fcd16 & 0xff;
        if(prevCC == 0) {
            // FCD boundary after the last character.
            break;
        }
    }
    // The collected run passed the check: expose it as an FCD segment and
    // rewind the iterator back to the segment start.
    limit = pos + s.length();
    assert(pos != limit);
    iter.moveIndex(-s.length());
    state = State.ITER_IN_FCD_SEGMENT;
    return true;
}
|
java
|
/**
 * Gets the specified deny assignment, blocking until the async call completes.
 *
 * @param scope the scope of the deny assignment
 * @param denyAssignmentId the id of the deny assignment
 * @return the deny assignment payload
 */
public DenyAssignmentInner get(String scope, String denyAssignmentId) {
    // Synchronously unwrap the single async response and return its body.
    ServiceResponse<DenyAssignmentInner> response =
            getWithServiceResponseAsync(scope, denyAssignmentId).toBlocking().single();
    return response.body();
}
|
python
|
def redirect(self,
             where: Optional[str] = None,
             default: Optional[str] = None,
             override: Optional[str] = None,
             **url_kwargs):
    """
    Convenience method for returning redirect responses.
    :param where: A URL, endpoint, or config key name to redirect to.
    :param default: A URL, endpoint, or config key name to redirect to if
                    ``where`` is invalid.
    :param override: explicitly redirect to a URL, endpoint, or config key name
                     (takes precedence over the ``next`` value in query strings
                     or forms)
    :param url_kwargs: the variable arguments of the URL rule
    :param _anchor: if provided this is added as anchor to the URL.
    :param _external: if set to ``True``, an absolute URL is generated. Server
                      address can be changed via ``SERVER_NAME`` configuration
                      variable which defaults to `localhost`.
    :param _external_host: if specified, the host of an external server to
                           generate urls for (eg https://example.com or
                           localhost:8888)
    :param _method: if provided this explicitly specifies an HTTP method.
    :param _scheme: a string specifying the desired URL scheme. The `_external`
                    parameter must be set to ``True`` or a :exc:`ValueError`
                    is raised. The default behavior uses the same scheme as
                    the current request, or ``PREFERRED_URL_SCHEME`` from the
                    :ref:`app configuration <config>` if no request context is
                    available. As of Werkzeug 0.10, this also can be set
                    to an empty string to build protocol-relative URLs.
    """
    # Delegate to the module-level redirect helper, tagging this view as _cls.
    kwargs = dict(url_kwargs, _cls=self)
    return redirect(where, default, override, **kwargs)
|
java
|
/**
 * Single-partition "nibble delete": caps the number of rows in this
 * partition by deleting the oldest rows (by timestamp), removing roughly at
 * most {@code targetMaxRowsToDelete} rows per invocation (more if many rows
 * share the boundary timestamp — see comment below).
 *
 * @param partitionValue the partitioning value routing this procedure
 * @param maxTotalRows desired maximum row count for the partition (0 = empty)
 * @param targetMaxRowsToDelete soft cap on rows deleted in one call, must be &gt; 0
 * @return the number of rows actually deleted
 */
public long run(String partitionValue, long maxTotalRows, long targetMaxRowsToDelete) {
    if (targetMaxRowsToDelete <= 0) {
        throw new VoltAbortException("maxRowsToDeletePerProc must be > 0.");
    }
    if (maxTotalRows < 0) {
        throw new VoltAbortException("maxTotalRows must be >= 0.");
    }
    // Count the rows in the current partition.
    voltQueueSQL(countRows, EXPECT_SCALAR_LONG);
    long count = voltExecuteSQL()[0].asScalarLong();
    // If partition is smaller than desired, return without deleting rows.
    if (count < maxTotalRows) {
        // Return 0 deleted rows.
        return 0;
    }
    // If asked to remove all rows, go ahead
    if ((maxTotalRows == 0) && (count < targetMaxRowsToDelete)) {
        voltQueueSQL(deleteAll, EXPECT_SCALAR_MATCH(count));
        voltExecuteSQL(true);
        // Total deleted rows = table size.
        return count;
    }
    // Figure out how many rows to try to delete.
    long agedOutCount = count - maxTotalRows;
    long rowsToConsider = Math.min(agedOutCount, targetMaxRowsToDelete);
    // Find the timestamp of the row at position N in the sorted order, where N is the chunk size
    voltQueueSQL(getNthOldestTimestamp, EXPECT_SCALAR, rowsToConsider);
    TimestampType newestToDiscard = voltExecuteSQL()[0].fetchRow(0).getTimestampAsTimestamp(0);
    // Delete all rows >= the timestamp found in the previous statement.
    // This will delete AT LEAST N rows, but since timestamps may be non-unique,
    // it might delete more than N. In the worst case, it could delete all rows
    // if every row has an identical timestamp value. It is guaranteed to make
    // progress. If we used strictly less than, it might not make progress.
    // This is why the max rows to delete number is a target, not always a perfect max.
    voltQueueSQL(deleteOlderThanDate, EXPECT_SCALAR_LONG, newestToDiscard);
    long deletedCount = voltExecuteSQL(true)[0].asScalarLong();
    return deletedCount;
}
|
python
|
def valid_url(url):
    """Check that *url* looks like an http(s) URL.

    :param str url: package homepage url.
    :return: the url, unchanged, when it validates.
    :rtype: str
    :raises argparse.ArgumentTypeError: when the url does not validate.
    """
    pattern = (
        r'^(?:http)s?://'
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'
        r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?))'
        r'(?:/?|[/?]\S+)$'
    )
    # re.match caches the compiled pattern internally, so this stays cheap.
    if re.match(pattern, url, re.IGNORECASE) is None:
        raise argparse.ArgumentTypeError('"{0}" is invalid url.'.format(url))
    return url
|
python
|
def split_data_cwl_items(items, default_keys=None):
    """Split a set of CWL output dictionaries into data samples and CWL items.

    Handles cases where we're arrayed on multiple things, like a set of regional
    VCF calls and data objects.
    """
    # Items carrying fewer keys than the rest are the "extra" CWL records.
    key_counts = {len(_get_all_cwlkeys([d], default_keys)) for d in items}
    extra_key_len = min(key_counts) if len(key_counts) > 1 else None
    data_out = []
    extra_out = []
    for d in items:
        if extra_key_len and len(_get_all_cwlkeys([d], default_keys)) == extra_key_len:
            extra_out.append(d)
        else:
            data_out.append(d)
    if not extra_out:
        return data_out, {}
    # All extra records must agree on which CWL keys they carry.
    cwl_keys = extra_out[0]["cwl_keys"]
    for extra in extra_out[1:]:
        assert extra["cwl_keys"] == cwl_keys, pprint.pformat(extra_out)
    cwl_extras = collections.defaultdict(list)
    for d in items:
        for key in cwl_keys:
            cwl_extras[key].append(d[key])
    # Strip the CWL keys out of the data samples before returning them.
    data_final = []
    for d in data_out:
        for key in cwl_keys:
            d.pop(key)
        data_final.append(d)
    return data_final, dict(cwl_extras)
|
python
|
def show_vcs_output_vcs_nodes_vcs_node_info_node_public_ipv6_addresses_node_public_ipv6_address(self, **kwargs):
    """Auto Generated Code

    Builds the show-vcs XML request with the node's public IPv6 address and
    submits it via the callback (``kwargs['callback']`` or ``self._callback``).
    """
    show_vcs = ET.Element("show_vcs")
    config = show_vcs
    # Build the nested element chain down to the ipv6 address leaf.
    leaf = show_vcs
    for tag in ("output", "vcs-nodes", "vcs-node-info",
                "node-public-ipv6-addresses", "node-public-ipv6-address"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('node_public_ipv6_address')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
java
|
/**
 * A variable is "well-defined" when it has an initializing reference whose
 * basic block provably executes before every other reference.
 *
 * @return true when the variable is well-defined
 */
protected boolean isWellDefined() {
    int total = references.size();
    if (total == 0) {
        return false;
    }
    // A declaration that does not instantiate the variable is not well-defined.
    Reference init = getInitializingReference();
    if (init == null) {
        return false;
    }
    checkState(references.get(0).isDeclaration());
    BasicBlock initBlock = init.getBasicBlock();
    for (int idx = 1; idx < total; idx++) {
        BasicBlock useBlock = references.get(idx).getBasicBlock();
        if (!initBlock.provablyExecutesBefore(useBlock)) {
            return false;
        }
    }
    return true;
}
|
java
|
/**
 * Marks this object as initialized.
 * <p>
 * The original implementation assigned {@code STATE_INITIALIZED} before
 * inspecting the previous state, so the {@code isInitialized()} check in the
 * error branch always saw the NEW state: the "call setInitializing() before"
 * diagnostic was unreachable and the wrong message was thrown. The state is
 * now validated before it is overwritten. (Behavior note: on failure the
 * state is no longer clobbered to initialized.)
 *
 * @throws IllegalStateException if already initialized, or if
 *         {@code setInitializing()} was never called
 */
public void setInitialized() {
    if (this.state != STATE_INITIALIZING) {
        // Inspect the PRE-transition state to produce the right diagnostic.
        if (isInitialized()) {
            throw new IllegalStateException("Already initialized.");
        } else {
            throw new IllegalStateException("You need to call setInitializing() before!");
        }
    }
    this.state = STATE_INITIALIZED;
}
|
java
|
/**
 * Visits this for-loop node; when the visitor accepts it, descends into the
 * initializer, condition, increment and body in that order.
 */
@Override
public void visit(NodeVisitor v) {
    // Guard clause: skip the children when the visitor declines this node.
    if (!v.visit(this)) {
        return;
    }
    initializer.visit(v);
    condition.visit(v);
    increment.visit(v);
    body.visit(v);
}
|
python
|
def create_metadata_file(output_path, data_dir):
    '''Creates a METADATA.json file from a data directory
    The file is written to output_path

    :param output_path: path of the METADATA.json file to write
    :param data_dir: directory holding the (BASIS).metadata.json and
                     (BASIS).(ver).table.json files to index
    '''
    # Relative path to all (BASIS).metadata.json files
    meta_filelist, table_filelist, _, _ = get_all_filelist(data_dir)
    metadata = {}
    for meta_file_relpath in meta_filelist:
        # Read in the metadata for a single basis set
        meta_file_path = os.path.join(data_dir, meta_file_relpath)
        bs_metadata = read_json_basis(meta_file_path)
        # Base of the filename for table basis sets
        # Basename is something like '6-31G.', including the last period
        base_relpath, meta_filename = os.path.split(meta_file_relpath)
        base_filename = meta_filename.split('.')[0] + '.'
        # All the table files that correspond to this metadata file
        # (relative to data_dir)
        this_filelist = [
            x for x in table_filelist
            if os.path.dirname(x) == base_relpath and os.path.basename(x).startswith(base_filename)
        ]
        # The 'versions' dict that will go into the metadata
        version_info = {}
        # Make sure function types are the same
        function_types = None
        # For each table basis, compose it
        # NOTE(review): assumes at least one table file matches; otherwise
        # `bs` and `latest_ver` below would be undefined — confirm upstream.
        for table_file in this_filelist:
            # Obtain just the filename of the table basis
            table_filename = os.path.basename(table_file)
            # Obtain the base filename and version from the filename
            # The base filename is the part before the first period
            # (filebase.ver.table.json)
            table_filebase, ver, _, _ = table_filename.split('.')
            # Fully compose the basis set from components
            bs = compose_table_basis(table_file, data_dir)
            # Elements for which this basis is defined
            defined_elements = sorted(list(bs['elements'].keys()), key=lambda x: int(x))
            # Determine the types of functions contained in the basis
            # (gto, ecp, etc)
            if function_types is None:
                function_types = bs['function_types']
            elif function_types != bs['function_types']:
                raise RuntimeError("Differing function types across versions for " + base_filename)
            # Create the metadata for this specific version
            # yapf: disable
            version_info[ver] = { 'file_relpath': table_file,
                                  'revdesc': bs['revision_description'],
                                  'elements': defined_elements
                                }
            # yapf: enable
        # Sort the version dicts
        version_info = dict(sorted(version_info.items()))
        # Find the maximum version for this basis
        latest_ver = max(version_info.keys())
        # Create the common metadata for this basis set
        # display_name and other_names are placeholders to keep order
        # (fields of `bs` here come from the last composed version)
        # yapf: disable
        common_md = { 'display_name': None,
                      'other_names': None,
                      'description': bs['description'],
                      'latest_version': latest_ver,
                      'basename': base_filename[:-1], # Strip off that trailing period
                      'relpath': base_relpath,
                      'family': bs['family'],
                      'role': bs['role'],
                      'functiontypes': function_types,
                      'auxiliaries': bs['auxiliaries'],
                      'versions': version_info }
        # yapf: enable
        # Loop through all the common names, translate them, and then add the data
        for bs_name in bs_metadata['names']:
            tr_name = transform_basis_name(bs_name)
            if tr_name in metadata:
                raise RuntimeError("Duplicate basis set name: " + tr_name)
            # Create a new entry, with all the common metadata
            # Also, store the other names for this basis
            other_names = bs_metadata['names'].copy()
            other_names.remove(bs_name)
            metadata[tr_name] = common_md.copy()
            metadata[tr_name]['display_name'] = bs_name
            metadata[tr_name]['other_names'] = other_names
    # Write out the metadata
    metadata = dict(sorted(metadata.items()))
    _write_plain_json(output_path, metadata)
|
python
|
def get_request_participants(self, issue_id_or_key, start=0, limit=50):
    """
    Get request participants
    :param issue_id_or_key: str
    :param start: OPTIONAL: int
    :param limit: OPTIONAL: int
    :return: Request participants
    """
    url = 'rest/servicedeskapi/request/{}/participant'.format(issue_id_or_key)
    # Only include paging parameters that were actually supplied.
    params = {name: int(value)
              for name, value in (('start', start), ('limit', limit))
              if value is not None}
    return self.get(url, params=params).get('values')
|
python
|
def write_master_ninja(master_ninja, targets):
  """Writes master build.ninja file, referencing all given subninjas."""
  master_ninja.variable('cxx', 'c++')
  master_ninja.variable('ld', '$cxx')
  # Static-archive tool differs on macOS.
  archiver = 'libtool -static' if sys.platform == 'darwin' else 'ar rcs'
  master_ninja.variable('alink', archiver)
  master_ninja.newline()
  master_ninja.pool('link_pool', depth=4)
  master_ninja.newline()
  master_ninja.rule('cxx', description='CXX $out',
                    command='$cxx -MMD -MF $out.d $defines $includes $cflags -c $in -o $out',
                    depfile='$out.d', deps='gcc')
  master_ninja.rule('alink', description='ARCHIVE $out',
                    command='rm -f $out && $alink -o $out $in')
  master_ninja.rule('link', description='LINK $out', pool='link_pool',
                    command='$ld $ldflags -o $out $in $libs')
  master_ninja.rule('stamp', description='STAMP $out', command='touch $out')
  master_ninja.newline()
  for tgt in targets:
    master_ninja.subninja(tgt.ninja_file_path)
  master_ninja.newline()
  master_ninja.comment('Short names for targets.')
  for tgt in targets:
    if tgt.name != tgt.output:
      master_ninja.build(tgt.name, 'phony', tgt.output)
  master_ninja.newline()
  master_ninja.build('all', 'phony', [tgt.output for tgt in targets])
  master_ninja.default('all')
|
python
|
def parse(self, ticket):
    """Parses the passed ticket, returning a tuple containing the digest,
    user_id, valid_until, tokens, and user_data fields

    :param ticket: the serialized ticket string
    :return: a TicketInfo tuple
    :raises TicketParseError: if the ticket is too short, the hex time
        field is malformed, or the tail does not split into three parts
    """
    if len(ticket) < self._min_ticket_size():
        raise TicketParseError(ticket, 'Invalid ticket length')
    # Hex digest comes first; its length depends on the configured hash.
    digest_len = self._hash.digest_size * 2
    digest = ticket[:digest_len]
    # Next: 8 hex characters encoding the validity time.
    time_len = 8
    try:
        time = int(ticket[digest_len:digest_len + time_len], 16)
    except (ValueError, TypeError):
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; only a malformed hex field is a parse error.
        raise TicketParseError(ticket, 'Invalid time field')
    # Remainder is "user_id!token,token,...!user_data" (URL-quoted parts).
    parts = ticket[digest_len + time_len:].split('!')
    if len(parts) != 3:
        raise TicketParseError(ticket, 'Missing parts')
    user_id = ulp.unquote(parts[0])
    tokens = ()
    if parts[1]:
        tokens = tuple(ulp.unquote(t) for t in parts[1].split(','))
    user_data = ulp.unquote(parts[2])
    return TicketInfo(digest, user_id, tokens, user_data, time)
|
java
|
/**
 * Creates a RECORD schema shell with the given name and no fields set.
 *
 * @param name the record name, must not be null
 * @return the new record schema
 */
public static Schema recordOf(String name) {
    // Fail fast with a clear message before constructing anything.
    Preconditions.checkNotNull(name, "Record name cannot be null.");
    Schema record = new Schema(Type.RECORD, null, null, null, null, name, null, null);
    return record;
}
|
java
|
/**
 * @return true only when every tracked future has completed
 */
public boolean isDone() {
    // Short-circuit: one unfinished future is enough to answer "no".
    for (Pair<T, Future<?>> pair : _pairs) {
        if (!pair.second.isDone()) {
            return false;
        }
    }
    return true;
}
|
python
|
def set_metadata(self, set_id, fp):
    """
    Set the XML metadata on a set.
    :param set_id: id of the set to attach metadata to.
    :param file fp: file-like object to read the XML metadata from.
    """
    # Resolve the set's single-resource URL, then upload the metadata to it.
    self._metadata.set(
        self.client.get_url('SET', 'GET', 'single', {'id': set_id}),
        fp)
|
java
|
/**
 * Enables or disables receiving notify messages: records the flag and the
 * result handler, then (re)establishes the connection.
 *
 * @param enable whether notify messages should be received
 * @param handler callback notified of the outcome
 */
public void enableReceiveNotifyMsg(boolean enable, EnableReceiveNotifyMsgHandler handler) {
    HMSAgentLog.i("enableReceiveNotifyMsg:enable=" + enable + " handler=" + StrUtils.objDesc(handler));
    // Stash state first; connect() presumably applies it once connected —
    // TODO confirm against the connect callback.
    this.enable = enable;
    this.handler = handler;
    connect();
}
|
python
|
def set_mtime(self, name, mtime, size):
    """Set modification time on file."""
    self.check_write(name)
    target_path = os.path.join(self.cur_dir, name)
    # (-1, mtime): atime value passed through unchanged by os.utime;
    # `size` is accepted for interface parity but not used here.
    os.utime(target_path, (-1, mtime))
|
python
|
def connectProcess(connection, processProtocol, commandLine='', env=None,
                   usePTY=None, childFDs=None, *args, **kwargs):
    """Opens a SSHSession channel and connects a ProcessProtocol to it

    @param connection: the SSH Connection to open the session channel on
    @param processProtocol: the ProcessProtocol instance to connect to the process
    @param commandLine: the command line to execute the process
    @param env: optional environment variables to set for the process
    @param usePTY: if set, request a PTY for the process
    @param childFDs: custom child file descriptors for the process
    @return: a Deferred that fires when the channel opens (errbacks on failure)
    """
    # `env` used to default to a mutable {} shared across calls; use the
    # None sentinel and substitute a fresh dict per call instead.
    if env is None:
        env = {}
    processOpenDeferred = defer.Deferred()
    process = SSHProcess(processProtocol, commandLine, env, usePTY, childFDs,
                         *args, **kwargs)
    # Route the channel's open/fail events into the returned Deferred.
    process.processOpen = processOpenDeferred.callback
    process.openFailed = processOpenDeferred.errback
    connection.openChannel(process)
    return processOpenDeferred
|
python
|
def write_stats_to_json(cls, file_name, stats):
    """Write stats to a local json file."""
    payload = cls._json_dump_options(stats)
    file_mode = 'wb' if not PY3 else 'w'
    try:
        safe_file_dump(file_name, payload, mode=file_mode)
    except Exception as exc:  # Broad catch - we don't want to fail in stats related failure.
        print('WARNING: Failed to write stats to {} due to Error: {}'.format(file_name, exc),
              file=sys.stderr)
|
java
|
/**
 * Marshalls the given AlarmConfiguration into the protocol stream using the
 * pre-declared field bindings (enabled, ignorePollAlarmFailure, alarms).
 *
 * @param alarmConfiguration the object to marshall, must not be null
 * @param protocolMarshaller the marshaller to write to
 * @throws SdkClientException if the input is null or marshalling fails
 */
public void marshall(AlarmConfiguration alarmConfiguration, ProtocolMarshaller protocolMarshaller) {
    if (alarmConfiguration == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(alarmConfiguration.getEnabled(), ENABLED_BINDING);
        protocolMarshaller.marshall(alarmConfiguration.getIgnorePollAlarmFailure(), IGNOREPOLLALARMFAILURE_BINDING);
        protocolMarshaller.marshall(alarmConfiguration.getAlarms(), ALARMS_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Closes this resource: unmaps the byte buffer, releases the file lock, and
 * closes the underlying file. The underlying file is now closed even when
 * unmapping or releasing the lock throws — previously an exception in either
 * step leaked the file handle.
 *
 * @throws IOException if releasing the lock or closing the file fails
 */
public synchronized void close() throws IOException {
    writable = false;
    try {
        if (map != null) {
            ByteArrayUtil.unmapByteBuffer(map);
            map = null;
        }
        if (lock != null) {
            lock.release();
            lock = null;
        }
    } finally {
        // Always close the underlying file, even if cleanup above failed.
        file.close();
    }
}
|
python
|
def _set_allow(self, v, load=False):
    """
    Setter method for allow, mapped from YANG variable /port_profile/allow (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_allow is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_allow() directly.
    YANG Description: Allow/Drop non-profiled macs

    NOTE: auto-generated pyangbind setter — the YANGDynClass argument list
    mirrors the YANG schema and should not be edited by hand.
    """
    # Unwrap a previously-wrapped YANGDynClass value back to its base type.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=allow.allow, is_container='container', presence=False, yang_name="allow", rest_name="allow", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow/Drop non-profiled macs', u'display-when': u'../name = "default"', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """allow must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=allow.allow, is_container='container', presence=False, yang_name="allow", rest_name="allow", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow/Drop non-profiled macs', u'display-when': u'../name = "default"', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='container', is_config=True)""",
        })
    self.__allow = t
    # Notify the change hook if the generated class defines one.
    if hasattr(self, '_set'):
        self._set()
|
python
|
def get_freesasa_annotations(self, outdir, include_hetatms=False, force_rerun=False):
    """Run ``freesasa`` on this structure and store the calculated properties in the corresponding ChainProps

    :param outdir: directory where the freesasa output file is written
    :param include_hetatms: if True, include HETATM records in the calculation
        (changes the output filename suffix and the annotation key suffix)
    :param force_rerun: if True, rerun freesasa even if output already exists
    """
    # freesasa needs a PDB-format input; bail out early on anything else.
    if self.file_type != 'pdb':
        log.error('{}: unable to run freesasa with "{}" file type. Please change file type to "pdb"'.format(self.id,
                                                                                                           self.file_type))
        return
    # Parse the structure to store chain sequences
    if self.structure:
        parsed = self.structure
    else:
        parsed = self.parse_structure()
    if not parsed:
        log.error('{}: unable to open structure to run freesasa'.format(self.id))
        return
    # Set outfile name
    log.debug('{}: running freesasa'.format(self.id))
    if include_hetatms:
        outfile = '{}.freesasa_het.rsa'.format(self.id)
    else:
        outfile = '{}.freesasa_nohet.rsa'.format(self.id)
    # Run freesasa
    result = fs.run_freesasa(infile=self.structure_path,
                             outfile=outfile,
                             include_hetatms=include_hetatms,
                             outdir=outdir,
                             force_rerun=force_rerun)
    # Parse results
    result_parsed = fs.parse_rsa_data(result)
    # Group parsed values per chain: chain id -> property name -> list of values.
    # NOTE(review): assumes keys of the parsed RSA data are tuples whose first
    # item is the chain id — confirm against fs.parse_rsa_data.
    prop_dict = defaultdict(lambda: defaultdict(list))
    for k, v in result_parsed.items():
        chain = k[0]
        for prop, calc in v.items():
            prop_dict[chain][prop].append(calc)
    # Reorganize and store results
    all_props = ['all_atoms_abs', 'all_atoms_rel', 'side_chain_abs', 'side_chain_rel', 'main_chain_abs',
                 'main_chain_rel', 'non_polar_abs', 'non_polar_rel', 'all_polar_abs', 'all_polar_rel']
    all_props_renamed = {'all_atoms_abs' : 'ASA_ALL-freesasa',
                         'all_atoms_rel' : 'RSA_ALL-freesasa',
                         'all_polar_abs' : 'ASA_POLAR-freesasa',
                         'all_polar_rel' : 'RSA_POLAR-freesasa',
                         'main_chain_abs': 'ASA_BACKBONE-freesasa',
                         'main_chain_rel': 'RSA_BACKBONE-freesasa',
                         'non_polar_abs' : 'ASA_NONPOLAR-freesasa',
                         'non_polar_rel' : 'RSA_NONPOLAR-freesasa',
                         'side_chain_abs': 'ASA_RESIDUE-freesasa',
                         'side_chain_rel': 'RSA_RESIDUE-freesasa'}
    ## Rename dictionary keys based on if HETATMs were included
    if include_hetatms:
        suffix = '_het'
    else:
        suffix = '_nohet'
    for k, v in all_props_renamed.items():
        all_props_renamed[k] = v + suffix
    # Align each per-residue property list to the chain's sequence, padding
    # missing positions with Inf, then store as letter annotations.
    for chain in self.chains:
        for prop in all_props:
            prop_list = ssbio.protein.structure.properties.residues.match_structure_sequence(orig_seq=chain.seq_record,
                                                                                             new_seq=prop_dict[chain.id][prop],
                                                                                             fill_with=float('Inf'),
                                                                                             ignore_excess=True)
            chain.seq_record.letter_annotations[all_props_renamed[prop]] = prop_list
        log.debug('{}: stored freesasa calculations in chain seq_record letter_annotations'.format(chain))
|
python
|
def run(self):
    """Begins simultaneous generation/acquisition
    :returns: numpy.ndarray -- read samples

    Requires arm() (or equivalent) to have created ``self.aotask`` first;
    acquires ``self.daq_lock`` for the duration of the sweep (released by a
    later reset on success, or here on failure).
    """
    try:
        if self.aotask is None:
            print u"You must arm the calibration first"
            return
        # acquire data and stop task, lock must have been release by
        # previous reset
        self.daq_lock.acquire()
        self.aotask.StartTask()
        self.aitask.StartTask()
        # blocking read
        data = self.aitask.read()
        # write task should always be shorter than read
        # self.aotask.WaitUntilTaskDone(10)
        self.nacquired += 1
        self.aitask.stop()
        self.aotask.stop()
    except:
        # On any failure: release the lock, stop the tasks, and re-raise so
        # the caller still sees the original exception.
        print u'ERROR! TERMINATE!'
        self.daq_lock.release()
        self.stop()
        raise
    return data
|
java
|
/**
 * Deletes locks for this entity that have expired as of now.
 *
 * @param lock the lock whose entity type and key identify what to purge
 * @throws LockingException on deletion failure
 */
public void deleteExpired(IEntityLock lock) throws LockingException {
    // Expire relative to the current wall-clock time.
    Date now = new Date();
    deleteExpired(now, lock.getEntityType(), lock.getEntityKey());
}
|
java
|
/**
 * Adds a comment to the given discussion on the given sheet.
 *
 * @param sheetId id of the sheet
 * @param discussionId id of the discussion on that sheet
 * @param comment the comment to create
 * @return the created comment
 * @throws SmartsheetException on API failure
 */
public Comment addComment(long sheetId, long discussionId, Comment comment) throws SmartsheetException{
    String path = "sheets/" + sheetId + "/discussions/" + discussionId + "/comments";
    return this.createResource(path, Comment.class, comment);
}
|
python
|
def parsecommonarguments(object, doc, annotationtype, required, allowed, **kwargs):
    """Internal function to parse common FoLiA attributes and sets up the instance accordingly. Do not invoke directly.

    :param object: the FoLiA element instance being configured
    :param doc: the FoLiA root document (may be None)
    :param annotationtype: the element's annotation type (for set/default lookups)
    :param required: tuple of Attrib members that MUST be supplied
    :param allowed: tuple of Attrib members that MAY be supplied
    :param kwargs: the keyword arguments passed to the element constructor;
        recognised keys are consumed (deleted) as they are processed
    :return: the remaining (unconsumed) kwargs
    """
    object.doc = doc #The FoLiA root document
    if required is None:
        required = tuple()
    if allowed is None:
        allowed = tuple()
    supported = required + allowed
    if 'generate_id_in' in kwargs:
        try:
            kwargs['id'] = kwargs['generate_id_in'].generate_id(object.__class__)
        except GenerateIDException:
            pass #ID could not be generated, just skip
        del kwargs['generate_id_in']
    if 'id' in kwargs:
        if Attrib.ID not in supported:
            raise ValueError("ID is not supported on " + object.__class__.__name__)
        isncname(kwargs['id'])
        object.id = kwargs['id']
        del kwargs['id']
    elif Attrib.ID in required:
        raise ValueError("ID is required for " + object.__class__.__name__)
    else:
        object.id = None
    if 'set' in kwargs:
        if Attrib.CLASS not in supported and not object.SETONLY:
            raise ValueError("Set is not supported on " + object.__class__.__name__)
        if not kwargs['set']:
            object.set ="undefined"
        else:
            object.set = kwargs['set']
        del kwargs['set']
        if object.set:
            if doc and (not (annotationtype in doc.annotationdefaults) or not (object.set in doc.annotationdefaults[annotationtype])):
                if object.set in doc.alias_set:
                    object.set = doc.alias_set[object.set]
                elif doc.autodeclare:
                    doc.annotations.append( (annotationtype, object.set ) )
                    doc.annotationdefaults[annotationtype] = {object.set: {} }
                else:
                    raise ValueError("Set '" + object.set + "' is used for " + object.__class__.__name__ + ", but has no declaration!")
    elif annotationtype in doc.annotationdefaults and len(doc.annotationdefaults[annotationtype]) == 1:
        object.set = list(doc.annotationdefaults[annotationtype].keys())[0]
    elif object.ANNOTATIONTYPE == AnnotationType.TEXT:
        object.set = "undefined" #text content needs never be declared (for backward compatibility) and is in set 'undefined'
    elif Attrib.CLASS in required: #or (hasattr(object,'SETONLY') and object.SETONLY):
        raise ValueError("Set is required for " + object.__class__.__name__)
    if 'class' in kwargs:
        if not Attrib.CLASS in supported:
            raise ValueError("Class is not supported for " + object.__class__.__name__)
        object.cls = kwargs['class']
        del kwargs['class']
    elif 'cls' in kwargs:
        if not Attrib.CLASS in supported:
            raise ValueError("Class is not supported on " + object.__class__.__name__)
        object.cls = kwargs['cls']
        del kwargs['cls']
    elif Attrib.CLASS in required:
        raise ValueError("Class is required for " + object.__class__.__name__)
    if object.cls and not object.set:
        if doc and doc.autodeclare:
            if not (annotationtype, 'undefined') in doc.annotations:
                doc.annotations.append( (annotationtype, 'undefined') )
                doc.annotationdefaults[annotationtype] = {'undefined': {} }
            object.set = 'undefined'
        else:
            raise ValueError("Set is required for " + object.__class__.__name__ + ". Class '" + object.cls + "' assigned without set.")
    if 'annotator' in kwargs:
        if not Attrib.ANNOTATOR in supported:
            raise ValueError("Annotator is not supported for " + object.__class__.__name__)
        object.annotator = kwargs['annotator']
        del kwargs['annotator']
    elif doc and annotationtype in doc.annotationdefaults and object.set in doc.annotationdefaults[annotationtype] and 'annotator' in doc.annotationdefaults[annotationtype][object.set]:
        object.annotator = doc.annotationdefaults[annotationtype][object.set]['annotator']
    elif Attrib.ANNOTATOR in required:
        raise ValueError("Annotator is required for " + object.__class__.__name__)
    if 'annotatortype' in kwargs:
        if not Attrib.ANNOTATOR in supported:
            raise ValueError("Annotatortype is not supported for " + object.__class__.__name__)
        if kwargs['annotatortype'] == 'auto' or kwargs['annotatortype'] == AnnotatorType.AUTO:
            object.annotatortype = AnnotatorType.AUTO
        elif kwargs['annotatortype'] == 'manual' or kwargs['annotatortype'] == AnnotatorType.MANUAL:
            object.annotatortype = AnnotatorType.MANUAL
        else:
            raise ValueError("annotatortype must be 'auto' or 'manual', got " + repr(kwargs['annotatortype']))
        del kwargs['annotatortype']
    elif doc and annotationtype in doc.annotationdefaults and object.set in doc.annotationdefaults[annotationtype] and 'annotatortype' in doc.annotationdefaults[annotationtype][object.set]:
        object.annotatortype = doc.annotationdefaults[annotationtype][object.set]['annotatortype']
    elif Attrib.ANNOTATOR in required:
        raise ValueError("Annotatortype is required for " + object.__class__.__name__)
    if 'confidence' in kwargs:
        if not Attrib.CONFIDENCE in supported:
            raise ValueError("Confidence is not supported")
        if kwargs['confidence'] is not None:
            try:
                object.confidence = float(kwargs['confidence'])
                assert object.confidence >= 0.0 and object.confidence <= 1.0
            except:
                raise ValueError("Confidence must be a floating point number between 0 and 1, got " + repr(kwargs['confidence']) )
        del kwargs['confidence']
    elif Attrib.CONFIDENCE in required:
        raise ValueError("Confidence is required for " + object.__class__.__name__)
    if 'n' in kwargs:
        if not Attrib.N in supported:
            raise ValueError("N is not supported for " + object.__class__.__name__)
        object.n = kwargs['n']
        del kwargs['n']
    elif Attrib.N in required:
        raise ValueError("N is required for " + object.__class__.__name__)
    if 'datetime' in kwargs:
        if not Attrib.DATETIME in supported:
            raise ValueError("Datetime is not supported")
        if isinstance(kwargs['datetime'], datetime):
            object.datetime = kwargs['datetime']
        else:
            #try:
            object.datetime = parse_datetime(kwargs['datetime'])
            #except:
            #    raise ValueError("Unable to parse datetime: " + str(repr(kwargs['datetime'])))
        del kwargs['datetime']
    elif doc and annotationtype in doc.annotationdefaults and object.set in doc.annotationdefaults[annotationtype] and 'datetime' in doc.annotationdefaults[annotationtype][object.set]:
        object.datetime = doc.annotationdefaults[annotationtype][object.set]['datetime']
    elif Attrib.DATETIME in required:
        raise ValueError("Datetime is required for " + object.__class__.__name__)
    if 'src' in kwargs:
        if not Attrib.SRC in supported:
            raise ValueError("Source is not supported for " + object.__class__.__name__)
        object.src = kwargs['src']
        del kwargs['src']
    elif Attrib.SRC in required:
        raise ValueError("Source is required for " + object.__class__.__name__)
    if 'begintime' in kwargs:
        if not Attrib.BEGINTIME in supported:
            raise ValueError("Begintime is not supported for " + object.__class__.__name__)
        object.begintime = parsetime(kwargs['begintime'])
        del kwargs['begintime']
    elif Attrib.BEGINTIME in required:
        raise ValueError("Begintime is required for " + object.__class__.__name__)
    if 'endtime' in kwargs:
        if not Attrib.ENDTIME in supported:
            raise ValueError("Endtime is not supported for " + object.__class__.__name__)
        object.endtime = parsetime(kwargs['endtime'])
        del kwargs['endtime']
    elif Attrib.ENDTIME in required:
        raise ValueError("Endtime is required for " + object.__class__.__name__)
    if 'speaker' in kwargs:
        if not Attrib.SPEAKER in supported:
            raise ValueError("Speaker is not supported for " + object.__class__.__name__)
        object.speaker = kwargs['speaker']
        del kwargs['speaker']
    elif Attrib.SPEAKER in required:
        raise ValueError("Speaker is required for " + object.__class__.__name__)
    if 'auth' in kwargs:
        if kwargs['auth'] in ('no','false'):
            object.auth = False
        else:
            object.auth = bool(kwargs['auth'])
        del kwargs['auth']
    else:
        object.auth = object.__class__.AUTH
    if 'text' in kwargs:
        if kwargs['text']:
            object.settext(kwargs['text'])
        del kwargs['text']
    if 'phon' in kwargs:
        if kwargs['phon']:
            object.setphon(kwargs['phon'])
        del kwargs['phon']
    if 'textclass' in kwargs:
        if not Attrib.TEXTCLASS in supported:
            raise ValueError("Textclass is not supported for " + object.__class__.__name__)
        object.textclass = kwargs['textclass']
        del kwargs['textclass']
    else:
        if Attrib.TEXTCLASS in supported:
            object.textclass = "current"
    if 'metadata' in kwargs:
        if not Attrib.METADATA in supported:
            raise ValueError("Metadata is not supported for " + object.__class__.__name__)
        object.metadata = kwargs['metadata']
        if doc:
            try:
                doc.submetadata[kwargs['metadata']]
            except KeyError:
                raise KeyError("No such metadata defined: " + kwargs['metadata'])
        del kwargs['metadata']
    if object.XLINK:
        if 'href' in kwargs:
            object.href =kwargs['href']
            del kwargs['href']
        if 'xlinktype' in kwargs:
            object.xlinktype = kwargs['xlinktype']
            del kwargs['xlinktype']
        if 'xlinkrole' in kwargs:
            object.xlinkrole = kwargs['xlinkrole']
            del kwargs['xlinkrole']
        if 'xlinklabel' in kwargs:
            object.xlinklabel = kwargs['xlinklabel']
            del kwargs['xlinklabel']
        if 'xlinkshow' in kwargs:
            object.xlinkshow = kwargs['xlinkshow']
            #BUGFIX: previously deleted kwargs['xlinklabel'] here (copy-paste
            #error), raising KeyError when xlinkshow was passed without
            #xlinklabel and leaving 'xlinkshow' behind in kwargs.
            del kwargs['xlinkshow']
        if 'xlinktitle' in kwargs:
            object.xlinktitle = kwargs['xlinktitle']
            del kwargs['xlinktitle']
    if doc and doc.debug >= 2:
        print("   @id           = ", repr(object.id),file=stderr)
        print("   @set          = ", repr(object.set),file=stderr)
        print("   @class        = ", repr(object.cls),file=stderr)
        print("   @annotator    = ", repr(object.annotator),file=stderr)
        print("   @annotatortype= ", repr(object.annotatortype),file=stderr)
        print("   @confidence   = ", repr(object.confidence),file=stderr)
        print("   @n            = ", repr(object.n),file=stderr)
        print("   @datetime     = ", repr(object.datetime),file=stderr)
    #set index
    if object.id and doc:
        if object.id in doc.index:
            if doc.debug >= 1: print("[PyNLPl FoLiA DEBUG] Duplicate ID not permitted:" + object.id,file=stderr)
            raise DuplicateIDError("Duplicate ID not permitted: " + object.id)
        else:
            if doc.debug >= 1: print("[PyNLPl FoLiA DEBUG] Adding to index: " + object.id,file=stderr)
            doc.index[object.id] = object
    #Parse feature attributes (shortcut for feature specification for some elements)
    for c in object.ACCEPTED_DATA:
        if issubclass(c, Feature):
            if c.SUBSET in kwargs:
                if kwargs[c.SUBSET]:
                    object.append(c,cls=kwargs[c.SUBSET])
                del kwargs[c.SUBSET]
    return kwargs
|
java
|
/**
 * Sets this matrix from the six coefficients of a 2x3 affine transform;
 * the remaining (third) row is filled with 0, 0, 1.
 */
public void set(double m00, double m01, double m02, double m10, double m11, double m12) {
    set(m00, m01, m02, m10, m11, m12, 0., 0., 1.);
}
|
java
|
/**
 * Performs a left rotation around {@code p}: p's right child {@code r}
 * takes p's place in the tree and p becomes r's left child. In-order
 * ordering is preserved. No-op when {@code p} is null.
 * The statement order below is significant — parent links are read before
 * they are overwritten.
 */
protected void rotateLeft(final OMVRBTreeEntry<K, V> p) {
    if (p != null) {
        OMVRBTreeEntry<K, V> r = p.getRight();
        // p adopts r's left subtree as its new right subtree.
        p.setRight(r.getLeft());
        if (r.getLeft() != null)
            r.getLeft().setParent(p);
        // r replaces p under p's former parent (or becomes the root).
        r.setParent(p.getParent());
        if (p.getParent() == null)
            setRoot(r);
        else if (p.getParent().getLeft() == p)
            p.getParent().setLeft(r);
        else
            p.getParent().setRight(r);
        // Finally, link p as r's left child.
        p.setParent(r);
        r.setLeft(p);
    }
}
|
java
|
/**
 * Returns a JSON map of device-type name to installation count for the
 * given push application.
 *
 * @param pushApplicationID id of the push application
 * @return 200 OK with the per-type counts as the entity
 */
@GET
@Path("/{pushAppID}/count")
@Produces(MediaType.APPLICATION_JSON)
public Response countInstallations(@PathParam("pushAppID") String pushApplicationID) {
    logger.trace("counting devices by type for push application '{}'", pushApplicationID);
    return Response.ok(pushAppService.countInstallationsByType(pushApplicationID)).build();
}
|
java
|
/**
 * Joins the elements of {@code collection} with {@code separator} between
 * them. When {@code trailing} is true the separator is also appended after
 * the last element. An empty collection yields the separator alone when
 * trailing, otherwise the empty string.
 */
public static String join(Collection collection,
                          String separator,
                          boolean trailing) {
    if (collection.isEmpty()) {
        return trailing ? separator : "";
    }
    StringBuilder joined = new StringBuilder();
    for (Object element : collection) {
        joined.append(element).append(separator);
    }
    if (!trailing) {
        // Trim the final separator off in place.
        joined.setLength(joined.length() - separator.length());
    }
    return joined.toString();
}
|
python
|
def del_actor(self, actor):
    """Forget a client actor whose socket has closed.

    Drops the actor from the client table, notifies the service element
    (if attached), and — when a reconnect delay is configured for this
    peer — schedules a reconnect attempt after that delay.
    """
    if _debug: TCPClientDirector._debug("del_actor %r", actor)

    # remove the actor from the client table
    del self.clients[actor.peer]

    # tell the service element the client has gone away
    if self.serviceElement:
        self.sap_request(del_actor=actor)

    # schedule a reconnect if one is configured for this peer
    if actor.peer in self.reconnect:
        delay = self.reconnect[actor.peer]
        task = FunctionTask(self.connect, actor.peer)
        task.install_task(_time() + delay)
|
java
|
/**
 * Reads the next object parameter named <code>strName</code> from the stream
 * and returns it as a properties map.
 *
 * @param in         the input stream to read from
 * @param strName    the parameter name to look up
 * @param properties default/fallback properties passed through to the lookup
 * @return the parameter value as a <code>Map&lt;String, Object&gt;</code>
 */
@SuppressWarnings("unchecked")
public Map<String, Object> getNextPropertiesParam(InputStream in, String strName, Map<String, Object> properties)
{
    // Cast to the parameterized type rather than the raw Map; the cast is
    // still unchecked (hence the suppression) but no longer erases the
    // value type for callers reading this code.
    return (Map<String, Object>)this.getNextObjectParam(in, strName, properties);
}
|
java
|
/**
 * Verifies a payment-failure callback by recomputing its auth code.
 *
 * The expected code is the upper-cased MD5 hex digest of
 * "orderNumber|timestamp|merchantSecret"; the method returns whether it
 * matches the code supplied by the caller.
 *
 * @param orderNumber the order number from the callback
 * @param timestamp   the timestamp from the callback
 * @param authCode    the auth code to verify (upper-case hex expected)
 * @return true when the recomputed digest equals {@code authCode}
 */
public boolean confirmFailure(String orderNumber, String timestamp, String authCode) {
    final String payload = orderNumber + '|' + timestamp + '|' + merchantSecret;
    final String expected = StringUtils.upperCase(DigestUtils.md5Hex(payload));
    return StringUtils.equals(expected, authCode);
}
|
python
|
def rulejoin(class_rule, method_rule):
    """
    Join class and method rules. Used internally by :class:`ClassView` to
    combine rules from the :func:`route` decorators on the class and on the
    individual view handler methods. A method rule starting with ``/`` is
    absolute and replaces the class rule entirely::
    >>> rulejoin('/', '')
    '/'
    >>> rulejoin('/', 'first')
    '/first'
    >>> rulejoin('/first', '/second')
    '/second'
    >>> rulejoin('/first', 'second')
    '/first/second'
    >>> rulejoin('/first/', 'second')
    '/first/second'
    >>> rulejoin('/first/<second>', '')
    '/first/<second>'
    >>> rulejoin('/first/<second>', 'third')
    '/first/<second>/third'
    """
    # Absolute method rule wins outright.
    if method_rule.startswith('/'):
        return method_rule
    # No separator needed when the method rule is empty or the class rule
    # already ends with one.
    if not method_rule or class_rule.endswith('/'):
        return class_rule + method_rule
    return class_rule + '/' + method_rule
|
java
|
/**
 * Returns the session's queue of completed-but-unclaimed read futures,
 * lazily creating it on first use. Uses setAttributeIfAbsent so that when
 * two threads race to initialize, both end up with the single queue that
 * actually won the publication.
 */
private Queue<ReadFuture> getReadyReadFutures() {
    Queue<ReadFuture> readyReadFutures =
        (Queue<ReadFuture>) getAttribute(READY_READ_FUTURES_KEY);
    if (readyReadFutures == null) {
        readyReadFutures = new CircularQueue<>();
        // Atomically publish; a non-null return means another thread
        // installed its queue first, so use that one instead of ours.
        Queue<ReadFuture> oldReadyReadFutures =
            (Queue<ReadFuture>) setAttributeIfAbsent(
                    READY_READ_FUTURES_KEY, readyReadFutures);
        if (oldReadyReadFutures != null) {
            readyReadFutures = oldReadyReadFutures;
        }
    }
    return readyReadFutures;
}
|
java
|
/**
 * Extracts the {@link ContentType} of the given entity from its
 * Content-Type header, if any.
 *
 * @param entity the entity to inspect; may be null
 * @return the parsed content type, or null when the entity is null, has no
 *         Content-Type header, or the header carries no elements
 * @throws ParseException              if the header cannot be parsed
 * @throws UnsupportedCharsetException if the declared charset is unknown
 */
public static ContentType get(
        final HttpEntity entity) throws ParseException, UnsupportedCharsetException {
    if (entity == null) {
        return null;
    }
    final Header contentTypeHeader = entity.getContentType();
    if (contentTypeHeader == null) {
        return null;
    }
    final HeaderElement[] elements = contentTypeHeader.getElements();
    return elements.length > 0 ? create(elements[0]) : null;
}
|
python
|
def start(self):
    """Start the underlying server with this component's options and
    handler, blocking to serve requests indefinitely."""
    server = Server()
    server.start(self.options, self.handler_function, self.__class__.component_type)
|
java
|
/**
 * Lists all the VirtualWANs, emitting one {@link Page} of
 * {@link VirtualWANInner} resources at a time.
 *
 * @return an observable of result pages, with the service-response
 *         envelope already unwrapped
 */
public Observable<Page<VirtualWANInner>> listAsync() {
    return listWithServiceResponseAsync()
        .map(new Func1<ServiceResponse<Page<VirtualWANInner>>, Page<VirtualWANInner>>() {
            @Override
            public Page<VirtualWANInner> call(ServiceResponse<Page<VirtualWANInner>> serviceResponse) {
                // Strip the ServiceResponse wrapper, keeping only the page body.
                return serviceResponse.body();
            }
        });
}
|
java
|
/**
 * Resolves a set of resource paths against a template location, producing
 * the full paths to import.
 *
 * The template location is truncated at its first wildcard ('*') and
 * normalized to end with '/'. Resource paths that already carry a URI
 * scheme (e.g. "classpath:...") are kept as-is; all others are resolved
 * relative to the normalized template location.
 *
 * @param templateLoc        the template location, possibly containing a wildcard
 * @param relResourcePathSet resource paths, relative or scheme-qualified
 * @return the set of fully resolved resource paths
 * @throws RuntimeException if a path cannot be resolved as a URI
 */
static Set<String> determineImportOnUpdatePaths(
    String templateLoc, Set<String> relResourcePathSet) {
  String templateLocPath = templateLoc.split("\\*")[0]; // up to wildcard pattern
  if (!templateLocPath.endsWith("/")) {
    templateLocPath = templateLocPath + "/";
  }
  final Set<String> fullResourcePathSet = new HashSet<>();
  for (String resourcePath : relResourcePathSet) {
    final String fullPath;
    try {
      // Scheme-qualified paths (e.g. "classpath:", "file:") are already absolute.
      fullPath =
          resourcePath.matches("^[a-zA-Z]+:.*")
              ? resourcePath
              : (new URI(templateLocPath).resolve(resourcePath)).toString();
    } catch (URISyntaxException e) {
      // Fix: the original message ran the path and "from" together
      // ("...'from '...") and dropped the underlying cause.
      throw new RuntimeException(
          "Unable to construct a URI by resolving '"
              + resourcePath
              + "' from '"
              + templateLocPath
              + "'",
          e);
    }
    log.debug("Calculated full path: {} -> {}", resourcePath, fullPath);
    fullResourcePathSet.add(fullPath);
  }
  return fullResourcePathSet;
}
|
python
|
def record(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None, timeout: float=None) -> typing.List[DataAndMetadata.DataAndMetadata]:
    """Record data and return a list of data_and_metadata objects.

    .. versionadded:: 1.0

    :param frame_parameters: The frame parameters for the record. Pass None for defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: The enabled channels for the record. Pass None for defaults.
    :type channels_enabled: List of booleans.
    :param timeout: The timeout in seconds. Pass None to use default.
    :return: The list of data and metadata items that were read.
    :rtype: list of :py:class:`DataAndMetadata`
    """
    hardware_source = self.__hardware_source
    # Apply explicit frame parameters before starting; None keeps defaults.
    if frame_parameters:
        record_parameters = hardware_source.get_frame_parameters_from_dict(frame_parameters)
        hardware_source.set_record_frame_parameters(record_parameters)
    # Enable/disable channels positionally; None leaves the current state.
    if channels_enabled is not None:
        for index, enabled in enumerate(channels_enabled):
            hardware_source.set_channel_enabled(index, enabled)
    hardware_source.start_recording()
    return hardware_source.get_next_xdatas_to_finish(timeout)
|
python
|
def update_email_template(self, template_id, template_dict):
    """Update an existing email template via a PUT request.

    :param template_id: the id of the template to update
    :param template_dict: dict with the fields to change
    :return: dict with the updated template data
    """
    return self._create_put_request(
        resource=EMAIL_TEMPLATES, billomat_id=template_id, send_data=template_dict
    )
|
python
|
def fromfile(self, path_to_file, mimetype=None):
    """Load blob content from a local file into this StorageBlobModel.

    - path_to_file (required): path to a local file
    - mimetype (optional): content type to record; when omitted it is
      guessed from the file name, falling back to application/octet-stream

    Raises AzureStorageWrapException when the path is not a local file.
    """
    if not os.path.isfile(path_to_file):
        raise AzureStorageWrapException(self, 'Can not load blob content, because given path is not a local file')

    # Load the file bytes and remember the base name.
    self.filename = os.path.basename(path_to_file)
    with open(path_to_file, "rb") as in_file:
        self.content = in_file.read()

    self.properties.content_settings = ContentSettings()
    if mimetype is None:
        # guess_type returns (type, encoding); either part may be None.
        guessed_type, guessed_encoding = guess_type(path_to_file)
        if guessed_type is None:
            content_type = 'application/octet-stream'
        else:
            if guessed_encoding is not None:
                self.properties.content_settings.content_encoding = guessed_encoding
            content_type = guessed_type
        self.properties.content_settings.content_type = content_type
    else:
        self.properties.content_settings.content_type = mimetype
|
python
|
def count(self, *urls):
    """Return the comment count for one or more thread URLs, in the order
    given; URLs with no comments count as 0."""
    rows = self.db.execute([
        'SELECT threads.uri, COUNT(comments.id) FROM comments',
        'LEFT OUTER JOIN threads ON threads.id = tid AND comments.mode = 1',
        'GROUP BY threads.uri'
    ]).fetchall()
    # Map uri -> count, then look each requested url up with a 0 default.
    counts_by_uri = dict(rows)
    return [counts_by_uri.get(url, 0) for url in urls]
|
java
|
/**
 * Composes an SMPP deliver_sm PDU and returns its wire-format bytes.
 *
 * Validates the string parameters against the SMPP length rules, then
 * appends the mandatory parameters in protocol order followed by any
 * optional (TLV) parameters. The schedule_delivery_time and
 * validity_period fields are always empty (null C-octet strings) and
 * replace_if_present_flag / sm_default_msg_id are always 0, per the
 * deliver_sm operation.
 *
 * @return the encoded PDU bytes
 * @throws PDUStringException if a string parameter violates SMPP limits
 */
public byte[] deliverSm(int sequenceNumber, String serviceType,
        byte sourceAddrTon, byte sourceAddrNpi, String sourceAddr,
        byte destAddrTon, byte destAddrNpi, String destinationAddr,
        byte esmClass, byte protocolId, byte priorityFlag,
        byte registeredDelivery, byte dataCoding, byte[] shortMessage,
        OptionalParameter... optionalParameters) throws PDUStringException {
    StringValidator.validateString(serviceType,
            StringParameter.SERVICE_TYPE);
    StringValidator.validateString(sourceAddr, StringParameter.SOURCE_ADDR);
    StringValidator.validateString(destinationAddr,
            StringParameter.DESTINATION_ADDR);
    StringValidator.validateString(shortMessage,
            StringParameter.SHORT_MESSAGE);
    // Header: command id, status 0, caller-supplied sequence number.
    PDUByteBuffer buf = new PDUByteBuffer(SMPPConstant.CID_DELIVER_SM, 0,
            sequenceNumber);
    buf.append(serviceType);
    buf.append(sourceAddrTon);
    buf.append(sourceAddrNpi);
    buf.append(sourceAddr);
    buf.append(destAddrTon);
    buf.append(destAddrNpi);
    buf.append(destinationAddr);
    buf.append(esmClass);
    buf.append(protocolId);
    buf.append(priorityFlag);
    buf.append((String)null); // schedule delivery time
    buf.append((String)null); // validity period
    buf.append(registeredDelivery);
    buf.append((byte)0); // replace if present flag
    buf.append(dataCoding);
    buf.append((byte)0); // sm default msg id
    buf.append((byte)shortMessage.length); // sm_length precedes the payload
    buf.append(shortMessage);
    buf.appendAll(optionalParameters);
    // Fix: removed a stray empty statement (';') left after appendAll.
    return buf.toBytes();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.