language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
/**
 * Returns the last {@code CPRule} in the ordered set where {@code groupId} matches,
 * delegating to the persistence layer.
 *
 * @param groupId the group ID to match
 * @param orderByComparator the comparator that defines the ordering of the set
 * @return the last matching {@code CPRule}
 * @throws com.liferay.commerce.product.exception.NoSuchCPRuleException if no matching rule exists
 */
public static CPRule findByGroupId_Last(long groupId,
    OrderByComparator<CPRule> orderByComparator)
    throws com.liferay.commerce.product.exception.NoSuchCPRuleException {
    return getPersistence().findByGroupId_Last(groupId, orderByComparator);
}
|
python
|
def get_obj_name(obj, full=True):
    """ Gets the #str name of @obj
        @obj: any python object
        @full: #bool returns with parent name as well if True
            NOTE(review): `full` is currently unused by this implementation.
        -> #str object name
        ..
            from redis_structures.debug import get_parent_obj
            get_obj_name(get_obj_name)
            # -> 'get_obj_name'
            get_obj_name(redis_structures.debug.Timer)
            # -> 'Timer'
        ..
    """
    has_name_attr = hasattr(obj, '__name__')
    # Lambdas all share the useless name '<lambda>', so use their source
    # text (collapsed onto one line) as the name instead.
    if has_name_attr and obj.__name__ == "<lambda>":
        try:
            src = whitespace_sub("", inspect.getsource(obj))\
                .replace("\n", "; ").strip(" <>")
        except OSError:
            # Source unavailable (e.g. defined in a REPL); fall back to the name.
            src = obj.__name__
        return lambda_sub("", src)
    # Prefer __qualname__, then __name__, keeping only the final dotted part.
    if hasattr(obj, '__qualname__') and obj.__qualname__:
        return obj.__qualname__.split(".")[-1]
    elif has_name_attr and obj.__name__:
        return obj.__name__.split(".")[-1]
    elif hasattr(obj, '__class__'):
        # Plain instances: report their class name.
        return str(obj.__class__.__name__).strip("<>")
    else:
        # Idiomatic spelling of the former `str(obj.__repr__())`.
        return repr(obj)
|
python
|
def make_spo(sub, prd, obj):
    '''
    Decorates the three given strings as a line of ntriples.

    :param sub: subject CURIE string ("prefix:id"); bnodes use the "_" prefix
    :param prd: predicate CURIE string, or the shorthand 'a' for rdf:type
    :param obj: object CURIE, bnode, or literal (string/number)
    :return: an N-Triples statement string ending in " .", or None when the
        subject/predicate prefixes are not found in the global CURIEMAP
    :raises ValueError: when sub or prd does not split into exactly two
        colon-separated parts
    '''
    # To establish string as a curie and expand,
    # we use a global curie_map(.yaml)
    # sub are always uri (unless a bnode)
    # prd are always uri (unless prd is 'a')
    # should fail loudly if curie does not exist
    if prd == 'a':
        prd = 'rdf:type'
    # NOTE(review): re.split on every ':' means IDs containing colons raise
    # ValueError here — confirm that is intended for this vocabulary.
    try:
        (subcuri, subid) = re.split(r':', sub)
    except Exception:
        LOG.error("not a Subject Curie '%s'", sub)
        raise ValueError
    try:
        (prdcuri, prdid) = re.split(r':', prd)
    except Exception:
        LOG.error("not a Predicate Curie '%s'", prd)
        raise ValueError
    objt = ''
    # object is a curie or bnode or literal [string|number]
    objcuri = None
    match = re.match(CURIERE, obj)
    if match is not None:
        try:
            (objcuri, objid) = re.split(r':', obj)
        except ValueError:
            # Not a two-part CURIE after all; treat as a literal below.
            match = None
    if match is not None and objcuri in CURIEMAP:
        objt = CURIEMAP[objcuri] + objid.strip()
        # allow unexpanded bnodes in object
        if objcuri != '_' or CURIEMAP[objcuri] != '_:b':
            objt = '<' + objt + '>'
    elif obj.isnumeric():
        objt = '"' + obj + '"'
    else:
        # Literals may not contain the characters ", LF, CR '\'
        # except in their escaped forms. internal quotes as well.
        obj = obj.strip('"').replace('\\', '\\\\').replace('"', '\'')
        obj = obj.replace('\n', '\\n').replace('\r', '\\r')
        objt = '"' + obj + '"'
    # allow unexpanded bnodes in subject
    if subcuri is not None and subcuri in CURIEMAP and \
            prdcuri is not None and prdcuri in CURIEMAP:
        subjt = CURIEMAP[subcuri] + subid.strip()
        if subcuri != '_' or CURIEMAP[subcuri] != '_:b':
            subjt = '<' + subjt + '>'
        return subjt + ' <' + CURIEMAP[prdcuri] + prdid.strip() + '> ' + objt + ' .'
    else:
        LOG.error(
            'Cant work with: <%s> %s , <%s> %s, %s',
            subcuri, subid, prdcuri, prdid, objt)
        return None
|
java
|
/**
 * Checks whether every class in {@code types1} is assignable from the class at the
 * same index in {@code types2}. Primitive and wrapper types are treated as
 * equivalent by comparing their unwrapped forms.
 *
 * @param types1 target types (may be null/empty)
 * @param types2 candidate types (may be null/empty)
 * @return true when both arrays are empty, or when they have equal length and
 *         every pair is assignable/equivalent; false otherwise
 */
public static boolean isAllAssignableFrom(Class<?>[] types1, Class<?>[] types2) {
    if (ArrayUtil.isEmpty(types1) && ArrayUtil.isEmpty(types2)) {
        return true;
    }
    if (null == types1 || null == types2) {
        // Only one side is null, so they cannot match (both-null handled above).
        return false;
    }
    if (types1.length != types2.length) {
        return false;
    }
    for (int i = 0; i < types1.length; i++) {
        Class<?> type1 = types1[i];
        Class<?> type2 = types2[i];
        if (isBasicType(type1) && isBasicType(type2)) {
            // Primitive vs. wrapper may differ textually; compare unwrapped forms.
            if (BasicType.unWrap(type1) != BasicType.unWrap(type2)) {
                return false;
            }
        } else if (!type1.isAssignableFrom(type2)) {
            return false;
        }
    }
    return true;
}
|
java
|
/**
 * Lists the output files of a job as a stream of pages, automatically following
 * {@code nextPageLink} until the service reports no further pages.
 *
 * @param resourceGroupName the resource group name
 * @param workspaceName the workspace name
 * @param experimentName the experiment name
 * @param jobName the job name
 * @param jobsListOutputFilesOptions additional listing options
 * @return an observable emitting one {@code ServiceResponse<Page<FileInner>>} per page
 */
public Observable<ServiceResponse<Page<FileInner>>> listOutputFilesWithServiceResponseAsync(final String resourceGroupName, final String workspaceName, final String experimentName, final String jobName, final JobsListOutputFilesOptions jobsListOutputFilesOptions) {
    return listOutputFilesSinglePageAsync(resourceGroupName, workspaceName, experimentName, jobName, jobsListOutputFilesOptions)
        .concatMap(new Func1<ServiceResponse<Page<FileInner>>, Observable<ServiceResponse<Page<FileInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<FileInner>>> call(ServiceResponse<Page<FileInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                // Last page: nothing more to follow, emit it and complete.
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                // Emit this page, then recursively fetch the remaining pages.
                return Observable.just(page).concatWith(listOutputFilesNextWithServiceResponseAsync(nextPageLink));
            }
        });
}
|
java
|
/**
 * Complex FFT work pass for a general factor {@code ip} (mixed-radix butterfly),
 * transforming between the {@code in} and {@code out} scratch arrays.
 *
 * NOTE(review): this appears to be a port of FFTPACK's PASSF/PASSB general-factor
 * pass (isign selects forward/backward via the twiddle-sign) — confirm against the
 * enclosing FFT class. Reads twiddle factors from the instance field {@code wtable}
 * starting at {@code offset}.
 *
 * @param nac    single-element out-parameter; nac[0]==1 means the final result is
 *               in {@code out}, nac[0]==0 means it was copied back into {@code in}
 * @param ido    (doubled) length of the innermost dimension
 * @param ip     the radix/factor handled by this pass
 * @param l1     number of transforms of length ip
 * @param idl1   ido * l1
 * @param in     input/scratch array
 * @param in_off base offset into {@code in}
 * @param out    output/scratch array
 * @param out_off base offset into {@code out}
 * @param offset start index of this pass's twiddle factors in {@code wtable}
 * @param isign  +1 or -1; multiplies the imaginary twiddle parts
 */
void passfg(final int nac[], final int ido, final int ip, final int l1, final int idl1, final float in[], final int in_off, final float out[], final int out_off, final int offset, final int isign) {
    int idij, idlj, idot, ipph, l, jc, lc, idj, idl, inc, idp;
    float w1r, w1i, w2i, w2r;
    int iw1;
    iw1 = offset;
    idot = ido / 2;
    ipph = (ip + 1) / 2;
    idp = ip * ido;
    // Phase 1: form symmetric/antisymmetric pair sums of the ip sub-sequences.
    // Two loop orders are used depending on which dimension is longer.
    if (ido >= l1) {
        for (int j = 1; j < ipph; j++) {
            jc = ip - j;
            int idx1 = j * ido;
            int idx2 = jc * ido;
            for (int k = 0; k < l1; k++) {
                int idx3 = k * ido;
                int idx4 = idx3 + idx1 * l1;
                int idx5 = idx3 + idx2 * l1;
                int idx6 = idx3 * ip;
                for (int i = 0; i < ido; i++) {
                    int oidx1 = out_off + i;
                    float i1r = in[in_off + i + idx1 + idx6];
                    float i2r = in[in_off + i + idx2 + idx6];
                    out[oidx1 + idx4] = i1r + i2r;
                    out[oidx1 + idx5] = i1r - i2r;
                }
            }
        }
        // Copy the j == 0 (untwiddled) sub-sequence straight through.
        for (int k = 0; k < l1; k++) {
            int idxt1 = k * ido;
            int idxt2 = idxt1 * ip;
            for (int i = 0; i < ido; i++) {
                out[out_off + i + idxt1] = in[in_off + i + idxt2];
            }
        }
    } else {
        for (int j = 1; j < ipph; j++) {
            jc = ip - j;
            int idxt1 = j * l1 * ido;
            int idxt2 = jc * l1 * ido;
            int idxt3 = j * ido;
            int idxt4 = jc * ido;
            for (int i = 0; i < ido; i++) {
                for (int k = 0; k < l1; k++) {
                    int idx1 = k * ido;
                    int idx2 = idx1 * ip;
                    int idx3 = out_off + i;
                    int idx4 = in_off + i;
                    float i1r = in[idx4 + idxt3 + idx2];
                    float i2r = in[idx4 + idxt4 + idx2];
                    out[idx3 + idx1 + idxt1] = i1r + i2r;
                    out[idx3 + idx1 + idxt2] = i1r - i2r;
                }
            }
        }
        // Copy the j == 0 (untwiddled) sub-sequence straight through.
        for (int i = 0; i < ido; i++) {
            for (int k = 0; k < l1; k++) {
                int idx1 = k * ido;
                out[out_off + i + idx1] = in[in_off + i + idx1 * ip];
            }
        }
    }
    // Phase 2: accumulate twiddle-weighted combinations of the pair sums into `in`.
    idl = 2 - ido;
    inc = 0;
    int idxt0 = (ip - 1) * idl1;
    for (l = 1; l < ipph; l++) {
        lc = ip - l;
        idl += ido;
        int idxt1 = l * idl1;
        int idxt2 = lc * idl1;
        int idxt3 = idl + iw1;
        w1r = wtable[idxt3 - 2];
        w1i = isign * wtable[idxt3 - 1];
        for (int ik = 0; ik < idl1; ik++) {
            int idx1 = in_off + ik;
            int idx2 = out_off + ik;
            in[idx1 + idxt1] = out[idx2] + w1r * out[idx2 + idl1];
            in[idx1 + idxt2] = w1i * out[idx2 + idxt0];
        }
        idlj = idl;
        inc += ido;
        for (int j = 2; j < ipph; j++) {
            jc = ip - j;
            idlj += inc;
            // Twiddle index wraps around modulo idp.
            if (idlj > idp)
                idlj -= idp;
            int idxt4 = idlj + iw1;
            w2r = wtable[idxt4 - 2];
            w2i = isign * wtable[idxt4 - 1];
            int idxt5 = j * idl1;
            int idxt6 = jc * idl1;
            for (int ik = 0; ik < idl1; ik++) {
                int idx1 = in_off + ik;
                int idx2 = out_off + ik;
                in[idx1 + idxt1] += w2r * out[idx2 + idxt5];
                in[idx1 + idxt2] += w2i * out[idx2 + idxt6];
            }
        }
    }
    // Phase 3: fold the pair sums into the DC component of each transform.
    for (int j = 1; j < ipph; j++) {
        int idxt1 = j * idl1;
        for (int ik = 0; ik < idl1; ik++) {
            int idx1 = out_off + ik;
            out[idx1] += out[idx1 + idxt1];
        }
    }
    // Phase 4: recombine (real, imag) pairs into the final butterfly outputs.
    for (int j = 1; j < ipph; j++) {
        jc = ip - j;
        int idx1 = j * idl1;
        int idx2 = jc * idl1;
        for (int ik = 1; ik < idl1; ik += 2) {
            int idx3 = out_off + ik;
            int idx4 = in_off + ik;
            int iidx1 = idx4 + idx1;
            int iidx2 = idx4 + idx2;
            float i1i = in[iidx1 - 1];
            float i1r = in[iidx1];
            float i2i = in[iidx2 - 1];
            float i2r = in[iidx2];
            int oidx1 = idx3 + idx1;
            int oidx2 = idx3 + idx2;
            out[oidx1 - 1] = i1i - i2r;
            out[oidx2 - 1] = i1i + i2r;
            out[oidx1] = i1r + i2i;
            out[oidx2] = i1r - i2i;
        }
    }
    nac[0] = 1;
    // For ido == 2 no final twiddle stage is needed; the result stays in `out`.
    if (ido == 2)
        return;
    nac[0] = 0;
    // Otherwise copy back and apply per-element twiddles; result ends up in `in`.
    System.arraycopy(out, out_off, in, in_off, idl1);
    int idx0 = l1 * ido;
    for (int j = 1; j < ip; j++) {
        int idx1 = j * idx0;
        for (int k = 0; k < l1; k++) {
            int idx2 = k * ido;
            int oidx1 = out_off + idx2 + idx1;
            int iidx1 = in_off + idx2 + idx1;
            in[iidx1] = out[oidx1];
            in[iidx1 + 1] = out[oidx1 + 1];
        }
    }
    // Phase 5: multiply the remaining elements by the twiddle factors; two loop
    // orders again chosen by the relative sizes of idot and l1.
    if (idot <= l1) {
        idij = 0;
        for (int j = 1; j < ip; j++) {
            idij += 2;
            int idx1 = j * l1 * ido;
            for (int i = 3; i < ido; i += 2) {
                idij += 2;
                int idx2 = idij + iw1 - 1;
                w1r = wtable[idx2 - 1];
                w1i = isign * wtable[idx2];
                int idx3 = in_off + i;
                int idx4 = out_off + i;
                for (int k = 0; k < l1; k++) {
                    int idx5 = k * ido + idx1;
                    int iidx1 = idx3 + idx5;
                    int oidx1 = idx4 + idx5;
                    float o1i = out[oidx1 - 1];
                    float o1r = out[oidx1];
                    in[iidx1 - 1] = w1r * o1i - w1i * o1r;
                    in[iidx1] = w1r * o1r + w1i * o1i;
                }
            }
        }
    } else {
        idj = 2 - ido;
        for (int j = 1; j < ip; j++) {
            idj += ido;
            int idx1 = j * l1 * ido;
            for (int k = 0; k < l1; k++) {
                idij = idj;
                int idx3 = k * ido + idx1;
                for (int i = 3; i < ido; i += 2) {
                    idij += 2;
                    int idx2 = idij - 1 + iw1;
                    w1r = wtable[idx2 - 1];
                    w1i = isign * wtable[idx2];
                    int iidx1 = in_off + i + idx3;
                    int oidx1 = out_off + i + idx3;
                    float o1i = out[oidx1 - 1];
                    float o1r = out[oidx1];
                    in[iidx1 - 1] = w1r * o1i - w1i * o1r;
                    in[iidx1] = w1r * o1r + w1i * o1i;
                }
            }
        }
    }
}
|
java
|
/**
 * Extracts the metric name, taken to be the final token of the split path.
 *
 * @param strs the tokenized path segments
 * @return the last token, or null when fewer than 6 tokens are present
 */
public static String extractMetricName(String[] strs) {
    // Paths shorter than 6 tokens do not carry a metric name.
    final int minimumTokens = 6;
    if (strs.length < minimumTokens) {
        return null;
    }
    return strs[strs.length - 1];
}
|
java
|
/**
 * Stores an arbitrary value in the {@code custom} map under the given key,
 * replacing any value previously associated with that key.
 *
 * @param key   the map key
 * @param value the value to associate with {@code key}
 * @param <T>   the type of the stored value
 */
public <T> void putCustom(String key, T value) {
    custom.put(key, value);
}
|
python
|
def send_msg(self, chat_id, msg_type, **kwargs):
    """Deprecated alias for :meth:`send`.

    Emits a :class:`DeprecationWarning` and forwards all arguments unchanged.

    :param chat_id: target chat identifier
    :param msg_type: message type forwarded to :meth:`send`
    :param kwargs: extra keyword arguments forwarded to :meth:`send`
    :return: whatever :meth:`send` returns
    """
    import warnings
    # Surface the deprecation at the caller's location (stacklevel=2).
    warnings.warn("send_msg is deprecated, use send instead",
                  DeprecationWarning, stacklevel=2)
    return self.send(chat_id, msg_type, **kwargs)
|
python
|
def _get_valid_stan_args(base_args=None):
    """Fill in default values for arguments not provided in `base_args`.

    RStan does this in C++ in stan_args.hpp in the stan_args constructor.
    It seems easier to deal with here in Python.

    :param base_args: optional dict of user-supplied arguments; not mutated.
    :return: a new dict with all defaults resolved and values converted to
        forms the C++ layer expects (e.g. str -> ascii bytes, enum members).
    :raises ValueError: for an invalid ``algorithm`` value.
    """
    args = base_args.copy() if base_args is not None else {}
    # Default arguments, c.f. rstan/rstan/inst/include/rstan/stan_args.hpp
    # values in args are going to be converted into C++ objects so
    # prepare them accordingly---e.g., unicode -> bytes -> std::string
    args['chain_id'] = args.get('chain_id', 1)
    args['append_samples'] = args.get('append_samples', False)
    # Unknown method strings silently fall back to SAMPLING (matches RStan).
    if args.get('method') is None or args['method'] == "sampling":
        args['method'] = stan_args_method_t.SAMPLING
    elif args['method'] == "optim":
        args['method'] = stan_args_method_t.OPTIM
    elif args['method'] == 'test_grad':
        args['method'] = stan_args_method_t.TEST_GRADIENT
    elif args['method'] == 'variational':
        args['method'] = stan_args_method_t.VARIATIONAL
    else:
        args['method'] = stan_args_method_t.SAMPLING
    args['sample_file_flag'] = True if args.get('sample_file') else False
    args['sample_file'] = args.get('sample_file', '').encode('ascii')
    args['diagnostic_file_flag'] = True if args.get('diagnostic_file') else False
    args['diagnostic_file'] = args.get('diagnostic_file', '').encode('ascii')
    # NB: argument named "seed" not "random_seed"
    args['random_seed'] = args.get('seed', int(time.time()))
    if args['method'] == stan_args_method_t.VARIATIONAL:
        # variational does not use a `control` map like sampling
        args['ctrl'] = args.get('ctrl', dict(variational=dict()))
        args['ctrl']['variational']['iter'] = args.get('iter', 10000)
        args['ctrl']['variational']['grad_samples'] = args.get('grad_samples', 1)
        args['ctrl']['variational']['elbo_samples'] = args.get('elbo_samples', 100)
        args['ctrl']['variational']['eval_elbo'] = args.get('eval_elbo', 100)
        args['ctrl']['variational']['output_samples'] = args.get('output_samples', 1000)
        args['ctrl']['variational']['adapt_iter'] = args.get('adapt_iter', 50)
        args['ctrl']['variational']['eta'] = args.get('eta', 1.0)
        args['ctrl']['variational']['adapt_engaged'] = args.get('adapt_engaged', True)
        args['ctrl']['variational']['tol_rel_obj'] = args.get('tol_rel_obj', 0.01)
        if args.get('algorithm', '').lower() == 'fullrank':
            args['ctrl']['variational']['algorithm'] = variational_algo_t.FULLRANK
        else:
            args['ctrl']['variational']['algorithm'] = variational_algo_t.MEANFIELD
    elif args['method'] == stan_args_method_t.SAMPLING:
        args['ctrl'] = args.get('ctrl', dict(sampling=dict()))
        args['ctrl']['sampling']['iter'] = iter = args.get('iter', 2000)
        args['ctrl']['sampling']['warmup'] = warmup = args.get('warmup', iter // 2)
        # BUG FIX: parenthesize before the floor division. The previous
        # `iter - warmup // 1000` computed `warmup // 1000` first, producing
        # an absurd default thin (1999 for iter=2000, warmup=1000) instead
        # of the intended 1 per ~1000 post-warmup draws.
        calculated_thin = (iter - warmup) // 1000
        if calculated_thin < 1:
            calculated_thin = 1
        args['ctrl']['sampling']['thin'] = thin = args.get('thin', calculated_thin)
        args['ctrl']['sampling']['save_warmup'] = True  # always True now
        args['ctrl']['sampling']['iter_save_wo_warmup'] = iter_save_wo_warmup = 1 + (iter - warmup - 1) // thin
        args['ctrl']['sampling']['iter_save'] = iter_save_wo_warmup + 1 + (warmup - 1) // thin
        refresh = iter // 10 if iter >= 20 else 1
        args['ctrl']['sampling']['refresh'] = args.get('refresh', refresh)
        ctrl_lst = args.get('control', dict())
        ctrl_sampling = args['ctrl']['sampling']
        # NB: if these defaults change, remember to update docstrings
        ctrl_sampling['adapt_engaged'] = ctrl_lst.get("adapt_engaged", True)
        ctrl_sampling['adapt_gamma'] = ctrl_lst.get("adapt_gamma", 0.05)
        ctrl_sampling['adapt_delta'] = ctrl_lst.get("adapt_delta", 0.8)
        ctrl_sampling['adapt_kappa'] = ctrl_lst.get("adapt_kappa", 0.75)
        ctrl_sampling['adapt_t0'] = ctrl_lst.get("adapt_t0", 10.0)
        ctrl_sampling['adapt_init_buffer'] = ctrl_lst.get("adapt_init_buffer", 75)
        ctrl_sampling['adapt_term_buffer'] = ctrl_lst.get("adapt_term_buffer", 50)
        ctrl_sampling['adapt_window'] = ctrl_lst.get("adapt_window", 25)
        ctrl_sampling['stepsize'] = ctrl_lst.get("stepsize", 1.0)
        ctrl_sampling['stepsize_jitter'] = ctrl_lst.get("stepsize_jitter", 0.0)
        algorithm = args.get('algorithm', 'NUTS')
        if algorithm == 'HMC':
            args['ctrl']['sampling']['algorithm'] = sampling_algo_t.HMC
        elif algorithm == 'Metropolis':
            args['ctrl']['sampling']['algorithm'] = sampling_algo_t.Metropolis
        elif algorithm == 'NUTS':
            args['ctrl']['sampling']['algorithm'] = sampling_algo_t.NUTS
        elif algorithm == 'Fixed_param':
            args['ctrl']['sampling']['algorithm'] = sampling_algo_t.Fixed_param
            # TODO: Setting adapt_engaged to False solves the segfault reported
            # in issue #200; find out why this hack is needed. RStan deals with
            # the setting elsewhere.
            ctrl_sampling['adapt_engaged'] = False
        else:
            msg = "Invalid value for parameter algorithm (found {}; " \
                  "require HMC, Metropolis, NUTS, or Fixed_param).".format(algorithm)
            raise ValueError(msg)
        metric = ctrl_lst.get('metric', 'diag_e')
        if metric == "unit_e":
            ctrl_sampling['metric'] = sampling_metric_t.UNIT_E
        elif metric == "diag_e":
            ctrl_sampling['metric'] = sampling_metric_t.DIAG_E
        elif metric == "dense_e":
            ctrl_sampling['metric'] = sampling_metric_t.DENSE_E
        if ctrl_sampling['algorithm'] == sampling_algo_t.NUTS:
            ctrl_sampling['max_treedepth'] = ctrl_lst.get("max_treedepth", 10)
        elif ctrl_sampling['algorithm'] == sampling_algo_t.HMC:
            # Default integration time is 2*pi.
            ctrl_sampling['int_time'] = ctrl_lst.get('int_time', 6.283185307179586476925286766559005768e+00)
        elif ctrl_sampling['algorithm'] == sampling_algo_t.Metropolis:
            pass
        elif ctrl_sampling['algorithm'] == sampling_algo_t.Fixed_param:
            pass
    elif args['method'] == stan_args_method_t.OPTIM:
        args['ctrl'] = args.get('ctrl', dict(optim=dict()))
        args['ctrl']['optim']['iter'] = iter = args.get('iter', 2000)
        algorithm = args.get('algorithm', 'LBFGS')
        if algorithm == "BFGS":
            args['ctrl']['optim']['algorithm'] = optim_algo_t.BFGS
        elif algorithm == "Newton":
            args['ctrl']['optim']['algorithm'] = optim_algo_t.Newton
        elif algorithm == "LBFGS":
            args['ctrl']['optim']['algorithm'] = optim_algo_t.LBFGS
        else:
            msg = "Invalid value for parameter algorithm (found {}; " \
                  "require (L)BFGS or Newton).".format(algorithm)
            raise ValueError(msg)
        refresh = args['ctrl']['optim']['iter'] // 100
        args['ctrl']['optim']['refresh'] = args.get('refresh', refresh)
        if args['ctrl']['optim']['refresh'] < 1:
            args['ctrl']['optim']['refresh'] = 1
        args['ctrl']['optim']['init_alpha'] = args.get("init_alpha", 0.001)
        args['ctrl']['optim']['tol_obj'] = args.get("tol_obj", 1e-12)
        args['ctrl']['optim']['tol_grad'] = args.get("tol_grad", 1e-8)
        args['ctrl']['optim']['tol_param'] = args.get("tol_param", 1e-8)
        args['ctrl']['optim']['tol_rel_obj'] = args.get("tol_rel_obj", 1e4)
        args['ctrl']['optim']['tol_rel_grad'] = args.get("tol_rel_grad", 1e7)
        args['ctrl']['optim']['save_iterations'] = args.get("save_iterations", True)
        args['ctrl']['optim']['history_size'] = args.get("history_size", 5)
    elif args['method'] == stan_args_method_t.TEST_GRADIENT:
        args['ctrl'] = args.get('ctrl', dict(test_grad=dict()))
        args['ctrl']['test_grad']['epsilon'] = args.get("epsilon", 1e-6)
        args['ctrl']['test_grad']['error'] = args.get("error", 1e-6)
    init = args.get('init', "random")
    if isinstance(init, string_types):
        args['init'] = init.encode('ascii')
    elif isinstance(init, dict):
        args['init'] = "user".encode('ascii')
        # while the name is 'init_list', it is a dict; the name comes from rstan,
        # where list elements can have names
        args['init_list'] = init
    else:
        args['init'] = "random".encode('ascii')
    args['init_radius'] = args.get('init_r', 2.0)
    if (args['init_radius'] <= 0):
        args['init'] = b"0"
    # 0 initialization requires init_radius = 0
    if (args['init'] == b"0" or args['init'] == 0):
        args['init_radius'] = 0.0
    args['enable_random_init'] = args.get('enable_random_init', True)
    # RStan calls validate_args() here
    return args
|
java
|
/**
 * Extracts the comma-separated server list from a connection URL: everything
 * between the protocol prefix ({@code URL_PREFIX + "//"}) and the optional
 * {@code '?'} query part.
 *
 * @param url the full connection URL
 * @return the individual server entries
 */
static String[] getServersFromURL(String url) {
    final String prefix = URL_PREFIX + "//";
    // Stop at the query separator when present, otherwise use the whole string.
    final int queryStart = url.indexOf('?');
    final int end = (queryStart > 0) ? queryStart : url.length();
    return url.substring(prefix.length(), end).split(",");
}
|
java
|
/**
 * Approximates the normal (Bachelier) implied volatility of a displaced SABR model
 * using a Berestycki-style expansion.
 *
 * NOTE(review): this looks like the standard SABR normal-vol asymptotic expansion
 * (first order in maturity) — confirm against the paper/reference used by this library.
 *
 * @param alpha        SABR alpha (level of stochastic volatility)
 * @param beta         SABR beta (CEV exponent); the {@code beta < 1} branch uses the CEV transform
 * @param rho          SABR rho (correlation)
 * @param nu           SABR nu (vol-of-vol)
 * @param displacement shift applied to both underlying and strike
 * @param underlying   the forward/underlying value
 * @param strike       the option strike
 * @param maturity     time to maturity
 * @return the approximated normal volatility, floored at 0
 */
public static double sabrBerestyckiNormalVolatilityApproximation(double alpha, double beta, double rho, double nu, double displacement, double underlying, double strike, double maturity)
{
    // Apply displacement. Displaced model is just a shift on underlying and strike.
    underlying += displacement;
    strike += displacement;
    double forwardStrikeAverage = (underlying+strike) / 2.0;	// Original paper uses a geometric average here
    // z measures the (vol-scaled) distance between underlying and strike.
    double z;
    if(beta < 1.0) {
        z = nu / alpha * (Math.pow(underlying, 1.0-beta) - Math.pow(strike, 1.0-beta)) / (1.0-beta);
    } else {
        z = nu / alpha * Math.log(underlying/strike);
    }
    double x = Math.log((Math.sqrt(1.0 - 2.0*rho*z + z*z) + z - rho) / (1.0-rho));
    double term1;
    if(Math.abs(underlying - strike) < 1E-10 * (1+Math.abs(underlying))) {
        // ATM case - we assume underlying = strike
        term1 = alpha * Math.pow(underlying, beta);
    }
    else {
        // Away from ATM, (underlying-strike)/x replaces the ATM backbone term.
        term1 = nu * (underlying-strike) / x;
    }
    // First-order-in-maturity correction of the expansion.
    double sigma = term1 * (1.0 + maturity * ((-beta*(2-beta)*alpha*alpha)/(24*Math.pow(forwardStrikeAverage,2.0*(1.0-beta))) + beta*alpha*rho*nu / (4*Math.pow(forwardStrikeAverage,(1.0-beta))) + (2.0 -3.0*rho*rho)*nu*nu/24));
    return Math.max(sigma, 0.0);
}
|
java
|
/**
 * Signals {@code onCompleted} to the subscriber unless it has already
 * unsubscribed; either way a debug trace is emitted.
 *
 * @param subscriber the downstream subscriber to complete
 */
private void complete(Subscriber<? super T> subscriber) {
    // Guard clause: an unsubscribed subscriber must not receive events.
    if (subscriber.isUnsubscribed()) {
        debug("unsubscribed");
        return;
    }
    debug("onCompleted");
    subscriber.onCompleted();
}
|
python
|
def attribute_crawl(self, key):
    """
    Grab all attribute values associated with the given feature.

    Traverses this feature and every one of its descendants, collecting the
    union of all values stored under the attribute ``key``.

    >>> import tag
    >>> reader = tag.GFF3Reader(tag.pkgdata('otau-no-seqreg.gff3'))
    >>> features = tag.select.features(reader)
    >>> for feature in features:
    ...     names = feature.attribute_crawl('Name')
    ...     print(sorted(list(names)))
    ['Ot01g00060', 'XM_003074019.1', 'XP_003074065.1']
    ['Ot01g00070', 'XM_003074020.1', 'XP_003074066.1']
    ['Ot01g00080', 'XM_003074021.1', 'XP_003074067.1']
    ['Ot01g00090', 'XM_003074022.1', 'XP_003074068.1']
    ['Ot01g00100', 'XM_003074023.1', 'XP_003074069.1']
    ['Ot01g00110', 'XM_003074024.1', 'XP_003074070.1']
    """
    collected = set()
    # Iterating over self yields the feature itself plus all descendants.
    for node in self:
        values = node.get_attribute(key, as_list=True)
        if values is not None:
            collected |= set(values)
    return collected
|
java
|
/**
 * Sets the collection of project names that were not found. A defensive copy
 * is stored; passing {@code null} clears the field.
 *
 * @param projectsNotFound the project names, or null to clear
 */
public void setProjectsNotFound(java.util.Collection<String> projectsNotFound) {
    this.projectsNotFound = (projectsNotFound == null)
        ? null
        : new java.util.ArrayList<String>(projectsNotFound);
}
|
python
|
def token_getter(provider, token=None):
    """Generic token getter for all the providers.

    :param provider: provider name used to build the session key
        ``"<provider>_token"``
    :param token: explicit token; when given it is returned unchanged
    :return: the supplied token, or the one stored in the session
    """
    if token is not None:
        return token
    # Fall back to the token cached in the session for this provider.
    return session.get(provider + '_token')
|
python
|
def get_item(key):
    """Return content in cached file in JSON format.

    Looks up the file named ``key`` under ``CURRENT_DIR`` and returns the
    value stored under its top-level ``"_"`` key.

    :param key: cache file name (relative to ``CURRENT_DIR``)
    :return: the cached value, or None when the file is missing/unreadable
        or does not contain valid JSON
    """
    CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
    try:
        # Use a context manager so the file handle is always closed
        # (the previous version leaked it via a bare open()).
        with open(CACHED_KEY_FILE, "rb") as cache_file:
            return json.loads(cache_file.read().decode('UTF-8'))["_"]
    except (IOError, ValueError):
        return None
|
python
|
def is_valid_transition(self, source: str, dest: str) -> bool:
    """
    Checks if a transition is registered in the FSM.

    Args:
        source (str): the source state name
        dest (str): the destination state name

    Returns:
        bool: whether the transition is valid or not (always True when it
        returns; invalid cases raise instead)

    Raises:
        NotValidState: if either state name is not registered in the FSM
        NotValidTransition: if no transition from ``source`` to ``dest`` exists
    """
    # Both endpoints must be known states before the transition is checked.
    if dest not in self._states or source not in self._states:
        raise NotValidState
    elif dest not in self._transitions[source]:
        raise NotValidTransition
    return True
|
java
|
/**
 * Creates a permission by running the request through the pre-execution
 * client hooks and delegating to the generated executor.
 *
 * @param request the create-permission request
 * @return the service result
 */
@Override
public CreatePermissionResult createPermission(CreatePermissionRequest request) {
    request = beforeClientExecution(request);
    return executeCreatePermission(request);
}
|
python
|
def buy_item(self, item_name, abbr):
    """Purchase a store item for the current user.

    A 200 status code indicates that the item was purchased; the response
    body then looks like: {"streak_freeze": "2017-01-10 02:39:59.594327"}

    :param item_name: store item identifier (e.g. 'streak_freeze')
    :param abbr: learning-language abbreviation (e.g. 'en')
    :raises AlreadyHaveStoreItemException: if the item is already owned
    :raises Exception: for any other non-OK response
    """
    url = 'https://www.duolingo.com/2017-06-30/users/{}/purchase-store-item'
    url = url.format(self.user_data.id)
    data = {'name': item_name, 'learningLanguage': abbr}
    request = self._make_req(url, data)
    # Use .get() so a 400 body without an 'error' key falls through to the
    # generic failure below instead of raising KeyError here.
    if request.status_code == 400 and request.json().get('error') == 'ALREADY_HAVE_STORE_ITEM':
        raise AlreadyHaveStoreItemException('Already equipped with ' + item_name + '.')
    if not request.ok:
        # any other error:
        raise Exception('Not possible to buy item.')
|
java
|
/**
 * Creates a gzipped tar archive at {@code target} containing every file found
 * recursively under {@code src}. Entry names are relative to the source
 * directory and use UNIX separators; GNU long-file-name support is enabled.
 *
 * @param src    path of the directory to archive
 * @param target path of the .tar.gz file to create
 * @throws IOException if reading a source file or writing the archive fails
 */
public static void generateTarGz(String src, String target) throws IOException {
    File sourceDirectory = new File(src);
    File destinationArchive = new File(target);
    String sourcePath = sourceDirectory.getAbsolutePath();
    // try-with-resources guarantees the whole stream chain is closed even if
    // a wrapper constructor or the copy loop throws (the previous version
    // leaked the FileOutputStream in those cases).
    try (TarArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(
            new GzipCompressorOutputStream(
                new BufferedOutputStream(new FileOutputStream(destinationArchive))))) {
        archiveOutputStream.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        Collection<File> childrenFiles = org.apache.commons.io.FileUtils.listFiles(sourceDirectory, null, true);
        // Never add the archive itself (it may live inside the source tree).
        childrenFiles.remove(destinationArchive);
        for (File childFile : childrenFiles) {
            String childPath = childFile.getAbsolutePath();
            // Entry name relative to the source dir, with UNIX separators.
            String relativePath = childPath.substring(sourcePath.length() + 1);
            relativePath = FilenameUtils.separatorsToUnix(relativePath);
            ArchiveEntry archiveEntry = new TarArchiveEntry(childFile, relativePath);
            try (FileInputStream fileInputStream = new FileInputStream(childFile)) {
                archiveOutputStream.putArchiveEntry(archiveEntry);
                try {
                    IOUtils.copy(fileInputStream, archiveOutputStream);
                } finally {
                    archiveOutputStream.closeArchiveEntry();
                }
            }
        }
    }
}
|
java
|
/**
 * Inserts a category at the given position, storing its backing DC subject in
 * the parallel {@code subjects} list. A {@code null} category is stored as a
 * {@code null} subject.
 *
 * @param index position at which to insert
 * @param obj   the category to insert (must be a {@code SyndCategoryImpl} or null)
 */
@Override
public void add(final int index, final SyndCategory obj) {
    final SyndCategoryImpl sCat = (SyndCategoryImpl) obj;
    DCSubject subject;
    if (sCat != null) {
        subject = sCat.getSubject();
    } else {
        subject = null;
    }
    subjects.add(index, subject);
}
|
python
|
def _map_trajectory(self):
    """Build ``self.trajectory_map``: frame index -> [start, end] byte offsets.

    Memory-maps ``self.filepath`` and scans it line by line, recording where
    each frame begins and ends (frames are delimited by lines consisting of
    the single token 'END'). Also sets ``self.no_of_frames`` to the index of
    the last frame found.
    """
    self.trajectory_map = {}
    with open(self.filepath, 'r') as trajectory_file:
        with closing(
                mmap(
                    trajectory_file.fileno(), 0,
                    access=ACCESS_READ)) as mapped_file:
            progress = 0    # byte offset of the line about to be read
            line = 0        # running line counter
            frame = -1      # last completed frame index
            frame_start = 0  # byte offset where the current frame began
            while progress <= len(mapped_file):
                line = line + 1
                # We read a binary data from a mapped file.
                bline = mapped_file.readline()
                # If the bline length equals zero we terminate.
                # We reached end of the file but still add the last frame!
                if len(bline) == 0:
                    frame = frame + 1
                    # NOTE(review): the `> 10` threshold presumably filters out
                    # a trailing fragment too small to be a real frame — confirm.
                    if progress - frame_start > 10:
                        self.trajectory_map[frame] = [
                            frame_start, progress
                        ]
                    break
                # We need to decode byte line into an utf-8 string.
                sline = bline.decode("utf-8").strip('\n').split()
                # We extract map's byte coordinates for each frame
                if len(sline) == 1 and sline[0] == 'END':
                    frame = frame + 1
                    self.trajectory_map[frame] = [frame_start, progress]
                    frame_start = progress
                # Here we extract the map's byte coordinates for the header
                # And also the periodic system type needed for later.
                progress = progress + len(bline)
    self.no_of_frames = frame
|
python
|
def _serialize_model_helper(self, model, field_dict=None):
"""
A recursive function for serializing a model
into a json ready format.
"""
field_dict = field_dict or self.dot_field_list_to_dict()
if model is None:
return None
if isinstance(model, Query):
model = model.all()
if isinstance(model, (list, set)):
return [self.serialize_model(m, field_dict=field_dict) for m in model]
model_dict = {}
for name, sub in six.iteritems(field_dict):
value = getattr(model, name)
if sub:
value = self.serialize_model(value, field_dict=sub)
model_dict[name] = value
return model_dict
|
java
|
/**
 * Computes the shadow footprint cast by a geometry of the given height for a sun
 * position described by azimuth and altitude.
 *
 * @param geometry the input geometry (POINT, LINESTRING or POLYGON); null returns null
 * @param azimuth  sun azimuth
 * @param altitude sun altitude
 * @param height   height of the object casting the shadow; must be &gt; 0
 * @param doUnion  whether the shadow parts should be unioned
 * @return the shadow geometry, or null for a null input
 * @throws IllegalArgumentException for non-positive heights or unsupported geometry types
 */
public static Geometry computeShadow(Geometry geometry, double azimuth, double altitude, double height, boolean doUnion) {
    if (geometry == null) {
        return null;
    }
    if (height <= 0) {
        throw new IllegalArgumentException("The height of the geometry must be greater than 0.");
    }
    // Translate sun position and object height into a planar displacement.
    final double[] offset = shadowOffset(azimuth, altitude, height);
    if (geometry instanceof Polygon) {
        return shadowPolygon((Polygon) geometry, offset, geometry.getFactory(), doUnion);
    }
    if (geometry instanceof LineString) {
        return shadowLine((LineString) geometry, offset, geometry.getFactory(), doUnion);
    }
    if (geometry instanceof Point) {
        return shadowPoint((Point) geometry, offset, geometry.getFactory());
    }
    throw new IllegalArgumentException("The shadow function supports only single geometry POINT, LINE or POLYGON.");
}
|
java
|
/**
 * Collects a {@link DetailParam} for every parameter of {@code method} that is
 * annotated with {@code annotationClass}.
 *
 * @param parentClass     the class declaring the method
 * @param method          the method whose parameters are inspected
 * @param annotationClass the annotation type to look for
 * @return one entry per annotated parameter; empty when the method has no
 *         parameters or none are annotated
 */
public List<DetailParam> getParameterFromMethodWithAnnotation(Class<?> parentClass, Method method, Class<?> annotationClass) {
    List<DetailParam> params = new ArrayList<>();
    if (method.getParameterCount() < 1) {
        return params;
    }
    for (Parameter param : method.getParameters()) {
        // Stop at the first matching annotation; one entry per parameter.
        for (Annotation annotation : param.getAnnotations()) {
            if (annotation.annotationType().equals(annotationClass)) {
                params.add(new DetailParam(param.getType(), method, parentClass));
                break;
            }
        }
    }
    return params;
}
|
java
|
/**
 * Asynchronously creates or updates an automation variable, notifying the given
 * callback when the service response is available.
 *
 * @param resourceGroupName the resource group name
 * @param automationAccountName the automation account name
 * @param variableName the variable name
 * @param parameters the create/update parameters
 * @param serviceCallback callback invoked with the resulting {@code VariableInner}
 * @return a future tracking the asynchronous operation
 */
public ServiceFuture<VariableInner> createOrUpdateAsync(String resourceGroupName, String automationAccountName, String variableName, VariableCreateOrUpdateParameters parameters, final ServiceCallback<VariableInner> serviceCallback) {
    return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, automationAccountName, variableName, parameters), serviceCallback);
}
|
python
|
def wait_for_string(self, expected_string, timeout=60):
    """Wait for string FSM.

    Builds and runs a finite-state machine that watches the device session
    until ``expected_string`` appears, handling syntax errors, pagination
    ('more' prompts), unexpected prompts, disconnects and timeouts.

    :param expected_string: compiled pattern (or pexpect pattern) to wait for
    :param timeout: overall FSM timeout in seconds
    :return: the result of ``fsm.run()`` (truthy on success)
    """
    # Event indices referenced by the transition table below:
    #           0                 1                       2                3
    events = [self.syntax_error_re, self.connection_closed_re, expected_string, self.press_return_re,
              #       4           5              6               7
              self.more_re, pexpect.TIMEOUT, pexpect.EOF, self.buffer_overflow_re]
    # add detected prompts chain
    events += self.device.get_previous_prompts()  # without target prompt
    self.log("Expecting: {}".format(pattern_to_str(expected_string)))
    # Transition tuples: (event, [valid states], next state, action, timeout).
    transitions = [
        (self.syntax_error_re, [0], -1, CommandSyntaxError("Command unknown", self.device.hostname), 0),
        (self.connection_closed_re, [0], 1, a_connection_closed, 10),
        (pexpect.TIMEOUT, [0], -1, CommandTimeoutError("Timeout waiting for prompt", self.device.hostname), 0),
        (pexpect.EOF, [0, 1], -1, ConnectionError("Unexpected device disconnect", self.device.hostname), 0),
        # Pagination: send a space to advance through '--More--' output.
        (self.more_re, [0], 0, partial(a_send, " "), 10),
        (expected_string, [0, 1], -1, a_expected_prompt, 0),
        (self.press_return_re, [0], -1, a_stays_connected, 0),
        # TODO: Customize in XR driver
        (self.buffer_overflow_re, [0], -1, CommandSyntaxError("Command too long", self.device.hostname), 0)
    ]
    # Any previously seen prompt keeps the FSM alive (state 0) but is unexpected.
    for prompt in self.device.get_previous_prompts():
        transitions.append((prompt, [0, 1], 0, a_unexpected_prompt, 0))
    fsm = FSM("WAIT-4-STRING", self.device, events, transitions, timeout=timeout)
    return fsm.run()
|
python
|
def embed(inp, n_inputs, n_features, initializer=None,
          fix_parameters=False, apply_w=None):
    """ Embed.

    Embed slices a matrix/tensor with indexing array/tensor. Weights are initialized with :obj:`nnabla.initializer.UniformInitializer` within the range of :math:`-\\sqrt{3}` and :math:`\\sqrt{3}`.

    Args:
        inp (~nnabla.Variable): [Integer] Indices with shape :math:`(I_0, ..., I_N)`
        n_inputs : number of possible inputs, words or vocabularies
        n_features : number of embedding features
        initializer (:obj:`nnabla.initializer.BaseInitializer`): Initializer
            for the weight matrix; defaults to the uniform initializer
            described above when not given.
        fix_parameters (bool): When set to `True`, the embedding weight matrix
            will not be updated.
        apply_w (function): Lambda, function, or callable object applied to the weights.

    Returns:
        ~nnabla.Variable: Output with shape :math:`(I_0, ..., I_N, W_1, ..., W_M)`
    """
    if not initializer:
        initializer = UniformInitializer((-np.sqrt(3.), np.sqrt(3)))
    # Create (or fetch) the shared embedding weight parameter "W";
    # need_grad is disabled when fix_parameters is True.
    w = get_parameter_or_create("W", [n_inputs, n_features],
                                initializer, True, not fix_parameters)
    if apply_w is not None:
        w = apply_w(w)
    return F.embed(inp, w)
|
python
|
def buffer_focus(self, buf, redraw=True):
    """focus given :class:`~alot.buffers.Buffer`.

    Fires the ``pre_buffer_focus`` hook, switches ``self.current_buffer``
    (refusing buffers not registered in ``self.buffers``), updates the UI and
    finally fires the ``post_buffer_focus`` hook with a success flag.

    NOTE(review): the ``redraw`` parameter is not used in this body — confirm
    whether callers rely on it.
    """
    # call pre_buffer_focus hook
    prehook = settings.get_hook('pre_buffer_focus')
    if prehook is not None:
        prehook(ui=self, dbm=self.dbman, buf=buf)

    success = False
    if buf not in self.buffers:
        logging.error('tried to focus unknown buffer')
    else:
        if self.current_buffer != buf:
            self.current_buffer = buf
        self.mode = buf.modename
        # Buffer-list buffers show live state, so rebuild before displaying.
        if isinstance(self.current_buffer, BufferlistBuffer):
            self.current_buffer.rebuild()
        self.update()
        success = True

    # call post_buffer_focus hook
    posthook = settings.get_hook('post_buffer_focus')
    if posthook is not None:
        posthook(ui=self, dbm=self.dbman, buf=buf, success=success)
|
java
|
/**
 * ANDs together the given clauses into a single {@code ManyClause}.
 *
 * NOTE(review): the {@code first}/{@code second}/{@code others} arguments are only
 * markers — the actual clauses come off an internal stack populated by the earlier
 * chained calls, so the pop order below (others first via buildClauseArray, then
 * second, then first) must mirror the push order. Confirm against buildClauseArray.
 *
 * @return this, for call chaining
 */
public Where<T, ID> and(Where<T, ID> first, Where<T, ID> second, Where<T, ID>... others) {
    Clause[] clauses = buildClauseArray(others, "AND");
    // The clause for `second` was pushed after `first`, so it pops first.
    Clause secondClause = pop("AND");
    Clause firstClause = pop("AND");
    addClause(new ManyClause(firstClause, secondClause, clauses, ManyClause.AND_OPERATION));
    return this;
}
|
java
|
/**
 * Inserts a new attribute value at the given index, creating (or reusing) the
 * corresponding value view widget and registering the change for undo/redo.
 *
 * The insertion is skipped when the attribute already holds its maximum number
 * of values, or when the value's type does not match the attribute type.
 *
 * @param value     the entity value to insert
 * @param index     the position at which to insert it
 * @param container the widget container (must be a {@code FlowPanel})
 */
public void insertNewAttributeValue(CmsEntity value, int index, Panel container) {
    // make sure not to add more values than allowed
    int maxOccurrence = getEntityType().getAttributeMaxOccurrence(m_attributeName);
    CmsEntityAttribute attribute = m_entity.getAttribute(m_attributeName);
    boolean mayHaveMore = ((attribute == null) || (attribute.getValueCount() < maxOccurrence));
    if (mayHaveMore && value.getTypeName().equals(m_attributeType.getId())) {
        m_entity.insertAttributeValue(m_attributeName, value, index);
        int valueIndex = index;
        CmsAttributeValueView valueView = null;
        // Reuse the single empty placeholder view when present; otherwise build a new one.
        if ((m_attributeValueViews.size() == 1) && !m_attributeValueViews.get(0).hasValue()) {
            valueView = m_attributeValueViews.get(0);
        } else {
            valueView = new CmsAttributeValueView(
                this,
                m_widgetService.getAttributeLabel(m_attributeName),
                m_widgetService.getAttributeHelp(m_attributeName));
        }
        CmsRenderer.setAttributeChoice(m_widgetService, valueView, getAttributeType());
        // Re-position the view at the requested index (remove first in case it was reused).
        m_attributeValueViews.remove(valueView);
        m_attributeValueViews.add(index, valueView);
        ((FlowPanel)container).insert(valueView, index);
        insertHandlers(valueIndex);
        I_CmsEntityRenderer renderer = m_widgetService.getRendererForAttribute(m_attributeName, getAttributeType());
        valueView.setValueEntity(renderer, value);
        // Record the insertion so it can be undone/redone.
        CmsUndoRedoHandler handler = CmsUndoRedoHandler.getInstance();
        if (handler.isIntitalized()) {
            handler.addChange(m_entity.getId(), m_attributeName, valueIndex, ChangeType.add);
        }
    }
}
|
python
|
def set_inlets(self, pores=None, clusters=None):
    r"""
    Set the invasion inlets and seed the invasion queue(s).

    Parameters
    ----------
    pores : array_like
        The list of inlet pores from which the Phase can enter the Network.
        All of these pores share a single invasion pressure (they become one
        cluster).
    clusters : list of lists - can be just one list but each list defines
        a cluster of pores that share a common invasion pressure.
        Like Basic Invasion Percolation a queue of throats is maintained
        per cluster.

    Notes
    -----
    Exactly one of ``pores`` or ``clusters`` should be given.
    NOTE(review): when neither is given, an error is logged but execution
    continues and ``enumerate(None)`` below will raise TypeError — confirm
    whether this should raise explicitly instead.
    """
    if pores is not None:
        logger.info("Setting inlet pores at shared pressure")
        clusters = []
        clusters.append(pores)
    elif clusters is not None:
        logger.info("Setting inlet clusters at individual pressures")
    else:
        logger.error("Either 'inlets' or 'clusters' must be passed to" +
                     " setup method")
    self.queue = []
    for i, cluster in enumerate(clusters):
        self.queue.append([])
        # Perform initial analysis on input pores: mark them invaded at
        # sequence 0 with -inf pressure and tag their cluster id.
        self['pore.invasion_sequence'][cluster] = 0
        self['pore.cluster'][cluster] = i
        self['pore.invasion_pressure'][cluster] = -np.inf
        if np.size(cluster) > 1:
            for elem_id in cluster:
                self._add_ts2q(elem_id, self.queue[i])
        elif np.size(cluster) == 1:
            self._add_ts2q(cluster, self.queue[i])
        else:
            logger.warning("Some inlet clusters have no pores")
    # Optionally apply snap-off adjustments configured in settings.
    if self.settings['snap_off']:
        self._apply_snap_off()
|
java
|
public static boolean isAssignableFrom(String lookingFor, TypeDescriptor candidate) {
    // First check every directly implemented interface, recursing into the
    // interface hierarchy of each one.
    for (String interfaceName : candidate.getSuperinterfacesName()) {
        if (interfaceName.equals(lookingFor)
                || isAssignableFrom(lookingFor, candidate.getTypeRegistry().getDescriptorFor(interfaceName))) {
            return true;
        }
    }
    // Then walk up the superclass chain; a null supertype ends the search.
    String superName = candidate.getSupertypeName();
    if (superName == null) {
        return false;
    }
    return superName.equals(lookingFor)
            || isAssignableFrom(lookingFor, candidate.getTypeRegistry().getDescriptorFor(superName));
}
|
java
|
public AccountInfo getAccountInfo() throws Exception {
    // Fetch the raw account representation and wrap it in a typed object.
    return new AccountInfo(client, toJSONObject(client.get(getUri(), null)));
}
|
java
|
public static PublishDelete createPublishDelete(Identifier i1, Identifier i2,
        String filter) {
    // Build an empty delete request, attach the identifier pair, then the filter.
    PublishDelete delete = createPublishDelete();
    fillIdentifierHolder(delete, i1, i2);
    delete.setFilter(filter);
    return delete;
}
|
java
|
@Override
public SendTemplatedEmailResult sendTemplatedEmail(SendTemplatedEmailRequest request) {
    // Apply the standard pre-execution hooks, then dispatch the call.
    return executeSendTemplatedEmail(beforeClientExecution(request));
}
|
java
|
public OvhClusterAllowedNetwork serviceName_cluster_clusterId_allowedNetwork_allowedNetworkId_GET(String serviceName, String clusterId, String allowedNetworkId) throws IOException {
    // Expand the URL template, issue the GET and deserialize the JSON body.
    String template = "/dbaas/logs/{serviceName}/cluster/{clusterId}/allowedNetwork/{allowedNetworkId}";
    String url = path(template, serviceName, clusterId, allowedNetworkId).toString();
    return convertTo(exec(template, "GET", url, null), OvhClusterAllowedNetwork.class);
}
|
java
|
public static List<String> readLinesWithRestOrFilePathOrClasspath(
        String resourceWithRestOrFilePathOrClasspath, String charsetName) {
    // Resolve the resource to its full text, then split it into lines.
    String content = getStringWithRestOrClasspathOrFilePath(
            resourceWithRestOrFilePathOrClasspath, charsetName);
    return JMCollections.buildListByLine(content);
}
|
java
|
/**
 * Writes a single attribute on the given web-subsystem connector and fails
 * loudly if the management operation does not succeed.
 *
 * @param connectorName name of the connector to modify
 * @param attribName attribute to write
 * @param attribValue new attribute value
 * @throws FailureException if the management operation reports failure
 */
public void changeConnector(String connectorName, String attribName, String attribValue) throws Exception {
    // Target address: /subsystem=web/connector=<connectorName>
    final Address address = Address.root().add(SUBSYSTEM, SUBSYSTEM_WEB, CONNECTOR, connectorName);
    final ModelNode op = createWriteAttributeRequest(attribName, attribValue, address);
    final ModelNode response = execute(op);
    if (!isSuccess(response)) {
        throw new FailureException(response);
    }
    // Fix: removed the redundant trailing 'return;' of the original void method.
}
|
java
|
private void writeHeader(final String dtd) throws IOException {
    if (forHtml) {
        // HTML output gets a fixed doctype and nothing else.
        wtr.write("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">");
    } else {
        // XML output: declaration first, then an optional properties DOCTYPE.
        wtr.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n");
        if (dtd != null) {
            wtr.write("<!DOCTYPE properties SYSTEM \"");
            wtr.write(dtd);
            wtr.write("\">\n");
        }
    }
}
|
python
|
def convert_units(self, from_units, to_units):
'''
Convert the mesh from one set of units to another.
These calls are equivalent:
- mesh.convert_units(from_units='cm', to_units='m')
- mesh.scale(.01)
'''
from blmath import units
factor = units.factor(
from_units=from_units,
to_units=to_units,
units_class='length'
)
self.scale(factor)
|
java
|
    /**
     * ANTLR-generated rule: parses one class member declaration (generic
     * method/constructor, method, field, void method, constructor, nested
     * interface or nested class). Do not hand-edit; regenerate from Java.g.
     */
    public final void memberDecl() throws RecognitionException {
        int memberDecl_StartIndex = input.index();
        try {
            // Memoization fast-path while backtracking.
            if ( state.backtracking>0 && alreadyParsedRule(input, 22) ) { return; }
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:365:5: ( genericMethodOrConstructorDecl | methodDeclaration | fieldDeclaration | 'void' Identifier voidMethodDeclaratorRest | Identifier constructorDeclaratorRest | interfaceDeclaration | classDeclaration )
            // alt36 selects one of the 7 alternatives from up to 3 lookahead
            // tokens plus syntactic predicates (synpred38/synpred39).
            int alt36=7;
            switch ( input.LA(1) ) {
            case 53:
                {
                alt36=1;
                }
                break;
            case Identifier:
                {
                switch ( input.LA(2) ) {
                case 53:
                    {
                    int LA36_9 = input.LA(3);
                    if ( (synpred38_Java()) ) {
                        alt36=2;
                    }
                    else if ( (synpred39_Java()) ) {
                        alt36=3;
                    }
                    else {
                        if (state.backtracking>0) {state.failed=true; return;}
                        int nvaeMark = input.mark();
                        try {
                            for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
                                input.consume();
                            }
                            NoViableAltException nvae =
                                new NoViableAltException("", 36, 9, input);
                            throw nvae;
                        } finally {
                            input.rewind(nvaeMark);
                        }
                    }
                    }
                    break;
                case 47:
                    {
                    int LA36_10 = input.LA(3);
                    if ( (synpred38_Java()) ) {
                        alt36=2;
                    }
                    else if ( (synpred39_Java()) ) {
                        alt36=3;
                    }
                    else {
                        if (state.backtracking>0) {state.failed=true; return;}
                        int nvaeMark = input.mark();
                        try {
                            for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
                                input.consume();
                            }
                            NoViableAltException nvae =
                                new NoViableAltException("", 36, 10, input);
                            throw nvae;
                        } finally {
                            input.rewind(nvaeMark);
                        }
                    }
                    }
                    break;
                case 59:
                    {
                    int LA36_11 = input.LA(3);
                    if ( (synpred38_Java()) ) {
                        alt36=2;
                    }
                    else if ( (synpred39_Java()) ) {
                        alt36=3;
                    }
                    else {
                        if (state.backtracking>0) {state.failed=true; return;}
                        int nvaeMark = input.mark();
                        try {
                            for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
                                input.consume();
                            }
                            NoViableAltException nvae =
                                new NoViableAltException("", 36, 11, input);
                            throw nvae;
                        } finally {
                            input.rewind(nvaeMark);
                        }
                    }
                    }
                    break;
                case Identifier:
                    {
                    int LA36_12 = input.LA(3);
                    if ( (synpred38_Java()) ) {
                        alt36=2;
                    }
                    else if ( (synpred39_Java()) ) {
                        alt36=3;
                    }
                    else {
                        if (state.backtracking>0) {state.failed=true; return;}
                        int nvaeMark = input.mark();
                        try {
                            for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
                                input.consume();
                            }
                            NoViableAltException nvae =
                                new NoViableAltException("", 36, 12, input);
                            throw nvae;
                        } finally {
                            input.rewind(nvaeMark);
                        }
                    }
                    }
                    break;
                case 36:
                    {
                    alt36=5;
                    }
                    break;
                default:
                    if (state.backtracking>0) {state.failed=true; return;}
                    int nvaeMark = input.mark();
                    try {
                        input.consume();
                        NoViableAltException nvae =
                            new NoViableAltException("", 36, 2, input);
                        throw nvae;
                    } finally {
                        input.rewind(nvaeMark);
                    }
                }
                }
                break;
            case 65:
            case 67:
            case 71:
            case 77:
            case 85:
            case 92:
            case 94:
            case 105:
                {
                int LA36_3 = input.LA(2);
                if ( (LA36_3==59) ) {
                    int LA36_14 = input.LA(3);
                    if ( (synpred38_Java()) ) {
                        alt36=2;
                    }
                    else if ( (synpred39_Java()) ) {
                        alt36=3;
                    }
                    else {
                        if (state.backtracking>0) {state.failed=true; return;}
                        int nvaeMark = input.mark();
                        try {
                            for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
                                input.consume();
                            }
                            NoViableAltException nvae =
                                new NoViableAltException("", 36, 14, input);
                            throw nvae;
                        } finally {
                            input.rewind(nvaeMark);
                        }
                    }
                }
                else if ( (LA36_3==Identifier) ) {
                    int LA36_15 = input.LA(3);
                    if ( (synpred38_Java()) ) {
                        alt36=2;
                    }
                    else if ( (synpred39_Java()) ) {
                        alt36=3;
                    }
                    else {
                        if (state.backtracking>0) {state.failed=true; return;}
                        int nvaeMark = input.mark();
                        try {
                            for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
                                input.consume();
                            }
                            NoViableAltException nvae =
                                new NoViableAltException("", 36, 15, input);
                            throw nvae;
                        } finally {
                            input.rewind(nvaeMark);
                        }
                    }
                }
                else {
                    if (state.backtracking>0) {state.failed=true; return;}
                    int nvaeMark = input.mark();
                    try {
                        input.consume();
                        NoViableAltException nvae =
                            new NoViableAltException("", 36, 3, input);
                        throw nvae;
                    } finally {
                        input.rewind(nvaeMark);
                    }
                }
                }
                break;
            case 118:
                {
                alt36=4;
                }
                break;
            case 58:
            case 93:
                {
                alt36=6;
                }
                break;
            case ENUM:
            case 72:
                {
                alt36=7;
                }
                break;
            default:
                if (state.backtracking>0) {state.failed=true; return;}
                NoViableAltException nvae =
                    new NoViableAltException("", 36, 0, input);
                throw nvae;
            }
            // Dispatch to the alternative selected above.
            switch (alt36) {
                case 1 :
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:365:7: genericMethodOrConstructorDecl
                    {
                    pushFollow(FOLLOW_genericMethodOrConstructorDecl_in_memberDecl710);
                    genericMethodOrConstructorDecl();
                    state._fsp--;
                    if (state.failed) return;
                    }
                    break;
                case 2 :
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:366:7: methodDeclaration
                    {
                    pushFollow(FOLLOW_methodDeclaration_in_memberDecl718);
                    methodDeclaration();
                    state._fsp--;
                    if (state.failed) return;
                    }
                    break;
                case 3 :
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:367:7: fieldDeclaration
                    {
                    pushFollow(FOLLOW_fieldDeclaration_in_memberDecl726);
                    fieldDeclaration();
                    state._fsp--;
                    if (state.failed) return;
                    }
                    break;
                case 4 :
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:368:7: 'void' Identifier voidMethodDeclaratorRest
                    {
                    match(input,118,FOLLOW_118_in_memberDecl734); if (state.failed) return;
                    match(input,Identifier,FOLLOW_Identifier_in_memberDecl736); if (state.failed) return;
                    pushFollow(FOLLOW_voidMethodDeclaratorRest_in_memberDecl738);
                    voidMethodDeclaratorRest();
                    state._fsp--;
                    if (state.failed) return;
                    }
                    break;
                case 5 :
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:369:7: Identifier constructorDeclaratorRest
                    {
                    match(input,Identifier,FOLLOW_Identifier_in_memberDecl746); if (state.failed) return;
                    pushFollow(FOLLOW_constructorDeclaratorRest_in_memberDecl748);
                    constructorDeclaratorRest();
                    state._fsp--;
                    if (state.failed) return;
                    }
                    break;
                case 6 :
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:370:7: interfaceDeclaration
                    {
                    pushFollow(FOLLOW_interfaceDeclaration_in_memberDecl756);
                    interfaceDeclaration();
                    state._fsp--;
                    if (state.failed) return;
                    }
                    break;
                case 7 :
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:371:7: classDeclaration
                    {
                    pushFollow(FOLLOW_classDeclaration_in_memberDecl764);
                    classDeclaration();
                    state._fsp--;
                    if (state.failed) return;
                    }
                    break;
            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {
            // do for sure before leaving
            if ( state.backtracking>0 ) { memoize(input, 22, memberDecl_StartIndex); }
        }
    }
|
java
|
private boolean hasDelimiter(String word, String[] delimiters) {
boolean delim = false;
for (int i = 0; i < delimiters.length; i++) {
if (word.indexOf(delimiters[i]) > -1) {
delim = true;
break;
}
}
return delim;
}
|
java
|
public OperationStatus deletePatterns(UUID appId, String versionId, List<UUID> patternIds) {
    // Block on the async variant and unwrap the single response body.
    return deletePatternsWithServiceResponseAsync(appId, versionId, patternIds)
        .toBlocking()
        .single()
        .body();
}
|
python
|
def activate(self, token):
"""Make a copy of the received token and call `self._activate`."""
if watchers.worth('MATCHER', 'DEBUG'): # pragma: no cover
watchers.MATCHER.debug(
"Node <%s> activated with token %r", self, token)
return self._activate(token.copy())
|
python
|
def set_data(self, image):
"""Set the data
Parameters
----------
image : array-like
The image data.
"""
data = np.asarray(image)
if self._data is None or self._data.shape != data.shape:
self._need_vertex_update = True
self._data = data
self._need_texture_upload = True
|
java
|
    /**
     * CORBA entry point: writes the given attribute values and reads them
     * back in a single call, guarded by the device attribute lock.
     * Non-DevFailed exceptions are wrapped so the client can see the trace.
     */
    @Override
    public AttributeValue_4[] write_read_attributes_4(final AttributeValue_4[] values, final ClntIdent clIdent)
            throws MultiDevFailed, DevFailed {
        MDC.setContextMap(contextMap);
        xlogger.entry();
        checkInitialization();
        // Collect attribute names for monitoring and locking checks.
        final String[] names = new String[values.length];
        for (int i = 0; i < names.length; i++) {
            names[i] = values[i].name;
        }
        deviceMonitoring.startRequest("write_read_attributes_4 " + Arrays.toString(names), clIdent);
        clientIdentity.set(clIdent);
        AttributeValue_4[] val = null;
        // The admin device is exempt from client-locking enforcement.
        if (!name.equalsIgnoreCase(getAdminDeviceName())) {
            clientLocking.checkClientLocking(clIdent, names);
        }
        final Object lock = deviceLock.getAttributeLock();
        try {
            // When no shared lock is configured, synchronizing on a fresh
            // Object is deliberately a no-op (no cross-request exclusion).
            synchronized (lock != null ? lock : new Object()) {
                val = writeRead(values);
            }
        } catch (final Exception e) {
            deviceMonitoring.addError();
            if (e instanceof DevFailed) {
                throw (DevFailed) e;
            } else {
                // with CORBA, the stack trace is not visible by the client if
                // not inserted in DevFailed.
                throw DevFailedUtils.newDevFailed(e);
            }
        }
        xlogger.exit();
        return val;
    }
|
java
|
/**
 * Registers a new column: appends its metadata and records the
 * index-to-title mapping. Returns this factory for chaining.
 */
public DelimiterWriterFactory addColumnTitle(final String columnTitle) {
    final Map<String, Object> mapping = this.getColumnMapping();
    final List<ColumnMetaData> metaDataList = (List<ColumnMetaData>) mapping.get(FPConstants.DETAIL_ID);
    final Map<Integer, String> indexToTitle = (Map<Integer, String>) mapping.get(FPConstants.COL_IDX);
    final ColumnMetaData metaData = new ColumnMetaData();
    metaData.setColName(columnTitle);
    metaDataList.add(metaData);
    indexToTitle.put(metaDataList.indexOf(metaData), columnTitle);
    return this;
}
|
python
|
def fix_config(self, options):
"""
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict
"""
options = super(DeleteStorageValue, self).fix_config(options)
opt = "storage_name"
if opt not in options:
options[opt] = "unknown"
if opt not in self.help:
self.help[opt] = "The name of the storage value to delete (string)."
return options
|
python
|
def generateFeatures(numFeatures):
    """Return string features.

    If <=62 features are requested, output will be single character
    alphanumeric strings. Otherwise, output will be ["F0", "F1", ...]
    (note: numbering starts at 0, matching the implementation).
    """
    # Capital letters, lowercase letters, numbers: 62 candidates total.
    # Fix: use range() instead of Python-2-only xrange().
    candidates = ([chr(i + 65) for i in range(26)] +
                  [chr(i + 97) for i in range(26)] +
                  [chr(i + 48) for i in range(10)])
    if numFeatures > len(candidates):
        # Fallback list already has exactly numFeatures entries.
        return ["F{}".format(i) for i in range(numFeatures)]
    return candidates[:numFeatures]
|
python
|
def extend_embedder_vocab(self, embedding_sources_mapping: Dict[str, str] = None) -> None:
"""
Iterates through all embedding modules in the model and assures it can embed
with the extended vocab. This is required in fine-tuning or transfer learning
scenarios where model was trained with original vocabulary but during
fine-tuning/tranfer-learning, it will have it work with extended vocabulary
(original + new-data vocabulary).
Parameters
----------
embedding_sources_mapping : Dict[str, str], (optional, default=None)
Mapping from model_path to pretrained-file path of the embedding
modules. If pretrained-file used at time of embedding initialization
isn't available now, user should pass this mapping. Model path is
path traversing the model attributes upto this embedding module.
Eg. "_text_field_embedder.token_embedder_tokens".
"""
# self.named_modules() gives all sub-modules (including nested children)
# The path nesting is already separated by ".": eg. parent_module_name.child_module_name
embedding_sources_mapping = embedding_sources_mapping or {}
for model_path, module in self.named_modules():
if hasattr(module, 'extend_vocab'):
pretrained_file = embedding_sources_mapping.get(model_path, None)
module.extend_vocab(self.vocab,
extension_pretrained_file=pretrained_file,
model_path=model_path)
|
python
|
    def rolling_performances(self, timestamp='one_month'):
        '''
        Filters self.perfs: builds a DataFrame of per-period performance
        metrics for the given timestamp bucket, indexed by period.

        :param timestamp: key into self.metrics (e.g. 'one_month')
        :return: pandas DataFrame of metrics per period
        :raises NotImplementedError: when no metrics have been computed yet
        '''
        # TODO Study the impact of month choice
        # TODO Check timestamp in an enumeration
        # TODO Implement other benchmarks for perf computation
        # (zipline issue, maybe expected)
        if self.metrics:
            perfs = {}
            length = range(len(self.metrics[timestamp]))
            index = self._get_index(self.metrics[timestamp])
            # NOTE(review): .pop/.index on keys() requires dict.keys() to
            # return a list (Python 2 behavior); under Python 3 this would
            # raise AttributeError — confirm the target interpreter.
            perf_keys = self.metrics[timestamp][0].keys()
            perf_keys.pop(perf_keys.index('period_label'))
            perfs['period'] = np.array(
                [pd.datetime.date(date) for date in index])
            for key in perf_keys:
                perfs[key] = self._to_perf_array(timestamp, key, length)
        else:
            # TODO Get it from DB if it exists
            raise NotImplementedError()
        return pd.DataFrame(perfs, index=index)
|
python
|
def pickle_load(cls, filepath, spectator_mode=True, remove_lock=False):
"""
Loads the object from a pickle file and performs initial setup.
Args:
filepath: Filename or directory name. It filepath is a directory, we
scan the directory tree starting from filepath and we
read the first pickle database. Raise RuntimeError if multiple
databases are found.
spectator_mode: If True, the nodes of the flow are not connected by signals.
This option is usually used when we want to read a flow
in read-only mode and we want to avoid callbacks that can change the flow.
remove_lock:
True to remove the file lock if any (use it carefully).
"""
if os.path.isdir(filepath):
# Walk through each directory inside path and find the pickle database.
for dirpath, dirnames, filenames in os.walk(filepath):
fnames = [f for f in filenames if f == cls.PICKLE_FNAME]
if fnames:
if len(fnames) == 1:
filepath = os.path.join(dirpath, fnames[0])
break # Exit os.walk
else:
err_msg = "Found multiple databases:\n %s" % str(fnames)
raise RuntimeError(err_msg)
else:
err_msg = "Cannot find %s inside directory %s" % (cls.PICKLE_FNAME, filepath)
raise ValueError(err_msg)
if remove_lock and os.path.exists(filepath + ".lock"):
try:
os.remove(filepath + ".lock")
except:
pass
with FileLock(filepath):
with open(filepath, "rb") as fh:
flow = pmg_pickle_load(fh)
# Check if versions match.
if flow.VERSION != cls.VERSION:
msg = ("File flow version %s != latest version %s\n."
"Regenerate the flow to solve the problem " % (flow.VERSION, cls.VERSION))
warnings.warn(msg)
flow.set_spectator_mode(spectator_mode)
# Recompute the status of each task since tasks that
# have been submitted previously might be completed.
flow.check_status()
return flow
|
python
|
def main():
    '''
    Simple examples
    '''
    args = parse_arguments()
    # Optionally prompt for the SSH password.
    password = getpass.getpass("Password: ") if args.askpass else None
    sudo, sudo_pass, sudo_user = False, None, None
    if args.asksudopass:
        sudo = True
        sudo_user = 'root'
        # Empty input falls back to the SSH password.
        sudo_pass = getpass.getpass("Sudo password[default ssh password]: ") or password
    # Default to the local user when none was given on the command line.
    username = args.username or getpass.getuser()
    host_list = args.hosts
    os.environ["ANSIBLE_HOST_KEY_CHECKING"] = "False"
    execute_ping(host_list, username, password,
                 sudo=sudo, sudo_user=sudo_user, sudo_pass=sudo_pass)
|
java
|
    /**
     * Convenience overload: delegates to the three-argument variant with a
     * null final argument.
     */
    public synchronized Pair<PathHandler, Undertow> getServer(int port, String hostName) {
        return getServer(port,hostName,null);
    }
|
java
|
@Override
public boolean apply(final URI uri) {
    final String uriHost = uri.getHost();
    if (uriHost == null) throw new IllegalArgumentException("URI \"" + uri + "\" has no host");
    // BURL hosts are always lower cased
    return uriHost.equals(host);
}
|
java
|
    /**
     * Restricts the query to the given author.
     *
     * @param author author to filter by; must not be null
     * @return this builder, for chaining
     */
    public QueryBuilder byAuthor(String author) {
        Validate.argumentIsNotNull(author);
        queryParamsBuilder.author(author);
        return this;
    }
|
java
|
    /**
     * Closes and, where supported, fully releases the wrapped input stream.
     * Close failures are deliberately best-effort and only logged at debug.
     */
    private void doRelease() {
        try {
            in.close();
        } catch (Exception ex) {
            if (log.isDebugEnabled())
                log.debug("FYI", ex);
        }
        if (in instanceof Releasable) {
            // This allows any underlying stream that has the close operation
            // disabled to be truly released
            Releasable r = (Releasable)in;
            r.release();
        }
        abortIfNeeded();
    }
|
java
|
  /**
   * Asserts that every given data source is in the empty state.
   *
   * @param message Assertion error message.
   * @param dataSources Data sources to verify.
   * @throws DBAssertionError If the assertion fails for any data source.
   */
  @SafeVarargs
  public static void assertEmpty(String message, DataSource... dataSources) throws DBAssertionError {
    multipleEmptyStateAssertions(CallInfo.create(message), dataSources);
  }
|
python
|
def add_contacts(
self,
contacts: List["pyrogram.InputPhoneContact"]
):
"""Use this method to add contacts to your Telegram address book.
Args:
contacts (List of :obj:`InputPhoneContact <pyrogram.InputPhoneContact>`):
The contact list to be added
Returns:
On success, the added contacts are returned.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
"""
imported_contacts = self.send(
functions.contacts.ImportContacts(
contacts=contacts
)
)
return imported_contacts
|
python
|
def main():
    """pyprf_opt_brute entry point.

    Parses CLI arguments and runs pRF fitting either once (no suppressive
    surround) or iteratively over a list of surround/center size ratios,
    comparing R2 across ratios at the end.
    """
    # Get list of input arguments (without first one, which is the path to the
    # function that is called):  --NOTE: This is another way of accessing
    # input arguments, but since we use 'argparse' it is redundant.
    # lstArgs = sys.argv[1:]
    strWelcome = 'pyprf_opt_brute ' + __version__
    strDec = '=' * len(strWelcome)
    print(strDec + '\n' + strWelcome + '\n' + strDec)
    objNspc = get_arg_parse()
    # Print info if no config argument is provided.
    if any(item is None for item in [objNspc.config, objNspc.strPthPrior,
                                     objNspc.varNumOpt1, objNspc.varNumOpt2]):
        print('Please provide the necessary input arguments, i.e.:')
        print('-strCsvCnfg -strPthPrior -varNumOpt1 and -varNumOpt2')
    else:
        # Signal non-test mode to lower functions (needed for pytest):
        lgcTest = False
        # Perform pRF fitting without suppressive surround
        if objNspc.supsur is None:
            print('***Mode: Fit pRF models, no suppressive surround***')
            # Call to main function, to invoke pRF analysis:
            pyprf_opt_brute(objNspc.config, objNspc, lgcTest=lgcTest,
                            strPathHrf=objNspc.strPathHrf, varRat=None)
        # Perform pRF fitting with suppressive surround
        else:
            print('***Mode: Fit pRF models, suppressive surround***')
            # Load config parameters from csv file into dictionary:
            dicCnfg = load_config(objNspc.config, lgcTest=lgcTest,
                                  lgcPrint=False)
            # Load config parameters from dictionary into namespace.
            # We do this on every loop so we have a fresh start in case
            # variables are redefined during the prf analysis
            cfg = cls_set_config(dicCnfg)
            # Make sure that lgcCrteMdl is set to True since we will need
            # to loop iteratively over pyprf_feature with different ratios
            # for size surround to size center. On every loop models,
            # reflecting the new ratio, need to be created from scratch
            errorMsg = 'lgcCrteMdl needs to be set to True for -supsur.'
            assert cfg.lgcCrteMdl, errorMsg
            # Make sure that switchHrf is set to 1. It would not make sense
            # to find the negative surround for the hrf deriavtive function
            errorMsg = 'switchHrfSet needs to be set to 1 for -supsur.'
            assert cfg.switchHrfSet == 1, errorMsg
            # Get list with size ratios
            lstRat = objNspc.supsur
            # Make sure that all ratios are larger than 1.0
            errorMsg = 'All provided ratios need to be larger than 1.0'
            assert np.all(np.greater(np.array(lstRat), 1.0)), errorMsg
            # Append None as the first entry, so fitting without surround
            # is performed once as well
            lstRat.insert(0, None)
            # Loop over ratios and find best pRF
            for varRat in lstRat:
                # Print to command line, so the user knows which exponent
                # is used
                print('---Ratio surround to center: ' + str(varRat))
                # Call to main function, to invoke pRF analysis:
                pyprf_opt_brute(objNspc.config, objNspc, lgcTest=lgcTest,
                                strPathHrf=objNspc.strPathHrf, varRat=varRat)
            # List with name suffices of output images:
            lstNiiNames = ['_x_pos',
                           '_y_pos',
                           '_SD',
                           '_R2',
                           '_polar_angle',
                           '_eccentricity',
                           '_Betas']
            # Compare results for the different ratios, export nii files
            # based on the results of the comparison and delete in-between
            # results
            # Replace first entry (None) with 1, so it can be saved to nii
            lstRat[0] = 1.0
            # Append 'hrf' to cfg.strPathOut, if fitting was done with
            # custom hrf
            if objNspc.strPathHrf is not None:
                cfg.strPathOut = cfg.strPathOut + '_hrf'
            cmp_res_R2(lstRat, lstNiiNames, cfg.strPathOut, cfg.strPathMdl,
                       lgcDel=True, lgcSveMdlTc=False, strNmeExt='_brute')
|
python
|
def infer_category(self, id):
"""
heuristic to infer a category from an id, e.g. DOID:nnn --> disease
"""
logging.info("Attempting category inference on id={}".format(id))
toks = id.split(":")
idspace = toks[0]
c = None
if idspace == 'DOID':
c='disease'
if c is not None:
logging.info("Inferred category: {} based on id={}".format(c, id))
return c
|
java
|
    /**
     * Registers an evidence matcher used to remove matching vendor evidence.
     *
     * @param source the evidence source to match
     * @param name the evidence name to match
     * @param value the evidence value (literal, or regex if {@code regex} is true)
     * @param regex whether {@code value} is a regular expression
     * @param confidence the confidence level to match
     */
    public void addRemoveVendor(String source, String name, String value, boolean regex, Confidence confidence) {
        removeVendor.add(new EvidenceMatcher(source, name, value, regex, confidence));
    }
|
java
|
/**
 * Returns the upper-cased column names of the given table.
 *
 * @param theConnectionProperties connection properties providing the data source
 * @param theTableName table whose columns are listed (matched case-insensitively)
 * @return set of upper-cased column names
 * @throws SQLException if a connection cannot be obtained
 */
public static Set<String> getColumnNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException {
    DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
    try (Connection connection = dataSource.getConnection()) {
        return theConnectionProperties.getTxTemplate().execute(t -> {
            try {
                DatabaseMetaData metadata = connection.getMetaData();
                // Fix: the ResultSet was previously never closed (leak);
                // try-with-resources guarantees release.
                try (ResultSet columns = metadata.getColumns(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), null)) {
                    Set<String> columnNames = new HashSet<>();
                    while (columns.next()) {
                        String tableName = toUpperCase(columns.getString("TABLE_NAME"), Locale.US);
                        // getColumns may return rows for similarly named tables.
                        if (!theTableName.equalsIgnoreCase(tableName)) {
                            continue;
                        }
                        String columnName = columns.getString("COLUMN_NAME");
                        columnNames.add(toUpperCase(columnName, Locale.US));
                    }
                    return columnNames;
                }
            } catch (SQLException e) {
                throw new InternalErrorException(e);
            }
        });
    }
}
|
java
|
/**
 * Normalizes the Solr highlighting response so that every field value is a
 * List&lt;String&gt;, regardless of whether Solr returned a single String,
 * a String[] or a List.
 */
private Map<String, Map<String, List<String>>> transformHighlighting() {
    Map<String, Map<String, List<String>>> result = new HashMap<String, Map<String, List<String>>>();
    if (m_queryResponse.getHighlighting() == null) {
        return result;
    }
    for (String docId : m_queryResponse.getHighlighting().keySet()) {
        Map<String, ?> fieldMap = m_queryResponse.getHighlighting().get(docId);
        Map<String, List<String>> normalized = new HashMap<String, List<String>>();
        for (String field : fieldMap.keySet()) {
            Object raw = fieldMap.get(field);
            if (raw instanceof String) {
                normalized.put(field, Collections.singletonList((String)raw));
            } else if (raw instanceof String[]) {
                List<String> copy = new ArrayList<String>();
                for (String fragment : (String[])raw) {
                    copy.add(fragment);
                }
                normalized.put(field, copy);
            } else if (raw instanceof List<?>) {
                List<String> copy = new ArrayList<String>();
                for (Object fragment : (List<?>)raw) {
                    copy.add((String)fragment);
                }
                normalized.put(field, copy);
            }
            // Unknown value types are silently skipped, as before.
        }
        result.put(docId, normalized);
    }
    return result;
}
|
java
|
  /**
   * Puts this component into a permanent error state: records the failure,
   * disables further writes and shuts down the send executor.
   */
  void setErrorStatus(String msg, Exception e) {
    this.e = new IOException(msg + " " + (e == null ? "" : e.toString()));
    // no more writes will be accepted
    this.isDisabled = true;
    // close the executor
    sendExecutor.shutdown();
    LOG.error(msg, e);
  }
|
java
|
public Object getControlValue()
{
Object objValue = super.getControlValue();
if (objValue instanceof Boolean)
{
if (((Boolean)objValue).booleanValue())
objValue = Constants.TRUE.toLowerCase();
else
objValue = Constants.FALSE.toLowerCase();
}
return objValue;
}
|
python
|
def getTrackedDeviceClass(self, unDeviceIndex):
"""
Returns the device class of a tracked device. If there has not been a device connected in this slot
since the application started this function will return TrackedDevice_Invalid. For previous detected
devices the function will return the previously observed device class.
* To determine which devices exist on the system, just loop from 0 to k_unMaxTrackedDeviceCount and check
the device class. Every device with something other than TrackedDevice_Invalid is associated with an
actual tracked device.
"""
fn = self.function_table.getTrackedDeviceClass
result = fn(unDeviceIndex)
return result
|
python
|
def transform_point(point, source_crs, target_crs):
    """ Maps point form src_crs to tgt_crs

    :param point: a tuple `(x, y)`
    :type point: (float, float)
    :param source_crs: source CRS
    :type source_crs: constants.CRS
    :param target_crs: target CRS
    :type target_crs: constants.CRS
    :return: point in target CRS
    :rtype: (float, float)
    """
    # Identical CRS: nothing to reproject.
    if source_crs == target_crs:
        return point
    x, y = point
    src_proj = CRS.projection(source_crs)
    tgt_proj = CRS.projection(target_crs)
    new_x, new_y = pyproj.transform(src_proj, tgt_proj, x, y)
    return new_x, new_y
|
python
|
def _glob_to_sql(self, string):
"""Convert glob-like wildcards to SQL wildcards
* becomes %
? becomes _
% becomes \%
\\ remains \\
\* remains \*
\? remains \?
This also adds a leading and trailing %, unless the pattern begins with
^ or ends with $
"""
# What's with the chr(1) and chr(2) nonsense? It's a trick to
# hide \* and \? from the * and ? substitutions. This trick
# depends on the substitutiones being done in order. chr(1)
# and chr(2) were picked because I know those characters
# almost certainly won't be in the input string
table = ((r'\\', chr(1)), (r'\*', chr(2)), (r'\?', chr(3)),
(r'%', r'\%'), (r'?', '_'), (r'*', '%'),
(chr(1), r'\\'), (chr(2), r'\*'), (chr(3), r'\?'))
for (a, b) in table:
string = string.replace(a,b)
string = string[1:] if string.startswith("^") else "%" + string
string = string[:-1] if string.endswith("$") else string + "%"
return string
|
java
|
    /**
     * Performs one online learning step: forward pass, weighted output
     * error computation, backpropagation, and weight adjustment.
     *
     * @param x input feature vector
     * @param y target output vector
     * @param weight sample weight applied to the error (and its gradients)
     * @return the weighted output error for this sample
     */
    public double learn(double[] x, double[] y, double weight) {
        setInput(x);
        propagate();
        double err = weight * computeOutputError(y);
        if (weight != 1.0) {
            // Scale each output unit's error term so the gradients carry the
            // sample weight as well.
            for (int i = 0; i < outputLayer.units; i++) {
                outputLayer.error[i] *= weight;
            }
        }
        backpropagate();
        adjustWeights();
        return err;
    }
|
java
|
    /**
     * Renders this query tree as a human-readable string.
     *
     * @return the textual dump of this tree
     * @throws RepositoryException if the tree cannot be traversed
     */
    public String dump() throws RepositoryException {
        StringBuilder tmp = new StringBuilder();
        QueryTreeDump.dump(this, tmp);
        return tmp.toString();
    }
|
python
|
def _wait_trigger(self):
"""Called to launch the next request in the queue."""
if _debug: IOQController._debug("_wait_trigger")
# make sure we are waiting
if (self.state != CTRL_WAITING):
raise RuntimeError("not waiting")
# change our state
self.state = CTRL_IDLE
_statelog.debug("%s %s %s" % (_strftime(), self.name, "idle"))
# look for more to do
IOQController._trigger(self)
|
python
|
def _check_apt_updates(self):
"""
This method will use the 'checkupdates' command line utility
to determine how many updates are waiting to be installed via
'apt list --upgradeable'.
"""
output = str(subprocess.check_output(["apt", "list", "--upgradeable"]))
output = output.split(LINE_SEPARATOR)
return len(output[1:-1])
|
python
|
def file_download(self, item_id: str, item_name: str, dir_name: str) -> bool:
"""
Download file from Google Drive
:param item_id:
:param dir_name:
:return:
"""
service = self.__get_service()
request = service.files().get_media(fileId=item_id)
self.__create_download_dir(dir_name)
fh = io.FileIO(os.path.join(dir_name, item_name), mode='wb')
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
return done
|
python
|
def parse_compound_file(path, format):
    """Open and parse reaction file based on file extension or given format

    Path can be given as a string or a context. Yields parsed compounds.
    Raises ParseError when the format cannot be determined.
    """
    context = FilePathContext(path)
    # YAML files do not need to explicitly specify format
    format = resolve_format(format, context.filepath)
    if format == 'yaml':
        logger.debug('Parsing compound file {} as YAML'.format(
            context.filepath))
        with context.open('r') as f:
            for compound in parse_compound_yaml_file(context, f):
                yield compound
    elif format == 'modelseed':
        logger.debug('Parsing compound file {} as ModelSEED TSV'.format(
            context.filepath))
        # Note: the ModelSEED parser takes (file, context), unlike the others.
        with context.open('r') as f:
            for compound in modelseed.parse_compound_file(f, context):
                yield compound
    elif format == 'tsv':
        logger.debug('Parsing compound file {} as TSV'.format(
            context.filepath))
        with context.open('r') as f:
            for compound in parse_compound_table_file(context, f):
                yield compound
    else:
        raise ParseError('Unable to detect format of compound file {}'.format(
            context.filepath))
|
python
|
def btc_make_payment_script( address, segwit=None, **ignored ):
    """
    Make a pay-to-address script.

    Accepts bech32 (segwit) addresses as well as b58check-encoded p2pkh and
    p2sh addresses, and returns the corresponding output script as hex.
    Raises ValueError for unrecognized addresses (or segwit addresses when
    segwit support is disabled).

    NOTE(review): this uses bytes.encode('hex'), which only exists on
    Python 2 — confirm the target interpreter before porting.
    """
    if segwit is None:
        segwit = get_features('segwit')
    # is address bech32-encoded?
    witver, withash = segwit_addr_decode(address)
    if witver is not None and withash is not None:
        # bech32 segwit address
        if not segwit:
            raise ValueError("Segwit is disabled")
        if len(withash) == 20:
            # p2wpkh
            script_hex = '0014' + withash.encode('hex')
            return script_hex
        elif len(withash) == 32:
            # p2wsh
            script_hex = '0020' + withash.encode('hex')
            return script_hex
        else:
            raise ValueError("Unrecognized address '%s'" % address )
    else:
        # address is b58check-encoded
        # NOTE(review): version_byte / multisig_version_byte are module-level
        # globals not visible here — presumably the mainnet constants.
        vb = keylib.b58check.b58check_version_byte(address)
        if vb == version_byte:
            # p2pkh
            hash160 = binascii.hexlify( keylib.b58check.b58check_decode(address) )
            script = 'OP_DUP OP_HASH160 {} OP_EQUALVERIFY OP_CHECKSIG'.format(hash160)
            script_hex = btc_script_to_hex(script)
            return script_hex
        elif vb == multisig_version_byte:
            # p2sh
            hash160 = binascii.hexlify( keylib.b58check.b58check_decode(address) )
            script = 'OP_HASH160 {} OP_EQUAL'.format(hash160)
            script_hex = btc_script_to_hex(script)
            return script_hex
        else:
            raise ValueError("Unrecognized address '%s'" % address )
|
java
|
  /**
   * Loads a key store from the given path, mapping every failure mode to a
   * {@link LoadedKeyStore} carrying the matching {@code EKeyStoreLoadError}
   * instead of throwing.
   *
   * @param aKeyStoreType the key store type; must not be null
   * @param sKeyStorePath path of the key store; may be null/empty
   * @param sKeyStorePassword password for the key store; may be null
   * @return a LoadedKeyStore wrapping either the key store or an error code
   */
  @Nonnull
  public static LoadedKeyStore loadKeyStore (@Nonnull final IKeyStoreType aKeyStoreType,
                                             @Nullable final String sKeyStorePath,
                                             @Nullable final String sKeyStorePassword)
  {
    ValueEnforcer.notNull (aKeyStoreType, "KeyStoreType");
    // Get the parameters for the key store
    if (StringHelper.hasNoText (sKeyStorePath))
      return new LoadedKeyStore (null, EKeyStoreLoadError.KEYSTORE_NO_PATH);
    KeyStore aKeyStore = null;
    // Try to load key store
    try
    {
      aKeyStore = loadKeyStoreDirect (aKeyStoreType, sKeyStorePath, sKeyStorePassword);
    }
    catch (final IllegalArgumentException ex)
    {
      if (LOGGER.isWarnEnabled ())
        LOGGER.warn ("No such key store '" + sKeyStorePath + "': " + ex.getMessage (), ex.getCause ());
      return new LoadedKeyStore (null,
                                 EKeyStoreLoadError.KEYSTORE_LOAD_ERROR_NON_EXISTING,
                                 sKeyStorePath,
                                 ex.getMessage ());
    }
    catch (final Exception ex)
    {
      // An IOException caused by UnrecoverableKeyException signals a wrong password.
      final boolean bInvalidPW = ex instanceof IOException && ex.getCause () instanceof UnrecoverableKeyException;
      if (LOGGER.isWarnEnabled ())
        LOGGER.warn ("Failed to load key store '" + sKeyStorePath + "': " + ex.getMessage (),
                     bInvalidPW ? null : ex.getCause ());
      return new LoadedKeyStore (null,
                                 bInvalidPW ? EKeyStoreLoadError.KEYSTORE_INVALID_PASSWORD
                                            : EKeyStoreLoadError.KEYSTORE_LOAD_ERROR_FORMAT_ERROR,
                                 sKeyStorePath,
                                 ex.getMessage ());
    }
    // Finally success
    return new LoadedKeyStore (aKeyStore, null);
  }
|
python
|
async def _receive_packet(self, pkt):
    """Dispatch a single incoming packet received from the server."""
    known = pkt.packet_type < len(packet.packet_names)
    packet_name = packet.packet_names[pkt.packet_type] if known else 'UNKNOWN'
    # binary payloads are not echoed into the log
    data_repr = '<binary>' if isinstance(pkt.data, bytes) else pkt.data
    self.logger.info(
        'Received packet %s data %s', packet_name, data_repr)
    if pkt.packet_type == packet.MESSAGE:
        # hand the payload to any registered 'message' handler
        await self._trigger_event('message', pkt.data, run_async=True)
    elif pkt.packet_type == packet.PONG:
        # keep-alive reply; flag it for the ping loop
        self.pong_received = True
    elif pkt.packet_type == packet.NOOP:
        pass
    else:
        self.logger.error('Received unexpected packet of type %s',
                          pkt.packet_type)
|
python
|
def shuffle(self, times=1):
    """
    Shuffles the Stack in place, using ``random.shuffle`` on ``self.cards``.

    .. note::
        Shuffling large numbers of cards (100,000+) may take a while.

    :arg int times:
        The number of times to shuffle.
    """
    # NOTE(review): ``xrange`` is Python-2-only; under Python 3 this needs
    # ``range`` -- confirm the project's target interpreter.
    for _ in xrange(times):
        random.shuffle(self.cards)
|
java
|
/**
 * Increment the finished-task counter and wake all threads waiting on it,
 * so an out-of-band heartbeat can be sent as soon as a task completes.
 * No-op unless out-of-band heartbeats are enabled.
 */
private void notifyTTAboutTaskCompletion() {
  if (oobHeartbeatOnTaskCompletion) {
    // Lock on finishedCount itself: the waiting side presumably
    // synchronizes/waits on the same object, which also makes the
    // non-atomic get/set pair below safe -- confirm at the wait() site.
    synchronized (finishedCount) {
      int value = finishedCount.get();
      finishedCount.set(value+1);
      // wake every waiter; they must re-check the count themselves
      finishedCount.notifyAll();
    }
  }
}
|
python
|
def events_login(self):
    """
    Fetch all login events (GET /events/login).

    :Returns: (list) Events
    """
    # TODO Add paging to this
    raw = self._get(url.events_logins)
    wrapped = self._create_response(raw)
    return wrapped.get("events")
|
python
|
def context_info(zap_helper, context_name):
    """Print details about the given context."""
    with zap_error_handler():
        info = zap_helper.get_context_info(context_name)
    # emit one labelled line per field, in a fixed order
    fields = (('ID', 'id'),
              ('Name', 'name'),
              ('Authentication type', 'authType'),
              ('Included regexes', 'includeRegexs'),
              ('Excluded regexes', 'excludeRegexs'))
    for label, key in fields:
        console.info('{}: {}'.format(label, info[key]))
|
java
|
/**
 * Emit the value wrapped in double quotes, falling back to single quotes
 * when the value itself contains a double quote.
 */
private void quote(final String val) throws IOException {
    final String delimiter = val.contains("\"") ? "'" : "\"";
    value(val, delimiter);
}
|
python
|
def add_item(self, item_id, assessment_part_id):
    """Appends an item to an assessment part.
    arg:    item_id (osid.id.Id): ``Id`` of the ``Item``
    arg:    assessment_part_id (osid.id.Id): ``Id`` of the
            ``AssessmentPart``
    raise:  AlreadyExists - ``item_id`` already part of
            ``assessment_part_id``
    raise:  NotFound - ``item_id`` or ``assessment_part_id`` not
            found
    raise:  NullArgument - ``item_id`` or ``assessment_part_id`` is
            ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # The item found check may want to be run through _get_provider_manager
    # so as to ensure access control:
    from dlkit.abstract_osid.id.primitives import Id as ABCId
    if not isinstance(item_id, ABCId):
        raise errors.InvalidArgument('the argument is not a valid OSID Id')
    # NOTE(review): with ``and`` the namespace check only runs when the
    # isinstance check already failed, so it can never fire on a valid Id --
    # ``or`` looks intended; confirm against other dlkit sessions first.
    if (not isinstance(assessment_part_id, ABCId) and
            assessment_part_id.get_identifier_namespace() != 'assessment_authoring.AssessmentPart'):
        raise errors.InvalidArgument('the argument is not a valid OSID Id')
    if item_id.get_identifier_namespace() != 'assessment.Item':
        # foreign-namespace Id: reject other authorities, otherwise wrap the
        # enclosable object in an Item enclosure and use that Item's Id
        if item_id.get_authority() != self._authority:
            raise errors.InvalidArgument()
        else:
            mgr = self._get_provider_manager('ASSESSMENT')
            admin_session = mgr.get_item_admin_session_for_bank(self._catalog_id, proxy=self._proxy)
            item_id = admin_session._get_item_id_with_enclosure(item_id)
    # look up the item first -- presumably find_one raises if it does not
    # exist, which enforces the NotFound contract; verify against
    # JSONClientValidated's behavior
    collection = JSONClientValidated('assessment',
                                     collection='Item',
                                     runtime=self._runtime)
    item = collection.find_one({'_id': ObjectId(item_id.get_identifier())})
    collection = JSONClientValidated('assessment_authoring',
                                     collection='AssessmentPart',
                                     runtime=self._runtime)
    assessment_part = collection.find_one({'_id': ObjectId(assessment_part_id.get_identifier())})
    # append without duplicating; create the id list on first use
    if 'itemIds' in assessment_part:
        if str(item_id) not in assessment_part['itemIds']:
            assessment_part['itemIds'].append(str(item_id))
    else:
        assessment_part['itemIds'] = [str(item_id)]
    collection.save(assessment_part)
|
java
|
/**
 * Convenience setter: position (x, y) and dimensions (width, height) in one
 * call, delegating to the individual setters.
 */
public void setBounds(float x, float y, float width, float height) {
    this.setX(x);
    this.setY(y);
    this.setSize(width, height);
}
|
python
|
def setbb(self, x, y):
    """Call this when point (X, Y) is to be drawn in the canvas: the
    bounding box is expanded to cover the point, with each coordinate
    first clamped to the clip box."""
    clip = self.__clip_box
    # clamp the point into the clip box, one bound at a time
    x_for_min = max(x, clip[0])
    x_for_max = min(x, clip[2])
    y_for_min = max(y, clip[1])
    y_for_max = min(y, clip[3])
    # grow the box outward only
    self.__xmin = min(self.__xmin, x_for_min)
    self.__xmax = max(self.__xmax, x_for_max)
    self.__ymin = min(self.__ymin, y_for_min)
    self.__ymax = max(self.__ymax, y_for_max)
|
java
|
/**
 * Move the series' "now" pointer to the bucket containing the given unix
 * time stamp, clearing any buckets that were skipped over. Moving backwards
 * in time is rejected.
 *
 * @param unixTimeStamp the new current time stamp
 * @throws IllegalTimePointMovement if the new time lies before the current "now"
 */
public void setNow(final long unixTimeStamp) throws IllegalTimePointMovement {
    /*
     * "now" strongly depends on the TimeUnit used for the timeSeries, as
     * well as the bucketSize. If, e.g., the TimeUnit is MINUTES and the
     * bucketSize is 5, a unix time stamp representing 01/20/1981 08:07:30
     * must be mapped to 01/20/1981 08:10:00 (the next valid bucket).
     */
    if (this.currentNowIdx == -1 || this.now == null) {
        // first call: anchor the series at the normalized time stamp
        this.currentNowIdx = 0;
        this.now = normalizeUnixTimeStamp(unixTimeStamp);
    } else {
        /*
         * Getting the new currentNowIdx is done by calculating the
         * difference between the old now and the new now and moving
         * the currentNowIdx forward.
         *
         *  [0] [1] [2] [3] [4] [5] [6]
         *       ↑
         *  currentNowIdx
         *
         * Assume we move the now time stamp forward by three buckets:
         *
         *  [0] [1] [2] [3] [4] [5] [6]
         *   ↑
         *  currentNowIdx
         *
         * So the calculation is done in two steps:
         * 1.) get the bucket of the new now
         * 2.) determine the difference between the buckets, if it's negative => error,
         *     if it is zero => done, otherwise => erase the fields in between and reset
         *     to zero or null
         */
        final BucketEndPoints newNow = normalizeUnixTimeStamp(unixTimeStamp);
        final long diff = this.now.diff(newNow);
        if (diff < 0) {
            throw new IllegalTimePointMovement(String.format("Cannot move to the past (current: %s, update: %s)",
                    this.now, newNow));
        } else if (diff > 0) {
            // the index moves "backwards" in the ring buffer as time moves forwards
            final int newCurrentNowIdx = idx(currentNowIdx - diff);
            /*
             * Remove the "passed" information. There are several things we have to
             * consider:
             * 1.) the whole array has to be reset
             * 2.) the array has to be reset partly forward
             * 3.) the array has to be reset "around the corner"
             */
            if (diff >= config.getTimeSeriesSize()) {
                fill(-1, -1);
            } else if (newCurrentNowIdx > currentNowIdx) {
                fill(0, currentNowIdx);
                fill(newCurrentNowIdx, -1);
            } else {
                fill(newCurrentNowIdx, currentNowIdx);
            }
            // set the values calculated
            this.currentNowIdx = newCurrentNowIdx;
            this.now = newNow;
        }
    }
}
|
python
|
def turn_physical_on(self,ro=None,vo=None):
    """
    NAME:
       turn_physical_on
    PURPOSE:
       turn on automatic returning of outputs in physical units
    INPUT:
       ro= reference distance (kpc; can be Quantity)
       vo= reference velocity (km/s; can be Quantity)
    OUTPUT:
       (none)
    HISTORY:
       2016-01-19 - Written - Bovy (UofT)
    """
    # mark both scales as explicitly set so physical output stays enabled
    self._roSet= True
    self._voSet= True
    # PEP 8: use "is not None" rather than "not ... is None"
    if ro is not None:
        # strip units if an astropy Quantity was passed
        if _APY_LOADED and isinstance(ro,units.Quantity):
            ro= ro.to(units.kpc).value
        self._ro= ro
    if vo is not None:
        if _APY_LOADED and isinstance(vo,units.Quantity):
            vo= vo.to(units.km/units.s).value
        self._vo= vo
    # propagate the (possibly unit-stripped) scales to the underlying orbit
    self._orb.turn_physical_on(ro=ro,vo=vo)
|
python
|
def require_debian_packages(packages: List[str]) -> None:
    """
    Ensure specific packages are installed under Debian.

    Args:
        packages: list of packages

    Raises:
        ValueError: if any are missing
    """
    installed = are_debian_packages_installed(packages)
    # collect the packages whose installed-flag is falsy, in sorted order
    missing = sorted(pkg for pkg, ok in installed.items() if not ok)
    if not missing:
        return
    msg = (
        "Debian packages are missing, as follows. Suggest:\n\n"
        "sudo apt install {}".format(" ".join(missing))
    )
    log.critical(msg)
    raise ValueError(msg)
|
python
|
def get_item(self, identifier, item_metadata=None, request_kwargs=None):
    """A method for creating :class:`internetarchive.Item <Item>` and
    :class:`internetarchive.Collection <Collection>` objects.

    :type identifier: str
    :param identifier: A globally unique Archive.org identifier.

    :type item_metadata: dict
    :param item_metadata: (optional) A metadata dict used to initialize the Item or
                          Collection object. Metadata will automatically be retrieved
                          from Archive.org if nothing is provided.

    :type request_kwargs: dict
    :param request_kwargs: (optional) Keyword arguments to be used in
                           :meth:`requests.sessions.Session.get` request.
    """
    if not request_kwargs:
        request_kwargs = {}
    if not item_metadata:
        # no metadata supplied -- fetch it from Archive.org
        logger.debug('no metadata provided for "{0}", '
                     'retrieving now.'.format(identifier))
        item_metadata = self.get_metadata(identifier, request_kwargs)
    mediatype = item_metadata.get('metadata', {}).get('mediatype')
    # pick the concrete class for this mediatype, defaulting to Item;
    # a TypeError from the table lookup also falls back to Item
    try:
        item_class = self.ITEM_MEDIATYPE_TABLE.get(mediatype, Item)
    except TypeError:
        item_class = Item
    return item_class(self, identifier, item_metadata)
|
java
|
/**
 * Record that {@code sourceField} is forwarded to {@code destinationField},
 * creating the destination field set on first use.
 */
public void addForwardedField(int sourceField, int destinationField) {
    FieldSet destinations = this.forwardedFields.get(sourceField);
    if (destinations == null) {
        // first mapping for this source field
        destinations = new FieldSet(destinationField);
        this.forwardedFields.put(sourceField, destinations);
    } else {
        destinations.add(destinationField);
    }
}
|
java
|
/**
 * Compares this string to another. When both sides carry a non-zero
 * pre-computed order hint ({@code orderInt}), the hints decide the ordering
 * outright; otherwise the byte arrays are compared element-wise through
 * {@code higher()} (presumably a case-normalizing transform -- see its
 * definition), with ties broken by length.
 */
public int compareTo(final HttpString other) {
    // fast path via order hints
    // NOTE(review): the raw subtraction assumes orderInt values are small
    // enough not to overflow int -- confirm how orderInt is assigned.
    if(orderInt != 0 && other.orderInt != 0) {
        return signum(orderInt - other.orderInt);
    }
    final int len = Math.min(bytes.length, other.bytes.length);
    int res;
    for (int i = 0; i < len; i++) {
        res = signum(higher(bytes[i]) - higher(other.bytes[i]));
        if (res != 0) return res;
    }
    // shorter strings sort higher
    return signum(bytes.length - other.bytes.length);
}
|
python
|
def prt_txt(prt, data_nts, prtfmt=None, nt_fields=None, **kws):
    """Write a table built from the namedtuples to ``prt`` using prtfmt."""
    lines = get_lines(data_nts, prtfmt, nt_fields, **kws)
    if not lines:
        # nothing to write: note it on stdout rather than the target stream
        sys.stdout.write("      0 items. NOT WRITING\n")
        return
    for line in lines:
        prt.write(line)
|
java
|
@Override
public boolean configure(final FeatureContext context) {
    // Hibernate configuration: register each binder, preserving the
    // original registration order.
    final Object[] binders = {
        new HibernateSessionFactory.Binder(),
        new HibernateSessionFactoryFactory.Binder(),
        new HibernateServiceRegistryFactory.Binder(),
        new FulltextSearchFactoryFactory.Binder(),
        new FulltextSessionFactory.Binder()
    };
    for (final Object binder : binders) {
        context.register(binder);
    }
    return true;
}
|
java
|
/**
 * Initialize JDK logging: route commons-logging to JUL, apply an optional
 * "logging.properties" from the classpath, and install the default
 * formatter on all root-logger handlers.
 *
 * @throws IOException if reading the configuration fails, or if no
 *         "logging.properties" resource exists on the classpath (thrown
 *         only after the default formatter has been applied)
 */
public static void initLogging() throws IOException {
    sendCommonsLogToJDKLog();
    try (InputStream resource = Thread.currentThread().getContextClassLoader().getResourceAsStream("logging.properties")) {
        // apply configuration; try-with-resources already closes the stream,
        // so no redundant inner try/finally close() is needed
        if(resource != null) {
            LogManager.getLogManager().readConfiguration(resource);
        }
        // apply a default format to the log handlers here before throwing an exception further down
        Logger log = Logger.getLogger(""); // NOSONAR - local logger used on purpose here
        for (Handler handler : log.getHandlers()) {
            handler.setFormatter(new DefaultFormatter());
        }
        if(resource == null) {
            throw new IOException("Did not find a file 'logging.properties' in the classpath");
        }
    }
}
|
java
|
/**
 * Request a scroll so that the item at {@code position} becomes the pending
 * anchor: clears the pending offset, invalidates any anchor held in the
 * pending saved state, and triggers a layout pass.
 */
@Override
public void scrollToPosition(int position) {
    mPendingScrollPositionOffset = INVALID_OFFSET;
    mCurrentPendingScrollPosition = position;
    if (mCurrentPendingSavedState != null) {
        // drop the anchor stored in restored state so it cannot win
        mCurrentPendingSavedState.putInt("AnchorPosition", RecyclerView.NO_POSITION);
    }
    requestLayout();
}
|
python
|
def median(array):
    """
    Return the median value of a list of numbers.

    For an even-length input the mean of the two middle values is returned
    (always a float); an empty input yields 0 (kept for backward
    compatibility with existing callers).
    """
    n = len(array)
    if n == 0:
        # historical behaviour: empty input is 0, not an error
        return 0
    sorted_vals = sorted(array)
    midpoint = n // 2  # integer division; int(n / 2) was a float-round detour
    if n % 2 == 1:
        # odd count: the middle element itself (covers n == 1 too)
        return sorted_vals[midpoint]
    return (sorted_vals[midpoint - 1] + sorted_vals[midpoint]) / 2.0
|
python
|
def start_packet_groups(self, clear_time_stamps=True, *ports):
    """Start packet groups on ports.

    :param clear_time_stamps: True - clear time stamps, False - don't.
    :param ports: list of ports to start traffic on, if empty start on all ports.
    """
    target_ports = self.set_ports_list(*ports)
    if clear_time_stamps:
        # reset time stamps before (re)starting the groups
        self.api.call_rc('ixClearTimeStamp {}'.format(target_ports))
    self.api.call_rc('ixStartPacketGroups {}'.format(target_ports))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.