language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def get_beam(header):
    """
    Create a :class:`AegeanTools.fits_image.Beam` object from a fits header.

    BPA may be missing but will be assumed to be zero.
    If BMAJ or BMIN are missing then return None instead of a beam object.

    Parameters
    ----------
    header : HDUHeader
        The fits header.

    Returns
    -------
    beam : :class:`AegeanTools.fits_image.Beam`
        Beam object, with a, b, and pa in degrees, or None if the major or
        minor axis is not present in the header.
    """
    if "BPA" in header:
        bpa = header["BPA"]
    else:
        log.warning("BPA not present in fits header, using 0")
        bpa = 0

    if "BMAJ" in header:
        bmaj = header["BMAJ"]
    else:
        log.warning("BMAJ not present in fits header.")
        bmaj = None

    if "BMIN" in header:
        bmin = header["BMIN"]
    else:
        log.warning("BMIN not present in fits header.")
        bmin = None

    # Without both axes there is no meaningful beam to build.
    if bmaj is None or bmin is None:
        return None
    return Beam(bmaj, bmin, bpa)
|
python
|
def _call(self, x, out=None):
    """Create an interpolator from grid values ``x``.

    Parameters
    ----------
    x : `Tensor`
        The array of values to be interpolated
    out : `FunctionSpaceElement`, optional
        Element in which to store the interpolator

    Returns
    -------
    out : `FunctionSpaceElement`
        Per-axis interpolator for the grid of this operator. If
        ``out`` was provided, the returned object is a reference
        to it.
    """
    def per_axis_interp(arg, out=None):
        """Interpolating function with vectorization."""
        # Meshgrid and plain-array inputs are evaluated differently by
        # _PerAxisInterpolator, so detect which one we were given.
        if is_valid_input_meshgrid(arg, self.grid.ndim):
            input_type = 'meshgrid'
        else:
            input_type = 'array'

        # Build a fresh interpolator per evaluation; it closes over the
        # grid coordinates and the values ``x`` from the outer scope.
        interpolator = _PerAxisInterpolator(
            self.grid.coord_vectors, x,
            schemes=self.schemes, nn_variants=self.nn_variants,
            input_type=input_type)

        return interpolator(arg, out=out)

    # Wrap the closure as a vectorized element of this operator's range.
    return self.range.element(per_axis_interp, vectorized=True)
|
python
|
def _extract_multiple_hits(self, hits, reads_path, output_path):
    '''
    Splits out regions of a read that hit the HMM. For example when two of
    the same gene are identified within the same contig, the regions mapping
    to the HMM will be split out and written out to a new file as new records.

    Parameters
    ----------
    hits : dict
        A dictionary where the keys are the read names; the entry for each
        is a dict with two parallel lists:
          'entry'  - [start, stop] spans (1-based, inclusive) within the
                     contig (or read) that mapped to the HMM, e.g.
                     [[3, 126], [305, 413]]
          'strand' - the strand/complement flag for each span.
    reads_path : str
        path to reads file containing each read or contig in FASTA format to
        be opened, and split.
    output_path : str
        path to file to which split reads will be written to in FASTA
        format.

    Returns
    -------
    dict
        Mapping from the (possibly ``_split_N``-suffixed) record name to its
        strand/complement flag.

    Raises
    ------
    InterleavedFileError
        If the input FASTA contains duplicate sequence IDs.
    '''
    complement_information = {}
    try:
        # Biopython's SeqIO.to_dict raises ValueError on duplicate record
        # IDs. Catch only that, so unrelated failures (e.g. a missing file)
        # propagate instead of being mislabelled as an interleaving problem.
        reads = SeqIO.to_dict(SeqIO.parse(reads_path, "fasta"))  # open up reads as dictionary
    except ValueError:
        logging.error("Multiple sequences found with the same ID. The input sequences are either ill formated or are interleaved. \
If you provided GraftM with an interleaved sequence file, please split them into forward and reverse reads, and provide to the the appropriate \
flags (--forward, --reverse). Otherwise, it appears that you have provided sequences with redundant IDs. GraftM doesn't know how to \
deal with these, so please remove/rename sequences with duplicate keys.")
        raise InterleavedFileError()

    with open(output_path, 'w') as out:
        for read_name, entry in hits.iteritems():  # For each contig
            ranges = entry["entry"]
            complements = entry["strand"]
            index = 1
            if len(ranges) > 1:  # if there are multiple hits in that contig
                for r, c in zip(ranges, complements):  # for each of those hits
                    # Subset the record by the span of that hit (1-based
                    # inclusive coordinates -> python slice).
                    new_record = reads[read_name][r[0] - 1:r[1]]
                    new_record.id = new_record.id + '_split_%i' % index  # give that subset record a new header
                    SeqIO.write(new_record, out, "fasta")  # and write it to output
                    index += 1  # increment the split counter
                    complement_information[new_record.id] = c
            else:  # Otherwise, just write the read back to the file
                complement_information[read_name] = entry["strand"][0]
                SeqIO.write(reads[read_name], out, "fasta")
    return complement_information
|
python
|
def push_scope(self, frame, extra_vars=()):
    """This function returns all the shadowed variables in a dict
    in the form name: alias and will write the required assignments
    into the current scope. No indentation takes place.

    This also predefines locally declared variables from the loop
    body because under some circumstances it may be the case that
    `extra_vars` is passed to `Frame.find_shadowed`.
    """
    aliases = {}
    # Alias every shadowed name to a fresh temporary so the outer value
    # can be restored when the scope is popped.
    for name in frame.find_shadowed(extra_vars):
        aliases[name] = ident = self.temporary_identifier()
        self.writeline('%s = l_%s' % (ident, name))
    # Predeclare locally declared, non-shadowed names as `missing` so a
    # reference before assignment fails in a controlled way.
    to_declare = set()
    for name in frame.identifiers.declared_locally:
        if name not in aliases:
            to_declare.add('l_' + name)
    if to_declare:
        self.writeline(' = '.join(to_declare) + ' = missing')
    return aliases
|
java
|
/**
 * Recursively splits {@code filter} into the part that can be converted to
 * time bounds and the residual filter that cannot.
 * <p>
 * The left of the returned pair is the residual filter (null if fully
 * converted); the right is the RangeSet of extracted time bounds (null if
 * nothing was convertible).
 */
private static Pair<DimFilter, RangeSet<Long>> extractConvertibleTimeBounds(final DimFilter filter)
{
  if (filter instanceof AndDimFilter) {
    final List<DimFilter> children = ((AndDimFilter) filter).getFields();
    final List<DimFilter> newChildren = new ArrayList<>();
    final List<RangeSet<Long>> rangeSets = new ArrayList<>();
    // AND: keep unconverted children as the residual and intersect all
    // extracted ranges.
    for (DimFilter child : children) {
      final Pair<DimFilter, RangeSet<Long>> pair = extractConvertibleTimeBounds(child);
      if (pair.lhs != null) {
        newChildren.add(pair.lhs);
      }
      if (pair.rhs != null) {
        rangeSets.add(pair.rhs);
      }
    }
    // Rebuild the residual AND, unwrapping a lone child.
    final DimFilter newFilter;
    if (newChildren.size() == 0) {
      newFilter = null;
    } else if (newChildren.size() == 1) {
      newFilter = newChildren.get(0);
    } else {
      newFilter = new AndDimFilter(newChildren);
    }
    return Pair.of(
        newFilter,
        rangeSets.isEmpty() ? null : RangeSets.intersectRangeSets(rangeSets)
    );
  } else if (filter instanceof OrDimFilter) {
    final List<DimFilter> children = ((OrDimFilter) filter).getFields();
    final List<RangeSet<Long>> rangeSets = new ArrayList<>();
    boolean allCompletelyConverted = true;
    boolean allHadIntervals = true;
    for (DimFilter child : children) {
      final Pair<DimFilter, RangeSet<Long>> pair = extractConvertibleTimeBounds(child);
      if (pair.lhs != null) {
        allCompletelyConverted = false;
      }
      if (pair.rhs != null) {
        rangeSets.add(pair.rhs);
      } else {
        allHadIntervals = false;
      }
    }
    // OR: the union of ranges replaces the filter only when every child was
    // fully converted; otherwise the whole original OR must be kept, and the
    // union is only usable if every child contributed a range.
    if (allCompletelyConverted) {
      return Pair.of(null, RangeSets.unionRangeSets(rangeSets));
    } else {
      return Pair.of(filter, allHadIntervals ? RangeSets.unionRangeSets(rangeSets) : null);
    }
  } else if (filter instanceof NotDimFilter) {
    final DimFilter child = ((NotDimFilter) filter).getField();
    final Pair<DimFilter, RangeSet<Long>> pair = extractConvertibleTimeBounds(child);
    // NOT: complement is only valid when the child was fully converted.
    if (pair.rhs != null && pair.lhs == null) {
      return Pair.of(null, pair.rhs.complement());
    } else {
      return Pair.of(filter, null);
    }
  } else if (filter instanceof BoundDimFilter) {
    final BoundDimFilter bound = (BoundDimFilter) filter;
    // Leaf: convertible only when the bound targets the time column key.
    if (BoundRefKey.from(bound).equals(TIME_BOUND_REF_KEY)) {
      return Pair.of(null, RangeSets.of(toLongRange(Bounds.toRange(bound))));
    } else {
      return Pair.of(filter, null);
    }
  } else {
    // Any other filter type is not convertible; pass it through unchanged.
    return Pair.of(filter, null);
  }
}
|
java
|
/**
 * Opens the named database in this environment.
 * <p>
 * Uses a read-only or read-write transaction depending on how the
 * environment was opened; the transaction is committed in both cases
 * because even read-only Txns require a commit to retain the Dbi handle.
 *
 * @param name  database name
 * @param flags flags to open the database with
 * @return the database handle
 */
public Dbi<T> openDbi(final byte[] name, final DbiFlags... flags) {
    try (Txn<T> txn = readOnly ? txnRead() : txnWrite()) {
        final Dbi<T> dbi = new Dbi<>(this, txn, name, null, flags);
        txn.commit(); // even RO Txns require a commit to retain Dbi in Env
        return dbi;
    }
}
|
java
|
/**
 * Builds a {@link QueryRunner} for the connection and wraps it in a
 * profiling proxy exposed through the service interface.
 *
 * @param conn             JDBC connection the runner operates on
 * @param typeHandlerClazz type handler used for result mapping
 * @return profiled runner as a {@code QueryRunnerService}
 */
public static QueryRunnerService getQueryRunner(Connection conn, Class<? extends TypeHandler> typeHandlerClazz) {
    final QueryRunner runner = new QueryRunner(conn, typeHandlerClazz);
    return (QueryRunnerService) ProfilerFactory.newInstance(runner);
}
|
python
|
def get_input_key():
    """Prompt the user for an API key and loop until a valid one is given.

    A valid key is exactly 32 hexadecimal characters.
    """
    click.secho("No API key found!", fg="yellow", bold=True)
    click.secho("Please visit {} and get an API token.".format(RequestHandler.BASE_URL),
                fg="yellow",
                bold=True)
    prompt_text = click.style("Enter API key", fg="yellow", bold=True)
    while True:
        confkey = click.prompt(prompt_text)
        valid = False
        if len(confkey) == 32:  # 32 chars
            try:
                int(confkey, 16)  # hexadecimal
            except ValueError:
                pass
            else:
                valid = True
        if valid:
            return confkey
        click.secho("Invalid API key", fg="red", bold=True)
|
java
|
/**
 * Returns all {@code TopicAnnotation} enhancements.
 * <p>
 * The enhancements container is keyed by annotation class, so every value
 * stored under the {@code TopicAnnotation} key is expected to actually be a
 * TopicAnnotation; the unchecked cast narrows the collection accordingly.
 */
@SuppressWarnings("unchecked")
public Collection<TopicAnnotation> getCategories() {
    Collection<? extends Enhancement> result = enhancements.get(TopicAnnotation.class);
    return (Collection<TopicAnnotation>) result; // Should be safe. Needs to be tested
}
|
python
|
def send_email(name, ctx_dict, send_to=None, subject=u'Subject', **kwargs):
    """
    Shortcut function for EmailFromTemplate class: builds the template
    mail, renders it with ``ctx_dict`` and dispatches it.

    @return: None
    """
    mail = EmailFromTemplate(name=name)
    mail.subject = subject
    mail.context = ctx_dict
    # Load the template object, render the message body, then send.
    mail.get_object()
    mail.render_message()
    mail.send_email(send_to=send_to, **kwargs)
|
java
|
/**
 * Asserts that the database state matches the given data sets, tagging any
 * failure with {@code message}.
 *
 * @param message  message recorded in the call-site info for errors
 * @param dataSets expected state, one data set per asserted table
 * @throws DBAssertionError if the database state does not match
 */
@SafeVarargs
public static void assertState(String message, DataSet... dataSets) throws DBAssertionError {
    multipleStateAssertions(CallInfo.create(message), dataSets);
}
|
java
|
/**
 * Builds a WebDAV MOVE request between two repository-relative paths.
 *
 * @param sourcePath      path of the resource to move, relative to the repository URL
 * @param destinationPath target path, relative to the repository URL
 * @return the configured MOVE request
 */
public HttpMove createMoveMethod(final String sourcePath, final String destinationPath) {
    final String source = repositoryURL + sourcePath;
    final String destination = repositoryURL + destinationPath;
    return new HttpMove(source, destination);
}
|
java
|
// ANTLR-generated lexer rule for the NOT_EQUAL token ('!=').
// NOTE(review): generated from src/riemann/Query.g -- regenerate rather
// than hand-editing.
public final void mNOT_EQUAL() throws RecognitionException {
    try {
        int _type = NOT_EQUAL;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // src/riemann/Query.g:10:11: ( '!=' )
        // src/riemann/Query.g:10:13: '!='
        {
        match("!=");
        }
        state.type = _type;
        state.channel = _channel;
    }
    finally {
    }
}
|
java
|
/**
 * Creates the B2BUA leg request derived from {@code origRequest}.
 * <p>
 * The original and new sessions are deliberately NOT linked here (see the
 * referenced test below); both sessions are tagged with this helper and the
 * new request is recorded as the original request of the new session.
 *
 * @param origRequest incoming request to derive the new request from
 * @return the newly created request
 */
public SipServletRequest createRequest(SipServletRequest origRequest) {
    final SipServletRequestImpl newSipServletRequest = (SipServletRequestImpl) sipFactoryImpl.createRequest(origRequest, false);
    final SipServletRequestImpl origRequestImpl = (SipServletRequestImpl) origRequest;
    final MobicentsSipSession originalSession = origRequestImpl.getSipSession();
    final MobicentsSipSession session = newSipServletRequest.getSipSession();
    // B2buaHelperTest.testLinkSipSessions101 assumes the sessions shouldn't be linked together
    // sessionMap.put(originalSession.getKey(), session.getKey());
    // sessionMap.put(session.getKey(), originalSession.getKey());
    // dumpLinkedSessions();
    // linkedRequestMap.put(newSipServletRequest, origRequestImpl);
    // linkedRequestMap.put(origRequestImpl, newSipServletRequest);
    session.setB2buaHelper(this);
    originalSession.setB2buaHelper(this);
    setOriginalRequest(session, newSipServletRequest);
    dumpAppSession(session);
    return newSipServletRequest;
}
|
java
|
/**
 * Updates an automation connection asynchronously, reporting the outcome
 * through {@code serviceCallback}. (Azure SDK generated-style wrapper.)
 *
 * @param resourceGroupName     name of the resource group
 * @param automationAccountName name of the automation account
 * @param connectionName        name of the connection to update
 * @param parameters            update parameters
 * @param serviceCallback       callback invoked with the result
 * @return future tracking the asynchronous operation
 */
public ServiceFuture<ConnectionInner> updateAsync(String resourceGroupName, String automationAccountName, String connectionName, ConnectionUpdateParameters parameters, final ServiceCallback<ConnectionInner> serviceCallback) {
    return ServiceFuture.fromResponse(updateWithServiceResponseAsync(resourceGroupName, automationAccountName, connectionName, parameters), serviceCallback);
}
|
python
|
def seek(self, pos):
    """Reposition the parser at byte offset ``pos``.

    Seeks the underlying file object and clears all buffered line and
    token state so parsing restarts cleanly from the new position.
    """
    if self.debug:
        logging.debug('seek: %r' % pos)
    self.fp.seek(pos)
    # Reset the nextline() buffer state.
    self.bufpos, self.buf, self.charpos = pos, b'', 0
    # Reset the nexttoken() tokenizer state back to the main parse state.
    self._parse1 = self._parse_main
    self._curtoken, self._curtokenpos, self._tokens = b'', 0, []
|
python
|
def _merge_mapping(a, b):
    """
    MERGE TWO MAPPINGS, a TAKES PRECEDENCE
    """
    # NOTE(review): assumes a/b are dot-accessible structures where a missing
    # key yields an empty (falsy) value rather than raising -- confirm.
    for name, b_details in b.items():
        a_details = a[literal_field(name)]
        # Entries that carry sub-properties but no explicit type are
        # implicitly objects.
        if a_details.properties and not a_details.type:
            a_details.type = "object"
        if b_details.properties and not b_details.type:
            b_details.type = "object"
        if a_details:
            # Both sides have this field: reconcile types via the merge
            # table and recurse into structured properties.
            a_details.type = _merge_type[a_details.type][b_details.type]
            if b_details.type in ES_STRUCT:
                _merge_mapping(a_details.properties, b_details.properties)
        else:
            # Only b has this field: copy it over wholesale.
            a[literal_field(name)] = deepcopy(b_details)
    return a
|
python
|
def get_message(self, method, args, kwargs, options=None):
    """
    Get the soap message for the specified method, args and soapheaders.
    This is the entry point for creating the outbound soap message.

    @param method: The method being invoked.
    @type method: I{service.Method}
    @param args: A list of args for the method invoked.
    @type args: list
    @param kwargs: Named (keyword) args for the method invoked.
    @type kwargs: dict
    @param options: Optional invocation options forwarded to header
        content construction.
    @return: The soap envelope.
    @rtype: L{Document}
    """
    # Build the header, then the body, then wrap both in an envelope.
    content = self.headercontent(method, options=options)
    header = self.header(content)
    content = self.bodycontent(method, args, kwargs)
    body = self.body(content)
    env = self.envelope(header, body)
    # Namespace prefix handling depends on user options: either normalize
    # body prefixes and promote them up to the envelope, or refit them.
    if self.options().prefixes:
        body.normalizePrefixes()
        env.promotePrefixes()
    else:
        env.refitPrefixes()
    return Document(env)
|
python
|
def plot_mean_quantile_returns_spread_time_series(mean_returns_spread,
                                                  std_err=None,
                                                  bandwidth=1,
                                                  ax=None):
    """
    Plots mean period wise returns for factor quantiles.

    Parameters
    ----------
    mean_returns_spread : pd.Series or pd.DataFrame
        Series with difference between quantile mean returns by period,
        or a DataFrame with one such series per column.
    std_err : pd.Series, optional
        Series with standard error of difference between quantile
        mean returns each period.
    bandwidth : float
        Width of displayed error bands in standard deviations.
    ax : matplotlib.Axes, optional
        Axes upon which to plot.

    Returns
    -------
    ax : matplotlib.Axes
        The axes that were plotted on.
    """
    # DataFrame input: recurse per column, then equalize the y-limits so
    # the resulting subplots are visually comparable.
    if isinstance(mean_returns_spread, pd.DataFrame):
        if ax is None:
            ax = [None for a in mean_returns_spread.columns]

        ymin, ymax = (None, None)
        # NOTE(review): .iteritems() is the legacy pandas iterator (removed
        # in pandas 2.x) -- confirm the targeted pandas version.
        for (i, a), (name, fr_column) in zip(enumerate(ax),
                                             mean_returns_spread.iteritems()):
            stdn = None if std_err is None else std_err[name]
            a = plot_mean_quantile_returns_spread_time_series(fr_column,
                                                              std_err=stdn,
                                                              ax=a)
            ax[i] = a
            curr_ymin, curr_ymax = a.get_ylim()
            ymin = curr_ymin if ymin is None else min(ymin, curr_ymin)
            ymax = curr_ymax if ymax is None else max(ymax, curr_ymax)

        for a in ax:
            a.set_ylim([ymin, ymax])

        return ax

    # Nothing to plot for an all-NaN series.
    if mean_returns_spread.isnull().all():
        return ax

    periods = mean_returns_spread.name
    title = ('Top Minus Bottom Quantile Mean Return ({} Period Forward Return)'
             .format(periods if periods is not None else ""))

    if ax is None:
        f, ax = plt.subplots(figsize=(18, 6))

    # Plot the raw spread (in bps) and a 22-day (~1 month) moving average.
    mean_returns_spread_bps = mean_returns_spread * DECIMAL_TO_BPS
    mean_returns_spread_bps.plot(alpha=0.4, ax=ax, lw=0.7, color='forestgreen')
    mean_returns_spread_bps.rolling(window=22).mean().plot(
        color='orangered',
        alpha=0.7,
        ax=ax
    )
    ax.legend(['mean returns spread', '1 month moving avg'], loc='upper right')

    # Optional +/- bandwidth * std-err shading around the spread.
    if std_err is not None:
        std_err_bps = std_err * DECIMAL_TO_BPS
        upper = mean_returns_spread_bps.values + (std_err_bps * bandwidth)
        lower = mean_returns_spread_bps.values - (std_err_bps * bandwidth)
        ax.fill_between(mean_returns_spread.index,
                        lower,
                        upper,
                        alpha=0.3,
                        color='steelblue')

    # Symmetric y-limits at the 95th percentile of |spread| to clip outliers.
    ylim = np.nanpercentile(abs(mean_returns_spread_bps.values), 95)
    ax.set(ylabel='Difference In Quantile Mean Return (bps)',
           xlabel='',
           title=title,
           ylim=(-ylim, ylim))
    ax.axhline(0.0, linestyle='-', color='black', lw=1, alpha=0.8)

    return ax
|
python
|
def new(self, path, desc=None, bare=True):
    """
    Create a new bare repo.Local instance.

    :param path: Path to new repo.
    :param desc: Repo description.
    :param bare: Create as bare repo.
    :returns: New repo.Local instance.
    :raises RepoError: if ``path`` already exists or initialization fails.
    """
    if os.path.exists(path):
        raise RepoError('Path already exists: %s' % path)
    try:
        os.mkdir(path)
        if bare:
            Repo.init_bare(path)
        else:
            Repo.init(path)
        repo = Local(path)
        if desc:
            repo.setDescription(desc)
        # Record an initial version so the new repo starts with history.
        version = repo.addVersion()
        version.save('Repo Initialization')
        return repo
    except Exception, e:  # Python 2 syntax; traceback below captures details
        traceback.print_exc()
        raise RepoError('Error creating repo')
|
python
|
def stop(self):
    """
    Stops this bot.

    Sets the stop flag and then blocks until every worker thread has
    finished processing, so the bot is fully idle when this returns.
    """
    self.log.debug('Stopping bot {}'.format(self._name))
    self._stop = True
    # Wait for each worker in turn; order does not matter since we need
    # all of them to finish before returning.
    for worker in self._threads:
        worker.join()
    self.log.debug('Stopping bot {} finished. All threads joined.'.format(self._name))
|
java
|
/**
 * Resets the given feature to its default (EMF-generated unset logic).
 */
@Override
public void eUnset(int featureID) {
    switch (featureID) {
        case AfplibPackage.BPS__PSEG_NAME:
            // Restore the page segment name to its default value.
            setPsegName(PSEG_NAME_EDEFAULT);
            return;
        case AfplibPackage.BPS__TRIPLETS:
            // Unsetting the triplets feature means clearing the list.
            getTriplets().clear();
            return;
    }
    // Unknown feature: delegate to the superclass.
    super.eUnset(featureID);
}
|
java
|
/**
 * Clamps each component of this vector so none exceeds {@code max}.
 *
 * @param max upper bound applied to x, y and z
 */
@Override
public void clampMax(int max) {
    this.x = Math.min(this.x, max);
    this.y = Math.min(this.y, max);
    this.z = Math.min(this.z, max);
}
|
java
|
/**
 * Writes an internal general entity declaration to the output:
 * {@code <!ENTITY name "value">} followed by the configured line separator.
 *
 * @param name  entity name
 * @param value replacement text (written as-is; no escaping happens here)
 * @throws IOException if the underlying writer fails
 */
void outputEntityDecl(String name, String value) throws IOException
{
    final java.io.Writer writer = m_writer;
    writer.write("<!ENTITY ");
    writer.write(name);
    writer.write(" \"");
    writer.write(value);
    writer.write("\">");
    writer.write(m_lineSep, 0, m_lineSepLen);
}
|
python
|
def get_executable():
    '''
    Find executable which matches supported python version in the thin
    '''
    # Map of namespace -> minimum (major, minor) python version, read from
    # the 'supported-versions' manifest shipped inside the thin directory.
    pymap = {}
    with open(os.path.join(OPTIONS.saltdir, 'supported-versions')) as _fp:
        for line in _fp.readlines():
            ns, v_maj, v_min = line.strip().split(':')
            pymap[ns] = (int(v_maj), int(v_min))

    # Candidate interpreter commands; the current interpreter is preferred.
    pycmds = (sys.executable, 'python3', 'python27', 'python2.7', 'python26', 'python2.6', 'python2', 'python')
    for py_cmd in pycmds:
        # Ask the candidate to report its (major, minor) version.
        cmd = py_cmd + ' -c "import sys; sys.stdout.write(\'%s:%s\' % (sys.version_info[0], sys.version_info[1]))"'
        stdout, _ = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate()
        if sys.version_info[0] == 2 and sys.version_info[1] < 7:
            # Python 2.6 bytes.decode() does not accept keyword arguments.
            stdout = stdout.decode(get_system_encoding(), "replace").strip()
        else:
            stdout = stdout.decode(encoding=get_system_encoding(), errors="replace").strip()
        if not stdout:
            # Candidate not installed or produced no version output.
            continue
        c_vn = tuple([int(x) for x in stdout.split(':')])
        # Accept the candidate when a namespace matches its major version,
        # the minimum version is met, and that namespace exists in the thin.
        for ns in pymap:
            if c_vn[0] == pymap[ns][0] and c_vn >= pymap[ns] and os.path.exists(os.path.join(OPTIONS.saltdir, ns)):
                return py_cmd
    # No usable interpreter found: abort with the dedicated exit code.
    sys.exit(EX_THIN_PYTHON_INVALID)
|
java
|
/**
 * Merges every record from the source iterator into the main record.
 */
public void run()
{
    Record recDest = this.getMainRecord();
    Iterator<Record> source = this.getSource();
    while (source.hasNext())
    {
        Record recSource = source.next();
        this.mergeSourceRecord(recSource, recDest);
    }
}
|
python
|
def add_analysis(self, analysis):
    """Adds an analysis to be consumed by the Analyses Chart machinery (js)

    :param analysis: the analysis to be rendered in the chart
    """
    analysis_object = api.get_object(analysis)
    result = analysis_object.getResult()
    results_range = analysis_object.getResultsRange()
    range_result = results_range.get('result', None)
    range_min = results_range.get('min', None)
    range_max = results_range.get('max', None)
    # All them must be floatable; otherwise silently skip this analysis.
    for value in [result, range_result, range_min, range_max]:
        if not api.is_floatable(value):
            return
    # Require a valid capture date too, rendered in chart-friendly form.
    cap_date = analysis_object.getResultCaptureDate()
    cap_date = api.is_date(cap_date) and \
        cap_date.strftime('%Y-%m-%d %I:%M %p') or ''
    if not cap_date:
        return
    # Create json: rows are grouped by "<title> (<keyword>)" and, within
    # each group, by the originating sample id.
    ref_sample_id = analysis_object.getSample().getId()
    as_keyword = analysis_object.getKeyword()
    as_name = analysis_object.Title()
    as_ref = '{} ({})'.format(as_name, as_keyword)
    as_rows = self.analyses_dict.get(as_ref, {})
    an_rows = as_rows.get(ref_sample_id, [])
    an_rows.append({
        'date': cap_date,
        'target': api.to_float(range_result),
        'upper': api.to_float(range_max),
        'lower': api.to_float(range_min),
        'result': api.to_float(result),
        'unit': analysis_object.getUnit(),
        'id': api.get_uid(analysis_object)
    })
    as_rows[ref_sample_id] = an_rows
    self.analyses_dict[as_ref] = as_rows
|
python
|
def get_apphook_configs(obj):
    """
    Get apphook configs for an object obj

    :param obj: any model instance
    :return: list of apphook configs for given obj (empty if the model
        declares no apphook fields)
    """
    field_names = get_apphook_field_names(obj)
    if not field_names:
        return []
    return [getattr(obj, field) for field in field_names]
|
java
|
/**
 * Convenience overload: HTTP DELETE with the default connect/read timeouts.
 *
 * @param url   target URL
 * @param query query parameters for the request
 * @return the HTTP response
 * @throws HttpException on request failure
 */
public static Response delete(String url, Map<String, String> query) throws HttpException {
    // NOTE(review): the third parameter of the full overload is passed as
    // null here -- confirm its role (likely headers or body) in that overload.
    return delete(url, query, null, DEFAULT_CONNECT_TIMEOUT, DEFAULT_READ_TIMEOUT);
}
|
java
|
/**
 * Collector that reduces a stream expected to contain exactly one element
 * down to that element.
 *
 * @param <T> element type
 * @return a collector yielding the single element
 * @throws IllegalStateException at collect time if the stream does not
 *         contain exactly one element
 */
public static <T> Collector<T, ?, T> toOneElement() {
    return java.util.stream.Collectors.collectingAndThen(
            java.util.stream.Collectors.toList(),
            list -> {
                if (list.size() != 1) {
                    throw new IllegalStateException("Stream should have only one element");
                }
                return list.get(0);
            });
}
|
python
|
def process_crs(crs):
    """
    Parses cartopy CRS definitions defined in one of a few formats:

      1. EPSG codes: Defined as string of the form "EPSG: {code}" or an integer
      2. proj.4 string: Defined as string of the form "{proj.4 string}"
      3. cartopy.crs.CRS instance
      4. None defaults to crs.PlateCaree

    Raises
    ------
    ImportError
        If GeoViews, cartopy or pyproj are not installed.
    ValueError
        If the declaration cannot be parsed into a CRS.
    """
    try:
        import cartopy.crs as ccrs
        import geoviews as gv # noqa
        import pyproj
    except ImportError:
        # Only trap missing dependencies; anything else should propagate.
        raise ImportError('Geographic projection support requires GeoViews and cartopy.')

    if crs is None:
        return ccrs.PlateCarree()

    if isinstance(crs, basestring) and crs.lower().startswith('epsg'):
        try:
            crs = ccrs.epsg(crs[5:].lstrip().rstrip())
        except Exception:
            raise ValueError("Could not parse EPSG code as CRS, must be of the format 'EPSG: {code}.'")
    elif isinstance(crs, int):
        crs = ccrs.epsg(crs)
    elif isinstance(crs, (basestring, pyproj.Proj)):
        try:
            crs = proj_to_cartopy(crs)
        except Exception:
            # This branch handles proj.4 declarations, so say so in the error.
            raise ValueError("Could not parse projection as CRS, must be of the format 'proj4: {proj4 string}.'")
    elif not isinstance(crs, ccrs.CRS):
        raise ValueError("Projection must be defined as a EPSG code, proj4 string, cartopy CRS or pyproj.Proj.")
    return crs
|
java
|
/**
 * Sets the list of crawler metrics, defensively copying the supplied
 * collection so later external mutation cannot affect this object.
 * A null argument clears the list.
 *
 * @param crawlerMetricsList metrics to store, or null
 */
public void setCrawlerMetricsList(java.util.Collection<CrawlerMetrics> crawlerMetricsList) {
    if (crawlerMetricsList == null) {
        this.crawlerMetricsList = null;
        return;
    }
    this.crawlerMetricsList = new java.util.ArrayList<CrawlerMetrics>(crawlerMetricsList);
}
|
java
|
/**
 * Pumps all characters from {@code reader} into {@code writer} using a
 * 1 KiB buffer, optionally closing either stream afterwards.
 * <p>
 * An exception raised while closing never masks an earlier copy failure:
 * a close-time IOException is only kept when the copy itself succeeded.
 *
 * @param reader      source of characters
 * @param writer      destination
 * @param closeReader whether to close {@code reader} when done
 * @param closeWriter whether to close {@code writer} when done
 * @return {@code writer}, for chaining
 * @throws IOException if the copy (or, failing that, a close) fails
 */
public static <T extends Writer> T pump(Reader reader, T writer, boolean closeReader, boolean closeWriter) throws IOException{
    char buff[] = new char[1024];
    int len;
    Exception exception = null;
    try{
        while((len=reader.read(buff))!=-1)
            writer.write(buff, 0, len);
    }catch(Exception ex){
        exception = ex;
    }finally{
        try{
            // Close reader first, then writer; the nested finally ensures
            // the writer is closed even if closing the reader throws.
            try{
                if(closeReader)
                    reader.close();
            }finally{
                if(closeWriter)
                    writer.close();
            }
        }catch(IOException ex){
            if(exception!=null)
                ex.printStackTrace();
            else
                exception = ex;
        }
    }
    // Re-throw whichever exception was captured, preserving its type.
    if(exception instanceof IOException)
        throw (IOException)exception;
    else if(exception instanceof RuntimeException)
        throw (RuntimeException)exception;
    return writer;
}
|
python
|
def prt_results(self, goea_results):
    """Print GOEA results to the screen or to a file."""
    # objaart = self.prepgrp.get_objaart(goea_results) if self.prepgrp is not None else None
    if self.args.outfile is None:
        # No output file requested: print results to the screen.
        self._prt_results(goea_results)
    else:
        # Users can print to both tab-separated file and xlsx file in one run.
        outfiles = self.args.outfile.split(",")
        grpwr = self.prepgrp.get_objgrpwr(goea_results) if self.prepgrp else None
        if grpwr is None:
            # Flat (ungrouped) output of results.
            self.prt_outfiles_flat(goea_results, outfiles)
        else:
            # Grouped output, delegated to the group-writer helper.
            grpwr.prt_outfiles_grouped(outfiles)
|
java
|
/**
 * Computes the input lattice of {@code node} by joining the output states
 * of its predecessors (forward analysis) or, for backward analysis, its
 * output lattice from the input states of its successors.
 */
protected void joinInputs(DiGraphNode<N, Branch> node) {
    FlowState<L> state = node.getAnnotation();
    if (isForward()) {
        if (cfg.getEntry() == node) {
            // The entry node has no predecessors; seed it with the entry lattice.
            state.setIn(createEntryLattice());
        } else {
            List<DiGraphNode<N, Branch>> inNodes = cfg.getDirectedPredNodes(node);
            if (inNodes.size() == 1) {
                // Single predecessor: copy its output directly, no join needed.
                FlowState<L> inNodeState = inNodes.get(0).getAnnotation();
                state.setIn(inNodeState.getOut());
            } else if (inNodes.size() > 1) {
                // Multiple predecessors: join all their outputs.
                List<L> values = new ArrayList<>(inNodes.size());
                for (DiGraphNode<N, Branch> currentNode : inNodes) {
                    FlowState<L> currentNodeState = currentNode.getAnnotation();
                    values.add(currentNodeState.getOut());
                }
                state.setIn(joinOp.apply(values));
            }
        }
    } else {
        // Backward analysis: successors play the role of predecessors.
        List<DiGraphNode<N, Branch>> inNodes = cfg.getDirectedSuccNodes(node);
        if (inNodes.size() == 1) {
            DiGraphNode<N, Branch> inNode = inNodes.get(0);
            if (inNode == cfg.getImplicitReturn()) {
                // Flowing out of the function: seed with the entry lattice.
                state.setOut(createEntryLattice());
            } else {
                FlowState<L> inNodeState = inNode.getAnnotation();
                state.setOut(inNodeState.getIn());
            }
        } else if (inNodes.size() > 1) {
            List<L> values = new ArrayList<>(inNodes.size());
            for (DiGraphNode<N, Branch> currentNode : inNodes) {
                FlowState<L> currentNodeState = currentNode.getAnnotation();
                values.add(currentNodeState.getIn());
            }
            state.setOut(joinOp.apply(values));
        }
    }
}
|
python
|
def filename_to_task_id(fname):
    """Map filename to the task id that created it assuming 1k tasks.

    Filenames look like ``<prefix>-<split>-<shard>[-...]``; the task id is
    the shard number offset by the split's starting shard.
    """
    # This matches the order and size in WikisumBase.out_filepaths
    split_offsets = {
        "train": 0,
        "dev": 800,
        "test": 900,
    }
    base = os.path.basename(fname)
    _prefix, split, shard_id = base.split("-")[:3]
    return split_offsets[split] + int(shard_id)
|
python
|
def declare_config_variable(self, name, config_id, type_name, default=None, convert=None): #pylint:disable=too-many-arguments;These are all necessary with sane defaults.
    """Declare a config variable that this emulated tile accepts.

    The default value (if passed) may be specified as either a `bytes`
    object or a python int or list of ints. If an int or list of ints is
    passed, it is converted to binary. Otherwise, the raw binary data is
    used.

    Passing a unicode string is only allowed if as_string is True and it
    will be encoded as utf-8 and null terminated for use as a default value.

    NOTE(review): the paragraph above references an ``as_string`` flag that
    is not a parameter of this method -- presumably it refers to the
    ``convert='string'`` mode; confirm and update.

    Args:
        name (str): A user friendly name for this config variable so that it can
            be printed nicely.
        config_id (int): A 16-bit integer id number to identify the config variable.
        type_name (str): An encoded type name that will be parsed by parse_size_name()
        default (object): The default value if there is one. This should be a
            python object that will be converted to binary according to the rules for
            the config variable type specified in type_name.
        convert (str): whether this variable should be converted to a
            python string or bool rather than an int or a list of ints. You can
            pass either 'bool', 'string' or None
    """
    # Registered by id; a later declaration with the same id replaces this one.
    config = ConfigDescriptor(config_id, type_name, default, name=name, python_type=convert)
    self._config_variables[config_id] = config
|
python
|
def shutdown(at_time=None):
    '''
    Shutdown a running system

    at_time
        The wait time in minutes before the system will be shutdown.

    CLI Example:

    .. code-block:: bash

        salt '*' system.shutdown 5
    '''
    # 'now' is used when no delay was requested; otherwise pass the delay
    # (in minutes) through as a string.
    if at_time:
        when = '{0}'.format(at_time)
    else:
        when = 'now'
    cmd = ['shutdown', '-h', when]
    return __salt__['cmd.run'](cmd, python_shell=False)
|
java
|
/**
 * Shuts down the engine, requesting shutdown of every deployed repository
 * and blocking until each completes.
 *
 * @return true if the engine is no longer running when this returns
 */
protected boolean doShutdown() {
    if (state == State.NOT_RUNNING) {
        LOGGER.debug("Engine already shut down.");
        return true;
    }
    LOGGER.debug("Shutting down engine...");
    final Lock lock = this.lock.writeLock();
    try {
        lock.lock();
        state = State.STOPPING;
        if (!repositories.isEmpty()) {
            // Now go through all of the repositories and request they all be shutdown ...
            Queue<Future<Boolean>> repoFutures = new LinkedList<Future<Boolean>>();
            Queue<String> repoNames = new LinkedList<String>();
            for (JcrRepository repository : repositories.values()) {
                if (repository != null) {
                    repoNames.add(repository.getName());
                    repoFutures.add(repository.shutdown());
                }
            }
            // Now block while each is shutdown ...
            while (repoFutures.peek() != null) {
                String repoName = repoNames.poll();
                try {
                    // Get the results from the future (this will return only when the shutdown has completed) ...
                    repoFutures.poll().get();
                    // We've successfully shut down, so remove it from the map ...
                    repositories.remove(repoName);
                } catch (ExecutionException | InterruptedException e) {
                    // Leave the repository in the map so the engine keeps running.
                    Logger.getLogger(getClass()).error(e, JcrI18n.failedToShutdownDeployedRepository, repoName);
                }
            }
        }
        if (repositories.isEmpty()) {
            // All repositories were properly shutdown, so now stop the service for starting and shutting down the repos ...
            repositoryStarterService.shutdown();
            repositoryStarterService = null;
            // Do not clear the set of repositories, so that restarting will work just fine ...
            this.state = State.NOT_RUNNING;
        } else {
            // Could not shut down all repositories, so keep running ..
            this.state = State.RUNNING;
        }
    } catch (RuntimeException e) {
        // Any unexpected failure leaves the engine in the RUNNING state.
        this.state = State.RUNNING;
        throw e;
    } finally {
        lock.unlock();
    }
    return this.state != State.RUNNING;
}
|
python
|
def _update_limits_from_api(self):
    """
    Query EC2's DescribeAccountAttributes API action and
    update the network interface limit, as needed. Updates ``self.limits``.

    More info on the network interface limit, from the docs:
    'This limit is the greater of either the default limit (350) or your
    On-Demand Instance limit multiplied by 5.
    The default limit for On-Demand Instances is 20.'
    """
    self.connect()
    self.connect_resource()
    logger.info("Querying EC2 DescribeAccountAttributes for limits")
    attribs = self.conn.describe_account_attributes()
    for attrib in attribs['AccountAttributes']:
        if attrib['AttributeName'] == 'max-instances':
            val = attrib['AttributeValues'][0]['AttributeValue']
            # Only override the ENI limit when 5x the instance limit
            # exceeds the documented default; otherwise the default stands.
            if int(val) * 5 > DEFAULT_ENI_LIMIT:
                limit_name = 'Network interfaces per Region'
                self.limits[limit_name]._set_api_limit(int(val) * 5)
    logger.debug("Done setting limits from API")
|
java
|
/**
 * Serializes a single operation argument into a structure the JSON layer
 * can handle: JSONAware objects pass through unchanged, arrays, maps and
 * collections are converted element-wise, numbers and booleans are kept
 * as-is, and everything else falls back to {@code toString()}.
 *
 * @param pArg argument to serialize (may be null)
 * @return JSON-compatible representation, or null for null input
 */
protected Object serializeArgumentToJson(Object pArg) {
    if (pArg == null) {
        return null;
    } else if (pArg instanceof JSONAware) {
        return pArg;
    } else if (pArg.getClass().isArray()) {
        return serializeArray(pArg);
    } else if (pArg instanceof Map) {
        return serializeMap((Map) pArg);
    } else if (pArg instanceof Collection) {
        return serializeCollection((Collection) pArg);
    } else {
        // Primitives survive; anything else is stringified.
        return pArg instanceof Number || pArg instanceof Boolean ? pArg : pArg.toString();
    }
}
|
java
|
/**
 * Deletes a protection (AWS SDK generated operation wrapper).
 * Runs the standard pre-execution hook before dispatching the request.
 */
@Override
public DeleteProtectionResult deleteProtection(DeleteProtectionRequest request) {
    request = beforeClientExecution(request);
    return executeDeleteProtection(request);
}
|
python
|
def offers_to_pwl(self, offers):
    """ Updates the piece-wise linear total cost function using the given
    offer blocks.

    Based on off2case.m from MATPOWER by Ray Zimmerman, developed at PSERC
    Cornell. See U{http://www.pserc.cornell.edu/matpower/} for more info.
    """
    # Offers apply to generators only; loads are handled elsewhere.
    assert not self.is_load
    # Only apply offers associated with this generator.
    g_offers = [offer for offer in offers if offer.generator == self]
    # Filter out zero quantity offers.
    gt_zero = [offr for offr in g_offers if round(offr.quantity, 4) > 0.0]
    # Ignore withheld offers.
    valid = [offer for offer in gt_zero if not offer.withheld]
    # Split real-power and reactive-power offers.
    p_offers = [v for v in valid if not v.reactive]
    q_offers = [v for v in valid if v.reactive]

    if p_offers:
        self.p_cost = self._offbids_to_points(p_offers)
        self.pcost_model = PW_LINEAR
        self.online = True
    else:
        # No real-power offers: flat zero cost curve.
        self.p_cost = [(0.0, 0.0), (self.p_max, 0.0)]
        self.pcost_model = PW_LINEAR
        if q_offers:
            # Dispatch at zero real power without shutting down
            # if capacity offered for reactive power.
            self.p_min = 0.0
            self.p_max = 0.0
            self.online = True
        else:
            self.online = False

    if q_offers:
        self.q_cost = self._offbids_to_points(q_offers)
        self.qcost_model = PW_LINEAR
    else:
        self.q_cost = None#[(0.0, 0.0), (self.q_max, 0.0)]
        self.qcost_model = PW_LINEAR

    # With no usable offers at all the unit is taken offline.
    if not len(p_offers) and not len(q_offers):
        logger.info("No valid offers for generator [%s], shutting down." %
                    self.name)
        self.online = False

    self._adjust_limits()
|
python
|
def updateAARText(self):
    'Updates the displayed airspeed, altitude, climb rate Text'
    # Refresh each HUD text artist with the latest telemetry value.
    updates = (
        (self.airspeedText, 'AR: %.1f m/s' % self.airspeed),
        (self.altitudeText, 'ALT: %.1f m ' % self.relAlt),
        (self.climbRateText, 'CR: %.1f m/s' % self.climbRate),
    )
    for artist, label in updates:
        artist.set_text(label)
|
python
|
def _authorization_headers_valid(self, token_type, token):
    """Verify authorization headers for a request.

    Parameters
        token_type (str)
            Type of token to access resources.
        token (str)
            Server Token or OAuth 2.0 Access Token.

    Returns
        (bool)
            True iff token_type and token are valid.
    """
    if token_type not in http.VALID_TOKEN_TYPES:
        return False
    # Valid iff every character of the token is in the allowed alphabet
    # (base64/url-safe style characters).
    allowed = set(ascii_letters + digits + '_-=/+')
    return set(token) <= allowed
|
java
|
/**
 * Lays out children described by {@code layoutState} until the available
 * space is consumed, recycling views as needed while scrolling.
 *
 * @return the amount of available space consumed by this fill pass
 */
protected int fill(RecyclerView.Recycler recycler, LayoutState layoutState,
        RecyclerView.State state, boolean stopOnFocusable) {
    // max offset we should set is mFastScroll + available
    final int start = layoutState.mAvailable;
    if (layoutState.mScrollingOffset != LayoutState.SCOLLING_OFFSET_NaN) {
        // TODO ugly bug fix. should not happen
        if (layoutState.mAvailable < 0) {
            layoutState.mScrollingOffset += layoutState.mAvailable;
        }
        recycleByLayoutStateExpose(recycler, layoutState);
    }
    int remainingSpace = layoutState.mAvailable + layoutState.mExtra + recycleOffset;
    while (remainingSpace > 0 && layoutState.hasMore(state)) {
        // Lay out one chunk (view) per iteration, reusing the cached result.
        layoutChunkResultCache.resetInternal();
        layoutChunk(recycler, state, layoutState, layoutChunkResultCache);
        if (layoutChunkResultCache.mFinished) {
            break;
        }
        layoutState.mOffset += layoutChunkResultCache.mConsumed * layoutState.mLayoutDirection;
        /**
         * Consume the available space if:
         * * layoutChunk did not request to be ignored
         * * OR we are laying out scrap children
         * * OR we are not doing pre-layout
         */
        if (!layoutChunkResultCache.mIgnoreConsumed || mLayoutState.mScrapList != null
                || !state.isPreLayout()) {
            layoutState.mAvailable -= layoutChunkResultCache.mConsumed;
            // we keep a separate remaining space because mAvailable is important for recycling
            remainingSpace -= layoutChunkResultCache.mConsumed;
        }
        if (layoutState.mScrollingOffset != LayoutState.SCOLLING_OFFSET_NaN) {
            layoutState.mScrollingOffset += layoutChunkResultCache.mConsumed;
            if (layoutState.mAvailable < 0) {
                layoutState.mScrollingOffset += layoutState.mAvailable;
            }
            recycleByLayoutStateExpose(recycler, layoutState);
        }
        if (stopOnFocusable && layoutChunkResultCache.mFocusable) {
            break;
        }
    }
    if (DEBUG) {
        validateChildOrderExpose();
    }
    // Difference between starting and remaining availability = consumed space.
    return start - layoutState.mAvailable;
}
|
java
|
/**
 * Creates a fresh handle that takes over this handle's underlying Thrift
 * connection and bookkeeping timestamps. This handle gives up ownership of
 * the connection (its reference is nulled out) so only the new handle can
 * use it.
 *
 * @return the replacement handle wrapping the same underlying connection
 * @throws ThriftConnectionPoolException if the new handle cannot be created
 */
public ThriftConnectionHandle<T> recreateConnectionHandle() throws ThriftConnectionPoolException {
    ThriftConnectionHandle<T> handle = new ThriftConnectionHandle<>(this.thriftConnection,
            this.thriftConnectionPartition, this.thriftConnectionPool, true);
    // Carry over partition and lifecycle bookkeeping to the new handle.
    handle.thriftConnectionPartition = this.thriftConnectionPartition;
    handle.connectionCreationTimeInMs = this.connectionCreationTimeInMs;
    handle.connectionLastResetInMs = this.connectionLastResetInMs;
    handle.connectionLastUsedInMs = this.connectionLastUsedInMs;
    handle.possiblyBroken = this.possiblyBroken;
    // Release our reference so the connection has a single owner.
    this.thriftConnection = null;
    return handle;
}
|
python
|
def bip32_prv(self, s):
    """
    Parse a bip32 private key from a text string ("xprv" type).
    Return a :class:`BIP32 <pycoin.key.BIP32Node.BIP32Node>` or None.
    """
    payload = self.parse_b58_hashed(s)
    # Only accept payloads carrying the expected private-key prefix.
    if payload is not None and payload.startswith(self._bip32_prv_prefix):
        return self._network.keys.bip32_deserialize(payload)
    return None
|
python
|
def update_anomalous_score(self):
    """Update anomalous score.

    New anomalous score is the summation of weighted differences
    between current summary and reviews. The weights come from credibilities.

    Therefore, the new anomalous score is defined as

    .. math::
        {\\rm anomalous}(r)
        = \\sum_{p \\in P} \\mbox{review}(p) \\times \\mbox{credibility}(p) - 0.5

    where :math:`P` is a set of products reviewed by this reviewer,
    review(:math:`p`) and credibility(:math:`p`) are
    review and credibility of product :math:`p`, respectively.

    Returns:
        absolute difference between old anomalous score and updated one.
    """
    old = self.anomalous_score
    products = self._graph.retrieve_products(self)
    # Note: the -0.5 offset is applied inside the generator, i.e. once per
    # product, matching the per-term expression in the formula above.
    self.anomalous_score = sum(
        p.summary.difference(
            self._graph.retrieve_review(self, p)) * self._credibility(p) - 0.5
        for p in products
    )
    return abs(self.anomalous_score - old)
|
python
|
def _parse_requirements_file(requirements_file):
    '''
    Parse requirements.txt and return list suitable for
    passing to ``install_requires`` parameter in ``setup()``.

    Blank lines, comment lines (``#``) and include directives (``-r``)
    are skipped.
    '''
    parsed_requirements = []
    with open(requirements_file) as rfh:
        # Iterate the file object directly; readlines() would needlessly
        # materialize the whole file in memory first.
        for raw_line in rfh:
            line = raw_line.strip()
            if not line or line.startswith(('#', '-r')):
                continue
            parsed_requirements.append(line)
    return parsed_requirements
|
java
|
/**
 * Entry point: reads ZooKeeper configuration from system properties,
 * starts a {@link ZooKeeperServiceRunner} and then blocks forever.
 * Exits with status -1 when the port property is missing or not an int.
 */
public static void main(String[] args) throws Exception {
    int zkPort;
    boolean secureZK = false;
    String zkKeyStore;
    String zkKeyStorePasswd = null;
    String zkTrustStore = null;
    try {
        // Only the port is mandatory; parse failures land in the catch below.
        zkPort = Integer.parseInt(System.getProperty(PROPERTY_ZK_PORT));
        secureZK = Boolean.parseBoolean(System.getProperty(PROPERTY_SECURE_ZK, "false"));
        zkKeyStore = System.getProperty(PROPERTY_ZK_KEY_STORE);
        zkKeyStorePasswd = System.getProperty(PROPERTY_ZK_KEY_STORE_PASSWORD);
        zkTrustStore = System.getProperty(PROPERTY_ZK_TRUST_STORE);
    } catch (Exception ex) {
        System.out.println(String.format("Invalid or missing arguments (via system properties). Expected: %s(int). (%s)",
                PROPERTY_ZK_PORT, ex.getMessage()));
        System.exit(-1);
        return;
    }
    ZooKeeperServiceRunner runner = new ZooKeeperServiceRunner(zkPort, secureZK,
            zkKeyStore, zkKeyStorePasswd, zkTrustStore);
    runner.initialize();
    runner.start();
    // Keep the JVM alive; the ZooKeeper service runs on background threads.
    Thread.sleep(Long.MAX_VALUE);
}
|
python
|
def __call(self):
    """
    Calls the callback method with the current success and error counts.

    Any exception raised by the callback is logged and swallowed; the
    ``__called`` flag is only set when the callback completes cleanly.
    """
    try:
        if self.__callback is not None:
            self.__callback(self.__successes, self.__errors)
    except Exception as ex:
        self.__logger.exception("Error calling back count down "
                                "handler: %s", ex)
    else:
        # Mark as called only when the callback did not raise.
        self.__called = True
|
python
|
def SampleStop(self):
    """Stops measuring the CPU time.

    Accumulates the CPU time elapsed since the matching start call into
    ``total_cpu_time``. Does nothing when no measurement was started.
    """
    if self._start_cpu_time is not None:
        # time.clock() was removed in Python 3.8; process_time() is its
        # documented replacement for process-wide CPU time.
        self.total_cpu_time += time.process_time() - self._start_cpu_time
|
python
|
def set_Y(self, Y):
    """
    Set the output data of the model

    :param Y: output observations
    :type Y: np.ndarray or ObsArray
    """
    assert isinstance(Y, (np.ndarray, ObsAr))
    # Remember whether model updates were enabled, then suspend them so the
    # assignments below do not trigger work mid-way.
    state = self.update_model()
    self.update_model(False)
    if self.normalizer is not None:
        # NOTE(review): scale_by presumably (re)fits the normalizer on Y —
        # confirm against the normalizer implementation.
        self.normalizer.scale_by(Y)
        self.Y_normalized = ObsAr(self.normalizer.normalize(Y))
        self.Y = Y
    else:
        # No normalizer: the raw and "normalized" outputs are the same object.
        self.Y = ObsAr(Y) if isinstance(Y, np.ndarray) else Y
        self.Y_normalized = self.Y
    # Restore the previous update state.
    self.update_model(state)
|
python
|
def _UploadChunk(self, chunk):
    """Uploads a single chunk to the transfer store flow.

    The chunk data is compressed, charged against the session's byte
    accounting, and sent as a reply to the transfer store session.

    Args:
      chunk: A chunk to upload.

    Returns:
      A `BlobImageChunkDescriptor` object describing the uploaded chunk
      (SHA-256 digest, offset and uncompressed length).
    """
    blob = _CompressedDataBlob(chunk)
    # Charge the *uncompressed* size against the session's network budget.
    self._action.ChargeBytesToSession(len(chunk.data))
    self._action.SendReply(blob, session_id=self._TRANSFER_STORE_SESSION_ID)
    return rdf_client_fs.BlobImageChunkDescriptor(
        digest=hashlib.sha256(chunk.data).digest(),
        offset=chunk.offset,
        length=len(chunk.data))
|
java
|
/**
 * EMF-generated feature unset: restores the PGPRG feature to its default
 * value; all other features are delegated to the superclass.
 */
@Override
public void eUnset(int featureID) {
    switch (featureID) {
        case AfplibPackage.PAGE_POSITION_INFORMATION__PGPRG:
            setPGPRG(PGPRG_EDEFAULT);
            return;
    }
    super.eUnset(featureID);
}
|
java
|
/**
 * Returns every annotation from {@code annotations} whose annotation type is
 * itself meta-annotated with {@code neededAnnotationType}.
 *
 * @param annotations          the annotations to filter
 * @param neededAnnotationType the meta-annotation to look for
 * @return the matching annotations (possibly empty, never null)
 */
private static Set<Annotation> getAnnotations(final Annotation[] annotations, final Class<? extends Annotation> neededAnnotationType) {
    final Set<Annotation> ret = new HashSet<>();
    for (final Annotation annotation : annotations) {
        // Fixed: the original made an extra annotationType().getAnnotations()
        // call whose result was discarded; fetch the meta-annotations once.
        for (final Annotation metaAnnotation : annotation.annotationType().getAnnotations()) {
            if (metaAnnotation.annotationType().equals(neededAnnotationType)) {
                ret.add(annotation);
                // Already matched; no need to inspect further meta-annotations.
                break;
            }
        }
    }
    return ret;
}
|
java
|
/**
 * Builds the augmented data matrix: the first {@code x.length} rows hold the
 * inputs scaled by {@code c}; the trailing {@code p} rows form a diagonal
 * block with value {@code c * sqrt(lambda2)}.
 *
 * NOTE(review): presumably the standard L2/ridge augmentation trick — and it
 * assumes every row of {@code x} has at least {@code p} columns; confirm
 * with callers.
 */
private double[][] getAugmentedData(double[][] x) {
    double[][] ret = new double[x.length + p][p];
    double padding = c * Math.sqrt(lambda2);
    for (int i = 0; i < x.length; i++) {
        for (int j = 0; j < p; j++) {
            ret[i][j] = c * x[i][j];
        }
    }
    // Diagonal regularization block appended below the scaled data.
    for (int i = x.length; i < ret.length; i++) {
        ret[i][i - x.length] = padding;
    }
    return ret;
}
|
java
|
/**
 * Returns the parameters as a new list; callers receive a defensive copy so
 * they cannot modify the internal storage.
 */
public final Collection<String> getParameters() {
    final ArrayList<String> copy = new ArrayList<String>();
    if (parameters != null) {
        copy.addAll(Arrays.asList(parameters));
    }
    return copy;
}
|
java
|
/**
 * Resolves the given declaration's value against the fact bound to it in the
 * current working memory.
 */
public Object get(final Declaration declaration) {
    return declaration.getValue( (InternalWorkingMemory) workingMemory, getObject( getFactHandle( declaration ) ) );
}
|
java
|
/**
 * Custom serialization hook: writes the default fields, then the element
 * count, then each element in list order (the linked nodes themselves are
 * not serialized).
 */
private void writeObject(java.io.ObjectOutputStream s)
    throws java.io.IOException {
    // Write out any hidden serialization magic
    s.defaultWriteObject();
    // Write out size
    s.writeInt(size);
    // Write out all elements in the proper order.
    for (Node<E> x = first; x != null; x = x.next)
        s.writeObject(x.item);
}
|
java
|
/**
 * Probes the host with four timed TCP connection attempts and updates the
 * {@code online} and {@code latency} fields. On any failure the host is
 * marked offline and the latency reset to {@link Long#MAX_VALUE}.
 */
public void updateStatus() {
    String hostaddr = null;
    try {
        hostaddr = InetAddress.getByName(ip).getHostAddress();
    } catch (UnknownHostException e) {
        online = false;
        latency = Long.MAX_VALUE;
        return;
    }
    int total = 0;
    long totalPing = 0;
    // test ping 4 times
    int times = 4;
    while (total < times) {
        total++;
        long start = System.currentTimeMillis();
        SocketAddress sockaddr = new InetSocketAddress(hostaddr, port);
        try (Socket socket = new Socket()) {
            // 1 second connect timeout per attempt.
            socket.connect(sockaddr, 1000);
        } catch (Exception e) {
            online = false;
            // Fixed: also reset latency here (mirrors the unknown-host
            // branch); previously a stale latency from an earlier probe
            // survived while online was false.
            latency = Long.MAX_VALUE;
            return;
        }
        totalPing += (System.currentTimeMillis() - start);
    }
    online = true;
    latency = totalPing / total;
}
|
java
|
/**
 * Builds a singular-to-plural map from {singular, plural} pairs.
 *
 * @param wordMappings two-element arrays, each {singular, plural}
 * @return map from singular form to plural form
 */
public static Map<String, String> toMap(String[]... wordMappings) {
    final Map<String, String> mappings = new HashMap<String, String>();
    for (final String[] pair : wordMappings) {
        mappings.put(pair[0], pair[1]);
    }
    return mappings;
}
|
python
|
def _populate_cparams(self, img_array, mct=None, cratios=None, psnr=None,
                      cinema2k=None, cinema4k=None, irreversible=None,
                      cbsize=None, eph=None, grid_offset=None, modesw=None,
                      numres=None, prog=None, psizes=None, sop=None,
                      subsam=None, tilesize=None, colorspace=None):
    """Directs processing of write method arguments.

    Validates mutually exclusive options, fills an OpenJPEG encoder
    parameter struct from the keyword arguments, and stores it on
    ``self._cparams``.

    Parameters
    ----------
    img_array : ndarray
        Image data to be written to file.
    kwargs : dictionary
        Non-image keyword inputs provided to write method.

    Raises
    ------
    IOError
        If cinema2k/cinema4k is combined with other options, or if both
        cratios and psnr are given.
    """
    # cinema2k/cinema4k profiles fix most encoder settings, so they cannot
    # be combined with any other tuning option.
    other_args = (mct, cratios, psnr, irreversible, cbsize, eph,
                  grid_offset, modesw, numres, prog, psizes, sop, subsam)
    if (((cinema2k is not None or cinema4k is not None) and
         (not all([arg is None for arg in other_args])))):
        msg = ("Cannot specify cinema2k/cinema4k along with any other "
               "options.")
        raise IOError(msg)
    # Rate-based and quality-based allocation are mutually exclusive.
    if cratios is not None and psnr is not None:
        msg = "Cannot specify cratios and psnr options together."
        raise IOError(msg)
    if version.openjpeg_version_tuple[0] == 1:
        cparams = opj.set_default_encoder_parameters()
    else:
        cparams = opj2.set_default_encoder_parameters()
    outfile = self.filename.encode()
    num_pad_bytes = opj2.PATH_LEN - len(outfile)
    # NOTE(review): this pads with ASCII '0' characters, not NUL bytes
    # (b'\0') — confirm the C layer only reads up to the struct field size
    # or that b'\0' padding is intended here.
    outfile += b'0' * num_pad_bytes
    cparams.outfile = outfile
    if self.filename[-4:].endswith(('.jp2', '.JP2')):
        cparams.codec_fmt = opj2.CODEC_JP2
    else:
        cparams.codec_fmt = opj2.CODEC_J2K
    # Set defaults to lossless to begin.
    cparams.tcp_rates[0] = 0
    cparams.tcp_numlayers = 1
    cparams.cp_disto_alloc = 1
    cparams.irreversible = 1 if irreversible else 0
    # Cinema profiles delegate the remaining configuration and return early.
    if cinema2k is not None:
        self._cparams = cparams
        self._set_cinema_params('cinema2k', cinema2k)
        return
    if cinema4k is not None:
        self._cparams = cparams
        self._set_cinema_params('cinema4k', cinema4k)
        return
    if cbsize is not None:
        # cbsize is (height, width); the struct wants width/height fields.
        cparams.cblockw_init = cbsize[1]
        cparams.cblockh_init = cbsize[0]
    if cratios is not None:
        cparams.tcp_numlayers = len(cratios)
        for j, cratio in enumerate(cratios):
            cparams.tcp_rates[j] = cratio
        cparams.cp_disto_alloc = 1
    # SOP/EPH marker segments are flagged in the coding style bitfield.
    cparams.csty |= 0x02 if sop else 0
    cparams.csty |= 0x04 if eph else 0
    if grid_offset is not None:
        # grid_offset is (y, x).
        cparams.image_offset_x0 = grid_offset[1]
        cparams.image_offset_y0 = grid_offset[0]
    if modesw is not None:
        # Copy each of the 6 code-block style mode bits that is set.
        for shift in range(6):
            power_of_two = 1 << shift
            if modesw & power_of_two:
                cparams.mode |= power_of_two
    if numres is not None:
        cparams.numresolution = numres
    if prog is not None:
        cparams.prog_order = core.PROGRESSION_ORDER[prog.upper()]
    if psnr is not None:
        cparams.tcp_numlayers = len(psnr)
        for j, snr_layer in enumerate(psnr):
            cparams.tcp_distoratio[j] = snr_layer
        cparams.cp_fixed_quality = 1
    if psizes is not None:
        # Precinct sizes, given as (height, width) per resolution.
        for j, (prch, prcw) in enumerate(psizes):
            cparams.prcw_init[j] = prcw
            cparams.prch_init[j] = prch
        cparams.csty |= 0x01
        cparams.res_spec = len(psizes)
    if subsam is not None:
        # subsam is (dy, dx).
        cparams.subsampling_dy = subsam[0]
        cparams.subsampling_dx = subsam[1]
    if tilesize is not None:
        cparams.cp_tdx = tilesize[1]
        cparams.cp_tdy = tilesize[0]
        cparams.tile_size_on = opj2.TRUE
    if mct is None:
        # If the multi component transform was not specified, we infer
        # that it should be used if the color space is RGB.
        cparams.tcp_mct = 1 if self._colorspace == opj2.CLRSPC_SRGB else 0
    else:
        if self._colorspace == opj2.CLRSPC_GRAY:
            msg = ("Cannot specify usage of the multi component transform "
                   "if the colorspace is gray.")
            raise IOError(msg)
        cparams.tcp_mct = 1 if mct else 0
    self._validate_compression_params(img_array, cparams, colorspace)
    self._cparams = cparams
|
java
|
/**
 * Formats a list of relations as a single delimiter-separated string.
 *
 * @param value relations to format
 * @return delimited string, or null when the list is null or empty
 */
private String formatRelationList(List<Relation> value)
{
   if (value == null || value.isEmpty())
   {
      return null;
   }

   StringBuilder buffer = new StringBuilder();
   boolean first = true;
   for (Relation relation : value)
   {
      if (!first)
      {
         buffer.append(m_delimiter);
      }
      buffer.append(formatRelation(relation));
      first = false;
   }
   return buffer.toString();
}
|
python
|
def init_signal(self):
    """Create the shared signal object.

    Registers three flags, all initially False: ``feeder_exited``,
    ``parser_exited`` and ``reach_max_num``.
    """
    initial_flags = {
        'feeder_exited': False,
        'parser_exited': False,
        'reach_max_num': False,
    }
    self.signal = Signal()
    self.signal.set(**initial_flags)
|
python
|
def read_digits(self, start: int, char: str) -> int:
    """Return the new position in the source after reading digits."""
    body = self.source.body
    pos = start
    # Consume consecutive ASCII digits; slicing keeps `char` a (possibly
    # empty) string even at end of input.
    while "0" <= char <= "9":
        pos += 1
        char = body[pos : pos + 1]
    if pos == start:
        # No digit at all where at least one was required.
        raise GraphQLSyntaxError(
            self.source,
            pos,
            f"Invalid number, expected digit but got: {print_char(char)}.",
        )
    return pos
|
java
|
/**
 * Rewrites the link through the WebFlux exchange's URL transformation when
 * the context is a Spring WebFlux context; otherwise the link is returned
 * unchanged.
 */
@Override
protected String processLink(final IExpressionContext context, final String link) {
    if (context instanceof ISpringWebFluxContext) {
        final ServerWebExchange exchange = ((ISpringWebFluxContext) context).getExchange();
        return exchange.transformUrl(link);
    }
    return link;
}
|
java
|
/**
 * Sends DTMF tones on this resource by POSTing to its "dtmf" sub-resource.
 *
 * @param dtmf the DTMF digit string to transmit (sent as "dtmfOut")
 * @throws IOException on transport failure
 * @throws AppPlatformException on platform-level failure
 */
public void sendDtmf(final String dtmf) throws IOException, AppPlatformException {
    final Map<String, Object> params = new HashMap<String, Object>();
    params.put("dtmfOut", dtmf);
    final String uri = StringUtils.join(new String[]{
            getUri(),
            "dtmf"
    }, '/');
    client.post(uri, params);
}
|
python
|
def formatSub(self, string):
    """Convert sub-type specific formatting to GSP formatting.

    By default this replaces, 1-to-1, all occurrences of subtitle specific
    tags with their GSP equivalents. For performance reasons it does not
    parse 'string' in any other way, so exotic situations (e.g. an opening
    tag with no closing tag) are not detected. This generic behaviour is
    good enough for most formats; others should override this method.

    If 'gsp_nl' equals os.linesep, the string is checked against all
    newline styles (Linux, Windows and Mac).

    Trivia: GSP stands for "GenericSubParser", the former name of the
    SubParser class.
    """
    text = string.strip()
    # Escape literal braces so later format-style processing is safe.
    text = text.replace('{', '{{').replace('}', '}}')
    for opening, closing in (("gsp_b_", "_gsp_b"),
                             ("gsp_i_", "_gsp_i"),
                             ("gsp_u_", "_gsp_u")):
        text = self._formatWithTags(opening, closing, text)
    return self._formatLinesep(text)
|
python
|
def send(signal=Any, sender=Anonymous, *arguments, **named):
    """Send signal from sender to all connected receivers.

    signal -- (hashable) signal value, see connect for details

    sender -- the sender of the signal.
        If Any, only receivers registered for Any will receive the message.
        If Anonymous, only receivers registered to receive messages from
        Anonymous or Any will receive the message.
        Otherwise can be any python object (normally one registered with a
        connect if you actually want something to occur).

    arguments -- positional arguments passed to *all* receivers. May raise
        TypeErrors if the receivers do not accept them; they are applied
        before named arguments, so use with care.

    named -- named arguments, filtered per-receiver so that each receiver
        only gets those it accepts.

    Return a list of tuple pairs [(receiver, response), ...].

    If any receiver raises an error, the error propagates back through
    send, terminating the dispatch loop, so it is quite possible for not
    all receivers to be called.
    """
    results = []
    # robustApply filters `named` down to what each receiver accepts.
    for receiver in liveReceivers(getAllReceivers(sender, signal)):
        reply = robustapply.robustApply(
            receiver,
            signal=signal,
            sender=sender,
            *arguments,
            **named
        )
        results.append((receiver, reply))
    return results
|
java
|
/**
 * Looks up the converter name for the given field: the field's type is
 * collapsed to IHasId or Enum when it implements either, then used as the
 * key into the converter-names map.
 */
@Override
public final String getFor(final Class<?> pClass, final String pThingName) {
    final Field field = this.fieldsRapiHolder.getFor(pClass, pThingName);
    Class<?> lookupKey = field.getType();
    if (IHasId.class.isAssignableFrom(lookupKey)) {
        lookupKey = IHasId.class;
    } else if (Enum.class.isAssignableFrom(lookupKey)) {
        lookupKey = Enum.class;
    }
    return this.convertersNamesMap.get(lookupKey);
}
|
java
|
/**
 * Reads the S3 object's content and returns it as a JSON string; wraps any
 * failure in an unchecked {@link SdkClientException}.
 */
String toJsonString() {
    try {
        return from(s3obj.getObjectContent());
    } catch (Exception e) {
        throw new SdkClientException("Error parsing JSON: " + e.getMessage());
    }
}
|
java
|
/**
 * Extracts the configured integer column from the current row, preferring
 * lookup by name when one was set, otherwise by index.
 */
@Override
protected Integer handleRow(ResultSet rs) throws SQLException {
    return this.columnName != null
            ? rs.getInt(this.columnName)
            : rs.getInt(this.columnIndex);
}
|
java
|
/**
 * Hashes the given plain-text password with the default password hash
 * creator and the (optional) salt.
 *
 * @param aSalt optional salt; may be null
 * @param sPlainTextPassword the plain text password; must not be null
 * @return the hash bundled with the algorithm name and salt used
 */
@Nonnull
public PasswordHash createUserDefaultPasswordHash (@Nullable final IPasswordSalt aSalt,
                                                  @Nonnull final String sPlainTextPassword)
{
    ValueEnforcer.notNull (sPlainTextPassword, "PlainTextPassword");
    final IPasswordHashCreator aPHC = getDefaultPasswordHashCreator ();
    final String sPasswordHash = aPHC.createPasswordHash (aSalt, sPlainTextPassword);
    return new PasswordHash (aPHC.getAlgorithmName (), aSalt, sPasswordHash);
}
|
python
|
def create_or_update_tags(self, tags):
    """
    Creates new tags or updates existing tags for an Auto Scaling group.

    :type tags: List of :class:`boto.ec2.autoscale.tag.Tag`
    :param tags: The new or updated tags.
    """
    params = {}
    # The API numbers tag parameters starting at 1; use enumerate's
    # start= argument instead of computing i+1 by hand.
    for i, tag in enumerate(tags, start=1):
        tag.build_params(params, i)
    return self.get_status('CreateOrUpdateTags', params, verb='POST')
|
java
|
/**
 * Dispatches the Rhino id-function call for typed-array views: handles the
 * constructor, toString, get/set, subarray and the iterator symbol; any
 * other id is an error.
 */
@Override
public Object execIdCall(IdFunctionObject f, Context cx, Scriptable scope,
                         Scriptable thisObj, Object[] args)
{
    // Not one of ours — let the superclass try.
    if (!f.hasTag(getClassName())) {
        return super.execIdCall(f, cx, scope, thisObj, args);
    }
    int id = f.methodId();
    switch (id) {
        case Id_constructor:
            return js_constructor(cx, scope, args);

        case Id_toString:
            // Comma-joined string of all elements, like Array.prototype.toString.
            NativeTypedArrayView<T> realThis = realThis(thisObj, f);
            final int arrayLength = realThis.getArrayLength();
            final StringBuilder builder = new StringBuilder();
            if (arrayLength > 0) {
                builder.append(ScriptRuntime.toString(realThis.js_get(0)));
            }
            for (int i = 1; i < arrayLength; i++) {
                builder.append(',');
                builder.append(ScriptRuntime.toString(realThis.js_get(i)));
            }
            return builder.toString();

        case Id_get:
            if (args.length > 0) {
                return realThis(thisObj, f).js_get(ScriptRuntime.toInt32(args[0]));
            }
            throw ScriptRuntime.constructError("Error", "invalid arguments");

        case Id_set:
            if (args.length > 0) {
                NativeTypedArrayView<T> self = realThis(thisObj, f);
                // set(typedArray[, offset]) — bulk copy from another view.
                if (args[0] instanceof NativeTypedArrayView) {
                    int offset = isArg(args, 1) ? ScriptRuntime.toInt32(args[1]) : 0;
                    self.setRange((NativeTypedArrayView<T>)args[0], offset);
                    return Undefined.instance;
                }
                // set(array[, offset]) — bulk copy from a plain JS array.
                if (args[0] instanceof NativeArray) {
                    int offset = isArg(args, 1) ? ScriptRuntime.toInt32(args[1]) : 0;
                    self.setRange((NativeArray)args[0], offset);
                    return Undefined.instance;
                }
                if (args[0] instanceof Scriptable) {
                    // Tests show that we need to ignore a non-array object
                    return Undefined.instance;
                }
                // set(index, value) — single element write.
                if (isArg(args, 2)) {
                    return self.js_set(ScriptRuntime.toInt32(args[0]), args[1]);
                }
            }
            throw ScriptRuntime.constructError("Error", "invalid arguments");

        case Id_subarray:
            if (args.length > 0) {
                NativeTypedArrayView<T> self = realThis(thisObj, f);
                int start = ScriptRuntime.toInt32(args[0]);
                // End defaults to the view's length when omitted.
                int end = isArg(args, 1) ? ScriptRuntime.toInt32(args[1]) : self.length;
                return self.js_subarray(cx, scope, start, end);
            }
            throw ScriptRuntime.constructError("Error", "invalid arguments");

        case SymbolId_iterator:
            return new NativeArrayIterator(scope, thisObj, ARRAY_ITERATOR_TYPE.VALUES);
    }
    throw new IllegalArgumentException(String.valueOf(id));
}
|
java
|
/**
 * Returns the keys of the given group, or an empty set when the group has
 * no entries.
 *
 * @param group group name; null is treated as "" and whitespace is trimmed
 * @return the group's key set, or an immutable empty set
 */
public Set<String> keySet(String group) {
    group = StrUtil.nullToEmpty(group).trim();
    readLock.lock();
    try {
        final LinkedHashMap<String, String> valueMap = this.get(group);
        if (MapUtil.isNotEmpty(valueMap)) {
            // NOTE(review): this returns the map's live keySet view, used
            // after the read lock is released — confirm callers treat it
            // as read-only.
            return valueMap.keySet();
        }
    } finally {
        readLock.unlock();
    }
    return Collections.emptySet();
}
|
java
|
/**
 * Returns true when a contact between the two residues (in this order) is
 * recorded in the contacts map.
 */
public boolean hasContact(ResidueNumber resNumber1, ResidueNumber resNumber2) {
    return contacts.containsKey(new Pair<ResidueNumber>(resNumber1, resNumber2));
}
|
java
|
/**
 * Fetches the RNAT statistics from the NITRO service and returns the first
 * (and only expected) result.
 */
public static rnat_stats get(nitro_service service) throws Exception{
    rnat_stats obj = new rnat_stats();
    rnat_stats[] response = (rnat_stats[])obj.stat_resources(service);
    return response[0];
}
|
python
|
def encrypt(text):
    """Encrypt a string using an encryption key based on the django SECRET_KEY.

    The input is padded to the cipher's block size via ``to_blocksize``
    before encryption.
    """
    crypt = EncryptionAlgorithm.new(_get_encryption_key())
    return crypt.encrypt(to_blocksize(text))
|
java
|
/**
 * Prints every classpath entry to the given stream, each followed by the
 * supplied separator.
 *
 * @param aPS            destination stream; must not be null
 * @param sItemSeparator separator printed after each entry
 */
public static void printClassPathEntries (@Nonnull final PrintStream aPS, @Nonnull final String sItemSeparator)
{
    forAllClassPathEntries (x -> {
        aPS.print (x);
        aPS.print (sItemSeparator);
    });
}
|
java
|
/**
 * Returns all "end" child elements of this flow, each wrapped in an
 * {@code End} view.
 */
public List<End<Flow<T>>> getAllEnd()
{
   List<End<Flow<T>>> result = new ArrayList<End<Flow<T>>>();
   for (Node node : childNode.get("end"))
   {
      result.add(new EndImpl<Flow<T>>(this, "end", childNode, node));
   }
   return result;
}
|
python
|
def cmd_zf(self, ch=None):
    """zf ch=chname

    Zoom the image for the given viewer/channel to fit the window.
    """
    viewer = self.get_viewer(ch)
    if viewer is None:
        self.log("No current viewer/channel.")
    else:
        viewer.zoom_fit()
        self.log("zoom=%f" % (viewer.get_zoom()))
|
python
|
def tree_to_gexf(tree:'BubbleTree') -> str:
    """Return the GEXF representation of the given power graph.

    See https://gephi.org/gexf/format/index.html for the format doc.
    """
    def node_lines(node:str):
        """Yield the XML lines describing given node, recursively."""
        if tree.inclusions[node]:  # powernode: nest its children
            yield '<node id="{}" label="{}">'.format(node, node)
            yield '<nodes>'
            for child in tree.inclusions[node]:
                yield from node_lines(child)
            yield '</nodes>'
            yield '</node>'
        else:  # regular (leaf) node
            yield '<node id="{}" label="{}"/>'.format(node, node)

    # Build the full node hierarchy from the roots.
    nodes_xml = '\n'.join('\n'.join(node_lines(root)) for root in tree.roots)
    # Emit each undirected edge once; the edges dict is complete/symmetric,
    # so keeping only source <= target avoids duplicates.
    edge_parts = []
    for idx, (source, targets) in enumerate(tree.edges.items()):
        for target in targets:
            if source <= target:
                edge_parts.append(
                    '<edge id="{}" source="{}" target="{}" />\n'.format(
                        idx, source, target))
    return GEXF_TEMPLATE.format(
        'directed' if tree.oriented else 'undirected',
        nodes_xml,
        ''.join(edge_parts)
    )
|
java
|
/**
 * Parses a comma-separated list of client addresses, keeping only entries
 * that are valid IP addresses.
 *
 * @param address   single-address header value (legacy clients)
 * @param addresses multi-address header value; when absent, {@code address}
 *                  is used instead
 * @return the valid addresses, or an empty list when none were supplied
 */
public static List<String> parse(String address, String addresses) {
    List<String> result;
    // backwards compatibility - older clients only send a single address in the single address header and don't supply the multi-address header
    if (addresses == null || addresses.isEmpty()) {
        addresses = address;
    }
    if (addresses == null) {
        result = Collections.emptyList();
    }
    else {
        String[] parts = addresses.split(",");
        result = new ArrayList<String>(parts.length);
        for (String part : parts) {
            part = part.trim();
            // Silently drop anything that is not a valid IP address.
            if (NetworkAddress.isValidIPAddress(part)) {
                result.add(part);
            }
        }
    }
    return result;
}
|
python
|
def process_action(self):
    """
    Process the action and update the related object, returns a boolean if a change is made.
    """
    unpublishing = self.publish_version == self.UNPUBLISH_CHOICE
    actioned = self._unpublish() if unpublishing else self._publish()
    # Only log if an action was actually taken.
    if actioned:
        self._log_action()
    return actioned
|
java
|
/**
 * Returns the partition-key columns, computing and caching them on first
 * access (columns flagged as partition-key members, sorted by
 * {@code PartitionKeyComparator}).
 *
 * NOTE(review): the lazy initialization is not synchronized — confirm this
 * is only called from a single thread or after safe publication.
 */
public Collection<FullColumnDescription> getPartitionKeyColumnDescriptions()
{
    if (partitionKeys == null)
    {
        partitionKeys = new LinkedList<>();
        for (FullColumnDescription col : fullColumnDescriptions.values())
        {
            if (col.isPartitionKeyMember())
            {
                this.partitionKeys.add(col);
            }
        }
        Collections.sort(this.partitionKeys, PartitionKeyComparator.INSTANCE);
    }
    return this.partitionKeys;
}
|
python
|
def convertLengthList(self, svgAttr):
    """Convert a list of lengths.

    Splits *svgAttr* with ``split_attr_list`` and converts each item
    with ``convertLength``.
    """
    return [self.convertLength(a) for a in self.split_attr_list(svgAttr)]
|
python
|
def try_set_count(self, count):
    """
    Sets the count to the given value if the current count is zero. If count is not zero, this method does nothing
    and returns ``false``.

    :param count: (int), the number of times count_down() must be invoked before threads can pass through await().
    :return: (bool), ``true`` if the new count was set, ``false`` if the current count is not zero.
    :raises AssertionError: if ``count`` is negative.
    """
    check_not_negative(count, "count can't be negative")
    # The compare-and-set itself happens server-side via the codec invocation.
    return self._encode_invoke(count_down_latch_try_set_count_codec, count=count)
|
python
|
def make_diffuse_comp_info_dict(self, galkey):
    """ Make a dictionary maping from merged component to information about that component

    Parameters
    ----------

    galkey : str
        A short key identifying the galprop parameters
    """
    rings = self.read_galprop_rings_yaml(galkey)
    ring_limits = rings.get('ring_limits')
    comp_dict = rings.get('diffuse_comp_dict')
    remove_rings = rings.get('remove_rings', [])
    n_rings = len(ring_limits) - 1
    info_dict = {}
    for source_key in sorted(comp_dict.keys()):
        for iring in range(n_rings):
            source_name = "%s_%i" % (source_key, iring)
            # Skip rings explicitly excluded by the configuration.
            if source_name in remove_rings:
                continue
            full_key = "%s_%s" % (source_name, galkey)
            info_dict[full_key] = self.make_diffuse_comp_info(
                source_name, galkey)
    self._diffuse_comp_info_dicts[galkey] = info_dict
    return info_dict
|
python
|
def issue(self, CorpNum, ItemCode, MgtKey, Memo=None, EmailSubject=None, UserID=None):
    """Issue a statement.

    NOTE(review): the original Korean docstring and the error-message
    literals below are mojibake from a broken text extraction; restore the
    intended text from the upstream Popbill SDK.

    args
        CorpNum : Popbill member corporate registration number
        ItemCode : statement type code
            [121], [122], [123], [124], [125], [126]
        MgtKey : partner document management key
        Memo : processing memo
        EmailSubject : issue-mail subject (default subject used when omitted)
        UserID : Popbill member user id
    return
        processing result. consist of code and message
    raise
        PopbillException
    """
    if MgtKey == None or MgtKey == "":
        raise PopbillException(-99999999, "๊ด๋ฆฌ๋ฒํธ๊ฐ ์๋ ฅ๋์ง ์์์ต๋๋ค.")
    if ItemCode == None or ItemCode == "":
        raise PopbillException(-99999999, "๋ช์ธ์ ์ข๋ฅ ์ฝ๋๊ฐ ์๋ ฅ๋์ง ์์์ต๋๋ค.")
    req = {}
    postData = ""
    if Memo != None and Memo != '':
        req["memo"] = Memo
    if EmailSubject != None and EmailSubject != '':
        req["emailSubject"] = EmailSubject
    postData = self._stringtify(req)
    return self._httppost('/Statement/' + str(ItemCode) + '/' + MgtKey, postData, CorpNum, UserID, "ISSUE")
|
python
|
def data_check(self, participant):
    """Make sure each trial contains exactly one chosen info."""
    infos = participant.infos()
    chosen = [info for info in infos if info.chosen]
    # Valid iff exactly half of the participant's infos are chosen.
    return len(chosen) * 2 == len(infos)
|
java
|
/**
 * Returns the first existing "constraint-definition" child wrapped in a
 * typed view, creating a new one when none exists yet.
 */
public ConstraintDefinitionType<ValidationMappingDescriptor> getOrCreateConstraintDefinition()
{
    List<Node> nodeList = model.get("constraint-definition");
    if (nodeList != null && nodeList.size() > 0)
    {
        return new ConstraintDefinitionTypeImpl<ValidationMappingDescriptor>(this, "constraint-definition", model, nodeList.get(0));
    }
    return createConstraintDefinition();
}
|
python
|
def _write(self, s, s_length=None, flush=False, ignore_overflow=False,
           err_msg=None):
    """Write ``s``

    :type s: str|unicode
    :param s: String to write
    :param s_length: Custom length of ``s``; defaults to ``len(s)``
    :param flush: Set this to flush the terminal stream after writing
    :param ignore_overflow: Set this to ignore if s will exceed
        the terminal's width
    :param err_msg: The error message given to WidthOverflowError
        if it is triggered
    :raises WidthOverflowError: when the string would exceed the terminal
        width and ``ignore_overflow`` is not set
    """
    if not ignore_overflow:
        # A caller-supplied length overrides len(s) (e.g. when s contains
        # zero-width escape sequences).
        s_length = len(s) if s_length is None else s_length
        if err_msg is None:
            err_msg = (
                "Terminal has {} columns; attempted to write "
                "a string {} of length {}.".format(
                    self.columns, repr(s), s_length)
            )
        ensure(s_length <= self.columns, WidthOverflowError, err_msg)
    self.cursor.write(s)
    if flush:
        self.cursor.flush()
|
java
|
/**
 * Marshalls the given {@code FileGroupSettings} fields into the protocol
 * marshaller.
 *
 * @throws SdkClientException when the input is null or marshalling fails
 */
public void marshall(FileGroupSettings fileGroupSettings, ProtocolMarshaller protocolMarshaller) {
    if (fileGroupSettings == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(fileGroupSettings.getDestination(), DESTINATION_BINDING);
        protocolMarshaller.marshall(fileGroupSettings.getDestinationSettings(), DESTINATIONSETTINGS_BINDING);
    } catch (Exception e) {
        // Wrap any marshaller failure in the SDK's unchecked exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def GetCodeObjectAtLine(module, line):
    """Searches for a code object at the specified line in the specified module.

    Args:
      module: module to explore.
      line: 1-based line number of the statement.

    Returns:
      (True, Code object) on success or (False, (prev_line, next_line)) on
      failure, where prev_line and next_line are the closest lines with code above
      and below the specified line, or None if they do not exist.
    """
    if not hasattr(module, '__file__'):
        # Without a file there is no source to map lines against.
        return (False, (None, None))
    prev_line = 0
    next_line = six.MAXSIZE
    for code_object in _GetModuleCodeObjects(module):
        for co_line_number in _GetLineNumbers(code_object):
            if co_line_number == line:
                return (True, code_object)
            elif co_line_number < line:
                prev_line = max(prev_line, co_line_number)
            elif co_line_number > line:
                next_line = min(next_line, co_line_number)
                # NOTE(review): stops scanning this code object at the first
                # line past the target — assumes its line numbers ascend;
                # confirm for the generators used here.
                break
    # Translate the sentinels back to None when no line was found.
    prev_line = None if prev_line == 0 else prev_line
    next_line = None if next_line == six.MAXSIZE else next_line
    return (False, (prev_line, next_line))
|
java
|
/**
 * Materializes this node's children in the trie's node array from the given
 * per-character counts (zero-count entries are skipped), then links this
 * node to the new child range and releases the temporary data.
 *
 * @param counts per-character occurrence counts, in key order
 */
void writeChildren(TreeMap<Character, Long> counts) {
    // Children are appended contiguously starting at firstIndex.
    int firstIndex = trie.nodes.length();
    counts.forEach((k, v) -> {
        if (v > 0) trie.nodes.add(new NodeData(k, (short) -1, -1, v, -1));
    });
    short length = (short) (trie.nodes.length() - firstIndex);
    trie.ensureParentIndexCapacity(firstIndex, length, index);
    // Point this node at its child range.
    update(n -> n.setFirstChildIndex(firstIndex).setNumberOfChildren(length));
    // The temporary buffer is no longer needed once children are written.
    data = null;
}
|
java
|
/**
 * Rewrites the wrapped context query; returns this query unchanged when the
 * context query did not rewrite, otherwise a new {@code DerefQuery}
 * wrapping the rewritten context.
 */
@Override
public Query rewrite(IndexReader reader) throws IOException
{
    Query cQuery = contextQuery.rewrite(reader);
    if (cQuery == contextQuery) // NOSONAR
    {
        return this;
    }
    else
    {
        return new DerefQuery(cQuery, refProperty, nameTest, version, nsMappings);
    }
}
|
python
|
def parse_template(self, pattern):
    """Parse template.

    Walks the original template character by character, expanding format
    curly brackets (when ``use_format`` is set) and backslash references,
    accumulating literal runs into ``literal_slots`` and finally computing
    the group/literal split via ``regex_parse_template``.
    """
    # Bytes templates are processed as latin-1 text and re-encoded at the end.
    i = _util.StringIter((self._original.decode('latin-1') if self.is_bytes else self._original))
    iter(i)

    self.result = [""]

    while True:
        try:
            t = next(i)
            if self.use_format and t in _CURLY_BRACKETS:
                self.handle_format(t, i)
            elif t == '\\':
                try:
                    t = next(i)
                    self.reference(t, i)
                except StopIteration:
                    # Trailing lone backslash: keep it literally, then
                    # re-raise so the outer loop terminates.
                    self.result.append(t)
                    raise
            else:
                self.result.append(t)
        except StopIteration:
            break

    if len(self.result) > 1:
        # Flush the pending literal run into its slot.
        self.literal_slots.append("".join(self.result))
        del self.result[:]
        self.result.append("")
        self.slot += 1

    if self.is_bytes:
        self._template = "".join(self.literal_slots).encode('latin-1')
    else:
        self._template = "".join(self.literal_slots)
    self.groups, self.literals = self.regex_parse_template(self._template, pattern)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.