language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def attention_lm_decoder(decoder_input,
                         decoder_self_attention_bias,
                         hparams,
                         name="decoder"):
  """A stack of attention_lm layers.

  Each layer applies masked self-attention followed by a position-wise
  feed-forward network, both wrapped in the configured layer_preprocess /
  layer_postprocess (residual + normalization) scheme.

  Args:
    decoder_input: a Tensor
    decoder_self_attention_bias: bias Tensor for self-attention
      (see common_attention.attention_bias())
    hparams: hyperparameters for model
    name: a string used as the variable scope name

  Returns:
    y: a Tensor (the pre-processed output of the final layer)
  """
  x = decoder_input
  with tf.variable_scope(name):
    for layer in range(hparams.num_hidden_layers):
      with tf.variable_scope("layer_%d" % layer):
        with tf.variable_scope("self_attention"):
          # memory_antecedent=None means self-attention; causal masking is
          # supplied via decoder_self_attention_bias.
          y = common_attention.multihead_attention(
              common_layers.layer_preprocess(
                  x, hparams), None, decoder_self_attention_bias,
              # Key/value channel counts fall back to hidden_size when unset.
              hparams.attention_key_channels or hparams.hidden_size,
              hparams.attention_value_channels or hparams.hidden_size,
              hparams.hidden_size, hparams.num_heads, hparams.attention_dropout)
          x = common_layers.layer_postprocess(x, y, hparams)
        with tf.variable_scope("ffn"):
          y = common_layers.conv_hidden_relu(
              common_layers.layer_preprocess(x, hparams),
              hparams.filter_size,
              hparams.hidden_size,
              dropout=hparams.relu_dropout)
          x = common_layers.layer_postprocess(x, y, hparams)
  # Final normalization so downstream consumers see a pre-processed tensor.
  return common_layers.layer_preprocess(x, hparams)
|
java
|
/**
 * Clamps {@code value} into the inclusive range [{@code minValue}, {@code maxValue}].
 *
 * @param value    the candidate value
 * @param minValue the lower bound
 * @param maxValue the upper bound
 * @return the clamped value
 */
public static float getFloat(final float value, final float minValue, final float maxValue) {
    final float lowerBounded = Math.max(minValue, value);
    return Math.min(maxValue, lowerBounded);
}
|
python
|
def is_address_guard(self, address):
    """
    Determines if an address belongs to a guard page.
    @note: Returns always C{False} for kernel mode addresses.
    @type address: int
    @param address: Memory address to query.
    @rtype: bool
    @return: C{True} if the address belongs to a guard page.
    @raise WindowsError: An exception is raised on error.
    """
    try:
        region = self.mquery(address)
    except WindowsError:
        # ERROR_INVALID_PARAMETER means the address is not mapped in user
        # space (e.g. a kernel-mode address): report "not a guard page".
        error = sys.exc_info()[1]
        if error.winerror != win32.ERROR_INVALID_PARAMETER:
            raise
        return False
    return region.is_guard()
|
python
|
def _make_meridional_diffusion_matrix(K, lataxis):
    """Calls :func:`_make_diffusion_matrix` with appropriate weights for
    the meridional diffusion case.

    :param array K: dimensionless diffusivities at cell boundaries
        of diffusion axis ``lataxis``
    :param axis lataxis: latitude axis where diffusion is occurring

    Weights are computed as the following:

    .. math::

        \\begin{array}{ll}
            w_1 &= \\cos(\\textrm{bounds}) \\\\
                &= \\left[ \\cos(b_0), \\cos(b_1), \\cos(b_2), \\ ... \\ , \\cos(b_{n-1}), \\cos(b_n) \\right] \\\\
            w_2 &= \\cos(\\textrm{points}) \\\\
                &= \\left[ \\cos(p_0), \\cos(p_1), \\cos(p_2), \\ ... \\ , \\cos(p_{n-1}) \\right]
        \\end{array}

    when bounds and points from ``lataxis`` are written as

    .. math::

        \\begin{array}{ll}
            \\textrm{bounds} &= [b_0, b_1, b_2, \\ ... \\ , b_{n-1}, b_{n}] \\\\
            \\textrm{points} &= [p_0, p_1, p_2, \\ ... \\ , p_{n-1}]
        \\end{array}

    Giving this input to :func:`_make_diffusion_matrix` results in a matrix like:

    .. math::

        \\textrm{diffTriDiag}=
            \\left[ \\begin{array}{cccccc}
                1+\\frac{u_1 }{\\cos(p_0)} & -\\frac{u_1}{\\cos(p_0)} & 0 & & ... & 0 \\\\
                -\\frac{u_1}{\\cos(p_1)} & 1+\\frac{u_1 + u_2}{\\cos(p_1)} & -\\frac{u_2}{\\cos(b_1)} & 0 & ... & 0 \\\\
                0 & -\\frac{u_2}{\\cos(p_2)} & 1+\\frac{u_2 + u_3}{\\cos(p_2)} & -\\frac{u_3}{\\cos(p_2)} &... & 0 \\\\
                & & \\ddots & \\ddots & \\ddots & \\\\
                0 & 0 & ... & -\\frac{u_{n-2}}{\\cos(p_{n-2})} & 1+\\frac{u_{n-2} + u_{n-1}}{\\cos(p_{n-2})} & -\\frac{u_{n-1}}{\\cos(p_{n-2})} \\\\
                0 & 0 & ... & 0 & -\\frac{u_{n-1}}{\\cos(p_{n-1})} & 1+\\frac{u_{n-1}}{\\cos(p_{n-1})} \\\\
            \\end{array} \\right]

    with the substitute of:

    .. math::

        u_i = \\cos(b_i) K_i
    """
    # Axis values are in degrees; the cosine weights need radians.
    phi_stag = np.deg2rad(lataxis.bounds)
    phi = np.deg2rad(lataxis.points)
    weight1 = np.cos(phi_stag)  # weights at cell boundaries (staggered)
    weight2 = np.cos(phi)       # weights at cell centers
    diag = _make_diffusion_matrix(K, weight1, weight2)
    return diag, weight1, weight2
|
java
|
/**
 * UIMA-generated feature getter: returns the value of the
 * {@code pointerList} feature of this annotation as an {@code FSArray}.
 *
 * @return the feature structure array stored in {@code pointerList}
 */
public FSArray getPointerList() {
    // Generated type-system guard: fail fast if the feature is missing.
    if (MMAXAnnotation_Type.featOkTst && ((MMAXAnnotation_Type)jcasType).casFeat_pointerList == null)
        jcasType.jcas.throwFeatMissing("pointerList", "de.julielab.jules.types.mmax.MMAXAnnotation");
    return (FSArray)(jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr, ((MMAXAnnotation_Type)jcasType).casFeatCode_pointerList)));}
|
java
|
@Override
public void toXML(final StringBuilder builder,
        final ConfigVerification errors)
{
    // Only emit the <output> section when SQL-database output is disabled.
    if (!controller.isEnableSQLDatabaseOutput()) {
        boolean zipComp = controller.isZipCompressionEnabled();
        boolean multiFile = controller.isMultipleOutputFiles();
        builder.append("\t<output>\r\n");
        builder.append("\t\t<OUTPUT_MODE>");
        OutputCompressionEnum comp = controller.getOutputCompression();
        switch (comp) {
        case None:
            builder.append(OutputType.UNCOMPRESSED);
            break;
        case BZip2:
            builder.append(OutputType.BZIP2);
            break;
        case SevenZip:
            builder.append(OutputType.SEVENZIP);
            break;
        default:
            throw new RuntimeException("Illegal Output Compression Mode");
        }
        builder.append("</OUTPUT_MODE>\r\n");
        String path = this.outputPathField.getText();
        if (path == null || path.equals("")) {
            errors.add(new ConfigItem(ConfigItemTypes.WARNING,
                    ConfigErrorKeys.MISSING_VALUE,
                    "No output path has been set."));
            // BUGFIX: the original only warned and then dereferenced the
            // (possibly null) path below, throwing a NullPointerException.
            // Normalize to an empty string so the XML is still produced.
            path = "";
        }
        // Ensure the path ends with a separator (only if it looks like a path).
        if (!path.endsWith(File.separator) && path.contains(File.separator)) {
            path += File.separator;
        }
        builder.append("\t\t<PATH>\"" + path + "\"</PATH>\r\n");
        if (multiFile) {
            // -1 marks "no valid limit parsed"; errors are reported below.
            long sizeLimit = -1;
            String text = outputSizeLimitField.getText();
            if (text.length() == 0) {
                errors.add(new ConfigItem(ConfigItemTypes.ERROR,
                        ConfigErrorKeys.MISSING_VALUE,
                        "The output limit is missing."));
            }
            else {
                try {
                    sizeLimit = Long.parseLong(text);
                    if (sizeLimit < 100 * 1024 * 1024) {
                        errors.add(new ConfigItem(ConfigItemTypes.ERROR,
                                ConfigErrorKeys.VALUE_OUT_OF_RANGE,
                                "The output limit has to be at"
                                + " least 100MB"));
                    }
                }
                catch (NumberFormatException nfe) {
                    errors.add(new ConfigItem(ConfigItemTypes.ERROR,
                            ConfigErrorKeys.ILLEGAL_INPUT,
                            "NumberFormatException for the"
                            + " output limit"));
                }
            }
            // Uncompressed output limits the SQL file size; any compressed
            // mode limits the archive size instead.
            switch (comp) {
            case None:
                builder.append("\t\t<LIMIT_SQL_FILE_SIZE>" + sizeLimit
                        + "</LIMIT_SQL_FILE_SIZE>\r\n");
                break;
            default:
                builder.append("\t\t<LIMIT_SQL_ARCHIVE_SIZE>" + sizeLimit
                        + "</LIMIT_SQL_ARCHIVE_SIZE>\r\n");
                break;
            }
        }
        builder.append("\t\t<MODE_ZIP_COMPRESSION_ENABLED>" + zipComp
                + "</MODE_ZIP_COMPRESSION_ENABLED>\r\n");
        if (controller.isEnableDataFileOutput()) {
            builder.append("\t\t<MODE_DATAFILE_OUTPUT>true</MODE_DATAFILE_OUTPUT>\r\n");
        }else{
            builder.append("\t\t<MODE_DATAFILE_OUTPUT>false</MODE_DATAFILE_OUTPUT>\r\n");
        }
        builder.append("\t</output>\r\n");
    }
}
|
java
|
/**
 * Inserts {@code filter} immediately before the already-registered filter of
 * type {@code beforeFilter}.
 *
 * @param filter       the filter to insert
 * @param beforeFilter class of the anchor filter to insert in front of
 * @throws FilterAddException if no filter of type {@code beforeFilter} was added
 */
public void addFilterBefore(IRuleFilter filter, Class<? extends IRuleFilter> beforeFilter) {
    final int anchorIndex = getIndexOfClass(filters, beforeFilter);
    if (anchorIndex == -1) {
        throw new FilterAddException("filter " + beforeFilter.getSimpleName() + " has not been added");
    }
    filters.add(anchorIndex, filter);
}
|
python
|
def _create_file_racefree(self, file):
"""
Creates a file, but fails if the file already exists.
This function will thus only succeed if this process actually creates
the file; if the file already exists, it will cause an OSError,
solving race conditions.
:param str file: File to create.
"""
write_lock_flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY
os.open(file, write_lock_flags)
|
java
|
/**
 * Asynchronously commits the offset of the given message for its
 * topic/partition.  The committed offset is {@code msg.offset() + 1}, i.e.
 * the next record to be consumed.
 *
 * @param msg the message whose position should be committed
 * @return {@code true} if the commit was issued; {@code false} when this
 *         consumer is not subscribed to the message's topic
 * @throws KafkaException if the underlying commit call fails
 */
public boolean commitAsync(KafkaMessage msg) {
    KafkaConsumer<String, byte[]> consumer = _getConsumer(msg.topic());
    // KafkaConsumer is not thread-safe; serialize all access to it.
    synchronized (consumer) {
        Set<String> subscription = consumer.subscription();
        if (subscription == null || !subscription.contains(msg.topic())) {
            // this consumer has not subscribed to the topic
            return false;
        } else {
            try {
                Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
                // Commit offset+1: Kafka's committed offset is the NEXT offset to read.
                offsets.put(new TopicPartition(msg.topic(), msg.partition()),
                        new OffsetAndMetadata(msg.offset() + 1));
                consumer.commitAsync(offsets, new OffsetCommitCallback() {
                    @Override
                    public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets,
                            Exception e) {
                        // Async completion: failures are logged, not propagated.
                        if (e != null) {
                            LOGGER.error(e.getMessage(), e);
                        }
                    }
                });
            } catch (WakeupException e) {
                // Deliberately ignored: wakeup() is the normal shutdown signal.
            } catch (Exception e) {
                throw new KafkaException(e);
            }
            return true;
        }
    }
}
|
python
|
def QA_indicator_PBX(DataFrame, N1=3, N2=5, N3=8, N4=13, N5=18, N6=24):
    """PBX ("waterfall line", 瀑布线) indicator.

    For each period N, PBX = (EMA(C, N) + EMA(C, 2N) + EMA(C, 4N)) / 3,
    computed over the close price for the six configured periods.
    """
    close = DataFrame['close']
    columns = {}
    for i, period in enumerate((N1, N2, N3, N4, N5, N6), start=1):
        blended = (EMA(close, period)
                   + EMA(close, 2 * period)
                   + EMA(close, 4 * period)) / 3
        columns['PBX%d' % i] = blended
    return pd.DataFrame(columns)
|
python
|
def _handle_unknown_method(self, method, remainder, request=None):
    '''
    Routes undefined actions (like RESET) to the appropriate controller.

    :param method: lower-cased name of the non-standard HTTP method.
    :param remainder: remaining URL path segments to route.
    :param request: current request object; ``None`` triggers a
        deprecation warning (legacy call signature).
    :returns: ``(controller, remainder)`` tuple or the result of a
        sub-controller lookup.
    '''
    if request is None:
        self._raise_method_deprecation_warning(self._handle_unknown_method)
    # try finding a post_{custom} or {custom} method first
    controller = self._find_controller('post_%s' % method, method)
    if controller:
        return controller, remainder
    # if no controller exists, try routing to a sub-controller; note that
    # since this isn't a safe GET verb, any local exposes are 405'd
    if remainder:
        if self._find_controller(remainder[0]):
            # A locally exposed method exists but can't serve this verb.
            abort(405)
        sub_controller = self._lookup_child(remainder[0])
        if sub_controller:
            return lookup_controller(sub_controller, remainder[1:],
                                     request)
    # Nothing matched: the method is not allowed on this resource.
    abort(405)
|
java
|
/**
 * Obtains the current {@code PaxDate} from the specified clock.
 *
 * @param clock the clock to use, not null
 * @return the current Pax date derived from the ISO epoch day
 */
public static PaxDate now(Clock clock) {
    final long epochDay = LocalDate.now(clock).toEpochDay();
    return PaxDate.ofEpochDay(epochDay);
}
|
java
|
/**
 * Returns a predicate that is the logical AND of this predicate and
 * {@code other}.  If {@code other} is null or this same instance, this
 * predicate is returned unchanged.
 *
 * @param other the second predicate to evaluate (short-circuited)
 * @return the composed predicate
 */
public Predicate<T> and( final Predicate<T> other ) {
    if (other == null || other == this) {
        return this;
    }
    final Predicate<T> first = this;
    return new Predicate<T>() {
        @Override
        public boolean test( T input ) {
            if (!first.test(input)) {
                return false;
            }
            return other.test(input);
        }
    };
}
|
python
|
def __insert(self, key, value):
    '''
    Insert a new key to database
    '''
    # Refuse to overwrite: an existing key is a caller error, not a crash.
    if key in self:
        getLogger().warning("Cache entry exists, cannot insert a new entry with key='{key}'".format(key=key))
        return False
    with self.get_conn() as conn:
        try:
            cursor = conn.cursor()
            cursor.execute("INSERT INTO cache_entries (key, value) VALUES (?,?)", (key, value))
            conn.commit()
        except Exception as e:
            # NOTE: A cache error can be forgiven, no?
            getLogger().debug("Cache Error: Cannot insert | Detail = %s" % (e,))
            return False
        return True
|
java
|
/**
 * Marks the tiles currently displayed (for the given canvas/projection) as
 * protected in the tile cache, so cache maintenance will not evict them.
 *
 * @param pCanvas     the canvas being drawn
 * @param pProjection the current map projection
 */
public void protectDisplayedTilesForCache(final Canvas pCanvas, final Projection pProjection) {
    // No visible viewport -> nothing to protect.
    if (!setViewPort(pCanvas, pProjection)) {
        return;
    }
    // Compute the set of tiles covering the viewport at the current zoom.
    TileSystem.getTileFromMercator(mViewPort, TileSystem.getTileSize(mProjection.getZoomLevel()), mProtectedTiles);
    final int tileZoomLevel = TileSystem.getInputTileZoomLevel(mProjection.getZoomLevel());
    mTileProvider.getTileCache().getMapTileArea().set(tileZoomLevel, mProtectedTiles);
    // Trigger eviction of everything outside the protected area.
    mTileProvider.getTileCache().maintenance();
}
|
java
|
/**
 * Writes a quoted, escaped map key followed by {@code ':'}.
 *
 * @param key the map key to write, must not be null
 * @return this writer, for chaining
 * @throws IllegalArgumentException if {@code key} is null
 */
public JsonWriter key(CharSequence key) {
    // BUGFIX: validate BEFORE startKey().  The original called startKey()
    // first, so a null key left the writer in a corrupted state (e.g. a
    // dangling separator already emitted) when the exception was thrown.
    if (key == null) {
        throw new IllegalArgumentException("Expected map key, but got null.");
    }
    startKey();
    writeQuotedAndEscaped(key);
    writer.write(':');
    return this;
}
|
java
|
/**
 * Concatenates the string form of the first resolved parameter with the
 * string form of {@code value}.
 * NOTE(review): the exact semantics of {@code super.get(0, params)} and
 * {@code super.asString} are defined in the superclass, which is outside
 * this view -- presumably parameter resolution and null-safe stringification;
 * confirm against the base class.
 */
@Override
public Object apply(Object value, Object... params) {
    return super.asString(super.get(0, params)) + super.asString(value);
}
|
python
|
def find_by(self, column=None, value=None, order_by=None, limit=0):
    """
    Find all items that matches your a column/value.

    :param column: column to search.
    :param value: value to look for in `column`.
    :param limit: How many rows to fetch.
    :param order_by: column on which to order the results. \
    To change the sort, prepend with < or >.
    :raises ValueError: if either ``column`` or ``value`` is missing.
    """
    # BUGFIX: validate arguments BEFORE opening a connection.  The original
    # acquired a connection from the pool and only then raised, wasting a
    # connection (and a round-trip) on every bad call.
    if column is None or value is None:
        raise ValueError("You need to supply both a column and a value")
    with rconnect() as conn:
        try:
            query = self._base()
            if order_by is not None:
                query = self._order_by(query, order_by)
            if limit > 0:
                query = self._limit(query, limit)
            query = query.filter({column: value})
            log.debug(query)
            rv = query.run(conn)
        except Exception as e:
            log.warn(e)
            raise
        else:
            # Wrap each raw row in the model class before returning.
            data = [self._model(_) for _ in rv]
            return data
|
python
|
def listdir(dir_name, get_dirs=None, get_files=None, hide_ignored=False):
    """
    Return list of all dirs and files inside given dir.
    Also can filter contents to return only dirs or files.

    Args:
    - dir_name: Which directory we need to scan (relative)
    - get_dirs: Return dirs list
    - get_files: Return files list
    - hide_ignored: Exclude files and dirs with initial underscore
    """
    # When neither filter is requested, return both kinds of entries.
    if get_dirs is None and get_files is None:
        get_dirs = get_files = True
    source_dir = os.path.join(settings.BASE_DIR, 'app', dir_name)
    entries = []
    for entry_name in os.listdir(source_dir):
        if hide_ignored and entry_name.startswith('_'):
            continue
        entry_is_dir = os.path.isdir(os.path.join(source_dir, entry_name))
        wanted = (get_dirs and entry_is_dir) or (get_files and not entry_is_dir)
        if wanted:
            entries.append(entry_name)
    return entries
|
java
|
/**
 * Looks up the {@link Xid} of the remote transaction whose recoverable
 * identifier carries the given internal id.
 *
 * @param internalId internal id to search for, not null
 * @return the matching Xid, or {@code null} if no remote transaction matches
 */
public Xid getRemoteTransactionXid(Long internalId) {
    for (RemoteTransaction rTx : getRemoteTransactions()) {
        RecoverableTransactionIdentifier gtx = (RecoverableTransactionIdentifier) rTx.getGlobalTransaction();
        // BUGFIX: use equals() instead of '=='.  Comparing boxed Longs with
        // '==' is a reference comparison outside the small-integer cache
        // (-128..127), silently missing matches for larger ids.
        if (internalId.equals(gtx.getInternalId())) {
            if (trace) log.tracef("Found xid %s matching internal id %s", gtx.getXid(), internalId);
            return gtx.getXid();
        }
    }
    if (trace) log.tracef("Could not find remote transactions matching internal id %s", internalId);
    return null;
}
|
java
|
/**
 * Generated EMF accessor: returns the {@code DirectionRatiosAsString}
 * feature of this IfcDirection, resolving proxies ({@code true} argument).
 *
 * @return the list of direction ratios in string form
 */
@SuppressWarnings("unchecked")
public EList<String> getDirectionRatiosAsString() {
    return (EList<String>) eGet(Ifc2x3tc1Package.Literals.IFC_DIRECTION__DIRECTION_RATIOS_AS_STRING, true);
}
|
python
|
def cyvcf2(context, vcf, include, exclude, chrom, start, end, loglevel, silent,
           individual, no_inds):
    """fast vcf parsing with cython + htslib

    CLI entry point (the ``context.abort()`` calls suggest a click command;
    the decorator is outside this chunk -- TODO confirm).  Validates the
    mutually exclusive include/exclude fields, the optional region triple
    and the requested individuals, then streams and prints variants.
    """
    coloredlogs.install(log_level=loglevel)
    start_parsing = datetime.now()
    log.info("Running cyvcf2 version %s", __version__)
    # 'include' and 'exclude' are mutually exclusive options.
    if include and exclude:
        log.warning("Can not use include and exclude at the same time")
        context.abort()
    # A region is only valid when all of chrom/start/end are provided.
    region = ''
    if (chrom or start or end):
        if not (chrom and start and end):
            log.warning("Please specify chromosome, start and end for region")
            context.abort()
        else:
            region = "{0}:{1}-{2}".format(chrom, start, end)
    vcf_obj = VCF(vcf)
    # Every include/exclude field must exist in the VCF header.
    for inclusion in include:
        if vcf_obj.contains(inclusion):
            log.info("Including %s in output", inclusion)
        else:
            log.warning("%s does not exist in header", inclusion)
            context.abort()
    for exclusion in exclude:
        if vcf_obj.contains(exclusion):
            log.info("Excluding %s in output", exclusion)
        else:
            log.warning("%s does not exist in header", exclusion)
            context.abort()
    if individual:
        # Check if the choosen individuals exists in vcf
        test = True
        for ind_id in individual:
            if ind_id not in vcf_obj.samples:
                log.warning("Individual '%s' does not exist in vcf", ind_id)
                test = False
        # Report ALL unknown individuals before aborting.
        if not test:
            context.abort()
        # Convert individuals to list for VCF.set_individuals
        individual = list(individual)
    else:
        individual = None
    # Set individual to be empty list to skip all genotypes
    if no_inds:
        individual = []
    if not silent:
        print_header(vcf_obj, include, exclude, individual)
    # None (as opposed to 0) distinguishes "no variants at all" below.
    nr_variants = None
    try:
        for nr_variants, variant in enumerate(vcf_obj(region)):
            if not silent:
                print_variant(variant, include, exclude)
    except Exception as err:
        log.warning(err)
        context.abort()
    if nr_variants is None:
        log.info("No variants in vcf")
        return
    # enumerate() is 0-based, so the count is the last index + 1.
    log.info("{0} variants parsed".format(nr_variants+1))
    log.info("Time to parse variants: {0}".format(datetime.now() - start_parsing))
|
python
|
def _wait_for_request(self, uuid, connection_adapter=None):
"""Wait for RPC request to arrive.
:param str uuid: Rpc Identifier.
:param obj connection_adapter: Provide custom connection adapter.
:return:
"""
start_time = time.time()
while not self._response[uuid]:
connection_adapter.check_for_errors()
if time.time() - start_time > self._timeout:
self._raise_rpc_timeout_error(uuid)
time.sleep(IDLE_WAIT)
|
java
|
/**
 * Liferay-generated finder: returns the previous, current and next
 * {@code CommerceTaxFixedRateAddressRel} (in that order, as a 3-element
 * array) relative to the given primary key within the set of rows matching
 * {@code CPTaxCategoryId}, ordered by {@code orderByComparator}.
 *
 * @throws NoSuchTaxFixedRateAddressRelException if the current entity does not exist
 */
@Override
public CommerceTaxFixedRateAddressRel[] findByCPTaxCategoryId_PrevAndNext(
    long commerceTaxFixedRateAddressRelId, long CPTaxCategoryId,
    OrderByComparator<CommerceTaxFixedRateAddressRel> orderByComparator)
    throws NoSuchTaxFixedRateAddressRelException {
    CommerceTaxFixedRateAddressRel commerceTaxFixedRateAddressRel = findByPrimaryKey(commerceTaxFixedRateAddressRelId);
    Session session = null;
    try {
        session = openSession();
        // array = { previous, current, next }
        CommerceTaxFixedRateAddressRel[] array = new CommerceTaxFixedRateAddressRelImpl[3];
        array[0] = getByCPTaxCategoryId_PrevAndNext(session,
                commerceTaxFixedRateAddressRel, CPTaxCategoryId,
                orderByComparator, true);
        array[1] = commerceTaxFixedRateAddressRel;
        array[2] = getByCPTaxCategoryId_PrevAndNext(session,
                commerceTaxFixedRateAddressRel, CPTaxCategoryId,
                orderByComparator, false);
        return array;
    }
    catch (Exception e) {
        throw processException(e);
    }
    finally {
        closeSession(session);
    }
}
|
java
|
/**
 * Sets the comparator used to order map entries by value.
 * NOTE(review): unlike a strict builder, this "lax" variant allows the
 * ordering to be (re)assigned -- presumably replacing any previously set
 * ordering; confirm against the class contract.
 *
 * @param valueComparator comparator over values, must not be null
 * @return this builder, for chaining
 */
@Override
public LaxImmutableMapBuilder<K, V> orderEntriesByValue(Comparator<? super V> valueComparator) {
    this.immutableMapEntryOrdering = checkNotNull(valueComparator);
    return this;
}
|
java
|
/**
 * Returns a new {@code TimeVal} equal to {@code time} advanced by
 * {@code upd} milliseconds, recomputing the seconds/microseconds split.
 *
 * @param time base time value (seconds + microseconds)
 * @param upd  offset in milliseconds
 * @return a freshly allocated, adjusted TimeVal
 */
TimeVal compute_new_date(final TimeVal time, final long upd) {
    // Collapse to fractional seconds, add the millisecond offset, then split.
    final double baseSeconds = time.tv_sec + (double) time.tv_usec / 1000000;
    final double shiftedSeconds = baseSeconds + (double) upd / 1000;
    final TimeVal result = new TimeVal();
    result.tv_sec = (int) shiftedSeconds;
    result.tv_usec = (int) ((shiftedSeconds - result.tv_sec) * 1000000);
    return result;
}
|
python
|
def _setup_packages(self, sc):
"""
This method compresses and uploads packages to the cluster
"""
packages = self.py_packages
if not packages:
return
for package in packages:
mod = importlib.import_module(package)
try:
mod_path = mod.__path__[0]
except AttributeError:
mod_path = mod.__file__
tar_path = os.path.join(self.run_path, package + '.tar.gz')
tar = tarfile.open(tar_path, "w:gz")
tar.add(mod_path, os.path.basename(mod_path))
tar.close()
sc.addPyFile(tar_path)
|
java
|
/**
 * Returns the (lower-cased) names of all templates used by the given page.
 *
 * @param pageId id of the page, must be positive
 * @return list of template names; empty if the query yields no result
 * @throws WikiApiException if the page id is invalid or a database error occurs
 */
public List<String> getTemplateNamesFromPage(int pageId) throws WikiApiException{
    if(pageId<1){
        throw new WikiApiException("Page ID must be > 0");
    }
    try {
        PreparedStatement statement = null;
        ResultSet result = null;
        List<String> templateNames = new LinkedList<String>();
        try {
            statement = connection.prepareStatement("SELECT tpl.templateName FROM "+ GeneratorConstants.TABLE_TPLID_TPLNAME+ " AS tpl, "
                + GeneratorConstants.TABLE_TPLID_PAGEID+ " AS p WHERE tpl.templateId = p.templateId AND p.pageId = ?");
            statement.setInt(1, pageId);
            result = execute(statement);
            if (result == null) {
                return templateNames;
            }
            while (result.next()) {
                templateNames.add(result.getString(1).toLowerCase());
            }
        }
        finally {
            // BUGFIX: close the ResultSet BEFORE its Statement.  The original
            // closed the Statement first, which per JDBC implicitly closes
            // the result set -- and if statement.close() threw, the result
            // set was never explicitly closed at all.
            if (result != null) {
                result.close();
            }
            if (statement != null) {
                statement.close();
            }
        }
        return templateNames;
    }
    catch (Exception e) {
        throw new WikiApiException(e);
    }
}
|
java
|
/**
 * Evaluates (and lazily initializes) the date from which balances are
 * stored.  If it has never been set and there are no accounting entries,
 * it defaults to the start of the current accounting year; if entries
 * exist, it is the start of the period nearest to the first entry.  Any
 * freshly computed value is persisted via the ORM service.
 *
 * @param pAddParam additional request parameters passed through to services
 * @return the (possibly just initialized) balance store start date
 * @throws Exception on database/service errors
 */
public final synchronized Date evalDateBalanceStoreStart(
    final Map<String, Object> pAddParam) throws Exception {
    Date dateBalanceStoreStart = lazyGetBalanceAtAllDirtyCheck(pAddParam)
        .getDateBalanceStoreStart();
    Date leastAccountingEntryDate = this.balanceAtAllDirtyCheck
        .getLeastAccountingEntryDate();
    // initDateLong acts as the "not yet initialized" sentinel for both dates.
    if (dateBalanceStoreStart.getTime() == this.initDateLong
        && leastAccountingEntryDate.getTime() == this.initDateLong) {
        //the first time with no acc-entries, it's start of current ACC year:
        Calendar cal = Calendar.getInstance(new Locale("en", "US"));
        cal.setTime(getSrvAccSettings().lazyGetAccSettings(pAddParam)
            .getCurrentAccYear());
        // Truncate to midnight, January 1st of the accounting year.
        cal.set(Calendar.MONTH, 0);
        cal.set(Calendar.DAY_OF_MONTH, 1);
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        this.balanceAtAllDirtyCheck
            .setDateBalanceStoreStart(cal.getTime());
        getSrvOrm()
            .updateEntity(pAddParam, this.balanceAtAllDirtyCheck);
    } else if (dateBalanceStoreStart.getTime() == this.initDateLong
        && leastAccountingEntryDate.getTime() > this.initDateLong) {
        //there is at least 1 acc entry
        //the start is start of nearest period to the first acc entry:
        Long dateFirstEntryLong = this.srvDatabase
            .evalLongResult("select min(ITSDATE) as MINIMUMDATE "
                + "from ACCOUNTINGENTRY where REVERSEDID is null;", "MINIMUMDATE");
        if (dateFirstEntryLong == null) {
            //e.g. dirty reversed acc entry
            getLogger().info(null, SrvBalanceStd.class,
                "There is no single acc entry, so use current acc year for start balance!");
            Calendar cal = Calendar.getInstance(new Locale("en", "US"));
            cal.setTime(getSrvAccSettings().lazyGetAccSettings(pAddParam)
                .getCurrentAccYear());
            // Same truncation as above: midnight, January 1st.
            cal.set(Calendar.MONTH, 0);
            cal.set(Calendar.DAY_OF_MONTH, 1);
            cal.set(Calendar.HOUR_OF_DAY, 0);
            cal.set(Calendar.MINUTE, 0);
            cal.set(Calendar.SECOND, 0);
            cal.set(Calendar.MILLISECOND, 0);
            this.balanceAtAllDirtyCheck
                .setDateBalanceStoreStart(cal.getTime());
        } else {
            this.balanceAtAllDirtyCheck.setDateBalanceStoreStart(
                evalDatePeriodStartFor(pAddParam, new Date(dateFirstEntryLong)));
        }
        getSrvOrm()
            .updateEntity(pAddParam, this.balanceAtAllDirtyCheck);
    }
    return this.balanceAtAllDirtyCheck.getDateBalanceStoreStart();
}
|
java
|
/**
 * Factory for a {@link PropertySelector} over the given attributes.
 *
 * @param orMode when {@code true} the selector combines the attribute
 *        conditions disjunctively -- presumably OR vs. AND semantics;
 *        confirm against {@code PropertySelector.orMode}.
 * @param fields the attributes to select on, must not be null
 * @return a new, configured property selector
 */
public static <E, F> PropertySelector<E, F> newPropertySelector(boolean orMode, Attribute<?, ?>... fields) {
    PropertySelector<E, F> ps = new PropertySelector<E, F>(checkNotNull(fields));
    return ps.orMode(orMode);
}
|
python
|
def _create_ring(self, nodes):
"""Generate a ketama compatible continuum/ring.
"""
for node_name, node_conf in nodes:
for w in range(0, node_conf['vnodes'] * node_conf['weight']):
self._distribution[node_name] += 1
self._ring[self.hashi('%s-%s' % (node_name, w))] = node_name
self._keys = sorted(self._ring.keys())
|
java
|
/**
 * Returns the values of an object-type BioPAX L3 property of {@code bpe}.
 *
 * @param bpe      the BioPAX element to read from
 * @param property the BioPAX property name (e.g. "organism")
 * @return the property values, or an empty set when the element's type
 *         does not declare such a property
 */
public static Set<? extends BioPAXElement> getObjectBiopaxPropertyValues(BioPAXElement bpe, String property) {
    Set<BioPAXElement> values = new HashSet<BioPAXElement>();
    // get the BioPAX L3 property editors map
    EditorMap em = SimpleEditorMap.L3;
    // get the 'organism' biopax property editor,
    // if exists for this type of bpe
    @SuppressWarnings("unchecked") PropertyEditor<BioPAXElement, BioPAXElement> editor
        = (PropertyEditor<BioPAXElement, BioPAXElement>) em
            .getEditorForProperty(property, bpe.getModelInterface());
    // if the biopax object does have such property, get values
    if (editor != null) {
        return editor.getValueFromBean(bpe);
    } else
        // no such property on this element type: empty result, not null
        return values;
}
|
java
|
/**
 * Liferay-generated static delegate: returns the first
 * {@code CommerceUserSegmentEntry} in the given group according to the
 * comparator, or {@code null} if none matches.
 */
public static CommerceUserSegmentEntry fetchByGroupId_First(long groupId,
    OrderByComparator<CommerceUserSegmentEntry> orderByComparator) {
    return getPersistence().fetchByGroupId_First(groupId, orderByComparator);
}
|
python
|
def extract_numerics_alert(event):
    """Determines whether a health pill event contains bad values.

    A bad value is one of NaN, -Inf, or +Inf.

    Args:
      event: (`Event`) A `tensorflow.Event` proto from `DebugNumericSummary`
        ops.

    Returns:
      An instance of `NumericsAlert`, if bad values are found.
      `None`, if no bad values are found.

    Raises:
      ValueError: if the event does not have the expected tag prefix or the
        debug op name is not the expected debug op name suffix.
    """
    value = event.summary.value[0]
    # Locate the debugger plugin metadata; its presence identifies the event
    # as debugger output and its content carries the device name.
    debugger_plugin_metadata_content = None
    if value.HasField("metadata"):
        plugin_data = value.metadata.plugin_data
        if plugin_data.plugin_name == constants.DEBUGGER_PLUGIN_NAME:
            debugger_plugin_metadata_content = plugin_data.content
    if not debugger_plugin_metadata_content:
        raise ValueError("Event proto input lacks debugger plugin SummaryMetadata.")
    debugger_plugin_metadata_content = tf.compat.as_text(
        debugger_plugin_metadata_content)
    try:
        content_object = json.loads(debugger_plugin_metadata_content)
        device_name = content_object["device"]
    except (KeyError, ValueError) as e:
        raise ValueError("Could not determine device from JSON string %r, %r" %
                         (debugger_plugin_metadata_content, e))
    # The node name must end with the DebugNumericSummary op suffix; strip
    # it to recover the watched tensor's name.
    debug_op_suffix = ":DebugNumericSummary"
    if not value.node_name.endswith(debug_op_suffix):
        raise ValueError(
            "Event proto input does not have the expected debug op suffix %s" %
            debug_op_suffix)
    tensor_name = value.node_name[:-len(debug_op_suffix)]
    elements = tf_debug.load_tensor_from_event(event)
    # The summary tensor packs counters at fixed indices (see constants.*).
    nan_count = elements[constants.NAN_NUMERIC_SUMMARY_OP_INDEX]
    neg_inf_count = elements[constants.NEG_INF_NUMERIC_SUMMARY_OP_INDEX]
    pos_inf_count = elements[constants.POS_INF_NUMERIC_SUMMARY_OP_INDEX]
    if nan_count > 0 or neg_inf_count > 0 or pos_inf_count > 0:
        return NumericsAlert(
            device_name, tensor_name, event.wall_time, nan_count, neg_inf_count,
            pos_inf_count)
    # All counters zero: no alert.
    return None
|
java
|
/**
 * Returns all child elements in the xCal namespace whose local name matches
 * the (lower-cased) name of the given data type, each wrapped in an
 * {@code XCalElement}.
 *
 * @param dataType the data type whose name selects the children
 * @return the matching children (possibly empty)
 */
public List<XCalElement> children(ICalDataType dataType) {
    final String wantedName = dataType.getName().toLowerCase();
    final List<XCalElement> matches = new ArrayList<XCalElement>();
    for (Element candidate : children()) {
        if (wantedName.equals(candidate.getLocalName()) && XCAL_NS.equals(candidate.getNamespaceURI())) {
            matches.add(new XCalElement(candidate));
        }
    }
    return matches;
}
|
java
|
/**
 * Rolls back the entity manager's active transaction, logging (but never
 * propagating) any failure.  Does nothing when no transaction is active.
 *
 * @param entityManager the entity manager whose transaction to roll back
 */
public static void rollbackQuietly(EntityManager entityManager) {
    if (!entityManager.getTransaction().isActive()) {
        return;
    }
    try {
        entityManager.getTransaction().rollback();
    } catch (Exception e) {
        // "Quietly": swallow and log -- callers must not see rollback errors.
        logger.error(e.getMessage(), e);
    }
}
|
java
|
/**
 * Records the flag bits for the named attribute, lazily creating the
 * backing table on first use.
 *
 * @param name  attribute name
 * @param flags flag bits to associate with the attribute
 */
void setAttr(String name, int flags)
{
    if (m_attrs == null) {
        m_attrs = new StringToIntTable();
    }
    m_attrs.put(name, flags);
}
|
python
|
def delete_device(self, rid):
    """
    Deletes device object with given rid
    http://docs.exosite.com/portals/#delete-device
    """
    headers = {
        'User-Agent': self.user_agent(),
        'Content-Type': self.content_type(),
    }
    # Extra/overriding headers from the client configuration win.
    headers.update(self.headers())
    url = self.portals_url() + '/devices/' + rid
    response = requests.delete(url, headers=headers, auth=self.auth())
    if response.status_code == HTTP_STATUS.NO_CONTENT:
        print("Successfully deleted device with rid: {0}".format(rid))
        return True
    print("Something went wrong: <{0}>: {1}".format(
        response.status_code, response.reason))
    # Raise for 4xx/5xx; unexpected non-error codes fall through to False.
    response.raise_for_status()
    return False
|
java
|
/**
 * Executes the browser-history action encoded in the current state:
 * navigates back or forward, or does nothing for other states.
 */
private void executeHistoryAction() {
    final CmsHistoryState state = getState();
    if (state.isHistoryBack()) {
        History.back();
        return;
    }
    if (state.isHistoryForward()) {
        History.forward();
    }
}
|
java
|
/**
 * JMS property setter: stores the long value (boxed) under the given name
 * by delegating to the generic {@code setProperty}.
 *
 * @param name  property name
 * @param value property value
 * @throws JMSException propagated from the generic setter
 */
@Override
public final void setLongProperty(String name, long value) throws JMSException
{
    setProperty(name,Long.valueOf(value));
}
|
java
|
/**
 * Generates the bytecode for a proxy class named {@code className} that
 * extends {@code parentClass} and implements {@code InvokeDistribute}.
 *
 * @param parentClass superclass of the generated class (also the type of
 *                    the stored target field)
 * @param className   fully qualified name of the class to generate
 * @return the raw class-file bytes
 */
public byte[] buildByteCode(Class<?> parentClass, String className) {
    ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
    cw.visit(version, // Java version
        ACC_PUBLIC, // public class
        convert(className), // package and name
        null, // signature (null means not generic)
        convert(parentClass), // superclass
        new String[] { convert(InvokeDistribute.class) });
    // Declare the field that holds the actual target instance.
    cw.visitField(ACC_PRIVATE + ACC_FINAL, TARGET_FIELD_NAME, getByteCodeType(parentClass),
        null, null).visitEnd();
    /* Generate the default constructor for the class (the compiler would emit
       one automatically, but when emitting bytecode by hand the no-arg
       constructor must be generated explicitly). */
    generateDefaultConstructor(cw, parentClass, className);
    // Build the dispatch method.
    buildMethod(cw, className, parentClass);
    // buildMethod(cw);
    // finish the class definition
    cw.visitEnd();
    return cw.toByteArray();
}
|
java
|
/**
 * Resolves the SimpleFormatter format string: first from the system
 * property, then (optionally) from the logging proxy, validating any
 * user-supplied value and falling back to the default on bad syntax.
 *
 * @param useProxy whether to consult the logging properties proxy as a
 *                 secondary source
 * @return a syntactically valid format string
 */
static String getSimpleFormat(boolean useProxy) {
    // System property lookup needs elevated privileges in sandboxed code.
    String format =
        AccessController.doPrivileged(
            new PrivilegedAction<String>() {
                public String run() {
                    return System.getProperty(FORMAT_PROP_KEY);
                }
            });
    if (useProxy && proxy != null && format == null) {
        format = proxy.getProperty(FORMAT_PROP_KEY);
    }
    if (format != null) {
        try {
            // validate the user-defined format string
            String.format(format, new Date(), "", "", "", "", "");
        } catch (IllegalArgumentException e) {
            // illegal syntax; fall back to the default format
            format = DEFAULT_FORMAT;
        }
    } else {
        format = DEFAULT_FORMAT;
    }
    return format;
}
|
java
|
/**
 * Returns a new insertion-ordered map containing only the entries of
 * {@code map} whose keys appear in {@code keys}.  Keys absent from the map
 * are silently skipped.
 *
 * @param map  the source map
 * @param keys the keys to retain, in the order they should appear
 * @return a new {@code LinkedHashMap} with the selected entries
 */
public static <K, V> Map<K, V> subMap(Map<K, V> map, K[] keys) {
    final Map<K, V> selected = new LinkedHashMap<K, V>(keys.length);
    for (final K candidate : keys) {
        if (!map.containsKey(candidate)) {
            continue;
        }
        selected.put(candidate, map.get(candidate));
    }
    return selected;
}
|
java
|
/**
 * Generated EMF accessor: lazily resolves and caches the {@code EClass}
 * for IfcSoundValue from the registered Ifc2x3tc1 package.
 * NOTE: the classifier index (521) is produced by the EMF generator --
 * do not hand-edit.
 *
 * @return the IfcSoundValue EClass
 */
public EClass getIfcSoundValue() {
    if (ifcSoundValueEClass == null) {
        ifcSoundValueEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
            .getEClassifiers().get(521);
    }
    return ifcSoundValueEClass;
}
|
python
|
def process_text(text, out_file='sofia_output.json', auth=None):
    """Return processor by processing text given as a string.

    Parameters
    ----------
    text : str
        A string containing the text to be processed with Sofia.
    out_file : Optional[str]
        The path to a file to save the reader's output into.
        Default: sofia_output.json
    auth : Optional[list]
        A username/password pair for the Sofia web service. If not given,
        the SOFIA_USERNAME and SOFIA_PASSWORD values are loaded from either
        the INDRA config or the environment.

    Returns
    -------
    sp : indra.sources.sofia.processor.SofiaProcessor
        A SofiaProcessor object which has a list of extracted INDRA
        Statements as its statements attribute. If the API did not process
        the text, None is returned.
    """
    text_json = {'text': text}
    # Prefer explicitly supplied credentials; otherwise fall back to the
    # INDRA config / environment lookup.
    if not auth:
        user, password = _get_sofia_auth()
    else:
        user, password = auth
    if not user or not password:
        raise ValueError('Could not use SOFIA web service since'
                         ' authentication information is missing. Please'
                         ' set SOFIA_USERNAME and SOFIA_PASSWORD in the'
                         ' INDRA configuration file or as environmental'
                         ' variables.')
    json_response, status_code, process_status = \
        _text_processing(text_json=text_json, user=user, password=password)
    # Check response status
    if process_status != 'Done' or status_code != 200:
        return None
    # Cache reading output
    if out_file:
        with open(out_file, 'w') as fh:
            json.dump(json_response, fh, indent=1)
    return process_json(json_response)
|
java
|
/**
 * Returns the value stored at the given slot index.
 * NOTE(review): the index is expected to come from a prior lookup that
 * located an existing key (HPPC-style indexed access -- confirm against
 * the interface contract); the asserts below only guard debug builds.
 *
 * @param index slot of an existing key; {@code mask + 1} addresses the
 *              special empty-key slot when present
 * @return the value at that slot
 */
@Override
public VType indexGet(int index) {
    assert index >= 0 : "The index must point at an existing key.";
    // Slot mask + 1 is only valid when the empty key is present.
    assert index <= mask ||
        (index == mask + 1 && hasEmptyKey);
    return Intrinsics.<VType> cast(values[index]);
}
|
python
|
def create_gce_image(zone,
                     project,
                     instance_name,
                     name,
                     description):
    """
    Shuts down the instance and creates an image from the disk.

    Assumes that the disk name is the same as the instance_name (this is the
    default behavior for boot disks on GCE).

    :param zone: GCE zone of the instance/disk.
    :param project: GCE project id.
    :param instance_name: instance to shut down; also used as the disk name.
    :param name: name for the new image.
    :param description: human-readable image description.
    :returns: the image name.
    """
    disk_name = instance_name
    try:
        down_gce(instance_name=instance_name, project=project, zone=zone)
    except HttpError as e:
        # 404: instance already stopped/gone -- that's fine, just log it.
        if e.resp.status == 404:
            log_yellow("the instance {} is already down".format(instance_name))
        else:
            raise e
    body = {
        "rawDisk": {},
        "name": name,
        "sourceDisk": "projects/{}/zones/{}/disks/{}".format(
            project, zone, disk_name
        ),
        "description": description
    }
    compute = _get_gce_compute()
    # Block until the image-insert operation completes.
    gce_wait_until_done(
        compute.images().insert(project=project, body=body).execute()
    )
    return name
|
python
|
def from_other(cls, item):
    """Factory function to return instances of `item` converted into a new
    instance of ``cls``. Because this is a class method, it may be called
    from any bitmath class object without the need to explicitly
    instantiate the class ahead of time.

    *Implicit Parameter:*
    * ``cls`` A bitmath class, implicitly set to the class of the
      instance object it is called on

    *User Supplied Parameter:*
    * ``item`` A :class:`bitmath.Bitmath` subclass instance

    *Example:*
    >>> import bitmath
    >>> kib = bitmath.KiB.from_other(bitmath.MiB(1))
    >>> print kib
    KiB(1024.0)
    """
    # Guard clause: anything that is not a bitmath instance is rejected.
    if not isinstance(item, Bitmath):
        raise ValueError("The provided items must be a valid bitmath class: %s" %
                         str(item.__class__))
    # Converting via the bit count preserves the exact size.
    return cls(bits=item.bits)
|
python
|
def getIds(self, query='*:*', fq=None, start=0, rows=1000):
    """Returns a dictionary of: matches: number of matches failed: if true, then an
    exception was thrown start: starting index ids: [id, id, ...]

    See also the SOLRSearchResponseIterator class
    """
    import ast  # local import: safe parsing of the SOLR python-format response

    params = {'q': query, 'start': str(start), 'rows': str(rows), 'wt': 'python'}
    if fq is not None:
        params['fq'] = fq
    request = urllib.parse.urlencode(params, doseq=True)
    data = None
    response = {'matches': 0, 'start': start, 'failed': True, 'ids': []}
    try:
        rsp = self.doPost(self.solrBase + '', request, self.formheaders)
        # SECURITY FIX: the response body comes from an external service;
        # eval() would execute arbitrary code embedded in it.
        # ast.literal_eval only accepts Python literals (dicts, lists,
        # strings, numbers, booleans, None), which is exactly what the
        # SOLR 'wt=python' response format produces.
        data = ast.literal_eval(rsp.read())
    except Exception:
        # Best-effort: any failure is reported via response['failed'].
        pass
    if data is None:
        return response
    response['failed'] = False
    response['matches'] = data['response']['numFound']
    for doc in data['response']['docs']:
        response['ids'].append(doc['id'][0])
    return response
|
java
|
/**
 * Advances to the next segment during forward iteration, performing the
 * incremental FCD (Fast C or D) check and normalizing any span that fails
 * it.  NOTE(review): mirrors ICU4J collation iteration logic; the fields
 * (seq, pos, limit, rawSeq, rawLimit, segmentStart/segmentLimit, checkDir)
 * are defined elsewhere in the class -- their invariants below are inferred
 * from this method alone and should be confirmed against the class.
 */
private void nextSegment() {
    assert(checkDir > 0 && seq == rawSeq && pos != limit);
    // The input text [segmentStart..pos[ passes the FCD check.
    int p = pos;
    int prevCC = 0;
    for(;;) {
        // Fetch the next character's fcd16 value.
        int q = p;
        int c = Character.codePointAt(seq, p);
        p += Character.charCount(c);
        int fcd16 = nfcImpl.getFCD16(c);
        int leadCC = fcd16 >> 8;
        if(leadCC == 0 && q != pos) {
            // FCD boundary before the [q, p[ character.
            limit = segmentLimit = q;
            break;
        }
        if(leadCC != 0 && (prevCC > leadCC || CollationFCD.isFCD16OfTibetanCompositeVowel(fcd16))) {
            // Fails FCD check. Find the next FCD boundary and normalize.
            do {
                q = p;
                if(p == rawLimit) { break; }
                c = Character.codePointAt(seq, p);
                p += Character.charCount(c);
            } while(nfcImpl.getFCD16(c) > 0xff);
            normalize(pos, q);
            pos = start;
            break;
        }
        prevCC = fcd16 & 0xff;
        if(p == rawLimit || prevCC == 0) {
            // FCD boundary after the last character.
            limit = segmentLimit = p;
            break;
        }
    }
    assert(pos != limit);
    // A segment is now available; stop direction-checking until consumed.
    checkDir = 0;
}
|
python
|
def _resample(self, arrays, ji_windows):
    """Resample all arrays with potentially different resolutions to a common resolution.

    :param arrays: sequence of 2-D arrays, one per layer, read with the
        windows in ``ji_windows`` (assumed spatially aligned — TODO confirm).
    :param ji_windows: mapping from resolution to the read window for that
        resolution; ``ji_windows[self.dst_res]`` defines the output grid.
    :return: array of shape ``(height, width, n_layers)`` at ``self.dst_res``.
    """
    # get a destination array template
    win_dst = ji_windows[self.dst_res]
    # Transform of the first layer already at the destination resolution.
    aff_dst = self._layer_meta[self._res_indices[self.dst_res][0]]["transform"]
    arrays_dst = list()
    for i, array in enumerate(arrays):
        arr_dst = np.zeros((int(win_dst.height), int(win_dst.width)))
        # Resolution appears to be a cell size: a larger value than the
        # target means a coarser layer that must be upsampled — confirm.
        if self._layer_resolution[i] > self.dst_res:
            resampling = getattr(Resampling, self.upsampler)
        elif self._layer_resolution[i] < self.dst_res:
            resampling = getattr(Resampling, self.downsampler)
        else:
            # Already at the target resolution: keep the data unchanged.
            arrays_dst.append(array.copy())
            continue
        reproject(array, arr_dst,  # arr_dst[0, :, :, i],
                  src_transform=self._layer_meta[i]["transform"],
                  dst_transform=aff_dst,
                  src_crs=self._layer_meta[0]["crs"],
                  dst_crs=self._layer_meta[0]["crs"],
                  resampling=resampling)
        arrays_dst.append(arr_dst.copy())
    arrays_dst = np.stack(arrays_dst, axis=2)  # n_images x n x m x 10 would be the synergise format
    return arrays_dst
|
python
|
def _handler(self, sender, setting, value, **kwargs):
"""
handler for ``setting_changed`` signal.
@see :ref:`django:setting-changed`_
"""
if setting.startswith(self.prefix):
self._set_attr(setting, value)
|
java
|
/**
 * Ensures the given state object is non-null.
 *
 * @param obj     the state object to check
 * @param message message for the exception when the state is null
 * @throws IllegalStateException if {@code obj} is null
 */
public static void stateNotNull(final Object obj, final String message) throws IllegalStateException {
    // Fast path: valid (non-null) state.
    if (obj != null) {
        return;
    }
    throw new IllegalStateException(message);
}
|
java
|
/**
 * Returns the list of pricing details, lazily creating the backing
 * {@code SdkInternalList} on first access so callers never observe null.
 *
 * @return the (possibly empty) mutable list of pricing details
 */
public java.util.List<PricingDetail> getPricingDetails() {
    if (pricingDetails == null) {
        pricingDetails = new com.amazonaws.internal.SdkInternalList<PricingDetail>();
    }
    return pricingDetails;
}
|
python
|
def _check_range(range_):
"""Check that a range is in the format we expect [min, max] and return"""
try:
if not isinstance(range_, list):
range_ = list(range_)
min_, max_ = range_
except (ValueError, TypeError):
raise TypeError("each range in ising_linear_ranges should be a list of length 2.")
if not isinstance(min_, Number) or not isinstance(max_, Number) or min_ > max_:
raise ValueError(("each range in ising_linear_ranges should be a 2-tuple "
"(min, max) where min <= max"))
return range_
|
python
|
def dir_import_table(self):
    """
    Yield ``Import_DirectoryTable`` entries from the PE import directory.

    import table is terminated by a all-null entry, so we have to
    check for that (there is no stored entry count).
    """
    # Data directory index 1 is the import table.
    import_header = list(self.optional_data_directories)[1]
    # Convert the table's RVA to a seekable offset.
    import_offset = self.resolve_rva(import_header.VirtualAddress)
    i = 0
    while True:
        offset = import_offset + i*Import_DirectoryTable.get_size()
        idt = Import_DirectoryTable(self.stream, offset, self)
        if idt.is_empty():
            # All-null terminator entry: end of the table.
            break
        else:
            yield idt
            i += 1
|
python
|
def urlopen(self, url, **kwargs):
    """GET ``url`` over HTTP and return its body as a file-like object.

    Thin wrapper around :meth:`requests.Session.get`: the full response
    content is fetched eagerly and wrapped in a :class:`~io.BytesIO`.

    Parameters
    ----------
    url : str
        The URL to request
    kwargs : arbitrary keyword arguments
        Additional keyword arguments to pass to :meth:`requests.Session.get`.

    Returns
    -------
    fobj : file-like object
        A file-like interface to the content in the response

    See Also
    --------
    :meth:`requests.Session.get`
    """
    session = self.create_session()
    response = session.get(url, **kwargs)
    return BytesIO(response.content)
|
python
|
def send_request(self, *args, **kwargs):
    """Proxy for ``session.request`` that retries once after a reset.

    A ``ConnectionError`` (including the variant surfaced through
    pyopenssl) closes the session, and the request is issued one more
    time; a second failure propagates to the caller.
    """
    for attempt in (1, 2):
        try:
            return self.session.request(*args, **kwargs)
        except ConnectionError:
            if attempt == 2:
                raise
            self.session.close()
|
java
|
/**
 * Renders the named (possibly null) integer value as argument strings by
 * delegating to {@code Args.integer}.
 *
 * @param name  the argument name
 * @param value the nullable integer value
 * @return the argument strings produced by {@code Args.integer}
 */
public static List<String> get(String name, @Nullable Integer value) {
    return Args.integer(name, value);
}
|
python
|
def is_py2_stdlib_module(m):
    """
    Tries to infer whether the module m is from the Python 2 standard library.
    This may not be reliable on all systems.

    :param m: a module object.
    :return: True when running under Python 2 and ``m`` appears to come
        from the standard library, otherwise False.
    """
    if PY3:
        return False
    # Lazily compute and memoize the stdlib location on the function object.
    if 'stdlib_path' not in is_py2_stdlib_module.__dict__:
        stdlib_files = [contextlib.__file__, os.__file__, copy.__file__]
        stdlib_paths = [os.path.split(f)[0] for f in stdlib_files]
        if len(set(stdlib_paths)) != 1:
            # This seems to happen on travis-ci.org. Very strange. We'll try to
            # ignore it.
            flog.warn('Multiple locations found for the Python standard '
                      'library: %s' % stdlib_paths)
        # Choose the first one arbitrarily
        is_py2_stdlib_module.stdlib_path = stdlib_paths[0]

    # Built-in modules have no __file__ but are definitely stdlib.
    if m.__name__ in sys.builtin_module_names:
        return True

    if hasattr(m, '__file__'):
        modpath = os.path.split(m.__file__)
        # site-packages lives under the stdlib prefix, so exclude it.
        if (modpath[0].startswith(is_py2_stdlib_module.stdlib_path) and
            'site-packages' not in modpath[0]):
            return True

    return False
|
java
|
/**
 * Returns the first existing {@code remove-method} child element, wrapped
 * in its typed facade, or creates a new one when none is present.
 *
 * @return the existing or newly created {@code RemoveMethodType}
 */
public RemoveMethodType<SessionBeanType<T>> getOrCreateRemoveMethod()
{
    List<Node> nodeList = childNode.get("remove-method");
    if (nodeList != null && nodeList.size() > 0)
    {
        // Wrap the first existing node rather than creating a duplicate.
        return new RemoveMethodTypeImpl<SessionBeanType<T>>(this, "remove-method", childNode, nodeList.get(0));
    }
    return createRemoveMethod();
}
|
java
|
/**
 * Keeps only entries whose value is an instance of {@code clazz},
 * narrowing the value type of the stream accordingly.
 * For a parallel stream, the filter runs sequentially and the original
 * parallelism settings are restored afterwards.
 *
 * @param clazz runtime class the values must be instances of
 * @return a stream of entries whose values are of type {@code VV}
 */
@SuppressWarnings({ "unchecked" })
@SequentialOnly
public <VV> EntryStream<K, VV> selectByValue(Class<VV> clazz) {
    if (isParallel()) {
        // Filter sequentially, then re-parallelize with the same settings.
        return (EntryStream<K, VV>) sequential().filterByValue(Fn.instanceOf(clazz)).parallel(maxThreadNum(), splitor());
    } else {
        return (EntryStream<K, VV>) filterByValue(Fn.instanceOf(clazz));
    }
}
|
python
|
def get_filename(self, filename, filesystem=False, convert=False, subpath=''):
    """
    Get the filename according to self.to_path, and if filesystem is False
    then return unicode filename, otherwise return filesystem encoded filename

    @param filename: relative filename, it'll be combine with self.to_path
    @param filesystem: if True, then encoding the filename to filesystem
    @param convert: if True, then convert filename with FilenameConverter class
    @param subpath: sub folder in to_path
    @return: normalized path using ``/`` separators; encoded with the
        FILESYSTEM_ENCODING setting when ``filesystem`` is True
    """
    from uliweb.utils.common import safe_unicode
    #make sure the filename is unicode
    s = settings.GLOBAL
    if convert:
        # Convert only the basename; the directory part is kept untouched.
        _p, _f = os.path.split(filename)
        _filename = os.path.join(_p, self.filename_convert(_f))
    else:
        _filename = filename
    nfile = safe_unicode(_filename, s.HTMLPAGE_ENCODING)
    if subpath:
        paths = [application_path(self.to_path), subpath, nfile]
    else:
        paths = [application_path(self.to_path), nfile]
    # Normalize and force forward slashes so results are stable across OSes.
    f = os.path.normpath(os.path.join(*paths)).replace('\\', '/')
    if filesystem:
        return files.encode_filename(f, to_encoding=s.FILESYSTEM_ENCODING)
    return f
|
java
|
/**
 * Splits {@code buffer} into consecutive chunks of at most
 * {@code chunkSize} bytes. The final chunk carries the remaining bytes and
 * is always emitted, even when it is smaller than {@code chunkSize}.
 * The source buffer's limit is moved while chunking and restored to its
 * original value before the last chunk.
 * NOTE(review): each chunk's contents depend on {@code createBuffer},
 * which is defined elsewhere — confirm whether it copies or slices.
 *
 * @param buffer    the source buffer to split
 * @param chunkSize maximum number of bytes per chunk
 * @return the chunks, in order
 */
public static Collection<ByteBuffer> split(final ByteBuffer buffer,
    final int chunkSize) {
    final Collection<ByteBuffer> buffers = new LinkedList<ByteBuffer>();
    final int limit = buffer.limit();
    int totalSent = 0;
    while ((totalSent + chunkSize) < limit) {
        LOG.trace("Setting limit to: " + (totalSent + chunkSize));
        buffer.limit(totalSent + chunkSize);
        buffers.add(createBuffer(buffer));
        totalSent += chunkSize;
    }
    // Send any remaining bytes.
    buffer.limit(limit);
    buffers.add(createBuffer(buffer));
    return buffers;
}
|
java
|
/**
 * Cancels a queued resource request by delivering a terminal exception to
 * its owner. A null request is silently ignored.
 *
 * @param resourceRequest the queued request to cancel; may be null
 */
protected void destroyRequest(AsyncResourceRequest<V> resourceRequest) {
    if(resourceRequest == null) {
        return;
    }
    try {
        // To hand control back to the owner of the
        // AsyncResourceRequest, treat "destroy" as an exception since
        // there is no resource to pass into useResource, and the
        // timeout has not expired.
        final Exception cancellation =
                new UnreachableStoreException("Client request was terminated while waiting in the queue.");
        resourceRequest.handleException(cancellation);
    } catch(Exception ex) {
        logger.error("Exception while destroying resource request:", ex);
    }
}
|
python
|
def check_correct_audience(self, audience):
    """Assert that Dataporten sends back our own client id as audience"""
    expected_audience, _secret = self.get_key_and_secret()
    if audience == expected_audience:
        return
    raise AuthException('Wrong audience')
|
java
|
/**
 * Merges the inhibitAnyPolicy state for the next certificate in the path
 * (RFC 5280 section 6.1.4): the counter is decremented for each
 * non-self-issued certificate, and lowered further when the certificate's
 * InhibitAnyPolicy extension specifies a smaller skipCerts value.
 *
 * @param inhibitAnyPolicy the current inhibitAnyPolicy count
 * @param currCert the certificate being processed
 * @return the updated inhibitAnyPolicy count
 * @throws CertPathValidatorException if the extension cannot be read
 */
static int mergeInhibitAnyPolicy(int inhibitAnyPolicy,
    X509CertImpl currCert) throws CertPathValidatorException
{
    if ((inhibitAnyPolicy > 0) && !X509CertImpl.isSelfIssued(currCert)) {
        inhibitAnyPolicy--;
    }

    try {
        InhibitAnyPolicyExtension inhAnyPolExt = (InhibitAnyPolicyExtension)
            currCert.getExtension(InhibitAnyPolicy_Id);
        if (inhAnyPolExt == null)
            return inhibitAnyPolicy;

        int skipCerts =
            inhAnyPolExt.get(InhibitAnyPolicyExtension.SKIP_CERTS).intValue();

        if (debug != null)
            debug.println("PolicyChecker.mergeInhibitAnyPolicy() "
                + "skipCerts Index from cert = " + skipCerts);

        // skipCerts == -1 encodes "no constraint" from the extension.
        if (skipCerts != -1) {
            if (skipCerts < inhibitAnyPolicy) {
                inhibitAnyPolicy = skipCerts;
            }
        }
    } catch (IOException e) {
        if (debug != null) {
            debug.println("PolicyChecker.mergeInhibitAnyPolicy "
                + "unexpected exception");
            e.printStackTrace();
        }

        throw new CertPathValidatorException(e);
    }

    return inhibitAnyPolicy;
}
|
python
|
def utime(self, times):
    """
    Set the access and modified times of this file.

    @param times: ``None`` to stamp both times with the current time, or a
        2-tuple ``(atime, mtime)`` in standard internet epoch time (seconds
        since 01 January 1970 GMT). This bizarre API is mimicked from
        python for the sake of consistency -- I apologize.
    @type times: tuple(int)
    """
    if times is None:
        times = (time.time(), time.time())
    self.sftp._log(DEBUG, 'utime(%s, %r)' % (hexlify(self.handle), times))
    stamped = SFTPAttributes()
    stamped.st_atime, stamped.st_mtime = times
    self.sftp._request(CMD_FSETSTAT, self.handle, stamped)
|
python
|
def cublasStrsm(handle, side, uplo, trans, diag, m, n, alpha, A, lda, B, ldb):
    """
    Solve a real triangular system with multiple right-hand sides.

    Thin ctypes wrapper over single-precision ``cublasStrsm_v2``.
    The string-valued mode arguments (``side``, ``uplo``, ``trans``,
    ``diag``) are translated to CUBLAS enum values through the
    module-level lookup tables; ``A`` and ``B`` are device pointers
    passed as integers. Errors reported by CUBLAS are raised via
    ``cublasCheckStatus``.
    """
    # Argument order must match the cublasStrsm_v2 C prototype exactly;
    # alpha is passed by reference as a host-side float.
    status = _libcublas.cublasStrsm_v2(handle,
                                       _CUBLAS_SIDE_MODE[side],
                                       _CUBLAS_FILL_MODE[uplo],
                                       _CUBLAS_OP[trans],
                                       _CUBLAS_DIAG[diag],
                                       m, n, ctypes.byref(ctypes.c_float(alpha)),
                                       int(A), lda, int(B), ldb)
    cublasCheckStatus(status)
|
python
|
def smart_convert(original, colorkey, pixelalpha):
    """
    this method does several tests on a surface to determine the optimal
    flags and pixel format for each tile surface.

    this is done for the best rendering speeds and removes the need to
    convert() the images on your own

    :param original: tile surface to convert
    :param colorkey: colorkey color, or a falsy value when none is set
    :param pixelalpha: True when per-pixel alpha should be preserved
    :return: the converted surface
    """
    tile_size = original.get_size()
    threshold = 127   # the default

    try:
        # count the number of pixels in the tile that are not transparent
        px = pygame.mask.from_surface(original, threshold).count()
    except Exception:
        # pygame_sdl2 will fail because the mask module is not included
        # in this case, just convert_alpha and return it.
        # (narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate)
        return original.convert_alpha()

    # there are no transparent pixels in the image
    if px == tile_size[0] * tile_size[1]:
        tile = original.convert()

    # there are transparent pixels, and tiled set a colorkey
    elif colorkey:
        tile = original.convert()
        tile.set_colorkey(colorkey, pygame.RLEACCEL)

    # there are transparent pixels, and set for perpixel alpha
    elif pixelalpha:
        tile = original.convert_alpha()

    # there are transparent pixels, and we won't handle them
    else:
        tile = original.convert()

    return tile
|
python
|
def list_projects(self, max_results=None, page_token=None, retry=DEFAULT_RETRY):
    """List projects for the project associated with this client.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list

    :type max_results: int
    :param max_results: (Optional) maximum number of projects to return,
                        If not passed, defaults to a value set by the API.

    :type page_token: str
    :param page_token:
        (Optional) Token representing a cursor into the projects. If
        not passed, the API will return the first page of projects.
        The token marks the beginning of the iterator to be returned
        and the value of the ``page_token`` can be accessed at
        ``next_page_token`` of the
        :class:`~google.api_core.page_iterator.HTTPIterator`.

    :type retry: :class:`google.api_core.retry.Retry`
    :param retry: (Optional) How to retry the RPC.

    :rtype: :class:`~google.api_core.page_iterator.Iterator`
    :returns: Iterator of :class:`~google.cloud.bigquery.client.Project`
              accessible to the current client.
    """
    # Bind the retry policy into the API-call function used per page.
    api_call = functools.partial(self._call_api, retry)
    return page_iterator.HTTPIterator(
        client=self,
        api_request=api_call,
        path="/projects",
        item_to_value=_item_to_project,
        items_key="projects",
        page_token=page_token,
        max_results=max_results,
    )
|
python
|
def factory(cfg, login, pswd, request_type):
    """
    Instantiate ImportRequest

    :param cfg: request configuration, should consist of request description (url and parameters) and response for parsing result
    :param login:
    :param pswd:
    :param request_type: TYPE_GET_SINGLE_OBJECT or TYPE_GET_LIST = 'list'
    :return: ImportRequest instance
    """
    # Dispatch table mapping request type to the concrete implementation.
    builders = {
        ImportRequest.TYPE_GET_LIST: ListImportRequest,
        ImportRequest.TYPE_GET_SINGLE_OBJECT: SingleObjectImportRequest,
    }
    if request_type in builders:
        return builders[request_type](cfg, login, pswd)
    raise NotImplementedError('Not supported request type - {}'.format(request_type))
|
java
|
/**
 * Returns the list of launch configuration names, lazily creating the
 * backing {@code SdkInternalList} on first access so callers never
 * observe null.
 *
 * @return the (possibly empty) mutable list of launch configuration names
 */
public java.util.List<String> getLaunchConfigurationNames() {
    if (launchConfigurationNames == null) {
        launchConfigurationNames = new com.amazonaws.internal.SdkInternalList<String>();
    }
    return launchConfigurationNames;
}
|
java
|
/**
 * Returns the resource type id of the sub-sitemap folder type.
 *
 * @return the sub-sitemap type id, or the unknown-folder fallback id when
 *         the type cannot be resolved
 * @throws CmsRpcException NOTE(review): presumably thrown by
 *         {@code error(e)} when it converts the loader exception — confirm
 */
@SuppressWarnings("deprecation")
private int getSubsitemapType() throws CmsRpcException {

    try {
        return OpenCms.getResourceManager().getResourceType(
            CmsResourceTypeFolderSubSitemap.TYPE_SUBSITEMAP).getTypeId();
    } catch (CmsLoaderException e) {
        // Report the failure and fall through to the fallback type id.
        error(e);
    }
    return CmsResourceTypeUnknownFolder.getStaticTypeId();
}
|
python
|
def get_action(self, action=None):
    """Returns action to take after call.

    :param action: when truthy, replaces ``self.action`` before validation.
    :raises ValueError: if the resulting action is not a valid choice.
    """
    if action:
        self.action = action
    chosen = self.action
    if chosen in AjaxResponseAction.choices:
        return chosen
    raise ValueError(
        "Invalid action selected: '{}'".format(chosen))
|
python
|
def download_stories(self,
                     userids: Optional[List[Union[int, Profile]]] = None,
                     fast_update: bool = False,
                     filename_target: Optional[str] = ':stories',
                     storyitem_filter: Optional[Callable[[StoryItem], bool]] = None) -> None:
    """
    Download available stories from user followees or all stories of users whose ID are given.
    Does not mark stories as seen.
    To use this, one needs to be logged in

    :param userids: List of user IDs or Profiles to be processed in terms of downloading their stories
    :param fast_update: If true, abort when first already-downloaded picture is encountered
    :param filename_target: Replacement for {target} in dirname_pattern and filename_pattern
           or None if profile name should be used instead
    :param storyitem_filter: function(storyitem), which returns True if given StoryItem should be downloaded
    """
    if not userids:
        self.context.log("Retrieving all visible stories...")
    else:
        # Normalize Profile objects to plain user ids.
        userids = [p if isinstance(p, int) else p.userid for p in userids]

    for user_story in self.get_stories(userids):
        name = user_story.owner_username
        self.context.log("Retrieving stories from profile {}.".format(name))
        totalcount = user_story.itemcount
        count = 1
        for item in user_story.get_items():
            # Apply the optional per-item filter before downloading.
            if storyitem_filter is not None and not storyitem_filter(item):
                self.context.log("<{} skipped>".format(item), flush=True)
                continue
            self.context.log("[%3i/%3i] " % (count, totalcount), end="", flush=True)
            count += 1
            # error_catcher logs failures without aborting the whole run.
            with self.context.error_catcher('Download story from user {}'.format(name)):
                downloaded = self.download_storyitem(item, filename_target if filename_target else name)
                # fast_update: stop at the first already-downloaded item.
                if fast_update and not downloaded:
                    break
|
python
|
def file_open_ex(self, path, access_mode, open_action, sharing_mode, creation_mode, flags):
    """Opens a file and creates a :py:class:`IGuestFile` object that
    can be used for further operations, extended version.

    in path of type str
        Path to file to open. Guest path style.

    in access_mode of type :class:`FileAccessMode`
        The file access mode (read, write and/or append).
        See :py:class:`FileAccessMode` for details.

    in open_action of type :class:`FileOpenAction`
        What action to take depending on whether the file exists or not.
        See :py:class:`FileOpenAction` for details.

    in sharing_mode of type :class:`FileSharingMode`
        The file sharing mode in the guest. This parameter is currently
        ignore for all guest OSes. It will in the future be implemented for
        Windows, OS/2 and maybe Solaris guests only, the others will ignore it.
        Use :py:attr:`FileSharingMode.all_p` .

    in creation_mode of type int
        The UNIX-style access mode mask to create the file with if @a openAction
        requested the file to be created (otherwise ignored). Whether/how all
        three access groups and associated access rights are realized is guest
        OS dependent. The API does the best it can on each OS.

    in flags of type :class:`FileOpenExFlag`
        Zero or more :py:class:`FileOpenExFlag` values.

    return file_p of type :class:`IGuestFile`
        :py:class:`IGuestFile` object representing the opened file.

    raises :class:`VBoxErrorObjectNotFound`
        File to open was not found.

    raises :class:`VBoxErrorIprtError`
        Error while opening the file.
    """
    # Generated-binding style argument validation.
    if not isinstance(path, basestring):
        raise TypeError("path can only be an instance of type basestring")
    if not isinstance(access_mode, FileAccessMode):
        raise TypeError("access_mode can only be an instance of type FileAccessMode")
    if not isinstance(open_action, FileOpenAction):
        raise TypeError("open_action can only be an instance of type FileOpenAction")
    if not isinstance(sharing_mode, FileSharingMode):
        raise TypeError("sharing_mode can only be an instance of type FileSharingMode")
    if not isinstance(creation_mode, baseinteger):
        raise TypeError("creation_mode can only be an instance of type baseinteger")
    if not isinstance(flags, list):
        raise TypeError("flags can only be an instance of type list")
    # NOTE(review): only the first 10 flags are type-checked here — confirm
    # this cap is intentional (it matches the generated-binding pattern).
    for a in flags[:10]:
        if not isinstance(a, FileOpenExFlag):
            raise TypeError(
                "array can only contain objects of type FileOpenExFlag")
    file_p = self._call("fileOpenEx",
                        in_p=[path, access_mode, open_action, sharing_mode, creation_mode, flags])
    file_p = IGuestFile(file_p)
    return file_p
|
python
|
def _ensure_data(values, dtype=None):
    """
    routine to ensure that our data is of the correct
    input dtype for lower-level routines

    This will coerce:
    - ints -> int64
    - uint -> uint64
    - bool -> uint64 (TODO this should be uint8)
    - datetimelike -> i8
    - datetime64tz -> i8 (in local tz)
    - categorical -> codes

    Parameters
    ----------
    values : array-like
    dtype : pandas_dtype, optional
        coerce to this dtype

    Returns
    -------
    (ndarray, pandas_dtype, algo dtype as a string)
    """

    # we check some simple dtypes first
    # NOTE: branch order matters — bool must be tested before the integer
    # branches, and object before complex.
    try:
        if is_object_dtype(dtype):
            return ensure_object(np.asarray(values)), 'object', 'object'
        if is_bool_dtype(values) or is_bool_dtype(dtype):
            # we are actually coercing to uint64
            # until our algos support uint8 directly (see TODO)
            return np.asarray(values).astype('uint64'), 'bool', 'uint64'
        elif is_signed_integer_dtype(values) or is_signed_integer_dtype(dtype):
            return ensure_int64(values), 'int64', 'int64'
        elif (is_unsigned_integer_dtype(values) or
              is_unsigned_integer_dtype(dtype)):
            return ensure_uint64(values), 'uint64', 'uint64'
        elif is_float_dtype(values) or is_float_dtype(dtype):
            return ensure_float64(values), 'float64', 'float64'
        elif is_object_dtype(values) and dtype is None:
            return ensure_object(np.asarray(values)), 'object', 'object'
        elif is_complex_dtype(values) or is_complex_dtype(dtype):

            # ignore the fact that we are casting to float
            # which discards complex parts
            with catch_warnings():
                simplefilter("ignore", np.ComplexWarning)
                values = ensure_float64(values)
            return values, 'float64', 'float64'

    except (TypeError, ValueError, OverflowError):
        # if we are trying to coerce to a dtype
        # and it is incompat this will fall thru to here
        return ensure_object(values), 'object', 'object'

    # datetimelike
    # All datetime-like values are represented by their int64 epoch view.
    if (needs_i8_conversion(values) or
            is_period_dtype(dtype) or
            is_datetime64_any_dtype(dtype) or
            is_timedelta64_dtype(dtype)):
        if is_period_dtype(values) or is_period_dtype(dtype):
            from pandas import PeriodIndex
            values = PeriodIndex(values)
            dtype = values.dtype
        elif is_timedelta64_dtype(values) or is_timedelta64_dtype(dtype):
            from pandas import TimedeltaIndex
            values = TimedeltaIndex(values)
            dtype = values.dtype
        else:
            # Datetime
            from pandas import DatetimeIndex
            values = DatetimeIndex(values)
            dtype = values.dtype

        return values.asi8, dtype, 'int64'

    elif (is_categorical_dtype(values) and
          (is_categorical_dtype(dtype) or dtype is None)):
        # Categoricals are represented by their integer codes.
        values = getattr(values, 'values', values)
        values = values.codes
        dtype = 'category'

        # we are actually coercing to int64
        # until our algos support int* directly (not all do)
        values = ensure_int64(values)

        return values, dtype, 'int64'

    # we have failed, return object
    values = np.asarray(values, dtype=np.object)
    return ensure_object(values), 'object', 'object'
|
python
|
def temporal_louvain(tnet, resolution=1, intersliceweight=1, n_iter=100, negativeedge='ignore', randomseed=None, consensus_threshold=0.5, temporal_consensus=True, njobs=1):
    r"""
    Louvain clustering for a temporal network.

    Parameters
    -----------
    tnet : array, dict, TemporalNetwork
        Input network
    resolution : int
        resolution of Louvain clustering ($\gamma$)
    intersliceweight : int
        interslice weight of multilayer clustering ($\omega$). Must be positive.
    n_iter : int
        Number of iterations to run louvain for
    randomseed : int
        Set for reproduceability
    negativeedge : str
        If there are negative edges, what should be done with them.
        Options: 'ignore' (i.e. set to 0). More options to be added.
    consensus_threshold : float (0.5 default)
        When creating consensus matrix to average over number of iterations, keep values when the consensus is this amount.
    temporal_consensus : bool
        If True, relabel communities so labels are consistent across time points.
    njobs : int
        Number of parallel worker processes used per iteration batch.

    Returns
    -------
    communities : array (node,time)
        node,time array of community assignment

    Notes
    -------
    Runs ``n_iter`` Louvain passes on the supra-adjacency matrix, builds a
    consensus matrix from the results, and repeats until the consensus
    matrix stops changing (or no consensus can be formed).
    """
    tnet = process_input(tnet, ['C', 'G', 'TN'], 'TN')
    # Divide resolution by the number of timepoints
    resolution = resolution / tnet.T
    supranet = create_supraadjacency_matrix(
        tnet, intersliceweight=intersliceweight)
    if negativeedge == 'ignore':
        # Drop negative edges entirely (equivalent to setting them to 0).
        supranet = supranet[supranet['weight'] > 0]
    nxsupra = tnet_to_nx(supranet)
    np.random.seed(randomseed)
    while True:
        comtmp = []
        # Run n_iter independent Louvain passes in parallel.
        with ProcessPoolExecutor(max_workers=njobs) as executor:
            job = {executor.submit(_run_louvain, nxsupra, resolution, tnet.N, tnet.T) for n in range(n_iter)}
            for j in as_completed(job):
                comtmp.append(j.result())
        comtmp = np.stack(comtmp)
        comtmp = comtmp.transpose()
        # Reshape flat supra-node labels back to (node, time, iteration).
        comtmp = np.reshape(comtmp, [tnet.N, tnet.T, n_iter], order='F')
        if n_iter == 1:
            break
        nxsupra_old = nxsupra
        nxsupra = make_consensus_matrix(comtmp, consensus_threshold)
        # If there was no consensus, there are no communities possible, return
        if nxsupra is None:
            break
        # Converged: the consensus matrix no longer changes between rounds.
        if (nx.to_numpy_array(nxsupra, nodelist=np.arange(tnet.N*tnet.T)) == nx.to_numpy_array(nxsupra_old, nodelist=np.arange(tnet.N*tnet.T))).all():
            break
    communities = comtmp[:, :, 0]
    if temporal_consensus == True:
        communities = make_temporal_consensus(communities)
    return communities
|
java
|
/**
 * Retrieves the App Service detector response for a deployment slot.
 * Validates the required parameters, issues the REST call, and unwraps the
 * raw HTTP response into a {@link ServiceResponse} via the delegate.
 *
 * @param resourceGroupName name of the resource group (required)
 * @param siteName site name (required)
 * @param detectorName detector id (required)
 * @param slot deployment slot name (required)
 * @param startTime optional query start time
 * @param endTime optional query end time
 * @param timeGrain optional aggregation granularity
 * @return an Observable emitting the wrapped detector response
 */
public Observable<ServiceResponse<DetectorResponseInner>> getSiteDetectorResponseSlotWithServiceResponseAsync(String resourceGroupName, String siteName, String detectorName, String slot, DateTime startTime, DateTime endTime, String timeGrain) {
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (siteName == null) {
        throw new IllegalArgumentException("Parameter siteName is required and cannot be null.");
    }
    if (detectorName == null) {
        throw new IllegalArgumentException("Parameter detectorName is required and cannot be null.");
    }
    if (slot == null) {
        throw new IllegalArgumentException("Parameter slot is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    return service.getSiteDetectorResponseSlot(resourceGroupName, siteName, detectorName, slot, this.client.subscriptionId(), startTime, endTime, timeGrain, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DetectorResponseInner>>>() {
            @Override
            public Observable<ServiceResponse<DetectorResponseInner>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize / validate the wire response.
                    ServiceResponse<DetectorResponseInner> clientResponse = getSiteDetectorResponseSlotDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
|
java
|
/**
 * Loads the template at {@code path}, or returns null when no source can
 * be found. The located source is always closed by the try-with-resources.
 *
 * @param path template path passed to the source loader
 * @return the parsed template, or null when the path resolves to nothing
 * @throws IOException    if reading the source fails
 * @throws ParseException if the template cannot be parsed
 */
public Template load(String path) throws IOException, ParseException {
    try(TemplateSource source = sourceLoader.find(path)) {
        return (source == null) ? null : load(path, source);
    }
}
|
java
|
/**
 * Returns the sort icon for the given column header, or null when the
 * column is not the current sort column.
 * NOTE(review): the UNSORTED case maps to the ASCENDING icon here —
 * possibly a preview of the next sort direction, confirm it is intended.
 */
@Override
protected Icon getIcon(JTable table, int column) {
    SortKey sortKey = getSortKey(table, column);
    // Sort keys store model indices; compare in view coordinates.
    if (sortKey != null && table.convertColumnIndexToView(sortKey.getColumn()) == column) {
        SortOrder sortOrder = sortKey.getSortOrder();
        switch (sortOrder) {
            case ASCENDING:
                return VerticalSortIcon.ASCENDING;
            case DESCENDING:
                return VerticalSortIcon.DESCENDING;
            case UNSORTED:
                return VerticalSortIcon.ASCENDING;
        }
    }
    return null;
}
|
python
|
def prepare_image_question_encoder(image_feat, question, hparams):
  """Prepare encoder.

  Args:
    image_feat: a Tensor.
    question: a Tensor.
    hparams: run hyperparameters

  Returns:
    encoder_input: a Tensor, bottom of encoder stack
    encoder_self_attention_bias: a bias tensor for use in encoder self-attention
    encoder_decoder_attention_bias: a bias tensor for use in encoder-decoder
      attention (same padding mask as the self-attention bias)
  """
  encoder_input = tf.concat([image_feat, question], axis=1)
  # Padding is computed on the concatenation *before* positional signals are
  # added, so all-zero embeddings are still recognizable as padding.
  encoder_padding = common_attention.embedding_to_padding(encoder_input)
  ignore_padding = common_attention.attention_bias_ignore_padding(
      encoder_padding)
  encoder_self_attention_bias = ignore_padding
  encoder_decoder_attention_bias = ignore_padding
  # Usual case - not a packed dataset.
  if hparams.pos == "timing":
    # Positional signal is added to the question only, not the image features.
    question = common_attention.add_timing_signal_1d(question)
  elif hparams.pos == "emb":
    question = common_attention.add_positional_embedding(
        question, hparams.max_length, "inputs_positional_embedding",
        None)
  # Re-concatenate so encoder_input picks up the positional signals.
  encoder_input = tf.concat([image_feat, question], axis=1)

  return (encoder_input, encoder_self_attention_bias,
          encoder_decoder_attention_bias)
|
python
|
def get_app_template_dir(app_name):
    """
    Return the ``templates`` directory for the installed app *app_name*.

    We do not use django.db.models.get_app, because this will fail if an
    app does not have any models.

    The result (including a ``None`` miss) is memoised in ``_cache``.
    Returns a full path, or None if the app was not found.
    """
    try:
        return _cache[app_name]
    except KeyError:
        pass
    template_dir = None
    for app in settings.INSTALLED_APPS:
        if app.split('.')[-1] != app_name:
            continue
        # Do not hide import errors; these should never happen at this
        # point anyway
        mod = import_module(app)
        template_dir = join(abspath(dirname(mod.__file__)), 'templates')
        break
    _cache[app_name] = template_dir
    return template_dir
|
python
|
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'score') and self.score is not None:
_dict['score'] = self.score
return _dict
|
python
|
def write(self, offset, value):
    """
    .. _write:

    Writes the memory word at ``offset`` to ``value``.

    Might raise ReadOnlyError_, if the device is read-only.
    Might raise AddressError_, if the offset exceeds the size of the device.
    """
    # Bit 1 of the mode mask grants write access.
    writable = self.mode & 0b10
    if not writable:
        raise ReadOnlyError("Device is Read-Only")
    if offset >= self.size:
        raise AddressError("Offset({}) not in address space({})".format(offset, self.size))
    self.repr_[offset].setvalue(value)
|
python
|
def value_dp_matrix(self):
    """
    :return: DataProperty for table data.
    :rtype: list
    """
    # Return the cached matrix when it was already computed.
    if self.__value_dp_matrix is not None:
        return self.__value_dp_matrix
    matrix = to_value_matrix(self.headers, self.rows)
    self.__value_dp_matrix = self.__dp_extractor.to_dp_matrix(matrix)
    return self.__value_dp_matrix
|
java
|
/**
 * Maps a data vector into scaled space by applying the per-dimension
 * scale transform element-wise.
 *
 * @param data input vector in data space
 * @return a new vector of the same length in scaled space
 */
@Override
public double[] projectDataToScaledSpace(double[] data) {
    final double[] scaled = new double[data.length];
    for(int dim = 0; dim < scaled.length; dim++) {
        scaled[dim] = scales[dim].getScaled(data[dim]);
    }
    return scaled;
}
|
python
|
def setModel(self, model):
    """
    Reimplements the **umbra.ui.views.Abstract_QTreeView.setModel** method.

    :param model: Model to set.
    :type model: QObject
    """
    LOGGER.debug("> Setting '{0}' model.".format(model))

    # A falsy model is ignored; the base-class model stays untouched.
    if model:
        umbra.ui.views.Abstract_QTreeView.setModel(self, model)
|
java
|
/**
 * Authenticates a client X.509 certificate chain against the configured
 * JAAS entry.
 *
 * @param certificateChain the (sensitive) client certificate chain
 * @return the authenticated {@link Subject}
 * @throws AuthenticationException if authentication fails
 */
@Override
public Subject authenticate(@Sensitive X509Certificate[] certificateChain) throws AuthenticationException {
    AuthenticationData authenticationData = createAuthenticationData(certificateChain);
    return authenticationService.authenticate(jaasEntryName, authenticationData, null);
}
|
java
|
/**
 * Unqueues every buffer currently queued on this OpenAL source.
 */
private void removeBuffers() {
    IntBuffer unqueued = BufferUtils.createIntBuffer(1);
    // Query the queue length once, then unqueue exactly that many buffers.
    for (int remaining = AL10.alGetSourcei(source, AL10.AL_BUFFERS_QUEUED); remaining > 0; remaining--) {
        AL10.alSourceUnqueueBuffers(source, unqueued);
    }
}
|
java
|
/**
 * Counts non-overlapping, ASCII-case-insensitive occurrences of
 * {@code word} within the first {@code len} bytes of {@code buff}.
 *
 * @param buff buffer holding the text (treated as one char per byte)
 * @param len  number of valid bytes in {@code buff}
 * @param word the word to search for
 * @return the number of non-overlapping matches
 */
public static int countOccurrences(byte[] buff, int len, String word) {
    final int wordLen = word.length();
    int count = 0;
    for (int pos = 0; pos + wordLen <= len; pos++) {
        if (regionMatchesIgnoreCase(buff, pos, word)) {
            count++;
            // Skip past this match so matches never overlap.
            pos += wordLen - 1;
        }
    }
    return count;
}

/** ASCII-case-insensitive comparison of buff[start..start+word.length()) with word. */
private static boolean regionMatchesIgnoreCase(byte[] buff, int start, String word) {
    for (int d = 0; d < word.length(); d++) {
        char a = (char) buff[start + d];
        if (a >= 'A' && a <= 'Z') a += 'a' - 'A';
        char b = word.charAt(d);
        if (b >= 'A' && b <= 'Z') b += 'a' - 'A';
        if (a != b) return false;
    }
    return true;
}
|
java
|
/**
 * Parses a date string in "compressed" ISO-8601 format using the shared
 * {@code compressedIso8601DateFormat} formatter.
 * NOTE(review): the exact pattern is defined by the formatter declared
 * elsewhere — confirm before relying on a specific layout.
 *
 * @param dateString the date string to parse
 * @return the parsed {@link Date}
 */
public static Date parseCompressedISO8601Date(String dateString) {
    try {
        return new Date(compressedIso8601DateFormat.parseMillis(dateString));
    } catch (RuntimeException ex) {
        // Normalize parser failures through the shared exception handler.
        throw handleException(ex);
    }
}
|
python
|
def unblock_events(self):
    """
    Allows the widget to send signals.

    Re-enables both signal emission and repaints on the wrapped Qt
    widget; counterpart of a prior block/disable call.
    """
    self._widget.blockSignals(False)
    self._widget.setUpdatesEnabled(True)
|
java
|
/**
 * Updates a certificate: runs the standard pre-execution hooks on the
 * request and delegates to the generated execute method.
 *
 * @param request the update-certificate request
 * @return the service result
 */
@Override
public UpdateCertificateResult updateCertificate(UpdateCertificateRequest request) {
    request = beforeClientExecution(request);
    return executeUpdateCertificate(request);
}
|
python
|
def process_mav(self, mlog, flightmode_selections):
    '''process one file

    Walks every matching message in *mlog*, extracting the configured
    fields via ``self.add_data``, optionally restricted to the
    flight-mode spans selected in *flightmode_selections*.
    '''
    self.vars = {}
    # Index into self.flightmode_list while scanning messages in time order.
    idx = 0
    # If no flight mode is selected at all, plot data from the whole log.
    all_false = True
    for s in flightmode_selections:
        if s:
            all_false = False
    # pre-calc right/left axes
    self.num_fields = len(self.fields)
    for i in range(0, self.num_fields):
        f = self.fields[i]
        # ":2" suffix selects the second (right-hand) axis for this field.
        if f.endswith(":2"):
            self.axes[i] = 2
            f = f[:-2]
        # ":1" suffix restricts the field to the first instance only.
        if f.endswith(":1"):
            self.first_only[i] = True
            f = f[:-2]
        self.fields[i] = f

    # see which fields are simple
    self.simple_field = []
    for i in range(0, self.num_fields):
        f = self.fields[i]
        # Simple fields have the form TYPE.member and can be fetched
        # directly rather than evaluated as an expression.
        m = re.match('^([A-Z][A-Z0-9_]*)[.]([A-Za-z_][A-Za-z0-9_]*)$', f)
        if m is None:
            self.simple_field.append(None)
        else:
            self.simple_field.append((m.group(1),m.group(2)))

    if len(self.flightmode_list) > 0:
        # prime the timestamp conversion
        self.timestamp_to_days(self.flightmode_list[0][1])

    while True:
        msg = mlog.recv_match(type=self.msg_types)
        if msg is None:
            break
        if msg.get_type() not in self.msg_types:
            continue
        if self.condition:
            if not mavutil.evaluate_condition(self.condition, mlog.messages):
                continue
        tdays = self.timestamp_to_days(msg._timestamp)

        if all_false or len(flightmode_selections) == 0:
            self.add_data(tdays, msg, mlog.messages)
        else:
            # Advance past flight-mode spans that ended before this message;
            # otherwise include the message only when its span is selected.
            if idx < len(self.flightmode_list) and msg._timestamp >= self.flightmode_list[idx][2]:
                idx += 1
            elif (idx < len(flightmode_selections) and flightmode_selections[idx]):
                self.add_data(tdays, msg, mlog.messages)
|
java
|
/**
 * Checks whether the first {@code e} dimensions of this unit and their
 * interval bounds match those of {@code other}.
 * {@code bounds} stores [lower, upper] pairs per dimension, hence j += 2.
 *
 * @param other the unit to compare against
 * @param e number of leading dimensions to compare
 * @return true when all compared dimensions and bounds are equal
 */
private boolean checkDimensions(CLIQUEUnit other, int e) {
    for(int i = 0, j = 0; i < e; i++, j += 2) {
        // BUGFIX: the upper bound was compared against itself
        // (bounds[j + 1] != bounds[j + 1]); compare against other's bound.
        if(dims[i] != other.dims[i] || bounds[j] != other.bounds[j] || bounds[j + 1] != other.bounds[j + 1]) {
            return false;
        }
    }
    return true;
}
|
java
|
/**
 * Normalizes {@code values} in place so its entries sum to 1, and returns
 * the same array for convenience.
 *
 * @param values array to normalize in place
 * @return the (mutated) input array
 */
public static double[] normalize(final double[] values) {
    // Multiply by the reciprocal so the loop does cheap multiplications.
    final double inv = 1.0/DoubleAdder.sum(values);
    for (int i = 0; i < values.length; i++) {
        values[i] = values[i]*inv;
    }
    return values;
}
|
python
|
def markInputline( self, markerString = ">!<" ):
"""Extracts the exception line from the input string, and marks
the location of the exception with a special symbol.
"""
line_str = self.line
line_column = self.column - 1
if markerString:
line_str = "".join((line_str[:line_column],
markerString, line_str[line_column:]))
return line_str.strip()
|
python
|
def build_album_art_full_uri(self, url):
    """Ensure an Album Art URI is an absolute URI.

    Args:
        url (str): the album art URI.

    Returns:
        str: An absolute URI.
    """
    # Relative URIs returned by the device omit the host; prefix them with
    # the speaker's address and the standard Sonos port (1400).
    if url.startswith(('http:', 'https:')):
        return url
    return 'http://' + self.soco.ip_address + ':1400' + url
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.