Dataset dump: each record is a `language` tag (2 classes: java, python) followed by a
`func_code_string` column (string lengths 63 – 466k). Records are delimited by `|` lines below.
|---|---|
java
|
/**
 * Evaluates {@code value} via the shared expression utility.
 *
 * @param value        the raw value (possibly an expression) to evaluate
 * @param defaultValue fallback supplied to the expression utility; presumably
 *                     returned when evaluation yields nothing — confirm in
 *                     {@code ExpressionUtil#eval}
 * @param <T>          the expected result type
 * @return the result of {@code getExpressionUtil().eval(value, defaultValue)}
 */
protected <T> T eval(Object value, T defaultValue) {
    final T evaluated = getExpressionUtil().eval(value, defaultValue);
    return evaluated;
}
|
python
|
def already_downloaded(track, title, filename):
    """
    Returns True if the file has already been downloaded
    """
    # Relies on the module-level CLI argument dict (docopt-style keys).
    global arguments
    already_downloaded = False
    # The raw audio file is already on disk.
    if os.path.isfile(filename):
        already_downloaded = True
    # FLAC mode: the converted .flac counterpart already exists
    # (filename[:-4] strips the 3-char extension plus the dot).
    if arguments['--flac'] and can_convert(filename) \
            and os.path.isfile(filename[:-4] + ".flac"):
        already_downloaded = True
    # The track is recorded in the download archive.
    if arguments['--download-archive'] and in_download_archive(track):
        already_downloaded = True
    # Override: in FLAC mode a still-unconverted source file means the
    # work is NOT finished — note this also overrides the archive check
    # above (order matters here).
    if arguments['--flac'] and can_convert(filename) and os.path.isfile(filename):
        already_downloaded = False
    if already_downloaded:
        # With -c (continue) or --remove, skip quietly; otherwise abort.
        if arguments['-c'] or arguments['--remove']:
            logger.info('Track "{0}" already downloaded.'.format(title))
            return True
        else:
            logger.error('Track "{0}" already exists!'.format(title))
            logger.error('Exiting... (run again with -c to continue)')
            sys.exit(0)
    return False
|
python
|
def sanitize(self):
    '''
    Check if the current settings conform to the LISP specifications and
    fix where possible.

    Raises:
        ValueError: if a field is out of spec and cannot be fixed in place.
    '''
    # WARNING: http://tools.ietf.org/html/draft-ietf-lisp-ddt-00
    # does not define this field so the description is taken from
    # http://tools.ietf.org/html/draft-ietf-lisp-24
    #
    # Record TTL: The time in minutes the recipient of the Map-Reply will
    # store the mapping. If the TTL is 0, the entry SHOULD be removed
    # from the cache immediately. If the value is 0xffffffff, the
    # recipient can decide locally how long to store the mapping.
    if not isinstance(self.ttl, numbers.Integral) \
            or self.ttl < 0 or self.ttl > 0xffffffff:
        raise ValueError('Invalid TTL')
    # ACT: The "action" field of the mapping record in a Map-Referral
    # message encodes 6 action types. The values for the action types are:
    #
    # NODE-REFERRAL (0): Sent by a DDT node with a child delegation which
    # is authoritative for the EID.
    #
    # MS-REFERRAL (1): Sent by a DDT node that has information about Map
    # Server(s) for the EID but it is not one of the Map Servers listed,
    # i.e. the DDT-Node sending the referral is not a Map Server.
    #
    # MS-ACK (2): Sent by a DDT Map Server that has one or more ETR
    # registered for the EID.
    #
    # MS-NOT-REGISTERED (3): Sent by a DDT Map Server that is configured
    # for the EID-prefix but for which no ETRs are registered.
    #
    # DELEGATION-HOLE (4): Sent by an intermediate DDT node with
    # authoritative configuration covering the requested EID but without
    # any child delegation for the EID. Also sent by a DDT Map Server
    # with authoritative configuration covering the requested EID but
    # for which no specific site ETR is configured.
    #
    # NOT-AUTHORITATIVE (5): Sent by a DDT node that does not have
    # authoritative configuration for the requested EID. The EID-prefix
    # returned MUST be the original requested EID and the TTL MUST be
    # set to 0. However, if such a DDT node has a child delegation
    # covering the requested EID, it may choose to return NODE-REFERRAL
    # or MS-REFERRAL as appropriate. A DDT Map Server with site
    # information may choose to return of type MS-ACK or MS-NOT-
    # REGISTERED as appropriate.
    if self.action not in (self.ACT_NODE_REFERRAL,
                           self.ACT_MS_REFERRAL,
                           self.ACT_MS_ACK,
                           self.ACT_MS_NOT_REGISTERED,
                           self.ACT_DELEGATION_HOLE,
                           self.ACT_NOT_AUTHORITATIVE):
        raise ValueError('Invalid action')
    # WARNING: http://tools.ietf.org/html/draft-ietf-lisp-ddt-00
    # does not define this field so the description is taken from
    # http://tools.ietf.org/html/draft-ietf-lisp-24
    #
    # A: The Authoritative bit, when sent is always set to 1 by an ETR.
    # When a Map-Server is proxy Map-Replying [LISP-MS] for a LISP site,
    # the Authoritative bit is set to 0. This indicates to requesting
    # ITRs that the Map-Reply was not originated by a LISP node managed
    # at the site that owns the EID-prefix.
    if not isinstance(self.authoritative, bool):
        raise ValueError('Authoritative flag must be a boolean')
    # Incomplete: The "I" bit indicates that a DDT node's referral-set of
    # locators is incomplete and the receiver of this message should not
    # cache the referral
    if not isinstance(self.incomplete, bool):
        raise ValueError('Incomplete flag must be a boolean')
    # A DDT sets the "incomplete" flag, the TTL, and the Action Type field
    # as follows:
    #
    # -------------------------------------------------------------------
    # Type (Action field)  Incomplete  Referral-set  TTL values
    # -------------------------------------------------------------------
    # 0 NODE-REFERRAL       NO          YES           1440
    # 1 MS-REFERRAL         NO          YES           1440
    # 2 MS-ACK              *           *             1440
    # 3 MS-NOT-REGISTERED   *           *             1
    # 4 DELEGATION-HOLE     NO          NO            15
    # 5 NOT-AUTHORITATIVE   YES         NO            0
    # -------------------------------------------------------------------
    #
    # *: The "Incomplete" flag setting on Map Server originated referral of
    # MS-REFERRAL and MS-NOT-REGISTERED types depend on whether the Map
    # Server has the full peer Map Server configuration for the same
    # prefix and has encoded the information in the mapping record.
    # Incomplete bit is not set when the Map Server has encoded the
    # information, which means the referral-set includes all the RLOCs
    # of all Map Servers that serve the prefix. It is set when the Map
    # Server has not encoded the Map Server set information.
    if self.action == self.ACT_NODE_REFERRAL:
        if self.incomplete:
            raise ValueError('NODE-REFERRAL messages cannot be incomplete')
        if not self.locator_records:
            raise ValueError('NODE-REFERRAL messages must have locators')
        if self.ttl != 1440:
            raise ValueError('NODE-REFERRAL messages must have TTL=1440')
    elif self.action == self.ACT_MS_REFERRAL:
        if self.incomplete:
            raise ValueError('MS-REFERRAL messages cannot be incomplete')
        if not self.locator_records:
            raise ValueError('MS-REFERRAL messages must have locators')
        if self.ttl != 1440:
            raise ValueError('MS-REFERRAL messages must have TTL=1440')
    elif self.action == self.ACT_MS_ACK:
        if self.ttl != 1440:
            raise ValueError('MS-ACK messages must have TTL=1440')
    elif self.action == self.ACT_MS_NOT_REGISTERED:
        if self.ttl != 1:
            raise ValueError('MS-NOT-REGISTERED messages must have '
                             'TTL=1')
    elif self.action == self.ACT_DELEGATION_HOLE:
        if self.incomplete:
            raise ValueError('DELEGATION-HOLE messages cannot be '
                             'incomplete')
        if self.locator_records:
            raise ValueError('DELEGATION-HOLE messages can not have '
                             'locators')
        if self.ttl != 15:
            raise ValueError('DELEGATION-HOLE messages must have TTL=15')
    elif self.action == self.ACT_NOT_AUTHORITATIVE:
        if not self.incomplete:
            raise ValueError('NOT-AUTHORITATIVE messages must be '
                             'incomplete')
        if self.locator_records:
            raise ValueError('NOT-AUTHORITATIVE messages can not have '
                             'locators')
        if self.ttl != 0:
            raise ValueError('NOT-AUTHORITATIVE messages must have TTL=0')
    # WARNING: http://tools.ietf.org/html/draft-ietf-lisp-ddt-00
    # does not define this field so the description is taken from
    # http://tools.ietf.org/html/draft-ietf-lisp-24
    #
    # Map-Version Number: When this 12-bit value is non-zero the Map-Reply
    # sender is informing the ITR what the version number is for the
    # EID-record contained in the Map-Reply. The ETR can allocate this
    # number internally but MUST coordinate this value with other ETRs
    # for the site. When this value is 0, there is no versioning
    # information conveyed. The Map-Version Number can be included in
    # Map-Request and Map-Register messages. See Section 6.6.3 for more
    # details.
    if not isinstance(self.map_version, numbers.Integral) \
            or self.map_version < 0 \
            or self.map_version >= 2 ** 12:
        raise ValueError('Invalid map version')
    # EID-prefix: 4 octets if an IPv4 address-family, 16 octets if an IPv6
    # address-family.
    if not isinstance(self.eid_prefix, LCAFInstanceAddress):
        if not isinstance(self.eid_prefix, (IPv4Network, IPv6Network)):
            # NOTE(review): ValueError is given two arguments here; the %r
            # placeholder is never interpolated — confirm intent.
            raise ValueError("Unexpected EID prefix %r", self.eid_prefix)
        # Wrap in LCAF address with Instance ID
        self.eid_prefix = LCAFInstanceAddress(instance_id=0,
                                              address=self.eid_prefix)
    # Check locator records
    # The probed_locator bits aren't used in this context
    for locator_record in self.locator_records:
        if not isinstance(locator_record, LocatorRecord) \
                or locator_record.probed_locator:
            raise ValueError('Invalid Locator record')
        locator_record.sanitize()
    # For each Map-Reply record, the list of Locators in a Locator-Set MUST
    # appear in the same order for each ETR that originates a Map-Reply
    # message. The Locator-Set MUST be sorted in order of ascending IP
    # address where an IPv4 locator address is considered numerically 'less
    # than' an IPv6 locator address.
    self.locator_records.sort(key=LocatorRecord.sort_key)
    # Check signatures
    for dummy in self.signatures:
        # TODO: Implement signatures [LISP-Security]
        pass
|
python
|
def query_dated(num=10, kind='1'):
    '''
    Return the most recently updated wiki rows of the given kind,
    newest first, limited to ``num`` results.
    '''
    matching = TabWiki.select().where(TabWiki.kind == kind)
    newest_first = matching.order_by(TabWiki.time_update.desc())
    return newest_first.limit(num)
|
java
|
/**
 * Returns the request parameter map, transcoding every value from the
 * container's original charset to the configured charset.
 * <p>
 * When both charsets match, the container's map is returned untouched.
 *
 * @return a map of parameter names to (possibly transcoded) values
 */
@Override
public Map<String, String[]> getParameterMap() {
    // Fast path: no transcoding needed when the charsets agree.
    if (this.charset.equalsIgnoreCase(this.originCharset)) {
        return super.getParameterMap();
    }
    Map<String, String[]> original = super.getParameterMap();
    if (original.isEmpty()) {
        return original;
    }
    Map<String, String[]> converted = new HashMap<String, String[]>();
    try {
        // Iterate entries instead of keySet()+get() to avoid double lookups.
        for (Map.Entry<String, String[]> entry : original.entrySet()) {
            String[] rawValues = entry.getValue();
            String[] values = new String[rawValues.length];
            for (int i = 0; i < rawValues.length; i++) {
                // Re-decode: the container parsed the bytes with
                // originCharset; recover the bytes and decode them again
                // with the intended charset.
                values[i] = new String(rawValues[i].getBytes(this.originCharset), this.charset);
            }
            converted.put(entry.getKey(), values);
        }
    } catch (UnsupportedEncodingException e) {
        // A misconfigured charset name: fall back to the container's map
        // rather than returning a partially converted one.
        e.printStackTrace();
        return original;
    }
    return converted;
}
|
python
|
def remove(self, value, _sa_initiator=None):
    """Remove an item by value, consulting the keyfunc for the key.

    Raises:
        sa_exc.InvalidRequestError: if the derived key is absent or the
            keyed collection no longer holds ``value`` (e.g. because the
            key function depends on mutable state).
    """
    key = self.keyfunc(value)
    # Let self[key] raise if key is not in this collection
    # testlib.pragma exempt:__ne__
    if not self.__contains__(key) or value not in self[key]:
        raise sa_exc.InvalidRequestError(
            "Can not remove '%s': collection holds '%s' for key '%s'. "
            "Possible cause: is the MappedCollection key function "
            "based on mutable properties or properties that only obtain "
            "values after flush?" %
            (value, self[key], key))
    # _sa_initiator is forwarded so the attribute-event machinery can tell
    # which operation triggered this removal — presumably; confirm against
    # the __getitem__ override's signature.
    self.__getitem__(key, _sa_initiator).remove(value)
|
java
|
/**
 * Registers a listener that is notified just before the response is
 * committed. Implemented by wrapping the response conduit: the listener
 * fires when the response channel's conduit is created, i.e. immediately
 * before the first bytes are written.
 */
public void addResponseCommitListener(final ResponseCommitListener listener) {
    //technically it is possible to modify the exchange after the response conduit has been created
    //as the response channel should not be retrieved until it is about to be written to
    //if we get complaints about this we can add support for it, however it makes the exchange bigger and the connectors more complex
    addResponseWrapper(new ConduitWrapper<StreamSinkConduit>() {
        @Override
        public StreamSinkConduit wrap(ConduitFactory<StreamSinkConduit> factory, HttpServerExchange exchange) {
            // Notify before delegating so the listener observes the exchange
            // prior to the response being committed.
            listener.beforeCommit(exchange);
            return factory.create();
        }
    });
}
|
python
|
def process(self, sched, coro):
"""Add the calling coro in a waiting for signal queue."""
super(WaitForSignal, self).process(sched, coro)
waitlist = sched.sigwait[self.name]
waitlist.append((self, coro))
if self.name in sched.signals:
sig = sched.signals[self.name]
if sig.recipients <= len(waitlist):
sig.process(sched, sig.coro)
del sig.coro
del sched.signals[self.name]
|
java
|
/**
 * Depth-first search for the first component in the hierarchy rooted at
 * {@code parent} whose name equals {@code name} (the root itself included).
 *
 * @param name   the component name to look for, never {@code null}
 * @param parent the root of the hierarchy to search, never {@code null}
 * @return the first matching component, or {@code null} if none matches
 */
public static Component getDescendantNamed(String name, Component parent) {
    Assert.notNull(name, "name");
    Assert.notNull(parent, "parent");
    // Base case: the root itself carries the requested name.
    if (name.equals(parent.getName())) {
        return parent;
    }
    // Leaves cannot have descendants.
    if (!(parent instanceof Container)) {
        return null;
    }
    // Recursive case: probe each child subtree in order.
    for (final Component child : ((Container) parent).getComponents()) {
        final Component match = SwingUtils.getDescendantNamed(name, child);
        if (match != null) {
            return match;
        }
    }
    return null;
}
|
java
|
/**
 * Marshalls the given {@code StopAutomationExecutionRequest}'s fields into
 * the wire format via the supplied {@code ProtocolMarshaller}.
 *
 * @throws SdkClientException if the request is {@code null} or marshalling
 *         fails (the original failure is preserved as the cause)
 */
public void marshall(StopAutomationExecutionRequest stopAutomationExecutionRequest, ProtocolMarshaller protocolMarshaller) {
    if (stopAutomationExecutionRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(stopAutomationExecutionRequest.getAutomationExecutionId(), AUTOMATIONEXECUTIONID_BINDING);
        protocolMarshaller.marshall(stopAutomationExecutionRequest.getType(), TYPE_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure, keeping the cause chained.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Resolves the key used to validate a JWT's signature.
 *
 * @param config     consumer configuration; {@code null} yields {@code null}
 * @param jwtContext the parsed JWT context
 * @param properties additional resolution properties
 * @return the signing key, or {@code null} when no key could be determined
 * @throws KeyException if key resolution fails
 */
Key getSigningKey(JwtConsumerConfig config, JwtContext jwtContext, Map properties) throws KeyException {
    // Guard clause: nothing can be resolved without a configuration.
    if (config == null) {
        if (tc.isDebugEnabled()) {
            Tr.debug(tc, "JWT consumer config object is null");
        }
        return null;
    }
    Key signingKey = getSigningKeyBasedOnSignatureAlgorithm(config, jwtContext, properties);
    if (signingKey == null && tc.isDebugEnabled()) {
        Tr.debug(tc, "A signing key could not be found");
    }
    return signingKey;
}
|
java
|
/**
 * Re-caches this type when it is flagged dirty, then recurses into every
 * child type so the whole subtree is refreshed.
 *
 * @throws CacheReloadException if re-caching a type fails
 */
private void recacheChildren()
    throws CacheReloadException
{
    if (isDirty()) {
        Type.cacheType(this);
    }
    for (final Type child : getChildTypes()) {
        child.recacheChildren();
    }
}
|
java
|
/**
 * Null-safe equality check for two strings.
 *
 * @param s1 first string, may be {@code null}
 * @param s2 second string, may be {@code null}
 * @return {@code true} when both are {@code null}, or when
 *         {@code s1.equals(s2)}
 */
public static boolean areEquals(final String s1, final String s2) {
    // java.util.Objects.equals implements exactly this null-safe contract.
    return java.util.Objects.equals(s1, s2);
}
|
python
|
def new_stats_exporter(options=None, interval=None):
    """Get a stats exporter and running transport thread.

    Create a new `StackdriverStatsExporter` with the given options and start
    periodically exporting stats to stackdriver in the background.

    Fall back to default auth if `options` is null. This will raise
    `google.auth.exceptions.DefaultCredentialsError` if default credentials
    aren't configured.

    See `opencensus.metrics.transport.get_exporter_thread` for details on the
    transport thread.

    :type options: :class:`Options`
    :param options: Options to pass to the exporter
    :type interval: int or float
    :param interval: Seconds between export calls.
    :rtype: :class:`StackdriverStatsExporter`
    :return: The newly-created exporter.
    """
    if options is None:
        _, project_id = google.auth.default()
        options = Options(project_id=project_id)
    if str(options.project_id).strip() == "":
        raise ValueError(ERROR_BLANK_PROJECT_ID)
    # Attach a user-agent identifying this library to outgoing API calls.
    ci = client_info.ClientInfo(client_library_version=get_user_agent_slug())
    client = monitoring_v3.MetricServiceClient(client_info=ci)
    exporter = StackdriverStatsExporter(client=client, options=options)
    # Start the background thread that periodically pushes stats.
    transport.get_exporter_thread(stats.stats, exporter, interval=interval)
    return exporter
|
java
|
/**
 * Invokes the endpoint for the given request and replies when the
 * asynchronous invocation completes, translating failures into error
 * responses via {@code handleException}.
 */
void handle(OngoingRequest ongoingRequest, RequestContext requestContext, Endpoint endpoint) {
    try {
        endpoint.invoke(requestContext)
            .whenComplete((message, throwable) -> {
                try {
                    if (message != null) {
                        ongoingRequest.reply(message);
                    } else if (throwable != null) {
                        // unwrap CompletionException
                        if (throwable instanceof CompletionException) {
                            throwable = throwable.getCause();
                        }
                        handleException(throwable, ongoingRequest);
                    } else {
                        // Neither a result nor an error: log loudly and
                        // surface an IllegalStateException to the caller.
                        LOG.error(
                            "Both message and throwable null in EndpointInvocationHandler for request "
                                + ongoingRequest
                                + " - this shouldn't happen!");
                        handleException(new IllegalStateException("Both message and throwable null"),
                            ongoingRequest);
                    }
                } catch (Throwable t) {
                    // don't try to respond here; just log the fact that responding failed.
                    LOG.error("Exception caught when replying", t);
                }
            });
    } catch (Exception e) {
        // Synchronous failure thrown by invoke() itself.
        handleException(e, ongoingRequest);
    }
}
|
java
|
/**
 * Returns a copy of this time with the nano-of-second field altered.
 * Returns {@code this} unchanged when the value already matches (the
 * short-circuit deliberately precedes validation).
 *
 * @param nanoOfSecond the nano-of-second to set
 * @return a {@code LocalTime} with the requested nanosecond value
 */
public LocalTime withNano(int nanoOfSecond) {
    if (this.nano == nanoOfSecond) {
        return this;
    }
    // Out-of-range values are rejected here by the field's validator.
    NANO_OF_SECOND.checkValidValue(nanoOfSecond);
    return create(hour, minute, second, nanoOfSecond);
}
|
java
|
/**
 * Describes the status of a portfolio share operation: applies the standard
 * pre-execution hooks, then delegates to the generated executor.
 */
@Override
public DescribePortfolioShareStatusResult describePortfolioShareStatus(DescribePortfolioShareStatusRequest request) {
    request = beforeClientExecution(request);
    return executeDescribePortfolioShareStatus(request);
}
|
python
|
def setNeutral(self, aMathObject, deltaName="origin"):
    """Set the neutral object."""
    self._neutral = aMathObject
    # Register a zero delta at the origin location: subtracting the object
    # from itself yields the identity delta for this axis.
    self.addDelta(Location(), aMathObject-aMathObject, deltaName, punch=False, axisOnly=True)
|
java
|
/**
 * Parses a switch statement:
 * {@code switch ( Expression ) '{' CaseClauses '}'}.
 * Assumes the current token is {@code switch}.
 *
 * @return the parsed {@code SwitchStatementTree}
 */
private ParseTree parseSwitchStatement() {
    SourcePosition start = getTreeStartLocation();
    eat(TokenType.SWITCH);
    eat(TokenType.OPEN_PAREN);
    ParseTree expression = parseExpression();
    eat(TokenType.CLOSE_PAREN);
    eat(TokenType.OPEN_CURLY);
    ImmutableList<ParseTree> caseClauses = parseCaseClauses();
    eat(TokenType.CLOSE_CURLY);
    return new SwitchStatementTree(getTreeLocation(start), expression, caseClauses);
}
|
python
|
def _get_formset_data(self):
"""Formats the self.filtered_data in a way suitable for a formset."""
data = []
for datum in self.filtered_data:
form_data = {}
for column in self.columns.values():
value = column.get_data(datum)
form_data[column.name] = value
form_data['id'] = self.get_object_id(datum)
data.append(form_data)
return data
|
java
|
/**
 * Evaluates the mean Silhouette width of a clustering, handling noise and
 * singleton clusters according to the configured {@code noiseOption}, and
 * logs/attaches the results as an evaluation measurement.
 *
 * @param db  database (used for the result hierarchy)
 * @param rel data relation (used for the noise penalty denominator)
 * @param dq  distance query used for all pairwise distances
 * @param c   the clustering to evaluate
 * @return the (possibly noise-penalized) mean silhouette
 */
public double evaluateClustering(Database db, Relation<O> rel, DistanceQuery<O> dq, Clustering<?> c) {
    List<? extends Cluster<?>> clusters = c.getAllClusters();
    MeanVariance msil = new MeanVariance();
    int ignorednoise = 0;
    for(Cluster<?> cluster : clusters) {
        // Note: we treat 1-element clusters the same as noise.
        if(cluster.size() <= 1 || cluster.isNoise()) {
            switch(noiseOption){
            case IGNORE_NOISE:
                ignorednoise += cluster.size();
                continue; // Ignore noise elements
            case TREAT_NOISE_AS_SINGLETONS:
                // As suggested in Rousseeuw, we use 0 for singletons.
                msil.put(0., cluster.size());
                continue;
            case MERGE_NOISE:
                break; // Treat as cluster below
            }
        }
        ArrayDBIDs ids = DBIDUtil.ensureArray(cluster.getIDs());
        double[] as = new double[ids.size()]; // temporary storage.
        DBIDArrayIter it1 = ids.iter(), it2 = ids.iter();
        for(it1.seek(0); it1.valid(); it1.advance()) {
            // a: In-cluster distances
            double a = as[it1.getOffset()]; // Already computed distances
            // Each pair is computed once and accumulated on both sides.
            for(it2.seek(it1.getOffset() + 1); it2.valid(); it2.advance()) {
                final double dist = dq.distance(it1, it2);
                a += dist;
                as[it2.getOffset()] += dist;
            }
            a /= (ids.size() - 1);
            // b: minimum average distance to other clusters:
            double b = Double.POSITIVE_INFINITY;
            for(Cluster<?> ocluster : clusters) {
                if(ocluster == /* yes, reference identity */cluster) {
                    continue; // Same cluster
                }
                if(ocluster.size() <= 1 || ocluster.isNoise()) {
                    switch(noiseOption){
                    case IGNORE_NOISE:
                        continue; // Ignore noise elements
                    case TREAT_NOISE_AS_SINGLETONS:
                        // Treat noise cluster as singletons:
                        for(DBIDIter it3 = ocluster.getIDs().iter(); it3.valid(); it3.advance()) {
                            final double dist = dq.distance(it1, it3);
                            b = dist < b ? dist : b; // Minimum average
                        }
                        continue;
                    case MERGE_NOISE:
                        break; // Treat as cluster below
                    }
                }
                final DBIDs oids = ocluster.getIDs();
                double btmp = 0.;
                for(DBIDIter it3 = oids.iter(); it3.valid(); it3.advance()) {
                    btmp += dq.distance(it1, it3);
                }
                btmp /= oids.size(); // Average
                b = btmp < b ? btmp : b; // Minimum average
            }
            // One cluster only?
            b = b < Double.POSITIVE_INFINITY ? b : a;
            // Silhouette of this point: (b - a) / max(a, b).
            msil.put((b - a) / (b > a ? b : a));
        }
    }
    double penalty = 1.;
    // Only if {@link NoiseHandling#IGNORE_NOISE}:
    if(penalize && ignorednoise > 0) {
        penalty = (rel.size() - ignorednoise) / (double) rel.size();
    }
    final double meansil = penalty * msil.getMean();
    final double stdsil = penalty * msil.getSampleStddev();
    if(LOG.isStatistics()) {
        LOG.statistics(new StringStatistic(key + ".silhouette.noise-handling", noiseOption.toString()));
        if(ignorednoise > 0) {
            LOG.statistics(new LongStatistic(key + ".silhouette.noise", ignorednoise));
        }
        LOG.statistics(new DoubleStatistic(key + ".silhouette.mean", meansil));
        LOG.statistics(new DoubleStatistic(key + ".silhouette.stddev", stdsil));
    }
    EvaluationResult ev = EvaluationResult.findOrCreate(db.getHierarchy(), c, "Internal Clustering Evaluation", "internal evaluation");
    MeasurementGroup g = ev.findOrCreateGroup("Distance-based Evaluation");
    g.addMeasure("Silhouette +-" + FormatUtil.NF2.format(stdsil), meansil, -1., 1., 0., false);
    db.getHierarchy().resultChanged(ev);
    return meansil;
}
|
python
|
def is_chat_admin(user):
    """Return True when ``user`` appears in the chat plugin's 'admins' ACL."""
    from indico_chat.plugin import ChatPlugin
    admin_acl = ChatPlugin.settings.acls
    return admin_acl.contains_user('admins', user)
|
java
|
/**
 * Creates a named identity-style matrix of shape {@code rows x cols},
 * using the default data type ({@code Eye.DEFAULT_DTYPE}).
 *
 * @param name variable name for the result
 * @param rows number of rows
 * @param cols number of columns
 * @return the resulting {@code SDVariable}
 */
public SDVariable eye(String name, int rows, int cols) {
    return eye(name, rows, cols, Eye.DEFAULT_DTYPE);
}
|
java
|
/**
 * Checks whether {@code port} is already in use by any registered
 * supervisor; when it is free, appends it to the port list in the service
 * record for {@code instanceName}.
 *
 * @param instanceName       the instance whose record should receive the port
 * @param supervisorHost     retained for interface compatibility (unused)
 * @param port               the port to check and record
 * @param registryOperations the registry to query and update
 * @return {@code true} when the port is already in use (or the registry
 *         access failed); {@code false} when it was free and has been bound
 */
public static boolean getSetPortUsedBySupervisor(String instanceName, String supervisorHost, int port, RegistryOperations registryOperations) {
    String appPath = RegistryUtils.serviceclassPath(
        JOYConstants.APP_NAME, JOYConstants.APP_TYPE);
    String path = RegistryUtils.servicePath(
        JOYConstants.APP_NAME, JOYConstants.APP_TYPE, instanceName);
    try {
        // Scan every instance's host records for a port list containing this port.
        List<String> instanceNames = registryOperations.list(appPath);
        for (String instance : instanceNames) {
            String servicePath = RegistryUtils.servicePath(
                JOYConstants.APP_NAME, JOYConstants.APP_TYPE, instance);
            Map<String, ServiceRecord> hosts = RegistryUtils.listServiceRecords(registryOperations, servicePath);
            for (String host : hosts.keySet()) {
                // BUG FIX: look up the record of the host being iterated.
                // The previous code queried the constant key JOYConstants.HOST
                // on every pass, so real per-host records were never checked.
                ServiceRecord sr = hosts.get(host);
                String[] portList = sr.get(JOYConstants.PORT_LIST).split(JOYConstants.COMMA);
                for (String usedport : portList) {
                    if (Integer.parseInt(usedport) == port) {
                        return true;
                    }
                }
            }
        }
        // Port is free: append it to (or create) this instance's record.
        if (registryOperations.exists(path)) {
            ServiceRecord sr = registryOperations.resolve(path);
            String[] portList = sr.get(JOYConstants.PORT_LIST).split(JOYConstants.COMMA);
            String portListUpdate = join(portList, JOYConstants.COMMA, true) + String.valueOf(port);
            sr.set(JOYConstants.PORT_LIST, portListUpdate);
            registryOperations.bind(path, sr, BindFlags.OVERWRITE);
            return false;
        } else {
            registryOperations.mknode(path, true);
            ServiceRecord sr = new ServiceRecord();
            String portListUpdate = String.valueOf(port);
            sr.set(JOYConstants.PORT_LIST, portListUpdate);
            registryOperations.bind(path, sr, BindFlags.OVERWRITE);
            return false;
        }
    } catch (Exception ex) {
        // Conservative fallback: on any registry failure report the port as used.
        return true;
    }
}
|
java
|
/**
 * Recursively writes a (possibly nested) object array as JSON.
 * The outermost call does not emit array delimiters; presumably the caller
 * has already opened the surrounding structure — TODO confirm.
 */
private void writeArray(JsonGenerator jsonGenerator, Object[] array, boolean firstInHierarchy) throws IOException, SQLException {
    if(!firstInHierarchy) {
        jsonGenerator.writeStartArray();
    }
    for(int i = 0; i < array.length; i++) {
        if (array[i] instanceof Integer) {
            jsonGenerator.writeNumber((int) array[i]);
        } else if (array[i] instanceof String) {
            // The literal "{}" is used as a sentinel for an empty JSON object.
            if (array[i].equals("{}")) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeEndObject();
            } else {
                jsonGenerator.writeString((String) array[i]);
            }
        } else if (array[i] instanceof Double) {
            jsonGenerator.writeNumber((double) array[i]);
        } else if (array[i] instanceof Boolean) {
            jsonGenerator.writeBoolean((boolean) array[i]);
        } else if (array[i] instanceof Object[]) {
            writeArray(jsonGenerator, (Object[]) array[i], false);
        }
        // NOTE(review): nulls and unrecognized element types are silently
        // skipped, which shifts positions in the output — confirm intended.
    }
    if(!firstInHierarchy) {
        jsonGenerator.writeEndArray();
    }
}
|
python
|
def _generate_O(self, L):
    """Form the overlaps matrix, which is just all the different observed
    combinations of values of pairs of sources
    Note that we only include the k non-abstain values of each source,
    otherwise the model not minimal --> leads to singular matrix
    """
    # Augment the label matrix into indicator form.
    L_aug = self._get_augmented_label_matrix(L)
    # d: number of augmented columns.
    self.d = L_aug.shape[1]
    # Empirical overlaps O = L_aug^T @ L_aug / n as a float torch tensor.
    self.O = torch.from_numpy(L_aug.T @ L_aug / self.n).float()
|
java
|
/**
 * Logs a deserialization error (message plus current reader context) and
 * builds the matching exception. The exception is returned, not thrown —
 * the caller is expected to throw it.
 *
 * @param message the error description to log and embed in the exception
 * @param reader  the reader whose position/context is traced for debugging
 * @return a new {@code JsonDeserializationException} carrying {@code message}
 */
public JsonDeserializationException traceError( String message, JsonReader reader ) {
    getLogger().log( Level.SEVERE, message );
    traceReaderInfo( reader );
    return new JsonDeserializationException( message );
}
|
python
|
def init(spark_home=None, python_path=None, edit_rc=False, edit_profile=False):
    """Make pyspark importable.

    Sets environment variables and adds dependencies to sys.path.
    If no Spark location is provided, will try to find an installation.

    Parameters
    ----------
    spark_home : str, optional, default = None
        Path to Spark installation, will try to find automatically
        if not provided.
    python_path : str, optional, default = None
        Path to Python for Spark workers (PYSPARK_PYTHON),
        will use the currently running Python if not provided.
    edit_rc : bool, optional, default = False
        Whether to attempt to persist changes by appending to shell
        config.
    edit_profile : bool, optional, default = False
        Whether to create an IPython startup file to automatically
        configure and import pyspark.

    Raises
    ------
    ValueError
        If no py4j archive can be found under ``spark_home`` (previously
        this surfaced as a bare IndexError).
    """
    if not spark_home:
        spark_home = find()
    if not python_path:
        python_path = os.environ.get('PYSPARK_PYTHON', sys.executable)
    # ensure SPARK_HOME is defined
    os.environ['SPARK_HOME'] = spark_home
    # ensure PYSPARK_PYTHON is defined
    os.environ['PYSPARK_PYTHON'] = python_path
    if not os.environ.get("PYSPARK_SUBMIT_ARGS", None):
        os.environ["PYSPARK_SUBMIT_ARGS"] = ''
    # add pyspark to sys.path
    spark_python = os.path.join(spark_home, 'python')
    py4j_archives = glob(os.path.join(spark_python, 'lib', 'py4j-*.zip'))
    if not py4j_archives:
        # Fail with a diagnosable message instead of IndexError on [0].
        raise ValueError(
            'Unable to find py4j in {}; your SPARK_HOME may not be '
            'configured correctly'.format(spark_python))
    py4j = py4j_archives[0]
    # Prepend so the bundled pyspark wins over any site-packages copy.
    sys.path[:0] = [spark_python, py4j]
    if edit_rc:
        change_rc(spark_home, spark_python, py4j)
    if edit_profile:
        edit_ipython_profile(spark_home, spark_python, py4j)
|
python
|
def get_url(self, version=None):
    """Return the URL of the bundled asset.

    A fixed bundle URL, when configured, wins outright; otherwise the URL
    is assembled as ``<root>/<filename>.<version>.<type>``, using
    ``get_version()`` when no explicit version is given.
    """
    if self.fixed_bundle_url:
        return self.fixed_bundle_url
    base = os.path.join(self.bundle_url_root, self.bundle_filename)
    resolved_version = version or self.get_version()
    return '%s.%s.%s' % (base, resolved_version, self.bundle_type)
|
python
|
def tree(height=3, is_perfect=False):
    """Generate a random binary tree and return its root node.

    :param height: Height of the tree (default: 3, range: 0 - 9 inclusive).
    :type height: int
    :param is_perfect: If set to True (default: False), a perfect binary tree
        with all levels filled is returned. If set to False, a perfect binary
        tree may still be generated by chance.
    :type is_perfect: bool
    :return: Root node of the binary tree.
    :rtype: binarytree.Node
    :raise binarytree.exceptions.TreeHeightError: If height is invalid.

    **Example**:

    .. doctest::

        >>> from binarytree import tree
        >>>
        >>> root = tree()
        >>>
        >>> root.height
        3

    .. doctest::

        >>> from binarytree import tree
        >>>
        >>> root = tree(height=5, is_perfect=True)
        >>>
        >>> root.height
        5
        >>> root.is_perfect
        True

    .. doctest::

        >>> from binarytree import tree
        >>>
        >>> root = tree(height=20)  # doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
         ...
        TreeHeightError: height must be an int between 0 - 9
    """
    _validate_tree_height(height)
    values = _generate_random_node_values(height)
    # Perfect trees are built level-by-level from the full value list.
    if is_perfect:
        return build(values)
    leaf_count = _generate_random_leaf_count(height)
    root = Node(values.pop(0))
    leaves = set()
    for value in values:
        node = root
        depth = 0
        inserted = False
        # Random-walk down from the root until a free child slot is found
        # or the height limit is reached.
        while depth < height and not inserted:
            attr = random.choice(('left', 'right'))
            if getattr(node, attr) is None:
                setattr(node, attr, Node(value))
                inserted = True
            node = getattr(node, attr)
            depth += 1
        # Track nodes placed at maximum depth as leaves; stop once the
        # randomly chosen leaf count has been reached.
        if inserted and depth == height:
            leaves.add(node)
        if len(leaves) == leaf_count:
            break
    return root
|
java
|
/**
 * Initiates the WebSocket close handshake by writing the given close frame
 * on the channel, then arms the optional force-close timeout.
 *
 * @param channel the channel to close, must not be {@code null}
 * @param frame   the close frame to send
 * @param promise the promise to notify once the frame is written
 * @return the supplied {@code promise}
 */
public ChannelFuture close(Channel channel, CloseWebSocketFrame frame, ChannelPromise promise) {
    // Same contract as the explicit null-check: NPE with message "channel".
    java.util.Objects.requireNonNull(channel, "channel");
    channel.writeAndFlush(frame, promise);
    applyForceCloseTimeout(channel, promise);
    return promise;
}
|
java
|
/**
 * Parses a JSON object string into a {@code Map}.
 *
 * @param json the JSON document to parse; must represent an object
 * @return the deserialized map
 * @throws java.io.UncheckedIOException if the input cannot be parsed; the
 *         original {@code IOException} is preserved as the cause (this is a
 *         {@code RuntimeException} subclass, so existing callers catching
 *         {@code RuntimeException} are unaffected)
 */
public static Map toMap(String json) {
    try {
        return mapper.readValue(json, Map.class);
    } catch (IOException e) {
        // UncheckedIOException keeps the I/O nature of the failure visible
        // instead of a generic RuntimeException wrapper.
        throw new java.io.UncheckedIOException("Failed to parse JSON to Map", e);
    }
}
|
java
|
/**
 * Starts a workflow execution described by the given request by expanding
 * its fields and delegating to the parameterized overload. Returns that
 * overload's result (presumably the new workflow's id — confirm against the
 * delegate's contract).
 */
@Service
public String startWorkflow(StartWorkflowRequest startWorkflowRequest) {
    return startWorkflow(startWorkflowRequest.getName(), startWorkflowRequest.getVersion(), startWorkflowRequest.getCorrelationId(), startWorkflowRequest.getInput(),
        startWorkflowRequest.getExternalInputPayloadStoragePath(), startWorkflowRequest.getTaskToDomain(), startWorkflowRequest.getWorkflowDef());
}
|
java
|
/**
 * Builds (or reuses) the Freemarker template model for the current request.
 * The model is cached as a request attribute so repeated renders within one
 * request share the same hash.
 */
private SimpleHash buildModel(ValueStack stack, Component component) {
    Map<?, ?> context = stack.getContext();
    HttpServletRequest req = (HttpServletRequest) context.get(ServletActionContext.HTTP_REQUEST);
    // build hash
    SimpleHash model = (SimpleHash) req.getAttribute(FreemarkerManager.ATTR_TEMPLATE_MODEL);
    if (null == model) {
        // First render in this request: create the model and cache it.
        model = freemarkerManager.buildTemplateModel(stack, null,
            (ServletContext) context.get(ServletActionContext.SERVLET_CONTEXT), req,
            (HttpServletResponse) context.get(ServletActionContext.HTTP_RESPONSE), config.getObjectWrapper());
        req.setAttribute(FreemarkerManager.ATTR_TEMPLATE_MODEL, model);
    }
    // NOTE(review): the component parameter is unused here — confirm whether
    // the signature is kept for overriding subclasses.
    return model;
}
|
java
|
/**
 * Looks up a membership entry in the cache by its composite key.
 *
 * @param userName the member's user name
 * @param groupId  the group id
 * @param type     the membership type
 * @return the cached {@code MembershipImpl}; presumably {@code null} on a
 *         cache miss — confirm the cache's {@code get} contract
 */
private MembershipImpl getFromCache(String userName, String groupId, String type)
{
    return (MembershipImpl)cache.get(cache.getMembershipKey(userName, groupId, type), CacheType.MEMBERSHIP);
}
|
java
|
/**
 * Converts path component strings to their byte-array representations.
 * An empty input maps to a single {@code null} component (root convention).
 *
 * @param strings the path components to convert
 * @return one byte array per component, or {@code {null}} for empty input
 */
static byte[][] getPathComponents(String[] strings) {
    if (strings.length == 0) {
        return new byte[][]{null};
    }
    final byte[][] components = new byte[strings.length][];
    int index = 0;
    for (String component : strings) {
        components[index++] = DFSUtil.string2Bytes(component);
    }
    return components;
}
|
python
|
def save(self, *args, **kwargs):
    """Saving ensures that the slug, if not set, is set to the slugified name."""
    self.clean()
    if not self.slug:
        self.slug = slugify(self.name)
    super(SpecialCoverage, self).save(*args, **kwargs)
    # Sync with the percolator only when a non-empty query exists; the
    # active date range is filtered client-side (see comment below).
    if self.query and self.query != {}:
        # Always save and require client to filter active date range
        self._save_percolator()
|
java
|
/**
 * Convenience factory: equivalent to {@code new Query().all(field, values)}.
 *
 * @param field  the field to constrain
 * @param values the values the field must contain
 * @return a new query with the constraint applied
 */
public static Query all(String field, Object... values) {
    return new Query().all(field, values);
}
|
java
|
/**
 * Merges this bundle's persisted ConfigAdmin properties into the supplied
 * dictionary, writes persistable keys back to ConfigAdmin, and ensures the
 * web alias entry is present.
 *
 * @param dictionary the configuration to update (may be {@code null})
 * @param returnCopy when {@code true}, operate on a copy instead of
 *                   mutating the caller's dictionary
 * @return the merged dictionary (never {@code null})
 */
@SuppressWarnings("unchecked")
public Dictionary<String, String> updateDictionaryConfig(Dictionary<String, String> dictionary, boolean returnCopy)
{
    if (returnCopy)
        dictionary = BaseBundleActivator.putAll(dictionary, null);
    if (dictionary == null)
        dictionary = new Hashtable<String, String>();
    try {
        String servicePid = dictionary.get(BundleConstants.SERVICE_PID);
        if (servicePid != null)
        {
            ServiceReference caRef = context.getServiceReference(ConfigurationAdmin.class.getName());
            if (caRef != null)
            {
                ConfigurationAdmin configAdmin = (ConfigurationAdmin)context.getService(caRef);
                Configuration config = configAdmin.getConfiguration(servicePid);
                // NOTE(review): configProperties is an instance field, so this
                // method mutates shared state — confirm single-threaded use.
                configProperties = config.getProperties();
                if (configProperties == null)
                    configProperties = new Hashtable<String, String>();
                // First, move all settings to dictionary
                dictionary = BaseBundleActivator.putAll(configProperties, dictionary);
                dictionary.put(BaseWebappServlet.ALIAS, this.calculateWebAlias(dictionary));
                // Next, move all saveable settings to the config dictionary (and save them)
                Enumeration<String> keys = dictionary.keys();
                while (keys.hasMoreElements())
                {
                    String key = keys.nextElement();
                    if (isPersistentProperty(key))
                        configProperties.put(key, dictionary.get(key)); // Make sure all the fully qualified keys are persisted
                }
                // push the configuration dictionary to the ConfigAdminService
                config.update(configProperties);
            }
        }
        if (dictionary.get(BaseWebappServlet.ALIAS) == null)
            dictionary.put(BaseWebappServlet.ALIAS, this.calculateWebAlias(dictionary));
    } catch (IOException e) {
        // NOTE(review): failure is only printed; callers still receive the
        // partially merged dictionary.
        e.printStackTrace();
    }
    return dictionary;
}
|
python
|
def execute(self, commands, encoding='json', **kwargs):
    """Run ``commands`` on the destination node and return the response.

    Builds a request for the given command list, sends it, and returns the
    decoded response. On transport or command failure the exception is
    tagged with the command list, remembered on ``self.error`` and
    re-raised.

    Args:
        commands (list): Commands to execute on the remote node.
        encoding (string): 'json' or 'text'; influences the response
            object encoding.
        **kwargs: Extra keyword arguments forwarded to ``request``.

    Returns:
        The decoded response message.

    Raises:
        TypeError: if ``encoding`` is not 'json' or 'text'.
        ConnectionError, CommandError: re-raised after being recorded on
            ``self.error``.
    """
    if encoding not in ('json', 'text'):
        raise TypeError('encoding must be one of [json, text]')
    try:
        self.error = None
        message = self.request(commands, encoding=encoding, **kwargs)
        return self.send(message)
    except (ConnectionError, CommandError, TypeError) as exc:
        exc.commands = commands
        self.error = exc
        raise
|
python
|
def avl_join2(t1, t2):
    """Join two AVL trees without any intermediate key.

    The rightmost node of ``t1`` is detached and used as the pivot that
    glues the remainder of ``t1`` to ``t2``.

    Args:
        t1: root node of the left tree (may be None)
        t2: root node of the right tree (may be None)

    Returns:
        Node: new_root of the merged tree (None when both inputs are None)

    Complexity:
        O(log(n) + log(m)) = O(r(t1) + r(t2));
        for AVL-Trees the rank r(t1) = height(t1) - 1
    """
    # Dead `if debug:` scaffolding (debug was hard-coded to 0) removed.
    if t1 is None:
        return t2
    if t2 is None:
        return t1
    # Split off the last (rightmost) node of t1 and use it to join the
    # remaining left tree with t2.
    new_left, last_node = avl_split_last(t1)
    return avl_join(new_left, t2, last_node)
|
java
|
/**
 * Creates a work item configuration asynchronously.
 *
 * @param resourceGroupName the name of the resource group
 * @param resourceName the resource name
 * @param workItemConfigurationProperties the configuration properties to create
 * @return an {@link Observable} emitting the created {@link WorkItemConfigurationInner}
 */
public Observable<WorkItemConfigurationInner> createAsync(String resourceGroupName, String resourceName, WorkItemCreateConfiguration workItemConfigurationProperties) {
    return createWithServiceResponseAsync(resourceGroupName, resourceName, workItemConfigurationProperties).map(new Func1<ServiceResponse<WorkItemConfigurationInner>, WorkItemConfigurationInner>() {
        @Override
        public WorkItemConfigurationInner call(ServiceResponse<WorkItemConfigurationInner> response) {
            // Unwrap the ServiceResponse and surface only the body.
            return response.body();
        }
    });
}
|
python
|
def EAS2TAS(ARSP,GPS,BARO,ground_temp=25):
'''EAS2TAS from ARSP.Temp'''
tempK = ground_temp + 273.15 - 0.0065 * GPS.Alt
return sqrt(1.225 / (BARO.Press / (287.26 * tempK)))
|
java
|
/**
 * Finds the instances whose unique attribute values match the given vertex.
 *
 * Each unique attribute is checked in turn; the first attribute whose
 * vertex property value maps to one or more instances decides the result.
 *
 * @param map unique attribute name to value-to-instances map
 * @param foundVertex the vertex to match against
 * @return the matching instances, or an empty list when none match
 */
private Collection<IndexedInstance> getInstancesForVertex(Map<String, AttributeValueMap> map, AtlasVertex foundVertex) {
    for (Map.Entry<String, AttributeValueMap> attributeEntry : map.entrySet()) {
        Object vertexValue = foundVertex.getProperty(attributeEntry.getKey(), Object.class);
        Collection<IndexedInstance> matches = attributeEntry.getValue().get(vertexValue);
        if (matches == null || matches.isEmpty()) {
            // No instance supplied this value; try the next attribute.
            continue;
        }
        // Return the first match. Let the underlying graph determine if this
        // is a problem (i.e. if the other unique attributes can be changed
        // safely to match what the user requested).
        return matches;
    }
    return Collections.emptyList();
}
|
python
|
def package_meta():
    """Read __init__.py for global package metadata.

    Parses the file textually (without importing the package) and extracts
    ``__version__``, ``__url__`` and ``__license__``.

    Returns:
        dict: with 'version', 'license' and 'url' keys.
    """
    _version_re = re.compile(r'__version__\s+=\s+(.*)')
    _url_re = re.compile(r'__url__\s+=\s+(.*)')
    _license_re = re.compile(r'__license__\s+=\s+(.*)')
    with open('lambda_uploader/__init__.py', 'rb') as ffinit:
        # Decode once instead of re-decoding the raw bytes for every search.
        initcontent = ffinit.read().decode('utf-8')
    # literal_eval turns the quoted RHS of each assignment into its value.
    version = str(ast.literal_eval(_version_re.search(initcontent).group(1)))
    url = str(ast.literal_eval(_url_re.search(initcontent).group(1)))
    license_text = str(ast.literal_eval(_license_re.search(initcontent).group(1)))
    return {
        'version': version,
        'license': license_text,
        'url': url,
    }
|
python
|
def _handle_tag_definemorphshape2(self):
    """Handle the DefineMorphShape2 tag.

    Reads the tag fields from ``self._src`` in wire order and returns a
    generic object holding them.  Parsing is incomplete (see FIXME below):
    the morph fill/line style data is skipped using ``Offset``.
    """
    obj = _make_object("DefineMorphShape2")
    obj.CharacterId = unpack_ui16(self._src)
    # Start/end shape bounds, then the stroke-less edge bounds.
    obj.StartBounds = self._get_struct_rect()
    obj.EndBounds = self._get_struct_rect()
    obj.StartEdgeBounds = self._get_struct_rect()
    obj.EndEdgeBounds = self._get_struct_rect()
    bc = BitConsumer(self._src)
    bc.u_get(6)  # reserved
    obj.UsesNonScalingStrokes = bc.u_get(1)
    obj.UsesScalingStrokes = bc.u_get(1)
    # Offset (in bytes) to the EndEdges field; used below to skip the
    # attributes that are not parsed yet.
    obj.Offset = unpack_ui32(self._src)
    # FIXME: this tag needs more work; I'm skipping some attributes here
    self._src.read(obj.Offset)
    obj.EndEdges = self._get_struct_shape()
    return obj
|
java
|
/**
 * Creates an application under the default tenant.
 *
 * @param appName name of the application to create
 * @return the {@link VDirectory} of the new application
 */
public VDirectory createApplication(String appName) {
    // Delegate to the tenant-aware overload, using the default tenant.
    return createApplication(new Tenant(TenantService.instance().getDefaultTenantDef()), appName);
}
|
python
|
def _extract_from_subworkflow(vs, step):
"""Remove internal variable names when moving from sub-workflow to main.
"""
substep_ids = set([x.name for x in step.workflow])
out = []
for var in vs:
internal = False
parts = var["id"].split("/")
if len(parts) > 1:
if parts[0] in substep_ids:
internal = True
if not internal:
var.pop("source", None)
out.append(var)
return out
|
python
|
def with_details(self, key, value):
    """
    Sets a parameter for additional error details.
    These details can be used to restore the error description in other languages.

    This method returns a reference to this exception to implement the Builder
    pattern for chaining additional calls.

    :param key: a details parameter name
    :param value: a details parameter value
    :return: this exception object
    """
    # Lazily create the details dict.  Use an identity check (`is None`)
    # rather than `!= None`, which would invoke __ne__ on arbitrary objects.
    if self.details is None:
        self.details = {}
    self.details[key] = value
    return self
|
python
|
def get(self, style):
    """Return the value of *style* at the innermost stack level that
    defines it, or None when no level does."""
    # Walk the stack from the top (most recent) frame downwards.
    for frame in reversed(self.stack):
        if style in frame:
            return frame[style]
    return None
|
python
|
def step_impl(context):
    """Assert the scenario text matches the captured log output line by line."""
    expected_lines = context.text.split('\n')
    actual_lines = context.output
    # Line counts must agree before comparing content.
    assert len(expected_lines) == len(actual_lines)
    for expected, actual in zip(expected_lines, actual_lines):
        # Echo each pair being compared to help diagnose failures.
        print('--\n\texpected: {}\n\tactual: {}'.format(expected, actual))
        assert expected == actual
|
java
|
/**
 * Returns the id of the running worker that contains the given tasklet.
 *
 * @param taskletId id of the tasklet to look up
 * @return the worker id, or {@code null} when no running worker contains it
 */
String getWhereTaskletWasScheduledTo(final int taskletId) {
    for (final Map.Entry<String, VortexWorkerManager> workerEntry : runningWorkers.entrySet()) {
        if (workerEntry.getValue().containsTasklet(taskletId)) {
            return workerEntry.getKey();
        }
    }
    return null;
}
|
python
|
def fetch_userid(self, side):
    """Return the userid for the specified bed side.

    Implicitly returns None when no user occupies that side.
    """
    for userid, user_obj in self.users.items():
        if user_obj.side == side:
            return userid
|
python
|
def load_scene(self, item):
    """Build a scene from its JSON config entry and register it."""
    self.add(Scene.from_config(self.pyvlx, item))
|
java
|
/**
 * Records the value of one fsimage element into this visitor's fields.
 * Values are only captured while inside an inode; elements not listed in
 * the switch are intentionally ignored.
 *
 * NOTE(review): NUM_BYTES accumulates with {@code +=} (per-block sizes of
 * the same inode) rather than overwriting — confirm {@code filesize} is
 * reset per inode elsewhere.
 */
@Override
void visit(ImageElement element, String value) throws IOException {
    if(inInode) {
        switch(element) {
        case INODE_PATH:
            // The root inode is stored with an empty path string.
            if(value.equals("")) path = "/";
            else path = value;
            break;
        case PERMISSION_STRING:
            perms = value;
            break;
        case REPLICATION:
            replication = value;
            break;
        case USER_NAME:
            username = value;
            break;
        case GROUP_NAME:
            group = value;
            break;
        case NUM_BYTES:
            filesize += Long.valueOf(value);
            break;
        case MODIFICATION_TIME:
            modTime = value;
            break;
        case SYMLINK:
            linkTarget = value;
            break;
        case INODE_TYPE:
            type = value;
            break;
        case INODE_HARDLINK_ID:
            hardlinkId = value;
            break;
        default:
            // This is OK. We're not looking for all the values.
            break;
        }
    }
}
|
java
|
/**
 * Intentionally a no-op: create/update/delete operations for these beans
 * are performed directly through repository methods rather than via this
 * save callback.
 */
@Override
protected void saveBeans(List<ProxyActionStatus> addedBeans, List<ProxyActionStatus> modifiedBeans,
        List<ProxyActionStatus> removedBeans) {
    // CRUD operations on Target will be done through repository methods
}
|
java
|
/**
 * Creates a new {@link Builder} pre-populated with the given status and
 * replacement resource id.
 *
 * @param <T> the resource id type
 * @param status the status to set
 * @param replacement the replacement resource id
 * @return a builder initialized with the supplied values
 */
public static <T extends ResourceId> Builder<T> newBuilder(Status status, T replacement) {
    final Builder<T> builder = new Builder<T>().setStatus(status);
    return builder.setReplacement(replacement);
}
|
python
|
def _board_from_game_image(self, game_image):
    """Return a board object matching the board in the game image.

    Return None if any tile cannot be identified.
    """
    tools = self._board_tools
    # Crop the board region out of the full game image.
    top, left, bottom, right = tools['board_region'].region_in(game_image)
    board_image = game_image[top:bottom, left:right]
    # Walk the grid, identify each tile and fill in a Board object.
    board = Board()
    tile_id = tools['tile_id']
    for position, borders in tools['grid'].borders_by_grid_position(board_image):
        t, l, b, r = borders
        character = tile_id.identify(board_image[t:b, l:r])
        if character is None:
            return None  # soft failure
        board[position] = Tile.singleton(character)
    return board
|
python
|
def _resize_image_if_necessary(image_fobj, target_pixels=None):
  """Resize an image to have (roughly) the given number of target pixels.

  Args:
    image_fobj: File object containing the original image.
    target_pixels: If given, number of pixels that the image must have.

  Returns:
    A file object.
  """
  if target_pixels is None:
    return image_fobj

  cv2 = tfds.core.lazy_imports.cv2
  # Decode image using OpenCV2. np.frombuffer replaces np.fromstring,
  # which is deprecated (and removed for binary data in NumPy >= 1.24).
  image = cv2.imdecode(
      np.frombuffer(image_fobj.read(), dtype=np.uint8), flags=3)
  # Get image height and width.
  height, width, _ = image.shape
  actual_pixels = height * width
  if actual_pixels > target_pixels:
    # Uniform scale factor so that height*width approximates target_pixels.
    factor = np.sqrt(target_pixels / actual_pixels)
    image = cv2.resize(image, dsize=None, fx=factor, fy=factor)
  # Encode the image with quality=72 and store it in a BytesIO object.
  # ndarray.tobytes replaces the deprecated ndarray.tostring.
  _, buff = cv2.imencode(".jpg", image, [int(cv2.IMWRITE_JPEG_QUALITY), 72])
  return io.BytesIO(buff.tobytes())
|
java
|
/**
 * Applies a one-input operator transformation and propagates this keyed
 * stream's key selector and key type into the resulting transformation,
 * so that downstream operator state remains keyed.
 */
@Override
@PublicEvolving
public <R> SingleOutputStreamOperator<R> transform(String operatorName,
        TypeInformation<R> outTypeInfo, OneInputStreamOperator<T, R> operator) {

    SingleOutputStreamOperator<R> returnStream = super.transform(operatorName, outTypeInfo, operator);

    // inject the key selector and key type
    OneInputTransformation<T, R> transform = (OneInputTransformation<T, R>) returnStream.getTransformation();
    transform.setStateKeySelector(keySelector);
    transform.setStateKeyType(keyType);

    return returnStream;
}
|
java
|
/**
 * Blocks until every submitted task has completed.
 *
 * @return {@code true} when all tasks reported success; {@code false} when
 *         any task reported failure (remaining tasks are cancelled first)
 * @throws Exception the first exception raised while waiting on a task;
 *         it is also recorded in {@code error} (first writer wins) and the
 *         remaining tasks are cancelled before rethrowing
 */
private boolean waitForTasksToFinish() throws Exception {
    for (Future<Boolean> future : futures) {
        try {
            if (!future.get()) {
                // A task reported failure: stop everything else.
                cancel();
                return false;
            }
        } catch (Exception e) {
            // Record only the first error seen across all tasks.
            error.compareAndSet(null, e);
            cancel();
            throw e;
        }
    }
    return true;
}
|
java
|
@XmlElementDecl(namespace = "http://www.w3.org/1998/Math/MathML", name = "log")
public JAXBElement<ElementaryFunctionsType> createLog(ElementaryFunctionsType value) {
return new JAXBElement<ElementaryFunctionsType>(_Log_QNAME, ElementaryFunctionsType.class, null, value);
}
|
python
|
def _step(self,
          model: TrainingModel,
          batch: mx.io.DataBatch,
          checkpoint_interval: int,
          metric_train: mx.metric.EvalMetric,
          metric_loss: Optional[mx.metric.EvalMetric] = None):
    """
    Performs an update to model given a batch and updates metrics.

    Runs forward/backward, optionally feeds the batch loss to an extended
    (Sockeye) optimizer, applies gradient clipping by global norm when
    configured, and steps the optimizer every ``update_interval`` batches
    (gradient accumulation in between).

    :param model: Training model (module, optimizer and optional monitor).
    :param batch: The data batch to process.
    :param checkpoint_interval: Updates between checkpoints; gradient stats
        are captured on the update just before a checkpoint.
    :param metric_train: Training metric updated by the forward pass.
    :param metric_loss: Optional loss metric, only used together with a
        SockeyeOptimizer to report per-batch loss to the optimizer.
    """
    if model.monitor is not None:
        model.monitor.tic()

    ####################
    # Forward & Backward
    ####################
    model.run_forward_backward(batch, metric_train)

    # If using an extended optimizer, provide extra state information about the current batch
    optimizer = model.optimizer
    if metric_loss is not None and isinstance(optimizer, SockeyeOptimizer):
        # Loss for this batch
        metric_loss.reset()
        metric_loss.update(batch.label, model.module.get_outputs())
        [(_, m_val)] = metric_loss.get_name_value()
        batch_state = BatchState(metric_val=m_val)
        optimizer.pre_update_batch(batch_state)

    ########
    # UPDATE
    ########
    # Only step the optimizer every `update_interval` batches.
    if self.update_interval == 1 or self.state.batches % self.update_interval == 0:
        # Gradient rescaling
        gradient_norm = None
        if self.state.updates > 0 and (self.state.updates + 1) % checkpoint_interval == 0:
            # compute values for logging to metrics (before rescaling...)
            gradient_norm = self.state.gradient_norm = model.get_global_gradient_norm()
            self.state.gradients = model.get_gradients()
        # note: C.GRADIENT_CLIPPING_TYPE_ABS is handled by the mxnet optimizer directly
        if self.optimizer_config.gradient_clipping_type == C.GRADIENT_CLIPPING_TYPE_NORM:
            if gradient_norm is None:
                gradient_norm = model.get_global_gradient_norm()
            # clip gradients
            if gradient_norm > self.optimizer_config.gradient_clipping_threshold:
                ratio = self.optimizer_config.gradient_clipping_threshold / gradient_norm
                model.rescale_gradients(ratio)
        model.update()
        if self.update_interval > 1:
            # Accumulated gradients were consumed; start the next accumulation.
            model.zero_gradients()
        self.state.updates += 1

    if model.monitor is not None:
        results = model.monitor.toc()
        if results:
            for _, k, v in results:
                logger.info('Monitor: Batch [{:d}] {:s} {:s}'.format(self.state.updates, k, v))
|
java
|
/**
 * Creates a destination address for the named destination.
 *
 * @param destinationName the destination name; must not be null
 * @param localOnly whether the address is restricted to the local
 *        messaging engine
 * @return a new SIDestinationAddress for the destination
 * @throws NullPointerException if {@code destinationName} is null
 */
public final SIDestinationAddress createSIDestinationAddress(String destinationName
    ,boolean localOnly
    )
    throws NullPointerException {
    if (destinationName == null) {
        throw new NullPointerException("destinationName");
    }
    // NOTE(review): the trailing (null, null, false) arguments are passed
    // unset — presumably ME uuid, bus name and a temporary flag; confirm
    // against JsDestinationAddressImpl's constructor.
    return new JsDestinationAddressImpl(destinationName, localOnly, null, null, false);
}
|
java
|
/**
 * Encrypts {@code secretPlain} with AES/CBC/PKCS5Padding.
 *
 * Lazily initializes the AES key material on first use, stores the
 * cipher-generated IV in {@code iv} and the ciphertext in
 * {@code secretCipher}, then clears the plaintext buffer.
 *
 * @throws Exception if cipher initialization or encryption fails
 */
public void doAESEncryption() throws Exception{
    if(!initAESDone)
        initAES();
    cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
    //System.out.println(secretKey.getEncoded());
    cipher.init(Cipher.ENCRYPT_MODE, secretKey);
    // Capture the IV the cipher generated so decryption can reuse it.
    AlgorithmParameters params = cipher.getParameters();
    iv = params.getParameterSpec(IvParameterSpec.class).getIV();
    secretCipher = cipher.doFinal(secretPlain);
    // Plaintext is no longer needed once encrypted.
    clearPlain();
}
|
python
|
def _give_columns_django_field_attributes(self):
    """
    Add Django Field attributes to each cqlengine.Column instance.

    So that the Django Options class may interact with it as if it were
    a Django Field.  Each listed method is bound onto every defined
    column instance.
    """
    # Django Field API surface to graft onto each cqlengine column.
    methods_to_add = (
        django_field_methods.value_from_object,
        django_field_methods.value_to_string,
        django_field_methods.get_attname,
        django_field_methods.get_cache_name,
        django_field_methods.pre_save,
        django_field_methods.get_prep_value,
        django_field_methods.get_choices,
        django_field_methods.get_choices_default,
        django_field_methods.save_form_data,
        django_field_methods.formfield,
        django_field_methods.get_db_prep_value,
        django_field_methods.get_db_prep_save,
        django_field_methods.db_type_suffix,
        django_field_methods.select_format,
        django_field_methods.get_internal_type,
        django_field_methods.get_attname_column,
        django_field_methods.check,
        django_field_methods._check_field_name,
        django_field_methods._check_db_index,
        django_field_methods.deconstruct,
        django_field_methods.run_validators,
        django_field_methods.clean,
        django_field_methods.get_db_converters,
        django_field_methods.get_prep_lookup,
        django_field_methods.get_db_prep_lookup,
        django_field_methods.get_filter_kwargs_for_object,
        django_field_methods.set_attributes_from_name,
        django_field_methods.db_parameters,
        django_field_methods.get_pk_value_on_save,
        django_field_methods.get_col,
    )
    for name, cql_column in six.iteritems(self._defined_columns):
        # First set the simple (non-method) Django attributes on the column.
        self._set_column_django_attributes(cql_column=cql_column, name=name)
        for method in methods_to_add:
            try:
                method_name = method.func_name
            except AttributeError:
                # python 3
                method_name = method.__name__
            # Bind the plain function to this column instance so `self`
            # inside it refers to the column.
            new_method = six.create_bound_method(method, cql_column)
            setattr(cql_column, method_name, new_method)
|
java
|
/**
 * Gets an iText Image from a {@code java.awt.Image}, drawing it through a
 * new {@link PdfContentByte} for the given writer.
 *
 * @param writer the PdfWriter the image will be added to
 * @param awtImage the AWT image to convert
 * @param quality compression quality forwarded to the overload — presumably
 *        a JPEG quality factor in [0,1]; confirm against the overload's docs
 * @return the converted Image
 * @throws BadElementException on invalid image data
 * @throws IOException on I/O errors while reading the image
 */
public static Image getInstance(PdfWriter writer, java.awt.Image awtImage, float quality) throws BadElementException, IOException {
    return getInstance(new PdfContentByte(writer), awtImage, quality);
}
|
python
|
def _get_response(self, params):
""" wrap the call to the requests package """
return self._session.get(
self._api_url, params=params, timeout=self._timeout
).json(encoding="utf8")
|
python
|
def get_file(self, user, handle):
    """Retrieve a file for a user.

    :returns: a :class:`pathlib.Path` instance to this file,
        or None if no file can be found for this handle.
    """
    user_dir = self.user_dir(user)
    if not user_dir.exists():
        return None
    if not is_valid_handle(handle):
        return None
    file_path = user_dir / handle
    # Bug fix: the original checked `not exists() and not is_file()`, which
    # let a path that exists but is NOT a regular file (e.g. a directory)
    # slip through.  A missing path is never a regular file, so a single
    # is_file() check covers both conditions.
    if not file_path.is_file():
        return None
    return file_path
|
java
|
/**
 * Appends {@code child} to this element's children, making this element
 * its parent and deriving the child's name from its new index.
 *
 * @param child the element to append
 * @throws IllegalArgumentException declared for API compatibility
 */
public void addChild(TreeElement child)
    throws IllegalArgumentException
{
    child.setParent(this);
    // A parent without a name gets the default name "0".
    if (getName() == null) {
        setName("0");
    }
    if (_children == null) {
        _children = new ArrayList();
    }
    // (Stale commented-out "last child" bookkeeping removed.)
    _children.add(child);
    // Name the child after its zero-based position in the list.
    child.updateName(this, _children.size() - 1);
}
|
java
|
/**
 * Folds {@code length} bytes of {@code buffer}, starting at
 * {@code offset}, into the running CRC accumulator and returns it.
 *
 * NOTE(review): this CRC is case-insensitive (each byte is upper-cased
 * before the table lookup) and stateful — the running value lives in the
 * static {@code crc} field, so successive calls continue the same
 * checksum and the method is not thread-safe.  Confirm callers reset
 * {@code crc} between independent inputs.
 *
 * @param buffer bytes to checksum
 * @param offset index of the first byte to include
 * @param length number of bytes to include
 * @return the updated CRC value
 */
public static long calculateCRC32(byte[] buffer, int offset, int length)
{
    // Lazily build the CRC lookup table on first use.
    if (!init_done)
    {
        initialize();
    }
    for (int i = offset; i < offset + length; i++)
    {
        long tmp1 = (crc >>> 8) & 0x00FFFFFFL;
        long tmp2 = values[(int) ((crc ^ Character.toUpperCase((char) buffer[i])) & 0xff)];
        crc = tmp1 ^ tmp2;
        // System.out.println("CRC: "+crc);
    }
    return crc;
}
|
python
|
def disaggregate_radiation(self, method='pot_rad', pot_rad=None):
    """
    Disaggregate solar radiation.

    Parameters
    ----------
    method : str, optional
        Disaggregation method.

        ``pot_rad``
            Calculates potential clear-sky hourly radiation and scales it according to the
            mean daily radiation. (Default)

        ``pot_rad_via_ssd``
            Calculates potential clear-sky hourly radiation and scales it according to the
            observed daily sunshine duration.

        ``pot_rad_via_bc``
            Calculates potential clear-sky hourly radiation and scales it according to daily
            minimum and maximum temperature.

        ``mean_course``
            Hourly radiation follows an observed average course (calculated for each month).

    pot_rad : Series, optional
        Hourly values of potential solar radiation. If ``None``, calculated internally.
    """
    # Sunrise/sunset times are required by the disaggregation routine.
    if self.sun_times is None:
        self.calc_sun_times()

    # Potential radiation is needed by every method except 'mean_course'.
    if pot_rad is None and method != 'mean_course':
        pot_rad = melodist.potential_radiation(self.data_disagg.index, self.lon, self.lat, self.timezone)

    # Store the disaggregated global radiation on the hourly data frame.
    self.data_disagg.glob = melodist.disaggregate_radiation(
        self.data_daily,
        sun_times=self.sun_times,
        pot_rad=pot_rad,
        method=method,
        angstr_a=self.statistics.glob.angstroem.a,
        angstr_b=self.statistics.glob.angstroem.b,
        bristcamp_a=self.statistics.glob.bristcamp.a,
        bristcamp_c=self.statistics.glob.bristcamp.c,
        mean_course=self.statistics.glob.mean_course
    )
|
python
|
def plot_series(self, xres, varied_data, varied_idx, **kwargs):
    """ Plots the results from :meth:`solve_series`.

    Parameters
    ----------
    xres : array
        Of shape ``(varied_data.size, self.nx)``.
    varied_data : array
        See :meth:`solve_series`.
    varied_idx : int or str
        See :meth:`solve_series`.
    \\*\\*kwargs :
        Keyword arguments passed to :func:`pyneqsys.plotting.plot_series`.

    Returns
    -------
    The matplotlib axes returned by :func:`pyneqsys.plotting.plot_series`,
    with its x-label set from the varied parameter's name when available.
    """
    # Default the (latex_)names kwargs from the matching instance attributes.
    for attr in 'names latex_names'.split():
        if kwargs.get(attr, None) is None:
            kwargs[attr] = getattr(self, attr)
    ax = plot_series(xres, varied_data, **kwargs)
    # Resolve a by-name parameter reference to its positional index.
    if self.par_by_name and isinstance(varied_idx, str):
        varied_idx = self.param_names.index(varied_idx)
    # Prefer a LaTeX label when one is defined, else the plain name.
    if self.latex_param_names:
        ax.set_xlabel('$%s$' % self.latex_param_names[varied_idx])
    elif self.param_names:
        ax.set_xlabel(self.param_names[varied_idx])
    return ax
|
java
|
@SuppressWarnings("PMD.UseStringBufferForStringAppends")
private static String processClass(final Class clazz) {
String res;
// simpleName on anonymous class is empty string
if (clazz.isAnonymousClass()) {
res = clazz.getEnclosingClass().getSimpleName();
try {
if (clazz.getEnclosingConstructor() != null) {
res += HASH + toStringConstructor(clazz.getEnclosingConstructor(), IgnoreGenericsMap.getInstance());
} else if (clazz.getEnclosingMethod() != null) {
res += HASH + toStringMethod(clazz.getEnclosingMethod(), IgnoreGenericsMap.getInstance());
}
} catch (Throwable ignored) {
// ignore possible "enclosing method not found" in groovy
}
res += "$" + (clazz.getSuperclass() == Object.class && clazz.getInterfaces().length > 0
? clazz.getInterfaces()[0].getSimpleName() : clazz.getSuperclass().getSimpleName());
} else {
res = clazz.getSimpleName();
}
return res;
}
|
python
|
def has_public_binary_operator(type_, operator_symbol):
    """returns True, if `type_` has public binary operator, otherwise False

    Checks, in order: member operators declared on the class itself, free
    operators taking a ``const T&`` first argument anywhere in the project,
    and member operators of all public base classes.
    """
    # Normalize away typedefs, cv-qualifiers and the declarated wrapper.
    type_ = type_traits.remove_alias(type_)
    type_ = type_traits.remove_cv(type_)
    type_ = type_traits.remove_declarated(type_)
    assert isinstance(type_, class_declaration.class_t)
    if type_traits.is_std_string(type_) or type_traits.is_std_wstring(type_):
        # In some case compare operators of std::basic_string are not
        # instantiated
        return True
    # Public, non-artificial member operators declared directly on the class.
    operators = type_.member_operators(
        function=matchers.custom_matcher_t(
            lambda decl: not decl.is_artificial) &
        matchers.access_type_matcher_t('public'),
        symbol=operator_symbol, allow_empty=True, recursive=False)
    if operators:
        return True
    # Free (non-member) operators whose first argument is `const T&`;
    # the second argument is unconstrained (None).
    declarated = cpptypes.declarated_t(type_)
    const = cpptypes.const_t(declarated)
    reference = cpptypes.reference_t(const)
    operators = type_.top_parent.operators(
        function=lambda decl: not decl.is_artificial,
        arg_types=[reference, None],
        symbol=operator_symbol,
        allow_empty=True,
        recursive=True)
    if operators:
        return True
    # Public member operators inherited from public base classes.
    for bi in type_.recursive_bases:
        assert isinstance(bi, class_declaration.hierarchy_info_t)
        if bi.access_type != class_declaration.ACCESS_TYPES.PUBLIC:
            continue
        operators = bi.related_class.member_operators(
            function=matchers.custom_matcher_t(
                lambda decl: not decl.is_artificial) &
            matchers.access_type_matcher_t('public'),
            symbol=operator_symbol, allow_empty=True, recursive=False)
        if operators:
            return True
    return False
|
python
|
def nvmlDeviceGetCurrPcieLinkGeneration(handle):
    r"""
    /**
     * Retrieves the current PCIe link generation
     *
     * For Fermi &tm; or newer fully supported devices.
     *
     * @param device                               The identifier of the target device
     * @param currLinkGen                          Reference in which to return the current PCIe link generation
     *
     * @return
     *         - \ref NVML_SUCCESS                 if \a currLinkGen has been populated
     *         - \ref NVML_ERROR_UNINITIALIZED     if the library has not been successfully initialized
     *         - \ref NVML_ERROR_INVALID_ARGUMENT  if \a device is invalid or \a currLinkGen is null
     *         - \ref NVML_ERROR_NOT_SUPPORTED     if PCIe link information is not available
     *         - \ref NVML_ERROR_GPU_IS_LOST       if the target GPU has fallen off the bus or is otherwise inaccessible
     *         - \ref NVML_ERROR_UNKNOWN           on any unexpected error
     */
    nvmlReturn_t DECLDIR nvmlDeviceGetCurrPcieLinkGeneration
    """
    fn = _nvmlGetFunctionPointer("nvmlDeviceGetCurrPcieLinkGeneration")
    gen = c_uint()
    ret = fn(handle, byref(gen))
    # Raises on any non-success NVML status code.
    _nvmlCheckReturn(ret)
    # NOTE(review): gen.value is an int; bytes_to_str is presumably a
    # pass-through for non-bytes values — confirm.
    return bytes_to_str(gen.value)
|
java
|
/**
 * Advances {@code lastAccessTime} to the given value with a CAS loop so
 * the stored time only ever moves forward under concurrent updates.
 * Candidates that are not newer than the current value are ignored.
 *
 * @param lastAccessTime candidate access time (monotonic max wins)
 */
public void setLastAccessTime(long lastAccessTime) {
    while (true) {
        long current = this.lastAccessTime;
        // Never move the access time backwards.
        if (current >= lastAccessTime) {
            break;
        }
        // Retry when another thread updated the field in between.
        if (ACCESSTIME_UPDATER.compareAndSet(this, current, lastAccessTime)) {
            break;
        }
    }
}
|
python
|
def reinstall_ruby(ruby, runas=None, env=None):
    '''
    Reinstall a ruby implementation

    ruby
        The version of ruby to reinstall

    runas
        The user under which to run rvm. If not specified, then rvm will be run
        as the user under which Salt is running.

    CLI Example:

    .. code-block:: bash

        salt '*' rvm.reinstall_ruby 1.9.3-p385
    '''
    command = ['reinstall', ruby]
    return _rvm(command, runas=runas, env=env)
|
java
|
/**
 * Resolves the method matching the given name and descriptor and returns
 * its control-flow blocks.
 */
List<ControlFlowBlock> getControlFlowBlocksForMethod(final String methodName,
        final Type returnType,
        final Type... argumentTypes) {
    return getControlFlowBlocksForMethod(findMethodByDescriptor(methodName, returnType, argumentTypes));
}
|
python
|
def make_error_router():
    """ Creates an error router

    An error router takes a higher order observable a input and returns two
    observables: One containing the flattened items of the input observable
    and another one containing the flattened errors of the input observable.

    .. image:: ../docs/asset/error_router.png
        :scale: 60%
        :align: center

    Returns
    -------
    error_observable: observable
        An observable emitting errors remapped.
    route_error: function
        A lettable function routing errors and taking three parameters:

        * source: Observable (higher order). Observable with errors to route.
        * error_map: function. Function used to map errors before routing them.
        * source_map: function. A function used to select the observable from each item is source.

    Examples
    --------
    >>> sink, route_error = make_error_router()

    my_observable.let(route_error, error_map=lambda e: e)
    """
    # Observer of the error output stream; set once it is subscribed.
    sink_observer = None

    def on_subscribe(observer):
        # Capture the subscriber so route_error can push converted errors to it.
        nonlocal sink_observer
        sink_observer = observer

        def dispose():
            nonlocal sink_observer
            sink_observer = None

        return dispose

    def route_error(obs, convert):
        """ Handles error raised by obs observable

        catches any error raised by obs, maps it to anther object with the
        convert function, and emits in on the error observer.
        """
        # NOTE(review): assumes the error observable has already been
        # subscribed; otherwise sink_observer is None here — confirm
        # intended usage order.
        def catch_error(e):
            sink_observer.on_next(convert(e))
            return Observable.empty()

        return obs.catch_exception(catch_error)

    def catch_or_flat_map(source, error_map, source_map=lambda i: i):
        return source.flat_map(lambda i: route_error(source_map(i), error_map))

    return Observable.create(on_subscribe), catch_or_flat_map
|
python
|
def label_field(self, f):
    """
    Select one field as the label field.

    Note that this field will be excluded from feature fields.

    :param f: Selected label field
    :type f: str

    :rtype: DataFrame
    """
    if f is None:
        raise ValueError("Label field name cannot be None.")
    self._assert_ml_fields_valid(f)
    # Mark the resolved field as the label and drop it from features.
    field_name = _get_field_name(f)
    return _change_singleton_roles(self, {field_name: FieldRole.LABEL}, clear_feature=True)
|
python
|
def solve(self):
    """
    Solve the entire F2L. (Generator)

    Yields (pair_colours, solution) tuples, one per unsolved F2L pair,
    checking the four slots in order and restarting the slot scan after
    each solved pair.
    """
    for i in range(4):
        for slot in ["FR", "RB", "BL", "LF"]:
            solver = F2LPairSolver(self.cube, slot)
            if not solver.is_solved():
                # NOTE(review): the comprehension's `i` shadows the outer
                # loop variable; here it indexes the two faces of the slot.
                yield tuple([self.cube[slot[i]].colour for i in range(2)]), solver.solve()
                break
|
java
|
/**
 * Normalizes relative time expressions (e.g. "N days/months/years
 * ago/later") against the base time {@code normalizer.getTimeBase()},
 * which is formatted as "yyyy-MM-dd-HH-mm-ss".  The resulting year,
 * month and day are written into {@code _tp.tunit} for the components
 * affected by the matched offsets.
 */
public void norm_setBaseRelated(){
    String [] time_grid=new String[6];
    time_grid=normalizer.getTimeBase().split("-");
    int[] ini = new int[6];
    for(int i = 0 ; i < 6; i++)
        ini[i] = Integer.parseInt(time_grid[i]);

    Calendar calendar = Calendar.getInstance();
    calendar.setFirstDayOfWeek(Calendar.MONDAY);
    // Calendar months are zero-based, hence ini[1]-1.
    calendar.set(ini[0], ini[1]-1, ini[2], ini[3], ini[4], ini[5]);
    calendar.getTime();

    // Tracks whether the year/month/day was shifted by a relative
    // expression (translated from the original Chinese comment).
    boolean[] flag = {false,false,false};

    // "N days before": subtract N days.
    String rule="\\d+(?=天[以之]?前)";
    Pattern pattern=Pattern.compile(rule);
    Matcher match=pattern.matcher(Time_Expression);
    if(match.find())
    {
        flag[2] = true;
        int day = Integer.parseInt(match.group());
        calendar.add(Calendar.DATE, -day);
    }

    // "N days after": add N days.
    rule="\\d+(?=天[以之]?后)";
    pattern=Pattern.compile(rule);
    match=pattern.matcher(Time_Expression);
    if(match.find())
    {
        flag[2] = true;
        int day = Integer.parseInt(match.group());
        calendar.add(Calendar.DATE, day);
    }

    // "N months before": subtract N months.
    rule="\\d+(?=(个)?月[以之]?前)";
    pattern=Pattern.compile(rule);
    match=pattern.matcher(Time_Expression);
    if(match.find())
    {
        flag[1] = true;
        int month = Integer.parseInt(match.group());
        calendar.add(Calendar.MONTH, -month);
    }

    // "N months after": add N months.
    rule="\\d+(?=(个)?月[以之]?后)";
    pattern=Pattern.compile(rule);
    match=pattern.matcher(Time_Expression);
    if(match.find())
    {
        flag[1] = true;
        int month = Integer.parseInt(match.group());
        calendar.add(Calendar.MONTH, month);
    }

    // "N years before": subtract N years.
    rule="\\d+(?=年[以之]?前)";
    pattern=Pattern.compile(rule);
    match=pattern.matcher(Time_Expression);
    if(match.find())
    {
        flag[0] = true;
        int year = Integer.parseInt(match.group());
        calendar.add(Calendar.YEAR, -year);
    }

    // "N years after": add N years.
    rule="\\d+(?=年[以之]?后)";
    pattern=Pattern.compile(rule);
    match=pattern.matcher(Time_Expression);
    if(match.find())
    {
        flag[0] = true;
        int year = Integer.parseInt(match.group());
        calendar.add(Calendar.YEAR, year);
    }

    String s = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss").format(calendar.getTime());
    String[] time_fin = s.split("-");
    // A day shift may change the month/year, and a month shift the year,
    // so higher-order components are also written back when a lower-order
    // flag is set.
    if(flag[0]||flag[1]||flag[2]){
        _tp.tunit[0] = Integer.parseInt(time_fin[0]);
    }
    if(flag[1]||flag[2])
        _tp.tunit[1] = Integer.parseInt(time_fin[1]);
    if(flag[2])
        _tp.tunit[2] = Integer.parseInt(time_fin[2]);
}
|
python
|
def _get_version():
    """Return the project version from VERSION file."""
    version_path = join(dirname(__file__), '{{project.package}}/VERSION')
    with open(version_path, 'rb') as version_file:
        return version_file.read().decode('ascii').strip()
|
java
|
/**
 * Resets every counter slot except the one mapped to the given time.
 *
 * The reset is skipped while {@code time} still falls inside the current
 * window ({@code lastTime + size}).  NOTE(review): once past the window,
 * ALL other slots are zeroed, not just expired ones — confirm this is
 * the intended behavior for a sliding-window counter.
 *
 * @param time current time used to locate the active slot
 */
private void clear(long time) {
    if (time < lastTime.get() + size) {
        return;
    }
    // Slot index for the given time; this slot keeps its count.
    int index = (int) (time % size);
    for (int i = 0; i < size; i++) {
        if (i != index) {
            counts.set(i, 0);
        }
    }
}
|
python
|
def update(self, item_id, attributes, silent=False, hook=True):
    """
    Updates the item using the supplied attributes. If 'silent' is true, Podio will send
    no notifications to subscribed users and not post updates to the stream.

    Important: webhooks will still be called.
    """
    if not isinstance(attributes, dict):
        raise TypeError('Must be of type dict')
    options = self.get_options(silent=silent, hook=hook)
    return self.transport.PUT(
        body=json.dumps(attributes),
        type='application/json',
        url='/item/%d%s' % (item_id, options),
    )
|
python
|
def swap(self, position: int) -> None:
    """
    Perform a SWAP operation on the stack.

    Exchanges the top of the stack with the item ``position`` slots below it.
    """
    target = -(position + 1)
    try:
        # Reading the target slot first preserves the original's behavior:
        # an out-of-range index raises before any mutation happens.
        top = self.values[-1]
        self.values[-1] = self.values[target]
        self.values[target] = top
    except IndexError:
        raise InsufficientStack("Insufficient stack items for SWAP{0}".format(position))
|
java
|
/**
 * Registers the command-line options shared by the data-table tools
 * (full-scan, fix, compaction and delete flags) on the given parser.
 *
 * @param argp the argument parser to add the options to
 */
public static void addDataOptions(final ArgP argp) {
    argp.addOption("--full-scan", "Scan the entire data table.");
    argp.addOption("--fix", "Fix errors as they're found. Use in combination with"
            + " other flags.");
    argp.addOption("--fix-all", "Set all flags and fix errors as they're found.");
    argp.addOption("--compact", "Compacts rows after parsing.");
    argp.addOption("--resolve-duplicates",
            "Keeps the oldest (default) or newest duplicates. See --last-write-wins");
    argp.addOption("--last-write-wins",
            "Last data point written will be kept when fixing duplicates.\n" +
            "            May be set via config file and the 'tsd.storage.fix_duplicates' option.");
    argp.addOption("--delete-orphans",
            "Delete any time series rows where one or more UIDs fail resolution.");
    argp.addOption("--delete-unknown-columns",
            "Delete any unrecognized column that doesn't belong to OpenTSDB.");
    argp.addOption("--delete-bad-values",
            "Delete single column datapoints with bad values.");
    argp.addOption("--delete-bad-rows", "Delete rows with invalid keys.");
    argp.addOption("--delete-bad-compacts",
            "Delete compacted columns that cannot be parsed.");
    argp.addOption("--threads", "NUMBER",
            "Number of threads to use when executing a full table scan.");
    argp.addOption("--sync", "Wait for each fix operation to finish to continue.");
}
|
java
|
/**
 * Closes this hub: idempotent, shuts down the socket consumer and, when
 * configured, schedules a close message to the remote end on the event
 * loop before stopping the consumer.
 *
 * NOTE(review): the {@code while (clientChannel != null)} wait below is a
 * busy-spin with no sleep or timeout, and {@code closed = true} inside
 * the handler is redundant (already set above) — confirm both are
 * intentional.
 */
@Override
public void close() {
    if (closed)
        return;
    closed = true;
    tcpSocketConsumer.prepareToShutdown();

    if (shouldSendCloseMessage)
        eventLoop.addHandler(new EventHandler() {
            @Override
            public boolean action() throws InvalidEventHandlerException {
                try {
                    TcpChannelHub.this.sendCloseMessage();
                    tcpSocketConsumer.stop();
                    closed = true;

                    if (LOG.isDebugEnabled())
                        Jvm.debug().on(getClass(), "closing connection to " + socketAddressSupplier);

                    // Spin until the channel is released by the consumer.
                    while (clientChannel != null) {
                        if (LOG.isDebugEnabled())
                            Jvm.debug().on(getClass(), "waiting for disconnect to " + socketAddressSupplier);
                    }
                } catch (ConnectionDroppedException e) {
                    throw new InvalidEventHandlerException(e);
                }

                // we just want this to run once
                throw new InvalidEventHandlerException();
            }

            @NotNull
            @Override
            public String toString() {
                return TcpChannelHub.class.getSimpleName() + "..close()";
            }
        });
}
|
java
|
/**
 * Finds dashboards whose title contains the given text, optionally also
 * filtered by type.  A null, empty or "undefined" type means no type
 * filter is applied.
 */
@Override
public Page<Dashboard> getDashboardByTitleWithFilter(String title, String type, Pageable pageable) {
    final boolean filterByType = (type != null) && (!type.isEmpty()) && (!UNDEFINED.equalsIgnoreCase(type));
    if (filterByType) {
        return dashboardRepository.findAllByTypeContainingIgnoreCaseAndTitleContainingIgnoreCase(type, title, pageable);
    }
    return dashboardRepository.findAllByTitleContainingIgnoreCase(title, pageable);
}
|
java
|
/**
 * Returns the first commerce tier price entry in the ordered set matching
 * the given uuid and companyId, or {@code null} when none is found.
 *
 * @param uuid the uuid
 * @param companyId the company ID
 * @param orderByComparator the comparator ordering the set (may be {@code null})
 * @return the first matching entry, or {@code null}
 */
public static CommerceTierPriceEntry fetchByUuid_C_First(String uuid,
    long companyId,
    OrderByComparator<CommerceTierPriceEntry> orderByComparator) {
    return getPersistence()
               .fetchByUuid_C_First(uuid, companyId, orderByComparator);
}
|
python
|
def log_url(self, url_data):
    """Send new url to all configured loggers."""
    self.check_active_loggers()
    do_print = self.do_print(url_data)
    # Loggers receive only the transport object, not the complete
    # object instance.
    for logger in self.loggers:
        logger.log_filter_url(url_data, do_print)
|
java
|
/**
 * Adds a child parameter to this parameter.
 *
 * @param childParam the child parameter; its name must be non-empty
 * @return this parameter, enabling call chaining
 */
public RESTParameter add(RESTParameter childParam) {
    Utils.require(!Utils.isEmpty(childParam.getName()), "Child parameter name cannot be empty");
    m_parameters.add(childParam);
    return this;
}
|
java
|
/**
 * Records a {@code goog.inherits(subClass, superClass)}-style call node.
 * When both arguments are unscoped qualified names, the subclass's
 * qualified name is remembered as a known Closure subclass.
 */
private void processInheritsCall(Node n) {
    // Three children = callee plus the two class arguments.
    if (n.getChildCount() == 3) {
        Node subClass = n.getSecondChild();
        Node superClass = subClass.getNext();
        if (subClass.isUnscopedQualifiedName() && superClass.isUnscopedQualifiedName()) {
            knownClosureSubclasses.add(subClass.getQualifiedName());
        }
    }
}
|
python
|
def subtract(self, expr, simplify):
    """
    Return a new expression where the `expr` expression has been removed
    from this expression if it exists.

    Returns None when nothing remains, the single remaining argument when
    only one does, and otherwise a new instance of this expression class
    (optionally simplified).
    """
    args = self.args
    if expr in self.args:
        # Direct argument: drop the single matching occurrence.
        args = list(self.args)
        args.remove(expr)
    elif isinstance(expr, self.__class__):
        # Same-type sub-expression: only subtract when every one of its
        # arguments is present here.
        if all(arg in self.args for arg in expr.args):
            # NOTE(review): membership here is tested against `expr`
            # itself (not `expr.args`), relying on the expression type's
            # __contains__ — confirm this matches intended semantics.
            args = tuple(arg for arg in self.args if arg not in expr)
    if len(args) == 0:
        return None
    if len(args) == 1:
        return args[0]
    newexpr = self.__class__(*args)
    if simplify:
        newexpr = newexpr.simplify()
    return newexpr
|
java
|
/**
 * Serializes this TLV element to the given stream: the encoded header first,
 * followed by the raw content bytes.
 *
 * @param out target output stream; must not be null.
 * @throws TLVParserException if the content length exceeds the TLV limits, or
 *         if writing to the stream fails (the underlying IOException is kept
 *         as the cause).
 */
public void writeTo(OutputStream out) throws TLVParserException {
    Util.notNull(out, "Output stream");
    try {
        // Validate length before emitting anything, so a too-long element
        // never writes a partial header.
        assertActualContentLengthIsInTLVLimits(getContentLength());
        out.write(encodeHeader());
        out.write(getContent());
    } catch (IOException e) {
        throw new TLVParserException("Writing TLV element (" + convertHeader() + ") to output stream failed", e);
    }
}
|
python
|
def set_courses(self, course_ids):
    """Sets the courses.
    arg:    course_ids (osid.id.Id[]): the course ``Ids``
    raise:  InvalidArgument - ``course_ids`` is invalid
    raise:  NullArgument - ``course_ids`` is ``null``
    raise:  NoAccess - ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.learning.ActivityForm.set_assets_template
    if not isinstance(course_ids, list):
        raise errors.InvalidArgument()
    if self.get_courses_metadata().is_read_only():
        raise errors.NoAccess()
    # Reject the entire list if any single id fails validation; nothing is
    # stored unless every id is valid.
    for course_id in course_ids:
        if not self._is_valid_id(course_id):
            raise errors.InvalidArgument()
    self._my_map['courseIds'] = [str(course_id) for course_id in course_ids]
|
python
|
def before_run(self):
    """Initialize the scheduling process"""
    # Check/action counters, all reset to zero for a fresh run.
    check_action_counters = (
        'nb_checks', 'nb_internal_checks', 'nb_checks_launched',
        'nb_actions_launched', 'nb_checks_results',
        'nb_checks_results_timeout', 'nb_checks_results_passive',
        'nb_checks_results_active', 'nb_actions_results',
        'nb_actions_results_timeout', 'nb_actions_results_passive',
        'nb_broks_dropped', 'nb_checks_dropped', 'nb_actions_dropped',
    )
    # Broks, notifications, ... counters
    event_counters = (
        'nb_broks', 'nb_notifications', 'nb_event_handlers',
        'nb_external_commands',
    )
    for counter_name in check_action_counters + event_counters:
        setattr(self, counter_name, 0)
    # Scheduler loop tick counter.
    self.ticks = 0
|
java
|
/**
 * Renews an existing certificate order asynchronously, discarding the service
 * response wrapper and emitting only the (void) body.
 *
 * @param resourceGroupName name of the resource group the certificate order belongs to
 * @param certificateOrderName name of the certificate order to renew
 * @param renewCertificateOrderRequest renewal parameters forwarded to the service call
 * @return an {@link Observable} that completes when the renewal request finishes
 */
public Observable<Void> renewAsync(String resourceGroupName, String certificateOrderName, RenewCertificateOrderRequest renewCertificateOrderRequest) {
    // Delegate to the ServiceResponse-returning variant and unwrap the body.
    return renewWithServiceResponseAsync(resourceGroupName, certificateOrderName, renewCertificateOrderRequest).map(new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> response) {
            return response.body();
        }
    });
}
|
java
|
/**
 * Runs a BigQuery query that uses named query parameters (@corpus and
 * @min_word_count) and prints each result row as comma-separated values.
 *
 * @throws InterruptedException if the thread is interrupted while waiting for
 *         the query job to complete
 */
public void runQueryWithNamedParameters() throws InterruptedException {
  // [START bigquery_query_params_named]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  String corpus = "romeoandjuliet";
  long minWordCount = 250;
  String query =
      "SELECT word, word_count\n"
          + "FROM `bigquery-public-data.samples.shakespeare`\n"
          + "WHERE corpus = @corpus\n"
          + "AND word_count >= @min_word_count\n"
          + "ORDER BY word_count DESC";
  // Note: Standard SQL is required to use query parameters.
  QueryJobConfiguration.Builder configBuilder = QueryJobConfiguration.newBuilder(query);
  configBuilder.addNamedParameter("corpus", QueryParameterValue.string(corpus));
  configBuilder.addNamedParameter("min_word_count", QueryParameterValue.int64(minWordCount));
  QueryJobConfiguration queryConfig = configBuilder.build();
  // Print the results.
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    for (FieldValue val : row) {
      System.out.printf("%s,", val.toString());
    }
    System.out.printf("\n");
  }
  // [END bigquery_query_params_named]
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.