language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def copy_files_to_folder(src, dest, xtn='*.txt'):
    """
    Copy every file in ``src`` matching the glob pattern ``xtn``
    (default ``*.txt``) into the ``dest`` folder.

    Errors are reported to stdout rather than raised (best-effort copy).
    """
    try:
        pattern = os.path.join(src, xtn)
        for path in glob.glob(pattern):
            copy_file(path, dest)
    except Exception as ex:
        print('ERROR copy_files_to_folder - ' + str(ex))
|
python
|
def addOntology(self):
    """
    Adds a new Ontology to this repo.

    The ontology name comes from the parsed CLI arguments; when absent it
    is derived from the file path.  The ontology is populated from the
    file and inserted through the repo's ``insertOntology`` operation.
    """
    self._openRepo()
    name = self._args.name
    filePath = self._getFilePath(self._args.filePath,
                                 self._args.relativePath)
    if name is None:
        # No explicit name supplied: fall back to a path-derived name.
        name = getNameFromPath(filePath)
    ontology = ontologies.Ontology(name)
    ontology.populateFromFile(filePath)
    self._updateRepo(self._repo.insertOntology, ontology)
|
python
|
def coerce(self, value):
    """Apply the configured coercion callable (``self._coerce``) to a
    cleaned value; return the value unchanged when no coercer is set."""
    coercer = self._coerce
    if coercer is None:
        return value
    return coercer(value)
|
java
|
/**
 * Resolves an avatar URL for the given user, or returns {@code null}.
 * {@code avatarSize} must match {@code iconSizeRegex} with two numeric
 * groups (width and height); the first resolver returning a non-null
 * name wins.
 */
public static @CheckForNull String resolveOrNull(User u, String avatarSize) {
    Matcher matcher = iconSizeRegex.matcher(avatarSize);
    if (matcher.matches() && matcher.groupCount() == 2) {
        int width = Integer.parseInt(matcher.group(1));
        int height = Integer.parseInt(matcher.group(2));
        // Ask each registered resolver in turn; first hit wins.
        for (UserAvatarResolver r : all()) {
            String name = r.findAvatarFor(u, width, height);
            if(name!=null) return name;
        }
    } else {
        LOGGER.warning(String.format("Could not split up the avatar size (%s) into a width and height.", avatarSize));
    }
    // No resolver produced an avatar, or the size string was malformed.
    return null;
}
|
python
|
def _safe_exit(start, output):
    """
    exit without breaking pipes

    Writes ``output`` to stdout (decoding bytes under Python 3), swallows
    broken-pipe errors, then prints the elapsed time since ``start`` (a
    ``time.time()`` timestamp) to stderr.
    """
    try:
        sys.stdout.write(output)
        sys.stdout.flush()
    except TypeError:  # python3
        # stdout.write rejected bytes: decode and retry.
        sys.stdout.write(str(output, 'utf-8'))
        sys.stdout.flush()
    except IOError:
        # Downstream pipe closed (e.g. piped into `head`); ignore.
        pass
    seconds = time.time() - start
    print("\n\n%5.3f seconds" % (seconds), file=sys.stderr)
|
python
|
def fourier_segments(self):
    """ Return a list of the FFT'd segments.

    Return the list of FrequencySeries. Additional properties are
    added that describe the strain segment. The property 'analyze'
    is a slice corresponding to the portion of the time domain equivelant
    of the segment to analyze for triggers. The value 'cumulative_index'
    indexes from the beginning of the original strain series.

    The result is computed once and cached in ``self._fourier_segments``.
    """
    if not self._fourier_segments:
        self._fourier_segments = []
        for seg_slice, ana in zip(self.segment_slices, self.analyze_slices):
            if seg_slice.start >= 0 and seg_slice.stop <= len(self.strain):
                # Segment lies entirely within the strain data.
                freq_seg = make_frequency_series(self.strain[seg_slice])
            # Assume that we cannot have a case where we both zero-pad on
            # both sides
            elif seg_slice.start < 0:
                # Segment starts before the data: zero-pad on the left.
                strain_chunk = self.strain[:seg_slice.stop]
                strain_chunk.prepend_zeros(-seg_slice.start)
                freq_seg = make_frequency_series(strain_chunk)
            elif seg_slice.stop > len(self.strain):
                # Segment runs past the data: zero-pad on the right.
                strain_chunk = self.strain[seg_slice.start:]
                strain_chunk.append_zeros(seg_slice.stop - len(self.strain))
                freq_seg = make_frequency_series(strain_chunk)
            freq_seg.analyze = ana
            freq_seg.cumulative_index = seg_slice.start + ana.start
            freq_seg.seg_slice = seg_slice
            self._fourier_segments.append(freq_seg)
    return self._fourier_segments
|
python
|
def __rename_directory(self, source, target):
    """
    Renames a directory using given source and target names.

    For each project/directory node registered for ``source``, the node is
    unregistered, the path is renamed on disk, and the node is updated and
    re-registered under the new name.

    :param source: Source file.
    :type source: unicode
    :param target: Target file.
    :type target: unicode
    """
    for node in itertools.chain(self.__script_editor.model.get_project_nodes(source),
                                self.__script_editor.model.get_directory_nodes(source)):
        self.__script_editor.model.unregister_project_nodes(node)
        self.__script_editor.unregister_node_path(node)
        self.__rename_path(source, target)
        node.name = os.path.basename(target)
        node.path = target
        self.__script_editor.model.node_changed(node)
        self.__script_editor.register_node_path(node)
        self.__script_editor.model.set_project_nodes(node)
|
python
|
def _get_vm_info(self):
    """
    Returns this VM info.

    Coroutine: runs VBoxManage ``showvminfo --machinereadable`` for this
    VM and parses the ``key="value"`` lines of its output.

    :returns: dict of info
    """
    vm_info = {}
    results = yield from self.manager.execute("showvminfo", [self._vmname, "--machinereadable"])
    for info in results:
        try:
            name, value = info.split('=', 1)
        except ValueError:
            # Line has no '=' separator: not a key/value pair, skip it.
            continue
        # Keys and values come back wrapped in double quotes.
        vm_info[name.strip('"')] = value.strip('"')
    return vm_info
|
python
|
def volume_shell(f_dist, m_dist):
    """ Compute the sensitive volume using sum over spherical shells.

    Parameters
    -----------
    f_dist: numpy.ndarray
        The distances of found injections
    m_dist: numpy.ndarray
        The distances of missed injections

    Returns
    --------
    volume: float
        Volume estimate
    volume_error: float
        The standard error in the volume

    NOTE(review): the input arrays are sorted in place (caller's arrays
    are mutated) — confirm callers do not rely on the original order.
    """
    f_dist.sort()
    m_dist.sort()
    # Found injections occupy indices < len(f_dist) after concatenation,
    # so dist_sorting lets us tell found from missed below.
    distances = numpy.concatenate([f_dist, m_dist])
    dist_sorting = distances.argsort()
    distances = distances[dist_sorting]
    low = 0
    vol = 0
    vol_err = 0
    for i in range(len(distances)):
        if i == len(distances) - 1:
            # Last point has no outer neighbour to form a shell with.
            break
        # Shell boundary: midpoint between consecutive distances.
        high = (distances[i+1] + distances[i]) / 2
        bin_width = high - low
        if dist_sorting[i] < len(f_dist):
            # Only found injections contribute shell volume.
            vol += 4 * numpy.pi * distances[i]**2.0 * bin_width
            vol_err += (4 * numpy.pi * distances[i]**2.0 * bin_width)**2.0
        low = high
    vol_err = vol_err ** 0.5
    return vol, vol_err
|
java
|
/**
 * Loads every XMLTooling configuration resource listed in {@code configs}
 * into a fresh {@link XMLConfigurator}.
 *
 * @throws ConfigurationException if any configuration fails to load
 */
public void bootstrap() throws ConfigurationException {
    XMLConfigurator configurator = new XMLConfigurator();
    for (String config : configs) {
        log.debug("Loading XMLTooling configuration " + config);
        // Resources are resolved relative to the Configuration class.
        configurator.load(Configuration.class.getResourceAsStream(config));
    }
}
|
java
|
/**
 * Builds a greater-than-or-equal expression against this field.
 * NOTE(review): the double is wrapped in single quotes before being
 * embedded in the expression — presumably the backend query syntax
 * requires quoted literals; confirm against the query builder.
 */
public Expression<Double> gte(double value) {
    String valueString = "'" + value + "'";
    return new Expression<Double>(this, Operation.gte, valueString);
}
|
python
|
def create_supervised_tbptt_trainer(
        model,
        optimizer,
        loss_fn,
        tbtt_step,
        dim=0,
        device=None,
        non_blocking=False,
        prepare_batch=_prepare_batch
):
    """Create a trainer for truncated backprop through time supervised models.

    Training recurrent model on long sequences is computationally intensive as
    it requires to process the whole sequence before getting a gradient.
    However, when the training loss is computed over many outputs
    (`X to many <https://karpathy.github.io/2015/05/21/rnn-effectiveness/>`_),
    there is an opportunity to compute a gradient over a subsequence. This is
    known as
    `truncated backpropagation through time <https://machinelearningmastery.com/
    gentle-introduction-backpropagation-time/>`_.

    This supervised trainer apply gradient optimization step every `tbtt_step`
    time steps of the sequence, while backpropagating through the same
    `tbtt_step` time steps.

    Args:
        model (`torch.nn.Module`): the model to train.
        optimizer (`torch.optim.Optimizer`): the optimizer to use.
        loss_fn (torch.nn loss function): the loss function to use.
        tbtt_step (int): the length of time chunks (last one may be smaller).
        dim (int): axis representing the time dimension.
        device (str, optional): device type specification (default: None).
            Applies to both model and batches.
        non_blocking (bool, optional): if True and this copy is between CPU and GPU,
            the copy may occur asynchronously with respect to the host. For other cases,
            this argument has no effect.
        prepare_batch (callable, optional): function that receives `batch`, `device`,
            `non_blocking` and outputs tuple of tensors `(batch_x, batch_y)`.

    Returns:
        Engine: a trainer engine with supervised update function.
    """
    if device:
        model.to(device)

    def _update(engine, batch):
        # One engine iteration: walk the sequence in tbtt_step-sized chunks,
        # taking one optimizer step per chunk.
        loss_list = []
        hidden = None
        x, y = batch
        for batch_t in zip(x.split(tbtt_step, dim=dim), y.split(tbtt_step, dim=dim)):
            x_t, y_t = prepare_batch(batch_t, device=device, non_blocking=non_blocking)
            # Fire event for start of iteration
            engine.fire_event(Tbptt_Events.TIME_ITERATION_STARTED)
            # Forward, backward and
            model.train()
            optimizer.zero_grad()
            if hidden is None:
                y_pred_t, hidden = model(x_t)
            else:
                # Detach so gradients do not flow past the previous chunk
                # (this is the "truncated" part of TBPTT).
                hidden = _detach_hidden(hidden)
                y_pred_t, hidden = model(x_t, hidden)
            loss_t = loss_fn(y_pred_t, y_t)
            loss_t.backward()
            optimizer.step()
            # Setting state of engine for consistent behaviour
            engine.state.output = loss_t.item()
            loss_list.append(loss_t.item())
            # Fire event for end of iteration
            engine.fire_event(Tbptt_Events.TIME_ITERATION_COMPLETED)
        # return average loss over the time splits
        return sum(loss_list) / len(loss_list)

    engine = Engine(_update)
    engine.register_events(*Tbptt_Events)
    return engine
|
java
|
/**
 * Estimates a uniform distribution spanning the observed min/max,
 * clamped to the finite double range to avoid infinities.
 */
public UniformDistribution estimate(DoubleMinMax mm) {
    return new UniformDistribution(Math.max(mm.getMin(), -Double.MAX_VALUE), Math.min(mm.getMax(), Double.MAX_VALUE));
}
|
python
|
def labels(self):
    """All labels present in the match patterns.

    RETURNS (tuple): The string labels (union of token- and
        phrase-pattern labels, deduplicated).

    DOCS: https://spacy.io/api/entityruler#labels
    """
    all_labels = set(self.token_patterns.keys())
    all_labels.update(self.phrase_patterns.keys())
    return tuple(all_labels)
|
python
|
def __remove_queue_logging_handler():
    '''
    This function will run once the additional loggers have been synchronized.
    It just removes the QueueLoggingHandler from the logging handlers.

    Idempotent: a second call is a no-op because the module-level
    LOGGING_STORE_HANDLER is reset to None on success.
    '''
    global LOGGING_STORE_HANDLER
    if LOGGING_STORE_HANDLER is None:
        # Already removed
        return
    root_logger = logging.getLogger()
    for handler in root_logger.handlers:
        # Identity comparison: remove exactly the stored handler instance.
        if handler is LOGGING_STORE_HANDLER:
            root_logger.removeHandler(LOGGING_STORE_HANDLER)
            # Redefine the null handler to None so it can be garbage collected
            LOGGING_STORE_HANDLER = None
            break
|
python
|
def unregister(self, signal):
    """
    Unregisters an existing signal.

    :param signal: Name of the signal

    Logs (at debug level) whether the signal was removed or was unknown.
    """
    if signal in self.signals:
        del self.signals[signal]
        self.__log.debug("Signal %s unregistered" % signal)
    else:
        # Bug fix: the original message had a %s placeholder but never
        # interpolated the signal name into it.
        self.__log.debug("Signal %s does not exist and could not be unregistered." % signal)
|
java
|
/**
 * Marshalls the given {@link TerminateJobFlowsRequest} using the supplied
 * protocol marshaller.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(TerminateJobFlowsRequest terminateJobFlowsRequest, ProtocolMarshaller protocolMarshaller) {
    if (terminateJobFlowsRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(terminateJobFlowsRequest.getJobFlowIds(), JOBFLOWIDS_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def read_credentials(self, credentials):
    """
    Reads credentials from configuration parameters.
    Each section represents an individual CredentialParams.

    Replaces (clears, then repopulates) the current ``self._items`` list.

    :param credentials: configuration parameters to be read
    """
    self._items.clear()
    for key in credentials.get_key_names():
        value = credentials.get_as_nullable_string(key)
        self._items.append(CredentialParams.from_tuples([key, value]))
|
python
|
def do_execute(self):
    """
    The actual execution of the actor.

    Emits one Token per integer from the resolved "min" option up to and
    including "max", stepping by "step".

    :return: None if successful, otherwise error message
    :rtype: str
    """
    for i in range(
            int(self.resolve_option("min")),
            int(self.resolve_option("max")) + 1,  # +1 makes "max" inclusive
            int(self.resolve_option("step"))):
        self._output.append(Token(i))
    return None
|
java
|
/**
 * Returns the names of the child group directories under the given parent
 * group id (dots map to path separators); an empty/null parent means the
 * repository root.  Returns an empty set when the directory does not
 * exist or cannot be listed.
 */
public Set<String> getGroupIds( String parentGroupId )
{
    File parentDir =
        StringUtils.isEmpty( parentGroupId ) ? root : new File( root, parentGroupId.replace( '.', '/' ) );
    if ( !parentDir.isDirectory() )
    {
        return Collections.emptySet();
    }
    File[] groupDirs = parentDir.listFiles();
    // Bug fix: listFiles() returns null on an I/O error even for an
    // existing directory; treat that as "no children" instead of NPE.
    if ( groupDirs == null )
    {
        return Collections.emptySet();
    }
    Set<String> result = new HashSet<String>();
    for ( File groupDir : groupDirs )
    {
        if ( groupDir.isDirectory() )
        {
            result.add( groupDir.getName() );
        }
    }
    return result;
}
|
java
|
/**
 * Creates (or returns the existing) YUV frame grabber for this device.
 * Only one frame grabber may exist at a time; if a grabber of a different
 * type already exists, a {@link StateException} is thrown.
 *
 * @param imf desired image format, or null to pick the best YUV-encodable one
 * @throws V4L4JException if the device cannot produce YUV420 frames or
 *         grabber initialisation fails
 */
public YUVFrameGrabber getYUVFrameGrabber(int w, int h, int input, int std,
        ImageFormat imf) throws V4L4JException{
    if(!supportYUV420 || deviceInfo==null)
        throw new ImageFormatException("This video device does not support "
                +"YUV-encoding of its frames.");
    if(imf!=null){
        if(!deviceInfo.getFormatList().
                getYUVEncodableFormats().contains(imf))
            throw new ImageFormatException(
                    "The image format "+imf.getName()+
                    " cannot be converted to YUV420");
    } else
        //if imf is null, pick the first format that can be YUV encoded
        //the list returned by getYUVEncodableFormats() is sorted by best
        //format first, and if we re here, we know there is at least one
        //format in there
        imf = deviceInfo.getFormatList().getYUVEncodableFormats().get(0);
    synchronized(this){
        if(fg==null) {
            state.get();
            fg = new YUVFrameGrabber(deviceInfo, v4l4jObject, w, h, input,
                    std, findTuner(input), imf, threadFactory);
            try {
                fg.init();
            } catch (V4L4JException ve){
                // Roll back the state acquisition on any init failure.
                fg = null;
                state.put();
                throw ve;
            } catch (StateException se){
                fg = null;
                state.put();
                throw se;
            } catch (Throwable t){
                fg = null;
                state.put();
                throw new V4L4JException("Error", t);
            }
            return (YUVFrameGrabber) fg;
        } else {
            // Bug fix: the original used fg.getClass().isInstance(
            // YUVFrameGrabber.class), which asks whether the Class object
            // is an instance of fg's class — always false for a grabber.
            // The intended check is whether fg IS a YUVFrameGrabber.
            if(fg instanceof YUVFrameGrabber)
                return (YUVFrameGrabber) fg;
            else {
                state.put();
                throw new StateException("A FrameGrabber object already "+
                        "exists");
            }
        }
    }
}
|
python
|
def report(usaf):
    """generate report for usaf base

    Python 2 code (uses print statements).  Builds a figure comparing
    annual GHI totals (1991-2010) against the TMY3 total for the station,
    with a cubic-interpolated annual curve, a polynomial trendline and a
    2-year rolling average, then saves it as <usaf>_annual_GHI.pdf.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111)
    station_info = geo.station_info(usaf)
    y = {}
    # Sum monthly totals into one annual total per year.
    for i in range(1991, 2011):
        monthData = monthly(usaf, i)
        t = sum(monthData)
        y[i] = t
        print t
    tmy3tot = tmy3.total(usaf)
    average = sum([v for k, v in y.items()])/20.
    # s: years sorted by total; o: years sorted chronologically.
    s = sorted(y.items(), key=lambda t: t[1])
    o = sorted(y.items(), key=lambda t: t[0])
    twohigh = s[-1][1] + s[-2][1]
    twolow = s[0][1] + s[1][1]
    # Tolerances: average of the two extreme years vs the overall mean.
    mintol = 1-twolow/2./average
    plustol = twohigh/2./average-1
    txt = ""
    txt += "%s\n" % station_info['Site Name']
    txt += 'TMY3/hist: %s/' % int(round(tmy3tot))
    txt += '%s\n' % int(round(average))
    txt += "high/low av: %s/" % int(round(twohigh/2.))
    txt += "%s\n" % int(round(twolow/2.))
    txt += "+%s/-%s%% " % (round(plustol*100, 0), round(mintol*100, 0))
    txt += "(-%s%% of TMY3)" % round((1-twolow/2./tmy3tot)*100, 0)
    print txt
    x = np.array([k for k, v in o])
    y = np.array([v for k, v in o])
    # Rolling 2-year average series (midpoints between adjacent years).
    rx = x[1:]
    ry = [(v + y[i+1])/2 for i, v in enumerate(y[:-1])]
    fit = pylab.polyfit(x, y, 3)
    fit_fn = pylab.poly1d(fit)
    f = interp1d(x, y, kind='cubic')
    f2 = interp1d(rx, ry, kind='cubic')
    xnew = np.linspace(min(x), max(x), 200)
    x2 = np.linspace(min(rx), max(rx), 200)
    # ax.plot(x,y)
    ax.plot(xnew, f(xnew), label="Annual GHI")
    ax.plot(xnew, fit_fn(xnew), label='trendline')
    ax.plot(x2, f2(x2), label='2 Year Ave')
    ax.plot([min(x), max(x)], [tmy3tot, tmy3tot], linestyle='--')
    leg = plt.legend(title=txt, loc=4, fancybox=True)
    leg.get_frame().set_alpha(0.5)
    # fig.text(min(x),min(y)-min(y)*.1,txt)
    # fig.text(.1,.1,txt)
    plt.tight_layout()
    fig.savefig('%s_annual_GHI.pdf' % (usaf), format='pdf')
|
java
|
/**
 * Loads the entity of the given class identified by {@code key}, either
 * through a fresh non-transactional resource manager or, inside a global
 * transaction, through the transaction coordinator.
 */
@Override
public <E> E get(Class<E> clazz, Object key) throws ConcurrentModificationException {
    if (clazz == null) {
        throw new IllegalArgumentException("'clazz' must not be [" + clazz + "]");
    }
    if (key == null) {
        throw new IllegalArgumentException("'id' must not be [" + key + "]");
    }
    if (!isTransactional()) {
        // Outside a transaction: use a throwaway resource manager.
        ResourceManager<Transaction> manager = new AppEngineResourceManager(datastore());
        return manager.get(clazz, key);
    } else {
        return ((AppEngineGlobalTransaction) transaction.get()).coordinator().get(clazz, key);
    }
}
|
python
|
def do_transition_for(brain_or_object, transition):
    """Performs a workflow transition for the passed in object.

    Python 2 code (uses ``basestring``).

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :param transition: Workflow transition id (must be a string)
    :returns: The object where the transtion was performed
    """
    if not isinstance(transition, basestring):
        fail("Transition type needs to be string, got '%s'" % type(transition))
    # Resolve a catalog brain to its full content object first.
    obj = get_object(brain_or_object)
    ploneapi.content.transition(obj, transition)
    return obj
|
java
|
/**
 * Builds a predicate that is satisfied when the attribute extracted by
 * {@code function} is contained in {@code iterable}.
 */
public static <T> Predicates<T> attributeIn(
        Function<? super T, ?> function,
        Iterable<?> iterable)
{
    return new AttributePredicate<T, Object>(function, Predicates.in(iterable));
}
|
python
|
def inspect(self, image=None, json=True, app=None, quiet=True):
    '''inspect will show labels, defile, runscript, and tests for an image

    Parameters
    ==========
    image: path of image to inspect
    json: print json instead of raw text (default True)
    quiet: Don't print result to the screen (default True)
    app: if defined, return help in context of an app
    '''
    check_install()
    # No image provided, default to use the client's loaded image
    if image is None:
        image = self._get_uri()
    cmd = self._init_command('inspect')
    if app is not None:
        cmd = cmd + ['--app', app]
    # Short flags: environment, deffile, labels, runscript, helpfile, test.
    options = ['e','d','l','r','hf','t']
    # After Singularity 3.0, helpfile was changed to H from
    if "version 3" in self.version():
        options = ['e','d','l','r','H','t']
    [cmd.append('-%s' % x) for x in options]
    if json is True:
        cmd.append('--json')
    cmd.append(image)
    result = run_command(cmd, quiet=False)
    if result['return_code'] == 0:
        result = jsonp.loads(result['message'][0])
        # Fix up labels
        labels = parse_labels(result)
    if not quiet:
        print(jsonp.dumps(result, indent=4))
    return result
|
java
|
/**
 * Registers the given node as a sub-slide and shows the slide for the
 * current step position, animating the transition when both a current
 * and a next slide exist.
 */
protected void showCustomSlideStep(final Node node) {
    addSubSlide(node);
    final Node nextSlide = this.subSlides.get(model().getStepPosition());
    if (this.currentSubSlide == null || nextSlide == null) {
        // No Animation
        this.currentSubSlide = nextSlide;
    } else {
        performStepAnimation(nextSlide);
    }
}
|
python
|
def _allocate_output(self, windows, shape):
    """
    Override the default array allocation to produce a LabelArray when we
    have a string-like dtype.
    """
    if self.dtype == int64_dtype:
        # Plain integer output: the default allocation works fine.
        return super(CustomClassifier, self)._allocate_output(
            windows,
            shape,
        )
    # This is a little bit of a hack. We might not know what the
    # categories for a LabelArray are until it's actually been loaded, so
    # we need to look at the underlying data.
    return windows[0].data.empty_like(shape)
|
python
|
def plat_specific_errors(*errnames):
    """Return error numbers for all errors in errnames on this platform.

    The 'errno' module contains different global constants depending on
    the specific platform (OS); names not defined here are silently
    skipped.  The result is de-duplicated while preserving first-seen
    order.
    """
    defined = dir(errno)
    codes = []
    for name in errnames:
        if name in defined:
            codes.append(getattr(errno, name))
    # dict.fromkeys de-dupes while keeping insertion order.
    return dict.fromkeys(codes).keys()
|
java
|
/**
 * Authenticates a user by binding to LDAP with their own credentials.
 * When {@code _cacheDuration > 0}, successful authentications are cached
 * (keyed by an MD5 of username:password) and re-used until they expire.
 *
 * @return true on successful login
 * @throws LoginException if role lookup fails
 * @throws NamingException on LDAP errors
 */
@SuppressWarnings("unchecked")
protected boolean bindingLogin(String username, Object password) throws LoginException,
        NamingException {
    final String cacheToken = Credential.MD5.digest(username + ":" + password.toString());
    if (_cacheDuration > 0) { // only worry about caching if there is a cacheDuration set.
        CachedUserInfo cached = USERINFOCACHE.get(cacheToken);
        if (cached != null) {
            if (System.currentTimeMillis() < cached.expires) {
                LOG.debug("Cache Hit for " + username + ".");
                userInfoCacheHits++;
                JAASUserInfo jaasUserInfo = new JAASUserInfo(cached.userInfo);
                try {
                    jaasUserInfo.fetchRoles();
                } catch(Exception ex) {
                    if(_debug) {
                        LOG.debug("Failed to fetch roles",ex);
                    }
                    throw new LoginException("Error obtaining user info.");
                }
                setCurrentUser(jaasUserInfo);
                setAuthenticated(true);
                return true;
            } else {
                LOG.info("Cache Eviction for " + username + ".");
                USERINFOCACHE.remove(cacheToken);
            }
        } else {
            LOG.debug("Cache Miss for " + username + ".");
        }
    }
    // Not cached (or cache disabled): do a real LDAP bind.
    SearchResult searchResult = findUser(username);
    String userDn = searchResult.getNameInNamespace();
    LOG.info("Attempting authentication: " + userDn);
    DirContext dirContext = createBindUserDirContext(userDn, password);
    setDemographicAttributes(searchResult.getAttributes());
    // use _rootContext to find roles, if configured to do so
    if ( _forceBindingLoginUseRootContextForRoles ) {
        dirContext = _rootContext;
        LOG.debug("Using _rootContext for role lookup.");
    }
    List roles = getUserRolesByDn(dirContext, userDn, username);
    UserInfo userInfo = new UserInfo(username, new Password(password.toString()), roles);
    if (_cacheDuration > 0) {
        long expires = System.currentTimeMillis() + _cacheDuration;
        USERINFOCACHE.put(cacheToken, new CachedUserInfo(userInfo, expires));
        // Bug fix: the original concatenated currentTimeMillis and
        // _cacheDuration as strings instead of adding them, so the log
        // showed two numbers glued together rather than the expiry time.
        LOG.debug("Adding " + username + " set to expire: " + expires);
    }
    JAASUserInfo jaasUserInfo = new JAASUserInfo(userInfo);
    try {
        jaasUserInfo.fetchRoles();
    } catch(Exception ex) {
        if(_debug) {
            LOG.debug("Failed to fetch roles",ex);
        }
        throw new LoginException("Error obtaining user info.");
    }
    setCurrentUser(jaasUserInfo);
    setAuthenticated(true);
    return true;
}
|
java
|
/**
 * Restores transient collections after deserialization: fields that were
 * not part of the serialized form come back null and are re-created here.
 */
public void doAfterDeserialization() {
    if ( listeners == null ) {
        listeners = new ArrayList<SessionListener>();
    }
    if ( notes == null ) {
        notes = new ConcurrentHashMap<String, Object>();
    }
}
|
python
|
def set_result(self, result):
    '''
    Set the result to Future object, wake up all the waiters

    :param result: result to set
    :raises ValueError: if a result has already been set
    '''
    # The presence of the _result attribute marks the future as resolved.
    if hasattr(self, '_result'):
        raise ValueError('Cannot set the result twice')
    self._result = result
    # Notify waiters through the scheduler's event channel.
    self._scheduler.emergesend(FutureEvent(self, result = result))
|
python
|
def float_input(message, low, high):
    '''
    Ask a user for a float input between two values

    args:
        message (str): Prompt for user
        low (float): Low value, user entered value must be > this value to be accepted
        high (float): High value, user entered value must be < this value to be accepted
    returns:
        float_in (float): Input float

    NOTE(review): the validation regex only accepts non-negative plain
    decimals (no sign, no exponent), so negative bounds can never be
    satisfied by user input — confirm intended.
    '''
    # Seed with an out-of-range value so the loop runs at least once.
    float_in = low - 1.0
    while (float_in < low) or (float_in > high):
        inp = input('Enter a ' + message + ' (float between ' + str(low) + ' and ' + str(high) + '): ')
        if re.match('^([0-9]*[.])?[0-9]+$', inp) is not None:
            float_in = float(inp)
        else:
            print(colored('Must be a float, try again!', 'red'))
    return float_in
|
java
|
/**
 * Adds a message to the flash-scoped {@link ActionMessages} bundle,
 * lazily creating the bundle on first use.
 */
public void addMessage(String message) {
    ActionMessages messages = (ActionMessages) next.get(Flash.MESSAGES);
    if (null == messages) {
        messages = new ActionMessages();
        put(Flash.MESSAGES, messages);
    }
    messages.getMessages().add(message);
}
|
python
|
def get_conditional_probs(self, source=None):
    """Returns the full conditional probabilities table as a numpy array,
    where row i*(k+1) + ly is the conditional probabilities of source i
    emmiting label ly (including abstains 0), conditioned on different
    values of Y, i.e.:

        c_probs[i*(k+1) + ly, y] = P(lambda_i = ly | Y = y)

    Note that this simply involves inferring the kth row by law of total
    probability and adding in to mu.

    If `source` is not None, returns only the corresponding block.
    """
    c_probs = np.zeros((self.m * (self.k + 1), self.k))
    mu = self.mu.detach().clone().numpy()
    for i in range(self.m):
        # si = self.c_data[(i,)]['start_index']
        # ei = self.c_data[(i,)]['end_index']
        # mu_i = mu[si:ei, :]
        mu_i = mu[i * self.k : (i + 1) * self.k, :]
        # Rows 1..k of each block come straight from mu.
        c_probs[i * (self.k + 1) + 1 : (i + 1) * (self.k + 1), :] = mu_i
        # The 0th row (corresponding to abstains) is the difference between
        # the sums of the other rows and one, by law of total prob
        c_probs[i * (self.k + 1), :] = 1 - mu_i.sum(axis=0)
    # Clamp away degenerate 0/1 probabilities.
    c_probs = np.clip(c_probs, 0.01, 0.99)
    if source is not None:
        return c_probs[source * (self.k + 1) : (source + 1) * (self.k + 1)]
    else:
        return c_probs
|
python
|
def _check_team_exists(team):
    """
    Check that the team registry actually exists.

    Resolves the registry hostname via DNS; on failure, distinguishes
    "no internet at all" from "bad team name" before raising
    CommandException.  A None team is a no-op.
    """
    if team is None:
        return
    hostname = urlparse(get_registry_url(team)).hostname
    try:
        socket.gethostbyname(hostname)
    except IOError:
        # DNS lookup failed — figure out whether it's connectivity or
        # a wrong team name.
        try:
            # Do we have internet?
            socket.gethostbyname('quiltdata.com')
        except IOError:
            message = "Can't find quiltdata.com. Check your internet connection."
        else:
            message = "Unable to connect to registry. Is the team name %r correct?" % team
        raise CommandException(message)
|
java
|
/**
 * Serialises a DOM node to XML text, optionally pretty-printed with a
 * 2-space indent.  Any failure is reported in-band as an "ERROR ..."
 * string rather than thrown.
 */
protected String getFormattedNodeXml(final Node nodeToConvert, boolean formatXml) {
    String formattedNodeXml;
    try {
        // -1 disables indentation in the transformer factory helper.
        final int numberOfBlanksToIndent = formatXml ? 2 : -1;
        final Transformer transformer = createXmlTransformer(numberOfBlanksToIndent);
        final StringWriter buffer = new StringWriter();
        transformer.transform(new DOMSource(nodeToConvert), new StreamResult(buffer));
        formattedNodeXml = buffer.toString();
    } catch (final Exception e) {
        formattedNodeXml = "ERROR " + e.getMessage();
    }
    return formattedNodeXml;
}
|
java
|
/**
 * Initialises the static context and executor service.  When no service
 * is supplied, a fixed-size default thread pool is created.
 */
public static void init(Context contextValue, ExecutorService service) {
    context = contextValue;
    if (service == null) {
        executerService = Executors.newFixedThreadPool(THREAD_POOL_SIZE_DEFAULT);
    } else {
        executerService = service;
    }
}
|
java
|
/**
 * Parses an {hour, minute, second} triple from the scanner, accepting
 * "HH", "HH:MM" and "HH:MM:SS" forms; missing components default to 0.
 *
 * NOTE(review): the range validation (hour 0-23, minute/second 0-59)
 * only runs on the branch where a minute was read — an "HH"-only input
 * is returned unvalidated; confirm this is intended.
 *
 * @throws IllegalDateFormatException if a validated component is out of range
 */
protected int[] parseTime(CharSequenceScanner scanner) {
    int hour = read2Digits(scanner);
    boolean colon = scanner.expect(':');
    int minute = read2Digits(scanner);
    int second = 0;
    if (minute == -1) {
        if (!colon) {
            // Bare "HH" form: minute defaults to zero.
            minute = 0;
        }
    } else {
        colon = scanner.expect(':');
        second = read2Digits(scanner);
        if ((second == -1) && (!colon)) {
            // "HH:MM" form: second defaults to zero.
            second = 0;
        }
        if (((hour < 0) || (hour > 23)) || ((minute < 0) || (minute > 59)) || ((second < 0) || (second > 59))) {
            throw new IllegalDateFormatException(scanner.getOriginalString());
        }
    }
    return new int[] { hour, minute, second };
}
|
python
|
def touch(self, connection=None):
    """
    Mark this update as complete.

    Upserts a row keyed by ``update_id`` into the marker table (insert if
    absent, otherwise refresh target_table and the inserted timestamp).

    IMPORTANT, If the marker table doesn't exist,
    the connection transaction will be aborted and the connection reset.
    Then the marker table will be created.
    """
    self.create_marker_table()
    if connection is None:
        connection = self.connect()
    # T-SQL upsert: INSERT when the update_id is new, UPDATE otherwise.
    connection.execute_non_query(
        """IF NOT EXISTS(SELECT 1
                         FROM {marker_table}
                         WHERE update_id = %(update_id)s)
               INSERT INTO {marker_table} (update_id, target_table)
               VALUES (%(update_id)s, %(table)s)
           ELSE
               UPDATE t
               SET target_table = %(table)s
                 , inserted = GETDATE()
               FROM {marker_table} t
               WHERE update_id = %(update_id)s
        """.format(marker_table=self.marker_table),
        {"update_id": self.update_id, "table": self.table})
    # make sure update is properly marked
    assert self.exists(connection)
|
python
|
def can_approve(self, user, **data):
    """
    Only org admins can approve joining an organisation.

    Coroutine-style: the boolean result is delivered via ``raise Return``
    (tornado-style) rather than a plain ``return``.

    :param user: a User
    :param data: data that the user wants to update
    """
    is_org_admin = user.is_org_admin(self.organisation_id)
    # Resellers may also approve, but only for pre-verified requests.
    is_reseller_preverifying = user.is_reseller() and data.get('pre_verified', False)
    raise Return(is_org_admin or is_reseller_preverifying)
|
java
|
/**
 * Tests whether two coplanar 3D triangles (v1,v2,v3) and (u1,u2,u3)
 * intersect.  The triangles are projected onto the axis-aligned plane
 * that maximises their area, then tested edge-against-edge and for full
 * containment of one triangle in the other.
 */
@Pure
public static boolean intersectsCoplanarTriangleTriangle(
        double v1x, double v1y, double v1z,
        double v2x, double v2y, double v2z,
        double v3x, double v3y, double v3z,
        double u1x, double u1y, double u1z,
        double u2x, double u2y, double u2z,
        double u3x, double u3y, double u3z) {
    int i0, i1;
    // first project onto an axis-aligned plane, that maximizes the area
    // of the triangles, compute indices: i0,i1.
    {
        // Components of the (unnormalised) triangle normal.
        double nx = v1y * (v2z - v3z) + v2y * (v3z - v1z) + v3y * (v1z - v2z);
        double ny = v1z * (v2x - v3x) + v2z * (v3x - v1x) + v3z * (v1x - v2x);
        double nz = v1x * (v2y - v3y) + v2x * (v3y - v1y) + v3x * (v1y - v2y);
        nx = (nx<0) ? -nx : nx;
        ny = (ny<0) ? -ny : ny;
        nz = (nz<0) ? -nz : nz;
        // i0/i1 select the two coordinate axes of the projection plane
        // (drop the axis with the largest normal component).
        if(nx>ny) {
            if(nx>nz) {
                // nx is greatest
                i0 = 1;
                i1 = 2;
            }
            else {
                // nz is greatest
                i0 = 0;
                i1 = 1;
            }
        }
        else { /* nx<=ny */
            if(nz>ny) {
                // nz is greatest
                i0 = 0;
                i1 = 1;
            }
            else {
                // ny is greatest
                i0 = 0;
                i1 = 2;
            }
        }
    }
    double[] tv1 = new double[] {v1x,v1y,v1z};
    double[] tv2 = new double[] {v2x,v2y,v2z};
    double[] tv3 = new double[] {v3x,v3y,v3z};
    double[] tu1 = new double[] {u1x,u1y,u1z};
    double[] tu2 = new double[] {u2x,u2y,u2z};
    double[] tu3 = new double[] {u3x,u3y,u3z};
    // test all edges of triangle 1 against the edges of triangle 2
    if (intersectsCoplanarTriangle(i0,i1,0,tv1,tv2,tu1,tu2,tu3)) return true;
    if (intersectsCoplanarTriangle(i0,i1,0,tv2,tv3,tu1,tu2,tu3)) return true;
    if (intersectsCoplanarTriangle(i0,i1,0,tv3,tv1,tu1,tu2,tu3)) return true;
    // finally, test if tri1 is totally contained in tri2 or vice versa
    if (AbstractTriangle3F.containsTrianglePoint(i0,i1,tv1,tu1,tu2,tu3)) return true;
    if (AbstractTriangle3F.containsTrianglePoint(i0,i1,tu1,tv1,tv2,tv3)) return true;
    return false;
}
|
python
|
def _threads(self, handlers):
""" Calculates maximum number of threads that will be started """
if self.threads < len(handlers):
return self.threads
return len(handlers)
|
java
|
/**
 * Encrypts the data in {@code src} into {@code outNetBuffer} using the
 * SSL engine.  May only be called after the handshake has completed.
 * An empty source yields an empty (shared) output buffer.
 *
 * @throws SSLException on any engine status other than OK/BUFFER_OVERFLOW
 */
public void encrypt(ByteBuffer src) throws SSLException {
    if (!handshakeComplete) {
        throw new IllegalStateException();
    }
    if (!src.hasRemaining()) {
        if (outNetBuffer == null) {
            outNetBuffer = emptyBuffer;
        }
        return;
    }
    createOutNetBuffer(src.remaining());
    // Loop until there is no more data in src
    while (src.hasRemaining()) {
        SSLEngineResult result = sslEngine.wrap(src, outNetBuffer.buf());
        if (result.getStatus() == SSLEngineResult.Status.OK) {
            if (result.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.NEED_TASK) {
                doTasks();
            }
        } else if (result.getStatus() == SSLEngineResult.Status.BUFFER_OVERFLOW) {
            // Output buffer too small: double its capacity and retry.
            outNetBuffer.capacity(outNetBuffer.capacity() << 1);
            outNetBuffer.limit(outNetBuffer.capacity());
        } else {
            throw new SSLException("SSLEngine error during encrypt: "
                    + result.getStatus() + " src: " + src
                    + "outNetBuffer: " + outNetBuffer);
        }
    }
    // Prepare the buffer for reading by the caller.
    outNetBuffer.flip();
}
|
python
|
def update(self, content):
    """Enumerates the bytes of the supplied bytearray and updates the CRC-64.

    One table lookup per byte; no return value.
    """
    for byte in content:
        index = (self._crc64 & 0xff) ^ byte
        self._crc64 = (self._crc64 >> 8) ^ self._lookup_table[index]
|
java
|
/**
 * Returns whether the given (possibly wrapped) HC node is a JS node,
 * after unwrapping any wrapper layers.
 */
public static boolean isJSNode (@Nullable final IHCNode aNode)
{
    final IHCNode aUnwrappedNode = HCHelper.getUnwrappedNode (aNode);
    return isDirectJSNode (aUnwrappedNode);
}
|
java
|
/**
 * Registers a schema in both lookup directions: schema -> generated
 * class template spec, and fully-qualified class name -> schema.
 */
public void registerDefinedSchema(DataSchema schema)
{
    final ClassTemplateSpec spec = createFromDataSchema(schema);
    _schemaToClassMap.put(schema, spec);
    _classNameToSchemaMap.put(spec.getFullName(), schema);
}
|
java
|
/**
 * Evaluates the type-transformation (TTL) expressions among the given
 * template types, using the inferred types as the initial variable
 * environment.  Returns a map of transformed types, or {@code null} when
 * none of the template types carries a type transformation.
 */
private Map<TemplateType, JSType> evaluateTypeTransformations(
        ImmutableList<TemplateType> templateTypes,
        Map<TemplateType, JSType> inferredTypes,
        FlowScope scope) {
    Map<String, JSType> typeVars = null;
    Map<TemplateType, JSType> result = null;
    TypeTransformation ttlObj = null;
    for (TemplateType type : templateTypes) {
        if (type.isTypeTransformation()) {
            // Lazy initialization when the first type transformation is found
            if (ttlObj == null) {
                ttlObj = new TypeTransformation(compiler, scope.getDeclarationScope());
                typeVars = buildTypeVariables(inferredTypes);
                result = new LinkedHashMap<>();
            }
            // Evaluate the type transformation expression using the current
            // known types for the template type variables
            JSType transformedType = ttlObj.eval(
                    type.getTypeTransformation(),
                    ImmutableMap.copyOf(typeVars));
            result.put(type, transformedType);
            // Add the transformed type to the type variables so later
            // transformations can refer to it.
            typeVars.put(type.getReferenceName(), transformedType);
        }
    }
    return result;
}
|
java
|
/**
 * Updates the caption of the named option field on the given query page,
 * if such a field exists; unknown fields are silently ignored.
 */
protected void synchronizeFieldCaption(QueryPage queryPage, String fieldName, String fieldCaption) {
    QueryFieldDAO queryFieldDAO = new QueryFieldDAO();
    QueryOptionField queryField = (QueryOptionField) queryFieldDAO.findByQueryPageAndName(queryPage, fieldName);
    if (queryField != null)
        queryFieldDAO.updateCaption(queryField, fieldCaption);
}
|
java
|
/**
 * Extracts the text between the &lt;sign&gt; and &lt;/sign&gt; XML nodes
 * of the response body, or {@code null} when either tag is absent.
 */
private String getSign(String body) {
    String signNodeName = "<" + AlipayConstants.SIGN + ">";
    String signEndNodeName = "</" + AlipayConstants.SIGN + ">";
    int indexOfSignNode = body.indexOf(signNodeName);
    int indexOfSignEndNode = body.indexOf(signEndNodeName);
    if (indexOfSignNode < 0 || indexOfSignEndNode < 0) {
        return null;
    }
    // Signature payload sits between the opening and closing tags.
    return body.substring(indexOfSignNode + signNodeName.length(), indexOfSignEndNode);
}
|
java
|
/**
 * Returns the count stored under the (first, second) key pair, or 0.0
 * when no counter exists for {@code first}.
 */
public double getCount(F first, S second) {
    Counter<S> counter = maps.get(first);
    if (counter == null)
        return 0.0;
    return counter.getCount(second);
}
|
python
|
def stop(self, *args):
    """
    Stops the TendrilManager. Requires cooperation from the
    listener implementation, which must watch the ``running``
    attribute and ensure that it stops accepting connections
    should that attribute become False. Note that some tendril
    managers will not exit from the listening thread until all
    connections have been closed.
    """
    # Remove ourself from the dictionary of running managers
    try:
        del self._running_managers[self._manager_key]
    except KeyError:
        # Already removed (e.g. stop() called twice) — nothing to do.
        pass
    # Signal the listener loop to wind down and clear the bound address.
    self.running = False
    self._local_addr = None
    self._local_addr_event.clear()
|
java
|
/**
 * Handles a compiler option: encoding and multi-release are consumed
 * here; everything else is delegated to the file locations handler.
 *
 * @return true if the option was recognised and handled
 */
public boolean handleOption(Option option, String value) {
    switch (option) {
        case ENCODING:
            encodingName = value;
            return true;
        case MULTIRELEASE:
            multiReleaseValue = value;
            // Locations also need the multi-release setting for lookup.
            locations.setMultiReleaseValue(value);
            return true;
        default:
            return locations.handleOption(option, value);
    }
}
|
python
|
def photo(self):
    """
    Returns either the :tl:`WebDocument` thumbnail for
    normal results or the :tl:`Photo` for media results.

    Implicitly returns ``None`` for any other result type.
    """
    if isinstance(self.result, types.BotInlineResult):
        return self.result.thumb
    elif isinstance(self.result, types.BotInlineMediaResult):
        return self.result.photo
|
python
|
def _synchronize_node(configfile, node):
    """Perform the Synchronize step of a Chef run.

    Uploads the rendered node.json plus all cookbooks, roles,
    environments and data bags to the remote node, optionally syncs a
    local package directory, and applies the environment-library patch
    for data bags.

    Args:
        configfile: path to the locally rendered node.json (deleted
            after upload).
        node: node dictionary for the host being configured (currently
            unused here; kept for interface compatibility).
    """
    msg = "Synchronizing nodes, environments, roles, cookbooks and data bags..."
    if env.parallel:
        msg = "[{0}]: {1}".format(env.host_string, msg)
    print(msg)
    # First upload node.json
    remote_file = '/etc/chef/node.json'
    # NOTE(review): mode=400 is a *decimal* literal; 0o400 was probably
    # intended -- confirm against upstream before changing.
    put(configfile, remote_file, use_sudo=True, mode=400)
    with hide('stdout'):
        sudo('chown root:$(id -g -n root) {0}'.format(remote_file))
    # Remove local temporary node file
    os.remove(configfile)
    # Synchronize kitchen
    extra_opts = "-q"
    if env.follow_symlinks:
        extra_opts += " --copy-links"
    ssh_opts = ""
    if env.ssh_config_path:
        ssh_opts += " -F %s" % os.path.expanduser(env.ssh_config_path)
    if env.encrypted_data_bag_secret:
        # BUGFIX: 0600 is Python 2-only syntax; 0o600 is valid on 2.6+ and 3.
        put(env.encrypted_data_bag_secret,
            "/etc/chef/encrypted_data_bag_secret",
            use_sudo=True,
            mode=0o600)
        sudo('chown root:$(id -g -n root) /etc/chef/encrypted_data_bag_secret')
    paths_to_sync = ['./data_bags', './roles', './environments']
    for cookbook_path in cookbook_paths:
        paths_to_sync.append('./{0}'.format(cookbook_path))
    # Add berksfile directory to sync_list
    if env.berksfile:
        paths_to_sync.append(env.berksfile_cookbooks_directory)
    # BUGFIX: compare string values with ==, not identity ('is').
    if env.loglevel == "debug":
        extra_opts = ""
    if env.gateway:
        ssh_key_file = '.ssh/' + os.path.basename(' '.join(env.ssh_config.lookup(
            env.host_string)['identityfile']))
        ssh_opts += " " + env.gateway + " ssh -o StrictHostKeyChecking=no -i "
        ssh_opts += ssh_key_file
    rsync_project(
        env.node_work_path,
        ' '.join(paths_to_sync),
        exclude=('*.svn', '.bzr*', '.git*', '.hg*'),
        delete=True,
        extra_opts=extra_opts,
        ssh_opts=ssh_opts
    )
    if env.sync_packages_dest_dir and env.sync_packages_local_dir:
        # BUGFIX: format before printing -- print() returns None, so the
        # original ``print(...).format(...)`` raised AttributeError.
        print("Uploading packages from {0} to remote server {2} directory "
              "{1}".format(env.sync_packages_local_dir,
                           env.sync_packages_dest_dir, env.host_string))
        try:
            rsync_project(
                env.sync_packages_dest_dir,
                env.sync_packages_local_dir+"/*",
                exclude=('*.svn', '.bzr*', '.git*', '.hg*'),
                delete=True,
                extra_opts=extra_opts,
                ssh_opts=ssh_opts
            )
        except Exception:
            # Best-effort: package sync failure must not abort the run.
            print("Warning: package upload failed. Continuing cooking...")
    _add_environment_lib()
|
java
|
/**
 * SOAP endpoint for modifying a datastream by value.
 *
 * Unwraps the incoming SOAP parameters (content handler, alt-ID array),
 * delegates to the management module, and returns the modification
 * timestamp as a string.  Any failure is logged and rethrown as a CXF
 * fault.
 */
@Override
public String modifyDatastreamByValue(String pid,
                                      String dsID,
                                      org.fcrepo.server.types.gen.ArrayOfString altIDs,
                                      String dsLabel,
                                      String mimeType,
                                      String formatURI,
                                      DataHandler dsContent,
                                      String checksumType,
                                      String checksum,
                                      String logMessage,
                                      boolean force) {
    LOG.debug("start: modifyDatastreamByValue, {}, {}", pid, dsID);
    assertInitialized();
    try {
        MessageContext ctx = context.getMessageContext();
        // New datastream content is optional; null means keep existing bytes.
        InputStream byteStream = null;
        if (dsContent != null) {
            byteStream = dsContent.getInputStream();
        }
        String[] altIDsArray = null;
        if (altIDs != null && altIDs.getItem() != null) {
            altIDsArray = altIDs.getItem().toArray(EMPTY_STRING_ARRAY);
        }
        return DateUtility.convertDateToString(m_management
                .modifyDatastreamByValue(ReadOnlyContext
                                                 .getSoapContext(ctx),
                                         pid,
                                         dsID,
                                         altIDsArray,
                                         dsLabel,
                                         mimeType,
                                         formatURI,
                                         byteStream,
                                         checksumType,
                                         checksum,
                                         logMessage,
                                         null));
    } catch (Throwable th) {
        LOG.error("Error modifying datastream by value", th);
        throw CXFUtility.getFault(th);
    } finally {
        LOG.debug("end: modifyDatastreamByValue, {}, {}", pid, dsID);
    }
}
|
java
|
/**
 * Verifies that {@code value} has the Java type required by the given
 * wire-format field type.
 *
 * @throws NullPointerException     if {@code value} is null
 * @throws IllegalArgumentException if {@code value} has the wrong type
 */
private static void verifyType(final WireFormat.FieldType type,
                               final Object value) {
  if (value == null) {
    throw new NullPointerException();
  }

  final boolean accepted;
  switch (type.getJavaType()) {
    case INT:         accepted = value instanceof Integer;    break;
    case LONG:        accepted = value instanceof Long;       break;
    case FLOAT:       accepted = value instanceof Float;      break;
    case DOUBLE:      accepted = value instanceof Double;     break;
    case BOOLEAN:     accepted = value instanceof Boolean;    break;
    case STRING:      accepted = value instanceof String;     break;
    case BYTE_STRING: accepted = value instanceof ByteString; break;
    case ENUM:
      // TODO(kenton): Caller must do type checking here, I guess.
      accepted = value instanceof Internal.EnumLite;
      break;
    case MESSAGE:
      // TODO(kenton): Caller must do type checking here, I guess.
      accepted = (value instanceof MessageLite) || (value instanceof LazyField);
      break;
    default:
      accepted = false;
      break;
  }

  if (!accepted) {
    // Naming the offending field here would make chained setField()
    // failures easier to diagnose, but FieldSet no longer has descriptors.
    throw new IllegalArgumentException(
        "Wrong object type used with protocol message reflection.");
  }
}
|
java
|
/**
 * Reads the partition key stored in the given Lucene document and
 * decorates it.
 *
 * @param document Lucene document containing the serialized key
 * @return the decorated partition key
 */
public DecoratedKey partitionKey(Document document) {
    String string = document.get(FIELD_NAME);
    // The key is stored as a string form of its ByteBuffer representation.
    ByteBuffer partitionKey = ByteBufferUtils.fromString(string);
    return partitionKey(partitionKey);
}
|
java
|
/**
 * Updates a certificate using the parameters bundled in the request
 * object; thin convenience overload that unpacks the request fields.
 *
 * @param updateCertificateRequest carrier of vault URL, name, version,
 *                                 policy, attributes and tags
 * @return the updated certificate bundle
 */
public CertificateBundle updateCertificate(UpdateCertificateRequest updateCertificateRequest) {
    return updateCertificate(updateCertificateRequest.vaultBaseUrl(), updateCertificateRequest.certificateName(),
            updateCertificateRequest.certificateVersion(), updateCertificateRequest.certificatePolicy(),
            updateCertificateRequest.certificateAttributes(), updateCertificateRequest.tags());
}
|
python
|
def ge(self, event_property, value):
    """A greater-than-or-equal-to filter chain.

    Returns a copy of this expression with a GE filter appended; the
    original expression is left unmodified.

    >>> request_time = EventExpression('request', 'elapsed_ms')
    >>> filtered = request_time.ge('elapsed_ms', 500)
    >>> print(filtered)
    request(elapsed_ms).ge(elapsed_ms, 500)
    """
    chained = self.copy()
    chained.filters.append(filters.GE(event_property, value))
    return chained
|
java
|
/**
 * Returns the SRE install currently selected in this UI block.
 *
 * Resolution order: the system-wide default SRE (when that option is
 * enabled and its radio button is selected), then the SRE provided by
 * the project, then the specific SRE chosen in the widget.
 *
 * @return the selected SRE install
 */
public ISREInstall getSelectedSRE() {
    if (this.enableSystemWideSelector && this.systemSREButton.getSelection()) {
        return SARLRuntime.getDefaultSREInstall();
    }
    if (!this.projectProviderFactories.isEmpty() && this.projectSREButton.getSelection()) {
        return retreiveProjectSRE();
    }
    return getSpecificSRE();
}
|
java
|
/**
 * Tests whether the given language is supported.
 *
 * Only the value produced by {@code getLanguageComponent} is used as the
 * lookup key, so region/variant suffixes do not affect the result.
 *
 * @param language language tag to test
 * @return true when the language component has an entry in the lookup map
 */
public boolean isSupportedLanguage(String language) {
    String lang = getLanguageComponent(language);
    return languageLookup.containsKey(lang);
}
|
java
|
/**
 * Performs the remote half of browser initialization: sends a
 * CreateBrowserQuery carrying the browser/session ids, target queue and
 * message selector, and blocks until the server acknowledges it.
 *
 * @throws JMSException if the remote request fails
 */
protected void remoteInit() throws JMSException
{
    CreateBrowserQuery query = new CreateBrowserQuery();
    query.setBrowserId(id);
    query.setSessionId(session.getId());
    query.setQueue(queue);
    query.setMessageSelector(messageSelector);
    transportEndpoint.blockingRequest(query);
}
|
java
|
/**
 * Applies an incoming property change to this map and propagates it to
 * registered listeners.
 *
 * Properties this object does not recognize are ignored, as are updates
 * whose new value is reference-identical to the current one.
 * {@code m_strCurrentProperty} is set around the update to suppress
 * echoing the change back to its originator.
 */
public void propertyChange(PropertyChangeEvent evt)
{
    String strProperty = evt.getPropertyName();
    if (this.isValidProperty(strProperty))
    {
        Object objCurrentValue = this.get(strProperty);
        if (evt.getNewValue() != objCurrentValue)
        {
            m_strCurrentProperty = strProperty; // Eliminate the chance of echos
            // A null new value means the property is being removed.
            if (evt.getNewValue() != null)
                this.put(strProperty, evt.getNewValue());
            else
                this.remove(strProperty);
            //x this.firePropertyChange(strProperty, evt.getOldValue(), evt.getNewValue()); // Propogate the property change
            if (propertyChange != null)
                propertyChange.firePropertyChange(evt);
            m_strCurrentProperty = null;
        }
    }
}
|
java
|
/**
 * Executes the HTTP exchange handler under the given authenticated
 * subject so downstream code sees the caller's security context.
 *
 * @throws SecurityException wrapping the cause of any privileged-action
 *                           failure
 */
private void doHandleAs(Subject subject, final HttpExchange pHttpExchange) {
    try {
        Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
            public Void run() throws IOException {
                doHandle(pHttpExchange);
                return null;
            }
        });
    } catch (PrivilegedActionException e) {
        throw new SecurityException("Security exception: " + e.getCause(),e.getCause());
    }
}
|
python
|
def _init_metadata(self, **kwargs):
    """Initialize form metadata"""
    # Let the OSID base class populate the common metadata map first.
    osid_objects.OsidObjectForm._init_metadata(self, **kwargs)
    # Cache the default grade-system id used when the field is left unset.
    self._grade_system_default = self._mdata['grade_system']['default_id_values'][0]
|
java
|
/**
 * Opens an HTTP connection for the given URL, propagating the global
 * follow-redirects setting onto the instance.
 *
 * @param url target URL
 * @return an opened (but not yet connected) HttpURLConnection
 * @throws IOException if the connection cannot be opened
 */
protected HttpURLConnection createConnection(URL url) throws IOException {
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    // Workaround for HttpURLConnection not observing the
    // HttpURLConnection.setFollowRedirects() property.
    // Happening in Android M release
    // https://code.google.com/p/android/issues/detail?id=194495
    connection.setInstanceFollowRedirects(HttpURLConnection.getFollowRedirects());
    return connection;
}
|
python
|
def get_deployment_targets(self, project, deployment_group_id, tags=None, name=None, partial_name_match=None, expand=None, agent_status=None, agent_job_result=None, continuation_token=None, top=None, enabled=None, property_filters=None):
    """GetDeploymentTargets.
    [Preview API] Get a list of deployment targets in a deployment group.
    :param str project: Project ID or project name
    :param int deployment_group_id: ID of the deployment group.
    :param [str] tags: Get only the deployment targets that contain all these comma separted list of tags.
    :param str name: Name pattern of the deployment targets to return.
    :param bool partial_name_match: When set to true, treats **name** as pattern. Else treats it as absolute match. Default is **false**.
    :param str expand: Include these additional details in the returned objects.
    :param str agent_status: Get only deployment targets that have this status.
    :param str agent_job_result: Get only deployment targets that have this last job result.
    :param str continuation_token: Get deployment targets with names greater than this continuationToken lexicographically.
    :param int top: Maximum number of deployment targets to return. Default is **1000**.
    :param bool enabled: Get only deployment targets that are enabled or disabled. Default is 'null' which returns all the targets.
    :param [str] property_filters:
    :rtype: [DeploymentMachine]
    """
    # URL route segments identifying the project and deployment group.
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    if deployment_group_id is not None:
        route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
    # Optional filters are only serialized into query parameters when set.
    query_parameters = {}
    if tags is not None:
        # Multi-valued filters are sent as comma-separated strings.
        tags = ",".join(tags)
        query_parameters['tags'] = self._serialize.query('tags', tags, 'str')
    if name is not None:
        query_parameters['name'] = self._serialize.query('name', name, 'str')
    if partial_name_match is not None:
        query_parameters['partialNameMatch'] = self._serialize.query('partial_name_match', partial_name_match, 'bool')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
    if agent_status is not None:
        query_parameters['agentStatus'] = self._serialize.query('agent_status', agent_status, 'str')
    if agent_job_result is not None:
        query_parameters['agentJobResult'] = self._serialize.query('agent_job_result', agent_job_result, 'str')
    if continuation_token is not None:
        query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
    if top is not None:
        query_parameters['$top'] = self._serialize.query('top', top, 'int')
    if enabled is not None:
        query_parameters['enabled'] = self._serialize.query('enabled', enabled, 'bool')
    if property_filters is not None:
        property_filters = ",".join(property_filters)
        query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
    # Issue the REST call and deserialize the wrapped collection response.
    response = self._send(http_method='GET',
                          location_id='2f0aa599-c121-4256-a5fd-ba370e0ae7b6',
                          version='5.1-preview.1',
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('[DeploymentMachine]', self._unwrap_collection(response))
|
java
|
/**
 * Instruments an outgoing message with a new producer span.
 *
 * Extracts any trace context already present in the headers, starts the
 * next span from it, strips the old trace headers and injects the fresh
 * context before the message continues down the channel.
 */
@Override
public Message<?> preSend(Message<?> message, MessageChannel channel) {
    if (emptyMessage(message)) {
        return message;
    }
    Message<?> retrievedMessage = getMessage(message);
    MessageHeaderAccessor headers = mutableHeaderAccessor(retrievedMessage);
    TraceContextOrSamplingFlags extracted = this.extractor.extract(headers);
    Span span = this.threadLocalSpan.next(extracted);
    // Remove stale trace headers so only the new context is propagated.
    MessageHeaderPropagation.removeAnyTraceHeaders(headers,
            this.tracing.propagation().keys());
    this.injector.inject(span.context(), headers);
    if (!span.isNoop()) {
        span.kind(Span.Kind.PRODUCER).name("send").start();
        span.remoteServiceName(REMOTE_SERVICE_NAME);
        addTags(message, span, channel);
    }
    if (log.isDebugEnabled()) {
        log.debug("Created a new span in pre send" + span);
    }
    Message<?> outputMessage = outputMessage(message, retrievedMessage, headers);
    // Direct channels are handled inline via the beforeHandle hook.
    if (isDirectChannel(channel)) {
        beforeHandle(outputMessage, channel, null);
    }
    return outputMessage;
}
|
java
|
/**
 * Resolves a localized message pattern for the given key and formats it
 * with the supplied parameters.
 *
 * Falls back to the raw key when no bundle entry exists; returns the
 * unformatted pattern when no parameters are given.
 */
public static String message(String key, Locale locale, Object... params) {
    final Locale effective = (locale == null) ? Locale.getDefault() : locale;
    String pattern;
    try {
        pattern = ResourceBundle.getBundle(BUNDLE, effective).getString(key);
    } catch (MissingResourceException e) {
        // No translation available: use the key itself as the pattern.
        pattern = key;
    }
    if (empty(params)) {
        return pattern;
    }
    return new MessageFormat(pattern).format(params);
}
|
java
|
/**
 * Blocks the calling thread for at least the given number of
 * milliseconds.
 *
 * Replaces the previous busy-wait (which spun on currentTimeMillis and
 * burned a full CPU core) with Thread.sleep. If the thread is
 * interrupted, the interrupt status is restored and the wait ends early.
 *
 * @param milliseconds how long to wait
 */
private static void waiting(int milliseconds) {
    final long deadline = System.currentTimeMillis() + milliseconds;
    long remaining = milliseconds;
    while (remaining > 0) {
        try {
            Thread.sleep(remaining);
        } catch (InterruptedException e) {
            // Preserve the interrupt for callers and stop waiting.
            Thread.currentThread().interrupt();
            return;
        }
        remaining = deadline - System.currentTimeMillis();
    }
}
|
python
|
def followers(self):
    """Get the users following this question.

    :return: the users that follow this question
    :rtype: Author.Iterable

    .. note::
        If someone follows the question while iteration is in progress,
        some users may be yielded more than once.
    """
    self._make_soup()
    followers_url = self.url + 'followers'
    for x in common_follower(followers_url, self.xsrf, self._session):
        yield x
|
python
|
def absent(name, purge=False, force=False):
    '''
    Ensure that the named user is absent

    name
        The name of the user to remove

    purge
        Set purge to True to delete all of the user's files as well as the user,
        Default is ``False``.

    force
        If the user is logged in, the absent state will fail. Set the force
        option to True to remove the user even if they are logged in. Not
        supported in FreeBSD and Solaris, Default is ``False``.
    '''
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': ''}

    # ``user.info`` returns a non-empty dict only when the user exists.
    lusr = __salt__['user.info'](name)
    if lusr:
        # The user is present, make it not present
        if __opts__['test']:
            # Test mode: report the pending removal without performing it.
            ret['result'] = None
            ret['comment'] = 'User {0} set for removal'.format(name)
            return ret
        # Snapshot group membership so groups removed along with the user
        # (e.g. the private group) can be reported in ``changes``.
        beforegroups = set(salt.utils.user.get_group_list(name))
        ret['result'] = __salt__['user.delete'](name, purge, force)
        # Groups that still exist after the deletion.
        aftergroups = set([g for g in beforegroups if __salt__['group.info'](g)])
        if ret['result']:
            ret['changes'] = {}
            for g in beforegroups - aftergroups:
                ret['changes']['{0} group'.format(g)] = 'removed'
            ret['changes'][name] = 'removed'
            ret['comment'] = 'Removed user {0}'.format(name)
        else:
            ret['result'] = False
            ret['comment'] = 'Failed to remove user {0}'.format(name)
        return ret

    ret['comment'] = 'User {0} is not present'.format(name)

    return ret
|
java
|
/**
 * Returns the value stored at the given row under the given column name.
 *
 * @param row    zero-based row index (out-of-range indexes throw
 *               IndexOutOfBoundsException via the backing list)
 * @param column column name; must not be null
 * @return the cell value, or null when the row has no such column
 */
public Object get(int row, @NotNull String column) {
    return rows.get(row).get(column);
}
|
java
|
/**
 * Fetches release information for the given app and release via the API
 * connection, authenticated with this client's API key.
 *
 * @param appName     name of the application
 * @param releaseName name of the release to look up
 * @return the release details
 */
public Release getReleaseInfo(String appName, String releaseName) {
    return connection.execute(new ReleaseInfo(appName, releaseName), apiKey);
}
|
java
|
/**
 * Lists the detector definitions for a site slot's diagnostic category,
 * unwrapping each service response page into its body.
 *
 * @param resourceGroupName  resource group containing the site
 * @param siteName           name of the site
 * @param diagnosticCategory diagnostic category to query
 * @param slot               deployment slot name
 * @return an observable emitting pages of detector definitions
 */
public Observable<Page<DetectorDefinitionInner>> listSiteDetectorsSlotAsync(final String resourceGroupName, final String siteName, final String diagnosticCategory, final String slot) {
    return listSiteDetectorsSlotWithServiceResponseAsync(resourceGroupName, siteName, diagnosticCategory, slot)
        .map(new Func1<ServiceResponse<Page<DetectorDefinitionInner>>, Page<DetectorDefinitionInner>>() {
            @Override
            public Page<DetectorDefinitionInner> call(ServiceResponse<Page<DetectorDefinitionInner>> response) {
                return response.body();
            }
        });
}
|
python
|
def nagios(self, stream=sys.stdout):
    """
    Emit a Nagios-style status line and return the matching exit code.

    return 0 (OK)       if there are no errors in the state.
    return 1 (WARNING)  if a backfill app only has 1 error.
    return 2 (CRITICAL) if a backfill app has > 1 error.
    return 2 (CRITICAL) if a non-backfill app has 1 error.
    """
    warnings = []
    criticals = []
    # Inspect the persisted job state for every configured job class.
    for class_name, job_class in self.config.crontabber.jobs.class_list:
        if job_class.app_name in self.job_state_database:
            info = self.job_state_database.get(job_class.app_name)
            if not info.get('error_count', 0):
                continue
            error_count = info['error_count']
            # trouble!
            serialized = (
                '%s (%s) | %s | %s' %
                (job_class.app_name,
                 class_name,
                 info['last_error']['type'],
                 info['last_error']['value'])
            )
            # NOTE(review): only the *presence* of _is_backfill_app is
            # tested, not its truthiness -- confirm that is intentional.
            if (
                error_count == 1 and
                hasattr(job_class, "_is_backfill_app")
            ):
                # just a warning for now
                warnings.append(serialized)
            else:
                # anything worse than that is critical
                criticals.append(serialized)

    if criticals:
        stream.write('CRITICAL - ')
        stream.write('; '.join(criticals))
        stream.write('\n')
        return 2
    elif warnings:
        stream.write('WARNING - ')
        stream.write('; '.join(warnings))
        stream.write('\n')
        return 1

    stream.write('OK - All systems nominal')
    stream.write('\n')
    return 0
|
java
|
/**
 * Parses the given command-line arguments, delegating to the array-based
 * overload.
 *
 * @param args the arguments, in order
 * @throws CmdLineException if the arguments cannot be parsed
 */
public void parseArgument(Collection<String> args) throws CmdLineException {
    // Passing a zero-length array is the preferred toArray idiom: the JVM
    // allocates the correctly sized array internally.
    parseArgument(args.toArray(new String[0]));
}
|
java
|
/**
 * Samples points around the given ellipse and assigns each an edge
 * weight computed from integral-image differences along the local
 * "tangent" line (the direction from the center through the point).
 *
 * Sample coordinates are stored in ellipse-local space scaled by the
 * semi-major axis for numerical stability. Points whose sample line
 * leaves the image are skipped entirely.
 */
void computePointsAndWeights(EllipseRotated_F64 ellipse) {
    // use the semi-major axis to scale the input points for numerical stability
    double localScale = ellipse.a;
    samplePts.reset();
    weights.reset();
    int numSamples = radialSamples * 2 + 2;
    int numPts = numSamples - 1;
    Point2D_F64 sample = new Point2D_F64();
    for (int i = 0; i < numSampleContour; i++) {
        // find a point along the ellipse at evenly spaced angles
        double theta = 2.0 * Math.PI * i / numSampleContour;
        UtilEllipse_F64.computePoint(theta, ellipse, sample);
        // compute the unit tangent along the ellipse at this point
        double tanX = sample.x - ellipse.center.x;
        double tanY = sample.y - ellipse.center.y;
        double r = Math.sqrt(tanX * tanX + tanY * tanY);
        tanX /= r;
        tanY /= r;
        // define the line it will sample along
        double x = sample.x - numSamples * tanX / 2.0;
        double y = sample.y - numSamples * tanY / 2.0;
        double lengthX = numSamples * tanX;
        double lengthY = numSamples * tanY;
        // Unless all the sample points are inside the image, ignore this point
        if (!integral.isInside(x, y) || !integral.isInside(x + lengthX, y + lengthY))
            continue;
        double sample0 = integral.compute(x, y, x + tanX, y + tanY);
        x += tanX;
        y += tanY;
        for (int j = 0; j < numPts; j++) {
            double sample1 = integral.compute(x, y, x + tanX, y + tanY);
            // weight is the absolute intensity difference between
            // consecutive line integrals (edge strength)
            double w = sample0 - sample1;
            if (w < 0) w = -w;
            if (w > 0) {
                // convert into a local coordinate so make the linear fitting more numerically stable and
                // independent on position in the image
                samplePts.grow().set((x - ellipse.center.x) / localScale, (y - ellipse.center.y) / localScale);
                weights.add(w);
            }
            x += tanX;
            y += tanY;
            sample0 = sample1;
        }
    }
}
|
java
|
/**
 * Copies the content of the given file to the output stream.
 *
 * @param file         source file; must not be null
 * @param outputStream destination stream
 * @return the number of bytes copied
 * @throws IOException if the file cannot be opened or copying fails
 */
public static long copy(File file, OutputStream outputStream) throws IOException
{
    Params.notNull(file, "Input file");
    // NOTE(review): assumes the delegated copy(InputStream, OutputStream)
    // closes the FileInputStream it is handed -- confirm, otherwise this
    // leaks a file descriptor.
    return copy(new FileInputStream(file), outputStream);
}
|
python
|
def _preloading_env(self):
    """
    A "stripped" jinja environment.

    Temporarily swaps the ``random_model``/``random_models`` globals for
    no-op stubs while the environment is in use, then restores the real
    context functions afterwards.

    NOTE(review): this generator yields once inside try/finally, so it is
    presumably wrapped by ``contextlib.contextmanager`` at the decorator
    site (not visible here) -- confirm.
    """
    ctx = self.env.globals
    try:
        # Stub out the expensive/model-touching globals during preload.
        ctx['random_model'] = lambda *a, **kw: None
        ctx['random_models'] = lambda *a, **kw: None
        yield self.env
    finally:
        # Restore the real context functions regardless of errors.
        ctx['random_model'] = jinja2.contextfunction(random_model)
        ctx['random_models'] = jinja2.contextfunction(random_models)
|
java
|
/**
 * Replaces this refinement's value (and optionally its attribute).
 *
 * When currently checked, the previous facet refinement is removed, the
 * new one applied and a search triggered, before the local state is
 * updated.
 *
 * @param newValue the replacement facet value
 * @param newName  optional replacement attribute name; null keeps the
 *                 current attribute
 */
public void setValue(String newValue, @Nullable String newName) {
    if (isChecked()) {
        searcher.updateFacetRefinement(this.attribute, value, false)
                .updateFacetRefinement(newName != null ? newName : attribute, newValue, true)
                .search();
    }
    this.value = newValue;
    applyEventualNewAttribute(newName);
}
|
java
|
/**
 * Returns the schedule-async-persistence options, or the protobuf
 * default instance when none have been set.
 */
public alluxio.grpc.ScheduleAsyncPersistencePOptions getOptions() {
    return options_ == null ? alluxio.grpc.ScheduleAsyncPersistencePOptions.getDefaultInstance() : options_;
}
|
java
|
/**
 * Passes the stored Hadoop configuration to the wrapped input format,
 * when that format is Configurable. Calls are serialized on
 * CONFIGURE_MUTEX to enforce sequential configure() invocations.
 */
@Override
public void configure(Configuration parameters) {
    // enforce sequential configuration() calls
    synchronized (CONFIGURE_MUTEX) {
        if (mapreduceInputFormat instanceof Configurable) {
            ((Configurable) mapreduceInputFormat).setConf(configuration);
        }
    }
}
|
python
|
def get_dict(self, only_attributes=None, exclude_attributes=None, df_format=False):
    """Get a dictionary of this object's attributes. Optional format for storage in a Pandas DataFrame.

    Args:
        only_attributes (str, list): Attributes that should be returned. If not provided, all are returned.
        exclude_attributes (str, list): Attributes that should be excluded.
        df_format (bool): If dictionary values should be formatted for a dataframe
            (everything possible is transformed into strings, int, or float -
            if something can't be transformed it is excluded)

    Returns:
        dict: Dictionary of attributes

    """

    # Choose attributes to return, return everything in the object if a list is not specified
    if not only_attributes:
        keys = list(self.__dict__.keys())
    else:
        keys = ssbio.utils.force_list(only_attributes)

    # Remove keys you don't want returned
    if exclude_attributes:
        exclude_attributes = ssbio.utils.force_list(exclude_attributes)
        for x in exclude_attributes:
            if x in keys:
                keys.remove(x)

    # Copy attributes into a new dictionary
    df_dict = {}
    for k, orig_v in self.__dict__.items():
        if k in keys:
            # Deep-copy so callers cannot mutate this object's state.
            v = deepcopy(orig_v)
            if df_format:
                # Truthy, non-primitive values are coerced to strings;
                # falsy non-numeric values become None; primitives pass
                # through unchanged.
                if v and not isinstance(v, str) and not isinstance(v, int) and not isinstance(v,
                                                                                              float) and not isinstance(
                        v, bool):
                    try:
                        df_dict[k] = ssbio.utils.force_string(deepcopy(v))
                    except TypeError:
                        log.warning('{}: excluding attribute from dict, cannot transform into string'.format(k))
                elif not v and not isinstance(v, int) and not isinstance(v, float):
                    df_dict[k] = None
                else:
                    df_dict[k] = deepcopy(v)
            else:
                df_dict[k] = deepcopy(v)
    return df_dict
|
python
|
def highest_precedence_dtype(exprs):
    """Return the highest precedence type from the passed expressions

    Also verifies that there are valid implicit casts between any of the types
    and the selected highest precedence type.
    This is a thin wrapper around datatypes highest precedence check.

    Parameters
    ----------
    exprs : Iterable[ir.ValueExpr]
        A sequence of Expressions

    Returns
    -------
    dtype: DataType
        The highest precedence datatype
    """
    if exprs:
        return dt.highest_precedence(e.type() for e in exprs)
    raise ValueError('Must pass at least one expression')
|
java
|
/**
 * Looks up the committed metric stored under the given metric type's
 * name.
 *
 * @return the committed metric, or null when none has been recorded
 */
private MetricImpl getCommittedMetric(MetricImpl.MetricType metricType) {
    return (MetricImpl)committedMetrics.get(metricType.name());
}
|
python
|
def network(prefix, default_length=24):
    """
    Given a prefix, this function returns the corresponding network
    address.

    :type  prefix: string
    :param prefix: An IP prefix.
    :type  default_length: long
    :param default_length: The default ip prefix length.
    :rtype:  string
    :return: The IP network address.
    """
    address, prefix_length = parse_prefix(prefix, default_length)
    mask = pfxlen2mask_int(prefix_length)
    return int2ip(ip2int(address) & mask)
|
python
|
def decompose_nfkd(text):
    """Perform unicode compatibility decomposition.

    This will replace some non-standard value representations in unicode and
    normalise them, while also separating characters and their diacritics into
    two separate codepoints.

    Returns ``None`` when given ``None``.
    """
    if text is None:
        return None
    # Lazily build the ICU transliterator once and memoize it as a
    # function attribute, since createInstance() is comparatively costly.
    if not hasattr(decompose_nfkd, '_tr'):
        decompose_nfkd._tr = Transliterator.createInstance('Any-NFKD')
    return decompose_nfkd._tr.transliterate(text)
|
java
|
/**
 * Use this API to restart br resources, dispatching a single operation
 * for one resource and a bulk request for several.
 *
 * @param client    the nitro service to issue the operation through
 * @param resources the resources to restart; must be non-null
 * @return the operation result(s)
 * @throws Exception if resources is null or the operation fails
 */
public static br_restart[] restart(nitro_service client, br_restart[] resources) throws Exception
{
    if(resources == null)
        throw new Exception("Null resource array");
    if(resources.length == 1)
        return ((br_restart[]) resources[0].perform_operation(client, "restart"));
    return ((br_restart[]) perform_operation_bulk_request(client, resources, "restart"));
}
|
java
|
/**
 * Maps a path to its validation issue severity: grandfathered paths are
 * tolerated as warnings, everything else is an error.
 */
private IssueSeverity determineLevel(String path) {
    return isGrandfathered(path) ? IssueSeverity.WARNING : IssueSeverity.ERROR;
}
|
java
|
/**
 * Creates the demo frame, installs the filter-demo content pane, and
 * makes the window visible. Per standard Swing practice this should be
 * invoked on the event dispatch thread (not enforced here -- confirm
 * the caller schedules it via invokeLater).
 */
private static void createAndShowGUI() {
    //Create and set up the window.
    JFrame frame = new JFrame("TableFilterDemo");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    //Create and set up the content pane.
    TableFilterDemo newContentPane = new TableFilterDemo();
    newContentPane.setOpaque(true); //content panes must be opaque
    frame.setContentPane(newContentPane);
    //Display the window.
    frame.pack();
    frame.setVisible(true);
}
|
java
|
/**
 * Discovers all @Scheduled classes and methods via reflection and binds
 * the resulting set of SchedulerTasks into the Guice binder.
 *
 * Runnable classes become class-level tasks; annotated methods become
 * method-level tasks. Each task additionally gets its members injected.
 */
public static void bindScheduled(Binder binder, Reflections reflections) {
    Set<Class<?>> classes = reflections.getTypesAnnotatedWith(Scheduled.class, true);
    Set<Method> methods = reflections.getMethodsAnnotatedWith(Scheduled.class);
    Set<SchedulerTask> tasks = new HashSet<SchedulerTask>();
    for(Class<?> clazz : classes) {
        // Only Runnable classes can be scheduled as whole-class tasks.
        if(Runnable.class.isAssignableFrom(clazz)){
            tasks.add(new SchedulerTask(clazz));
        }
    }
    for(Method method : methods) {
        tasks.add(new SchedulerTask(method.getDeclaringClass(), method));
    }
    for(SchedulerTask task : tasks) {
        binder.requestInjection(task);
    }
    binder.bind(new TypeLiteral<Set<SchedulerTask>>(){}).toInstance(tasks);
}
|
python
|
def client(self):
    """
    Lazily construct and cache the Keycloak client for this realm.

    :rtype: keycloak.client.KeycloakClient
    """
    # Build the client on first access only; subsequent calls reuse it.
    if self._client is None:
        self._client = KeycloakClient(server_url=self._server_url,
                                      headers=self._headers)
    return self._client
|
java
|
/**
 * Builds an Elasticsearch connection from the legacy configuration
 * options.
 *
 * In local mode a JVM-local node is started, configured either from a
 * YML file or from a data directory (whose subdirectories are created
 * on demand). Otherwise a TransportClient is created for the configured
 * remote hosts and no node is started.
 *
 * @return the (node, client) pair; node is null for network transport
 */
private ElasticSearchSetup.Connection legacyConfiguration(Configuration config) {
    Node node;
    Client client;
    if (config.get(LOCAL_MODE)) {
        log.debug("Configuring ES for JVM local transport");
        boolean clientOnly = config.get(CLIENT_ONLY);
        boolean local = config.get(LOCAL_MODE);
        NodeBuilder builder = NodeBuilder.nodeBuilder();
        Preconditions.checkArgument(config.has(INDEX_CONF_FILE) || config.has(INDEX_DIRECTORY),
                "Must either configure configuration file or base directory");
        if (config.has(INDEX_CONF_FILE)) {
            // Settings come entirely from the user-supplied YML file.
            String configFile = config.get(INDEX_CONF_FILE);
            ImmutableSettings.Builder sb = ImmutableSettings.settingsBuilder();
            log.debug("Configuring ES from YML file [{}]", configFile);
            FileInputStream fis = null;
            try {
                fis = new FileInputStream(configFile);
                sb.loadFromStream(configFile, fis);
                builder.settings(sb.build());
            } catch (FileNotFoundException e) {
                throw new TitanException(e);
            } finally {
                IOUtils.closeQuietly(fis);
            }
        } else {
            // Settings are synthesized from a base data directory.
            String dataDirectory = config.get(INDEX_DIRECTORY);
            log.debug("Configuring ES with data directory [{}]", dataDirectory);
            File f = new File(dataDirectory);
            if (!f.exists()) f.mkdirs();
            ImmutableSettings.Builder b = ImmutableSettings.settingsBuilder();
            for (String sub : DATA_SUBDIRS) {
                String subdir = dataDirectory + File.separator + sub;
                f = new File(subdir);
                if (!f.exists()) f.mkdirs();
                b.put("path." + sub, subdir);
            }
            b.put("script.disable_dynamic", false);
            b.put("indices.ttl.interval", "5s");
            builder.settings(b.build());
            String clustername = config.get(CLUSTER_NAME);
            Preconditions.checkArgument(StringUtils.isNotBlank(clustername), "Invalid cluster name: %s", clustername);
            builder.clusterName(clustername);
        }
        node = builder.client(clientOnly).data(!clientOnly).local(local).node();
        client = node.client();
    } else {
        log.debug("Configuring ES for network transport");
        ImmutableSettings.Builder settings = ImmutableSettings.settingsBuilder();
        if (config.has(CLUSTER_NAME)) {
            String clustername = config.get(CLUSTER_NAME);
            Preconditions.checkArgument(StringUtils.isNotBlank(clustername), "Invalid cluster name: %s", clustername);
            settings.put("cluster.name", clustername);
        } else {
            // Without an explicit name, accept whatever cluster answers.
            settings.put("client.transport.ignore_cluster_name", true);
        }
        log.debug("Transport sniffing enabled: {}", config.get(CLIENT_SNIFF));
        settings.put("client.transport.sniff", config.get(CLIENT_SNIFF));
        settings.put("script.disable_dynamic", false);
        TransportClient tc = new TransportClient(settings.build());
        int defaultPort = config.has(INDEX_PORT)?config.get(INDEX_PORT):HOST_PORT_DEFAULT;
        for (String host : config.get(INDEX_HOSTS)) {
            // Hosts may be "hostname" or "hostname:port".
            String[] hostparts = host.split(":");
            String hostname = hostparts[0];
            int hostport = defaultPort;
            if (hostparts.length == 2) hostport = Integer.parseInt(hostparts[1]);
            log.info("Configured remote host: {} : {}", hostname, hostport);
            tc.addTransportAddress(new InetSocketTransportAddress(hostname, hostport));
        }
        client = tc;
        node = null;
    }
    return new ElasticSearchSetup.Connection(node, client);
}
|
java
|
/**
 * Renders the component documentation template for the given component
 * to the supplied stream.
 *
 * Hidden (for local access) and deprecated properties are filtered out
 * of the template model before rendering. On success the wrapping
 * writer -- and therefore the given stream -- is flushed and closed.
 *
 * @param componentWrapper the component to document
 * @param outputStream     destination for the rendered documentation
 * @throws IOException if writing fails
 * @throws IllegalStateException if the template engine fails
 */
public void write(final ComponentDocumentationWrapper componentWrapper, final OutputStream outputStream)
        throws IOException {
    final Map<String, Object> data = new HashMap<>();
    try {
        data.put("breadcrumbs", _breadcrumbs);
        data.put("component", componentWrapper);
        {
            final Set<ConfiguredPropertyDescriptor> configuredProperties =
                    componentWrapper.getComponentDescriptor().getConfiguredProperties();
            final List<ConfiguredPropertyDescriptor> properties = new ArrayList<>(configuredProperties);
            final List<ConfiguredPropertyDocumentationWrapper> propertyList = new ArrayList<>();
            for (final ConfiguredPropertyDescriptor property : properties) {
                final HiddenProperty hiddenProperty = property.getAnnotation(HiddenProperty.class);
                final Deprecated deprecatedProperty = property.getAnnotation(Deprecated.class);

                // we do not show hidden or deprecated properties in docs
                if ((hiddenProperty == null || hiddenProperty.hiddenForLocalAccess() == false)
                        && deprecatedProperty == null) {
                    final ConfiguredPropertyDocumentationWrapper wrapper =
                            new ConfiguredPropertyDocumentationWrapper(property);
                    propertyList.add(wrapper);
                }
            }

            data.put("properties", propertyList);
        }

        /* Write data to a file */
        final Writer out = new OutputStreamWriter(outputStream);
        _template.process(data, out);
        out.flush();
        out.close();
    } catch (final TemplateException e) {
        // BUGFIX: corrected typo in message ("templare" -> "template").
        throw new IllegalStateException("Unexpected template exception", e);
    }
}
|
python
|
def shapeexprlabel_to_IRI(self, shapeExprLabel: ShExDocParser.ShapeExprLabelContext) \
        -> Union[ShExJ.BNODE, ShExJ.IRIREF]:
    """ shapeExprLabel: iri | blankNode

    Convert a parsed shape-expression label into an IRI reference when
    the label is an IRI, otherwise into a BNODE wrapping the blank-node
    text.
    """
    if shapeExprLabel.iri():
        return self.iri_to_iriref(shapeExprLabel.iri())
    else:
        return ShExJ.BNODE(shapeExprLabel.blankNode().getText())
|
python
|
def clear_trace_filter_cache():
    '''
    Clear the trace filter cache.
    Call this after reloading.

    The trace hook is disabled for the duration of the clearing so the
    cache structures are not repopulated mid-clear, then restored.
    '''
    global should_trace_hook
    try:
        # Need to temporarily disable a hook because otherwise
        # _filename_to_ignored_lines.clear() will never complete.
        old_hook = should_trace_hook
        should_trace_hook = None

        # Clear the linecache
        linecache.clearcache()
        _filename_to_ignored_lines.clear()

    finally:
        should_trace_hook = old_hook
|
java
|
/**
 * Returns the list of function-information entries (as builders or
 * messages); generated protobuf accessor backed directly by the entry_
 * field.
 */
public java.util.List<? extends com.google.javascript.jscomp.FunctionInformationMap.EntryOrBuilder>
        getEntryOrBuilderList() {
    return entry_;
}
|
java
|
/**
 * Multiplies this vector by the given scalar and stores the result in
 * {@code dest}; this vector itself is not modified.
 *
 * @param scalar the factor to scale by
 * @param dest   vector receiving the scaled result
 * @return {@code dest}
 */
public Vector2d mul(double scalar, Vector2d dest) {
    dest.x = x * scalar;
    dest.y = y * scalar;
    return dest;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.