language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def inventory(self):
    """Collect inventory (including module details) for every known chassis."""
    for chassis in self.chassis_list.values():
        chassis.inventory(modules_inventory=True)
|
java
|
/**
 * Locate {@code fileName} either on the filesystem or on the classpath and,
 * if readable, delegate to {@code processFile(fileName)}; otherwise either
 * throw or just log, depending on {@code failOnFileNotFound}.
 *
 * @param fileName path (or classpath resource name) of the jobs XML file
 * @param failOnFileNotFound when true a missing file raises SchedulerException
 * @throws Exception propagated from processFile(fileName)
 */
public void processFile(String fileName, boolean failOnFileNotFound) throws Exception {
    boolean fileFound = false;
    InputStream f = null;
    try {
        String furl = null;

        File file = new File(fileName); // files in filesystem
        if (!file.exists()) {
            // Not a plain file: fall back to a classpath resource lookup.
            URL url = classLoadHelper.getResource(fileName);
            if (url != null) {
                try {
                    furl = URLDecoder.decode(url.getPath(), "UTF-8");
                } catch (UnsupportedEncodingException e) {
                    // UTF-8 is always present on a JVM; keep the raw path as fallback.
                    furl = url.getPath();
                }
                file = new File(furl);
                try {
                    f = url.openStream();
                } catch (IOException ignor) {
                    // Swallow the exception: a null stream below simply means "not found".
                }
            }
        } else {
            try {
                f = new java.io.FileInputStream(file);
            } catch (FileNotFoundException e) {
                // ignore: existence check above raced with deletion; treat as not found
            }
        }

        // The stream is opened only to prove the file exists and is readable.
        if (f == null) {
            fileFound = false;
        } else {
            fileFound = true;
        }
    } finally {
        try {
            if (f != null) {
                f.close();
            }
        } catch (IOException ioe) {
            logger.warn("Error closing jobs file " + fileName, ioe);
        }
    }

    if (!fileFound) {
        if (failOnFileNotFound) {
            throw new SchedulerException("File named '" + fileName + "' does not exist.");
        } else {
            logger.warn(
                "File named '"
                    + fileName
                    + "' does not exist. This is OK if you don't want to use an XML job config file.");
        }
    } else {
        processFile(fileName);
    }
}
|
java
|
/**
 * Fluent variant of {@code setScanFilter}: stores the scan filter and
 * returns this request so calls can be chained.
 *
 * @param scanFilter conditions to apply, keyed by attribute name
 * @return this ScanRequest
 */
public ScanRequest withScanFilter(java.util.Map<String, Condition> scanFilter) {
    this.setScanFilter(scanFilter);
    return this;
}
|
java
|
/**
 * Starts an asynchronous @SnapshotScan of the truncation snapshot path and
 * registers a callback that records any newly discovered snapshots in
 * m_truncationSnapshotAttempts.
 */
private void scanTruncationSnapshots() {
    if (m_truncationSnapshotPath == null) {
        try {
            // Lazily read the snapshot path from ZooKeeper; bail out quietly
            // if it is not (yet) available.
            m_truncationSnapshotPath = new String(m_zk.getData(VoltZK.test_scan_path, false, null), "UTF-8");
        } catch (Exception e) {
            return;
        }
    }
    Object params[] = new Object[1];
    params[0] = m_truncationSnapshotPath;
    // Register the callback under a fresh handle before initiating the work.
    long handle = m_nextCallbackHandle++;
    m_procedureCallbacks.put(handle, new ProcedureCallback() {
        @Override
        public void clientCallback(final ClientResponse clientResponse)
                throws Exception {
            if (clientResponse.getStatus() != ClientResponse.SUCCESS){
                SNAP_LOG.error(clientResponse.getStatusString());
                return;
            }
            final VoltTable results[] = clientResponse.getResults();
            // A single result table signals scan failure and carries ERR_MSG.
            if (results.length == 1) {
                final VoltTable result = results[0];
                boolean advanced = result.advanceRow();
                assert(advanced);
                assert(result.getColumnCount() == 1);
                assert(result.getColumnType(0) == VoltType.STRING);
                loggingLog.error("Snapshot scan failed with failure response: " + result.getString("ERR_MSG"));
                return;
            }
            // Success path: three tables; the first one lists the snapshots.
            assert(results.length == 3);
            final VoltTable snapshots = results[0];
            assert(snapshots.getColumnCount() == 10);
            // Collect scan results keyed (and therefore ordered) by txn id.
            TreeMap<Long, TruncationSnapshotAttempt> foundSnapshots =
                new TreeMap<Long, TruncationSnapshotAttempt>();
            while (snapshots.advanceRow()) {
                final String path = snapshots.getString("PATH");
                final String pathType = snapshots.getString("PATHTYPE");
                final String nonce = snapshots.getString("NONCE");
                final Long txnId = snapshots.getLong("TXNID");
                TruncationSnapshotAttempt snapshotAttempt = new TruncationSnapshotAttempt();
                snapshotAttempt.path = path;
                snapshotAttempt.pathType = pathType;
                snapshotAttempt.nonce = nonce;
                foundSnapshots.put(txnId, snapshotAttempt);
            }
            // Merge only previously unseen snapshots into the known set.
            for (Map.Entry<Long, TruncationSnapshotAttempt> entry : foundSnapshots.entrySet()) {
                if (!m_truncationSnapshotAttempts.containsKey(entry.getKey())) {
                    loggingLog.info("Truncation snapshot scan discovered new snapshot txnid " + entry.getKey() +
                        " path " + entry.getValue().path + " nonce " + entry.getValue().nonce);
                    m_truncationSnapshotAttempts.put(entry.getKey(), entry.getValue());
                }
            }
        }
    });
    m_initiator.initiateSnapshotDaemonWork("@SnapshotScan", handle, params);
}
|
python
|
def cli(env, title, subject_id, body, hardware_identifier, virtual_identifier, priority):
    """Create a support ticket."""
    ticket_mgr = SoftLayer.TicketManager(env.client)

    # Open an editor for the ticket body when none was supplied.
    ticket_body = click.edit('\n\n' + ticket.TEMPLATE_MSG) if body is None else body

    created_ticket = ticket_mgr.create_ticket(
        title=title,
        body=ticket_body,
        subject=subject_id,
        priority=priority)

    # Optionally attach a hardware device and/or a virtual server.
    if hardware_identifier:
        hardware_mgr = SoftLayer.HardwareManager(env.client)
        hardware_id = helpers.resolve_id(hardware_mgr.resolve_ids, hardware_identifier, 'hardware')
        ticket_mgr.attach_hardware(created_ticket['id'], hardware_id)

    if virtual_identifier:
        vs_mgr = SoftLayer.VSManager(env.client)
        vs_id = helpers.resolve_id(vs_mgr.resolve_ids, virtual_identifier, 'VS')
        ticket_mgr.attach_virtual_server(created_ticket['id'], vs_id)

    env.fout(ticket.get_ticket_results(ticket_mgr, created_ticket['id']))
|
java
|
/**
 * Degree-based convenience wrapper: converts all four coordinates to
 * radians and delegates to the radian variant.
 */
public static double cosineOrHaversineDeg(double lat1, double lon1, double lat2, double lon2) {
    final double phi1 = deg2rad(lat1);
    final double lam1 = deg2rad(lon1);
    final double phi2 = deg2rad(lat2);
    final double lam2 = deg2rad(lon2);
    return cosineOrHaversineRad(phi1, lam1, phi2, lam2);
}
|
java
|
/**
 * Lazily resolves and caches the EClass for IfcOffsetCurve3D
 * (classifier index 395 of the registered Ifc4 package).
 */
@Override
public EClass getIfcOffsetCurve3D() {
    if (ifcOffsetCurve3DEClass != null) {
        return ifcOffsetCurve3DEClass;
    }
    EPackage ifc4 = EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI);
    ifcOffsetCurve3DEClass = (EClass) ifc4.getEClassifiers().get(395);
    return ifcOffsetCurve3DEClass;
}
|
python
|
def MI_referenceNames(self,
                      env,
                      objectName,
                      resultClassName,
                      role):
    # pylint: disable=invalid-name
    """Return instance names of an association class.

    Implements the WBEM operation ReferenceNames in terms
    of the references method. A derived class will not normally
    override this method.

    Yields CIMInstanceName objects built from the key properties of each
    instance produced by self.references().
    """
    logger = env.get_logger()
    logger.log_debug('CIMProvider MI_referenceNames <2> called. ' \
        'resultClass: %s' % (resultClassName))
    ch = env.get_cimom_handle()
    if not resultClassName:
        raise pywbem.CIMError(
            pywbem.CIM_ERR_FAILED,
            "Empty resultClassName passed to ReferenceNames")
    # Fetch the association class so its key properties can seed a template
    # ("model") instance handed to self.references().
    assocClass = ch.GetClass(resultClassName, objectName.namespace,
                             LocalOnly=False,
                             IncludeQualifiers=True)
    keys = pywbem.NocaseDict()
    keyNames = [p.name for p in assocClass.properties.values()
                if 'key' in p.qualifiers]
    for keyName in keyNames:
        p = assocClass.properties[keyName]
        keys.__setitem__(p.name, p)
    _strip_quals(keys)
    model = pywbem.CIMInstance(classname=assocClass.classname,
                               properties=keys)
    model.path = pywbem.CIMInstanceName(classname=assocClass.classname,
                                        namespace=objectName.namespace)
    #if role is None:
    #    raise pywbem.CIMError(pywbem.CIM_ERR_FAILED,
    #                          "** this shouldn't happen")
    if role:
        # The requested role must be a reference property of the association.
        if role not in model.properties:
            raise pywbem.CIMError(pywbem.CIM_ERR_FAILED,
                                  "** this shouldn't happen")
        model[role] = objectName
    for inst in self.references(env=env,
                                object_name=objectName,
                                model=model,
                                assoc_class=assocClass,
                                result_class_name='',
                                role=role,
                                result_role=None,
                                keys_only=True):
        for prop in inst.properties.values():
            # Default reference namespaces to the request's namespace.
            if hasattr(prop.value, 'namespace') and \
               prop.value.namespace is None:
                prop.value.namespace = objectName.namespace
        yield build_instance_name(inst, keyNames)
    logger.log_debug('CIMProvider MI_referenceNames returning')
|
java
|
/**
 * Runs *task* once immediately; when it yields no result, hands it over to
 * the retry/backoff loop until a non-null result is produced.
 */
public <T, E extends Exception> T doUntilResult(ExceptionalSupplier<T, E> task)
        throws InterruptedException, BackoffStoppedException, E {
    // First attempt carries no backoff delay.
    final T firstTry = task.get();
    if (firstTry != null) {
        return firstTry;
    }
    return retryWork(task);
}
|
java
|
/**
 * Returns the context path for this view: the explicit {@code context}
 * field when set, otherwise the "context_path" value.
 *
 * @return the context path, never null
 * @throws ViewException when no context path is available
 */
protected String getContextPath(){
    if (context != null) {
        return context;
    }
    // Fix: look the value up once instead of twice -- get(...) may be
    // non-trivial, and a second call could in principle observe a change.
    Object contextPath = get("context_path");
    if (contextPath == null) {
        throw new ViewException("context_path missing - red alarm!");
    }
    return contextPath.toString();
}
|
python
|
def printSysLog(self, logString):
    """
    Log one or more lines. Optionally, add them to logEntries list.
    Input:
        Strings to be logged.
    """
    message = self.requestId + ": " + logString
    if zvmsdklog.LOGGER.getloglevel() <= logging.DEBUG:
        # print log only when debug is enabled
        target = self.logger if self.daemon == '' else self.daemon.logger
        target.debug(message)
    if self.captureLogs is True:
        self.results['logEntries'].append(message)
    return
|
python
|
def _read_eeprom(self, address, size):
    '''Read EEPROM

    Reads *size* bytes from the calibration EEPROM starting at *address*,
    in full pages of CAL_EEPROM_PAGE_SIZE bytes plus one trailing partial
    read.

    :param address: start address; masked to 14 bits (the device holds
        16384 bytes)
    :param size: number of bytes to read
    :return: array('B') containing the bytes read
    '''
    # Latch the big-endian 14-bit start address into the address register.
    self._intf.write(self._base_addr + self.CAL_EEPROM_ADD, array('B', pack('>H', address & 0x3FFF)))  # 14-bit address, 16384 bytes
    n_pages, n_bytes = divmod(size, self.CAL_EEPROM_PAGE_SIZE)
    data = array('B')
    # NOTE(review): `self._base_addr + self.CAL_EEPROM_ADD | 1` parses as
    # `(base + CAL_EEPROM_ADD) | 1` since `+` binds tighter than `|`;
    # presumably bit 0 selects the data/read port -- confirm against the
    # register map.
    for _ in range(n_pages):
        data.extend(self._intf.read(self._base_addr + self.CAL_EEPROM_ADD | 1, size=self.CAL_EEPROM_PAGE_SIZE))
    if n_bytes > 0:
        data.extend(self._intf.read(self._base_addr + self.CAL_EEPROM_ADD | 1, size=n_bytes))
    return data
|
python
|
def prepare(self, strict=True):
    """ preparation for loaded json

    Validates the loaded spec, resolves the schemes to use, runs the
    scanner passes, and builds the operation/model shortcut dicts.

    :param bool strict: when in strict mode, exception would be raised if not valid.
    :raises errs.CycleDetectionError: in strict mode, when Schema Objects
        reference each other cyclically.
    """
    self.__root = self.prepare_obj(self.raw, self.__url)
    self.validate(strict=strict)

    if hasattr(self.__root, 'schemes') and self.__root.schemes:
        if len(self.__root.schemes) > 0:
            self.__schemes = self.__root.schemes
        else:
            # extract schemes from the url to load spec
            # fix: six.moves has no `urlparse` callable -- the function lives at
            # six.moves.urllib.parse.urlparse -- and the parsed result's
            # attribute is `scheme` (singular), not `schemes`.
            self.__schemes = [six.moves.urllib.parse.urlparse(self.__url).scheme]

    s = Scanner(self)
    s.scan(root=self.__root, route=[Merge()])
    s.scan(root=self.__root, route=[PatchObject()])
    s.scan(root=self.__root, route=[Aggregate()])

    # reducer for Operation
    tr = TypeReduce(self.__sep)
    cy = CycleDetector()
    s.scan(root=self.__root, route=[tr, cy])

    # 'op' -- shortcut for Operation with tag and operaionId
    self.__op = utils.ScopeDict(tr.op)

    # 'm' -- shortcut for model in Swagger 1.2
    if hasattr(self.__root, 'definitions') and self.__root.definitions is not None:
        self.__m = utils.ScopeDict(self.__root.definitions)
    else:
        self.__m = utils.ScopeDict({})

    # update scope-separater
    self.__m.sep = self.__sep
    self.__op.sep = self.__sep

    # cycle detection
    if len(cy.cycles['schema']) > 0 and strict:
        raise errs.CycleDetectionError('Cycles detected in Schema Object: {0}'.format(cy.cycles['schema']))
|
java
|
/**
 * Looks up the registered SharedLibrary service with the given id.
 *
 * @param id the library id to match
 * @return the highest-ranked matching Library service, or null when the
 *         bundle context is unavailable, the filter is invalid, or nothing
 *         matches
 */
static Library getSharedLibrary(String id) {
    if (bundleContext == null) {
        return null;
    }
    // Filter the SharedLibrary service references by ID.
    String filter = "(" + "id=" + id + ")";
    Collection<ServiceReference<Library>> refs;
    try {
        refs = bundleContext.getServiceReferences(Library.class, filter);
    } catch (InvalidSyntaxException e) {
        if (tc.isErrorEnabled()) {
            Tr.error(tc, "cls.library.id.invalid", id, e.toString());
        }
        return null;
    }
    return refs.isEmpty() ? null : bundleContext.getService(getHighestRankedService(refs));
}
|
python
|
def append_line(filename, **line):
    """Safely (i.e. with locking) append a line to
    the given file, serialized as JSON.

    :param filename: path of the file to append to
    :param line: keyword arguments serialized as a single JSON object
    """
    global lock
    data = json.dumps(line, separators=(',', ':')) + '\n'
    with lock:
        # fix: the Python 2-only builtin file() no longer exists in
        # Python 3; open() is the equivalent (and preferred) spelling.
        with open(filename, 'a') as fp:
            # 'a' mode already writes at EOF; the explicit seek is kept for
            # clarity on platforms with unusual append semantics.
            fp.seek(0, SEEK_END)
            fp.write(data)
|
java
|
/**
 * Always throws a ConversionParameterException reporting a wrong number of
 * parameters for the given method/class pair.
 */
public static void wrongParameterNumber(String methodName, String className){
    final String detail = MSG.INSTANCE.message(conversionParameterException, methodName, className);
    throw new ConversionParameterException(detail);
}
|
java
|
/**
 * Formats the interval's start using the long format of its start
 * granularity, or a null format when no start granularity is set.
 */
public final String getStartFormattedLong() {
    final Granularity startGran = this.interval.getStartGranularity();
    if (startGran == null) {
        return formatStart(null);
    }
    return formatStart(startGran.getLongFormat());
}
|
java
|
/**
 * Creates and persists a fresh StepStatus row for the given step execution.
 *
 * @param stepExecId step execution id used both as primary key and payload
 * @return the newly created, already persisted StepStatus
 * @throws PersistenceException wrapping any SQL or serialization failure
 */
@Override
public StepStatus createStepStatus(long stepExecId) {
    logger.entering(CLASSNAME, "createStepStatus", stepExecId);
    final StepStatus stepStatus = new StepStatus(stepExecId);
    Connection conn = null;
    PreparedStatement insert = null;
    try {
        conn = getConnection();
        insert = conn.prepareStatement("INSERT INTO stepstatus (id, obj) VALUES(?, ?)");
        insert.setLong(1, stepExecId);
        insert.setBytes(2, serializeObject(stepStatus));
        insert.executeUpdate();
    } catch (SQLException | IOException e) {
        throw new PersistenceException(e);
    } finally {
        cleanupConnection(conn, null, insert);
    }
    logger.exiting(CLASSNAME, "createStepStatus");
    return stepStatus;
}
|
java
|
/**
 * Returns every collation keyword value available for the given locale,
 * read from the collation resource bundles (with fallback).
 */
public static final String[] getKeywordValuesForLocale(String key, ULocale locale,
        boolean commonlyUsed) {
    // Note: The parameter commonlyUsed is not used.
    // The switch is in the method signature for consistency
    // with other locale services.

    // Read available collation values from collation bundles.
    final ICUResourceBundle bundle = (ICUResourceBundle)
            UResourceBundle.getBundleInstance(ICUData.ICU_COLLATION_BASE_NAME, locale);
    final KeywordsSink sink = new KeywordsSink();
    bundle.getAllItemsWithFallback("collations", sink);
    return sink.values.toArray(new String[sink.values.size()]);
}
|
python
|
def run(self, cmd, timeout=None, key=None):
    """
    Run a command on the phablet device using ssh

    :param cmd:
        a list of strings to execute as a command
    :param timeout:
        a timeout (in seconds) for device discovery
    :param key:
        a path to a public ssh key to use for connection
    :returns:
        the exit code of the command

    This method will not allow you to capture stdout/stderr from the target
    process. If you wish to do that please consider switching to one of
    subprocess functions along with. :meth:`cmdline()`.
    """
    if not isinstance(cmd, list):
        raise TypeError("cmd needs to be a list")
    for item in cmd:
        if not isinstance(item, str):
            raise TypeError("cmd needs to be a list of strings")
    self.connect(timeout, key)
    return self._run_ssh(cmd)
|
python
|
def astype(self, dtype, copy=True):
    """
    Convert this array-like to a NumPy ndarray with the requested dtype.

    Parameters
    ----------
    dtype : str or dtype
        Typecode or data-type to cast to.
    copy : bool, default True
        Copy the data even when not strictly required. When False, a copy
        only happens if the existing dtype differs from the target.

    Returns
    -------
    array : ndarray
        NumPy ndarray with 'dtype' for its dtype.
    """
    converted = np.array(self, dtype=dtype, copy=copy)
    return converted
|
python
|
def _set_request_user_id_metric(self, request):
    """
    Add request_user_id metric

    Metrics:
        request_user_id
    """
    user = getattr(request, 'user', None)
    user_id = getattr(user, 'id', None)
    if user_id:
        monitoring.set_custom_metric('request_user_id', user_id)
|
python
|
def from_ivorn(cls, ivorn, nside=256):
    """
    Build a `~mocpy.moc.MOC` by querying the MOC server for a given ivorn.

    Parameters
    ----------
    ivorn : str
    nside : int, optional
        256 by default

    Returns
    -------
    result : `~mocpy.moc.MOC`
        The resulting MOC.
    """
    # The server expects the HEALPix order, i.e. log2 of nside.
    query = urlencode({
        'ivorn': ivorn,
        'get': 'moc',
        'order': int(np.log2(nside))
    })
    return cls.from_url('%s?%s' % (MOC.MOC_SERVER_ROOT_URL, query))
|
python
|
def get_version(self):
    # type: () -> str
    """
    Retrieves the bundle version, using the ``__version__`` or
    ``__version_info__`` attributes of its module.

    :return: The bundle version, "0.0.0" by default
    """
    # Prefer the explicit version string
    version = getattr(self.__module, "__version__", None)
    if version:
        return version

    # Convert the __version_info__ entry
    # fix: join the local `info` value -- the previous code referenced the
    # undefined global name `__version_info__` and raised NameError here.
    info = getattr(self.__module, "__version_info__", None)
    if info:
        return ".".join(str(part) for part in info)

    # No version
    return "0.0.0"
|
java
|
/**
 * Renders a reference definition for *id* pointing at the type of *typ*,
 * recording the referenced type name for later emission.
 */
private String genReference(String id, ElementDefinition.TypeRefComponent typ) {
    final String ref = getTypeName(typ);
    references.add(ref);
    final ST template = tmplt(REFERENCE_DEFN_TEMPLATE);
    template.add("id", id);
    template.add("ref", ref);
    return template.render();
}
|
python
|
def _writeResponse(self, response, request, status=200):
    """
    Send *response* back on *request* with the given HTTP *status*.

    request -- request message
    response --- response message
    status -- HTTP Status
    """
    request.setResponseCode(status)
    if self.encoding is None:
        mimeType = "text/xml"
    else:
        mimeType = 'text/xml; charset="%s"' % self.encoding
    request.setHeader("Content-Type", mimeType)
    request.setHeader("Content-Length", str(len(response)))
    request.write(response)
    request.finish()
|
java
|
/**
 * Factory for NamedType instances, exposed so tests can intercept creation.
 */
@VisibleForTesting
public NamedType createNamedType(
    StaticTypedScope scope, String reference, String sourceName, int lineno, int charno) {
    final NamedType namedType = new NamedType(scope, this, reference, sourceName, lineno, charno);
    return namedType;
}
|
java
|
/**
 * Begins an App Service Environment create-or-update operation.
 *
 * @param resourceGroupName name of the resource group
 * @param name name of the App Service Environment
 * @param hostingEnvironmentEnvelope configuration details of the environment
 * @param serviceCallback the callback notified when the operation completes
 * @return the ServiceFuture tracking the long-running operation
 */
public ServiceFuture<AppServiceEnvironmentResourceInner> beginCreateOrUpdateAsync(String resourceGroupName, String name, AppServiceEnvironmentResourceInner hostingEnvironmentEnvelope, final ServiceCallback<AppServiceEnvironmentResourceInner> serviceCallback) {
    return ServiceFuture.fromResponse(beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, name, hostingEnvironmentEnvelope), serviceCallback);
}
|
java
|
/**
 * Registers *session* under *id*, wrapped in a SessionSchedulable.
 *
 * NOTE(review): this synchronizes on the *session* argument (an object
 * callers can also lock), not on the idToSession map -- confirm that
 * idToSession is itself thread-safe and that callers rely on this locking
 * convention.
 */
public void addSession(String id, Session session) {
    synchronized (session) {
        SessionSchedulable schedulable =
            new SessionSchedulable(session, getType());
        idToSession.put(id, schedulable);
    }
}
|
java
|
/**
 * Builds a multipart Part for *file*, detecting its MIME type with Tika and
 * attaching a Content-Disposition header derived from *name* and the file
 * name.
 *
 * @throws IOException when the file cannot be read for type detection
 */
public static Part buildFilePart(final String name, final File file) throws IOException {
    //Files.probeContentType(file.toPath()) always returns null due to unfixed jdk bug
    //using Tika to fetch file mime type instead
    final String mimeType = new Tika().detect(file);
    final Part part = new Part(new FileContent(mimeType, file));
    final HttpHeaders headers = new HttpHeaders();
    headers.set(Request.CONTENT_DISPOSITION,
            String.format(Request.FILE_CONTENT_DISPOSITION, name, file.getName()));
    part.setHeaders(headers);
    return part;
}
|
python
|
def xml_starttag(self, name, attrs=None):
    """
    Write XML start tag (optionally with attributes) and raise the
    nesting level.
    """
    self.write(self.indent * self.level)
    self.write(u"<%s" % xmlquote(name))
    if attrs:
        for attr_name, attr_value in attrs.items():
            self.write(u' %s="%s"' % (xmlquote(attr_name), xmlquoteattr(attr_value)))
    self.writeln(u">")
    self.level += 1
|
python
|
def _values(metadata, rel):
    """Searches a set <metadata> to find all relations <rel>

    Returns a list of the values of those relations
    (A list, because a rel can occur more than once)"""
    return [entry[VAL] for entry in metadata if entry[REL] == rel]
|
python
|
def _get_from_bin(self):
    """
    Retrieves the Java library path according to the real installation of
    the java executable

    :return: The path to the JVM library, or None
    """
    # Resolve symlinks to find the real interpreter installation path.
    java_bin = os.path.realpath(self._java)
    if not os.path.exists(java_bin):
        return None
    # The Java home directory is one level above the bin directory.
    java_home = os.path.abspath(os.path.join(os.path.dirname(java_bin), '..'))
    # Look for the JVM library under that home.
    return self.find_libjvm(java_home)
|
python
|
def _dispatch_commands(self, from_state, to_state, smtp_command):
    """This method dispatches a SMTP command to the appropriate handler
    method. It is called after a new command was received and a valid
    transition was found."""
    handler_name = 'smtp_%s' % smtp_command.lower().replace(' ', '_')
    handler = getattr(self, handler_name, None)
    if handler is None:
        # Transition is defined but no handler method exists for it.
        self.reply(451, 'Temporary Local Problem: Please come back later')
        return
    # Don't catch InvalidDataError here - else the state would be moved
    # forward. Instead the handle_input will catch it and send out the
    # appropriate reply.
    handler()
|
java
|
/**
 * Copies every request parameter into a plain Map keyed by parameter name,
 * each value being the full list of values for that name.
 */
@Override
public Map<String, List<String>> parameters() {
    final Map<String, List<String>> copy = new HashMap<>();
    for (final String name : request.params().names()) {
        copy.put(name, request.params().getAll(name));
    }
    return copy;
}
|
python
|
def get_plan_from_semi_dual(alpha, b, C, regul):
    """
    Retrieve optimal transportation plan from optimal semi-dual potentials.

    Parameters
    ----------
    alpha: array, shape = len(a)
        Optimal semi-dual potentials.
    b: array, shape = len(b)
        Second input histogram (should be non-negative and sum to 1).
    C: array, shape = len(a) x len(b)
        Ground cost matrix.
    regul: Regularization object
        Should implement a delta_Omega(X) method.

    Returns
    -------
    T: array, shape = len(a) x len(b)
        Optimal transportation plan.
    """
    # Broadcast alpha over the columns of the cost matrix.
    X = alpha[:, np.newaxis] - C
    gradient = regul.max_Omega(X, b)[1]
    return gradient * b
|
python
|
def read_bytes(self):
    """
    reading bytes; update progress bar after 1 ms

    Refreshes every progress bar from shared transfer state and
    re-schedules itself until all transfers signal completion.

    NOTE(review): relies on module-level globals `i_max`, `total_chunks`,
    `file_name` and `exit_flag` being maintained by the download code --
    confirm they exist before this first runs.
    """
    global exit_flag
    for self.i in range(0, self.length) :
        # Mirror the shared per-file counters into this widget's state.
        self.bytes[self.i] = i_max[self.i]
        self.maxbytes[self.i] = total_chunks[self.i]
        self.progress[self.i]["maximum"] = total_chunks[self.i]
        self.progress[self.i]["value"] = self.bytes[self.i]
        self.str[self.i].set(file_name[self.i]+ " " + str(self.bytes[self.i])
            + "KB / " + str(int(self.maxbytes[self.i] + 1)) + " KB")
    # Once every transfer has signalled completion, tear down the frame;
    # otherwise poll again in 10 ms (despite the "1 ms" in the summary).
    if exit_flag == self.length:
        exit_flag = 0
        self.frame.destroy()
    else:
        self.frame.after(10, self.read_bytes)
|
java
|
/**
 * Returns the (normalized) character encoding used by *w*, or null when
 * the writer does not expose one.
 */
public static String findEncodingFor(Writer w)
{
    if (!(w instanceof OutputStreamWriter)) {
        return null;
    }
    /* [WSTX-146]: It is important that we normalize this, since
     * older JDKs return legacy encoding names ("UTF8" instead of
     * canonical "UTF-8")
     */
    return normalize(((OutputStreamWriter) w).getEncoding());
}
|
java
|
/**
 * Verifies that *schema* contains every attribute required for a TrentoP
 * project feature type, reporting each missing mandatory key through
 * verifyFeatureKey/pm.
 *
 * @param schema the feature type to check
 * @param pm progress monitor used for error reporting
 * @return true when the (optional) PER_AREA attribute is also present
 */
public static boolean verifyProjectType( SimpleFeatureType schema, IHMProgressMonitor pm ) {
    // Collapse the tenfold copy-pasted check into a single data-driven loop.
    String[] mandatoryFields = {
        PipesTrentoP.ID.getAttributeName(),
        PipesTrentoP.DRAIN_AREA.getAttributeName(),
        PipesTrentoP.INITIAL_ELEVATION.getAttributeName(),
        PipesTrentoP.FINAL_ELEVATION.getAttributeName(),
        PipesTrentoP.RUNOFF_COEFFICIENT.getAttributeName(),
        PipesTrentoP.KS.getAttributeName(),
        PipesTrentoP.MINIMUM_PIPE_SLOPE.getAttributeName(),
        PipesTrentoP.AVERAGE_RESIDENCE_TIME.getAttributeName(),
        PipesTrentoP.PIPE_SECTION_TYPE.getAttributeName(),
        PipesTrentoP.AVERAGE_SLOPE.getAttributeName(),
    };
    for (String searchedField : mandatoryFields) {
        verifyFeatureKey(findAttributeName(schema, searchedField), searchedField, pm);
    }
    // PER_AREA is only probed for presence; its absence merely yields false.
    String perArea = PipesTrentoP.PER_AREA.getAttributeName();
    return findAttributeName(schema, perArea) != null;
}
|
java
|
/**
 * Context-aware counterpart of {@code CompletableFuture.runAsync(Runnable, Executor)}:
 * captures the relevant thread context (unless the action already carries
 * one) and runs the action on the given executor, returning a managed
 * completable future.
 *
 * @param action the task to run; must not be a ManagedTask
 * @param executor the executor to run the action on
 * @return a managed CompletableFuture tracking the action
 * @throws IllegalArgumentException if action is a ManagedTask
 */
@Trivial // traced by caller
public static CompletableFuture<Void> runAsync(Runnable action, Executor executor) {
    // Reject ManagedTask so that we have the flexibility to decide later how to handle ManagedTaskListener and execution properties
    if (action instanceof ManagedTask)
        throw new IllegalArgumentException(ManagedTask.class.getName());

    FutureRefExecutor futureExecutor = supportsAsync(executor);

    ThreadContextDescriptor contextDescriptor;
    // Fix: the action is a Runnable, so the pre-contextualized case is
    // ContextualRunnable. The previous ContextualSupplier check looks like a
    // copy/paste from supplyAsync and would throw ClassCastException on the
    // cast below whenever it matched.
    if (action instanceof ContextualRunnable) {
        ContextualRunnable r = (ContextualRunnable) action;
        contextDescriptor = r.getContextDescriptor();
        action = r.getAction();
    } else if (executor instanceof WSManagedExecutorService) {
        WSContextService contextSvc = ((WSManagedExecutorService) executor).getContextService();
        contextDescriptor = contextSvc.captureThreadContext(XPROPS_SUSPEND_TRAN);
    } else {
        contextDescriptor = null;
    }

    if (JAVA8) {
        // On Java 8 delegate to the JDK factory and wrap the result.
        action = new ContextualRunnable(contextDescriptor, action);
        CompletableFuture<Void> completableFuture = CompletableFuture.runAsync(action, futureExecutor == null ? executor : futureExecutor);
        return new ManagedCompletableFuture<Void>(completableFuture, executor, futureExecutor);
    } else {
        // Otherwise create the managed future first so the runnable can complete it.
        ManagedCompletableFuture<Void> completableFuture = new ManagedCompletableFuture<Void>(executor, futureExecutor);
        action = new ContextualRunnable(contextDescriptor, action, completableFuture);
        (futureExecutor == null ? executor : futureExecutor).execute(action);
        return completableFuture;
    }
}
|
java
|
/**
 * Registers the extension interfaces implemented by this instance (both the
 * superclass's direct interfaces and this class's own), exactly once.
 */
private void detectImplmentedExtension() {
    if (isImplExtRegistered) {
        return;
    }
    final Class thisClass = getThis().getClass();
    // interfaces declared on the superclass
    for (Class iface : thisClass.getSuperclass().getInterfaces()) {
        detectImplmentedExtension(iface);
    }
    // interfaces declared on this class itself
    for (Class iface : thisClass.getInterfaces()) {
        detectImplmentedExtension(iface);
    }
    isImplExtRegistered = true;
}
|
java
|
/**
 * Returns (generating and class-loading it first when necessary) the Java
 * access wrapper class for *component*, exposing its remote-access methods
 * as plain Java methods.
 *
 * @param isNew out-parameter; set to true when the wrapper had to be
 *        (re)generated rather than loaded from an up-to-date class file
 * @param create when false, return null instead of generating a missing wrapper
 * @return the wrapper class, or null when create is false and no current
 *         wrapper exists
 * @throws PageException when class loading or bytecode generation fails
 */
public static Class getComponentJavaAccess(PageContext pc, Component component, RefBoolean isNew, boolean create, boolean writeLog, boolean suppressWSbeforeArg, boolean output,
        boolean returnValue) throws PageException {
    isNew.setValue(false);
    String classNameOriginal = component.getPageSource().getClassName();
    String className = getClassname(component, null).concat("_wrap");
    String real = className.replace('.', '/');
    String realOriginal = classNameOriginal.replace('.', '/');
    Mapping mapping = component.getPageSource().getMapping();

    PhysicalClassLoader cl = null;
    try {
        cl = (PhysicalClassLoader) ((PageContextImpl) pc).getRPCClassLoader(false);
    }
    catch (IOException e) {
        throw Caster.toPageException(e);
    }
    Resource classFile = cl.getDirectory().getRealResource(real.concat(".class"));
    Resource classFileOriginal = mapping.getClassRootDirectory().getRealResource(realOriginal.concat(".class"));

    // LOAD CLASS
    // print.out(className);
    // check last Mod
    // Reuse the cached wrapper only when it is at least as new as the
    // component's own class file and none of its children changed since.
    if (classFile.lastModified() >= classFileOriginal.lastModified()) {
        try {
            Class clazz = cl.loadClass(className);
            if (clazz != null && !hasChangesOfChildren(classFile.lastModified(), clazz)) return registerTypeMapping(clazz);
        }
        catch (Throwable t) {
            // A load failure simply falls through to regeneration.
            ExceptionUtil.rethrowIfNecessary(t);
        }
    }
    if (!create) return null;
    isNew.setValue(true);
    // print.out("new");

    // CREATE CLASS: emit a plain java/lang/Object subclass via ASM.
    ClassWriter cw = ASMUtil.getClassWriter();
    cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, real, null, "java/lang/Object", null);

    // GeneratorAdapter ga = new
    // GeneratorAdapter(Opcodes.ACC_PUBLIC,Page.STATIC_CONSTRUCTOR,null,null,cw);
    // StaticConstrBytecodeContext statConstr = null;//new
    // BytecodeContext(null,null,null,cw,real,ga,Page.STATIC_CONSTRUCTOR);

    /// ga = new GeneratorAdapter(Opcodes.ACC_PUBLIC,Page.CONSTRUCTOR,null,null,cw);
    ConstrBytecodeContext constr = null;// new BytecodeContext(null,null,null,cw,real,ga,Page.CONSTRUCTOR);

    // field component
    // FieldVisitor fv = cw.visitField(Opcodes.ACC_PRIVATE, "c", "Llucee/runtime/ComponentImpl;", null,
    // null);
    // fv.visitEnd();

    java.util.List<LitString> _keys = new ArrayList<LitString>();

    // remote methods: one wrapper method per remotely accessible component method.
    Collection.Key[] keys = component.keys(Component.ACCESS_REMOTE);
    int max;
    for (int i = 0; i < keys.length; i++) {
        max = -1;
        while ((max = createMethod(constr, _keys, cw, real, component.get(keys[i]), max, writeLog, suppressWSbeforeArg, output, returnValue)) != -1) {
            break;// for overload remove this
        }
    }

    // Constructor
    GeneratorAdapter adapter = new GeneratorAdapter(Opcodes.ACC_PUBLIC, CONSTRUCTOR_OBJECT, null, null, cw);
    adapter.loadThis();
    adapter.invokeConstructor(Types.OBJECT, CONSTRUCTOR_OBJECT);
    lucee.transformer.bytecode.Page.registerFields(
        new BytecodeContext(null, constr, getPage(constr), _keys, cw, real, adapter, CONSTRUCTOR_OBJECT, writeLog, suppressWSbeforeArg, output, returnValue), _keys);
    adapter.returnValue();
    adapter.endMethod();

    cw.visitEnd();
    byte[] barr = cw.toByteArray();
    try {
        // Persist the generated bytecode, then load it with a fresh RPC class loader.
        ResourceUtil.touch(classFile);
        IOUtil.copy(new ByteArrayInputStream(barr), classFile, true);
        cl = (PhysicalClassLoader) ((PageContextImpl) pc).getRPCClassLoader(true);
        return registerTypeMapping(cl.loadClass(className, barr));
    }
    catch (Throwable t) {
        ExceptionUtil.rethrowIfNecessary(t);
        throw Caster.toPageException(t);
    }
}
|
java
|
/**
 * Handles the OAuth 2 error callback from a provider: stashes the error
 * details in the session, then redirects to the connection status page.
 */
@RequestMapping(value="/{providerId}", method=RequestMethod.GET, params="error")
public RedirectView oauth2ErrorCallback(@PathVariable String providerId,
        @RequestParam("error") String error,
        @RequestParam(value="error_description", required=false) String errorDescription,
        @RequestParam(value="error_uri", required=false) String errorUri,
        NativeWebRequest request) {
    final Map<String, String> errors = new HashMap<String, String>();
    errors.put("error", error);
    if (errorDescription != null) {
        errors.put("errorDescription", errorDescription);
    }
    if (errorUri != null) {
        errors.put("errorUri", errorUri);
    }
    sessionStrategy.setAttribute(request, AUTHORIZATION_ERROR_ATTRIBUTE, errors);
    return connectionStatusRedirect(providerId, request);
}
|
java
|
/**
 * Merges administered-object definitions declared in XML into the injection
 * bindings of this namespace configuration, creating bindings on first
 * sight of a JNDI name.
 */
@Override
public void processXML() throws InjectionException {
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    if (isTraceOn && tc.isEntryEnabled())
        Tr.entry(tc, "processXML : " + this);

    List<? extends AdministeredObject> administeredObjectDefinitions = ivNameSpaceConfig.getJNDIEnvironmentRefs(AdministeredObject.class);
    if (administeredObjectDefinitions != null) {
        for (AdministeredObject adminObject : administeredObjectDefinitions) {
            String jndiName = adminObject.getName();
            InjectionBinding<AdministeredObjectDefinition> existing = ivAllAnnotationsCollection.get(jndiName);
            AdministeredObjectDefinitionInjectionBinding binding;
            if (existing == null) {
                // First time this JNDI name is seen: create and register a binding.
                binding = new AdministeredObjectDefinitionInjectionBinding(jndiName, ivNameSpaceConfig);
                addInjectionBinding(binding);
            } else {
                binding = (AdministeredObjectDefinitionInjectionBinding) existing;
            }
            binding.mergeXML(adminObject);
        }
    }

    if (isTraceOn && tc.isEntryEnabled())
        Tr.exit(tc, "processXML : " + this);
}
|
java
|
/**
 * Lazily resolves the Groovy AST ClassNode for theClass by locating its
 * .groovy source on the classpath and recompiling it (only attempted for
 * GroovyObject subclasses). Returns null when no source can be found.
 */
public ClassNode getClassNode() {
    if (classNode == null && GroovyObject.class.isAssignableFrom(theClass)) {
        // let's try load it from the classpath
        String groovyFile = theClass.getName();
        // Inner/closure classes compile from their outer class's source file.
        int idx = groovyFile.indexOf('$');
        if (idx > 0) {
            groovyFile = groovyFile.substring(0, idx);
        }
        groovyFile = groovyFile.replace('.', '/') + ".groovy";

        //System.out.println("Attempting to load: " + groovyFile);
        URL url = theClass.getClassLoader().getResource(groovyFile);
        if (url == null) {
            url = Thread.currentThread().getContextClassLoader().getResource(groovyFile);
        }
        if (url != null) {
            try {
                /*
                 * todo there is no CompileUnit in scope so class name
                 * checking won't work but that mostly affects the bytecode
                 * generation rather than viewing the AST
                 */
                // Capture the matching ClassNode as a side effect of classgen.
                CompilationUnit.ClassgenCallback search = new CompilationUnit.ClassgenCallback() {
                    public void call(ClassVisitor writer, ClassNode node) {
                        if (node.getName().equals(theClass.getName())) {
                            MetaClassImpl.this.classNode = node;
                        }
                    }
                };
                CompilationUnit unit = new CompilationUnit();
                unit.setClassgenCallback(search);
                unit.addSource(url);
                unit.compile(Phases.CLASS_GENERATION);
            }
            catch (Exception e) {
                throw new GroovyRuntimeException("Exception thrown parsing: " + groovyFile + ". Reason: " + e, e);
            }
        }
    }
    return classNode;
}
|
python
|
def delete_vector(self, data, v=None):
    """
    Deletes vector v and his id (data) in all matching buckets in the storage.
    The data argument must be JSON-serializable.
    """
    # Delete data id in each hashes
    for lshash in self.lshashes:
        if v is None:
            bucket_keys = self.storage.get_all_bucket_keys(lshash.hash_name)
        else:
            bucket_keys = lshash.hash_vector(v)
        self.storage.delete_vector(lshash.hash_name, bucket_keys, data)
|
python
|
def _has_changed(instance):
    """
    Check if some tracked fields have changed
    """
    for field, original in instance._original_fields.items():
        if field == 'pk':
            continue
        if isinstance(instance._meta.get_field(field), ManyToManyField):
            continue
        try:
            if field not in getattr(instance, '_tracked_fields', []):
                continue
            # Foreign keys are compared through their raw <field>_id value.
            if isinstance(instance._meta.get_field(field), ForeignKey):
                current = getattr(instance, '{0}_id'.format(field))
            else:
                current = getattr(instance, field)
            if current != original:
                return True
        except TypeError:
            # Can't compare old and new value, should be different.
            return True
    return False
|
java
|
/**
 * Asynchronously looks up the given summoner names.
 *
 * @param names summoner names to resolve
 * @return a Future yielding a map from summoner name to Summoner
 */
public Future<Map<String, Summoner>> getSummoners(String... names) {
    return new ApiFuture<>(() -> handler.getSummoners(names));
}
|
python
|
def handle_AnalysisRequest(self, instance):
    """Possible redirects for an AR.

    Order of checks:
    1. If AR is sample_due: receive it before proceeding (workflow
       failures are ignored).
    2. If AR belongs to Batch, redirect to the BatchBook view.
    3. If permission/workflow permit: go to AR manage_results.
    4. For other ARs, just redirect to the view screen.
    """
    workflow = getToolByName(self.context, 'portal_workflow')
    if workflow.getInfoFor(instance, 'review_state') == 'sample_due':
        try:
            workflow.doActionFor(instance, 'receive')
        except WorkflowException:
            pass

    batch = instance.getBatch()
    if batch:
        return batch.absolute_url() + "/batchbook"

    membership = getToolByName(self.context, 'portal_membership')
    if membership.checkPermission(EditResults, instance):
        return instance.absolute_url() + '/manage_results'

    return instance.absolute_url()
|
python
|
def solve_full(z, Fval, DPhival, G, A):
    """Compute a primal-dual Newton step by solving the condensed KKT system.

    The iterate ``z`` and residual ``Fval`` are packed as
    ``[x (N), nu (P), l (M), s (M)]``: primal variable, equality
    multipliers, inequality multipliers, slacks.

    Parameters
    ----------
    z : ndarray
        Current primal-dual iterate.
    Fval : ndarray
        KKT residual at ``z`` (dual, primal, centrality parts).
    DPhival : ndarray or sparse matrix, shape (N, N)
        Hessian block of the KKT system.
    G : ndarray, shape (M, N)
        Inequality-constraint matrix.
    A : ndarray or sparse matrix, shape (P, N)
        Equality-constraint matrix.

    Returns
    -------
    ndarray
        Search direction ``dz``, packed like ``z``.
    """
    M, N = G.shape
    P, N = A.shape
    # Only the multiplier/slack components of z are needed explicitly;
    # the x and nu components enter solely through the residuals below.
    l = z[N+P:N+P+M]
    s = z[N+P+M:]
    # Residuals: dual infeasibility, primal infeasibility (equality and
    # inequality parts), centrality.
    rd = Fval[0:N]
    rp1 = Fval[N:N+P]
    rp2 = Fval[N+P:N+P+M]
    rc = Fval[N+P+M:]
    # Sigma matrix (diagonal scaling l_i / s_i)
    SIG = np.diag(l/s)
    # Condensed (N+P) x (N+P) system in (dx, dnu)
    if issparse(DPhival):
        if not issparse(A):
            A = csr_matrix(A)
        H = DPhival + mydot(G.T, mydot(SIG, G))
        J = bmat([[H, A.T], [A, None]])
    else:
        if issparse(A):
            A = A.toarray()
        J = np.zeros((N+P, N+P))
        J[0:N, 0:N] = DPhival + mydot(G.T, mydot(SIG, G))
        J[0:N, N:] = A.T
        J[N:, 0:N] = A
    b1 = -rd - mydot(G.T, mydot(SIG, rp2)) + mydot(G.T, rc/s)
    b2 = -rp1
    b = np.hstack((b1, b2))
    # Prepare iterative solve via MINRES: sign-flip part of the system and
    # Jacobi-precondition with the absolute diagonal.
    sign = np.zeros(N+P)
    # BUG FIX: integer division required -- on Python 3, N/2 is a float
    # and float slice bounds raise TypeError.
    sign[0:N//2] = 1.0
    sign[N//2:] = -1.0
    S = diags(sign, 0)
    J_new = mydot(S, csr_matrix(J))
    b_new = mydot(S, b)
    dJ_new = np.abs(J_new.diagonal())
    dPc = np.ones(J_new.shape[0])
    ind = (dJ_new > 0.0)
    dPc[ind] = 1.0/dJ_new[ind]
    Pc = diags(dPc, 0)
    dxnu, info = minres(J_new, b_new, tol=1e-8, M=Pc)
    dx = dxnu[0:N]
    dnu = dxnu[N:]
    # Recover search directions for slacks and inequality multipliers.
    ds = -rp2 - mydot(G, dx)
    dl = -mydot(SIG, ds) - rc/s
    dz = np.hstack((dx, dnu, dl, ds))
    return dz
|
java
|
public static void disposeSplash() {
    // Nothing to do when no splash screen was ever created.
    if (instance == null) {
        return;
    }
    // Walk up the component hierarchy and dispose every enclosing window.
    for (Container parent = instance.getParent(); parent != null; parent = parent.getParent()) {
        if (parent instanceof Window) {
            ((Window) parent).dispose();
        }
    }
    instance = null;
}
|
python
|
def status(ctx):
    """Print a status of this Lambda function.

    Shows the Policy, Role, Function and Event Sources sections; each
    section now prints '    None' when the item is missing (previously
    only the Function and Event Sources sections did, which made a
    missing policy/role indistinguishable from an empty section).
    """
    status = ctx.status()
    click.echo(click.style('Policy', bold=True))
    if status['policy']:
        line = '    {} ({})'.format(
            status['policy']['PolicyName'],
            status['policy']['Arn'])
        click.echo(click.style(line, fg='green'))
    else:
        # CONSISTENCY FIX: mirror the None reporting of the sections below.
        click.echo(click.style('    None', fg='green'))
    click.echo(click.style('Role', bold=True))
    if status['role']:
        line = '    {} ({})'.format(
            status['role']['RoleName'],
            status['role']['Arn'])
        click.echo(click.style(line, fg='green'))
    else:
        click.echo(click.style('    None', fg='green'))
    click.echo(click.style('Function', bold=True))
    if status['function']:
        line = '    {} ({})'.format(
            status['function']['Configuration']['FunctionName'],
            status['function']['Configuration']['FunctionArn'])
        click.echo(click.style(line, fg='green'))
    else:
        click.echo(click.style('    None', fg='green'))
    click.echo(click.style('Event Sources', bold=True))
    if status['event_sources']:
        for event_source in status['event_sources']:
            if event_source:
                arn = event_source.get('EventSourceArn')
                state = event_source.get('State', 'Enabled')
                line = '    {}: {}'.format(arn, state)
                click.echo(click.style(line, fg='green'))
    else:
        click.echo(click.style('    None', fg='green'))
|
python
|
def get(self, *args, **kwargs):
    """Return the single instance matching this query, optionally narrowed
    by additional filter args/kwargs.

    Raises the model's ``DoesNotExist`` when no row matches and its
    ``MultipleObjectsReturned`` when more than one row matches.
    """
    if args or kwargs:
        # Narrow first, then delegate to the filtered queryset's get().
        return self.filter(*args, **kwargs).get()
    self._execute_query()
    matches = len(self._result_cache)
    if matches == 0:
        raise self.model.DoesNotExist
    if matches > 1:
        raise self.model.MultipleObjectsReturned(
            '{} objects found'.format(matches))
    return self[0]
|
python
|
def sign(self, byts):
    '''
    Compute the ECC signature for the given bytestream.
    Args:
        byts (bytes): The bytes to sign.
    Returns:
        bytes: The ECC signature bytes.
    '''
    # Hash the payload ourselves, then hand the digest to the private key
    # with Prehashed() so the backend does not hash a second time.
    chosen_hash = c_hashes.SHA256()
    hasher = c_hashes.Hash(chosen_hash, default_backend())
    hasher.update(byts)
    digest = hasher.finalize()
    return self.priv.sign(digest,
                          c_ec.ECDSA(c_utils.Prehashed(chosen_hash))
                          )
|
python
|
def find_by_name(self, item_name, items_list, name_list=None):
    """Return the element of *items_list* whose name equals *item_name*.

    When *name_list* is provided (and non-empty) it supplies the lookup
    names positionally; otherwise ``item.name`` is read from every truthy
    item. Returns ``False`` when there is no match.
    """
    if name_list:
        names = name_list
    else:
        names = [entry.name for entry in items_list if entry]
    try:
        return items_list[names.index(item_name)]
    except ValueError:
        return False
|
python
|
def hash_array(vals, encoding='utf8', hash_key=None, categorize=True):
    """
    Given a 1d array, return an array of deterministic integers.
    .. versionadded:: 0.19.2
    Parameters
    ----------
    vals : ndarray, Categorical
    encoding : string, default 'utf8'
        encoding for data & key when strings
    hash_key : string key to encode, default to _default_hash_key
    categorize : bool, default True
        Whether to first categorize object arrays before hashing. This is more
        efficient when the array contains duplicate values.
        .. versionadded:: 0.20.0
    Returns
    -------
    1d uint64 numpy array of hash values, same length as the vals
    """
    if not hasattr(vals, 'dtype'):
        raise TypeError("must pass a ndarray-like")
    dtype = vals.dtype
    if hash_key is None:
        hash_key = _default_hash_key
    # For categoricals, we hash the categories, then remap the codes to the
    # hash values. (This check is above the complex check so that we don't ask
    # numpy if categorical is a subdtype of complex, as it will choke).
    if is_categorical_dtype(dtype):
        return _hash_categorical(vals, encoding, hash_key)
    elif is_extension_array_dtype(dtype):
        vals, _ = vals._values_for_factorize()
        dtype = vals.dtype
    # we'll be working with everything as 64-bit values, so handle this
    # 128-bit value early
    if np.issubdtype(dtype, np.complex128):
        return hash_array(vals.real) + 23 * hash_array(vals.imag)
    # First, turn whatever array this is into unsigned 64-bit ints, if we can
    # manage it.
    # BUG FIX: a dtype object is never an instance of the builtin bool, so
    # `isinstance(dtype, np.bool)` was always False (and np.bool itself is
    # removed in NumPy >= 1.24). Check the scalar type instead so boolean
    # arrays take the fast unsigned-int path.
    elif issubclass(dtype.type, np.bool_):
        vals = vals.astype('u8')
    elif issubclass(dtype.type, (np.datetime64, np.timedelta64)):
        vals = vals.view('i8').astype('u8', copy=False)
    elif issubclass(dtype.type, np.number) and dtype.itemsize <= 8:
        vals = vals.view('u{}'.format(vals.dtype.itemsize)).astype('u8')
    else:
        # With repeated values, its MUCH faster to categorize object dtypes,
        # then hash and rename categories. We allow skipping the categorization
        # when the values are known/likely to be unique.
        if categorize:
            from pandas import factorize, Categorical, Index
            codes, categories = factorize(vals, sort=False)
            cat = Categorical(codes, Index(categories),
                              ordered=False, fastpath=True)
            return _hash_categorical(cat, encoding, hash_key)
        try:
            vals = hashing.hash_object_array(vals, hash_key, encoding)
        except TypeError:
            # we have mixed types
            vals = hashing.hash_object_array(vals.astype(str).astype(object),
                                             hash_key, encoding)
    # Then, redistribute these 64-bit ints within the space of 64-bit ints
    # (splitmix64-style finalizer).
    vals ^= vals >> 30
    vals *= np.uint64(0xbf58476d1ce4e5b9)
    vals ^= vals >> 27
    vals *= np.uint64(0x94d049bb133111eb)
    vals ^= vals >> 31
    return vals
|
java
|
public static appfwxmlcontenttype[] get_filtered(nitro_service service, String filter) throws Exception{
    // Build the filter option and delegate to the generic filtered fetch.
    options opt = new options();
    opt.set_filter(filter);
    appfwxmlcontenttype resource = new appfwxmlcontenttype();
    return (appfwxmlcontenttype[]) resource.getfiltered(service, opt);
}
|
java
|
// Returns the SSL configuration registered under the global outbound-default
// alias, or null when no such alias is configured or no config is mapped to it.
public synchronized SSLConfig getOutboundDefaultSSLConfig() throws IllegalArgumentException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        Tr.entry(tc, "getOutboundDefaultSSLConfig");
    SSLConfig outboundDefaultSSLConfig = null;
    // Alias name comes from the global SSL properties.
    String outboundDefaultAlias = getGlobalProperty(LibertyConstants.SSLPROP_OUTBOUND_DEFAULT_ALIAS);
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
        Tr.debug(tc, "outboundDefaultAlias: " + outboundDefaultAlias);
    if (outboundDefaultAlias != null) {
        outboundDefaultSSLConfig = sslConfigMap.get(outboundDefaultAlias);
        if (outboundDefaultSSLConfig != null) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                Tr.exit(tc, "outboundDefaultAlias not null, getOutboundDefaultSSLConfig for: " + outboundDefaultAlias);
            return outboundDefaultSSLConfig;
        }
    }
    // NOTE(review): this exit trace also fires when the alias exists but no
    // config is mapped to it, not only when the alias is null.
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        Tr.exit(tc, "defaultAlias is null");
    return null;
}
|
python
|
def _draw_lines(self, bg, colour, extent, line, xo, yo):
    """Draw a set of lines from a vector tile."""
    scaled = [
        self._scale_coords(px, py, extent, xo, yo) for px, py in line
    ]
    self._draw_lines_internal(scaled, colour, bg)
|
java
|
// Percent-encodes the reserved characters of a query string. Spaces are
// deliberately left to become '+' symbols rather than "%20".
public static String queryEncode(String query, Charset charset) {
    final String encoded = encodeReserved(query, FragmentType.QUERY, charset);
    return encoded;
}
|
java
|
// Executes an update described by the input handler (SQL text + parameters)
// and maps the outcome through the given output handler. Both handlers are
// required; a null argument triggers the standard null-argument exception.
public <T> T update(InputHandler inputHandler, OutputHandler<T> outputHandler) throws SQLException {
    AssertUtils.assertNotNull(inputHandler, nullException());
    AssertUtils.assertNotNull(outputHandler, nullException());
    String sql = inputHandler.getQueryString();
    return this.<T>update(this.getStatementHandler(), sql, outputHandler, inputHandler.getQueryParameters());
}
|
python
|
def is_friend(self):
    """:class:`bool`: Checks if the user is your friend.

    .. note::
        This only applies to non-bot accounts.
    """
    rel = self.relationship
    return rel is not None and rel.type is RelationshipType.friend
|
java
|
// Returns photos updated on or after minDate. Optional extras are joined into
// a comma-separated list; perPage/page are only sent when positive, letting
// the service apply its defaults otherwise.
public PhotoList<Photo> recentlyUpdated(Date minDate, Set<String> extras, int perPage, int page) throws FlickrException {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("method", METHOD_RECENTLY_UPDATED);
    // The API expects a Unix timestamp in seconds.
    parameters.put("min_date", Long.toString(minDate.getTime() / 1000L));
    if (extras != null && !extras.isEmpty()) {
        parameters.put("extras", StringUtilities.join(extras, ","));
    }
    if (perPage > 0) {
        parameters.put("per_page", Integer.toString(perPage));
    }
    if (page > 0) {
        parameters.put("page", Integer.toString(page));
    }
    Response response = transport.get(transport.getPath(), parameters, apiKey, sharedSecret);
    if (response.isError()) {
        throw new FlickrException(response.getErrorCode(), response.getErrorMessage());
    }
    Element photosElement = response.getPayload();
    PhotoList<Photo> photos = PhotoUtils.createPhotoList(photosElement);
    return photos;
}
|
java
|
// Runs the callable with the given authentication installed in the security
// context, restoring the previous context afterwards even if the body throws.
@Deprecated
public static <V,T extends Exception> V impersonate(Authentication auth, Callable<V,T> body) throws T {
    SecurityContext old = impersonate(auth);
    try {
        return body.call();
    } finally {
        // Always restore the caller's original security context.
        SecurityContextHolder.setContext(old);
    }
}
|
java
|
public double[] getMeanMonthly () {
    // Provides a per-month mean of data across all years for that month:
    // element 0 averages every January value, element 1 every February
    // value, and so on. Java arrays are zero-initialized, so no explicit
    // clearing loop is needed.
    int monthlyCount[] = new int[12];    // number of samples per month
    double monthlySum[] = new double[12]; // sum of samples per month
    double meanMonthly[] = new double[12]; // resulting mean per month
    int totalCount = 0;
    for (int i = 0; i < this.getNumData(); i++) {
        // BUG FIX: valid date indices are 0..getNumDates()-1, so the guard
        // must be ">=" (it previously let i == getNumDates() through) and it
        // must run BEFORE getDate(i), which it is meant to protect.
        if (i >= this.getNumDates()) {
            throw new IllegalArgumentException("No date for data #" + i);
        }
        Date date = this.getDate(i);
        double data = this.getData(i);
        // Deprecated Date API: getMonth() is 0 = January .. 11 = December.
        int month = date.getMonth();
        if (month > 11) {
            throw new IllegalArgumentException("Month number data out of range: " + month);
        }
        monthlyCount[month]++;
        monthlySum[month] += data;
        totalCount++;
    }
    if (totalCount == 0) {
        throw new IllegalArgumentException("No data used in Mean Monthly calculation.");
    }
    for (int i = 0; i < 12; i++) {
        // Months without samples yield NaN (0.0 / 0), preserving the prior
        // behavior for sparse data sets.
        meanMonthly[i] = monthlySum[i] / monthlyCount[i];
    }
    return meanMonthly;
}
|
java
|
// Removes every entry whose key satisfies the predicate; returns the number
// of removed entries. Handles the sentinel "empty key" slot separately, then
// scans the open-addressing table, compacting with shiftConflictingKeys so
// probe chains stay intact.
@Override
public int removeAll(KTypePredicate<? super KType> predicate) {
    final int before = size();
    if (hasEmptyKey) {
        if (predicate.apply(Intrinsics.<KType> empty())) {
            // The empty key's value lives in the extra slot at mask + 1.
            hasEmptyKey = false;
            values[mask + 1] = Intrinsics.<VType> empty();
        }
    }
    final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
    for (int slot = 0, max = this.mask; slot <= max;) {
        KType existing;
        if (!Intrinsics.<KType> isEmpty(existing = keys[slot]) &&
            predicate.apply(existing)) {
            // Shift, do not increment slot.
            shiftConflictingKeys(slot);
        } else {
            slot++;
        }
    }
    return before - size();
}
|
java
|
// Creates a MAT-file header with the default descriptive text (stamped with
// the creation time), default version and endian indicator, big-endian order.
public static MatFileHeader createHeader()
{
    final String description = DEFAULT_DESCRIPTIVE_TEXT + new Date().toString();
    return new MatFileHeader(description,
                             DEFAULT_VERSION,
                             DEFAULT_ENDIAN_INDICATOR,
                             ByteOrder.BIG_ENDIAN);
}
|
java
|
// A name containing "://" past position 0 (e.g. "http://host/x") is treated
// as an external reference.
private boolean isExternal(String filename) {
    return filename.indexOf("://") > 0;
}
|
java
|
// SDK dispatch wrapper: runs the standard pre-execution hooks on the request,
// then delegates to the generated executor.
@Override
public GetUserAttributeVerificationCodeResult getUserAttributeVerificationCode(GetUserAttributeVerificationCodeRequest request) {
    request = beforeClientExecution(request);
    return executeGetUserAttributeVerificationCode(request);
}
|
java
|
public boolean isPresolved() {
    // Presolved means every direct dependency has an empty set of unresolved
    // transitive dependencies (allMatch is the De Morgan dual of the original
    // "no dependency has a non-empty set" check).
    return getDirectDeps().stream()
            .allMatch(directDep -> getTransientDeps(directDep).isEmpty());
}
|
java
|
// Thin delegation to IterUtil: joins the iterator's elements into a single
// string separated by the given conjunction.
public static <T> String join(Iterator<T> iterator, CharSequence conjunction) {
    return IterUtil.join(iterator, conjunction);
}
|
java
|
// Stores a defensive copy of the given permissions; a null argument clears
// the field entirely.
public void setPermissions(java.util.Collection<String> permissions) {
    this.permissions = (permissions == null)
            ? null
            : new java.util.ArrayList<String>(permissions);
}
|
python
|
def replace_pattern(name,
                    pattern,
                    repl,
                    count=0,
                    flags=8,
                    bufsize=1,
                    append_if_not_found=False,
                    prepend_if_not_found=False,
                    not_found_content=None,
                    search_only=False,
                    show_changes=True,
                    backslash_literal=False,
                    source='running',
                    path=None,
                    test=False,
                    replace=True,
                    debug=False,
                    commit=True):
    '''
    .. versionadded:: 2019.2.0
    Replace occurrences of a pattern in the configuration source. If
    ``show_changes`` is ``True``, then a diff of what changed will be returned,
    otherwise a ``True`` will be returned when changes are made, and ``False``
    when no changes are made.
    This is a pure Python implementation that wraps Python's :py:func:`~re.sub`.
    pattern
        A regular expression, to be matched using Python's
        :py:func:`~re.search`.
    repl
        The replacement text.
    count: ``0``
        Maximum number of pattern occurrences to be replaced. If count is a
        positive integer ``n``, only ``n`` occurrences will be replaced,
        otherwise all occurrences will be replaced.
    flags (list or int): ``8``
        A list of flags defined in the ``re`` module documentation from the
        Python standard library. Each list item should be a string that will
        correlate to the human-friendly flag name. E.g., ``['IGNORECASE',
        'MULTILINE']``. Optionally, ``flags`` may be an int, with a value
        corresponding to the XOR (``|``) of all the desired flags. Defaults to
        8 (which supports 'MULTILINE').
    bufsize (int or str): ``1``
        How much of the configuration to buffer into memory at once. The
        default value ``1`` processes one line at a time. The special value
        ``file`` may be specified which will read the entire file into memory
        before processing.
    append_if_not_found: ``False``
        If set to ``True``, and pattern is not found, then the content will be
        appended to the file.
    prepend_if_not_found: ``False``
        If set to ``True`` and pattern is not found, then the content will be
        prepended to the file.
    not_found_content
        Content to use for append/prepend if not found. If None (default), uses
        ``repl``. Useful when ``repl`` uses references to group in pattern.
    search_only: ``False``
        If set to true, this no changes will be performed on the file, and this
        function will simply return ``True`` if the pattern was matched, and
        ``False`` if not.
    show_changes: ``True``
        If ``True``, return a diff of changes made. Otherwise, return ``True``
        if changes were made, and ``False`` if not.
    backslash_literal: ``False``
        Interpret backslashes as literal backslashes for the repl and not
        escape characters. This will help when using append/prepend so that
        the backslashes are not interpreted for the repl on the second run of
        the state.
    source: ``running``
        The configuration source. Choose from: ``running``, ``candidate``, or
        ``startup``. Default: ``running``.
    path
        Save the temporary configuration to a specific path, then read from
        there.
    test: ``False``
        Dry run? If set as ``True``, will apply the config, discard and return
        the changes. Default: ``False`` and will commit the changes on the
        device.
    commit: ``True``
        Commit the configuration changes? Default: ``True``.
    debug: ``False``
        Debug mode. Will insert a new key in the output dictionary, as
        ``loaded_config`` containing the raw configuration loaded on the device.
    replace: ``True``
        Load and replace the configuration. Default: ``True``.
    If an equal sign (``=``) appears in an argument to a Salt command it is
    interpreted as a keyword argument in the format ``key=val``. That
    processing can be bypassed in order to pass an equal sign through to the
    remote shell command by manually specifying the kwarg:
    State SLS Example:
    .. code-block:: yaml
        update_policy_name:
          netconfig.replace_pattern:
            - pattern: OLD-POLICY-NAME
            - repl: new-policy-name
            - debug: true
    '''
    # Start from the standard "no changes yet" state return structure.
    ret = salt.utils.napalm.default_ret(name)
    # the user can override the flags the equivalent CLI args
    # which have higher precedence
    test = __salt__['config.merge']('test', test)
    debug = __salt__['config.merge']('debug', debug)
    commit = __salt__['config.merge']('commit', commit)
    replace = __salt__['config.merge']('replace', replace)  # this might be a bit risky
    # Delegate the actual regex replacement to the execution module; this
    # state is a thin wrapper that only merges option overrides and shapes
    # the state return.
    replace_ret = __salt__['net.replace_pattern'](pattern,
                                                  repl,
                                                  count=count,
                                                  flags=flags,
                                                  bufsize=bufsize,
                                                  append_if_not_found=append_if_not_found,
                                                  prepend_if_not_found=prepend_if_not_found,
                                                  not_found_content=not_found_content,
                                                  search_only=search_only,
                                                  show_changes=show_changes,
                                                  backslash_literal=backslash_literal,
                                                  source=source,
                                                  path=path,
                                                  test=test,
                                                  replace=replace,
                                                  debug=debug,
                                                  commit=commit)
    # Translate the execution-module result into a state-compatible return.
    return salt.utils.napalm.loaded_ret(ret, replace_ret, test, debug)
|
python
|
def get_checkerboard_matrix(kernel_width, kernel_type="default", gaussian_param=0.1):
    """Build a ``2*kernel_width x 2*kernel_width`` checkerboard kernel.

    Example for ``kernel_width=2`` (``kernel_type="default"``)::

        -1 -1  1  1
        -1 -1  1  1
         1  1 -1 -1
         1  1 -1 -1

    :param kernel_width: half-width; each quadrant is
        ``kernel_width x kernel_width``.
    :param kernel_type: one of ``"default"``, ``"gaussian"``,
        ``"bottom_right"``, ``"top_left"``.
    :param gaussian_param: parameter forwarded to the gaussian kernel builder.
    :return: the kernel as a 2-D numpy array.
    :raises ValueError: for an unknown ``kernel_type``.
    """
    # BUG FIX: the original compared strings with "is", which relies on
    # CPython string interning and can silently fail for equal but
    # non-identical strings; "==" compares values.
    if kernel_type == "gaussian":
        return get_gaussian_kernel(kernel_width, gaussian_param)
    # np.hstack/np.vstack copy their inputs, so one shared block is safe.
    ones = np.ones((kernel_width, kernel_width))
    if kernel_type == "default":
        return np.vstack((np.hstack((-ones, ones)),
                          np.hstack((ones, -ones))))
    if kernel_type == "bottom_right":
        return np.vstack((np.hstack((ones, ones)),
                          np.hstack((ones, -ones))))
    if kernel_type == "top_left":
        return np.vstack((np.hstack((-ones, ones)),
                          np.hstack((ones, ones))))
    # Previously an unknown type fell through and silently returned None.
    raise ValueError("unknown kernel_type: {!r}".format(kernel_type))
|
java
|
// Minimizes the condition expression of an EXPR_RESULT node. The minimizer
// may produce a form with a leading NOT; since the value of an expression
// statement is discarded, that leading NOT is dropped.
private Node tryMinimizeExprResult(Node n) {
    Node originalExpr = n.getFirstChild();
    MinimizedCondition minCond = MinimizedCondition.fromConditionNode(originalExpr);
    MeasuredNode mNode =
        minCond.getMinimized(MinimizationStyle.ALLOW_LEADING_NOT);
    if (mNode.isNot()) {
        // Remove the leading NOT in the EXPR_RESULT.
        replaceNode(originalExpr, mNode.withoutNot());
    } else {
        replaceNode(originalExpr, mNode);
    }
    return n;
}
|
python
|
def _TransmitBreakpointUpdates(self, service):
    """Tries to send pending breakpoint updates to the backend.

    Sends all the pending breakpoint updates. In case of transient failures,
    the breakpoint is inserted back to the top of the queue. Application
    failures are not retried (for example updating breakpoint in a final
    state).

    Each pending breakpoint maintains a retry counter. After repeated
    transient failures the breakpoint is discarded and dropped from the queue.

    Args:
        service: client to use for API calls

    Returns:
        (reconnect, timeout) tuple. The first element ("reconnect") is set to
        true on unexpected HTTP responses. The caller should discard the HTTP
        connection and create a new one. The second element ("timeout") is
        set to None if all pending breakpoints were sent successfully.
        Otherwise returns time interval in seconds to stall before retrying.
    """
    reconnect = False
    retry_list = []
    # There is only one consumer, so two step pop is safe.
    while self._transmission_queue:
        # IDIOM FIX: local renamed from "breakpoint", which shadows the
        # Python 3.7+ builtin of the same name.
        bp, retry_count = self._transmission_queue.popleft()
        try:
            service.debuggees().breakpoints().update(
                debuggeeId=self._debuggee_id, id=bp['id'],
                body={'breakpoint': bp}).execute()
            native.LogInfo('Breakpoint %s update transmitted successfully' % (
                bp['id']))
        except apiclient.errors.HttpError as err:
            # Treat 400 error codes (except timeout) as application error that
            # will not be retried. All other errors are assumed to be
            # transient.
            status = err.resp.status
            is_transient = ((status >= 500) or (status == 408))
            if is_transient and retry_count < self.max_transmit_attempts - 1:
                native.LogInfo('Failed to send breakpoint %s update: %s' % (
                    bp['id'], traceback.format_exc()))
                retry_list.append((bp, retry_count + 1))
            elif is_transient:
                native.LogWarning(
                    'Breakpoint %s retry count exceeded maximum' % bp['id'])
            else:
                # This is very common if multiple instances are sending final
                # update simultaneously.
                native.LogInfo('%s, breakpoint: %s' % (err, bp['id']))
        except BaseException:
            # Unknown failure mode: ask the caller to rebuild the connection.
            native.LogWarning(
                'Fatal error sending breakpoint %s update: %s' % (
                    bp['id'], traceback.format_exc()))
            reconnect = True
    self._transmission_queue.extend(retry_list)
    if not self._transmission_queue:
        self.update_backoff.Succeeded()
        # Nothing to send, wait until next breakpoint update.
        return (reconnect, None)
    else:
        return (reconnect, self.update_backoff.Failed())
|
java
|
// Deletes each business id in idList from the given table and returns the
// sum of the per-row status codes reported by the DAO.
public Integer delInfoByIdListService(List<String> idList,String tableName){
    int status=0;
    // NOTE(review): tempDbType is computed but never used below -- confirm
    // whether calcuDbType() is needed for side effects or can be removed.
    String tempDbType=calcuDbType();
    // Name of the business-key column used for the deletes.
    String tempKeyId=calcuIdKey();
    for(String id:idList){
        int retStatus=getInnerDao().delObjByBizId(tableName, id, tempKeyId);
        status=status+retStatus;
    }
    return status;
}
|
java
|
// Appends the full stack trace of the record's throwable (if any) to the
// message buffer; records without a throwable are left untouched.
private static void appendThrown(StringBuilder message, LogRecord event)
{
    final Throwable thrown = event.getThrown();
    if (thrown == null)
    {
        return;
    }
    final StringWriter trace = new StringWriter();
    thrown.printStackTrace(new PrintWriter(trace));
    message.append(trace);
}
|
java
|
// Lazily-initializing accessor: returns the warnings list, creating an empty
// SDK-internal list on first access so callers never see null.
public java.util.List<DocumentServiceWarning> getWarnings() {
    if (warnings == null) {
        warnings = new com.amazonaws.internal.SdkInternalList<DocumentServiceWarning>();
    }
    return warnings;
}
|
python
|
def unstruct_strat(self):
    # type: () -> UnstructureStrategy
    """The default way of unstructuring ``attrs`` classes."""
    # The active strategy is inferred from which bound method is installed
    # as the unstructure hook.
    if self._unstructure_attrs == self.unstructure_attrs_asdict:
        return UnstructureStrategy.AS_DICT
    return UnstructureStrategy.AS_TUPLE
|
java
|
// Sets the preserveAspectRatio attribute on the document's root <svg>
// element. Fails fast when no document has been parsed yet.
@SuppressWarnings({"WeakerAccess", "unused"})
public void setDocumentPreserveAspectRatio(PreserveAspectRatio preserveAspectRatio)
{
    if (this.rootElement == null)
        throw new IllegalArgumentException("SVG document is empty");
    this.rootElement.preserveAspectRatio = preserveAspectRatio;
}
|
java
|
// Removes cluster-state entries, heartbeat caches, timeouts and blobstore
// keys for topologies that are no longer active, while protecting the
// built-in system topologies from cleanup.
public void cleanupDisappearedTopology() throws Exception {
    StormClusterState clusterState = nimbusData.getStormClusterState();
    List<String> activeTopologies = clusterState.active_storms();
    if (activeTopologies == null) {
        return;
    }
    Set<String> cleanupIds = get_cleanup_ids(clusterState, activeTopologies);
    // System topologies must never be garbage-collected.
    for (String sysTopology : JStormMetrics.SYS_TOPOLOGIES) {
        cleanupIds.remove(sysTopology);
    }
    for (String topologyId : cleanupIds) {
        LOG.info("Cleaning up " + topologyId);
        clusterState.try_remove_storm(topologyId);
        nimbusData.getTaskHeartbeatsCache().remove(topologyId);
        nimbusData.getTasksHeartbeat().remove(topologyId);
        NimbusUtils.removeTopologyTaskTimeout(nimbusData, topologyId);
        // delete topology files in blobstore
        List<String> deleteKeys = BlobStoreUtils.getKeyListFromId(nimbusData, topologyId);
        BlobStoreUtils.cleanup_keys(deleteKeys, nimbusData.getBlobStore(), nimbusData.getStormClusterState());
        // don't need to delete local dir
    }
}
|
python
|
async def dbpoolStats(self, *args, **kwargs):
    """
    Statistics on the Database client pool.

    This method is only for debugging the ec2-manager.
    This method is ``experimental``.

    Positional and keyword arguments are forwarded unchanged to the
    underlying API call described by ``funcinfo["dbpoolStats"]``.
    """
    return await self._makeApiCall(self.funcinfo["dbpoolStats"], *args, **kwargs)
|
java
|
// SDK dispatch wrapper: runs the standard pre-execution hooks on the request,
// then delegates to the generated executor.
@Override
public DeleteTableVersionResult deleteTableVersion(DeleteTableVersionRequest request) {
    request = beforeClientExecution(request);
    return executeDeleteTableVersion(request);
}
|
java
|
// Static factory: builds a queued keyed resource pool from the given
// resource factory and pool configuration.
public static <K, V> QueuedKeyedResourcePool<K, V> create(ResourceFactory<K, V> factory,
                                                          ResourcePoolConfig config) {
    return new QueuedKeyedResourcePool<K, V>(factory, config);
}
|
java
|
// Finds the commerce subscription entry for the (CPInstanceUuid, CProductId,
// commerceOrderItemId) key and removes it; findByC_C_C throws
// NoSuchSubscriptionEntryException when no matching entry exists.
@Override
public CommerceSubscriptionEntry removeByC_C_C(String CPInstanceUuid,
    long CProductId, long commerceOrderItemId)
    throws NoSuchSubscriptionEntryException {
    CommerceSubscriptionEntry commerceSubscriptionEntry = findByC_C_C(CPInstanceUuid,
        CProductId, commerceOrderItemId);
    return remove(commerceSubscriptionEntry);
}
|
python
|
def setupTxns(self, key, force: bool = False):
    """Copy the base genesis transaction files into the base directory.

    :param key: attribute name on ``Environment`` that names the ledger file
    :param force: when True, overwrite destination files that already exist
    :return: self, for chaining
    """
    import data
    data_dir = os.path.dirname(data.__file__)
    # TODO: Need to get "test" and "live" from ENVS property in config.py
    # but that gives error due to some dependency issue
    environments = {
        "local": Environment("pool_transactions_local",
                             "domain_transactions_local"),
        "test": Environment("pool_transactions_sandbox",
                            "domain_transactions_sandbox"),
        "live": Environment("pool_transactions_live",
                            "domain_transactions_live"),
    }
    for env in environments.values():
        file_name = getattr(env, key, None)
        if not file_name:
            continue
        source_path = os.path.join(data_dir, file_name)
        if not os.path.exists(source_path):
            continue
        dest_path = os.path.join(
            self.base_dir, genesis_txn_file(file_name))
        if os.path.exists(dest_path) and not force:
            # Keep the existing file unless the caller asked to replace it.
            continue
        copyfile(source_path, dest_path)
    return self
|
python
|
def coerce(val: t.Any,
           coerce_type: t.Optional[t.Type] = None,
           coercer: t.Optional[t.Callable] = None) -> t.Any:
    """Cast ``val`` to ``coerce_type`` using ``coercer``.

    The value passes through unchanged when no target type and no coercer
    are given, or when its type already matches ``coerce_type`` exactly.
    For ``coerce_type=bool`` with no explicit ``coercer``,
    :func:`~django_docker_helpers.utils.coerce_str_to_bool` is used.

    :param val: a value of any type
    :param coerce_type: any type
    :param coercer: a callback that takes ``val`` and returns a value of
        the desired type
    :return: the type-casted value
    """
    if not coerce_type and not coercer:
        # Nothing requested: pass through.
        return val
    if coerce_type and type(val) is coerce_type:
        # Already exactly the target type (subclasses are still coerced).
        return val
    if coerce_type and coerce_type is bool and not coercer:
        coercer = coerce_str_to_bool
    converter = coercer if coercer is not None else coerce_type
    return converter(val)
|
python
|
def upper2_for_ramp_wall(self) -> Set[Point2]:
    """ Returns the 2 upper ramp points of the main base ramp required for the supply depot and barracks placement properties used in this file. """
    if len(self.upper) > 5:
        # NOTE: this was way too slow on large ramps
        return set()  # HACK: makes this work for now
    # FIXME: please do
    # Sort by distance to the bottom center, farthest first, and keep the
    # two farthest points.
    farthest_first = sorted(
        self.upper,
        key=lambda point: point.distance_to(self.bottom_center),
        reverse=True,
    )
    return set(farthest_first[:2])
|
java
|
// True when the given index addresses a non-null command-line parameter.
@Pure
@SuppressWarnings("static-method")
public boolean isParameterExists(int index) {
    final String[] parameters = getCommandLineParameters();
    if (index < 0 || index >= parameters.length) {
        return false;
    }
    return parameters[index] != null;
}
|
java
|
// Removes the meta data entry for the given key; a null key is a framework
// bug. A node that never had meta data is left untouched.
public void removeNodeMetaData(Object key) {
    if (key == null) {
        throw new GroovyBugError("Tried to remove meta data with null key " + this + ".");
    }
    if (metaDataMap != null) {
        metaDataMap.remove(key);
    }
}
|
java
|
// Undistorts the input image twice -- once keeping only fully-valid pixels
// ("all inside") and once keeping the full field of view -- and shows the
// original plus both results in a single switchable window.
private static void displayResults(BufferedImage orig,
                                   Planar<GrayF32> distortedImg,
                                   ImageDistort allInside, ImageDistort fullView ) {
    // render the results
    Planar<GrayF32> undistortedImg = new Planar<>(GrayF32.class,
            distortedImg.getWidth(),distortedImg.getHeight(),distortedImg.getNumBands());
    allInside.apply(distortedImg, undistortedImg);
    BufferedImage out1 = ConvertBufferedImage.convertTo(undistortedImg, null,true);
    fullView.apply(distortedImg,undistortedImg);
    BufferedImage out2 = ConvertBufferedImage.convertTo(undistortedImg, null,true);
    // display in a single window where the user can easily switch between images
    ListDisplayPanel panel = new ListDisplayPanel();
    panel.addItem(new ImagePanel(orig), "Original");
    panel.addItem(new ImagePanel(out1), "Undistorted All Inside");
    panel.addItem(new ImagePanel(out2), "Undistorted Full View");
    ShowImages.showWindow(panel, "Removing Lens Distortion", true);
}
|
python
|
def rename_axis(self, mapper, axis, copy=True, level=None):
    """
    Rename one of axes.

    Parameters
    ----------
    mapper : unary callable
        Function applied to each label on the axis.
    axis : int
        Index of the axis whose labels are transformed.
    copy : boolean, default True
        Whether to deep-copy the underlying data before renaming.
    level : int, default None
        For a MultiIndex axis, the level to apply the mapper to.

    Returns
    -------
    A copy of this object with the selected axis relabelled.
    """
    obj = self.copy(deep=copy)
    obj.set_axis(axis, _transform_index(self.axes[axis], mapper, level))
    return obj
|
python
|
def genenare_callmap_sif(self, filepath):
    """
    Generate a SIF (simple interaction format) file from the call map.

    One ``0x<src>\\tDirectEdge\\t0x<dst>`` line is written per call edge.

    :param filepath: path of the file to write
    :raises AngrGirlScoutError: if the call map has not been generated yet
    """
    graph = self.call_map
    if graph is None:
        raise AngrGirlScoutError('Please generate the call graph first.')
    # BUG FIX: the file was opened in binary mode ("wb") while str data was
    # written, which raises TypeError on Python 3. Open in text mode and use
    # a context manager so the handle is closed even when a write fails.
    with open(filepath, "w") as f:
        for src, dst in graph.edges():
            f.write("0x%x\tDirectEdge\t0x%x\n" % (src, dst))
|
python
|
def oem_name(self, value):
    """The oem_name property.

    Args:
        value (string). the property value.
    """
    key = 'ai.device.oemName'
    if value == self._defaults[key] and key in self._values:
        # Writing the default back removes the explicit override.
        del self._values[key]
    else:
        self._values[key] = value
|
java
|
// Async wrapper: starts the subscription-level policy-assignment summary call
// and adapts the observable response into a ServiceFuture that notifies the
// given callback.
public ServiceFuture<SummarizeResultsInner> summarizeForSubscriptionLevelPolicyAssignmentAsync(String subscriptionId, String policyAssignmentName, QueryOptions queryOptions, final ServiceCallback<SummarizeResultsInner> serviceCallback) {
    return ServiceFuture.fromResponse(summarizeForSubscriptionLevelPolicyAssignmentWithServiceResponseAsync(subscriptionId, policyAssignmentName, queryOptions), serviceCallback);
}
|
java
|
// Picks a uniformly random host among those in activeHosts that report
// themselves as up.
// NOTE(review): a fresh Random is created per call, and nextInt throws
// IllegalArgumentException when no host is up -- confirm callers never pass
// an all-down set before hardening this.
public Host getRandomHost(Set<Host> activeHosts) {
    Random random = new Random();
    List<Host> hostsUp = new ArrayList<Host>(CollectionUtils.filter(activeHosts, new Predicate<Host>() {
        @Override
        public boolean apply(Host x) {
            return x.isUp();
        }
    }));
    return hostsUp.get(random.nextInt(hostsUp.size()));
}
|
python
|
def scroll_deck(self, decknum, scroll_x, scroll_y):
    """Move a deck.

    Applies the horizontal and vertical scroll offsets to deck *decknum*
    by delegating to the per-axis scroll methods.
    """
    self.scroll_deck_x(decknum, scroll_x)
    self.scroll_deck_y(decknum, scroll_y)
|
python
|
async def set_async(self, type_name, entity):
    """Sets an entity asynchronously using the API. Shortcut for using
    async_call() with the 'Set' method.

    :param type_name: The type of entity
    :param entity: The entity to set
    :return: whatever the underlying 'Set' API call returns
    :raise MyGeotabException: Raises when an exception occurs on the MyGeotab server
    """
    return await self.call_async('Set', type_name=type_name, entity=entity)
|
python
|
async def _executemany(self, query, dps, cursor):
    """Execute *query* once per parameter set in *dps* on *cursor*.

    Plain SQL strings are executed as-is; SQLAlchemy ClauseElements are
    compiled once and their parameters adapted per row. DDL clauses cannot
    take parameter sets and are rejected.

    Returns a result proxy wrapping the cursor; the proxy is also tracked
    in ``self._weak_results`` so open results can be cleaned up later.
    """
    result_map = None
    if isinstance(query, str):
        await cursor.executemany(query, dps)
    elif isinstance(query, DDLElement):
        raise exc.ArgumentError(
            "Don't mix sqlalchemy DDL clause "
            "and execution with parameters"
        )
    elif isinstance(query, ClauseElement):
        # Compile once; re-derive the bound parameters for every row.
        compiled = query.compile(dialect=self._dialect)
        params = []
        is_update = isinstance(query, UpdateBase)
        for dp in dps:
            params.append(
                self._base_params(
                    query,
                    dp,
                    compiled,
                    is_update,
                )
            )
        await cursor.executemany(str(compiled), params)
        # Column description needed to build the result proxy rows.
        result_map = compiled._result_columns
    else:
        raise exc.ArgumentError(
            "sql statement should be str or "
            "SQLAlchemy data "
            "selection/modification clause"
        )
    ret = await create_result_proxy(
        self,
        cursor,
        self._dialect,
        result_map
    )
    # Track weakly so dangling results can be closed with the connection.
    self._weak_results.add(ret)
    return ret
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.