language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def withColumnRenamed(self, existing, new):
    """Return a new :class:`DataFrame` with column `existing` renamed to `new`.

    This is a no-op if the schema doesn't contain the given column name.

    :param existing: string, name of the existing column to rename.
    :param new: string, new name of the column.

    >>> df.withColumnRenamed('age', 'age2').collect()
    [Row(age2=2, name=u'Alice'), Row(age2=5, name=u'Bob')]
    """
    renamed_jdf = self._jdf.withColumnRenamed(existing, new)
    return DataFrame(renamed_jdf, self.sql_ctx)
|
python
|
def static_parser(static):
    """Flatten an object describing static routes into individual entries.

    Accepts ``None`` (yields nothing), a dict (its items are yielded as
    key/value pairs), or a list whose elements are either plain entries
    (yielded as-is) or dicts (their items are yielded individually).
    """
    if static is None:
        return
    groups = static.items() if isinstance(static, dict) else static
    for group in groups:
        if isinstance(group, dict):
            for key_value in group.items():
                yield key_value
        else:
            yield group
|
java
|
/**
 * Updates the designated column of the current row with the given Clob value,
 * delegating to the wrapped ResultSet implementation.
 *
 * @param columnName name of the column to update
 * @param c          the new Clob value
 * @throws SQLException the driver exception, mapped to a WebSphere exception
 */
public void updateClob(String columnName, java.sql.Clob c) throws SQLException {
    try {
        rsetImpl.updateClob(columnName, c);
    } catch (SQLException ex) {
        // Capture first-failure diagnostics, then rethrow the mapped exception.
        FFDCFilter.processException(ex, "com.ibm.ws.rsadapter.jdbc.WSJdbcResultSet.updateClob", "4123", this);
        throw WSJdbcUtil.mapException(this, ex);
    } catch (NullPointerException nullX) {
        // No FFDC code needed; we might be closed.
        throw runtimeXIfNotClosed(nullX);
    }
}
|
python
|
def subs(self, *args, **kwargs):
    """Substitute a symbolic expression in ``['bond', 'angle', 'dihedral']``.

    This is a wrapper around the substitution mechanism of
    `sympy <http://docs.sympy.org/latest/tutorial/basic_operations.html>`_.
    Any symbolic expression in the columns
    ``['bond', 'angle', 'dihedral']`` of ``self`` will be substituted
    with value.

    .. note:: This function is not side-effect free.
        If all symbolic expressions are evaluated and are concrete numbers
        and ``perform_checks`` is True, a check for the transformation
        to cartesian coordinates is performed.
        If no :class:`~chemcoord.exceptions.InvalidReference`
        exceptions are raised, the resulting cartesian is written to
        ``self._metadata['last_valid_cartesian']``.

    Args:
        symb_expr (sympy expression):
        value :
        perform_checks (bool): If ``perform_checks is True``,
            it is asserted, that the resulting Zmatrix can be converted
            to cartesian coordinates.
            Dummy atoms will be inserted automatically if necessary.

    Returns:
        Zmat: Zmatrix with substituted symbolic expressions.
        If all resulting sympy expressions in a column are numbers,
        the column is recasted to 64bit float.
    """
    perform_checks = kwargs.pop('perform_checks', True)
    cols = ['bond', 'angle', 'dihedral']
    out = self.copy()

    def get_subs_f(*args):
        # Build a per-element substitution function closing over *args*.
        def subs_function(x):
            if hasattr(x, 'subs'):
                x = x.subs(*args)
                # Collapse a fully evaluated expression to a plain float.
                try:
                    x = float(x)
                except TypeError:
                    pass
            return x
        return subs_function

    for col in cols:
        # Only object-dtype columns can hold sympy expressions.
        # NOTE(review): identity comparison with np.dtype('O') relies on
        # numpy's dtype caching — confirm `==` wouldn't be safer here.
        if out.loc[:, col].dtype is np.dtype('O'):
            out.unsafe_loc[:, col] = out.loc[:, col].map(get_subs_f(*args))
            # Recast to 64-bit float when every entry is now numeric.
            try:
                out.unsafe_loc[:, col] = out.loc[:, col].astype('f8')
            except (SystemError, TypeError):
                pass
    if perform_checks:
        try:
            new_cartesian = out.get_cartesian()
        except (AttributeError, TypeError):
            # Unevaluated symbolic expressions are remaining.
            pass
        except InvalidReference as e:
            if out.dummy_manipulation_allowed:
                out._manipulate_dummies(e, inplace=True)
            else:
                raise e
        else:
            # Conversion succeeded: cache the result on both the copy and self.
            out._metadata['last_valid_cartesian'] = new_cartesian
            self._metadata['last_valid_cartesian'] = new_cartesian
    return out
|
java
|
/**
 * Sleeps for at least {@code ms} milliseconds of elapsed (uptime) time,
 * re-sleeping after interrupts until the full duration has passed.
 *
 * @param ms minimum number of milliseconds to sleep
 */
public static void sleep(long ms)
{
    long start = uptimeMillis();
    long duration = ms;
    boolean interrupted = false;
    do {
        try {
            Thread.sleep(duration);
        }
        catch (InterruptedException e) {
            // Remember the interrupt but keep sleeping for the remainder.
            interrupted = true;
        }
        // Recompute how much of the requested time is still left.
        duration = start + ms - uptimeMillis();
    } while (duration > 0);
    if (interrupted) {
        // Important: we don't want to quietly eat an interrupt() event,
        // so we make sure to re-interrupt the thread so that the next
        // call to Thread.sleep() or Object.wait() will be interrupted.
        Thread.currentThread().interrupt();
    }
}
|
python
|
def configure(ctx, helper, edit):
    '''
    Update configuration.

    Reads (or creates) the config file, merges key=value pairs from the
    command line into the selected profile's api user_config, writes the
    file, regenerates the API object and, when a manager is present,
    prompts for any required user-config values before writing again.
    '''
    ctx.obj.config = ConfigFile(ctx.obj.config_file)
    if edit:
        # --edit: open the file in an editor and do nothing else.
        ctx.obj.config.edit_config_file()
        return
    if os.path.isfile(ctx.obj.config.config_file):
        ctx.obj.config.read_config()
    if ctx.obj.profile is None:
        ctx.obj.profile = ctx.obj.config.default_profile
    args, kwargs = _parse_args_and_kwargs(ctx.args)
    # Only key=value settings are accepted; bare positionals are an error.
    assert len(args) == 0, 'Unrecognized arguments: "{}"'.format(args)
    if ctx.obj.profile not in ctx.obj.config.config['profiles']:
        # First use of this profile: seed an empty profile skeleton.
        ctx.obj.config.config['profiles'][ctx.obj.profile] = {
            'api': {'user_config': {}}, 'manager': {}, 'authorities': {}}
    profile_config = ctx.obj.config.config['profiles'][ctx.obj.profile]
    profile_config['api']['user_config'].update(kwargs)
    ctx.obj.config.write_config(ctx.obj.config_file)
    _generate_api(ctx)
    if ctx.obj.api.manager is not None:
        # Interactively collect any manager-required settings still missing,
        # then persist them.
        check_requirements(
            to_populate=profile_config['api']['user_config'],
            prompts=ctx.obj.api.manager.required_user_config,
            helper=helper)
        ctx.obj.config.write_config(ctx.obj.config_file)
|
python
|
def completions(self, symbol, attribute, recursive = False):
    """Finds all possible symbol completions of the given symbol that belong
    to this module and its dependencies.

    :arg symbol: the code symbol that needs to be completed.
    :arg attribute: one of ['dependencies', 'publics', 'members',
      'types', 'executables'] for specifying which collections to search.
    :arg recursive: when True, also search already-loaded dependency modules.
    """
    matches = [entry for entry in self.collection(attribute) if symbol in entry]
    if recursive:
        for depkey in self.dependencies:
            # Completions need to be fast: only consult dependency modules
            # that are already loaded, and silently skip the rest.
            if depkey in self.parent.modules:
                matches.extend(
                    self.parent.modules[depkey].completions(symbol, attribute))
    return matches
|
python
|
def put(self, measurementId, deviceId):
"""
Initialises the measurement session from the given device.
:param measurementId:
:param deviceId:
:return:
"""
logger.info('Starting measurement ' + measurementId + ' for ' + deviceId)
if self._measurementController.startMeasurement(measurementId, deviceId):
logger.info('Started measurement ' + measurementId + ' for ' + deviceId)
return None, 200
else:
logger.warning('Failed to start measurement ' + measurementId + ' for ' + deviceId)
return None, 404
|
python
|
def parse(self, parser):
    """Main method to render data into the template.

    Jinja2 extension hook: consumes the tag token, supports an optional
    ``short`` modifier expression, and emits an Output node that calls
    ``_commit_hash`` with it.
    """
    lineno = next(parser.stream).lineno
    # Optional "short" modifier; the skipped token is presumably the
    # assignment/separator after the name — TODO confirm against usage.
    if parser.stream.skip_if('name:short'):
        parser.stream.skip(1)
        short = parser.parse_expression()
    else:
        short = nodes.Const(False)
    result = self.call_method('_commit_hash', [short], [], lineno=lineno)
    return nodes.Output([result], lineno=lineno)
|
python
|
def handle_start_scan_command(self, scan_et):
    """ Handles <start_scan> command.

    Parses targets, scan id, parallelism, scanner params and optional VT
    selection from the element tree, creates the scan and launches it in
    a separate process.

    @return: Response string for <start_scan> command.
    """
    target_str = scan_et.attrib.get('target')
    ports_str = scan_et.attrib.get('ports')
    # For backward compatibility, if target and ports attributes are set,
    # <targets> element is ignored.
    if target_str is None or ports_str is None:
        target_list = scan_et.find('targets')
        if target_list is None or not target_list:
            raise OSPDError('No targets or ports', 'start_scan')
        else:
            scan_targets = self.process_targets_element(target_list)
    else:
        # Each target entry is [host, ports, credentials] with empty creds.
        scan_targets = []
        for single_target in target_str_to_list(target_str):
            scan_targets.append([single_target, ports_str, ''])
    # An explicit scan_id, when given and non-empty, must be a valid UUID.
    scan_id = scan_et.attrib.get('scan_id')
    if scan_id is not None and scan_id != '' and not valid_uuid(scan_id):
        raise OSPDError('Invalid scan_id UUID', 'start_scan')
    try:
        # Parallelism is clamped to 1..20; out-of-range values fall back to 1.
        parallel = int(scan_et.attrib.get('parallel', '1'))
        if parallel < 1 or parallel > 20:
            parallel = 1
    except ValueError:
        raise OSPDError('Invalid value for parallel scans. '
                        'It must be a number', 'start_scan')
    scanner_params = scan_et.find('scanner_params')
    if scanner_params is None:
        raise OSPDError('No scanner_params element', 'start_scan')
    params = self._preprocess_scan_params(scanner_params)
    # VTS is an optional element. If present should not be empty.
    vt_selection = {}
    scanner_vts = scan_et.find('vt_selection')
    if scanner_vts is not None:
        if not scanner_vts:
            raise OSPDError('VTs list is empty', 'start_scan')
        else:
            vt_selection = self.process_vts_params(scanner_vts)
    # Dry run case.
    if 'dry_run' in params and int(params['dry_run']):
        scan_func = self.dry_run_scan
        scan_params = None
    else:
        scan_func = self.start_scan
        scan_params = self.process_scan_params(params)
    scan_id = self.create_scan(scan_id, scan_targets,
                               scan_params, vt_selection)
    # Run the scan in its own process so this handler returns immediately;
    # the process handle is kept for later status/stop commands.
    scan_process = multiprocessing.Process(target=scan_func,
                                           args=(scan_id,
                                                 scan_targets,
                                                 parallel))
    self.scan_processes[scan_id] = scan_process
    scan_process.start()
    id_ = Element('id')
    id_.text = scan_id
    return simple_response_str('start_scan', 200, 'OK', id_)
|
java
|
/**
 * Fetches all lbgroup_lbvserver_binding resources configured for the given
 * load-balancing group name.
 *
 * @param service nitro service on which the request is executed
 * @param name    name of the lb group whose lbvserver bindings are fetched
 * @return the bindings retrieved from the appliance
 * @throws Exception if the fetch fails
 */
public static lbgroup_lbvserver_binding[] get(nitro_service service, String name) throws Exception{
    lbgroup_lbvserver_binding obj = new lbgroup_lbvserver_binding();
    obj.set_name(name);
    lbgroup_lbvserver_binding response[] = (lbgroup_lbvserver_binding[]) obj.get_resources(service);
    return response;
}
|
java
|
/**
 * Invokes every registered {@link IConfigModifier} service whose
 * {@code (name=...)} filter matches this aggregator's name, giving each a
 * chance to modify the raw config object. A failing modifier is logged but
 * does not prevent the remaining modifiers from running.
 *
 * @param rawConfig the parsed config object handed to each modifier
 */
protected void callConfigModifiers(Scriptable rawConfig) {
    if( aggregator.getPlatformServices() != null){
        IServiceReference[] refs = null;
        try {
            // Look up IConfigModifier services registered for this aggregator.
            refs = aggregator.getPlatformServices().getServiceReferences(IConfigModifier.class.getName(), "(name="+getAggregator().getName()+")"); //$NON-NLS-1$ //$NON-NLS-2$
        } catch (PlatformServicesException e) {
            if (log.isLoggable(Level.SEVERE)) {
                log.log(Level.SEVERE, e.getMessage(), e);
            }
        }
        if (refs != null) {
            for (IServiceReference ref : refs) {
                IConfigModifier modifier =
                        (IConfigModifier) aggregator.getPlatformServices().getService(ref);
                if (modifier != null) {
                    try {
                        modifier.modifyConfig(getAggregator(), rawConfig);
                    } catch (Exception e) {
                        // Log and continue with the next modifier.
                        if (log.isLoggable(Level.SEVERE)) {
                            log.log(Level.SEVERE, e.getMessage(), e);
                        }
                    } finally {
                        // Always release the service reference.
                        aggregator.getPlatformServices().ungetService(ref);
                    }
                }
            }
        }
    }
}
|
python
|
def validate_listeners(self):
    """Validates that some listeners are actually registered.

    Re-raises any exception captured earlier, then fails if every
    per-thread listener group is empty.
    """
    if self.exception:
        # Propagate an error recorded on another code path.
        # pylint: disable=raising-bad-type
        raise self.exception
    total = sum(len(group) for group in self.__listeners_for_thread)
    if not total:
        raise ValueError("No active listeners")
|
java
|
/**
 * Filters the given discovery entries in place, removing every entry whose
 * provider version is not compatible with {@code callerVersion}, and returns
 * the same (now filtered) set. When {@code discoveredVersions} is non-null,
 * every provider version encountered — including filtered-out ones — is
 * recorded in it, keyed by domain.
 *
 * @param callerVersion      version required by the caller; must not be null
 * @param discoveryEntries   entries to filter (mutated); must not be null
 * @param discoveredVersions optional out-map of provider versions per domain
 * @return the filtered {@code discoveryEntries} set
 */
public Set<DiscoveryEntryWithMetaInfo> filter(Version callerVersion,
                                              Set<DiscoveryEntryWithMetaInfo> discoveryEntries,
                                              Map<String, Set<Version>> discoveredVersions) {
    if (callerVersion == null || discoveryEntries == null) {
        throw new IllegalArgumentException(String.format("Neither callerVersion (%s) nor discoveryEntries (%s) can be null.",
                                                         callerVersion,
                                                         discoveryEntries));
    }
    Iterator<DiscoveryEntryWithMetaInfo> iterator = discoveryEntries.iterator();
    while (iterator.hasNext()) {
        DiscoveryEntry discoveryEntry = iterator.next();
        if (discoveredVersions != null) {
            // Record the provider version under its domain, creating the
            // per-domain set on demand.
            Set<Version> versionsByDomain = discoveredVersions.get(discoveryEntry.getDomain());
            if (versionsByDomain == null) {
                versionsByDomain = new HashSet<>();
                discoveredVersions.put(discoveryEntry.getDomain(), versionsByDomain);
            }
            versionsByDomain.add(discoveryEntry.getProviderVersion());
        }
        if (!versionCompatibilityChecker.check(callerVersion, discoveryEntry.getProviderVersion())) {
            iterator.remove();
        }
    }
    return discoveryEntries;
}
|
java
|
/**
 * Writes a non-negative long as a base-128 varint: low 7 bits first, with the
 * high bit (0x80) of every byte set while more bytes follow.
 *
 * @param out destination stream
 * @param val value to encode; must be non-negative
 * @throws IOException if {@code val} is negative or the stream write fails
 */
public static void writeVarLongCount(DataOutput out, long val) throws IOException {
    if (val < 0) {
        throw new IOException("Illegal count (must be non-negative): " + val);
    }
    // While more than 7 significant bits remain, emit 7 payload bits plus the
    // continuation flag. DataOutput.write keeps only the low 8 bits, so the
    // OR with 0x80 effectively yields (val & 0x7F) | 0x80.
    while ((val & ~0x7FL) != 0) {
        out.write(((int) val) | 0x80);
        val >>>= 7;
    }
    out.write((int) val);
}
|
java
|
/**
 * Collects the properties of {@code node} as JcrProperty view objects, based
 * on the property definitions of its primary node type and all mixin types.
 *
 * @param repository repository name used when rendering single values
 * @param workspace  workspace name used when rendering single values
 * @param path       node path used when rendering single values
 * @param node       node whose properties are listed
 * @return one JcrProperty per property definition (value may be empty when
 *         the property is defined but not set)
 * @throws RepositoryException if type or property access fails
 */
private Collection<JcrProperty> getProperties( String repository, String workspace, String path, Node node ) throws RepositoryException {
    ArrayList<PropertyDefinition> names = new ArrayList<>();
    NodeType primaryType = node.getPrimaryNodeType();
    PropertyDefinition[] defs = primaryType.getPropertyDefinitions();
    names.addAll(Arrays.asList(defs));
    NodeType[] mixinType = node.getMixinNodeTypes();
    for (NodeType type : mixinType) {
        defs = type.getPropertyDefinitions();
        names.addAll(Arrays.asList(defs));
    }
    ArrayList<JcrProperty> list = new ArrayList<>();
    for (PropertyDefinition def : names) {
        String name = def.getName();
        String type = PropertyType.nameFromValue(def.getRequiredType());
        Property p = null;
        try {
            p = node.getProperty(def.getName());
        } catch (PathNotFoundException e) {
            // Property is defined by the type but not set on this node;
            // p deliberately stays null.
        }
        String display = values(def, p);
        String value = def.isMultiple() ? multiValue(p)
                : singleValue(p, def, repository, workspace, path);
        list.add(new JcrProperty(name, type, value, display));
    }
    return list;
}
|
java
|
/**
 * Marshalls the given request's fields into the protocol representation.
 *
 * @param getCrawlerRequest  request to marshall; must not be null
 * @param protocolMarshaller target protocol marshaller
 * @throws SdkClientException when the request is null or marshalling fails
 */
public void marshall(GetCrawlerRequest getCrawlerRequest, ProtocolMarshaller protocolMarshaller) {
    if (getCrawlerRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(getCrawlerRequest.getName(), NAME_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def length(self):
    """Returns the route length (cost).

    Sums the edge costs depot -> node_1 -> ... -> node_k and closes the
    tour with the depot. For intermediate edges the endpoints are ordered
    by name before the distance lookup.

    Returns
    -------
    int
        Route length (cost).
    """
    problem = self._problem
    depot = problem.depot()
    total = 0
    previous = depot
    for node in self._nodes:
        lo, hi = previous, node
        if lo.name() > hi.name():
            lo, hi = hi, lo
        total += problem.distance(lo, hi)
        previous = node
    # Close the tour back at the depot (argument order intentional here:
    # the final lookup is not name-ordered, matching the distance table).
    return total + problem.distance(depot, previous)
|
python
|
def endian(self):
    """
    The target architecture byte order. One of :class:`Target.Endian`.

    Falls back to the architecture's default byte order when no explicit
    endianness was set.
    """
    if self._endian is not None:
        return self._endian
    default = self._DEFAULT_ENDIAN[self.arch]
    if default is None:
        raise NotImplementedError('Could not determine the default byte order of %s architecture.' % self.arch)
    return default
|
java
|
/**
 * Parses a cast expression: either "( primitiveType ) unaryExpression" or
 * "( type ) unaryExpressionNotPlusMinus". ANTLR-generated parser code for
 * rule castExpression of DRL6Expressions.g; the syntactic predicates
 * synpred20/synpred21 disambiguate the two alternatives. Do not hand-edit
 * beyond comments — regenerate from the grammar instead.
 */
public final void castExpression() throws RecognitionException {
    BaseDescr expr =null;
    try {
        // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:540:5: ( ( LEFT_PAREN primitiveType )=> LEFT_PAREN primitiveType RIGHT_PAREN expr= unaryExpression | ( LEFT_PAREN type )=> LEFT_PAREN type RIGHT_PAREN unaryExpressionNotPlusMinus )
        int alt55=2;
        int LA55_0 = input.LA(1);
        if ( (LA55_0==LEFT_PAREN) ) {
            int LA55_1 = input.LA(2);
            if ( (synpred20_DRL6Expressions()) ) {
                alt55=1;
            }
            else if ( (synpred21_DRL6Expressions()) ) {
                alt55=2;
            }
            else {
                // Neither predicate matched: no viable alternative.
                if (state.backtracking>0) {state.failed=true; return;}
                int nvaeMark = input.mark();
                try {
                    input.consume();
                    NoViableAltException nvae =
                        new NoViableAltException("", 55, 1, input);
                    throw nvae;
                } finally {
                    input.rewind(nvaeMark);
                }
            }
        }
        else {
            // Cast expressions must start with '('.
            if (state.backtracking>0) {state.failed=true; return;}
            NoViableAltException nvae =
                new NoViableAltException("", 55, 0, input);
            throw nvae;
        }
        switch (alt55) {
            case 1 :
                // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:540:8: ( LEFT_PAREN primitiveType )=> LEFT_PAREN primitiveType RIGHT_PAREN expr= unaryExpression
                {
                match(input,LEFT_PAREN,FOLLOW_LEFT_PAREN_in_castExpression2727); if (state.failed) return;
                pushFollow(FOLLOW_primitiveType_in_castExpression2729);
                primitiveType();
                state._fsp--;
                if (state.failed) return;
                match(input,RIGHT_PAREN,FOLLOW_RIGHT_PAREN_in_castExpression2731); if (state.failed) return;
                pushFollow(FOLLOW_unaryExpression_in_castExpression2735);
                expr=unaryExpression();
                state._fsp--;
                if (state.failed) return;
                }
                break;
            case 2 :
                // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:541:8: ( LEFT_PAREN type )=> LEFT_PAREN type RIGHT_PAREN unaryExpressionNotPlusMinus
                {
                match(input,LEFT_PAREN,FOLLOW_LEFT_PAREN_in_castExpression2752); if (state.failed) return;
                pushFollow(FOLLOW_type_in_castExpression2754);
                type();
                state._fsp--;
                if (state.failed) return;
                match(input,RIGHT_PAREN,FOLLOW_RIGHT_PAREN_in_castExpression2756); if (state.failed) return;
                pushFollow(FOLLOW_unaryExpressionNotPlusMinus_in_castExpression2758);
                unaryExpressionNotPlusMinus();
                state._fsp--;
                if (state.failed) return;
                }
                break;
        }
    }
    catch (RecognitionException re) {
        throw re;
    }
    finally {
        // do for sure before leaving
    }
}
|
java
|
/**
 * Creates a View from the fixed/var data read from an MPP file. A split-view
 * flag at fixed-data offset 110 selects a SplitView9; otherwise the view type
 * code at offset 112 selects a GanttChartView14, with GenericView14 as the
 * fallback for all other view types.
 *
 * @param file      parent project file
 * @param fixedMeta fixed meta data block
 * @param fixedData fixed data block
 * @param varData   var data block
 * @param fontBases map of font bases used by the Gantt chart view
 * @return the constructed view
 * @throws IOException when reading the view data fails
 */
@Override public View createView(ProjectFile file, byte[] fixedMeta, byte[] fixedData, Var2Data varData, Map<Integer, FontBase> fontBases) throws IOException
{
    View view;
    int splitViewFlag = MPPUtility.getShort(fixedData, 110);
    if (splitViewFlag == 1)
    {
        view = new SplitView9(file, fixedData, varData);
    }
    else
    {
        ViewType type = ViewType.getInstance(MPPUtility.getShort(fixedData, 112));
        switch (type)
        {
            case GANTT_CHART:
            {
                view = new GanttChartView14(file, fixedMeta, fixedData, varData, fontBases);
                break;
            }

            default:
            {
                view = new GenericView14(file, fixedData, varData);
                break;
            }
        }
    }
    return (view);
}
|
python
|
def _fetch_credentials(self, session):
    """Fetch the endpoint URI and authorization token for this session.

    Those two pieces of information are the basis for all future calls to
    the Swift (OpenStack) API for the storage container; they are stored on
    ``self.endpoint`` and ``self.auth_token``.

    :param keystoneclient.Session session: The session object to use for
        queries.
    :raises keystoneclient.exceptions.AuthorizationFailure: if something
        goes wrong.
    """
    auth_headers = {
        'Authorization': 'Bearer {0}'.format(self.access_token),
    }
    reply = session.get("https://api.hubic.com/1.0/account/credentials",
                        headers=auth_headers,
                        authenticated=False)
    payload = reply.json()
    # if we get an error here, the OpenStack SDK will take care to try
    # again for us.
    if 'error' in payload:
        raise AuthorizationFailure()
    self.endpoint = payload['endpoint']
    self.auth_token = payload['token']
|
java
|
/**
 * Resolves the configured method on {@code clazz}: looks up a public method
 * matching {@code methodName}/{@code methodParams} and rejects it when the
 * required {@code returnType} is not assignable from its actual return type.
 *
 * @param clazz class to search
 * @return the matching method, or {@code null} when none qualifies
 */
private Method getMethod(Class<?> clazz) {
    Method method = null;
    if (methodName != null) {
        method = getPublicMethod(clazz, methodName, methodParams);
        if (method != null
                && returnType != null
                && !returnType.isAssignableFrom(method.getReturnType())) {
            // If the return type is non-null it must be compatible.
            method = null;
        }
    }
    return method;
}
|
python
|
def search(query, data, replacements=None):
    """Yield objects from *data* that satisfy *query*.

    The query string is compiled once, then evaluated lazily against each
    entry as the generator is consumed.
    """
    compiled = q.Query(query, params=replacements)
    for entry in data:
        if solve.solve(compiled, entry).value:
            yield entry
|
java
|
/**
 * Builds an authorized HTTP GET request for a Gs2 endpoint: substitutes the
 * {service} and {region} URL placeholders, sets a JSON content type and lets
 * the credential sign the request with a seconds-resolution timestamp.
 * NOTE(review): {module}/{function} placeholders, if present in the URL, are
 * not substituted here — they are only passed to credential.authorized();
 * confirm that is intended.
 *
 * @return the signed GET request, ready to execute
 */
protected HttpGet createHttpGet(String url, IGs2Credential credential, String service, String module, String function) {
    Long timestamp = System.currentTimeMillis()/1000;
    url = StringUtils.replace(url, "{service}", service);
    url = StringUtils.replace(url, "{region}", region.getName());
    HttpGet get = new HttpGet(url);
    get.setHeader("Content-Type", "application/json");
    credential.authorized(get, service, module, function, timestamp);
    return get;
}
|
python
|
def table(
    self, kudu_name, name=None, database=None, persist=False, external=True
):
    """
    Convenience to expose an existing Kudu table (using CREATE TABLE) as an
    Impala table. To create a new table both in the Hive Metastore with
    storage in Kudu, use create_table.

    Note: all tables created are EXTERNAL for now. Creates a temporary
    table (like parquet_file and others) unless persist=True.
    If you create a persistent table you can thereafter use it like any
    other Impala table.

    Parameters
    ----------
    kudu_name : string
        The name of the table in the Kudu cluster
    name : string, optional
        Name of the created table in Impala / Hive Metastore. Randomly
        generated if not specified.
    database : string, optional
        Database to create the table in. Uses the temp db if not provided
    persist : boolean, default False
        If True, do not drop the table upon Ibis garbage collection /
        interpreter shutdown. Be careful using this in conjunction with the
        `external` option.
    external : boolean, default True
        If True, create the Impala table as EXTERNAL so the Kudu data is not
        deleted when the Impala table is dropped.
        NOTE(review): the call below hard-codes external=True ("all tables
        created are EXTERNAL for now"), so passing external=False currently
        has no effect — confirm before relying on this parameter.

    Returns
    -------
    parquet_table : ImpalaTable
    """
    # Law of demeter, but OK for now because internal class coupling
    name, database = self.impala_client._get_concrete_table_path(
        name, database, persist=persist
    )
    self.create_table(name, kudu_name, database=database, external=True)
    return self.impala_client._wrap_new_table(name, database, persist)
|
java
|
/**
 * Adds the index-page description entry for a package and fills in the given
 * search-index item (label, category, and containing module when modules are
 * shown).
 *
 * @param pkg    package being indexed
 * @param dlTree the definition-list tree receiving the dt/dd entry
 * @param si     search index item to populate for this package
 */
protected void addDescription(PackageElement pkg, Content dlTree, SearchIndexItem si) {
    Content link = getPackageLink(pkg, new StringContent(utils.getPackageName(pkg)));
    if (configuration.showModules) {
        si.setContainingModule(utils.getFullyQualifiedName(utils.containingModule(pkg)));
    }
    si.setLabel(utils.getPackageName(pkg));
    si.setCategory(resources.getText("doclet.Packages"));
    // "<name> - package <name>" term, followed by the summary comment.
    Content dt = HtmlTree.DT(link);
    dt.addContent(" - ");
    dt.addContent(contents.package_);
    dt.addContent(" " + utils.getPackageName(pkg));
    dlTree.addContent(dt);
    Content dd = new HtmlTree(HtmlTag.DD);
    addSummaryComment(pkg, dd);
    dlTree.addContent(dd);
}
|
java
|
/**
 * Replaces the column's content with a single phrase, resetting all
 * composite/bidi layout state accumulated from previous content.
 *
 * @param phrase the new text; rendering starts fresh from it
 */
public void setText(Phrase phrase) {
    bidiLine = null;
    composite = false;
    compositeColumn = null;
    compositeElements = null;
    listIdx = 0;
    splittedRow = false;
    waitPhrase = phrase;
}
|
python
|
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine from the winchester database configuration and binds
    a live connection to the Alembic migration context.
    """
    engine = engine_from_config(
        winchester_config['database'],
        prefix='',
        poolclass=pool.NullPool)
    conn = engine.connect()
    context.configure(connection=conn, target_metadata=target_metadata)
    try:
        # Apply all pending migrations inside a single transaction.
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even when a migration fails.
        conn.close()
|
java
|
/**
 * Clears the entity cache for CPRuleAssetCategoryRel and all three finder
 * caches (entity, paginated list, unpaginated list).
 */
@Override
public void clearCache() {
    entityCache.clearCache(CPRuleAssetCategoryRelImpl.class);

    finderCache.clearCache(FINDER_CLASS_NAME_ENTITY);
    finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
    finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
}
|
java
|
/**
 * Returns a human-readable description of the given tag's value: a
 * "[n values]" summary for arrays longer than 16 elements, a date string with
 * a "+HH:MM"-style offset for Date values, or the directory's default string
 * conversion otherwise.
 *
 * @param tagType tag to describe
 * @return the description, or {@code null} when the directory has no value
 *         for the tag
 */
@Nullable
public String getDescription(int tagType)
{
    Object object = _directory.getObject(tagType);
    if (object == null)
        return null;
    // special presentation for long arrays
    if (object.getClass().isArray()) {
        final int length = Array.getLength(object);
        if (length > 16) {
            return String.format("[%d values]", length);
        }
    }
    if (object instanceof Date) {
        // Produce a date string having a format that includes the offset in form "+00:00"
        return new SimpleDateFormat("EEE MMM dd HH:mm:ss Z yyyy")
                .format((Date) object)
                .replaceAll("([0-9]{2} [^ ]+)$", ":$1");
    }
    // no special handling required, so use default conversion to a string
    return _directory.getString(tagType);
}
|
java
|
/**
 * Registers a Card subclass under the given type key with the default card
 * resolver, so cards of that type can be instantiated later.
 *
 * @param type    the type key identifying the card
 * @param cardClz the Card subclass to instantiate for that type
 */
public void registerCard(String type, Class<? extends Card> cardClz) {
    mDefaultCardResolver.register(type, cardClz);
}
|
java
|
/**
 * Returns every view in the workspace — system landscape, system context,
 * container, component, dynamic and deployment views — collected into one
 * set. Excluded from JSON serialization via {@code @JsonIgnore}.
 *
 * @return a new set containing all views
 */
@JsonIgnore
public Collection<View> getViews() {
    HashSet<View> views = new HashSet<>();
    views.addAll(getSystemLandscapeViews());
    views.addAll(getSystemContextViews());
    views.addAll(getContainerViews());
    views.addAll(getComponentViews());
    views.addAll(getDynamicViews());
    views.addAll(getDeploymentViews());
    return views;
}
|
python
|
def search_pending(self, wallet):
    """
    Tells the node to look for pending blocks for any account in
    **wallet**

    .. enable_control required

    :param wallet: Wallet to search for pending blocks
    :type wallet: str

    :raises: :py:exc:`nano.rpc.RPCException`

    >>> rpc.search_pending(
    ...     wallet="000D1BAEC8EC208142C99059B393051BAC8380F9B5A2E6B2489A277D81789F3F"
    ... )
    True
    """
    payload = {"wallet": self._process_value(wallet, 'wallet')}
    response = self.call('search_pending', payload)
    return response['started'] == '1'
|
java
|
/**
 * Reports a stylesheet processing error. The message is built from the given
 * message key and arguments; when an ErrorListener is installed it receives
 * the error via fatalError(), otherwise a TransformerException is thrown.
 * NOTE(review): the Exception parameter {@code e} is not chained into the
 * exception that is reported/thrown — confirm whether that is intentional.
 *
 * @param srcLctr source location associated with the error
 * @param msg     message key
 * @param args    message arguments
 * @param e       underlying exception (currently unused)
 * @throws TransformerException when no error listener is installed
 */
public void error(SourceLocator srcLctr, String msg, Object args[], Exception e) throws TransformerException
{
    //msg = (null == msg) ? XSLTErrorResources.ER_PROCESSOR_ERROR : msg;
    String formattedMsg = XSLMessages.createMessage(msg, args);
    // Locator locator = m_stylesheetLocatorStack.isEmpty()
    //                   ? null :
    //                   ((Locator)m_stylesheetLocatorStack.peek());
    // Locator locator = null;
    ErrorListener errHandler = m_transformer.getErrorListener();
    if (null != errHandler)
        errHandler.fatalError(new TransformerException(formattedMsg, srcLctr));
    else
        throw new TransformerException(formattedMsg, srcLctr);
}
|
java
|
/**
 * SAX filter: swallows the end tag of the DITA-OT {@code <extension>}
 * element itself and forwards every other end-element event downstream.
 */
@Override
public void endElement(final String uri, final String localName, final String qName) throws SAXException {
    if (!(DITA_OT_NS.equals(uri) && EXTENSION_ELEM.equals(localName))) {
        getContentHandler().endElement(uri, localName, qName);
    }
}
|
python
|
def parse_author(self, value):
    """
    Attempts to split an author name into last and first parts.

    A comma-separated value is split on the comma; otherwise the name is
    split on spaces. Both parts are stripped of punctuation and
    upper-cased before being returned as ``(aulast, auinit)``.
    """
    parts = tuple(token.upper().strip() for token in value.split(','))
    if len(parts) == 1:
        # No comma present: fall back to whitespace splitting.
        parts = value.split(' ')
    if len(parts) > 1:
        aulast, auinit = parts[0:2]  # Ignore JR, II, III, etc.
    else:
        aulast, auinit = parts[0], ''
    aulast = _strip_punctuation(aulast).upper()
    auinit = _strip_punctuation(auinit).upper()
    return aulast, auinit
|
python
|
def run():
    """Command for reflection database objects.

    Connects to the database given by ``-u/--url``, reflects its schema and
    either lists the tables (``-l``) or prints a description of the selected
    tables rendered as 'plantuml' or 'dot' (``-r``), optionally restricted by
    ``-i/--include`` and ``-e/--exclude`` comma-separated table lists.
    """
    parser = OptionParser(
        version=__version__, description=__doc__,
    )
    parser.add_option(
        '-u', '--url', dest='url',
        help='Database URL (connection string)',
    )
    parser.add_option(
        '-r', '--render', dest='render', default='dot',
        choices=['plantuml', 'dot'],
        help='Output format - plantuml or dot',
    )
    parser.add_option(
        '-l', '--list', dest='list', action='store_true',
        help='Output database list of tables and exit',
    )
    parser.add_option(
        '-i', '--include', dest='include',
        help='List of tables to include through ","',
    )
    parser.add_option(
        '-e', '--exclude', dest='exclude',
        help='List of tables to exlude through ","',
    )
    (options, args) = parser.parse_args()
    if not options.url:
        print('-u/--url option required')
        exit(1)
    engine = create_engine(options.url)
    meta = MetaData()
    meta.reflect(bind=engine)
    if options.list:
        print('Database tables:')
        tables = sorted(meta.tables.keys())

        def _g(seq, i):
            # Safe indexed access: '' when i is past the end of seq.
            # (Fixed: previously ignored its first argument and read the
            # enclosing `tables` directly.)
            try:
                return seq[i]
            except IndexError:
                return ''

        # Print the table names in two aligned columns.
        for i in range(0, len(tables), 2):
            print(' {0}{1}{2}'.format(
                _g(tables, i),
                ' ' * (38 - len(_g(tables, i))),
                _g(tables, i + 1),
            ))
        exit(0)
    tables = set(meta.tables.keys())
    # str.strip on each name: the old `map(string.strip, ...)` relied on the
    # Python-2-only string.strip() function and breaks on Python 3.
    if options.include:
        tables &= {name.strip() for name in options.include.split(',')}
    if options.exclude:
        tables -= {name.strip() for name in options.exclude.split(',')}
    desc = describe([meta.tables[name] for name in tables])
    print(getattr(render, options.render)(desc))
|
python
|
def format_single_dict(dictionary, output_name):
    '''
    Flatten a dict into "<output_name>[<key>]" form.
    Currently used for metadata fields.
    '''
    if not dictionary:
        return {}
    return {
        output_name + '[' + key + ']': value
        for key, value in dictionary.items()
    }
|
python
|
def stripped_lib_dict(lib_dict, strip_prefix):
    """ Return `lib_dict` with `strip_prefix` removed from start of paths.

    Use to give a form of `lib_dict` that appears relative to some base path
    given by `strip_prefix`. Particularly useful for analyzing wheels where
    we unpack to a temporary path before analyzing.

    Parameters
    ----------
    lib_dict : dict
        See :func:`tree_libs` for definition. All depending and depended
        paths are canonical (therefore absolute)
    strip_prefix : str
        Prefix to remove (if present) from all depended and depending
        library paths in `lib_dict`

    Returns
    -------
    relative_dict : dict
        `lib_dict` with `strip_prefix` removed from beginning of all
        depended and depending library paths.
    """
    strip = get_prefix_stripper(strip_prefix)
    return {
        strip(lib_path): {
            strip(depending_path): install_name
            for depending_path, install_name in dependings_dict.items()
        }
        for lib_path, dependings_dict in lib_dict.items()
    }
|
java
|
/**
 * Checks whether the qualifier at the given offset encodes a float value.
 * When the most-significant flag is set on the first byte (extended,
 * multi-byte qualifier) the float flag is read from the fourth byte,
 * otherwise from the second.
 *
 * @param qualifier qualifier bytes
 * @param offset    start of the qualifier within the array
 * @return true when the float flag is set
 */
public static boolean isFloat(final byte[] qualifier, final int offset) {
    validateQualifier(qualifier, offset);
    if ((qualifier[offset] & Const.MS_BYTE_FLAG) == Const.MS_BYTE_FLAG) {
        return (qualifier[offset + 3] & Const.FLAG_FLOAT) == Const.FLAG_FLOAT;
    } else {
        return (qualifier[offset + 1] & Const.FLAG_FLOAT) == Const.FLAG_FLOAT;
    }
}
|
java
|
/**
 * Modifies the attributes (e.g. name) of the VPC identified in the request.
 * Generates a client token when none is supplied, then issues a PUT with the
 * modifyAttribute action.
 * NOTE(review): the method name says "Instance" but the request and endpoint
 * are VPC-level — confirm the naming against the public client API.
 *
 * @param modifyVpcAttributesRequest request carrying vpcId, name and an
 *                                   optional client token; must not be null
 */
public void modifyInstanceAttributes(ModifyVpcAttributesRequest modifyVpcAttributesRequest) {
    checkNotNull(modifyVpcAttributesRequest, "request should not be null.");
    checkStringNotEmpty(modifyVpcAttributesRequest.getVpcId(), "request vpcId should not be empty.");
    checkStringNotEmpty(modifyVpcAttributesRequest.getName(), "request name should not be empty.");
    if (Strings.isNullOrEmpty(modifyVpcAttributesRequest.getClientToken())) {
        // Idempotency token: generate one when the caller did not supply it.
        modifyVpcAttributesRequest.setClientToken(this.generateClientToken());
    }
    InternalRequest internalRequest = this.createRequest(
            modifyVpcAttributesRequest, HttpMethodName.PUT, VPC_PREFIX, modifyVpcAttributesRequest.getVpcId());
    internalRequest.addParameter(NetworkAction.modifyAttribute.name(), null);
    internalRequest.addParameter("clientToken", modifyVpcAttributesRequest.getClientToken());
    fillPayload(internalRequest, modifyVpcAttributesRequest);
    this.invokeHttpClient(internalRequest, AbstractBceResponse.class);
}
|
java
|
/**
 * Reads a 4-byte int from the class-file stream and wraps it in an
 * IntegerConstant bound to this class's constant pool at the given index.
 *
 * @param index constant-pool index of the entry
 * @return the parsed integer constant
 * @throws IOException if reading from the stream fails
 */
private IntegerConstant parseIntegerConstant(int index)
    throws IOException
{
    int value = readInt();

    return new IntegerConstant(_class.getConstantPool(), index, value);
}
|
java
|
/**
 * Formats a string template, replacing each "{}" placeholder in order with
 * the corresponding argument. "\{}" escapes a placeholder (a literal "{}" is
 * emitted); "\\{}" is a literal backslash followed by a substituted
 * placeholder.
 *
 * @param strPattern template containing "{}" placeholders
 * @param argArray   values substituted for successive placeholders
 * @return the formatted string (the pattern itself when it is blank or no
 *         arguments are given)
 */
public static String format(final String strPattern, final Object... argArray) {
    if (StrUtil.isBlank(strPattern) || ArrayUtil.isEmpty(argArray)) {
        return strPattern;
    }
    final int strPatternLength = strPattern.length();

    // Pre-size the buffer for better performance.
    StringBuilder sbuf = new StringBuilder(strPatternLength + 50);

    int handledPosition = 0;// position in the pattern handled so far
    int delimIndex;// index of the next placeholder
    for (int argIndex = 0; argIndex < argArray.length; argIndex++) {
        delimIndex = strPattern.indexOf(StrUtil.EMPTY_JSON, handledPosition);
        if (delimIndex == -1) {// no placeholder left in the remainder
            if (handledPosition == 0) { // pattern contains no placeholder at all
                return strPattern;
            }
            // Append the placeholder-free tail and return the result.
            sbuf.append(strPattern, handledPosition, strPatternLength);
            return sbuf.toString();
        }

        // Escape-character handling
        if (delimIndex > 0 && strPattern.charAt(delimIndex - 1) == StrUtil.C_BACKSLASH) {// escape char before "{}"
            if (delimIndex > 1 && strPattern.charAt(delimIndex - 2) == StrUtil.C_BACKSLASH) {// double escape
                // The backslash itself was escaped, so the placeholder is still active.
                sbuf.append(strPattern, handledPosition, delimIndex - 1);
                sbuf.append(StrUtil.utf8Str(argArray[argIndex]));
                handledPosition = delimIndex + 2;
            } else {
                // The placeholder is escaped: emit "{" literally and retry
                // this argument on the next placeholder.
                argIndex--;
                sbuf.append(strPattern, handledPosition, delimIndex - 1);
                sbuf.append(StrUtil.C_DELIM_START);
                handledPosition = delimIndex + 1;
            }
        } else {// normal placeholder
            sbuf.append(strPattern, handledPosition, delimIndex);
            sbuf.append(StrUtil.utf8Str(argArray[argIndex]));
            handledPosition = delimIndex + 2;
        }
    }

    // append the characters following the last {} pair.
    sbuf.append(strPattern, handledPosition, strPattern.length());

    return sbuf.toString();
}
|
python
|
def has_name_version(self, name: str, version: str) -> bool:
    """Check if there exists a network with the name/version combination in the database.

    Issues an EXISTS subquery against the ``Network`` table and returns the
    scalar boolean result.
    """
    # exists().where(...) builds the EXISTS clause; .scalar() evaluates it to a bool.
    return self.session.query(exists().where(and_(Network.name == name, Network.version == version))).scalar()
|
java
|
/**
 * Describes inventory deletions: runs the standard before-execution hooks on
 * the request, then delegates to the generated execute method.
 */
@Override
public DescribeInventoryDeletionsResult describeInventoryDeletions(DescribeInventoryDeletionsRequest request) {
    request = beforeClientExecution(request);
    return executeDescribeInventoryDeletions(request);
}
|
java
|
/**
 * Updates the service access policies: runs the standard before-execution
 * hooks on the request, then delegates to the generated execute method.
 */
@Override
public UpdateServiceAccessPoliciesResult updateServiceAccessPolicies(UpdateServiceAccessPoliciesRequest request) {
    request = beforeClientExecution(request);
    return executeUpdateServiceAccessPolicies(request);
}
|
java
|
/**
 * Deserializes a Schema from its JSON representation.
 *
 * @param json JSON string previously produced for a Schema
 * @return the deserialized Schema
 * @throws RuntimeException wrapping any parse/mapping failure
 */
public static Schema fromJson(String json) {
    try{
        return JsonMappers.getMapper().readValue(json, Schema.class);
    } catch (Exception e){
        //TODO better exceptions
        throw new RuntimeException(e);
    }
}
|
python
|
def set_default_locators_and_formatters(self, axis):
    """
    Set up the locators and formatters for the scale.

    Major and minor tick positions come from ``_LogicleLocator`` built on
    this scale's transform; major tick labels use matplotlib's
    scientific-notation log formatter, labeling only base positions.

    Parameters
    ----------
    axis: matplotlib.axis
        Axis for which to set locators and formatters.
    """
    axis.set_major_locator(_LogicleLocator(self._transform))
    # Minor ticks at the 2..9 multiples within each decade — presumably
    # how _LogicleLocator interprets `subs`; confirm against its docs.
    axis.set_minor_locator(_LogicleLocator(self._transform,
                                           subs=np.arange(2.0, 10.)))
    axis.set_major_formatter(matplotlib.ticker.LogFormatterSciNotation(
        labelOnlyBase=True))
|
java
|
/**
 * Sets the error listener used to report transformation problems.
 *
 * @param listener the listener to install; must not be null
 * @throws IllegalArgumentException when {@code listener} is null
 */
public void setErrorListener(ErrorListener listener)
    throws IllegalArgumentException
{
    if (null == listener)
        throw new IllegalArgumentException(XSLMessages.createMessage(XSLTErrorResources.ER_ERRORLISTENER, null));
    // "ErrorListener");

    m_errorListener = listener;
}
|
python
|
def getdata(inputfile, argnum=None, close=False):
    """
    Get data from the .dat files
    args:
        inputfile: file
            Input file (iterable of lines)
        argnum: int, optional
            If given, every data line must have exactly this many
            whitespace-separated fields; otherwise a ValueError is raised.
        close: bool, default=False
            Closes inputfile if True
    returns:
        dictionary:
            data: numpy array of parsed data rows
            variables: dict of additional variables declared inline with
                "#f name=value" (float) or "#l name=v1,v2,..." (list of floats)
    """
    output = []
    add_data = {}
    line_num = 0
    for line in inputfile:
        line_num += 1
        stripped = line.strip()
        # Fix: the original tested ``line != ""``, which never matches lines
        # read from a file (they keep their trailing newline), so genuinely
        # blank lines slipped through as empty rows. Use the stripped text.
        if ("#" not in line) and stripped:
            linesplit = stripped.split()
            if argnum is not None and len(linesplit) != int(argnum):
                raise ValueError(
                    "Line {:d} has {:d} arguments (need {:d})".format(
                        line_num, len(linesplit), argnum))
            output.append(linesplit)
        # additional float variable
        if "#f" in line:
            data = line.split()[1].split("=")
            add_data[data[0]] = float(data[1])
        # additional list float variable
        if "#l" in line:
            data = line.split()[1].split("=")
            add_data[data[0]] = [float(e) for e in data[1].split(",")]
    if close:
        inputfile.close()
    output = cleandata(output)
    return {
        "data": np.array(output),
        "variables": add_data,
    }
|
python
|
def egg2dist(self, egginfo_path, distinfo_path):
    """Convert an .egg-info directory into a .dist-info directory.

    Copies egg metadata into the dist-info layout expected by wheels,
    rewrites PKG-INFO as METADATA, optionally copies a license file, and
    finally removes the original egg-info.
    """
    def adios(p):
        """Appropriately delete directory, file or link."""
        if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
            shutil.rmtree(p)
        elif os.path.exists(p):
            os.unlink(p)
    # Remove any stale dist-info left over from a previous build.
    adios(distinfo_path)
    if not os.path.exists(egginfo_path):
        # There is no egg-info. This is probably because the egg-info
        # file/directory is not named matching the distribution name used
        # to name the archive file. Check for this case and report
        # accordingly.
        import glob
        pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
        possible = glob.glob(pat)
        err = "Egg metadata expected at %s but not found" % (egginfo_path,)
        if possible:
            alt = os.path.basename(possible[0])
            err += " (%s found - possible misnamed archive file?)" % (alt,)
        raise ValueError(err)
    if os.path.isfile(egginfo_path):
        # .egg-info is a single file
        pkginfo_path = egginfo_path
        pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
        os.mkdir(distinfo_path)
    else:
        # .egg-info is a directory
        pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
        pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
        # ignore common egg metadata that is useless to wheel
        # (the ignore callable receives (dirname, names) and returns the
        # set of names to skip)
        shutil.copytree(egginfo_path, distinfo_path,
                        ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt',
                                             'not-zip-safe'}
                        )
        # delete dependency_links if it is only whitespace
        dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
        with open(dependency_links_path, 'r') as dependency_links_file:
            dependency_links = dependency_links_file.read().strip()
        if not dependency_links:
            adios(dependency_links_path)
    write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)
    # XXX heuristically copy any LICENSE/LICENSE.txt?
    license = self.license_file()
    if license:
        license_filename = 'LICENSE.txt'
        shutil.copy(license, os.path.join(distinfo_path, license_filename))
    # The egg-info is fully converted; remove the original.
    adios(egginfo_path)
|
python
|
def export_base_dts(cls, graph, obj, nsm):
    """ Export the base DTS information in a simple reusable way
    :param graph: Current graph where the information lie
    :param obj: Object for which we build info
    :param nsm: Namespace manager
    :return: Dict
    """
    node = obj.asNode()
    export = {
        "@id": str(node),
        "@type": nsm.qname(obj.type),
        nsm.qname(RDF_NAMESPACES.HYDRA.title): str(obj.get_label()),
        nsm.qname(RDF_NAMESPACES.HYDRA.totalItems): obj.size,
    }
    # Only the last description found in the graph is kept (same as the
    # original loop, which overwrote the key on each iteration).
    for desc in graph.objects(node, RDF_NAMESPACES.HYDRA.description):
        export[nsm.qname(RDF_NAMESPACES.HYDRA.description)] = str(desc)
    return export
|
java
|
/**
 * Draws one item of a 3D-style (cylinder) bar chart.
 * Renders the bar body, a rounded/elliptical cap for vertical orientation,
 * an optional outline and item label, and registers entity/tooltip info.
 */
public void drawItem(Graphics2D g2, CategoryItemRendererState state,
    Rectangle2D dataArea, CategoryPlot plot, CategoryAxis domainAxis,
    ValueAxis rangeAxis, CategoryDataset dataset, int row, int column,
    int pass) {
    // check the value we are plotting...
    Number dataValue = dataset.getValue(row, column);
    if (dataValue == null) {
        return;
    }
    double value = dataValue.doubleValue();
    // Shrink the data area by the 3D offsets so the depth effect fits inside.
    Rectangle2D adjusted = new Rectangle2D.Double(dataArea.getX(), dataArea.getY() + getYOffset(),
            dataArea.getWidth() - getXOffset(), dataArea.getHeight() - getYOffset());
    PlotOrientation orientation = plot.getOrientation();
    double barW0 = calculateBarW0(plot, orientation, adjusted, domainAxis, state, row, column);
    double[] barL0L1 = calculateBarL0L1(value);
    if (barL0L1 == null) {
        return; // the bar is not visible
    }
    RectangleEdge edge = plot.getRangeAxisEdge();
    // Convert data-space bar ends to Java2D coordinates.
    float transL0 = (float) rangeAxis.valueToJava2D(barL0L1[0], adjusted, edge);
    float transL1 = (float) rangeAxis.valueToJava2D(barL0L1[1], adjusted, edge);
    float barL0 = Math.min(transL0, transL1);
    float barLength = Math.abs(transL1 - transL0);
    // draw the bar...
    GeneralPath bar = new GeneralPath();
    if (orientation == PlotOrientation.HORIZONTAL) {
        // Horizontal bars are drawn as a plain rectangle path.
        bar.moveTo(barL0, (float) barW0);
        bar.lineTo(barL0, (float) (barW0 + state.getBarWidth()));
        bar.lineTo(barL0 + barLength, (float) (barW0 + state.getBarWidth()));
        bar.lineTo(barL0 + barLength, (float) barW0);
        bar.closePath();
    } else {
        // Vertical bars get curved top/bottom arcs for the cylinder look.
        bar.moveTo((float) barW0, (float) (barL0 - getYOffset() / 2));
        bar.lineTo((float) barW0, (float) (barL0 + barLength - getYOffset() / 2));
        Arc2D arc = new Arc2D.Double(barW0, (barL0 + barLength - getYOffset()), state.getBarWidth(),
                getYOffset(), 180, 180, Arc2D.OPEN);
        bar.append(arc, true);
        bar.lineTo((float) (barW0 + state.getBarWidth()), (float) (barL0 - getYOffset() / 2));
        arc = new Arc2D.Double(barW0, (barL0 - getYOffset()), state.getBarWidth(), getYOffset(), 0, -180, Arc2D.OPEN);
        bar.append(arc, true);
        bar.closePath();
    }
    Paint itemPaint = getItemPaint(row, column);
    // Adapt gradient paints to the bar's actual bounds.
    if (getGradientPaintTransformer() != null && itemPaint instanceof GradientPaint) {
        GradientPaint gp = (GradientPaint) itemPaint;
        itemPaint = getGradientPaintTransformer().transform(gp, bar);
    }
    g2.setPaint(itemPaint);
    g2.fill(bar);
    // Elliptical "lid" on top of the cylinder.
    Shape bar3dTop = new Ellipse2D.Double(barW0, barL0 - getYOffset(), state.getBarWidth(), getYOffset());
    if (itemPaint instanceof GradientPaint) {
        g2.setPaint(((GradientPaint) itemPaint).getColor2());
    }
    g2.fill(bar3dTop);
    // Outline is skipped for very thin bars.
    if (isDrawBarOutline() && state.getBarWidth() > BAR_OUTLINE_WIDTH_THRESHOLD) {
        g2.setStroke(getItemOutlineStroke(row, column));
        g2.setPaint(getItemOutlinePaint(row, column));
        g2.draw(bar);
        if (bar3dTop != null) {
            g2.draw(bar3dTop);
        }
    }
    CategoryItemLabelGenerator generator = getItemLabelGenerator(row, column);
    if (generator != null && isItemLabelVisible(row, column)) {
        drawItemLabel(g2, dataset, row, column, plot, generator, bar.getBounds2D(), (value < 0.0));
    }
    // collect entity and tool tip information...
    if (state.getInfo() != null) {
        EntityCollection entities = state.getEntityCollection();
        if (entities != null) {
            String tip = null;
            CategoryToolTipGenerator tipster = getToolTipGenerator(row, column);
            if (tipster != null) {
                tip = tipster.generateToolTip(dataset, row, column);
            }
            String url = null;
            if (getItemURLGenerator(row, column) != null) {
                url = getItemURLGenerator(row, column).generateURL(dataset, row, column);
            }
            CategoryItemEntity entity = new CategoryItemEntity(bar.getBounds2D(), tip, url, dataset,
                    dataset.getRowKey(row), dataset.getColumnKey(column));
            entities.add(entity);
        }
    }
}
|
java
|
/**
 * Reflectively invokes {@code methodName} on the servlet context, caching the
 * resolved {@link Method} per name to avoid repeated reflective lookups.
 *
 * @param appContext target servlet context
 * @param methodName name of the method to invoke (also the cache key)
 * @param params arguments to pass to the method
 * @return the method's return value, or {@code null} when the exception handler swallows the failure
 * @throws Throwable whatever {@code handleException} decides to rethrow
 */
private Object invokeMethod(ServletContextImpl appContext, final String methodName, Object[] params) throws Throwable {
    try {
        Method method = objectCache.get(methodName);
        if (method == null) {
            method = appContext.getClass().getMethod(methodName, classCache.get(methodName));
            objectCache.put(methodName, method);
        }
        return executeMethod(method, appContext, params);
    } catch (Exception ex) {
        handleException(ex);
        return null;
    }
    // Removed the original 'finally { params = null; }': nulling a local
    // parameter reference has no effect on the caller or on reachability here.
}
|
java
|
/**
 * Builds the HTML form wrapping the list widget, including hidden request
 * parameters and (when the list is searchable) the search onsubmit hook.
 *
 * @return the generated HTML fragment
 */
protected String defaultActionHtmlContent() {
    // StringBuilder instead of StringBuffer: this is method-local, so the
    // synchronization StringBuffer provides is pure overhead.
    StringBuilder result = new StringBuilder(2048);
    result.append("<form name='");
    result.append(getList().getId());
    result.append("-form' action='");
    result.append(getDialogRealUri());
    result.append("' method='post' class='nomargin'");
    if (getList().getMetadata().isSearchable()) {
        // Wire the search action's id and localized confirmation message
        // into the client-side submit handler.
        result.append(" onsubmit=\"listSearchAction('");
        result.append(getList().getId());
        result.append("', '");
        result.append(getList().getMetadata().getSearchAction().getId());
        result.append("', '");
        result.append(getList().getMetadata().getSearchAction().getConfirmationMessage().key(getLocale()));
        result.append("');\"");
    }
    result.append(">\n");
    result.append(allParamsAsHidden());
    result.append("\n");
    getList().setWp(this);
    result.append(getList().listHtml());
    result.append("\n</form>\n");
    return result.toString();
}
|
python
|
def add_link(self, name, desc, layout, node_1, node_2):
    """
    Add a link to a network. Links are what effectively
    define the network topology, by associating two already
    existing nodes.
    """
    # Link names must be unique within a network.
    duplicate = get_session().query(Link).filter(
        Link.name == name, Link.network_id == self.id).first()
    if duplicate is not None:
        raise HydraError("A link with name %s is already in network %s"%(name, self.id))
    link = Link()
    link.name = name
    link.description = desc
    link.layout = None if layout is None else json.dumps(layout)
    link.node_a = node_1
    link.node_b = node_2
    get_session().add(link)
    self.links.append(link)
    return link
|
python
|
def username_is_available(self, new_username):
    """Check if ``new_username`` is still available.
    | Returns True if ``new_username`` does not exist or belongs to the current user.
    | Return False otherwise.
    """
    # Return True if new_username equals current user's username
    if self.user_manager.call_or_get(current_user.is_authenticated):
        if new_username == current_user.username:
            return True
    # Return True if new_username does not exist,
    # Return False otherwise.
    # Fix: compare to None with ``is`` (PEP 8) instead of ``==``.
    return self.find_user_by_username(new_username) is None
|
python
|
def add(self, dist):
    """Add `dist` if we ``can_add()`` it and it has not already been added
    """
    if not (self.can_add(dist) and dist.has_version()):
        return
    entries = self._distmap.setdefault(dist.key, [])
    if dist in entries:
        return
    entries.append(dist)
    # Keep the newest/best distribution first.
    entries.sort(key=operator.attrgetter('hashcmp'), reverse=True)
|
java
|
/**
 * Convenience overload: compares the two strings with default options
 * (delegates with a {@code null} third argument).
 */
public static <T extends Levenshtein> T NGram(String baseTarget, String compareTarget) {
    return NGram(baseTarget, compareTarget, null);
}
|
java
|
/**
 * Decodes a Base64 payload and AES-decrypts the resulting bytes.
 */
public static byte[] AesDecryptByBase64Str(String base64String, byte[] decryptKey) throws Exception {
    return AesDecryptByBytes(Base64Decode(base64String), decryptKey);
}
|
java
|
public final void setText(@Nullable final String text) {
    // Snapshot the dependent-disabling state before and after the change so
    // dependents are only notified when that state actually flips.
    final boolean wasDisabling = shouldDisableDependents();
    this.text = text;
    persistString(text);
    final boolean nowDisabling = shouldDisableDependents();
    if (nowDisabling != wasDisabling) {
        notifyDependencyChange(nowDisabling);
    }
    notifyChanged();
}
|
java
|
public void trimToSize() {
    // Already trimmed (or inconsistent state): nothing to do.
    if (_size >= _data.length) {
        return;
    }
    if (_size == 0) {
        _data = EMPTY_ARRAY;
    } else {
        _data = copyOf(_data, _size);
    }
}
|
python
|
def pyxlines(self):
    """Cython code lines.
    Assumptions:
    * Function shall be a method
    * Method shall be inlined
    * Method returns nothing
    * Method arguments are of type `int` (except self)
    * Local variables are generally of type `int` but of type `double`
      when their name starts with `d_`
    """
    # Re-indent the cleaned source one level so it nests under the class.
    lines = [' '+line for line in self.cleanlines]
    # Turn the Python def into an inlined, void cpdef with the nogil suffix.
    lines[0] = lines[0].replace('def ', 'cpdef inline void ')
    lines[0] = lines[0].replace('):', ') %s:' % _nogil)
    # Type each untyped argument as int; the two replace calls cover the
    # "middle of the list" and "last argument" textual positions.
    for name in self.untypedarguments:
        lines[0] = lines[0].replace(', %s ' % name, ', int %s ' % name)
        lines[0] = lines[0].replace(', %s)' % name, ', int %s)' % name)
    # Declare local variables right after the signature: doubles for the
    # d_-prefixed names, ints for everything else.
    for name in self.untypedinternalvarnames:
        if name.startswith('d_'):
            lines.insert(1, '        cdef double ' + name)
        else:
            lines.insert(1, '        cdef int ' + name)
    return Lines(*lines)
|
java
|
public void createTable() {
    // [START bigtable_hw_create_table_veneer]
    // Checks if table exists, creates table if does not exist.
    if (adminClient.exists(tableId)) {
        return;
    }
    System.out.println("Creating table: " + tableId);
    CreateTableRequest createTableRequest =
        CreateTableRequest.of(tableId).addFamily(COLUMN_FAMILY);
    adminClient.createTable(createTableRequest);
    System.out.printf("Table %s created successfully%n", tableId);
    // [END bigtable_hw_create_table_veneer]
}
|
java
|
@Override
public void forEach(Consumer<? super P_OUT> action) {
    // Terminal operation: evaluate the pipeline with a forEach sink.
    // NOTE(review): the 'false' flag presumably selects the unordered variant
    // (vs forEachOrdered) - confirm against ForEachOps.makeRef.
    evaluate(ForEachOps.makeRef(action, false));
}
|
python
|
def load_windowstime(buf, pos):
    """Load LE64 windows timestamp"""
    # Windows timestamps count 100 ns ticks since 1601; the unix epoch (1970)
    # is this many seconds later.
    windows_to_unix = 11644473600
    lo, pos = load_le32(buf, pos)
    hi, pos = load_le32(buf, pos)
    secs, rem_100ns = divmod((hi << 32) | lo, 10000000)
    stamp = datetime.fromtimestamp(secs - windows_to_unix, UTC)
    stamp = stamp.replace(microsecond=rem_100ns // 10)
    return stamp, pos
|
java
|
protected void configureYahooClient(final Collection<BaseClient> properties) {
    val yahoo = pac4jProperties.getYahoo();
    // Only register the client when both OAuth credentials are configured.
    if (StringUtils.isBlank(yahoo.getId()) || StringUtils.isBlank(yahoo.getSecret())) {
        return;
    }
    val client = new YahooClient(yahoo.getId(), yahoo.getSecret());
    configureClient(client, yahoo);
    LOGGER.debug("Created client [{}] with identifier [{}]", client.getName(), client.getKey());
    properties.add(client);
}
|
java
|
/**
 * Marshalls the given request's fields into the protocol marshaller.
 * Each field is bound via its generated marshalling descriptor; any failure
 * is wrapped in an {@link SdkClientException}.
 */
public void marshall(ListTaskDefinitionFamiliesRequest listTaskDefinitionFamiliesRequest, ProtocolMarshaller protocolMarshaller) {
    if (listTaskDefinitionFamiliesRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(listTaskDefinitionFamiliesRequest.getFamilyPrefix(), FAMILYPREFIX_BINDING);
        protocolMarshaller.marshall(listTaskDefinitionFamiliesRequest.getStatus(), STATUS_BINDING);
        protocolMarshaller.marshall(listTaskDefinitionFamiliesRequest.getNextToken(), NEXTTOKEN_BINDING);
        protocolMarshaller.marshall(listTaskDefinitionFamiliesRequest.getMaxResults(), MAXRESULTS_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Returns the extension of {@code fileName} including the leading dot
 * (e.g. ".txt"), or {@code null} when there is no dot or the name ends
 * with a dot. A leading-dot name like ".gitignore" is returned whole.
 */
public static String getFileExt(String fileName) {
    int lastDot = fileName.lastIndexOf('.');
    if (lastDot == -1 || lastDot == fileName.length() - 1) {
        return null;
    }
    return fileName.substring(lastDot);
}
|
java
|
@Override
protected void extraFileActions(PackageSymbol pack, JavaFileObject fo) {
    // Record a "package.html" file as the package's documentation source file.
    if (fo.isNameCompatible("package", JavaFileObject.Kind.HTML)) {
        pack.sourcefile = fo;
    }
}
|
java
|
/**
 * Validates {@code value}: a null value fails only when the field is
 * required; otherwise the value is unwrapped and every configured rule runs.
 */
protected void performValidation(String path, Object value, List<ValidationResult> results) {
    String name = path != null ? path : "value";
    if (value == null) {
        // Check for required values
        if (_required) {
            results.add(new ValidationResult(path, ValidationResultType.Error, "VALUE_IS_NULL",
                    name + " cannot be null", "NOT NULL", null));
        }
        return;
    }
    value = ObjectReader.getValue(value);
    // Check validation rules
    if (_rules == null) {
        return;
    }
    for (IValidationRule rule : _rules) {
        rule.validate(path, this, value, results);
    }
}
|
python
|
def endpoint_delete(auth=None, **kwargs):
    '''
    Delete an endpoint
    CLI Example:
    .. code-block:: bash
        salt '*' keystoneng.endpoint_delete id=3bee4bd8c2b040ee966adfda1f0bfca9
    '''
    cloud = get_operator_cloud(auth)
    cleaned = _clean_kwargs(**kwargs)
    return cloud.delete_endpoint(**cleaned)
|
java
|
Iterator<FileSet<CopyEntity>> getCopyEntities(CopyConfiguration configuration, Comparator<FileSet<CopyEntity>> prioritizer,
    PushDownRequestor<FileSet<CopyEntity>> requestor) throws IOException {
    // Unpartitioned tables yield a single file set; partitioned ones iterate per partition.
    if (!HiveUtils.isPartitioned(this.dataset.table)) {
        FileSet<CopyEntity> singleFileSet =
            new UnpartitionedTableFileSet(this.dataset.table.getCompleteName(), this.dataset, this);
        return Iterators.singletonIterator(singleFileSet);
    }
    return new PartitionIterator(this.sourcePartitions, configuration, prioritizer, requestor);
}
|
java
|
/**
 * Creates an external contact for the given MX Plan service
 * (POST /email/mxplan/{service}/externalContact) and returns the
 * resulting provisioning task.
 */
public OvhTask service_externalContact_POST(String service, String displayName, String externalEmailAddress, String firstName, Boolean hiddenFromGAL, String initials, String lastName) throws IOException {
    String qPath = "/email/mxplan/{service}/externalContact";
    StringBuilder sb = path(qPath, service);
    // Collect the request body fields (null values are handled by addBody).
    HashMap<String, Object>o = new HashMap<String, Object>();
    addBody(o, "displayName", displayName);
    addBody(o, "externalEmailAddress", externalEmailAddress);
    addBody(o, "firstName", firstName);
    addBody(o, "hiddenFromGAL", hiddenFromGAL);
    addBody(o, "initials", initials);
    addBody(o, "lastName", lastName);
    String resp = exec(qPath, "POST", sb.toString(), o);
    return convertTo(resp, OvhTask.class);
}
|
java
|
/**
 * Prunes dead {@code Completion} nodes from the lock-free dependent stack.
 * First pops non-live nodes off the head via CAS; if the head is live (or a
 * CAS lost a race), walks the list once and unlinks at most one interior
 * non-live node (note the {@code break} after the CAS).
 */
final void cleanStack() {
    boolean unlinked = false;
    Completion p;
    while ((p = stack) != null && !p.isLive()) // ensure head of stack live
        unlinked = casStack(p, p.next);
    if (p != null && !unlinked) { // try to unlink first nonlive
        for (Completion q = p.next; q != null;) {
            Completion s = q.next;
            if (q.isLive()) {
                p = q;
                q = s;
            } else {
                casNext(p, q, s);
                break;
            }
        }
    }
}
|
python
|
def get_bins_by_query(self, bin_query):
    """Gets a list of ``Bins`` matching the given bin query.
    arg: bin_query (osid.resource.BinQuery): the bin query
    return: (osid.resource.BinList) - the returned ``BinList``
    raise: NullArgument - ``bin_query`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    raise: Unsupported - a ``bin_query`` is not of this service
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinQuerySession.get_bins_by_query_template
    if self._catalog_session is not None:
        return self._catalog_session.get_catalogs_by_query(bin_query)
    collection = JSONClientValidated('resource',
                                     collection='Bin',
                                     runtime=self._runtime)
    matches = collection.find(dict(bin_query._query_terms)).sort('_id', DESCENDING)
    return objects.BinList(matches, runtime=self._runtime)
|
java
|
/**
 * Returns a new list containing only the classes accepted by {@code filter},
 * preserving both the reachable and directly-related subsets.
 */
public ClassInfoList filter(final ClassInfoFilter filter) {
    final Set<ClassInfo> acceptedReachable = new LinkedHashSet<>(size());
    final Set<ClassInfo> acceptedDirect = new LinkedHashSet<>(directlyRelatedClasses.size());
    for (final ClassInfo classInfo : this) {
        if (!filter.accept(classInfo)) {
            continue;
        }
        acceptedReachable.add(classInfo);
        if (directlyRelatedClasses.contains(classInfo)) {
            acceptedDirect.add(classInfo);
        }
    }
    return new ClassInfoList(acceptedReachable, acceptedDirect, sortByName);
}
|
java
|
public synchronized void authConnection(MemcachedConnection conn,
    OperationFactory opFact, AuthDescriptor authDescriptor,
    MemcachedNode node) {
    // Cancel any in-flight auth attempt for this node before starting a new one.
    interruptOldAuth(node);
    nodeMap.put(node, new AuthThread(conn, opFact, authDescriptor, node));
}
|
python
|
def get_status(video_id, _connection=None):
    """
    Get the status of a video given the ``video_id`` parameter.
    """
    # Use the supplied connection when given, otherwise open a default one.
    conn = _connection if _connection else connection.APIConnection()
    return conn.post('get_upload_status', video_id=video_id)
|
python
|
def system_qos_qos_service_policy_attach_rbridge_id_remove_rb_remove_range(self, **kwargs):
    """Auto Generated Code

    Builds the NETCONF XML payload for removing an rbridge-id range from a
    system-qos service-policy attachment and hands it to the callback.
    Required kwargs: ``direction``, ``policy_map_name``, ``rb_remove_range``;
    optional ``callback`` overrides ``self._callback``.
    """
    config = ET.Element("config")
    system_qos = ET.SubElement(config, "system-qos", xmlns="urn:brocade.com:mgmt:brocade-policer")
    qos = ET.SubElement(system_qos, "qos")
    service_policy = ET.SubElement(qos, "service-policy")
    direction_key = ET.SubElement(service_policy, "direction")
    direction_key.text = kwargs.pop('direction')
    policy_map_name_key = ET.SubElement(service_policy, "policy-map-name")
    policy_map_name_key.text = kwargs.pop('policy_map_name')
    attach = ET.SubElement(service_policy, "attach")
    rbridge_id = ET.SubElement(attach, "rbridge-id")
    remove = ET.SubElement(rbridge_id, "remove")
    rb_remove_range = ET.SubElement(remove, "rb-remove-range")
    rb_remove_range.text = kwargs.pop('rb_remove_range')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
java
|
/**
 * Appends the given hosts to this result's host list (fluent setter).
 * Lazily creates the backing list on first use; does not replace existing
 * entries - use {@code setHosts} to overwrite.
 */
public DescribeHostsResult withHosts(Host... hosts) {
    if (this.hosts == null) {
        setHosts(new com.amazonaws.internal.SdkInternalList<Host>(hosts.length));
    }
    for (Host ele : hosts) {
        this.hosts.add(ele);
    }
    return this;
}
|
python
|
def _copy_file_or_directory(self, source, destination_directory):
"""Recursively copies files from source to destination_directory.
Args:
source: source file or directory to copy into destination_directory
destination_directory: destination directory in which to copy source
"""
if os.path.isdir(source):
for item in os.listdir(source):
full_source = os.path.join(source, item)
full_destination = os.path.join(destination_directory, item)
shutil.copytree(full_source, full_destination)
else:
shutil.copy2(source, destination_directory)
|
python
|
def unblock(self, other_user_id):
    """Unblock the given user.
    :param str other_user_id: the ID of the user to unblock
    :return: ``True`` if successful
    :rtype: bool
    """
    query = {'user': self.user_id, 'otherUser': other_user_id}
    response = self.session.delete(self.url, params=query)
    return response.ok
|
java
|
/**
 * Returns the messaging engines registered for the given bus, or an empty
 * array when none are known. Access to the shared registry is synchronized.
 */
public static JsMessagingEngine[] getMessagingEngines(final String busName) {
    final String methodName = "getMessagingEngines";
    if (TRACE.isEntryEnabled()) {
        SibTr.entry(TRACE, methodName, busName);
    }
    final JsMessagingEngine[] result;
    synchronized (MESSAGING_ENGINES) {
        // Do we have any messaging engines for the given bus?
        final Set messagingEngines = (Set) MESSAGING_ENGINES.get(busName);
        if (messagingEngines == null) {
            // If not, return an empty array
            result = new JsMessagingEngine[0];
        } else {
            // If we do, convert the set to an array
            result = (JsMessagingEngine[]) messagingEngines
                            .toArray(new JsMessagingEngine[messagingEngines.size()]);
        }
    }
    if (TRACE.isEntryEnabled()) {
        SibTr.exit(TRACE, methodName, result);
    }
    return result;
}
|
java
|
/**
 * Conditions this factor on {@code variable == value} by marginalizing the
 * variable while keeping only the mass from the observed slice: entries
 * matching the observed value pass through, all other slices leave the
 * accumulator (initialized to 0) untouched.
 */
public TableFactor observe(int variable, final int value) {
    return marginalize(variable, 0, (marginalizedVariableValue, assignment) -> {
        if (marginalizedVariableValue == value) {
            // Observed slice: take the factor's value unchanged.
            return (old, n) -> {
                // This would mean that we're observing something with 0 probability, which will wonk up downstream
                // stuff
                // assert(n != 0);
                assert !Double.isNaN(n);
                return n;
            };
        } else {
            // Non-observed slice: keep the accumulator as-is.
            return (old, n) -> {
                assert !Double.isNaN(old);
                return old;
            };
        }
    });
}
|
java
|
/**
 * Maps an input file to its RAML output location by swapping the final
 * 4 characters of the name (e.g. "json") for "raml".
 */
private File getRamlOutputFile(File input) {
    String inputName = input.getName();
    String ramlFileName = inputName.substring(0, inputName.length() - 4) + "raml";
    File outDir = (outputDirectory == null)
            ? new File(WatcherUtils.getExternalAssetsDestination(basedir), "raml")
            : new File(basedir, outputDirectory);
    return new File(outDir, ramlFileName);
}
|
java
|
/**
 * Walks backwards through the document (preceding siblings' last
 * descendants, then ancestors) from {@code context}, stopping at the first
 * node matching {@code countMatchPattern}, or at DTM.NULL when
 * {@code fromMatchPattern} matches first or the walk is exhausted.
 */
private int findPrecedingOrAncestorOrSelf(
        XPathContext xctxt, XPath fromMatchPattern, XPath countMatchPattern,
        int context, ElemNumber namespaceContext)
          throws javax.xml.transform.TransformerException
{
    DTM dtm = xctxt.getDTM(context);
    while (DTM.NULL != context)
    {
      if (null != fromMatchPattern)
      {
        // Hitting the "from" boundary terminates the search with NULL.
        if (fromMatchPattern.getMatchScore(xctxt, context)
                != XPath.MATCH_SCORE_NONE)
        {
          context = DTM.NULL;
          break;
        }
      }
      if (null != countMatchPattern)
      {
        // Found a countable node: return it.
        if (countMatchPattern.getMatchScore(xctxt, context)
                != XPath.MATCH_SCORE_NONE)
        {
          break;
        }
      }
      int prevSibling = dtm.getPreviousSibling(context);
      if (DTM.NULL == prevSibling)
      {
        // No preceding sibling: continue the walk at the parent.
        context = dtm.getParent(context);
      }
      else
      {
        // Now go down the chain of children of this sibling
        context = dtm.getLastChild(prevSibling);
        if (context == DTM.NULL)
          context = prevSibling;
      }
    }
    return context;
}
|
python
|
def resize_old(self, block_size, order=0, mode='constant', cval=None):
    '''
    geo.resize(new_shape, order=0, mode='constant', cval=np.nan, preserve_range=True)
    Returns resized georaster
    '''
    # Fix: the original default was ``cval=False`` combined with
    # ``if not cval``, which silently replaced a legitimate ``cval=0``
    # with NaN. Use None as the "not supplied" sentinel (False is still
    # treated as unset for backward compatibility).
    if cval is None or cval is False:
        cval = np.nan
    # Normalize float rasters to [0, 1] before resizing so interpolation
    # behaves uniformly; the scaling is undone after the resize.
    if (self.raster.dtype.name.find('float') != -1 and
            np.max(np.abs([self.max(), self.min()])) > 1):
        raster2 = (self.raster-self.min())/(self.max()-self.min())
    else:
        raster2 = self.raster.copy()
    raster2 = raster2.astype(float)
    raster2[self.raster.mask] = np.nan
    raster2 = resize(raster2, block_size, order=order, mode=mode, cval=cval)
    raster2 = np.ma.masked_array(raster2, mask=np.isnan(raster2),
                                 fill_value=self.raster.fill_value)
    # Undo the normalization and restore nodata cells and the mask.
    raster2 = raster2*(self.max()-self.min())+self.min()
    raster2[raster2.mask] = self.nodata_value
    raster2.mask = np.logical_or(np.isnan(raster2.data), raster2.data == self.nodata_value)
    # Rescale pixel sizes in the geotransform to match the new shape.
    geot = list(self.geot)
    [geot[-1], geot[1]] = np.array([geot[-1], geot[1]])*self.shape/block_size
    return GeoRaster(raster2, tuple(geot), nodata_value=self.nodata_value,\
                     projection=self.projection, datatype=self.datatype)
|
python
|
def deconstruct(self):
    """Handle django.db.migrations."""
    name, path, args, kwargs = super(VersionField, self).deconstruct()
    # Preserve the field's custom options in the migration serialization.
    kwargs.update(partial=self.partial, coerce=self.coerce)
    return name, path, args, kwargs
|
python
|
def schnabel_eskow(mat, eps=1e-16):
    """
    Scnabel-Eskow algorithm for modified Cholesky factorisation algorithm.
    Args:
        mat (numpy.ndarray) : Must be a non-singular and symmetric matrix If
            sparse, the result will also be sparse.
        eps (float) : Error tolerance used in algorithm.
    Returns
    -------
    perm : 2d array
        Permutation matrix used for pivoting.
    lowtri : 2d array
        Lower triangular factor
    err : 1d array
        Positive diagonals of shift matrix `err`.
    Examples
    --------
    >>> mat = [[4, 2, 1], [2, 6, 3], [1, 3, -.004]]
    >>> perm, lowtri, err = schnabel_eskow(mat)
    >>> perm, lowtri = numpy.matrix(perm), numpy.matrix(lowtri)
    >>> print(numpy.around(perm*lowtri*lowtri.T*perm.T, 4))
    [[5.504 2.    1.   ]
     [2.    6.    3.   ]
     [1.    3.    1.5  ]]
    """
    mat = numpy.asfarray(mat)
    # tau controls when the algorithm switches to phase two.
    tau = eps**(1/3.)
    # Create the matrix err and mat.
    size = mat.shape[0]
    mat0 = mat
    mat = 1*mat
    err = numpy.zeros(size, dtype=float)
    # Permutation matrix.
    perm = numpy.eye(size)
    # Calculate gamma.
    gamma = abs(mat.diagonal()).max()
    # Phase one, mat potentially positive definite.
    ###############################################
    def invariant(mat, k):
        """Return `True` if the invariant is satisfied."""
        A_ = numpy.eye(size)
        L_ = numpy.eye(size)
        A_[k:, k:] = numpy.triu(mat[k:, k:], 0) + numpy.triu(mat[k:, k:], 1).T
        L_[:, :k] = numpy.tril(mat[:, :k])
        return numpy.allclose(
            numpy.dot(L_, numpy.dot(A_, L_.T)),
            numpy.dot(perm, numpy.dot(mat0, perm.T)) + numpy.diag(err)
        )
    def jiter_factor(mat, j, perm, err):
        """Perform jth iteration of factorisation.
        """
        assert invariant(mat, j)
        mat[j, j] = numpy.sqrt(mat[j, j])
        mat[j+1:, j] /= mat[j, j]
        mat[j+1:, j+1:] -= mat[j+1:, j:j+1]*mat[j+1:, j:j+1].T
        mat[j, j+1:] = 0
        assert invariant(mat, j+1)
    def permute(mat, perm, i, j):
        """Exchange rows and columns i and j of mat and recored the
        permutation in perm"""
        p = numpy.arange(size, dtype=int)
        if i != j:
            p[[i, j]] = j, i
            perm[::] = perm[p, :]
            mat[::] = mat[p, :]
            mat[::] = mat[:, p]
    def exec_phasetwo(mat, perm, err, j):
        """Phase 2 of the algorithm, mat not positive definite."""
        if j == size:
            # delta = err[size].
            delta = -mat[size-1, size-1] + max(
                -tau*mat[size-1, size-1]/(1 - tau), tau*tau*gamma)
            err[size-1] = delta
            mat[size-1, size-1] += delta
            mat[size-1, size-1] = numpy.sqrt(mat[size-1, size-1])
        else:
            # Number of iterations performed in phase one (less 1).
            k = j - 1
            # Calculate the lower Gerschgorin bounds of Ak+1.
            tmp = mat[k+1:, k+1:]
            g = tmp.diagonal().copy()
            tmp = abs(numpy.tril(tmp, -1))
            g -= tmp.sum(axis=0)
            g -= tmp.sum(axis=1)
            # Modified Cholesky decomposition.
            delta_prev = 0.0
            for j in range(k+1, size-2):
                # Pivot on the maximum lower Gerschgorin bound
                # estimate.
                i = j + numpy.argmax(g[j-(k+1):])
                # Interchange row and column i and j.
                permute(mat, perm, i, j)
                # Calculate err[j] and add to diagonal.
                normj = abs(mat[j+1:, j]).sum()
                delta = max(0.0,
                            -mat[j, j] + max(normj, tau*tau*gamma),
                            delta_prev)  # delta = E[size].
                if delta > 0.0:
                    mat[j, j] += delta
                    delta_prev = delta
                err[j] = delta
                # Update Gerschgorin bound estimates.
                if mat[j, j] != normj:
                    temp = 1.0 - normj / mat[j, j]
                    g[j-k:] += abs(mat[j+1:, j])*temp
                # Perform jth iteration of factorisation.
                jiter_factor(mat, j, perm, err)
            # Final 2*2 submatrix: shift by the eigenvalue-based delta and
            # factor it directly.
            mini = mat[-2:, -2:]
            mini[1, 0] = mini[0, 1]
            eigs = numpy.sort(numpy.linalg.eigvalsh(mini))
            delta = max(
                0, -eigs[0] + max(tau*(eigs[1] - eigs[0])/(1 - tau),
                                  tau*tau*gamma), delta_prev)
            if delta > 0.0:
                mat[size-2, size-2] += delta
                mat[size-1, size-1] += delta
                delta_prev = delta
            err[size-2] = err[size-1] = delta
            mat[size-2, size-2] = numpy.sqrt(mat[size-2, size-2])
            mat[size-1, size-2] /= mat[size-2, size-2]
            mat[size-1, size-1] = numpy.sqrt(
                mat[size-1, size-1] - mat[size-1, size-2]**2)
    for j in range(size):
        # Calculate max_Aii and min_Aii
        diag = mat.diagonal()[j:]
        # Test for phase 2, mat not positive definite.
        if diag.max() < tau*tau * gamma or diag.min() < - 0.1 * diag.max():
            exec_phasetwo(mat, perm, err, j)
            break
        else:
            # Pivot on maximum diagonal of remaining submatrix.
            i = j + numpy.argmax(mat.diagonal()[j:])
            # Interchange row and column i and j.
            permute(mat, perm, i, j)
            # Test for phase 2 again.
            min_num = 1e99
            mat_diag = mat.diagonal()
            if j + 1 < size:
                min_num = (mat_diag[i] -
                           mat[i, j+1:]**2/mat_diag[j+1:]).min()
            else:
                min_num = mat_diag[i]
            if j+1 <= size and min_num < - 0.1 * gamma:
                exec_phasetwo(mat, perm, err, j)
                break
            # Perform jth iteration of factorisation.
            else:
                jiter_factor(mat, j, perm, err)
    # The Cholesky factor of mat.
    return perm, numpy.tril(mat), err
|
java
|
/**
 * Returns the url regex field, lazily converting the stored ByteString to a
 * String on first access and caching the result (protobuf generated code).
 */
public java.lang.String getUrlRegex() {
    java.lang.Object ref = urlRegex_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    } else {
        com.google.protobuf.ByteString bs =
                (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later calls skip the UTF-8 conversion.
        urlRegex_ = s;
        return s;
    }
}
|
java
|
MongoCollection<BsonDocument> getUndoCollection(final MongoNamespace namespace) {
    // Undo documents live in a shadow database named "sync_undo_<db>",
    // mirroring the original collection name, with the default codec registry.
    final String undoDatabaseName = String.format("sync_undo_%s", namespace.getDatabaseName());
    return localClient
            .getDatabase(undoDatabaseName)
            .getCollection(namespace.getCollectionName(), BsonDocument.class)
            .withCodecRegistry(MongoClientSettings.getDefaultCodecRegistry());
}
|
python
|
def make_plot(self):
    """Generate the plot from time series and arguments.

    Computes an ASD for every loaded time series (using the secpfft/overlap
    CLI arguments), plots each on a shared axis with a per-series label, and
    returns the resulting Plot.
    """
    args = self.args
    fftlength = float(args.secpfft)
    overlap = args.overlap
    self.log(2, "Calculating spectrum secpfft: {0}, overlap: {1}".format(
        fftlength, overlap))
    # overlap is given as a fraction of the FFT length; convert to seconds.
    overlap *= fftlength
    # create plot
    plot = Plot(figsize=self.figsize, dpi=self.dpi)
    ax = plot.gca()
    # handle user specified plot labels
    if self.args.legend:
        nlegargs = len(self.args.legend[0])
    else:
        nlegargs = 0
    if nlegargs > 0 and nlegargs != self.n_datasets:
        # NOTE(review): the warning text reports len(self.timeseries) and
        # len(self.args.legend), while the comparison uses n_datasets and
        # nlegargs - confirm these always agree.
        warnings.warn('The number of legends specified must match '
                      'the number of time series'
                      ' (channels * start times). '
                      'There are {:d} series and {:d} legends'.format(
                          len(self.timeseries), len(self.args.legend)))
        nlegargs = 0  # don't use them
    for i in range(0, self.n_datasets):
        series = self.timeseries[i]
        if nlegargs:
            label = self.args.legend[0][i]
        else:
            label = series.channel.name
        if len(self.start_list) > 1:
            # Disambiguate identical channels from different start times.
            label += ', {0}'.format(series.epoch.gps)
        asd = series.asd(fftlength=fftlength, overlap=overlap)
        self.spectra.append(asd)
        if self.usetex:
            label = label_to_latex(label)
        ax.plot(asd, label=label)
    # On a log x-axis default the lower bound to the lowest resolvable frequency.
    if args.xscale == 'log' and not args.xmin:
        args.xmin = 1/fftlength
    return plot
|
java
|
/**
 * Regenerates the named storage account key asynchronously, unwrapping the
 * service response to its body.
 */
public Observable<StorageAccountListKeysResultInner> regenerateKeyAsync(String resourceGroupName, String accountName, String keyName) {
    return regenerateKeyWithServiceResponseAsync(resourceGroupName, accountName, keyName).map(new Func1<ServiceResponse<StorageAccountListKeysResultInner>, StorageAccountListKeysResultInner>() {
        @Override
        public StorageAccountListKeysResultInner call(ServiceResponse<StorageAccountListKeysResultInner> response) {
            return response.body();
        }
    });
}
|
java
|
@Pure
public static boolean intersectsCapsuleCapsule(
        double capsule1Ax, double capsule1Ay, double capsule1Az, double capsule1Bx, double capsule1By, double capsule1Bz, double capsule1Radius,
        double capsule2Ax, double capsule2Ay, double capsule2Az, double capsule2Bx, double capsule2By, double capsule2Bz, double capsule2Radius) {
    // Two capsules intersect iff the distance between their axis segments is
    // at most the sum of their radii; compare squared values to avoid sqrt.
    double squaredSegmentDistance = AbstractSegment3F.distanceSquaredSegmentSegment(
            capsule1Ax, capsule1Ay, capsule1Az, capsule1Bx, capsule1By, capsule1Bz,
            capsule2Ax, capsule2Ay, capsule2Az, capsule2Bx, capsule2By, capsule2Bz);
    double combinedRadius = capsule1Radius + capsule2Radius;
    return squaredSegmentDistance <= combinedRadius * combinedRadius;
}
|
java
|
/**
 * Sizes the decoration box and indents the primary widget by the same
 * width so both stay aligned.
 */
private void init() {
    int decorationWidth = getDecorationWidth();
    m_decorationBox.setWidth(decorationWidth + "px");
    m_primary.getElement().getStyle().setMarginLeft(decorationWidth, Style.Unit.PX);
}
|
java
|
@Override
public Class<?> findClass(final String qualifiedClassName)
{
    // Define the class directly from the in-memory compiled bytecode.
    final byte[] bytes = this.compiledJavaFileObject.getBytes();
    return defineClass(qualifiedClassName, bytes, 0, bytes.length);
}
|
python
|
def xpubsubSockets(self, hostSub, portSub, hostPub, portPub):
    '''
    Creates frontend and backend for a XPUB/XSUB forwarding device

    Returns a (frontendSocket, backendSocket) pair: a SUB socket bound to
    (hostSub, portSub) and subscribed to all topics, and a PUB socket bound
    to (hostPub, portPub).

    NOTE(review): despite the XPUB/XSUB name, plain zmq.SUB/zmq.PUB sockets
    are created here - confirm whether zmq.XSUB/zmq.XPUB were intended so
    subscription messages can be forwarded.
    '''
    frontend_addr = self.tcpAddress(hostSub, portSub)
    backend_addr = self.tcpAddress(hostPub, portPub)
    frontendSocket = self._context.socket(zmq.SUB)
    frontendSocket.bind(frontend_addr)
    # Subscribe to everything so all messages flow through the device.
    frontendSocket.setsockopt(zmq.SUBSCRIBE, b'')
    backendSocket = self._context.socket(zmq.PUB)
    backendSocket.bind(backend_addr)
    return frontendSocket, backendSocket
|
java
|
/**
 * Builds a timestamped journal file in the given directory, refusing to
 * overwrite an existing file.
 */
private File createFilename(String filenamePrefix, File journalDirectory)
        throws JournalException {
    String timestampedName =
            JournalHelper.createTimestampedFilename(filenamePrefix, new Date());
    File journalFile = new File(journalDirectory, timestampedName);
    if (journalFile.exists()) {
        throw new JournalException("File '" + journalFile.getPath()
                + "' already exists.");
    }
    return journalFile;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.