language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
/**
 * Spring bean wiring for {@code SoftwareModuleManagement}; only created when no
 * other bean of that type is present ({@code @ConditionalOnMissingBean}).
 * All collaborators are injected by the container and forwarded unchanged to
 * the JPA implementation; only the database type is unwrapped from the
 * JPA properties.
 */
@Bean
@ConditionalOnMissingBean
SoftwareModuleManagement softwareModuleManagement(final EntityManager entityManager,
        final DistributionSetRepository distributionSetRepository,
        final SoftwareModuleRepository softwareModuleRepository,
        final SoftwareModuleMetadataRepository softwareModuleMetadataRepository,
        final SoftwareModuleTypeRepository softwareModuleTypeRepository,
        final NoCountPagingRepository criteriaNoCountDao, final AuditorAware<String> auditorProvider,
        final ArtifactManagement artifactManagement, final QuotaManagement quotaManagement,
        final VirtualPropertyReplacer virtualPropertyReplacer, final JpaProperties properties) {
    return new JpaSoftwareModuleManagement(entityManager, distributionSetRepository, softwareModuleRepository,
            softwareModuleMetadataRepository, softwareModuleTypeRepository, criteriaNoCountDao, auditorProvider,
            artifactManagement, quotaManagement, virtualPropertyReplacer, properties.getDatabase());
}
|
java
|
/**
 * Tells whether the parameter panel named {@code panelName}, or a direct
 * child of it, is currently selected in the parameter tree.
 *
 * @param panelName name of the panel to look up in the tree
 * @return true when the panel's path is selected, or when the current
 *         selection's parent path is the panel's path; false otherwise
 *         (including when no node exists for the name)
 */
public boolean isParamPanelOrChildSelected(String panelName) {
    DefaultMutableTreeNode panelNode = getTreeNodeFromPanelName(panelName);
    if (panelNode == null) {
        // Unknown panel name: nothing can be selected.
        return false;
    }
    TreePath pathToPanel = new TreePath(panelNode.getPath());
    if (getTreeParam().isPathSelected(pathToPanel)) {
        return true;
    }
    // Not selected itself -- check whether the selection is a direct child.
    TreePath currentSelection = getTreeParam().getSelectionPath();
    if (currentSelection == null) {
        return false;
    }
    return pathToPanel.equals(currentSelection.getParentPath());
}
|
java
|
/**
 * Interprets one scenario row from the specification table, dispatching the
 * step to the annotation handler matching its fixture annotation, and
 * reports the resulting statistics back to the table.
 */
public void interpret(Specification table)
{
    Statistics stats = new Statistics();
    Example row = table.nextExample();
    Example scenario = row.firstChild();
    try
    {
        // Resolve the scenario text against the fixture's target object.
        ScenarioMessage message = new ScenarioMessage( fixture.getTarget(), scenario.getContent() );
        Call call = new Call( message );
        // Register one annotator per supported step annotation; only the
        // branch whose annotation matches this message runs on execute().
        call.will( Annotate.given( scenario, message, stats ) ).when( message.annotationIs( Given.class ) );
        call.will( Annotate.then( scenario, message, stats ) ).when( message.annotationIs( Then.class ) );
        call.will( Annotate.when( scenario, stats ) ).when( message.annotationIs( When.class ) );
        call.will( Annotate.check( scenario, stats ) ).when( message.annotationIs( Check.class ) );
        call.will( Annotate.display( scenario ) ).when( message.annotationIs( Display.class ) );
        call.execute();
        table.exampleDone( stats );
    }
    catch (Exception e)
    {
        // Record the failure on the table and annotate the scenario cell
        // with the exception details so it shows up in the rendered output.
        reportException( table );
        scenario.annotate( Annotations.exception( e ) );
    }
}
|
python
|
def last(self, values, axis=0):
    """Return values at the last occurrence of each associated key.

    Parameters
    ----------
    values : array_like, [keys, ...]
        values to pick the last value of per group
    axis : int, optional
        alternative reduction axis for values

    Returns
    -------
    unique : ndarray, [groups]
        unique keys
    reduced : ndarray, [groups, ...]
        value array, reduced over groups
    """
    values = np.asarray(values)
    # index.stop holds each group's end offset in key-sorted order, so
    # sorter[stop - 1] is the original position of each group's last element.
    # NOTE(review): assumes `values` is aligned with the keys this index was
    # built from -- confirm against callers.
    return self.unique, np.take(values, self.index.sorter[self.index.stop-1], axis)
|
java
|
/**
 * Creates an AreYouFlushed control message addressed to {@code target} for
 * the given stream, with guaranteed-delivery properties filled in and the
 * standard control-message priority/reliability applied.
 *
 * @param target the messaging engine the query is sent to
 * @param ID request id used to correlate the eventual reply
 * @param stream the stream being queried
 * @return the initialised control message
 * @throws SIResourceException if the control message cannot be created
 */
protected ControlAreYouFlushed createControlAreYouFlushed(SIBUuid8 target, long ID, SIBUuid12 stream)
    throws SIResourceException
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "createControlAreYouFlushed");
    ControlAreYouFlushed flushedqMsg;
    // Create new message and send it
    try
    {
        flushedqMsg = _cmf.createNewControlAreYouFlushed();
    }
    catch (MessageCreateFailedException e)
    {
        // FFDC
        FFDCFilter.processException(
            e,
            "com.ibm.ws.sib.processor.impl.AbstractInputHandler.createControlAreYouFlushed",
            "1:588:1.170",
            this);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        {
            SibTr.exception(tc, e);
            SibTr.exit(tc, "createControlAreYouFlushed", e);
        }
        SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
            new Object[] {
                "com.ibm.ws.sib.processor.impl.AbstractInputHandler",
                "1:600:1.170",
                e });
        // Wrap the creation failure in the standard resource exception,
        // preserving the original cause.
        throw new SIResourceException(
            nls.getFormattedMessage(
                "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                new Object[] {
                    "com.ibm.ws.sib.processor.impl.AbstractInputHandler",
                    "1:608:1.170",
                    e },
                null),
            e);
    }
    // As we are using the Guaranteed Header - set all the attributes as
    // well as the ones we want.
    SIMPUtils.setGuaranteedDeliveryProperties(flushedqMsg,
        _messageProcessor.getMessagingEngineUuid(),
        target,
        stream,
        null,
        _destination.getUuid(),
        ProtocolType.UNICASTOUTPUT,
        GDConfig.PROTOCOL_VERSION);
    // Pub/sub destinations use the pub/sub variant of the protocol.
    if(_destination.isPubSub())
    {
        flushedqMsg.setGuaranteedProtocolType(ProtocolType.PUBSUBOUTPUT);
    }
    flushedqMsg.setRequestID(ID);
    flushedqMsg.setPriority(SIMPConstants.CTRL_MSG_PRIORITY);
    flushedqMsg.setReliability(SIMPConstants.CONTROL_MESSAGE_RELIABILITY);
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "createControlAreYouFlushed", flushedqMsg);
    return flushedqMsg;
}
|
java
|
/**
 * Searches the constant pool for a long constant holding {@code value}.
 *
 * @param value the long value to look up
 * @return the matching pool entry, or null if no long constant has it
 */
public LongConstant getLongByValue(long value)
{
    for (ConstantPoolEntry candidate : _entries) {
        if (candidate instanceof LongConstant) {
            LongConstant asLong = (LongConstant) candidate;
            if (asLong.getValue() == value) {
                return asLong;
            }
        }
    }
    // No long constant in the pool carries this value.
    return null;
}
|
python
|
def get_inline_styles(cls, instance):
    """
    Return a dictionary of CSS attributes to be added as ``style="..."``
    to the current HTML tag.

    Merges the class-level ``default_inline_styles`` with the per-instance
    ``inline_styles`` found in ``instance.glossary`` (instance values win).
    """
    # Copy the class-level defaults before merging: the previous
    # implementation called .update() on the shared ``default_inline_styles``
    # dict itself, so one instance's styles leaked into every later call
    # for the same class.
    inline_styles = dict(getattr(cls, 'default_inline_styles', {}))
    css_style = instance.glossary.get('inline_styles')
    if css_style:
        inline_styles.update(css_style)
    return inline_styles
|
java
|
/**
 * Schedules all generally-configured jobs. A job configuration without a
 * cron schedule ({@code JOB_SCHEDULE_KEY}) is treated as a one-time job and
 * gets a {@code RunOnceJobListener}; scheduled jobs get an
 * {@code EmailNotificationJobListener}. Every job is also registered in the
 * listener's job-name map.
 *
 * @throws ConfigurationException if the job configurations cannot be loaded
 * @throws JobException if a job cannot be scheduled
 * @throws IOException if reading the job configuration files fails
 */
private void scheduleGeneralConfiguredJobs()
    throws ConfigurationException, JobException, IOException {
    LOG.info("Scheduling configured jobs");
    for (Properties jobProps : loadGeneralJobConfigs()) {
        if (!jobProps.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) {
            // A job without a cron schedule is considered a one-time job
            jobProps.setProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "true");
        }
        // parseBoolean avoids the needless boxing done by Boolean.valueOf
        boolean runOnce = Boolean.parseBoolean(jobProps.getProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "false"));
        scheduleJob(jobProps, runOnce ? new RunOnceJobListener() : new EmailNotificationJobListener());
        this.listener.addToJobNameMap(jobProps);
    }
}
|
java
|
/**
 * Applies the {@code replicate_on_write} column-family property either to a
 * CQL statement under construction ({@code builder}) or to a Thrift
 * {@code CfDef}, depending on which target is supplied.
 *
 * @param cfDef Thrift column-family definition, used when builder is null
 * @param cfProperties source of the REPLICATE_ON_WRITE property
 * @param builder CQL statement builder; takes precedence when non-null
 */
private void onSetReplicateOnWrite(CfDef cfDef, Properties cfProperties, StringBuilder builder)
{
    String replicateOnWrite = cfProperties.getProperty(CassandraConstants.REPLICATE_ON_WRITE);
    if (builder != null)
    {
        // CQL path: append "REPLICATE_ON_WRITE = <bool> AND " to the statement.
        String replicateOn_Write = CQLTranslator.getKeyword(CassandraConstants.REPLICATE_ON_WRITE);
        builder.append(replicateOn_Write);
        builder.append(CQLTranslator.EQ_CLAUSE);
        builder.append(Boolean.parseBoolean(replicateOnWrite));
        builder.append(CQLTranslator.AND_CLAUSE);
    }
    else if (cfDef != null)
    {
        // Thrift path: honour the configured property, as the CQL branch
        // does. (Previously this hard-coded false, silently discarding a
        // configured "true"; parseBoolean(null) still yields false, so the
        // behaviour when the property is absent is unchanged.)
        cfDef.setReplicate_on_write(Boolean.parseBoolean(replicateOnWrite));
    }
}
|
java
|
/**
 * Decides whether extracting {@code headExpr} into a generated function
 * would shorten the overall Gremlin script compared to inlining the
 * expression {@code scaleFactor} times.
 */
private boolean creatingFunctionShortensGremlin(GroovyExpression headExpr) {
    int tailLength = getTailLength();
    // Length of the head expression alone, excluding the shared tail.
    int length = headExpr.toString().length() - tailLength;
    int overhead = 0;
    if (nextFunctionBodyStart instanceof AbstractFunctionExpression) {
        // A function definition already exists; only its body grows.
        overhead = functionDefLength;
    } else {
        // First extraction pays the cost of a brand-new function definition.
        overhead = INITIAL_FUNCTION_DEF_LENGTH;
    }
    // Each of the scaleFactor uses also pays a call-site cost.
    overhead += FUNCTION_CALL_OVERHEAD * scaleFactor;
    //length * scaleFactor = space taken by having the expression be inlined [scaleFactor] times
    //overhead + length = space taken by the function definition and its calls
    return length * scaleFactor > overhead + length;
}
|
python
|
def is_analysis_compatible(using):
    """
    Returns True if the analysis defined in Python land and ES for the connection `using` are compatible
    """
    python_analysis = collect_analysis(using)
    es_analysis = existing_analysis(using)
    if es_analysis == DOES_NOT_EXIST:
        return True
    # Everything defined in Python land must be matched exactly in ES land.
    for section, python_items in python_analysis.items():
        # This whole section (analyzers, tokenizers, filters, ...) is
        # missing from ES.
        if section not in es_analysis:
            return False
        es_items = es_analysis[section]
        # Every named item in the section must exist in ES with an
        # identical definition.
        for name, definition in python_items.items():
            if name not in es_items:
                return False
            if es_items[name] != definition:
                return False
    return True
|
java
|
/**
 * Forwards the line to the attached console. A failed push is only
 * reported to stderr so printing can never break the caller.
 */
@Override
public void print(String line) {
    try {
        console.pushToConsole(line);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
|
java
|
/**
 * Writes the RRULE body for a "day-of-week on or after day-of-month" rule:
 * BYDAY gives the weekday, BYMONTHDAY lists the window of {@code numDays}
 * candidate month days starting at {@code dayOfMonth}.
 */
private static void writeZonePropsByDOW_GEQ_DOM_sub(Writer writer, int month,
        int dayOfMonth, int dayOfWeek, int numDays, long untilTime, int fromOffset) throws IOException {
    int startDayNum = dayOfMonth;
    boolean isFeb = (month == Calendar.FEBRUARY);
    if (dayOfMonth < 0 && !isFeb) {
        // Use positive number if possible
        // (counting back from the end of the month; February is skipped
        // because its length varies with leap years)
        startDayNum = MONTHLENGTH[month] + dayOfMonth + 1;
    }
    beginRRULE(writer, month);
    writer.write(ICAL_BYDAY);
    writer.write(EQUALS_SIGN);
    writer.write(ICAL_DOW_NAMES[dayOfWeek - 1]); // SU, MO, TU...
    writer.write(SEMICOLON);
    writer.write(ICAL_BYMONTHDAY);
    writer.write(EQUALS_SIGN);
    // Emit the comma-separated window of candidate days.
    writer.write(Integer.toString(startDayNum));
    for (int i = 1; i < numDays; i++) {
        writer.write(COMMA);
        writer.write(Integer.toString(startDayNum + i));
    }
    if (untilTime != MAX_TIME) {
        // UNTIL is written in local time, hence the fromOffset adjustment.
        appendUNTIL(writer, getDateTimeString(untilTime + fromOffset));
    }
    writer.write(NEWLINE);
}
|
python
|
def assess(self, cases=None):
    """Try to sort the **cases** using the network, return the number of
    misses. If **cases** is None, test all possible cases according to
    the network dimensionality.
    """
    if cases is None:
        # Exhaustively enumerate every binary input of the right width.
        cases = product(range(2), repeat=self.dimension)
    # expected[k] is the correctly sorted binary sequence containing k ones.
    expected = [[0] * (self.dimension - i) + [1] * i
                for i in range(self.dimension + 1)]
    miss_count = 0
    for case in cases:
        candidate = list(case)
        self.sort(candidate)
        if candidate != expected[sum(candidate)]:
            miss_count += 1
    return miss_count
|
python
|
def parse_column_key_value(table_schema, setting_string):
    """
    Parses 'setting_string' as str formatted in <column>[:<key>]=<value>
    and returns str type 'column' and json formatted 'value'
    """
    # Split off the column name. When a ':' is present the remainder keeps
    # the "<key>=<value>" form for the datum parser; otherwise split on '='
    # or treat the whole string as a bare column with no value.
    if ':' in setting_string:
        column, value = setting_string.split(':', 1)
    elif '=' in setting_string:
        column, value = setting_string.split('=', 1)
    else:
        column, value = setting_string, None
    if value is not None:
        # Convert the raw text into a typed datum per the column's schema.
        column_type = table_schema.columns[column].type
        value = datum_from_string(column_type, value)
    return column, value
|
java
|
/**
 * Looks up a public static method on {@code clazz}.
 *
 * @param clazz the class to search; must not be null
 * @param methodName the method name; must not be null
 * @param args the parameter types of the desired method
 * @return the matching method if it exists and is static, otherwise null
 */
public static Method getStaticMethod(Class<?> clazz, String methodName, Class<?>... args) {
    Assert.notNull(clazz, "Class must not be null");
    Assert.notNull(methodName, "Method name must not be null");
    try {
        Method candidate = clazz.getMethod(methodName, args);
        if (Modifier.isStatic(candidate.getModifiers())) {
            return candidate;
        }
        // Found, but not static -- treated the same as not found.
        return null;
    }
    catch (NoSuchMethodException ex) {
        return null;
    }
}
|
java
|
/**
 * Parses a date or date-time string into its components.
 *
 * @param dateString the raw date/date-time text to parse
 * @param hasTime whether the string contains a time part; when null it is
 *        inferred from the presence of an hour group in the match
 * @return the parsed components; missing time fields default to 0
 * @throws IllegalArgumentException if the string does not match the
 *         expected pattern (message 19)
 */
public static DateTimeComponents parse(String dateString, Boolean hasTime) {
    Matcher m = regex.matcher(dateString);
    if (!m.find()) {
        throw Messages.INSTANCE.getIllegalArgumentException(19, dateString);
    }
    // Groups are consumed positionally: year, month, day, (skipped
    // separator group), hour, minute, second, zone designator.
    int i = 1;
    int year = Integer.parseInt(m.group(i++));
    int month = Integer.parseInt(m.group(i++));
    int date = Integer.parseInt(m.group(i++));
    i++; //skip
    String hourStr = m.group(i++);
    if (hasTime == null) {
        // Infer date-only vs date-time from whether an hour was captured.
        hasTime = (hourStr != null);
    }
    if (!hasTime) {
        return new DateTimeComponents(year, month, date);
    }
    int hour = (hourStr == null) ? 0 : Integer.parseInt(hourStr);
    String minuteStr = m.group(i++);
    int minute = (minuteStr == null) ? 0 : Integer.parseInt(minuteStr);
    String secondStr = m.group(i++);
    int second = (secondStr == null) ? 0 : Integer.parseInt(secondStr);
    // A trailing "Z" group marks the value as UTC.
    boolean utc = "Z".equals(m.group(i++));
    return new DateTimeComponents(year, month, date, hour, minute, second, utc);
}
|
python
|
def resolve_xref(self, env, fromdocname, builder,
                 typ, target, node, contnode):
    # type: (BuildEnvironment, unicode, Builder, unicode, unicode, nodes.Node, nodes.Node) -> nodes.Node  # NOQA
    """Resolve the pending_xref *node* with the given *typ* and *target*.

    This method should return a new node, to replace the xref node,
    containing the *contnode* which is the markup content of the
    cross-reference.

    If no resolution can be found, None can be returned; the xref node will
    then given to the :event:`missing-reference` event, and if that yields no
    resolution, replaced by *contnode*.

    The method can also raise :exc:`sphinx.environment.NoUri` to suppress
    the :event:`missing-reference` event being emitted.
    """
    # The first registered object whose short name matches the target wins.
    for fullname, (docname, _objtype) in self.data['objects'].items():
        if fullname.name != target:
            continue
        return make_refnode(builder, fromdocname, docname,
                            fullname2id(fullname), contnode, fullname.name)
    return None
|
python
|
def log(ltype, method, page, user_agent):
    """Writes to the log a message in the following format::

        "<datetime>: <exception> method <HTTP method> page <path> \
        user agent <user_agent>"

    Logging is best-effort: failures are swallowed unless
    ``settings.DJANGOSPAM_FAIL_ON_LOG`` is set, in which case a
    :class:`LogError` wrapping the original exception info is raised.
    """
    try:
        # ``with`` guarantees the handle is closed even when the write fails
        # (the previous open/close pair leaked the handle on write errors).
        with open(settings.DJANGOSPAM_LOG, "a") as log_file:
            log_file.write("%s: %s method %s page %s user agent %s\n" % (
                datetime.datetime.now(), ltype, method, page, user_agent))
    except Exception:
        # Catch Exception rather than a bare except so KeyboardInterrupt
        # and SystemExit still propagate.
        if settings.DJANGOSPAM_FAIL_ON_LOG:
            exc_type, exc_value = sys.exc_info()[:2]
            raise LogError(exc_type, exc_value)
|
java
|
/**
 * Logs a completed transfer (upload or download) with its size and average
 * rate, and removes it from the in-progress downloads map.
 */
@Override
public void transferSucceeded(TransferEvent event) {
    TransferResource resource = event.getResource();
    downloads.remove(resource);
    long contentLength = event.getTransferredBytes();
    if (contentLength >= 0) {
        long duration = System.currentTimeMillis() - resource.getTransferStartTime();
        // NOTE(review): a sub-millisecond transfer makes duration 0 and the
        // rate Infinity -- harmless here since the value is only logged.
        double kbPerSec = (contentLength / 1024.0) / (duration / 1000.0);
        StringBuilder sb = new StringBuilder().append("Completed")
            .append(event.getRequestType() == TransferEvent.RequestType.PUT ? " upload of " : " download of ")
            .append(resource.getResourceName())
            .append(event.getRequestType() == TransferEvent.RequestType.PUT ? " into " : " from ")
            .append(resource.getRepositoryUrl()).append(", transferred ")
            .append(contentLength >= 1024 ? toKB(contentLength) + " KB" : contentLength + " B").append(" at ")
            .append(new DecimalFormat("0.0", new DecimalFormatSymbols(Locale.ENGLISH)).format(kbPerSec))
            .append("KB/sec");
        log.fine(sb.toString());
    }
}
|
java
|
/**
 * Builds a new list containing the elements of {@code list} at each of the
 * given positions, in the order the positions are supplied. Positions may
 * repeat; an out-of-range position propagates the list's own
 * IndexOutOfBoundsException.
 */
public static <T> List<T> slice(List<T> list, int... indexes) {
    List<T> picked = new ArrayList<>(indexes.length);
    for (int position = 0; position < indexes.length; position++) {
        picked.add(list.get(indexes[position]));
    }
    return picked;
}
|
python
|
def logout(self):
    """
    Log out, revoking the access tokens
    and forgetting the login details if they were given.
    """
    # Revoke the refresh token first so no new access tokens can be minted,
    # then revoke the current access token.
    self.revoke_refresh_token()
    self.revoke_access_token()
    # Drop any stored credentials so a later login must supply them again.
    self._username, self._password = None, None
|
python
|
def argmin(self, values):
    """return the index into values corresponding to the minimum value of the group

    Parameters
    ----------
    values : array_like, [keys]
        values to pick the argmin of per group

    Returns
    -------
    unique: ndarray, [groups]
        unique keys
    argmin : ndarray, [groups]
        index into value array, representing the argmin per group
    """
    keys, minima = self.min(values)
    # Broadcast each group's minimum back onto the group's members.
    minima = minima[self.inverse]
    # select the first occurence of the minimum in each group
    # NOTE(review): builds a compound index on (group, value == minimum) and
    # reads one start position per group from the tail of index.start --
    # relies on as_index ordering the is-minimum flag last within each
    # group; confirm against the as_index implementation.
    index = as_index((self.inverse, values == minima))
    return keys, index.sorter[index.start[-self.groups:]]
|
python
|
def fit_predict(self,
                X,
                num_epochs=10,
                updates_epoch=10,
                stop_param_updates=None,
                batch_size=1,
                show_progressbar=False):
    """First fit, then predict.

    All parameters are forwarded unchanged to :meth:`fit`; ``X`` is then
    passed to :meth:`predict` with the same ``batch_size``.
    """
    # A mutable dict as a default argument is shared across calls and could
    # accumulate state if fit() ever mutates it; default to None and build a
    # fresh dict per call instead. Passing an explicit dict (or any mapping)
    # behaves exactly as before.
    if stop_param_updates is None:
        stop_param_updates = dict()
    self.fit(X,
             num_epochs,
             updates_epoch,
             stop_param_updates,
             batch_size,
             show_progressbar)
    return self.predict(X, batch_size=batch_size)
|
python
|
def typestring(obj):
    """Make a string for the object's type.

    Parameters
    ----------
    obj : obj
        Python object.

    Returns
    -------
    `str`
        String representation of the object's type. This is the type's
        importable namespace.

    Examples
    --------
    >>> typestring(42)
    'builtins.int'
    """
    cls = type(obj)
    # Join the defining module with the bare class name.
    return '{}.{}'.format(cls.__module__, cls.__name__)
|
python
|
def raid_alert(self, status, used, available, type):
    """RAID alert messages.

    [available/used] means that ideally the array may have _available_
    devices however, _used_ devices are in use.
    Obviously when used >= available then things are good.
    """
    # RAID0 has no redundancy to degrade, so it is always reported OK.
    if type == 'raid0':
        return 'OK'
    # An inactive array is unconditionally critical.
    if status == 'inactive':
        return 'CRITICAL'
    # Without both device counts we cannot judge degradation.
    if used is None or available is None:
        return 'DEFAULT'
    return 'WARNING' if used < available else 'OK'
|
java
|
/**
 * Builds the login screen: title and anonymous-login flag come from the
 * launching intent, and the activity closes itself as soon as a user is
 * successfully logged in.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Title from the intent, falling back to the application name.
    String title = this.getIntent().getStringExtra(EXTRA_TITLE);
    if(title == null) title = getApplicationName(this);
    boolean enableAnonymous = this.getIntent().getBooleanExtra(EXTRA_ENABLE_ANONYMOUS_LOGIN,false);
    CredentialView credentialView = new CredentialView(this,title);
    if(enableAnonymous)credentialView.enableAnonymousLogin();
    setContentView(credentialView);
    // Finish this activity once a non-null user arrives from the backend.
    BackendServices.addLoginListener(new LoginListener() {
        @Override
        public void onNext(BackendUser backendUser) {
            if(backendUser != null){
                AuthActivity.this.finish();
            }
        }
    });
    this.closeOptionsMenu();
}
|
python
|
def add(self, sim, module, package=None):
    """
    Add simulation to layer.

    :param sim: the simulation to register
    :param module: module the simulation comes from
    :param package: optional package containing the module
    """
    super(Simulations, self).add(sim, module, package)
    # only update layer info if it is missing!
    if sim not in self.layer:
        # copy simulation source parameters to :attr:`Layer.layer`
        self.layer[sim] = {'module': module, 'package': package}
|
python
|
def get_resources_to_check(client_site_url, apikey):
    """Return a list of resource IDs to check for broken links.

    Calls the client site's API to get a list of resource IDs.

    :raises CouldNotGetResourceIDsError: if getting the resource IDs fails
        for any reason
    """
    endpoint = client_site_url + u"deadoralive/get_resources_to_check"
    # The API key travels in the Authorization header.
    response = requests.get(endpoint, headers=dict(Authorization=apikey))
    if response.ok:
        return response.json()
    raise CouldNotGetResourceIDsError(
        u"Couldn't get resource IDs to check: {code} {reason}".format(
            code=response.status_code, reason=response.reason))
|
java
|
/**
 * Removes all existing ConfigAdmin configurations matching {@code filter},
 * unless one of them originated from server.xml.
 *
 * @param filter an OSGi filter selecting the configurations to inspect
 * @return false if a matching configuration came from server.xml (removal
 *         stops there; configurations already deleted in this call stay
 *         deleted); true otherwise
 * @throws Exception if listing or deleting configurations fails
 */
public final boolean removeExistingConfigurations(String filter) throws Exception {
    final boolean trace = TraceComponent.isAnyTracingEnabled();
    BundleContext bundleContext = DataSourceService.priv.getBundleContext(FrameworkUtil.getBundle(DataSourceResourceFactoryBuilder.class));
    ConfigurationAdmin configAdmin = configAdminRef.getService();
    try {
        Configuration[] existingConfigurations = configAdmin.listConfigurations(filter);
        if (existingConfigurations != null)
            for (Configuration config : existingConfigurations) {
                Dictionary<?, ?> cfgProps = config.getProperties();
                // Don't remove configuration that came from server.xml
                if (cfgProps != null && FILE.equals(cfgProps.get(CONFIG_SOURCE))) {
                    if (trace && tc.isDebugEnabled())
                        Tr.debug(tc, "configuration found in server.xml: ", config.getPid());
                    return false;
                } else {
                    if (trace && tc.isDebugEnabled())
                        Tr.debug(tc, "removing", config.getPid());
                    config.delete();
                }
            }
    } finally {
        // Always release the ConfigurationAdmin service reference.
        bundleContext.ungetService(configAdminRef.getReference());
    }
    return true;
}
|
java
|
/**
 * Copies {@code length} bytes from {@code b} starting at {@code offset}
 * into a freshly allocated array.
 *
 * @throws IndexOutOfBoundsException if the requested range falls outside b
 */
public static byte[] readBytes(byte b[], int offset, int length) {
    byte[] copy = new byte[length];
    System.arraycopy(b, offset, copy, 0, length);
    return copy;
}
|
java
|
/**
 * Looks up the association registered for {@code collectionRole}.
 *
 * @return the association, or null when none is registered (including when
 *         no association map exists at all)
 */
public Association getAssociation(String collectionRole) {
    return ( associations == null ) ? null : associations.get( collectionRole );
}
|
java
|
/**
 * Prints the meta master summary (address, ports, start time, uptime,
 * version, safe mode, ZooKeeper configuration) at one extra indentation
 * level.
 *
 * @throws IOException if fetching the master info fails
 */
private void printMetaMasterInfo() throws IOException {
    mIndentationLevel++;
    // Request only the fields this report actually prints.
    Set<MasterInfoField> masterInfoFilter = new HashSet<>(Arrays
        .asList(MasterInfoField.LEADER_MASTER_ADDRESS, MasterInfoField.WEB_PORT,
            MasterInfoField.RPC_PORT, MasterInfoField.START_TIME_MS,
            MasterInfoField.UP_TIME_MS, MasterInfoField.VERSION,
            MasterInfoField.SAFE_MODE, MasterInfoField.ZOOKEEPER_ADDRESSES));
    MasterInfo masterInfo = mMetaMasterClient.getMasterInfo(masterInfoFilter);
    print("Master Address: " + masterInfo.getLeaderMasterAddress());
    print("Web Port: " + masterInfo.getWebPort());
    print("Rpc Port: " + masterInfo.getRpcPort());
    print("Started: " + CommonUtils.convertMsToDate(masterInfo.getStartTimeMs(),
        mDateFormatPattern));
    print("Uptime: " + CommonUtils.convertMsToClockTime(masterInfo.getUpTimeMs()));
    print("Version: " + masterInfo.getVersion());
    print("Safe Mode: " + masterInfo.getSafeMode());
    // An empty or missing address list means ZooKeeper is not in use.
    List<String> zookeeperAddresses = masterInfo.getZookeeperAddressesList();
    if (zookeeperAddresses == null || zookeeperAddresses.isEmpty()) {
        print("Zookeeper Enabled: false");
    } else {
        print("Zookeeper Enabled: true");
        print("Zookeeper Addresses: ");
        mIndentationLevel++;
        for (String zkAddress : zookeeperAddresses) {
            print(zkAddress);
        }
        mIndentationLevel--;
    }
}
|
java
|
/**
 * Complementary error function erfc(x), evaluated with T. Ooura's rational
 * approximation from the gamerf package (see the reference below).
 * The two nested Horner evaluations in u = t - 0.5 are precision-tuned;
 * do not reorder or alter the coefficients.
 */
@Reference(authors = "T. Ooura", //
    title = "Gamma / Error Functions", booktitle = "", //
    url = "http://www.kurims.kyoto-u.ac.jp/~ooura/gamerf.html", //
    bibkey = "web/Ooura96")
public static double erfc(double x) {
    if(Double.isNaN(x)) {
        return Double.NaN;
    }
    if(Double.isInfinite(x)) {
        // erfc(-inf) = 2, erfc(+inf) = 0
        return (x < 0.0) ? 2 : 0;
    }
    // Map |x| into a bounded variable t in (0, 1] for the rational fit.
    final double t = 3.97886080735226 / (Math.abs(x) + 3.97886080735226);
    final double u = t - 0.5;
    double y = (((//
    ((((((0.00127109764952614092 * u //
        + 1.19314022838340944e-4) * u //
        - 0.003963850973605135) * u //
        - 8.70779635317295828e-4) * u //
        + 0.00773672528313526668) * u //
        + 0.00383335126264887303) * u //
        - 0.0127223813782122755) * u //
        - 0.0133823644533460069) * u //
        + 0.0161315329733252248) * u //
        + 0.0390976845588484035) * u //
        + 0.00249367200053503304;
    y = ((((((((((((y * u //
        - 0.0838864557023001992) * u //
        - 0.119463959964325415) * u //
        + 0.0166207924969367356) * u //
        + 0.357524274449531043) * u //
        + 0.805276408752910567) * u //
        + 1.18902982909273333) * u //
        + 1.37040217682338167) * u //
        + 1.31314653831023098) * u //
        + 1.07925515155856677) * u //
        + 0.774368199119538609) * u //
        + 0.490165080585318424) * u //
        + 0.275374741597376782) //
        * t * FastMath.exp(-x * x);
    // Use the reflection erfc(-x) = 2 - erfc(x) for negative inputs.
    return x < 0 ? 2 - y : y;
}
|
java
|
/**
 * Detaches and closes the metadata cache attached to {@code slot}, if any,
 * then notifies listeners that the slot no longer has a cache. A failure
 * while closing is logged but does not prevent the listener notification.
 *
 * @param slot the slot whose cache should be detached
 */
public void detachMetadataCache(SlotReference slot) {
    MetadataCache oldCache = metadataCacheFiles.remove(slot);
    if (oldCache != null) {
        try {
            oldCache.close();
        } catch (IOException e) {
            logger.error("Problem closing metadata cache", e);
        }
        // Tell listeners the cache for this slot is gone.
        deliverCacheUpdate(slot, null);
    }
}
|
python
|
def _update_settings(self, dialect):
    """Sets the widget settings to those of the chosen dialect"""
    # the first parameter is the dialect itself --> ignore
    # NOTE(review): the slice starts at 2, so the first TWO entries of
    # csv_params are skipped -- confirm the second entry is also
    # non-widget data.
    for parameter in self.csv_params[2:]:
        pname, ptype, plabel, phelp = parameter
        widget = self._widget_from_p(pname, ptype)
        if ptype is types.TupleType:
            # Tuples are handled by the generic-object digest.
            # (types.TupleType implies this module targets Python 2.)
            ptype = types.ObjectType
        digest = Digest(acceptable_types=[ptype])
        if pname == 'self.has_header':
            # has_header is owned by this instance, not by the dialect;
            # leave the widget untouched when it is unset.
            if self.has_header is not None:
                widget.SetValue(digest(self.has_header))
        else:
            value = getattr(dialect, pname)
            widget.SetValue(digest(value))
|
java
|
/**
 * Picks the next namespace to scan: prefer the namespace whose current scan
 * log file was modified most recently; otherwise walk the namespace ids in
 * ascending order after {@code currentNamespaceId}, wrapping to the first.
 * Blocks (sleeping 5s per round) until a namespace is up; returns null only
 * when the datanode is stopping or the scanner thread is interrupted.
 */
private DataBlockScanner getNextNamespaceSliceScanner(int currentNamespaceId) {
    Integer nextNsId = null;
    while ((nextNsId == null) && datanode.shouldRun
        && !blockScannerThread.isInterrupted()) {
        waitForOneNameSpaceUp();
        synchronized (this) {
            if (getNamespaceSetSize() > 0) {
                // Find nextNsId by finding the last modified current log file, if any
                long lastScanTime = -1;
                Iterator<Integer> nsidIterator = namespaceScannerMap.keySet()
                    .iterator();
                while (nsidIterator.hasNext()) {
                    int nsid = nsidIterator.next();
                    // Check the current log file on every volume for this
                    // namespace.
                    for (FSDataset.FSVolume vol : dataset.volumes.getVolumes()) {
                        try {
                            File currFile = DataBlockScanner.getCurrentFile(vol, nsid);
                            if (currFile.exists()) {
                                long lastModified = currFile.lastModified();
                                if (lastScanTime < lastModified) {
                                    lastScanTime = lastModified;
                                    nextNsId = nsid;
                                }
                            }
                        } catch (IOException e) {
                            LOG.warn("Received exception: ", e);
                        }
                    }
                }
                // nextNsId can still be null if no current log is found,
                // find nextNsId sequentially.
                if (nextNsId == null) {
                    try {
                        if (currentNamespaceId == -1) {
                            nextNsId = namespaceScannerMap.firstKey();
                        } else {
                            // Take the next id after the current one,
                            // wrapping around to the first key.
                            nextNsId = namespaceScannerMap.higherKey(currentNamespaceId);
                            if (nextNsId == null) {
                                nextNsId = namespaceScannerMap.firstKey();
                            }
                        }
                    } catch (NoSuchElementException e) {
                        // if firstKey throws an exception
                        continue;
                    }
                }
                if (nextNsId != null) {
                    return getNSScanner(nextNsId);
                }
            }
        }
        LOG.warn("No namespace is up, going to wait");
        try {
            Thread.sleep(5000);
        } catch (InterruptedException ex) {
            LOG.warn("Received exception: " + ex);
            // Re-assert the interrupt so the scanner thread can stop.
            blockScannerThread.interrupt();
            return null;
        }
    }
    return null;
}
|
python
|
def SensorsDataGet(self, sensorIds, parameters):
    """
    Retrieve sensor data for the specified sensors from CommonSense.
    If SensorsDataGet is successful, the result can be obtained by a call to getResponse(), and should be a json string.

    @param sensorIds (list) a list of sensor ids to retrieve the data for
    @param parameters (dictionary) - Dictionary containing the parameters for the api call.
    @return (bool) - Boolean indicating whether SensorsDataGet was successful.
    """
    if parameters is None:
        parameters = {}
    # NOTE(review): mutates the caller-supplied dict by adding the
    # "sensor_id[]" entry -- callers should not reuse the dict expecting
    # it to be unchanged.
    parameters["sensor_id[]"] = sensorIds
    if self.__SenseApiCall__('/sensors/data.json', 'GET', parameters = parameters):
        return True
    else:
        # Record the failure reason for later inspection.
        self.__error__ = "api call unsuccessful"
        return False
|
java
|
/**
 * Marks this robot as dead and removes it from the world. A robot that was
 * never alive (killed at creation) is only logged.
 */
@Override
public void die(String reason) {
    log.info("[die] Robot {} died with reason: {}", serialNumber, reason);
    if (alive) { // if not alive it means it was killed at creation
        alive = false;
        interrupted = true; // to speed up death
        // Remove the robot from the simulated world so it stops acting.
        world.remove(Robot.this);
    }
}
|
java
|
/**
 * Resolves a single property on {@code object} by delegating to the
 * list-based overload with a one-element property path.
 */
public Object resolveProperty(Object object, String propertyName) {
    return resolveProperty(object, Collections.singletonList(propertyName));
}
|
python
|
def select_authors_by_geo(query):
    """Pass exact name (case insensitive) of geography name, return ordered set
    of author ids.
    """
    # Casefold once so each mapping entry is compared cheaply.
    wanted = query.casefold()
    for geo, ids in AUTHOR_GEO.items():
        if geo.casefold() == wanted:
            return set(ids)
    # Implicitly returns None when no geography matches.
|
java
|
/**
 * Creates a substitution-only matcher over the whole {@code sequence},
 * delegating to the range overload with [0, sequence.size()).
 */
public BitapMatcher substitutionOnlyMatcherFirst(int substitutions, final Sequence sequence) {
    return substitutionOnlyMatcherFirst(substitutions, sequence, 0, sequence.size());
}
|
java
|
/**
 * Lexicographic comparison of two points, dimension by dimension.
 *
 * NOTE(review): returns -1 for a null argument, mismatched dimensionality,
 * or mismatched component types, which makes the ordering asymmetric
 * (a.compareTo(b) and b.compareTo(a) can both be -1) and violates the
 * Comparable contract -- confirm callers only rely on equality-with-order
 * before tightening this.
 */
public int compareTo(Point<E> obj) {
    if (obj == null)
        return -1;
    if (dimensions() != obj.dimensions())
        return -1;
    for (int i = 0; i < dimensions(); i++) {
        if (getValue(i).getClass() != obj.getValue(i).getClass())
            return -1;
        if (getValue(i) instanceof Double) {
            // Doubles are ordered via the tolerant helpers rather than ==.
            if (Utils.sm((Double) getValue(i), (Double) obj.getValue(i))) {
                return -1;
            } else if (Utils.gr((Double) getValue(i), (Double) obj.getValue(i))) {
                return 1;
            }
        } else {
            // Non-double components fall back to comparing string forms.
            int r = getValue(i).toString().compareTo(obj.getValue(i).toString());
            if (r != 0) {
                return r;
            }
        }
    }
    return 0;
}
|
java
|
/**
 * Matches {@code str} against {@code text} starting at {@code pos},
 * ignoring bidi marks in {@code str} and treating a run of pattern white
 * space as a single separator.
 *
 * @return the position after the last matched character, or a negative
 *         value once any character fails to match (the loop stops then)
 */
static final int match(String text, int pos, String str) {
    for (int i = 0; i < str.length() && pos >= 0;) {
        int ch = UTF16.charAt(str, i);
        i += UTF16.getCharCount(ch); // advance by one code point
        if (isBidiMark(ch)) {
            // Bidi marks are invisible formatting characters; skip them.
            continue;
        }
        pos = match(text, pos, ch);
        if (PatternProps.isWhiteSpace(ch)) {
            // Collapse any following pattern white space in the pattern.
            i = skipPatternWhiteSpace(str, i);
        }
    }
    return pos;
}
|
python
|
def _add_dophot_matches_to_database(
        self,
        dophotMatches,
        exposureIds):
    """*add dophot matches to database*

    **Key Arguments:**
        - ``dophotMatches`` -- a list of lists of dophot matches
        - ``exposureIds`` -- the ATLAS exposure IDs these matches were found in

    **Return:**
        - None
    """
    self.log.info(
        'starting the ``_add_dophot_matches_to_database`` method')
    # Flatten the per-exposure match lists into a single insert batch.
    insertList = []
    for d in dophotMatches:
        insertList += d
    dbSettings = self.settings["database settings"]["atlasMovers"]
    # Upsert the photometry rows, keyed on (expname, idx).
    insert_list_of_dictionaries_into_database_tables(
        dbConn=self.atlasMoversDBConn,
        log=self.log,
        dictList=insertList,
        dbTableName="dophot_photometry",
        uniqueKeyList=["expname", "idx"],
        dateModified=True,
        batchSize=10000,
        replace=True,
        dbSettings=dbSettings
    )
    # Build a quoted, comma-separated exposure-name list for the IN clause.
    # NOTE(review): names are interpolated directly into the SQL -- safe
    # only while expname values are trusted internal identifiers.
    exps = []
    exps[:] = [e[0] for e in exposureIds]
    exps = ('","').join(exps)
    # Mark these exposures matched (1); then flag as 2 any previously
    # matched exposure with no surviving rows in dophot_photometry.
    sqlQuery = """
update atlas_exposures set dophot_match = 1 where dophot_match = 0 and expname in ("%(exps)s");
update atlas_exposures set dophot_match = 2 where dophot_match = 1 and expname not in (select distinct expname from dophot_photometry);""" % locals(
    )
    writequery(
        log=self.log,
        sqlQuery=sqlQuery,
        dbConn=self.atlasMoversDBConn
    )
    self.log.info(
        'completed the ``_add_dophot_matches_to_database`` method')
    return None
|
python
|
def prepare_request(
    url: Union[str, methods],
    data: Optional[MutableMapping],
    headers: Optional[MutableMapping],
    global_headers: MutableMapping,
    token: str,
    as_json: Optional[bool] = None,
) -> Tuple[str, Union[str, MutableMapping], MutableMapping]:
    """
    Prepare outgoing request

    Create url, headers, add token to the body and if needed json encode it

    Args:
        url: :class:`slack.methods` item or string of url
        data: Outgoing data
        headers: Custom headers
        global_headers: Global headers
        token: Slack API token
        as_json: Post JSON to the slack API

    Returns:
        :py:class:`tuple` (url, body, headers)
    """
    if isinstance(url, methods):
        # Enum members carry the real URL at value[0] and a JSON-posting
        # default at value[3]; an explicit as_json argument wins.
        as_json = as_json or url.value[3]
        real_url = url.value[0]
    else:
        real_url = url
        # Plain string URLs always use form encoding.
        as_json = False
    # Merge custom headers over the global ones (custom wins on conflict).
    if not headers:
        headers = {**global_headers}
    else:
        headers = {**global_headers, **headers}
    payload: Optional[Union[str, MutableMapping]] = None
    if real_url.startswith(HOOK_URL) or (real_url.startswith(ROOT_URL) and as_json):
        # Webhooks and JSON API calls post a JSON body.
        payload, headers = _prepare_json_request(data, token, headers)
    elif real_url.startswith(ROOT_URL) and not as_json:
        payload = _prepare_form_encoded_request(data, token)
    else:
        # Bare method path: prefix the API root and form-encode the body.
        real_url = ROOT_URL + real_url
        payload = _prepare_form_encoded_request(data, token)
    return real_url, payload, headers
|
python
|
def publish(self, channel, message, pipeline=False):
    """Post a message to a given channel.

    Args:
        channel (str): Channel where the message will be published
        message (str): Message to publish
        pipeline (bool): True, start a transaction block. Default false.
    """
    # Route through the transaction pipeline or straight to the database.
    target = self._pipeline if pipeline else self._db
    target.publish(channel, message)
|
java
|
/**
 * Builds a SynchronizedGeneratorIdentity from a ZooKeeper quorum.
 *
 * @param quorum ZooKeeper connection string
 * @param znode znode under which the cluster ID is stored
 * @param claimDuration claim lifetime in milliseconds;
 *        NOTE(review): a null value is not rejected here -- it only fails
 *        (NPE on unboxing) when the supplier is first invoked
 * @return the identity bound to the resolved cluster id
 * @throws IOException if the ZooKeeper connection cannot be established
 */
public static SynchronizedGeneratorIdentity basedOn(String quorum,
                                                    String znode,
                                                    Long claimDuration)
        throws IOException {
    ZooKeeperConnection zooKeeperConnection = new ZooKeeperConnection(quorum);
    int clusterId = ClusterID.get(zooKeeperConnection.getActiveConnection(), znode);
    // Re-evaluated on each claim; always yields the same fixed duration.
    Supplier<Duration> durationSupplier = () -> Duration.ofMillis(claimDuration);
    return new SynchronizedGeneratorIdentity(zooKeeperConnection, znode, clusterId, durationSupplier);
}
|
java
|
/**
 * Tells whether {@code file1} lies under (or equals) {@code file2}, based
 * on canonical paths. Trailing separators are appended so "/a/bc" is not
 * mistaken for a child of "/a/b". Canonicalisation failures are reported
 * to stderr and treated as "not a subdirectory".
 */
public boolean isSubDirectory(File file1, File file2) {
    try {
        String childPath = file1.getCanonicalPath() + File.separator;
        String parentPath = file2.getCanonicalPath() + File.separator;
        return childPath.startsWith(parentPath);
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
|
java
|
/**
 * Reads the key component at {@code index} as a double.
 *
 * @param index zero-based position within the composite key
 * @return the key component decoded as a double
 * @throws IOException if {@code index} is past the last key component
 */
public double getKeyAsDouble(int index) throws IOException {
    if (index >= structure.keySizes.size()) {
        throw new IOException("Index " + index + " is out of range.");
    }
    // Decode the bytes at this component's offset within the raw key.
    return Bytes.toDouble(key, structure.keyByteOffsets.get(index));
}
|
python
|
def __on_disconnect(self, client, userdata, result_code):
    # pylint: disable=W0613
    """
    Client has been disconnected from the server

    :param client: Client that received the message
    :param userdata: User data (unused)
    :param result_code: Disconnection reason (0: expected, 1: error)
    """
    if result_code:
        # rc != 0: unexpected disconnection
        _logger.error(
            "Unexpected disconnection from the MQTT server: %s (%d)",
            paho.connack_string(result_code),
            result_code,
        )
        # Try to reconnect
        # (restart the reconnection timer with a short delay)
        self.__stop_timer()
        self.__start_timer(2)
    # Notify the caller, if any
    if self.on_disconnect is not None:
        try:
            self.on_disconnect(self, result_code)
        except Exception as ex:
            # A faulty listener must not break the MQTT client loop.
            _logger.exception("Error notifying MQTT listener: %s", ex)
|
java
|
/**
 * Reconstructs the full request URL as scheme://host:port/uri using the
 * configured target host and port (not the inbound Host header).
 */
@Override
public StringBuffer getRequestURL() {
    StringBuffer sb = new StringBuffer(getScheme());
    sb.append("://");
    // Note: following code required if IPv6 IP host does not contain brackets
    // String host = getTargetHost();
    // if (-1 != host.indexOf(":")) {
    //     // wrap brackets around the IPv6 IP address
    //     host = "[" + host + "]";
    // }
    // sb.append(host);
    sb.append(getTargetHost());
    sb.append(':');
    sb.append(getTargetPort());
    sb.append(getRequestURI());
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, "getRequestURL() returning " + sb.toString());
    }
    return sb;
}
|
java
|
/**
 * Writes one KML Placemark for the geometry in the current result-set row.
 * The geometry must carry SRID 4326 (WGS84), the only projection KML
 * supports.
 *
 * @throws SQLException if the SRID is missing (0) or not 4326
 * @throws XMLStreamException on XML writer failure
 */
public void writePlacemark(XMLStreamWriter xmlOut, ResultSet rs, int geoFieldIndex, String spatialFieldName) throws XMLStreamException, SQLException {
    xmlOut.writeStartElement("Placemark");
    if (columnCount > 1) {
        // Export the non-geometry columns as KML ExtendedData.
        writeExtendedData(xmlOut, rs);
    }
    StringBuilder sb = new StringBuilder();
    Geometry geom = (Geometry) rs.getObject(geoFieldIndex);
    int inputSRID = geom.getSRID();
    if (inputSRID == 0) {
        throw new SQLException("A coordinate reference system must be set to save the KML file");
    } else if (inputSRID != 4326) {
        throw new SQLException("The kml format supports only the WGS84 projection. \n"
            + "Please use ST_Transform(" + spatialFieldName + "," + inputSRID + ")");
    }
    KMLGeometry.toKMLGeometry(geom, ExtrudeMode.NONE, AltitudeMode.NONE, sb);
    //Write geometry
    xmlOut.writeCharacters(sb.toString());
    xmlOut.writeEndElement();//Write Placemark
}
|
java
|
/**
 * Converts a {@link URL} to a {@link URI}, turning the checked
 * {@link URISyntaxException} into an unchecked {@link IllegalArgumentException}.
 *
 * @param url the URL to convert
 * @return the equivalent URI
 * @throws IllegalArgumentException if the URL violates URI syntax rules
 */
public static URI uriFromUrl(URL url) throws IllegalArgumentException {
    try {
        return url.toURI();
    } catch (URISyntaxException badSyntax) {
        throw new IllegalArgumentException(badSyntax);
    }
}
|
java
|
/**
 * Forwards a remote action to the underlying remote table.
 * Calls are serialized on {@code m_objSync} so only one remote invocation
 * is in flight at a time.
 *
 * @param strCommand the command to execute remotely
 * @param properties additional properties passed along with the command
 * @return whatever the remote table returns for this command
 * @throws DBException on a database error
 * @throws RemoteException on a remote-communication failure
 */
public Object doRemoteAction(String strCommand, Map<String, Object> properties) throws DBException, RemoteException
{
    synchronized(m_objSync)
    {
        return m_tableRemote.doRemoteAction(strCommand, properties);
    }
}
|
python
|
def OnMouseMotion(self, event):
    """Mouse motion event handler"""
    grid = self.grid
    # Translate the scrolled event position into grid cell coordinates.
    pos_x, pos_y = grid.CalcUnscrolledPosition(event.GetPosition())
    row, col = grid.YToRow(pos_y), grid.XToCol(pos_x)
    tab = grid.current_table
    # If the hovered cell belongs to a merge area, report its top-left cell.
    merge_area = self.grid.code_array.cell_attributes[(row, col, tab)]["merge_area"]
    if merge_area is not None:
        row, col = merge_area[0], merge_area[1]
    grid.actions.on_mouse_over((row, col, tab))
    event.Skip()
|
java
|
/**
 * Loads an image from file and converts it to the requested image type
 * if it is not already of that type.
 *
 * @param file the image file to load
 * @param imageType the desired {@link BufferedImage} type constant
 * @return the loaded image, converted if necessary
 */
public static BufferedImage loadImage(File file, int imageType){
    BufferedImage loaded = loadImage(file);
    return loaded.getType() == imageType
            ? loaded
            : BufferedImageFactory.get(loaded, imageType);
}
|
python
|
def _process_status(self, status):
    """ Process latest status update. """
    # Remember the screen id reported by the device (None when absent).
    self._screen_id = status.get(ATTR_SCREEN_ID)
    # Wake up anyone waiting on a status refresh.
    self.status_update_event.set()
|
java
|
/**
 * Appends the slice {@code [startOffset, pastEnd)} of the input buffer to the
 * branch buffer. When {@code mConvertLFs} is enabled, line feeds are
 * normalized while copying: a lone CR and a CR+LF pair each become a single
 * '\n'; otherwise the slice is copied verbatim. A CR at the very end of the
 * slice sets {@code mGotCR} so a following LF in the next slice is skipped.
 *
 * @param startOffset index of the first character to copy (inclusive)
 * @param pastEnd index one past the last character to copy (exclusive)
 */
private void appendBranched(int startOffset, int pastEnd) {
    // Main tricky thing here is just replacing of linefeeds...
    if (mConvertLFs) {
        char[] inBuf = mBuffer;
        /* this will also unshare() and ensure there's room for at
         * least one more char
         */
        char[] outBuf = mBranchBuffer.getCurrentSegment();
        int outPtr = mBranchBuffer.getCurrentSegmentSize();
        // Pending \n to skip? (previous slice ended with a bare CR)
        if (mGotCR) {
            if (inBuf[startOffset] == '\n') {
                ++startOffset;
            }
        }
        while (startOffset < pastEnd) {
            char c = inBuf[startOffset++];
            if (c == '\r') {
                if (startOffset < pastEnd) {
                    // Collapse a CR+LF pair into one '\n'.
                    if (inBuf[startOffset] == '\n') {
                        ++startOffset;
                    }
                } else {
                    // CR is the last char; remember it so the next slice's
                    // leading LF (if any) gets skipped.
                    mGotCR = true;
                }
                c = '\n';
            }
            // Ok, let's add char to output:
            outBuf[outPtr++] = c;
            // Need more room?
            if (outPtr >= outBuf.length) {
                outBuf = mBranchBuffer.finishCurrentSegment();
                outPtr = 0;
            }
        }
        mBranchBuffer.setCurrentLength(outPtr);
    } else {
        mBranchBuffer.append(mBuffer, startOffset, pastEnd-startOffset);
    }
}
|
java
|
/**
 * Lifts an {@code AnyM} value into an {@code EitherT} transformer, wrapping
 * each contained value as a right-hand (success) {@code Either}.
 *
 * @param anyM the monadic value to lift
 * @return an {@code EitherT} over the given monad witness type
 */
public static <W extends WitnessType<W>,ST,A> EitherT<W,ST,A> fromAnyM(final AnyM<W,A> anyM) {
    return of(anyM.map(Either::right));
}
|
java
|
/**
 * Stores the (possibly rewritten) tag id on the given HTML state.
 *
 * @param state the tag state receiving the id
 * @param tagId the raw tag id supplied by the page author
 * @throws JspException if id generation fails
 */
protected final void applyTagId(AbstractHtmlState state, String tagId)
        throws JspException {
    state.id = generateTagId(tagId);
}
|
python
|
def is_out_of_range(brain_or_object, result=_marker):
    """Checks if the result for the analysis passed in is out of range and/or
    out of shoulders range.

             min                                                   max
             warn            min                   max             warn
    .........|---------------|=====================|---------------|.........
    ----- out-of-range -----><----- in-range ------><----- out-of-range -----
             <-- shoulder --><----- in-range ------><-- shoulder -->

    :param brain_or_object: A single catalog brain or content object
    :param result: Tentative result. If ``_marker``, use the analysis result
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Tuple of two elements. The first value is `True` if the result is
        out of range and `False` if it is in range. The second value is `True`
        if the result is out of shoulder range and `False` if it is in
        shoulder range
    :rtype: (bool, bool)
    """
    analysis = api.get_object(brain_or_object)
    # Only routine/reference analyses carry result ranges.
    if not IAnalysis.providedBy(analysis) and \
            not IReferenceAnalysis.providedBy(analysis):
        api.fail("{} is not supported. Needs to be IAnalysis or "
                 "IReferenceAnalysis".format(repr(analysis)))
    if result is _marker:
        result = api.safe_getattr(analysis, "getResult", None)
    if not api.is_floatable(result):
        # Result is empty/None or not a valid number
        return False, False
    result = api.to_float(result)
    # Note that routine analyses, duplicates and reference analyses all
    # implement the function getResultsRange:
    # - For routine analyses, the function returns the valid range based on
    #   the specs assigned during the creation process.
    # - For duplicates, the valid range is the result of the analysis the
    #   duplicate was generated from +/- the duplicate variation.
    # - For reference analyses, getResultsRange returns the valid range as
    #   indicated in the Reference Sample from which the analysis was created.
    result_range = api.safe_getattr(analysis, "getResultsRange", None)
    if not result_range:
        # No result range defined or the passed in object does not suit
        return False, False
    # Maybe there is a custom adapter
    adapters = getAdapters((analysis,), IResultOutOfRange)
    for name, adapter in adapters:
        ret = adapter(result=result, specification=result_range)
        if not ret or not ret.get('out_of_range', False):
            continue
        if not ret.get('acceptable', True):
            # Out of range + out of shoulders
            return True, True
        # Out of range, but in shoulders
        return True, False
    result_range = ResultsRangeDict(result_range)
    # The assignment of result as default fallback for min and max guarantees
    # the result will be in range also if no min/max values are defined
    specs_min = api.to_float(result_range.min, result)
    specs_max = api.to_float(result_range.max, result)
    in_range = False
    # "geq" makes the lower bound inclusive; any other operator is strict.
    min_operator = result_range.min_operator
    if min_operator == "geq":
        in_range = result >= specs_min
    else:
        in_range = result > specs_min
    # "leq" makes the upper bound inclusive; any other operator is strict.
    max_operator = result_range.max_operator
    if in_range:
        if max_operator == "leq":
            in_range = result <= specs_max
        else:
            in_range = result < specs_max
    # If in range, no need to check shoulders
    if in_range:
        return False, False
    # Out of range, check shoulders. If no explicit warn_min or warn_max have
    # been defined, no shoulders must be considered for this analysis. Thus, use
    # specs' min and max as default fallback values
    warn_min = api.to_float(result_range.warn_min, specs_min)
    warn_max = api.to_float(result_range.warn_max, specs_max)
    in_shoulder = warn_min <= result <= warn_max
    return True, not in_shoulder
|
python
|
def _validate_and_parse_course_key(self, course_key):
    """
    Deserialize ``course_key`` into a CourseKey, raising a ValidationError
    when the string is not a valid key.
    """
    try:
        parsed = CourseKey.from_string(course_key)
    except InvalidKeyError:
        raise ValidationError(_("Invalid course key: {}").format(course_key))
    return parsed
|
java
|
/**
 * Marshalls the given {@code EnvironmentPropertyUpdates} instance onto the
 * request via the protocol marshaller.
 *
 * @param environmentPropertyUpdates the model object to marshall; must not be null
 * @param protocolMarshaller the protocol-level marshaller receiving the fields
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(EnvironmentPropertyUpdates environmentPropertyUpdates, ProtocolMarshaller protocolMarshaller) {
    if (environmentPropertyUpdates == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each bound field of the model.
        protocolMarshaller.marshall(environmentPropertyUpdates.getPropertyGroups(), PROPERTYGROUPS_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def _handle_tag_definefontname(self):
    """Handle the DefineFontName tag."""
    # Layout: font id (ui16) followed by two length-prefixed strings.
    obj = _make_object("DefineFontName")
    obj.FontId = unpack_ui16(self._src)
    for attr in ("FontName", "FontCopyright"):
        setattr(obj, attr, self._get_struct_string())
    return obj
|
java
|
/**
 * Serializes the given XML string via the stream-based overload and returns
 * the result as a string.
 *
 * @param xmlString the XML document to serialize
 * @return the serialized output
 */
public static String serialize(String xmlString) {
    StringWriter buffer = new StringWriter();
    serialize(asStreamSource(xmlString), buffer);
    return buffer.toString();
}
|
java
|
/**
 * Generates the class-use and package-use pages for all included elements.
 *
 * @param configuration the active doclet configuration
 * @param classtree the class hierarchy to map
 * @throws DocFileIOException on a file-writing error
 */
public static void generate(ConfigurationImpl configuration, ClassTree classtree) throws DocFileIOException {
    ClassUseMapper mapper = new ClassUseMapper(configuration, classtree);
    boolean nodeprecated = configuration.nodeprecated;
    for (TypeElement aClass : configuration.getIncludedTypeElements()) {
        // With -nodeprecated, skip classes whose *package* is deprecated. A
        // deprecated class in a live package still gets a class-use page,
        // since that package's package-use page may link to it.
        boolean pkgDeprecated =
                configuration.utils.isDeprecated(configuration.utils.containingPackage(aClass));
        if (!(nodeprecated && pkgDeprecated)) {
            ClassUseWriter.generate(configuration, mapper, aClass);
        }
    }
    for (PackageElement pkg : configuration.packages) {
        // With -nodeprecated, deprecated packages get no package-use page.
        if (!(nodeprecated && configuration.utils.isDeprecated(pkg))) {
            PackageUseWriter.generate(configuration, mapper, pkg);
        }
    }
}
|
java
|
/**
 * Divides this vector component-wise by {@code v}, storing the result in
 * {@code dest}.
 *
 * @param v the divisor vector
 * @param dest the vector receiving the result
 * @return dest
 */
public Vector3d div(Vector3dc v, Vector3d dest) {
    dest.x = this.x / v.x();
    dest.y = this.y / v.y();
    dest.z = this.z / v.z();
    return dest;
}
|
java
|
/**
 * Resets the per-method bookkeeping (collection tracking, scope ends and
 * source-line annotations) before scanning the method's bytecode, and clears
 * it afterwards so no state leaks between methods.
 *
 * @param obj the Code object of the method being visited
 */
@Override
public void visitCode(final Code obj) {
    try {
        localCollections = new HashMap<>();
        localScopeEnds = new HashMap<>();
        localSourceLineAnnotations = new HashMap<>();
        stack.resetForMethodEntry(this);
        super.visitCode(obj);
    } finally {
        // Always release the maps, even if scanning throws.
        localCollections = null;
        localScopeEnds = null;
        localSourceLineAnnotations = null;
    }
}
|
java
|
/**
 * Converts a generated (WSDL) ObjectFields instance to the server-side
 * {@code ObjectFields} type, unwrapping each nillable JAXB element via
 * {@code getValue()} and parsing the date fields.
 *
 * @param source the generated object fields, may be null
 * @return the converted object fields, or null when {@code source} is null
 */
public static ObjectFields convertGenObjectFieldsToObjectFields(org.fcrepo.server.types.gen.ObjectFields source) {
    if (source == null) {
        return null;
    }
    ObjectFields result = new ObjectFields();
    // Scalar fields: unwrap the JAXB element wrapper (null stays null).
    result.setPid(source.getPid() != null ? source.getPid().getValue() : null);
    result.setLabel(source.getLabel() != null ? source.getLabel().getValue() : null);
    result.setState(source.getState() != null ? source.getState().getValue() : null);
    result.setOwnerId(source.getOwnerId() != null ? source.getOwnerId().getValue() : null);
    // Date fields additionally go through the string-to-date parser.
    result.setCDate(source.getCDate() != null ? DateUtility.convertStringToDate(source.getCDate().getValue()) : null);
    result.setMDate(source.getMDate() != null ? DateUtility.convertStringToDate(source.getMDate().getValue()) : null);
    result.setDCMDate(source.getDcmDate() != null ? DateUtility.convertStringToDate(source.getDcmDate().getValue()) : null);
    // Repeatable Dublin Core fields: copy each list wholesale.
    result.titles().addAll(convertStringArray(source.getTitle()));
    result.subjects().addAll(convertStringArray(source.getSubject()));
    result.descriptions()
            .addAll(convertStringArray(source.getDescription()));
    result.publishers().addAll(convertStringArray(source.getPublisher()));
    result.contributors()
            .addAll(convertStringArray(source.getContributor()));
    result.dates().addAll(convertStringArray(source.getDate()));
    result.types().addAll(convertStringArray(source.getType()));
    result.formats().addAll(convertStringArray(source.getFormat()));
    result.identifiers().addAll(convertStringArray(source.getIdentifier()));
    result.sources().addAll(convertStringArray(source.getSource()));
    result.languages().addAll(convertStringArray(source.getLanguage()));
    result.relations().addAll(convertStringArray(source.getRelation()));
    result.coverages().addAll(convertStringArray(source.getCoverage()));
    result.rights().addAll(convertStringArray(source.getRights()));
    return result;
}
|
python
|
def _handle_qos1_message_flow(self, app_message):
    """
    Handle QOS_1 application message acknowledgment.

    For incoming messages, this method queues the message for delivery and
    replies with PUBACK.
    For outgoing messages, this method sends PUBLISH (marking retries with
    the DUP flag) and waits for the corresponding PUBACK.

    :param app_message: the QOS_1 application message to process
    :return: None
    :raises HBMQTTException: if the message was already acknowledged
    """
    assert app_message.qos == QOS_1
    if app_message.puback_packet:
        raise HBMQTTException("Message '%d' has already been acknowledged" % app_message.packet_id)
    if app_message.direction == OUTGOING:
        if app_message.packet_id not in self.session.inflight_out:
            # Store message in session
            self.session.inflight_out[app_message.packet_id] = app_message
        if app_message.publish_packet is not None:
            # A Publish packet has already been sent, this is a retry
            publish_packet = app_message.build_publish_packet(dup=True)
        else:
            publish_packet = app_message.build_publish_packet()
        # Send PUBLISH packet
        yield from self._send_packet(publish_packet)
        app_message.publish_packet = publish_packet
        # Register a waiter BEFORE yielding so the PUBACK handler can find it.
        waiter = asyncio.Future(loop=self._loop)
        self._puback_waiters[app_message.packet_id] = waiter
        yield from waiter
        del self._puback_waiters[app_message.packet_id]
        app_message.puback_packet = waiter.result()
        # Discard inflight message
        del self.session.inflight_out[app_message.packet_id]
    elif app_message.direction == INCOMING:
        # Initiate delivery
        self.logger.debug("Add message to delivery")
        yield from self.session.delivered_message_queue.put(app_message)
        # Send PUBACK
        puback = PubackPacket.build(app_message.packet_id)
        yield from self._send_packet(puback)
        app_message.puback_packet = puback
|
java
|
/**
 * Returns the document parsed so far and advances the reader to the next
 * one; {@code null} signals that the stream is exhausted.
 *
 * @return the next BioC document, or null at end of input
 * @throws XMLStreamException on a parsing error
 */
public BioCDocument readDocument()
    throws XMLStreamException {
    BioCDocument current = reader.document;
    if (current == null) {
        return null;
    }
    reader.read();
    return current;
}
|
python
|
def comment(request, template="generic/comments.html", extra_context=None):
    """
    Handle a ``ThreadedCommentForm`` submission and redirect back to its
    related object.

    :param request: the current HttpRequest
    :param template: template used to re-render the stand-alone comment form
        when validation fails on a non-AJAX request
    :param extra_context: optional extra template context dict
    """
    response = initial_validation(request, "comment")
    if isinstance(response, HttpResponse):
        # Validation short-circuited and produced a response directly.
        return response
    obj, post_data = response
    form_class = import_dotted_path(settings.COMMENT_FORM_CLASS)
    form = form_class(request, obj, post_data)
    if form.is_valid():
        url = obj.get_absolute_url()
        if is_spam(request, form, url):
            # Pretend success for spam: redirect without saving.
            return redirect(url)
        comment = form.save(request)
        response = redirect(add_cache_bypass(comment.get_absolute_url()))
        # Store commenter's details in a cookie for 90 days.
        for field in ThreadedCommentForm.cookie_fields:
            cookie_name = ThreadedCommentForm.cookie_prefix + field
            cookie_value = post_data.get(field, "")
            set_cookie(response, cookie_name, cookie_value)
        return response
    elif request.is_ajax() and form.errors:
        # AJAX submissions get the validation errors back as JSON.
        return HttpResponse(dumps({"errors": form.errors}))
    # Show errors with stand-alone comment form.
    context = {"obj": obj, "posted_comment_form": form}
    context.update(extra_context or {})
    return TemplateResponse(request, template, context)
|
java
|
/**
 * Encodes every family name in the family map with the configured charset.
 *
 * @return one byte array per family name, in the map's iteration order
 */
byte[][] getFamilyKeys() {
    Charset cs = Charset.forName(charset);
    byte[][] keys = new byte[this.familyMap.size()][];
    int idx = 0;
    for (String familyName : this.familyMap.keySet()) {
        keys[idx] = familyName.getBytes(cs);
        idx++;
    }
    return keys;
}
|
java
|
/**
 * Re-initializes this publish job after (de)serialization: rebuilds the CMS
 * context for the stored user, project and locale, and revives the given
 * publish list against the fresh CmsObject.
 *
 * @param adminCms an admin CmsObject used to read the user/project and clone the context
 * @param publishList the publish list to revive for this job
 * @throws CmsException if reading the user/project or context initialization fails
 */
public void revive(CmsObject adminCms, CmsPublishList publishList) throws CmsException {
    CmsContextInfo context = new CmsContextInfo(adminCms.readUser(m_userId).getName());
    CmsProject project = adminCms.readProject(m_projectId);
    context.setLocale(m_locale);
    m_cms = OpenCms.initCmsObject(adminCms, context);
    m_cms.getRequestContext().setCurrentProject(project);
    m_publishList = publishList;
    m_publishList.revive(m_cms);
}
|
python
|
def apache_md5crypt(password, salt, magic='$apr1$'):
    """
    Calculates the Apache-style MD5 hash ($apr1$) of a password.

    Ported to work on both Python 2 and 3: the original indexed `bytes`
    objects (which yields `int` on Python 3) and fed `str` to `md5.update`,
    raising TypeError on Python 3. The algorithm itself is unchanged.

    :param password: the plaintext password (str)
    :param salt: the salt string (str)
    :param magic: the hash prefix, '$apr1$' for Apache htpasswd
    :returns: the full crypt string, e.g. '$apr1$<salt>$<22-char digest>'
    """
    password = password.encode('utf-8')
    salt = salt.encode('utf-8')
    magic_bytes = magic.encode('utf-8') if isinstance(magic, str) else magic
    m = md5()
    m.update(password + magic_bytes + salt)
    mixin = md5(password + salt + password).digest()
    for i in range(len(password)):
        # Slice (not index) so we always feed a one-byte bytes object.
        m.update(mixin[i % 16:i % 16 + 1])
    i = len(password)
    while i:
        if i & 1:
            m.update(b'\x00')
        else:
            m.update(password[0:1])
        i >>= 1
    final = m.digest()
    # 1000 stretching rounds, mixing password/salt/previous digest per spec.
    for i in range(1000):
        m2 = md5()
        if i & 1:
            m2.update(password)
        else:
            m2.update(final)
        if i % 3:
            m2.update(salt)
        if i % 7:
            m2.update(password)
        if i & 1:
            m2.update(final)
        else:
            m2.update(password)
        final = m2.digest()
    itoa64 = './0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
    rearranged = []
    # Custom base64-like encoding over a permuted byte order.
    seq = ((0, 6, 12), (1, 7, 13), (2, 8, 14), (3, 9, 15), (4, 10, 5))
    for a, b, c in seq:
        v = final[a] << 16 | final[b] << 8 | final[c]
        for _ in range(4):
            rearranged.append(itoa64[v & 0x3f])
            v >>= 6
    v = final[11]
    for _ in range(2):
        rearranged.append(itoa64[v & 0x3f])
        v >>= 6
    return (magic_bytes + salt).decode('utf-8') + '$' + ''.join(rearranged)
|
java
|
/**
 * Sets the profiler used to record metrics for this query.
 *
 * @param profiler the profiler to attach; must not be null
 * @return this query builder, for chaining
 */
public Q profiler(QueryProfiler profiler) {
    Preconditions.checkNotNull(profiler);
    this.profiler=profiler;
    return getThis();
}
|
python
|
def dock_help():
    """Help message for Dock Widget.

    .. versionadded:: 3.2.1

    :returns: A message object containing helpful information.
    :rtype: messaging.message.Message
    """
    message = m.Message()
    # Assemble the help page: branding first, then heading, then body.
    for part in (m.Brand(), heading(), content()):
        message.add(part)
    return message
|
python
|
def add_job(self, job, merged=False, widened=False):
    """
    Append a new job to this JobInfo node.

    :param job: The new job to append.
    :param bool merged: Whether it is a merged job or not.
    :param bool widened: Whether it is a widened job or not.
    """
    # Label the job by how it was produced; plain jobs get an empty label.
    # 'merged' wins when both flags are set, matching the original ordering.
    if merged:
        kind = 'merged'
    elif widened:
        kind = 'widened'
    else:
        kind = ''
    self.jobs.append((job, kind))
|
java
|
/**
 * Marshalls the given {@code DvbTdtSettings} instance onto the request via
 * the protocol marshaller.
 *
 * @param dvbTdtSettings the model object to marshall; must not be null
 * @param protocolMarshaller the protocol-level marshaller receiving the fields
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(DvbTdtSettings dvbTdtSettings, ProtocolMarshaller protocolMarshaller) {
    if (dvbTdtSettings == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each bound field of the model.
        protocolMarshaller.marshall(dvbTdtSettings.getTdtInterval(), TDTINTERVAL_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def parse_field(self, field_data, index=0):
    """Parse a field spec (str or dict) and fill in missing options.

    :param field_data: field name string or a dict of field options
    :param index: ordering index, stored under '__index__'
    :returns: dict with at least 'field', 'label' and 'format' keys, plus
        any defaults declared in ``self.fields_options``
    :raises TypeError: if ``field_data`` is neither str nor dict
    """
    field = {
        '__index__': index,
    }
    if isinstance(field_data, str):
        field.update(self.parse_string_field(field_data))
    elif isinstance(field_data, dict):
        field.update(field_data)
    else:
        raise TypeError('Expected a str or dict get {}'.format(type(field_data)))
    if 'field' not in field:
        field['field'] = None
    if 'label' not in field and field['field']:
        # Prefer the model field's verbose_name; fall back to the raw name.
        try:
            field['label'] = self.object._meta.get_field(field['field']).verbose_name.capitalize()
        except Exception:
            # NOTE(review): strips underscores entirely ('my_field' ->
            # 'Myfield'); possibly intended replace('_', ' ') — confirm.
            field['label'] = field['field'].replace('_', '').capitalize()
    elif 'label' not in field:
        field['label'] = ''
    if 'format' not in field:
        field['format'] = '{0}'
    # Set default options
    for name, options in self.fields_options.items():
        if 'default' in options and name not in field:
            field[name] = options['default']
    return field
|
python
|
def subject(self) -> Optional[UnstructuredHeader]:
    """The ``Subject`` header."""
    try:
        headers = self[b'subject']
        return cast(UnstructuredHeader, headers[0])
    except (KeyError, IndexError):
        # Header absent (KeyError) or present but empty (IndexError).
        return None
|
java
|
/**
 * Adds a dependency observation with the given count to the model.
 * If the grammar is not directional, the dependency is first normalized to
 * a canonical (non-left-headed) form so mirrored dependencies share counts.
 *
 * @param dependency the head/argument dependency observed
 * @param count the (possibly fractional) number of observations to add
 */
public void addRule(IntDependency dependency, double count) {
    if ( ! directional) {
        dependency = new IntDependency(dependency.head, dependency.arg, false, dependency.distance);
    }
    if (verbose) System.err.println("Adding dep " + dependency);
    // coreDependencies.incrementCount(dependency, count);
    /*new IntDependency(dependency.head.word,
                      dependency.head.tag,
                      dependency.arg.word,
                      dependency.arg.tag,
                      dependency.leftHeaded,
                      dependency.distance), count);
    */
    expandDependency(dependency, count);
    // System.err.println("stopCounter: " + stopCounter);
    // System.err.println("argCounter: " + argCounter);
}
|
python
|
def betas_for_cov(self, covariate = '0'):
    """Return the beta values (i.e. IRF) associated with a specific covariate.

    :param covariate: name of covariate.
    :type covariate: string
    """
    # Position of this covariate among the design-matrix columns.
    index = list(self.covariates.keys()).index(covariate)
    start = int(index * self.deconvolution_interval_size)
    stop = int((index + 1) * self.deconvolution_interval_size)
    return self.betas[start:stop]
|
python
|
async def update(self, _id=None, **new_data):
    """Updates field values of a single entry.

    Accepts the id of a single entry and field/value pairs to set.

    update(id, **kwargs) => {"success":200, "reason":"Updated"} (if success)
    update(id, **kwargs) => {"error":400, "reason":"Missed required fields"} (if error)

    :param _id: id of the document to update
    :param new_data: field names and their new values
    :returns: dict with a status code, a reason, and (on success) the
        updated document's fields
    """
    if not _id or not new_data:
        return {"error":400,
                "reason":"Missed required fields"}
    # Make sure the document exists before attempting any updates.
    document = await self.collection.find_one({"id":_id})
    if not document:
        return {"error":404,
                "reason":"Not found"}
    # Apply each field change as its own $set operation.
    for key in new_data:
        await self.collection.find_one_and_update(
            {"id": _id},
            {"$set": {key: new_data[key]}}
        )
    # Re-read so the response reflects the final stored state.
    updated = await self.collection.find_one({"id":_id})
    return {"success":200, "reason": "Updated", **updated}
|
java
|
/**
 * Sets the mobile application segment used for targeting.
 *
 * @param mobileApplicationSegment the mobile application targeting to apply
 */
public void setMobileApplicationSegment(com.google.api.ads.admanager.axis.v201902.MobileApplicationTargeting mobileApplicationSegment) {
    this.mobileApplicationSegment = mobileApplicationSegment;
}
|
python
|
def is_a_sequence(var, allow_none=False):
    """Return True when ``var`` is a list or a tuple (but not a string!).

    When ``allow_none`` is true, ``None`` is also accepted.
    """
    if var is None:
        return allow_none
    return isinstance(var, (list, tuple))
|
java
|
/**
 * Adds (or replaces) a URL-typed attribute on the current value.
 *
 * @param name the attribute name
 * @param value the URL string to store
 */
public void setUrlAttribute(String name, String value) {
    ensureValue();
    Attribute attribute = new UrlAttribute(value);
    // Editability is determined per attribute name.
    attribute.setEditable(isEditable(name));
    getValue().getAllAttributes().put(name, attribute);
}
|
java
|
/**
 * Runs a stored map/reduce job (registered under {@code name}) inline and
 * converts each raw result through the supplied handler.
 *
 * @param name key under which the map/reduce/finalize functions are stored
 * @param query filter applied before the map phase
 * @param sort optional sort applied to the input, may be null
 * @param scope optional global scope for the JS functions, may be null
 * @param conv converter applied to each raw result document
 * @return lazily converted results
 */
public final <R> Iterable<R> mapReduce(String name, DBObject query, DBObject sort, Map<String, Object> scope, final MapReduceResultHandler<R> conv) {
    // Look up the stored functions registered under this name.
    String mapFn = this.getMRFunction(name, "map");
    String reduceFn = this.getMRFunction(name, "reduce");
    MapReduceCommand command = new MapReduceCommand(this.collection.getDBCollection(), mapFn, reduceFn, null, MapReduceCommand.OutputType.INLINE, query);
    String finalizeFn = this.getMRFunction(name, "finalize");
    if (finalizeFn != null) {
        command.setFinalize(finalizeFn);
    }
    if (sort != null) {
        command.setSort(sort);
    }
    if (scope != null) {
        command.setScope(scope);
    }
    MapReduceOutput output = this.collection.getDBCollection().mapReduce(command);
    return new ConverterIterable<R>(output.results().iterator(), conv);
}
|
java
|
/**
 * Puts an item into the cache. Null placeholders take a separate path;
 * regular items are written inside a cache transaction: if none is active,
 * a local one is begun and committed via {@code dedicatedTxCommit()} in the
 * finally block, so the write is always flushed even when storing throws.
 *
 * @param item the node, property or null placeholder to cache
 */
public void put(ItemData item)
{
    // There is different commit processing for NullNodeData and ordinary ItemData.
    if (item instanceof NullItemData)
    {
        putNullItem((NullItemData)item);
        return;
    }
    boolean inTransaction = cache.isTransactionActive();
    try
    {
        if (!inTransaction)
        {
            cache.beginTransaction();
        }
        // Mark the change as local so it is not replicated as a remote event.
        cache.setLocal(true);
        if (item.isNode())
        {
            putNode((NodeData)item, ModifyChildOption.NOT_MODIFY);
        }
        else
        {
            putProperty((PropertyData)item, ModifyChildOption.NOT_MODIFY);
        }
    }
    finally
    {
        // Always clear the local flag and commit our own transaction.
        cache.setLocal(false);
        if (!inTransaction)
        {
            dedicatedTxCommit();
        }
    }
}
|
python
|
def getargspec(fn): # type: (Callable) -> inspect.ArgSpec
    """Get the names and default values of a function's arguments.

    Args:
        fn (function): a function

    Returns:
        `inspect.ArgSpec`: A collections.namedtuple with the following attributes:

            * Args:
                args (list): a list of the argument names (it may contain nested lists).
                varargs (str): name of the * argument or None.
                keywords (str): names of the ** argument or None.
                defaults (tuple): an n-tuple of the default values of the last n arguments.
    """
    # NOTE(review): inspect.ArgSpec was removed in Python 3.11; on such
    # versions the PY3 branch raises AttributeError — confirm the supported
    # Python versions for this module.
    if six.PY2:
        return inspect.getargspec(fn)
    elif six.PY3:
        # Rebuild the legacy ArgSpec shape from the full arg spec
        # (keyword-only args and annotations are discarded).
        full_arg_spec = inspect.getfullargspec(fn)
        return inspect.ArgSpec(full_arg_spec.args, full_arg_spec.varargs, full_arg_spec.varkw, full_arg_spec.defaults)
|
python
|
def _stringlist_to_dictionary(input_string):
'''
Convert a stringlist (comma separated settings) to a dictionary
The result of the string setting1=value1,setting2=value2 will be a python dictionary:
{'setting1':'value1','setting2':'value2'}
'''
li = str(input_string).split(',')
ret = {}
for item in li:
pair = str(item).replace(' ', '').split('=')
if len(pair) != 2:
log.warning('Cannot process stringlist item %s', item)
continue
ret[pair[0]] = pair[1]
return ret
|
java
|
/**
 * Parses MySQL-specific RENAME statements (RENAME DATABASE, RENAME SCHEMA
 * and RENAME TABLE — including the comma-separated multi-table form), and
 * delegates anything else to the superclass. For multi-table renames, one
 * node per rename is created and each node's DDL expression is rewritten to
 * reflect a single rename, while position metadata is shared from the first.
 *
 * @param tokens the DDL token stream; never null
 * @param parentNode the parent AST node; never null
 * @return the parsed statement node (the first node for multi-table renames)
 * @throws ParsingException on a malformed statement
 */
@Override
protected AstNode parseCustomStatement( DdlTokenStream tokens,
                                        AstNode parentNode ) throws ParsingException {
    assert tokens != null;
    assert parentNode != null;
    if (tokens.matches(STMT_RENAME_DATABASE)) {
        markStartOfStatement(tokens);
        // RENAME DATABASE db_name TO new_db_name;
        tokens.consume(STMT_RENAME_DATABASE);
        String oldName = parseName(tokens);
        tokens.consume("TO");
        AstNode node = nodeFactory().node(oldName, parentNode, TYPE_RENAME_DATABASE_STATEMENT);
        String newName = parseName(tokens);
        node.setProperty(NEW_NAME, newName);
        markEndOfStatement(tokens, node);
        return node;
    } else if (tokens.matches(STMT_RENAME_SCHEMA)) {
        markStartOfStatement(tokens);
        // RENAME SCHEMA schema_name TO new_schema_name;
        tokens.consume(STMT_RENAME_SCHEMA);
        String oldName = parseName(tokens);
        tokens.consume("TO");
        AstNode node = nodeFactory().node(oldName, parentNode, TYPE_RENAME_SCHEMA_STATEMENT);
        String newName = parseName(tokens);
        node.setProperty(NEW_NAME, newName);
        markEndOfStatement(tokens, node);
        return node;
    } else if (tokens.matches(STMT_RENAME_TABLE)) {
        markStartOfStatement(tokens);
        // RENAME TABLE old_table TO tmp_table,
        //              new_table TO old_table,
        //              tmp_table TO new_table;
        tokens.consume(STMT_RENAME_TABLE);
        String oldName = parseName(tokens);
        tokens.consume("TO");
        String newName = parseName(tokens);
        AstNode node = nodeFactory().node(oldName, parentNode, TYPE_RENAME_TABLE_STATEMENT);
        node.setProperty(NEW_NAME, newName);
        // IF NOT MULTIPLE RENAMES, FINISH AND RETURN
        if (!tokens.matches(COMMA)) {
            markEndOfStatement(tokens, node);
            return node;
        }
        // Assume multiple renames
        // Create list of nodes so we can re-set the expression of each to reflect ONE rename.
        List<AstNode> nodes = new ArrayList<AstNode>();
        nodes.add(node);
        while (tokens.matches(COMMA)) {
            tokens.consume(COMMA);
            oldName = parseName(tokens);
            tokens.consume("TO");
            newName = parseName(tokens);
            node = nodeFactory().node(oldName, parentNode, TYPE_RENAME_TABLE_STATEMENT);
            node.setProperty(NEW_NAME, newName);
            nodes.add(node);
        }
        markEndOfStatement(tokens, nodes.get(0));
        // Share the original expression and source-position metadata of the
        // whole statement, but give each node a single-rename expression.
        String originalExpression = (String)nodes.get(0).getProperty(DDL_EXPRESSION);
        Object startLineNumber = nodes.get(0).getProperty(DDL_START_LINE_NUMBER);
        Object startColumnNumber = nodes.get(0).getProperty(DDL_START_COLUMN_NUMBER);
        Object startCharIndex = nodes.get(0).getProperty(DDL_START_CHAR_INDEX);
        for (AstNode nextNode : nodes) {
            oldName = nextNode.getName();
            newName = (String)nextNode.getProperty(NEW_NAME);
            String express = "RENAME TABLE" + SPACE + oldName + SPACE + "TO" + SPACE + newName + SEMICOLON;
            nextNode.setProperty(DDL_EXPRESSION, express);
            nextNode.setProperty(DDL_ORIGINAL_EXPRESSION, originalExpression);
            nextNode.setProperty(DDL_START_LINE_NUMBER, startLineNumber);
            nextNode.setProperty(DDL_START_COLUMN_NUMBER, startColumnNumber);
            nextNode.setProperty(DDL_START_CHAR_INDEX, startCharIndex);
        }
        return nodes.get(0);
    }
    return super.parseCustomStatement(tokens, parentNode);
}
|
python
|
def _get_document_data(database, document):
    """
    A safer version of Xapian.document.get_data

    Wraps ``document.get_data()`` and, if the database was modified under us
    (`xapian.DatabaseModifiedError`), reopens the database and retries once.

    Required arguments:
        `database` -- The database to be read
        `document` -- An instance of an Xapian.document object
    """
    try:
        data = document.get_data()
    except xapian.DatabaseModifiedError:
        database.reopen()
        data = document.get_data()
    return data
|
java
|
/**
 * Rehashes the map into a new open-addressing table of capacity
 * {@code newN} (a power of two). Every live entry is re-placed by linear
 * probing in the new arrays; the extra slot at index {@code newN} carries
 * the value bound to the zero ('\u0000') key sentinel.
 *
 * @param newN the new table capacity; must be a power of two
 */
protected void rehash(final int newN) {
    final char key[] = this.key;
    final char value[] = this.value;
    final int mask = newN - 1; // Note that this is used by the hashing macro
    final char newKey[] = new char[newN + 1];
    final char newValue[] = new char[newN + 1];
    int i = n, pos;
    // Walk backwards over the old table, visiting exactly realSize() entries;
    // the zero char marks an empty slot.
    for (int j = realSize(); j-- != 0; ) {
        while (((key[--i]) == ((char) 0))) ;
        // Linear-probe for a free slot in the new table.
        if (!((newKey[pos = (HashCommon.mix((key[i]))) & mask]) == ((char) 0)))
            while (!((newKey[pos = (pos + 1) & mask]) == ((char) 0))) ;
        newKey[pos] = key[i];
        newValue[pos] = value[i];
    }
    // Carry over the value stored for the zero-key sentinel slot.
    newValue[newN] = value[n];
    n = newN;
    this.mask = mask;
    maxFill = maxFill(n, f);
    this.key = newKey;
    this.value = newValue;
}
|
python
|
def close(self):
    """
    Just send a message off to all the pool members which contains
    the special :class:`_close_pool_message` sentinel.
    """
    if not self.is_master():
        return
    # Workers occupy ranks 1..size; rank 0 is the master itself.
    for rank in range(1, self.size + 1):
        self.comm.isend(_close_pool_message(), dest=rank)
|
java
|
/**
 * Installs the connector and ensures the ProxyFilter sits first in its
 * filter chain, removing any stale instance from a previous connector.
 *
 * @param connector the connector to configure; must not be null
 */
private void setConnector(final SocketConnector connector) {
    if (connector == null) {
        throw new NullPointerException("connector cannot be null");
    }
    this.connector = connector;
    final String filterName = ProxyFilter.class.getName();
    // Drop a stale ProxyFilter left from a previous configuration, if any.
    if (connector.getFilterChain().contains(filterName)) {
        connector.getFilterChain().remove(filterName);
    }
    connector.getFilterChain().addFirst(filterName, proxyFilter);
}
|
python
|
def default_logger(self, name=__name__, enable_stream=False,
                   enable_file=True):
    """Default Logger.

    Builds a logger with a rotating file handler and/or a stream handler.
    If you use this logger all logged output that is INFO and above will
    be logged, unless debug_logging is set then everything is logged.
    The logger will send the same data to stdout as it does to the
    specified log file.

    You can disable the default handlers by setting either `enable_file` or
    `enable_stream` to `False`.

    :param name: ``str`` logger (and log file) name
    :param enable_stream: ``bool`` attach a stdout stream handler
    :param enable_file: ``bool`` attach a rotating file handler
    :return: ``object`` the configured logger
    """
    if self.format is None:
        # Lazily create the shared record format on first use.
        self.format = logging.Formatter(
            '%(asctime)s - %(module)s:%(levelname)s => %(message)s'
        )
    log = logging.getLogger(name)
    self.name = name
    if enable_file is True:
        # Rotating file handler bounded by self.max_size / self.max_backup.
        file_handler = handlers.RotatingFileHandler(
            filename=self.return_logfile(filename='%s.log' % name),
            maxBytes=self.max_size,
            backupCount=self.max_backup
        )
        self.set_handler(log, handler=file_handler)
    if enable_stream is True or self.debug_logging is True:
        stream_handler = logging.StreamHandler()
        self.set_handler(log, handler=stream_handler)
    log.info('Logger [ %s ] loaded', name)
    return log
|
python
|
def insert_many(cls, documents, ordered=True):
    """
    Inserts a list of documents into the Collection and returns their _ids.

    :param documents: iterable of documents to insert
    :param ordered: when True, stop at the first insertion error
    :returns: list of the inserted documents' _id values
    """
    # Delegate to the driver's bulk insert; `ordered` is passed positionally.
    return cls.collection.insert_many(documents, ordered).inserted_ids
|
java
|
/**
 * Executes an FQL query against the Facebook API.
 *
 * @param query the FQL query text; must not be null (asserted)
 * @return the API response in this client's result format
 * @throws FacebookException if the API reports an error
 * @throws IOException on a communication failure
 */
public T fql_query(CharSequence query)
    throws FacebookException, IOException {
    assert (null != query);
    return this.callMethod(FacebookMethod.FQL_QUERY,
                           new Pair<String, CharSequence>("query", query));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.