language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
/**
 * Feeds the given messages into the inbound end of the pipeline and flushes.
 *
 * @param msgs messages to fire as channel reads; may be empty
 * @return {@code true} if at least one inbound message is buffered afterwards
 */
public boolean writeInbound(Object... msgs) {
    ensureOpen();
    // Only fire reads and flush when there is actually something to deliver.
    if (msgs.length > 0) {
        ChannelPipeline pipeline = pipeline();
        for (Object msg : msgs) {
            pipeline.fireChannelRead(msg);
        }
        flushInbound(false, voidPromise());
    }
    return isNotEmpty(inboundMessages);
}
|
java
|
/**
 * Translates an {@link AccessControlList} into the corresponding per-permission
 * grant headers on the given request.
 *
 * <p>Grantees are first grouped by permission so that each permission is emitted
 * as exactly one header whose value is a comma-separated list of
 * {@code type="identifier"} entries.
 *
 * @param request the request to decorate with ACL headers
 * @param acl     the ACL whose grants are converted to headers
 */
private static void addAclHeaders(Request<? extends AmazonWebServiceRequest> request, AccessControlList acl) {
    List<Grant> grants = acl.getGrantsAsList();
    Map<Permission, Collection<Grantee>> grantsByPermission = new HashMap<Permission, Collection<Grantee>>();
    for ( Grant grant : grants ) {
        // Single lookup instead of the original containsKey/put/get triple lookup.
        Collection<Grantee> grantees = grantsByPermission.get(grant.getPermission());
        if ( grantees == null ) {
            grantees = new LinkedList<Grantee>();
            grantsByPermission.put(grant.getPermission(), grantees);
        }
        grantees.add(grant.getGrantee());
    }
    // Iterate Permission.values() (not the map) so header order is deterministic.
    for ( Permission permission : Permission.values() ) {
        Collection<Grantee> grantees = grantsByPermission.get(permission);
        if ( grantees == null ) {
            continue;
        }
        StringBuilder granteeString = new StringBuilder();
        for ( Grantee grantee : grantees ) {
            if ( granteeString.length() > 0 ) {
                granteeString.append(", ");
            }
            granteeString.append(grantee.getTypeIdentifier()).append("=").append("\"")
                    .append(grantee.getIdentifier()).append("\"");
        }
        request.addHeader(permission.getHeaderName(), granteeString.toString());
    }
}
|
python
|
def get_bank_hierarchy_design_session(self, proxy):
    """Gets the session designing bank hierarchies.

    arg: proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.BankHierarchyDesignSession) - a
            ``BankHierarchySession``
    raise: NullArgument - ``proxy`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: Unimplemented - ``supports_bank_hierarchy_design() is
            false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_hierarchy_design()`` is true.*

    """
    # Guard-style check: hand back the session only when the capability
    # is advertised; otherwise signal the OSID Unimplemented error.
    if self.supports_bank_hierarchy_design():
        # pylint: disable=no-member
        return sessions.BankHierarchyDesignSession(proxy=proxy, runtime=self._runtime)
    raise errors.Unimplemented()
|
java
|
/**
 * Convenience overload: wraps the name/amount pair in an {@code Adjustment}
 * and delegates to {@link #addAdjustment(Adjustment)}.
 *
 * @param name   display name of the adjustment
 * @param amount monetary amount of the adjustment
 * @return this builder, for chaining
 */
public ReceiptTemplateBuilder addAdjustment(String name, BigDecimal amount) {
    return addAdjustment(new Adjustment(name, amount));
}
|
java
|
/**
 * Reports a warning when a destructuring import of a goog.module namespace
 * cannot be satisfied by that module's exports.
 *
 * <p>Two cases are flagged: destructuring a module that has a default export
 * (its RHS is a single value, not a bag of named exports), and destructuring
 * a name the module never exported. If the namespace is unknown, nothing is
 * reported — there is not enough information for a useful warning.
 */
private void maybeWarnForInvalidDestructuring(
    NodeTraversal t, Node importNode, String importedNamespace) {
  // Caller contract: the import must actually be a destructuring LHS.
  checkArgument(importNode.getFirstChild().isDestructuringLhs(), importNode);
  ScriptDescription importedModule =
      rewriteState.scriptDescriptionsByGoogModuleNamespace.get(importedNamespace);
  if (importedModule == null) {
    // Don't know enough to give a good warning here.
    return;
  }
  if (importedModule.defaultExportRhs != null) {
    // A default export is a single value; destructuring it is always illegal.
    t.report(importNode, ILLEGAL_DESTRUCTURING_DEFAULT_EXPORT);
    return;
  }
  // Check every destructured key against the module's named exports.
  Node objPattern = importNode.getFirstFirstChild();
  for (Node key = objPattern.getFirstChild(); key != null; key = key.getNext()) {
    String exportName = key.getString();
    if (!importedModule.namedExports.contains(exportName)) {
      t.report(importNode, ILLEGAL_DESTRUCTURING_NOT_EXPORTED, exportName, importedNamespace);
    }
  }
}
|
java
|
/**
 * Parses a Maven-style coordinate pattern into an {@link Exclusion}.
 *
 * @param exceptionPattern pattern of the form
 *        {@code <groupId>:<artifactId>[:<extension>][:<classifier>]}
 * @return the parsed exclusion (groups 1, 2, 4 and 6 of the coordinate regex)
 * @throws MojoExecutionException if the pattern does not match the expected format
 */
static Exclusion convertExclusionPatternIntoExclusion(String exceptionPattern) throws MojoExecutionException {
    final Matcher matcher = COORDINATE_PATTERN.matcher(exceptionPattern);
    if (matcher.matches()) {
        return new Exclusion(matcher.group(1), matcher.group(2), matcher.group(4), matcher.group(6));
    }
    throw new MojoExecutionException(String.format("Bad artifact coordinates %s, expected format is <groupId>:<artifactId>[:<extension>][:<classifier>]", exceptionPattern));
}
|
python
|
def allow_buttons(self, message="", link=True, back=True):
    """Re-enable the dialog controls.

    Sets the info label text, allows the window to be closed, optionally
    reveals the link widget (when one exists) and the back button, and
    always re-enables the main button.
    """
    self.info_label.set_label(message)
    self.allow_close_window()
    show_link = link and self.link is not None
    if show_link:
        self.link.set_sensitive(True)
        self.link.show_all()
    if back:
        self.back_btn.show()
    self.main_btn.set_sensitive(True)
|
java
|
/**
 * Exports the configuration and profile for this client.
 *
 * @param oldExport value forwarded as the {@code oldExport} query parameter
 * @return the JSON response from the backup endpoint, or an empty
 *         {@code JSONObject} if the request or JSON parsing fails
 */
public JSONObject exportConfigurationAndProfile(String oldExport) {
    try {
        BasicNameValuePair[] params = {
            new BasicNameValuePair("oldExport", oldExport)
        };
        String url = BASE_BACKUP_PROFILE + "/" + uriEncode(this._profileName) + "/" + this._clientId;
        // BUG FIX: 'params' was previously built but an empty array was
        // passed to doGet, silently dropping the oldExport argument.
        return new JSONObject(doGet(url, params));
    } catch (Exception e) {
        // Best-effort API: any failure yields an empty result object.
        return new JSONObject();
    }
}
|
python
|
def _setup_versioned_lib_variables(env, **kw):
"""
Setup all variables required by the versioning machinery
"""
tool = None
try: tool = kw['tool']
except KeyError: pass
use_soname = False
try: use_soname = kw['use_soname']
except KeyError: pass
# The $_SHLIBVERSIONFLAGS define extra commandline flags used when
# building VERSIONED shared libraries. It's always set, but used only
# when VERSIONED library is built (see __SHLIBVERSIONFLAGS in SCons/Defaults.py).
if use_soname:
# If the linker uses SONAME, then we need this little automata
if tool == 'sunlink':
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS -h $_SHLIBSONAME'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS -h $_LDMODULESONAME'
else:
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS -Wl,-soname=$_SHLIBSONAME'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS -Wl,-soname=$_LDMODULESONAME'
env['_SHLIBSONAME'] = '${ShLibSonameGenerator(__env__,TARGET)}'
env['_LDMODULESONAME'] = '${LdModSonameGenerator(__env__,TARGET)}'
env['ShLibSonameGenerator'] = SCons.Tool.ShLibSonameGenerator
env['LdModSonameGenerator'] = SCons.Tool.LdModSonameGenerator
else:
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS'
# LDOMDULVERSIONFLAGS should always default to $SHLIBVERSIONFLAGS
env['LDMODULEVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS'
|
java
|
/**
 * Binomial log-likelihood: {@code k*ln(x) + (n-k)*ln(1-x)}.
 *
 * <p>The probability is clamped away from exactly 0.0 and 1.0 (to 0.01 and
 * 0.99 respectively) so both logarithms stay finite.
 *
 * @param k number of successes
 * @param n number of trials
 * @param x success probability in [0, 1]
 * @return the log-likelihood value
 */
private double logL(int k, long n, double x) {
    double p = x;
    if (p == 0.0) {
        p = 0.01;
    } else if (p == 1.0) {
        p = 0.99;
    }
    return k * Math.log(p) + (n - k) * Math.log(1 - p);
}
|
java
|
/**
 * Creates a copy of the given item id value using this object's data
 * object factory, so the copy is of the factory's implementation class.
 *
 * @param object the value to copy
 * @return a factory-produced copy with the same id and site IRI
 */
public ItemIdValue copy(ItemIdValue object) {
    final String id = object.getId();
    final String siteIri = object.getSiteIri();
    return dataObjectFactory.getItemIdValue(id, siteIri);
}
|
python
|
def release(major=False, minor=False, patch=True, pypi_index=None):
    """Overall process flow for performing a release.

    :param major: bump the major version component when True
    :param minor: bump the minor version component when True
    :param patch: bump the patch version component when True (default)
    :param pypi_index: index to publish to; semantics defined by ``publish``
        (presumably defaults to the configured PyPI — confirm in ``publish``)
    """
    # Compute the next version number from the requested bump level.
    relver = next_release(major, minor, patch)
    # Branch, prepare the release artifacts, then close out the branch.
    start_rel_branch(relver)
    prepare_release(relver)
    finish_rel_branch(relver)
    # Finally upload to the package index.
    publish(pypi_index)
|
java
|
/**
 * Sets the historical metrics for this object.
 *
 * <p>A {@code null} argument clears the field; otherwise a defensive copy
 * of the given collection is stored so later mutation of the caller's
 * collection does not affect this object.
 *
 * @param historicalMetrics the metrics to store, or {@code null} to clear
 */
public void setHistoricalMetrics(java.util.Collection<HistoricalMetric> historicalMetrics) {
    this.historicalMetrics = (historicalMetrics == null)
            ? null
            : new java.util.ArrayList<HistoricalMetric>(historicalMetrics);
}
|
python
|
def read_creds_from_ecs_container_metadata():
    """
    Read credentials from the ECS container metadata endpoint (task IAM role).

    :return: a populated credentials dict on success, ``False`` on any failure
             (missing env var, network error, or unexpected response shape)
    """
    creds = init_creds()
    try:
        relative_uri = os.environ['AWS_CONTAINER_CREDENTIALS_RELATIVE_URI']
        # 169.254.170.2 is the fixed ECS credential-provider address.
        response = requests.get('http://169.254.170.2' + relative_uri, timeout=1)
        credentials = response.json()
        creds['AccessKeyId'] = credentials['AccessKeyId']
        creds['SecretAccessKey'] = credentials['SecretAccessKey']
        # The metadata service calls the session token just 'Token'.
        creds['SessionToken'] = credentials['Token']
        return creds
    except Exception:
        return False
|
java
|
/**
 * Replaces the point at the given index with the coordinates of the
 * supplied point, delegating to the coordinate-based overload with the
 * extra flag disabled.
 *
 * @param index index of the point to replace
 * @param point source of the new x/y coordinates
 * @return whatever the coordinate-based overload reports
 */
public final boolean setPointAt(int index, Point2D<?, ?> point) {
    final double x = point.getX();
    final double y = point.getY();
    return setPointAt(index, x, y, false);
}
|
python
|
def tintWith(self, red, green, blue):
    """tintWith(self, red, green, blue)

    Tint the pixmap with the given red/green/blue component values.

    Only valid when the pixmap has a colorspace with at most 3 components;
    otherwise a warning is printed and None is returned.
    """
    # Guard: tinting is undefined for missing colorspaces or those with
    # more than 3 components (e.g. CMYK).
    if not self.colorspace or self.colorspace.n > 3:
        print("warning: colorspace invalid for function")
        return
    # Delegate the actual pixel work to the SWIG-generated C extension.
    return _fitz.Pixmap_tintWith(self, red, green, blue)
|
java
|
/**
 * Partitions the attributes of an element into the three special XSI
 * attributes (schemaLocation, noNamespaceSchemaLocation, type) and the
 * remaining "ordinary" attributes.
 *
 * <p>Namespace declarations ({@code xmlns}/{@code xmlns:*}), the three XSI
 * attributes themselves, and anything rejected by the configured attribute
 * filter are excluded from the ordinary list.
 *
 * @param map the element's attribute map
 * @return the split attribute groups
 */
private Attributes splitAttributes(final NamedNodeMap map) {
    final String xsiNs = XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI;
    final Attr schemaLocation = (Attr) map.getNamedItemNS(xsiNs, "schemaLocation");
    final Attr noNamespaceSchemaLocation =
            (Attr) map.getNamedItemNS(xsiNs, "noNamespaceSchemaLocation");
    final Attr xsiType = (Attr) map.getNamedItemNS(xsiNs, "type");
    final List<Attr> remaining = new LinkedList<Attr>();
    final int length = map.getLength();
    for (int i = 0; i < length; i++) {
        final Attr attr = (Attr) map.item(i);
        final boolean isNamespaceDecl =
                XMLConstants.XMLNS_ATTRIBUTE_NS_URI.equals(attr.getNamespaceURI());
        if (!isNamespaceDecl
                && attr != schemaLocation
                && attr != noNamespaceSchemaLocation
                && attr != xsiType
                && getAttributeFilter().test(attr)) {
            remaining.add(attr);
        }
    }
    return new Attributes(schemaLocation, noNamespaceSchemaLocation, xsiType, remaining);
}
|
java
|
/**
 * Event-bus handler that transitions this component between throttled and
 * unthrottled states based on the incoming {@link ThrottleState}.
 *
 * <p>On entering the throttled state a fresh one-count latch is created;
 * on leaving it the latch is counted down so anything blocked on it resumes.
 * No-op when throttling is disabled.
 *
 * <p>NOTE(review): the read-then-set on {@code currentlyThrottled} and the
 * assignment of {@code blockLatch} are not atomic as a pair — presumably the
 * event bus delivers these events on a single thread; confirm before relying
 * on concurrent delivery.
 */
@Subscribe
public void updateThrottleState(ThrottleState throttleState) {
    // Only run if throttling is enabled.
    if (!throttlingAllowed) {
        return;
    }
    // check if we are throttled
    final boolean throttled = determineIfThrottled(throttleState);
    if (currentlyThrottled.get()) {
        // no need to unblock
        if (throttled) {
            return;
        }
        // sanity check
        if (blockLatch == null) {
            log.error("Expected to see a transport throttle latch, but it is missing. This is a bug, continuing anyway.");
            return;
        }
        // Leaving the throttled state: flip the flag, notify, and release waiters.
        currentlyThrottled.set(false);
        handleChangedThrottledState(false);
        blockLatch.countDown();
    } else if (throttled) {
        // Entering the throttled state: flip the flag, notify, and arm a new latch.
        currentlyThrottled.set(true);
        handleChangedThrottledState(true);
        blockLatch = new CountDownLatch(1);
    }
}
|
python
|
def catchable_exceptions(exceptions):
    """Return True if ``exceptions`` can be caught in an ``except`` clause.

    A value is catchable if it is a ``BaseException`` subclass, or a
    non-empty tuple consisting exclusively of ``BaseException`` subclasses.

    Unlike a bare ``issubclass`` check, this never raises: tuple members
    that are not classes (which would make ``issubclass`` raise TypeError)
    simply make the result False.
    """
    if isinstance(exceptions, type) and issubclass(exceptions, BaseException):
        return True
    if isinstance(exceptions, tuple) and exceptions:
        # Check each member is a class *before* issubclass to avoid TypeError.
        return all(
            isinstance(item, type) and issubclass(item, BaseException)
            for item in exceptions
        )
    return False
|
java
|
/**
 * Stores (or replaces) the view state for the given view id.
 *
 * @param viewId    id of the view whose state is stored; must not be null
 * @param viewState the state object to store; must not be null
 * @throws NullPointerException if either argument is null
 */
public void putViewState(@NonNull String viewId,
    @NonNull Object viewState) {
    if (viewId == null) {
        throw new NullPointerException("ViewId is null");
    }
    if (viewState == null) {
        throw new NullPointerException("ViewState is null");
    }
    // Create the holder on first use; either way the state is (re)assigned.
    // This removes the assignment duplicated in both branches of the
    // original if/else.
    PresenterHolder presenterHolder = presenterMap.get(viewId);
    if (presenterHolder == null) {
        presenterHolder = new PresenterHolder();
        presenterMap.put(viewId, presenterHolder);
    }
    presenterHolder.viewState = viewState;
}
|
python
|
def str_repr(klass):
    """
    Implements string conversion methods for the given class.

    The given class must implement the __str__ method. This decorator
    will add __repr__ and __unicode__ (for Python 2).
    """
    if PY2:
        # On Python 2, treat the class's __str__ as returning text: expose it
        # as __unicode__ and make __str__ return the UTF-8 encoded bytes.
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    # __repr__ shows the class name together with the instance's string form.
    klass.__repr__ = lambda self: '<%s: %r>' % (self.__class__.__name__, str(self))
    return klass
|
python
|
def _write_xml(xmlfile, srcs):
    """Save the ROI model as an XML document.

    Parameters
    ----------
    xmlfile : str
        Path of the output XML file.
    srcs : iterable
        Source objects; each must provide a ``write_xml(root)`` method that
        appends its representation to the root element.
    """
    root = ElementTree.Element('source_library')
    root.set('title', 'source_library')
    for src in srcs:
        src.write_xml(root)
    # Use a context manager so the handle is flushed and closed even if
    # prettify_xml raises; the original leaked the open file object.
    with open(xmlfile, 'w') as output_file:
        output_file.write(utils.prettify_xml(root))
|
python
|
def run_driz_chip(img,chip,output_wcs,outwcs,template,paramDict,single,
                  doWrite,build,_versions,_numctx,_nplanes,_numchips,
                  _outsci,_outwht,_outctx,_hdrlist,wcsmap):
    """ Perform the drizzle operation on a single chip.

    This is separated out from `run_driz_img` so as to keep together
    the entirety of the code which is inside the loop over
    chips. See the `run_driz` code for more documentation.

    Parameters (assumptions marked where they cannot be confirmed here):
      img        : imageObject-like owner of the chip; provides masks,
                   output names/values and virtual-output storage.
      chip       : single-chip view with header, WCS and output names.
      output_wcs, outwcs : output WCS objects — presumably the full product
                   WCS and the per-image WCS respectively; TODO confirm.
      template   : passed through to OutputImage.writeFITS as the header
                   template source.
      paramDict  : drizzle parameter dictionary ('units', 'bits', 'pixfrac',
                   'kernel', 'fillval', 'stepsize', 'crbit', 'wt_scl',
                   'wht_type', 'clean', 'proc_unit', 'wcskey').
      single     : True for the "single drizzle" pass (affects masks and
                   exposure scaling).
      doWrite    : when True, write the accumulated output arrays to FITS.
      _outsci/_outwht/_outctx : output science/weight/context arrays,
                   updated in place by do_driz.
      _hdrlist   : list of per-chip header value dicts; appended to here.

    Side effects: mutates _outsci/_outwht/_outctx, _hdrlist, paramDict
    ('idcscale'), chip.outputNames, module-level timing lists, and may set
    img.inmemory = False before writing.

    NOTE(review): ``_handle`` opened via fileutil.openImage is never closed
    in this function — confirm whether the caller owns cleanup.
    """
    global time_pre_all, time_driz_all, time_post_all, time_write_all
    epoch = time.time()
    # Look for sky-subtracted product
    if os.path.exists(chip.outputNames['outSky']):
        chipextn = '['+chip.header['extname']+','+str(chip.header['extver'])+']'
        _expname = chip.outputNames['outSky']+chipextn
    else:
        # If sky-subtracted product does not exist, use regular input
        _expname = chip.outputNames['data']
    log.info('-Drizzle input: %s' % _expname)
    # Open the SCI image
    _handle = fileutil.openImage(_expname, mode='readonly', memmap=False)
    _sciext = _handle[chip.header['extname'],chip.header['extver']]
    # Apply sky subtraction and unit conversion to input array
    if chip.computedSky is None:
        _insci = _sciext.data
    else:
        log.info("Applying sky value of %0.6f to %s"%(chip.computedSky,_expname))
        _insci = _sciext.data - chip.computedSky
    # If input SCI image is still integer format (RAW files)
    # transform it to float32 for all subsequent operations
    # needed for numpy >=1.12.x
    if np.issubdtype(_insci[0,0],np.int16):
        _insci = _insci.astype(np.float32)
        _insci *= chip._effGain
    # Set additional parameters needed by 'drizzle'
    _in_units = chip.in_units.lower()
    if _in_units == 'cps':
        _expin = 1.0
    else:
        _expin = chip._exptime
    ####
    #
    # Put the units keyword handling in the imageObject class
    #
    ####
    # Determine output value of BUNITS
    # and make sure it is not specified as 'ergs/cm...'
    _bunit = chip._bunit
    _bindx = _bunit.find('/')
    if paramDict['units'] == 'cps':
        # If BUNIT value does not specify count rate already...
        if _bindx < 1:
            # ... append '/SEC' to value
            _bunit += '/S'
        else:
            # reset _bunit here to None so it does not
            # overwrite what is already in header
            _bunit = None
    else:
        if _bindx > 0:
            # remove '/S'
            _bunit = _bunit[:_bindx]
        else:
            # reset _bunit here to None so it does not
            # overwrite what is already in header
            _bunit = None
    _uniqid = _numchips + 1
    if _nplanes == 1:
        # We need to reset what gets passed to TDRIZ
        # when only 1 context image plane gets generated
        # to prevent overflow problems with trying to access
        # planes that weren't created for large numbers of inputs.
        _uniqid = ((_uniqid-1) % 32) + 1
    # Select which mask needs to be read in for drizzling
    ####
    #
    # Actually need to generate mask file here 'on-demand'
    # and combine it with the static_mask for single_drizzle case...
    #
    ####
    # Build basic DQMask from DQ array and bits value
    dqarr = img.buildMask(chip._chip,bits=paramDict['bits'])
    # get correct mask filenames/objects
    staticMaskName = chip.outputNames['staticMask']
    crMaskName = chip.outputNames['crmaskImage']
    # When products live in memory, swap file names for the in-memory objects.
    if img.inmemory:
        if staticMaskName in img.virtualOutputs:
            staticMaskName = img.virtualOutputs[staticMaskName]
        if crMaskName in img.virtualOutputs:
            crMaskName = img.virtualOutputs[crMaskName]
    # Merge appropriate additional mask(s) with DQ mask
    if single:
        mergeDQarray(staticMaskName,dqarr)
        if dqarr.sum() == 0:
            log.warning('All pixels masked out when applying static mask!')
    else:
        mergeDQarray(staticMaskName,dqarr)
        if dqarr.sum() == 0:
            log.warning('All pixels masked out when applying static mask!')
        else:
            # Only apply cosmic-ray mask when some good pixels remain after
            # applying the static mask
            mergeDQarray(crMaskName,dqarr)
            if dqarr.sum() == 0:
                log.warning('WARNING: All pixels masked out when applying '
                            'cosmic ray mask to %s' % _expname)
    updateInputDQArray(chip.dqfile,chip.dq_extn,chip._chip,
                       crMaskName, paramDict['crbit'])
    img.set_wtscl(chip._chip,paramDict['wt_scl'])
    pix_ratio = outwcs.pscale / chip.wcslin_pscale
    # Convert mask to a datatype expected by 'tdriz'
    # Also, base weight mask on ERR or IVM file as requested by user
    wht_type = paramDict['wht_type']
    if wht_type == 'ERR':
        _inwht = img.buildERRmask(chip._chip,dqarr,pix_ratio)
    elif wht_type == 'IVM':
        _inwht = img.buildIVMmask(chip._chip,dqarr,pix_ratio)
    elif wht_type == 'EXP':
        _inwht = img.buildEXPmask(chip._chip,dqarr)
    else: # wht_type == None, used for single drizzle images
        _inwht = chip._exptime * dqarr.astype(np.float32)
    if not(paramDict['clean']):
        # Write out mask file if 'clean' has been turned off
        if single:
            step_mask = 'singleDrizMask'
        else:
            step_mask = 'finalMask'
        _outmaskname = chip.outputNames[step_mask]
        if os.path.exists(_outmaskname): os.remove(_outmaskname)
        pimg = fits.PrimaryHDU(data=_inwht)
        img.saveVirtualOutputs({step_mask:pimg})
        # Only write out mask files if in_memory=False
        if not img.inmemory:
            pimg.writeto(_outmaskname)
            del pimg
            log.info('Writing out mask file: %s' % _outmaskname)
    time_pre = time.time() - epoch; epoch = time.time()
    # New interface to performing the drizzle operation on a single chip/image
    _vers = do_driz(_insci, chip.wcs, _inwht, outwcs, _outsci, _outwht, _outctx,
                    _expin, _in_units, chip._wtscl,
                    wcslin_pscale=chip.wcslin_pscale, uniqid=_uniqid,
                    pixfrac=paramDict['pixfrac'], kernel=paramDict['kernel'],
                    fillval=paramDict['fillval'], stepsize=paramDict['stepsize'],
                    wcsmap=wcsmap)
    time_driz = time.time() - epoch; epoch = time.time()
    # Set up information for generating output FITS image
    #### Check to see what names need to be included here for use in _hdrlist
    chip.outputNames['driz_version'] = _vers
    chip.outputNames['driz_wcskey'] = paramDict['wcskey']
    outputvals = chip.outputNames.copy()
    # Update entries for names/values based on final output
    outputvals.update(img.outputValues)
    for kw in img.outputNames:
        if kw[:3] == 'out':
            outputvals[kw] = img.outputNames[kw]
    outputvals['exptime'] = chip._exptime
    outputvals['expstart'] = chip._expstart
    outputvals['expend'] = chip._expend
    outputvals['wt_scl_val'] = chip._wtscl
    _hdrlist.append(outputvals)
    time_post = time.time() - epoch; epoch = time.time()
    if doWrite:
        ###########################
        #
        # IMPLEMENTATION REQUIREMENT:
        #
        # Need to implement scaling of the output image
        # from 'cps' to 'counts' in the case where 'units'
        # was set to 'counts'... 21-Mar-2005
        #
        ###########################
        # Convert output data from electrons/sec to counts/sec as specified
        native_units = img.native_units
        if paramDict['proc_unit'].lower() == 'native' and native_units.lower()[:6] == 'counts':
            np.divide(_outsci, chip._gain, _outsci)
            _bunit = native_units.lower()
            if paramDict['units'] == 'counts':
                indx = _bunit.find('/')
                if indx > 0: _bunit = _bunit[:indx]
        # record IDCSCALE for output to product header
        paramDict['idcscale'] = chip.wcs.idcscale
        #If output units were set to 'counts', rescale the array in-place
        if paramDict['units'] == 'counts':
            #determine what exposure time needs to be used
            # to rescale the product.
            if single:
                _expscale = chip._exptime
            else:
                _expscale = img.outputValues['texptime']
            np.multiply(_outsci, _expscale, _outsci)
        #
        # Write output arrays to FITS file(s)
        #
        if not single:
            img.inmemory = False
        _outimg = outputimage.OutputImage(_hdrlist, paramDict, build=build,
                                          wcs=output_wcs, single=single)
        _outimg.set_bunit(_bunit)
        _outimg.set_units(paramDict['units'])
        outimgs = _outimg.writeFITS(template,_outsci,_outwht,ctxarr=_outctx,
                                    versions=_versions,virtual=img.inmemory)
        del _outimg
        # update imageObject with product in memory
        if single:
            img.saveVirtualOutputs(outimgs)
    # this is after the doWrite
    time_write = time.time() - epoch; epoch = time.time()
    # Performance bookkeeping — deliberately disabled via the constant False.
    if False and not single: # turn off all this perf reporting for now
        time_pre_all.append(time_pre)
        time_driz_all.append(time_driz)
        time_post_all.append(time_post)
        time_write_all.append(time_write)
        log.info('chip time pre-drizzling: %6.3f' % time_pre)
        log.info('chip time drizzling: %6.3f' % time_driz)
        log.info('chip time post-drizzling: %6.3f' % time_post)
        log.info('chip time writing output: %6.3f' % time_write)
        if doWrite:
            tot_pre = sum(time_pre_all)
            tot_driz = sum(time_driz_all)
            tot_post = sum(time_post_all)
            tot_write = sum(time_write_all)
            tot = tot_pre+tot_driz+tot_post+tot_write
            log.info('chip total pre-drizzling: %6.3f (%4.1f%%)' % (tot_pre, (100.*tot_pre/tot)))
            log.info('chip total drizzling: %6.3f (%4.1f%%)' % (tot_driz, (100.*tot_driz/tot)))
            log.info('chip total post-drizzling: %6.3f (%4.1f%%)' % (tot_post, (100.*tot_post/tot)))
            log.info('chip total writing output: %6.3f (%4.1f%%)' % (tot_write, (100.*tot_write/tot)))
|
java
|
/**
 * Returns the {@code EEnum} metaobject for MFC MFCScpe, resolving it lazily
 * from the globally registered package on first access.
 *
 * <p>Index 49 is the generated classifier position for this enum in the
 * Afplib package model (EMF-generated accessor).
 *
 * @return the cached or freshly resolved enum metaobject
 */
public EEnum getMFCMFCScpe() {
    if (mfcmfcScpeEEnum == null) {
        mfcmfcScpeEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(49);
    }
    return mfcmfcScpeEEnum;
}
|
python
|
def from_array(array):
    """
    Deserialize a new MaskPosition from a given dictionary.

    :return: new MaskPosition instance, or None for a missing/empty input.
    :rtype: MaskPosition
    """
    # `not array` already covers None as well as the empty dict.
    if not array:
        return None
    assert_type_or_raise(array, dict, parameter_name="array")
    data = {
        'point': u(array.get('point')),
        'x_shift': float(array.get('x_shift')),
        'y_shift': float(array.get('y_shift')),
        'scale': float(array.get('scale')),
        # Keep the raw payload around for debugging/round-tripping.
        '_raw': array,
    }
    return MaskPosition(**data)
|
java
|
/**
 * Axis-generated service-locator method: returns a stub for the port with
 * the given name implementing the given interface.
 *
 * <p>A {@code null} port name falls back to selection by interface alone.
 * The known local name {@code "NetworkServiceInterfacePort"} gets its
 * dedicated stub; any other name gets an interface-selected stub tagged
 * with the requested port name.
 *
 * @throws javax.xml.rpc.ServiceException if no stub can be created
 */
public java.rmi.Remote getPort(javax.xml.namespace.QName portName, Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
    if (portName == null) {
        // No port name given: select purely by interface.
        return getPort(serviceEndpointInterface);
    }
    java.lang.String inputPortName = portName.getLocalPart();
    if ("NetworkServiceInterfacePort".equals(inputPortName)) {
        return getNetworkServiceInterfacePort();
    }
    else {
        // Unknown local name: build a stub for the interface and record the
        // caller's port name on it.
        java.rmi.Remote _stub = getPort(serviceEndpointInterface);
        ((org.apache.axis.client.Stub) _stub).setPortName(portName);
        return _stub;
    }
}
|
java
|
/**
 * Convenience overload: writes the rows to the given path with appending
 * disabled.
 *
 * @param datas    row objects to write
 * @param hasTitle whether a title row should be emitted
 * @param path     destination file path
 * @throws IOException if the underlying write fails
 */
public void writeToExcel(List<? extends Object> datas, boolean hasTitle,
        String path) throws IOException {
    final boolean append = false;
    writeToExcel(datas, hasTitle, path, append);
}
|
java
|
/**
 * Removes the commerce wish list with the given primary key.
 *
 * <p>Generated persistence code: loads the entity in a fresh session, throws
 * if it is absent, and delegates the actual removal to
 * {@code remove(CommerceWishList)}. The session is always closed.
 *
 * @param primaryKey the primary key of the wish list to remove
 * @return the removed wish list
 * @throws NoSuchWishListException if no wish list has the given primary key
 */
@Override
public CommerceWishList remove(Serializable primaryKey)
    throws NoSuchWishListException {
    Session session = null;
    try {
        session = openSession();
        CommerceWishList commerceWishList = (CommerceWishList)session.get(CommerceWishListImpl.class,
                primaryKey);
        if (commerceWishList == null) {
            if (_log.isDebugEnabled()) {
                _log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey);
            }
            throw new NoSuchWishListException(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY +
                primaryKey);
        }
        return remove(commerceWishList);
    }
    catch (NoSuchWishListException nsee) {
        // Re-throw the domain exception untouched; only unexpected failures
        // go through processException below.
        throw nsee;
    }
    catch (Exception e) {
        throw processException(e);
    }
    finally {
        closeSession(session);
    }
}
|
java
|
/**
 * Builds a composite repositories option from the given repository URLs.
 *
 * @param repositoryUrls one or more non-empty repository URLs
 * @return the combined repositories option
 */
public static Option repositories(final String... repositoryUrls) {
    validateNotEmptyContent(repositoryUrls, true, "Repository URLs");
    // Map each URL to its RepositoryOption, preserving order.
    final RepositoryOption[] options = new RepositoryOption[repositoryUrls.length];
    for (int i = 0; i < repositoryUrls.length; i++) {
        options[i] = repository(repositoryUrls[i]);
    }
    return repositories(options);
}
|
java
|
/**
 * Determines which installed iFixes must be reapplied because the files they
 * delivered are missing or stale in the current installation.
 *
 * <p>For every file referenced by the iFix metadata XMLs: static files and
 * present bundle jars are hash-compared against the recorded hash; a missing
 * static file always forces a reapply; a missing iFix jar forces a reapply
 * only when a matching base bundle (same symbolic name, same
 * major.minor.micro) is installed — otherwise the iFix is irrelevant and
 * ignored.
 *
 * @param installDir the installation root directory
 * @param features   provisioned feature definitions keyed by name
 * @param repo       bundle repository used to resolve feature content
 * @param console    console for reporting read errors
 * @return the ids of the iFixes that need to be reapplied
 */
public static Set<String> getIFixesThatMustBeReapplied(File installDir, Map<String, ProvisioningFeatureDefinition> features,
                                                       ContentBasedLocalBundleRepository repo, CommandConsole console) {
    Set<String> ifixesToReApply = new HashSet<String>();
    Map<File, Map<String, Version>> allBaseBundleJarContent = new HashMap<File, Map<String, Version>>();
    Set<File> allBundleJarContent = new HashSet<File>();
    Set<File> allStaticFileContent = new HashSet<File>();
    // Process all subsystem content of all manifests and store them in the set. We'll use that to process the ifix files.
    processSubsystemContent(installDir, features, repo, allBaseBundleJarContent, allBundleJarContent, allStaticFileContent, console);
    // Get a list of all the LibertyProfile metadata files that use to match up with the ifixes.
    Map<String, BundleFile> bundleFiles = processLPMFXmls(installDir, console);
    // Iterate over each file that is found in all the ifix xmls. If the same file is listed in multiple ifix xml, we will have
    // been given the new version that should be on the system, if it relevant to the runtime.
    for (Map.Entry<String, IFixInfo> ifixInfoEntry : processIFixXmls(installDir, bundleFiles, console).entrySet()) {
        // Get the relative file name of the ifix file e.g. lib/test_1.0.0.20130101.jar
        String updateFileName = ifixInfoEntry.getKey();
        // Get the IfixInfo object that contains the latest version of this file.
        IFixInfo ifixInfo = ifixInfoEntry.getValue();
        // Loop through all the updated files in the current ifixInfo and when we've found the required file to
        // process, do the rest of the processing.
        for (UpdatedFile updatedFile : ifixInfo.getUpdates().getFiles()) {
            if (updatedFile.getId().equals(updateFileName)) {
                File updateFile = new File(installDir, updateFileName);
                // Check to see if we're dealing with a static content file or not. If not see if it is a bundle. If not then ignore as we
                // don't need it.
                if (allStaticFileContent.contains(updateFile)) {
                    // Check that the file exists in the runtime.
                    if (updateFile.exists()) {
                        // Get the hash of the static file from the ifix xml file.
                        String ifixHash = updatedFile.getHash();
                        // Now calculate the new hash and compare the 2. If they are NOT the same the ifix needs to be re-applied.
                        try {
                            // Now calculate the new hash and compare each hash. If they are NOT the same the ifix needs to be re-applied.
                            if (!equalsHashes(updateFile, ifixHash))
                                ifixesToReApply.add(ifixInfo.getId());
                        } catch (IOException ioe) {
                            console.printlnErrorMessage(getMessage("ifixutils.unable.to.read.file", updateFile.getAbsolutePath(), ioe.getMessage()));
                        }
                    } else {
                        // If the static file doesn't appear on disk, then we need to re-apply the ifix.
                        ifixesToReApply.add(ifixInfo.getId());
                    }
                    // Process jar files. If the ifix jar doesn't exist, then check to see whether we have the relevant
                    // features installed that would require the ifix to be re-applied.
                } else {
                    // If we're not dealing with static files, then we should be dealing with bundles and or static jars.
                    // If we have the actual ifix jar in the runtime, we need to check that it is the correct file. Check that hash.
                    if (allBundleJarContent.contains(updateFile)) {
                        // Get the hash of the bundle from the ifix xml file.
                        String ifixHash = updatedFile.getHash();
                        try {
                            // Now calculate the new hash and compare both hashes. If they are NOT the same the ifix needs to be re-applied.
                            if (!equalsHashes(updateFile, ifixHash))
                                ifixesToReApply.add(ifixInfo.getId());
                        } catch (IOException ioe) {
                            console.printlnErrorMessage(getMessage("ifixutils.unable.to.read.file", updateFile.getAbsolutePath(), ioe.getMessage()));
                        }
                    } else {
                        // If the ifix jar isn't in the runtime, we need to check that the base bundle it is an ifix for does. If this doesn't
                        // exist we can then ignore the ifix.
                        // Find the BundleFile associated with the ifix file.
                        BundleFile bundleFile = bundleFiles.get(updatedFile.getId());
                        // If we don't have one, then we're not a jar file we know about. We know ignore this file.
                        if (bundleFile != null) {
                            // Get the symbolic name and version of the ifix jar we're dealing with.
                            String bundleSymbolicName = bundleFile.getSymbolicName();
                            Version bundleVersion = new Version(bundleFile.getVersion());
                            // Now iterate over all the bundles we know about in the runtime, and see if we can find a matching base bundle.
                            // If we can we need to reapply the ifix, otherwise we can ignore.
                            boolean found = false;
                            for (Iterator<Map<String, Version>> iter = allBaseBundleJarContent.values().iterator(); iter.hasNext() && !found;) {
                                for (Map.Entry<String, Version> bundleEntry : iter.next().entrySet()) {
                                    // If we have a matching symbolic name and the version matches (ignoring the qualifier) then we have a base bundle on the
                                    // system.
                                    String entrySymbolicName = bundleEntry.getKey();
                                    Version entryVersion = bundleEntry.getValue();
                                    if (bundleSymbolicName.equals(entrySymbolicName) &&
                                        bundleVersion.getMajor() == entryVersion.getMajor() &&
                                        bundleVersion.getMinor() == entryVersion.getMinor() &&
                                        bundleVersion.getMicro() == entryVersion.getMicro()) {
                                        found = true;
                                    }
                                }
                            }
                            // If we have found a base bundle, report we need to reinstall the ifix.
                            if (found)
                                ifixesToReApply.add(ifixInfo.getId());
                        }
                    }
                }
            }
        }
    }
    return ifixesToReApply;
}
|
java
|
/**
 * Constant-folds logical-NOT of a statically known boolean operand into the
 * corresponding boolean literal.
 */
@Override
public void endVisit(PrefixExpression node) {
    // Evaluate the operand first (same call order as before), then only
    // rewrite when the operator is NOT and the value is known.
    Boolean constantValue = getReplaceableValue(node.getOperand());
    boolean isNot = node.getOperator() == PrefixExpression.Operator.NOT;
    if (isNot && constantValue != null) {
        node.replaceWith(new BooleanLiteral(!constantValue, typeUtil));
    }
}
|
python
|
def get_string(self, betas: List[float], gammas: List[float], samples: int = 100):
    """
    Compute the most probable string.

    The method assumes you have passed init_betas and init_gammas with your
    pre-computed angles or you have run the VQE loop to determine the
    angles. If you have not done this you will be returning the output for
    a random set of angles.

    :param betas: List of beta angles
    :param gammas: List of gamma angles
    :param samples: (Optional) number of samples to get back from the QuantumComputer.
    :raises ValueError: if ``samples`` is not a positive integer.
    :returns: tuple representing the bitstring, Counter object from
              collections holding all output bitstrings and their frequency.
    """
    # BUG FIX: the original used `and`, which could never trigger for a
    # non-positive int (the isinstance half was False) nor for a positive
    # non-int (the <= 0 half was False) — the guard was effectively dead.
    if not isinstance(samples, int) or samples <= 0:
        raise ValueError("samples variable must be positive integer")
    param_prog = self.get_parameterized_program()
    stacked_params = np.hstack((betas, gammas))
    sampling_prog = Program()
    ro = sampling_prog.declare('ro', 'BIT', len(self.qubits))
    sampling_prog += param_prog(stacked_params)
    sampling_prog += [MEASURE(qubit, r) for qubit, r in zip(self.qubits, ro)]
    sampling_prog.wrap_in_numshots_loop(samples)
    executable = self.qc.compile(sampling_prog)
    bitstring_samples = self.qc.run(executable)
    bitstring_tuples = list(map(tuple, bitstring_samples))
    freq = Counter(bitstring_tuples)
    most_frequent_bit_string = max(freq, key=lambda x: freq[x])
    return most_frequent_bit_string, freq
|
python
|
def get_plot(self, units='THz', ymin=None, ymax=None, width=None,
             height=None, dpi=None, plt=None, fonts=None, dos=None,
             dos_aspect=3, color=None, style=None, no_base_style=False):
    """Get a :obj:`matplotlib.pyplot` object of the phonon band structure.

    Args:
        units (:obj:`str`, optional): Units of phonon frequency. Accepted
            (case-insensitive) values are Thz, cm-1, eV, meV.
        ymin (:obj:`float`, optional): The minimum energy on the y-axis.
        ymax (:obj:`float`, optional): The maximum energy on the y-axis.
        width (:obj:`float`, optional): The width of the plot.
        height (:obj:`float`, optional): The height of the plot.
        dpi (:obj:`int`, optional): The dots-per-inch (pixel density) for
            the image.
        fonts (:obj:`list`, optional): Fonts to use in the plot. Can be a
            a single font, specified as a :obj:`str`, or several fonts,
            specified as a :obj:`list` of :obj:`str`.
        plt (:obj:`matplotlib.pyplot`, optional): A
            :obj:`matplotlib.pyplot` object to use for plotting.
        dos (:obj:`np.ndarray`): 2D Numpy array of total DOS data
        dos_aspect (float): Width division for vertical DOS
        color (:obj:`str` or :obj:`tuple`, optional): Line/fill colour in
            any matplotlib-accepted format
        style (:obj:`list`, :obj:`str`, or :obj:`dict`): Any matplotlib
            style specifications, to be composed on top of Sumo base
            style.
        no_base_style (:obj:`bool`, optional): Prevent use of sumo base
            style. This can make alternative styles behave more
            predictably.

    Returns:
        :obj:`matplotlib.pyplot`: The phonon band structure plot.
    """
    if color is None:
        color = 'C0'  # Default to first colour in matplotlib series
    # With a DOS array, lay out two axes sharing y (bands left, DOS right);
    # otherwise a single full-width axes is used.
    if dos is not None:
        plt = pretty_subplot(1, 2, width=width, height=height,
                             sharex=False, sharey=True, dpi=dpi, plt=plt,
                             gridspec_kw={'width_ratios': [dos_aspect, 1],
                                          'wspace': 0})
        ax = plt.gcf().axes[0]
    else:
        plt = pretty_plot(width, height, dpi=dpi, plt=plt)
        ax = plt.gca()
    data = self.bs_plot_data()
    dists = data['distances']
    freqs = data['frequency']
    # nd is branch index, nb is band index, nk is kpoint index
    for nd, nb in itertools.product(range(len(data['distances'])),
                                    range(self._nb_bands)):
        f = freqs[nd][nb]
        # plot band data
        ax.plot(dists[nd], f, ls='-', c=color, zorder=1)
    # Axis ticks/labels and the optional DOS panel are handled by helpers.
    self._maketicks(ax, units=units)
    self._makeplot(ax, plt.gcf(), data, width=width, height=height,
                   ymin=ymin, ymax=ymax, dos=dos, color=color)
    plt.tight_layout()
    plt.subplots_adjust(wspace=0)
    return plt
|
python
|
def check(response, expected_status=200, url=None):
    """
    Check whether the status code of the response equals expected_status and
    raise an _APIError otherwise.

    @param response: a requests-style response object (status_code, url,
                     text, headers, json()).
    @param expected_status: the HTTP status code that counts as success.
    @param url: The url of the response (for error messages).
               Defaults to response.url
    @return: the decoded JSON body when the Content-Type is
             application/json, otherwise the raw response text.
    """
    if response.status_code != expected_status:
        if url is None:
            url = response.url
        # FIX: bare `except:` would also swallow KeyboardInterrupt/SystemExit;
        # narrow to Exception.
        try:
            err = response.json()
        except Exception:
            err = {}  # force generic error
        if all(x in err for x in ("status", "message", "description", "details")):
            # Structured API error payload: surface its fields directly.
            raise _APIError(err["status"], err['message'], url,
                            err, err["description"], err["details"])
        else:  # generic error
            suffix = ".html" if "<html" in response.text else ".txt"
            msg = response.text
            if len(msg) > 200:
                # Long bodies are written to a temp file and truncated in the message.
                with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as f:
                    f.write(response.text.encode("utf-8"))
                msg = "{}...\n\n[snipped; full response written to {f.name}".format(msg[:100], **locals())
            msg = ("Request {url!r} returned code {response.status_code},"
                   " expected {expected_status}. \n{msg}".format(**locals()))
            raise _APIError(response.status_code, msg, url, response.text)
    if response.headers.get('Content-Type') == 'application/json':
        try:
            return response.json()
        except Exception:
            raise Exception("Cannot decode json; text={response.text!r}"
                            .format(**locals()))
    else:
        return response.text
|
java
|
/**
 * Writes out a complete frames document: builds the HEAD element
 * (timestamp, title, content-type meta, stylesheet and frames
 * JavaScript) around the supplied BODY and emits the finished page.
 *
 * @param title the window title for the generated page
 * @param configuration the doclet configuration in use
 * @param body the pre-built BODY tree to embed
 * @throws IOException if the document cannot be written
 */
public void printFramesDocument(String title, ConfigurationImpl configuration,
        HtmlTree body) throws IOException {
    final Content docType;
    if (configuration.isOutputHtml5()) {
        docType = DocType.HTML5;
    } else {
        docType = DocType.TRANSITIONAL;
    }
    Content newPageComment = new Comment(configuration.getText("doclet.New_Page"));
    // Assemble the HEAD: timestamp, title, charset meta, stylesheet, frame script.
    Content head = new HtmlTree(HtmlTag.HEAD);
    head.addContent(getGeneratedBy(!configuration.notimestamp));
    head.addContent(HtmlTree.TITLE(new StringContent(title)));
    String charset = (configuration.charset.length() > 0)
            ? configuration.charset
            : HtmlConstants.HTML_DEFAULT_CHARSET;
    head.addContent(HtmlTree.META("Content-Type", CONTENT_TYPE, charset));
    head.addContent(getStyleSheetProperties(configuration));
    head.addContent(getFramesJavaScript());
    Content html = HtmlTree.HTML(configuration.getLocale().getLanguage(), head, body);
    write(new HtmlDocument(docType, newPageComment, html));
}
|
python
|
def element_to_dict(elem_to_parse, element_path=None, recurse=True):
    """
    Convert an element losslessly into a dictionary.

    The resulting dictionary has the following keys:
        - name: the name of the element tag
        - text: the text contained by the element
        - tail: text immediately following the element
        - attributes: a dictionary containing the element attributes
        - children: a list of converted child elements (populated only
          when ``recurse`` is True)

    Returns an empty dict when no element can be resolved.
    """
    element = get_element(elem_to_parse, element_path)
    if element is None:
        return {}
    children = []
    # Note: the original contract compares against True itself, not truthiness.
    if recurse is True:
        children = [element_to_dict(child, recurse=recurse) for child in element]
    return {
        _ELEM_NAME: element.tag,
        _ELEM_TEXT: element.text,
        _ELEM_TAIL: element.tail,
        _ELEM_ATTRIBS: element.attrib,
        _ELEM_CHILDREN: children,
    }
|
java
|
/**
 * Introspects a single embedded field and records its metadata on the
 * enclosing entity's metadata.
 *
 * @param field the entity field marked as embedded
 */
private void processEmbeddedField(Field field) {
    // Wrap the raw field so the path/depth of the embedded field is tracked.
    final EmbeddedField embeddedField = new EmbeddedField(field);
    // Introspect it and register the resulting metadata with the entity.
    entityMetadata.putEmbeddedMetadata(
            EmbeddedIntrospector.introspect(embeddedField, entityMetadata));
}
|
python
|
def remove(self, widget):
    """Remove a widget from the window.

    Returns True when the widget was found (compared by identity) and
    removed; raises ValueError when the widget is not present.
    """
    for index, (candidate, _) in enumerate(self._widgets):
        if candidate is widget:
            del self._widgets[index]
            return True
    raise ValueError('Widget not in list')
|
python
|
def get_pull_command(self, remote=None, revision=None):
    """Get the command to pull changes from a remote repository into the local repository.

    Bazaar cannot pull a single specific revision, so passing
    ``revision`` raises NotImplementedError.
    """
    if revision:
        raise NotImplementedError(compact("""
            Bazaar repository support doesn't include
            the ability to pull specific revisions!
        """))
    # Base command, optionally followed by the remote location.
    return ['bzr', 'pull'] + ([remote] if remote else [])
|
java
|
/**
 * Adapts a Java 8 {@link CompletableFuture} to the {@code ApiFuture}
 * interface by first wrapping it as a value-source future and then
 * bridging that wrapper to an {@code ApiFuture}.
 *
 * @param completableFuture the future to adapt
 * @param <T> the type of value the future yields
 * @return an {@code ApiFuture} view of the given future
 */
public static <T> ApiFuture<T> toApiFuture(CompletableFuture<T> completableFuture) {
    return ApiFutureUtils.createApiFuture(Java8FutureUtils.createValueSourceFuture(completableFuture));
}
|
python
|
def plot_generation_over_load(stats, plotpath):
    """
    Plot of generation over load

    Scatter plot of generation capacity vs. peak load (both converted
    from kW to MW), saved as 'Scatter_generation_load.pdf' in
    ``plotpath``.

    NOTE(review): this mutates ``stats`` in place by adding the
    'generation_capacity' and 'peak_load' columns — confirm callers do
    not rely on ``stats`` staying unchanged.
    """
    # Generation capacity vs. peak load
    sns.set_context("paper", font_scale=1.1)
    sns.set_style("ticks")
    # reformat to MW
    gen_cap_indexes = ["Gen. Cap. of MV at v_level 4",
                       "Gen. Cap. of MV at v_level 5",
                       "Gen. Cap. of LV at v_level 6",
                       "Gen. Cap. of LV at v_level 7"]
    peak_load_index = ["LA Total LV Peak Load total"]
    # Sum capacity over all voltage levels; /1e3 converts to MW.
    stats['generation_capacity'] = stats[gen_cap_indexes].sum(axis=1) / 1e3
    stats['peak_load'] = stats[peak_load_index] / 1e3
    sns.lmplot('generation_capacity', 'peak_load',
               data=stats,
               fit_reg=False,
               # hue='v_nom',
               # hue='Voltage level',
               scatter_kws={"marker": "D",
                            "s": 100},
               aspect=2)
    plt.title('Peak load vs. generation capacity')
    plt.xlabel('Generation capacity in MW')
    plt.ylabel('Peak load in MW')
    plt.savefig(os.path.join(plotpath,
                             'Scatter_generation_load.pdf'))
|
java
|
/**
 * Appends the given metrics to this result, lazily creating the backing
 * list, and returns {@code this} to allow call chaining.
 *
 * @param metrics the metrics to append
 * @return this result object
 */
public ListMetricsResult withMetrics(Metric... metrics) {
    if (this.metrics == null) {
        // Allocate with the exact capacity needed for this call.
        setMetrics(new com.amazonaws.internal.SdkInternalList<Metric>(metrics.length));
    }
    java.util.Collections.addAll(this.metrics, metrics);
    return this;
}
|
python
|
def load_pickle(filename):
    """Load a pickle file as a dictionary.

    Returns a ``(data, error)`` pair: on success ``error`` is None; on
    any failure ``data`` is None and ``error`` is the exception message.
    Uses pandas' pickle reader when pandas is available.
    """
    try:
        if not pd:
            # pandas unavailable: fall back to the stdlib pickle module.
            with open(filename, 'rb') as fid:
                return pickle.load(fid), None
        return pd.read_pickle(filename), None
    except Exception as err:
        return None, str(err)
|
python
|
def convert_default(self, field, **params):
    """Map ``field`` to a marshmallow field class via TYPE_MAPPING.

    Returns an instance of the first mapped field whose class matches
    ``field``; falls back to a raw field when nothing matches.
    """
    _MISSING = object()
    matched = next(
        (mapped for klass, mapped in self.TYPE_MAPPING
         if isinstance(field, klass)),
        _MISSING,
    )
    if matched is not _MISSING:
        return matched(**params)
    return fields.Raw(**params)
|
java
|
/**
 * Checks whether content for the given resource exists either in the
 * import data or in the VFS.
 *
 * @param resource the resource to check
 * @return true when content is available for the resource
 */
private boolean hasContentInVfsOrImport(CmsResource resource) {
    // Content shipped with the import takes precedence.
    if (m_contentFiles.contains(resource.getResourceId())) {
        return true;
    }
    try {
        // NOTE(review): readSiblings typically includes the resource itself,
        // so a non-empty result may be trivially true — confirm intent.
        if (!getCms().readSiblings(resource, CmsResourceFilter.ALL).isEmpty()) {
            return true;
        }
    } catch (CmsException e) {
        // An unreadable/missing resource simply means "no content here".
        LOG.warn(e.getLocalizedMessage(), e);
    }
    return false;
}
|
python
|
def stacklevel():
    """Fetch current stack level.

    Walks the current traceback from the outermost frame and returns
    (index - 1) of the first frame whose filename lives inside the
    ``pcapkit`` package; when no such frame exists, returns the full
    stack depth minus one.
    """
    marker = f'{os.path.sep}pcapkit{os.path.sep}'
    stack = traceback.extract_stack()
    index = len(stack)
    for position, frame in enumerate(stack):
        if marker in frame[0]:
            index = position
            break
    return index - 1
|
python
|
def post_comment(self, bugid, comment):
    '''Create a comment on a bug.

    See http://bugzilla.readthedocs.org/en/latest/api/core/v1/comment.html#create-comments
    '''
    payload = json.dumps({'id': bugid, "comment": comment})
    endpoint = 'bug/{bugid}/comment'.format(bugid=bugid)
    return self._post(endpoint, payload)
|
java
|
/**
 * Returns an aggregation that computes the minimum over integer values.
 *
 * @param <Key> the key type of the aggregated entries
 * @param <Value> the value type of the aggregated entries
 * @return an aggregation yielding the minimum integer
 */
public static <Key, Value> Aggregation<Key, Integer, Integer> integerMin() {
    return new AggregationAdapter(new IntegerMinAggregation<Key, Value>());
}
|
java
|
/**
 * Delivers a trust-graph advertisement to a neighbor node, stamping the
 * message with the sender and remaining TTL before delivery.
 *
 * @param message the advertisement payload
 * @param sender the node the message originates from
 * @param toNeighbor the neighbor to deliver to
 * @param ttl remaining time-to-live for the advertisement
 */
public void sendAdvertisement(final BasicTrustGraphAdvertisement message,
        final TrustGraphNodeId sender,
        final TrustGraphNodeId toNeighbor,
        final int ttl) {
    final TrustGraphNode target = nodes.get(toNeighbor);
    target.handleAdvertisement(message.copyWith(sender, ttl));
}
|
java
|
/**
 * Returns whether another element is available, fetching and caching the
 * next element on the first call after a consume.
 */
@Override
public boolean hasNext() {
    if (state == NOT_CACHED) {
        // Mark cached first, then pull the element (same ordering as before,
        // even when fetch() throws).
        state = CACHED;
        nextElement = fetch();
        return true;
    }
    return state == CACHED;
}
|
python
|
def can_pair_with(self, other):
    """
    A local candidate is paired with a remote candidate if and only if
    the two candidates have the same component ID, the same transport
    protocol (case-insensitive) and the same IP address version.
    """
    # Parse both hosts first so invalid addresses raise regardless of
    # the component/transport comparison outcome.
    mine = ipaddress.ip_address(self.host)
    theirs = ipaddress.ip_address(other.host)
    if self.component != other.component:
        return False
    if self.transport.lower() != other.transport.lower():
        return False
    return mine.version == theirs.version
|
python
|
def create_lv(self, name, length, units):
    """
    Creates a logical volume and returns the LogicalVolume instance associated with
    the lv_t handle::
        from lvm2py import *
        lvm = LVM()
        vg = lvm.get_vg("myvg", "w")
        lv = vg.create_lv("mylv", 40, "MiB")
    *Args:*
    *       name (str): The desired logical volume name.
    *       length (int): The desired size.
    *       units (str): The size units ("%" for a percentage of the VG size).
    *Raises:*
    *       HandleError, CommitError, ValueError
    .. note::
        The VolumeGroup instance must be in write mode, otherwise CommitError
        is raised.
    """
    if units != "%":
        # Absolute size: scale the requested length by the unit factor.
        size = size_units[units] * length
    else:
        # Percentage of the VG size; must be an integer in (0, 100].
        if not (0 < length <= 100) or type(length) is float:
            raise ValueError("Length not supported.")
        size = (self.size("B") / 100) * length
    # Open the VG handle only for the duration of the create call.
    self.open()
    lvh = lvm_vg_create_lv_linear(self.handle, name, c_ulonglong(size))
    if not bool(lvh):
        # Close before raising so the VG handle is not leaked.
        self.close()
        raise CommitError("Failed to create LV.")
    lv = LogicalVolume(self, lvh=lvh)
    self.close()
    return lv
|
java
|
/**
 * Advances past any run of space or newline characters starting at
 * {@code nStart}.
 *
 * @param sIn the string to scan
 * @param nStart the index to start scanning from
 * @return the index of the first character that is neither ' ' nor '\n',
 *         or -1 when the end of the string is reached
 */
public static int skipSpaces (final String sIn, final int nStart)
{
  final int nLen = sIn.length ();
  int nIndex = nStart;
  while (nIndex < nLen)
  {
    final char c = sIn.charAt (nIndex);
    if (c != ' ' && c != '\n')
      return nIndex;
    nIndex++;
  }
  // Only whitespace (or nothing) until the end of the string.
  return -1;
}
|
java
|
/**
 * Stores user data in the given slot.
 *
 * @param index the slot index to write
 * @param data the value to store
 * @throws IndexOutOfBoundsException if no user-data storage has been
 *         allocated yet, or the index is out of range
 */
public void setUserDataAt(int index, Object data) {
    if (this.userData != null) {
        this.userData.set(index, data);
        return;
    }
    // No backing list was ever allocated: every index is invalid.
    throw new IndexOutOfBoundsException();
}
|
java
|
/**
 * Opens a database connection, auto-detecting driver quirks from the
 * JDBC url.
 *
 * @param url the JDBC connection url
 * @param user the database user name
 * @param pass the database password
 * @return an open {@code Anima} instance
 */
public static Anima open(String url, String user, String pass) {
    return open(url, user, pass, QuirksDetector.forURL(url));
}
|
python
|
def _func(self, volume, params):
"""
BirchMurnaghan equation from PRB 70, 224107
"""
e0, b0, b1, v0 = tuple(params)
eta = (v0 / volume) ** (1. / 3.)
return (e0 +
9. * b0 * v0 / 16. * (eta ** 2 - 1)**2 *
(6 + b1 * (eta ** 2 - 1.) - 4. * eta ** 2))
|
python
|
def upload_image(self,
                 image_file,
                 referer_url=None,
                 title=None,
                 desc=None,
                 created_at=None,
                 collection_id=None):
    """Upload an image.

    :param image_file: File-like object of an image file
    :param referer_url: Referer site URL
    :param title: Site title
    :param desc: Comment
    :param created_at: Image's created time in unix time
    :param collection_id: Collection ID
    """
    # Only include form fields the caller actually supplied;
    # created_at is sent as a string per the API contract.
    candidates = (
        ('referer_url', referer_url),
        ('title', title),
        ('desc', desc),
        ('created_at', None if created_at is None else str(created_at)),
        ('collection_id', collection_id),
    )
    data = {key: value for key, value in candidates if value is not None}
    files = {'imagedata': image_file}
    response = self._request_url(
        self.upload_url + '/api/upload', 'post',
        data=data, files=files, with_access_token=True)
    headers, result = self._parse_and_check(response)
    return Image.from_dict(result)
|
python
|
def get_first_child_of_type(self, klass):
    """! @brief Breadth-first search for a child of the given class.
    @param self
    @param klass The class type to search for. The first child at any depth that is an
        instance of this class (or a subclass) is returned; matches at shallower
        nodes take precedence over deeper ones.
    @returns Either a node object or None.
    """
    matches = self.find_children(lambda child: isinstance(child, klass))
    return matches[0] if len(matches) else None
|
python
|
def _set_group_type(self, v, load=False):
    """
    Setter method for group_type, mapped from YANG variable /openflow_state/group/group_info_list/group_type (group-type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_group_type is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_group_type() directly.

    YANG Description: Group type

    Note: this is machine-generated pyangbind code; the value is wrapped
    in a YANGDynClass that validates it against the group-type enum.
    """
    # Unwrap union-typed values before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'dcm-group-type-select': {'value': 2}, u'dcm-group-type-invalid': {'value': 0}, u'dcm-group-type-fast-failover': {'value': 4}, u'dcm-group-type-indirect': {'value': 3}, u'dcm-group-type-all': {'value': 1}},), is_leaf=True, yang_name="group-type", rest_name="group-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-openflow-operational', defining_module='brocade-openflow-operational', yang_type='group-type', is_config=False)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload the framework expects.
        raise ValueError({
            'error-string': """group_type must be of a type compatible with group-type""",
            'defined-type': "brocade-openflow-operational:group-type",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'dcm-group-type-select': {'value': 2}, u'dcm-group-type-invalid': {'value': 0}, u'dcm-group-type-fast-failover': {'value': 4}, u'dcm-group-type-indirect': {'value': 3}, u'dcm-group-type-all': {'value': 1}},), is_leaf=True, yang_name="group-type", rest_name="group-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-openflow-operational', defining_module='brocade-openflow-operational', yang_type='group-type', is_config=False)""",
        })
    self.__group_type = t
    # Propagate the change through the generated setter hook, if present.
    if hasattr(self, '_set'):
        self._set()
|
java
|
/**
 * Reads all objects matching the given filter.
 *
 * The filter's "where" JSON object is flattened into (property-name,
 * property-value) pairs; a row must match every pair to be included.
 * With an empty filter, every stored object is returned.
 *
 * NOTE(review): matching counts one hit per satisfied pair, so an
 * object is returned only when its hit count equals the number of
 * query pairs — confirm duplicate property names cannot over-count.
 *
 * @param filter the read filter; null behaves like an empty filter
 * @return the matching objects
 */
@Override
public List<T> readWithFilter(ReadFilter filter) {
    ensureOpen();
    if (filter == null) {
        filter = new ReadFilter();
    }
    // Look up parent ids whose property name/value match a single pair.
    String sql = String.format("select PARENT_ID from %s_property where PROPERTY_NAME LIKE ? and PROPERTY_VALUE = ?", className);
    JsonObject where = (JsonObject) new JsonParser().parse(filter.getWhere().toString());
    List<Pair<String, String>> queryList = new ArrayList<Pair<String, String>>();
    // Per-object count of how many query pairs it satisfied.
    Map<String, AtomicInteger> resultCount = new HashMap<String, AtomicInteger>();
    buildKeyValuePairs(where, queryList, "");
    if (queryList.isEmpty()) {// there is no query
        return new ArrayList<T>(readAll());
    } else {
        for (Pair<String, String> kv : queryList) {
            String[] bindArgs = new String[]{kv.first, kv.second};
            Cursor cursor = database.rawQuery(sql, bindArgs);
            while (cursor.moveToNext()) {
                String id = cursor.getString(0);
                AtomicInteger count = resultCount.get(id);
                if (count == null) {
                    count = new AtomicInteger(0);
                    resultCount.put(id, count);
                }
                count.incrementAndGet();
            }
            cursor.close();
        }
    }
    List<T> results = new ArrayList<T>();
    for (String id : resultCount.keySet()) {
        if (resultCount.get(id).get() == queryList.size()) {// There are as many objects as queries which meant a result was returned for every query
            results.add(read(id));
        }
    }
    return results;
}
|
python
|
def delete_build(self, project, build_id):
    """DeleteBuild.

    Deletes a build.
    :param str project: Project ID or project name
    :param int build_id: The ID of the build.
    """
    route_values = {}
    # Serialize only the route parameters that were supplied.
    for route_key, arg_name, value, kind in (
            ('project', 'project', project, 'str'),
            ('buildId', 'build_id', build_id, 'int')):
        if value is not None:
            route_values[route_key] = self._serialize.url(arg_name, value, kind)
    self._send(http_method='DELETE',
               location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
               version='5.0',
               route_values=route_values)
|
python
|
def decode(self, hashid):
    """Restore a tuple of numbers from the passed `hashid`.

    :param hashid: The hashid to decode

    >>> hashids = Hashids('arbitrary salt', 16, 'abcdefghijkl0123456')
    >>> hashids.decode('1d6216i30h53elk3')
    (1, 23, 456)
    """
    if not hashid or not _is_str(hashid):
        return ()
    try:
        numbers = tuple(_decode(hashid, self._salt, self._alphabet,
                                self._separators, self._guards))
        # Only accept hashids that round-trip to the exact same string;
        # anything else is treated as forged/invalid.
        if self.encode(*numbers) == hashid:
            return numbers
        return ()
    except ValueError:
        return ()
|
python
|
def get_virtualenv_env_data(mgr):
    """Find kernel specs provided by virtualenv environments.

    Returns env_data, a mapping ``{name -> (resourcedir, kernel spec)}``;
    empty when virtualenv discovery is disabled on ``mgr``.
    """
    if not mgr.find_virtualenv_envs:
        return {}
    mgr.log.debug("Looking for virtualenv environments in %s...", mgr.virtualenv_env_dirs)
    # Collect every candidate environment path under the configured dirs.
    env_paths = find_env_paths_in_basedirs(mgr.virtualenv_env_dirs)
    mgr.log.debug("Scanning virtualenv environments for python kernels...")
    return convert_to_env_data(
        mgr=mgr,
        env_paths=env_paths,
        validator_func=validate_IPykernel,
        activate_func=_get_env_vars_for_virtualenv_env,
        name_template=mgr.virtualenv_prefix_template,
        display_name_template=mgr.display_name_template,
        # virtualenv has only python, so no need for a prefix
        name_prefix="")
|
java
|
/**
 * Computes the p-value of McNemar's test (with Yates's correction for
 * continuity) on a 2x2 contingency table. Only the discordant counts
 * n12 and n21 contribute to the statistic.
 *
 * @param n11 concordant count (not used by the statistic)
 * @param n12 first discordant count
 * @param n21 second discordant count
 * @param n22 concordant count (not used by the statistic)
 * @return the p-value of the test
 */
public static double getPvalue(int n11, int n12, int n21, int n22) {
    final double chiSquare = Math.pow(Math.abs(n12 - n21) - 0.5, 2) / (n12 + n21);
    return scoreToPvalue(chiSquare);
}
|
python
|
def run_apidoc(_):
    """This method is required by the setup method below.

    Runs sphinx-apidoc over the aaf2 package, writing the generated rst
    files into the local ``api`` directory; the ``model`` subpackage is
    excluded from generation.
    """
    import os
    base = os.path.dirname(__file__)
    # https://github.com/sphinx-doc/sphinx/blob/master/sphinx/ext/apidoc.py
    argv = [
        '--force',
        '--no-toc',
        '--separate',
        '--module-first',
        '--output-dir',
        os.path.join(base, 'api'),
        os.path.join(base, '../../aaf2'),
        # paths listed after the package root are excluded
        os.path.join(base, '../../aaf2/model'),
    ]
    from sphinx.ext import apidoc
    apidoc.main(argv)
|
python
|
def _process_for_uuid(cls, response_raw):
    """Extract the uuid from a raw API response.

    :type response_raw: client.BunqResponseRaw
    :rtype: client.BunqResponse[str]
    """
    body = response_raw.body_bytes.decode()
    parsed = converter.json_to_class(dict, body)
    # Unwrap the single uuid field and deserialize it into a Uuid object.
    wrapped = cls._unwrap_response_single(parsed, cls._FIELD_UUID)
    uuid = converter.deserialize(Uuid, wrapped)
    return client.BunqResponse(uuid.uuid, response_raw.headers)
|
java
|
/**
 * Shuts down the distributed executor service backing this task.
 *
 * @return the tasks that were still awaiting execution when shutdownNow
 *         was requested, or an empty collection for a plain shutdown
 *         (or when the service was already stopped)
 * @throws Exception never thrown here; declared by {@code Callable}
 */
public Collection<HazeltaskTask<GROUP>> call() throws Exception {
    try {
        if (isShutdownNow) {
            return this.getDistributedExecutorService().shutdownNowWithHazeltask();
        }
        this.getDistributedExecutorService().shutdown();
    } catch (IllegalStateException ignored) {
        // The service was already shut down (or Hazelcast is stopping);
        // treat this as a successful no-op rather than failing the task.
    }
    return Collections.emptyList();
}
|
java
|
/**
 * Removes a property, including all of its alternative or deprecated
 * names, from both the overlay and the underlying properties.
 *
 * @param name the property name to remove
 */
public synchronized void unset(String name) {
    String[] targets;
    if (isDeprecated(name)) {
        // Deprecated keys expand to their replacement names.
        targets = handleDeprecation(deprecationContext.get(), name);
    } else {
        targets = getAlternativeNames(name);
        if (targets == null) {
            targets = new String[]{name};
        }
    }
    for (String key : targets) {
        getOverlay().remove(key);
        getProps().remove(key);
    }
}
|
java
|
/**
 * Replaces the broker summaries with a copy of the given collection.
 *
 * @param brokerSummaries the summaries to store; null clears the field
 */
public void setBrokerSummaries(java.util.Collection<BrokerSummary> brokerSummaries) {
    // Defensive copy so later changes to the caller's collection don't leak in.
    this.brokerSummaries = (brokerSummaries == null)
            ? null
            : new java.util.ArrayList<BrokerSummary>(brokerSummaries);
}
|
python
|
def _get_wrapper():
    """
    Get a socket wrapper based on SSL config.

    Returns the identity function when SSL is disabled; otherwise a
    callable that wraps a plain socket in TLS.
    """
    if not pmxbot.config.get('use_ssl', False):
        return lambda x: x
    ssl = importlib.import_module('ssl')
    if hasattr(ssl, 'wrap_socket'):
        # Python < 3.12 still ships the module-level helper.
        return ssl.wrap_socket
    # ssl.wrap_socket was removed in Python 3.12; emulate its old default
    # behavior (client-side TLS with no certificate verification).
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    return context.wrap_socket
|
java
|
/**
 * Counts the executions of the given class that match the criteria.
 *
 * NOTE(review): the transaction is committed on success and then
 * unconditionally rolled back in the finally block — presumably a
 * rollback after commit is a no-op in this Transaction implementation;
 * confirm before changing.
 *
 * @param cls the execution class to count
 * @param criteria the search criteria; the executed query is expected
 *        to return the result under the "count" key
 * @return the number of matching executions
 * @throws PersistenceException if the underlying query fails
 */
public long count(Class<? extends Execution> cls, Map<String,Object> criteria) throws PersistenceException {
    logger.debug("enter - count(Class,Map)");
    if( logger.isDebugEnabled() ) {
        logger.debug("For: " + cache.getTarget().getName() + "/" + cls + "/" + criteria);
    }
    try {
        Transaction xaction = Transaction.getInstance(true);
        try {
            long count = 0L;
            // The execute call replaces the criteria with the result map.
            criteria = xaction.execute(cls, criteria);
            count = ((Number)criteria.get("count")).longValue();
            xaction.commit();
            return count;
        }
        finally {
            xaction.rollback();
        }
    }
    finally {
        logger.debug("exit - count(Class,Map)");
    }
}
|
java
|
/**
 * Creates a bounded overlay backed by the given tile DAO.
 *
 * @param tileDao the tile data access object
 * @param density the display density used to scale tiles
 * @param scaling the tile scaling options
 * @return a bounded overlay over the GeoPackage tiles
 */
public static BoundedOverlay getBoundedOverlay(TileDao tileDao, float density, TileScaling scaling) {
    return new GeoPackageOverlay(tileDao, density, scaling);
}
|
java
|
/**
 * Checks whether the given string is a well-formed lowercase UUID.
 * A null value is considered valid (treated as "not set").
 *
 * @param value the string to validate, may be null
 * @return true when value is null or matches the canonical
 *         8-4-4-4-12 lowercase-hex UUID layout
 */
public static boolean isValid(final String value) {
    if (value == null) {
        return true;
    }
    // Canonical 8-4-4-4-12 lowercase-hex UUID layout.
    return value.matches(
            "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$");
}
|
python
|
def spatialBin(self, roi):
    """
    Calculate indices of ROI pixels corresponding to object locations.

    Sets ``self.pixel`` and ``self.pixel_roi_index``. Does nothing (with
    a warning) when the catalog was already spatially binned.
    """
    already_binned = hasattr(self, 'pixel_roi_index') and hasattr(self, 'pixel')
    if already_binned:
        logger.warning('Catalog alread spatially binned')
        return
    # ADW: Not safe to set index = -1 (since it will access last entry);
    # np.inf would be better...
    nside = self.config['coords']['nside_pixel']
    self.pixel = ang2pix(nside, self.lon, self.lat)
    self.pixel_roi_index = roi.indexROI(self.lon, self.lat)
    outside = (self.pixel_roi_index < 0).sum()
    logger.info("Found %i objects outside ROI" % outside)
|
python
|
def fit1d(samples, e, remove_zeros = False, **kw):
    """Fits a 1D distribution with splines.
    Input:
        samples: Array
            Array of samples from a probability distribution
        e: Array
            Edges that define the events in the probability
            distribution. For example, e[0] < x <= e[1] is
            the range of values that are associated with the
            first event.
        remove_zeros: bool
            When True, empty histogram bins are excluded from the fit;
            otherwise they are clamped to machine epsilon.
        **kw: Arguments that are passed on to spline_base1d.
    Returns:
        distribution: Array
            An array that gives an estimate of probability for
            events defined by e.
        hist: Array
            The normalized histogram the model was fitted against.
        knots: Array
            Sequence of knots that were used for the spline basis
    """
    # Drop NaNs so they don't poison the histogram.
    samples = samples[~np.isnan(samples)]
    length = len(e)-1
    hist,_ = np.histogramdd(samples, (e,))
    # Normalize to a probability mass function.
    hist = hist/sum(hist)
    basis, knots = spline_base1d(length, marginal = hist, **kw)
    non_zero = hist>0
    # Bayesian ridge regression of the histogram onto the spline basis.
    model = linear_model.BayesianRidge()
    if remove_zeros:
        model.fit(basis[non_zero, :], hist[:,np.newaxis][non_zero,:])
    else:
        # Avoid exact zeros, which the regression handles poorly.
        hist[~non_zero] = np.finfo(float).eps
        model.fit(basis, hist[:,np.newaxis])
    return model.predict(basis), hist, knots
|
java
|
/**
 * Scrolls the layout to the first item of the requested page.
 *
 * @param pageNumber the zero-based page to scroll to
 * @return the current item index after the (attempted) scroll
 */
public int scrollToPage(int pageNumber) {
    Log.d(Log.SUBSYSTEM.LAYOUT, TAG, "scrollToPage pageNumber = %d mPageCount = %d",
            pageNumber, mPageCount);
    final boolean pageInRange = pageNumber >= 0 && pageNumber <= mPageCount - 1;
    // With scroll-over enabled, out-of-range pages are allowed through.
    if (mSupportScrollByPage && (mScrollOver || pageInRange)) {
        scrollToItem(getFirstItemIndexOnPage(pageNumber));
    } else {
        Log.w(TAG, "Pagination is not enabled!");
    }
    return mCurrentItemIndex;
}
|
java
|
/**
 * Recursively loads the key/value pairs of a JSON object into the given
 * properties, recording which resource each property came from.
 *
 * <p>Key names are translated from their thrift-safe form: a single
 * underscore becomes a dot, and a double underscore becomes a literal
 * underscore. Keys marked final are left untouched.
 *
 * @param json the JSON object to read
 * @param properties the properties to populate
 * @param name the resource name recorded as the origin of each property
 * @throws JSONException if a value cannot be read from the JSON object
 */
private void loadJsonResource(JSONObject json, Properties properties, Object name)
    throws JSONException {
    Iterator<?> keys = json.keys();
    while (keys.hasNext()) {
        Object obj = keys.next();
        if (!(obj instanceof String)) {
            LOG.warn("Object not instance of string : " + obj + " skipping");
            continue;
        }
        String key = (String) obj;
        // can't have . in thrift fields so we represent . with _
        String keyUnderscoresToDots = key.replace("_", ".");
        // actual _ are represented as __ in thrift schema (now "..")
        keyUnderscoresToDots = keyUnderscoresToDots.replace("..", "_");
        if (json.isNull(key)) {
            continue;
        }
        Object value = json.get(key);
        String stringVal = "";
        if (value instanceof JSONObject) {
            // Nested objects are flattened recursively under the same resource name.
            loadJsonResource((JSONObject) value, properties, name);
            continue;
        } else if (value instanceof String || value instanceof Integer
                || value instanceof Long || value instanceof Double
                || value instanceof Boolean) {
            // String.valueOf avoids the deprecated boxed-type constructors
            // and yields the same text as the per-type toString() calls.
            stringVal = String.valueOf(value);
        } else {
            LOG.warn("unsupported value in json object: " + value);
        }
        if (!finalParameters.contains(keyUnderscoresToDots)) {
            properties.setProperty(keyUnderscoresToDots, stringVal);
            updatingResource.put(keyUnderscoresToDots, name.toString());
        } else {
            LOG.warn(name+":a attempt to override final parameter: "+
                keyUnderscoresToDots+"; Ignoring.");
        }
    }
}
|
python
|
def sameAddr(self, ha, ha2) -> bool:
    """
    Check whether the two arguments correspond to the same address.

    Two (host, port) pairs match when they are equal, or when their
    ports are equal and both hosts are local addresses of this machine.
    """
    if ha == ha2:
        return True
    return ha[1] == ha2[1] and ha[0] in self.localips and ha2[0] in self.localips
|
python
|
def from_xmrs(cls, xmrs, predicate_modifiers=False, **kwargs):
    """
    Instantiate an Eds from an Xmrs (lossy conversion).

    Args:
        xmrs (:class:`~delphin.mrs.xmrs.Xmrs`): Xmrs instance to
            convert from
        predicate_modifiers (function, bool): function called as
            ``func(xmrs, deps)`` after the basic dependencies (`deps`)
            are found, returning a mapping of predicate-modifier
            dependencies; both `deps` and the returned mapping have the
            form ``{head: [(role, dependent)]}``. When True, the
            function is built via
            ``non_argument_modifiers(role='ARG1', only_connecting=True)``;
            when False or None, only the basic dependencies are used.
    """
    eps = xmrs.eps()
    deps = _find_basic_dependencies(xmrs, eps)
    # Resolve the predicate-modifier strategy into extra dependencies.
    # Identity checks are deliberate: only the literal True/False/None
    # select a branch; anything else must be callable.
    if predicate_modifiers is True:
        finder = non_argument_modifiers(role='ARG1', only_connecting=True)
        extra_deps = finder(xmrs, deps)
    elif predicate_modifiers is False or predicate_modifiers is None:
        extra_deps = {}
    elif hasattr(predicate_modifiers, '__call__'):
        extra_deps = predicate_modifiers(xmrs, deps)
    else:
        raise TypeError('a boolean or callable is required')
    for nid, dep_list in extra_deps.items():
        deps.setdefault(nid, []).extend(dep_list)
    ids = _unique_ids(eps, deps)
    top = _find_root(xmrs)
    if top is not None:
        top = ids[top]
    nodes = [Node(ids[node.nodeid], *node[1:]) for node in make_nodes(xmrs)]
    edges = [(ids[head], role, ids[tail])
             for head, dep_list in deps.items()
             for role, tail in dep_list]
    return cls(top=top, nodes=nodes, edges=edges)
|
java
|
/**
 * Builds a gallery folder entry bean for the given folder resource.
 *
 * @param folder the gallery folder resource
 * @param typeName the resource type name of the gallery
 * @return the populated folder entry
 * @throws CmsException if reading folder data fails
 */
private CmsGalleryFolderEntry readGalleryFolderEntry(CmsResource folder, String typeName) throws CmsException {
    final CmsObject cms = getCmsObject();
    final CmsGalleryFolderEntry entry = new CmsGalleryFolderEntry();
    entry.setResourceType(typeName);
    entry.setSitePath(cms.getSitePath(folder));
    entry.setStructureId(folder.getStructureId());
    entry.setOwnProperties(getClientProperties(cms, folder, false));
    entry.setIconClasses(CmsIconUtil.getIconClasses(typeName, null, false));
    return entry;
}
|
python
|
def _update_limits_from_api(self):
    """
    Query EC2's DescribeAccountAttributes API action, and update limits
    with the quotas returned. Updates ``self.limits``.
    """
    self.connect()
    self.connect_resource()
    logger.info("Querying EC2 DescribeAccountAttributes for limits")
    # Maps API attribute names to the limit names used in self.limits.
    attribute_to_limit = {
        'max-elastic-ips': 'Elastic IP addresses (EIPs)',
        'max-instances': 'Running On-Demand EC2 instances',
        'vpc-max-elastic-ips': 'VPC Elastic IP addresses (EIPs)',
        'vpc-max-security-groups-per-interface':
            'VPC security groups per elastic network interface',
    }
    # no need to paginate
    attribs = self.conn.describe_account_attributes()
    for attrib in attribs['AccountAttributes']:
        # Read the value first (as the original did) so malformed
        # attributes fail the same way regardless of their name.
        val = attrib['AttributeValues'][0]['AttributeValue']
        lname = attribute_to_limit.get(attrib['AttributeName'])
        if lname is None:
            continue
        if int(val) == 0:
            # A zero quota from the API is treated as "unknown"; skip it.
            continue
        self.limits[lname]._set_api_limit(int(val))
    logger.debug("Done setting limits from API")
|
java
|
/**
 * Extracts the position of every pose along this path.
 *
 * @return the positions, in path order
 */
public Coordinate[] getPositions() {
    final ArrayList<Coordinate> positions = new ArrayList<Coordinate>();
    for (final PoseSteering poseSteering : psa) {
        positions.add(poseSteering.getPose().getPosition());
    }
    return positions.toArray(new Coordinate[positions.size()]);
}
|
java
|
/**
 * Opens a netty channel for the given etcd request and, once connected,
 * sends the HTTP request over it. Target resolution order: the host/port
 * embedded in the request url (redirects), then the ETCD4J_ENDPOINT
 * environment variable when no uris are configured, then the current
 * entry of the configured uri list. Connection failures are routed into
 * the request promise's retry handling.
 *
 * @param etcdRequest the request to send once connected
 * @param connectionState tracks the configured uris and current index
 * @throws IOException if the connection attempt cannot be started
 */
@SuppressWarnings("unchecked")
protected <R> void connect(final EtcdRequest<R> etcdRequest, final ConnectionState connectionState) throws IOException {
    // A shutting-down event loop means the client was closed; abort retries.
    if(eventLoopGroup.isShuttingDown() || eventLoopGroup.isShutdown() || eventLoopGroup.isTerminated()){
        etcdRequest.getPromise().getNettyPromise().cancel(true);
        logger.debug("Retry canceled because of closed etcd client");
        return;
    }
    final URI uri;
    // when we are called from a redirect, the url in the request may also
    // contain host and port!
    URI requestUri = URI.create(etcdRequest.getUrl());
    if (requestUri.getHost() != null && requestUri.getPort() > -1) {
        uri = requestUri;
    } else if (connectionState.uris.length == 0 && System.getenv(ENV_ETCD4J_ENDPOINT) != null) {
        // read uri from environment variable
        String endpoint_uri = System.getenv(ENV_ETCD4J_ENDPOINT);
        if(logger.isDebugEnabled()) {
            logger.debug("Will use environment variable {} as uri with value {}", ENV_ETCD4J_ENDPOINT, endpoint_uri);
        }
        uri = URI.create(endpoint_uri);
    } else {
        uri = connectionState.uris[connectionState.uriIndex];
    }
    // Start the connection attempt.
    final ChannelFuture connectFuture = bootstrap.connect(connectAddress(uri));
    etcdRequest.getPromise().getConnectionState().loop = connectFuture.channel().eventLoop();
    // Bind a fresh netty promise tied to the new channel's event loop.
    etcdRequest.getPromise().attachNettyPromise(connectFuture.channel().eventLoop().<R>newPromise());
    connectFuture.addListener(new GenericFutureListener<ChannelFuture>() {
        @Override
        public void operationComplete(final ChannelFuture f) throws Exception {
            if (!f.isSuccess()) {
                final Throwable cause = f.cause();
                if (logger.isDebugEnabled()) {
                    logger.debug("Connection failed to {}, cause {}", connectionState.uris[connectionState.uriIndex], cause);
                }
                // Closed-channel/illegal-state failures are terminal; everything
                // else goes through the promise's retry policy.
                if (cause instanceof ClosedChannelException || cause instanceof IllegalStateException) {
                    etcdRequest.getPromise().cancel(new CancellationException("Channel closed"));
                } else {
                    etcdRequest.getPromise().handleRetry(f.cause());
                }
                return;
            }
            // Handle already cancelled promises
            if (etcdRequest.getPromise().getNettyPromise().isCancelled()) {
                f.channel().close();
                etcdRequest.getPromise().getNettyPromise().setFailure(new CancellationException());
                return;
            }
            final Promise listenedToPromise = etcdRequest.getPromise().getNettyPromise();
            // Close channel when promise is satisfied or cancelled later
            listenedToPromise.addListener(new GenericFutureListener<Future<?>>() {
                @Override
                public void operationComplete(Future<?> future) throws Exception {
                    // Only close if it was not redirected to new promise
                    if (etcdRequest.getPromise().getNettyPromise() == listenedToPromise) {
                        f.channel().close();
                    }
                }
            });
            if (logger.isDebugEnabled()) {
                logger.debug("Connected to {} ({})", f.channel().remoteAddress().toString(), connectionState.uriIndex);
            }
            // Remember the uri that worked so future requests try it first.
            lastWorkingUriIndex = connectionState.uriIndex;
            modifyPipeLine(etcdRequest, f.channel().pipeline());
            createAndSendHttpRequest(uri, etcdRequest.getUrl(), etcdRequest, f.channel())
                .addListener(new ChannelFutureListener() {
                    @Override
                    public void operationComplete(ChannelFuture future) throws Exception {
                        if (!future.isSuccess()) {
                            etcdRequest.getPromise().setException(future.cause());
                            if (!f.channel().eventLoop().inEventLoop()) {
                                f.channel().eventLoop().shutdownGracefully();
                            }
                            f.channel().close();
                        }
                    }
                });
            f.channel().closeFuture().addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) throws Exception {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Connection closed for request {} on uri {} ",
                            etcdRequest.getMethod().name(),
                            etcdRequest.getUri());
                    }
                }
            });
        }
    });
}
|
python
|
def clear_end_date(self):
    """Clears the end date.

    raise: NoAccess - ``Metadata.isRequired()`` or
        ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*
    """
    metadata = self.get_end_date_metadata()
    if metadata.is_read_only() or metadata.is_required():
        raise errors.NoAccess()
    # Reset to the configured default end-date value.
    default = self._mdata['end_date']['default_date_time_values'][0]
    self._my_map['endDate'] = default
|
java
|
/**
 * Marshals a GetDocumentVersionRequest into the protocol format,
 * emitting each field with its protocol binding in wire order.
 *
 * @param getDocumentVersionRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving the fields
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(GetDocumentVersionRequest getDocumentVersionRequest, ProtocolMarshaller protocolMarshaller) {
    if (getDocumentVersionRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(getDocumentVersionRequest.getAuthenticationToken(), AUTHENTICATIONTOKEN_BINDING);
        protocolMarshaller.marshall(getDocumentVersionRequest.getDocumentId(), DOCUMENTID_BINDING);
        protocolMarshaller.marshall(getDocumentVersionRequest.getVersionId(), VERSIONID_BINDING);
        protocolMarshaller.marshall(getDocumentVersionRequest.getFields(), FIELDS_BINDING);
        protocolMarshaller.marshall(getDocumentVersionRequest.getIncludeCustomMetadata(), INCLUDECUSTOMMETADATA_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Logically negates the given predicate, producing SQL of the form
 * {@code not (<child>)}.
 *
 * @param childPredicate the predicate to negate
 * @return a predicate whose SQL wraps the child's SQL in {@code not (...)}
 */
public static Predicate not(final Predicate childPredicate) {
    return new Predicate() {
        public void init(AbstractSqlCreator creator) {
            // Delegate initialization straight to the wrapped predicate.
            childPredicate.init(creator);
        }

        public String toSql() {
            StringBuilder sql = new StringBuilder("not (");
            sql.append(childPredicate.toSql());
            sql.append(")");
            return sql.toString();
        }
    };
}
|
java
|
/**
 * Returns a collector that accumulates a stream of points into the total
 * length of the path they trace.
 *
 * @return a collector computing the path length
 */
public Collector<Point, ?, Length> toPathLength() {
    return Collector.of(
        () -> new LengthCollector(this),
        (collector, point) -> collector.add(point),
        (left, right) -> left.combine(right),
        collector -> collector.pathLength()
    );
}
|
java
|
/**
 * Sets the attribute flags of the named property.
 *
 * @param name the property name
 * @param attributes the attribute bit mask to apply
 */
public void setAttributes(String name, int attributes)
{
    // Sealed objects must not be modified; fail fast before the lookup.
    checkNotSealed(name, 0);
    findAttributeSlot(name, 0, SlotAccess.MODIFY).setAttributes(attributes);
}
|
java
|
/**
 * Applies a rotation that aligns {@code -z} with the given direction,
 * storing the result in this matrix.
 *
 * @param dirX x component of the direction to look along
 * @param dirY y component of the direction to look along
 * @param dirZ z component of the direction to look along
 * @param upX x component of the up vector
 * @param upY y component of the up vector
 * @param upZ z component of the up vector
 * @return this matrix
 */
public Matrix3d lookAlong(double dirX, double dirY, double dirZ,
                          double upX, double upY, double upZ) {
    return lookAlong(dirX, dirY, dirZ, upX, upY, upZ, this);
}
|
java
|
/**
 * Opens a FAT file system on the given block device.
 *
 * @param device the block device holding the file system
 * @param readOnly whether to open the file system read-only
 * @return the mounted file system
 * @throws IOException if reading the device fails
 */
public static FileSystem create(BlockDevice device, boolean readOnly)
        throws IOException {
    return FatFileSystem.read(device, readOnly);
}
|
python
|
def get_pyof_version(module_fullname):
    """Get the module pyof version based on the module fullname.

    Args:
        module_fullname (str): The fullname of the module
            (e.g.: pyof.v0x01.common.header)

    Returns:
        str: The openflow version in the format 'v0x0?', or None when the
        fullname carries no version.
    """
    match = re.match(r'(pyof\.)(v0x\d+)(\..*)', module_fullname)
    return match.group(2) if match else None
|
java
|
/**
 * Describes the available DB cluster parameter groups.
 *
 * @param request the describe request
 * @return the matching parameter groups
 */
@Override
public DescribeDBClusterParameterGroupsResult describeDBClusterParameterGroups(DescribeDBClusterParameterGroupsRequest request) {
    // Apply client-side request handlers/validation before execution.
    request = beforeClientExecution(request);
    return executeDescribeDBClusterParameterGroups(request);
}
|
java
|
/**
 * Returns the specified Router resource.
 *
 * @param router the name of the Router resource to return
 * @return the router returned by the API
 */
@BetaApi
public final Router getRouter(String router) {
    GetRouterHttpRequest request = GetRouterHttpRequest.newBuilder().setRouter(router).build();
    return getRouter(request);
}
|
java
|
/**
 * Logs an internal error message at FINEST level, prefixed with the
 * standard inferrer message header. Null/empty messages and disabled
 * logging levels are silently ignored.
 *
 * @param message the detail message to log; may be null or empty
 */
protected void logInternalError(String message) {
    if (!this.log.isLoggable(Level.FINEST) || Strings.isNullOrEmpty(message)) {
        return;
    }
    final String formatted = MessageFormat.format(Messages.SARLJvmModelInferrer_1,
            Messages.SARLJvmModelInferrer_0, message);
    this.log.log(Level.FINEST, formatted);
}
|
java
|
/**
 * Lists the endpoints of a domain, one page at a time.
 *
 * @param client   the API client used to issue requests
 * @param domainId the domain whose endpoints are listed; must not be null
 * @param page     zero-based page index; must be {@code >= 0}
 * @param size     page size; must be {@code > 0}
 * @return an initialized, lazily-backed list of endpoints for that page
 */
public static ResourceList<Endpoint> list(final BandwidthClient client, final String domainId, final int page, final int size) {
    // Precondition checks (assertions, so only active with -ea).
    assert (domainId != null);
    assert (page >= 0);
    assert (size > 0);
    // Build the per-domain endpoints URI from the client's user resource path.
    final String uri =
            String.format(client.getUserResourceUri(BandwidthConstants.ENDPOINTS_URI_PATH), domainId);
    final ResourceList<Endpoint> result =
            new ResourceList<Endpoint>(page, size, uri, Endpoint.class);
    result.setClient(client);
    result.initialize();
    return result;
}
|
java
|
/**
 * Parses the HTTP response body into a list of {@link Place}s.
 *
 * <p>Expects a JSON object of the shape {@code {"result": {"places": [...]}}}.
 *
 * @param res  the raw HTTP response to parse
 * @param conf the configuration passed through to the element parser
 * @return the parsed places
 * @throws TwitterException if the body is not valid JSON or lacks the
 *         expected {@code result.places} structure
 */
static ResponseList<Place> createPlaceList(HttpResponse res, Configuration conf) throws TwitterException {
    JSONObject json = null;
    try {
        json = res.asJSONObject();
        return createPlaceList(json.getJSONObject("result").getJSONArray("places"), res, conf);
    } catch (JSONException jsone) {
        // BUG FIX: if asJSONObject() itself threw, json is still null and the
        // original json.toString() raised an NPE that masked the real parse
        // error. String.valueOf(json) is null-safe and identical otherwise.
        throw new TwitterException(jsone.getMessage() + ":" + String.valueOf(json), jsone);
    }
}
|
java
|
/**
 * Builds a {@link Collection} domain object from its XML element.
 *
 * <p>Reads scalar fields from attributes, title/description from child
 * elements, and any {@code <iconphotos>/<photo>} children as icon photos.
 *
 * @param collectionElement the XML element describing one collection
 * @return the populated collection
 */
private Collection parseCollection(Element collectionElement) {
    Collection result = new Collection();
    // Scalar metadata lives in attributes of the element itself.
    result.setId(collectionElement.getAttribute("id"));
    result.setServer(collectionElement.getAttribute("server"));
    result.setSecret(collectionElement.getAttribute("secret"));
    result.setChildCount(collectionElement.getAttribute("child_count"));
    result.setIconLarge(collectionElement.getAttribute("iconlarge"));
    result.setIconSmall(collectionElement.getAttribute("iconsmall"));
    result.setDateCreated(collectionElement.getAttribute("datecreate"));
    // Title and description are child elements, not attributes.
    result.setTitle(XMLUtilities.getChildValue(collectionElement, "title"));
    result.setDescription(XMLUtilities.getChildValue(collectionElement, "description"));
    // Icon photos are optional; parse each <photo> child when present.
    Element icons = XMLUtilities.getChild(collectionElement, "iconphotos");
    if (icons != null) {
        NodeList photos = icons.getElementsByTagName("photo");
        int count = photos.getLength();
        for (int index = 0; index < count; index++) {
            result.addPhoto(PhotoUtils.createPhoto((Element) photos.item(index)));
        }
    }
    return result;
}
|
java
|
/**
 * Exports the configured database to a SQL dump, zips it, optionally mails
 * the zip as an attachment, and finally clears the temporary files.
 *
 * <p>Steps: validate config → connect (via explicit credentials or a JDBC
 * URL) → generate SQL → write {@code <tempdir>/sql/<name>.sql} → zip the
 * sql folder → email the zip if email properties are set → clean up.
 *
 * @throws IOException            if the temp directories cannot be created
 *                                or the dump file cannot be written
 * @throws SQLException           if connecting or querying the database fails
 * @throws ClassNotFoundException if the JDBC driver class cannot be loaded
 */
public void export() throws IOException, SQLException, ClassNotFoundException {
    // check if properties is set or not
    if(!isValidateProperties()) {
        logger.error("Invalid config properties: The config properties is missing important parameters: DB_NAME, DB_USERNAME and DB_PASSWORD");
        return;
    }
    // connect to the database
    database = properties.getProperty(DB_NAME);
    String jdbcURL = properties.getProperty(JDBC_CONNECTION_STRING, "");
    String driverName = properties.getProperty(JDBC_DRIVER_NAME, "");
    Connection connection;
    if(jdbcURL.isEmpty()) {
        connection = MysqlBaseService.connect(properties.getProperty(DB_USERNAME), properties.getProperty(DB_PASSWORD),
                database, driverName);
    }
    else {
        // Derive the database name from the JDBC URL path segment,
        // trimming any "?param=..." query suffix.
        if (jdbcURL.contains("?")){
            database = jdbcURL.substring(jdbcURL.lastIndexOf("/") + 1, jdbcURL.indexOf("?"));
        } else {
            database = jdbcURL.substring(jdbcURL.lastIndexOf("/") + 1);
        }
        logger.debug("database name extracted from connection string: " + database);
        connection = MysqlBaseService.connectWithURL(properties.getProperty(DB_USERNAME), properties.getProperty(DB_PASSWORD),
                jdbcURL, driverName);
    }
    stmt = connection.createStatement();
    // generate the final SQL
    String sql = exportToSql();
    // create a temp dir to store the exported file for processing
    dirName = properties.getProperty(MysqlExportService.TEMP_DIR, dirName);
    File file = new File(dirName);
    if(!file.exists()) {
        boolean res = file.mkdir();
        if(!res) {
            throw new IOException(LOG_PREFIX + ": Unable to create temp dir: " + file.getAbsolutePath());
        }
    }
    // write the sql file out
    File sqlFolder = new File(dirName + "/sql");
    if(!sqlFolder.exists()) {
        boolean res = sqlFolder.mkdir();
        if(!res) {
            // Report the folder that actually failed to be created.
            throw new IOException(LOG_PREFIX + ": Unable to create temp dir: " + sqlFolder.getAbsolutePath());
        }
    }
    sqlFileName = getSqlFilename();
    // BUG FIX: try-with-resources guarantees the stream is closed even when
    // write() throws; the original leaked the FileOutputStream on failure.
    // NOTE(review): getBytes() uses the platform default charset — confirm
    // whether an explicit UTF-8 is intended here.
    try (FileOutputStream outputStream = new FileOutputStream(sqlFolder + "/" + sqlFileName)) {
        outputStream.write(sql.getBytes());
    }
    // zip the file
    zipFileName = dirName + "/" + sqlFileName.replace(".sql", ".zip");
    generatedZipFile = new File(zipFileName);
    ZipUtil.pack(sqlFolder, generatedZipFile);
    // mail the zipped file if mail settings are available
    if(isEmailPropertiesSet()) {
        boolean emailSendingRes = EmailService.builder()
                .setHost(properties.getProperty(EMAIL_HOST))
                .setPort(Integer.valueOf(properties.getProperty(EMAIL_PORT)))
                .setToAddress(properties.getProperty(EMAIL_TO))
                .setFromAddress(properties.getProperty(EMAIL_FROM))
                .setUsername(properties.getProperty(EMAIL_USERNAME))
                .setPassword(properties.getProperty(EMAIL_PASSWORD))
                .setSubject(properties.getProperty(EMAIL_SUBJECT, sqlFileName.replace(".sql", "").toUpperCase()))
                .setMessage(properties.getProperty(EMAIL_MESSAGE, "Please find attached database backup of " + database))
                .setAttachments(new File[]{new File(zipFileName)})
                .sendMail();
        if (emailSendingRes) {
            logger.debug(LOG_PREFIX + ": Zip File Sent as Attachment to Email Address Successfully");
        } else {
            logger.error(LOG_PREFIX + ": Unable to send zipped file as attachment to email. See log debug for more info");
        }
    }
    // clear the generated temp files
    clearTempFiles(Boolean.parseBoolean(properties.getProperty(PRESERVE_GENERATED_ZIP, Boolean.FALSE.toString())));
}
|
java
|
/**
 * Asynchronously gets personal preferences for a user, unwrapping the
 * service response envelope so the observable emits only the payload body.
 *
 * @param userName the user whose preferences are requested
 * @param personalPreferencesOperationsPayload the operation payload to send
 * @return an observable emitting the unwrapped response body
 */
public Observable<GetPersonalPreferencesResponseInner> getPersonalPreferencesAsync(String userName, PersonalPreferencesOperationsPayload personalPreferencesOperationsPayload) {
    // Mapper that strips the ServiceResponse wrapper and yields the body.
    final Func1<ServiceResponse<GetPersonalPreferencesResponseInner>, GetPersonalPreferencesResponseInner> unwrapBody =
            new Func1<ServiceResponse<GetPersonalPreferencesResponseInner>, GetPersonalPreferencesResponseInner>() {
                @Override
                public GetPersonalPreferencesResponseInner call(ServiceResponse<GetPersonalPreferencesResponseInner> response) {
                    return response.body();
                }
            };
    return getPersonalPreferencesWithServiceResponseAsync(userName, personalPreferencesOperationsPayload)
            .map(unwrapBody);
}
|
python
|
def add_file(self, filename):
    """Read the given file and record its base64-encoded content.

    The content is stored in ``self.files`` keyed by the file's basename,
    for later use when generating output.

    Note: two files with the same basename in different directories
    overwrite each other (original limitation, deliberately kept).

    :param filename: path of the file to add
    :type filename: str or unicode
    """
    key = os.path.basename(filename)
    with open(filename, 'rb') as handle:
        raw = handle.read()
    self.files[key] = base64.b64encode(raw)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.