language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
/**
 * Parses the HTTP request body as a {@link TSMeta} object (v1 API).
 *
 * @return the deserialized TSMeta
 * @throws BadRequestException if the body is empty/missing or is not valid JSON
 */
public TSMeta parseTSMetaV1() {
    final String content = query.getContent();
    // Guard clause: an empty body can never deserialize, so fail fast with
    // a descriptive 400 instead of a parser error.
    if (content == null || content.isEmpty()) {
        throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
            "Missing message content",
            "Supply valid JSON formatted data in the body of your request");
    }
    try {
        return JSON.parseToObject(content, TSMeta.class);
    } catch (IllegalArgumentException iae) {
        // Surface deserialization failures as a bad request, keeping the cause.
        throw new BadRequestException("Unable to parse the given JSON", iae);
    }
}
|
python
|
def get(self, name, typ):
    """
    Gets a counter specified by its name.

    If the counter does not exist, or its type doesn't match the specified
    type, a new one is created and cached.

    :param name: a counter name to retrieve.
    :param typ: a counter type.
    :return: an existing or newly created counter of the specified type.
    :raises Exception: if the counter name is None or empty.
    """
    # `is None` instead of `== None`, and reject empty names explicitly.
    if name is None or len(name) == 0:
        raise Exception("Counter name was not set")
    # Hold the lock for the whole check-then-create sequence so two threads
    # cannot race to install different counters for the same name.  The
    # `with` statement replaces manual acquire()/release() in try/finally.
    with self._lock:
        counter = self._cache.get(name)
        if counter is None or counter.type != typ:
            counter = Counter(name, typ)
            self._cache[name] = counter
        return counter
|
python
|
def _populate_inception_bottlenecks(scope):
    """Add Inception bottlenecks and their pre-Relu versions to the graph."""
    graph = tf.get_default_graph()
    for op in graph.get_operations():
        # Only ops under the requested scope whose type mentions "Concat"
        # (the join points of the Inception "mixed" bottleneck modules).
        if op.name.startswith(scope + '/') and 'Concat' in op.type:
            name = op.name.split('/')[1]
            pre_relus = []
            # inputs[0] is skipped -- presumably the concat axis tensor for
            # this graph version; TODO(review) confirm against the model def.
            for tower in op.inputs[1:]:
                if tower.op.type == 'Relu':
                    # Step back to the pre-activation tensor feeding the Relu.
                    tower = tower.op.inputs[0]
                pre_relus.append(tower)
            # Register a concat of the pre-Relu towers under a predictable
            # "<scope>/<mixed-name>_pre_relu" name for later lookup.
            concat_name = scope + '/' + name + '_pre_relu'
            _ = tf.concat(pre_relus, -1, name=concat_name)
|
python
|
def id_fix(value):
    """Fix @prefix values for ttl.

    Normalizes a raw identifier into a CURIE-style value (obo:, NIFSTD:,
    MESH:, or a bare ':' prefix) and resolves it through OntId.

    :param value: raw identifier string.
    :return: the URIRef produced by ``OntId(value).URIRef``.
    """
    # KSC_M identifiers pass through untouched.
    if not value.startswith('KSC_M'):
        value = value.replace(':', '_')
        # str.startswith accepts a tuple of prefixes, which keeps these
        # prefix lists readable instead of a long `or` chain.
        if value.startswith(('ERO', 'OBI', 'GO', 'UBERON', 'IAO')):
            value = 'obo:' + value
        elif value.startswith(('birnlex', 'nlx')):
            value = 'NIFSTD:' + value
        elif value.startswith('MESH'):
            # Re-join with ':' so e.g. MESH_D000 becomes MESH:D000.
            value = ':'.join(value.split('_'))
        else:
            value = ':' + value
    return OntId(value).URIRef
|
java
|
/**
 * Hand-writes the raw bytes of a {@code .class} file for a class named
 * {@code className} that extends the magic-accessor superclass and implements
 * {@code ObjectInstantiator}, whose {@code newInstance()} constructs
 * {@code type} without calling its constructor.
 *
 * NOTE(review): the stream variable is named {@code in} but is a
 * DataOutputStream used for WRITING; the byte layout below follows the JVM
 * class-file format exactly, so statement order must not be changed.
 *
 * @param type      the class that newInstance() will instantiate
 * @param className fully-qualified name of the generated class
 * @return the generated class-file bytes
 */
private byte[] writeExtendingClass(Class<?> type, String className) {
    String clazz = ClassUtils.classNameToInternalClassName(className);
    ByteArrayOutputStream bIn = new ByteArrayOutputStream(1000); // 1000 should be large enough to fit the entire class
    try(DataOutputStream in = new DataOutputStream(bIn)) {
        in.write(MAGIC);
        in.write(VERSION);
        in.writeShort(CONSTANT_POOL_COUNT);
        // set all the constant pool here
        // 1. class
        in.writeByte(CONSTANT_Class);
        in.writeShort(INDEX_UTF8_INSTANTIATOR_CLASS);
        // 2. super class
        in.writeByte(CONSTANT_Class);
        in.writeShort(INDEX_UTF8_SUPERCLASS);
        // 3. default constructor name
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF(CONSTRUCTOR_NAME);
        // 4. default constructor description
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF(CONSTRUCTOR_DESC);
        // 5. Code
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF("Code");
        // 6. Class name
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF("L" + clazz + ";");
        // 7. Class name (again)
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF(clazz);
        // 8. Superclass name
        in.writeByte(CONSTANT_Utf8);
        // in.writeUTF("java/lang/Object");
        in.writeUTF(MAGIC_ACCESSOR);
        // 9. ObjectInstantiator interface
        in.writeByte(CONSTANT_Class);
        in.writeShort(INDEX_UTF8_INTERFACE);
        // 10. ObjectInstantiator name
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF(ObjectInstantiator.class.getName().replace('.', '/'));
        // 11. newInstance name
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF("newInstance");
        // 12. newInstance desc
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF("()Ljava/lang/Object;");
        // 13. Methodref to the Object constructor
        in.writeByte(CONSTANT_Methodref);
        in.writeShort(INDEX_CLASS_OBJECT);
        in.writeShort(INDEX_NAMEANDTYPE_DEFAULT_CONSTRUCTOR);
        // 14. Object class
        in.writeByte(CONSTANT_Class);
        in.writeShort(INDEX_UTF8_OBJECT);
        // 15. Object class name
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF("java/lang/Object");
        // 16. Default constructor name and type
        in.writeByte(CONSTANT_NameAndType);
        in.writeShort(INDEX_UTF8_CONSTRUCTOR_NAME);
        in.writeShort(INDEX_UTF8_CONSTRUCTOR_DESC);
        // 17. Type to instantiate class
        in.writeByte(CONSTANT_Class);
        in.writeShort(INDEX_UTF8_TYPE);
        // 18. Type to instantiate name
        in.writeByte(CONSTANT_Utf8);
        in.writeUTF(ClassUtils.classNameToInternalClassName(type.getName()));
        // end of constant pool
        // access flags: We want public, ACC_SUPER is always there
        in.writeShort(ACC_PUBLIC | ACC_SUPER | ACC_FINAL);
        // this class index in the constant pool
        in.writeShort(INDEX_CLASS_THIS);
        // super class index in the constant pool
        in.writeShort(INDEX_CLASS_SUPERCLASS);
        // interfaces implemented count (one: ObjectInstantiator)
        in.writeShort(1);
        in.writeShort(INDEX_CLASS_INTERFACE);
        // fields count (we have none)
        in.writeShort(0);
        // method count (we have two: the default constructor and newInstance)
        in.writeShort(2);
        // default constructor method_info
        in.writeShort(ACC_PUBLIC);
        in.writeShort(INDEX_UTF8_CONSTRUCTOR_NAME); // index of the method name (<init>)
        in.writeShort(INDEX_UTF8_CONSTRUCTOR_DESC); // index of the description
        in.writeShort(1); // number of attributes: only one, the code
        // code attribute of the default constructor
        in.writeShort(INDEX_UTF8_CODE_ATTRIBUTE);
        in.writeInt(CONSTRUCTOR_CODE_ATTRIBUTE_LENGTH); // attribute length
        in.writeShort(0); // max_stack
        in.writeShort(1); // max_locals
        in.writeInt(CONSTRUCTOR_CODE.length); // code length
        in.write(CONSTRUCTOR_CODE);
        in.writeShort(0); // exception_table_length = 0
        in.writeShort(0); // attributes count = 0, no need to have LineNumberTable and LocalVariableTable
        // newInstance method_info
        in.writeShort(ACC_PUBLIC);
        in.writeShort(INDEX_UTF8_NEWINSTANCE_NAME); // index of the method name (newInstance)
        in.writeShort(INDEX_UTF8_NEWINSTANCE_DESC); // index of the description
        in.writeShort(1); // number of attributes: only one, the code
        // code attribute of newInstance
        in.writeShort(INDEX_UTF8_CODE_ATTRIBUTE);
        in.writeInt(NEWINSTANCE_CODE_ATTRIBUTE_LENGTH); // attribute length
        in.writeShort(2); // max_stack
        in.writeShort(1); // max_locals
        in.writeInt(NEWINSTANCE_CODE.length); // code length
        in.write(NEWINSTANCE_CODE);
        in.writeShort(0); // exception_table_length = 0
        in.writeShort(0); // attributes count = 0, no need to have LineNumberTable and LocalVariableTable
        // class attributes
        in.writeShort(0); // none. No need to have a source file attribute
    } catch (IOException e) {
        throw new ObjenesisException(e);
    }
    return bIn.toByteArray();
}
|
python
|
def ss(inlist):
    """
    Squares each value in the passed list, adds up these squares and
    returns the result.

    Usage: lss(inlist)
    """
    # Sum of squares via a generator expression; sum() starts from 0,
    # matching the original accumulator's initial value.
    return sum(value * value for value in inlist)
|
python
|
def _setup_param_widgets(self):
    """Creates the parameter entry widgets and binds them to methods"""
    # self.csv_params rows are (name, type, label, help-text) tuples.
    for parameter in self.csv_params:
        pname, ptype, plabel, phelp = parameter
        label = wx.StaticText(self.parent, -1, plabel)
        widget = self.type2widget[ptype](self.parent)
        # Append choicebox items and bind handler
        if pname in self.choices:
            widget.AppendItems(self.choices[pname])
            # Monkey-patch SetValue so choice widgets share the text-widget
            # interface used elsewhere in this dialog.
            widget.SetValue = widget.Select
            widget.SetSelection(0)
        # Bind event handler to widget.  Note: uses the Python 2 `types`
        # module constants (StringType/UnicodeType/BooleanType).
        if ptype is types.StringType or ptype is types.UnicodeType:
            event_type = wx.EVT_TEXT
        elif ptype is types.BooleanType:
            event_type = wx.EVT_CHECKBOX
        else:
            event_type = wx.EVT_CHOICE
        handler = getattr(self, self.widget_handlers[pname])
        self.parent.Bind(event_type, handler, widget)
        # Tool tips
        label.SetToolTipString(phelp)
        widget.SetToolTipString(phelp)
        # Lower-cased class names are used to build attribute names below,
        # e.g. self.statictext_<pname> and self.<widgetclass>_<pname>.
        label.__name__ = wx.StaticText.__name__.lower()
        widget.__name__ = self.type2widget[ptype].__name__.lower()
        self.param_labels.append(label)
        self.param_widgets.append(widget)
        self.__setattr__("_".join([label.__name__, pname]), label)
        self.__setattr__("_".join([widget.__name__, pname]), widget)
|
java
|
/**
 * Retrieves the current leader's connection info, converting the given
 * {@link Time} timeout to a finite duration and delegating to the
 * duration-based overload.
 *
 * @param leaderRetrievalService service used to look up the current leader
 * @param timeout maximum time to wait for the leader
 * @return connection info of the current leader
 * @throws LeaderRetrievalException if the leader could not be retrieved in time
 */
public static LeaderConnectionInfo retrieveLeaderConnectionInfo(
        LeaderRetrievalService leaderRetrievalService,
        Time timeout) throws LeaderRetrievalException {
    return retrieveLeaderConnectionInfo(leaderRetrievalService, FutureUtils.toFiniteDuration(timeout));
}
|
java
|
/**
 * Adds a fully-styled row to the crosstab being built.
 *
 * @param title            row title shown in the header
 * @param property         name of the bean property backing the row
 * @param className        fully-qualified class name of the property
 * @param showTotal        whether to render totals for this row
 * @param headerStyle      style for the row header
 * @param totalStyle       style for the total cells
 * @param totalHeaderStyle style for the total header cell
 * @return this builder, for chaining
 */
public CrosstabBuilder addRow(String title, String property, String className, boolean showTotal,
        Style headerStyle, Style totalStyle, Style totalHeaderStyle) {
    CrosstabRowBuilder rowBuilder = new CrosstabRowBuilder();
    rowBuilder.setProperty(property, className);
    rowBuilder.setShowTotals(showTotal);
    rowBuilder.setTitle(title);
    rowBuilder.setHeaderStyle(headerStyle);
    rowBuilder.setTotalHeaderStyle(totalHeaderStyle);
    rowBuilder.setTotalStyle(totalStyle);
    addRow(rowBuilder.build());
    return this;
}
|
python
|
def load(self, schema_file: Union[str, TextIO], schema_location: Optional[str]=None) -> ShExJ.Schema:
    """ Load a ShEx Schema from schema_location

    :param schema_file: name or file-like object to deserialize
    :param schema_location: URL or file name of schema.  Used to create the base_location
    :return: ShEx Schema represented by schema_location
    """
    # A string argument names a file/URL; anything else is file-like.
    if isinstance(schema_file, str):
        self.schema_text = load_shex_file(self.location_rewrite(schema_file))
    else:
        self.schema_text = schema_file.read()
    # Resolve the root location: explicit base wins, then the schema's
    # own directory, otherwise none.
    if self.base_location:
        root = self.base_location
    elif schema_location:
        root = os.path.dirname(schema_location) + '/'
    else:
        root = None
    self.root_location = root
    return self.loads(self.schema_text)
|
java
|
/**
 * Maps a normalized position in [0, 1] back to a concrete value in the
 * absolute [min, max] range, rounded to two decimal places.
 *
 * @param normalized position along the range, 0 = min, 1 = max
 * @return the value converted to the slider's number type {@code T}
 */
@SuppressWarnings("unchecked")
protected T normalizedToValue(double normalized) {
    double v = absoluteMinValuePrim + normalized * (absoluteMaxValuePrim - absoluteMinValuePrim);
    // TODO parameterize this rounding to allow variable decimal points
    // Rounds to 2 decimals: scale up, round to long, scale back down.
    return (T) numberType.toNumber(Math.round(v * 100) / 100d);
}
|
java
|
/**
 * Ensures the given equi-join column is projected by this plan node (when the
 * node selects the column's selector) and then recurses into all children so
 * the whole subtree carries the column.
 *
 * @param context    the query context
 * @param node       the plan node to (possibly) augment
 * @param joinColumn the join column that must be projected
 */
protected void addEquiJoinColumn( QueryContext context,
                                  PlanNode node,
                                  Column joinColumn ) {
    if (node.getSelectors().contains(joinColumn.selectorName())) {
        // Get the existing projected columns ...
        List<Column> columns = node.getPropertyAsList(Property.PROJECT_COLUMNS, Column.class);
        List<String> types = node.getPropertyAsList(Property.PROJECT_COLUMN_TYPES, String.class);
        // addIfMissing mutates both lists; only rewrite the properties when
        // the column was actually added.
        if (columns != null && addIfMissing(context, joinColumn, columns, types)) {
            node.setProperty(Property.PROJECT_COLUMNS, columns);
            node.setProperty(Property.PROJECT_COLUMN_TYPES, types);
        }
    }
    // Apply recursively to every child node ...
    for (PlanNode child : node) {
        addEquiJoinColumn(context, child, joinColumn);
    }
}
|
python
|
def show_tracebacks(self):
    """Print all collected tracebacks to the output stream, if any."""
    tracebacks = self.broker.tracebacks
    if not tracebacks:
        return
    # Blank line, header, then one traceback per entry.
    print(file=self.stream)
    print("Tracebacks:", file=self.stream)
    for traceback_text in tracebacks.values():
        print(traceback_text, file=self.stream)
|
java
|
/**
 * Builds an HTML {@code <script>} tag that includes the given JavaScript file.
 *
 * @param fileName path or URL of the script; emitted verbatim (not escaped),
 *                 so it must not contain attribute-breaking characters
 * @return the complete script include tag
 */
protected String getJSIncludeFile(String fileName) {
    // StringBuilder instead of the legacy synchronized StringBuffer: the
    // buffer is method-local so no locking is needed.  Capacity 64 covers
    // the fixed markup (~50 chars) plus a short file name without resizing
    // (the old capacity of 8 guaranteed reallocation).
    StringBuilder result = new StringBuilder(64);
    result.append("<script type=\"text/javascript\" src=\"");
    result.append(fileName);
    result.append("\"></script>");
    return result.toString();
}
|
python
|
def load(fh, encoding=None, is_verbose=False):
    """Load a pickle from a file-like object, with an optional encoding.

    Parameters
    ----------
    fh : a filelike object
    encoding : an optional encoding passed through to the Unpickler
    is_verbose : show exception output

    Returns
    -------
    The unpickled object.
    """
    # Rewind so the whole stream is read from the start.
    fh.seek(0)
    if encoding is not None:
        up = Unpickler(fh, encoding=encoding)
    else:
        up = Unpickler(fh)
    up.is_verbose = is_verbose
    # The original wrapped this in ``except (ValueError, TypeError): raise``,
    # a no-op handler that only re-raised; it has been removed.  The garbled
    # docstring lines referring to a nonexistent ``compat`` flag were also
    # dropped.
    return up.load()
|
java
|
/**
 * Static factory: creates a {@code ViewDefinition} for the given query and
 * user-defined functions using the default builder settings.
 *
 * @param query     the view's SQL query
 * @param functions user-defined functions referenced by the query
 * @return the built view definition
 */
public static ViewDefinition of(String query, List<UserDefinedFunction> functions) {
    return newBuilder(query, functions).build();
}
|
java
|
/**
 * Asynchronously fetches the network settings of the given device, wrapped in
 * a {@link ServiceResponse}.  Auto-generated Azure SDK method: the argument
 * validation order and the service-call parameter order are fixed by the
 * generator and must not be changed.
 *
 * @param deviceName        the device whose settings are requested
 * @param resourceGroupName the resource group containing the device
 * @return an Observable emitting the wrapped network settings
 * @throws IllegalArgumentException if any required parameter is null
 */
public Observable<ServiceResponse<NetworkSettingsInner>> getNetworkSettingsWithServiceResponseAsync(String deviceName, String resourceGroupName) {
    if (deviceName == null) {
        throw new IllegalArgumentException("Parameter deviceName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    return service.getNetworkSettings(deviceName, this.client.subscriptionId(), resourceGroupName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<NetworkSettingsInner>>>() {
            @Override
            public Observable<ServiceResponse<NetworkSettingsInner>> call(Response<ResponseBody> response) {
                try {
                    // Delegate parses the HTTP response into the typed wrapper.
                    ServiceResponse<NetworkSettingsInner> clientResponse = getNetworkSettingsDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    // Surface parsing/service errors through the Observable.
                    return Observable.error(t);
                }
            }
        });
}
|
java
|
/**
 * Declares the allowed transitions out of state {@code from}.
 *
 * NOTE(review): this replaces (not merges with) any transition set previously
 * registered for {@code from} — confirm callers rely on that.
 *
 * @param from   source state
 * @param to     first allowed target state
 * @param moreTo additional allowed target states
 * @return this state machine, for chaining
 */
public StateMachine<T> withTransition(T from, T to, T... moreTo) {
    transitions.put(from, EnumSet.of(to, moreTo));
    return this;
}
|
java
|
/**
 * Expands a list of source paths into concrete (source, destination) copy
 * pairs.  Files map directly to the destination (or to a child of it when the
 * destination is a directory); directories are walked recursively via
 * {@code getDirectoryListing}.
 *
 * @param fs      filesystem holding the source paths
 * @param paths   source files and/or directories
 * @param dstPath copy destination (file or directory)
 * @return the expanded list of copy pairs
 * @throws IOException if a source path cannot be stat'ed or listed
 */
private static List<CopyPath> expandDirectories(FileSystem fs,
    List<Path> paths, Path dstPath)
    throws IOException {
    List<CopyPath> newList = new ArrayList<CopyPath>();
    FileSystem dstFs = dstPath.getFileSystem(defaultConf);
    // Determine whether the destination is (or will be) a single file: either
    // it exists and is not a directory, or it does not exist at all.
    boolean isDstFile = false;
    try {
        FileStatus dstPathStatus = dstFs.getFileStatus(dstPath);
        if (!dstPathStatus.isDir()) {
            isDstFile = true;
        }
    } catch (FileNotFoundException e) {
        isDstFile = true;
    }
    for (Path path : paths) {
        FileStatus pathStatus = fs.getFileStatus(path);
        if (!pathStatus.isDir()) {
            // This is the case where the destination is a file, in this case, we
            // allow only a single source file. This check has been done below in
            // FastCopy#parseFiles(List, String[])
            if (isDstFile) {
                newList.add(new CopyPath(path, dstPath));
            } else {
                newList.add(new CopyPath(path, new Path(dstPath, path.getName())));
            }
        } else {
            // If we are copying /a/b/c into /x/y/z and 'z' does not exist, we
            // create the structure /x/y/z/f*, where f* represents all files and
            // directories in c/
            Path rootPath = dstPath;
            // This ensures if we copy a directory like /a/b/c to a directory
            // /x/y/z/, we will create the directory structure /x/y/z/c, if 'z'
            // exists.
            if (dstFs.exists(dstPath)) {
                rootPath = new Path(dstPath, pathStatus.getPath().getName());
            }
            getDirectoryListing(pathStatus, fs, newList, rootPath);
        }
    }
    return newList;
}
|
python
|
def dataset_list(self):
    '''Subcommand of dataset for listing available datasets'''
    # Initialize the prepare subcommand's argparser
    parser = argparse.ArgumentParser(description='Preprocess a raw dialogue corpus into a dsrt dataset')
    self.init_dataset_list_args(parser)
    # Parse the args we got.  sys.argv[3:] skips the program name plus the
    # two leading subcommand tokens already consumed by the outer CLI.
    args = parser.parse_args(sys.argv[3:])
    print(CLI_DIVIDER + '\n')
    # Delegate the actual listing to the application utilities.
    dsrt.application.utils.list_dataset()
|
python
|
def get_t(self):
    """Returns the top border of the cell"""
    # The top border of this cell is, by definition, the bottom border of
    # the cell directly above it, so build that cell's borders and ask for
    # its bottom edge.
    cell_above = CellBorders(self.cell_attributes,
                             *self.cell.get_above_key_rect())
    return cell_above.get_b()
|
java
|
/**
 * Expands each input path: regular files are kept as-is, and directories are
 * replaced by the Java archives they directly contain (non-recursive).
 * Anything that is neither a file nor a directory is logged and skipped.
 *
 * @param input paths supplied by the user
 * @return the expanded list of file paths
 * @throws WindupException if a directory's contents cannot be read
 */
private static List<Path> expandMultiAppInputDirs(List<Path> input)
{
    List<Path> expanded = new LinkedList<>();
    for (Path path : input)
    {
        if (Files.isRegularFile(path))
        {
            expanded.add(path);
            continue;
        }
        if (!Files.isDirectory(path))
        {
            // Note: path cannot be null here — Files.isRegularFile(null)
            // above would already have thrown — so the old null guard that
            // substituted "" was dead code and has been removed.
            log.warning("Neither a file or directory found in input: " + path.toString());
            continue;
        }
        // try-with-resources directly; the old extra enclosing try block was
        // redundant since the catch applies equally here.
        try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(path))
        {
            for (Path subpath : directoryStream)
            {
                if (isJavaArchive(subpath))
                {
                    expanded.add(subpath);
                }
            }
        }
        catch (IOException e)
        {
            // TODO(review): pass 'e' as the cause if WindupException has a
            // (String, Throwable) constructor — the stack trace is lost here.
            throw new WindupException("Failed to read directory contents of: " + path);
        }
    }
    return expanded;
}
|
python
|
def get_request_handler_chain(self, handler_input):
    # type: (Input) -> Union[GenericRequestHandlerChain, None]
    """Get the request handler chain that can handle the dispatch
    input.

    :param handler_input: Generic input passed to the
        dispatcher.
    :type handler_input: Input
    :return: Handler Chain that can handle the input.
    :rtype: Union[None, GenericRequestHandlerChain]
    """
    # First chain whose handler accepts the input, or None if none does.
    return next(
        (chain for chain in self.request_handler_chains
         if chain.request_handler.can_handle(handler_input=handler_input)),
        None)
|
java
|
/**
 * Extracts the native file descriptor of the underlying channel via a
 * privileged reflective field read.
 *
 * @return the channel's native file descriptor value
 * @throws AsyncException if the privileged field access failed
 */
protected long getFileDescriptor() throws AsyncException {
    // PrivFieldCheck performs the reflective lookup under doPrivileged and
    // reports failure through the returned struct rather than throwing.
    FieldReturn fRet = AccessController.doPrivileged(new PrivFieldCheck(channel));
    if (fRet.e != null) {
        throw fRet.e;
    }
    return fRet.val;
}
|
java
|
/**
 * Memory-maps the entire given file in the requested mode, delegating to the
 * length-taking overload with the file's current size.
 *
 * @param file the file to map; must exist
 * @param mode the mapping mode (read-only, read/write, ...)
 * @return a buffer mapping the whole file
 * @throws FileNotFoundException if the file does not exist
 * @throws IOException if mapping fails
 */
public static MappedByteBuffer map(File file, MapMode mode) throws IOException {
    checkNotNull(file);
    checkNotNull(mode);
    if (!file.exists()) {
        throw new FileNotFoundException(file.toString());
    }
    return map(file, mode, file.length());
}
|
java
|
/**
 * Static factory: creates a {@code MediaTable} with no extra configuration,
 * delegating to the three-argument overload with a null third argument.
 *
 * @param tableName    name of the backing table
 * @param idColumnName name of the id column
 * @return the created media table
 */
public static MediaTable create(String tableName, String idColumnName) {
    return create(tableName, idColumnName, null);
}
|
python
|
def parse_xml_node(self, node):
    '''Parse an xml.dom Node object representing a target port into this
    object.

    :param node: the xml.dom Node to parse.
    :return: self, for chaining.
    '''
    # Let the base class populate the shared port fields first.
    super(TargetPort, self).parse_xml_node(node)
    # Then read the port name from the RTS-namespaced attribute.
    self.port_name = node.getAttributeNS(RTS_NS, 'portName')
    return self
|
python
|
def _set_fcoe_fabric_mode(self, v, load=False):
    """
    Setter method for fcoe_fabric_mode, mapped from YANG variable /fcoe/fcoe_fabric_map/fcoe_fabric_mode (fcoe-fabric-mode-type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_fcoe_fabric_mode is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_fcoe_fabric_mode() directly.

    YANG Description: This specifies the san mode.
    """
    # Auto-generated pyangbind setter (Python 2: note the u'' literals and
    # `unicode` base type).  Unwrap a typed value back to its raw form first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap in the restricted YANG dynamic class; raises if the value
        # does not match the 'local|remote' pattern / length restriction.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'local|remote', 'length': [u'1..32']}), is_leaf=True, yang_name="fcoe-fabric-mode", rest_name="san-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure the san mode', u'alt-name': u'san-mode', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='fcoe-fabric-mode-type', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """fcoe_fabric_mode must be of a type compatible with fcoe-fabric-mode-type""",
            'defined-type': "brocade-fcoe:fcoe-fabric-mode-type",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'local|remote', 'length': [u'1..32']}), is_leaf=True, yang_name="fcoe-fabric-mode", rest_name="san-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure the san mode', u'alt-name': u'san-mode', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='fcoe-fabric-mode-type', is_config=True)""",
        })
    self.__fcoe_fabric_mode = t
    # Notify the parent container that a child value changed, if supported.
    if hasattr(self, '_set'):
        self._set()
|
java
|
/**
 * Returns the segment for index {@code k}, creating and CAS-publishing it if
 * absent.  This is the lock-free lazy-initialization path from
 * ConcurrentHashMap: it re-reads the slot (volatile) before and inside the
 * creation loop so that at most one thread's segment wins, and every thread
 * returns the winning instance.  Do not reorder these reads.
 *
 * @param k the segment index
 * @return the (possibly newly created) segment, never null
 */
@SuppressWarnings("unchecked")
private Segment<K,V> ensureSegment(int k) {
    final Segment<K,V>[] ss = this.segments;
    long u = (k << SSHIFT) + SBASE; // raw offset
    Segment<K,V> seg;
    if ((seg = (Segment<K,V>)UNSAFE.getObjectVolatile(ss, u)) == null) {
        Segment<K,V> proto = ss[0]; // use segment 0 as prototype
        int cap = proto.table.length;
        float lf = proto.loadFactor;
        int threshold = (int)(cap * lf);
        HashEntry<K,V>[] tab = new HashEntry[cap];
        if ((seg = (Segment<K,V>)UNSAFE.getObjectVolatile(ss, u))
            == null) { // recheck
            Segment<K,V> s = new Segment<K,V>(lf, threshold, tab);
            // CAS loop: publish our segment only if the slot is still null;
            // otherwise adopt whatever another thread installed.
            while ((seg = (Segment<K,V>)UNSAFE.getObjectVolatile(ss, u))
                   == null) {
                if (UNSAFE.compareAndSwapObject(ss, u, null, seg = s)) {
                    break;
                }
            }
        }
    }
    return seg;
}
|
java
|
/**
 * Asynchronously uploads a zipped video-frame image plus metadata to a
 * review.  Auto-generated Azure SDK method: validation order and service-call
 * parameter order are fixed by the generator.
 *
 * @param teamName      team owning the review
 * @param reviewId      id of the review to attach frames to
 * @param contentType   content type of the frame payload
 * @param frameImageZip zipped frame image bytes
 * @param frameMetadata frame metadata string
 * @param timescale     optional timescale of the video (may be null)
 * @return an Observable emitting the (void) service response
 * @throws IllegalArgumentException if any required parameter is null
 */
public Observable<ServiceResponse<Void>> addVideoFrameStreamWithServiceResponseAsync(String teamName, String reviewId, String contentType, byte[] frameImageZip, String frameMetadata, Integer timescale) {
    if (this.client.baseUrl() == null) {
        throw new IllegalArgumentException("Parameter this.client.baseUrl() is required and cannot be null.");
    }
    if (teamName == null) {
        throw new IllegalArgumentException("Parameter teamName is required and cannot be null.");
    }
    if (reviewId == null) {
        throw new IllegalArgumentException("Parameter reviewId is required and cannot be null.");
    }
    if (contentType == null) {
        throw new IllegalArgumentException("Parameter contentType is required and cannot be null.");
    }
    if (frameImageZip == null) {
        throw new IllegalArgumentException("Parameter frameImageZip is required and cannot be null.");
    }
    if (frameMetadata == null) {
        throw new IllegalArgumentException("Parameter frameMetadata is required and cannot be null.");
    }
    // Substitute the {baseUrl} host placeholder for this request.
    String parameterizedHost = Joiner.on(", ").join("{baseUrl}", this.client.baseUrl());
    // Wrap the raw zip bytes as a multipart/form-data request body.
    RequestBody frameImageZipConverted = RequestBody.create(MediaType.parse("multipart/form-data"), frameImageZip);
    return service.addVideoFrameStream(teamName, reviewId, contentType, timescale, frameImageZipConverted, frameMetadata, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
            @Override
            public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<Void> clientResponse = addVideoFrameStreamDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    // Surface parsing/service errors through the Observable.
                    return Observable.error(t);
                }
            }
        });
}
|
python
|
def get_next_task(self):
    """get the next task if there's one that should be processed,
    and return how long it will be until the next one should be
    processed."""
    if _debug: TaskManager._debug("get_next_task")

    # get the time
    now = _time()

    task = None
    delta = None

    if self.tasks:
        # look at the first task; heap entries appear to be
        # (when, sequence-number, task) tuples -- TODO(review) confirm.
        when, n, nxttask = self.tasks[0]
        if when <= now:
            # pull it off the list and mark that it's no longer scheduled
            heappop(self.tasks)
            task = nxttask
            task.isScheduled = False

            if self.tasks:
                when, n, nxttask = self.tasks[0]
                # peek at the next task, return how long to wait
                # (clamped at 0 in case it is also already due)
                delta = max(when - now, 0.0)
        else:
            # nothing due yet: wait until the earliest task's time
            delta = when - now

    # return the task to run and how long to wait for the next one
    return (task, delta)
|
java
|
/**
 * Creates a date format for the given pattern and calendar using the default
 * locale, delegating to the locale-taking overload.
 *
 * @param pattern  date pattern to format with
 * @param calendar calendar system to use
 * @return the configured date format (note: SimpleDateFormat is not thread-safe)
 */
private static SimpleDateFormat createDateFormat(String pattern, EUniCalendar calendar) {
    return createDateFormat(pattern, calendar, getDefault());
}
|
java
|
/**
 * Downloads the named file from a compute node into the given stream, using
 * default additional options (null), via the five-argument overload.
 *
 * @param poolId       id of the pool containing the node
 * @param nodeId       id of the compute node
 * @param fileName     name of the file to download
 * @param outputStream destination for the file contents
 * @throws BatchErrorException if the Batch service rejects the request
 * @throws IOException if writing to the stream fails
 */
public void getFileFromComputeNode(String poolId, String nodeId, String fileName, OutputStream outputStream) throws BatchErrorException, IOException {
    getFileFromComputeNode(poolId, nodeId, fileName, null, outputStream);
}
|
python
|
def pre_approval_cancel(self, code):
    """ cancel a subscribe (pre-approval) identified by its code.

    :param code: the pre-approval code to cancel.
    :return: a PagSeguroPreApprovalCancel wrapping the service response.
    """
    # The cancel URL template takes the pre-approval code via %-formatting.
    response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
    return PagSeguroPreApprovalCancel(response.content, self.config)
|
java
|
/**
 * Consumes the closing "}}" of a CLOB after skipping any whitespace that
 * precedes it; raises a parse error if the terminator is malformed.
 *
 * @throws IOException if reading fails or the closing punctuation is invalid
 */
protected final void skip_clob_close_punctuation() throws IOException {
    int c = skip_over_clob_whitespace();
    if (c == '}') {
        c = read_char();
        if (c == '}') {
            return; // found the full "}}" terminator
        }
        // Only one '}' seen: push back the look-ahead char, then fall
        // through to also push back the '}' and report the error.
        unread_char(c);
        c = '}';
    }
    unread_char(c);
    // Fixed typo in the message: "puctuation" -> "punctuation".
    error("invalid closing punctuation for CLOB");
}
|
java
|
/**
 * Builds a PDF Rendition action (PDF 1.5 multimedia) that plays the given
 * media file in the screen annotation referenced by {@code ref}.
 *
 * @param file     media file name
 * @param fs       file specification for the media
 * @param mimeType MIME type of the media
 * @param ref      indirect reference to the screen annotation ("AN" entry)
 * @return the assembled rendition action
 * @throws IOException if the rendition dictionary cannot be created
 */
public static PdfAction rendition(String file, PdfFileSpecification fs, String mimeType, PdfIndirectReference ref) throws IOException {
    PdfAction js = new PdfAction();
    js.put(PdfName.S, PdfName.RENDITION);
    js.put(PdfName.R, new PdfRendition(file, fs, mimeType));
    // "OP" 0 = play the rendition; "AN" points at the target annotation.
    js.put(new PdfName("OP"), new PdfNumber(0));
    js.put(new PdfName("AN"), ref);
    return js;
}
|
java
|
/**
 * Initializes injection services by resolving the reference context for all
 * of this archive's injection classes.
 *
 * @return the reference context covering the archive's injection classes
 * @throws CDIException if the reference context cannot be obtained
 */
@Override
public ReferenceContext initializeInjectionServices() throws CDIException {
    return archive.getReferenceContext(getInjectionClasses());
}
|
python
|
def hira2hkata(text, ignore=''):
    """Convert Hiragana to Half-width (Hankaku) Katakana

    Parameters
    ----------
    text : str
        Hiragana string.
    ignore : str
        Characters to be ignored in converting.

    Return
    ------
    str
        Half-width Katakana string.

    Examples
    --------
    >>> print(jaconv.hira2hkata('ともえまみ'))
    トモエマミ
    >>> print(jaconv.hira2hkata('ともえまみ', ignore='み'))
    トモエマみ
    """
    # Pick the conversion table up front: the full table, or a copy with the
    # ignored characters removed.
    conversion_table = H2HK_TABLE
    if ignore:
        conversion_table = _exclude_ignorechar(ignore, H2HK_TABLE.copy())
    return _convert(text, conversion_table)
|
python
|
async def upload_file(self, data: bytes, mime_type: Optional[str] = None) -> str:
    """
    Upload a file to the content repository. See also: `API reference`_

    Args:
        data: The data to upload.
        mime_type: The MIME type to send with the upload request.

    Returns:
        The MXC URI to the uploaded file.

    Raises:
        MatrixResponseError: If the response does not contain a ``content_uri`` field.

    .. _API reference:
       https://matrix.org/docs/spec/client_server/r0.3.0.html#post-matrix-media-r0-upload
    """
    # Make sure the bot/user is registered before hitting the media repo.
    await self.ensure_registered()
    # `magic` is an optional dependency: when available and no explicit MIME
    # type was given, sniff it from the payload bytes.
    if magic:
        mime_type = mime_type or magic.from_buffer(data, mime=True)
    resp = await self.client.request("POST", "", content=data,
                                     headers={"Content-Type": mime_type},
                                     api_path="/_matrix/media/r0/upload")
    try:
        return resp["content_uri"]
    except KeyError:
        raise MatrixResponseError("Media repo upload response did not contain content_uri.")
|
python
|
def memoized_parse_block(code):
    """Memoized version of parse_block."""
    # The memo stores (success, payload): payload is the parse result on
    # success, or the captured exception on failure.
    cached_success, cached_result = parse_block_memo.get(code, (None, None))
    if cached_success is None:
        # Not cached yet: parse once and record the outcome either way.
        try:
            outcome = (True, COMPILER.parse_block(code))
        except Exception as err:
            outcome = (False, err)
        parse_block_memo[code] = outcome
        cached_success, cached_result = outcome
    if cached_success:
        return cached_result
    raise cached_result
|
python
|
def get(cls, external_id, local_user_id, provider_name, db_session=None):
    """
    Fetch row using primary key -
    will use existing object in session if already present

    :param external_id: external-identity id component of the composite key.
    :param local_user_id: local user id component of the composite key.
    :param provider_name: provider name component of the composite key.
    :param db_session: optional explicit session; otherwise the default is used.
    :return: the matching model instance, or None if not found.
    """
    db_session = get_db_session(db_session)
    # Query.get with a list performs a composite-primary-key lookup and
    # short-circuits to the identity map when the object is already loaded.
    return db_session.query(cls.model).get(
        [external_id, local_user_id, provider_name]
    )
|
python
|
def drawdown_end(self, return_date=False):
    """The date of the drawdown trough.

    Date at which the drawdown was most negative.

    Parameters
    ----------
    return_date : bool, default False
        If True, return a `datetime.date` object.
        If False, return a Pandas Timestamp object.

    Returns
    -------
    datetime.date or pandas._libs.tslib.Timestamp
    """
    # The trough is the index label of the minimum drawdown value.
    trough = self.drawdown_idx().idxmin()
    return trough.date() if return_date else trough
|
java
|
/**
 * Marshalls a {@code DescribeTaskExecutionRequest} into the protocol
 * marshaller (auto-generated AWS SDK marshaller).
 *
 * @param describeTaskExecutionRequest the request to marshall; must not be null
 * @param protocolMarshaller the target protocol marshaller
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(DescribeTaskExecutionRequest describeTaskExecutionRequest, ProtocolMarshaller protocolMarshaller) {
    if (describeTaskExecutionRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Only one field on this request: the task execution ARN.
        protocolMarshaller.marshall(describeTaskExecutionRequest.getTaskExecutionArn(), TASKEXECUTIONARN_BINDING);
    } catch (Exception e) {
        // Wrap with the cause preserved, per SDK convention.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Compares this date to another partial.  Overridden to take a fast path when
 * the other value is a {@code LocalDate} with the same chronology, in which
 * case the raw local millis can be compared directly; otherwise falls back to
 * the general field-by-field comparison in the superclass.
 *
 * @param partial the partial to compare with
 * @return negative/zero/positive per the {@link Comparable} contract
 */
public int compareTo(ReadablePartial partial) {
    // override to perform faster
    if (this == partial) {
        return 0;
    }
    if (partial instanceof LocalDate) {
        LocalDate other = (LocalDate) partial;
        if (iChronology.equals(other.iChronology)) {
            // Same chronology: millis order equals date order.
            return (iLocalMillis < other.iLocalMillis ? -1 :
                    (iLocalMillis == other.iLocalMillis ? 0 : 1));
        }
    }
    return super.compareTo(partial);
}
|
python
|
def thumbnail(self):
    """
    This method returns a thumbnail representation of the file if the data is a supported graphics format.

    Input:
        * None

    Output:
        * A byte stream representing a thumbnail of a supported graphics file

    Example::

        file = client.get_file("4ddfds", 0)
        open("thumbnail.jpg", "wb").write(file.thumbnail())
    """
    # Thumbnail endpoint is addressed by sharename + fileid.
    response = GettRequest().get("/files/%s/%s/blob/thumb" % (self.sharename, self.fileid))
    # .response carries the raw body bytes of the HTTP response.
    return response.response
|
python
|
async def create_sentinel_pool(sentinels, *, db=None, password=None,
                               encoding=None, minsize=1, maxsize=10,
                               ssl=None, parser=None, timeout=0.2, loop=None):
    """Create SentinelPool.

    :param sentinels: list/tuple of (host, port) sentinel addresses.
    :param timeout: per-operation sentinel timeout in seconds.
    :return: a SentinelPool with discovery already performed.
    """
    # FIXME: revise default timeout value
    assert isinstance(sentinels, (list, tuple)), sentinels

    # Fall back to the running event loop if none was given explicitly.
    if loop is None:
        loop = asyncio.get_event_loop()

    pool = SentinelPool(sentinels, db=db,
                        password=password,
                        ssl=ssl,
                        encoding=encoding,
                        parser=parser,
                        minsize=minsize,
                        maxsize=maxsize,
                        timeout=timeout,
                        loop=loop)
    # Resolve the current master/replica topology before handing the pool out.
    await pool.discover()
    return pool
|
python
|
def info(self):
    """Formatted string to display the available choices"""
    # Render each choice, optionally with its description in parentheses.
    if self.descriptions is None:
        labels = ['"{}"'.format(choice) for choice in self.choices]
    else:
        labels = ['"{}" ({})'.format(choice, self.descriptions[choice])
                  for choice in self.choices]
    # Exactly two choices read more naturally as "either ... or ...".
    if len(self.choices) == 2:
        return 'either {} or {}'.format(*labels)
    return 'any of {}'.format(', '.join(labels))
|
python
|
def _to_mwtab(self):
"""Save :class:`~mwtab.mwtab.MWTabFile` in `mwtab` formatted string.
:return: NMR-STAR string.
:rtype: :py:class:`str`
"""
mwtab_str = io.StringIO()
self.print_file(mwtab_str)
return mwtab_str.getvalue()
|
python
|
def pingback_ping(source, target):
    """
    pingback.ping(sourceURI, targetURI) => 'Pingback message'

    Notifies the server that a link has been added to sourceURI,
    pointing to targetURI.

    See: http://hixie.ch/specs/pingback/pingback-1.0
    """
    try:
        # Self-pings are rejected outright.
        if source == target:
            return UNDEFINED_ERROR
        site = Site.objects.get_current()
        try:
            # Fetch the source page, decoding each line as UTF-8.
            document = ''.join(map(
                lambda byte_line: byte_line.decode('utf-8'),
                urlopen(source).readlines()))
        except (HTTPError, URLError):
            return SOURCE_DOES_NOT_EXIST
        # The source must actually contain a link to the target.
        if target not in document:
            return SOURCE_DOES_NOT_LINK
        # The target must live on this site and resolve to a known URL.
        target_splitted = urlsplit(target)
        if target_splitted.netloc != site.domain:
            return TARGET_DOES_NOT_EXIST
        try:
            view, args, kwargs = resolve(target_splitted.path)
        except Resolver404:
            return TARGET_DOES_NOT_EXIST
        # The resolved URL kwargs must identify a published entry that
        # accepts pingbacks.
        try:
            entry = Entry.published.get(
                slug=kwargs['slug'],
                publication_date__year=kwargs['year'],
                publication_date__month=kwargs['month'],
                publication_date__day=kwargs['day'])
            if not entry.pingbacks_are_open:
                return TARGET_IS_NOT_PINGABLE
        except (KeyError, Entry.DoesNotExist):
            return TARGET_IS_NOT_PINGABLE
        # Extract a title and an excerpt of the source page for the comment.
        soup = BeautifulSoup(document, 'html.parser')
        title = six.text_type(soup.find('title'))
        title = title and strip_tags(title) or _('No title')
        description = generate_pingback_content(soup, target,
                                                PINGBACK_CONTENT_LENGTH)
        pingback_klass = comments.get_model()
        pingback_datas = {
            'content_type': ContentType.objects.get_for_model(Entry),
            'object_pk': entry.pk,
            'site': site,
            'user_url': source,
            'user_name': title,
            'comment': description
        }
        pingback = pingback_klass(**pingback_datas)
        if check_is_spam(pingback, entry, FakeRequest()):
            return PINGBACK_IS_SPAM
        # comment/user_name go into defaults so they don't participate in the
        # get_or_create lookup (which would defeat duplicate detection).
        pingback_defaults = {'comment': pingback_datas.pop('comment'),
                             'user_name': pingback_datas.pop('user_name')}
        pingback, created = pingback_klass.objects.get_or_create(
            defaults=pingback_defaults,
            **pingback_datas)
        if created:
            pingback.flags.create(user=get_user_flagger(), flag=PINGBACK)
            pingback_was_posted.send(pingback.__class__,
                                     pingback=pingback,
                                     entry=entry)
            return 'Pingback from %s to %s registered.' % (source, target)
        return PINGBACK_ALREADY_REGISTERED
    except Exception:
        # Deliberate catch-all: this XML-RPC endpoint must always answer with
        # a pingback fault code rather than raise to the transport layer.
        return UNDEFINED_ERROR
|
java
|
/**
 * Returns a copy of this datetime plus the specified number of weeks
 * (computed via this instance's chronology).  This instance is immutable;
 * adding zero weeks returns {@code this} unchanged.
 *
 * @param weeks the number of weeks to add, may be negative
 * @return the new datetime plus the weeks
 */
public DateTime plusWeeks(int weeks) {
    if (weeks == 0) {
        return this;
    }
    long instant = getChronology().weeks().add(getMillis(), weeks);
    return withMillis(instant);
}
|
python
|
def get_element_pdos(dos, element, sites, lm_orbitals=None, orbitals=None):
    """Get the projected density of states for an element.

    Args:
        dos (:obj:`~pymatgen.electronic_structure.dos.CompleteDos`): The
            density of states.
        element (str): Element symbol. E.g. 'Zn'.
        sites (tuple): Zero-based atomic indices (per element) over which the
            density of states is summed. For example, ``(0, 1, 2)`` sums the
            density of states for the 1st, 2nd and 3rd sites of the element
            specified.
        lm_orbitals (:obj:`tuple`, optional): Orbitals to decompose into their
            lm contributions (e.g. p -> px, py, pz), given as a :obj:`tuple`
            of :obj:`str` such as ``('p')``. Defaults to ``None``.
        orbitals (:obj:`tuple`, optional): Orbitals to extract from the
            projected density of states, given as a :obj:`tuple` of
            :obj:`str`, e.g. ``('s', 'px', 'dx2')``. If ``None``, all
            orbitals are extracted. Defaults to ``None``.

    Returns:
        dict: The projected density of states, mapping orbital names to
        :obj:`~pymatgen.electronic_structure.dos.Dos` objects, e.g.::

            {
                's': Dos,
                'p': Dos
            }
    """
    el_dos = {}
    for site in sites:
        # s/p/d orbitals to keep for this site: honour the `orbitals`
        # whitelist (if any) and skip anything that will instead be
        # lm-decomposed below.
        spd_orbitals = [
            orb for orb in dos.get_element_spd_dos(element).keys()
            if (not orbitals or orb.name in orbitals)
            and (not lm_orbitals or orb.name not in lm_orbitals)
        ]
        # lm-decomposed orbitals requested (px, py, pz, ...).
        if lm_orbitals:
            lm_decomposed = [orb for orb in Orbital
                             if orb.name[0] in lm_orbitals]
        else:
            lm_decomposed = []
        # Accumulate the site-projected data into the running totals.
        for orb in spd_orbitals:
            site_dos = dos.get_site_spd_dos(site)[orb]
            if orb.name in el_dos:
                el_dos[orb.name] = el_dos[orb.name] + site_dos
            else:
                el_dos[orb.name] = site_dos
        for orb in lm_decomposed:
            site_dos = dos.get_site_orbital_dos(site, orb)
            if orb.name in el_dos:
                el_dos[orb.name] = el_dos[orb.name] + site_dos
            else:
                el_dos[orb.name] = site_dos
    return el_dos
|
python
|
def assertion_jwt(client_id, keys, audience, algorithm, lifetime=600):
    """
    Create a signed Json Web Token containing some information.

    :param client_id: The Client ID
    :param keys: Signing keys
    :param audience: Who is the receivers for this assertion
    :param algorithm: Signing algorithm
    :param lifetime: The lifetime of the signed Json Web Token
    :return: A Signed Json Web Token
    """
    issued_at = utc_time_sans_frac()
    # Both issuer and subject are the client; jti is a fresh random id.
    assertion = AuthnToken(
        iss=client_id,
        sub=client_id,
        aud=audience,
        jti=rndstr(32),
        exp=issued_at + lifetime,
        iat=issued_at,
    )
    logger.debug('AuthnToken: {}'.format(assertion.to_dict()))
    return assertion.to_jwt(key=keys, algorithm=algorithm)
|
python
|
def offer_url(self):
    """Offer URL

    :return:
        Offer URL (string).
    """
    # Region-specific associates base URL, then the product ASIN and tag.
    base = AMAZON_ASSOCIATES_BASE_URL.format(domain=DOMAINS[self.region])
    return "{0}{1}/?tag={2}".format(base, self.asin, self.aws_associate_tag)
|
python
|
def insert(self, i, tag, affix, cmd="hassuf", tagged=None):
    """ Inserts a new rule that assigns the given tag to words with the given affix,
        e.g., Morphology.append("RB", "-ly").
        A leading dash means a suffix rule ("hassuf"), a trailing dash a prefix
        rule ("haspref"), and a dash on both sides a "char" rule.
    """
    if affix.startswith("-") and affix.endswith("-"):
        affix, cmd = affix[1:-1], "char"
    if affix.startswith("-"):
        # BUG FIX: affix[+1:-0] evaluated to affix[1:0] == "" because -0 is 0;
        # the intent is to strip only the leading dash.
        affix, cmd = affix[1:], "hassuf"
    if affix.endswith("-"):
        affix, cmd = affix[:-1], "haspref"
    if tagged:
        r = [tagged, affix, "f"+cmd.lstrip("f"), tag, "x"]
    else:
        r = [affix, cmd.lstrip("f"), tag, "x"]
    lazylist.insert(self, i, r)
|
java
|
/**
 * Initiates troubleshooting on the specified resource and waits for the result.
 * Blocks until the underlying long-running operation completes and returns its body.
 *
 * @param resourceGroupName the name of the resource group
 * @param networkWatcherName the name of the network watcher resource
 * @param parameters parameters that define the resource to troubleshoot
 * @return the troubleshooting result
 */
public TroubleshootingResultInner getTroubleshooting(String resourceGroupName, String networkWatcherName, TroubleshootingParameters parameters) {
    return getTroubleshootingWithServiceResponseAsync(resourceGroupName, networkWatcherName, parameters).toBlocking().last().body();
}
|
java
|
/**
 * A simple lens focusing on the (possibly absent) value stored under key {@code k}.
 * Delegates to the overload that takes a map constructor, supplying {@code HashMap::new}.
 *
 * @param k   the key to focus on
 * @param <K> the key type
 * @param <V> the value type
 * @return a simple lens from {@code Map<K, V>} to {@code Maybe<V>}
 */
public static <K, V> Lens.Simple<Map<K, V>, Maybe<V>> valueAt(K k) {
    return adapt(valueAt(HashMap::new, k));
}
|
java
|
/**
 * Emits the BLENDPS (blend packed single-precision) instruction with a
 * register destination, memory source, and immediate blend mask.
 *
 * @param dst  destination XMM register
 * @param src  memory source operand
 * @param imm8 immediate blend control mask
 */
public final void blendps(XMMRegister dst, Mem src, Immediate imm8)
{
    emitX86(INST_BLENDPS, dst, src, imm8);
}
|
java
|
/**
 * Deletes the alarm rule with the given id, then redirects back to the
 * alarm rule list page of the owning pipeline.
 *
 * @param alarmRuleId id of the alarm rule to remove
 * @param pipelineId  pipeline whose alarm rule list is shown after deletion
 * @param nav         navigator used to issue the redirect
 * @throws WebxException if the request handling fails
 */
public void doDelete(@Param("alarmRuleId") Long alarmRuleId, @Param("pipelineId") Long pipelineId, Navigator nav)
                                                                                                               throws WebxException {
    alarmRuleService.remove(alarmRuleId);
    nav.redirectToLocation("alarmRuleList.htm?pipelineId=" + pipelineId);
}
|
java
|
/**
 * Builds a request to list the blacklisted tokens for a given audience via
 * {@code GET api/v2/blacklists/tokens}. The request is authorized with the
 * configured API token and is not executed until the caller runs it.
 *
 * @param audience the token audience ({@code aud} claim) to filter by; must not be null
 * @return a request to execute
 */
public Request<List<Token>> getBlacklist(String audience) {
    Asserts.assertNotNull(audience, "audience");

    String url = baseUrl
            .newBuilder()
            .addPathSegments("api/v2/blacklists/tokens")
            .addQueryParameter("aud", audience)
            .build()
            .toString();
    CustomRequest<List<Token>> request = new CustomRequest<>(client, url, "GET", new TypeReference<List<Token>>() {
    });
    request.addHeader("Authorization", "Bearer " + apiToken);
    return request;
}
|
python
|
def track_child(self, child, logical_block_size, allow_duplicate=False):
    # type: (DirectoryRecord, int, bool) -> None
    '''
    A method to track an existing child of this directory record.

    Unlike adding a brand-new child, "tracking" registers a child that
    already exists on disk (the final False argument to _add_child).

    Parameters:
     child - The child directory record object to add.
     logical_block_size - The size of a logical block for this volume descriptor.
     allow_duplicate - Whether to allow duplicate names, as there are
                       situations where duplicate children are allowed.
    Returns:
     Nothing.
    '''
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('Directory Record not yet initialized')

    self._add_child(child, logical_block_size, allow_duplicate, False)
|
java
|
/**
 * Builds the full set of selection specs for traversing the inventory,
 * including recursion through nested folders.
 *
 * @return the folder-visiting spec followed by all non-folder traversal specs
 */
public static SelectionSpec[] buildFullTraversal() {
    List<TraversalSpec> tSpecs = buildFullTraversalV2NoFolder();

    // Recurse through the folders
    TraversalSpec visitFolders = createTraversalSpec("visitFolders",
            "Folder", "childEntity",
            new String[]{"visitFolders", "dcToHf", "dcToVmf", "crToH", "crToRp", "HToVm", "rpToVm"});

    SelectionSpec[] result = new SelectionSpec[tSpecs.size() + 1];
    result[0] = visitFolders;
    int index = 1;
    for (TraversalSpec spec : tSpecs) {
        result[index++] = spec;
    }
    return result;
}
|
python
|
def append_waiting_queue(self, transfer_coordinator):
    '''Append a transfer coordinator to the waiting queue.

    :param transfer_coordinator: the coordinator to enqueue.
    '''
    # Use lazy %-style logging args so the message (and the count call's
    # string formatting) is only rendered when DEBUG logging is enabled.
    # Note: the logged count is taken *before* the append.
    logger.debug("Add to waiting queue count=%d", self.waiting_coordinator_count())
    with self._lockw:
        self._waiting_transfer_coordinators.append(transfer_coordinator)
|
python
|
def unlink(self, key, *keys):
    """Delete a key asynchronously in another thread."""
    # Issue the UNLINK command, then coerce the reply to an int.
    pending = self.execute(b'UNLINK', key, *keys)
    return wait_convert(pending, int)
|
java
|
/**
 * Adds an {@code Include} rule for every supplied package prefix.
 *
 * @param prefixes package prefixes to include
 * @return this builder, for chaining
 */
public FilterBuilder includePackage(final String... prefixes) {
    for (final String packagePrefix : prefixes) {
        add(new Include(prefix(packagePrefix)));
    }
    return this;
}
|
java
|
/**
 * Max-reduction of {@code x} over the given dimensions. Delegates to the
 * overload with {@code keepDims} set to {@code false}, so the reduced
 * dimensions are not retained in the result.
 *
 * @param name       name of the output variable
 * @param x          input variable
 * @param dimensions dimensions to reduce over
 * @return the reduced variable
 */
public SDVariable max(String name, SDVariable x, int... dimensions) {
    return max(name, x, false, dimensions);
}
|
python
|
def run():
    """CLI main entry point.

    Builds the argparse command tree (upload / download / sync / run / scan),
    parses the command line, constructs the local and remote targets, runs
    the chosen synchronizer, and prints a summary depending on verbosity.
    """
    # Use print() instead of logging when running in CLI mode:
    set_pyftpsync_logger(None)

    parser = argparse.ArgumentParser(
        description="Synchronize folders over FTP.",
        epilog="See also https://github.com/mar10/pyftpsync",
        parents=[verbose_parser],
    )

    # Note: we want to allow --version to be combined with --verbose. However
    # on Py2, argparse makes sub-commands mandatory, unless `action="version"` is used.
    if check_cli_verbose(3) > 3:
        version_info = "pyftpsync/{} Python/{} {}".format(
            __version__, PYTHON_VERSION, platform.platform()
        )
    else:
        version_info = "{}".format(__version__)
    parser.add_argument("-V", "--version", action="version", version=version_info)

    subparsers = parser.add_subparsers(help="sub-command help")

    # --- Create the parser for the "upload" command ---------------------------

    sp = subparsers.add_parser(
        "upload",
        parents=[verbose_parser, common_parser, matcher_parser, creds_parser],
        help="copy new and modified files to remote folder",
    )
    sp.add_argument(
        "local",
        metavar="LOCAL",
        default=".",
        help="path to local folder (default: %(default)s)",
    )
    sp.add_argument("remote", metavar="REMOTE", help="path to remote folder")
    sp.add_argument(
        "--force",
        action="store_true",
        help="overwrite remote files, even if the target is newer "
        "(but no conflict was detected)",
    )
    sp.add_argument(
        "--resolve",
        default="ask",
        choices=["local", "skip", "ask"],
        help="conflict resolving strategy (default: '%(default)s')",
    )
    sp.add_argument(
        "--delete",
        action="store_true",
        help="remove remote files if they don't exist locally",
    )
    sp.add_argument(
        "--delete-unmatched",
        action="store_true",
        help="remove remote files if they don't exist locally "
        "or don't match the current filter (implies '--delete' option)",
    )

    sp.set_defaults(command="upload")

    # --- Create the parser for the "download" command -------------------------

    sp = subparsers.add_parser(
        "download",
        parents=[verbose_parser, common_parser, matcher_parser, creds_parser],
        help="copy new and modified files from remote folder to local target",
    )
    sp.add_argument(
        "local",
        metavar="LOCAL",
        default=".",
        help="path to local folder (default: %(default)s)",
    )
    sp.add_argument("remote", metavar="REMOTE", help="path to remote folder")
    sp.add_argument(
        "--force",
        action="store_true",
        help="overwrite local files, even if the target is newer "
        "(but no conflict was detected)",
    )
    sp.add_argument(
        "--resolve",
        default="ask",
        choices=["remote", "skip", "ask"],
        help="conflict resolving strategy (default: '%(default)s')",
    )
    sp.add_argument(
        "--delete",
        action="store_true",
        help="remove local files if they don't exist on remote target",
    )
    sp.add_argument(
        "--delete-unmatched",
        action="store_true",
        help="remove local files if they don't exist on remote target "
        "or don't match the current filter (implies '--delete' option)",
    )

    sp.set_defaults(command="download")

    # --- Create the parser for the "sync" command -----------------------------

    sp = subparsers.add_parser(
        "sync",
        parents=[verbose_parser, common_parser, matcher_parser, creds_parser],
        help="synchronize new and modified files between remote folder and local target",
    )
    sp.add_argument(
        "local",
        metavar="LOCAL",
        default=".",
        help="path to local folder (default: %(default)s)",
    )
    sp.add_argument("remote", metavar="REMOTE", help="path to remote folder")
    sp.add_argument(
        "--resolve",
        default="ask",
        choices=["old", "new", "local", "remote", "skip", "ask"],
        help="conflict resolving strategy (default: '%(default)s')",
    )

    sp.set_defaults(command="sync")

    # --- Create the parser for the "run" command -----------------------------

    add_run_parser(subparsers)

    # --- Create the parser for the "scan" command -----------------------------

    add_scan_parser(subparsers)

    # --- Parse command line ---------------------------------------------------

    args = parser.parse_args()

    # Fold --quiet into the effective verbosity level.
    args.verbose -= args.quiet
    del args.quiet

    # print("verbose", args.verbose)
    ftp_debug = 0
    if args.verbose >= 6:
        ftp_debug = 1

    # Modify the `args` from the `pyftpsync.yaml` config:
    if getattr(args, "command", None) == "run":
        handle_run_command(parser, args)

    if callable(getattr(args, "command", None)):
        # scan_handler
        try:
            return args.command(parser, args)
        except KeyboardInterrupt:
            print("\nAborted by user.", file=sys.stderr)
            sys.exit(3)
    elif not hasattr(args, "command"):
        parser.error(
            "missing command (choose from 'upload', 'download', 'run', 'sync', 'scan')"
        )

    # Post-process and check arguments
    if hasattr(args, "delete_unmatched") and args.delete_unmatched:
        args.delete = True

    # Build target objects (file system or FTP) from the path arguments.
    args.local_target = make_target(args.local, {"ftp_debug": ftp_debug})

    if args.remote == ".":
        parser.error("'.' is expected to be the local target (not remote)")
    args.remote_target = make_target(args.remote, {"ftp_debug": ftp_debug})
    if not isinstance(args.local_target, FsTarget) and isinstance(
        args.remote_target, FsTarget
    ):
        parser.error("a file system target is expected to be local")

    # Let the command handler do its thing
    opts = namespace_to_dict(args)
    if args.command == "upload":
        s = UploadSynchronizer(args.local_target, args.remote_target, opts)
    elif args.command == "download":
        s = DownloadSynchronizer(args.local_target, args.remote_target, opts)
    elif args.command == "sync":
        s = BiDirSynchronizer(args.local_target, args.remote_target, opts)
    else:
        parser.error("unknown command '{}'".format(args.command))

    s.is_script = True

    try:
        s.run()
    except KeyboardInterrupt:
        print("\nAborted by user.", file=sys.stderr)
        sys.exit(3)
    finally:
        # Prevent sporadic exceptions in ftplib, when closing in __del__
        s.local.close()
        s.remote.close()

    stats = s.get_stats()
    if args.verbose >= 5:
        pprint(stats)
    elif args.verbose >= 1:
        if args.dry_run:
            print("(DRY-RUN) ", end="")
        print(
            "Wrote {}/{} files in {} directories, skipped: {}.".format(
                stats["files_written"],
                stats["local_files"],
                stats["local_dirs"],
                stats["conflict_files_skipped"],
            ),
            end="",
        )
        if stats["interactive_ask"]:
            print()
        else:
            print(" Elap: {}.".format(stats["elap_str"]))

    return
|
python
|
def find_similar(self, doc, min_score=0.0, max_results=100):
    """
    Find `max_results` most similar articles in the index, each having similarity
    score of at least `min_score`. The resulting list may be shorter than `max_results`,
    in case there are not enough matching documents.

    `doc` is either a string (=document id, previously indexed) or a
    dict containing a 'tokens' key. These tokens are processed to produce a
    vector, which is then used as a query against the index.

    The similar documents are returned in decreasing similarity order, as
    `(doc_id, similarity_score, doc_payload)` 3-tuples. The payload returned
    is identical to what was supplied for this document during indexing.

    Raises RuntimeError if the server is currently locked for an update.
    """
    logger.debug("received query call with %r" % doc)
    if self.is_locked():
        msg = "cannot query while the server is being updated"
        logger.error(msg)
        raise RuntimeError(msg)
    sims_opt, sims_fresh = None, None
    # Cap how many hits each underlying index may return.
    for index in [self.fresh_index, self.opt_index]:
        if index is not None:
            index.topsims = max_results
    if isinstance(doc, basestring):
        # query by direct document id
        docid = doc
        # The document may live in either the optimized or the fresh index;
        # query the one that has it directly, and query the other with the
        # document's vector (unnormalized) so both result sets are comparable.
        if self.opt_index is not None and docid in self.opt_index:
            sims_opt = self.opt_index.sims_by_id(docid)
            if self.fresh_index is not None:
                vec = self.opt_index.vec_by_id(docid)
                sims_fresh = self.fresh_index.sims_by_vec(vec, normalize=False)
        elif self.fresh_index is not None and docid in self.fresh_index:
            sims_fresh = self.fresh_index.sims_by_id(docid)
            if self.opt_index is not None:
                vec = self.fresh_index.vec_by_id(docid)
                sims_opt = self.opt_index.sims_by_vec(vec, normalize=False)
        else:
            raise ValueError("document %r not in index" % docid)
    else:
        if 'topics' in doc:
            # user supplied vector directly => use that
            vec = gensim.matutils.any2sparse(doc['topics'])
        else:
            # query by an arbitrary text (=tokens) inside doc['tokens']
            vec = self.model.doc2vec(doc)  # convert document (text) to vector
        if self.opt_index is not None:
            sims_opt = self.opt_index.sims_by_vec(vec)
        if self.fresh_index is not None:
            sims_fresh = self.fresh_index.sims_by_vec(vec)

    # Combine hits from both indexes, then apply min_score / max_results.
    merged = merge_sims(sims_opt, sims_fresh)
    logger.debug("got %s raw similars, pruning with max_results=%s, min_score=%s" %
                 (len(merged), max_results, min_score))
    result = []
    for docid, score in merged:
        if score < min_score or 0 < max_results <= len(result):
            break
        result.append((docid, float(score), self.payload.get(docid, None)))
    return result
|
java
|
/**
 * Drains exactly {@code numSuffixes} answered queries from the iterator and
 * appends each query's output to the given list.
 *
 * @param queryIt     iterator over answered queries
 * @param output      list receiving the query outputs
 * @param numSuffixes number of queries to consume
 */
protected static <I, D> void fetchResults(Iterator<DefaultQuery<I, D>> queryIt, List<D> output, int numSuffixes) {
    for (int i = 0; i < numSuffixes; i++) {
        output.add(queryIt.next().getOutput());
    }
}
|
python
|
def set_ref(self, ref_key, ref_id):
    """
    Using a ref key and ref id set the
    reference to the appropriate resource type.
    """
    # Map each recognised ref key to the attribute it populates.
    attr_by_key = {
        'NETWORK': 'network_id',
        'NODE': 'node_id',
        'LINK': 'link_id',
        'GROUP': 'group_id',
        'SCENARIO': 'scenario_id',
        'PROJECT': 'project_id',
    }
    attr_name = attr_by_key.get(ref_key)
    if attr_name is None:
        raise HydraError("Ref Key %s not recognised."%ref_key)
    setattr(self, attr_name, ref_id)
|
java
|
/**
 * (Re)initializes the table: sets up the container columns and context menu,
 * then adds one row per user session whose user belongs to an organizational
 * unit manageable by the current user (web users are skipped).
 *
 * @throws CmsException if reading users, org units, or projects fails
 */
protected void ini() throws CmsException {

    // Create the container on first call, otherwise just clear it.
    if (m_container == null) {
        m_container = new IndexedContainer();
        setContainerDataSource(m_container);
    } else {
        m_container.removeAllItems();
    }
    for (TableProperty prop : TableProperty.values()) {
        m_container.addContainerProperty(prop, prop.getType(), prop.getDefaultValue());
        setColumnHeader(prop, prop.getLocalizedMessage());
    }
    setColumnWidth(TableProperty.Icon, 40);

    setSelectable(true);
    setMultiSelect(true);
    m_menu = new CmsContextMenu();
    m_menu.setAsTableContextMenu(this);

    List<CmsSessionInfo> sessionInfos = OpenCms.getSessionManager().getSessionInfos();
    List<CmsOrganizationalUnit> manageableOus = OpenCms.getRoleManager().getManageableOrgUnits(
        A_CmsUI.getCmsObject(),
        "",
        true,
        false);
    for (CmsSessionInfo session : sessionInfos) {
        CmsUser user = A_CmsUI.getCmsObject().readUser(session.getUserId());
        CmsOrganizationalUnit userOu = OpenCms.getOrgUnitManager().readOrganizationalUnit(
            A_CmsUI.getCmsObject(),
            user.getOuFqn());
        // Only show sessions of non-web users in manageable OUs.
        if (!(manageableOus.contains(userOu) && !user.isWebuser())) {
            continue;
        }
        //   CmsListItem item = getList().newItem(sessionInfo.getSessionId().toString());
        Item item = m_container.addItem(session.getSessionId().getStringValue());
        item.getItemProperty(TableProperty.UserName).setValue(user.getName());
        item.getItemProperty(TableProperty.DateCreated).setValue(
            session.getAgeOfSession() + " " + CmsVaadinUtils.getMessageText(Messages.GUI_MESSAGES_HOUR_0));
        // Milliseconds since the session was last updated.
        item.getItemProperty(TableProperty.IS_ACTIVE).setValue(
            new Long(System.currentTimeMillis() - session.getTimeUpdated()));
        item.getItemProperty(TableProperty.OrgUnit).setValue(userOu.getName());
        item.getItemProperty(TableProperty.Project).setValue(
            A_CmsUI.getCmsObject().readProject(session.getProject()).getName());
        CmsSite site = OpenCms.getSiteManager().getSiteForSiteRoot(session.getSiteRoot());
        // Fall back to the root-site label when no site matches the site root.
        String siteTitle = site == null
        ? CmsVaadinUtils.getMessageText(org.opencms.ade.galleries.Messages.GUI_ROOT_SITE_0)
        : site.getTitle();
        item.getItemProperty(TableProperty.Site).setValue(siteTitle);
        item.getItemProperty(TableProperty.IS_WAITING).setValue(
            new Boolean(!session.getBroadcastQueue().isEmpty()));
    }
    setVisibleColumns(
        TableProperty.Icon,
        TableProperty.IS_ACTIVE,
        TableProperty.UserName,
        TableProperty.DateCreated,
        TableProperty.Site,
        TableProperty.Project);
}
|
java
|
/**
 * Returns whether the array contains the given object, compared by
 * reference identity ({@code ==}), deliberately not {@code equals()}.
 *
 * @param searchFor the instance to look for
 * @param searchIn  the array to scan
 * @return true if the exact instance is present
 */
private static boolean containsObject(Object searchFor, Object[] searchIn)
{
    for (Object candidate : searchIn)
    {
        if (searchFor == candidate)
        {
            return true;
        }
    }
    return false;
}
|
java
|
/**
 * Creates a single-band pixel interpolator of the requested type.
 *
 * @param min        minimum possible pixel value
 * @param max        maximum possible pixel value
 * @param type       interpolation algorithm
 * @param borderType border handling; may be null, in which case no border is set
 * @param imageType  concrete image type
 * @return the configured interpolator
 */
public static <T extends ImageGray<T>> InterpolatePixelS<T>
createPixelS(double min, double max, InterpolationType type, BorderType borderType, Class<T> imageType)
{
    InterpolatePixelS<T> alg;

    switch( type ) {
        case NEAREST_NEIGHBOR:
            alg = nearestNeighborPixelS(imageType);
            break;

        case BILINEAR:
            // The bilinear factory receives the border type directly, so it
            // returns immediately and skips the setBorder() call below.
            return bilinearPixelS(imageType, borderType);

        case BICUBIC:
            alg = bicubicS(-0.5f, (float) min, (float) max, imageType);
            break;

        case POLYNOMIAL4:
            alg = polynomialS(4, min, max, imageType);
            break;

        default:
            throw new IllegalArgumentException("Add type: "+type);
    }
    if( borderType != null )
        alg.setBorder(FactoryImageBorder.single(imageType, borderType));
    return alg;
}
|
python
|
def to_dict(self, save_data=True):
    """
    Store the object into a json serializable dictionary

    :param boolean save_data: if true, it adds the data self.X and self.Y to the dictionary
    :return dict: json serializable dictionary containing the needed information to instantiate the object
    """
    # Serialize via the parent class, then record the concrete class path so
    # the dictionary can later be deserialized into the right model class.
    model_dict = super(SparseGPClassification,self).to_dict(save_data)
    model_dict["class"] = "GPy.models.SparseGPClassification"
    return model_dict
|
java
|
/**
 * Determines the record id for an insert and appends the column/value parts
 * of the statement via {@link #setProperties}.
 *
 * <p>If the JSON object has no id yet and the key generator is not
 * database-backed, a fresh id is generated and written into the object as a
 * side effect. With a database-backed generator no id is set here (NOTE(review):
 * presumably the database assigns it). Otherwise the existing id is reused.</p>
 *
 * @param jsonObject record to insert; may receive an id as a side effect
 * @param paramlist  receives the SQL parameter values
 * @param sql        receives the generated SQL fragment
 * @return the record id, or {@code null} when none was set here
 * @throws Exception if building the statement fails
 */
private String buildAddSql(final JSONObject jsonObject, final List<Object> paramlist, final StringBuilder sql) throws Exception {
    String ret = null;

    if (!jsonObject.has(Keys.OBJECT_ID)) {
        if (!(KEY_GEN instanceof DBKeyGenerator)) {
            ret = (String) KEY_GEN.gen();
            jsonObject.put(Keys.OBJECT_ID, ret);
        }
    } else {
        ret = jsonObject.getString(Keys.OBJECT_ID);
    }

    setProperties(jsonObject, paramlist, sql);

    return ret;
}
|
java
|
/**
 * XA phase-one prepare for the given transaction id.
 *
 * <p>Requires that {@code xid} is the currently associated transaction and
 * that {@code end()} has already been called (state == STATE_ENDED). On
 * success the resource returns to the idle state, the Cloud Spanner
 * transaction is prepared under the string form of the xid, and the
 * connection's original auto-commit mode is restored.</p>
 *
 * @param xid the transaction id to prepare
 * @return {@code XA_OK} on success
 * @throws XAException if preconditions are violated or the prepare fails
 */
@Override
public int prepare(Xid xid) throws XAException {
    if (logger.logDebug()) {
        debug("preparing transaction xid = " + xid);
    }

    // Check preconditions
    if (!currentXid.equals(xid)) {
        throw new CloudSpannerXAException(CloudSpannerXAException.PREPARE_WITH_SAME,
                Code.UNIMPLEMENTED, XAException.XAER_RMERR);
    }

    if (state != STATE_ENDED) {
        throw new CloudSpannerXAException(CloudSpannerXAException.PREPARE_BEFORE_END,
                Code.FAILED_PRECONDITION, XAException.XAER_INVAL);
    }

    // Detach from the current transaction before talking to the backend.
    state = STATE_IDLE;
    currentXid = null;

    try {
        String s = RecoveredXid.xidToString(xid);

        conn.prepareTransaction(s);
        conn.setAutoCommit(localAutoCommitMode);

        return XA_OK;
    } catch (CloudSpannerSQLException ex) {
        throw new CloudSpannerXAException(CloudSpannerXAException.ERROR_PREPARING, ex,
                XAException.XAER_RMERR);
    } catch (SQLException ex) {
        throw new CloudSpannerXAException(CloudSpannerXAException.ERROR_PREPARING, ex, Code.UNKNOWN,
                XAException.XAER_RMERR);
    }
}
|
python
|
def convert_to_underscore(name):
    """ "someFunctionWhatever" -> "some_function_whatever" """
    # Insert underscores at word boundaries in two passes, then lowercase.
    partially_split = _first_cap_re.sub(r'\1_\2', name)
    return _all_cap_re.sub(r'\1_\2', partially_split).lower()
|
python
|
def print_number_str(self, value, justify_right=True):
    """Print a 4 character long string of numeric values to the display.

    Characters in the string should be any supported character by set_digit,
    or a decimal point. Decimal point characters will be associated with
    the previous character.

    :param value: string of digits (and optional '.' characters) to show.
        If more than 4 digits are supplied, '----' is shown instead.
    :param justify_right: right-align the digits when True (the default),
        otherwise left-align them.
    """
    # Length of the value ignoring decimal points (a dot shares the cell of
    # the digit before it). Simpler than the old sum(map(lambda ...)) form.
    length = len(value) - value.count('.')
    # Error if value without decimals is longer than 4 characters.
    if length > 4:
        self.print_number_str('----')
        return
    # Calculate starting position of digits based on justification.
    pos = (4-length) if justify_right else 0
    # Go through each character and print it on the display.
    for ch in value:
        if ch == '.':
            # Print decimal points on the previous digit.
            self.set_decimal(pos-1, True)
        else:
            self.set_digit(pos, ch)
            pos += 1
|
java
|
/**
 * Issues a refund for the given order by POSTing to the refund endpoint,
 * and returns the raw API response as a map.
 *
 * @param orderId id of the order to refund
 * @param setup   API connection/configuration used to build the request
 * @return the deserialized API response
 */
public Map<String, Object> refundOrder(String orderId, Setup setup) {
    this.requestMaker = new RequestMaker(setup);
    RequestProperties props = new RequestPropertiesBuilder()
            .method("POST")
            .endpoint(String.format(ENDPOINT_REFUND_ORDER, orderId))
            .type(Refunds.class)
            .contentType(CONTENT_TYPE)
            .build();
    return this.requestMaker.doRequest(props);
}
|
java
|
/**
 * Resolves the type a property setter (or, failing that, the field itself)
 * accepts for the given property name, consulting the per-class cache first.
 *
 * @param clazz class to inspect
 * @param name  property name (without the "set" prefix)
 * @return the single parameter type of the matching setter, the field type
 *         when no suitable setter exists, or null if neither is found
 */
Class<?> getSetterPropertyType( Clazz<?> clazz, String name )
{
	ClassInfoCache cache = retrieveCache( clazz );
	Class<?> res = cache.getSetterType( name );
	if( res != null )
		return res;
	// Cache miss: look for a conventional single-argument setter first.
	String setterName = "set" + capitalizeFirstLetter( name );
	Method setter = clazz.getMethod( setterName );
	if( setter != null && setter.getParameterTypes().size() == 1 )
	{
		res = setter.getParameterTypes().get( 0 );
	}
	else
	{
		// Fall back to a field with the same name.
		Field field = clazz.getAllField( name );
		if( field != null )
			res = field.getType();
	}
	// Remember the result for subsequent lookups (misses are not cached).
	if( res != null )
		cache.setSetterType( name, res );
	return res;
}
|
python
|
def _calculate_degree_days(temperature_equivalent, base_temperature, cooling=False):
"""
Calculates degree days, starting with a series of temperature equivalent values
Parameters
----------
temperature_equivalent : Pandas Series
base_temperature : float
cooling : bool
Set True if you want cooling degree days instead of heating degree days
Returns
-------
Pandas Series called HDD_base_temperature for heating degree days or
CDD_base_temperature for cooling degree days.
"""
if cooling:
ret = temperature_equivalent - base_temperature
else:
ret = base_temperature - temperature_equivalent
# degree days cannot be negative
ret[ret < 0] = 0
prefix = 'CDD' if cooling else 'HDD'
ret.name = '{}_{}'.format(prefix, base_temperature)
return ret
|
java
|
/**
 * Loads the class with the given name, preferring the current thread's
 * context class loader and falling back to the loader of this class.
 *
 * @param className  fully qualified class name to load
 * @param initialize whether the class must be initialized
 * @return the loaded class
 * @throws ClassNotFoundException if the class cannot be found
 */
@GwtIncompatible("incompatible method")
public static Class<?> getClass(final String className, final boolean initialize) throws ClassNotFoundException {
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    if (loader == null) {
        loader = ClassUtils.class.getClassLoader();
    }
    return getClass(loader, className, initialize);
}
|
java
|
/**
 * Returns whether the visited method was already instrumented by this
 * adapter class, i.e. whether this class's name appears in the list of
 * method adapters recorded by the injected trace annotation visitor.
 * Returns false if no annotation visitor was injected.
 */
protected boolean isMethodInstrumentedByThisAdapter() {
    if (injectedTraceAnnotationVisitor == null) {
        return false;
    }

    List<String> visitedMethodAdapters = injectedTraceAnnotationVisitor.getMethodAdapters();
    return visitedMethodAdapters.contains(getClass().getName());
}
|
python
|
def get_source(source):
    """Get the source data for a particular GW catalog.

    :param source: name of the catalog; currently only 'gwtc-1' is supported.
    :return: the 'data' payload of the catalog JSON.
    :raises ValueError: if the catalog name is not recognised.
    """
    if source == 'gwtc-1':
        fname = download_file(gwtc1_url, cache=True)
        # Use a context manager so the file handle is closed promptly
        # instead of being leaked.
        with open(fname, 'r') as catalog_file:
            data = json.load(catalog_file)
    else:
        raise ValueError('Unknown catalog source {}'.format(source))
    return data['data']
|
java
|
/**
 * Caches the validation result produced for the given move, so it can be
 * reused when the same move is validated again.
 *
 * @param move       the move that was validated
 * @param validation the validation outcome to cache
 */
@Override
public final void cacheMoveValidation(Move<?> move, Validation validation) {
    validatedMove = move;
    this.validation = validation;
}
|
java
|
/**
 * Scans the monitored directory, compares it against the previous in-memory
 * scan result, and reports changes into the supplied collections.
 *
 * @param created  receives files created since the last scan; may be null
 * @param modified receives files modified since the last scan; may be null
 * @param deleted  receives files deleted since the last scan; may be null
 */
protected void performScan(Collection<File> created, Collection<File> modified, Collection<File> deleted) {
    final LinkedHashMap<String, FileInfo> prevScanResult = inMemoryScanResults;
    final LinkedHashMap<String, FileInfo> newScanResult = new LinkedHashMap<String, FileInfo>();

    // Check that directory exists
    if (!monitoredFile.isDirectory()) {
        // Directory does not exist or was removed
        // If we had/knew about files before, then all of the files were deleted..
        if (prevScanResult != null && !prevScanResult.isEmpty() && deleted != null) {
            for (Map.Entry<String, FileInfo> entry : prevScanResult.entrySet()) {
                File f = new File(entry.getKey());

                // Simplify: Make sure only stuff that matches gets
                // into the cache, and then you can be indiscriminate about what
                // gets removed.
                deleted.add(f);
            }

            // Add the monitored directory itself to the results.. (if required)
            if (isIncludeSelf()) {
                deleted.add(monitoredFile);
            }
        }
    } else {
        // Directory exists: check only nested resources that match.
        scanDirectory(prevScanResult, newScanResult, monitoredFile, created, modified);

        // Add the monitored directory itself to the results.. (if required)
        if (isIncludeSelf()) {
            boolean isFile = isFile(monitoredFile);
            if (matches(monitoredFile, isFile)) {
                scanFile(prevScanResult, newScanResult, monitoredFile, created, modified, isFile);
            }
        }

        // Any remaining in the previous result have been deleted.
        // Notify based on filter settings
        if (deleted != null && prevScanResult != null) {
            for (Map.Entry<String, FileInfo> entry : prevScanResult.entrySet()) {
                File f = new File(entry.getKey());

                // Simplify: Make sure only stuff that matches gets into the cache,
                // and then you know whatever is leftover in the cache was deleted
                // (because you know it matches.. )
                deleted.add(f);
            }
        }
    }

    // replace the referenced map with the new scan result
    inMemoryScanResults = newScanResult;
}
|
java
|
/**
 * Dispatches the zoom event to the adapter callback matching its event type
 * (started / zoom / finished), falling back to {@code anyZoom} for any
 * other zoom event type.
 *
 * @param zoomEvent the event to dispatch
 */
@Override
public void handle(final ZoomEvent zoomEvent) {
    final EventType<?> type = zoomEvent.getEventType();
    if (ZoomEvent.ZOOM_STARTED == type) {
        adapter().zoomStarted(zoomEvent);
    } else if (ZoomEvent.ZOOM == type) {
        adapter().zoom(zoomEvent);
    } else if (ZoomEvent.ZOOM_FINISHED == type) {
        adapter().zoomFinished(zoomEvent);
    } else {
        adapter().anyZoom(zoomEvent);
    }
}
|
python
|
def has_var_groups(self):
    '''Returns a set of the variant group ids that this cluster has'''
    # Keep only entries with a known variant and a real (non '.') group id.
    return {
        entry['var_group']
        for entry in self.data
        if self._has_known_variant(entry) != 'no' and entry['var_group'] != '.'
    }
|
python
|
def intervals_containing(t, p):
    """Query the interval tree.

    :param t: root of the interval tree
    :param p: value
    :returns: a list of intervals containing p
    :complexity: O(log n + m), where n is the number of intervals in t,
                 and m the length of the returned list
    """
    if t is None:
        return []
    INF = float('inf')
    # Sorts strictly after every real entry whose key equals p.
    sentinel = (p, (INF, INF))
    if p < t.center:
        # Every interval stored here with low endpoint <= p contains p.
        matches = intervals_containing(t.left, p)
        cutoff = bisect_right(t.by_low, sentinel)
        matches.extend(interval for _, interval in t.by_low[:cutoff])
    else:
        # Symmetric case: intervals with high endpoint >= p contain p.
        matches = intervals_containing(t.right, p)
        start = bisect_right(t.by_high, sentinel)
        matches.extend(interval for _, interval in t.by_high[start:])
    return matches
|
python
|
def init(opts):
    '''
    Open the connection to the Nexus switch over the NX-API.

    As the communication is HTTP based, there is no connection to maintain,
    however, in order to test the connectivity and make sure we are able to
    bring up this Minion, we are executing a very simple command (``show clock``)
    which doesn't come with much overhead and it's sufficient to confirm we are
    indeed able to connect to the NX-API endpoint as configured.
    '''
    proxy_dict = opts.get('proxy', {})
    conn_args = copy.deepcopy(proxy_dict)
    conn_args.pop('proxytype', None)
    # This is not a SSH-based proxy, so it should be safe to enable
    # multiprocessing.
    opts['multiprocessing'] = conn_args.pop('multiprocessing', True)
    try:
        # Execute a very simple command to confirm we are able to connect
        # properly; the reply itself is irrelevant.
        __utils__['nxos_api.rpc']('show clock', **conn_args)
        nxos_device['conn_args'] = conn_args
        nxos_device['initialized'] = True
        nxos_device['up'] = True
    except SaltException:
        log.error('Unable to connect to %s', conn_args['host'], exc_info=True)
        raise
    return True
|
python
|
def parse_references_elements(ref_sect, kbs, linker_callback=None):
    """Passed a complete reference section, process each line and attempt to
    identify and standardise individual citations within the line.

    @param ref_sect: (list) of strings - each string in the list is a
        reference line.
    @param kbs: (dictionary) - the knowledge bases handed through to
        parse_reference_line for recognising report numbers, titles, etc.
    @param linker_callback: (callable or None) - optional callback handed
        through to parse_reference_line.
    @return: (tuple) of 3 components:
        ( list -> one dict per reference line, with keys 'elements'
                  (the parsed citation elements), 'line_marker' and
                  'raw_ref' (the original line).
          dict -> counts of extracted element types, with keys 'misc',
                  'title', 'reportnum', 'url', 'doi' and 'auth_group'.
          dict -> the totals for each 'bad title' found in the reference
                  section.
        )
    """
    # a list to contain the processed reference lines:
    citations = []
    # counters for extraction stats:
    counts = {
        'misc': 0,
        'title': 0,
        'reportnum': 0,
        'url': 0,
        'doi': 0,
        'auth_group': 0,
    }
    # A dictionary to contain the total count of each 'bad title' found
    # in the entire reference section:
    bad_titles_count = {}

    # Cleanup the reference lines

    # process references line-by-line:
    for ref_line in ref_sect:
        clean_line = wash_and_repair_reference_line(ref_line)

        citation_elements, line_marker, this_counts, bad_titles_count = \
            parse_reference_line(
                clean_line, kbs, bad_titles_count, linker_callback)

        # Accumulate stats
        counts = sum_2_dictionaries(counts, this_counts)

        citations.append({'elements': citation_elements,
                          'line_marker': line_marker,
                          'raw_ref': ref_line})

    # Return the list of processed reference lines:
    return citations, counts, bad_titles_count
|
java
|
/**
 * Creates a builder for a configuration with a single source URI.
 * Convenience overload that wraps the URI in a singleton list.
 *
 * @param sourceUri fully qualified URI of the source data
 * @param schema    schema of the data
 * @param format    format of the data
 * @return a new builder
 */
public static Builder newBuilder(String sourceUri, Schema schema, FormatOptions format) {
    return newBuilder(ImmutableList.of(sourceUri), schema, format);
}
|
python
|
def targets_w_bins(cnv_file, access_file, target_anti_fn, work_dir, data):
    """Calculate target and anti-target files with pre-determined bins.

    :param cnv_file: BED file of target regions.
    :param access_file: BED file of accessible regions.
    :param target_anti_fn: callable returning (target_bin, anti_bin) sizes.
    :param work_dir: directory in which the output files are written.
    :param data: sample data dictionary (used for the sample name and
        transactional file handling).
    :return: tuple of (target_file, anti_file) paths.
    """
    target_file = os.path.join(work_dir, "%s-target.bed" % dd.get_sample_name(data))
    anti_file = os.path.join(work_dir, "%s-antitarget.bed" % dd.get_sample_name(data))
    if not utils.file_exists(target_file):
        target_bin, _ = target_anti_fn()
        with file_transaction(data, target_file) as tx_out_file:
            cmd = [_get_cmd(), "target", cnv_file, "--split", "-o", tx_out_file,
                   "--avg-size", str(target_bin)]
            do.run(_prep_cmd(cmd, tx_out_file), "CNVkit target")
    # NOTE(review): this check uses os.path.exists while the target check above
    # uses utils.file_exists -- looks inconsistent; confirm which is intended.
    if not os.path.exists(anti_file):
        _, anti_bin = target_anti_fn()
        with file_transaction(data, anti_file) as tx_out_file:
            # Create access file without targets to avoid overlap
            # antitarget in cnvkit is meant to do this but appears to not always happen
            # after chromosome 1
            tx_access_file = os.path.join(os.path.dirname(tx_out_file), os.path.basename(access_file))
            pybedtools.BedTool(access_file).subtract(cnv_file).saveas(tx_access_file)
            cmd = [_get_cmd(), "antitarget", "-g", tx_access_file, cnv_file, "-o", tx_out_file,
                   "--avg-size", str(anti_bin)]
            do.run(_prep_cmd(cmd, tx_out_file), "CNVkit antitarget")
    return target_file, anti_file
|
python
|
def add_latlon_metadata(lat_var, lon_var):
    """Adds latitude and longitude metadata.

    Sets the CF-style attributes (long_name, standard_name, units, axis)
    on the given latitude and longitude variable objects in place.
    """
    attrs = (
        (lat_var, 'latitude', 'degrees_north', 'Y'),
        (lon_var, 'longitude', 'degrees_east', 'X'),
    )
    for var, name, units, axis in attrs:
        var.long_name = name
        var.standard_name = name
        var.units = units
        var.axis = axis
|
java
|
/**
 * Computes the plan of action for syncing an inode against the corresponding UFS path.
 *
 * @param inode the inode to sync
 * @param ufsFingerprint the fingerprint of the corresponding UFS path; must not be null
 *        (it is dereferenced unconditionally)
 * @param containsMountPoint whether the path or a descendant is a mount point (mount
 *        points must never be deleted, only updated)
 * @return the {@link UfsSyncUtils.SyncPlan} describing which sync actions to take
 */
public static SyncPlan computeSyncPlan(Inode inode, Fingerprint ufsFingerprint,
    boolean containsMountPoint) {
    Fingerprint inodeFingerprint = Fingerprint.parse(inode.getUfsFingerprint());
    boolean isContentSynced = inodeUfsIsContentSynced(inode, inodeFingerprint, ufsFingerprint);
    boolean isMetadataSynced = inodeUfsIsMetadataSynced(inode, inodeFingerprint, ufsFingerprint);
    boolean ufsExists = ufsFingerprint.isValid();
    // Fix: ufsFingerprint was already dereferenced unconditionally above (isValid()), so the
    // former "ufsFingerprint != null &&" guard here was dead code and has been removed.
    boolean ufsIsDir =
        Fingerprint.Type.DIRECTORY.name().equals(ufsFingerprint.getTag(Fingerprint.Tag.TYPE));
    UfsSyncUtils.SyncPlan syncPlan = new UfsSyncUtils.SyncPlan();
    if (isContentSynced && isMetadataSynced) {
      // Inode is already synced.
      if (inode.isDirectory() && inode.isPersisted()) {
        // Both Alluxio and UFS are directories, so sync the children of the directory.
        syncPlan.setSyncChildren();
      }
      return syncPlan;
    }
    // One of the metadata or content is not in sync
    if (inode.isDirectory() && (containsMountPoint || ufsIsDir)) {
      // Instead of deleting and then loading metadata to update, try to update directly
      // - mount points (or paths with mount point descendants) should not be deleted
      // - directory permissions can be updated without removing the inode
      if (inode.getParentId() != InodeTree.NO_PARENT) {
        // Only update the inode if it is not the root directory. The root directory is a special
        // case, since it is expected to be owned by the process that starts the master, and not
        // the owner on UFS.
        syncPlan.setUpdateMetadata();
      }
      syncPlan.setSyncChildren();
      return syncPlan;
    }
    // One of metadata or content is not in sync and it is a file
    // The only way for a directory to reach this point, is that the ufs with the same path is not
    // a directory. That requires a deletion and reload as well.
    if (!isContentSynced) {
      // update inode, by deleting and then optionally loading metadata
      syncPlan.setDelete();
      if (ufsExists) {
        // UFS exists, so load metadata later.
        syncPlan.setLoadMetadata();
      }
    } else {
      syncPlan.setUpdateMetadata();
    }
    return syncPlan;
  }
|
java
|
/**
 * Runs task-level quality checks for this fork/branch and decides whether its
 * output can be published.
 *
 * <p>Copies expected/extracted row counts and the writer's written-record count
 * into the fork task state, executes the configured task-level policies, and
 * consults the {@code TaskPublisher} for a verdict.
 *
 * @param schema the extracted schema, recorded into the fork state when present
 * @return {@code true} only when the publisher reports {@code SUCCESS};
 *         {@code false} on any policy failure, incomplete component, or error
 * @throws Exception if obtaining the policy checker or publisher fails
 *         (policy execution errors themselves are caught and logged)
 */
private boolean checkDataQuality(Optional<Object> schema)
    throws Exception {
    // With multiple branches, the shared extractor counts must be copied into
    // this fork's own state so branch-level policies can see them.
    if (this.branches > 1) {
      this.forkTaskState.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED,
          this.taskState.getProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED));
      this.forkTaskState.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXTRACTED,
          this.taskState.getProp(ConfigurationKeys.EXTRACTOR_ROWS_EXTRACTED));
    }
    String writerRecordsWrittenKey =
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_RECORDS_WRITTEN, this.branches, this.index);
    // Record how many rows were written; 0 when no writer was created for this fork.
    if (this.writer.isPresent()) {
      this.forkTaskState.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, this.writer.get().recordsWritten());
      this.taskState.setProp(writerRecordsWrittenKey, this.writer.get().recordsWritten());
    } else {
      this.forkTaskState.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, 0L);
      this.taskState.setProp(writerRecordsWrittenKey, 0L);
    }
    if (schema.isPresent()) {
      this.forkTaskState.setProp(ConfigurationKeys.EXTRACT_SCHEMA, schema.get().toString());
    }
    try {
      // Do task-level quality checking
      // NOTE(review): index -1 appears to mean "no branch" for the single-branch
      // case -- confirm against TaskContext.getTaskLevelPolicyChecker.
      TaskLevelPolicyCheckResults taskResults =
          this.taskContext.getTaskLevelPolicyChecker(this.forkTaskState, this.branches > 1 ? this.index : -1)
              .executePolicies();
      TaskPublisher publisher = this.taskContext.getTaskPublisher(this.forkTaskState, taskResults);
      // Only a clean SUCCESS verdict allows publishing; all other outcomes are
      // logged and treated as failure.
      switch (publisher.canPublish()) {
        case SUCCESS:
          return true;
        case CLEANUP_FAIL:
          this.logger.error("Cleanup failed for task " + this.taskId);
          break;
        case POLICY_TESTS_FAIL:
          this.logger.error("Not all quality checking passed for task " + this.taskId);
          break;
        case COMPONENTS_NOT_FINISHED:
          this.logger.error("Not all components completed for task " + this.taskId);
          break;
        default:
          break;
      }
      return false;
    } catch (Throwable t) {
      // Deliberately broad: a quality-check failure must not crash the task,
      // it simply prevents publication.
      this.logger.error("Failed to check task-level data quality", t);
      return false;
    }
  }
|
python
|
def _get_cur_remotes(path):
    """Retrieve remote references defined in the CWL.

    Recursively walks ``path``: lists/tuples and dict values are descended
    into, while non-empty strings are matched against the known integration
    prefixes from ``INTEGRATION_MAP``. Returns the set of matched remotes.
    """
    remotes = set()
    if isinstance(path, (list, tuple)):
        for item in path:
            remotes.update(_get_cur_remotes(item))
    elif isinstance(path, dict):
        for item in path.values():
            remotes.update(_get_cur_remotes(item))
    elif path and isinstance(path, six.string_types):
        prefixes = tuple(INTEGRATION_MAP.keys())
        if path.startswith(prefixes):
            remotes.add(INTEGRATION_MAP.get(path.split(":")[0] + ":"))
    return remotes
|
java
|
/**
 * Tags the rendering element generated for a bond with the "bond" class so
 * it can be identified in the output (e.g. for styling or selection).
 *
 * @param elem the rendering element produced for the bond; must not be null
 * @param bond the bond that the element depicts
 * @return the element wrapped as a {@link MarkedElement} carrying the "bond" class
 */
public static MarkedElement markupBond(IRenderingElement elem, IBond bond) {
    assert elem != null;
    final MarkedElement marked = markupChemObj(elem, bond);
    marked.aggClass("bond");
    return marked;
}
|
python
|
def proxy_napalm_wrap(func):
    '''
    This decorator is used to make the execution module functions
    available outside a proxy minion, or when running inside a proxy
    minion. If we are running in a proxy, retrieve the connection details
    from the __proxy__ injected variable. If we are not, then
    use the connection information from the opts.

    :param func: the execution module function to wrap.
    :return: the wrapped function; before each call a ``napalm_device``
        global is injected into the function's module namespace so the
        execution module can talk to the network device.
    '''
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        # The decorated function's own module globals; we inject
        # `napalm_device` (and sometimes `__opts__`) into them below.
        wrapped_global_namespace = func.__globals__
        # get __opts__ and __proxy__ from func_globals
        proxy = wrapped_global_namespace.get('__proxy__')
        # deepcopy so later mutations (force_reconnect merge) never leak
        # back into the module-level __opts__
        opts = copy.deepcopy(wrapped_global_namespace.get('__opts__'))
        # in any case, will inject the `napalm_device` global
        # the execution modules will make use of this variable from now on
        # previously they were accessing the device properties through the __proxy__ object
        always_alive = opts.get('proxy', {}).get('always_alive', True)
        # force_reconnect is a magic keyword arg that allows one to establish
        # a separate connection to the network device running under an always
        # alive Proxy Minion, using new credentials (overriding the ones
        # configured in the opts / pillar.
        force_reconnect = kwargs.get('force_reconnect', False)
        if force_reconnect:
            log.debug('Usage of reconnect force detected')
            log.debug('Opts before merging')
            log.debug(opts['proxy'])
            # Merge the CLI-provided kwargs (e.g. new credentials) over the
            # configured proxy connection options.
            opts['proxy'].update(**kwargs)
            log.debug('Opts after merging')
            log.debug(opts['proxy'])
        if is_proxy(opts) and always_alive:
            # if it is running in a NAPALM Proxy and it's using the default
            # always alive behaviour, will get the cached copy of the network
            # device object which should preserve the connection.
            if force_reconnect:
                wrapped_global_namespace['napalm_device'] = get_device(opts)
            else:
                wrapped_global_namespace['napalm_device'] = proxy['napalm.get_device']()
        elif is_proxy(opts) and not always_alive:
            # if still proxy, but the user does not want the SSH session always alive
            # get a new device instance
            # which establishes a new connection
            # which is closed just before the call() function defined above returns
            if 'inherit_napalm_device' not in kwargs or ('inherit_napalm_device' in kwargs and
                                                         not kwargs['inherit_napalm_device']):
                # try to open a new connection
                # but only if the function does not inherit the napalm driver
                # for configuration management this is very important,
                # in order to make sure we are editing the same session.
                try:
                    wrapped_global_namespace['napalm_device'] = get_device(opts)
                except napalm_base.exceptions.ConnectionException as nce:
                    log.error(nce)
                    return '{base_msg}. See log for details.'.format(
                        base_msg=six.text_type(nce.msg)
                    )
            else:
                # in case the `inherit_napalm_device` is set
                # and it also has a non-empty value,
                # the global var `napalm_device` will be overridden.
                # this is extremely important for configuration-related features
                # as all actions must be issued within the same configuration session
                # otherwise we risk to open multiple sessions
                wrapped_global_namespace['napalm_device'] = kwargs['inherit_napalm_device']
        else:
            # if not a NAPALM proxy
            # thus it is running on a regular minion, directly on the network device
            # or another flavour of Minion from where we can invoke arbitrary
            # NAPALM commands
            # get __salt__ from func_globals
            log.debug('Not running in a NAPALM Proxy Minion')
            _salt_obj = wrapped_global_namespace.get('__salt__')
            napalm_opts = _salt_obj['config.get']('napalm', {})
            napalm_inventory = _salt_obj['config.get']('napalm_inventory', {})
            log.debug('NAPALM opts found in the Minion config')
            log.debug(napalm_opts)
            clean_kwargs = salt.utils.args.clean_kwargs(**kwargs)
            napalm_opts.update(clean_kwargs)  # no need for deeper merge
            log.debug('Merging the found opts with the CLI args')
            log.debug(napalm_opts)
            # Try to resolve the target device in the napalm_inventory by any
            # of the supported host identifiers.
            host = napalm_opts.get('host') or napalm_opts.get('hostname') or\
                   napalm_opts.get('fqdn') or napalm_opts.get('ip')
            if host and napalm_inventory and isinstance(napalm_inventory, dict) and\
               host in napalm_inventory:
                inventory_opts = napalm_inventory[host]
                log.debug('Found %s in the NAPALM inventory:', host)
                log.debug(inventory_opts)
                napalm_opts.update(inventory_opts)
                log.debug('Merging the config for %s with the details found in the napalm inventory:', host)
                log.debug(napalm_opts)
            opts = copy.deepcopy(opts)  # make sure we don't override the original
            # opts, but just inject the CLI args from the kwargs to into the
            # object manipulated by ``get_device_opts`` to extract the
            # connection details, then use then to establish the connection.
            opts['napalm'] = napalm_opts
            if 'inherit_napalm_device' not in kwargs or ('inherit_napalm_device' in kwargs and
                                                         not kwargs['inherit_napalm_device']):
                # try to open a new connection
                # but only if the function does not inherit the napalm driver
                # for configuration management this is very important,
                # in order to make sure we are editing the same session.
                try:
                    wrapped_global_namespace['napalm_device'] = get_device(opts, salt_obj=_salt_obj)
                except napalm_base.exceptions.ConnectionException as nce:
                    log.error(nce)
                    return '{base_msg}. See log for details.'.format(
                        base_msg=six.text_type(nce.msg)
                    )
            else:
                # in case the `inherit_napalm_device` is set
                # and it also has a non-empty value,
                # the global var `napalm_device` will be overridden.
                # this is extremely important for configuration-related features
                # as all actions must be issued within the same configuration session
                # otherwise we risk to open multiple sessions
                wrapped_global_namespace['napalm_device'] = kwargs['inherit_napalm_device']
        if not_always_alive(opts):
            # inject the __opts__ only when not always alive
            # otherwise, we don't want to overload the always-alive proxies
            wrapped_global_namespace['napalm_device']['__opts__'] = opts
        ret = func(*args, **kwargs)
        if force_reconnect:
            log.debug('That was a forced reconnect, gracefully clearing up')
            device = wrapped_global_namespace['napalm_device']
            # NOTE(review): the return value of call() is captured but unused;
            # presumably only the side effect (closing the session) matters.
            closing = call(device, 'close', __retry=False)
        return ret
    return func_wrapper
|
python
|
def qualified_name(obj) -> str:
    """
    Return the qualified name (e.g. package.module.Type) for the given object.

    If ``obj`` is not a class, the returned name will match its type instead.
    Builtin types are returned bare (``int`` rather than ``builtins.int``).
    """
    cls = obj if isclass(obj) else type(obj)
    if cls.__module__ == 'builtins':
        return cls.__name__
    return '{}.{}'.format(cls.__module__, cls.__qualname__)
|
java
|
/**
 * Serializes a value of the GSLELINEEND data type to its string form.
 *
 * @param eDataType the datatype descriptor (not consulted by this conversion)
 * @param instanceValue the value to serialize; may be {@code null}
 * @return the value's {@code toString()} representation, or {@code null} when
 *         {@code instanceValue} is {@code null}
 */
public String convertGSLELINEENDToString(EDataType eDataType, Object instanceValue) {
    if (instanceValue == null) {
        return null;
    }
    return instanceValue.toString();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.