language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
// ANTLR-generated rule method for the "innerCreator" grammar rule
// (Java.g line 1256): matches `Identifier classCreatorRest`, i.e. the
// `Inner(args)` part of an inner-class creation such as `outer.new Inner(args)`.
public final void innerCreator() throws RecognitionException {
    int innerCreator_StartIndex = input.index();
    try {
        // While backtracking, skip this rule if this input position was
        // already parsed (memoization keyed on rule number 132).
        if ( state.backtracking>0 && alreadyParsedRule(input, 132) ) { return; }
        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1256:5: ( Identifier classCreatorRest )
        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1256:7: Identifier classCreatorRest
        {
            match(input,Identifier,FOLLOW_Identifier_in_innerCreator6078); if (state.failed) return;
            pushFollow(FOLLOW_classCreatorRest_in_innerCreator6080);
            classCreatorRest();
            state._fsp--;
            if (state.failed) return;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        // do for sure before leaving: record the parse result for this
        // rule/position so backtracking does not re-parse it.
        if ( state.backtracking>0 ) { memoize(input, 132, innerCreator_StartIndex); }
    }
}
|
java
|
/**
 * Tests whether {@code a} contains {@code b}, comparing characters
 * ASCII-case-insensitively (delegates to {@code contains} with the
 * ASCII case-insensitive comparator).
 *
 * @param a the sequence to search in
 * @param b the sequence to search for
 * @return {@code true} if {@code a} contains {@code b} ignoring ASCII case
 */
public static boolean containsIgnoreCase(CharSequence a, CharSequence b) {
    return contains(a, b, AsciiCaseInsensitiveCharEqualityComparator.INSTANCE);
}
|
python
|
def unicode(self, *, invert_color: bool = False, borders: bool = False) -> str:
    """
    Returns a string representation of the board with Unicode pieces.
    Useful for pretty-printing to a terminal.

    :param invert_color: Invert color of the Unicode pieces.
    :param borders: Show borders and a coordinate margin.
    """
    parts = []
    for rank_index in range(7, -1, -1):
        if borders:
            # Horizontal rule above each rank plus the rank-label margin.
            parts.append(" ")
            parts.append("-" * 17)
            parts.append("\n")
            parts.append(RANK_NAMES[rank_index])
            parts.append(" ")

        for file_index in range(8):
            square_index = square(file_index, rank_index)

            # Cell separator: pipes when bordered, single spaces otherwise.
            if borders:
                parts.append("|")
            elif file_index > 0:
                parts.append(" ")

            piece = self.piece_at(square_index)
            # Middle dot marks an empty square.
            parts.append(piece.unicode_symbol(invert_color=invert_color) if piece else u"·")

        if borders:
            parts.append("|")
        if borders or rank_index > 0:
            parts.append("\n")

    if borders:
        # Closing rule and the file letters along the bottom.
        parts.append(" ")
        parts.append("-" * 17)
        parts.append("\n")
        parts.append(" a b c d e f g h")

    return "".join(parts)
|
python
|
def as_enum(enum):
    """Turn a possibly string enum into an integer enum.

    String inputs are resolved as ``GL_<NAME>`` first against the ``gl``
    module and then against the internal-formats table; anything else is
    returned unchanged.

    Raises ValueError if a string name cannot be resolved.
    """
    if isinstance(enum, string_types):
        name = 'GL_' + enum.upper()
        try:
            enum = getattr(gl, name)
        except AttributeError:
            try:
                enum = _internalformats[name]
            except KeyError:
                raise ValueError('Could not find int value for enum %r' % enum)
    return enum
|
java
|
/**
 * Returns all column names known to this mapping.
 *
 * @return a defensive copy of the column-name set, preserving iteration order
 */
final Set<String> getAllColumnNames() {
    final Set<String> columnNameSet = mColumnNameFieldInfoMap.keySet();
    // Return a copy of columnNameSet because callers ({@see D6Inex}) directly
    // manipulate (mostly delete from) the returned set. Handing out the live
    // keySet view would mutate mColumnNameFieldInfoMap itself, so the copy
    // protects the original map from edits.
    return new LinkedHashSet<String>(columnNameSet);
}
|
java
|
/**
 * Returns a copy of this TimeOfDay using the given chronology (forced to
 * UTC) while keeping the field values unchanged. Returns {@code this}
 * when the effective chronology is already in use.
 *
 * @param newChronology the chronology to use, null means default
 * @return a TimeOfDay with the same field values in the new chronology
 */
public TimeOfDay withChronologyRetainFields(Chronology newChronology) {
    // Resolve null to the default chronology, then force UTC.
    newChronology = DateTimeUtils.getChronology(newChronology).withUTC();
    if (newChronology == getChronology()) {
        return this;
    }
    TimeOfDay result = new TimeOfDay(this, newChronology);
    // Re-validate the retained field values against the new chronology.
    newChronology.validate(result, getValues());
    return result;
}
|
python
|
def summary(args):
    """
    %prog summary *.fasta

    Report real bases and N's in fastafiles in a tabular report
    """
    from jcvi.utils.natsort import natsort_key

    p = OptionParser(summary.__doc__)
    p.add_option("--suffix", default="Mb",
            help="make the base pair counts human readable [default: %default]")
    p.add_option("--ids",
            help="write the ids that have >= 50% N's [default: %default]")
    p.set_outfile()

    opts, args = p.parse_args(args)

    if len(args) == 0:
        sys.exit(not p.print_help())

    idsfile = opts.ids
    header = "Seqid Real N's Total %_real".split()
    if idsfile:
        idsfile = open(idsfile, "w")
        # nids counts records that are >= 50% N's; only tracked when an
        # ids output file was requested.
        nids = 0

    data = []
    for fastafile in args:
        for rec in SeqIO.parse(fastafile, "fasta"):
            seqlen = len(rec)
            # Count both cases of N (ambiguous/unknown bases).
            nns = rec.seq.count('n') + rec.seq.count('N')
            reals = seqlen - nns
            pct = reals * 100. / seqlen
            pctreal = "{0:.1f}%".format(pct)
            # pct is the percentage of *real* bases, so pct < 50 means the
            # record is >= 50% N's.
            if idsfile and pct < 50:
                nids += 1
                print(rec.id, file=idsfile)

            data.append((rec.id, reals, nns, seqlen, pctreal))

    data.sort(key=natsort_key)
    # NOTE(review): zip(*data) raises ValueError when no records were
    # parsed — TODO confirm inputs are guaranteed non-empty. `ids` is
    # unpacked but unused afterwards.
    ids, reals, nns, seqlen, pctreal = zip(*data)
    reals = sum(reals)
    nns = sum(nns)
    seqlen = sum(seqlen)
    pctreal = "{0:.1f}%".format(reals * 100. / seqlen)
    # Append a grand-total row to the tabular report.
    data.append(("Total", reals, nns, seqlen, pctreal))

    write_csv(header, data, sep=" ", filename=opts.outfile, thousands=True)
    if idsfile:
        logging.debug("A total of {0} ids >= 50% N's written to {1}.".\
                format(nids, idsfile.name))
        idsfile.close()

    return reals, nns, seqlen
|
java
|
/**
 * Sets the bundle list, storing a defensive copy of the given collection.
 *
 * @param bundleList the bundles to store, or {@code null} to clear the list
 */
public void setBundleList(java.util.Collection<BundleDetails> bundleList) {
    // Copy the input so later caller-side mutation cannot affect this object.
    this.bundleList = (bundleList == null)
            ? null
            : new java.util.ArrayList<BundleDetails>(bundleList);
}
|
python
|
def geolocation_buses(network, session):
    """
    If geopandas is installed:
    Use Geometries of buses x/y(lon/lat) and Polygons
    of Countries from RenpassGisParameterRegion
    in order to locate the buses

    Else:
    Use coordinates of buses to locate foreign buses, which is less accurate.

    Parameters
    ----------
    network_etrago: : class: `etrago.tools.io.NetworkScenario`
        eTraGo network object compiled by: meth: `etrago.appl.etrago`
    session: : sqlalchemy: `sqlalchemy.orm.session.Session < orm/session_basics.html >`
        SQLAlchemy session to the OEDB

    Returns
    -------
    The same network object with a `country_code` column on buses and a
    `country` column on lines/links.
    """
    if geopandas:
        # Start db connection
        # get renpassG!S scenario data
        RenpassGISRegion = RenpassGisParameterRegion

        # Define regions (country codes to locate buses in)
        region_id = ['DE', 'DK', 'FR', 'BE', 'LU', 'AT',
                     'NO', 'PL', 'CH', 'CZ', 'SE', 'NL']

        query = session.query(RenpassGISRegion.gid,
                              RenpassGISRegion.u_region_id,
                              RenpassGISRegion.stat_level,
                              RenpassGISRegion.geom,
                              RenpassGISRegion.geom_point)

        # get regions by query and filter; convert WKB geometries to shapely
        Regions = [(gid, u_region_id, stat_level, geoalchemy2.shape.to_shape(
                    geom), geoalchemy2.shape.to_shape(geom_point))
                   for gid, u_region_id, stat_level,
                   geom, geom_point in query.filter(RenpassGISRegion.u_region_id.
                                                    in_(region_id)).all()]

        # WGS84 lon/lat coordinate reference system
        crs = {'init': 'epsg:4326'}

        # transform lon lat to shapely Points and create GeoDataFrame
        points = [Point(xy) for xy in zip(network.buses.x, network.buses.y)]
        bus = gpd.GeoDataFrame(network.buses, crs=crs, geometry=points)

        # Transform Countries Polygons as Regions
        region = pd.DataFrame(
            Regions, columns=['id', 'country', 'stat_level', 'Polygon',
                              'Point'])
        # NOTE(review): `re` shadows the stdlib `re` module name if it is
        # imported at module level — confirm no later use of the re module.
        re = gpd.GeoDataFrame(region, crs=crs, geometry=region['Polygon'])

        # join regions and buses by geometry which intersects
        busC = gpd.sjoin(bus, re, how='inner', op='intersects')

        # Drop non used columns
        busC = busC.drop(['index_right', 'Point', 'id', 'Polygon',
                          'stat_level', 'geometry'], axis=1)

        # add busC to eTraGo.buses; buses without a region match become NaN
        network.buses['country_code'] = busC['country']
        # NOTE(review): chained assignment below can trigger pandas'
        # SettingWithCopyWarning; unmatched buses default to 'DE'.
        network.buses.country_code[network.buses.country_code.isnull()] = 'DE'

        # close session
        session.close()
    else:
        # Fallback: coordinate-box heuristic (less accurate).
        buses_by_country(network)

    # Lines with a non-German bus on either end are cross-border lines.
    transborder_lines_0 = network.lines[network.lines['bus0'].isin(
        network.buses.index[network.buses['country_code'] != 'DE'])].index
    transborder_lines_1 = network.lines[network.lines['bus1'].isin(
        network.buses.index[network.buses['country_code']!= 'DE'])].index

    # set country tag for lines from the foreign endpoint's country code
    network.lines.loc[transborder_lines_0, 'country'] = \
        network.buses.loc[network.lines.loc[transborder_lines_0, 'bus0'].\
            values,'country_code'].values

    network.lines.loc[transborder_lines_1, 'country'] = \
        network.buses.loc[network.lines.loc[transborder_lines_1, 'bus1'].\
            values,'country_code'].values
    network.lines['country'].fillna('DE', inplace=True)

    # Lines with BOTH endpoints abroad get a two-letter country pair tag.
    doubles = list(set(transborder_lines_0.intersection(transborder_lines_1)))
    for line in doubles:
        c_bus0 = network.buses.loc[network.lines.loc[line, 'bus0'],
                                   'country_code']
        c_bus1 = network.buses.loc[network.lines.loc[line, 'bus1'],
                                   'country_code']
        network.lines.loc[line, 'country'] = '{}{}'.format(c_bus0, c_bus1)

    # Same tagging procedure for links.
    transborder_links_0 = network.links[network.links['bus0'].isin(
        network.buses.index[network.buses['country_code']!= 'DE'])].index
    transborder_links_1 = network.links[network.links['bus1'].isin(
        network.buses.index[network.buses['country_code'] != 'DE'])].index

    # set country tag for links
    network.links.loc[transborder_links_0, 'country'] = \
        network.buses.loc[network.links.loc[transborder_links_0, 'bus0'].\
            values, 'country_code'].values

    network.links.loc[transborder_links_1, 'country'] = \
        network.buses.loc[network.links.loc[transborder_links_1, 'bus1'].\
            values, 'country_code'].values
    network.links['country'].fillna('DE', inplace=True)

    doubles = list(set(transborder_links_0.intersection(transborder_links_1)))
    for link in doubles:
        c_bus0 = network.buses.loc[
            network.links.loc[link, 'bus0'], 'country_code']
        c_bus1 = network.buses.loc[
            network.links.loc[link, 'bus1'], 'country_code']
        network.links.loc[link, 'country'] = '{}{}'.format(c_bus0, c_bus1)

    return network
|
python
|
def serve(
    host,
    port,
    app,
    request_handler,
    error_handler,
    before_start=None,
    after_start=None,
    before_stop=None,
    after_stop=None,
    debug=False,
    request_timeout=60,
    response_timeout=60,
    keep_alive_timeout=5,
    ssl=None,
    sock=None,
    request_max_size=None,
    request_buffer_queue_size=100,
    reuse_port=False,
    loop=None,
    protocol=HttpProtocol,
    backlog=100,
    register_sys_signals=True,
    run_multiple=False,
    run_async=False,
    connections=None,
    signal=Signal(),
    request_class=None,
    access_log=True,
    keep_alive=True,
    is_request_stream=False,
    router=None,
    websocket_max_size=None,
    websocket_max_queue=None,
    websocket_read_limit=2 ** 16,
    websocket_write_limit=2 ** 16,
    state=None,
    graceful_shutdown_timeout=15.0,
    asyncio_server_kwargs=None,
):
    """Start asynchronous HTTP Server on an individual process.

    :param host: Address to host on
    :param port: Port to host on
    :param request_handler: Sanic request handler with middleware
    :param error_handler: Sanic error handler with middleware
    :param before_start: function to be executed before the server starts
                         listening. Takes arguments `app` instance and `loop`
    :param after_start: function to be executed after the server starts
                        listening. Takes arguments `app` instance and `loop`
    :param before_stop: function to be executed when a stop signal is
                        received before it is respected. Takes arguments
                        `app` instance and `loop`
    :param after_stop: function to be executed when a stop signal is
                       received after it is respected. Takes arguments
                       `app` instance and `loop`
    :param debug: enables debug output (slows server)
    :param request_timeout: time in seconds
    :param response_timeout: time in seconds
    :param keep_alive_timeout: time in seconds
    :param ssl: SSLContext
    :param sock: Socket for the server to accept connections from
    :param request_max_size: size in bytes, `None` for no limit
    :param reuse_port: `True` for multiple workers
    :param loop: asyncio compatible event loop
    :param protocol: subclass of asyncio protocol class
    :param request_class: Request class to use
    :param access_log: disable/enable access log
    :param websocket_max_size: enforces the maximum size for
                               incoming messages in bytes.
    :param websocket_max_queue: sets the maximum length of the queue
                                that holds incoming messages.
    :param websocket_read_limit: sets the high-water limit of the buffer for
                                 incoming bytes, the low-water limit is half
                                 the high-water limit.
    :param websocket_write_limit: sets the high-water limit of the buffer for
                                  outgoing bytes, the low-water limit is a
                                  quarter of the high-water limit.
    :param is_request_stream: disable/enable Request.stream
    :param request_buffer_queue_size: streaming request buffer queue size
    :param router: Router object
    :param graceful_shutdown_timeout: How long take to Force close non-idle
                                      connection
    :param asyncio_server_kwargs: key-value args for asyncio/uvloop
                                  create_server method
    :return: Nothing
    """
    if not run_async:
        # create new event_loop after fork
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

    if debug:
        loop.set_debug(debug)

    connections = connections if connections is not None else set()
    # Protocol factory: one protocol instance is created per accepted
    # connection with this fixed configuration.
    server = partial(
        protocol,
        loop=loop,
        connections=connections,
        signal=signal,
        app=app,
        request_handler=request_handler,
        error_handler=error_handler,
        request_timeout=request_timeout,
        response_timeout=response_timeout,
        keep_alive_timeout=keep_alive_timeout,
        request_max_size=request_max_size,
        request_class=request_class,
        access_log=access_log,
        keep_alive=keep_alive,
        is_request_stream=is_request_stream,
        router=router,
        websocket_max_size=websocket_max_size,
        websocket_max_queue=websocket_max_queue,
        websocket_read_limit=websocket_read_limit,
        websocket_write_limit=websocket_write_limit,
        state=state,
        debug=debug,
    )

    asyncio_server_kwargs = (
        asyncio_server_kwargs if asyncio_server_kwargs else {}
    )
    server_coroutine = loop.create_server(
        server,
        host,
        port,
        ssl=ssl,
        reuse_port=reuse_port,
        sock=sock,
        backlog=backlog,
        **asyncio_server_kwargs
    )

    # In async mode the caller is responsible for scheduling/awaiting the
    # coroutine and running the loop itself.
    if run_async:
        return server_coroutine

    trigger_events(before_start, loop)

    try:
        http_server = loop.run_until_complete(server_coroutine)
    except BaseException:
        # NOTE(review): startup errors are logged and swallowed; the caller
        # receives None rather than an exception.
        logger.exception("Unable to start server")
        return

    trigger_events(after_start, loop)

    # Ignore SIGINT when run_multiple
    if run_multiple:
        signal_func(SIGINT, SIG_IGN)

    # Register signals for graceful termination
    if register_sys_signals:
        # NOTE(review): "_singals" is a typo for "_signals" (local name only,
        # no behavioral effect).
        _singals = (SIGTERM,) if run_multiple else (SIGINT, SIGTERM)
        for _signal in _singals:
            try:
                loop.add_signal_handler(_signal, loop.stop)
            except NotImplementedError:
                logger.warning(
                    "Sanic tried to use loop.add_signal_handler "
                    "but it is not implemented on this platform."
                )
    pid = os.getpid()
    try:
        logger.info("Starting worker [%s]", pid)
        loop.run_forever()
    finally:
        logger.info("Stopping worker [%s]", pid)

        # Run the on_stop function if provided
        trigger_events(before_stop, loop)

        # Wait for event loop to finish and all connections to drain
        http_server.close()
        loop.run_until_complete(http_server.wait_closed())

        # Complete all tasks on the loop
        signal.stopped = True
        for connection in connections:
            connection.close_if_idle()

        # Gracefully shutdown timeout.
        # We should provide graceful_shutdown_timeout,
        # instead of letting connection hangs forever.
        # Let's roughly calculate time.
        start_shutdown = 0
        while connections and (start_shutdown < graceful_shutdown_timeout):
            loop.run_until_complete(asyncio.sleep(0.1))
            start_shutdown = start_shutdown + 0.1

        # Force close non-idle connection after waiting for
        # graceful_shutdown_timeout
        coros = []
        for conn in connections:
            if hasattr(conn, "websocket") and conn.websocket:
                # Websocket connections get a proper close handshake.
                coros.append(conn.websocket.close_connection())
            else:
                conn.close()

        _shutdown = asyncio.gather(*coros, loop=loop)
        loop.run_until_complete(_shutdown)

        trigger_events(after_stop, loop)

        loop.close()
|
java
|
/**
 * Normalizes a servlet-style URL mapping pattern.
 *
 * <p>Null, empty, or whitespace-only patterns map to the root pattern
 * {@code "/"}. Patterns that are neither absolute (starting with
 * {@code '/'}) nor suffix wildcards (starting with {@code '*'}) get a
 * leading slash prepended; everything else is returned unchanged.
 *
 * @param pattern the raw pattern, may be {@code null}
 * @return the normalized pattern, never {@code null}
 */
public static String normalizePattern(final String pattern) {
    // Null/blank input normalizes to the root mapping.
    if (pattern == null || pattern.trim().isEmpty()) {
        return "/";
    }
    // The blank check above already guarantees a non-empty string, so the
    // previous redundant length() > 0 test is dropped.
    if (!pattern.startsWith("/") && !pattern.startsWith("*")) {
        return "/" + pattern;
    }
    return pattern;
}
|
python
|
def postproc_mask(stats_result):
    """Post-process angular outputs from a StatsCollector.

    Keeps only entries with finite means, converts means from radians to
    degrees, and turns the scatter from a variance-of-samples into a
    standard deviation of the mean, in degrees.

    Returns (finite_mask, mean_deg, scatter_deg).
    """
    n, mean, scat = stats_result
    keep = np.isfinite(mean)

    # Restrict every array to the finite-mean entries.
    n = n[keep]
    # rad => deg
    mean = np.degrees(mean[keep])
    # variance-of-samples => variance-of-mean => stddev, then rad => deg
    scat = np.degrees(np.sqrt(scat[keep] / n))

    return keep, mean, scat
|
python
|
def enumerate_tokens(sid=None, session_id=None, privs=None):
    '''
    Enumerate tokens from any existing processes that can be accessed.
    Optionally filter by sid.

    sid: if given, only yield tokens whose user SID (string form) matches.
    session_id: if given, only yield tokens from this terminal-services
        session id.
    privs: if given, an iterable of privilege names; only yield tokens
        that hold every named privilege (presence, enabled or not).

    Yields duplicated token handles (via dup_token) for each matching
    process token.
    '''
    for p in psutil.process_iter():
        # pid 0 (the Windows System Idle Process) cannot be opened.
        if p.pid == 0:
            continue
        try:
            ph = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, 0, p.pid)
        except win32api.error as exc:
            # winerror 5 == ERROR_ACCESS_DENIED: skip processes we lack
            # rights to open; any other error is unexpected.
            if exc.winerror == 5:
                log.debug("Unable to OpenProcess pid=%d name=%s", p.pid, p.name())
                continue
            raise exc
        try:
            # Rights needed to duplicate, query, impersonate, and assign
            # the token later on.
            access = (
                win32security.TOKEN_DUPLICATE |
                win32security.TOKEN_QUERY |
                win32security.TOKEN_IMPERSONATE |
                win32security.TOKEN_ASSIGN_PRIMARY
            )
            th = win32security.OpenProcessToken(ph, access)
        except Exception as exc:
            # NOTE(review): format string reads "user%s" — probably meant
            # "user=%s"; the message renders, just without the '='.
            log.debug("OpenProcessToken failed pid=%d name=%s user%s", p.pid, p.name(), p.username())
            continue
        try:
            process_sid = win32security.GetTokenInformation(th, win32security.TokenUser)[0]
        except Exception as exc:
            log.exception("GetTokenInformation pid=%d name=%s user%s", p.pid, p.name(), p.username())
            continue

        proc_sid = win32security.ConvertSidToStringSid(process_sid)
        if sid and sid != proc_sid:
            log.debug("Token for pid does not match user sid: %s", sid)
            continue

        if session_id and win32security.GetTokenInformation(th, win32security.TokenSessionId) != session_id:
            continue

        def has_priv(tok, priv):
            # True if the token lists the named privilege at all; the
            # enabled/disabled flags are deliberately ignored.
            luid = win32security.LookupPrivilegeValue(None, priv)
            for priv_luid, flags in win32security.GetTokenInformation(tok, win32security.TokenPrivileges):
                if priv_luid == luid:
                    return True
            return False

        if privs:
            has_all = True
            for name in privs:
                if not has_priv(th, name):
                    has_all = False
            if not has_all:
                continue
        yield dup_token(th)
|
python
|
def write_message(
    self, message: Union[str, bytes], binary: bool = False
) -> "Future[None]":
    """Sends the given message to the client of this Web Socket.

    :param message: payload to send; ``str`` is UTF-8 encoded first.
    :param binary: send as a binary frame (opcode 0x2) instead of a text
        frame (opcode 0x1).
    :returns: a Future that resolves once the frame has been written.
    :raises WebSocketClosedError: if the underlying stream is closed,
        either immediately or via the returned Future.
    """
    # WebSocket opcodes: 0x2 = binary frame, 0x1 = text frame.
    if binary:
        opcode = 0x2
    else:
        opcode = 0x1
    message = tornado.escape.utf8(message)
    assert isinstance(message, bytes)
    self._message_bytes_out += len(message)
    flags = 0
    if self._compressor:
        message = self._compressor.compress(message)
        # RSV1 flags the payload as compressed for the peer.
        flags |= self.RSV1
    # For historical reasons, write methods in Tornado operate in a semi-synchronous
    # mode in which awaiting the Future they return is optional (But errors can
    # still be raised). This requires us to go through an awkward dance here
    # to transform the errors that may be returned while presenting the same
    # semi-synchronous interface.
    try:
        fut = self._write_frame(True, opcode, message, flags=flags)
    except StreamClosedError:
        raise WebSocketClosedError()

    async def wrapper() -> None:
        # Translate late stream-closed errors for callers who await.
        try:
            await fut
        except StreamClosedError:
            raise WebSocketClosedError()

    return asyncio.ensure_future(wrapper())
|
python
|
def get_deprecated_msg(self, wrapped, instance):
    """
    Build the deprecation warning message shown to the user.

    :param wrapped: Wrapped class or function.
    :param instance: The object to which the wrapped function was bound when it was called.
    :return: The warning message.
    """
    # Choose the base sentence from what kind of callable was wrapped and
    # how it was bound at call time.
    if instance is None:
        if inspect.isclass(wrapped):
            base = "Call to deprecated class {name}."
        else:
            base = "Call to deprecated function (or staticmethod) {name}."
    elif inspect.isclass(instance):
        base = "Call to deprecated class method {name}."
    else:
        base = "Call to deprecated method {name}."

    # Optional suffixes for a reason and a deprecation version.
    parts = [base]
    if self.reason:
        parts.append(" ({reason})")
    if self.version:
        parts.append(" -- Deprecated since version {version}.")

    return "".join(parts).format(
        name=wrapped.__name__,
        reason=self.reason or "",
        version=self.version or "",
    )
|
java
|
/**
 * Returns the reason associated with this budget order error.
 *
 * @return the {@code BudgetOrderErrorReason} enum value, may be {@code null} if unset
 */
public com.google.api.ads.adwords.axis.v201809.billing.BudgetOrderErrorReason getReason() {
    return reason;
}
|
python
|
async def get_prefix(self, message):
    """|coro|

    Retrieves the prefix the bot is listening to
    with the message as a context.

    Parameters
    -----------
    message: :class:`discord.Message`
        The message context to get the prefix of.

    Returns
    --------
    Union[List[:class:`str`], :class:`str`]
        A list of prefixes or a single prefix that the bot is
        listening for.

    Raises
    -------
    TypeError
        If ``command_prefix`` is not a string, an iterable of strings,
        or a callable returning either of those.
    ValueError
        If an iterable ``command_prefix`` resolves to an empty list.
    """
    # Local import keeps the fix self-contained: ``collections.Iterable``
    # was removed in Python 3.10; the ABC lives in ``collections.abc``.
    import collections.abc

    prefix = ret = self.command_prefix
    if callable(prefix):
        # The configured prefix may be a plain function or a coroutine.
        ret = await discord.utils.maybe_coroutine(prefix, self, message)

    if not isinstance(ret, str):
        try:
            ret = list(ret)
        except TypeError:
            # It's possible that a generator raised this exception. Don't
            # replace it with our own error if that's the case.
            if isinstance(ret, collections.abc.Iterable):
                raise

            raise TypeError("command_prefix must be plain string, iterable of strings, or callable "
                            "returning either of these, not {}".format(ret.__class__.__name__))

        if not ret:
            raise ValueError("Iterable command_prefix must contain at least one prefix")

    return ret
|
java
|
/**
 * Adds the given service ids to this trace summary, creating the backing
 * list on first use.
 *
 * @param serviceIds the service ids to append
 * @return this object, to allow method chaining
 */
public TraceSummary withServiceIds(ServiceId... serviceIds) {
    // Lazily create the list, presized for the incoming elements.
    if (this.serviceIds == null) {
        setServiceIds(new java.util.ArrayList<ServiceId>(serviceIds.length));
    }
    java.util.Collections.addAll(this.serviceIds, serviceIds);
    return this;
}
|
java
|
/**
 * Merges transport-level properties into the given message header.
 *
 * <p>If the header carries a transport id, looks up the matching
 * MessageTransport record, copies its properties (plus an optional initial
 * message status from this record) into the header's transport map, then
 * resolves the message version (explicit id, explicit version string, or a
 * best guess from the transport-info records) and lets the version and
 * transport-info records add their own properties.
 *
 * @param trxMessageHeader the header to enrich; returned (possibly replaced)
 * @param strVersion optional version string; when null it may be read from
 *        the header properties or guessed from the transport-info records
 * @return the (possibly replaced) message header
 */
public TrxMessageHeader addTransportProperties(TrxMessageHeader trxMessageHeader, String strVersion)
{
    if (trxMessageHeader.get(MessageTransport.TRANSPORT_ID_PARAM) != null)
    {
        RecordOwner recordOwner = this.findRecordOwner();
        // Lazily create the cached MessageTransport record; it is detached
        // from the record owner and freed together with this object.
        if (m_recMessageTransport == null)
        {
            m_recMessageTransport = new MessageTransport(recordOwner);
            if (recordOwner != null)
                recordOwner.removeRecord(m_recMessageTransport);
            this.addListener(new FreeOnFreeHandler(m_recMessageTransport));
        }
        // Same lazy setup for the cached MessageTransportInfo record.
        if (m_recMessageTransportInfo == null)
        {
            m_recMessageTransportInfo = new MessageTransportInfo(recordOwner);
            if (recordOwner != null)
                recordOwner.removeRecord(m_recMessageTransportInfo);
            this.addListener(new FreeOnFreeHandler(m_recMessageTransportInfo));
        }
        SubFileFilter subFileFilter = null;
        try {
            // Position the transport record at the header's transport id.
            if (m_recMessageTransport.setHandle(trxMessageHeader.get(MessageTransport.TRANSPORT_ID_PARAM), DBConstants.BOOKMARK_HANDLE) != null)
            {
                Map<String,Object> propMessageTransport = ((PropertiesField)m_recMessageTransport.getField(MessageTransport.PROPERTIES)).loadProperties();
                // Propagate this record's initial message status (if any)
                // into the transport properties.
                String strInitialMessageStatusID = null;
                if (!this.getField(MessageProcessInfo.INITIAL_MESSAGE_STATUS_ID).isNull())
                    strInitialMessageStatusID = this.getField(MessageProcessInfo.INITIAL_MESSAGE_STATUS_ID).toString();
                if (strInitialMessageStatusID != null)
                {
                    if (propMessageTransport == null)
                        propMessageTransport = new Hashtable<String,Object>();
                    propMessageTransport.put(MessageTransport.INITIAL_MESSAGE_DATA_STATUS, strInitialMessageStatusID);
                }
                // Merge the transport properties into the header's map
                // (existing header entries are overwritten by putAll).
                if (propMessageTransport != null)
                {
                    Map<String,Object> propHeaderTransport = trxMessageHeader.getMessageTransportMap();
                    if (propHeaderTransport != null)
                        propHeaderTransport.putAll(propMessageTransport);
                    else
                        propHeaderTransport = propMessageTransport;
                    trxMessageHeader.setMessageTransportMap(propHeaderTransport);
                }
                m_recMessageTransportInfo.setKeyArea(MessageTransportInfo.MESSAGE_PROCESS_INFO_ID_KEY);
                // Resolve the message version: prefer an explicit numeric id
                // from the header properties, else fall back to a version
                // string from the header.
                int iMessageVersionID = 0;
                if (trxMessageHeader.getProperties() != null)
                    if (strVersion == null)
                    {
                        if (trxMessageHeader.getProperties().get(MessageVersion.VERSION_ID) != null)
                        {
                            try {
                                iMessageVersionID = Integer.parseInt(trxMessageHeader.getProperties().get(MessageVersion.VERSION_ID).toString());
                            } catch (NumberFormatException ex) {
                                // Ignore
                            }
                        }
                        else
                            strVersion = (String)trxMessageHeader.getProperties().get(MessageVersion.VERSION);
                    }
                MessageVersion recMessageVersion = this.getMessageControl().getMessageVersion();
                if (iMessageVersionID != 0)
                {
                    // Verify the explicit version id actually exists.
                    recMessageVersion.getField(MessageVersion.ID).setValue(iMessageVersionID);
                    recMessageVersion.setKeyArea(MessageVersion.ID_KEY);
                    if (!recMessageVersion.seek(DBConstants.EQUALS))
                        iMessageVersionID = 0; // Never
                }
                if (iMessageVersionID == 0)
                {
                    // Look the version up by string; with no string either,
                    // scan the transport-info records for a best guess.
                    recMessageVersion = this.getMessageControl().getMessageVersion(strVersion);
                    iMessageVersionID = (int)recMessageVersion.getField(MessageVersion.ID).getValue();
                    if (strVersion == null)
                    {
                        int iMessageVersionIDDefault = 0;
                        int iMessageVersionIDBestGuess = iMessageVersionID;
                        boolean bDefaultExists = false;
                        m_recMessageTransportInfo.addListener(subFileFilter = new SubFileFilter(this.getField(MessageProcessInfo.ID), MessageTransportInfo.MESSAGE_PROCESS_INFO_ID, m_recMessageTransport.getField(MessageTransport.ID), MessageTransportInfo.MESSAGE_TRANSPORT_ID, null, null));
                        while (m_recMessageTransportInfo.hasNext())
                        {
                            m_recMessageTransportInfo.next();
                            if (m_recMessageTransportInfo.getField(MessageTransportInfo.ACTIVE).getState())
                                iMessageVersionIDBestGuess = (int)m_recMessageTransportInfo.getField(MessageTransportInfo.MESSAGE_VERSION_ID).getValue();
                            else if (m_recMessageTransportInfo.getField(MessageTransportInfo.MESSAGE_VERSION_ID).getValue() == 0)
                                iMessageVersionIDBestGuess = 0; // If not active with no version, best guess is no version
                            if (m_recMessageTransportInfo.getField(MessageTransportInfo.DEFAULT_TRANSPORT).getState()) // Default is always the best guess
                                iMessageVersionIDDefault = (int)m_recMessageTransportInfo.getField(MessageTransportInfo.MESSAGE_VERSION_ID).getValue();
                            if (m_recMessageTransportInfo.getField(MessageTransportInfo.MESSAGE_VERSION_ID).getValue() == iMessageVersionID) // Default is always the best guess
                                bDefaultExists = true;
                        }
                        subFileFilter.free();
                        subFileFilter = null;
                        if (iMessageVersionIDDefault != 0)
                            iMessageVersionID = iMessageVersionIDDefault; // If there is a default, always use it
                        else if (!bDefaultExists)
                            iMessageVersionID = iMessageVersionIDBestGuess; // else, If the default doesn't exist, use the best guess
                        recMessageVersion.getField(MessageVersion.ID).setValue(iMessageVersionID);
                        recMessageVersion.setKeyArea(MessageVersion.ID_KEY);
                        if (!recMessageVersion.seek(DBConstants.EQUALS))
                            iMessageVersionID = 0; // Never
                    }
                }
                // Seek the transport-info record for (process info, transport,
                // version); on a hit, both version and transport-info add
                // their properties to the header.
                m_recMessageTransportInfo.getField(MessageTransportInfo.MESSAGE_PROCESS_INFO_ID).moveFieldToThis(this.getField(MessageProcessInfo.ID));
                m_recMessageTransportInfo.getField(MessageTransportInfo.MESSAGE_TRANSPORT_ID).moveFieldToThis(m_recMessageTransport.getField(MessageTransport.ID));
                m_recMessageTransportInfo.getField(MessageTransportInfo.MESSAGE_VERSION_ID).setValue(iMessageVersionID);
                if (m_recMessageTransportInfo.seek(DBConstants.EQUALS))
                {
                    trxMessageHeader = recMessageVersion.addMessageProperties(trxMessageHeader, this.getMessageControl());
                    trxMessageHeader = m_recMessageTransportInfo.addMessageProperties(trxMessageHeader);
                }
            }
        } catch (DBException ex) {
            // NOTE(review): errors are only printed; the header is returned
            // without the transport properties in that case.
            ex.printStackTrace();
        } finally {
            // Ensure the filter listener is always released.
            if (subFileFilter != null)
                subFileFilter.free();
        }
    }
    return trxMessageHeader;
}
|
java
|
/**
 * Masks the contents of every {@code <password>...</password>} element in
 * the given string with {@code ****} so credentials are not exposed.
 *
 * @param str the XML-ish text to sanitize, must not be {@code null}
 * @return the text with all password element bodies replaced by the mask
 */
private String stripPassword(String str)
{
    // Fast path: nothing to mask.
    if (str.indexOf("<password>") == -1)
        return str;
    // Replace each password element wholesale with a masked element.
    // The previous split-based implementation silently DROPPED a trailing
    // password element, because Pattern.split discards trailing empty
    // strings, so "x<password>p</password>" became just "x".
    Pattern pattern = Pattern.compile("<password>[^<]*</password>");
    return pattern.matcher(str).replaceAll("<password>****</password>");
}
|
python
|
def save_json(obj, filename, **kwargs):
    """
    Serialize an object to JSON and write it to a file (UTF-8).

    Args:
        obj: The object to save. Must be JSON-serializable.
        filename: Path to the output file.
        **kwargs: Extra keyword arguments forwarded to the JSON encoder
            (same options as `json.dump`).
    """
    with open(filename, 'w', encoding='utf-8') as handle:
        handle.write(json.dumps(obj, **kwargs))
|
java
|
/**
 * Appends one column (given as an array of row values) to the matrix file
 * in sparse text form: 1-based index/value pairs for each non-zero entry.
 *
 * @param row the values for this column
 * @return the new column count after this addition
 * @throws IllegalStateException if the builder has already been finished
 */
public synchronized int addColumn(double[] row) {
    if (isFinished)
        throw new IllegalStateException(
            "Cannot add columns to a MatrixBuilder that is finished");

    // Grow the recorded dimension if this array is the longest seen so far.
    if (row.length > numCols)
        numCols = row.length;

    // Emit "index value " pairs (1-based indices) for the non-zero entries.
    StringBuilder line = new StringBuilder();
    int nonZero = 0;
    for (int i = 0; i < row.length; ++i) {
        double value = row[i];
        if (value != 0d) {
            line.append(i + 1).append(" ").append(value).append(" ");
            nonZero++;
        }
    }
    writer.println(line.toString());

    // Track the matrix-wide non-zero count for the final header.
    nonZeroValues += nonZero;
    return ++curRow;
}
|
python
|
async def update(self):
    """
    Update sirbot

    Trigger the update method of the plugins. This is needed if the plugins
    need to perform update migration (i.e database)
    """
    logger.info('Updating Sir Bot-a-lot')

    for name, plugin in self._plugins.items():
        # Plugins are stored as {'plugin': <instance>, ...}; only plugins
        # exposing a callable `update` attribute participate.
        plugin_update = getattr(plugin['plugin'], 'update', None)
        if callable(plugin_update):
            logger.info('Updating %s', name)
            # Each plugin receives its own config section and the full
            # plugin registry.
            await plugin_update(self.config.get(name, {}), self._plugins)
            logger.info('%s updated', name)

    # NOTE(review): if _session is an aiohttp.ClientSession, close() is a
    # coroutine and would need to be awaited — confirm the session type.
    self._session.close()
    logger.info('Sir Bot-a-lot updated')
|
python
|
def plot_f_rate(self, ax, X, i, xlim, x, y, binsize=1, yscale='linear',
                plottype='fill_between', show_label=False, rasterized=False):
    """
    Plot network firing rate plot in subplot object.

    Parameters
    ----------
    ax : `matplotlib.axes.AxesSubplot` object.
    X : str
        Population name.
    i : int
        Population index in class attribute `X`.
    xlim : list of floats
        Spike time interval, e.g., [0., 1000.].
    x : dict
        Key-value entries are population name and neuron spike times.
    y : dict
        Key-value entries are population name and neuron gid number
        (not used by this method; kept for signature compatibility).
    binsize : float
        Histogram bin width, in the same unit as the spike times
        (presumably ms — confirm with callers).
    yscale : 'str'
        Linear, log, or symlog y-axes in rate plot.
    plottype : str
        plot type string in `['fill_between', 'bar']`
    show_label : bool
        whether or not to show labels
    rasterized : bool
        whether to rasterize the plotted artists.

    Returns
    -------
    None

    Raises
    ------
    Exception
        If `plottype` is not 'fill_between' or 'bar'.
    """
    # Histogram spike times over [xlim[0], xlim[1]] in binsize-wide bins.
    bins = np.arange(xlim[0], xlim[1]+binsize, binsize)
    (hist, bins) = np.histogram(x[X], bins=bins)

    # hist * 1000. / N_X converts per-bin spike counts to a per-neuron
    # rate; assumes spike times are in ms — TODO confirm unit.
    if plottype == 'fill_between':
        ax.fill_between(bins[:-1], hist * 1000. / self.N_X[i],
                        color=self.colors[i], lw=0.5, label=X, rasterized=rasterized,
                        clip_on=False)
        # Black outline on top of the filled area.
        ax.plot(bins[:-1], hist * 1000. / self.N_X[i],
                color='k', lw=0.5, label=X, rasterized=rasterized,
                clip_on=False)
    elif plottype == 'bar':
        ax.bar(bins[:-1], hist * 1000. / self.N_X[i],
               color=self.colors[i], label=X, rasterized=rasterized,
               linewidth=0.25, width=0.9, clip_on=False)
    else:
        mssg = "plottype={} not in ['fill_between', 'bar']".format(plottype)
        raise Exception(mssg)

    remove_axis_junk(ax)
    ax.axis(ax.axis('tight'))
    ax.set_yscale(yscale)
    ax.set_xlim(xlim[0], xlim[1])

    if show_label:
        # Population label just inside the left edge, above the data.
        ax.text(xlim[0] + .05*(xlim[1]-xlim[0]), ax.axis()[3]*1.5, X,
                va='center', ha='left')
|
java
|
/**
 * Completes the resource identified by {@code id} for the given media type
 * by delegating to {@link #completeWithHttpInfo} and unwrapping the
 * response body.
 *
 * @param mediatype the media type of the resource
 * @param id the id of the resource to complete
 * @param completeData the request payload
 * @return the API success response body
 * @throws ApiException if the underlying HTTP call fails
 */
public ApiSuccessResponse complete(String mediatype, String id, CompleteData completeData) throws ApiException {
    ApiResponse<ApiSuccessResponse> resp = completeWithHttpInfo(mediatype, id, completeData);
    return resp.getData();
}
|
python
|
def create_db(file_pth):
    """ Create an empty SQLite database for library spectra.

    Any pre-existing tables of the same names are dropped first.

    Example:
        >>> from msp2db.db import create_db
        >>> db_pth = 'library.db'
        >>> create_db(file_pth=db_pth)

    Args:
        file_pth (str): File path for SQLite database
    """
    conn = sqlite3.connect(file_pth)
    c = conn.cursor()

    # Source of the spectra (e.g. which library/file it was parsed from).
    c.execute('DROP TABLE IF EXISTS library_spectra_source')
    c.execute('''CREATE TABLE library_spectra_source (
                      id integer PRIMARY KEY,
                      name text NOT NULL,
                      created_at date,
                      parsing_software text
                )'''
              )

    # Compound reference data, keyed by InChIKey.
    c.execute('DROP TABLE IF EXISTS metab_compound')
    c.execute('''CREATE TABLE metab_compound (
                      inchikey_id text PRIMARY KEY,
                      name text,
                      pubchem_id text,
                      chemspider_id text,
                      other_names text,
                      exact_mass real,
                      molecular_formula text,
                      molecular_weight real,
                      compound_class text,
                      smiles text,
                      created_at date,
                      updated_at date
                )''')

    # Per-spectrum acquisition metadata; links a source and a compound.
    c.execute('DROP TABLE IF EXISTS library_spectra_meta')
    c.execute('''CREATE TABLE library_spectra_meta (
                      id integer PRIMARY KEY,
                      name text,
                      collision_energy text,
                      ms_level real,
                      accession text NOT NULL,
                      resolution text,
                      polarity integer,
                      fragmentation_type text,
                      precursor_mz real,
                      precursor_type text,
                      instrument_type text,
                      instrument text,
                      copyright text,
                      column text,
                      mass_accuracy real,
                      mass_error real,
                      origin text,
                      splash text,
                      retention_index real,
                      retention_time real,
                      library_spectra_source_id integer NOT NULL,
                      inchikey_id text NOT NULL,
                      FOREIGN KEY(library_spectra_source_id) REFERENCES library_spectra_source(id),
                      FOREIGN KEY(inchikey_id) REFERENCES metab_compound(inchikey_id)
                )'''
              )

    # Individual m/z-intensity peaks belonging to a spectrum.
    c.execute('DROP TABLE IF EXISTS library_spectra')
    c.execute('''CREATE TABLE library_spectra (
                      id integer PRIMARY KEY,
                      mz real NOT NULL,
                      i real NOT NULL,
                      other text,
                      library_spectra_meta_id integer NOT NULL,
                      FOREIGN KEY (library_spectra_meta_id) REFERENCES library_spectra_meta(id)
                )'''
              )

    # Optional peak annotations (tentative formula assignments).
    c.execute('DROP TABLE IF EXISTS library_spectra_annotation')
    c.execute('''CREATE TABLE library_spectra_annotation (
                      id integer PRIMARY KEY,
                      mz real,
                      tentative_formula text,
                      mass_error real,
                      library_spectra_meta_id integer NOT NULL,
                      FOREIGN KEY (library_spectra_meta_id) REFERENCES library_spectra_meta(id)
                )'''
              )

    # Bug fix: the schema was never committed and the connection never
    # closed. In modern sqlite3 the DDL above opens a transaction, so the
    # tables could be rolled back when the connection was garbage-collected.
    conn.commit()
    conn.close()
|
python
|
def code_deparse(co, out=sys.stdout, version=None, debug_opts=DEFAULT_DEBUG_OPTS,
                 code_objects=None, compile_mode='exec', is_pypy=IS_PYPY, walker=SourceWalker):
    """
    ingests and deparses a given code block 'co'. If version is None,
    we will use the current Python interpreter version.

    Returns the `walker` instance whose generated source has been written to
    `out`, or None when no parse tree could be built.  Raises
    SourceWalkerError on internal grammar errors or on a parse failure.
    """
    assert iscode(co)

    if code_objects is None:
        # Fresh dict per call.  The previous `code_objects={}` default was a
        # shared mutable default argument that leaked cached entries between
        # independent invocations.
        code_objects = {}

    if version is None:
        # NOTE(review): float(sys.version[0:3]) yields 3.1 for Python 3.10+;
        # left unchanged because downstream code compares float versions.
        version = float(sys.version[0:3])

    # store final output stream for case of error
    scanner = get_scanner(version, is_pypy=is_pypy)

    # Disassemble the code object into tokens plus customization info.
    tokens, customize = scanner.ingest(co, code_objects=code_objects,
                                       show_asm=debug_opts['asm'])

    debug_parser = dict(PARSER_DEFAULT_DEBUG)
    if debug_opts.get('grammar', None):
        debug_parser['reduce'] = debug_opts['grammar']
        debug_parser['errorstack'] = 'full'

    # Build Syntax Tree from disassembly.
    linestarts = dict(scanner.opc.findlinestarts(co))
    deparsed = walker(version, out, scanner, showast=debug_opts.get('ast', None),
                      debug_parser=debug_parser, compile_mode=compile_mode,
                      is_pypy=is_pypy, linestarts=linestarts)

    isTopLevel = co.co_name == '<module>'
    deparsed.ast = deparsed.build_ast(tokens, customize, isTopLevel=isTopLevel)

    #### XXX workaround for profiling
    if deparsed.ast is None:
        return None

    assert deparsed.ast == 'stmts', 'Should have parsed grammar start'

    # save memory
    del tokens

    deparsed.mod_globs, nonlocals = find_globals_and_nonlocals(deparsed.ast,
                                                               set(), set(),
                                                               co, version)
    assert not nonlocals

    # convert leading '__doc__ = "..." into doc string
    try:
        if deparsed.ast[0][0] == ASSIGN_DOC_STRING(co.co_consts[0]):
            print_docstring(deparsed, '', co.co_consts[0])
            del deparsed.ast[0]
        if deparsed.ast[-1] == RETURN_NONE:
            deparsed.ast.pop()  # remove last node
            # todo: if empty, add 'pass'
    except:
        # Best-effort cosmetic cleanup; any failure here is non-fatal.
        pass

    deparsed.FUTURE_UNICODE_LITERALS = (
        COMPILER_FLAG_BIT['FUTURE_UNICODE_LITERALS'] & co.co_flags != 0)

    # What we've been waiting for: Generate source from Syntax Tree!
    deparsed.gen_source(deparsed.ast, co.co_name, customize)

    for g in sorted(deparsed.mod_globs):
        deparsed.write('# global %s ## Warning: Unused global\n' % g)

    if deparsed.ast_errors:
        deparsed.write("# NOTE: have internal decompilation grammar errors.\n")
        deparsed.write("# Use -t option to show full context.")
        for err in deparsed.ast_errors:
            deparsed.write(err)
        raise SourceWalkerError("Deparsing hit an internal grammar-rule bug")

    if deparsed.ERROR:
        raise SourceWalkerError("Deparsing stopped due to parse error")
    return deparsed
|
java
|
/**
 * Generates ct.sym symbol entries for every class loaded from the given
 * description file.
 *
 * @param ctDescriptionFile path of the class-description file to load
 * @param ctSymLocation     destination for the generated symbol files
 * @param ctSymKind         JOINED_VERSIONS emits one entry per joint version
 *                          set shared by header, fields and methods;
 *                          SEPARATE emits one entry per version character
 * @throws IOException if loading the description or writing entries fails
 */
@SuppressWarnings("unchecked")
public void createSymbols(String ctDescriptionFile, String ctSymLocation, CtSymKind ctSymKind) throws IOException {
    ClassList classes = load(Paths.get(ctDescriptionFile));
    splitHeaders(classes);
    for (ClassDescription classDescription : classes) {
        for (ClassHeaderDescription header : classDescription.header) {
            switch (ctSymKind) {
                case JOINED_VERSIONS:
                    // Start from the header's versions, then narrow to the
                    // versions also shared by the fields and the methods.
                    Set<String> jointVersions = new HashSet<>();
                    jointVersions.add(header.versions);
                    limitJointVersion(jointVersions, classDescription.fields);
                    limitJointVersion(jointVersions, classDescription.methods);
                    writeClassesForVersions(ctSymLocation, classDescription, header, jointVersions);
                    break;
                case SEPARATE:
                    // Split the version string into one entry per character,
                    // e.g. "789" -> {"7", "8", "9"}.
                    Set<String> versions = new HashSet<>();
                    for (char v : header.versions.toCharArray()) {
                        versions.add("" + v);
                    }
                    writeClassesForVersions(ctSymLocation, classDescription, header, versions);
                    break;
            }
        }
    }
}
|
java
|
/**
 * Appends the given attributes to this result and returns {@code this}
 * so calls can be chained.
 *
 * @param accountAttributes the attributes to append
 * @return this result, for fluent chaining
 */
public DescribeAccountAttributesResult withAccountAttributes(AccountAttribute... accountAttributes) {
    // Lazily create the backing list, presized for the incoming varargs.
    if (this.accountAttributes == null) {
        setAccountAttributes(new com.amazonaws.internal.SdkInternalList<AccountAttribute>(accountAttributes.length));
    }
    java.util.Collections.addAll(this.accountAttributes, accountAttributes);
    return this;
}
|
java
|
/**
 * Resolves the {@link ServerChannel} implementation class that matches the
 * concrete type of the given event loop group.
 *
 * @param eventLoopGroup the group whose class name is used for the lookup
 * @return the matching server socket channel class
 * @throws IllegalArgumentException if no mapping exists for the group type
 *                                  or the mapped class cannot be loaded
 */
@SuppressWarnings("unchecked")
static Class<? extends ServerChannel> getServerSocketChannelClass(final EventLoopGroup eventLoopGroup) {
    Objects.requireNonNull(eventLoopGroup);
    final String groupType = eventLoopGroup.getClass().getName();
    final String channelClassName = SERVER_SOCKET_CHANNEL_CLASSES.get(groupType);
    if (channelClassName == null) {
        throw new IllegalArgumentException("No server socket channel class found for event loop group type: " + groupType);
    }
    try {
        return Class.forName(channelClassName).asSubclass(ServerChannel.class);
    } catch (final ClassNotFoundException e) {
        throw new IllegalArgumentException(e);
    }
}
|
java
|
/**
 * Replaces {@code old} with {@code replacement} inside the current parent
 * container of this resolve source.
 *
 * @param old         the value to be replaced
 * @param replacement the value to substitute
 * @return this source when nothing changed, otherwise a source with the
 *         rewritten parent
 * @throws ConfigException.BugOrBroken when no parent path exists and the
 *         replacement cannot become the new root
 */
ResolveSource replaceWithinCurrentParent(AbstractConfigValue old, AbstractConfigValue replacement) {
    if (ConfigImpl.traceSubstitutionsEnabled())
        // Bug fix: the second identity hash previously logged `old` again
        // instead of `replacement`, making the trace output misleading.
        ConfigImpl.trace("replaceWithinCurrentParent old " + old + "@" + System.identityHashCode(old)
                + " replacement " + replacement + "@" + System.identityHashCode(replacement) + " in " + this);
    if (old == replacement) {
        // Identical object: nothing to rewrite.
        return this;
    } else if (pathFromRoot != null) {
        // Rewrite the immediate parent and rebuild the source around it.
        Container parent = pathFromRoot.head();
        AbstractConfigValue newParent = parent.replaceChild(old, replacement);
        return replaceCurrentParent(parent, (newParent instanceof Container) ? (Container) newParent : null);
    } else {
        if (old == root && replacement instanceof Container) {
            // Replacing the root itself; the replacement becomes the new root.
            return new ResolveSource(rootMustBeObj((Container) replacement));
        } else {
            throw new ConfigException.BugOrBroken("replace in parent not possible " + old + " with " + replacement
                    + " in " + this);
        }
    }
}
|
python
|
async def on_raw_353(self, message):
    """ Response to /NAMES.

    Records channel visibility from the channel-type sigil and merges the
    space-separated ``names`` payload into the channel's user set and
    per-status mode lists.
    """
    target, visibility, channel, names = message.params
    if not self.in_channel(channel):
        return

    # Set channel visibility.
    if visibility == protocol.PUBLIC_CHANNEL_SIGIL:
        self.channels[channel]['public'] = True
    elif visibility in (protocol.PRIVATE_CHANNEL_SIGIL, protocol.SECRET_CHANNEL_SIGIL):
        self.channels[channel]['public'] = False

    # Update channel user list.
    for entry in names.split():
        statuses = []
        # Make entry safe for _parse_user(): strip all leading status
        # prefix characters (e.g. op/voice sigils).
        safe_entry = entry.lstrip(''.join(self._nickname_prefixes.keys()))
        # Parse entry and update database.
        nick, metadata = self._parse_user(safe_entry)
        self._sync_user(nick, metadata)

        # Get prefixes (whatever was stripped above).
        prefixes = set(entry.replace(safe_entry, ''))

        # Check, record and strip status prefixes.
        for prefix, status in self._nickname_prefixes.items():
            # Add to list of statuses by user.
            if prefix in prefixes:
                statuses.append(status)

        # Add user to user list.
        self.channels[channel]['users'].add(nick)

        # And to channel modes..
        for status in statuses:
            if status not in self.channels[channel]['modes']:
                self.channels[channel]['modes'][status] = []
            self.channels[channel]['modes'][status].append(nick)
|
python
|
def _VAR_DECL_value(self, cursor, _type):
    """Handles Variable value initialization.

    Computes the Python-side initial value for a clang VAR_DECL cursor,
    special-casing unexposed types, function pointers and arrays.
    Returns the computed init value (may be a string, a list, ``_type``
    itself for function pointers, or ``None``).
    """
    # always expect list [(k,v)] as init value.from list(cursor.get_children())
    # get the init_value and special cases
    init_value = self._get_var_decl_init_value(cursor.type,
                                               list(cursor.get_children()))
    _ctype = cursor.type.get_canonical()
    if self.is_unexposed_type(_ctype):
        # string are not exposed
        init_value = '%s # UNEXPOSED TYPE. PATCH NEEDED.' % (init_value)
    elif (self.is_pointer_type(_ctype) and
          _ctype.get_pointee().kind == TypeKind.FUNCTIONPROTO):
        # Function pointers argument are handled at type creation time
        # but we need to put a CFUNCTYPE as a value of the name variable
        init_value = _type
    elif self.is_array_type(_ctype):
        # an integer litteral will be the size
        # an string litteral will be the value
        # any list member will be children of a init_list_expr
        # FIXME Move that code into typedesc
        def countof(k, l):
            # number of entries in l whose first element equals k
            return [item[0] for item in l].count(k)
        if (countof(CursorKind.INIT_LIST_EXPR, init_value) == 1):
            init_value = dict(init_value)[CursorKind.INIT_LIST_EXPR]
        elif (countof(CursorKind.STRING_LITERAL, init_value) == 1):
            # we have a initialised c_array
            init_value = dict(init_value)[CursorKind.STRING_LITERAL]
        else:
            # ignore size alone
            init_value = []
        # check the array size versus elements: grow the declared size when
        # the initialiser has more elements than declared.
        if _type.size < len(init_value):
            _type.size = len(init_value)
    elif init_value == []:
        # catch case: empty initialiser means no value at all.
        init_value = None
    else:
        log.debug('VAR_DECL: default init_value: %s', init_value)
        # take the value of the first (kind, value) pair, if any
        if len(init_value) > 0:
            init_value = init_value[0][1]
    return init_value
|
python
|
def render_html_report(summary, report_template=None, report_dir=None):
    """ render html report with specified report name and template

    Args:
        summary (dict): test summary; must contain summary["time"]["start_at"]
            which names the generated report file
        report_template (str): specify html report template path
        report_dir (str): specify html report save directory

    Returns:
        str: path of the generated html report
    """
    if not report_template:
        # Fall back to the template bundled next to this module.
        report_template = os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            "templates",
            "report_template.html"
        )
        logger.log_debug("No html report template specified, use default.")
    else:
        logger.log_info("render with html report template: {}".format(report_template))

    logger.log_info("Start to render Html report ...")

    report_dir = report_dir or os.path.join(os.getcwd(), "reports")
    if not os.path.isdir(report_dir):
        os.makedirs(report_dir)

    # Report file is named after the run's start timestamp.
    start_at_timestamp = int(summary["time"]["start_at"])
    summary["time"]["start_datetime"] = datetime.fromtimestamp(start_at_timestamp).strftime('%Y-%m-%d %H:%M:%S')
    report_path = os.path.join(report_dir, "{}.html".format(start_at_timestamp))

    with io.open(report_template, "r", encoding='utf-8') as fp_r:
        template_content = fp_r.read()
        with io.open(report_path, 'w', encoding='utf-8') as fp_w:
            # loopcontrols enables {% break %}/{% continue %} in the template.
            rendered_content = Template(
                template_content,
                extensions=["jinja2.ext.loopcontrols"]
            ).render(summary)
            fp_w.write(rendered_content)

    logger.log_info("Generated Html report: {}".format(report_path))

    return report_path
|
python
|
def netconf_state_files_file_name(self, **kwargs):
    """Auto Generated Code.

    Builds a NETCONF <config> XML tree for netconf-state/files/file/name,
    fills the name element from the required 'name' kwarg, and hands the
    tree to the callback ('callback' kwarg, default self._callback).
    """
    config = ET.Element("config")
    netconf_state = ET.SubElement(config, "netconf-state", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring")
    files = ET.SubElement(netconf_state, "files", xmlns="http://tail-f.com/yang/netconf-monitoring")
    file = ET.SubElement(files, "file")
    name = ET.SubElement(file, "name")
    name.text = kwargs.pop('name')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
java
|
/**
 * Builds a {@link UserBean} from a map of field values.
 *
 * @param source field name to value map, may be {@code null}
 * @return the populated bean, or {@code null} when {@code source} is null
 */
public static UserBean unmarshallUser(Map<String, Object> source) {
    // A null source unmarshalls to null, not to an empty bean.
    if (source == null) {
        return null;
    }
    final UserBean user = new UserBean();
    user.setUsername(asString(source.get("username")));
    user.setEmail(asString(source.get("email")));
    user.setFullName(asString(source.get("fullName")));
    user.setJoinedOn(asDate(source.get("joinedOn")));
    postMarshall(user);
    return user;
}
|
python
|
def edit_ini(ini_filepath=None):
    """
    Open the .ini file with the operating system’s associated editor.

    :param ini_filepath: path of the .ini file; when ``None`` the default
        location returned by ``get_ini_filepath()`` is used.
    """
    if ini_filepath is None:  # idiom fix: compare to None with `is`
        ini_filepath = get_ini_filepath()
    try:
        click.edit(filename=ini_filepath)
    except click.exceptions.ClickException as err:
        # click could not launch an editor; report and fall back to the
        # system default handler for the file.
        print("Click err: %s" % err)
        webbrowser.open(ini_filepath)
|
java
|
/**
 * Fits a name into the fixed name column, marking truncation with '>'.
 *
 * @param name the raw name
 * @return the name unchanged when it fits, otherwise a truncated form
 *         ending in '>'
 */
private static String formatName( String name )
{
    return name.length() < NAME_COL_WIDTH
            ? name
            : name.substring( 0, NAME_COL_WIDTH - 1 ) + ">";
}
|
python
|
def read_file(filename, print_error=True):
    """Returns the contents of a file.

    Tries UTF-8 first, then Latin-1 (which accepts any byte sequence).
    Returns None when the file cannot be opened; the IOError is printed
    to stderr unless `print_error` is false.
    """
    try:
        for codec in ('utf-8', 'latin1'):
            try:
                with io.open(filename, encoding=codec) as handle:
                    return handle.read()
            except UnicodeDecodeError:
                continue
    except IOError as error:
        if print_error:
            print(error, file=sys.stderr)
    return None
|
java
|
/**
 * Enqueues, for the current node, one work item per incoming edge and per
 * path slot, accumulating the predecessor's recorded weight.
 *
 * @param queWork  the work queue; cleared before new items are added
 * @param nCurNode index of the node whose incoming edges are expanded
 */
private void enQueueCurNodeEdges(CQueue queWork, int nCurNode)
{
    int nPreNode;
    double eWeight;
    List<EdgeFrom> pEdgeToList;

    queWork.clear();
    pEdgeToList = graph.getEdgeListTo(nCurNode);

    // Get all the edgesFrom
    for (EdgeFrom e : pEdgeToList)
    {
        nPreNode = e.from;
        eWeight = e.weight;

        for (int i = 0; i < N; i++)
        {
            // The first node has no predecessor: enqueue it directly.
            if (nPreNode == 0)
            {
                queWork.enQueue(new QueueElement(nPreNode, i, eWeight));
                break;
            }

            // If the predecessor's weight is "infinite" (MAX_VALUE) there
            // is no viable path through it, so stop expanding this edge.
            if (weightArray[nPreNode - 1][i] == Double.MAX_VALUE)
                break;

            queWork.enQueue(new QueueElement(nPreNode, i, eWeight + weightArray[nPreNode - 1][i]));
        }
    }
}
|
python
|
def status(self):
    """
    Determine the status of the connection and receiver, and return
    ERROR, CONNECTED, or DISCONNECTED as appropriate.

    For simplicity, we only consider ourselves to be connected
    after the Connection class has setup a receiver task. This
    only happens after the websocket is open, and the connection
    isn't usable until that receiver has been started.

    Returns:
        One of DISCONNECTED, DISCONNECTING, ERROR, or CONNECTED.
    """
    connection = self.connection()

    # the connection instance was destroyed but someone kept
    # a separate reference to the monitor for some reason
    if not connection:
        return self.DISCONNECTED

    # connection cleanly disconnected or not yet opened
    if not connection.ws:
        return self.DISCONNECTED

    # close called but not yet complete
    if self.close_called.is_set():
        return self.DISCONNECTING

    # connection closed uncleanly (we didn't call connection.close)
    stopped = connection._receiver_task.stopped.is_set()
    if stopped or not connection.ws.open:
        return self.ERROR

    # everything is fine!
    return self.CONNECTED
|
java
|
/**
 * Tests whether the given island competes with this one, i.e. whether any
 * coordinate of {@code isl} shares a row or a column with any coordinate
 * of this island.
 *
 * @param isl the other island
 * @return {@code true} when at least one row or column is shared
 */
public boolean isCompetitor(Island isl) {
    for (Coordinate theirs : isl) {
        for (Coordinate ours : islandCoordinates) {
            boolean shared = theirs.sameColumn(ours) || theirs.sameRow(ours);
            if (shared) {
                return true;
            }
        }
    }
    return false;
}
|
python
|
def save_default_values(self):
    """Save InaSAFE default values.

    Walks every registered default-value parameter container and persists
    each parameter's value to QSettings under the GLOBAL scope, keyed by
    the parameter's guid.
    """
    for parameter_container in self.default_value_parameter_containers:
        parameters = parameter_container.get_parameters()
        for parameter in parameters:
            set_inasafe_default_value_qsetting(
                self.settings,
                GLOBAL,
                parameter.guid,
                parameter.value
            )
|
python
|
def gen_inherited(self) -> str:
    """ Generate the list of slot properties that are inherited across slot_usage or is_a paths

    Returns the rendered `inherited_slots: List[str] = [...]` assignment,
    wrapped to fit a 120-character line with continuation lines aligned
    under the opening bracket.
    """
    inherited_head = 'inherited_slots: List[str] = ['
    inherited_slots = ', '.join([f'"{underscore(slot.name)}"' for slot in self.schema.slots.values()
                                 if slot.inherited])
    # Wrap the joined list so that, with the head prepended, no line
    # exceeds 120 characters.
    is_rows = split_line(inherited_slots, 120 - len(inherited_head))
    return inherited_head + ('\n' + len(inherited_head) * ' ').join([r.strip() for r in is_rows]) + ']'
|
java
|
/**
 * Translates high-level interest ops into poll events and stores them on
 * the key's selector. Only OP_ACCEPT is honoured here (mapped to POLLIN);
 * all other bits are ignored for this channel type.
 *
 * @param ops the requested SelectionKey interest ops
 * @param sk  the selection key whose poll events are updated
 */
public void translateAndSetInterestOps(int ops, SelectionKeyImpl sk) {
    int newOps = 0;

    // Translate ops
    if ((ops & SelectionKey.OP_ACCEPT) != 0)
        newOps |= PollArrayWrapper.POLLIN;

    // Place ops into pollfd array
    sk.selector.putEventOps(sk, newOps);
}
|
python
|
def _set_dynamic_bypass(self, v, load=False):
  """
  Setter method for dynamic_bypass, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/dynamic_bypass (container)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_dynamic_bypass is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_dynamic_bypass() directly.

  Generated (pyangbind-style) setter: coerces ``v`` into the bound
  container type and stores it; raises ValueError when the value is not
  compatible with the container definition.
  """
  # Unwrap values carrying an explicit YANG type converter.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    t = YANGDynClass(v,base=dynamic_bypass.dynamic_bypass, is_container='container', presence=True, yang_name="dynamic-bypass", rest_name="dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dynamic bypass router level parameters', u'callpoint': u'MplsDynamicBypass', u'cli-add-mode': None, u'cli-full-command': None, u'hidden': u'full', u'cli-full-no': None, u'cli-mode-name': u'config-mpls-dynamic-bypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
        'error-string': """dynamic_bypass must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=dynamic_bypass.dynamic_bypass, is_container='container', presence=True, yang_name="dynamic-bypass", rest_name="dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dynamic bypass router level parameters', u'callpoint': u'MplsDynamicBypass', u'cli-add-mode': None, u'cli-full-command': None, u'hidden': u'full', u'cli-full-no': None, u'cli-mode-name': u'config-mpls-dynamic-bypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
      })

  self.__dynamic_bypass = t
  # Notify the containing object, when it supports change notification.
  if hasattr(self, '_set'):
    self._set()
|
python
|
def _num_vowel_to_acc(vowel, tone):
    """Convert a numbered vowel to an accented vowel.

    :param vowel: the bare vowel character, e.g. ``'a'``
    :param tone: the tone number appended to the vowel for the lookup key
    :raises ValueError: if ``vowel + str(tone)`` is not a key of VOWEL_MAP
    """
    try:
        return VOWEL_MAP[vowel + str(tone)]
    except (KeyError, IndexError):
        # Bug fix: a failed mapping lookup with a string key raises
        # KeyError, which the previous `except IndexError` never caught,
        # so invalid input leaked a KeyError instead of ValueError.
        raise ValueError(
            "Vowel must be one of '{}' and tone must be a tone.".format(VOWELS))
|
java
|
/**
 * Reads a single property of the print-job status record whose
 * {@code referenceId} equals the given id.
 *
 * @param id       the reference id of the status record
 * @param property name of the property to select
 * @return the property value, or {@code null} when no record matches
 */
public final Object getValue(final String id, final String property) {
    final CriteriaBuilder builder = getSession().getCriteriaBuilder();
    final CriteriaQuery<Object> query = builder.createQuery(Object.class);
    final Root<PrintJobStatusExtImpl> record = query.from(PrintJobStatusExtImpl.class);
    query.select(record.get(property))
            .where(builder.equal(record.get("referenceId"), id));
    return getSession().createQuery(query).uniqueResult();
}
|
python
|
def create(self, request):
    """Create a new product request.

    Expects ``variant_id`` in the request payload.  Responds with the
    serialized product request (201), a missing-parameter error (400),
    or an unknown-variant error (404).
    """
    variant_id = request.data.get("variant_id", None)
    if variant_id is None:
        return Response(
            {"message": "Missing 'variant_id'"},
            status=status.HTTP_400_BAD_REQUEST)
    try:
        variant = ProductVariant.objects.get(id=variant_id)
    except ProductVariant.DoesNotExist:
        # Previously an unknown id propagated DoesNotExist and produced an
        # HTTP 500; answer with an explicit 404 instead.
        return Response(
            {"message": "Unknown 'variant_id'"},
            status=status.HTTP_404_NOT_FOUND)
    product_request = ProductRequest(variant=variant)
    product_request.save()
    serializer = self.serializer_class(product_request)
    return Response(data=serializer.data, status=status.HTTP_201_CREATED)
|
python
|
def _cas_4(self):
    ''' Longitude/Latitude overlap (4 images)

    The requested window spans four map tiles (two longitudes x two
    latitudes).  Each quadrant grid is extracted up to the shared tile
    boundary, then the four grids are stitched: left/right halves with
    hstack, top/bottom halves with vstack.

    Returns:
        tuple: (X, Y, Z) stitched 2-D arrays covering the full window.
    '''
    lonc_left = self._format_lon(self.lonm)
    lonc_right = self._format_lon(self.lonM)
    latc_top = self._format_lat(self.latM)
    latc_bot = self._format_lat(self.latm)

    # Top-left quadrant: from the window's west edge to the tile's east edge.
    img_name_00 = self._format_name_map(lonc_left, latc_top)
    img_00 = BinaryTable(img_name_00, self.path_pdsfiles)
    X_00, Y_00, Z_00 = img_00.extract_grid(self.lonm,
                                           float(
                                               img_00.EASTERNMOST_LONGITUDE),
                                           float(img_00.MINIMUM_LATITUDE),
                                           self.latM)

    # Top-right quadrant: from the tile's west edge to the window's east edge.
    img_name_01 = self._format_name_map(lonc_right, latc_top)
    img_01 = BinaryTable(img_name_01, self.path_pdsfiles)
    X_01, Y_01, Z_01 = img_01.extract_grid(float(img_01.WESTERNMOST_LONGITUDE),
                                           self.lonM,
                                           float(img_01.MINIMUM_LATITUDE),
                                           self.latM)

    # Bottom-left quadrant.
    img_name_10 = self._format_name_map(lonc_left, latc_bot)
    img_10 = BinaryTable(img_name_10, self.path_pdsfiles)
    X_10, Y_10, Z_10 = img_10.extract_grid(self.lonm,
                                           float(
                                               img_10.EASTERNMOST_LONGITUDE),
                                           self.latm,
                                           float(img_10.MAXIMUM_LATITUDE))

    # Bottom-right quadrant.
    img_name_11 = self._format_name_map(lonc_right, latc_bot)
    img_11 = BinaryTable(img_name_11, self.path_pdsfiles)
    X_11, Y_11, Z_11 = img_11.extract_grid(float(img_11.WESTERNMOST_LONGITUDE),
                                           self.lonM,
                                           self.latm,
                                           float(img_11.MAXIMUM_LATITUDE))

    # Stitch quadrants: columns first (east-west), then rows (north-south).
    X_new_top = np.hstack((X_00, X_01))
    X_new_bot = np.hstack((X_10, X_11))
    X_new = np.vstack((X_new_top, X_new_bot))
    Y_new_top = np.hstack((Y_00, Y_01))
    Y_new_bot = np.hstack((Y_10, Y_11))
    Y_new = np.vstack((Y_new_top, Y_new_bot))
    Z_new_top = np.hstack((Z_00, Z_01))
    Z_new_bot = np.hstack((Z_10, Z_11))
    Z_new = np.vstack((Z_new_top, Z_new_bot))

    return X_new, Y_new, Z_new
|
python
|
def data_log_send(self, fl_1, fl_2, fl_3, fl_4, fl_5, fl_6, force_mavlink1=False):
    '''
    Configurable data log probes to be used inside Simulink

    Encodes the six float probes into a DATA_LOG message and sends it.

    fl_1              : Log value 1 (float)
    fl_2              : Log value 2 (float)
    fl_3              : Log value 3 (float)
    fl_4              : Log value 4 (float)
    fl_5              : Log value 5 (float)
    fl_6              : Log value 6 (float)
    force_mavlink1    : when True, force sending as a MAVLink 1 frame
    '''
    return self.send(self.data_log_encode(fl_1, fl_2, fl_3, fl_4, fl_5, fl_6), force_mavlink1=force_mavlink1)
|
java
|
/**
 * Hashes the input bytes and reduces the digest modulo the Idemix group
 * order.
 *
 * @param data the bytes to hash
 * @return the digest interpreted as a BIG, reduced mod GROUP_ORDER
 */
public static BIG hashModOrder(byte[] data) {
    // Feed every input byte into the digest.
    final HASH256 digest = new HASH256();
    for (int i = 0; i < data.length; i++) {
        digest.process(data[i]);
    }

    // Interpret the digest as a big integer and reduce it into the group.
    final BIG result = BIG.fromBytes(digest.hash());
    result.mod(IdemixUtils.GROUP_ORDER);
    return result;
}
|
java
|
/**
 * Collects every word of this sentence whose head is {@code word} and
 * whose dependency relation equals {@code relation}.
 *
 * @param word     the head word
 * @param relation the dependency label to match (compared with equals)
 * @return the matching children, possibly empty, in sentence order
 */
public List<CoNLLWord> findChildren(CoNLLWord word, String relation)
{
    List<CoNLLWord> children = new LinkedList<CoNLLWord>();
    for (CoNLLWord candidate : this)
    {
        // Head comparison is by identity, matching the original behaviour.
        boolean sameHead = candidate.HEAD == word;
        if (sameHead && candidate.DEPREL.equals(relation))
        {
            children.add(candidate);
        }
    }
    return children;
}
|
java
|
/**
 * Asynchronously fetches the contact labels of an alliance.
 *
 * @param allianceId  the alliance to query
 * @param datasource  the ESI datasource to read from
 * @param ifNoneMatch ETag for conditional requests, may be null
 * @param token       access token, may be null
 * @param callback    receives the label list or the failure
 * @return the in-flight HTTP call (can be cancelled)
 * @throws ApiException if the request cannot be built
 */
public com.squareup.okhttp.Call getAlliancesAllianceIdContactsLabelsAsync(Integer allianceId, String datasource,
        String ifNoneMatch, String token, final ApiCallback<List<AllianceContactsLabelsResponse>> callback)
        throws ApiException {

    com.squareup.okhttp.Call call = getAlliancesAllianceIdContactsLabelsValidateBeforeCall(allianceId, datasource,
            ifNoneMatch, token, callback);
    Type localVarReturnType = new TypeToken<List<AllianceContactsLabelsResponse>>() {
    }.getType();
    apiClient.executeAsync(call, localVarReturnType, callback);
    return call;
}
|
java
|
/**
 * Synchronously fetches one page of a corporation's historical market
 * orders, returning only the response body.
 *
 * @param corporationId the corporation to query
 * @param datasource    the ESI datasource to read from
 * @param ifNoneMatch   ETag for conditional requests, may be null
 * @param page          page number of the result set
 * @param token         access token, may be null
 * @return the order history entries of the requested page
 * @throws ApiException if the request fails
 */
public List<CorporationOrdersHistoryResponse> getCorporationsCorporationIdOrdersHistory(Integer corporationId,
        String datasource, String ifNoneMatch, Integer page, String token) throws ApiException {
    ApiResponse<List<CorporationOrdersHistoryResponse>> resp = getCorporationsCorporationIdOrdersHistoryWithHttpInfo(
            corporationId, datasource, ifNoneMatch, page, token);
    return resp.getData();
}
|
java
|
/**
 * Copies the contents of {@code intSet} into a new concurrent set that can
 * hold values in {@code [0, maxExclusive)}.
 *
 * @param intSet       the set to copy; every element must be below
 *                     {@code maxExclusive}
 * @param maxExclusive exclusive upper bound of storable values
 * @return a concurrent set containing the same elements
 */
public static IntSet concurrentCopyFrom(IntSet intSet, int maxExclusive) {
    ConcurrentSmallIntSet cis = new ConcurrentSmallIntSet(maxExclusive);
    intSet.forEach((IntConsumer) cis::set);
    return cis;
}
|
java
|
/**
 * Asynchronously gets an integration account map, unwrapping the service
 * response to its body.
 *
 * @param resourceGroupName      the resource group of the account
 * @param integrationAccountName the integration account name
 * @param mapName                the map to fetch
 * @return an observable emitting the map on success
 */
public Observable<IntegrationAccountMapInner> getAsync(String resourceGroupName, String integrationAccountName, String mapName) {
    return getWithServiceResponseAsync(resourceGroupName, integrationAccountName, mapName).map(new Func1<ServiceResponse<IntegrationAccountMapInner>, IntegrationAccountMapInner>() {
        @Override
        public IntegrationAccountMapInner call(ServiceResponse<IntegrationAccountMapInner> response) {
            return response.body();
        }
    });
}
|
java
|
/**
 * Returns the EClass for IfcPropertyDefinition, resolving it lazily from
 * the registered Ifc4 package on first use.
 *
 * NOTE(review): the lazy initialisation is unsynchronized; concurrent
 * first calls may each resolve the classifier — confirm whether callers
 * are single-threaded, as is usual for generated EMF package code.
 */
@Override
public EClass getIfcPropertyDefinition() {
    if (ifcPropertyDefinitionEClass == null) {
        // Classifier index 467 is fixed by the generated Ifc4 package layout.
        ifcPropertyDefinitionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                .getEClassifiers().get(467);
    }
    return ifcPropertyDefinitionEClass;
}
|
python
|
def get_dataset(self, key, info):
    """Load a dataset.

    Returns the brightness-temperature or radiance variable matching this
    handler's channel/stripe/view, converting radiance to reflectance (%)
    when requested.  Returns None when `key` does not belong to this
    handler's channel.
    """
    if self.channel not in key.name:
        return
    logger.debug('Reading %s.', key.name)
    if key.calibration == 'brightness_temperature':
        variable = self.nc['{}_BT_{}{}'.format(self.channel, self.stripe, self.view)]
    else:
        variable = self.nc['{}_radiance_{}{}'.format(self.channel, self.stripe, self.view)]

    radiances = variable
    units = variable.attrs['units']
    if key.calibration == 'reflectance':
        # TODO take into account sun-earth distance
        # Solar irradiance for this channel (name with trailing suffix removed).
        solar_flux = self.cal[re.sub('_[^_]*$', '', key.name) + '_solar_irradiances']
        d_index = self.indices['detector_{}{}'.format(self.stripe, self.view)]
        idx = 0 if self.view == 'n' else 1  # 0: Nadir view, 1: oblique (check).
        radiances.data = da.map_blocks(
            self._cal_rad, radiances.data, d_index.data, solar_flux=solar_flux[:, idx].values)

        # Convert the radiance/irradiance ratio to percent reflectance.
        radiances *= np.pi * 100
        units = '%'

    info.update(radiances.attrs)
    info.update(key.to_dict())
    info.update(dict(units=units,
                     platform_name=self.platform_name,
                     sensor=self.sensor,
                     view=self.view))

    radiances.attrs = info
    return radiances
|
python
|
def ok_check(function, *args, **kwargs):
    '''Ensure that the response body is OK.

    Calls `function(*args, **kwargs)`, raises ClientException when the
    body is anything other than "ok" (case-insensitive), and otherwise
    returns the body.
    '''
    response = function(*args, **kwargs)
    body = response.content
    if body.lower() != 'ok':
        raise ClientException(body)
    return body
|
java
|
/**
 * Wraps the delegate behaviour in the configured multi-instance behaviour
 * when this activity has multi-instance characteristics; otherwise the
 * delegate is returned unchanged.
 *
 * @param delegateInstance the resolved delegate behaviour
 * @return the behaviour to execute for this activity
 */
protected ActivityBehavior determineBehaviour(ActivityBehavior delegateInstance) {
    if (!hasMultiInstanceCharacteristics()) {
        return delegateInstance;
    }
    // Run the delegate as the inner behaviour of the multi-instance loop.
    multiInstanceActivityBehavior.setInnerActivityBehavior((AbstractBpmnActivityBehavior) delegateInstance);
    return multiInstanceActivityBehavior;
}
|
java
|
/**
 * Converts each incoming document {@link Instance} (carrying a JCas) into
 * one Mallet token-sequence instance per sentence.  For every token a
 * feature vector is built (POS, lemma, optionally species and measure-unit
 * features) and the target label is TARGET_I when the token is covered by
 * a gold brain-region annotation, TARGET_O otherwise.  Instance names are
 * "&lt;pubmedId&gt;__&lt;sentenceIndex&gt;".
 */
@SuppressWarnings("unchecked")
@Override
public Iterator<Instance> newIteratorFrom(Iterator<Instance> source) {
    List<Instance> output = new LinkedList<Instance>();
    while (source.hasNext()) {
        Instance carrier = (Instance) source.next();
        JCas jCas = (JCas) carrier.getData();
        int pmId = (Integer) carrier.getName();
        // gold labels from training corpus (represent BRs)
        // key: token that are covered; val: list of BR covering that token
        final Set<AnnotationFS> coveringBrainRegions = indexCovering(
                jCas.getCas(), //
                getType(jCas, Token.class), getType(jCas, Gold.class))
                .keySet();
        // key: token that are covered; val: list of BR covering that token
        final Map<AnnotationFS, Collection<AnnotationFS>> coveringMeasures = indexCovering(
                jCas.getCas(), //
                getType(jCas, Token.class), getType(jCas, Measure.class));
        // topics from DCA. not all tokens have topics (e.g. tokens
        // representing stopwords, or hapax)
        // key: each token; value: a list of Topics covering that token
        final Map<AnnotationFS, Collection<AnnotationFS>> coveringTopics = indexCovering(
                jCas.getCas(), //
                getType(jCas, Token.class), getType(jCas, Topic.class));
        final Collection<LinnaeusSpecies> species = select(jCas,
                LinnaeusSpecies.class);
        int sentId = 0;
        // One Mallet instance per sentence.
        for (Sentence s : select(jCas, Sentence.class)) {
            List<cc.mallet.types.Token> data = newArrayList();
            TokenSequence target = new TokenSequence();
            for (Token t : selectCovered(Token.class, s)) {
                cc.mallet.types.Token malletToken = new cc.mallet.types.Token(
                        t.getCoveredText());
                data.add(malletToken);
                // POS, LEMMA
                malletToken.setFeatureValue(PROPERTY_POS + t.getPos(), 1.0);
                // /if (GridSearchConfiguration.getBoolean("Lemma")) {
                if (t.getLemmaStr() != null && t.getLemmaStr().length() > 1)
                    malletToken.setFeatureValue(
                            PROPERTY_LEMMA + t.getLemmaStr(), 1.0);
                // else
                // malletToken.setFeatureValue(
                // PROPERTY_TEXT + t.getCoveredText(), 1.0);
                // // Word2class
                // if (false) {
                // int classz = Word2VecUtils.getClass(t.getCoveredText());
                // malletToken.setFeatureValue(PROPERTY_WORDVECTOR//
                // + classz, 1.0);
                // }
                //
                // // Word2vec
                // if (false) {
                // float[] wordVector = Word2VecUtils.getWordVector(t
                // .getCoveredText());
                // if (wordVector != null) {
                //
                // for (int j = 0; j < wordVector.length; j++) {
                // malletToken.setFeatureValue(PROPERTY_WORDVECTOR//
                // + j, wordVector[j]);
                // }
                // } else {
                // System.out.println("no wordvec for "
                // + t.getCoveredText());
                // }
                // }
                /*-
                // TOPICS
                if (coveringTopics.containsKey(t)) {
                    Topic top = (Topic) coveringTopics.get(t).iterator()
                            .next();
                    if ("a".equals("a")) {
                        malletToken.setFeatureValue(PROPERTY_TOPICS//
                                + top.getMostLikelyTopic(), 1.0);
                    } else {
                        int topicScenario = StaticOption
                                .getInt("topScenario");
                        if (topicScenario == 1) { // top1
                            malletToken.setFeatureValue(PROPERTY_TOPICS//
                                    + top.getMostLikelyTopic(), 1.0);
                        } else if (topicScenario == 2) { // staged
                            // format: u_TOPICID_{topicId}_{category}
                            DoubleArray scores = top.getScores();
                            for (int topic_id = 0; topic_id < scores.size(); topic_id++) {
                                double score = scores.get(topic_id);
                                // System.out.println(topic_id+"\t"+score);
                                if (score >= 0.01d && score < 0.05d) {
                                    malletToken.setFeatureValue(
                                            PROPERTY_TOPICS + topic_id
                                                    + "_1", 1.0);
                                } else if (score >= 0.05d && score < 0.1d) {
                                    malletToken.setFeatureValue(
                                            PROPERTY_TOPICS + topic_id
                                                    + "_2", 1.0);
                                } else if (score >= 0.1d && score < 0.2d) {
                                    malletToken.setFeatureValue(
                                            PROPERTY_TOPICS + topic_id
                                                    + "_3", 1.0);
                                } else if (score >= 0.2d) {
                                    malletToken.setFeatureValue(
                                            PROPERTY_TOPICS + topic_id
                                                    + "_4", 1.0);
                                }
                            }
                        } else { // topN & minProb
                            int topNTopics = StaticOption
                                    .getInt("topNTopics");
                            double minProb = StaticOption
                                    .getDouble("minProb");
                            for (int topTopic : DCATopicModelsAnnotator
                                    .topNTopics(top.getScores(),
                                            topNTopics, minProb)) {
                                malletToken.setFeatureValue(PROPERTY_TOPICS
                                        + topTopic, 1.0);
                            }
                        }
                    }
                }*/
                // SPECIES
                if (NEW_FEATURES && species != null && !species.isEmpty()) {
                    for (LinnaeusSpecies specie : species) {
                        malletToken.setFeatureValue(PROPERTY_SPECIES
                                + specie.getMostProbableSpeciesId(), 1.0);
                    }
                }
                // MEASURE: use the first covering measure that has a unit.
                if (NEW_FEATURES && coveringMeasures.containsKey(t)) {
                    String unit = null;
                    for (AnnotationFS measure : coveringMeasures.get(t)) {
                        Measure m = (Measure) measure;
                        if (m.getUnit() != null && m.getUnit().length() > 0) {
                            unit = m.getUnit();
                            break;
                        }
                    }
                    if (unit != null) {
                        malletToken.setFeatureValue(PROPERTY_UNITS + unit,
                                1.0);
                    }
                }
                // TARGET annots for brain regions
                if (coveringBrainRegions.contains(t)) {
                    target.add(TARGET_I);
                } else {
                    target.add(TARGET_O);
                }
            }
            output.add(new Instance(new TokenSequence(data), target, pmId
                    + "__" + sentId, null));
            sentId++;
        }
    }
    return output.iterator();
}
|
python
|
def addMsrunContainers(mainContainer, subContainer):
    """Adds the complete content of all specfile entries from the subContainer
    to the mainContainer. However if a specfile of ``subContainer.info`` is
    already present in ``mainContainer.info`` its contents are not added to the
    mainContainer.

    :param mainContainer: :class:`MsrunContainer`
    :param subContainer: :class:`MsrunContainer`

    .. warning:: does not generate new items, all items added to the
        ``mainContainer`` are still present in the ``subContainer`` and changes
        made to elements of one container also affects the elements of the other
        one (ie elements share same memory location).
    """
    # Map datatype keys of the per-specfile status dict to the attribute
    # name of the corresponding sub-container on MsrunContainer.
    typeToContainer = {'rm': 'rmc', 'ci': 'cic', 'smi': 'smic',
                       'sai': 'saic', 'si': 'sic'
                       }
    for specfile in subContainer.info:
        if specfile in mainContainer.info:
            # Never overwrite an already-registered specfile.
            continue
        mainContainer.addSpecfile(specfile, subContainer.info[specfile]['path'])
        for datatype, status in listitems(subContainer.info[specfile]['status']):
            if not status:
                continue
            datatypeContainer = typeToContainer[datatype]
            dataTypeContainer = getattr(mainContainer, datatypeContainer)
            subContainerData = getattr(subContainer,
                                       datatypeContainer
                                       )[specfile]
            # Shared reference, not a copy (see warning above).
            dataTypeContainer[specfile] = subContainerData
            mainContainer.info[specfile]['status'][datatype] = True
|
java
|
/**
 * Asynchronously deletes a user account from a compute node, notifying the
 * callback from the header-only service response.
 *
 * @param poolId          id of the pool containing the node
 * @param nodeId          id of the node hosting the user account
 * @param userName        name of the account to delete
 * @param serviceCallback callback invoked on completion or failure
 * @return a future tracking the delete operation
 */
public ServiceFuture<Void> deleteUserAsync(String poolId, String nodeId, String userName, final ServiceCallback<Void> serviceCallback) {
    return ServiceFuture.fromHeaderResponse(deleteUserWithServiceResponseAsync(poolId, nodeId, userName), serviceCallback);
}
|
python
|
def do_command(self):
    """Call a single command with arguments.

    The first CLI argument is the method name; the rest are its
    arguments.  Alternatively the rTorrent style ``method=a,b,c`` packs
    the arguments into the method token itself, in which case no extra
    shell-style arguments may be given.
    """
    method = self.args[0]

    raw_args = self.args[1:]
    if '=' in method:
        if raw_args:
            self.parser.error("Please don't mix rTorrent and shell argument styles!")
        # Split "method=a,b,c" into the method and its comma-separated args.
        method, raw_args = method.split('=', 1)
        raw_args = raw_args.split(',')

    self.execute(self.open(), method, self.cooked(raw_args))
|
java
|
/**
 * Evaluates a VDM cases-expression: the scrutinee is evaluated once, each
 * alternative is tried in order, and the first matching alternative's
 * value is returned.  Falls back to the `others` clause when present;
 * aborts with runtime error 4004 when no case applies.
 */
@Override
public Value caseACasesExp(ACasesExp node, Context ctxt)
        throws AnalysisException
{
    BreakpointManager.getBreakpoint(node).check(node.getLocation(), ctxt);

    Value val = node.getExpression().apply(VdmRuntime.getExpressionEvaluator(), ctxt);

    for (ACaseAlternative c : node.getCases())
    {
        // eval returns null when the alternative's pattern does not match.
        Value rv = eval(c, val, ctxt);

        if (rv != null)
        {
            return rv;
        }
    }

    if (node.getOthers() != null)
    {
        return node.getOthers().apply(VdmRuntime.getExpressionEvaluator(), ctxt);
    }

    return VdmRuntimeError.abort(node.getLocation(), 4004, "No cases apply for "
            + val, ctxt);
}
|
java
|
/**
 * Decides whether a dictionary gloss is suitable for the given romaji
 * reading.
 *
 * @param dictElem candidate gloss, cleaned before the checks
 * @param romaji   the reading whose length bounds an acceptable gloss
 * @return the cleaned gloss when acceptable, otherwise {@code null}
 */
public static String choose(String dictElem, String romaji) {
    dictElem = clean(dictElem);

    // Glosses of more than a few words are discarded outright.
    if (isLong(dictElem)) {
        return null;
    }

    // Reject single characters and excessively long glosses: the gloss
    // must stay below 20 chars and below roughly 3x the romaji length.
    final int len = dictElem.length();
    final boolean acceptable = len > 1 && len < 20
            && len < romaji.length() * 3 + 5;
    return acceptable ? dictElem : null;
}
|
java
|
/**
 * Derives a Java-style class name from a qualified name, e.g.
 * {http://www.w3.org/2001/XMLSchema}decimal becomes
 * "org.w3.2001.XMLSchema.decimal": the domain is reversed to its last two
 * labels, then the remaining URI path segments and the local part follow.
 *
 * @param qname the qualified name to convert
 * @return the derived class name, or {@code null} when the namespace URI
 *         cannot be decomposed as expected
 */
public static String getClassName(QName qname) {
    try {
        // Split the URI on '/': ["http:", "www.w3.org", "2001", "XMLSchema"].
        List<String> pathParts = new ArrayList<String>();
        for (StringTokenizer st = new StringTokenizer(qname.getNamespaceURI(), "/"); st.hasMoreTokens();) {
            pathParts.add(st.nextToken());
        }
        // Split the domain on '.': ["www", "w3", "org"].
        List<String> domainParts = new ArrayList<String>();
        for (StringTokenizer st = new StringTokenizer(pathParts.get(1), "."); st.hasMoreTokens();) {
            domainParts.add(st.nextToken());
        }
        assert (domainParts.size() >= 2);

        StringBuilder className = new StringBuilder();
        int n = domainParts.size();
        // Last two domain labels, reversed: "org.w3".
        className.append(domainParts.get(n - 1)).append('.').append(domainParts.get(n - 2));
        // Remaining URI path segments: ".2001.XMLSchema".
        for (int i = 2; i < pathParts.size(); i++) {
            className.append('.').append(pathParts.get(i));
        }
        className.append('.').append(qname.getLocalPart());
        return className.toString();
    } catch (Exception e) {
        // Any decomposition failure maps to null, as in the original contract.
        return null;
    }
}
|
java
|
/**
 * Applies a rotation about the Y axis to this matrix and stores the result
 * in {@code dest} (equivalent to {@code dest = this * Ry(ang)}).
 * Exact multiples of 90/180 degrees use precomputed sin/cos to avoid
 * floating-point error.
 *
 * @param ang  rotation angle in radians
 * @param dest matrix receiving the result
 * @return {@code dest}
 */
public Matrix3f rotateY(float ang, Matrix3f dest) {
    float sin, cos;
    if (ang == (float) Math.PI || ang == -(float) Math.PI) {
        cos = -1.0f;
        sin = 0.0f;
    } else if (ang == (float) Math.PI * 0.5f || ang == -(float) Math.PI * 1.5f) {
        cos = 0.0f;
        sin = 1.0f;
    } else if (ang == (float) -Math.PI * 0.5f || ang == (float) Math.PI * 1.5f) {
        cos = 0.0f;
        sin = -1.0f;
    } else {
        sin = (float) Math.sin(ang);
        cos = (float) Math.cosFromSin(sin, ang);
    }
    // Non-zero entries of the Y rotation matrix.
    float rm00 = cos;
    float rm20 = sin;
    float rm02 = -sin;
    float rm22 = cos;
    // add temporaries for dependent values
    float nm00 = m00 * rm00 + m20 * rm02;
    float nm01 = m01 * rm00 + m21 * rm02;
    float nm02 = m02 * rm00 + m22 * rm02;
    // set non-dependent values directly
    dest.m20 = m00 * rm20 + m20 * rm22;
    dest.m21 = m01 * rm20 + m21 * rm22;
    dest.m22 = m02 * rm20 + m22 * rm22;
    // set other values
    dest.m00 = nm00;
    dest.m01 = nm01;
    dest.m02 = nm02;
    dest.m10 = m10;
    dest.m11 = m11;
    dest.m12 = m12;
    return dest;
}
|
python
|
def libvlc_video_get_chapter_description(p_mi, i_title):
    '''Get the description of available chapters for specific title.
    @param p_mi: the media player.
    @param i_title: selected title.
    @return: list containing description of available chapter for title i_title.
    '''
    # Look up the cached ctypes wrapper; build it on demand when missing.
    f = _Cfunctions.get('libvlc_video_get_chapter_description', None)
    if not f:
        f = _Cfunction('libvlc_video_get_chapter_description', ((1,), (1,),), None,
                       ctypes.POINTER(TrackDescription), MediaPlayer, ctypes.c_int)
    return f(p_mi, i_title)
|
python
|
def only(self, *keys):
        """
        Get the items with the specified keys.
        :param keys: The keys to keep
        :type keys: tuple
        :rtype: Collection
        """
        # Keys are positional indices into the backing list of items.
        kept = [item for index, item in enumerate(self.items) if index in keys]
        return self.__class__(kept)
|
java
|
/**
 * Sets the source text shown by this widget. While the widget is not yet
 * attached the text is stored as the element's inner HTML; once attached it
 * is delegated to the element-level {@code setCode} overload.
 *
 * @param code the source text to display
 */
public void setCode(final String code) {
        if (!isAttached()) {
            getElement().setInnerHTML(code);
        } else {
            setCode(getElement(), code);
        }
}
|
java
|
/**
 * Depth-first traversal starting from {@code roots}.
 *
 * Nodes that are not part of this graph are still recorded as visited but
 * are not expanded further.
 *
 * @param roots the starting nodes
 * @return the set of all nodes reachable from {@code roots}
 */
public Set<T> dfs(Set<T> roots) {
        Set<T> visited = new HashSet<>();
        Deque<T> stack = new LinkedList<>(roots);
        while (!stack.isEmpty()) {
            T node = stack.pop();
            if (visited.contains(node)) {
                continue; // already expanded via another path
            }
            visited.add(node);
            if (contains(node)) {
                for (T neighbor : adjacentNodes(node)) {
                    if (!visited.contains(neighbor)) {
                        stack.push(neighbor);
                    }
                }
            }
        }
        return visited;
}
|
python
|
def publish_receiver_count(
        self, service, routing_id, method, timeout=None):
        '''Get the number of peers that would handle a particular publish.
        This method blocks until a response arrives.
        :param service: the service name
        :type service: anything hash-able
        :param routing_id:
            the id used for narrowing within the service handlers
        :type routing_id: int
        :param method: the method name
        :type method: string
        :param timeout: maximum time to wait for the response
        :type timeout: int, float or None
        :raises:
            - :class:`Unroutable <junction.errors.Unroutable>` if no peers are
              registered to receive the message
            - :class:`WaitTimeout <junction.errors.WaitTimeout>` if a timeout
              was provided and it expires
        '''
        # A peer connection that is down cannot route anything.
        if not self._peer.up:
            raise errors.Unroutable()
        # Issue the recipient-count RPC and block on its reply.
        rpc = self._rpc_client.recipient_count(
            self._peer, const.MSG_TYPE_PUBLISH, service, routing_id, method)
        return rpc.wait(timeout)[0]
|
java
|
/**
 * Sets the checked state, notifies every registered listener on the GUI
 * thread when one exists (directly otherwise), and schedules a repaint.
 *
 * @param checked the new checked state
 * @return this {@code CheckBox}, for call chaining
 */
public synchronized CheckBox setChecked(final boolean checked) {
        this.checked = checked;
        final Runnable notifyTask = new Runnable() {
            @Override
            public void run() {
                for (final Listener listener : listeners) {
                    listener.onStatusChanged(checked);
                }
            }
        };
        runOnGUIThreadIfExistsOtherwiseRunDirect(notifyTask);
        invalidate();
        return this;
}
|
java
|
/**
 * Stores a float value under the given key, boxing it to a {@link Float}
 * so it is handled by the generic {@code put(String, Object)} overload.
 *
 * @param key   key under which to store the value
 * @param value float value to store
 * @return this object, for call chaining
 * @throws JSONException propagated from the delegated {@code put} overload
 *         (e.g. for an invalid key or value)
 */
public JSONObject put(String key, float value) throws JSONException {
        return this.put(key, Float.valueOf(value));
}
|
python
|
def save_profile(self, userdata, data):
        """ Save user profile modifications """
        # Applies, in order: a one-time username choice, a password change, a
        # language change and a real-name change. Returns (result, msg, error)
        # where `result` is the (possibly updated) user document, `msg` a
        # localized status message and `error` a boolean failure flag.
        # assumes `data` carries the submitted form keys ("username",
        # "passwd", "passwd2", "oldpasswd", "language", "realname") -- TODO
        # confirm against the caller.
        result = userdata
        error = False
        # Check if updating username.
        # Only allowed once: the account must not have a username yet.
        if not userdata["username"] and "username" in data:
            # At least 4 characters from [-_|~0-9A-Za-z] (case-insensitive).
            if re.match(r"^[-_|~0-9A-Z]{4,}$", data["username"], re.IGNORECASE) is None:
                error = True
                msg = _("Invalid username format.")
            elif self.database.users.find_one({"username": data["username"]}):
                error = True
                msg = _("Username already taken")
            else:
                result = self.database.users.find_one_and_update({"email": userdata["email"]},
                                                                 {"$set": {"username": data["username"]}},
                                                                 return_document=ReturnDocument.AFTER)
                if not result:
                    error = True
                    msg = _("Incorrect email.")
                else:
                    # Re-establish the session under the newly set username.
                    self.user_manager.connect_user(result["username"], result["realname"], result["email"],
                                                   result["language"])
                    msg = _("Profile updated.")
            # NOTE(review): the username branch always returns here, skipping
            # the password/language/realname sections below -- confirm intended.
            return result, msg, error
        # Check if updating the password.
        # An empty password field means "leave unchanged"; 1-5 chars is too short.
        if self.app.allow_registration and len(data["passwd"]) in range(1, 6):
            error = True
            msg = _("Password too short.")
            return result, msg, error
        elif self.app.allow_registration and len(data["passwd"]) > 0 and data["passwd"] != data["passwd2"]:
            error = True
            msg = _("Passwords don't match !")
            return result, msg, error
        elif self.app.allow_registration and len(data["passwd"]) >= 6:
            # Passwords are stored as SHA-512 hex digests.
            oldpasswd_hash = hashlib.sha512(data["oldpasswd"].encode("utf-8")).hexdigest()
            passwd_hash = hashlib.sha512(data["passwd"].encode("utf-8")).hexdigest()
            match = {"username": self.user_manager.session_username()}
            # Only require the old password when the account already has one.
            if "password" in userdata:
                match["password"] = oldpasswd_hash
            result = self.database.users.find_one_and_update(match,
                                                             {"$set": {"password": passwd_hash}},
                                                             return_document=ReturnDocument.AFTER)
            if not result:
                error = True
                msg = _("Incorrect old password.")
                return result, msg, error
        # Check if updating language
        if data["language"] != userdata["language"]:
            # Fall back to English for unknown language codes.
            language = data["language"] if data["language"] in self.app.available_languages else "en"
            result = self.database.users.find_one_and_update({"username": self.user_manager.session_username()},
                                                             {"$set": {"language": language}},
                                                             return_document=ReturnDocument.AFTER)
            if not result:
                error = True
                msg = _("Incorrect username.")
                return result, msg, error
            else:
                self.user_manager.set_session_language(language)
        # Checks if updating name
        if len(data["realname"]) > 0:
            result = self.database.users.find_one_and_update({"username": self.user_manager.session_username()},
                                                             {"$set": {"realname": data["realname"]}},
                                                             return_document=ReturnDocument.AFTER)
            if not result:
                error = True
                msg = _("Incorrect username.")
                return result, msg, error
            else:
                self.user_manager.set_session_realname(data["realname"])
        else:
            error = True
            msg = _("Name is too short.")
            return result, msg, error
        msg = _("Profile updated.")
        return result, msg, error
|
python
|
def get_steam():
    """
    Returns a Steam object representing the current Steam installation on the
    users computer. If the user doesn't have Steam installed, returns None.
    """
    # Named inner function instead of a lambda assigned to a name (PEP 8
    # E731) so tracebacks carry a useful name: returns a Steam instance
    # rooted at `udd` if that userdata directory exists, None otherwise.
    def helper(udd):
        return Steam(udd) if os.path.exists(udd) else None

    # For both OS X and Linux, Steam stores its userdata in a consistent
    # location.
    plat = platform.system()
    if plat == 'Darwin':
        return helper(paths.default_osx_userdata_path())
    if plat == 'Linux':
        return helper(paths.default_linux_userdata_path())
    # Windows is a bit trickier. The userdata directory is stored in the Steam
    # installation directory, meaning that theoretically it could be anywhere.
    # Luckily, Valve stores the installation directory in the registry, so its
    # still possible for us to figure out automatically
    if plat == 'Windows':
        possible_dir = winutils.find_userdata_directory()
        # Unlike the others, `possible_dir` might be None (if something odd
        # happened with the registry)
        return helper(possible_dir) if possible_dir is not None else None
    # This should never be hit. Windows, OS X, and Linux should be the only
    # supported platforms.
    # TODO: Add logging here so that the user (developer) knows that something
    # odd happened.
    return None
|
python
|
def find_crs(op, element):
    """
    Traverses the supplied object looking for coordinate reference
    systems (crs). If multiple clashing reference systems are found
    it will throw an error.
    """
    # Collect every non-None crs declared anywhere in the element tree.
    found = element.traverse(lambda x: x.crs, [_Element])
    crss = [crs for crs in found if crs is not None]
    if not crss:
        return {}
    reference = crss[0]
    # All remaining reference systems must agree with the first one.
    for other in crss[1:]:
        if reference != other:
            raise ValueError('Cannot %s Elements in different '
                             'coordinate reference systems.'
                             % type(op).__name__)
    return {'crs': reference}
|
python
|
def _repack(h5file):
    """
    Repack archive to remove freespace.
    Returns
    -------
    file : h5py File or None
        If the input is a h5py.File then a h5py File instance of the
        repacked archive is returned. The input File instance will no longer
        be useable.
    """
    # Open (or adopt) the source archive; `opened` records whether we opened
    # it here from a path rather than receiving an already-open File.
    f1, opened = _openfile(h5file)
    filename1 = f1.filename
    filename2 = filename1 + '_repack_tmp'
    f2 = h5py.File(filename2, 'w')
    # Copying every top-level object into a fresh file drops the HDF5
    # free space left behind by deletions.
    for key in f1.keys():
        # print 'copying', key
        f1.copy(key, f2)
    f1.close()
    f2.close()
    # Swap the repacked file into place via a temporary rename so the
    # original is only removed after the new file holds its name.
    filename_tmp = filename1 + '_repack_rename_tmp'
    os.rename(filename1, filename_tmp)
    os.rename(filename2, filename1)
    if opened:
        # We opened the file from a path ourselves: nothing to hand back.
        f = None
    else:
        # NOTE(review): no explicit mode -- older h5py defaulted to 'a'
        # (read/write), newer versions default to 'r'; confirm which the
        # callers expect.
        f = h5py.File(filename1)
    os.remove(filename_tmp)
    return f
|
python
|
def GetDefaultContract(self):
        """
        Get the default contract.
        Returns:
            contract (Contract): if successful, the first contract returned
            by ``GetContracts()``.
        Raises:
            Exception: re-raised if no contract could be retrieved; the
            failure is also written to the error log.
        """
        try:
            contracts = self.GetContracts()
            return contracts[0]
        except Exception as e:
            # Log the failure for the operator, then propagate to the caller.
            logger.error("Could not find default contract: %s" % str(e))
            raise
|
python
|
def wait(self):
        """
        Block until a message matching one of the registered patterns
        arrives, and return it.
        """
        if not self._patterns:
            raise RuntimeError('Listener has nothing to capture')
        while True:
            msg = self._queue.get(block=True)
            if any(filtering.match_all(msg, pattern)
                   for pattern in self._patterns):
                return msg
|
java
|
/**
 * Sets the factory callback used to create new entries.
 *
 * @param factory the entry factory; must not be {@code null}
 * @throws NullPointerException if {@code factory} is {@code null}
 */
public final void setEntryFactory(Callback<CreateEntryParameter, Entry<?>> factory) {
        Objects.requireNonNull(factory);
        entryFactoryProperty().set(factory);
}
|
java
|
/**
 * SOAP endpoint: validates the object identified by {@code pid}, optionally
 * as of the given date/time, and converts the result to the generated
 * (WSDL) {@code Validation} type.
 *
 * @param pid          the object's persistent identifier
 * @param asOfDateTime optional timestamp string; parsed to a Date, or null
 *                     when absent/unparseable
 * @return the validation result converted to the generated type
 */
@Override
public org.fcrepo.server.types.gen.Validation validate(String pid,
        String asOfDateTime) {
        assertInitialized();
        try {
            MessageContext ctx = context.getMessageContext();
            return TypeUtility
                    .convertValidationToGenValidation(m_management.validate(ReadOnlyContext
                            .getSoapContext(ctx),
                            pid,
                            DateUtility
                                    .parseDateOrNull(asOfDateTime)));
        } catch (Throwable th) {
            // Boundary handler: log everything and convert to a SOAP fault.
            LOG.error("Error validating", th);
            throw CXFUtility.getFault(th);
        } finally {
            LOG.debug("end: validate");
        }
}
|
python
|
def external_system_identifiers(endpoint):
    """Populate the ``external_system_identifiers`` key.
    Also populates the ``new_record`` key through side effects.
    """
    # Builds a DoJSON rule: every subfield 'a' value becomes a SPIRES
    # identifier, and a numeric subfield 'd' (id of a superseding record)
    # sets ``self['new_record']`` as a side effect.
    @utils.flatten
    @utils.for_each_value
    def _external_system_identifiers(self, key, value):
        # 'd', when numeric, points to the record superseding this one.
        new_recid = maybe_int(value.get('d'))
        if new_recid:
            self['new_record'] = get_record_ref(new_recid, endpoint)
        return [
            {
                'schema': 'SPIRES',
                'value': ext_sys_id,
            } for ext_sys_id in force_list(value.get('a'))
        ]
    return _external_system_identifiers
|
java
|
/**
 * Builds the Ignite node configuration used by the ticket registry:
 * TCP discovery with a static IP finder, per-ticket cache configurations
 * from the catalog, optional SSL transport and a default data-storage
 * region.
 *
 * @param ticketCatalog catalog describing the ticket types to cache
 * @return the assembled {@code IgniteConfiguration}
 */
@Autowired
@RefreshScope
@Bean
public IgniteConfiguration igniteConfiguration(@Qualifier("ticketCatalog") final TicketCatalog ticketCatalog) {
        val ignite = casProperties.getTicket().getRegistry().getIgnite();
        val config = new IgniteConfiguration();
        // Discovery: TCP discovery configured from the CAS ignite properties.
        val spi = new TcpDiscoverySpi();
        if (!StringUtils.isEmpty(ignite.getLocalAddress())) {
            spi.setLocalAddress(ignite.getLocalAddress());
        }
        // -1 means "use the default port".
        if (ignite.getLocalPort() != -1) {
            spi.setLocalPort(ignite.getLocalPort());
        }
        spi.setJoinTimeout(Beans.newDuration(ignite.getJoinTimeout()).toMillis());
        spi.setAckTimeout(Beans.newDuration(ignite.getAckTimeout()).toMillis());
        spi.setNetworkTimeout(Beans.newDuration(ignite.getNetworkTimeout()).toMillis());
        spi.setSocketTimeout(Beans.newDuration(ignite.getSocketTimeout()).toMillis());
        spi.setThreadPriority(ignite.getThreadPriority());
        spi.setForceServerMode(ignite.isForceServerMode());
        // Static list of node addresses for peer discovery.
        val finder = new TcpDiscoveryVmIpFinder();
        finder.setAddresses(ignite.getIgniteAddress());
        spi.setIpFinder(finder);
        config.setDiscoverySpi(spi);
        // One cache configuration per ticket definition in the catalog.
        val cacheConfigurations = buildIgniteTicketCaches(ignite, ticketCatalog);
        config.setCacheConfiguration(cacheConfigurations.toArray(CacheConfiguration[]::new));
        config.setClientMode(ignite.isClientMode());
        // Optional TLS for node-to-node traffic.
        val factory = buildSecureTransportForIgniteConfiguration();
        if (factory != null) {
            config.setSslContextFactory(factory);
        }
        // Data storage: a single default region, optionally persistent.
        val dataStorageConfiguration = new DataStorageConfiguration();
        val dataRegionConfiguration = new DataRegionConfiguration();
        dataRegionConfiguration.setName("DefaultRegion");
        dataRegionConfiguration.setMaxSize(ignite.getDefaultRegionMaxSize());
        dataRegionConfiguration.setPersistenceEnabled(ignite.isDefaultPersistenceEnabled());
        dataStorageConfiguration.setDefaultDataRegionConfiguration(dataRegionConfiguration);
        dataStorageConfiguration.setSystemRegionMaxSize(ignite.getDefaultRegionMaxSize());
        config.setDataStorageConfiguration(dataStorageConfiguration);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("igniteConfiguration.cacheConfiguration=[{}]", (Object[]) config.getCacheConfiguration());
            LOGGER.debug("igniteConfiguration.getDiscoverySpi=[{}]", config.getDiscoverySpi());
            LOGGER.debug("igniteConfiguration.getSslContextFactory=[{}]", config.getSslContextFactory());
        }
        return config;
}
|
python
|
def remove_esc_chars(text_string):
    '''
    Removes any escape character within text_string and returns the new string as type str.
    Keyword argument:
    - text_string: string instance
    Exceptions raised:
    - InputError: occurs should a non-string argument be passed
    '''
    if text_string is None or text_string == "":
        return ""
    if not isinstance(text_string, str):
        raise InputError("string not passed as argument")
    # Drop every backslash escape (the backslash plus the word character it
    # escapes), then collapse whitespace runs to single spaces.
    stripped = re.sub(r'\\\w', "", text_string)
    return " ".join(stripped.split())
|
java
|
/**
 * Consumes a purchased item via the Nokia in-app billing service so that it
 * can be purchased again.
 *
 * @param itemInfo the purchase to consume
 * @throws IabException if the billing service reports a non-OK result code
 */
@Override
public void consume(@NotNull final Purchase itemInfo) throws IabException {
        Logger.i("NokiaStoreHelper.consume");
        final String token = itemInfo.getToken();
        final String productId = itemInfo.getSku();
        final String packageName = itemInfo.getPackageName();
        Logger.d("productId = ", productId);
        Logger.d("token = ", token);
        Logger.d("packageName = ", packageName);
        int response = 0;
        try {
            // In-app billing API version 3.
            response = mService.consumePurchase(3, packageName, productId, token);
        } catch (RemoteException e) {
            // Best-effort: a binder failure leaves `response` at 0 (RESULT_OK path).
            Logger.e(e, "RemoteException: ", e);
        }
        if (response == RESULT_OK) {
            Logger.d("Successfully consumed productId: ", productId);
        } else {
            // Fixed duplicated word in the original message ("consuming consuming").
            Logger.d("Error consuming productId ", productId, ". Code: ", response);
            throw new IabException(new NokiaResult(response, "Error consuming productId " + productId));
        }
        Logger.d("consume: done");
}
|
python
|
def _reprJSON(self):
"""Returns a JSON serializable represenation of a ``MzmlPrecursor``
class instance. Use :func:`maspy.core.MzmlPrecursor._fromJSON()` to
generate a new ``MzmlPrecursor`` instance from the return value.
:returns: a JSON serializable python object
"""
return {'__MzmlPrecursor__': (self.spectrumRef, self.activation,
self.isolationWindow, self.selectedIonList
)
}
|
python
|
def sendAZ(self, az):
        '''
        Sends AZ velocity.
        @param az: AZ velocity
        @type az: float
        '''
        # Use the lock as a context manager so it is released even if the
        # assignment raises; the original manual acquire()/release() pair
        # would leave the lock held on an exception.
        with self.lock:
            self.data.az = az
|
python
|
def clip_gradients(batch_result, model, max_grad_norm):
    """Clip the model's gradients to a given maximum global norm.

    Stores the resulting gradient norm in ``batch_result['grad_norm']``;
    when ``max_grad_norm`` is None, no clipping is done and 0.0 is stored.
    """
    if max_grad_norm is None:
        batch_result['grad_norm'] = 0.0
        return
    # Only trainable parameters take part in the norm computation.
    trainable = (p for p in model.parameters() if p.requires_grad)
    batch_result['grad_norm'] = torch.nn.utils.clip_grad_norm_(
        trainable, max_norm=max_grad_norm)
|
java
|
/**
 * Applies color-key transparency to raw cursor pixel data, dispatching on
 * the destination bit depth.
 *
 * @param source           raw source pixel bytes
 * @param dest             destination buffer: an {@code IntBuffer} for
 *                         32-bit targets, a {@code ShortBuffer} for 16-bit
 * @param targetDepth      destination depth in bits (16 or 32)
 * @param transparentPixel pixel value used as the transparency key
 * @throws UnsupportedOperationException for any other depth
 */
static void colorKeyCursor(byte[] source,
                           Buffer dest,
                           int targetDepth,
                           int transparentPixel) {
        if (targetDepth == 32) {
            colorKeyCursor32(source, (IntBuffer) dest, transparentPixel);
        } else if (targetDepth == 16) {
            colorKeyCursor16(source, (ShortBuffer) dest, transparentPixel);
        } else {
            throw new UnsupportedOperationException();
        }
}
|
python
|
def return_dat(self, chan, begsam, endsam):
        """Return the data as 2D numpy.ndarray.
        Parameters
        ----------
        chan : int or list
            index (indices) of the channels to read
        begsam : int
            index of the first sample
        endsam : int
            index of the last sample
        Returns
        -------
        numpy.ndarray
            A 2d matrix, with dimension chan X samples
        """
        # Read the requested sample window straight from the memory-mapped file.
        dat = _read_memmap(self.eeg_file, self.dshape, begsam, endsam,
                           self.data_type, self.data_order)
        # Scale each selected channel by its gain; the [chan, None] index
        # broadcasts the per-channel gain over the sample axis.
        return dat[chan, :] * self.gain[chan, None]
|
python
|
def get_state(self, minimal:bool=True):
        "Return the inner state of the `Callback`, `minimal` or not."
        # Always drop the bookkeeping lists themselves plus whatever the
        # instance lists in `exclude` (and in `not_min` for a minimal state).
        dropped = ['exclude', 'not_min'] + getattr(self, 'exclude', []).copy()
        if minimal:
            dropped += getattr(self, 'not_min', []).copy()
        return {key: value for key, value in self.__dict__.items()
                if key not in dropped}
|
python
|
def split_n(string, seps, reg=False):
    r"""Split strings into n-dimensional list.
    ::
        from torequests.utils import split_n
        ss = '''a b c d e f 1 2 3 4 5 6
        a b c d e f 1 2 3 4 5 6
        a b c d e f 1 2 3 4 5 6'''
        print(split_n(ss, ('\n', ' ', ' ')))
        # [[['a', 'b', 'c'], ['d', 'e', 'f'], ['1', '2', '3'], ['4', '5', '6']], [['a', 'b', 'c'], ['d', 'e', 'f'], ['1', '2', '3'], ['4', '5', '6']], [['a', 'b', 'c'], ['d', 'e', 'f'], ['1', '2', '3'], ['4', '5', '6']]]
        print(split_n(ss, ['\s+'], reg=1))
        # ['a', 'b', 'c', 'd', 'e', 'f', '1', '2', '3', '4', '5', '6', 'a', 'b', 'c', 'd', 'e', 'f', '1', '2', '3', '4', '5', '6', 'a', 'b', 'c', 'd', 'e', 'f', '1', '2', '3', '4', '5', '6']
    """
    deep = len(seps)
    if not deep:
        return string
    # Propagate `reg` into the recursion: previously the flag was dropped,
    # so regex separators were only honoured at the top nesting level.
    return [split_n(part, seps[1:], reg=reg)
            for part in _re_split_mixin(string, seps[0], reg=reg)]
|
python
|
def remove_comments(self, recursive=True):
        """Remove latex comments from document (modifies document in place).
        Parameters
        ----------
        recursive : bool
            Remove comments from all input LaTeX documents (default ``True``).
        """
        self.text = texutils.remove_comments(self.text)
        if recursive:
            # dict.items() works on both Python 2 and 3; the original
            # .iteritems() call is Python-2-only and raises AttributeError
            # on Python 3.
            for path, document in self._children.items():
                document.remove_comments(recursive=True)
|
python
|
def get_all_environment_option_pool(self, id_environment=None, option_id=None, option_type=None):
        """Get all Option VIP by Environment .
        :return: Dictionary with the following structure:
        ::
            {[{‘id’: < id >,
            option: {
                'id': <id>
                'type':<type>
                'name':<name> }
            environment: {
                'id':<id>
                .... all environment info }
            etc to option pools ...] }
        :raise EnvironmentVipNotFoundError: Environment Pool not registered.
        :raise DataBaseError: Can't connect to networkapi database.
        :raise XMLError: Failed to generate the XML response.
        """
        url = 'api/pools/environment_options/'
        # Build the query string from whichever filters were provided, always
        # in the order environment_id, option_id, option_type -- exactly the
        # URLs the original nested conditionals produced, without the eight
        # hand-written branches.
        params = []
        if id_environment:
            params.append('environment_id=' + str(id_environment))
        if option_id:
            params.append('option_id=' + str(option_id))
        if option_type:
            params.append('option_type=' + str(option_type))
        if params:
            url = url + '?' + '&'.join(params)
        return self.get(url)
|
python
|
def minimum_needs(self, input_layer):
        """Compute minimum needs given a layer and a column containing pop.
        :param input_layer: Vector layer assumed to contain
            population counts.
        :type input_layer: QgsVectorLayer
        :returns: A tuple containing True and the vector layer if
            post processor success. Or False and an error message
            if something went wrong.
        :rtype: tuple(bool,QgsVectorLayer or basestring)
        """
        # Create a new layer for output layer
        output_layer = self.prepare_new_layer(input_layer)
        # count each minimum needs for every features
        for needs in minimum_needs_post_processors:
            is_success, message = run_single_post_processor(
                output_layer, needs)
            # check if post processor not running successfully
            if not is_success:
                # Abort on the first failing post processor: log it and show
                # the error to the user, then signal failure to the caller.
                LOGGER.debug(message)
                display_critical_message_box(
                    title=self.tr('Error while running post processor'),
                    message=message)
                return False, None
        return True, output_layer
|
python
|
def add_user(self, **kwargs):
        """Add a User object, with properties specified in ``**kwargs``."""
        # Instantiate the configured user model, mark it active when the
        # model tracks an `active` flag, persist it, and hand it back.
        new_user = self.UserClass(**kwargs)
        if hasattr(new_user, 'active'):
            new_user.active = True
        self.db_adapter.add_object(new_user)
        return new_user
|
java
|
/**
 * Tries to match a tag-library namespace (namespace plus separator
 * characters) at the current source position.
 *
 * Iterates the tag libraries (index 1 first, then 0) and compares each
 * library's namespace characters case-insensitively against the source.
 * On a match the source position is left just past the namespace and the
 * matching {@code TagLib} is returned; on a mismatch the position is
 * restored and the next library is tried.
 *
 * @param data parser state holding the source code and the tag libraries
 * @return the matching tag library, or {@code null} if none matches
 */
public static TagLib nameSpace(Data data) {
        boolean hasTag = false;
        int start = data.srcCode.getPos();
        TagLib tagLib = null;
        // loop over NameSpaces
        for (int i = 1; i >= 0; i--) {
            for (int ii = 0; ii < data.tlibs[i].length; ii++) {
                tagLib = data.tlibs[i][ii];
                char[] c = tagLib.getNameSpaceAndSeperatorAsCharArray();
                // Loop over char of NameSpace and Sepearator
                // Assume a match until a character disagrees.
                hasTag = true;
                for (int y = 0; y < c.length; y++) {
                    if (!(data.srcCode.isValidIndex() && c[y] == data.srcCode.getCurrentLower())) {
                        // hasTag=true;
                        // } else {
                        // Mismatch: rewind to where we started and try the
                        // next tag library.
                        hasTag = false;
                        data.srcCode.setPos(start);
                        break;
                    }
                    data.srcCode.next();
                }
                if (hasTag) return tagLib;// break;
            }
            // if(hasTag) return tagLib;
        }
        return null;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.