language (stringclasses, 2 values) | func_code_string (stringlengths 63–466k)
|---|---|
java
|
/**
 * Records an analytics event built from the given category, action, label
 * and value, then dispatches it as a custom request.
 *
 * @param argCategory the event category
 * @param argAction   the event action
 * @param argLabel    the event label
 * @param argValue    the event value (may be null)
 */
public void trackEvent(String argCategory, String argAction, String argLabel, Integer argValue) {
    final AnalyticsRequestData request = new AnalyticsRequestData();
    request.setEventCategory(argCategory);
    request.setEventAction(argAction);
    request.setEventLabel(argLabel);
    request.setEventValue(argValue);
    makeCustomRequest(request);
}
|
python
|
def export_public_key(self):
    """Export the stored public key in PEM format.

    :return: bytes
    :raises ValueError: if no public key has been set
    """
    key = self.__public_key
    if key is None:
        raise ValueError('Unable to call this method. Public key must be set')
    return key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
|
java
|
/**
 * Removes the commerce tier price entry with the given UUID in the given
 * group, delegating to the persistence layer.
 *
 * @param uuid    the tier price entry's UUID
 * @param groupId the group id
 * @return the entry reported removed by the persistence layer
 * @throws com.liferay.commerce.price.list.exception.NoSuchTierPriceEntryException
 *         if no matching entry exists
 */
public static CommerceTierPriceEntry removeByUUID_G(String uuid,
    long groupId)
    throws com.liferay.commerce.price.list.exception.NoSuchTierPriceEntryException {
    return getPersistence().removeByUUID_G(uuid, groupId);
}
|
java
|
/**
 * Registers a spill reader with the merge priority queue, skipping readers
 * that contain no records.
 *
 * @param spillReader iterator over one spill's records
 * @throws IOException if loading the reader's first record fails
 */
public void addSpillIfNotEmpty(UnsafeSorterIterator spillReader) throws IOException {
    if (!spillReader.hasNext()) {
        // Empty readers never enter the queue: each queued reader makes the merged
        // iterator's hasNext() answer true at least once, so admitting an empty one
        // would surface a phantom record in the merged output.
        return;
    }
    // Position the reader on its first record before queueing it.
    spillReader.loadNext();
    priorityQueue.add(spillReader);
    numRecords += spillReader.getNumRecords();
}
|
python
|
def pingable_ws_connect(request=None, on_message_callback=None,
                        on_ping_callback=None):
    """Open a websocket connection whose client reacts to ping frames.

    Works like ``websocket_connect`` but constructs a
    ``PingableWSClientConnection`` so ``on_ping_callback`` fires on pings.

    :return: the connection's ``connect_future``
    """
    # Normalize the header object the same way AsyncHTTPClient.fetch does,
    # then wrap the request with the client defaults.
    request.headers = httputil.HTTPHeaders(request.headers)
    request = httpclient._RequestProxy(
        request, httpclient.HTTPRequest._DEFAULTS)
    kwargs = dict(request=request,
                  on_message_callback=on_message_callback,
                  on_ping_callback=on_ping_callback)
    if version_info[0] == 4:
        # Tornado 4.5.x still requires an explicit io_loop argument.
        kwargs['io_loop'] = ioloop.IOLoop.current()
    else:
        kwargs['max_message_size'] = getattr(
            websocket, '_default_max_message_size', 10 * 1024 * 1024)
    conn = PingableWSClientConnection(**kwargs)
    return conn.connect_future
|
python
|
def main(argv: Optional[Sequence[str]] = None) -> None:
    """Parse command-line arguments and convert the requested exam to PDF."""
    parser = ArgumentParser(description="Convert Jupyter Notebook exams to PDFs")
    parser.add_argument(
        "--exam",
        dest="exam_num",
        type=int,
        required=True,
        help="Exam number to convert",
    )
    parser.add_argument(
        "--time",
        type=str,
        required=True,
        help="Time of exam to convert",
    )
    parser.add_argument(
        "--date",
        type=str,
        required=True,
        help="The date the exam will take place",
    )
    args = parser.parse_args(argv)
    process(args.exam_num, args.time, args.date)
|
java
|
/**
 * Releases the backing cursor (if one is open), tells observers that all
 * previously exposed rows are gone, and re-arms the cursor observer.
 */
private void closeCursor() {
    if (mCursor != null) {
        // Capture the row count before closing so the removal range is accurate.
        final int removedCount = mCursor.getCount();
        mCursor.close();
        mCursor = null;
        notifyItemRangeRemoved(0, removedCount);
    }
    updateCursorObserver();
}
|
python
|
def asynchronous(method):
    """Decorator that defers a call onto the GLib main loop via GObject.idle_add.

    The wrapped function returns immediately; ``method`` runs later on the
    main loop. Any return value of ``method`` is discarded.
    """
    from functools import wraps

    @wraps(method)
    def _async(*args, **kwargs):
        def _invoke():
            # Wrap the call so idle_add always sees False: a truthy return
            # would make GLib re-schedule the callback forever, and idle_add
            # does not forward arbitrary keyword arguments to the callback.
            method(*args, **kwargs)
            return False
        GObject.idle_add(_invoke)
    return _async
|
java
|
/**
 * Resets this object's configuration: drops the property set and both
 * database directory references, then closes the project I/O handle.
 */
private void clear() {
    boolPropertySet = null;
    databaseOutputDir = null;
    databaseInputDir = null;
    // Close the project last so nothing above depends on it being open.
    IO.close(project);
}
|
python
|
def dead(self):
    """Return True when this entity no longer exists in the underlying model.

    An entity is dead when it carries no data, or when the model's state has
    no entry for its type/id.
    """
    if self.data is None:
        return True
    current = self.model.state.entity_data(self.entity_type, self.entity_id, -1)
    return current is None
|
python
|
def deploy(self, id_networkv4):
    """Deploy the IPv4 network on its equipments and mark it active.

    Sets column ``active = 1`` in table ``redeipv4``.

    :param id_networkv4: ID for NetworkIPv4
    :return: Equipments configuration output
    """
    uri = 'api/networkv4/%s/equipments/' % id_networkv4
    # The deploy endpoint takes no payload; POST an empty body.
    return super(ApiNetworkIPv4, self).post(uri, data={})
|
java
|
/**
 * Looks up a column by its id.
 *
 * @param columnId id of the column to find
 * @return the first column whose id matches, or {@code null} when the
 *         column list is absent or contains no match
 */
public TColumn getColumnById(long columnId) {
    if (columns == null) {
        return null;
    }
    for (TColumn candidate : columns) {
        if (candidate.getId() == columnId) {
            return candidate;
        }
    }
    return null;
}
|
java
|
/**
 * Visits a block node and normalizes the visitor's result back into a Block.
 *
 * @param block the block to visit; may be {@code null}
 * @return {@code null} for null input; otherwise the visited statement as a
 *         Block — wrapped when the visitor returned a bare statement, or an
 *         empty block carrying the original source info when it returned null
 */
protected Block visitBlock(Block block) {
    if (block == null) {
        return null;
    }
    final Statement visited = (Statement) block.accept(this);
    if (visited == null) {
        // The visitor dropped the contents; keep an empty block at the original location.
        return new Block(block.getSourceInfo());
    }
    return (visited instanceof Block) ? (Block) visited : new Block(visited);
}
|
java
|
/**
 * Fetches all resources of the given type, optionally restricted to one location.
 *
 * @param resourceType the type of resource to list
 * @param type         the element class the caller expects
 * @param locationUUID when non-null, only resources whose {@code vdcUUID}
 *                     matches are returned
 * @return the matching resources
 * @throws FlexiantException when the underlying service call fails
 */
protected <T> List<T> getResources(final ResourceType resourceType, final Class<T> type,
    @Nullable final String locationUUID) throws FlexiantException {
    final SearchFilter filter = new SearchFilter();
    if (locationUUID != null) {
        // Restrict results to the requested virtual data centre.
        final FilterCondition byLocation = new FilterCondition();
        byLocation.setCondition(Condition.IS_EQUAL_TO);
        byLocation.setField("vdcUUID");
        byLocation.getValue().add(locationUUID);
        filter.getFilterConditions().add(byLocation);
    }
    try {
        //noinspection unchecked
        return (List<T>) this.getService().listResources(filter, null, resourceType).getList();
    } catch (ExtilityException e) {
        throw new FlexiantException(
            String.format("Error while retrieving resources of resourceType %s.", resourceType),
            e);
    }
}
|
java
|
/**
 * Creates a new security domain, dispatching to the app-server-version
 * specific implementation (7.2 vs. everything else).
 *
 * @param securityDomainName name of the security domain to create
 * @param loginModules       login modules to install in the domain
 * @throws Exception if the server version cannot be read or creation fails
 */
public void createNewSecurityDomain(String securityDomainName, LoginModuleRequest... loginModules)
    throws Exception {
    // Do not close the controller client here; we're using our own.
    final CoreJBossASClient coreClient = new CoreJBossASClient(getModelControllerClient());
    final boolean isAS72 = coreClient.getAppServerVersion().startsWith("7.2");
    if (isAS72) {
        createNewSecurityDomain72(securityDomainName, loginModules);
    } else {
        createNewSecurityDomain71(securityDomainName, loginModules);
    }
}
|
python
|
def roles_required(*roles):
    """
    Decorator requiring the current user to hold every one of ``roles``.
    Aborts with HTTP 403: Forbidden when any required role is missing.
    Example::
        @app.route('/dashboard')
        @roles_required('ROLE_ADMIN', 'ROLE_EDITOR')
        def dashboard():
            return 'Dashboard'
    Here the user needs both `ROLE_ADMIN` and `ROLE_EDITOR` to view the page.
    :param roles: The required roles.
    """
    def wrapper(fn):
        @wraps(fn)
        def decorated_view(*args, **kwargs):
            # One permission per role; every one of them must hold.
            permissions = [Permission(RoleNeed(role)) for role in roles]
            if not all(permission.can() for permission in permissions):
                abort(HTTPStatus.FORBIDDEN)
            return fn(*args, **kwargs)
        return decorated_view
    return wrapper
|
java
|
/**
 * Selects the quorum member whose transaction history seeds the new epoch.
 *
 * <p>Implements step L.1.2: the chosen follower f satisfies, for every
 * member f' of the quorum, f'.ackedEpoch &lt; f.ackedEpoch, or the epochs
 * are equal and f'.zxid &lt;= f.zxid. The scan starts from this server's
 * own state, so the local server wins all ties.
 *
 * @return the server id of the peer owning the selected initial history
 * @throws IOException if the local acked epoch or latest zxid cannot be read
 */
String selectSyncHistoryOwner()
    throws IOException {
    // L.1.2 Select the history of a follower f to be the initial history
    // of the new epoch. Follower f is such that for every f' in the quorum,
    // f'.a < f.a or (f'.a == f.a && f'.zxid <= f.zxid).
    long ackEpoch = persistence.getAckEpoch();
    Zxid zxid = persistence.getLatestZxid();
    String peerId = this.serverId;
    Iterator<Map.Entry<String, PeerHandler>> iter;
    iter = this.quorumMap.entrySet().iterator();
    while (iter.hasNext()) {
        Map.Entry<String, PeerHandler> entry = iter.next();
        long fEpoch = entry.getValue().getLastAckedEpoch();
        Zxid fZxid = entry.getValue().getLastZxid();
        // A peer takes over only when strictly better: higher acked epoch, or
        // same epoch with a strictly larger zxid.
        if (fEpoch > ackEpoch ||
            (fEpoch == ackEpoch && fZxid.compareTo(zxid) > 0)) {
            ackEpoch = fEpoch;
            zxid = fZxid;
            peerId = entry.getKey();
        }
    }
    LOG.debug("{} has largest acknowledged epoch {} and longest history {}",
        peerId, ackEpoch, zxid);
    if (this.stateChangeCallback != null) {
        this.stateChangeCallback.initialHistoryOwner(peerId, ackEpoch, zxid);
    }
    return peerId;
}
|
python
|
def GetMacAddresses(self):
    """Collect the distinct MAC addresses of all interfaces.

    Missing and all-zero (placeholder) MACs are skipped. Returns a sorted
    list of human-readable address strings.
    """
    addresses = set()
    for iface in self.interfaces:
        mac = iface.mac_address
        # Skip absent MACs and the all-zero placeholder.
        if not mac or mac == b"\x00" * len(mac):
            continue
        addresses.add(Text(mac.human_readable_address))
    return sorted(addresses)
|
python
|
def _copy(self, filename, id_=-1, file_type=0):
    """Upload a file to the distribution server.

    Directories/bundle-style packages must be zipped prior to copying.

    :param filename: path of the flat file to upload
    :param id_: object ID to associate with the upload
    :param file_type: numeric file-type code sent in the headers
    :raises JSSUnsupportedFileType: if ``filename`` is a directory
    """
    if os.path.isdir(filename):
        raise JSSUnsupportedFileType(
            "Distribution Server type repos do not permit directory "
            "uploads. You are probably trying to upload a non-flat "
            "package. Please zip or create a flat package.")
    basefname = os.path.basename(filename)
    headers = {"DESTINATION": self.destination, "OBJECT_ID": str(id_),
               "FILE_TYPE": file_type, "FILE_NAME": basefname}
    # Use a context manager so the file handle is closed even when the POST
    # raises (the original opened it and never closed it).
    with open(filename, "rb") as resource:
        response = self.connection["jss"].session.post(
            url=self.connection["upload_url"], data=resource, headers=headers)
    if self.connection["jss"].verbose:
        print(response)
|
java
|
/**
 * {@inheritDoc}
 *
 * <p>Runs the standard pre-execution hooks on the request before
 * delegating to the request executor.
 */
@Override
public GetRouteResponsesResult getRouteResponses(GetRouteResponsesRequest request) {
    final GetRouteResponsesRequest prepared = beforeClientExecution(request);
    return executeGetRouteResponses(prepared);
}
|
java
|
/**
 * Logs a warning when a query uses a primitive property key that is not
 * backed by an index, since such queries can degrade on large databases.
 *
 * @param key the property key used in the query; ignored when null,
 *            indexed, or not an AbstractPrimitiveProperty
 */
private void assertPropertyIsIndexed(final PropertyKey key) {
    if (key != null && !key.isIndexed() && key instanceof AbstractPrimitiveProperty) {
        // Raw Class replaced with Class<?> to avoid the raw-type warning.
        final Class<?> declaringClass = key.getDeclaringClass();
        // Prefix the warning with the declaring class, when known.
        final String className = (declaringClass != null) ? declaringClass.getSimpleName() + "." : "";
        logger.warn("Non-indexed property key {}{} is used in query. This can lead to performance problems in large databases.", className, key.jsonName());
    }
}
|
python
|
def __GetMark(self):
    """Record the cursor position within the control's text.

    Stores in ``self.__mark`` the start of the current selection, clamped to
    the length of the stripped text; falls back to 0 when the widget queries
    fail.
    """
    try:
        self.__mark = min(wx.TextCtrl.GetSelection(self)[0],
                          len(wx.TextCtrl.GetValue(self).strip()))
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        self.__mark = 0
|
java
|
/**
 * Initializes the sink's state on (re)start: creates the per-subtask
 * buckets and, when recovering from a checkpoint, replays the persisted
 * bucket and max-part-counter state into them.
 *
 * @param context handle to the operator state store and restore status
 * @throws Exception if state access or bucket initialization fails
 */
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
    final OperatorStateStore stateStore = context.getOperatorStateStore();
    final int subtaskIndex = getRuntimeContext().getIndexOfThisSubtask();
    this.buckets = bucketsBuilder.createBuckets(subtaskIndex);
    bucketStates = stateStore.getListState(BUCKET_STATE_DESC);
    maxPartCountersState = stateStore.getUnionListState(MAX_PART_COUNTER_STATE_DESC);
    if (context.isRestored()) {
        buckets.initializeState(bucketStates, maxPartCountersState);
    }
}
|
python
|
def to_hdf5(self, filepath: str):
    """Persist the main dataframe to an HDF5 file.

    :param filepath: path where to save the file
    :type filepath: str
    :example: ``ds.to_hdf5_("./myfile.hdf5")``
    """
    try:
        self.start("Saving data to Hdf5...")
        dd.io.save(filepath, self.df)
        self.end("Finished saving Hdf5 data")
    except Exception as exc:
        # Report rather than propagate: this object logs errors via err().
        self.err(exc, "Can not convert data to Hdf5")
|
java
|
/**
 * Replaces the entry at {@code path} inside {@code zip} with the given
 * bytes, writing the result to {@code destZip}.
 *
 * @param zip     source ZIP file
 * @param path    path of the entry to replace
 * @param bytes   new entry contents
 * @param destZip destination ZIP file
 * @return the result of the delegated replacement (see the ZipEntrySource overload)
 */
public static boolean replaceEntry(File zip, String path, byte[] bytes, File destZip) {
    return replaceEntry(zip, new ByteSource(path, bytes), destZip);
}
|
python
|
def section_path_lengths(neurites, neurite_type=NeuriteType.all):
    '''Path lengths of a collection of neurites '''
    # Pre-compute every section length once, so the per-node path sums below
    # never re-measure sections shared between paths.
    neurite_filter = is_type(neurite_type)
    lengths = {sec: sec.length
               for sec in iter_sections(neurites, neurite_filter=neurite_filter)}

    def _path_length(node):
        '''Sum the cached lengths over the node and its upstream sections'''
        return sum(lengths[n] for n in node.iupstream())

    return map_sections(_path_length, neurites, neurite_type=neurite_type)
|
java
|
/**
 * Builds the n-by-n identity matrix.
 *
 * @param n matrix dimension
 * @return a matrix with ones on the diagonal and zeros elsewhere
 */
public static DoubleMatrix identity(int n)
{
    return getInstance(n, n, (row, col) -> row == col ? 1 : 0);
}
|
java
|
/**
 * Applies global NetScaler configuration (nsconfig) settings.
 *
 * <p>Performs a field-for-field copy of every configurable value from
 * {@code resource} into a fresh payload object and issues the update
 * through the NITRO service.
 *
 * @param client   the NITRO service session used to send the update
 * @param resource the desired configuration values
 * @return the API response for the update call
 * @throws Exception if the NITRO request fails
 */
public static base_response update(nitro_service client, nsconfig resource) throws Exception {
    nsconfig updateresource = new nsconfig();
    updateresource.ipaddress = resource.ipaddress;
    updateresource.netmask = resource.netmask;
    updateresource.nsvlan = resource.nsvlan;
    updateresource.ifnum = resource.ifnum;
    updateresource.tagged = resource.tagged;
    updateresource.httpport = resource.httpport;
    updateresource.maxconn = resource.maxconn;
    updateresource.maxreq = resource.maxreq;
    updateresource.cip = resource.cip;
    updateresource.cipheader = resource.cipheader;
    updateresource.cookieversion = resource.cookieversion;
    updateresource.securecookie = resource.securecookie;
    updateresource.pmtumin = resource.pmtumin;
    updateresource.pmtutimeout = resource.pmtutimeout;
    updateresource.ftpportrange = resource.ftpportrange;
    updateresource.crportrange = resource.crportrange;
    updateresource.timezone = resource.timezone;
    updateresource.grantquotamaxclient = resource.grantquotamaxclient;
    updateresource.exclusivequotamaxclient = resource.exclusivequotamaxclient;
    updateresource.grantquotaspillover = resource.grantquotaspillover;
    updateresource.exclusivequotaspillover = resource.exclusivequotaspillover;
    updateresource.nwfwmode = resource.nwfwmode;
    return updateresource.update_resource(client);
}
|
java
|
/**
 * Transaction-synchronization callback: persists the step metrics on a
 * successful commit and rolls them back on any other outcome.
 *
 * @param status the javax.transaction completion status code
 */
@Override
public void afterCompletion(int status) {
    logger.log(Level.FINE, "The status of the transaction commit is: " + status);
    final boolean committed = (status == Status.STATUS_COMMITTED);
    if (committed) {
        // Save the metrics object after a successful commit.
        runtimeStepExecution.setCommittedMetrics();
    } else {
        // Any non-committed outcome (e.g. STATUS_ROLLEDBACK == 4) undoes the metrics.
        runtimeStepExecution.rollBackMetrics();
    }
}
|
python
|
def variants(ctx, variant_id, chromosome, end_chromosome, start, end, variant_type,
             sv_type):
    """Display variants in the database.

    Precedence: an explicit ``sv_type`` forces the SV search path; a region
    query requires chromosome, start AND end; a ``variant_id`` lookup
    short-circuits everything else.
    """
    # An explicit SV type implies the SV search path.
    if sv_type:
        variant_type = 'sv'
    adapter = ctx.obj['adapter']
    # Region queries need all three of chromosome/start/end.
    if (start or end):
        if not (chromosome and start and end):
            LOG.warning("Regions must be specified with chromosome, start and end")
            return
    # Direct id lookup short-circuits any region search.
    if variant_id:
        variant = adapter.get_variant({'_id':variant_id})
        if variant:
            click.echo(variant)
        else:
            LOG.info("Variant {0} does not exist in database".format(variant_id))
        return
    if variant_type == 'snv':
        result = adapter.get_variants(
            chromosome=chromosome,
            start=start,
            end=end
        )
    else:
        LOG.info("Search for svs")
        result = adapter.get_sv_variants(
            chromosome=chromosome,
            end_chromosome=end_chromosome,
            sv_type=sv_type,
            pos=start,
            end=end
        )
    # Stream results, counting as we print.
    i = 0
    for variant in result:
        i += 1
        pp(variant)
    LOG.info("Number of variants found in database: %s", i)
|
python
|
def get_unbound_arg_names(arg_names, arg_binding_keys):
    """Determines which args have no arg binding keys.
    Args:
      arg_names: a sequence of the names of possibly bound args
      arg_binding_keys: a sequence of ArgBindingKey each of whose arg names is
          in arg_names
    Returns:
      a sequence of arg names that is a (possibly empty, possibly non-proper)
      subset of arg_names
    """
    # Set membership makes this O(len(arg_names) + len(arg_binding_keys))
    # instead of the quadratic list scan.
    bound_arg_names = {abk._arg_name for abk in arg_binding_keys}
    return [arg_name for arg_name in arg_names
            if arg_name not in bound_arg_names]
|
python
|
def _split_ns_by_scatter(cls,
                         shard_count,
                         namespace,
                         raw_entity_kind,
                         app):
    """Split a namespace by scatter index into key_range.KeyRange.
    TODO(user): Power this with key_range.KeyRange.compute_split_points.
    Args:
      shard_count: number of shards.
      namespace: namespace name to split. str.
      raw_entity_kind: low level datastore API entity kind.
      app: app id in str.
    Returns:
      A list of key_range.KeyRange objects. If there are not enough entities to
      splits into requested shards, the returned list will contain KeyRanges
      ordered lexicographically with any Nones appearing at the end.
    """
    if shard_count == 1:
        # With one shard we don't need to calculate any split points at all.
        return [key_range.KeyRange(namespace=namespace, _app=app)]
    # Sample keys ordered by the reserved __scatter__ property — presumably
    # only a random subset of entities carry it (datastore convention; confirm).
    ds_query = datastore.Query(kind=raw_entity_kind,
                               namespace=namespace,
                               _app=app,
                               keys_only=True)
    ds_query.Order("__scatter__")
    # Oversample so the final split points are more evenly spaced.
    oversampling_factor = 32
    random_keys = ds_query.Get(shard_count * oversampling_factor)
    if not random_keys:
        # There are no entities with scatter property. We have no idea
        # how to split.
        return ([key_range.KeyRange(namespace=namespace, _app=app)] +
                [None] * (shard_count - 1))
    random_keys.sort()
    if len(random_keys) >= shard_count:
        # We've got a lot of scatter values. Sample them down.
        random_keys = cls._choose_split_points(random_keys, shard_count)
    # Build ranges between consecutive split points, plus an open-ended range
    # at each extreme.
    k_ranges = []
    k_ranges.append(key_range.KeyRange(
        key_start=None,
        key_end=random_keys[0],
        direction=key_range.KeyRange.ASC,
        include_start=False,
        include_end=False,
        namespace=namespace,
        _app=app))
    for i in range(0, len(random_keys) - 1):
        k_ranges.append(key_range.KeyRange(
            key_start=random_keys[i],
            key_end=random_keys[i+1],
            direction=key_range.KeyRange.ASC,
            include_start=True,
            include_end=False,
            namespace=namespace,
            _app=app))
    k_ranges.append(key_range.KeyRange(
        key_start=random_keys[-1],
        key_end=None,
        direction=key_range.KeyRange.ASC,
        include_start=True,
        include_end=False,
        namespace=namespace,
        _app=app))
    if len(k_ranges) < shard_count:
        # We need to have as many shards as it was requested. Add some Nones.
        k_ranges += [None] * (shard_count - len(k_ranges))
    return k_ranges
|
python
|
def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    # += extends the underlying list in place; then notify the listener.
    self._values += other._values
    self._message_listener.Modified()
|
python
|
def log(self, facility, level, text, pid=False):
    """Send a syslog message to all registered hosts.

    The PRI part is built from ``facility`` and ``level``; the HEADER is
    derived automatically (current time and hostname); the MSG carries the
    program name and ``text``. This is the simplest way to emit a complete
    syslog packet::

        logger = syslog.Syslog()
        logger.add_host("localhost")
        logger.log(Facility.USER, Level.INFO, "Hello World")

    When ``pid`` is True, the process ID is prepended to ``text``, enclosed
    in square brackets and followed by a colon.
    """
    pri = PRI(facility, level)
    header = HEADER()
    msg = MSG(content=text, pid=os.getpid()) if pid else MSG(content=text)
    self._send_packet_to_hosts(Packet(pri, header, msg))
|
java
|
/**
 * Sets the XocBase attribute, remembering the previous value and firing an
 * EMF SET notification to registered adapters when required.
 *
 * @param newXocBase the new value (may be null)
 */
public void setXocBase(Integer newXocBase) {
    Integer oldXocBase = xocBase;
    xocBase = newXocBase;
    // Only build and dispatch the notification when adapters are listening.
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.CDD__XOC_BASE, oldXocBase, xocBase));
}
|
java
|
/**
 * Reads the page with the given id, serving it from the in-memory cache
 * when possible and falling back to the backing file otherwise.
 *
 * @param pageID id of the page to read
 * @return the page (cached or freshly loaded)
 */
@Override
public synchronized P readPage(int pageID) {
    countRead();
    P page = map.get(pageID);
    if(page == null) {
        // Cache miss: load from the backing file and remember the page.
        if(LOG.isDebuggingFine()) {
            LOG.debugFine("Read from backing: " + pageID);
        }
        page = file.readPage(pageID);
        map.put(pageID, page);
    }
    else if(LOG.isDebuggingFine()) {
        LOG.debugFine("Read from cache: " + pageID);
    }
    return page;
}
|
java
|
/**
 * Read-timeout retry policy: retry once on the same node when enough
 * replicas acknowledged but the data response was missing; otherwise
 * rethrow to the caller.
 */
@Override
public RetryDecision onReadTimeout(
    @NonNull Request request,
    @NonNull ConsistencyLevel cl,
    int blockFor,
    int received,
    boolean dataPresent,
    int retryCount) {
    final boolean retry = retryCount == 0 && received >= blockFor && !dataPresent;
    if (retry && LOG.isTraceEnabled()) {
        LOG.trace(RETRYING_ON_READ_TIMEOUT, logPrefix, cl, blockFor, received, false, retryCount);
    }
    return retry ? RetryDecision.RETRY_SAME : RetryDecision.RETHROW;
}
|
python
|
def upload_create(self, data, filename=None, token=None, **kwargs):
    """Create an upload (attachment) in Zendesk.

    https://developer.zendesk.com/rest_api/docs/core/attachments#upload-files

    :param data: raw file content to POST
    :param filename: optional ``filename`` query parameter
    :param token: optional upload token to append to an existing upload
    :param kwargs: extra arguments forwarded to ``self.call``; a ``query``
        dict, if present, is merged into the request's query string first so
        the explicit filename/token arguments take precedence
    """
    api_path = "/api/v2/uploads.json"
    api_query = {}
    # pop() replaces the old test-then-delete pattern; a missing or falsy
    # "query" entry is simply skipped (the original crashed on query=None).
    extra_query = kwargs.pop("query", None)
    if extra_query:
        api_query.update(extra_query)
    if filename:
        api_query["filename"] = filename
    if token:
        api_query["token"] = token
    return self.call(api_path, query=api_query, method="POST", data=data, **kwargs)
|
python
|
def notify_event_nowait(self, conn_string, name, event):
    """Fire-and-forget version of :meth:`notify_event`.

    The notification is handed to the background event loop and this call
    returns immediately. Because that bypasses back-pressure and defers the
    callbacks to a background task (making their timing hard to reason
    about), prefer :meth:`notify_event` when you can await it — that method
    runs all callbacks synchronously before returning.

    Args:
        conn_string (str): The connection string for the device that the
            event is associated with.
        name (str): The name of the event. Must be in SUPPORTED_EVENTS.
        event (object): The event object. The type of this object will
            depend on what is being notified.
    """
    loop = self._loop
    if loop.stopping:
        # Too late to schedule work; drop the notification with a debug trace.
        self._logger.debug("Ignoring notification %s from %s because loop is shutting down", name, conn_string)
        return
    loop.log_coroutine(self._notify_event_internal, conn_string, name, event)
|
python
|
def spawn(self, command):
    """ Spawns a new process and adds it to the pool """
    # process_name
    # output
    # time before starting (wait for port?)
    # start_new_session=True : avoid sending parent signals to child
    # Child environment: inherit ours, mark the subprocess, apply overrides.
    env = dict(os.environ)
    env["MRQ_IS_SUBPROCESS"] = "1"
    env.update(self.extra_env or {})
    # Extract env variables from shell commands.
    # Leading KEY=VALUE tokens are moved into the environment and removed
    # from the argv; the first non-assignment token ends the scan.
    parts = shlex.split(command)
    for p in list(parts):
        if "=" in p:
            env[p.split("=")[0]] = p[len(p.split("=")[0]) + 1:]
            parts.pop(0)
        else:
            break
    p = subprocess.Popen(parts, shell=False, close_fds=True, env=env, cwd=os.getcwd())
    # Track the child both as a Popen handle and a psutil view.
    self.processes.append({
        "subprocess": p,
        "pid": p.pid,
        "command": command,
        "psutil": psutil.Process(pid=p.pid)
    })
|
python
|
def truncate(value: Decimal, n_digits: int) -> Decimal:
    """Truncate ``value`` (toward zero) to ``n_digits`` decimal places."""
    factor = 10 ** n_digits
    return Decimal(math.trunc(value * factor)) / factor
|
python
|
def gen_part_from_line(lines: Iterable[str],
                       part_index: int,
                       splitter: str = None) -> Generator[str, None, None]:
    """
    Splits lines with ``splitter`` and yields a specified part by index.
    Args:
        lines: iterable of strings
        part_index: index of part to yield
        splitter: string to split the lines on (None = any whitespace)
    Yields:
        the specified part for each line
    """
    yield from (line.split(splitter)[part_index] for line in lines)
|
java
|
/**
 * Returns the gradient colour at position {@code p}.
 *
 * <p>Positions at or below 0 clamp to the first step's colour; positions
 * above 1 clamp to the last. Otherwise the two steps bracketing {@code p}
 * are linearly interpolated per channel (a, r, g, b).
 *
 * @param p position along the gradient, nominally in [0,1]
 * @return the interpolated colour
 */
public Color getColorAt(float p) {
    if (p <= 0) {
        return ((Step) steps.get(0)).col;
    }
    if (p > 1) {
        return ((Step) steps.get(steps.size()-1)).col;
    }
    for (int i=1;i<steps.size();i++) {
        Step prev = ((Step) steps.get(i-1));
        Step current = ((Step) steps.get(i));
        if (p <= current.location) {
            // Linear blend between prev and current, weighted by how far p
            // sits between their locations.
            float dis = current.location - prev.location;
            p -= prev.location;
            float v = p / dis;
            Color c = new Color(1,1,1,1);
            c.a = (prev.col.a * (1 - v)) + (current.col.a * (v));
            c.r = (prev.col.r * (1 - v)) + (current.col.r * (v));
            c.g = (prev.col.g * (1 - v)) + (current.col.g * (v));
            c.b = (prev.col.b * (1 - v)) + (current.col.b * (v));
            return c;
        }
    }
    // shouldn't ever happen: any p in (0,1] should fall inside some interval.
    return Color.black;
}
|
python
|
def mousePressEvent(self, event):
    """Create a marker or start selection
    Parameters
    ----------
    event : instance of QtCore.QEvent
        it contains the position that was clicked.
    """
    if not self.scene:
        return
    # A click while an event/annotation is selected only deselects it and
    # clears the highlight state; no new selection is started.
    if self.event_sel or self.current_event:
        self.parent.notes.idx_eventtype.setCurrentText(self.current_etype)
        self.current_etype = None
        self.current_event = None
        self.deselect = True
        self.event_sel = None
        self.current_event_row = None
        self.scene.removeItem(self.highlight)
        self.highlight = None
        self.parent.statusBar().showMessage('')
        return
    self.ready = False
    self.event_sel = None
    # Map the click into scene coordinates and snap to the nearest channel row.
    xy_scene = self.mapToScene(event.pos())
    chan_idx = argmin(abs(asarray(self.chan_pos) - xy_scene.y()))
    self.sel_chan = chan_idx
    self.sel_xy = (xy_scene.x(), xy_scene.y())
    chk_marker = self.parent.notes.action['new_bookmark'].isChecked()
    chk_event = self.parent.notes.action['new_event'].isChecked()
    if not (chk_marker or chk_event):
        # Plain click: just report the channel under the cursor to the spectrum pane.
        channame = self.chan[self.sel_chan] + ' in selected window'
        self.parent.spectrum.show_channame(channame)
    # Make annotations clickable
    else:
        for annot in self.idx_annot:
            if annot.contains(xy_scene):
                self.highlight_event(annot)
                if chk_event:
                    # Sync the annotation list selection with the clicked marker.
                    row = self.parent.notes.find_row(annot.marker.x(),
                                                     annot.marker.x() + annot.marker.width())
                    self.parent.notes.idx_annot_list.setCurrentCell(row, 0)
                break
    self.ready = True
|
python
|
def _get_validation_labels(val_path):
    """Map validation image names to their labels.

    Args:
      val_path: path to TAR file containing validation images. It is used to
        retrieve the name of pictures and associate them to labels.
    Returns:
      dict, mapping from image name (str) to label (str).
    """
    labels_path = tfds.core.get_tfds_path(_VALIDATION_LABELS_FNAME)
    with tf.io.gfile.GFile(labels_path) as labels_f:
        labels = labels_f.read().strip().split('\n')
    with tf.io.gfile.GFile(val_path, 'rb') as tar_f_obj:
        # Sorted names pair positionally with the label lines.
        image_names = sorted(tarfile.open(mode='r:', fileobj=tar_f_obj).getnames())
    return dict(zip(image_names, labels))
|
python
|
def clear(self):
    """Remove every image artist from every axis, leaving the grid fresh."""
    for axis in self.flat_grid:
        for image in axis.findobj(AxesImage):
            image.remove()
|
python
|
def validate_mutations(self, mutations):
    '''Check each Mutation's wildtype against this PDB's residue map.

    The parameter is a list of Mutation objects (refactored to use the
    SimpleMutation class). No return value; raises a PDBValidationException
    when any mutation's WildTypeAA does not match the residue type at
    (Chain, ResidueID) in the PDB file.
    '''
    # Chain, ResidueID, WildTypeAA, MutantAA
    resID2AA = self.get_residue_id_to_type_map()
    badmutations = [m for m in mutations
                    if resID2AA.get(PDB.ChainResidueID2String(m.Chain, m.ResidueID), "") != m.WildTypeAA]
    if badmutations:
        raise PDBValidationException("The mutation(s) %s could not be matched against the PDB %s." % (", ".join(map(str, badmutations)), self.pdb_id))
|
python
|
def layout(self, value):
    'Overloaded layout function to fix component names as needed'
    # Rewrite component ids inside the incoming layout when id adjustment
    # is enabled, then defer to Dash's own layout property setter.
    if self._adjust_id:
        self._fix_component_id(value)
    # Call the base-class property's setter explicitly, since this override
    # shadows the property on the subclass.
    return Dash.layout.fset(self, value)
|
python
|
def get_account(self, username):
    """Look up a user account by username.

    Returns the matching model instance, or None when no user matches.
    """
    lookup = self._filter_user_by(username)
    try:
        return self.model.objects.get(**lookup)
    except self.model.DoesNotExist:
        return None
|
java
|
/**
 * Reports whether the given EMF feature currently deviates from its
 * default, handling this type's own features and deferring everything else
 * to the superclass.
 *
 * @param featureID the feature to test
 * @return whether the feature is considered set
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case BpsimPackage.USER_DISTRIBUTION_DATA_POINT_TYPE__PARAMETER_VALUE_GROUP:
            return parameterValueGroup != null && !parameterValueGroup.isEmpty();
        case BpsimPackage.USER_DISTRIBUTION_DATA_POINT_TYPE__PARAMETER_VALUE:
            return getParameterValue() != null;
        case BpsimPackage.USER_DISTRIBUTION_DATA_POINT_TYPE__PROBABILITY:
            return isSetProbability();
    }
    return super.eIsSet(featureID);
}
|
python
|
def ctc_label(p):
    """Collapse a CTC path: keep values that are non-zero and differ from
    their immediate predecessor.
    Parameters
    ----------
    p: list of int
    Returns
    -------
    list of int
    """
    ret = []
    prev = 0  # virtual leading blank, mirroring the classic [0] + p trick
    for cur in p:
        if cur != 0 and cur != prev:
            ret.append(cur)
        prev = cur
    return ret
|
java
|
/**
 * Assigns the next ticket number to the message, tracks it as outstanding,
 * and writes it to the session.
 *
 * @param message the request to send
 * @return the ticket number assigned to the message
 */
public synchronized long sendMessage(AbstractRequestMessage message) {
    log.debug("Sending {} to {}", message, this);
    message.setTicketNumber(this.nextTicketNumber.getAndIncrement());
    // Record the request before writing so the response can be correlated.
    final long ticket = message.getTicketNumber();
    this.outstandingRequests.put(Long.valueOf(ticket), message);
    this.session.write(message);
    return ticket;
}
|
python
|
def compute_qpi(self):
    """Build a model QPImage from the current fit parameters.

    Returns
    -------
    qpi: qpimage.QPImage
        Modeled phase data
    Notes
    -----
    The model image might deviate from the fitted image
    because of interpolation during the fitting process.
    """
    # Assemble the sphere-model arguments from the current parameter set.
    kwargs = dict(self.model_kwargs,
                  radius=self.radius,
                  sphere_index=self.sphere_index,
                  center=[self.posx_offset, self.posy_offset])
    qpi = self.sphere_method(**kwargs)
    # Apply the fitted phase offset as a constant background.
    bg_data = np.ones(qpi.shape) * -self.pha_offset
    qpi.set_bg_data(bg_data=bg_data, which_data="phase")
    return qpi
|
python
|
def get_files(self):
    """
    :calls: `GET /repos/:owner/:repo/pulls/:number/files <http://developer.github.com/v3/pulls>`_
    :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.File.File`
    """
    # Wrap the /files endpoint in a PaginatedList; requests go through
    # self._requester. No extra query parameters are passed (the final None).
    return github.PaginatedList.PaginatedList(
        github.File.File,
        self._requester,
        self.url + "/files",
        None
    )
|
java
|
/**
 * Resolves the annotation definition for the given resource location,
 * caching the result for subsequent calls.
 *
 * @param resourceLocation identifier of the annotation resource
 * @return the resolved (possibly cached) definition
 * @throws AnnotationDefinitionResolutionException if resolution fails
 */
@Override
public AnnotationDefinition resolveAnnotationDefinition(
    String resourceLocation)
    throws AnnotationDefinitionResolutionException {
    // NOTE(review): synchronizing on a String parameter only serializes
    // callers that pass the very same (e.g. interned) instance, and may
    // contend with unrelated code locking the same literal — consider a
    // dedicated lock object or ConcurrentHashMap.computeIfAbsent instead.
    synchronized (resourceLocation) {
        // return if cached
        if (annotations.containsKey(resourceLocation)) {
            return annotations.get(resourceLocation);
        }
        // resolve and parse
        AnnotationDefinition annotationDefinition =
            cacheableAnnotationDefinitionService
                .resolveAnnotationDefinition(resourceLocation);
        // cache annotation definition and return
        annotations.put(resourceLocation, annotationDefinition);
        return annotationDefinition;
    }
}
|
python
|
def high_frequency_cutoff_from_config(cp):
    """Gets the high frequency cutoff from the given config file.

    Looks for ``high-frequency-cutoff`` in the ``[model]`` section and
    casts it to float; returns ``None`` when the option is absent.

    Parameters
    ----------
    cp : WorkflowConfigParser
        Config file parser to read.

    Returns
    -------
    float or None :
        The high frequency cutoff.
    """
    option = 'high-frequency-cutoff'
    if not cp.has_option('model', option):
        return None
    return float(cp.get('model', option))
|
java
|
/**
 * Installs the security controller for this context.
 *
 * <p>Fails when the context is sealed, the argument is null, or a
 * controller (local or global) is already installed — an existing
 * controller can never be replaced.
 *
 * @param controller the controller to install; must be non-null
 */
public final void setSecurityController(SecurityController controller)
{
    if (sealed) {
        onSealedMutation();
    }
    if (controller == null) {
        throw new IllegalArgumentException();
    }
    // An already-installed controller, local or global, is never swapped out.
    if (securityController != null) {
        throw new SecurityException("Can not overwrite existing SecurityController object");
    }
    if (SecurityController.hasGlobal()) {
        throw new SecurityException("Can not overwrite existing global SecurityController object");
    }
    securityController = controller;
}
|
java
|
/**
 * Wraps an input stream for decryption.
 *
 * <p>When no IV is supplied and the cipher requires one, the IV is read
 * from the head of the stream (prepend-IV mode) or the call fails.
 *
 * @param is the ciphertext stream; in prepend-IV mode its first
 *           {@code ivLength} bytes are consumed as the IV
 * @param iv the IV to use, or {@code null} to read it from the stream
 * @return a stream yielding the decrypted plaintext
 * @throws GeneralSecurityException if cipher initialization fails
 * @throws IOException if the stream ends before a full IV could be read
 * @throws IllegalStateException if no IV is available and none can be read
 */
public CipherInputStream wrapInputStream(InputStream is, byte[] iv) throws GeneralSecurityException, IOException {
    Cipher cipher = getCipher(true);
    if (iv == null && ivLength > 0) {
        if (!prependIV) {
            throw new IllegalStateException("Could not obtain IV");
        }
        // Read exactly ivLength bytes: a single read() may legally return
        // fewer, which previously left a partially-filled IV.
        iv = new byte[ivLength];
        int offset = 0;
        while (offset < ivLength) {
            int n = is.read(iv, offset, ivLength - offset);
            if (n < 0) {
                throw new IOException("Stream ended before the IV could be fully read");
            }
            offset += n;
        }
    }
    if (iv != null) {
        cipher.init(Cipher.DECRYPT_MODE, getKey(), getAlgorithmParameterSpec(iv));
    } else {
        cipher.init(Cipher.DECRYPT_MODE, getKey());
    }
    return new CipherInputStream(is, cipher);
}
|
java
|
/**
 * Builds the Reset-Password resource URL for customer accounts.
 *
 * @return the tenant-pod URL for the reset-password endpoint
 */
public static MozuUrl resetPasswordUrl()
{
    final String resourceUrl = new UrlFormatter("/api/commerce/customer/accounts/Reset-Password").getResourceUrl();
    return new MozuUrl(resourceUrl, MozuUrl.UrlLocation.TENANT_POD);
}
|
java
|
/**
 * Builds a request that opens an upload session for a new version of an
 * existing file.
 *
 * @param is       stream supplying the new file content
 * @param fileName name of the file being uploaded
 * @param fileSize total size of the upload, in bytes
 * @param fileId   id of the existing file receiving the new version
 * @return the configured upload-session request
 * @throws FileNotFoundException declared by the request's construction path
 */
public BoxRequestsFile.CreateNewVersionUploadSession getCreateUploadVersionSessionRequest(InputStream is, String fileName, long fileSize, String fileId)
    throws FileNotFoundException {
    return new BoxRequestsFile.CreateNewVersionUploadSession(is, fileName, fileSize, getUploadSessionForNewFileVersionUrl(fileId), mSession);
}
|
java
|
/**
 * Sends a gpsd ?WATCH command and returns the parsed response.
 *
 * @param enable   whether watching should be switched on
 * @param dumpData whether reports should be dumped as JSON
 * @param device   optional device path to watch; omitted when null
 * @return the WATCH response object
 * @throws IOException   on transport failure
 * @throws JSONException if the command payload cannot be built
 */
public WatchObject watch(final boolean enable, final boolean dumpData, final String device) throws IOException, JSONException {
    final JSONObject payload = new JSONObject();
    payload.put("class", "WATCH");
    payload.put("enable", enable);
    payload.put("json", dumpData);
    if (device != null) {
        payload.put("device", device);
    }
    return this.syncCommand("?WATCH=" + payload.toString(), WatchObject.class);
}
|
java
|
/**
 * Binds this agent to the application, replacing any previous binding.
 *
 * <p>Unregisters lifecycle callbacks from a previously initialized
 * application before re-registering on the new one, so callbacks are never
 * delivered from two applications at once.
 *
 * @param app          the hosting application
 * @param initActivity the activity current at initialization time
 */
public void init(Application app, Activity initActivity) {
    HMSAgentLog.d("init");
    // Detach from any application we were previously bound to.
    if (application != null) {
        application.unregisterActivityLifecycleCallbacks(this);
    }
    application = app;
    setCurActivity(initActivity);
    app.registerActivityLifecycleCallbacks(this);
}
|
java
|
/**
 * Shows the sign-up terms &amp; privacy dialog.
 *
 * <p>Without a callback the dialog is informational with a single OK
 * button; with one, it becomes a non-cancelable accept/cancel prompt whose
 * positive button invokes {@code acceptCallback}.
 *
 * @param acceptCallback invoked when the user accepts; may be null
 */
private void showSignUpTermsDialog(@Nullable DialogInterface.OnClickListener acceptCallback) {
    final String content = getResources().getString(R.string.com_auth0_lock_sign_up_terms_dialog_message, configuration.getTermsURL(), configuration.getPrivacyURL());
    final AlertDialog.Builder builder = new AlertDialog.Builder(getContext())
        .setTitle(getResources().getString(R.string.com_auth0_lock_sign_up_terms_dialog_title))
        .setPositiveButton(R.string.com_auth0_lock_action_ok, null)
        .setMessage(Html.fromHtml(content));
    if (acceptCallback != null) {
        builder.setNegativeButton(R.string.com_auth0_lock_action_cancel, null)
            .setPositiveButton(R.string.com_auth0_lock_action_accept, acceptCallback)
            .setCancelable(false);
    }
    // The dialog needs to be shown before we can get its view.
    final TextView message = (TextView) builder.show().findViewById(android.R.id.message);
    if (message != null) {
        // Make embedded links in the message tappable.
        message.setMovementMethod(LinkMovementMethod.getInstance());
    }
}
|
python
|
def stringify_device_meta(device_object):
    """ Input: Portals device object.
        Output: The same device object with the device meta
        converted to a python string.

    Devices missing the nested ``info.description.meta`` keys are returned
    unchanged instead of raising ``KeyError``.
    """
    try:
        if isinstance(device_object['info']['description']['meta'], dict):
            device_object['info']['description']['meta'] =\
                json.dumps(device_object['info']['description']['meta'])
    except KeyError:
        # Nested keys absent: nothing to stringify, leave the object as-is.
        pass
    except (ValueError, TypeError) as err:
        # json.dumps failed on an unserializable meta value.
        print("stringify: {0}".format(err))
    return device_object
|
java
|
/**
 * Removes the given listener registered for {@code eventClass}.
 *
 * <p>The read lock is taken first to check whether a listener set exists for
 * the event class; only then is the write lock acquired for the actual
 * removal. Note the lock upgrade is not atomic: the read lock is released
 * before the write lock is taken, so {@code removeListenerAndSetIfNeeded}
 * must tolerate the set having changed in between.
 *
 * @param eventClass the event type the listener was registered for
 * @param listener   the listener holder to remove
 * @return {@code true} if the listener was removed; {@code false} when no
 *         set exists for {@code eventClass} or removal reported false
 */
boolean remove(Class<?> eventClass, ListenerReferenceHolder listener) {
    lock.readLock().lock();
    TreeSet<ListenerReferenceHolder> set = listeners.get(eventClass);
    if (set != null) {
        // Non-atomic upgrade: this ReadWriteLock cannot promote a read lock,
        // so release it before taking the write lock.
        lock.readLock().unlock();
        lock.writeLock().lock();
        try {
            return removeListenerAndSetIfNeeded(eventClass, listener, set);
        } finally {
            lock.writeLock().unlock();
        }
    }
    lock.readLock().unlock();
    return false;
}
|
python
|
def delete_tag_from_bookmark(self, bookmark_id, tag_id):
    """
    Remove a single tag from a bookmark.

    The identified bookmark must belong to the current user.

    :param bookmark_id: ID of the bookmark to remove the tag from.
    :param tag_id: ID of the tag to remove.
    """
    url = self._generate_url('bookmarks/{0}/tags/{1}'.format(
        bookmark_id, tag_id))
    return self.delete(url)
|
python
|
def add_signal_handler():
    """Install a SIGINT handler that interrupts librtmp cleanly
    before raising KeyboardInterrupt."""
    import signal

    def _on_sigint(sig, frame):
        if sig == signal.SIGINT:
            librtmp.RTMP_UserInterrupt()
            raise KeyboardInterrupt

    signal.signal(signal.SIGINT, _on_sigint)
|
python
|
def evaluate(self, gold):
    """Evaluate the accuracy of this tagger using a gold standard corpus.

    :param list(list(tuple(str, str))) gold: The list of tagged sentences to score the tagger on.
    :returns: Tagger accuracy value (fraction of tokens tagged identically
        to the gold standard); 0.0 for an empty corpus.
    :rtype: float
    """
    tagged_sents = self.tag_sents([w for (w, t) in sent] for sent in gold)
    # Flatten the per-sentence token lists into flat streams for comparison.
    gold_tokens = sum(gold, [])
    test_tokens = sum(tagged_sents, [])
    if not test_tokens:
        # Avoid ZeroDivisionError when evaluating on an empty corpus.
        return 0.0
    matches = sum(x == y for x, y in zip(gold_tokens, test_tokens))
    return float(matches) / len(test_tokens)
|
python
|
def __getSequenceVariants(self, x1, polyStart, polyStop, listSequence) :
    """Recursively expand ``listSequence`` into every allele combination.

    :param x1: absolute position of the first element of ``listSequence``;
        used to convert polymorphism positions into list indices.
    :param polyStart: index into ``self.polymorphisms`` to expand next.
    :param polyStop: polymorphism index at which combination expansion stops.
    :param listSequence: current list of sequence characters (not mutated).
    :return: list of variant sequence strings.
    """
    if polyStart < len(self.polymorphisms) and polyStart < polyStop:
        # Shallow copy so the caller's list is never mutated by substitution.
        sequence = copy.copy(listSequence)
        ret = []
        # pk is (position, alleles) — assumes that layout; TODO confirm.
        pk = self.polymorphisms[polyStart]
        posInSequence = pk[0]-x1
        if posInSequence < len(listSequence) :
            for allele in pk[1] :
                # Substitute the allele in place and recurse on the remainder.
                sequence[posInSequence] = allele
                ret.extend(self.__getSequenceVariants(x1, polyStart+1, polyStop, sequence))
        # If the polymorphism falls outside the sequence, ret stays empty.
        return ret
    else :
        # Base case: no more polymorphisms; materialise the final string.
        return [''.join(listSequence)]
|
java
|
/**
 * Extracts the OAuth grant code from a redirect URL's query string.
 *
 * @param urlString the redirect URL containing a {@code code} query parameter
 * @return the grant code value
 * @throws MalformedURLException if {@code urlString} is not a valid URL
 * @throws RuntimeException      if the URL has no {@code code} parameter
 */
private String extractGrantCode(String urlString) throws MalformedURLException {
    final URL redirectUrl = new URL(urlString);
    final String grantCode = Utils.getParameterValueFromQuery(redirectUrl.getQuery(), "code");
    if (grantCode == null){
        throw new RuntimeException("Failed to extract grant code from url");
    }
    logger.debug("Grant code extracted successfully");
    return grantCode;
}
|
python
|
def registeredView(viewName, location='Central'):
    """
    Returns the view that is registered to the inputed location for the \
    given name.

    :param viewName | <str>
           location | <str>

    :return <subclass of XView> || None
    """
    loc = nativestring(location)
    registry = XView._registry.get(loc, {})
    view = registry.get(viewName)
    if view:
        return view
    # Fall back to matching the class name of each registered view.
    for candidate in registry.values():
        if candidate.__name__ == viewName:
            return candidate
    # Bug fix: previously the last iterated view leaked out of the loop and
    # was returned even when its name did not match; return None instead.
    return None
|
java
|
/**
 * Checks availability asynchronously, unwrapping the service response.
 *
 * @param parameters the availability check parameters
 * @return an {@link Observable} emitting the
 *         {@link CheckAvailabilityResourceInner} body of the service response
 */
public Observable<CheckAvailabilityResourceInner> checkAvailabilityAsync(CheckAvailabilityParameters parameters) {
    return checkAvailabilityWithServiceResponseAsync(parameters).map(new Func1<ServiceResponse<CheckAvailabilityResourceInner>, CheckAvailabilityResourceInner>() {
        @Override
        public CheckAvailabilityResourceInner call(ServiceResponse<CheckAvailabilityResourceInner> response) {
            // Strip the ServiceResponse wrapper; callers only want the body.
            return response.body();
        }
    });
}
|
python
|
def delete(self, exchange, if_unused=False, nowait=True, ticket=None,
           cb=None):
    '''
    Delete an exchange.

    :param exchange: name of the exchange to delete
    :param if_unused: only delete the exchange if it has no bindings
    :param nowait: skip waiting for the broker reply; forced to a
        synchronous round-trip when a callback is given or nowait is
        not allowed on this channel
    :param ticket: access ticket; defaults to ``self.default_ticket``
    :param cb: optional callback invoked when delete-ok arrives
    '''
    # Supplying a callback implies we must wait for the broker's reply.
    nowait = nowait and self.allow_nowait() and not cb

    args = Writer()
    args.write_short(ticket or self.default_ticket).\
        write_shortstr(exchange).\
        write_bits(if_unused, nowait)
    # Class 40 (exchange), method 20 (delete) per the AMQP method table.
    self.send_frame(MethodFrame(self.channel_id, 40, 20, args))

    if not nowait:
        self._delete_cb.append(cb)
        self.channel.add_synchronous_cb(self._recv_delete_ok)
|
java
|
/**
 * Computes the Euclidean distance between two samples, skipping any
 * coordinate pair where either value is {@code NaN} (missing data does not
 * poison the sum).
 *
 * @param sample1 first sample vector
 * @param sample2 second sample vector; must have the same length as sample1
 * @return the Euclidean distance over the non-NaN coordinate pairs
 * @throws IllegalArgumentException if the lengths differ or the arrays are empty
 */
public double getDistance(double[] sample1, double[] sample2) throws IllegalArgumentException {
    int n = sample1.length;
    if (n != sample2.length) {
        throw new IllegalArgumentException("Input arrays must have the same length.");
    }
    if (n < 1) {
        // Bug fix: previously this case reported the misleading
        // "must have the same length" message.
        throw new IllegalArgumentException("Input arrays must not be empty.");
    }
    double sumOfSquares = 0;
    for (int i = 0; i < n; i++) {
        // Skip pairs containing missing (NaN) values.
        if (Double.isNaN(sample1[i]) || Double.isNaN(sample2[i])) {
            continue;
        }
        double diff = sample1[i] - sample2[i];
        sumOfSquares += diff * diff;
    }
    return Math.sqrt(sumOfSquares);
}
|
java
|
/**
 * Returns an iterator over the non-zero entries of row {@code i}.
 *
 * <p>{@code hasNext()} advances the internal cursor past zero entries so the
 * following {@code next()} lands on a non-zero element; repeated calls to
 * {@code hasNext()} are idempotent. {@code set()} writes through to the
 * underlying matrix at the current position.
 *
 * @param i the row to iterate over
 * @return a {@link VectorIterator} positioned before the first non-zero column
 */
public VectorIterator nonZeroIteratorOfRow(int i) {
    final int ii = i;
    return new VectorIterator(columns) {
        // Current column index; -1 means "before the first element".
        private int j = -1;

        @Override
        public int index() {
            return j;
        }

        @Override
        public double get() {
            return SparseMatrix.this.get(ii, j);
        }

        @Override
        public void set(double value) {
            SparseMatrix.this.set(ii, j, value);
        }

        @Override
        public boolean hasNext() {
            // Skip zero entries. This mutates j, which is safe because a
            // second hasNext() call finds j already parked before a non-zero.
            while (j + 1 < columns && SparseMatrix.this.isZeroAt(ii, j + 1)) {
                j++;
            }
            return j + 1 < columns;
        }

        @Override
        public Double next() {
            if(!hasNext()) {
                throw new NoSuchElementException();
            }
            j++;
            return get();
        }
    };
}
|
java
|
/**
 * Thin delegation wrapper: forwards to
 * {@link ImplRectifyImageOps_F32#fullViewLeft} with the same arguments.
 *
 * @param paramLeft    intrinsic parameters of the left camera
 * @param rectifyLeft  rectification matrix for the left image (modified)
 * @param rectifyRight rectification matrix for the right image (modified)
 * @param rectifyK     rectified calibration matrix (modified)
 */
public static void fullViewLeft(CameraPinholeBrown paramLeft,
                                FMatrixRMaj rectifyLeft, FMatrixRMaj rectifyRight,
                                FMatrixRMaj rectifyK)
{
    ImplRectifyImageOps_F32.fullViewLeft(paramLeft, rectifyLeft, rectifyRight, rectifyK);
}
|
java
|
/**
 * Marshalls the given {@link GetEmailIdentityRequest} to the protocol target.
 *
 * @param getEmailIdentityRequest the request to marshall; must not be null
 * @param protocolMarshaller      the marshaller receiving the request fields
 * @throws SdkClientException if the request is null or marshalling fails
 *         (the original cause is preserved)
 */
public void marshall(GetEmailIdentityRequest getEmailIdentityRequest, ProtocolMarshaller protocolMarshaller) {
    if (getEmailIdentityRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(getEmailIdentityRequest.getEmailIdentity(), EMAILIDENTITY_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def get_nameserver_detail_output_show_nameserver_nameserver_portname(self, **kwargs):
    """Auto Generated Code.

    Builds the XML element tree for a ``get_nameserver_detail`` request
    selecting the ``nameserver-portname`` leaf, then hands it to the callback.

    :param nameserver_portid: key identifying the nameserver entry (required;
        popped from kwargs, so KeyError if missing).
    :param nameserver_portname: port name value for the entry (required).
    :param callback: optional callable applied to the built element;
        defaults to ``self._callback``.
    """
    config = ET.Element("config")
    get_nameserver_detail = ET.Element("get_nameserver_detail")
    # NOTE(review): the initial "config" element is discarded here; this
    # rebinding appears in all generated siblings — confirm it is intended.
    config = get_nameserver_detail
    output = ET.SubElement(get_nameserver_detail, "output")
    show_nameserver = ET.SubElement(output, "show-nameserver")
    nameserver_portid_key = ET.SubElement(show_nameserver, "nameserver-portid")
    nameserver_portid_key.text = kwargs.pop('nameserver_portid')
    nameserver_portname = ET.SubElement(show_nameserver, "nameserver-portname")
    nameserver_portname.text = kwargs.pop('nameserver_portname')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
java
|
/**
 * Lazily resolves and caches the {@code IfcPreDefinedProperties} EClass
 * from the registered IFC4 package.
 *
 * @return the EClass at classifier index 443 of the IFC4 package
 */
@Override
public EClass getIfcPreDefinedProperties() {
    if (ifcPreDefinedPropertiesEClass == null) {
        // Generated EMF accessor: the classifier index is fixed by codegen.
        ifcPreDefinedPropertiesEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                .getEClassifiers().get(443);
    }
    return ifcPreDefinedPropertiesEClass;
}
|
python
|
def installShlibLinks(dest, source, env):
    """If we are installing a versioned shared library create the required links.

    :param dest: installation destination node/path.
    :param source: source library node(s).
    :param env: SCons construction environment.
    """
    Verbose = False
    symlinks = listShlibLinksToInstall(dest, source, env)
    if Verbose:
        # Bug fix: '{:r}' is an invalid format spec and raised ValueError;
        # the repr conversion is spelled '{!r}'.
        print('installShlibLinks: symlinks={!r}'.format(SCons.Tool.StringizeLibSymlinks(symlinks)))
    if symlinks:
        SCons.Tool.CreateLibSymlinks(env, symlinks)
    return
|
python
|
def _handle_command(self, buffer):
" When text is accepted in the command line. "
text = buffer.text
# First leave command mode. We want to make sure that the working
# pane is focused again before executing the command handers.
self.pymux.leave_command_mode(append_to_history=True)
# Execute command.
self.pymux.handle_command(text)
|
python
|
def get_tweet(self, id):
    """
    Get an existing tweet.

    :param id: ID of the tweet in question
    :return: Tweet object. None if not found
    """
    try:
        status = self._client.get_status(id=id)
        return Tweet(status._json)
    except TweepError as error:
        # Translate "not found" into None; re-raise everything else.
        if error.api_code != TWITTER_TWEET_NOT_FOUND_ERROR:
            raise
        return None
|
java
|
/**
 * Parses a version line of the form "&lt;prefix&gt;&lt;version&gt; (&lt;build&gt;)"
 * into the {@code version} and {@code build} fields. Old toolchains omit the
 * build suffix, in which case {@code build} is set to null.
 *
 * @param log the raw version line, beginning with the known prefix
 */
private void processVersionLine(String log) {
    // Strip the fixed prefix, leaving "<version> (<build>)" or "<version>".
    log = log.replace(start, "");
    String[] pieces = log.split(" \\(");
    version = pieces[0];
    if (pieces.length == 2) {
        build = pieces[1].replace(")", "");
    } else {
        // old builds do not include build info. Xcode 4.3.2 return 1.0 without build info
        build = null;
    }
}
|
python
|
def duplicate(self, name):
    """
    .. versionadded:: 0.5.8
        Requires SMC version >= 6.3.2

    Duplicate this element. This is a shortcut method that will make
    a direct copy of the element under the new name and type.

    :param str name: name for the duplicated element
    :raises ActionCommandFailed: failed to duplicate the element
    :return: the newly created element
    :rtype: Element
    """
    response = self.make_request(
        method='update',
        raw_result=True,
        resource='duplicate',
        params={'name': name})
    element_cls = type(self)
    return element_cls(name=name, href=response.href, type=element_cls.typeof)
|
java
|
/**
 * Builds a {@link MilestoneManager} that draws a marker every 1000 meters
 * along the polyline: a filled circle labeled with the kilometer count
 * (e.g. "3K"). Markers already passed by the animation swap their
 * foreground/background colors.
 */
private MilestoneManager getKilometerManager() {
    final float backgroundRadius = 20;
    final Paint backgroundPaint1 = getFillPaint(COLOR_BACKGROUND);
    final Paint backgroundPaint2 = getFillPaint(COLOR_POLYLINE_ANIMATED);
    final Paint textPaint1 = getTextPaint(COLOR_POLYLINE_STATIC);
    final Paint textPaint2 = getTextPaint(COLOR_BACKGROUND);
    final Paint borderPaint = getStrokePaint(COLOR_BACKGROUND, 2);
    return new MilestoneManager(
        new MilestoneMeterDistanceLister(1000),
        new MilestoneDisplayer(0, false) {
            @Override
            protected void draw(final Canvas pCanvas, final Object pParameter) {
                final double meters = (double)pParameter;
                final int kilometers = (int)Math.round(meters / 1000);
                // "checked" = the animation has already passed this milestone
                // (special case: the final 10K marker once the animation ends).
                final boolean checked = meters < mAnimatedMetersSoFar || (kilometers == 10 && mAnimationEnded);
                final Paint textPaint = checked ? textPaint2 : textPaint1;
                final Paint backgroundPaint = checked ? backgroundPaint2 : backgroundPaint1;
                final String text = "" + kilometers + "K";
                final Rect rect = new Rect();
                // NOTE(review): bounds are always measured with textPaint1 even
                // when drawing with textPaint2 — fine only if both paints share
                // the same text size; confirm in getTextPaint.
                textPaint1.getTextBounds(text, 0, text.length(), rect);
                pCanvas.drawCircle(0, 0, backgroundRadius, backgroundPaint);
                pCanvas.drawText(text, -rect.left - rect.width() / 2, rect.height() / 2 - rect.bottom, textPaint);
                pCanvas.drawCircle(0, 0, backgroundRadius + 1, borderPaint);
            }
        }
    );
}
|
python
|
def pause_knocks(obj):
    """
    Context manager to suspend sending knocks for the given model

    :param obj: model instance
    """
    if not hasattr(_thread_locals, 'knock_enabled'):
        _thread_locals.knock_enabled = {}
    obj.__class__._disconnect()
    _thread_locals.knock_enabled[obj.__class__] = False
    try:
        yield
    finally:
        # Bug fix: re-enable knocks even when the managed block raises,
        # otherwise the model stayed disconnected after an exception.
        _thread_locals.knock_enabled[obj.__class__] = True
        obj.__class__._connect()
|
python
|
def checkout(self):
    '''
    Checkout the configured branch/tag. We catch an "Exception" class here
    instead of a specific exception class because the exceptions raised by
    GitPython when running these functions vary in different versions of
    GitPython.

    Returns the repo root on success (or when already up-to-date), and
    None when the ref does not exist or the checkout lock cannot be taken.
    '''
    tgt_ref = self.get_checkout_target()
    try:
        head_sha = self.repo.rev_parse('HEAD').hexsha
    except Exception:
        # Should only happen the first time we are checking out, since
        # we fetch first before ever checking anything out.
        head_sha = None

    # 'origin/' + tgt_ref ==> matches a branch head
    # 'tags/' + tgt_ref + '@{commit}' ==> matches tag's commit
    for rev_parse_target, checkout_ref in (
            ('origin/' + tgt_ref, 'origin/' + tgt_ref),
            ('tags/' + tgt_ref, 'tags/' + tgt_ref)):
        try:
            target_sha = self.repo.rev_parse(rev_parse_target).hexsha
        except Exception:
            # ref does not exist
            continue
        else:
            if head_sha == target_sha:
                # No need to checkout, we're already up-to-date
                return self.check_root()

        try:
            # Serialize checkouts across processes via the lock file.
            with self.gen_lock(lock_type='checkout'):
                self.repo.git.checkout(checkout_ref)
                log.debug(
                    '%s remote \'%s\' has been checked out to %s',
                    self.role,
                    self.id,
                    checkout_ref
                )
        except GitLockError as exc:
            if exc.errno == errno.EEXIST:
                # Re-raise with a different strerror containing a
                # more meaningful error message for the calling
                # function.
                raise GitLockError(
                    exc.errno,
                    'Checkout lock exists for {0} remote \'{1}\''
                    .format(self.role, self.id)
                )
            else:
                log.error(
                    'Error %d encountered obtaining checkout lock '
                    'for %s remote \'%s\'',
                    exc.errno,
                    self.role,
                    self.id
                )
                return None
        except Exception:
            # Checkout failed for another reason; try the next candidate ref.
            continue
        return self.check_root()
    log.error(
        'Failed to checkout %s from %s remote \'%s\': remote ref does '
        'not exist', tgt_ref, self.role, self.id
    )
    return None
|
java
|
/**
 * Fetches the servicegroup_binding resource for the given service group name.
 *
 * @param service          the nitro service used to perform the lookup
 * @param servicegroupname name of the service group whose bindings to fetch
 * @return the populated servicegroup_binding resource
 * @throws Exception if the underlying resource fetch fails
 */
public static servicegroup_binding get(nitro_service service, String servicegroupname) throws Exception{
    servicegroup_binding resource = new servicegroup_binding();
    resource.set_servicegroupname(servicegroupname);
    return (servicegroup_binding) resource.get_resource(service);
}
|
python
|
def request_uniq(func):
    """
    Decorator: prepend a per-uwsgi-request unique dict as the first
    argument of ``func``.

    note: won't work on non-uwsgi cases
    """
    import functools

    # functools.wraps preserves func's name/docstring on the wrapper,
    # which the original implementation lost.
    @functools.wraps(func)
    def _wrapped(*args, **kwargs):
        data = _get_request_unique_cache()
        return func(data, *args, **kwargs)
    return _wrapped
|
python
|
def add_cmd_handler(self, handler_obj):
    """Registers a new command handler object.

    All methods on `handler_obj` whose name starts with "cmd_" are
    registered as a GTP command. For example, the method cmd_genmove will
    be invoked when the engine receives a genmove command.

    Args:
      handler_obj: the handler object to register.
    """
    prefix = "cmd_"
    for attr_name in dir(handler_obj):
        if not attr_name.startswith(prefix):
            continue
        command = attr_name[len(prefix):]
        handler_fn = getattr(handler_obj, attr_name)
        if command in self.cmds:
            # Warn on stderr when a command is being overridden.
            print('Replacing {} with {}'.format(
                _handler_name(self.cmds[command]), _handler_name(handler_fn)),
                file=sys.stderr)
        self.cmds[command] = handler_fn
|
python
|
def main(argv=None):
    """Run the pywhich command as if invoked with arguments `argv`.

    If `argv` is `None`, arguments from `sys.argv` are used.

    :param argv: full argument vector (``argv[0]`` is the program name).
    :return: process exit status (0 on success).
    """
    if argv is None:
        argv = sys.argv

    parser = OptionParser()
    parser.add_option('-v', '--verbose', dest="verbose", action="count",
                      default=2, help="be chattier (stackable)")

    def quiet(option, opt_str, value, parser):
        parser.values.verbose -= 1
    parser.add_option('-q', '--quiet', action="callback", callback=quiet,
                      help="be less chatty (stackable)")

    parser.add_option('-r', action="store_true", dest="real_path",
                      default=False, help="dereference symlinks")
    parser.add_option('-b', action="store_true", dest="show_directory",
                      default=False, help="show directory instead of filename")
    parser.add_option('-i', '--hide-init', action="store_true", dest="hide_init",
                      default=False, help="show directory if the module ends in __init__.py")
    parser.add_option('-s', '--source', action="store_true", dest="find_source",
                      default=False, help="find .py files for .pyc/.pyo files")
    parser.add_option('--ver', action="store_true", dest="find_version",
                      default=False, help="find the version of the named package, not the location on disk")

    # Bug fix: parse the supplied argv (minus the program name) instead of
    # always reading sys.argv, so the documented contract actually holds.
    opts, args = parser.parse_args(argv[1:])

    verbose = max(0, min(4, opts.verbose))
    log_levels = (logging.CRITICAL, logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG)
    logging.basicConfig()
    log.setLevel(log_levels[verbose])

    if opts.find_version:
        find_version(*args)
    else:
        kwargs = dict((fld, getattr(opts, fld)) for fld
                      in ('real_path', 'show_directory', 'find_source', 'hide_init'))
        identify_modules(*args, **kwargs)
    return 0
|
python
|
def debye_E_single(x):
    """
    calculate Debye energy using old fortran routine

    :params x: Debye x value. NOTE(review): for x <= 0 no branch assigns
        ``result`` and a NameError would be raised — confirm callers
        always pass positive x.
    :return: Debye energy
    """
    # make the function handles both scalar and array
    # Small-x series expansion for 0 < x <= 0.1.
    if ((x > 0.0) & (x <= 0.1)):
        result = 1. - 0.375 * x + x * x * \
            (0.05 - (5.952380953e-4) * x * x)
    # for 0.1 < x <= 7.25: rational polynomial approximation.
    if ((x > 0.1) & (x <= 7.25)):
        result = ((((.0946173 * x - 4.432582) * x +
                    85.07724) * x - 800.6087) * x +
                   3953.632) / ((((x + 15.121491) * x +
                                  143.155337) * x + 682.0012) *
                                x + 3953.632)
    # for x > 7.25
    # it appears there might be error for this part, but never been exposed
    # because of rarity of such high x value.
    if (x > 7.25):
        # Tail expansion: truncated exponential series with n ~ 25/x terms.
        exx = np.exp(-x)
        nn = np.round(25. / x)
        n = nn.astype(np.int64)
        temp = 0.
        if (n > 0):
            temp2 = 1.
            end = n + 1
            for i in range(1, end):
                temps = i * 1.
                temp2 = temp2 * exx
                x3 = temps * x
                temp = temp + temp2 * \
                    (6. + x3 * (6. + x3 * (3. + x3))) / \
                    (temps * temps * temps * temps)
        result = 3.0 * (6.493939402 - temp) / (x * x * x)
    return result
|
python
|
def OnGridEditorCreated(self, event):
    """Used to capture Editor close events.

    Binds EVT_KILL_FOCUS on the newly created grid editor control so that
    ``OnGridEditorClosed`` fires when the editor loses focus.
    """
    editor = event.GetControl()
    editor.Bind(wx.EVT_KILL_FOCUS, self.OnGridEditorClosed)
    # Let other handlers process the creation event as well.
    event.Skip()
|
python
|
def collection_names(self, include_system_collections=True):
    """Get a list of all the collection names in this database.

    :Parameters:
      - `include_system_collections` (optional): if ``False`` list
        will not include system collections (e.g ``system.indexes``)
    """
    with self.__client._socket_for_reads(
            ReadPreference.PRIMARY) as (sock_info, slave_okay):
        wire_version = sock_info.max_wire_version
        results = self._list_collections(sock_info, slave_okay)

    # Iterating the cursor to completion may require a socket for getmore.
    # Ensure we do that outside the "with" block so we don't require more
    # than one socket at a time.
    names = [result["name"] for result in results]
    if wire_version <= 2:
        # MongoDB 2.4 and older return index namespaces and collection
        # namespaces prefixed with the database name.
        names = [n[len(self.__name) + 1:] for n in names
                 if n.startswith(self.__name + ".") and "$" not in n]

    if not include_system_collections:
        names = [name for name in names if not name.startswith("system.")]
    return names
|
java
|
/**
 * Recursively parses a JSON object from the parser's current position into a
 * {@code Map<String, Object>}: nested objects become nested maps, arrays are
 * delegated to {@code getCollectionValue}, JSON nulls map to {@code null},
 * and all scalar values are stored as their text representation.
 *
 * @param jsonParser parser positioned just after a START_OBJECT token
 * @return a map of field names to parsed values
 * @throws IOException if the underlying parser fails
 */
private Object getEmbeddedObject(JsonParser jsonParser) throws IOException {
    LOG.info("Start parsing an embedded object.");
    Map<String, Object> embeddedMap = new HashMap<>();
    while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
        // Current token is the field name; advance to its value token.
        String key = jsonParser.getText();
        jsonParser.nextToken();
        JsonToken token = jsonParser.getCurrentToken();
        if (token == JsonToken.START_ARRAY) {
            Object embeddedArray = getCollectionValue(jsonParser);
            embeddedMap.put(key, embeddedArray);
        } else if (token == JsonToken.START_OBJECT) {
            // Recurse for nested objects.
            Object embeddedObject = getEmbeddedObject(jsonParser);
            embeddedMap.put(key, embeddedObject);
        } else {
            if (token.equals(JsonToken.VALUE_NULL)) {
                embeddedMap.put(key, null);
            } else {
                // Scalars (strings, numbers, booleans) are kept as text.
                embeddedMap.put(key, jsonParser.getText());
            }
        }
    }
    return embeddedMap;
}
|
python
|
def transformer_base_v2():
    """Set of hyperparameters.

    Variant of ``transformer_base_v1`` that changes the layer
    pre/postprocess sequences to "n"/"da", adds 0.1 dropout to the
    prepostprocess, attention and relu stages, and retunes the
    learning-rate schedule (8000 warmup steps, lr 0.2).
    """
    hparams = transformer_base_v1()
    hparams.layer_preprocess_sequence = "n"
    hparams.layer_postprocess_sequence = "da"
    hparams.layer_prepostprocess_dropout = 0.1
    hparams.attention_dropout = 0.1
    hparams.relu_dropout = 0.1
    hparams.learning_rate_warmup_steps = 8000
    hparams.learning_rate = 0.2
    return hparams
|
python
|
def selections(self):
    """Build list of extra selections for rectangular selection"""
    cursors = self.cursors()
    if not cursors:
        return []

    palette = self._qpart.palette()
    background = palette.color(QPalette.Highlight)
    foreground = palette.color(QPalette.HighlightedText)

    result = []
    for cursor in cursors:
        extra = QTextEdit.ExtraSelection()
        extra.format.setBackground(background)
        extra.format.setForeground(foreground)
        extra.cursor = cursor
        result.append(extra)
    return result
|
python
|
def get_ip(self):
    """
    Retrieve a complete list of bought ip address related only to PRO Servers.

    It create an internal object (Iplist) representing all of the ips object
    iterated form the WS.

    @param: None
    @return: None
    """
    json_scheme = self.gen_def_json_scheme('GetPurchasedIpAddresses')
    # Bug fix: the method name previously carried a trailing space
    # ('GetPurchasedIpAddresses '), inconsistent with the scheme name above.
    json_obj = self.call_method_post(method='GetPurchasedIpAddresses', json_scheme=json_scheme)
    self.iplist = IpList()
    for ip in json_obj['Value']:
        r = Ip()
        r.ip_addr = ip['Value']
        r.resid = ip['ResourceId']
        # NOTE(review): the string test also catches a literal 'None' payload,
        # not just a real None — confirm the WS returns the string form.
        r.serverid = ip['ServerId'] if 'None' not in str(ip['ServerId']) else None
        self.iplist.append(r)
|
java
|
/**
 * Writes bootstrap statistics for {@code name} in several formats under
 * {@code outputDir}: a human-readable percentile chart (.bootstrapped.txt),
 * delimited percentile and medians files (.bootstrapped.csv /
 * .bootstrapped.medians.csv), raw samples (.bootstrapped.raw), and one
 * per-breakdown percentile JSON file under bootstrapData/.
 *
 * @param name                        base name used for the chart title and output files
 * @param measuresToBreakdownsToStats measure name -> (breakdown key -> samples)
 * @param outputDir                   directory the report files are written into
 * @throws IOException if any output file cannot be written
 */
public void writeBootstrapData(String name,
        ImmutableMap<String, ImmutableListMultimap<String, Double>> measuresToBreakdownsToStats,
        File outputDir) throws IOException {
    final StringBuilder chart = new StringBuilder();
    final StringBuilder delim = new StringBuilder();
    final StringBuilder mediansDelim = new StringBuilder();

    final ImmutableSet<String> breakdownKeys =
            MapUtils.allMultimapKeys(measuresToBreakdownsToStats.values());

    // Set up chart title, delimited file headers
    chart.append(name).append("\n\n");
    addDelimPercentileHeader(name, delim);
    addDelimMediansHeader(name, measures(), mediansDelim);

    final ImmutableMap.Builder<String, ImmutableMap<String, ImmutableList<Double>>> samples =
            ImmutableMap.builder();

    final File bootstrapDataDir = new File(outputDir, "bootstrapData");
    bootstrapDataDir.mkdirs();

    // all four multimaps have the same keyset
    for (final String breakdownKey : breakdownKeys) {
        final ImmutableMap.Builder<String, Double> mediansMapBuilder =
                ImmutableMap.builder();
        final ImmutableMap.Builder<String, PercentileComputer.Percentiles> percentileMapB = ImmutableMap.builder();
        final ImmutableMap.Builder<String, ImmutableList<Double>> keySamples =
                ImmutableMap.builder();
        for (final Map.Entry<String, ImmutableListMultimap<String, Double>> e : measuresToBreakdownsToStats
                .entrySet()) {
            final String measureName = e.getKey();
            final ImmutableList<Double> samplesForBreakdownKey = e.getValue().get(breakdownKey);
            final PercentileComputer.Percentiles percentiles =
                    percentileComputer().calculatePercentilesAdoptingData(
                            Doubles.toArray(samplesForBreakdownKey));
            percentileMapB.put(measureName, percentiles);
            // Raw samples
            keySamples.put(measureName, samplesForBreakdownKey);
            // Aggregate medians
            mediansMapBuilder.put(measureName, percentiles.median().or(Double.NaN));
        }

        // Write to chart
        final ImmutableMap<String, PercentileComputer.Percentiles> percentilesMap = percentileMapB.build();
        dumpPercentilesForMetric(breakdownKey, percentilesMap, chart);
        chart.append("\n");
        // One JSON percentile file per breakdown key under bootstrapData/.
        final JacksonSerializer serializer = JacksonSerializer.builder().forJson().prettyOutput().build();
        serializer.serializeTo(new SerializedBootstrapResults.Builder()
                .percentilesMap(percentilesMap).build(), Files.asByteSink(new File(bootstrapDataDir,
                breakdownKey + ".percentile.json")));

        // Write to delim
        addDelimPercentilesForMetric(breakdownKey, percentilesMap, delim);
        addMediansRow(breakdownKey, mediansMapBuilder.build(), mediansDelim);
    }

    // Make output dir as needed
    outputDir.mkdir();
    // Write chart
    Files.asCharSink(new File(outputDir, name + ".bootstrapped.txt"),
            Charsets.UTF_8).write(chart.toString());
    // Write delim
    Files.asCharSink(new File(outputDir, name + ".bootstrapped.csv"),
            Charsets.UTF_8).write(delim.toString());
    // Write means-only delimited
    Files.asCharSink(new File(outputDir, name + ".bootstrapped.medians.csv"),
            Charsets.UTF_8).write(mediansDelim.toString());
    // Write raw data
    Files.asCharSink(new File(outputDir, name + ".bootstrapped.raw"),
            Charsets.UTF_8).write(renderSamples(samples.build()));
}
|
java
|
/**
 * Parses a date-time string into a {@link DateTime} after normalizing it,
 * using the standard pattern {@code DatePattern.NORM_DATETIME_FORMAT}.
 *
 * @param dateString the raw date string to parse
 * @return the parsed DateTime
 */
public static DateTime parseDateTime(String dateString) {
    final String normalized = normalize(dateString);
    return parse(normalized, DatePattern.NORM_DATETIME_FORMAT);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.