language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
/**
 * Builds a map from lower-cased header cell to the row cell at the same index.
 *
 * <p>If the header and row differ in length, a warning is logged and an empty
 * map is returned. Null header cells are skipped.
 *
 * @param header the header cells (keys)
 * @param rowData the data cells (values), positionally matched to the header
 * @return mapping of lower-cased header name to row value (possibly empty)
 */
private Map<String, String> prepareHeaderRowDataMap(List<String> header, List<String> rowData) {
    Map<String, String> headerRowDataMap = new HashMap<>();
    if (header.size() != rowData.size()) {
        logger.warning("header and columns are not of same size");
        return headerRowDataMap;
    }
    for (int i = 0; i < header.size(); i++) {
        String key = header.get(i);
        if (key != null) {
            headerRowDataMap.put(key.toLowerCase(), rowData.get(i));
        }
    }
    return headerRowDataMap;
}
|
python
|
def d_x(data, axis, boundary='forward-backward'):
    '''
    Calculates a second-order centered finite difference of data along the
    specified axis.

    Parameters
    ----------
    data : ndarray
        Data on which we are taking a derivative.
    axis : int
        Index of the data array on which to take the difference.
    boundary : string, optional
        Boundary condition. If 'periodic', assume periodic boundary condition
        for centered difference. If 'forward-backward', take first-order
        forward or backward derivatives at boundary.

    Returns
    -------
    derivative : ndarray
        Derivative of the data along the specified axis.

    Raises
    ------
    ValueError
        If an invalid boundary condition choice is given, or if axis is out
        of the valid range for the shape of the data.
    '''
    # Valid axes are -ndim .. ndim-1. The previous check (abs(axis) > ndim)
    # wrongly accepted axis == ndim, which would fail later with an obscure
    # numpy error instead of a clear ValueError.
    if axis >= data.ndim or axis < -data.ndim:
        raise ValueError('axis is out of bounds for the shape of data')
    if boundary == 'periodic':
        diff = np.roll(data, -1, axis) - np.roll(data, 1, axis)
    elif boundary == 'forward-backward':
        # Index templates so the derivative axis can be arbitrary:
        # front/back select the +1/-1 neighbours used for the one-sided
        # differences at the two edge positions selected by target.
        front = [slice(None)] * data.ndim
        back = [slice(None)] * data.ndim
        target = [slice(None)] * data.ndim
        # front is the +1 index for derivative
        front[axis] = np.array([1, -1])
        # back is the -1 index for derivative
        back[axis] = np.array([0, -2])
        # target is the position where the derivative is being calculated
        target[axis] = np.array([0, -1])
        # Second-order centered difference in the interior...
        diff = (np.roll(data, -1, axis) - np.roll(data, 1, axis))/(2.)
        # ...overwritten by first-order forward/backward differences at the
        # boundaries. Index with tuples: indexing an ndarray with a list of
        # slices/arrays was deprecated and removed in modern numpy.
        diff[tuple(target)] = (data[tuple(front)] - data[tuple(back)])
    else:  # invalid boundary condition was given
        raise ValueError('Invalid option {} for boundary '
                         'condition.'.format(boundary))
    return diff
|
java
|
/**
 * Creates a narrowed clone of this model whose property list keeps only the
 * properties for which at least one of the given objects has a non-null value.
 *
 * <p>Property order from the original list is preserved. This is a single
 * pass per property with an early break on the first non-null value, which
 * replaces the previous two-pass scan over every object/property pair
 * (addressing the old "TODO performance" note).
 *
 * @param dos the data objects used to decide which properties are populated
 * @return a clone of this model restricted to the populated properties
 */
public DataObjectModel view(Collection<? extends DataObject> dos)
{
    DataObjectModel view = clone();
    view.propertyList = new ArrayList<String>();
    for (String property : propertyList)
    {
        // Keep the property as soon as any object carries a value for it.
        for (DataObject dob : dos)
        {
            if (dob.get(property) != null)
            {
                view.propertyList.add(property);
                break;
            }
        }
    }
    return view;
}
|
python
|
def NS(domain, resolve=True, nameserver=None):
    '''
    Return a list of IPs of the nameservers for ``domain``

    If ``resolve`` is False, don't resolve names.

    CLI Example:

    .. code-block:: bash

        salt ns1 dig.NS google.com
    '''
    dig_cmd = ['dig', '+short', six.text_type(domain), 'NS']
    if nameserver is not None:
        dig_cmd.append('@{0}'.format(nameserver))
    result = __salt__['cmd.run_all'](dig_cmd, python_shell=False)
    # In this case, 0 is not the same as False
    if result['retcode'] != 0:
        log.warning(
            'dig returned exit code \'%s\'. Returning empty list as fallback.',
            result['retcode']
        )
        return []
    hosts = result['stdout'].split('\n')
    if not resolve:
        return hosts
    # Resolve every reported nameserver host to its A-record addresses.
    return [ip_addr for ns_host in hosts for ip_addr in A(ns_host, nameserver)]
|
python
|
def ufloatDict_stdev(self, ufloat_dict):
    'This gives us a dictionary of standard deviations from a dictionary of uncertainties'
    return OrderedDict(izip(ufloat_dict.keys(), map(lambda x: x.std_dev, ufloat_dict.values())))
|
java
|
/**
 * Describes the replication tasks matching the given request.
 *
 * <p>Runs the standard pre-execution hook on the request before delegating to
 * the generated execute method.
 *
 * @param request the describe-replication-tasks request
 * @return the service call result
 */
@Override
public DescribeReplicationTasksResult describeReplicationTasks(DescribeReplicationTasksRequest request) {
    request = beforeClientExecution(request);
    return executeDescribeReplicationTasks(request);
}
|
java
|
/**
 * Returns the greatest common divisor of {@code a} and {@code b}.
 *
 * <p>Uses Euclid's algorithm. Unlike the previous subtraction-style variant,
 * this handles zero operands (the old code threw {@code ArithmeticException}
 * on {@code b % 0} when one argument was 0) and negative inputs (which could
 * previously loop without terminating). By convention {@code gcd(x, 0) == |x|}
 * and {@code gcd(0, 0) == 0}.
 *
 * @param a first operand (any int)
 * @param b second operand (any int)
 * @return the non-negative greatest common divisor
 */
static int gcd(int a, int b) {
    a = Math.abs(a);
    b = Math.abs(b);
    while (b != 0) {
        int t = a % b;
        a = b;
        b = t;
    }
    return a;
}
|
java
|
/**
 * Builds the transform implied by the current gesture on top of the previous
 * transform, then clamps it to the configured limits.
 *
 * <p>Order matters here: rotation and scale are applied about the gesture
 * pivot, scale is limited before translation is applied, and translation is
 * limited last.
 *
 * @param outTransform receives the resulting transform
 * @param limitTypes which limits to enforce (bit set of LimitFlag values)
 * @return true if the transform had to be corrected by any limit
 */
protected boolean calculateGestureTransform(
    Matrix outTransform,
    @LimitFlag int limitTypes) {
  TransformGestureDetector detector = mGestureDetector;
  boolean transformCorrected = false;
  outTransform.set(mPreviousTransform);
  if (mIsRotationEnabled) {
    // Detector reports rotation in radians; Matrix.postRotate wants degrees.
    float angle = detector.getRotation() * (float) (180 / Math.PI);
    outTransform.postRotate(angle, detector.getPivotX(), detector.getPivotY());
  }
  if (mIsScaleEnabled) {
    float scale = detector.getScale();
    outTransform.postScale(scale, scale, detector.getPivotX(), detector.getPivotY());
  }
  // Clamp scale before translation so translation limits see the final size.
  transformCorrected |=
      limitScale(outTransform, detector.getPivotX(), detector.getPivotY(), limitTypes);
  if (mIsTranslationEnabled) {
    outTransform.postTranslate(detector.getTranslationX(), detector.getTranslationY());
  }
  transformCorrected |= limitTranslation(outTransform, limitTypes);
  return transformCorrected;
}
|
python
|
def factory_profiles(self):
    '''The factory profiles of all loaded modules.

    Built lazily on first access and cached thereafter; access is guarded
    by this object's mutex.
    '''
    with self._mutex:
        if not self._factory_profiles:
            self._factory_profiles = []
            for fp in self._obj.get_factory_profiles():
                # Convert each profile's name-value list into a plain dict.
                self._factory_profiles.append(utils.nvlist_to_dict(fp.properties))
        return self._factory_profiles
|
java
|
/**
 * Waits up to 15 seconds for completion without propagating interruption.
 *
 * <p>If interrupted while waiting, the thread's interrupt status is restored
 * and {@code false} is returned instead of throwing.
 *
 * @return the result of {@code await}, or {@code false} if interrupted
 */
public final boolean awaitUninterruptibly() {
    try {
        return await(15, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        return false;
    }
}
|
java
|
/**
 * Evaluates a comma/whitespace separated stack-based (RPN-style) expression.
 *
 * <p>Token kinds:
 * <ul>
 *   <li>{@code :add}, {@code :sub}, {@code :mul}, {@code :div} — binary
 *       operations applied via {@code binaryOp} (operand ordering is defined
 *       there — confirm in that helper)</li>
 *   <li>{@code :if-changed} — delegated to {@code ifChanged}</li>
 *   <li>{@code {name}} — looked up in {@code vars}; a missing variable pushes
 *       {@code NaN} rather than failing</li>
 *   <li>anything else — parsed as a double literal</li>
 * </ul>
 *
 * @param expr the expression string
 * @param vars variable bindings referenced via {@code {name}} tokens
 * @return the value remaining on top of the stack
 */
@SuppressWarnings("PMD")
static Double eval(String expr, Map<String, ? extends Number> vars) {
    Deque<Double> stack = new ArrayDeque<>();
    String[] parts = expr.split("[,\\s]+");
    for (String part : parts) {
        switch (part) {
            case ":add": binaryOp(stack, (a, b) -> a + b); break;
            case ":sub": binaryOp(stack, (a, b) -> a - b); break;
            case ":mul": binaryOp(stack, (a, b) -> a * b); break;
            case ":div": binaryOp(stack, (a, b) -> a / b); break;
            case ":if-changed": ifChanged(stack); break;
            default:
                if (part.startsWith("{") && part.endsWith("}")) {
                    Number v = vars.get(part.substring(1, part.length() - 1));
                    // Missing variables become NaN so evaluation can proceed.
                    if (v == null) v = Double.NaN;
                    stack.addFirst(v.doubleValue());
                } else {
                    stack.addFirst(Double.parseDouble(part));
                }
                break;
        }
    }
    return stack.removeFirst();
}
|
python
|
def has_relationship(self, left_id, left_type, right_id, right_type,
                     rel_type='Related To'):
    """
    Checks if the two objects are related

    Args:
        left_id: The CRITs ID of the first indicator
        left_type: The CRITs TLO type of the first indicator
        right_id: The CRITs ID of the second indicator
        right_type: The CRITs TLO type of the second indicator
        rel_type: The relationships type ("Related To", etc)
    Returns:
        True or False if the relationship exists or not.
    """
    data = self.get_object(left_id, left_type)
    if not data:
        raise CRITsOperationalError('Crits Object not found with id {}'
                                    'and type {}'.format(left_id,
                                                         left_type))
    if 'relationships' not in data:
        return False
    # A relationship matches only when its type, target id and target TLO
    # type all agree with the requested right-hand object.
    return any(
        rel['relationship'] == rel_type
        and rel['value'] == right_id
        and rel['type'] == right_type
        for rel in data['relationships']
    )
|
java
|
/**
 * Reconstitutes this map from a serialized stream.
 *
 * <p>Expected stream layout (written by the matching writeObject): the
 * default serialized fields, then the bucket count, then the mapping count,
 * then {@code size} key/value pairs.
 *
 * @param s the object input stream to read from
 * @throws IOException on stream failure
 * @throws ClassNotFoundException if a serialized value's class cannot be loaded
 */
private void readObject(java.io.ObjectInputStream s)
    throws IOException, ClassNotFoundException
{
    // Read in the threshold, loadfactor, and any hidden stuff
    s.defaultReadObject();
    // Read in number of buckets and allocate the bucket array;
    int numBuckets = s.readInt();
    table = new Entry[numBuckets];
    // Read in size (number of Mappings)
    int size = s.readInt();
    // Read the keys and values, and put the mappings in the IntHashMap
    for (int i=0; i<size; i++) {
        int key = s.readInt();
        // Unchecked cast: the stream is trusted to contain values of type V.
        V value = (V) s.readObject();
        put(key, value);
    }
}
|
python
|
def rm(*components, **kwargs):
    """
    Remove a file or directory.

    If the path is a directory, this recursively removes the directory and
    any contents. Non-existent paths are silently ignored.

    Supports Unix style globbing by default (disable using glob=False).
    For details on globbing pattern expansion, see:
    https://docs.python.org/2/library/glob.html

    Arguments:
        *components (string[]): path to the file or directory to remove. May be
                                absolute or relative. May contain unix glob
        **kwargs: if "glob" is True, perform Unix style pattern expansion of
                  paths (default: True).
    """
    target = path(*components)
    expand = kwargs.get("glob", True)
    # Each expanded entry is removed according to what it is on disk.
    for entry in (iglob(target) if expand else [target]):
        if isfile(entry):
            os.remove(entry)
        elif exists(entry):
            shutil.rmtree(entry, ignore_errors=False)
|
python
|
def get_url(url, data=None, cached=True, cache_key=None, crawler='urllib'):
    """Retrieves the HTML code for a given URL.

    If a cached version is not available, uses the selected crawler to fetch
    the page and stores the result in the cache.

    data - Additional data that gets passed onto the crawler.
    cached - If True, retrieves the URL from the cache if it is available. If False, will still store the page in cache.
    cache_key - If set, will be used instead of the URL to lookup the cached version of the page.
    crawler - A string referencing one of the builtin crawlers.

    Returns the HTML as a unicode string.
    Raises a HttpNotFound exception if the page could not be found.
    Raises RuntimeError if the page is not cached and FAIL_IF_NOT_CACHED is set.
    """
    if cache_key is None:
        cache_key = url
    cache_path = cache_path_for_url(cache_key)
    if cached and os.path.exists(cache_path):
        with open(cache_path) as f:
            html = f.read().decode('utf-8')
    else:
        if FAIL_IF_NOT_CACHED:
            # RuntimeError instead of the previous BaseException: BaseException
            # escapes `except Exception` handlers and is reserved for
            # interpreter-exit signals (SystemExit/KeyboardInterrupt).
            raise RuntimeError("URL is not in cache and FAIL_IF_NOT_CACHED is True: %s" % url)
        crawler_fn = CRAWLERS[crawler]
        status, html = crawler_fn(url, data)
        if status != 200:
            raise HttpNotFound(url)
        _ensure_directory(CACHE_DIRECTORY)
        with open(cache_path, 'w') as f:
            f.write(html.encode('utf-8'))
    return html
|
java
|
/**
 * Logs the outcome of a single soft-assert check via the TestNG Reporter.
 *
 * <p>The log line includes the assert's message (if any), pass/fail status
 * and the enclosing test method name; failures also append the stack trace.
 *
 * @param assertCommand the assert that was evaluated
 * @param ex the assertion error captured on failure (unused for passes)
 * @param failedTest true if the assert failed, false if it passed
 */
private void showAssertInfo(IAssert<?> assertCommand, AssertionError ex, boolean failedTest) {
    ITestResult testResult = Reporter.getCurrentTestResult();
    // Checks whether the soft assert was called in a TestNG test run or else within a Java application.
    String methodName = "main";
    if (testResult != null) {
        methodName = testResult.getMethod().getMethodName();
    }
    StringBuilder sb = new StringBuilder();
    sb.append("Soft Assert ");
    if (assertCommand.getMessage() != null && !assertCommand.getMessage().trim().isEmpty()) {
        sb.append("[").append(assertCommand.getMessage()).append("] ");
    }
    if (failedTest) {
        sb.append("failed in ");
    } else {
        sb.append("passed in ");
    }
    sb.append(methodName).append("()\n");
    if (failedTest) {
        sb.append(ExceptionUtils.getStackTrace(ex));
    }
    // The boolean argument also echoes the message to standard output.
    Reporter.log(sb.toString(), true);
}
|
java
|
/**
 * Builds a molecular formula containing each isotope repeated according to
 * its occurrence count, then normalizes the element ordering.
 *
 * @param isoToCond_new isotopes to include
 * @param value_In occurrence count for each isotope, positionally matched
 * @return the ordered molecular formula
 */
private IMolecularFormula getFormula(List<IIsotope> isoToCond_new, int[] value_In) {
    IMolecularFormula formula = builder.newInstance(IMolecularFormula.class);
    for (int idx = 0; idx < isoToCond_new.size(); idx++) {
        int count = value_In[idx];
        for (int k = 0; k < count; k++) {
            formula.addIsotope(isoToCond_new.get(idx));
        }
    }
    return putInOrder(formula);
}
|
python
|
def time_iteration(model, initial_guess=None, dprocess=None, with_complementarities=True,
                   verbose=True, grid={},
                   maxit=1000, inner_maxit=10, tol=1e-6, hook=None, details=False):
    '''
    Finds a global solution for ``model`` using backward time-iteration.

    This algorithm iterates on the residuals of the arbitrage equations

    Parameters
    ----------
    model : Model
        model to be solved
    verbose : boolean
        if True, display iterations
    initial_guess : decision rule
        initial guess for the decision rule
    dprocess : DiscretizedProcess (model.exogenous.discretize())
        discretized process to be used
    with_complementarities : boolean (True)
        if False, complementarity conditions are ignored
    grid: grid options
        overload the values set in `options:grid` section
    maxit: maximum number of iterations
    inner_maxit: maximum number of iteration for inner solver
    tol: tolerance criterium for successive approximations
    hook: Callable
        function to be called within each iteration, useful for debugging purposes

    Returns
    -------
    decision rule :
        approximated solution
    '''
    # NOTE(review): `dprint` is imported but never used below -- confirm it
    # can be dropped.
    from dolo import dprint

    def vprint(t):
        # Print only when verbose output was requested.
        if verbose:
            print(t)

    if dprocess is None:
        dprocess = model.exogenous.discretize()
    n_ms = dprocess.n_nodes()  # number of exogenous states
    n_mv = dprocess.n_inodes(0)  # this assume number of integration nodes is constant
    x0 = model.calibration['controls']
    parms = model.calibration['parameters']
    n_x = len(x0)
    n_s = len(model.symbols['states'])
    endo_grid = model.get_grid(**grid)
    exo_grid = dprocess.grid
    mdr = DecisionRule(exo_grid, endo_grid)
    # NOTE: `grid` (the options dict parameter) is rebound here to the array
    # of endogenous grid nodes used for the rest of the function.
    grid = mdr.endo_grid.nodes()
    N = grid.shape[0]
    # Initial controls: one (N, n_x) slab per exogenous state.
    controls_0 = numpy.zeros((n_ms, N, n_x))
    if initial_guess is None:
        controls_0[:, :, :] = x0[None,None,:]
    else:
        if isinstance(initial_guess, AlgoResult):
            initial_guess = initial_guess.dr
        # The guess may be callable with the node index or the node value;
        # try the index form first and fall back to node values.
        try:
            for i_m in range(n_ms):
                controls_0[i_m, :, :] = initial_guess(i_m, grid)
        except Exception:
            for i_m in range(n_ms):
                m = dprocess.node(i_m)
                controls_0[i_m, :, :] = initial_guess(m, grid)
    f = model.functions['arbitrage']
    g = model.functions['transition']
    # Control bounds are only used when the model defines them AND
    # complementarities were requested; otherwise fall back to plain Newton.
    if 'controls_lb' in model.functions and with_complementarities==True:
        lb_fun = model.functions['controls_lb']
        ub_fun = model.functions['controls_ub']
        lb = numpy.zeros_like(controls_0)*numpy.nan
        ub = numpy.zeros_like(controls_0)*numpy.nan
        for i_m in range(n_ms):
            m = dprocess.node(i_m)[None,:]
            p = parms[None,:]
            m = numpy.repeat(m, N, axis=0)
            p = numpy.repeat(p, N, axis=0)
            lb[i_m,:,:] = lb_fun(m, grid, p)
            ub[i_m,:,:] = ub_fun(m, grid, p)
    else:
        with_complementarities = False
    # Flatten the exogenous-state dimension for the inner solvers.
    sh_c = controls_0.shape
    controls_0 = controls_0.reshape( (-1,n_x) )
    from dolo.numeric.optimize.newton import newton, SerialDifferentiableFunction
    from dolo.numeric.optimize.ncpsolve import ncpsolve
    err = 10
    it = 0
    if with_complementarities:
        vprint("Solving WITH complementarities.")
        lb = lb.reshape((-1,n_x))
        ub = ub.reshape((-1,n_x))
    if verbose:
        headline = '|{0:^4} | {1:10} | {2:8} | {3:8} | {4:3} |'.format( 'N',' Error', 'Gain','Time', 'nit' )
        stars = '-'*len(headline)
        print(stars)
        print(headline)
        print(stars)
    import time
    t1 = time.time()
    err_0 = numpy.nan
    verbit = (verbose == 'full')
    # Successive-approximation loop: solve the arbitrage system at the
    # current guess, measure the update step, repeat until convergence.
    while err>tol and it<maxit:
        it += 1
        t_start = time.time()
        mdr.set_values(controls_0.reshape(sh_c))
        fn = lambda x: residuals_simple(f, g, grid, x.reshape(sh_c), mdr, dprocess, parms).reshape((-1,n_x))
        dfn = SerialDifferentiableFunction(fn)
        # NOTE(review): `res` is computed but never used -- confirm whether
        # this call can be removed.
        res = fn(controls_0)
        if hook:
            hook()
        if with_complementarities:
            [controls,nit] = ncpsolve(dfn, lb, ub, controls_0, verbose=verbit, maxit=inner_maxit)
        else:
            [controls, nit] = newton(dfn, controls_0, verbose=verbit, maxit=inner_maxit)
        err = abs(controls-controls_0).max()
        # Gain = ratio of successive errors (NaN on the first iteration).
        err_SA = err/err_0
        err_0 = err
        controls_0 = controls
        t_finish = time.time()
        elapsed = t_finish - t_start
        if verbose:
            print('|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} | {4:3} |'.format( it, err, err_SA, elapsed, nit ))
    controls_0 = controls.reshape(sh_c)
    t2 = time.time()
    if verbose:
        print(stars)
        print("Elapsed: {} seconds.".format(t2-t1))
        print(stars)
    if not details:
        return mdr
    return TimeIterationResult(
        mdr,
        it,
        with_complementarities,
        dprocess,
        err<tol,  # x_converged: bool
        tol,  # x_tol
        err,  #: float
        None,  # log: object # TimeIterationLog
        None  # trace: object #{Nothing,IterationTrace}
    )
|
python
|
def grains(**kwargs):
    '''
    Get grains for minion.

    .. code-block:: bash

        salt '*' nxos.cmd grains
    '''
    # Populate the cache from `show version` output on first call only;
    # subsequent calls return the cached grains.
    if not DEVICE_DETAILS['grains_cache']:
        ret = salt.utils.nxos.system_info(show_ver(**kwargs))
        log.debug(ret)
        DEVICE_DETAILS['grains_cache'].update(ret['nxos'])
    return DEVICE_DETAILS['grains_cache']
|
python
|
def get_uri(self, ncname: str) -> Optional[str]:
    """Return the URI that ``ncname`` expands to.

    :param ncname: prefix name to expand (without trailing colon)
    :return: the expanded URI if it resolves to an http(s) URI, else None
    """
    expanded = cu.expand_uri(ncname + ':', self.curi_maps)
    if expanded and expanded.startswith('http'):
        return expanded
    return None
|
java
|
/**
 * Sets one of the baseline finish dates for this assignment.
 *
 * @param baselineNumber which baseline to set (index into BASELINE_FINISHES)
 * @param value the baseline finish date
 */
public void setBaselineFinish(int baselineNumber, Date value)
{
    set(selectField(AssignmentFieldLists.BASELINE_FINISHES, baselineNumber), value);
}
|
python
|
def pool_create(hypervisor, identifier, pool_path):
    """Storage pool creation.

    The following values are set in the XML configuration:
      * name
      * target/path
      * target/permission/label

    :param hypervisor: libvirt connection used to create the pool
    :param identifier: pool name, also the directory created under pool_path
    :param pool_path: parent directory holding the pool's storage directory
    :return: the storage pool object created by the hypervisor
    """
    path = os.path.join(pool_path, identifier)
    # exist_ok avoids the check-then-create race of the previous
    # `if not os.path.exists(path): os.makedirs(path)` sequence.
    os.makedirs(path, exist_ok=True)
    xml = POOL_DEFAULT_CONFIG.format(identifier, path)
    return hypervisor.storagePoolCreateXML(xml, 0)
|
java
|
/**
 * Returns a new column produced by applying {@code fun} to every value.
 *
 * <p>Values for which {@code fun} throws are recorded as missing cells
 * instead of aborting the mapping.
 *
 * @param fun the function applied to each value
 * @return a new column, same name as this one, with the mapped values
 */
public DoubleColumn map(ToDoubleFunction<Double> fun) {
    DoubleColumn mapped = DoubleColumn.create(name());
    for (double value : this) {
        try {
            mapped.append(fun.applyAsDouble(value));
        } catch (Exception e) {
            mapped.appendMissing();
        }
    }
    return mapped;
}
|
java
|
/**
 * Returns the existing {@code taglib-extension} child wrapped in its typed
 * view, creating a new one only when none is present.
 *
 * @return the first existing {@code taglib-extension} node, or a new one
 */
public TldExtensionType<TldTaglibType<T>> getOrCreateTaglibExtension()
{
    List<Node> nodeList = childNode.get("taglib-extension");
    if (nodeList == null || nodeList.isEmpty())
    {
        return createTaglibExtension();
    }
    return new TldExtensionTypeImpl<TldTaglibType<T>>(this, "taglib-extension", childNode, nodeList.get(0));
}
|
python
|
def redirect_stdout(self):
    """Redirect stdout to file so that it can be tailed and aggregated with the other logs.

    The original streams are kept in hijacked_stdout / hijacked_stderr so
    they can be restored later.
    """
    self.hijacked_stdout = sys.stdout
    self.hijacked_stderr = sys.stderr
    # 0 must be set as the buffer, otherwise lines won't get logged in time.
    # (Binary append mode is required: unbuffered text files are not allowed.)
    sys.stdout = open(self.hitch_dir.driverout(), "ab", 0)
    sys.stderr = open(self.hitch_dir.drivererr(), "ab", 0)
|
java
|
/**
 * Returns the tier views strictly below the tier with the given alias.
 *
 * <p>The result is a {@code subList} view backed by the internal list,
 * ordered from the next tier down to the lowest tier.
 *
 * @param tierAlias alias of the reference tier
 * @return views of all tiers below the given tier (possibly empty)
 */
public List<StorageTierView> getTierViewsBelow(String tierAlias) {
    int ordinal = getTierView(tierAlias).getTierViewOrdinal();
    return mTierViews.subList(ordinal + 1, mTierViews.size());
}
|
python
|
def _generate_list_skippers(self):
    """
    Generate the list of skippers of page.

    :return: The list of skippers of page.
    :rtype: hatemile.util.html.htmldomelement.HTMLDOMElement
    """
    # Look for an existing skipper container by its well-known id.
    container = self.parser.find(
        '#'
        + AccessibleNavigationImplementation.ID_CONTAINER_SKIPPERS
    ).first_result()
    html_list = None
    if container is None:
        # No container yet: create a <div> with the id and prepend it to
        # <body> (skipped entirely if the document has no body).
        local = self.parser.find('body').first_result()
        if local is not None:
            container = self.parser.create_element('div')
            container.set_attribute(
                'id',
                AccessibleNavigationImplementation.ID_CONTAINER_SKIPPERS
            )
            local.prepend_element(container)
    if container is not None:
        # Reuse the container's existing <ul> when present, otherwise
        # create and append one.
        html_list = self.parser.find(container).find_children(
            'ul'
        ).first_result()
        if html_list is None:
            html_list = self.parser.create_element('ul')
            container.append_element(html_list)
        self.list_skippers_added = True
    return html_list
|
java
|
/**
 * Adds the given target addresses to this request (varargs fluent setter).
 *
 * <p>NOTE: this appends to any existing list; use {@code setTargetIps} to
 * replace the list instead.
 *
 * @param targetIps the target addresses to add
 * @return this request, for method chaining
 */
public CreateResolverRuleRequest withTargetIps(TargetAddress... targetIps) {
    if (this.targetIps == null) {
        setTargetIps(new java.util.ArrayList<TargetAddress>(targetIps.length));
    }
    for (TargetAddress ele : targetIps) {
        this.targetIps.add(ele);
    }
    return this;
}
|
java
|
/**
 * {@inheritDoc}
 *
 * <p>Runs the superclass initialization for the given path and columns,
 * then performs this reader's own internal setup.
 */
@Override
public void initialize(String path, List<String> columns) throws IOException,
    UnsupportedOperationException {
    super.initialize(path, columns);
    initializeInternal();
}
|
java
|
/**
 * Creates a score for a ticket (POST /support/tickets/{ticketId}/score).
 *
 * @param ticketId ticket identifier substituted into the request path
 * @param score the score value to record
 * @param scoreComment optional comment attached to the score
 * @throws IOException if the API call fails
 */
public void tickets_ticketId_score_POST(Long ticketId, String score, String scoreComment) throws IOException {
    String qPath = "/support/tickets/{ticketId}/score";
    StringBuilder sb = path(qPath, ticketId);
    HashMap<String, Object>o = new HashMap<String, Object>();
    addBody(o, "score", score);
    addBody(o, "scoreComment", scoreComment);
    // NOTE(review): exec receives both the raw template (qPath) and the
    // substituted path (sb) -- presumably the generated client convention;
    // confirm against the exec signature.
    exec(qPath, "POST", sb.toString(), o);
}
|
python
|
def detect(self, text):
    """Detect the language of the input text.

    :param text: The source text(s) whose language you want to identify.
        Batch detection is supported via sequence input.
    :type text: UTF-8 :class:`str`; :class:`unicode`; string sequence (list, tuple, iterator, generator)
    :rtype: Detected
    :rtype: :class:`list` (when a list is passed)
    """
    # Batch input: detect each element independently.
    if isinstance(text, list):
        return [self.detect(item) for item in text]
    # Translating with src='auto' makes the service report the detected
    # source language alongside the translation payload.
    data = self._translate(text, dest='en', src='auto')
    # actual source language that will be recognized by Google Translator when the
    # src passed is equal to auto.
    src = ''
    confidence = 0.0
    try:
        src = ''.join(data[8][0])
        confidence = data[8][-2][0]
    except Exception:  # pragma: nocover
        pass
    return Detected(lang=src, confidence=confidence)
|
python
|
def create_prj_browser(self, ):
    """Create the project browser

    This creates a combobox brower for projects
    and adds it to the ui

    :returns: the created combo box browser
    :rtype: :class:`jukeboxcore.gui.widgets.browser.ComboBoxBrowser`
    :raises: None
    """
    # Single-level combobox with one header row labelled 'Project:'.
    prjbrws = ComboBoxBrowser(1, headers=['Project:'])
    # Insert at index 0 so the browser appears at the top of the layout.
    self.central_vbox.insertWidget(0, prjbrws)
    return prjbrws
|
python
|
def _t_update_b(self):
    r"""
    A method to update 'b' array at each time step according to
    't_scheme' and the source term value

    Raises
    ------
    ValueError
        If ``settings['t_scheme']`` is not one of 'implicit',
        'cranknicolson' or 'steady'.
    """
    network = self.project.network
    phase = self.project.phases()[self.settings['phase']]
    Vi = network['pore.volume']
    dt = self.settings['t_step']
    s = self.settings['t_scheme']
    # Scheme weights: f1 scales the steady-state matrix contribution,
    # f2 the transient contribution, f3 the steady zero term.
    if (s == 'implicit'):
        f1, f2, f3 = 1, 1, 0
    elif (s == 'cranknicolson'):
        f1, f2, f3 = 0.5, 1, 0
    elif (s == 'steady'):
        f1, f2, f3 = 1, 0, 1
    else:
        # Previously an unknown scheme fell through and crashed later with
        # a NameError on f1; fail fast with a clear message instead.
        raise ValueError('Unsupported t_scheme: ' + str(s))
    x_old = self[self.settings['quantity']]
    b = (f2*(1-f1)*(-self._A_steady)*x_old +
         f2*(Vi/dt)*x_old +
         f3*np.zeros(shape=(self.Np, ), dtype=float))
    self._update_physics()
    for item in self.settings['sources']:
        Ps = self.pores(item)
        # Update b
        b[Ps] = b[Ps] - f2*(1-f1)*(phase[item+'.'+'rate'][Ps])
    self._b = b
    return b
|
python
|
def p(self, path):
    """
    provide absolute path within the container

    :param path: path with container
    :return: str
    """
    # Drop a single leading slash so os.path.join does not discard the
    # mount point prefix.
    stripped = path[1:] if path.startswith("/") else path
    result = os.path.join(self.mount_point, stripped)
    logger.debug("path = %s", result)
    return result
|
python
|
def format_stack_trace_json(self):
    """Serialize this StackTrace into its JSON-style dict representation.

    The 'stack_frames' entry is only present when there are frames.
    """
    trace_json = {}
    if self.stack_frames:
        frames_entry = {
            'frame': self.stack_frames,
            'dropped_frames_count': self.dropped_frames_count,
        }
        trace_json['stack_frames'] = frames_entry
    trace_json['stack_trace_hash_id'] = self.stack_trace_hash_id
    return trace_json
|
java
|
/**
 * Updates a log-based metric.
 *
 * @param metricName resource name of the metric to update
 * @param metric the new metric definition
 * @return the updated metric returned by the service
 */
public final LogMetric updateLogMetric(MetricName metricName, LogMetric metric) {
    UpdateLogMetricRequest request =
        UpdateLogMetricRequest.newBuilder()
            // NOTE(review): a null metricName leads to setMetricName(null);
            // proto builders typically reject null strings -- confirm that a
            // null name is actually a supported input here.
            .setMetricName(metricName == null ? null : metricName.toString())
            .setMetric(metric)
            .build();
    return updateLogMetric(request);
}
|
java
|
/**
 * Creates or updates a P2S VPN server configuration asynchronously.
 *
 * <p>Delegates to the ServiceResponse variant and unwraps the response body.
 *
 * @param resourceGroupName the resource group of the P2SVpnServerConfiguration
 * @param virtualWanName the name of the parent VirtualWan
 * @param p2SVpnServerConfigurationName the name of the configuration
 * @param p2SVpnServerConfigurationParameters the configuration to apply
 * @return an Observable emitting the resulting inner resource
 */
public Observable<P2SVpnServerConfigurationInner> createOrUpdateAsync(String resourceGroupName, String virtualWanName, String p2SVpnServerConfigurationName, P2SVpnServerConfigurationInner p2SVpnServerConfigurationParameters) {
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, virtualWanName, p2SVpnServerConfigurationName, p2SVpnServerConfigurationParameters).map(new Func1<ServiceResponse<P2SVpnServerConfigurationInner>, P2SVpnServerConfigurationInner>() {
        @Override
        public P2SVpnServerConfigurationInner call(ServiceResponse<P2SVpnServerConfigurationInner> response) {
            return response.body();
        }
    });
}
|
python
|
def region_option(f):
    """
    Configures --region option for CLI

    :param f: Callback Function to be passed to Click
    :return: the function decorated with the click --region option
    """
    def callback(ctx, param, value):
        # Store the chosen region on the shared Context object so other
        # commands/options can read it; expose_value=False below keeps it
        # out of the command's own parameters.
        state = ctx.ensure_object(Context)
        state.region = value
        return value
    return click.option('--region',
                        expose_value=False,
                        help='Set the AWS Region of the service (e.g. us-east-1).',
                        callback=callback)(f)
|
java
|
/**
 * Begins emitting a new method in the class file.
 *
 * <p>Adds the method name and type descriptor to the constant pool, makes
 * the new method current, resets per-method jump bookkeeping, and marks a
 * super-block start at instruction offset 0.
 *
 * @param methodName the method name
 * @param type the method's JVM type descriptor
 * @param flags the method's access flags
 */
public void startMethod(String methodName, String type, short flags) {
    short methodNameIndex = itsConstantPool.addUtf8(methodName);
    short typeIndex = itsConstantPool.addUtf8(type);
    itsCurrentMethod = new ClassFileMethod(methodName, methodNameIndex,
                                           type, typeIndex, flags);
    itsJumpFroms = new UintMap();
    itsMethods.add(itsCurrentMethod);
    addSuperBlockStart(0);
}
|
java
|
/**
 * Convenience overload that derives the target client id from the component.
 *
 * <p>Delegates to the variant taking an explicit client id, passing
 * {@code component.getClientId(facesContext)}; all other arguments are
 * forwarded unchanged.
 *
 * @return the result of the delegated overload
 * @throws IOException if writing the attribute fails
 */
public static boolean renderBehaviorizedAttribute(
    FacesContext facesContext, ResponseWriter writer,
    String componentProperty, UIComponent component, String eventName,
    Collection<ClientBehaviorContext.Parameter> eventParameters,
    Map<String, List<ClientBehavior>> clientBehaviors,
    String htmlAttrName, String attributeValue) throws IOException
{
    return renderBehaviorizedAttribute(facesContext, writer,
        componentProperty, component,
        component.getClientId(facesContext), eventName,
        eventParameters, clientBehaviors, htmlAttrName, attributeValue);
}
|
python
|
def fit_circle_check(points,
                     scale,
                     prior=None,
                     final=False,
                     verbose=False):
    """
    Fit a circle, and reject the fit if:
    * the radius is larger than tol.radius_min*scale or tol.radius_max*scale
    * any segment spans more than tol.seg_angle
    * any segment is longer than tol.seg_frac*scale
    * the fit deviates by more than tol.radius_frac*radius
    * the segments on the ends deviate from tangent by more than tol.tangent

    Parameters
    ---------
    points: (n, d) set of points which represent a path
    prior: (center, radius) tuple for best guess, or None if unknown
    scale: float, what is the overall scale of the set of points
    final: boolean, if True also require at least 3 segments above
        tol.seg_angle_min
    verbose: boolean, if True output log.debug messages for the reasons
        for fit rejection. Potentially generates hundreds of thousands of
        messages so only suggested in manual debugging.

    Returns
    ---------
    if fit is acceptable:
        dict with keys 'center' and 'radius'
    else:
        None
    """
    # an arc needs at least three points
    if len(points) < 3:
        return None
    # do a least squares fit on the points
    C, R, r_deviation = fit_nsphere(points, prior=prior)
    # check to make sure radius is between min and max allowed
    if not tol.radius_min < (R / scale) < tol.radius_max:
        if verbose:
            log.debug('circle fit error: R %f', R / scale)
        return None
    # check point radius error
    r_error = r_deviation / R
    if r_error > tol.radius_frac:
        if verbose:
            log.debug('circle fit error: fit %s', str(r_error))
        return None
    vectors = np.diff(points, axis=0)
    segment = np.linalg.norm(vectors, axis=1)
    # approximate angle in radians, segments are linear length
    # not arc length but this is close and avoids a cosine
    angle = segment / R
    if (angle > tol.seg_angle).any():
        if verbose:
            log.debug('circle fit error: angle %s', str(angle))
        return None
    # NOTE(review): unlike every other rejection below, this log call is not
    # gated on `verbose` -- confirm that is intentional.
    if final and (angle > tol.seg_angle_min).sum() < 3:
        log.debug('final: angle %s', str(angle))
        return None
    # check segment length as a fraction of drawing scale
    scaled = segment / scale
    if (scaled > tol.seg_frac).any():
        if verbose:
            log.debug('circle fit error: segment %s', str(scaled))
        return None
    # check to make sure the line segments on the ends are actually
    # tangent with the candidate circle fit
    mid_pt = points[[0, -2]] + (vectors[[0, -1]] * .5)
    radial = unitize(mid_pt - C)
    ends = unitize(vectors[[0, -1]])
    tangent = np.abs(np.arccos(diagonal_dot(radial, ends)))
    tangent = np.abs(tangent - np.pi / 2).max()
    if tangent > tol.tangent:
        if verbose:
            log.debug('circle fit error: tangent %f',
                      np.degrees(tangent))
        return None
    result = {'center': C,
              'radius': R}
    return result
|
java
|
/**
 * Lists the typed-link facet names matching the given request.
 *
 * <p>Runs the standard pre-execution hook on the request before delegating
 * to the generated execute method.
 *
 * @param request the list request
 * @return the service call result
 */
@Override
public ListTypedLinkFacetNamesResult listTypedLinkFacetNames(ListTypedLinkFacetNamesRequest request) {
    request = beforeClientExecution(request);
    return executeListTypedLinkFacetNames(request);
}
|
java
|
/**
 * Rasterizes a line from (x0, y0) to (x1, y1) into the boolean image using
 * Bresenham's algorithm. Endpoints are clamped into the image bounds first.
 *
 * @param x0 start x
 * @param y0 start y
 * @param x1 end x
 * @param y1 end y
 * @param pic image indexed as pic[x][y]; touched pixels are set to true
 */
private static void drawLine(int x0, int y0, int x1, int y1, boolean[][] pic) {
    final int xres = pic.length;
    final int yres = pic[0].length;
    // Clamp both endpoints into the raster.
    x0 = clampToRange(x0, xres);
    x1 = clampToRange(x1, xres);
    y0 = clampToRange(y0, yres);
    y1 = clampToRange(y1, yres);
    // Bresenham setup: dx positive, dy negative, step signs per direction.
    final int dx = Math.abs(x1 - x0);
    final int dy = -Math.abs(y1 - y0);
    final int sx = x0 < x1 ? 1 : -1;
    final int sy = y0 < y1 ? 1 : -1;
    int err = dx + dy;
    while (true) {
        pic[x0][y0] = true;
        if (x0 == x1 && y0 == y1) {
            return;
        }
        final int doubled = 2 * err;
        if (doubled > dy) {
            err += dy;
            x0 += sx;
        }
        if (doubled < dx) {
            err += dx;
            y0 += sy;
        }
    }
}

/** Clamps {@code v} into the index range {@code [0, res - 1]}. */
private static int clampToRange(int v, int res) {
    return (v < 0) ? 0 : (v >= res) ? (res - 1) : v;
}
|
java
|
/**
 * Removes every row belonging to the given partition from the query result.
 *
 * <p>Mutates the result's row list in place via {@code removeIf}.
 *
 * @param queryResult result whose rows are pruned
 * @param partitionId id of the partition whose rows should be dropped
 */
private void removePartitionResults(QueryResult queryResult, int partitionId) {
    List<QueryResultRow> rows = queryResult.getRows();
    rows.removeIf(resultRow -> getPartitionId(resultRow) == partitionId);
}
|
python
|
def load_db(file, db, verbose=True):
    """
    Load :class:`mongomock.database.Database` from a local file.

    :param file: file path.
    :param db: instance of :class:`mongomock.database.Database`.
    :param verbose: bool, toggle on log.
    :return: loaded db.
    """
    # NOTE(review): `json.load` here accepts a `verbose` kwarg, so `json` is
    # presumably a project serializer (e.g. superjson), not the stdlib json
    # module -- confirm before refactoring.
    db_data = json.load(file, verbose=verbose)
    return _load(db_data, db)
|
python
|
def save_to_db(model_text_id, parsed_values):
    """save to db and return saved object

    :param model_text_id: 'app_label.ModelName' string resolvable by
        django's app registry
    :param parsed_values: mapping of field name -> raw parsed value
    :return: the saved model instance
    """
    Model = apps.get_model(model_text_id)
    # normalise values and separate to m2m, simple
    simple_fields = {}
    many2many_fields = {}
    for field, value in parsed_values.items():
        if (Model._meta.get_field(
                field).get_internal_type() == 'ManyToManyField'):
            many2many_fields[field] = value
        elif (Model._meta.get_field(
                field).get_internal_type() == 'DateTimeField'):
            # DateTime values arrive as strings; parse them to datetimes.
            simple_fields[field] = time_parser.parse(value)
        else:
            simple_fields[field] = value
    # ToDo: add unique identify parameter to field
    # ToDo: allow unique identify m2m field
    model, created = Model.objects.get_or_create(**simple_fields)
    for field, value in many2many_fields.items():
        # NOTE(review): plain setattr on an m2m manager raises in Django >= 2.0
        # (the related manager requires .set(value)) -- confirm the Django
        # version this targets.
        setattr(model, field, value)
    model.save()
    return model
|
python
|
def count_missing(self, axis=None):
    """Count missing genotypes.

    Parameters
    ----------
    axis : int, optional
        Axis over which to count, or None to perform overall count.
    """
    # Boolean mask of missing calls; summing booleans counts the True cells.
    missing_mask = self.is_missing()
    return np.sum(missing_mask, axis=axis)
|
java
|
/**
 * Shows a save dialog and writes the current image to the chosen file.
 *
 * <p>The chooser offers only writable image formats; if the selected file
 * name does not match the filter, a {@code .png} suffix is appended. Nothing
 * happens when the user cancels the dialog.
 *
 * @throws IOException if writing the image fails
 */
public void save() throws IOException {
    FileChooser.SimpleFileFilter filter = FileChooser.SimpleFileFilter.getWritableImageFIlter();
    JFileChooser chooser = FileChooser.getInstance();
    chooser.setFileFilter(filter);
    chooser.setAcceptAllFileFilterUsed(false);
    // Clear any stale selection left over from a previous dialog.
    chooser.setSelectedFiles(new File[0]);
    int returnVal = chooser.showSaveDialog(null);
    if (returnVal == JFileChooser.APPROVE_OPTION) {
        File file = chooser.getSelectedFile();
        if (!filter.accept(file)) {
            file = new File(file.getParentFile(), file.getName() + ".png");
        }
        save(file);
    }
}
|
python
|
def tensor_dim_to_mesh_dim_size(layout, mesh_shape, tensor_dim):
    """How many ways a tensor dimension gets split.

    This is used to "cheat" when building the mtf graph and peek at how a
    tensor dimension will be split. Returns 1 if the tensor dimension is not
    split.

    Args:
        layout: an input to convert_to_layout_rules
        mesh_shape: an input to convert_to_shape
        tensor_dim: a Dimension

    Returns:
        an integer
    """
    rules = convert_to_layout_rules(layout)
    shape = convert_to_shape(mesh_shape)
    axis = rules.tensor_dimension_to_mesh_axis(tensor_dim, shape)
    # Unassigned dimensions are not split at all.
    return 1 if axis is None else shape.dims[axis].size
|
java
|
/**
 * Evaluates the loaded expression tree with the given variable bindings.
 *
 * @param localVariables variable name to value bindings used during evaluation
 * @return the evaluated result
 * @throws IllegalStateException if no function has been loaded yet
 */
public double evaluate(Map<String, Double> localVariables) {
    if (this.root == null) {
        // IllegalStateException (a RuntimeException subtype, so existing
        // catch blocks still apply) instead of a raw RuntimeException:
        // "called before loading" is a state error.
        throw new IllegalStateException("[function error] evaluation failed " +
            "because function is not loaded");
    }
    return this.root.evaluate(localVariables);
}
|
java
|
/**
 * Compares the stored tag at the given bucket slot against {@code tag},
 * bit by bit.
 *
 * @param bucketIndex index of the bucket
 * @param posInBucket slot within the bucket
 * @param tag expected tag value (low {@code bitsPerTag} bits)
 * @return true if every stored bit matches the corresponding bit of tag
 */
boolean checkTag(long bucketIndex, int posInBucket, long tag) {
    long tagStart = getTagOffset(bucketIndex, posInBucket);
    final int tagBits = bitsPerTag;
    for (long bit = 0; bit < tagBits; bit++) {
        boolean expected = (tag & (1L << bit)) != 0;
        if (memBlock.get(tagStart + bit) != expected) {
            return false;
        }
    }
    return true;
}
|
java
|
/**
 * Adds the given entries to this result (varargs fluent setter).
 *
 * <p>NOTE: this appends to any existing list; use {@code setEntries} to
 * replace the list instead.
 *
 * @param entries the entries to add
 * @return this result, for method chaining
 */
public PutEventsResult withEntries(PutEventsResultEntry... entries) {
    if (this.entries == null) {
        setEntries(new java.util.ArrayList<PutEventsResultEntry>(entries.length));
    }
    for (PutEventsResultEntry ele : entries) {
        this.entries.add(ele);
    }
    return this;
}
|
java
|
/**
 * Reports all invalid required keys and any dependency cycles on the output.
 *
 * @param output receives the error reports
 * @param invalidKeys source of the invalid required key entries
 * @return true only when the graph has no cycles and there are no invalid
 *     required keys
 */
public boolean validate(DependencyExplorerOutput output, InvalidKeys invalidKeys) {
    Collection<Map.Entry<Key<?>, String>> invalidRequiredKeys =
        invalidKeys.getInvalidRequiredKeys();
    // Surface each invalid required key as an error before cycle detection.
    for (Map.Entry<Key<?>, String> error : invalidRequiredKeys) {
        reportError(output, error.getKey(), error.getValue());
    }
    // findAndReportCycles both detects and reports; the short-circuited
    // isEmpty() check on the right has no side effects.
    return !cycleFinder.findAndReportCycles(output.getGraph()) && invalidRequiredKeys.isEmpty();
}
|
java
|
/**
 * Creates a callback list pre-populated with a single callback.
 *
 * @param <T> the callback payload type
 * @param callback the initial callback to register
 * @return a new list containing only {@code callback}
 */
public static <T> CallbackList<T> create(Callback<T> callback) {
    CallbackList<T> list = new CallbackList<T>();
    list.add(callback);
    return list;
}
|
java
|
/**
 * Reads each template resource, determines which PortalDataKey(s) its
 * document matches, and groups the parsed documents into buckets keyed by
 * PortalDataKey so they can be imported in the configured order.
 *
 * @param tenant    the tenant the templates are being imported for (used
 *                  for logging here).
 * @param templates the template resources to parse and classify.
 * @return a map from PortalDataKey to the set of (resource, document)
 *         tuples matching that key.
 * @throws Exception if a resource cannot be read/parsed or matches no
 *         known PortalDataKey; the failing resource is logged first.
 */
private Map<PortalDataKey, Set<BucketTuple>> prepareImportQueue(
        final ITenant tenant, final Set<Resource> templates) throws Exception {
    final Map<PortalDataKey, Set<BucketTuple>> rslt = new HashMap<>();
    // Tracked outside the loop so the catch block can report which
    // resource was being processed when a failure occurred.
    Resource rsc = null;
    try {
        for (Resource r : templates) {
            rsc = r;
            if (log.isDebugEnabled()) {
                log.debug(
                        "Loading template resource file for tenant "
                                + "'"
                                + tenant.getFname()
                                + "': "
                                + rsc.getFilename());
            }
            final Document doc = reader.read(rsc.getInputStream());
            PortalDataKey atLeastOneMatchingDataKey = null;
            // Check the document against every key in import order; a
            // document may legitimately match more than one key.
            for (PortalDataKey pdk : dataKeyImportOrder) {
                boolean matches = evaluatePortalDataKeyMatch(doc, pdk);
                if (matches) {
                    // Found the right bucket...
                    log.debug("Found PortalDataKey '{}' for data document {}", pdk, r.getURI());
                    atLeastOneMatchingDataKey = pdk;
                    Set<BucketTuple> bucket = rslt.get(atLeastOneMatchingDataKey);
                    if (bucket == null) {
                        // First of these we've seen; create the bucket;
                        bucket = new HashSet<>();
                        rslt.put(atLeastOneMatchingDataKey, bucket);
                    }
                    BucketTuple tuple = new BucketTuple(rsc, doc);
                    bucket.add(tuple);
                    /*
                     * At this point, we would normally add a break;
                     * statement, but group_membership.xml files need to
                     * match more than one PortalDataKey.
                     */
                }
            }
            if (atLeastOneMatchingDataKey == null) {
                // We can't proceed
                throw new RuntimeException(
                        "No PortalDataKey found for QName: "
                                + doc.getRootElement().getQName());
            }
        }
    } catch (Exception e) {
        log.error(
                "Failed to process the specified template: {}",
                (rsc != null ? rsc.getFilename() : "null"),
                e);
        throw e;
    }
    return rslt;
}
|
java
|
/**
 * Registers a dissector with this parser and marks the parser as needing
 * re-assembly. A {@code null} dissector is silently ignored.
 *
 * @param dissector the dissector to add; may be null.
 * @return this parser, for chaining.
 */
public final Parser<RECORD> addDissector(final Dissector dissector) {
    // Any change to the dissector set invalidates the assembled state.
    assembled = false;
    if (dissector == null) {
        return this;
    }
    allDissectors.add(dissector);
    return this;
}
|
java
|
/** Builds the counters manager backed by this service's label and value buffers. */
private void createCountersManager() {
    final UnsafeBuffer labels = monitorFileWriter.createServiceCounterLabelsBuffer(index);
    final UnsafeBuffer values = monitorFileWriter.createServiceCounterValuesBuffer(index);
    countersManager = new CountersManager(labels, values);
}
|
java
|
/** Forwards MiPush notification-click callbacks to the shared processing path. */
@Override
public void onNotificationMessageClicked(Context context, com.xiaomi.mipush.sdk.MiPushMessage miPushMessage) {
    processMiNotification(miPushMessage);
}
|
java
|
/**
 * Destroys the native Wayland proxy and releases all local bookkeeping:
 * the proxy is destroyed via the client core, the wrapper is dropped from
 * the object cache, and the JNI object pointer is closed.
 */
public void destroy() {
    WaylandClientCore.INSTANCE()
                     .wl_proxy_destroy(this.pointer);
    // Forget the cached Java wrapper for this native pointer.
    ObjectCache.remove(this.pointer);
    // Release the native reference held for callbacks.
    this.jObjectPointer.close();
}
|
python
|
def listdict_to_listlist_and_matrix(sparse):
    """Convert a graph from listdict to listlist + weight-matrix form.

    :param sparse: graph in listdict representation (one dict of
        neighbor -> weight per vertex)
    :returns: couple (listlist adjacency, weight matrix); absent edges
        are None in the matrix
    :complexity: linear in vertices plus edges (matrix fill is O(V^2))
    """
    n = len(sparse)
    # Adjacency lists: the keys of each vertex's dict, in dict order.
    graph = [list(sparse[u]) for u in range(n)]
    # Dense weight matrix; None marks a missing edge.
    weight = [[sparse[u][v] if v in sparse[u] else None for v in range(n)]
              for u in range(n)]
    return graph, weight
|
java
|
/**
 * Returns the current sequence number for {@code id} (starting at 1) and
 * advances the stored counter by one.
 *
 * @param id the identifier whose sequence is requested; must not be null.
 * @return the sequence value prior to incrementing.
 */
public Integer getAndIncrementIDSeq(final String id) {
    Validate.notNull(id, "ID cannot be null");
    Integer current = this.idCounts.get(id);
    if (current == null) {
        // First request for this id: the sequence starts at 1.
        current = Integer.valueOf(1);
    }
    this.idCounts.put(id, Integer.valueOf(current.intValue() + 1));
    return current;
}
|
java
|
/**
 * Schedules {@code retryIfReady()} to run after RETRY_DELAY_SECONDS.
 *
 * Synchronizes on the executor so the shutdown check and the schedule call
 * are atomic with respect to executor shutdown: a retry is only scheduled
 * while the executor is still accepting work.
 */
private void scheduleRetryFuture() {
    Log.v(Log.TAG_SYNC, "%s: Failed to xfer; will retry in %d sec", this, RETRY_DELAY_SECONDS);
    synchronized (executor) {
        if (!executor.isShutdown()) {
            // Keep the future so a later success/teardown can cancel it.
            this.retryFuture = executor.schedule(new Runnable() {
                public void run() {
                    retryIfReady();
                }
            }, RETRY_DELAY_SECONDS, TimeUnit.SECONDS);
        }
    }
}
|
java
|
/**
 * Forces blur/change events for {@code element} by clicking the page body,
 * but only when the element is an {@code <input>} tag.
 */
private void triggerEvents(WebElement element, WebDriver driver) {
    boolean isInput = "input".equalsIgnoreCase(element.getTagName());
    if (!isInput) {
        return;
    }
    driver.findElement(By.tagName("body")).click();
}
|
java
|
/**
 * Commits the current transaction on the underlying HSQL session.
 *
 * @throws SQLException if the connection is closed or the engine reports
 *         a failure (translated from HsqlException).
 */
public synchronized void commit() throws SQLException {
    checkClosed();

    try {
        // NOTE(review): the boolean flag's meaning is defined by the
        // HSQLDB Session.commit(boolean) API -- confirm before changing.
        sessionProxy.commit(false);
    } catch (HsqlException e) {
        throw Util.sqlException(e);
    }
}
|
python
|
def reactions(columns, n_results, write_db, queries):
    """Search for reactions.

    Parameters
    ----------
    columns : columns to select.
    n_results : maximum number of rows to fetch.
    write_db : when truthy, write results (with geometries) to a database
        instead of printing a table.
    queries : either a dict of query filters, or an iterable of
        'key=value' strings which is parsed into such a dict.
    """
    if isinstance(queries, dict):
        # BUGFIX: query_dict was previously left undefined on this path,
        # raising NameError at the **query_dict call below.
        query_dict = queries
    else:
        query_dict = {}
        for q in queries:
            key, value = q.split('=')
            if key == 'distinct':
                if value in ['True', 'true']:
                    query_dict.update({key: True})
                    continue
            try:
                value = int(value)
                query_dict.update({key: value})
            except ValueError:
                # Non-numeric values pass through as strings.  (Previously
                # `except BaseException`, which also swallowed
                # KeyboardInterrupt/SystemExit.)
                # Keep {0} in string.format for python2.6 compatibility
                query_dict.update({key: '{0}'.format(value)})

    if write_db and n_results > 1000:
        print("""Warning: You're attempting to write more than a 1000 rows
with geometries. This could take some time""")
    data = query.get_reactions(columns=columns,
                               n_results=n_results,
                               write_db=write_db,
                               **query_dict)
    if write_db:
        return
    table = []
    headers = []
    for row in data['reactions']['edges']:
        table += [list(row['node'].values())]
        headers = list(row['node'].keys())
    print(tabulate(table, headers) + '\n')
|
java
|
/**
 * Returns the value of the given column as a {@link Timestamp}.
 *
 * @param columnIndex the column index. -- NOTE(review): 1-based vs 0-based
 *        depends on getValueObject; confirm against its implementation.
 * @return the parsed timestamp value.
 * @throws SQLException when the underlying field cannot be parsed as a date.
 */
public Timestamp getTimestamp(final int columnIndex) throws SQLException {
    try {
        return getValueObject(columnIndex).getTimestamp();
    } catch (ParseException e) {
        // NOTE(review): the ParseException cause is dropped here; if
        // SQLExceptionMapper offers an overload accepting a cause,
        // prefer it so the stack trace is preserved.
        throw SQLExceptionMapper.getSQLException("Could not parse field as date");
    }
}
|
python
|
def ethernet_connected_chips(self):
    """Iterate over the coordinates of Ethernet connected chips.

    Yields
    ------
    ((x, y), str)
        The coordinate and IP address of each Ethernet connected chip in
        the system.
    """
    for coord, info in six.iteritems(self):
        # Skip chips whose Ethernet link is down.
        if not info.ethernet_up:
            continue
        yield (coord, info.ip_address)
|
java
|
/**
 * Generates {@code size} random BigIntegers drawn uniformly from the real
 * interval [start, end) and truncated toward zero.
 *
 * @param start inclusive lower bound; must be strictly less than end.
 * @param end   exclusive upper bound.
 * @param size  number of values to generate.
 * @return an array of {@code size} random BigIntegers.
 * @throws IllegalArgumentException if {@code start >= end} (same contract
 *         as the previous Guava Preconditions.checkArgument call).
 */
static public BigInteger[] randomBigIntegers(double start, double end, int size) {
    if (!(start < end)) {
        throw new IllegalArgumentException("Start must be less than end.");
    }
    // new Random() is already seeded from system entropy; the explicit
    // currentTimeMillis re-seed was redundant and weaker.
    Random random = new Random();
    BigInteger[] result = new BigInteger[size];
    for (int i = 0; i < size; i++) {
        // Scale a uniform [0, 1) draw into [start, end), then truncate.
        double d = (end - start) * random.nextDouble() + start;
        result[i] = new BigDecimal(d).toBigInteger();
    }
    return result;
}
|
java
|
/**
 * Sorts {@code array[from..to]} ascending in place, applying the identical
 * permutation to {@code idx} so the two arrays stay aligned.
 *
 * Classic Lomuto-partition quicksort using {@code array[to]} as pivot.
 *
 * @param array values to sort.
 * @param idx   companion index array, permuted in lockstep with array.
 * @param from  first index of the range (inclusive).
 * @param to    last index of the range (inclusive).
 */
public static void quickSort(double[] array, int[] idx, int from, int to) {
    if (from >= to) {
        return;
    }
    double pivot = array[to];
    int pivotIdx = idx[to];
    int i = from - 1;
    for (int j = from; j < to; j++) {
        if (array[j] <= pivot) {
            i++;
            double tmpVal = array[j];
            array[j] = array[i];
            array[i] = tmpVal;
            int tmpIdx = idx[j];
            idx[j] = idx[i];
            idx[i] = tmpIdx;
        }
    }
    // Move the pivot into its final position i + 1.
    array[to] = array[i + 1];
    array[i + 1] = pivot;
    idx[to] = idx[i + 1];
    idx[i + 1] = pivotIdx;
    quickSort(array, idx, from, i);
    // BUGFIX: the pivot at i + 1 is already in place; the original
    // recursed on (i + 1, to) and redundantly re-partitioned it.
    quickSort(array, idx, i + 2, to);
}
|
java
|
/**
 * Instantiates a {@code Trainable} subclass through its
 * (String, Configuration) constructor, which may be non-public.
 *
 * @param aClass        the concrete class to instantiate.
 * @param storageName   forwarded to the constructor.
 * @param configuration forwarded to the constructor.
 * @return the newly constructed instance.
 * @throws RuntimeException wrapping any reflection failure.
 */
public static <T extends Trainable> T load(Class<T> aClass, String storageName, Configuration configuration) {
    try {
        Constructor<T> ctor = aClass.getDeclaredConstructor(String.class, Configuration.class);
        // The constructor may be private; open it up for this call.
        ctor.setAccessible(true);
        return ctor.newInstance(storageName, configuration);
    }
    catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException ex) {
        throw new RuntimeException(ex);
    }
}
|
java
|
/**
 * {@inheritDoc}
 *
 * Extracts the surname via a {@code NameableVisitor} so this class stays
 * agnostic of how naming data is stored in the visited structure.
 */
@Override
public String getSurname() {
    final NameableVisitor visitor = new NameableVisitor();
    this.accept(visitor);
    return visitor.getSurname();
}
|
python
|
def get(self, bucket: str, key: str) -> bytes:
    """
    Retrieves the data for a given object in a given bucket.

    :param bucket: the bucket the object resides in.
    :param key: the key of the object for which data is being retrieved.
    :return: the object's full content as bytes.
    :raises BlobNotFoundError: when the key does not exist in the bucket.
    :raises BlobStoreUnknownError: for any other S3 client error.
    """
    try:
        response = self.s3_client.get_object(
            Bucket=bucket,
            Key=key
        )
        # Body is a streaming object; read() pulls the whole payload
        # into memory.
        return response['Body'].read()
    except botocore.exceptions.ClientError as ex:
        if ex.response['Error']['Code'] == "NoSuchKey":
            raise BlobNotFoundError(f"Could not find s3://{bucket}/{key}") from ex
        raise BlobStoreUnknownError(ex)
|
python
|
def contract(self, x):
    """
    Run .contract(x) on all segmentlists and return self for chaining.
    """
    # Apply the contraction to every stored segmentlist in place.
    for seglist in self.itervalues():
        seglist.contract(x)
    return self
|
java
|
/**
 * Rebuilds the UI-side context map from the given session, replacing any
 * previously stored contexts with fresh duplicates keyed by index.
 */
public void recreateUISharedContexts(Session session) {
    uiContexts.clear();
    for (Context original : session.getContexts()) {
        Context copy = original.duplicate();
        uiContexts.put(original.getIndex(), copy);
    }
}
|
python
|
def _pick_or_create_inserted_op_moment_index(
        self, splitter_index: int, op: ops.Operation,
        strategy: InsertStrategy) -> int:
    """Determines and prepares where an insertion will occur.

    Args:
        splitter_index: The index to insert at.
        op: The operation that will be inserted.
        strategy: The insertion strategy.

    Returns:
        The index of the (possibly new) moment where the insertion should
        occur.

    Raises:
        ValueError: Unrecognized append strategy.
    """
    if (strategy is InsertStrategy.NEW or
            strategy is InsertStrategy.NEW_THEN_INLINE):
        # Always start a fresh moment at the split point.
        self._moments.insert(splitter_index, ops.Moment())
        return splitter_index

    if strategy is InsertStrategy.INLINE:
        # Try to merge into the moment just before the split point.
        if (0 <= splitter_index - 1 < len(self._moments) and
                self._can_add_op_at(splitter_index - 1, op)):
            return splitter_index - 1
        # Otherwise degrade to creating a new moment.
        return self._pick_or_create_inserted_op_moment_index(
            splitter_index, op, InsertStrategy.NEW)

    if strategy is InsertStrategy.EARLIEST:
        if self._can_add_op_at(splitter_index, op):
            # Slide as far left as the op's placement allows.
            # NOTE(review): a falsy return from _prev_moment_available
            # (None or 0) resolves to 0 here -- confirm that None and 0
            # are intended to coincide.
            p = self._prev_moment_available(op, splitter_index)
            return p or 0
        # Cannot place at the split point; degrade to INLINE behavior.
        return self._pick_or_create_inserted_op_moment_index(
            splitter_index, op, InsertStrategy.INLINE)

    raise ValueError('Unrecognized append strategy: {}'.format(strategy))
|
java
|
/**
 * Wraps several feature generators into a single generator that applies
 * all of them.
 *
 * @param generators the generators to combine.
 * @return a {@code CombinedFeatureGenerator} over {@code generators}.
 */
public static <A, B> FeatureGenerator<A, B> combinedFeatureGenerator(
    Iterable<FeatureGenerator<A, B>> generators) {
  return new CombinedFeatureGenerator<A, B>(generators);
}
|
java
|
/**
 * Marshalls every bound field of {@code message} into the protocol stream.
 *
 * @param message            the value to marshall; must not be null.
 * @param protocolMarshaller the destination marshaller.
 * @throws SdkClientException when message is null or any field fails to
 *         marshall (the original exception is attached as the cause).
 */
public void marshall(Message message, ProtocolMarshaller protocolMarshaller) {
    if (message == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(message.getContentType(), CONTENTTYPE_BINDING);
        protocolMarshaller.marshall(message.getContent(), CONTENT_BINDING);
        protocolMarshaller.marshall(message.getGroupNumber(), GROUPNUMBER_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Looks up the token with the given mtasId inside a document and field.
 *
 * The per-token reference entry is stored with a variable width
 * (byte/short/int/long) selected by the document's storage flags; the
 * stored value is a correction relative to an approximate reference
 * derived from the document's offset and quotient.
 *
 * @param field  the field the document belongs to.
 * @param docId  the document id.
 * @param mtasId the id of the token within the document.
 * @return the decoded token.
 * @throws IOException wrapping any failure during lookup or decoding.
 */
public MtasToken getObjectById(String field, int docId, int mtasId)
    throws IOException {
  try {
    Long ref;
    Long objectRefApproxCorrection;
    IndexDoc doc = getDoc(field, docId);
    IndexInput inObjectId = indexInputList.get("indexObjectId");
    IndexInput inObject = indexInputList.get("object");
    IndexInput inTerm = indexInputList.get("term");
    // Seek to the correction entry for mtasId; the entry width (and
    // hence the seek stride) depends on the document's storage flags.
    if (doc.storageFlags == MtasCodecPostingsFormat.MTAS_STORAGE_BYTE) {
      inObjectId.seek(doc.fpIndexObjectId + (mtasId * 1L));
      objectRefApproxCorrection = Long.valueOf(inObjectId.readByte());
    } else if (doc.storageFlags == MtasCodecPostingsFormat.MTAS_STORAGE_SHORT) {
      inObjectId.seek(doc.fpIndexObjectId + (mtasId * 2L));
      objectRefApproxCorrection = Long.valueOf(inObjectId.readShort());
    } else if (doc.storageFlags == MtasCodecPostingsFormat.MTAS_STORAGE_INTEGER) {
      inObjectId.seek(doc.fpIndexObjectId + (mtasId * 4L));
      objectRefApproxCorrection = Long.valueOf(inObjectId.readInt());
    } else {
      inObjectId.seek(doc.fpIndexObjectId + (mtasId * 8L));
      objectRefApproxCorrection = Long.valueOf(inObjectId.readLong());
    }
    // Reconstruct the absolute reference from the linear approximation
    // plus the stored correction.
    ref = objectRefApproxCorrection + doc.objectRefApproxOffset
        + (mtasId * (long) doc.objectRefApproxQuotient);
    return MtasCodecPostingsFormat.getToken(inObject, inTerm, ref);
  } catch (Exception e) {
    throw new IOException(e);
  }
}
|
java
|
/**
 * Shows the requested panel and swaps in that panel's button set.
 *
 * @param panelId one of {@code PANEL_SELECT} or {@code PANEL_BROKEN_LINKS};
 *        other values only switch the widget and clear the buttons.
 */
public void setPanel(int panelId) {
    m_panel.showWidget(panelId);
    removeAllButtons();
    switch (panelId) {
        case PANEL_SELECT:
            for (CmsPushButton button : m_publishSelectPanel.getButtons()) {
                addButton(button);
            }
            m_publishSelectPanel.updateDialogTitle();
            break;
        case PANEL_BROKEN_LINKS:
            for (CmsPushButton button : m_brokenLinksPanel.getButtons()) {
                addButton(button);
            }
            m_brokenLinksPanel.updateTitle();
            break;
        default:
            break;
    }
}
|
java
|
/**
 * Removes the entry for {@code key} from both the static environment and
 * the CRF dictionary.
 *
 * @param key the dictionary key to remove.
 * @return whatever {@code CRF.remove} returns for the key.
 *         -- NOTE(review): null-on-absent behavior depends on CRF.remove;
 *         confirm against its implementation.
 */
public static KV<String, SplitWord> remove(String key) {
    MyStaticValue.ENV.remove(key);
    return CRF.remove(key);
}
|
java
|
/**
 * Creates a matcher for {@link Iterable}s that matches when a single pass
 * over the examined iterable yields items containing, in relative order,
 * items equal to those in the specified iterable.
 *
 * @param items the items that must appear, in relative order.
 */
public static <T> Matcher<Iterable<? extends T>> containsInRelativeOrder(final Iterable<T> items) {
    return IsIterableContainingInRelativeOrder.containsInRelativeOrder(items);
}
|
python
|
def request(self, msgtype, msgid, method, params=None):
    """Handle an incoming call request.

    Dispatches ``method`` with ``params``; a Deferred result gets
    callbacks wired to respond later, anything else is answered
    immediately via ``send_response``.

    :param msgtype: message type tag (not used by this handler; kept for
        interface compatibility).
    :param msgid: correlation id echoed back in the response.
    :param method: name of the method to invoke.
    :param params: positional arguments for the method; defaults to no
        arguments.  (The mutable default ``[]`` was replaced with None
        to avoid the shared-mutable-default pitfall.)
    """
    if params is None:
        params = []
    result = None
    error = None
    try:
        result = self.dispatch.call(method, params)
    except Exception as e:
        # Report the exception type and message back to the caller.
        error = (e.__class__.__name__, str(e))
    if isinstance(result, Deferred):
        # Asynchronous result: respond when the deferred fires.
        result.add_callback(self._result, msgid)
        result.add_errback(self._error, msgid)
    else:
        self.send_response(msgid, error, result)
|
java
|
/**
 * Parses a raw ORTC wire message into an {@code OrtcMessage}.
 *
 * The message is first matched against the operation pattern; when that
 * fails, it is treated either as a JSON "received" payload (possibly one
 * part of a multipart message) or as a close message.
 *
 * @param message the raw message text from the socket.
 * @return the parsed OrtcMessage.
 * @throws IOException when the message matches no known format.
 */
public static OrtcMessage parseMessage(String message) throws IOException {
    OrtcOperation operation = null;
    String JSONMessage = null;
    String parsedMessage = null;
    String filteredByServer = null;
    String seqId = null;
    String messageChannel = null;
    String messageId = null;
    int messagePart = -1;
    int messageTotalParts = -1;
    Matcher matcher = operationPattern.matcher(message);
    if (matcher != null && !matcher.matches()) {
        //matcher = receivedPattern.matcher(message.replace("\\\"", "\""));
        matcher = JSONPattern.matcher(message);
        if ((matcher != null && matcher.matches())) {
            try{
                operation = OrtcOperation.Received;
                // Unescape the JSON payload before parsing it.
                JSONMessage = matcher.group(1).replace("\\\\", "\\").replace("\\\"","\"");
                org.json.JSONObject json = new org.json.JSONObject(JSONMessage);
                parsedMessage = (String)json.get("m");
                messageChannel = (String)json.get("ch");
                if (json.has("f"))
                    filteredByServer = (String)json.get("f");
                if (json.has("s"))
                    seqId = (String)json.get("s");
                //parsedMessage = parsedMessage.replace("\\\\n", "\n").replace("\\\"", "\"").replace("\\\\\\\\", "\\");
                // A multipart payload carries id/part/total markers.
                Matcher multiPartMatcher = parseMultiPartMessage(parsedMessage);
                if (multiPartMatcher.matches()) {
                    parsedMessage = multiPartMatcher.group(4);
                    messageId = multiPartMatcher.group(1);
                    messagePart = Strings.isNullOrEmpty(multiPartMatcher.group(2)) ? -1 : Integer.parseInt(multiPartMatcher.group(2));
                    messageTotalParts = Strings.isNullOrEmpty(multiPartMatcher.group(3)) ? -1 : Integer.parseInt(multiPartMatcher.group(3));
                }
            }catch(NumberFormatException parseException){
                //throw new NumberFormatException("Invalid message format: " + message + " - Error " + parseException.toString());
                // Fall back to the raw matched payload on bad part numbers.
                parsedMessage = matcher.group(1);
                messageId = null;
                messagePart = -1;
                messageTotalParts = -1;
            } catch (JSONException e) {
                // Fall back to the raw matched payload on malformed JSON.
                parsedMessage = matcher.group(1);
                messageId = null;
                messagePart = -1;
                messageTotalParts = -1;
            }
        } else {
            matcher = closePattern.matcher(message);
            if (matcher != null && matcher.matches()){
                operation = OrtcOperation.Close;
            } else {
                throw new IOException(String.format("Invalid message format: %s", message));
            }
        }
        // CAUSE: Possible null pointer dereference
    } else {
        operation = operationIndex.get(matcher.group(1));
        parsedMessage = matcher.group(2);
    }
    return new OrtcMessage(operation, parsedMessage, messageChannel, messageId, messagePart, messageTotalParts, Boolean.valueOf(filteredByServer), seqId);
}
|
java
|
/**
 * Indicates whether this destination is durable: only pub/sub
 * destinations whose dispatcher state reports durability qualify.
 *
 * @return true for a durable pub/sub destination, false otherwise.
 */
@Override
public boolean isDurable()
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        SibTr.entry(tc, "isDurable");
    }

    final boolean durable = _isPubSub && dispatcherState.isDurable();

    // Tracing is re-checked deliberately: it can be toggled at runtime.
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        SibTr.exit(tc, "isDurable", Boolean.valueOf(durable));
    }
    return durable;
}
|
python
|
def make_scores(
        ic50_y,
        ic50_y_pred,
        sample_weight=None,
        threshold_nm=500,
        max_ic50=50000):
    """
    Calculate AUC, F1, and Kendall Tau scores.

    Parameters
    -----------
    ic50_y : float list
        true IC50s (i.e. affinities)

    ic50_y_pred : float list
        predicted IC50s

    sample_weight : float list [optional]
        per-sample weights forwarded to the sklearn AUC and F1 metrics.

    threshold_nm : float [optional]
        IC50 cutoff used to binarize true/predicted values.

    max_ic50 : float [optional]
        passed through to ``from_ic50`` when converting predictions.

    Returns
    -----------
    dict with entries "auc", "f1", "tau"; a metric that cannot be
    computed is reported as NaN.
    """
    # Convert predicted IC50s into scores usable by roc_auc_score.
    y_pred = from_ic50(ic50_y_pred, max_ic50)
    try:
        auc = sklearn.metrics.roc_auc_score(
            ic50_y <= threshold_nm,
            y_pred,
            sample_weight=sample_weight)
    except ValueError as e:
        # e.g. only one class present after thresholding.
        logging.warning(e)
        auc = numpy.nan
    try:
        f1 = sklearn.metrics.f1_score(
            ic50_y <= threshold_nm,
            ic50_y_pred <= threshold_nm,
            sample_weight=sample_weight)
    except ValueError as e:
        logging.warning(e)
        f1 = numpy.nan
    try:
        # Rank correlation between predictions and truth (unweighted).
        tau = scipy.stats.kendalltau(ic50_y_pred, ic50_y)[0]
    except ValueError as e:
        logging.warning(e)
        tau = numpy.nan

    return dict(
        auc=auc,
        f1=f1,
        tau=tau)
|
java
|
/**
 * Saves an informational user message for the given property.
 *
 * @param property   the property (field) the message is associated with.
 * @param messageKey the resource key of the message; must not be null.
 * @param args       optional values interpolated into the message.
 */
public void save(String property, String messageKey, Object... args) {
    assertObjectNotNull("messageKey", messageKey);
    doSaveInfo(prepareUserMessages(property, messageKey, args));
}
|
python
|
def geometry(obj):
    """
    Apply ``vtkGeometryFilter`` to *obj* and return the filtered output.
    """
    geometry_filter = vtk.vtkGeometryFilter()
    geometry_filter.SetInputData(obj)
    # Update() executes the filter pipeline before the output is fetched.
    geometry_filter.Update()
    return geometry_filter.GetOutput()
|
java
|
/**
 * Creates a {@code Widget} for the given scene object, using the class
 * named in the scene object's attributes (default: {@code GroupWidget}).
 *
 * @param sceneObject the scene object to wrap.
 * @return the instantiated widget.
 * @throws InstantiationException if the named class cannot be found or
 *         the widget cannot be constructed; the original
 *         ClassNotFoundException is attached as the cause.
 */
@SuppressWarnings("unchecked")
static Widget createWidget(final GVRSceneObject sceneObject)
        throws InstantiationException {
    Class<? extends Widget> widgetClass = GroupWidget.class;
    NodeEntry attributes = new NodeEntry(sceneObject);
    String className = attributes.getClassName();
    if (className != null) {
        try {
            widgetClass = (Class<? extends Widget>) Class
                    .forName(className);
        } catch (ClassNotFoundException e) {
            // Log through the project logger (the previous
            // printStackTrace() was redundant with Log.e) and rethrow
            // with the original exception preserved as the cause.
            Log.e(TAG, e, "createWidget()");
            InstantiationException failure =
                    new InstantiationException(e.getLocalizedMessage());
            failure.initCause(e);
            throw failure;
        }
    }
    Log.d(TAG, "createWidget(): widgetClass: %s",
            widgetClass.getSimpleName());
    return createWidget(sceneObject, attributes, widgetClass);
}
|
python
|
def get(self, path, local_path):
    """
    Download file from (s)FTP to local filesystem.

    The file is first written to a uniquely named temporary file next to
    ``local_path`` and only renamed into place after a complete transfer,
    so readers never observe a partial download.
    """
    normpath = os.path.normpath(local_path)
    folder = os.path.dirname(normpath)
    if folder and not os.path.exists(folder):
        os.makedirs(folder)

    # BUGFIX: use an integer bound -- random.randrange rejects floats
    # like 1e10 on modern Python (deprecated in 3.10, error since 3.12).
    tmp_local_path = local_path + '-luigi-tmp-%09d' % random.randrange(0, 10 ** 10)

    # download file
    self._connect()
    if self.sftp:
        self._sftp_get(path, tmp_local_path)
    else:
        self._ftp_get(path, tmp_local_path)
    self._close()

    os.rename(tmp_local_path, local_path)
|
java
|
/**
 * Reads exactly four bytes from {@code is} and decodes them as a float.
 *
 * The previous implementation ignored the return value of
 * {@code InputStream.read(byte[])}, which may fill fewer than four bytes
 * and silently yield a garbage float.
 *
 * @param is the stream to read from.
 * @return the decoded float value.
 * @throws IOException if the stream ends before four bytes are read, or
 *         on any underlying I/O failure.
 */
public static float readFloat(InputStream is) throws IOException {
    byte[] bytes = new byte[4];
    int offset = 0;
    // Loop until all four bytes arrive; a single read() may return fewer.
    while (offset < bytes.length) {
        int n = is.read(bytes, offset, bytes.length - offset);
        if (n < 0) {
            throw new IOException("Unexpected end of stream while reading float");
        }
        offset += n;
    }
    return getFloat(bytes);
}
|
python
|
def _parse_tensor(self, indices=False):
    '''Parse a 3x3 tensor from the current line buffer.

    When ``indices`` is true, one header line is consumed first and each
    data row carries a leading index token that is skipped.
    '''
    if indices:
        self.line = self._skip_lines(1)
    # Data columns start after the optional per-row index token.
    offset = 1 if indices else 0
    tensor = np.zeros((3, 3))
    for row in range(3):
        tokens = self.line.split()
        for col in range(3):
            tensor[row][col] = float(tokens[offset + col])
        self.line = self._skip_lines(1)
    return tensor
|
java
|
/**
 * Factory: builds a message with a freshly generated 128-bit hex id
 * (lower-cased) and the current time as its timestamp.
 *
 * @return the new message instance.
 */
@SuppressWarnings("unchecked")
public static UniversalIdStrMessage newInstance() {
    Date now = new Date();
    UniversalIdStrMessage msg = new UniversalIdStrMessage();
    msg.setId(QueueUtils.IDGEN.generateId128Hex().toLowerCase()).setTimestamp(now);
    return msg;
}
|
python
|
def _create_and_rotate_coordinate_arrays(self, x, y, orientation):
"""
Create pattern matrices from x and y vectors, and rotate them
to the specified orientation.
"""
# Using this two-liner requires that x increase from left to
# right and y decrease from left to right; I don't think it
# can be rewritten in so little code otherwise - but please
# prove me wrong.
pattern_y = np.subtract.outer(np.cos(orientation)*y, np.sin(orientation)*x)
pattern_x = np.add.outer(np.sin(orientation)*y, np.cos(orientation)*x)
return pattern_x, pattern_y
|
java
|
/**
 * Resolves the service and method named in {@code request} and invokes it.
 *
 * @param request the JSON-RPC request to dispatch.
 * @return a future completing with the method's response, or an immediate
 *         BAD_REQUEST error response when the service or method is unknown.
 */
public ListenableFuture<JsonRpcResponse> invoke(JsonRpcRequest request) {
    Service service = services.lookupByName(request.service());
    if (service == null) {
        JsonRpcError error = new JsonRpcError(HttpResponseStatus.BAD_REQUEST,
                "Unknown service: " + request.service());
        JsonRpcResponse response = JsonRpcResponse.error(error);
        return Futures.immediateFuture(response);
    }

    ServerMethod<? extends Message, ? extends Message> method = service.lookup(request.method());
    // NOTE(review): the method may be null here; logMethodCall is assumed
    // to tolerate that (it did in the original code as well).
    serverLogger.logMethodCall(service, method);

    if (method == null) {
        // BUGFIX: this message previously reported request.service()
        // instead of the method name that failed to resolve.
        JsonRpcError error = new JsonRpcError(HttpResponseStatus.BAD_REQUEST,
                "Unknown method: " + request.method());
        JsonRpcResponse response = JsonRpcResponse.error(error);
        return Futures.immediateFuture(response);
    }

    return invoke(method, request.parameter(), request.id());
}
|
java
|
/**
 * Propagates the resolved output type to the wrapped user function via
 * {@code StreamingFunctionUtils}.
 */
@Override
public void setOutputType(TypeInformation<OUT> outTypeInfo, ExecutionConfig executionConfig) {
    StreamingFunctionUtils.setOutputType(userFunction, outTypeInfo, executionConfig);
}
|
java
|
/**
 * Starts the download of VPN site configuration for a virtual WAN and
 * returns an observable that completes when the POST operation finishes.
 *
 * @param resourceGroupName the resource group containing the VirtualWAN.
 * @param virtualWANName    the name of the VirtualWAN.
 * @param request           the request body forwarded to the service.
 * @return an observable emitting the (void) service response.
 * @throws IllegalArgumentException when any required argument is null.
 */
public Observable<ServiceResponse<Void>> downloadWithServiceResponseAsync(String resourceGroupName, String virtualWANName, GetVpnSitesConfigurationRequest request) {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (virtualWANName == null) {
        throw new IllegalArgumentException("Parameter virtualWANName is required and cannot be null.");
    }
    if (request == null) {
        throw new IllegalArgumentException("Parameter request is required and cannot be null.");
    }
    Validator.validate(request);
    // API version is pinned by the generated client contract.
    final String apiVersion = "2018-04-01";
    Observable<Response<ResponseBody>> observable = service.download(this.client.subscriptionId(), resourceGroupName, virtualWANName, request, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
    return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
}
|
java
|
/**
 * Returns the original request URI for a forwarded request, falling back
 * to the current request URI when no forward attribute is present.
 */
public static String getForwardURI(HttpServletRequest request) {
    String uri = (String) request.getAttribute(WebUtils.FORWARD_REQUEST_URI_ATTRIBUTE);
    if (GrailsStringUtils.isBlank(uri)) {
        uri = request.getRequestURI();
    }
    return uri;
}
|
python
|
def unit_vector(self):
    """Generate a unit vector (norm = 1) from the stored pitch and yaw."""
    cos_pitch = math.cos(self.rpitch)
    x = -cos_pitch * math.sin(self.ryaw)
    y = -math.sin(self.rpitch)
    z = cos_pitch * math.cos(self.ryaw)
    return Vector3(x, y, z)
|
python
|
def find_loader(self, fullname):
    """Try to find a loader for the specified module, or the namespace
    package portions. Returns (loader, list-of-portions).

    This method is deprecated. Use find_spec() instead.
    """
    spec = self.find_spec(fullname)
    if spec is not None:
        return spec.loader, spec.submodule_search_locations or []
    return None, []
|
python
|
def from_array(array):
    """
    Deserialize a new WebhookInfo from a given dictionary.

    :param array: the dict to deserialize from; None or an empty dict
        yields None.
    :return: new WebhookInfo instance, or None when array is falsy.
    :rtype: WebhookInfo
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")

    data = {}
    # Required fields.
    data['url'] = u(array.get('url'))
    data['has_custom_certificate'] = bool(array.get('has_custom_certificate'))
    data['pending_update_count'] = int(array.get('pending_update_count'))
    # Optional fields: left as None when absent from the payload.
    data['last_error_date'] = int(array.get('last_error_date')) if array.get('last_error_date') is not None else None
    data['last_error_message'] = u(array.get('last_error_message')) if array.get('last_error_message') is not None else None
    data['max_connections'] = int(array.get('max_connections')) if array.get('max_connections') is not None else None
    data['allowed_updates'] = WebhookInfo._builtin_from_array_list(required_type=unicode_type, value=array.get('allowed_updates'), list_level=1) if array.get('allowed_updates') is not None else None
    # Preserve the raw payload for debugging / round-tripping.
    data['_raw'] = array
    return WebhookInfo(**data)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.