language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
/**
 * Appends every medium of the given media list to this media list.
 *
 * @param aMediaList the list whose media are copied; must not be <code>null</code>
 * @return this instance, for method chaining
 */
@Nonnull
public CSSMediaList addMedia (@Nonnull final ICSSMediaList aMediaList)
{
  ValueEnforcer.notNull(aMediaList, "MediaList");
  final var aAllMedia = aMediaList.getAllMedia();
  m_aMedia.addAll(aAllMedia);
  return this;
}
|
python
|
def refresh_stack(self):
    """Rebuild the visible stack (e.g. after show_hidden_frames changed).

    Re-locates the current frame inside the rebuilt stack; if it is no
    longer present, the innermost (last) frame becomes current.  Finally
    the current stack entry is printed.
    """
    self.stack, _ = self.compute_stack(self.fullstack)
    # Locate the current frame in the freshly computed stack (identity match).
    index = next(
        (i for i, (frame, _) in enumerate(self.stack)
         if frame is self.curframe),
        None,
    )
    if index is None:
        # The previous current frame is gone: fall back to the last frame.
        self.curindex = len(self.stack) - 1
        self.curframe = self.stack[-1][0]
    else:
        self.curindex = index
    self.print_current_stack_entry()
|
java
|
/**
 * Sets the title and, when the wrapped widget supports titles and no titled
 * widget id has been assigned, forwards the title to that widget as well.
 *
 * @param title the new title
 */
public void setTitle(String title)
{
    super.setTitle(title);
    final boolean delegateToWidget =
            (this.widget instanceof TitleConfigurable) && (this.titledWidgetId == null);
    if (delegateToWidget)
    {
        ((TitleConfigurable) this.widget).setTitle(title);
    }
}
|
java
|
/**
 * Evaluates this expression node.  This node represents a literal, so the
 * evaluation context is ignored entirely and the stored value is returned
 * unchanged.
 *
 * @param pContext the evaluation context (unused)
 * @param pResolver the variable resolver (unused)
 * @param functions the function map (unused)
 * @param defaultPrefix the default function prefix (unused)
 * @param pLogger the logger (unused)
 * @return the literal value {@code mValue}
 * @throws ELException declared for interface compatibility; never thrown here
 */
public Object evaluate(Object pContext,
                       VariableResolver pResolver,
                       Map functions,
                       String defaultPrefix,
                       Logger pLogger)
    throws ELException {
    return mValue;
}
|
python
|
def all_arch_srcarch_pairs():
    """
    Generates all valid (ARCH, SRCARCH) tuples for the kernel, corresponding to
    different architectures. SRCARCH holds the arch/ subdirectory.
    """
    # Every arch/<dir> that ships a Kconfig file is an architecture where
    # ARCH and SRCARCH coincide.
    for entry in os.listdir("arch"):
        if os.path.exists(os.path.join("arch", entry, "Kconfig")):
            yield (entry, entry)
    # Extra ARCH values where ARCH differs from SRCARCH (see "Additional
    # ARCH settings for" in the kernel's top-level Makefile).
    extra_pairs = (
        ("i386", "x86"),
        ("x86_64", "x86"),
        ("sparc32", "sparc"),
        ("sparc64", "sparc"),
        ("sh64", "sh"),
        ("um", "um"),
    )
    yield from extra_pairs
|
java
|
/**
 * Encodes {@code string} onto {@code writer}, delegating to the
 * four-argument overload with flag values {@code false} and {@code true}.
 * NOTE(review): the semantics of those two flags are defined by the
 * delegated overload, which is not visible here — confirm before relying
 * on them.
 *
 * @param writer the target writer
 * @param string the text to encode
 * @throws IOException if writing fails
 */
public static void encode (Writer writer, String string) throws IOException
{
    encode(writer, string, false, true);
}
|
python
|
def iter_graph(cur):
    """Yield every graph stored in the cache as (nodelist, edgelist).

    Args:
        cur (:class:`sqlite3.Cursor`): An sqlite3 cursor; intended to be
            used inside a :obj:`with` block.

    Yields:
        tuple: ``(nodelist, edgelist)`` where nodelist is ``[0..num_nodes-1]``
        and edgelist is decoded from the stored JSON edges column.

    Examples:
        >>> nodelist = [0, 1, 2]
        >>> edgelist = [(0, 1), (1, 2)]
        >>> with pmc.cache_connect(':memory:') as cur:
        ...     pmc.insert_graph(cur, nodelist, edgelist)
        ...     list(pmc.iter_graph(cur))
        [([0, 1, 2], [[0, 1], [1, 2]])]
    """
    query = "SELECT num_nodes, num_edges, edges from graph;"
    for num_nodes, _num_edges, edges in cur.execute(query):
        nodelist = list(range(num_nodes))
        edgelist = json.loads(edges)
        yield nodelist, edgelist
|
python
|
def wrap_key(self, key):
    """Translate the key into the central cell.

    Only applicable for a periodic system: the key is mapped through
    ``integer_cell.shortest_vector`` and rounded to an integer tuple.
    """
    shortest = self.integer_cell.shortest_vector(key)
    rounded = np.round(shortest).astype(int)
    return tuple(rounded)
|
java
|
/**
 * Invokes a closure-like object by reflectively calling its {@code call}
 * method with the supplied arguments.
 *
 * @param closure the closure instance to invoke
 * @param arguments the arguments passed to {@code call}
 * @return whatever the closure's {@code call} method returns
 * @throws Throwable any throwable raised by the invoked method
 */
public static Object invokeClosure(Object closure, Object[] arguments) throws Throwable {
    final Class<?> closureClass = closure.getClass();
    return invokeMethodN(closureClass, closure, "call", arguments);
}
|
python
|
def cmd_serve(self, *args):
    '''Serve the bin directory via SimpleHTTPServer
    '''
    try:
        # Python 3 module locations.
        from http.server import SimpleHTTPRequestHandler
        from socketserver import TCPServer
    except ImportError:
        # Python 2 fallback locations.
        from SimpleHTTPServer import SimpleHTTPRequestHandler
        from SocketServer import TCPServer
    os.chdir(self.bin_dir)
    server = TCPServer(("", SIMPLE_HTTP_SERVER_PORT), SimpleHTTPRequestHandler)
    print("Serving via HTTP at port {}".format(SIMPLE_HTTP_SERVER_PORT))
    print("Press Ctrl+c to quit serving.")
    # Blocks until interrupted.
    server.serve_forever()
|
java
|
/**
 * Creates an {@link ObjectDataInput} over the given data and advances it
 * past the data-serializable header, leaving the stream positioned at the
 * payload.
 *
 * @param data the serialized data to read
 * @return the input, positioned just after the header
 * @throws IOException if reading the header fails
 */
public ObjectDataInput initDataSerializableInputAndSkipTheHeader(Data data) throws IOException {
    final ObjectDataInput input = createObjectDataInput(data);
    final byte header = input.readByte();
    if (isFlagSet(header, IDS_FLAG)) {
        // Identified variant: skip the fixed-width factory/class id field.
        skipBytesSafely(input, FACTORY_AND_CLASS_ID_BYTE_LENGTH);
    } else {
        // Non-identified variant: consume the UTF header field instead.
        input.readUTF();
    }
    if (isFlagSet(header, EE_FLAG)) {
        skipBytesSafely(input, EE_BYTE_LENGTH);
    }
    return input;
}
|
python
|
def post(self, endpoint='', url='', data=None, use_api_key=False, omit_api_version=False):
    """Perform a post to an API endpoint.

    :param string endpoint: Target endpoint. (Optional).
    :param string url: Override the endpoint and provide the full url
        (eg for pagination). (Optional).
    :param dict data: Data to pass to the post. (Optional).
    :param bool use_api_key: Whether to authenticate with the API key.
    :param bool omit_api_version: Whether to leave the API version out of
        the request.
    :return: Response.
    :rtype: ``Response``
    """
    return self._request(
        'post',
        endpoint,
        url,
        data=data,
        use_api_key=use_api_key,
        omit_api_version=omit_api_version,
    )
|
python
|
def calc(self, maxiter=100, fixedprec=1e9):
    """Min Cost Flow

    Iteratively assigns targets to origins by building a bipartite graph
    (targets supply one unit each; origins have demands from
    ``_get_demand_graph``) and solving a min-cost-flow problem on it,
    repeating until the labelling stops changing or ``maxiter`` iterations
    have run.  Results are stored in ``self.results``.

    :param maxiter: maximum number of refinement iterations (None = unbounded)
    :param fixedprec: NOTE(review): appears unused in this body (costs are
        scaled by the literal 1e5 below) — confirm
    :return: True once converged or the iteration limit is reached
    """
    source_data_holder = []
    N = self.targets.shape[0]
    K = self.origins.shape[0]
    # dict of labels for each target node
    M, demand = self._get_demand_graph()
    max_dist_trip = 400 # kilometers
    cost_holder = []
    itercnt = 0
    while True:
        itercnt += 1
        logging.info(f'Iter count: {itercnt}')
        # Setup the graph
        g = nx.DiGraph()
        self.targets = self.targets.sort_values('labels').reset_index(drop=True)
        # Supply of 1 (i.e. demand = -1) means that it can only be connected to one node
        g.add_nodes_from(self.targets['target_id'], demand=-1) # points
        for idx in self.nearest_targets.origin_id:
            g.add_node(int(idx), demand=demand[idx])
        ### Create the cluster centers calculate a distance cost
        cost_dist = dist_vect(
            np.tile(self.targets['lng'].values, K),
            np.tile(self.targets['lat'].values, K),
            np.repeat(self.origins['lng'].values, N),
            np.repeat(self.origins['lat'].values, N)
        )[:, np.newaxis].T
        scaler_dist = MinMaxScaler()
        cost_dist_trans = scaler_dist.fit_transform(cost_dist.T).T
        # Penalty for distances too large
        cost_dist_trans[cost_dist > max_dist_trip] = 10
        # Create the in-cluster sales and calculate the total volume of sales generated
        # TODO: rename this to something more generic, like cluster_demanded
        cluster_sales = self.targets.groupby('labels').sum()[self.demand_col][:, np.newaxis]
        # NOTE(review): D is computed but never used below — confirm before removing.
        D = cluster_sales.shape[1]
        # Sales cost: absolute deviation of each cluster's volume from the mean.
        cost_sales = abs(
            np.array([
                np.linalg.norm(
                    np.repeat(cluster_sales, N)[:, np.newaxis] \
                    - np.tile(cluster_sales.mean(), (K * N))[:,np.newaxis],
                    axis=1
                )
            ])
        )
        scaler_sales = MinMaxScaler()
        cost_sales = scaler_sales.fit_transform(cost_sales.T).T
        # Total cost TO CHANGE??
        cost_total = cost_dist_trans + cost_sales
        cost_holder.append(sum(cost_total[0]))
        # Create the edges of points to centers
        # (columns: target_id, origin node id, integerized cost * 1e5)
        data_to_center_edges = np.concatenate(
            (
                np.tile(self.targets['target_id'], K).T[:, np.newaxis],
                np.array([np.tile(int(i+1), self.targets.shape[0]) for i in range(K)]).reshape(self.targets.shape[0] * K, 1),
                cost_total.T * 1e5
            ),
            axis=1
        ).astype(np.uint64)
        # Add these edges to the graph
        g.add_weighted_edges_from(data_to_center_edges)
        # Add the extra balance node
        # To balance out the network, we create an extra node that is:
        # -(K*(-1)+sum(demand_per_node))
        a = 99999
        g.add_node(a, demand=self.targets.shape[0] - np.sum(list(demand.values())))
        C_to_a_edges = np.concatenate(
            (
                np.array([int(i + 1) for i in range(K)]).T[:, np.newaxis],
                np.tile([[a, ]], K).T
            ),
            axis=1
        )
        g.add_edges_from(C_to_a_edges)
        # Calculate the minimum flow cost
        f = nx.min_cost_flow(g)
        # Update the new labels
        M_new = {}
        # NOTE(review): p is re-assigned inside the loop; this dict initializer
        # is never used — confirm before removing.
        p = {}
        for i in list(g.nodes)[:-1]:
            # Sorts all the items in the dictionary and picks the cluster
            # with label = 1
            p = sorted(f[i].items(), key=lambda x: x[1])[-1][0]
            M_new[i] = p
        # Update the new labels in the df
        self.targets['labels'] = self.targets.apply(lambda x: M_new[x['target_id']], axis=1)
        # Set the capacity for all edges
        # TO DO: Figure how/whether we need to properly set a capacity for the edges.
        C = 50
        nx.set_edge_attributes(g, C, 'capacity')
        # Test whether we can stop
        # stop condition
        if np.all(M_new == M):
            print("All same")
            self.results = {
                'dict_graph': M,
                'min_cost_flow': f,
                'nxgraph': g,
                'model_labels': self.targets,
                'costs': cost_holder
            }
            return True
        M = M_new
        source_data_holder.append(self.targets['labels'].values)
        if maxiter is not None and itercnt >= maxiter:
            # Max iterations reached
            self.results = {
                'dict_graph': M,
                'min_cost_flow': f,
                'nxgraph': g,
                'model_labels': self.targets,
                'costs': cost_holder
            }
            return True
|
python
|
def _create_font_choice_combo(self):
    """Creates font choice combo box"""
    self.fonts = get_font_list()
    # Read-only combo box listing the available font faces.
    combo = _widgets.FontChoiceCombobox(
        self, choices=self.fonts, style=wx.CB_READONLY, size=(125, -1))
    combo.SetToolTipString(_(u"Text font"))
    self.font_choice_combo = combo
    self.AddControl(self.font_choice_combo)
    # Wire up selection handling and toolbar refresh events.
    self.Bind(wx.EVT_COMBOBOX, self.OnTextFont, self.font_choice_combo)
    self.parent.Bind(self.EVT_CMD_TOOLBAR_UPDATE, self.OnUpdate)
|
python
|
def segment_midpoints(neurites, neurite_type=NeuriteType.all):
    '''Return a list of segment mid-points in a collection of neurites'''
    def _section_midpoints(section):
        '''Mid-point of each consecutive point pair in a section'''
        points = section.points[:, COLS.XYZ]
        return (points[:-1] + points[1:]) / 2.0
    return map_segments(_section_midpoints, neurites, neurite_type)
|
python
|
def to_wgs84(east, north, crs):
    """Convert (east, north) coordinates in any CRS to WGS84.

    :param east: east coordinate
    :type east: float
    :param north: north coordinate
    :type north: float
    :param crs: CRS enum constants
    :type crs: constants.CRS
    :return: latitude and longitude coordinates in WGS84 system
    :rtype: float, float
    """
    point = (east, north)
    return transform_point(point, crs, CRS.WGS84)
|
python
|
def fix_config(self, options):
    """
    Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.

    :param options: the options to fix
    :type options: dict
    :return: the (potentially) fixed options
    :rtype: dict
    """
    # Defaults are built lazily so the filter object is only constructed
    # when it is actually missing from the options.
    for opt, make_default, help_text in (
        ("setup",
         lambda: filters.Filter(classname="weka.filters.AllFilter"),
         "The filter to apply to the dataset (Filter)."),
        ("keep_relationname",
         lambda: False,
         "Whether to keep the original relation name (bool)."),
    ):
        if opt not in options:
            options[opt] = make_default()
        if opt not in self.help:
            self.help[opt] = help_text
    return super(Filter, self).fix_config(options)
|
python
|
def reduce(vector):
    """
    Sum a vector or a matrix.

    If ``vector`` is a list it is treated as a matrix (a sequence of rows)
    and an array with the sum of each row is returned; otherwise the plain
    sum of its elements is returned.  Boolean data therefore counts the
    ``True`` entries.

    :param vector: a flat sequence/array, or a list of rows
    :return: array of per-row sums for a matrix, scalar sum otherwise
    """
    # isinstance is the idiomatic type test (also accepts list subclasses).
    if isinstance(vector, list):  # matrix: one reduction per row
        return array([add.reduce(row) for row in vector])
    return sum(vector)
|
python
|
def load_xml_conf(self, xml_file, id):
    '''
    Creates a new config from xml file.

    :param xml_file: path to xml file. Format : nutch-site.xml or nutch-default.xml
    :param id: identifier for the new config
    :return: config object
    '''
    import xml.etree.ElementTree as ET

    # Flatten every <property><name>/<value> pair into a plain dict,
    # stripping surrounding whitespace from both sides.
    root = ET.parse(xml_file).getroot()
    params = {
        prop.find('./name').text.strip(): prop.find('./value').text.strip()
        for prop in root.findall(".//property")
    }
    return self.proxy.Configs().create(id, configData=params)
|
python
|
def mode_string_v10(msg):
    '''mode string for 1.0 protocol, from heartbeat'''
    if msg.autopilot == mavlink.MAV_AUTOPILOT_PX4:
        return interpret_px4_mode(msg.base_mode, msg.custom_mode)
    if not msg.base_mode & mavlink.MAV_MODE_FLAG_CUSTOM_MODE_ENABLED:
        return "Mode(0x%08x)" % msg.base_mode
    copter_types = (
        mavlink.MAV_TYPE_QUADROTOR, mavlink.MAV_TYPE_HEXAROTOR,
        mavlink.MAV_TYPE_OCTOROTOR, mavlink.MAV_TYPE_TRICOPTER,
        mavlink.MAV_TYPE_COAXIAL, mavlink.MAV_TYPE_HELICOPTER)
    # Pick the custom-mode name table for this vehicle type.
    if msg.type in copter_types:
        mapping = mode_mapping_acm
    elif msg.type == mavlink.MAV_TYPE_FIXED_WING:
        mapping = mode_mapping_apm
    elif msg.type == mavlink.MAV_TYPE_GROUND_ROVER:
        mapping = mode_mapping_rover
    elif msg.type == mavlink.MAV_TYPE_ANTENNA_TRACKER:
        mapping = mode_mapping_tracker
    else:
        mapping = None
    if mapping is not None and msg.custom_mode in mapping:
        return mapping[msg.custom_mode]
    # Unknown type or unmapped custom mode: fall back to the numeric form.
    return "Mode(%u)" % msg.custom_mode
|
python
|
def _social_auth_login(self, request, **kwargs):
    '''
    View function that redirects to social auth login,
    in case the user is not logged in.
    '''
    user = request.user
    if not user.is_authenticated():
        # Anonymous visitor: ask them to authenticate, then bounce to login.
        messages.add_message(request, messages.WARNING, 'Please authenticate first.')
        return redirect_to_login(request.get_full_path())
    # Authenticated users must be active staff to proceed.
    if not (user.is_active and user.is_staff):
        raise PermissionDenied()
|
java
|
/**
 * Returns the generated proxy class, creating and defining it on first use.
 * Uses double-checked locking on {@code this} so the class is built at most
 * once.  NOTE(review): the {@code generatedClass} field declaration is not
 * visible here — classic double-checked locking requires it to be
 * {@code volatile} for safe publication; confirm.
 *
 * @return the generated class, never null on successful return
 */
@SuppressWarnings("unchecked")
public Class<? extends T> defineClass() {
    if (generatedClass == null) {
        synchronized (this) {
            if (generatedClass == null) {
                try {
                    // first check that the proxy has not already been created
                    generatedClass = (Class<? extends T>) classLoader.loadClass(this.className);
                } catch (ClassNotFoundException e) {
                    // Not present yet: build the bytecode and define it,
                    // with the protection domain when one was supplied.
                    buildClassDefinition();
                    if (protectionDomain == null) {
                        generatedClass = (Class<? extends T>) classFile.define();
                    } else {
                        generatedClass = (Class<? extends T>) classFile.define(protectionDomain);
                    }
                    afterClassLoad(generatedClass);
                }
                // The class definition is no longer needed once the class exists.
                classFile = null;
            }
        }
    }
    return generatedClass;
}
|
python
|
def __dict_to_BetterDict(self, attr):
"""Convert the passed attr to a BetterDict if the value is a dict
Returns: The new value of the passed attribute."""
if type(self[attr]) == dict:
self[attr] = BetterDict(self[attr])
return self[attr]
|
python
|
def load_key_bindings(
        get_search_state=None,
        enable_abort_and_exit_bindings=False,
        enable_system_bindings=False,
        enable_search=False,
        enable_open_in_editor=False,
        enable_extra_page_navigation=False,
        enable_auto_suggest_bindings=False):
    """
    Create a Registry object that contains the default key bindings.

    :param get_search_state: Optional callable returning the current search
        state; passed through to the Emacs/Vi search binding loaders.
    :param enable_abort_and_exit_bindings: Filter to enable Ctrl-C and Ctrl-D.
    :param enable_system_bindings: Filter to enable the system bindings (meta-!
        prompt and Control-Z suspension.)
    :param enable_search: Filter to enable the search bindings.
    :param enable_open_in_editor: Filter to enable open-in-editor.
    :param enable_extra_page_navigation: Filter for enabling extra page
        navigation. (Bindings for up/down scrolling through long pages, like in
        Emacs or Vi.)
    :param enable_auto_suggest_bindings: Filter to enable fish-style suggestions.
    """
    assert get_search_state is None or callable(get_search_state)
    # Accept both Filters and booleans as input.
    enable_abort_and_exit_bindings = to_cli_filter(enable_abort_and_exit_bindings)
    enable_system_bindings = to_cli_filter(enable_system_bindings)
    enable_search = to_cli_filter(enable_search)
    enable_open_in_editor = to_cli_filter(enable_open_in_editor)
    enable_extra_page_navigation = to_cli_filter(enable_extra_page_navigation)
    enable_auto_suggest_bindings = to_cli_filter(enable_auto_suggest_bindings)
    registry = MergedRegistry([
        # Load basic bindings.
        load_basic_bindings(),
        load_mouse_bindings(),
        ConditionalRegistry(load_abort_and_exit_bindings(),
                            enable_abort_and_exit_bindings),
        ConditionalRegistry(load_basic_system_bindings(),
                            enable_system_bindings),
        # Load emacs bindings.
        load_emacs_bindings(),
        ConditionalRegistry(load_emacs_open_in_editor_bindings(),
                            enable_open_in_editor),
        ConditionalRegistry(load_emacs_search_bindings(get_search_state=get_search_state),
                            enable_search),
        ConditionalRegistry(load_emacs_system_bindings(),
                            enable_system_bindings),
        ConditionalRegistry(load_extra_emacs_page_navigation_bindings(),
                            enable_extra_page_navigation),
        # Load Vi bindings.
        load_vi_bindings(get_search_state=get_search_state),
        ConditionalRegistry(load_vi_open_in_editor_bindings(),
                            enable_open_in_editor),
        ConditionalRegistry(load_vi_search_bindings(get_search_state=get_search_state),
                            enable_search),
        ConditionalRegistry(load_vi_system_bindings(),
                            enable_system_bindings),
        ConditionalRegistry(load_extra_vi_page_navigation_bindings(),
                            enable_extra_page_navigation),
        # Suggestion bindings.
        # (This has to come at the end, because the Vi bindings also have an
        # implementation for the "right arrow", but we really want the
        # suggestion binding when a suggestion is available.)
        ConditionalRegistry(load_auto_suggestion_bindings(),
                            enable_auto_suggest_bindings),
    ])
    return registry
|
java
|
/**
 * Processes an attribute value into a {@link QName}.
 *
 * @param handler the stylesheet handler, used for context and error reporting
 * @param uri the attribute's namespace URI (unused here)
 * @param name the attribute's local name, used in error reporting
 * @param rawName the attribute's raw (prefixed) name (unused here)
 * @param value the attribute value to parse as a QName
 * @param owner the owning element (unused here)
 * @return the parsed QName, or null if the value was invalid
 * @throws org.xml.sax.SAXException if error handling escalates the problem
 */
Object processQNAME(
    StylesheetHandler handler, String uri, String name, String rawName, String value, ElemTemplateElement owner)
    throws org.xml.sax.SAXException
{
  try
  {
    return new QName(value, handler, true);
  }
  catch (RuntimeException re)
  {
    // The QName constructor signals a malformed value with
    // IllegalArgumentException or another RuntimeException; both were
    // previously handled by identical duplicate catch blocks, merged here.
    handleError(handler, XSLTErrorResources.INVALID_QNAME, new Object[]{ name, value }, re);
    return null;
  }
}
|
java
|
/**
 * Replaces segments of this section starting at {@code startIndex} up to
 * {@code endIndex} with the segments of {@code replacement} between
 * {@code replacementStartIndex} and {@code replacementEndIndex}, delegating
 * to the six-argument overload with its trailing flag set to false.
 * NOTE(review): the exact inclusivity of the index ranges and the meaning of
 * the boolean flag are defined by the delegated overload — confirm there.
 *
 * @return the section with the replacement applied
 */
public IPv4AddressSection replace(int startIndex, int endIndex, IPv4AddressSection replacement, int replacementStartIndex, int replacementEndIndex) {
	return replace(startIndex, endIndex, replacement, replacementStartIndex, replacementEndIndex, false);
}
|
java
|
/**
 * Builds the hyperlink for a menu record, based on the record's "Type"
 * field: "menu"-type records link by ID, "applet"-type records link by
 * program name, and "link"-type records return the program value as a URL
 * (prefixed with "http://" when it looks like a bare host name).  Any other
 * case falls back to the menu name.
 *
 * @param record the menu record to build the link for
 * @return the link string, or the menu name when no type-specific link applies
 */
public String getMenuLink(FieldList record)
{
    FieldInfo field = record.getField("Type");
    if ((field != null) && (!field.isNull()))
    {
        String strType = field.toString();
        // Optional query parameters; prefixed with '&' when non-empty.
        String strParams = record.getField("Params").toString();
        if (strParams == null)
            strParams = Constants.BLANK;
        else if (strParams.length() > 0)
            strParams = '&' + strParams;
        if ((strType != null) && (strType.length() > 0))
        {
            String strProgram = record.getField("Program").toString();
            if (strType.equalsIgnoreCase(Params.MENU))
            {
                // Menu entries link by their record ID.
                field = record.getField("ID");
                String strID = field.toString();
                return '?' + strType + '=' + strID + strParams;
            }
            else if (strType.equalsIgnoreCase("applet"))
            {
                // Applet entries link by program name.
                return '?' + strType + '=' + strProgram + strParams;
            }
            else if (strType.equalsIgnoreCase("link"))
            {
                // Plain links return the program value; if it looks like a
                // bare host (dot before any slash, or no slash/scheme at
                // all), treat it as an http URL.
                strParams = strProgram;
                if (((strParams.indexOf('.') < strParams.indexOf('/'))
                    && (strParams.indexOf('.') != -1))
                    || ((strParams.indexOf('/') == -1) && (strParams.indexOf(':') == -1)))
                    strParams = "http://" + strParams; // Default command = html link
                return strParams;
            }
        }
    }
    return this.getMenuName(record); // Command = name
}
|
java
|
/**
 * Builds a TableModel for the given entity class and lets the entity's
 * optional static {@code config(TableModel)} method customise it.
 *
 * @param entityClass the entity class to build a model for
 * @return the (possibly customised) TableModel, never null
 * @throws DialectException if no model can be built or the config method fails
 */
private static TableModel entity2ModelWithConfig(Class<?> entityClass) {
	TableModel model = entity2ModelIgnoreConfigMethod(entityClass);
	if (model == null)
		// Fail fast: previously this null check ran only AFTER the model had
		// already been passed to the config method.
		throw new DialectException("Can not create TableModel for entityClass " + entityClass);
	Method method = null;
	try {
		method = entityClass.getMethod("config", TableModel.class);
	} catch (Exception e) {// NOSONAR
		// The config method is optional; its absence is not an error.
	}
	if (method != null)
		try {
			// Static method, hence the null receiver.
			method.invoke(null, model);
		} catch (Exception e) {
			throw new DialectException(e);
		}
	return model;
}
|
java
|
/**
 * Sets the encoding, rejecting blank input and trimming surrounding
 * whitespace from the charset name.
 *
 * @param charset the charset name; must not be blank
 */
public void setEncoding(String charset) {
    final boolean isBlank = Validator.isBlank(charset);
    Validator.validate(!isBlank, "encoding cannot be blank");
    this.encoding = charset.trim();
}
|
python
|
def listen_tta(self, target, timeout):
    """Listen as Type A Target is not supported."""
    # This device cannot act as a Type A listen target; always refuse.
    message = "{device} does not support listen as Type A Target"
    raise nfc.clf.UnsupportedTargetError(message.format(device=self))
|
java
|
/**
 * Reflective getter in the EMF style: returns the value of the structural
 * feature identified by {@code featureID}, delegating unknown feature ids
 * to the superclass implementation.
 *
 * @param featureID the id of the feature to read
 * @param resolve whether to resolve proxies (handled by the superclass)
 * @param coreType whether to return the core type (handled by the superclass)
 * @return the feature's current value
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
    case AfplibPackage.IOB__OBJ_NAME:
        return getObjName();
    case AfplibPackage.IOB__OBJ_TYPE:
        return getObjType();
    case AfplibPackage.IOB__XOA_OSET:
        return getXoaOset();
    case AfplibPackage.IOB__YOA_OSET:
        return getYoaOset();
    case AfplibPackage.IOB__XOA_ORENT:
        return getXoaOrent();
    case AfplibPackage.IOB__YOA_ORENT:
        return getYoaOrent();
    case AfplibPackage.IOB__XOCA_OSET:
        return getXocaOset();
    case AfplibPackage.IOB__YOCA_OSET:
        return getYocaOset();
    case AfplibPackage.IOB__REF_CSYS:
        return getRefCSys();
    case AfplibPackage.IOB__TRIPLETS:
        return getTriplets();
    }
    return super.eGet(featureID, resolve, coreType);
}
|
java
|
/**
 * Copies this builder's CSV settings onto the given input format.
 *
 * @param format the input format instance to configure
 */
private void configureInputFormat(CsvInputFormat<?> format) {
    format.setCharset(charset);
    format.setDelimiter(lineDelimiter);
    format.setFieldDelimiter(fieldDelimiter);
    format.setCommentPrefix(commentPrefix);
    format.setSkipFirstLineAsHeader(skipFirstLineAsHeader);
    format.setLenient(ignoreInvalidLines);
    // Quoted-string parsing is opt-in and uses the configured quote char.
    if (parseQuotedStrings) {
        format.enableQuotedStringParsing(quoteCharacter);
    }
}
|
java
|
/**
 * Ensures that the given column type is present.
 *
 * @param columnType the column type to validate
 * @throws ExecutionException if {@code columnType} is null
 */
private static void validateInput(ColumnType columnType) throws ExecutionException {
    if (columnType == null) {
        // Local variable typo "messagge" fixed; the logged/thrown text is unchanged.
        String message = "The ColumnType can not be null.";
        LOGGER.error(message);
        throw new ExecutionException(message);
    }
}
|
python
|
def save(self):
    """
    Save this environment on the Ariane server (create or update).

    Builds a payload from the locally set fields, reconciles the OS
    instance id list (removing ``osi_2_rm`` entries and adding
    ``osi_2_add`` entries), POSTs it, then syncs local state.

    :return: self
    """
    LOGGER.debug("Environment.save")
    post_payload = {}
    consolidated_osi_id = []
    # Only fields that are actually set locally are sent to the server.
    if self.id is not None:
        post_payload['environmentID'] = self.id
    if self.name is not None:
        post_payload['environmentName'] = self.name
    if self.description is not None:
        post_payload['environmentDescription'] = self.description
    if self.color_code is not None:
        post_payload['environmentColorCode'] = self.color_code
    if self.osi_ids is not None:
        # Work on a copy so the pending add/remove lists can be applied
        # without mutating self.osi_ids directly.
        consolidated_osi_id = copy.deepcopy(self.osi_ids)
        if self.osi_2_rm is not None:
            for osi_2_rm in self.osi_2_rm:
                if osi_2_rm.id is None:
                    osi_2_rm.sync()
                consolidated_osi_id.remove(osi_2_rm.id)
        if self.osi_2_add is not None:
            for osi_id_2_add in self.osi_2_add:
                if osi_id_2_add.id is None:
                    # Unsaved OS instances must be saved first to get an id.
                    osi_id_2_add.save()
                consolidated_osi_id.append(osi_id_2_add.id)
        post_payload['environmentOSInstancesID'] = consolidated_osi_id
    args = {'http_operation': 'POST', 'operation_path': '', 'parameters': {'payload': json.dumps(post_payload)}}
    response = EnvironmentService.requester.call(args)
    if response.rc != 0:
        LOGGER.warning(
            'Environment.save - Problem while saving environment ' + self.name +
            '. Reason: ' + str(response.response_content) + '-' + str(response.error_message) +
            " (" + str(response.rc) + ")"
        )
    else:
        # Adopt the server-assigned id and refresh the linked OS instances.
        self.id = response.response_content['environmentID']
        if self.osi_2_add is not None:
            for osi_2_add in self.osi_2_add:
                osi_2_add.sync()
        if self.osi_2_rm is not None:
            for osi_2_rm in self.osi_2_rm:
                osi_2_rm.sync()
    # Pending add/remove lists are cleared regardless of the outcome.
    self.osi_2_add.clear()
    self.osi_2_rm.clear()
    self.sync()
    return self
|
java
|
/**
 * Marshalls the given request via the supplied protocol marshaller.
 *
 * @param listActionTypesRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving each field
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(ListActionTypesRequest listActionTypesRequest, ProtocolMarshaller protocolMarshaller) {
    if (listActionTypesRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Each field is written through its pre-built marshalling binding.
        protocolMarshaller.marshall(listActionTypesRequest.getActionOwnerFilter(), ACTIONOWNERFILTER_BINDING);
        protocolMarshaller.marshall(listActionTypesRequest.getNextToken(), NEXTTOKEN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Appends the given entity types to this configuration, lazily creating the
 * backing list on first use.
 *
 * @param entityTypes the entity types to add
 * @return this instance, for call chaining
 */
public EntityRecognizerInputDataConfig withEntityTypes(EntityTypesListItem... entityTypes) {
    if (this.entityTypes == null) {
        setEntityTypes(new java.util.ArrayList<EntityTypesListItem>(entityTypes.length));
    }
    java.util.Collections.addAll(this.entityTypes, entityTypes);
    return this;
}
|
java
|
/**
 * Checks whether the column names captured in {@code groups} match the
 * specified {@code columnType}.
 *
 * @param columnType the type to check against; null means "no check" and
 *        always returns true
 * @param allColumnsShouldMatchType when true, every non-null group must
 *        match the type; otherwise a single match suffices
 * @param query the SQL text the groups were captured from, used to resolve
 *        table names (except for INTEGER, which also accepts constants)
 * @param debugPrint when true, prints diagnostic output to stdout
 * @param groups the captured column names; null entries are skipped
 * @return whether the columns match, per {@code allColumnsShouldMatchType}
 * @throws IllegalArgumentException for an unrecognized ColumnType
 */
protected boolean columnTypeMatches(ColumnType columnType, boolean allColumnsShouldMatchType,
        String query, boolean debugPrint, String ... groups) {
    // If columnType is NULL (unspecified), then no checking of column
    // types is needed
    if (columnType == null) {
        return true;
    }
    // Table names are only needed for column-based lookups; the INTEGER
    // branch below works without them (see comment there).
    List<String> tableNames = null;
    if (columnType != ColumnType.INTEGER) {
        tableNames = getTableNames(query);
    }
    if (debugPrint) {
        System.out.println("  In NonVoltDBBackend.columnTypeMatches (1):");
        System.out.println("    columnType: " + columnType);
        System.out.println("    groups/col: " + Arrays.toString(groups));
        System.out.println("    query     : " + query);
        System.out.println("    tableNames: " + tableNames);
        System.out.println("    allColumnsShouldMatchType: " + allColumnsShouldMatchType);
    }
    boolean allMatch = true;
    boolean atLeastOneMatch = false;
    for (String column : groups) {
        if (column == null) {
            continue;
        }
        if (columnType == ColumnType.GEO) {
            if (isGeoColumn(column, tableNames, debugPrint)) {
                atLeastOneMatch = true;
            } else {
                allMatch = false;
            }
        } else if (columnType == ColumnType.VARCHAR) {
            if (isVarcharColumn(column, tableNames, debugPrint)) {
                atLeastOneMatch = true;
            } else {
                allMatch = false;
            }
        } else if (columnType == ColumnType.BIGINT) {
            if (isBigintColumn(column, tableNames, debugPrint)) {
                atLeastOneMatch = true;
            } else {
                allMatch = false;
            }
        } else if (columnType == ColumnType.INTEGER) {
            // In this case, an integer constant is also acceptable, so we
            // call isInteger, rather than isIntegerColumn.
            // Also, not specifying the table name(s) here (i.e., the null
            // second argument to isInteger) is deliberately saying to treat
            // anything that "looks" like an integer (i.e., the column name
            // is one that is normally used for an integer column) as an
            // integer; this solves certain odd materialized view cases
            // where PostgreSQL decides that the SUM of BIGINT is a DECIMAL,
            // but VoltDB treats it as BIGINT
            if (isInteger(column, null, debugPrint)) {
                atLeastOneMatch = true;
            } else {
                allMatch = false;
            }
        } else {
            throw new IllegalArgumentException("Unrecognized ColumnType: " + columnType);
        }
    }
    if (debugPrint) {
        System.out.println("  In NonVoltDBBackend.columnTypeMatches (2):");
        System.out.println("    allMatch       : " + allMatch);
        System.out.println("    atLeastOneMatch: " + atLeastOneMatch);
        System.out.println("    returning      : " + (allColumnsShouldMatchType ? allMatch: atLeastOneMatch));
    }
    if (allColumnsShouldMatchType) {
        return allMatch;
    } else {
        return atLeastOneMatch;
    }
}
|
python
|
def complete(self, flag_message="Complete", padding=None, force=False):
    """ Log Level: :attr:COMPLETE

    @flag_message: #str flags the message with the given text
        using :func:flag
    @padding: #str 'top', 'bottom' or 'all', adds a new line to the
        specified area with :func:padd
    @force: #bool whether or not to force the message to log in spite
        of the assigned log level
    ..
        from vital.debug import Logg
        logg = Logg(loglevel="v")
        logg("World").complete("Hello")
        # (Hello) World
        logg("Hello world").complete()
        # (Complete) Hello world
    ..
    """
    # Skip entirely unless the COMPLETE level is enabled or forced.
    if not (self.should_log(self.COMPLETE) or force):
        return
    self._print_message(
        flag_message=flag_message,
        color=colors.complete_color,
        padding=padding)
|
java
|
/**
 * Computes the maximum size of the basis, clamped into the optional
 * [lowerBound, upperBound] range (each bound may be null, meaning
 * unconstrained on that side).
 *
 * @return the clamped maximum size
 */
@Override
public int maximumSize(Container container,
        List components,
        FormLayout.Measure minMeasure,
        FormLayout.Measure prefMeasure,
        FormLayout.Measure defaultMeasure) {
    int size = basis.maximumSize(
            container, components, minMeasure, prefMeasure, defaultMeasure);
    if (lowerBound != null) {
        final int lower = lowerBound.maximumSize(
                container, components, minMeasure, prefMeasure, defaultMeasure);
        size = Math.max(size, lower);
    }
    if (upperBound != null) {
        final int upper = upperBound.maximumSize(
                container, components, minMeasure, prefMeasure, defaultMeasure);
        size = Math.min(size, upper);
    }
    return size;
}
|
java
|
/**
 * Builds the base element data bean for a container element: general info,
 * load time, last-modification metadata, navigation text, and the element's
 * settings converted to the client format.
 *
 * @param page the page resource used to resolve the setting configuration
 * @param element the container element to describe
 * @return the populated element data bean
 * @throws CmsException if reading resource or user information fails
 */
private CmsContainerElementData getBaseElementData(CmsResource page, CmsContainerElementBean element)
throws CmsException {
    CmsResourceUtil resUtil = new CmsResourceUtil(m_cms, element.getResource());
    CmsContainerElementData elementData = new CmsContainerElementData();
    setElementInfo(element, elementData);
    elementData.setLoadTime(System.currentTimeMillis());
    elementData.setLastModifiedDate(element.getResource().getDateLastModified());
    elementData.setLastModifiedByUser(m_cms.readUser(element.getResource().getUserLastModified()).getName());
    elementData.setNavText(resUtil.getNavText());
    // Resolve the property/setting configuration for this element on this page.
    Map<String, CmsXmlContentProperty> settingConfig = CmsXmlContentPropertyHelper.getPropertyInfo(
        m_cms,
        page,
        element.getResource());
    // Convert the element's individual settings into the client-side format.
    elementData.setSettings(
        CmsXmlContentPropertyHelper.convertPropertiesToClientFormat(
            m_cms,
            element.getIndividualSettings(),
            settingConfig));
    return elementData;
}
|
java
|
/**
 * Returns an iterator over the string form of every code point from the
 * start of the character's block (per {@code unicodeBlockStarts}) up to and
 * including the character itself.
 * NOTE(review): assumes {@code unicodeBlockStarts} is sorted ascending —
 * confirm at the field's declaration.
 *
 * @param obj the character whose preceding block range is iterated
 * @return an iterator of single-code-point strings
 */
public Iterator<String> getBshIterator(final Character obj) {
    final int value = obj.charValue();
    int start = 0;
    int check = 33;
    // Advance through the block-start table until it passes the character;
    // the last boundary not exceeding the character becomes the range start.
    for (final int next : unicodeBlockStarts) {
        if (check > value) {
            break;
        }
        start = check;
        check = next;
    }
    return IntStream.rangeClosed(start, value).boxed()
            .map(Character::toChars)
            .map(String::valueOf)
            .iterator();
}
|
java
|
/**
 * Stores the template home directory in both the typed and the raw
 * configuration maps (the raw map is keyed by the setting's string key).
 *
 * @param home the template home directory
 */
public void setTemplateHome(File home) {
    data.put(HOME_TEMPLATE, home);
    raw.put(HOME_TEMPLATE.getKey(), home);
}
|
python
|
def wait_on_event(event, timeout=None):
    """
    Waits on a single threading Event, with an optional timeout.

    This is here for compatibility reasons as python 2 can't reliably wait
    on an event without a timeout and python 3 doesn't define a `maxint`.
    """
    if timeout is not None:
        event.wait(timeout)
    elif six.PY2:
        # Python 2's Event.wait() without a timeout winds up ignoring
        # signals (a threading-lib bug), so poll with the largest timeout
        # available instead.
        while not event.is_set():
            event.wait(sys.maxint)
    else:
        event.wait()
|
java
|
/**
 * Desugars every attributed environment in {@code envs}, collecting the
 * resulting (environment, class) pairs, and stops if an error was reported.
 *
 * @param envs the environments to desugar
 * @return the desugared environments paired with their class declarations
 */
public Queue<Pair<Env<AttrContext>, JCClassDecl>> desugar(Queue<Env<AttrContext>> envs) {
    ListBuffer<Pair<Env<AttrContext>, JCClassDecl>> results = new ListBuffer<>();
    for (Env<AttrContext> env : envs) {
        desugar(env, results);
    }
    return stopIfError(CompileState.FLOW, results);
}
/** Cache of environments that were already desugared (e.g. as superclass
 *  dependencies), consulted by {@code desugar(Env, Queue)} so the same
 *  environment is not lowered twice. */
HashMap<Env<AttrContext>, Queue<Pair<Env<AttrContext>, JCClassDecl>>> desugaredEnvs =
    new HashMap<Env<AttrContext>, Queue<Pair<Env<AttrContext>, JCClassDecl>>>();
/**
 * Prepare attributed parse trees, in conjunction with their attribution contexts,
 * for source or code generation. If the file was not listed on the command line,
 * the current implicitSourcePolicy is taken into account.
 * The preparation stops as soon as an error is found.
 *
 * @param env the attributed environment to desugar
 * @param results receives the (environment, class) pairs produced
 */
protected void desugar(final Env<AttrContext> env, Queue<Pair<Env<AttrContext>, JCClassDecl>> results) {
    if (shouldStop(CompileState.TRANSTYPES))
        return;
    if (implicitSourcePolicy == ImplicitSourcePolicy.NONE
            && !inputFiles.contains(env.toplevel.sourcefile)) {
        return;
    }
    // Already lowered earlier (e.g. as a superclass dependency): reuse the cache.
    if (compileStates.isDone(env, CompileState.LOWER)) {
        results.addAll(desugaredEnvs.get(env));
        return;
    }
    /**
     * Ensure that superclasses of C are desugared before C itself. This is
     * required for two reasons: (i) as erasure (TransTypes) destroys
     * information needed in flow analysis and (ii) as some checks carried
     * out during lowering require that all synthetic fields/methods have
     * already been added to C and its superclasses.
     */
    class ScanNested extends TreeScanner {
        Set<Env<AttrContext>> dependencies = new LinkedHashSet<Env<AttrContext>>();
        // Whether this compilation unit itself contains lambdas or method refs.
        protected boolean hasLambdas;
        @Override
        public void visitClassDef(JCClassDecl node) {
            Type st = types.supertype(node.sym.type);
            boolean envForSuperTypeFound = false;
            while (!envForSuperTypeFound && st.hasTag(CLASS)) {
                ClassSymbol c = st.tsym.outermostClass();
                Env<AttrContext> stEnv = enter.getEnv(c);
                if (stEnv != null && env != stEnv) {
                    if (dependencies.add(stEnv)) {
                        boolean prevHasLambdas = hasLambdas;
                        try {
                            scan(stEnv.tree);
                        } finally {
                            /*
                             * ignore any updates to hasLambdas made during
                             * the nested scan, this ensures an initalized
                             * LambdaToMethod is available only to those
                             * classes that contain lambdas
                             */
                            hasLambdas = prevHasLambdas;
                        }
                    }
                    envForSuperTypeFound = true;
                }
                st = types.supertype(st);
            }
            super.visitClassDef(node);
        }
        @Override
        public void visitLambda(JCLambda tree) {
            hasLambdas = true;
            super.visitLambda(tree);
        }
        @Override
        public void visitReference(JCMemberReference tree) {
            hasLambdas = true;
            super.visitReference(tree);
        }
    }
    ScanNested scanner = new ScanNested();
    scanner.scan(env.tree);
    // Desugar any not-yet-flowed superclass dependencies first (see above).
    for (Env<AttrContext> dep: scanner.dependencies) {
        if (!compileStates.isDone(dep, CompileState.FLOW))
            desugaredEnvs.put(dep, desugar(flow(attribute(dep))));
    }
    //We need to check for error another time as more classes might
    //have been attributed and analyzed at this stage
    if (shouldStop(CompileState.TRANSTYPES))
        return;
    if (verboseCompilePolicy)
        printNote("[desugar " + env.enclClass.sym + "]");
    JavaFileObject prev = log.useSource(env.enclClass.sym.sourcefile != null ?
                              env.enclClass.sym.sourcefile :
                              env.toplevel.sourcefile);
    try {
        //save tree prior to rewriting
        JCTree untranslated = env.tree;
        make.at(Position.FIRSTPOS);
        TreeMaker localMake = make.forToplevel(env.toplevel);
        if (env.tree instanceof JCCompilationUnit) {
            if (!(stubOutput || sourceOutput || printFlat)) {
                if (shouldStop(CompileState.LOWER))
                    return;
                List<JCTree> pdef = lower.translateTopLevelClass(env, env.tree, localMake);
                if (pdef.head != null) {
                    Assert.check(pdef.tail.isEmpty());
                    results.add(new Pair<Env<AttrContext>, JCClassDecl>(env, (JCClassDecl)pdef.head));
                }
            }
            return;
        }
        if (stubOutput) {
            //emit stub Java source file, only for compilation
            //units enumerated explicitly on the command line
            JCClassDecl cdef = (JCClassDecl)env.tree;
            if (untranslated instanceof JCClassDecl &&
                rootClasses.contains((JCClassDecl)untranslated) &&
                ((cdef.mods.flags & (Flags.PROTECTED|Flags.PUBLIC)) != 0 ||
                 cdef.sym.packge().getQualifiedName() == names.java_lang)) {
                results.add(new Pair<Env<AttrContext>, JCClassDecl>(env, removeMethodBodies(cdef)));
            }
            return;
        }
        if (shouldStop(CompileState.TRANSTYPES))
            return;
        // Erasure.
        env.tree = transTypes.translateTopLevelClass(env.tree, localMake);
        compileStates.put(env, CompileState.TRANSTYPES);
        if (source.allowLambda() && scanner.hasLambdas) {
            if (shouldStop(CompileState.UNLAMBDA))
                return;
            env.tree = LambdaToMethod.instance(context).translateTopLevelClass(env, env.tree, localMake);
            compileStates.put(env, CompileState.UNLAMBDA);
        }
        if (shouldStop(CompileState.LOWER))
            return;
        if (sourceOutput) {
            //emit standard Java source file, only for compilation
            //units enumerated explicitly on the command line
            JCClassDecl cdef = (JCClassDecl)env.tree;
            if (untranslated instanceof JCClassDecl &&
                rootClasses.contains((JCClassDecl)untranslated)) {
                results.add(new Pair<Env<AttrContext>, JCClassDecl>(env, cdef));
            }
            return;
        }
        //translate out inner classes
        List<JCTree> cdefs = lower.translateTopLevelClass(env, env.tree, localMake);
        compileStates.put(env, CompileState.LOWER);
        if (shouldStop(CompileState.LOWER))
            return;
        //generate code for each class
        for (List<JCTree> l = cdefs; l.nonEmpty(); l = l.tail) {
            JCClassDecl cdef = (JCClassDecl)l.head;
            results.add(new Pair<Env<AttrContext>, JCClassDecl>(env, cdef));
        }
    }
    finally {
        log.useSource(prev);
    }
}
|
java
|
/**
 * Returns the display name of the column at the given index, or {@code null}
 * when the index is out of range or the names cannot be retrieved (the error
 * is logged instead of propagated so table rendering keeps working).
 *
 * @param col zero-based column index
 * @return the column name, or {@code null} on failure
 */
@Override
public String getColumnName(final int col)
{
    try
    {
        return columnsModel.getColumnNames()[col];
    }
    catch (Exception e)
    {
        // Fixed message typo: "occured" -> "occurred".
        log.log(Level.SEVERE, "Error occurred on getting column name on index " + col + ".", e);
    }
    return null;
}
|
python
|
def login(self, password):
    """Attempt to log in as the current user with the given password.

    On success the session token is stored on this user and installed on the
    underlying connection (both as a ``useSession`` call and a cookie), and
    ``logged_in`` is set.

    :raises RuntimeError: if already logged in, or the API reports an error
    :return: True on success
    """
    if self.logged_in:
        raise RuntimeError("User already logged in!")
    credentials = {"name": self.nick, "password": password}
    resp = self.conn.make_api_call("login", credentials)
    if "error" in resp:
        error = resp["error"]
        raise RuntimeError(
            f"Login failed: {error.get('message') or error}"
        )
    self.session = resp["session"]
    self.conn.make_call("useSession", self.session)
    self.conn.cookies.update({"session": self.session})
    self.logged_in = True
    return True
|
java
|
/**
 * Serializes a list of function-argument terms into {@code sb}, inserting a
 * single space between terms, except before a comma separator and after a
 * unary minus (a '-' preceded by nothing or another operator).
 *
 * @param sb   destination builder
 * @param list terms to serialize in order
 * @return {@code sb} for chaining
 */
public static StringBuilder appendFunctionArgs(StringBuilder sb, List<Term<?>> list) {
    Term<?> previous = null;
    Term<?> beforePrevious = null;
    for (Term<?> current : list) {
        boolean needSpace = previous != null;
        // never put a space in front of a comma separator
        if (current instanceof TermOperator && ((TermOperator) current).getValue() == ',')
            needSpace = false;
        // no space after a unary minus (nothing or an operator precedes the '-')
        if (previous instanceof TermOperator && ((TermOperator) previous).getValue() == '-'
                && (beforePrevious == null || beforePrevious instanceof TermOperator))
            needSpace = false;
        if (needSpace)
            sb.append(SPACE_DELIM);
        beforePrevious = previous;
        previous = current;
        sb.append(current.toString());
    }
    return sb;
}
|
java
|
/**
 * Returns the manifest headers of the contributing bundle, or {@code null}
 * when no contributing bundle is available. Entry/exit is traced at FINER.
 */
@Override
public Dictionary<String, String> getHeaders() {
    final String sourceMethod = "getHeaders"; //$NON-NLS-1$
    final boolean traceEnabled = log.isLoggable(Level.FINER);
    if (traceEnabled) {
        log.entering(PlatformServicesImpl.class.getName(), sourceMethod);
    }
    Dictionary<String, String> headers =
            (contributingBundle != null) ? contributingBundle.getHeaders() : null;
    if (traceEnabled) {
        log.exiting(PlatformServicesImpl.class.getName(), sourceMethod, headers);
    }
    return headers;
}
|
python
|
def create(mcs, name, dict=None, object_name=None):
    """
    Create a new :class:`Singleton` class.

    :param name: Name of the new class (used in its __repr__ when no
        object_name is given)
    :type name: str
    :param dict: Optional dictionary of the class's attributes
    :type dict: Optional[Dict[str, Any]]
    :param object_name: Name of an instance of the singleton, used in __repr__
    :type object_name: Optional[str]
    :return: A new Singleton instance
    :rtype: Singleton
    """
    attributes = {} if dict is None else dict
    shown = name + '()' if object_name is None else object_name

    def __repr__(self=None):
        # Closes over the chosen display string.
        return shown

    # Only install our __repr__ when the caller did not supply one.
    attributes.setdefault('__repr__', __repr__)
    return mcs(name, (object,), attributes)
|
python
|
def _get_bound_pressure_height(pressure, bound, heights=None, interpolate=True):
    """Calculate the bounding pressure and height in a layer.
    Given pressure, optional heights, and a bound, return either the closest pressure/height
    or interpolated pressure/height. If no heights are provided, a standard atmosphere is
    assumed.
    Parameters
    ----------
    pressure : `pint.Quantity`
        Atmospheric pressures
    bound : `pint.Quantity`
        Bound to retrieve (in pressure or height)
    heights : `pint.Quantity`, optional
        Atmospheric heights associated with the pressure levels. Defaults to using
        heights calculated from ``pressure`` assuming a standard atmosphere.
    interpolate : boolean, optional
        Interpolate the bound or return the nearest. Defaults to True.
    Returns
    -------
    `pint.Quantity`
        The bound pressure and height.
    """
    # Make sure pressure is monotonically decreasing
    # (heights, if given, are reordered with the same permutation so they
    # stay aligned with their pressure levels)
    sort_inds = np.argsort(pressure)[::-1]
    pressure = pressure[sort_inds]
    if heights is not None:
        heights = heights[sort_inds]
    # Bound is given in pressure
    # (dimensionality mass / (length * time^2) is the pint signature of pressure)
    if bound.dimensionality == {'[length]': -1.0, '[mass]': 1.0, '[time]': -2.0}:
        # If the bound is in the pressure data, we know the pressure bound exactly
        if bound in pressure:
            bound_pressure = bound
            # If we have heights, we know the exact height value, otherwise return standard
            # atmosphere height for the pressure
            if heights is not None:
                bound_height = heights[pressure == bound_pressure]
            else:
                bound_height = pressure_to_height_std(bound_pressure)
        # If bound is not in the data, return the nearest or interpolated values
        else:
            if interpolate:
                bound_pressure = bound  # Use the user specified bound
                if heights is not None:  # Interpolate heights from the height data
                    # log interpolation since pressure decreases ~exponentially with height
                    bound_height = log_interpolate_1d(bound_pressure, pressure, heights)
                else:  # If not heights given, use the standard atmosphere
                    bound_height = pressure_to_height_std(bound_pressure)
            else:  # No interpolation, find the closest values
                idx = (np.abs(pressure - bound)).argmin()
                bound_pressure = pressure[idx]
                if heights is not None:
                    bound_height = heights[idx]
                else:
                    bound_height = pressure_to_height_std(bound_pressure)
    # Bound is given in height
    elif bound.dimensionality == {'[length]': 1.0}:
        # If there is height data, see if we have the bound or need to interpolate/find nearest
        if heights is not None:
            if bound in heights:  # Bound is in the height data
                bound_height = bound
                bound_pressure = pressure[heights == bound]
            else:  # Bound is not in the data
                if interpolate:
                    bound_height = bound
                    # Need to cast back to the input type since interp (up to at least numpy
                    # 1.13 always returns float64. This can cause upstream users problems,
                    # resulting in something like np.append() to upcast.
                    bound_pressure = np.interp(np.atleast_1d(bound), heights,
                                               pressure).astype(bound.dtype) * pressure.units
                else:
                    idx = (np.abs(heights - bound)).argmin()
                    bound_pressure = pressure[idx]
                    bound_height = heights[idx]
        else:  # Don't have heights, so assume a standard atmosphere
            bound_height = bound
            bound_pressure = height_to_pressure_std(bound)
            # If interpolation is on, this is all we need, if not, we need to go back and
            # find the pressure closest to this and refigure the bounds
            if not interpolate:
                idx = (np.abs(pressure - bound_pressure)).argmin()
                bound_pressure = pressure[idx]
                bound_height = pressure_to_height_std(bound_pressure)
    # Bound has invalid units
    else:
        raise ValueError('Bound must be specified in units of length or pressure.')
    # If the bound is out of the range of the data, we shouldn't extrapolate
    # (both checks use tolerant comparisons so boundary values are accepted)
    if not (_greater_or_close(bound_pressure, np.nanmin(pressure) * pressure.units)
            and _less_or_close(bound_pressure, np.nanmax(pressure) * pressure.units)):
        raise ValueError('Specified bound is outside pressure range.')
    if heights is not None:
        if not (_less_or_close(bound_height, np.nanmax(heights) * heights.units)
                and _greater_or_close(bound_height, np.nanmin(heights) * heights.units)):
            raise ValueError('Specified bound is outside height range.')
    return bound_pressure, bound_height
|
python
|
def generate_displacements(self,
                           distance=0.01,
                           is_plusminus='auto',
                           is_diagonal=True,
                           is_trigonal=False):
    """Generate and store the displacement dataset.

    Least displacement directions are derived from the symmetry and then
    converted into concrete displacements of magnitude ``distance`` for the
    supercell.
    """
    directions = get_least_displacements(self._symmetry,
                                         is_plusminus=is_plusminus,
                                         is_diagonal=is_diagonal,
                                         is_trigonal=is_trigonal,
                                         log_level=self._log_level)
    dataset = directions_to_displacement_dataset(directions,
                                                 distance,
                                                 self._supercell)
    self.set_displacement_dataset(dataset)
|
python
|
def create_stack_user(self):
    """Create the ``stack`` user on the machine with passwordless sudo and
    SSH access copied from root, then open an SSH client for it.
    """
    # exit status 9 means the user already exists, which is acceptable
    self.run('adduser -m stack', success_status=(0, 9))
    self.create_file('/etc/sudoers.d/stack', 'stack ALL=(root) NOPASSWD:ALL\n')
    for command in ('mkdir -p /home/stack/.ssh',
                    'cp /root/.ssh/authorized_keys /home/stack/.ssh/authorized_keys',
                    'chown -R stack:stack /home/stack/.ssh',
                    'chmod 700 /home/stack/.ssh',
                    'chmod 600 /home/stack/.ssh/authorized_keys'):
        self.run(command)
    self.ssh_pool.build_ssh_client(self.hostname, 'stack',
                                   self._key_filename,
                                   self.via_ip)
|
python
|
def red(cls):
    """Switch the console text foreground color to red."""
    # Clear the current foreground bits, then set the red bit.
    attributes = cls._get_text_attributes()
    attributes = (attributes & ~win32.FOREGROUND_MASK) | win32.FOREGROUND_RED
    cls._set_text_attributes(attributes)
|
java
|
/**
 * Validates an identifying token against the authorization providers
 * configured for the given charging station. Returns the first valid token
 * produced by a provider, or the original token unchanged when validation
 * is not possible (missing station id, unknown station, no providers) or
 * when no provider accepts the token.
 */
@Override
public IdentifyingToken validate(IdentifyingToken token, @Nullable ChargingStationId chargingStationId) {
    if (chargingStationId == null) {
        LOG.warn("No charging station id passed to validation request for token {}", token.getToken());
        return token;
    }
    ChargingStation chargingStation = repository.findOne(chargingStationId.getId());
    if (chargingStation == null) {
        LOG.warn("Charging station not found in repository: {}", chargingStationId);
        return token;
    }
    List<String> providerNames = chargingStation.getAuthorizationProvidersAsList();
    if (providerNames.isEmpty()) {
        LOG.warn("No authorization providers configured for charging station: {}", chargingStationId);
        return token;
    }
    for (String providerName : providerNames) {
        AuthorizationProvider provider = providers.get(providerName);
        if (provider == null) {
            continue;
        }
        IdentifyingToken validated = provider.validate(token, chargingStationId);
        if (validated.isValid()) {
            return validated;
        }
    }
    return token;
}
|
java
|
/**
 * Imports every effective Gradle dependency (for the configured scopes) as
 * a ShrinkWrap archive of the requested type. Dependencies that are not in
 * a zip-like format are logged at WARNING and skipped.
 *
 * @param archive archive type to import each dependency as
 * @return archives for all importable dependencies
 */
public List<? extends Archive> asList(final Class<? extends Archive> archive) {
    final List<Archive> imported = new ArrayList<>();
    final GradleEffectiveDependencies effectiveDependencies =
            GradleRunner.getEffectiveDependencies(projectDirectory);
    for (ScopeType scope : scopeTypesDependencies) {
        for (File dependency : effectiveDependencies.getDependenciesByScope(scope)) {
            try {
                imported.add(ShrinkWrap.create(ZipImporter.class, dependency.getName())
                        .importFrom(dependency)
                        .as(archive));
            } catch (Exception e) {
                log.log(Level.WARNING, "Cannot import gradle dependency " + dependency + ". Not a zip-like format", e);
            }
        }
    }
    return imported;
}
|
python
|
def get_property(self):
    """
    Return the properties declared for each variable, keyed by variable name.

    Example
    -------------
    >>> from pgmpy.readwrite import BIFReader
    >>> reader = BIFReader("bif_test.bif")
    >>> reader.get_property()
    {'bowel-problem': ['position = (335, 99)'],
    'dog-out': ['position = (300, 195)'],
    'family-out': ['position = (257, 99)'],
    'hear-bark': ['position = (296, 268)'],
    'light-on': ['position = (218, 195)']}
    """
    properties_by_name = {}
    for block in self.variable_block():
        name = self.name_expr.searchString(block)[0][0]
        matches = self.property_expr.searchString(block)
        # flatten the list of matches and strip surrounding whitespace
        properties_by_name[name] = [prop.strip()
                                    for match in matches
                                    for prop in match]
    return properties_by_name
|
python
|
def AgregarDatosAutorizacion(self, nro_remito=None, cod_autorizacion=None, fecha_emision=None, fecha_vencimiento=None, **kwargs):
    "Attach the authorization data (number, code, emission/expiry dates) to the electronic meat remittance"
    # Extra keyword arguments are accepted for forward compatibility but ignored.
    self.remito['datosEmision'] = {
        'nroRemito': nro_remito,
        'codAutorizacion': cod_autorizacion,
        'fechaEmision': fecha_emision,
        'fechaVencimiento': fecha_vencimiento,
    }
    return True
|
python
|
def check_file_type(files):
    """Classify the given file(s) as 'fasta', 'fastq', 'empty' or 'other'
    (mixed/unknown), judging each non-empty file by the first character of
    its first line ('>' for fasta, '@' for fastq reads).
    """
    fasta_only = True
    reads_only = True
    empty_only = True
    # Accept a single path as well as a list of paths (py2/py3 string types)
    if sys.version_info < (3, 0):
        if isinstance(files, (str, unicode)):
            files = [files]
    else:
        if isinstance(files, str):
            files = [files]
    for file_ in files:
        debug.log('Checking file type: %s' % file_)
        # Empty files carry no format information
        if os.stat(file_).st_size == 0:
            continue
        empty_only = False
        with open_(file_) as f:
            first_char = f.readline()[0]
        if first_char != "@":
            reads_only = False
        if first_char != ">":
            fasta_only = False
    if empty_only:
        return 'empty'
    if fasta_only:
        return 'fasta'
    if reads_only:
        return 'fastq'
    return 'other'
|
python
|
def stat(self, path):
    """
    Retrieve information about a file on the remote system. The return
    value is an object whose attributes correspond to the attributes of
    python's C{stat} structure as returned by C{os.stat}, except that it
    contains fewer fields. An SFTP server may return as much or as little
    info as it wants, so the results may vary from server to server.
    Unlike a python C{stat} object, the result may not be accessed as a
    tuple.
    The fields supported are: C{st_mode}, C{st_size}, C{st_uid}, C{st_gid},
    C{st_atime}, and C{st_mtime}.
    @param path: the filename to stat
    @type path: str
    @return: an object containing attributes about the given file
    @rtype: SFTPAttributes
    """
    path = self._adjust_cwd(path)
    self._log(DEBUG, 'stat(%r)' % path)
    response_type, response = self._request(CMD_STAT, path)
    # The server must answer a STAT request with an ATTRS packet.
    if response_type != CMD_ATTRS:
        raise SFTPError('Expected attributes')
    return SFTPAttributes._from_msg(response)
|
java
|
/**
 * Builds a typed query backed by the given raw SQL statement.
 *
 * @param entityType entity class the result rows map to; must not be null
 * @param sql        raw SQL to execute; must contain text
 * @return an Ebean query bound to the parsed raw SQL
 */
@Override
public <T> Query<T> createSqlQuery(Class<T> entityType, String sql) {
    Assert.notNull(entityType, "entityType must not null");
    Assert.hasText(sql, "sql must has text");
    return ebeanServer.find(entityType)
            .setRawSql(RawSqlBuilder.parse(sql).create());
}
|
java
|
/**
 * Narrows every {@link Number} element of {@code args} in place via
 * {@code narrow}.
 *
 * @param args argument array, mutated in place
 * @return true when at least one argument was replaced by a narrower value
 */
protected boolean narrowArguments(Object[] args) {
    boolean changed = false;
    for (int i = 0; i < args.length; ++i) {
        Object value = args[i];
        if (!(value instanceof Number)) {
            continue;
        }
        Object narrowed = narrow((Number) value);
        if (narrowed != value) {
            changed = true;
        }
        args[i] = narrowed;
    }
    return changed;
}
|
java
|
/**
 * Asynchronously creates a message sender for the entity addressed by the
 * given connection string builder.
 *
 * @param builder non-null connection string builder carrying endpoint,
 *                entity path and client settings
 * @return a future completing with the created sender
 */
public static CompletableFuture<IMessageSender> createMessageSenderFromConnectionStringBuilderAsync(ConnectionStringBuilder builder) {
    Utils.assertNonNull("amqpConnectionStringBuilder", builder);
    return createMessageSenderFromEntityPathAsync(
            builder.getEndpoint(),
            builder.getEntityPath(),
            Util.getClientSettingsFromConnectionStringBuilder(builder));
}
|
java
|
/**
 * Cleans up a remote transaction once it has committed, but only for
 * configurations where no further phase will arrive: total-order protocol,
 * or optimistic locking with one-phase commit.
 */
public void remoteTransactionCommitted(GlobalTransaction gtx, boolean onePc) {
    boolean optimisticOnePhase =
            onePc && (configuration.transaction().lockingMode() == LockingMode.OPTIMISTIC);
    boolean totalOrder = configuration.transaction().transactionProtocol().isTotalOrder();
    if (totalOrder || optimisticOnePhase) {
        removeRemoteTransaction(gtx);
    }
}
|
python
|
def batch(args):
    """
    %prog batch all.cds *.anchors
    Compute Ks values for a set of anchors file. This will generate a bunch of
    work directories for each comparisons. The anchorsfile should be in the form
    of specie1.species2.anchors.
    """
    from jcvi.apps.grid import MakeManager

    p = OptionParser(batch.__doc__)
    opts, args = p.parse_args(args)
    if len(args) < 2:
        sys.exit(not p.print_help())

    cdsfile, anchors = args[0], args[1:]
    # Work directory name: "species1.species2" taken from the anchors filename
    workdirs = [".".join(op.basename(a).split(".")[:2]) for a in anchors]
    for wd in workdirs:
        mkdir(wd)

    mm = MakeManager()
    for wd, anchorfile in zip(workdirs, anchors):
        # Step 1: extract the pairwise CDS sequences for this comparison
        pairscdsfile = wd + ".cds.fasta"
        prepare_cmd = "python -m jcvi.apps.ks prepare {} {} -o {}".\
            format(anchorfile, cdsfile, pairscdsfile)
        mm.add((anchorfile, cdsfile), pairscdsfile, prepare_cmd)
        # Step 2: compute Ks values from the paired CDS file
        ksfile = wd + ".ks"
        calc_cmd = "python -m jcvi.apps.ks calc {} -o {} --workdir {}".\
            format(pairscdsfile, ksfile, wd)
        mm.add(pairscdsfile, ksfile, calc_cmd)
    mm.write()
|
java
|
/**
 * Convenience overload that creates a preset from name, container, clip,
 * audio and encryption only, delegating to the full overload with null /
 * false defaults for the remaining parameters.
 */
public CreatePresetResponse createPreset(
        String presetName, String container, Clip clip, Audio audio, Encryption encryption) {
    CreatePresetResponse response =
            createPreset(presetName, null, container, false, clip, audio, null, encryption, null);
    return response;
}
|
python
|
def elem2json(elem, options, strip_ns=1, strip=1):
    """Convert an ElementTree or Element into a JSON string.

    When ``options.pretty`` is set, the output is indented and key-sorted.
    """
    # Accept a whole tree as well as a bare element.
    if hasattr(elem, 'getroot'):
        elem = elem.getroot()
    pretty = options.pretty
    internal = elem_to_internal(elem, strip_ns=strip_ns, strip=strip)
    if pretty:
        return json.dumps(internal, sort_keys=True, indent=4, separators=(',', ': '))
    return json.dumps(internal)
|
java
|
/**
 * Binds the given scope as a handler for the keyboard "keydown" event.
 *
 * @param jsScope handler body to render
 * @return the chainable event-binding statement
 */
public static ChainableStatement keydown(JsScope jsScope)
{
    String eventLabel = KeyboardEvent.KEYDOWN.getEventLabel();
    return new DefaultChainableStatement(eventLabel, jsScope.render());
}
|
java
|
/**
 * Returns the index of the smallest element (per {@code comparator}) among
 * the children and grandchildren of node {@code k}.
 *
 * NOTE(review): assumes the implicit array heap layout in which node k's
 * children sit at 2k and 2k+1 and its grandchildren at 4k..4k+3 (1-based,
 * valid while the index is &lt;= size) -- confirm against the enclosing class.
 */
private int minChildOrGrandchildWithComparator(int k) {
    int gc = 4 * k;   // index of the first grandchild
    int mingc;        // index of the current minimum candidate
    K gcValue;        // value at that index
    // 4 grandchilden
    if (gc + 3 <= size) {
        // All four grandchildren exist; children need not be examined
        // because each child is no smaller than its own children here.
        gcValue = array[gc];
        mingc = gc;
        if (comparator.compare(array[++gc], gcValue) < 0) {
            gcValue = array[gc];
            mingc = gc;
        }
        if (comparator.compare(array[++gc], gcValue) < 0) {
            gcValue = array[gc];
            mingc = gc;
        }
        if (comparator.compare(array[++gc], gcValue) < 0) {
            mingc = gc;
        }
        return mingc;
    }
    // less or equal to 3
    switch (size - gc) {
        case 2:
            // 3 grandchildren, two children
            gcValue = array[gc];
            mingc = gc;
            if (comparator.compare(array[++gc], gcValue) < 0) {
                gcValue = array[gc];
                mingc = gc;
            }
            if (comparator.compare(array[++gc], gcValue) < 0) {
                mingc = gc;
            }
            return mingc;
        case 1:
            // 2 grandchildren, maybe two children
            gcValue = array[gc];
            mingc = gc;
            if (comparator.compare(array[++gc], gcValue) < 0) {
                gcValue = array[gc];
                mingc = gc;
            }
            // second child (2k+1) has no children of its own; compare it directly
            if (2 * k + 1 <= size && comparator.compare(array[2 * k + 1], gcValue) < 0) {
                mingc = 2 * k + 1;
            }
            return mingc;
        case 0:
            // 1 grandchild, maybe two children
            gcValue = array[gc];
            mingc = gc;
            if (2 * k + 1 <= size && comparator.compare(array[2 * k + 1], gcValue) < 0) {
                mingc = 2 * k + 1;
            }
            return mingc;
    }
    // 0 grandchildren
    // Only the children at 2k (guaranteed by the caller) and possibly 2k+1 remain.
    mingc = 2 * k;
    gcValue = array[mingc];
    if (2 * k + 1 <= size && comparator.compare(array[2 * k + 1], gcValue) < 0) {
        mingc = 2 * k + 1;
    }
    return mingc;
}
|
java
|
/**
 * Creates the singleton {@code ReloadableProperties} instance: wires any
 * pre-registered listeners, performs the initial (forced) load, and
 * registers the bean with the reload monitor so later configuration
 * changes trigger reloads.
 *
 * @return the reloadable properties instance
 * @throws IOException if the initial property load fails
 */
protected Object createMyInstance() throws IOException {
    // would like to uninherit from AbstractFactoryBean (but it's final!)
    if (!isSingleton()) {
        throw new RuntimeException("ReloadablePropertiesFactoryBean only works as singleton");
    }
    // attach listeners that were registered before instantiation, if any
    reloadableProperties = new ReloadablePropertiesImpl();
    if (preListeners != null) {
        reloadableProperties.setListeners(preListeners);
    }
    // initial forced load of the properties
    reload(true);
    // register with the monitor so future changes are picked up
    ReloadConfigurationMonitor.addReconfigurableBean((ReconfigurableBean) reloadableProperties);
    return reloadableProperties;
}
|
java
|
/**
 * Adds the body-file path and the plain-text body to the exception message
 * being built.
 *
 * @param br        message builder to append to
 * @param bodyFile  path of the mail body file
 * @param plainText plain-text body content
 */
protected void setupBodyFileInfo(ExceptionMessageBuilder br, String bodyFile, String plainText) {
    br.addItem("Body File");
    br.addElement(bodyFile);
    br.addItem("Plain Text");
    br.addElement(plainText);
}
|
java
|
/**
 * Returns a decorator that defers disposal of the underlying RSocket until
 * all in-flight interactions have terminated. Calling {@code dispose()} on
 * the proxy marks it closed; the source is disposed immediately when
 * nothing is in flight, otherwise when the in-flight count drops to zero.
 *
 * <p>Refactored: the identical count/dispose bookkeeping that was repeated
 * in all five interaction methods is extracted into {@code tracked(...)}
 * helpers.
 */
public static Function<RSocket, RSocket> safeClose() {
  return source ->
      new RSocketProxy(source) {
        final AtomicInteger count = new AtomicInteger();
        final AtomicBoolean closed = new AtomicBoolean();

        /** Decrements the in-flight count; disposes the source when closed and idle. */
        private void onTerminated() {
          if (count.decrementAndGet() == 0 && closed.get()) {
            source.dispose();
          }
        }

        /** Counts a Mono interaction on subscribe and un-counts it on termination. */
        private <T> Mono<T> tracked(Mono<T> interaction) {
          return interaction
              .doOnSubscribe(s -> count.incrementAndGet())
              .doFinally(signalType -> onTerminated());
        }

        /** Counts a Flux interaction on subscribe and un-counts it on termination. */
        private Flux<Payload> tracked(Flux<Payload> interaction) {
          return interaction
              .doOnSubscribe(s -> count.incrementAndGet())
              .doFinally(signalType -> onTerminated());
        }

        @Override
        public Mono<Void> fireAndForget(Payload payload) {
          return tracked(source.fireAndForget(payload));
        }

        @Override
        public Mono<Payload> requestResponse(Payload payload) {
          return tracked(source.requestResponse(payload));
        }

        @Override
        public Flux<Payload> requestStream(Payload payload) {
          return tracked(source.requestStream(payload));
        }

        @Override
        public Flux<Payload> requestChannel(Publisher<Payload> payloads) {
          return tracked(source.requestChannel(payloads));
        }

        @Override
        public Mono<Void> metadataPush(Payload payload) {
          return tracked(source.metadataPush(payload));
        }

        @Override
        public void dispose() {
          if (closed.compareAndSet(false, true)) {
            // Nothing in flight: dispose now; otherwise the last
            // terminating interaction will dispose via onTerminated().
            if (count.get() == 0) {
              source.dispose();
            }
          }
        }
      };
}
|
java
|
/**
 * Fills {@code resultMap} with compiler options read from {@code config},
 * delegating to the three-argument overload with the boolean flag set to
 * {@code false}.
 *
 * @param config    configuration to read options from
 * @param resultMap destination map, populated by the delegate
 * @return the value returned by the three-argument overload
 */
public static int compilerOptionsMapFromConfig(IConfig config, Map<AccessibleObject, List<Object>> resultMap) {
    return compilerOptionsMapFromConfig(config, resultMap, false);
}
|
python
|
def extension_context(extension_name='cpu', **kw):
    """Get the context of the specified extension.

    All extension's module must provide `context(**kw)` function.

    Args:
        extension_name (str) : Module path relative to `nnabla_ext`.
        kw (dict) : Additional keyword arguments for context function in a
            extension module.

    Returns:
        :class:`nnabla.Context`: The current extension context.

    Note:
        Deprecated. Use :function:`nnabla.ext_utils.get_extension_context`
        instead.

    Example:

        .. code-block:: python

            ctx = extension_context('cuda.cudnn', device_id=0)
            nn.set_default_context(ctx)
    """
    # Warn about the deprecation, then forward to the replacement API.
    from nnabla import logger
    logger.warn(
        'Deprecated API. Use `nnabla.ext_util.get_extension_context(ext_name, **kw)`.')
    from nnabla.ext_utils import get_extension_context
    return get_extension_context(extension_name, **kw)
|
python
|
def decrypt(s, base64=False):
    """
    Symmetric decryption helper.

    :param s: ciphertext; base64-encoded when ``base64`` is true
    :param base64: when true, base64-decode ``s`` before decrypting
    :return: the decrypted plaintext
    """
    # Bug fix: the previous ``base64 and b64decode(s) or s`` idiom fell back
    # to the raw input whenever b64decode(s) returned a falsy value (e.g.
    # empty bytes), silently decrypting the still-encoded data.
    data = b64decode(s) if base64 else s
    return _cipher().decrypt(data)
|
python
|
def set_formatter(name, func):
    """Replace the formatter function used by the trace decorator to
    handle formatting a specific kind of argument. There are several
    kinds of arguments that trace discriminates between:
    * instance argument - the object bound to an instance method.
    * class argument - the class object bound to a class method.
    * positional arguments (named) - values bound to distinct names.
    * positional arguments (default) - named positional arguments with
      default values specified in the function declaration.
    * positional arguments (anonymous) - an arbitrary number of values
      that are all bound to the '*' variable.
    * keyword arguments - zero or more name-value pairs that are
      placed in a dictionary and bound to the double-star variable.
    \\var{name} - specifies the name of the formatter to be modified.
    * instance argument - "self", "instance" or "this"
    * class argument - "class"
    * named argument - "named", "param" or "parameter"
    * default argument - "default", "optional"
    * anonymous argument - "anonymous", "arbitrary" or "unnamed"
    * keyword argument - "keyword", "pair" or "pairs"
    \\var{func} - a function to format an argument.
    * For all but anonymous formatters this function must accept two
      arguments: the variable name and the value to which it is bound.
    * The anonymous formatter function is passed only one argument
      corresponding to an anonymous value.
    * if \\var{func} is "None" then the default formatter will be used.
    """
    # Declare all formatter slots up front; each branch assigns exactly one.
    global af_self, af_class, af_named, af_default, af_anonymous, af_keyword
    if name in ('self', 'instance', 'this'):
        af_self = func if func is not None else _formatter_self
    elif name == 'class':
        af_class = func if func is not None else _formatter_class
    elif name in ('named', 'param', 'parameter'):
        af_named = func if func is not None else _formatter_named
    elif name in ('default', 'optional'):
        af_default = func if func is not None else _formatter_defaults
    elif name in ('anonymous', 'arbitrary', 'unnamed'):
        af_anonymous = func if func is not None else chop
    elif name in ('keyword', 'pair', 'pairs'):
        af_keyword = func if func is not None else _formatter_named
    else:
        raise ValueError('unknown trace formatter %r' % name)
python
|
def sky2pix(self, skypos):
    """
    Convert a sky position into pixel coordinates.

    Parameters
    ----------
    skypos : (float,float)
        ra,dec position in degrees.

    Returns
    -------
    x,y : float
        Pixel coordinates.
    """
    # all_world2pix wants a list of positions; duplicate the single point.
    first = self.wcs.all_world2pix([skypos, skypos], 1)[0]
    return [float(first[0]), float(first[1])]
|
python
|
def submit(self, method, method_args=(), method_kwargs=None, done_callback=None, done_kwargs=None, loop=None):
    '''
    Submit ``method`` to the worker pool and register the resulting future;
    used to send async notifications.

    :param method: callable to run asynchronously
    :param method_args: positional arguments for ``method``
    :param method_kwargs: keyword arguments for ``method``
    :param done_callback: optional callable invoked as
        ``done_callback(future, loop, **done_kwargs)`` when the future completes
    :param done_kwargs: extra keyword arguments for ``done_callback``
    :param loop: event loop object forwarded to ``done_callback``
    :return: ``(request_id, future)``
    '''
    # Bug fix: use None sentinels instead of mutable {} defaults, which are
    # shared across calls and can leak state between submissions.
    if method_kwargs is None:
        method_kwargs = {}
    if done_kwargs is None:
        done_kwargs = {}
    _future = self.pool.submit(method, *method_args, **method_kwargs)
    self.current_id += 1
    if done_callback:
        # done_kwargs are bound at submission time (hardcoded kwargs)
        _future.add_done_callback(
            lambda _f: done_callback(_f, loop, **done_kwargs))
    self.request_pool[self.current_id] = _future
    return self.current_id, _future
|
java
|
/**
 * Creates a new commerce address restriction with the given primary key,
 * delegating to the underlying service.
 *
 * @param commerceAddressRestrictionId primary key for the new entity
 * @return the created commerce address restriction
 */
public static com.liferay.commerce.model.CommerceAddressRestriction createCommerceAddressRestriction(
    long commerceAddressRestrictionId) {
    return getService()
               .createCommerceAddressRestriction(commerceAddressRestrictionId);
}
|
python
|
def get_readme():
    """Get the long description from the README file next to this script."""
    here = path.abspath(path.dirname(__file__))
    readme_path = path.join(here, 'README.rst')
    with open(readme_path, encoding='utf-8') as readme_fd:
        return readme_fd.read()
|
python
|
def main():
    """
    NAME
        eqarea_magic.py
    DESCRIPTION
       makes equal area projections from declination/inclination data
    SYNTAX
        eqarea_magic.py [command line options]
    INPUT
       takes magic formatted sites, samples, specimens, or measurements
    OPTIONS
        -h prints help message and quits
        -f FILE: specify input magic format file from magic, default='sites.txt'
         supported types=[measurements, specimens, samples, sites]
        -fsp FILE: specify specimen file name, (required if you want to plot measurements by sample)
                default='specimens.txt'
        -fsa FILE: specify sample file name, (required if you want to plot specimens by site)
                default='samples.txt'
        -fsi FILE: specify site file name, default='sites.txt'
        -flo FILE: specify location file name, default='locations.txt'
        -obj OBJ: specify level of plot  [all, sit, sam, spc], default is all
        -crd [s,g,t]: specify coordinate system, [s]pecimen, [g]eographic, [t]ilt adjusted
                default is geographic, unspecified assumed geographic
        -fmt [svg,png,jpg] format for output plots
        -ell [F,K,B,Be,Bv] plot Fisher, Kent, Bingham, Bootstrap ellipses or Boostrap eigenvectors
        -c plot as colour contour
        -cm CM use color map CM [default is coolwarm]
        -sav save plot and quit quietly
        -no-tilt data are unoriented, allows plotting of measurement dec/inc
    NOTE
        all: entire file; sit: site; sam: sample; spc: specimen
    """
    # extract arguments from sys.argv
    # -h: print the (runtime-visible) docstring above and exit
    if '-h' in sys.argv:
        print(main.__doc__)
        sys.exit()
    dir_path = pmag.get_named_arg("-WD", default_val=".")
    input_dir_path = pmag.get_named_arg('-ID', '')
    if not input_dir_path:
        input_dir_path = dir_path
    in_file = pmag.get_named_arg("-f", default_val="sites.txt")
    in_file = pmag.resolve_file_name(in_file, input_dir_path)
    # when -ID is absent, infer the input directory from the resolved file path
    if "-ID" not in sys.argv:
        input_dir_path = os.path.split(in_file)[0]
    plot_by = pmag.get_named_arg("-obj", default_val="all").lower()
    spec_file = pmag.get_named_arg("-fsp", default_val="specimens.txt")
    samp_file = pmag.get_named_arg("-fsa", default_val="samples.txt")
    site_file = pmag.get_named_arg("-fsi", default_val="sites.txt")
    loc_file = pmag.get_named_arg("-flo", default_val="locations.txt")
    ignore_tilt = False
    if '-no-tilt' in sys.argv:
        ignore_tilt = True
    color_map = "coolwarm"
    # -c enables contouring; -cm optionally overrides the color map
    if '-c' in sys.argv:
        contour = True
        if '-cm' in sys.argv:
            ind = sys.argv.index('-cm')
            color_map = sys.argv[ind+1]
        else:
            color_map = 'coolwarm'
    else:
        contour = False
    # -sav: save figures without entering the interactive viewer
    interactive = True
    save_plots = False
    if '-sav' in sys.argv:
        save_plots = True
        interactive = False
    plot_ell = False
    if '-ell' in sys.argv:
        plot_ell = pmag.get_named_arg("-ell", "F")
    crd = pmag.get_named_arg("-crd", default_val="g")
    fmt = pmag.get_named_arg("-fmt", "svg")
    ipmag.eqarea_magic(in_file, dir_path, input_dir_path, spec_file, samp_file, site_file, loc_file,
                       plot_by, crd, ignore_tilt, save_plots, fmt, contour, color_map,
                       plot_ell, "all", interactive)
|
python
|
def get_runtime_vars(varset, experiment, token):
    '''get_runtime_vars will return the urlparsed string of one or more runtime
    variables, e.g. 'words=at the thing&color=red&globalname=globalvalue'
    (the leading '?' is added by the caller). Returns an empty string when
    the experiment defines no variables.

    Parameters
    ==========
    varset: the variable set, a dictionary lookup with exp_id, token, vars
    experiment: the exp_id to look up
    token: the participant id (or token) that must be defined.

    Returns
    =======
    url: the variable portion of the url to be passed to experiment
    '''
    url = ''
    if experiment in varset:
        exp_vars = varset[experiment]
        variables = dict()
        # participant-specific variables take precedence
        if token in exp_vars:
            for key, value in exp_vars[token].items():
                variables[key] = value
        # global ('*') variables never override participant ones
        if "*" in exp_vars:
            for key, value in exp_vars['*'].items():
                variables.setdefault(key, value)
        # Join together, the first ? is added by calling function
        url = '&'.join("%s=%s" % (key, value)
                       for key, value in variables.items())
    bot.debug('Parsed url: %s' % url)
    return url
|
java
|
/**
 * Upgrades an existing read lock to a write lock: installs a fresh write
 * entry for the same transaction and removes the reader entry.
 *
 * @param reader the read-lock entry to promote
 * @return false when the object is no longer in the lock table (e.g. the
 *         lock timed out), true when the upgrade succeeded
 */
public boolean upgradeLock(LockEntry reader)
{
    checkTimedOutLocks();

    String oidString = reader.getOidString();
    ObjectLocks objectLocks;
    synchronized (locktable)
    {
        objectLocks = (ObjectLocks) locktable.get(oidString);
    }
    if (objectLocks == null)
    {
        return false;
    }
    // promote: install a write entry carrying over the reader's identity
    LockEntry writer = new LockEntry(reader.getOidString(),
                                     reader.getTransactionId(),
                                     System.currentTimeMillis(),
                                     reader.getIsolationLevel(),
                                     LockEntry.LOCK_WRITE);
    objectLocks.setWriter(writer);
    // drop the now-superseded reader entry
    objectLocks.getReaders().remove(reader.getTransactionId());
    return true;
}
|
python
|
def gauss_fltr_opencv(dem, size=3, sigma=1):
    """OpenCV Gaussian filter.

    Note: NaN values still propagate through the filter window.
    """
    import cv2
    masked = malib.checkma(dem)
    # Fill masked cells with NaN so OpenCV sees plain float data.
    blurred = cv2.GaussianBlur(masked.filled(np.nan), (size, size), sigma)
    # Re-mask the NaNs produced by filling/propagation.
    result = np.ma.fix_invalid(blurred)
    result.set_fill_value(masked.fill_value)
    return result
|
java
|
/**
 * Runs {@code runnable} and then sleeps for the given number of
 * milliseconds, translating an incomplete sleep into a
 * {@link SleepDeprivedException}.
 *
 * @param milliseconds how long to sleep after running; must be positive
 * @param runnable     task to run before sleeping
 * @return true when the run-then-sleep sequence completed
 */
public static boolean runWithSleepThrowOnInterrupt(long milliseconds, Runnable runnable) {
    Assert.isTrue(milliseconds > 0, "Milliseconds [%d] must be greater than 0", milliseconds);
    runnable.run();
    boolean sleptFully = ThreadUtils.sleep(milliseconds, 0);
    if (!sleptFully) {
        throw new SleepDeprivedException(String.format("Failed to wait for [%d] millisecond(s)", milliseconds));
    }
    return true;
}
|
java
|
/**
 * Returns the parser configuration element registered for the given nature
 * with the highest numeric "priority" attribute. Elements without a
 * priority attribute count as priority 0; on ties the first match wins.
 *
 * @param natureId nature id the parser must be registered for
 * @param config   candidate configuration elements
 * @return the best-matching element, or null when none matches
 */
private IConfigurationElement getParserWithHeighestPriority(
        String natureId, IConfigurationElement[] config)
{
    IConfigurationElement selectedParser = null;
    int selectedParserPriority = 0;
    for (IConfigurationElement candidate : config)
    {
        if (!candidate.getAttribute("nature").equals(natureId))
        {
            continue;
        }
        int candidatePriority = 0;
        String priorityString = candidate.getAttribute("priority");
        if (priorityString != null)
        {
            candidatePriority = Integer.parseInt(priorityString);
        }
        // Bug fix: the original read the priority of the already-selected
        // parser (selectedParser.getAttribute(...)) instead of the current
        // candidate's, so a later, higher-priority parser was never chosen.
        if (selectedParser == null || candidatePriority > selectedParserPriority)
        {
            selectedParser = candidate;
            selectedParserPriority = candidatePriority;
        }
    }
    return selectedParser;
}
|
python
|
def get_display(display):
    """dname, protocol, host, dno, screen = get_display(display)

    Parse DISPLAY into its components by delegating to the display module
    registered for the current platform. If DISPLAY is None, use the
    default display. The return values are:

    DNAME    -- the full display name (string)
    PROTOCOL -- the protocol to use (None if automatic)
    HOST     -- the host name (string, possibly empty)
    DNO      -- display number (integer)
    SCREEN   -- default screen number (integer)
    """
    module_name = _display_mods.get(platform, _default_display_mod)
    display_module = _relative_import(module_name)
    return display_module.get_display(display)
|
python
|
def marvcli_comment_add(user, message, datasets):
    """Add comment as user for one or more datasets"""
    app = create_app()
    # Abort with an error message if the given user does not exist;
    # .one() raises NoResultFound when there is no matching row.
    try:
        db.session.query(User).filter(User.name == user).one()
    except NoResultFound:
        click.echo("ERROR: No such user '{}'".format(user), err=True)
        sys.exit(1)
    # Resolve the set-id arguments into database ids before commenting.
    dataset_ids = parse_setids(datasets, dbids=True)
    app.site.comment(user, message, dataset_ids)
|
java
|
/**
 * Human-readable description of this label: a single node's own description,
 * a "group of" summary for multiple nodes, the provisioning clouds when no
 * node currently carries the label, or an "invalid label" message otherwise.
 */
@Exported
public String getDescription() {
    Set<Node> nodes = getNodes();
    if (!nodes.isEmpty()) {
        return nodes.size() == 1
                ? nodes.iterator().next().getNodeDescription()
                : Messages.Label_GroupOf(toString(nodes));
    }
    // No nodes: describe where the label could be provisioned from.
    Set<Cloud> clouds = getClouds();
    if (clouds.isEmpty()) {
        return Messages.Label_InvalidLabel();
    }
    return Messages.Label_ProvisionedFrom(toString(clouds));
}
|
java
|
/**
 * Populates this clause from the given bean: the mapper translates the
 * object into path-to-value pairs, each of which is applied via set().
 *
 * @param obj the bean to read values from
 * @param mapper strategy that maps the bean onto entity paths
 * @return this clause, for chaining
 */
@SuppressWarnings("rawtypes")
@WithBridgeMethods(value = SQLInsertClause.class, castRequired = true)
public <T> C populate(T obj, Mapper<T> mapper) {
    Map<Path<?>, Object> pathValues = mapper.createMap(entity, obj);
    for (Map.Entry<Path<?>, Object> pathValue : pathValues.entrySet()) {
        set((Path) pathValue.getKey(), pathValue.getValue());
    }
    return (C) this;
}
|
java
|
/**
 * Builds a dynamic proxy for the given plugin interface that broadcasts
 * every invocation to all registered plugins implementing that interface.
 * Always returns {@code null} from proxied calls.
 */
private <T> T pluginProxy(final Class<T> type) {
    InvocationHandler broadcaster = new InvocationHandler() {
        @Override
        public Object invoke(Object target, Method method, Object[] args) throws Throwable {
            for (Object plugin : getPlugins()) {
                if (!type.isInstance(plugin)) {
                    continue;
                }
                try {
                    Utils.invoke(plugin, method, 0, args);
                } catch (Throwable t) {
                    boolean lifecycleMethod = method.getName().equals("startOfScenarioLifeCycle")
                            || method.getName().equals("endOfScenarioLifeCycle");
                    // IntelliJ has its own formatter which doesn't yet implement these methods.
                    if (!lifecycleMethod) {
                        throw t;
                    }
                }
            }
            return null;
        }
    };
    Object proxy = Proxy.newProxyInstance(classLoader, new Class<?>[]{type}, broadcaster);
    return type.cast(proxy);
}
|
java
|
/**
 * Releases the JDBC statement and connection, flushing pending rows first.
 * Close failures are logged and swallowed so both resources get a chance
 * to be released; the fields are always nulled to make close() idempotent.
 */
@Override
public void close() throws IOException {
    if (upload != null) {
        // Write out any buffered batch before closing the statement.
        flush();
        try {
            upload.close();
        } catch (SQLException statementError) {
            LOG.info("JDBC statement could not be closed: " + statementError.getMessage());
        } finally {
            upload = null;
        }
    }
    if (dbConn == null) {
        return;
    }
    try {
        dbConn.close();
    } catch (SQLException connectionError) {
        LOG.info("JDBC connection could not be closed: " + connectionError.getMessage());
    } finally {
        dbConn = null;
    }
}
|
python
|
def loads(self, value):
    """
    Deserialize value using ``msgpack.loads``.

    :param value: bytes
    :returns: obj
    """
    if value is None:
        return None
    # msgpack should return raw bytes unless the configured encoding is utf-8.
    use_raw = self.encoding != "utf-8"
    return msgpack.loads(value, raw=use_raw, use_list=self.use_list)
|
python
|
def is_siemens(dicom_input):
    """
    Use this function to detect if a dicom series is a siemens dataset

    :param dicom_input: directory with dicom files for 1 scan
    """
    # Inspect only the first slice's header; assumes all slices share
    # these identifying tags.
    header = dicom_input[0]
    # Without both tags we cannot decide; fall back to generic conversion.
    if 'Manufacturer' not in header or 'Modality' not in header:
        return False
    # Siemens dataset = an MR scan whose manufacturer mentions SIEMENS.
    return header.Modality.upper() == 'MR' and 'SIEMENS' in header.Manufacturer.upper()
|
python
|
def validate_django_compatible_with_python():
    """
    Verify Django 1.11 is present if Python 2.7 is active

    Installation of pinax-cli requires the correct version of Django for
    the active Python version. If the developer subsequently changes
    the Python version the installed Django may no longer be compatible.

    :raises click.BadArgumentUsage: when running Python 2.7 with Django >= 2
    """
    python_version = sys.version[:5]
    django_version = django.get_version()
    # BUG FIX 1: sys.version_info is a 5-field tuple (major, minor, micro,
    # releaselevel, serial) and can never equal (2, 7); compare only the
    # (major, minor) prefix.
    # BUG FIX 2: the exception was previously constructed but never raised,
    # so the incompatibility check silently did nothing.
    if sys.version_info[:2] == (2, 7) and django_version >= "2":
        raise click.BadArgumentUsage(
            "Please install Django v1.11 for Python {}, or switch to Python >= v3.4".format(python_version)
        )
|
java
|
/**
 * Spawns a reader thread that copies the process's merged output stream
 * into the supplied logger under the given name.
 *
 * @param target logger to receive the output lines
 * @param name label used to identify the stream in log output
 * @param process process whose (merged) stdout is consumed
 */
public static void copyMergedOutput (Logger target, String name, Process process)
{
    StreamReader reader = new StreamReader(target, name + " output", process.getInputStream());
    reader.start();
}
|
java
|
/**
 * Returns a previously leased transport to the idle pool, recording the
 * time it became idle. Rejects transports that are not poolable or that
 * were never leased from this pool.
 *
 * @param returning the transport being handed back
 */
private synchronized void returnTransport(Transport returning) {
    long idleSince = System.currentTimeMillis();
    // Unwrap lease decorators down to the pooled transport they delegate to.
    PooledTransport pooled;
    if (returning instanceof LeasedTransport) {
        pooled = ((LeasedTransport) returning).delegate;
    } else if (returning instanceof PooledTransport) {
        pooled = (PooledTransport) returning;
    } else {
        throw new EsHadoopIllegalStateException("Cannot return a non-poolable Transport to the pool");
    }
    // Only transports currently on lease may move back to the idle set.
    if (!leased.containsKey(pooled)) {
        throw new EsHadoopIllegalStateException("Cannot return a Transport object to a pool that was not sourced from the pool");
    }
    leased.remove(pooled);
    idle.put(pooled, idleSince);
}
|
java
|
/**
 * Finds the commerce countries matching the group/billing-allowed/active
 * filter, delegating to the master overload with cache retrieval enabled.
 */
@Override
public List<CommerceCountry> findByG_B_A(long groupId,
    boolean billingAllowed, boolean active, int start, int end,
    OrderByComparator<CommerceCountry> orderByComparator) {
    // This read-only variant always consults the finder cache.
    boolean retrieveFromCache = true;
    return findByG_B_A(groupId, billingAllowed, active, start, end,
        orderByComparator, retrieveFromCache);
}
|
python
|
def run_script(script_path, cwd='.'):
    """Execute a script from a working directory.

    :param script_path: Absolute path to the script to run.
    :param cwd: The directory to run the script from.
    """
    # Windows needs the shell to resolve script interpreters.
    use_shell = sys.platform.startswith('win')
    # Python hooks run under the current interpreter; everything else is
    # invoked directly after being marked executable.
    if script_path.endswith('.py'):
        command = [sys.executable, script_path]
    else:
        command = [script_path]
    utils.make_executable(script_path)
    try:
        exit_status = subprocess.Popen(
            command,
            shell=use_shell,
            cwd=cwd
        ).wait()
        if exit_status != EXIT_SUCCESS:
            raise FailedHookException(
                'Hook script failed (exit status: {})'.format(exit_status)
            )
    except OSError as os_error:
        # ENOEXEC typically means an empty script or a missing shebang.
        if os_error.errno == errno.ENOEXEC:
            raise FailedHookException(
                'Hook script failed, might be an '
                'empty file or missing a shebang'
            )
        raise FailedHookException(
            'Hook script failed (error: {})'.format(os_error)
        )
|
python
|
def get_paginated_response(self, data):
    """
    Annotate the response with pagination information
    """
    pagination = {
        'next': self.get_next_link(),
        'previous': self.get_previous_link(),
        'count': self.get_result_count(),
        'num_pages': self.get_num_pages(),
    }
    if not isinstance(data, dict):
        # Plain list payload: wrap it alongside the pagination block.
        return Response({
            'results': data,
            'pagination': pagination,
        })
    # Dict payloads must already follow the {'results': ...} convention.
    if 'results' not in data:
        raise TypeError(u'Malformed result dict')
    data['pagination'] = pagination
    return Response(data)
|
java
|
/**
 * Reads the CSV poll interval unit from the properties, falling back to
 * the default unit when the property is unset.
 *
 * @return the configured {@link TimeUnit}
 * @throws IllegalArgumentException if the configured value does not name a TimeUnit constant
 */
private TimeUnit getPollUnit() {
    String unit = mProperties.getProperty(CSV_KEY_UNIT, CSV_DEFAULT_UNIT);
    // BUG FIX: use a locale-insensitive upper-casing. TimeUnit constant
    // names are ASCII, and the default locale (e.g. Turkish, where
    // 'i' upper-cases to 'İ') would make valueOf() throw for valid input
    // such as "milliseconds".
    return TimeUnit.valueOf(unit.toUpperCase(java.util.Locale.ROOT));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.