language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def get_single_instance(sql, class_type, *args, **kwargs):
    """Fetch one DB record and materialize it as a ``class_type`` instance.

    @param sql: Sql statement to execute
    @param class_type: The type of class to instantiate and populate with DB record
    @return: Return an instance with attributes set to values from DB
    @raise NoRecordsFoundException: if the query yields no record
    """
    record = CoyoteDb.get_single_record(sql, *args, **kwargs)
    try:
        return CoyoteDb.get_object_from_dictionary_representation(
            dictionary=record, class_type=class_type)
    except AttributeError:
        # A missing record surfaces as AttributeError inside the mapper.
        message = 'No records found for {class_type} with sql run on {host}: \n {sql}'.format(
            sql=sql,
            host=DatabaseConfig().get('mysql_host'),
            class_type=class_type,
        )
        raise NoRecordsFoundException(message)
|
python
|
def bootstrap_statistics(series, statistic, n_samples=1000,
                         confidence_interval=0.95, random_state=None):
    """Bootstrap a two-sided confidence interval for ``statistic`` of ``series``.

    Default parameters taken from R's Hmisc smean.cl.boot.

    Parameters
    ----------
    series : pd.Series
        Data to resample (with replacement).
    statistic : callable
        Vectorized statistic such as ``np.mean``; must accept ``axis=1`` when
        applied to the resample matrix and a plain Series otherwise.
    n_samples : int, optional
        Number of bootstrap resamples (default 1000).
    confidence_interval : float, optional
        Two-sided coverage probability in (0, 1] (default 0.95).
    random_state : np.random.RandomState, optional
        Source of randomness; defaults to the global ``np.random``.

    Returns
    -------
    pd.DataFrame
        Single-row frame with columns 'ymin', 'ymax' and 'y' (the point
        estimate on the original series).
    """
    if random_state is None:
        random_state = np.random
    alpha = 1 - confidence_interval
    size = (n_samples, len(series))
    inds = random_state.randint(0, len(series), size=size)
    samples = series.values[inds]
    means = np.sort(statistic(samples, axis=1))
    lo_idx = int((alpha / 2) * n_samples)
    # BUGFIX: the upper index int((1 - alpha/2) * n_samples) equals n_samples
    # when alpha == 0 (confidence_interval=1.0), which indexed past the end
    # of `means`. Clamp it to the last element.
    hi_idx = min(int((1 - alpha / 2) * n_samples), n_samples - 1)
    return pd.DataFrame({'ymin': means[lo_idx],
                         'ymax': means[hi_idx],
                         'y': [statistic(series)]})
|
python
|
def _collapse_postconditions(base_postconditions: List[Contract], postconditions: List[Contract]) -> List[Contract]:
    """
    Merge the postconditions inherited from base classes with the function's own.

    :param base_postconditions: postconditions collected from the base classes
    :param postconditions: postconditions of the function (before the collapse)
    :return: collapsed sequence of postconditions
    """
    collapsed = list(base_postconditions)
    collapsed.extend(postconditions)
    return collapsed
|
python
|
def no_operation(self, onerror = None):
    """Do nothing but send a request to the server.

    Useful as a lightweight round trip / keep-alive: the server performs no
    work for a NoOperation request.

    @param onerror: optional error handler forwarded to the request object.
    """
    request.NoOperation(display = self.display,
                        onerror = onerror)
|
java
|
/**
 * Resolve the {@link StopwordsItem} targeted by the given form.
 *
 * CREATE mode returns a fresh, empty item; EDIT mode looks the item up by
 * dictionary id and item id. Any other mode yields an empty Optional.
 */
private static OptionalEntity<StopwordsItem> getEntity(final CreateForm form) {
    switch (form.crudMode) {
    case CrudMode.CREATE:
        final StopwordsItem entity = new StopwordsItem(0, StringUtil.EMPTY);
        return OptionalEntity.of(entity);
    case CrudMode.EDIT:
        // Only an EditForm carries the item id required for the lookup.
        if (form instanceof EditForm) {
            return ComponentUtil.getComponent(StopwordsService.class).getStopwordsItem(form.dictId, ((EditForm) form).id);
        }
        break;
    default:
        break;
    }
    return OptionalEntity.empty();
}
|
python
|
def start(self):
    """Run the alarm clock's main loop.

    Renders a small terminal UI, then polls the clock once per second until
    the current day/time matches the configured alarm time, at which point
    the alarm sound is played via the external ``mplayer`` binary (up to
    ``alarm_attempts`` times, default 5). ``Ctrl+C`` cancels the alarm.

    The Application requires Mplayer to play the alarm sound. Please read
    which sounds are supported in page:
    http://web.njit.edu/all_topics/Prog_Lang_Docs/html/mplayer/formats.html
    """
    self.errors()
    try:
        # Resolve the weekday name for the configured alarm day.
        # NOTE(review): on ValueError alarm_day_name stays unbound; the later
        # reference would raise NameError — presumably self.errors() already
        # rejects invalid days; confirm.
        alarm_day_name = calendar.day_name[calendar.weekday(
            self.now.year, self.now.month, int(self.alarm_day))]
    except ValueError:
        pass
    self.alarm_time.insert(0, self.alarm_day)
    self.alarm_time = ":".join(self.alarm_time)  # reset begin format
    if self.RUN_ALARM:
        os.system("clear")
        # Static banner; the space padding keeps the right border aligned.
        print("+" + "=" * 78 + "+")
        print("|" + " " * 30 + "CLI Alarm Clock" + " " * 33 + "|")
        print("+" + "=" * 78 + "+")
        print("| Alarm set at : %s %s" % (
            alarm_day_name, self.alarm_time[3:]) + " " * (
            62-len(alarm_day_name + self.alarm_time[2:])) + "|")
        print("| Sound file : %s" % self.song + " " * (64-len(
            self.song)) + "|")
        print("| Time : " + " " * 70 + "|")
        print("+" + "=" * 78 + "+")
        print("Press 'Ctrl + c' to cancel alarm ...")
        try:
            while self.RUN_ALARM:
                # Clock string is "dd:HH:MM:SS"; the leading "dd:" is
                # stripped for display and the trailing ":SS" for comparison.
                start_time = time.strftime("%d:%H:%M:%S")
                self.position(6, 10, self.color(
                    "green") + start_time[3:] + self.color("endc"))
                time.sleep(1)
                begin = start_time[:-3]
                # if start_time[0] == '0':
                #     begin = start_time[1:-3]
                if begin == self.alarm_time:
                    self.position(6, 10, self.color(
                        "red") + start_time[3:-3] + self.color(
                        "endc") + " Wake Up !")
                    for wake in self.wakeup:
                        print(wake)
                    print("\nPress 'SPACE' to pause alarm ...\n")
                    # Default to 5 playback attempts when none configured.
                    if not self.alarm_attempts:
                        self.alarm_attempts = 5
                    else:
                        self.alarm_attempts = int(self.alarm_attempts)
                    for att in range(0, self.alarm_attempts):
                        print("Attempt %d\n" % (att + 1))
                        play = os.system("mplayer %s '%s'" % (
                            self.mplayer_options, self.song))
                        # catch if mplayer not installed
                        # if play return 0 all good
                        # 256=KeyboardInterupt
                        if play != 0 and play != 256:
                            MplayerNotInstalledException()
                            break
                    self.RUN_ALARM = False
        except KeyboardInterrupt:
            print("\nAlarm canceled!")
            self.RUN_ALARM = False
|
python
|
def access_token(self, code):
    '''Exchange a temporary OAuth2 code for an access token.
    Param: code -> temporary OAuth2 code from a Pushed callback
    Returns access token as string
    '''
    access_uri = "/".join([BASE_URL, API_VERSION, ACCESS_TOKEN])
    # RFC non-compliant response prevents use of standard OAuth modules
    success, response = self._request(access_uri, {"code": code})
    if not success:
        raise PushedAPIError(
            response['error']['type'],
            response['error']['message']
        )
    return response['response']['data']['access_token']
|
python
|
def list_formats(format_type, backend=None):
    """
    Returns list of supported formats for a particular
    backend (defaulting to the currently active one).
    """
    if backend is None:
        backend = Store.current_backend
        mode = Store.renderers[backend].mode if backend in Store.renderers else None
    else:
        # An explicit backend may carry a ":mode" suffix.
        parts = backend.split(':')
        if len(parts) == 2:
            backend, mode = parts
        else:
            backend, mode = parts[0], 'default'
    if backend not in Store.renderers:
        return []
    return Store.renderers[backend].mode_formats[format_type][mode]
|
python
|
def _handle_init_list(self, node, scope, ctxt, stream):
    """Handle InitList nodes (e.g. when initializing a struct).

    :param node: the InitList AST node whose children are the initializers
    :param scope: the current scope the initializers are evaluated in
    :param ctxt: the current parse context
    :param stream: the input data stream
    :returns: list of values produced by handling each child node, in
        declaration order
    """
    self._dlog("handling init list")
    # node.children() yields (name, child) pairs; only the child matters.
    return [
        self._handle_node(init_child, scope, ctxt, stream)
        for _, init_child in node.children()
    ]
|
java
|
/**
 * Execute a single store operation, translating the various asynchronous
 * and runtime failure modes into this API's exception types.
 *
 * @param op the operation to run (registered with its transaction first)
 * @return the operation result; {@code null} if the thread was interrupted
 * @throws TimeoutException if the operation timed out, either reported
 *         synchronously or while waiting on a pending result
 */
private Object doOp(Op op) throws TimeoutException {
    try {
        // Register the op with its transaction (if any) before running it.
        if (op.txn != null)
            op.txn.add(op);
        Object result = runOp(op);
        if (result == PENDING)
            // Asynchronous completion: block until the result arrives.
            return op.getResult(timeout, TimeUnit.MILLISECONDS);
        else if (result == null && op.isCancelled())
            throw new CancellationException();
        else
            return result;
    } catch (java.util.concurrent.TimeoutException e) {
        // Wrap the JDK timeout in this API's own TimeoutException type.
        throw new TimeoutException(e);
    } catch (InterruptedException e) {
        return null;
    } catch (ExecutionException e) {
        // Unwrap the async failure and rethrow, unchecked where possible.
        Throwable ex = e.getCause();
        if (ex instanceof TimeoutException)
            throw (TimeoutException) ex;
        Throwables.propagateIfPossible(ex);
        throw Throwables.propagate(ex);
    }
}
|
java
|
/**
 * Persist the dataset URN into the job state so a later stage of the
 * compaction can recover which dataset this state belongs to.
 */
public void save (FileSystemDataset dataset, State state) {
    state.setProp(SERIALIZE_COMPACTION_FILE_PATH_NAME, dataset.datasetURN());
}
|
java
|
/**
 * Render a parsed pattern back into a date-format string. Literal text
 * characters that collide with reserved pattern letters are wrapped in
 * single quotes; Field nodes are expanded to {@code width} repetitions of
 * their pattern character.
 */
public static String render(List<Node> nodes) {
    // Reserved date-format pattern letters (same set as the original switch).
    final String reserved = "GyYuUrQqMLlwWdDFgEecabBhHKkjJCmsSAzZOvVXx";
    StringBuilder buf = new StringBuilder();
    for (Node node : nodes) {
        if (node instanceof Text) {
            String text = ((Text) node).text;
            boolean inquote = false;
            for (int i = 0; i < text.length(); i++) {
                char ch = text.charAt(i);
                if (reserved.indexOf(ch) >= 0) {
                    if (!inquote) {
                        buf.append('\'');
                        inquote = true;   // BUGFIX: flag was never set, so quotes were unbalanced
                    }
                    buf.append(ch);
                } else {
                    if (inquote) {
                        buf.append('\'');
                        inquote = false;  // BUGFIX: close the quoted run
                    }
                    buf.append(ch);
                }
            }
            if (inquote) {
                // BUGFIX: terminate a quoted run that reaches end of text.
                buf.append('\'');
            }
        } else if (node instanceof Field) {
            Field field = (Field) node;
            for (int i = 0; i < field.width; i++) {
                buf.append(field.ch);
            }
        }
    }
    return buf.toString();
}
|
java
|
/**
 * Ensure we hold a valid access object. When no account exists yet,
 * creating one performs authentication as a side effect; otherwise the
 * existing account is authenticated and the preferred region applied,
 * when configured.
 */
public void authenticate() {
    if (mAccount == null) {
        // Create account also performs authentication.
        createAccount();
    } else {
        mAccess = mAccount.authenticate();
        if (mRegion != null) {
            mAccess.setPreferredRegion(mRegion);
        }
    }
}
|
java
|
/**
 * Decide whether a Hive partition should be validated: its data path must
 * not contain any ignored token, and its create time must fall strictly
 * inside the (maxLookBackTime, skipRecentThanTime) window.
 */
private boolean shouldValidate(Partition partition) {
    // Skip partitions whose data location matches an ignore-list token.
    for (String pathToken : this.ignoreDataPathIdentifierList) {
        if (partition.getDataLocation().toString().toLowerCase().contains(pathToken.toLowerCase())) {
            log.info("Skipping partition " + partition.getCompleteName() + " containing invalid token " + pathToken
                .toLowerCase());
            return false;
        }
    }
    try {
        long createTime = getPartitionCreateTime(partition.getName());
        boolean withinTimeWindow = new DateTime(createTime).isAfter(this.maxLookBackTime) && new DateTime(createTime)
            .isBefore(this.skipRecentThanTime);
        if (!withinTimeWindow) {
            log.info("Skipping partition " + partition.getCompleteName() + " as create time " + new DateTime(createTime)
                .toString() + " is not within validation time window ");
        } else {
            log.info("Validating partition " + partition.getCompleteName());
            return withinTimeWindow;
        }
    } catch (ParseException e) {
        // An unparsable partition name is unexpected; rethrow unchecked.
        Throwables.propagate(e);
    }
    return false;
}
|
java
|
/**
 * Marshall the given {@code SamplingRule} field-by-field into the protocol
 * representation using the pre-declared marshalling bindings.
 *
 * @throws SdkClientException if {@code samplingRule} is null or any field
 *         fails to marshall
 */
public void marshall(SamplingRule samplingRule, ProtocolMarshaller protocolMarshaller) {
    if (samplingRule == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(samplingRule.getRuleName(), RULENAME_BINDING);
        protocolMarshaller.marshall(samplingRule.getRuleARN(), RULEARN_BINDING);
        protocolMarshaller.marshall(samplingRule.getResourceARN(), RESOURCEARN_BINDING);
        protocolMarshaller.marshall(samplingRule.getPriority(), PRIORITY_BINDING);
        protocolMarshaller.marshall(samplingRule.getFixedRate(), FIXEDRATE_BINDING);
        protocolMarshaller.marshall(samplingRule.getReservoirSize(), RESERVOIRSIZE_BINDING);
        protocolMarshaller.marshall(samplingRule.getServiceName(), SERVICENAME_BINDING);
        protocolMarshaller.marshall(samplingRule.getServiceType(), SERVICETYPE_BINDING);
        protocolMarshaller.marshall(samplingRule.getHost(), HOST_BINDING);
        protocolMarshaller.marshall(samplingRule.getHTTPMethod(), HTTPMETHOD_BINDING);
        protocolMarshaller.marshall(samplingRule.getURLPath(), URLPATH_BINDING);
        protocolMarshaller.marshall(samplingRule.getVersion(), VERSION_BINDING);
        protocolMarshaller.marshall(samplingRule.getAttributes(), ATTRIBUTES_BINDING);
    } catch (Exception e) {
        // Collapse every marshalling failure into a single client exception.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def format_doc_text(text):
    """
    Wrap documentation text for display in a command-line environment.

    Each line of the cleaned text (see ``inspect.cleandoc``) is wrapped
    independently to 99 characters with a 4-space indent, which preserves
    manually added line breaks.

    :param text: The text to format, it is cleaned by inspect.cleandoc.
    :return: The formatted doc text.
    """
    indent = '    '
    wrapped_lines = []
    for raw_line in inspect.cleandoc(text).splitlines():
        wrapped_lines.append(
            textwrap.fill(raw_line, width=99,
                          initial_indent=indent, subsequent_indent=indent))
    return '\n'.join(wrapped_lines)
|
java
|
/**
 * Ask Coinbase to re-send the request identified by the given transaction
 * id, signing the call with this exchange's API credentials.
 *
 * @param transactionId id of the transaction whose request to resend
 * @return the parsed Coinbase response (after generic error handling)
 * @throws IOException on network or protocol failure
 */
public CoinbaseBaseResponse resendCoinbaseRequest(String transactionId) throws IOException {
    final CoinbaseBaseResponse response =
        coinbase.resendRequest(
            transactionId,
            exchange.getExchangeSpecification().getApiKey(),
            signatureCreator,
            exchange.getNonceFactory());
    return handleResponse(response);
}
|
python
|
def _get_audit_defaults(option=None):
    '''
    Loads audit.csv defaults into a dict in __context__ called
    'lgpo.audit_defaults'. The dictionary includes fieldnames and all
    configurable policies as keys. The values are used to create/modify the
    ``audit.csv`` file. The first entry is `fieldnames` used to create the
    header for the csv file. The rest of the entries are the audit policy names.
    Sample data follows:
    {
        'fieldnames': ['Machine Name',
                       'Policy Target',
                       'Subcategory',
                       'Subcategory GUID',
                       'Inclusion Setting',
                       'Exclusion Setting',
                       'Setting Value'],
        'Audit Sensitive Privilege Use': {'Auditpol Name': 'Sensitive Privilege Use',
                                          'Exclusion Setting': '',
                                          'Inclusion Setting': 'No Auditing',
                                          'Machine Name': 'WIN-8FGT3E045SE',
                                          'Policy Target': 'System',
                                          'Setting Value': '0',
                                          'Subcategory': u'Audit Sensitive Privilege Use',
                                          'Subcategory GUID': '{0CCE9228-69AE-11D9-BED3-505054503030}'},
        'Audit Special Logon': {'Auditpol Name': 'Special Logon',
                                'Exclusion Setting': '',
                                'Inclusion Setting': 'No Auditing',
                                'Machine Name': 'WIN-8FGT3E045SE',
                                'Policy Target': 'System',
                                'Setting Value': '0',
                                'Subcategory': u'Audit Special Logon',
                                'Subcategory GUID': '{0CCE921B-69AE-11D9-BED3-505054503030}'},
        'Audit System Integrity': {'Auditpol Name': 'System Integrity',
                                   'Exclusion Setting': '',
                                   'Inclusion Setting': 'No Auditing',
                                   'Machine Name': 'WIN-8FGT3E045SE',
                                   'Policy Target': 'System',
                                   'Setting Value': '0',
                                   'Subcategory': u'Audit System Integrity',
                                   'Subcategory GUID': '{0CCE9212-69AE-11D9-BED3-505054503030}'},
        ...
    }

    .. note::
        `Auditpol Name` designates the value to use when setting the value with
        the auditpol command

    Args:
        option (str): The item from the dictionary to return. If ``None`` the
            entire dictionary is returned. Default is ``None``

    Returns:
        dict: If ``None`` or one of the audit settings is passed
        list: If ``fieldnames`` is passed
    '''
    # Results are memoized in __context__ so the auditpol dump is only
    # collected once per minion process.
    if 'lgpo.audit_defaults' not in __context__:
        # Get available setting names and GUIDs
        # This is used to get the fieldnames and GUIDs for individual policies
        log.debug('Loading auditpol defaults into __context__')
        dump = __utils__['auditpol.get_auditpol_dump']()
        reader = csv.DictReader(dump)
        audit_defaults = {'fieldnames': reader.fieldnames}
        for row in reader:
            # Blank the machine name so generated audit.csv files are
            # machine-independent.
            row['Machine Name'] = ''
            row['Auditpol Name'] = row['Subcategory']
            # Special handling for snowflake scenarios where the audit.csv names
            # don't match the auditpol names
            if row['Subcategory'] == 'Central Policy Staging':
                row['Subcategory'] = 'Audit Central Access Policy Staging'
            elif row['Subcategory'] == 'Plug and Play Events':
                row['Subcategory'] = 'Audit PNP Activity'
            elif row['Subcategory'] == 'Token Right Adjusted Events':
                row['Subcategory'] = 'Audit Token Right Adjusted'
            else:
                row['Subcategory'] = 'Audit {0}'.format(row['Subcategory'])
            audit_defaults[row['Subcategory']] = row
        __context__['lgpo.audit_defaults'] = audit_defaults
    if option:
        return __context__['lgpo.audit_defaults'][option]
    else:
        return __context__['lgpo.audit_defaults']
|
java
|
/**
 * GWT custom field serializer hook: populate {@code instance} from the
 * serialization stream by delegating to the shared deserialize helper.
 */
@Override
public void deserializeInstance(SerializationStreamReader streamReader, OWLDataPropertyDomainAxiomImpl instance) throws SerializationException {
    deserialize(streamReader, instance);
}
|
python
|
def choice(*es):
    """
    Create a PEG function to match an ordered choice.
    """
    msg = 'Expected one of: {}'.format(', '.join(map(repr, es)))
    def match_choice(s, grm=None, pos=0):
        # Try each alternative in order; the first success wins.
        failures = []
        for expr in es:
            try:
                return expr(s, grm, pos)
            except PegreError as exc:
                failures.append((exc.message, exc.position))
        if failures:
            raise PegreChoiceError(failures, pos)
    return match_choice
|
python
|
def _create_figure(kwargs: Mapping[str, Any]) -> dict:
    """Create basic dictionary object with figure properties.

    NOTE(review): ``kwargs.pop`` mutates the argument, so callers must pass
    a mutable mapping (e.g. ``dict``) despite the ``Mapping`` annotation;
    consider annotating as ``MutableMapping`` — confirm against callers.
    The popped keys fall back to the module-level DEFAULT_* constants.
    """
    return {
        "$schema": "https://vega.github.io/schema/vega/v3.json",
        "width": kwargs.pop("width", DEFAULT_WIDTH),
        "height": kwargs.pop("height", DEFAULT_HEIGHT),
        "padding": kwargs.pop("padding", DEFAULT_PADDING)
    }
|
java
|
/**
 * Upper-case the first character of {@code name} using the root locale,
 * leaving the remainder untouched. Empty input is returned unchanged.
 */
public static String toFirstUpper(String name) {
    if (isEmpty(name)) {
        return name;
    }
    char first = name.toUpperCase(Locale.ROOT).charAt(0);
    return first + name.substring(1);
}
|
python
|
def vmnet_unix(args, vmnet_range_start, vmnet_range_end):
    """
    Implementation on Linux and Mac OS X.

    Depending on ``args``, either lists the configured vmnet adapters,
    removes all of them except vmnet1/vmnet8, or allocates new adapters in
    [vmnet_range_start, vmnet_range_end], each with a free /24 subnet out
    of 172.16.0.0/16. The VMware networking file is rewritten at the end
    (except in list mode).
    """
    if not os.path.exists(VMWARE_NETWORKING_FILE):
        raise SystemExit("VMware Player, Workstation or Fusion is not installed")
    if not os.access(VMWARE_NETWORKING_FILE, os.W_OK):
        raise SystemExit("You must run this script as root")
    version, pairs, allocated_subnets = parse_networking_file()
    if args.list and not sys.platform.startswith("win"):
        # List mode: print every vmnet that has a virtual adapter, then stop.
        for vmnet_number in range(1, 256):
            vmnet_name = "VNET_{}_VIRTUAL_ADAPTER".format(vmnet_number)
            if vmnet_name in pairs:
                print("vmnet{}".format(vmnet_number))
        return
    if args.clean:
        # clean all vmnets but vmnet1 and vmnet8
        for key in pairs.copy().keys():
            if key.startswith("VNET_1_") or key.startswith("VNET_8_"):
                continue
            del pairs[key]
    else:
        for vmnet_number in range(vmnet_range_start, vmnet_range_end + 1):
            vmnet_name = "VNET_{}_VIRTUAL_ADAPTER".format(vmnet_number)
            if vmnet_name in pairs:
                # Adapter already configured; leave it untouched.
                continue
            # Pick the first /24 inside 172.16.0.0/16 not already in use.
            allocated_subnet = None
            for subnet in ipaddress.ip_network("172.16.0.0/16").subnets(prefixlen_diff=8):
                subnet = str(subnet.network_address)
                if subnet not in allocated_subnets:
                    allocated_subnet = subnet
                    allocated_subnets.append(allocated_subnet)
                    break
            if allocated_subnet is None:
                print("Couldn't allocate a subnet for vmnet{}".format(vmnet_number))
                continue
            print("Adding vmnet{}...".format(vmnet_number))
            pairs["VNET_{}_HOSTONLY_NETMASK".format(vmnet_number)] = "255.255.255.0"
            pairs["VNET_{}_HOSTONLY_SUBNET".format(vmnet_number)] = allocated_subnet
            pairs["VNET_{}_VIRTUAL_ADAPTER".format(vmnet_number)] = "yes"
    write_networking_file(version, pairs)
|
java
|
/**
 * Load a persisted unique long-long index with explicit key/value sizes,
 * delegating to the sized unique-index loader.
 */
public static LongLongIndex.LongLongUIndex loadUniqueIndex(PAGE_TYPE type,
        IOResourceProvider storage, int pageId, int keySize, int valSize) {
    return LOAD_UNIQUE_INDEX_SIZED.load(type, storage, pageId, keySize, valSize);
}
|
java
|
/**
 * Build a {@link MetricFilter} from the configured include/exclude name
 * lists. Name matching uses regex, substring or the default strategy
 * depending on the configuration flags.
 */
@JsonIgnore
public MetricFilter getFilter() {
    final StringMatchingStrategy stringMatchingStrategy = getUseRegexFilters() ?
        REGEX_STRING_MATCHING_STRATEGY : (getUseSubstringMatching() ? SUBSTRING_MATCHING_STRATEGY : DEFAULT_STRING_MATCHING_STRATEGY);
    return (name, metric) -> {
        // Include the metric if its name is not excluded and its name is included
        // Where, by default, with no includes setting, all names are included.
        return !stringMatchingStrategy.containsMatch(getExcludes(), name) &&
            (getIncludes().isEmpty() || stringMatchingStrategy.containsMatch(getIncludes(), name));
    };
}
|
java
|
/**
 * Fetch the properties of a single attribute from the database, keeping
 * only the first value of each (possibly multi-valued) property; properties
 * with no values map to the empty string.
 */
private Map<String, String> getAttributePropertiesFromDBSingle(final String attributeName) throws DevFailed {
    xlogger.entry(attributeName);
    final Map<String, String> result = new CaseInsensitiveMap<String>();
    final Map<String, String[]> prop = DatabaseFactory.getDatabase().getAttributeProperties(deviceName,
            attributeName);
    for (final Entry<String, String[]> entry : prop.entrySet()) {
        final String[] values = entry.getValue();
        result.put(entry.getKey(), values.length > 0 ? values[0] : "");
    }
    xlogger.exit();
    return result;
}
|
java
|
/**
 * Compare two values for equality when both are primitive arrays of the
 * same component type; returns {@code false} for any other combination.
 *
 * Integral, boolean and char arrays delegate to
 * {@link java.util.Arrays#equals}, which has identical semantics to the
 * original element-wise loops. Float and double arrays keep the manual
 * {@code ==} comparison on purpose: {@code Arrays.equals} compares bit
 * patterns (NaN equals NaN, 0.0 differs from -0.0), which would change
 * this method's behavior.
 */
protected boolean isEqualPrimitiveArray(Object value1, Object value2) {
    if (value1 instanceof int[] && value2 instanceof int[]) {
        return java.util.Arrays.equals((int[]) value1, (int[]) value2);
    }
    if (value1 instanceof long[] && value2 instanceof long[]) {
        return java.util.Arrays.equals((long[]) value1, (long[]) value2);
    }
    if (value1 instanceof boolean[] && value2 instanceof boolean[]) {
        return java.util.Arrays.equals((boolean[]) value1, (boolean[]) value2);
    }
    if (value1 instanceof char[] && value2 instanceof char[]) {
        return java.util.Arrays.equals((char[]) value1, (char[]) value2);
    }
    if (value1 instanceof short[] && value2 instanceof short[]) {
        return java.util.Arrays.equals((short[]) value1, (short[]) value2);
    }
    if (value1 instanceof byte[] && value2 instanceof byte[]) {
        return java.util.Arrays.equals((byte[]) value1, (byte[]) value2);
    }
    if (value1 instanceof double[] && value2 instanceof double[]) {
        double[] array1 = (double[]) value1;
        double[] array2 = (double[]) value2;
        if (array1.length != array2.length) {
            return false;
        }
        for (int i = 0; i < array1.length; i++) {
            if (array1[i] != array2[i]) { // preserves ==/NaN semantics
                return false;
            }
        }
        return true;
    }
    if (value1 instanceof float[] && value2 instanceof float[]) {
        float[] array1 = (float[]) value1;
        float[] array2 = (float[]) value2;
        if (array1.length != array2.length) {
            return false;
        }
        for (int i = 0; i < array1.length; i++) {
            if (array1[i] != array2[i]) { // preserves ==/NaN semantics
                return false;
            }
        }
        return true;
    }
    return false;
}
|
python
|
def name(self, name: str):
    """ Name Setter

    Rebuilds ``self.pathName`` by joining the instance's base ``path`` with
    the newly supplied name.

    @param name: New name string.
    @type name: String
    """
    self.pathName = os.path.join(self.path, name)
|
java
|
/**
 * Insert or replace the mapping for {@code key}, returning the previous
 * value (or null). A null key is stored in a dedicated slot outside the
 * table. The open-addressed backing arrays are doubled once the table
 * becomes half full, rehashing every live entry.
 */
@Override
public V put(K key, V value)
{
    if (key == null) {
        // Null keys bypass the hash table entirely.
        V item = _nullValue;
        _nullValue = value;
        return item;
    }
    // forced resizing if 1/2 full
    if (_values.length <= 2 * _size) {
        K []oldKeys = _keys;
        V []oldValues = _values;
        _keys = (K []) new Object[2 * oldKeys.length];
        _values = (V []) new Object[2 * oldValues.length];
        // Table length stays a power of two, so the mask indexes correctly.
        _mask = _values.length - 1;
        _size = 0;
        // Re-insert every live entry into the enlarged table.
        for (int i = oldValues.length - 1; i >= 0; i--) {
            K oldKey = oldKeys[i];
            V oldValue = oldValues[i];
            if (oldValue != null)
                putImpl(oldKey, oldValue);
        }
    }
    V item = putImpl(key, value);
    return item;
}
|
python
|
def _GetFileSystemCacheIdentifier(self, path_spec):
    """Determines the file system cache identifier for the path specification.

    Args:
      path_spec (PathSpec): path specification.

    Returns:
      str: identifier of the VFS object.
    """
    # The identifier is the parent comparable string (empty when there is no
    # parent) followed by the type indicator.
    parent_comparable = getattr(path_spec.parent, 'comparable', '')
    return '{0:s}type: {1:s}'.format(parent_comparable, path_spec.type_indicator)
|
java
|
/**
 * Persist the favorites list via the clipboard service and restore the
 * default (non-editing) button state. The edit button is disabled when the
 * list is empty.
 */
public void saveFavorites() {
    m_clipboard.saveFavorites();
    if (m_listPanel.getWidgetCount() < 1) {
        m_editButton.disable(Messages.get().key(Messages.GUI_TAB_FAVORITES_NO_ELEMENTS_0));
    }
    m_buttonEditingPanel.setVisible(false);
    m_buttonUsePanel.setVisible(true);
}
|
python
|
def load(filename):
    """Load yaml file with specific include loader.

    :param filename: path of the YAML file to parse.
    :return: the parsed document.
    :raises RuntimeError: if ``filename`` does not exist.
    """
    if os.path.isfile(filename):
        with open(filename) as handle:
            return yaml_load(handle, Loader=Loader)  # nosec
    raise RuntimeError("File %s doesn't exist!" % filename)
|
python
|
def create(model_config, model, vec_env, algo, env_roller, parallel_envs, number_of_steps,
           batch_size=256, experience_replay=1, stochastic_experience_replay=False, shuffle_transitions=True):
    """ Vel factory function """
    # Bundle the rollout/replay knobs into a settings object, then hand
    # everything to the reinforcer factory.
    return OnPolicyIterationReinforcerFactory(
        settings=OnPolicyIterationReinforcerSettings(
            number_of_steps=number_of_steps,
            batch_size=batch_size,
            experience_replay=experience_replay,
            stochastic_experience_replay=stochastic_experience_replay,
            shuffle_transitions=shuffle_transitions
        ),
        parallel_envs=parallel_envs,
        env_factory=vec_env,
        model_factory=model,
        algo=algo,
        env_roller_factory=env_roller,
        seed=model_config.seed
    )
|
python
|
def minimize(self, theta_init, max_iter=50, callback=None, disp=0, tau=(10., 2., 2.), tol=1e-3):
    """
    Minimize a list of objectives using a proximal consensus (ADMM) algorithm

    Parameters
    ----------
    theta_init : ndarray
        Initial parameter vector (numpy array)
    max_iter : int, optional
        Maximum number of iterations to run (default: 50)
    callback : function, optional
        a function that gets called on each iteration with the following arguments: the current parameter
        value (ndarray), and a dictionary that contains a information about the status of the algorithm
    disp : int, optional
        determines how much information to display when running. Ranges from 0 (nothing) to 3 (lots of information)

    Returns
    -------
    theta : ndarray
        The parameters found after running the optimization procedure

    Other Parameters
    ----------------
    tau : (float, float, float), optional
        initial, increment and decrement parameters for the momentum scheduler (default: (10, 2, 2))
    tol : float, optional
        residual tolerance for assessing convergence. if both the primal and dual residuals are less
        than this value, then the algorithm has converged (default: 1e-3)
    """
    # get list of objectives for this parameter
    num_obj = len(self.objectives)
    assert num_obj >= 1, "There must be at least one objective!"
    # initialize lists of primal and dual variable copies, one for each objective
    # (all work happens on flattened vectors; orig_shape restores the caller's shape)
    orig_shape = theta_init.shape
    primals = [theta_init.flatten() for _ in range(num_obj)]
    duals = [np.zeros(theta_init.size) for _ in range(num_obj)]
    theta_avg = np.mean(primals, axis=0).ravel()
    # initialize penalty parameter
    tau = namedtuple('tau', ('init', 'inc', 'dec'))(*tau)
    rho = tau.init
    # store cumulative runtimes of each iteration, starting now
    tstart = time.time()
    # clear metadata
    self.metadata = defaultdict(list)
    # run ADMM iterations
    self.converged = False
    for cur_iter in range(max_iter):
        # store the parameters from the previous iteration
        theta_prev = theta_avg
        # update each primal variable copy by taking a proximal step via each objective
        for varidx, dual in enumerate(duals):
            primals[varidx] = self.objectives[varidx]((theta_prev - dual).reshape(orig_shape), rho).ravel()
        # average primal copies (the consensus variable)
        theta_avg = np.mean(primals, axis=0)
        # update the dual variables (after primal update has finished)
        for varidx, primal in enumerate(primals):
            duals[varidx] += primal - theta_avg
        # compute primal and dual residuals
        primal_resid = float(np.sum([np.linalg.norm(primal - theta_avg) for primal in primals]))
        dual_resid = num_obj * rho ** 2 * np.linalg.norm(theta_avg - theta_prev)
        # update penalty parameter according to primal and dual residuals
        # (see sect. 3.4.1 of the Boyd and Parikh ADMM paper)
        if primal_resid > tau.init * dual_resid:
            rho *= float(tau.inc)
        elif dual_resid > tau.init * primal_resid:
            rho /= float(tau.dec)
        # update metadata for this iteration
        self.metadata['Primal resid'].append(primal_resid)
        self.metadata['Dual resid'].append(dual_resid)
        self.metadata['Time (s)'].append(time.time() - tstart)
        self.metadata['rho'].append(rho)
        # invoke the callback function with the current parameters and
        # history
        if callback is not None:
            # get the metadata from this iteration
            data = valmap(last, self.metadata)
            callback(theta_avg.reshape(orig_shape), data)
        # update the display
        self.update_display(cur_iter + 1, disp)
        # check for convergence
        if (primal_resid <= tol) & (dual_resid <= tol):
            self.converged = True
            break
    # clean up display
    self.update_display(-1, disp)
    # store and return final parameters
    self.theta = theta_avg.reshape(orig_shape)
    return self.theta
|
python
|
def mk_package(contents):
    """Instantiates a package specification from a parsed "AST" of a
    package.

    Parameters
    ----------
    contents : dict

    Returns
    ----------
    PackageSpecification
    """
    # Each raw definition is resolved before constructing the spec.
    resolved_definitions = [mk_definition(defn)
                            for defn in contents.get('definitions', [])]
    return sbp.PackageSpecification(
        identifier=contents.get('package', None),
        description=contents.get('description', None),
        includes=contents.get('include', []),
        definitions=resolved_definitions,
        render_source=contents.get('render_source', True),
        stable=contents.get('stable', False),
        public=contents.get('public', True))
|
java
|
/**
 * Validate that {@code validationObject} represents a date/time strictly in
 * the future. Null values are considered valid; otherwise a validation
 * exception with the annotation's message is thrown for non-future values.
 */
private boolean validateFuture(Object validationObject, Annotation annotate)
{
    if (checkNullObject(validationObject))
    {
        return true;
    }
    int res = 0;
    // BUGFIX: getClass().isAssignableFrom(X.class) only matched the exact
    // class, so subclasses (java.sql.Date, and the GregorianCalendar that
    // Calendar.getInstance() returns) never entered either branch and were
    // always rejected. instanceof handles subclasses correctly.
    if (validationObject instanceof Date)
    {
        Date today = new Date();
        res = ((Date) validationObject).compareTo(today);
    }
    else if (validationObject instanceof Calendar)
    {
        Calendar now = Calendar.getInstance();
        res = ((Calendar) validationObject).compareTo(now);
    }
    if (res <= 0)
    {
        throwValidationException(((Future) annotate).message());
    }
    return true;
}
|
python
|
def _does_not_contain_replica_sections(sysmeta_pyxb):
    """Assert that ``sysmeta_pyxb`` does not contain any replica information.

    Raises:
      d1_common.types.exceptions.InvalidSystemMetadata: if one or more
        replica sections are present.
    """
    if len(getattr(sysmeta_pyxb, 'replica', [])):
        raise d1_common.types.exceptions.InvalidSystemMetadata(
            0,
            # BUGFIX: message previously read "A new object object created".
            'A replica section was included. A new object created via '
            'create() or update() cannot already have replicas. pid="{}"'.format(
                d1_common.xml.get_req_val(sysmeta_pyxb.identifier)
            ),
            identifier=d1_common.xml.get_req_val(sysmeta_pyxb.identifier),
        )
|
python
|
def get_url(self, **kwargs):
    """
    Return an url, relative to the request associated with this
    table. Any keywords arguments provided added to the query
    string, replacing existing values.
    """
    request = self._request
    return build(request.path, request.GET, self._meta.prefix, **kwargs)
|
python
|
def sync_accounts(self, accounts_data, clear = False, password=None, cb = None):
    """
    Load all of the accounts from the account section of the config
    into the database.

    :param accounts_data: mapping of account_id -> values dict; non-dict
        entries are skipped.
    :param clear: NOTE(review): accepted but never read in this body —
        confirm whether callers rely on it.
    :param password: password used to encrypt account secrets; falls back
        to ``self.password`` when not given.
    :param cb: optional progress callback, invoked once per loaded account.
    :return: None; changes are committed to ``self.database``.
    """
    # Map common values into the accounts records
    all_accounts = self.accounts  # NOTE(review): fetched but unused here — confirm
    kmap = Account.prop_map()
    for account_id, values in accounts_data.items():
        if not isinstance(values, dict):
            continue
        d = {}
        a = self.library.find_or_new_account(account_id)
        a.secret_password = password or self.password
        for k, v in values.items():
            if k in ('id',):
                continue
            try:
                if kmap[k] == 'secret' and v:
                    # Secrets are stored encrypted, not assigned raw.
                    a.encrypt_secret(v)
                else:
                    setattr(a, kmap[k], v)
            except KeyError:
                # Keys with no mapped property go into the free-form blob.
                d[k] = v
        a.data = d
        if values.get('service') == 's3':
            a.url = 's3://{}'.format(a.account_id)
        if cb:
            cb('Loaded account: {}'.format(a.account_id))
    self.database.session.commit()
|
python
|
def _SkipLengthDelimited(buffer, pos, end):
    """Skip a length-delimited value. Returns the new position."""
    # The varint prefix gives the payload size; advance past both.
    size, new_pos = _DecodeVarint(buffer, pos)
    new_pos += size
    if new_pos > end:
        raise _DecodeError('Truncated message.')
    return new_pos
|
python
|
def delete_asset(self, asset_id):
    """Deletes an ``Asset``.

    arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
        remove
    raise: NotFound - ``asset_id`` not found
    raise: NullArgument - ``asset_id`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceAdminSession.delete_resource_template
    collection = JSONClientValidated('repository',
                                     collection='Asset',
                                     runtime=self._runtime)
    if not isinstance(asset_id, ABCId):
        raise errors.InvalidArgument('the argument is not a valid OSID Id')
    # Lookup honors the current view filter; missing assets raise here.
    asset_map = collection.find_one(
        dict({'_id': ObjectId(asset_id.get_identifier())},
             **self._view_filter()))
    # Run the object's own cleanup logic before removing the record.
    objects.Asset(osid_object_map=asset_map, runtime=self._runtime, proxy=self._proxy)._delete()
    collection.delete_one({'_id': ObjectId(asset_id.get_identifier())})
|
java
|
/**
 * Encode the given point as a geohash of the maximum supported length.
 */
public static String encodeHash(LatLong p) {
    return encodeHash(p.getLat(), p.getLon(), MAX_HASH_LENGTH);
}
|
java
|
/**
 * Run a command through the bound remote service, waiting for the binding
 * if necessary and retrying up to 5 times (3 s apart) on failure. Invokes
 * the command's listener callbacks on cancel/complete/error and tears the
 * service down once no tasks remain.
 */
private static String commandRun(Command command) {
    // Wait bind
    if (I_COMMAND == null) {
        // Double-checked under I_LOCK; the binder is expected to notify.
        synchronized (I_LOCK) {
            if (I_COMMAND == null) {
                try {
                    I_LOCK.wait();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }
    // Cancel Destroy Service
    cancelDestroyService();
    // Get result
    // In this we should try 5 count get the result
    int count = 5;
    Exception error = null;
    while (count > 0) {
        if (command.isCancel) {
            if (command.mListener != null)
                command.mListener.onCancel();
            break;
        }
        try {
            command.mResult = I_COMMAND.command(command.mId, command.mTimeout, command.mParameters);
            if (command.mListener != null)
                command.mListener.onCompleted(command.mResult);
            break;
        } catch (Exception e) {
            // Remember the last failure and retry after a short back-off.
            error = e;
            count--;
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e1) {
                e1.printStackTrace();
            }
        }
    }
    // Check is Error
    if (count <= 0 && command.mListener != null) {
        command.mListener.onError(error);
    }
    // Check is end and call destroy service
    if (I_COMMAND != null) {
        try {
            if (I_COMMAND.getTaskCount() <= 0)
                destroyService();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // Return
    return command.mResult;
}
|
python
|
def create_action(parent, text, shortcut=None, icon=None, tip=None,
                  toggled=None, triggered=None, data=None, menurole=None,
                  context=Qt.WindowShortcut):
    """Create and configure a QAction.

    :param parent: QObject owning the action
    :param text: display text of the action
    :param shortcut: optional key sequence
    :param icon: QIcon instance or icon name string
    :param tip: tooltip / status-tip text
    :param toggled: slot connected to ``toggled`` (makes action checkable)
    :param triggered: slot connected to ``triggered``
    :param data: arbitrary payload stored via ``setData``
    :param menurole: QAction menu role (relevant on macOS)
    :param context: shortcut context (default: Qt.WindowShortcut)
    :return: the configured SpyderAction
    """
    action = SpyderAction(text, parent)
    if triggered is not None:
        action.triggered.connect(triggered)
    if toggled is not None:
        action.toggled.connect(toggled)
        action.setCheckable(True)
    if icon is not None:
        if is_text_string(icon):
            icon = get_icon(icon)
        action.setIcon(icon)
    if tip is not None:
        action.setToolTip(tip)
        action.setStatusTip(tip)
    if data is not None:
        action.setData(to_qvariant(data))
    if menurole is not None:
        action.setMenuRole(menurole)
    # Workround for Mac because setting context=Qt.WidgetShortcut
    # there doesn't have any effect: remember the shortcut for display
    # instead of installing it on the action.
    if sys.platform == 'darwin' and context == Qt.WidgetShortcut:
        # 'missing' is filled in later by main.register_shortcut
        action._shown_shortcut = shortcut if shortcut is not None else 'missing'
    else:
        if sys.platform == 'darwin':
            action._shown_shortcut = None
        # Previously this shortcut/context assignment was duplicated in two
        # branches; it is now shared.
        if shortcut is not None:
            action.setShortcut(shortcut)
        action.setShortcutContext(context)
    return action
|
python
|
def getServiceEndpoints(self, yadis_url, service_element):
    """Return the endpoint objects produced by this session's filters.

    Expands the service element into (type_uris, uri) combinations,
    wraps each in a BasicServiceEndpoint and keeps only those that
    survive ``applyFilters``.
    """
    candidates = (
        self.applyFilters(
            BasicServiceEndpoint(yadis_url, type_uris, uri, service_element))
        for type_uris, uri, _ in expandService(service_element))
    return [endpoint for endpoint in candidates if endpoint is not None]
|
python
|
def block_matrix(A, B, C, D):
    r"""Assemble four quadrant matrices into one block matrix
    .. math::
        \begin{pmatrix} A B \\ C D \end{pmatrix}
    Args:
        A (Matrix): Top-left block, shape ``(n, m)``
        B (Matrix): Top-right block, shape ``(n, k)``
        C (Matrix): Bottom-left block, shape ``(l, m)``
        D (Matrix): Bottom-right block, shape ``(l, k)``
    Returns:
        Matrix: The combined block matrix ``[[A, B], [C, D]]``.
    """
    top_row = hstackm((A, B))
    bottom_row = hstackm((C, D))
    return vstackm((top_row, bottom_row))
|
python
|
def get_ceph_expected_pools(self, radosgw=False):
    """Return the ceph pools expected in a ceph + cinder + glance test
    scenario.

    The pool set depends on the OpenStack release under test; when
    ``radosgw`` is True the standard radosgw pools are appended.
    """
    release = self._get_openstack_release()
    if release == self.trusty_icehouse:
        # Icehouse
        expected = ['data', 'metadata', 'rbd', 'cinder-ceph', 'glance']
    elif self.trusty_kilo <= release <= self.zesty_ocata:
        # Kilo through Ocata
        expected = ['rbd', 'cinder-ceph', 'glance']
    else:
        # Pike and later
        expected = ['cinder-ceph', 'glance']
    if radosgw:
        expected += [
            '.rgw.root',
            '.rgw.control',
            '.rgw',
            '.rgw.gc',
            '.users.uid',
        ]
    return expected
|
python
|
def sudo(cls, line, *args, **kwds):
    """
    duplicated

    Prefix the command line with ``sudo -u <user>`` when a sudo user is
    configured in the environment, then delegate to ``cls.call``.
    """
    user = Env.get(Env.JUMON_SUDO)
    command = 'sudo -u {} {}'.format(user, line) if user else line
    return cls.call(command, *args, **kwds)
|
java
|
/**
 * Copies the project's main resources directory into {@code target/classes},
 * applying Maven resource filtering. Does nothing if the source directory
 * does not exist.
 *
 * @param mojo      the mojo providing base and build directories
 * @param filtering the Maven resource-filtering component
 * @throws IOException if the copy or filtering fails
 */
public static void copyInternalResources(AbstractWisdomMojo mojo, MavenResourcesFiltering filtering) throws
        IOException {
    File in = new File(mojo.basedir, Constants.MAIN_RESOURCES_DIR);
    if (!in.exists()) {
        return;
    }
    File out = new File(mojo.buildDirectory, "classes");
    filterAndCopy(mojo, filtering, in, out);
}
|
java
|
/**
 * Folds the neighbors of a point into the current cluster, queueing
 * newly-seen points as expansion seeds.
 *
 * @param neighbor       iterator over the neighbor list (with distances)
 * @param currentCluster the cluster being grown
 * @param seeds          queue of points still to be expanded
 */
private void processNeighbors(DoubleDBIDListIter neighbor, ModifiableDBIDs currentCluster, ArrayModifiableDBIDs seeds) {
    final boolean ismetric = getDistanceFunction().isMetric();
    for(; neighbor.valid(); neighbor.advance()) {
        // add() returns true only for points not processed before.
        if(processedIDs.add(neighbor)) {
            // For metric distances, zero distance means a duplicate point:
            // it needs no further expansion, so skip the seed queue.
            if(!ismetric || neighbor.doubleValue() > 0.) {
                seeds.add(neighbor);
            }
        }
        // Already processed: only reclaim it if it was labeled noise
        // (remove() returns true); otherwise it belongs to a cluster already.
        else if(!noise.remove(neighbor)) {
            continue;
        }
        currentCluster.add(neighbor);
    }
}
|
java
|
/**
 * Compares this string to another, byte-wise (unsigned) over the binary
 * representation, falling back to {@link String#compareTo} when both sides
 * still hold their Java string objects.
 */
@Override
public int compareTo(BinaryString other) {
    // Fast path: both sides have an un-materialized Java String.
    if (javaObject != null && other.javaObject != null) {
        return javaObject.compareTo(other.javaObject);
    }
    ensureMaterialized();
    other.ensureMaterialized();
    if (segments.length == 1 && other.segments.length == 1) {
        // Single-segment case: compare bytes directly, unsigned.
        int len = Math.min(sizeInBytes, other.sizeInBytes);
        MemorySegment seg1 = segments[0];
        MemorySegment seg2 = other.segments[0];
        for (int i = 0; i < len; i++) {
            int res = (seg1.get(offset + i) & 0xFF) - (seg2.get(other.offset + i) & 0xFF);
            if (res != 0) {
                return res;
            }
        }
        // Common prefix equal: the shorter string sorts first.
        return sizeInBytes - other.sizeInBytes;
    }
    // if there are multi segments.
    return compareMultiSegments(other);
}
|
java
|
/**
 * Builds a map from snapshot identifier to snapshot, covering both the
 * given snapshots and any previous snapshots they reference that are
 * missing from the input batch.
 */
Map<SnapshotIdentifier, CdoSnapshot> calculate(List<CdoSnapshot> snapshots) {
    Map<SnapshotIdentifier, CdoSnapshot> result = new HashMap<>();
    populatePreviousSnapshotsWithSnapshots(result, snapshots);
    // Fetch snapshots referenced as "previous" but absent from the batch.
    List<CdoSnapshot> fetched = getMissingPreviousSnapshots(snapshots, result);
    populatePreviousSnapshotsWithSnapshots(result, fetched);
    return result;
}
|
java
|
/**
 * Marshalls the given request's application id into the protocol
 * marshaller.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(DescribeApplicationStateRequest describeApplicationStateRequest, ProtocolMarshaller protocolMarshaller) {
    if (describeApplicationStateRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(describeApplicationStateRequest.getApplicationId(), APPLICATIONID_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Ensures an event channel connection exists for the given device,
 * connecting on demand and failing if the connection still cannot be
 * established afterwards.
 *
 * @throws DevFailed if connecting to the device's event channel fails
 */
@Override
protected void checkDeviceConnection(DeviceProxy device,
        String attribute, DeviceData deviceData, String event_name) throws DevFailed {
    String deviceName = device.name();
    if (!device_channel_map.containsKey(deviceName)) {
        connect(device, attribute, event_name, deviceData);
        // connect() registers the channel on success; if the device is
        // still absent from the map, the subscription failed.
        if (!device_channel_map.containsKey(deviceName)) {
            Except.throw_event_system_failed("API_NotificationServiceFailed",
                    "Failed to connect to event channel for device",
                    "EventConsumer.subscribe_event()");
        }
    }
}
|
java
|
/**
 * Loads and returns a {@link Properties} object from this source's reader,
 * closing the reader when done.
 *
 * @throws IOException if reading fails
 */
public Properties readProperties() throws IOException {
    final Properties props = new Properties();
    try (Reader source = newReader()) {
        props.load(source);
    }
    return props;
}
|
python
|
def capture_dash_in_url_name(self, node):
    """Capture dash in URL name

    Returns a DJ04 issue when the call's ``name`` keyword argument
    contains a dash; returns None otherwise.
    """
    for kw in node.keywords:
        if kw.arg != 'name':
            continue
        if '-' in kw.value.s:
            return DJ04(
                lineno=node.lineno,
                col=node.col_offset,
            )
|
python
|
def filetime_to_dt(ft):
    """Converts a Microsoft filetime number to a Python datetime. The new
    datetime object is time zone-naive but is equivalent to tzinfo=utc.
    >>> filetime_to_dt(116444736000000000)
    datetime.datetime(1970, 1, 1, 0, 0)
    >>> filetime_to_dt(128930364000000000)
    datetime.datetime(2009, 7, 25, 23, 0)
    >>> filetime_to_dt(128930364000001000)
    datetime.datetime(2009, 7, 25, 23, 0, 0, 100)
    """
    # Split into whole seconds since the Unix epoch plus the leftover
    # 100-nanosecond ticks.
    seconds, ticks = divmod(ft - EPOCH_AS_FILETIME, HUNDREDS_OF_NANOSECONDS)
    # 10 ticks of 100 ns each make one microsecond (Python 3.2 requires
    # an integer here).
    return datetime.utcfromtimestamp(seconds).replace(microsecond=ticks // 10)
|
java
|
/**
 * Matches methods whose full parameter list is matched by the given matcher.
 *
 * @param matcher matcher applied to the method's parameter descriptions
 * @return a junction matching methods with matching parameters
 */
public static <T extends MethodDescription> ElementMatcher.Junction<T> hasParameters(
        ElementMatcher<? super Iterable<? extends ParameterDescription>> matcher) {
    return new MethodParametersMatcher<T>(matcher);
}
|
python
|
def html_listify(tree, root_xl_element, extensions, list_type='ol'):
    """Convert a node tree into an xhtml nested list-of-lists.
    This will create 'li' elements under the root_xl_element,
    additional sublists of the type passed as list_type. The contents
    of each li depends on the extensions dictonary: the keys of this
    dictionary are the ids of tree elements that are repesented by files
    in the epub, with associated filename extensions as the value. Those
    nodes will be rendered as links to the reassembled filename: i.e.
    id='abc-2345-54e4' {'abc-2345-54e4': 'xhtml'} -> abc-2345-54e4.xhtml
    Other nodes will render as spans. If the node has id or short id values,
    the associated li will be populated with cnx-archive-uri and
    cnx-archive-shortid attributes, respectively"""
    for node in tree:
        li_elm = etree.SubElement(root_xl_element, 'li')
        if node['id'] not in extensions:  # no extension, no associated file
            span_elm = lxml.html.fragment_fromstring(
                node['title'], create_parent='span')
            li_elm.append(span_elm)
        else:
            a_elm = lxml.html.fragment_fromstring(
                node['title'], create_parent='a')
            a_elm.set('href', ''.join([node['id'], extensions[node['id']]]))
            li_elm.append(a_elm)
        if node['id'] is not None and node['id'] != 'subcol':
            li_elm.set('cnx-archive-uri', node['id'])
        if node['shortId'] is not None:
            li_elm.set('cnx-archive-shortid', node['shortId'])
        if 'contents' in node:
            elm = etree.SubElement(li_elm, list_type)
            # BUGFIX: propagate list_type into the recursion; previously the
            # call omitted it, so nested sublists always fell back to 'ol'
            # regardless of the caller's list_type.
            html_listify(node['contents'], elm, extensions, list_type)
|
java
|
/**
 * Resolves one module dependency name (expanding config aliases and
 * has! loader-plugin conditionals) and adds the resulting module id(s),
 * each paired with a {@link ModuleDepInfo}, to {@code deps}.
 * <p>
 * If the name carries a loader plugin (a {@code !} separator), the plugin
 * module is itself processed as a dependency, and has! expressions are
 * branched over their features unless has-plugin branching is disabled
 * in the aggregator options.
 *
 * @param name           the module id to process (may include a plugin prefix)
 * @param deps           the result set resolved dependencies are added to
 * @param callerInfo     dependency info inherited from the caller, or null
 * @param recursionCheck names already seen on this resolution path, used to
 *                       detect alias-expansion cycles
 * @param dependee       the module declaring this dependency, or null
 */
void processDep(String name, ModuleDeps deps, ModuleDepInfo callerInfo, Set<String> recursionCheck, String dependee) {
    final String methodName = "processDep"; //$NON-NLS-1$
    final boolean traceLogging = log.isLoggable(Level.FINEST);
    final boolean entryExitLogging = log.isLoggable(Level.FINER);
    if (entryExitLogging) {
        log.entering(DependencyList.class.getName(), methodName, new Object[]{deps, name, callerInfo});
    }
    boolean performHasBranching = !aggr.getOptions().isDisableHasPluginBranching();
    if (traceLogging && !performHasBranching) {
        log.finest("Has branching is disabled."); //$NON-NLS-1$
    }
    StringBuffer sb = includeDetails ? new StringBuffer() : null;
    String comment = null, resolved = null;
    // If a plugin is specified, save the plguin name in case alias resolution or
    // has! loader plugin resolution eliminates the plugin from the module id.
    int idx = (name != null) ? name.indexOf("!") : -1; //$NON-NLS-1$
    String pluginName = idx > 0 ? name.substring(0, idx) : null;
    resolved = aggr.getConfig().resolve(name, features, dependentFeatures, sb, resolveAliases, !performHasBranching);
    if (traceLogging) {
        log.finest("Module name \"" + name + "resolved to \"" + resolved + "\"."); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    }
    if (includeDetails) {
        comment = ((dependee == null) ?
                MessageFormat.format(Messages.DependencyList_5, source) :
                MessageFormat.format(Messages.DependencyList_4, dependee)) + sb.toString();
    }
    // Alias resolution changed the name: guard against expansion cycles.
    if (resolved != null && resolved.length() > 0 && !resolved.equals(name)) {
        name = resolved;
        if (recursionCheck.contains(name)) {
            if (log.isLoggable(Level.WARNING)) {
                log.warning(MessageFormat.format(
                        Messages.DependencyList_3,
                        new Object[] {name, recursionCheck}
                ));
            }
            return;
        }
        recursionCheck.add(name);
    }
    ModuleDepInfo info = callerInfo != null ?
            new ModuleDepInfo(callerInfo, dependee == null ? null : comment) :
            new ModuleDepInfo(null, null, comment);
    if (traceLogging) {
        log.finest("pluginName = " + pluginName); //$NON-NLS-1$
    }
    // check for plugin again in case one was introduced by config aliasing.
    idx = (name != null) ? name.indexOf("!") : -1; //$NON-NLS-1$
    if (idx > 0) {
        pluginName = name.substring(0, idx);
    }
    if (pluginName != null) {
        // The loader plugin module is itself a dependency; recurse on it
        // with a copy of the recursion-check set.
        processDep(pluginName, deps,
                callerInfo != null ?
                        new ModuleDepInfo(callerInfo, Messages.DependencyList_1) :
                        new ModuleDepInfo(null, null, Messages.DependencyList_1),
                recursionCheck != null ? new HashSet<String>(recursionCheck) : null,
                dependee);
        if (performHasBranching) {
            if (hasPattern.matcher(pluginName).find()) {
                // has! plugin: expand the conditional expression and process
                // every branch as a dependency in its own right.
                HasNode hasNode = new HasNode(name.substring(idx+1));
                if (traceLogging) {
                    log.finest("hasNode = " + hasNode); //$NON-NLS-1$
                }
                ModuleDeps hasDeps = hasNode.evaluateAll(
                        pluginName,
                        // Specify empty feature set so that dependent features discovered
                        // by has! plugin branching will not vary depending on the specified
                        // features.
                        Features.emptyFeatures,
                        dependentFeatures,
                        callerInfo,
                        includeDetails ? MessageFormat.format(
                                Messages.DependencyList_2,
                                new Object[]{name})
                                : null
                );
                if (traceLogging) {
                    log.finest("hasDeps = " + hasDeps); //$NON-NLS-1$
                }
                for (Map.Entry<String, ModuleDepInfo> entry : hasDeps.entrySet()) {
                    processDep(entry.getKey(), deps, entry.getValue(),
                            recursionCheck != null ? new HashSet<String>(recursionCheck) : null,
                            dependee);
                }
            } else {
                if (traceLogging) {
                    log.finest("Adding module \"" + name + "\" with ModuleDepInfo: " + info + " to result deps - 3"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                }
                deps.add(name, info);
            }
        } else {
            if (traceLogging) {
                log.finest("Adding module \"" + name + "\" with ModuleDepInfo: " + info + " to result deps - 2"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
            }
            deps.add(name, info);
        }
    } else {
        if (traceLogging) {
            log.finest("Adding module \"" + name + "\" with ModuleDepInfo: " + info + " to result deps - 1"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        }
        deps.add(name, info);
    }
    if (entryExitLogging) {
        log.exiting(DependencyList.class.getName(), methodName);
    }
}
|
java
|
/**
 * Converts a (relative gregorian year, day-of-year) pair to days since
 * the UTC epoch.
 *
 * @param relgregyear the gregorian year
 * @param dayOfYear   the 1-based day within that year
 * @return days since the epoch (UTC)
 */
static long transform(
    int relgregyear,
    int dayOfYear
) {
    if (relgregyear >= 1873) {
        // Modern era: delegate to the proleptic gregorian calendar.
        return PlainDate.of(relgregyear, dayOfYear).getDaysSinceEpochUTC();
    } else {
        // Pre-1873 years use a precomputed table of year-start epoch days,
        // indexed from year 701. NOTE(review): presumably this covers the
        // pre-gregorian-reform (lunisolar) calendar range — confirm against
        // the START_OF_YEAR table's documentation.
        return START_OF_YEAR[relgregyear - 701] + dayOfYear - 1;
    }
}
|
java
|
/**
 * Compares this version to another by major, minor, micro and finally the
 * qualifier string. A version with a qualifier sorts after the same
 * numeric version without one. Note the numeric comparisons return the raw
 * difference while the qualifier comparison is normalized to -1/0/+1.
 *
 * @param rhs the version to compare against; must not be null
 * @return negative, zero or positive per the {@link Comparable} contract
 */
public int compareTo (@Nonnull final Version rhs)
{
  ValueEnforcer.notNull (rhs, "Rhs");
  // compare major version
  int ret = m_nMajor - rhs.m_nMajor;
  if (ret == 0)
  {
    // compare minor version
    ret = m_nMinor - rhs.m_nMinor;
    if (ret == 0)
    {
      // compare micro version
      ret = m_nMicro - rhs.m_nMicro;
      if (ret == 0)
      {
        // check qualifier
        if (m_sQualifier != null)
        {
          if (rhs.m_sQualifier != null)
          {
            ret = m_sQualifier.compareTo (rhs.m_sQualifier);
            // convert to -1/0/+1
            if (ret < 0)
              ret = -1;
            else
              if (ret > 0)
                ret = +1;
          }
          else
            ret = 1;
        }
        else
          if (rhs.m_sQualifier != null)
          {
            // only this qualifier == null
            ret = -1;
          }
          else
          {
            // both qualifier are null
            ret = 0;
          }
      }
    }
  }
  return ret;
}
|
python
|
def navigate(self):
    """Compute the longitudes and latitudes of the scene and store them.

    Tie points come on a 40 km grid from the "pos" array (scaled by
    1e-4 to degrees). When python-geotiepoints is available they are
    interpolated to the full 1 km grid; otherwise the coarse grid is
    kept as-is.
    """
    t_start = datetime.now()
    tie_lons = self._data["pos"][:, :, 1] * 1e-4
    tie_lats = self._data["pos"][:, :, 0] * 1e-4
    try:
        from geotiepoints import SatelliteInterpolator
    except ImportError:
        logger.warning("Could not interpolate lon/lats, "
                       "python-geotiepoints missing.")
        self.lons, self.lats = tie_lons, tie_lats
    else:
        n_lines = tie_lons.shape[0]
        # Interpolation orders: 1 along-track, 3 cross-track.
        interpolator = SatelliteInterpolator(
            (tie_lons, tie_lats),
            (np.arange(n_lines), np.arange(24, 2048, 40)),
            (np.arange(n_lines), np.arange(2048)),
            1, 3)
        self.lons, self.lats = interpolator.interpolate()
    logger.debug("Navigation time %s", str(datetime.now() - t_start))
|
java
|
/**
 * Determines the major Keras version from a parsed model configuration.
 * Falls back to version 1 (with a warning) when the config carries no
 * version field at all.
 *
 * @return the major Keras version number
 * @throws InvalidKerasConfigurationException if the version string does
 *         not start with a digit
 */
public static int determineKerasMajorVersion(Map<String, Object> modelConfig, KerasModelConfiguration config)
        throws InvalidKerasConfigurationException {
    int kerasMajorVersion;
    if (!modelConfig.containsKey(config.getFieldKerasVersion())) {
        // Old Keras files predate the version field entirely.
        log.warn("Could not read keras version used (no "
                + config.getFieldKerasVersion() + " field found) \n"
                + "assuming keras version is 1.0.7 or earlier."
        );
        kerasMajorVersion = 1;
    } else {
        String kerasVersionString = (String) modelConfig.get(config.getFieldKerasVersion());
        // The first character of e.g. "2.2.4" is the major version.
        if (Character.isDigit(kerasVersionString.charAt(0))) {
            kerasMajorVersion = Character.getNumericValue(kerasVersionString.charAt(0));
        } else {
            throw new InvalidKerasConfigurationException(
                    "Keras version was not readable (" + config.getFieldKerasVersion() + " provided)"
            );
        }
    }
    return kerasMajorVersion;
}
|
java
|
/**
 * Validates this property's value: in addition to the superclass checks,
 * the date (when set) must be a DATE-TIME and must be specified in UTC.
 *
 * @throws ValidationException if the value is not a UTC DATE-TIME
 */
public void validate() throws ValidationException {
    super.validate();
    if (getDate() != null && !(getDate() instanceof DateTime)) {
        throw new ValidationException(
                "Property must have a DATE-TIME value");
    }
    // Safe cast: the check above rejected any non-DateTime value.
    final DateTime dateTime = (DateTime) getDate();
    if (dateTime != null && !dateTime.isUtc()) {
        throw new ValidationException(getName() +
                ": DATE-TIME value must be specified in UTC time");
    }
}
|
python
|
def thanks(year=None):
    """Return Thanksgiving Day: the 4th Thursday in November.

    :param year: int (or int-like); falls back to the module-level
        ``_year`` when falsy/omitted
    :return: datetime.date of Thanksgiving Day
    """
    target_year = int(year) if year else _year
    first_of_nov = datetime.date(target_year, 11, 1)
    offset = first_of_nov.weekday()  # Monday == 0 ... Thursday == 3
    # If Nov 1 falls after Thursday, the first Thursday slips into the
    # following week, pushing the fourth Thursday a week later.
    day = 32 - offset if offset > 3 else 25 - offset
    return datetime.date(first_of_nov.year, 11, day)
|
java
|
/**
 * Encodes a String property value into the output stream, prepending the
 * '*' marker expected by the wire format.
 *
 * @param baos  destination buffer
 * @param value the property value; must be a non-null String
 * @throws JMSException if the value is not a String (internal error)
 */
void encodeProperty(ByteArrayOutputStream baos, Object value) throws JMSException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "encodeProperty", new Object[]{baos, value});
    // The value should be a non-null String
    if (value instanceof String) {
        super.encodeProperty(baos, STAR_STRING + (String)value); // The value has to have '*' preprended
    }
    // If it is not a String, something has gone horribly wrong
    else {
        throw (JMSException)JmsErrorUtils.newThrowable(JMSException.class
                ,"INTERNAL_ERROR_CWSIA0362"
                ,new Object[] {"StringPropertyCoder.encodeProperty#1", longName, value}
                ,null
                ,"StringPropertyCoder.encodeProperty#1"
                ,null
                ,tc);
    }
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "encodeProperty");
}
|
java
|
/**
 * Begins creating or updating a server asynchronously, unwrapping the
 * service response to its body.
 *
 * @param resourceGroupName the resource group containing the server
 * @param serverName        the name of the server
 * @param parameters        the desired server configuration
 * @return an observable emitting the resulting server
 */
public Observable<ServerInner> beginCreateOrUpdateAsync(String resourceGroupName, String serverName, ServerInner parameters) {
    return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, parameters).map(new Func1<ServiceResponse<ServerInner>, ServerInner>() {
        @Override
        public ServerInner call(ServiceResponse<ServerInner> response) {
            return response.body();
        }
    });
}
|
python
|
def _convert_num(self, sign):
    """
    Converts number registered in get_number_from_sign.

    input = ["a2", "☉", "be3"]
    output = ["a₂", "☉", "be₃"]

    Indices below 2 or above 3 always become Unicode subscripts; 2 and 3
    become subscripts when ``self.two_three`` is set, otherwise they are
    rendered as an acute (2) or grave (3) accent on the first vowel.

    :param sign: string
    :return sign: string
    """
    # Check if there's a number at the end
    new_sign, num = self._get_number_from_sign(sign)
    if num < 2:  # "ab" -> "ab"
        return new_sign.replace(str(num),
                                self._convert_number_to_subscript(num))
    if num > 3:  # "buru14" -> "buru₁₄"
        return new_sign.replace(str(num),
                                self._convert_number_to_subscript(num))
    if self.two_three:  # pylint: disable=no-else-return
        return new_sign.replace(str(num),
                                self._convert_number_to_subscript(num))
    else:
        # "bad3" -> "bàd"
        # Accent the first vowel with a combining acute (num == 2) or
        # grave (num == 3) accent, then NFC-normalize it.
        for i, character in enumerate(new_sign):
            new_vowel = ''
            if character in VOWELS:
                if num == 2:
                    # noinspection PyUnusedLocal
                    new_vowel = character + chr(0x0301)
                elif num == 3:
                    new_vowel = character + chr(0x0300)
                break
        # NOTE(review): relies on the loop variable ``i`` after the loop;
        # if new_sign contains no vowel, the last character is replaced
        # with the empty new_vowel — presumably vowel-less signs never
        # reach this branch; confirm upstream guarantees.
        return new_sign[:i] + normalize('NFC', new_vowel) + \
            new_sign[i+1:].replace(str(num), '')
|
python
|
def from_floats(red, green, blue):
    """Return a new Color object from red/green/blue values from 0.0 to 1.0."""
    scale = Color.MAX_VALUE
    channels = (int(red * scale), int(green * scale), int(blue * scale))
    return Color(*channels)
|
java
|
/**
 * Lazily resolves and caches the PFCPFCFlgs enum from the registered AFP
 * package (classifier index 59).
 */
public EEnum getPFCPFCFlgs() {
    if (pfcpfcFlgsEEnum == null) {
        pfcpfcFlgsEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(59);
    }
    return pfcpfcFlgsEEnum;
}
|
java
|
/**
 * Deserializes the given bytes back into an object via Java serialization.
 *
 * @param buf the serialized form
 * @return the decoded object
 * @throws RemoteRuntimeException if the class is unknown or reading fails
 */
@SuppressWarnings("unchecked")
@Override
public T decode(final byte[] buf) {
    // try-with-resources ensures the stream is closed even on failure.
    try (final ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buf))) {
        return (T) in.readObject();
    } catch (final ClassNotFoundException | IOException ex) {
        throw new RemoteRuntimeException(ex);
    }
}
|
java
|
/**
 * Dispatches a system message for display.
 *
 * @param bundle    the message bundle; falls back to the default bundle
 *                  when null
 * @param message   the (translatable) message key or text
 * @param attLevel  the attention level of the message
 * @param localtype the local message type used for dispatching
 */
protected void displaySystem (String bundle, String message, byte attLevel, String localtype)
{
    // nothing should be untranslated, so pass the default bundle if need be.
    if (bundle == null) {
        bundle = _bundle;
    }
    SystemMessage msg = new SystemMessage(message, bundle, attLevel);
    dispatchMessage(msg, localtype);
}
|
java
|
/**
 * Lazily resolves and caches the SamplingRatios class from the registered
 * AFP package (classifier index 403).
 */
public EClass getSamplingRatios() {
    if (samplingRatiosEClass == null) {
        samplingRatiosEClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(403);
    }
    return samplingRatiosEClass;
}
|
python
|
def bind_texture(texture):
    """Draw a single texture

    Lazily loads the texture image on first use, then binds it to the GL
    context and clamps both texture coordinates to the edge.
    """
    # Cache the loaded image on the texture object so later binds skip I/O.
    if not getattr(texture, 'image', None):
        texture.image = load_image(texture.path)
    glEnable(texture.image.target)
    glBindTexture(texture.image.target, texture.image.id)
    # Clamp S/T wrap modes to avoid sampling past the texture edges.
    gl.glTexParameterf(texture.image.target,
                       gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_EDGE)
    gl.glTexParameterf(texture.image.target,
                       gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_EDGE)
|
java
|
/**
 * Wraps the given type and message in a {@code BasicEvent} and pushes it
 * onto the event queue.
 */
void triggerBasicEvent(Event.EventType type,
                       String message,
                       boolean flushBuffer)
{
    pushEvent(new BasicEvent(type, message), flushBuffer);
}
|
python
|
def get_execution_engine(name):
    """Look up, instantiate and return the named execution-engine plugin."""
    manager = driver.DriverManager(
        name=name,
        namespace='cosmic_ray.execution_engines',
        invoke_on_load=True,
        on_load_failure_callback=_log_extension_loading_failure)
    return manager.driver
|
python
|
def get_proficiencies_for_resource_on_date(self, resource_id, from_, to):
    """Gets a ``ProficiencyList`` relating to the given resource effective during the entire given date range inclusive but not confined to the date range.

    arg: resource_id (osid.id.Id): a resource ``Id``
    arg: from (osid.calendaring.DateTime): starting date
    arg: to (osid.calendaring.DateTime): ending date
    return: (osid.learning.ProficiencyList) - the returned
            ``Proficiency`` list
    raise: InvalidArgument - ``from`` is greater than ``to``
    raise: NullArgument - ``resource_id, from`` or ``to`` is
            ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for
    # osid.relationship.RelationshipLookupSession.get_relationships_for_source_on_date
    matching = [
        prof for prof in self.get_proficiencies_for_resource(resource_id)
        if overlap(from_, to, prof.start_date, prof.end_date)]
    return objects.ProficiencyList(matching, runtime=self._runtime)
|
python
|
def on_delete(self, forced):
    """Session expiration callback.

    `forced`
        True when the session item was explicitly deleted; False when
        it merely expired.
    """
    # An expired (not forced) session with a live, open connection is
    # promoted (kept alive) instead of being torn down.
    keep_alive = (not forced
                  and self.handler is not None
                  and not self.is_closed)
    if keep_alive:
        self.promote()
    else:
        self.close()
|
python
|
def update_guest_additions(self, source, arguments, flags):
    """Automatically updates already installed Guest Additions in a VM.
    At the moment only Windows guests are supported.
    Because the VirtualBox Guest Additions drivers are not WHQL-certified
    yet there might be warning dialogs during the actual Guest Additions
    update. These need to be confirmed manually in order to continue the
    installation process. This applies to Windows 2000 and Windows XP guests
    and therefore these guests can't be updated in a fully automated fashion
    without user interaction. However, to start a Guest Additions update for
    the mentioned Windows versions anyway, the flag
    AdditionsUpdateFlag_WaitForUpdateStartOnly can be specified. See
    :py:class:`AdditionsUpdateFlag` for more information.
    in source of type str
        Path to the Guest Additions .ISO file to use for the update.
    in arguments of type str
        Optional command line arguments to use for the Guest Additions
        installer. Useful for retrofitting features which weren't installed
        before in the guest.
    in flags of type :class:`AdditionsUpdateFlag`
        :py:class:`AdditionsUpdateFlag` flags.
    return progress of type :class:`IProgress`
        Progress object to track the operation completion.
    raises :class:`VBoxErrorNotSupported`
        Guest OS is not supported for automated Guest Additions updates or the
        already installed Guest Additions are not ready yet.
    raises :class:`VBoxErrorIprtError`
        Error while updating.
    """
    # Argument type validation (generated binding style).
    if not isinstance(source, basestring):
        raise TypeError("source can only be an instance of type basestring")
    if not isinstance(arguments, list):
        raise TypeError("arguments can only be an instance of type list")
    # NOTE(review): only the first 10 elements are type-checked here —
    # presumably a spot check in the generated code; confirm intent.
    for a in arguments[:10]:
        if not isinstance(a, basestring):
            raise TypeError(
                "array can only contain objects of type basestring")
    if not isinstance(flags, list):
        raise TypeError("flags can only be an instance of type list")
    for a in flags[:10]:
        if not isinstance(a, AdditionsUpdateFlag):
            raise TypeError(
                "array can only contain objects of type AdditionsUpdateFlag")
    # Delegate to the underlying COM/XPCOM call and wrap the result.
    progress = self._call("updateGuestAdditions",
                          in_p=[source, arguments, flags])
    progress = IProgress(progress)
    return progress
|
java
|
/**
 * Returns the tokens within a +/- windowSize neighborhood of position i
 * whose Jaro-Winkler similarity to the string s meets the minimum
 * similarity threshold (excluding the token at i itself).
 *
 * @param s the string to compare against
 * @param i the index of the reference token in allTokens
 * @return the set of sufficiently similar neighboring tokens
 */
private Set findSimilarTokens(String s,int i)
{
    Set likeTokI = new HashSet();
    // Clamp the window to the valid token range.
    for (int j=Math.max(0,i-windowSize); j<Math.min(i+windowSize,numTokens); j++) {
        if (i!=j) {
            Token tokj = allTokens[j];
            double d = jaroWinklerDistance.score( s, tokj.getValue() );
            if (d>=minTokenSimilarity) likeTokI.add( tokj );
        }
    }
    return likeTokI;
}
|
python
|
def wrap(self, LayoutClass, *args, **kwargs):
    """
    Wraps every layout object pointed in `self.slice` under a `LayoutClass`
    instance with `args` and `kwargs` passed.
    """
    def replace_field(layout_object, index):
        # Swap the field in place for its wrapped equivalent.
        layout_object.fields[index] = self.wrapped_object(
            LayoutClass, layout_object.fields[index], *args, **kwargs)

    self.pre_map(replace_field)
|
java
|
/**
 * Builds a RubyHash from a collection of 1- or 2-element collections:
 * each inner pair becomes a key/value entry; a single element becomes a
 * key mapped to null.
 *
 * @param cols the entries, each of size 1 or 2
 * @return the populated RubyHash
 * @throws IllegalArgumentException if any inner collection's size is not
 *         1 or 2
 */
public static <E> RubyHash<E, E> Hash(
        Collection<? extends Collection<? extends E>> cols) {
    RubyHash<E, E> rubyHash = newRubyHash();
    for (Collection<? extends E> col : cols) {
        if (col.size() < 1 || col.size() > 2) throw new IllegalArgumentException(
                "ArgumentError: invalid number of elements (" + col.size()
                        + " for 1..2)");
        Iterator<? extends E> iter = col.iterator();
        // One element -> value defaults to null.
        rubyHash.put(iter.next(), iter.hasNext() ? iter.next() : null);
    }
    return rubyHash;
}
|
java
|
/**
 * Escapes the given text for use inside an XML 1.0 attribute value, using
 * the XML 1.0 attribute symbol table with the requested escape type and
 * level.
 */
public static String escapeXml10Attribute(final String text, final XmlEscapeType type, final XmlEscapeLevel level) {
    return escapeXml(text, XmlEscapeSymbols.XML10_ATTRIBUTE_SYMBOLS, type, level);
}
|
java
|
/**
 * Converts a table name to a class name: the first character is upper-cased,
 * underscores are dropped and the character following each underscore is
 * upper-cased, and all other characters are lower-cased
 * (e.g. {@code "user_account"} becomes {@code "UserAccount"}).
 *
 * @param tableName the table name to convert
 * @return the derived class name
 */
public static String nameClass(String tableName)
{
    // StringBuilder (unsynchronized) and charAt() replace the original
    // StringBuffer plus a redundant char[] allocation that was immediately
    // overwritten by toCharArray().
    StringBuilder sb = new StringBuilder(tableName.length());
    boolean nextUpper = false;
    for (int i = 0; i < tableName.length(); i++) {
        char c = tableName.charAt(i);
        if (i == 0) {
            // First character is always capitalized, whatever it is.
            sb.append(Character.toUpperCase(c));
        } else if (c == '_') {
            // Drop the underscore; capitalize whatever follows it.
            nextUpper = true;
        } else if (nextUpper) {
            nextUpper = false;
            sb.append(Character.toUpperCase(c));
        } else {
            sb.append(Character.toLowerCase(c));
        }
    }
    return sb.toString();
}
|
java
|
/**
 * Interprets a terminfo capability string and writes the expanded output
 * to {@code out}. Supported syntax: backslash escapes, {@code ^X} control
 * characters, the {@code %} parameterized-string operators (parameter
 * push {@code %p}, static/dynamic variables {@code %P}/{@code %g},
 * char/int literals, arithmetic, bitwise and comparison operators,
 * {@code %? %t %e %;} conditionals, {@code %i} increment, {@code %d}
 * decimal output), and {@code $<..>} padding specifications, which are
 * skipped.
 *
 * @param out    destination writer
 * @param str    the capability string to interpret
 * @param params positional parameters referenced via {@code %p1}..{@code %p9}
 * @throws IOException if writing to {@code out} fails
 */
public static void tputs(Writer out, String str, Object... params) throws IOException {
    int index = 0;
    int length = str.length();
    // Conditional (%? %t %e %;) state machine; exec is false while
    // skipping the untaken branch.
    int ifte = IFTE_NONE;
    boolean exec = true;
    Stack<Object> stack = new Stack<Object>();
    while (index < length) {
        char ch = str.charAt(index++);
        switch (ch) {
            // Backslash escape sequences.
            case '\\':
                ch = str.charAt(index++);
                if (ch >= '0' && ch <= '9') {
                    throw new UnsupportedOperationException(); // todo
                } else {
                    switch (ch) {
                        case 'e':
                        case 'E':
                            if (exec) {
                                out.write(27); // escape
                            }
                            break;
                        case 'n':
                            // NOTE(review): written unconditionally —
                            // presumably an oversight vs. the other
                            // exec-guarded escapes; confirm intended.
                            out.write('\n');
                            break;
                        // case 'l':
                        // rawPrint('\l');
                        // break;
                        case 'r':
                            if (exec) {
                                out.write('\r');
                            }
                            break;
                        case 't':
                            if (exec) {
                                out.write('\t');
                            }
                            break;
                        case 'b':
                            if (exec) {
                                out.write('\b');
                            }
                            break;
                        case 'f':
                            if (exec) {
                                out.write('\f');
                            }
                            break;
                        case 's':
                            if (exec) {
                                out.write(' ');
                            }
                            break;
                        case ':':
                        case '^':
                        case '\\':
                            if (exec) {
                                out.write(ch);
                            }
                            break;
                        default:
                            throw new IllegalArgumentException();
                    }
                }
                break;
            // ^X control character (e.g. ^A -> 0x01).
            case '^':
                ch = str.charAt(index++);
                if (exec) {
                    out.write(ch - '@');
                }
                break;
            // % directives: the terminfo parameterized-string language.
            case '%':
                ch = str.charAt(index++);
                switch (ch) {
                    case '%':
                        if (exec) {
                            out.write('%');
                        }
                        break;
                    case 'p':
                        // Push parameter %p1..%p9 onto the stack.
                        ch = str.charAt(index++);
                        if (exec) {
                            stack.push(params[ch - '1']);
                        }
                        break;
                    case 'P':
                        // Pop into a dynamic (a-z) or static (A-Z) variable.
                        ch = str.charAt(index++);
                        if (ch >= 'a' && ch <= 'z') {
                            if (exec) {
                                dv[ch - 'a'] = stack.pop();
                            }
                        } else if (ch >= 'A' && ch <= 'Z') {
                            if (exec) {
                                sv[ch - 'A'] = stack.pop();
                            }
                        } else {
                            throw new IllegalArgumentException();
                        }
                        break;
                    case 'g':
                        // Push a dynamic (a-z) or static (A-Z) variable.
                        ch = str.charAt(index++);
                        if (ch >= 'a' && ch <= 'z') {
                            if (exec) {
                                stack.push(dv[ch - 'a']);
                            }
                        } else if (ch >= 'A' && ch <= 'Z') {
                            if (exec) {
                                stack.push(sv[ch - 'A']);
                            }
                        } else {
                            throw new IllegalArgumentException();
                        }
                        break;
                    case '\'':
                        // %'c' character literal.
                        ch = str.charAt(index++);
                        if (exec) {
                            stack.push((int) ch);
                        }
                        ch = str.charAt(index++);
                        if (ch != '\'') {
                            throw new IllegalArgumentException();
                        }
                        break;
                    case '{':
                        // %{nn} integer literal.
                        int start = index;
                        while (str.charAt(index++) != '}');
                        if (exec) {
                            int v = Integer.valueOf(str.substring(start, index - 1));
                            stack.push(v);
                        }
                        break;
                    case 'l':
                        // String length of the top of stack.
                        if (exec) {
                            stack.push(stack.pop().toString().length());
                        }
                        break;
                    case '+':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 + v2);
                        }
                        break;
                    case '-':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 - v2);
                        }
                        break;
                    case '*':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 * v2);
                        }
                        break;
                    case '/':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 / v2);
                        }
                        break;
                    case 'm':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 % v2);
                        }
                        break;
                    case '&':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 & v2);
                        }
                        break;
                    case '|':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 | v2);
                        }
                        break;
                    case '^':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 ^ v2);
                        }
                        break;
                    case '=':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 == v2);
                        }
                        break;
                    case '>':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 > v2);
                        }
                        break;
                    case '<':
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 < v2);
                        }
                        break;
                    case 'A':
                        // Logical AND.
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 != 0 && v2 != 0);
                        }
                        break;
                    case '!':
                        if (exec) {
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 == 0);
                        }
                        break;
                    case '~':
                        if (exec) {
                            int v1 = toInteger(stack.pop());
                            stack.push(~v1);
                        }
                        break;
                    case 'O':
                        // Logical OR.
                        if (exec) {
                            int v2 = toInteger(stack.pop());
                            int v1 = toInteger(stack.pop());
                            stack.push(v1 != 0 || v2 != 0);
                        }
                        break;
                    case '?':
                        // Begin conditional: %? expr %t then [%e else] %;
                        if (ifte != IFTE_NONE) {
                            throw new IllegalArgumentException();
                        } else {
                            ifte = IFTE_IF;
                        }
                        break;
                    case 't':
                        if (ifte != IFTE_IF && ifte != IFTE_ELSE) {
                            throw new IllegalArgumentException();
                        } else {
                            ifte = IFTE_THEN;
                        }
                        // Then-branch executes iff the condition was nonzero.
                        exec = toInteger(stack.pop()) != 0;
                        break;
                    case 'e':
                        if (ifte != IFTE_THEN) {
                            throw new IllegalArgumentException();
                        } else {
                            ifte = IFTE_ELSE;
                        }
                        exec = !exec;
                        break;
                    case ';':
                        if (ifte == IFTE_NONE || ifte == IFTE_IF) {
                            throw new IllegalArgumentException();
                        } else {
                            ifte = IFTE_NONE;
                        }
                        exec = true;
                        break;
                    case 'i':
                        // %i: increment the first two parameters (1-based
                        // cursor addressing).
                        if (params.length >= 1) {
                            params[0] = toInteger(params[0]) + 1;
                        }
                        if (params.length >= 2) {
                            params[1] = toInteger(params[1]) + 1;
                        }
                        break;
                    case 'd':
                        // NOTE(review): output is not guarded by exec here,
                        // unlike the other emitting directives — confirm
                        // intended.
                        out.write(Integer.toString(toInteger(stack.pop())));
                        break;
                    default:
                        throw new UnsupportedOperationException();
                }
                break;
            case '$':
                if (str.charAt(index) == '<') {
                    // We don't honour delays, just skip
                    while (str.charAt(index++) != '>');
                } else {
                    if (exec) {
                        out.write(ch);
                    }
                }
                break;
            default:
                // Ordinary character: copy through.
                if (exec) {
                    out.write(ch);
                }
                break;
        }
    }
}
|
python
|
def _query_by_distro(self, table_name):
    """
    Query for download data broken down by OS distribution, for one day.

    :param table_name: table name to query against
    :type table_name: str
    :return: dict of download information by distro; keys are project name,
        values are a dict of distro names to dicts of distro version to
        download count.
    :rtype: dict
    """
    logger.info('Querying for downloads by distro in table %s', table_name)
    query = "SELECT file.project, details.distro.name, " \
            "details.distro.version, COUNT(*) as dl_count " \
            "%s " \
            "%s " \
            "GROUP BY file.project, details.distro.name, " \
            "details.distro.version;" % (
                self._from_for_table(table_name),
                self._where_for_projects
            )
    rows = self._run_query(query)
    result = self._dict_for_projects()
    for row in rows:
        # per-project dict of distro name -> {version: count}
        project_data = result[row['file_project']]
        distro_name = row['details_distro_name']
        distro_version = row['details_distro_version']
        version_counts = project_data.setdefault(distro_name, {})
        version_counts[distro_version] = (
            version_counts.get(distro_version, 0) + int(row['dl_count']))
    return result
|
java
|
/**
 * Runs the connector bound to this service task: builds an integration
 * context from the execution and its action definition, applies the
 * connector, copies the resulting variables back onto the execution, and
 * leaves the activity.
 */
@Override
public void execute(DelegateExecution execution) {
    ActionDefinition actionDefinition = findRelatedActionDefinition(execution);
    Connector connector = getConnector(getImplementation(execution));
    IntegrationContext integrationContext = connector.apply(integrationContextBuilder.from(execution,
                                                                                           actionDefinition));
    execution.setVariables(outboundVariablesProvider.calculateVariables(integrationContext,
                                                                        actionDefinition));
    leave(execution);
}
|
python
|
def get_plugin_from_string(plugin_name):
    """
    Return the plugin or plugin point class named by ``plugin_name``.

    Example of ``plugin_name``::

        'my_app.MyPlugin'
    """
    # Split on the last dot: everything before it is the module path,
    # the remainder is the attribute to fetch from that module.
    module_path, attr_name = plugin_name.rsplit('.', 1)
    return getattr(import_module(module_path), attr_name)
|
java
|
private static SyntaxType register(String id, String name)
{
    // Create the syntax type and make it discoverable by its id.
    SyntaxType created = new SyntaxType(id, name);
    KNOWN_SYNTAX_TYPES.put(id, created);
    return created;
}
|
java
|
public static Object callMethod(Object obj, Collection.Key methodName, Object[] args, Object defaultValue) {
    // Null target: nothing to invoke, fall back to the caller-supplied default.
    if (obj == null) return defaultValue;

    // Resolve a matching method; when none exists, return the default instead of failing.
    MethodInstance method = getMethodInstanceEL(obj, obj.getClass(), methodName, args);
    if (method == null) return defaultValue;

    try {
        return method.invoke(obj);
    }
    catch (Throwable t) {
        // Let fatal throwables propagate; any other invocation failure maps to the default.
        ExceptionUtil.rethrowIfNecessary(t);
        return defaultValue;
    }
}
|
java
|
public static ActionImport getAndCheckActionImport(EntityDataModel entityDataModel, String actionImportName) {
    // Look the import up in the model's entity container; absence is treated
    // as a system-level configuration error.
    ActionImport found = entityDataModel.getEntityContainer().getActionImport(actionImportName);
    if (found != null) {
        return found;
    }
    throw new ODataSystemException("Action import not found in the entity data model: " + actionImportName);
}
|
java
|
protected FilePath findPullUpDirectory(FilePath root) throws IOException, InterruptedException {
    // A root whose sole entry is a directory is assumed to be the pull-up
    // subject; anything else is left as-is (signalled by returning null).
    List<FilePath> children = root.list();
    if (children.size() == 1) {
        FilePath only = children.get(0);
        if (only.isDirectory()) {
            return only;
        }
    }
    return null;
}
|
python
|
def cli_create(argument_list):
    """
    Command-line entry point to create a manifest from a JAR file or a
    directory.

    :param argument_list: arguments to parse, as from ``sys.argv[1:]``
    :return: None; the manifest is written to --manifest or stdout
    """

    parser = argparse.ArgumentParser()
    parser.add_argument("content", help="file or directory")

    # TODO: shouldn't we always process directories recursively?
    # NOTE(review): without action="store_true" this option consumes a value
    # (e.g. "-r 1"); fixing it would change the CLI, so it is left untouched.
    parser.add_argument("-r", "--recursive",
                        help="process directories recursively")

    parser.add_argument("-i", "--ignore", nargs="+", action="append",
                        help="patterns to ignore "
                        "(can be given more than once)")

    parser.add_argument("-m", "--manifest", default=None,
                        help="output file (default is stdout)")

    parser.add_argument("-d", "--digest",
                        help="digest(s) to use, comma-separated")

    args = parser.parse_args(argument_list)

    # TODO: remove digest from here, they are created when signing!
    if args.digest is None:
        args.digest = "MD5,SHA1"
    requested_digests = args.digest.split(",")
    use_digests = [_get_digest(digest) for digest in requested_digests]

    if args.recursive:
        entries = multi_path_generator(args.content)
    else:
        entries = single_path_generator(args.content)

    mf = Manifest()

    ignores = ["META-INF/*"]
    if args.ignore:
        # args.ignore is a list of lists (nargs="+" combined with
        # action="append"), so flatten it one sub-list at a time.  The
        # previous ignores.extend(*args.ignore) raised TypeError whenever
        # -i was given more than once.
        for patterns in args.ignore:
            ignores.extend(patterns)

    for name, chunks in entries:
        # skip the stuff that we were told to ignore
        if ignores and fnmatches(name, *ignores):
            continue

        sec = mf.create_section(name)

        digests = zip(requested_digests, digest_chunks(chunks(), use_digests))
        for digest_name, digest_value in digests:
            sec[digest_name + "-Digest"] = digest_value

    if args.manifest:
        # we'll output to the manifest file if specified, and we'll
        # even create parent directories for it, if necessary
        makedirsp(split(args.manifest)[0])
        output = open(args.manifest, "wb")
    else:
        output = sys.stdout

    # Close the output file even if storing the manifest fails part-way;
    # previously an exception in mf.store() leaked the file handle.
    try:
        mf.store(output)
    finally:
        if args.manifest:
            output.close()
|
python
|
def get_group(self, callb=None):
    """Convenience method to request the group from the device

    This method will check whether the value has already been retrieved from the device,
    if so, it will simply return it. If no, it will request the information from the device
    and request that callb be executed when a response is received. The default callback
    will simply cache the value.

    :param callb: Callable to be used when the response is received. If not set,
                  self.resp_set_group will be used.
    :type callb: callable
    :returns: The cached value
    :rtype: str
    """
    if self.group is None:
        mypartial = partial(self.resp_set_group)
        # Compose the caching callback with the caller's callback (if any)
        # so the response always updates self.group.
        if callb:
            mycallb = lambda x, y: (mypartial(y), callb(x, y))
        else:
            mycallb = lambda x, y: mypartial(y)
        # Bug fix: previously callb was passed here instead of mycallb, so
        # the composed callback was built but never used and the group value
        # was never cached.
        response = self.req_with_resp(GetGroup, StateGroup, callb=mycallb)
    return self.group
|
java
|
public CDACallback<Transformed> one(String id, CDACallback<Transformed> callback) {
    // Fetch one entry by id, keep it only when it belongs to this module's
    // content type, transform it, and deliver the result asynchronously.
    return Callbacks.subscribeAsync(
        baseQuery()
            .one(id)
            .filter(entry -> entry.contentType().id().equals(contentTypeId))
            .map(entry -> transform(entry)),
        callback,
        client);
}
|
java
|
public OvhOrder freefax_new_GET(OvhQuantityEnum quantity) throws IOException {
    // Build the request path plus query string, issue the GET call, and
    // deserialize the JSON response into an OvhOrder.
    String requestPath = "/order/freefax/new";
    StringBuilder queryBuilder = path(requestPath);
    query(queryBuilder, "quantity", quantity);
    String rawResponse = exec(requestPath, "GET", queryBuilder.toString(), null);
    return convertTo(rawResponse, OvhOrder.class);
}
|
java
|
/**
 * Removes the CP option category with the given primary key from the
 * database and returns the removed entity.
 *
 * @param primaryKey the primary key of the CP option category to remove
 * @return the removed CP option category
 * @throws NoSuchCPOptionCategoryException if no entity with that primary
 *         key exists
 */
@Override
public CPOptionCategory remove(Serializable primaryKey)
    throws NoSuchCPOptionCategoryException {
    Session session = null;

    try {
        session = openSession();

        // Load the entity by primary key; null means it does not exist.
        CPOptionCategory cpOptionCategory = (CPOptionCategory)session.get(CPOptionCategoryImpl.class,
                primaryKey);

        if (cpOptionCategory == null) {
            if (_log.isDebugEnabled()) {
                _log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey);
            }

            throw new NoSuchCPOptionCategoryException(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY +
                primaryKey);
        }

        // Delegate the actual deletion to the entity-based remove().
        return remove(cpOptionCategory);
    }
    catch (NoSuchCPOptionCategoryException nsee) {
        // Re-throw the "not found" case untouched rather than wrapping it.
        throw nsee;
    }
    catch (Exception e) {
        // Wrap any other persistence failure in the portal's system exception.
        throw processException(e);
    }
    finally {
        // Always release the session, even on failure.
        closeSession(session);
    }
}
|
python
|
def find_library_windows(cls):
    """Locates SEGGER J-Link DLLs under the Windows installation roots.

    On Windows, these are found either under:
      - ``C:\\Program Files\\SEGGER\\JLink``
      - ``C:\\Program Files (x86)\\SEGGER\\JLink``.

    Args:
      cls (Library): the ``Library`` class

    Returns:
      The paths to the J-Link library files in the order that they are
      found.
    """
    dll_name = cls.get_appropriate_windows_sdk_name() + '.dll'
    root = 'C:\\'

    for entry in os.listdir(root):
        # Only the two "Program Files" variants can hold the SEGGER tree.
        if not entry.startswith('Program Files'):
            continue
        program_files = os.path.join(root, entry)
        if not os.path.isdir(program_files):
            continue

        segger_dir = os.path.join(program_files, 'SEGGER')
        if not os.path.isdir(segger_dir):
            continue

        # Each versioned install lives in its own "JLink..." directory; the
        # DLL name is constant, so yield whichever copies exist.
        for versioned in os.listdir(segger_dir):
            if not versioned.startswith('JLink'):
                continue
            candidate = os.path.join(segger_dir, versioned, dll_name)
            if os.path.isfile(candidate):
                yield candidate
|
python
|
def create_connection(self):
    """Connect to ``self._server:self._port`` and return the socket.

    Mirrors the stdlib ``socket.create_connection`` fallback loop: every
    address returned by ``getaddrinfo`` is tried in turn, and the last
    ``socket.error`` is re-raised if none of them connect.

    See: https://github.com/python/cpython/blob/40ee9a3640d702bce127e9877c82a99ce817f0d1/Lib/socket.py#L691

    :returns: a connected socket object
    :raises socket.error: if no address connects, no addresses are found,
        or DNS resolution fails
    """
    err = None
    try:
        # getaddrinfo may yield several candidates (e.g. both IPv4 and
        # IPv6); try each until one connects.
        for res in socket.getaddrinfo(self._server, self._port, 0, self._sock_type):
            af, socktype, proto, canonname, sa = res
            sock = None
            try:
                sock = socket.socket(af, socktype, proto)
                sock.settimeout(self._timeout)
                sock.connect(sa)
                # Break explicitly a reference cycle
                err = None
                return sock
            except socket.error as _:
                # Remember the failure and fall through to the next candidate.
                err = _
                if sock is not None:
                    sock.close()
        if err is not None:
            # Every candidate failed; surface the last connection error.
            raise err
        else:
            # getaddrinfo returned nothing at all.
            raise socket.error('No valid addresses found, try checking your IPv6 connectivity')
    except socket.gaierror as e:
        err_code, message = e.args
        # Translate common DNS failures into a friendlier socket.error.
        if err_code == socket.EAI_NODATA or err_code == socket.EAI_NONAME:
            raise socket.error('Unable to resolve host, check your DNS: {}'.format(message))
        raise
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.