language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
java
|
/**
 * Stores {@code value} for the given locale, returning any previously
 * stored value. A {@code null} value removes the locale's entry instead.
 * The first locale ever passed to this method becomes the default locale.
 */
public E set(E value, Locale locale) {
    // Remember the first locale used as the default.
    if (defaultLocale == null) {
        defaultLocale = locale;
    }
    // A null value clears the mapping for this locale.
    if (value == null) {
        values.remove(locale);
        return null;
    }
    return values.put(locale, value);
}
|
python
|
def find_zero_constrained_reactions(model):
    """Return list of reactions that are constrained to zero flux.

    A reaction is zero-constrained when both its lower and upper flux
    bounds equal zero.

    :param model: object exposing an iterable ``reactions`` attribute,
        where each reaction has ``lower_bound`` and ``upper_bound``.
    :return: list of the zero-constrained reaction objects.
    """
    zero_constrained = []
    for reaction in model.reactions:
        if reaction.lower_bound == 0 and reaction.upper_bound == 0:
            zero_constrained.append(reaction)
    return zero_constrained
|
python
|
def make_epub(binders, file):
    """Creates an EPUB file from a binder(s).

    :param binders: a single binder or a list/set/tuple of binders.
    :param file: destination passed through to ``EPUB.to_file``.
    """
    # Normalize a lone binder into a one-element list.
    if not isinstance(binders, (list, set, tuple,)):
        binders = [binders]
    # Build one package per binder and wrap them in an EPUB container.
    epub = EPUB([_make_package(binder) for binder in binders])
    # NOTE(review): ``epub.to_file(epub, file)`` passes the instance twice
    # (once implicitly as ``self``). Confirm ``to_file`` is a static/class
    # method; otherwise this should likely be ``epub.to_file(file)``.
    epub.to_file(epub, file)
|
java
|
/**
 * Converts a WebSocket URI ({@code ws}/{@code wss}) to the equivalent
 * HTTP URI ({@code http}/{@code https}). URIs that already use an HTTP
 * scheme are returned unchanged.
 *
 * @param inputUri the URI to convert; must not be null
 * @return the HTTP(S) form of the URI
 * @throws URISyntaxException if the scheme is not http, https, ws or wss
 */
public static URI toHttp(final URI inputUri) throws URISyntaxException {
    Objects.requireNonNull(inputUri, "Input URI must not be null");
    final String scheme = inputUri.getScheme();
    if ("http".equalsIgnoreCase(scheme) || "https".equalsIgnoreCase(scheme)) {
        // Already an HTTP(S) URI; nothing to convert.
        return inputUri;
    }
    if ("ws".equalsIgnoreCase(scheme)) {
        // Swap the scheme prefix, keeping everything after it intact.
        return new URI("http" + inputUri.toString().substring(scheme.length()));
    }
    if ("wss".equalsIgnoreCase(scheme)) {
        return new URI("https" + inputUri.toString().substring(scheme.length()));
    }
    throw new URISyntaxException(inputUri.toString(), "Unrecognized WebSocket scheme");
}
|
python
|
def scalar_reshape(a, newshape, order='C'):
    """
    Reshape, but also return scalars or empty lists.
    Identical to `numpy.reshape` except in the case where `newshape` is
    the empty tuple, in which case we return a scalar instead of a
    0-dimensional array.

    Examples
    --------
    >>> a = np.arange(6)
    >>> np.array_equal(np.reshape(a, (3, 2)), scalar_reshape(a, (3, 2)))
    True
    >>> scalar_reshape(np.array([3.14]), newshape=())
    3.14
    >>> scalar_reshape(np.array([]), newshape=(0,))
    []
    """
    if newshape == ():
        # Bug fix: np.asscalar was deprecated in NumPy 1.16 and removed in
        # 1.23; ndarray.item() is the documented equivalent.
        return np.asarray(a).item()
    if newshape == (0,):
        # An empty target shape yields a plain (empty) list, not an array.
        return []
    return np.reshape(a, newshape, order=order)
|
python
|
def py2dict(elements):
    """Convert a Python object into a Python dictionary.

    Maps each child element's ``tag`` to a list of entries. Each entry may
    carry a ``qualifier`` and ``content``: nested children become a
    ``{child_tag: child_content}`` dict, otherwise the element's own
    non-blank text is used. Entries with no content are discarded (the tag
    still gets an empty list).
    """
    metadata_dict = {}
    for element in elements.children:
        # Ensure a list bucket exists for this tag.
        metadata_dict.setdefault(element.tag, [])
        entry = {}
        qualifier = getattr(element, 'qualifier', None)
        if qualifier is not None:
            entry['qualifier'] = qualifier
        if len(element.children) > 0:
            # Nested elements: content is a dict of the children's text.
            entry['content'] = {
                child.tag: child.content
                for child in element.children
                if child.content is not None
            }
        elif element.content is not None and element.content.strip() != '':
            # Leaf element: use its own (non-blank) text content.
            entry['content'] = element.content
        # Only keep entries that actually carry content (empty dicts and
        # missing content are both treated as "no content").
        if entry.get('content', False):
            metadata_dict[element.tag].append(entry)
    return metadata_dict
|
python
|
def hide_routemap_holder_route_map_content_set_dampening_half_life(self, **kwargs):
    """Auto Generated Code
    """
    # Build the NETCONF <config> payload setting the dampening half-life
    # on one route-map instance, then hand it to the callback.
    config = ET.Element("config")
    holder = ET.SubElement(config, "hide-routemap-holder",
                           xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
    route_map = ET.SubElement(holder, "route-map")
    # Keys identifying the target route-map entry.
    ET.SubElement(route_map, "name").text = kwargs.pop('name')
    ET.SubElement(route_map, "action-rm").text = kwargs.pop('action_rm')
    ET.SubElement(route_map, "instance").text = kwargs.pop('instance')
    # Nested payload path: content/set/dampening/half-life.
    content = ET.SubElement(route_map, "content")
    set_elem = ET.SubElement(content, "set")
    dampening = ET.SubElement(set_elem, "dampening")
    ET.SubElement(dampening, "half-life").text = kwargs.pop('half_life')
    # Caller may override the connection callback.
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
java
|
/**
 * Builds a lazily-fetching iterable over the user's recent Box items.
 *
 * @param api    the API connection requests are made on
 * @param limit  maximum number of items the iterable will produce
 * @param fields optional response fields; when non-empty they are sent
 *               as a "fields" query parameter
 * @return an iterable of {@link BoxRecentItem}
 */
public static BoxResourceIterable<BoxRecentItem> getRecentItems(final BoxAPIConnection api,
        int limit, String... fields) {
    QueryStringBuilder builder = new QueryStringBuilder();
    if (fields.length > 0) {
        builder.appendParam("fields", fields);
    }
    return new BoxResourceIterable<BoxRecentItem>(
            api, RECENTS_URL_TEMPLATE.buildWithQuery(api.getBaseURL(), builder.toString()),
            limit) {
        @Override
        protected BoxRecentItem factory(JsonObject jsonObject) {
            // Each JSON page entry is materialized bound to this connection.
            return new BoxRecentItem(jsonObject, api);
        }
    };
}
|
python
|
def _toMathInfo(self, guidelines=True):
    """
    Subclasses may override this method.

    Build a ``fontMath.MathInfo`` wrapper for this info object,
    optionally including the parent font's guidelines.
    """
    import fontMath
    # A little trickery is needed here because MathInfo
    # handles font level guidelines. Those are not in this
    # object so we temporarily fake them just enough for
    # MathInfo and then move them back to the proper place.
    self.guidelines = []
    if guidelines:
        # Copy each font-level guideline into a plain dict so MathInfo
        # can read them as if they lived on this object.
        for guideline in self.font.guidelines:
            d = dict(
                x=guideline.x,
                y=guideline.y,
                angle=guideline.angle,
                name=guideline.name,
                identifier=guideline.identifier,
                color=guideline.color
            )
            self.guidelines.append(d)
    info = fontMath.MathInfo(self)
    # Remove the temporary attribute so this object is left unchanged.
    del self.guidelines
    return info
|
java
|
@SuppressWarnings("unchecked")
public static <E> E get(Properties props, String key, E defaultValue, Type type) {
String value = props.getProperty(key);
if (value == null) {
return defaultValue;
} else {
return (E) MetaClass.cast(value, type);
}
}
|
python
|
def update_status_with_media(self, **params): # pragma: no cover
    """Updates the authenticating user's current status and attaches media
    for upload. In other words, it creates a Tweet with a picture attached.

    Deprecated: emits a ``TwythonDeprecationWarning`` pointing at
    ``Twython.upload_media`` before forwarding the request.

    Docs:
    https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-update_with_media
    """
    # Warn at the caller's frame (stacklevel=2) so the deprecation points
    # at user code rather than this wrapper.
    warnings.warn(
        'This method is deprecated. You should use Twython.upload_media instead.',
        TwythonDeprecationWarning,
        stacklevel=2
    )
    # All keyword arguments are forwarded unchanged to the endpoint.
    return self.post('statuses/update_with_media', params=params)
|
python
|
def _get_server(vm_, volumes, nics):
    '''
    Construct server instance from cloud profile config

    :param vm_: the cloud profile/VM configuration dict.
    :param volumes: volumes to create with the server.
    :param nics: network interfaces to attach.
    :return: a ``Server`` object built from the profile values.
    '''
    # Apply component overrides to the size from the cloud profile config
    vm_size = _override_size(vm_)
    # Set the server availability zone from the cloud profile config
    availability_zone = config.get_cloud_config_value(
        'availability_zone', vm_, __opts__, default=None,
        search_global=False
    )
    # Assign CPU family from the cloud profile config
    cpu_family = config.get_cloud_config_value(
        'cpu_family', vm_, __opts__, default=None,
        search_global=False
    )
    # Construct server object
    return Server(
        name=vm_['name'],
        ram=vm_size['ram'],
        availability_zone=availability_zone,
        cores=vm_size['cores'],
        cpu_family=cpu_family,
        create_volumes=volumes,
        nics=nics
    )
|
python
|
def convert_to_env_data(mgr, env_paths, validator_func, activate_func,
                        name_template, display_name_template, name_prefix):
    """Converts a list of paths to environments to env_data.

    env_data is a structure {name -> (resource dir, kernel spec)}

    :param mgr: kernel-spec manager; used for logging.
    :param env_paths: iterable of environment directory paths.
    :param validator_func: called with an env dir; returns
        (argv, language, resource_dir) — argv is falsy when the env does
        not contain a usable kernel.
    :param activate_func: called lazily with (mgr, env_dir) to produce the
        environment variables for the kernel.
    :param name_template: format template for the kernel name.
    :param display_name_template: format template for the display name.
    :param name_prefix: prefix prepended to the env directory name.
    """
    env_data = {}
    for venv_dir in env_paths:
        # Kernel name derives from the env directory's basename.
        venv_name = os.path.split(os.path.abspath(venv_dir))[1]
        kernel_name = name_template.format(name_prefix + venv_name)
        kernel_name = kernel_name.lower()
        # First environment with a given name wins; later duplicates are skipped.
        if kernel_name in env_data:
            mgr.log.debug(
                "Found duplicate env kernel: %s, which would again point to %s. Using the first!",
                kernel_name, venv_dir)
            continue
        argv, language, resource_dir = validator_func(venv_dir)
        if not argv:
            # probably does not contain the kernel type (e.g. not R or python or does not contain
            # the kernel code itself)
            continue
        display_name = display_name_template.format(kernel_name)
        kspec_dict = {"argv": argv, "language": language,
                      "display_name": display_name,
                      "resource_dir": resource_dir
                      }
        # the default vars are needed to save the vars in the function context
        # (otherwise every loader closure would share the last loop values)
        def loader(env_dir=venv_dir, activate_func=activate_func, mgr=mgr):
            mgr.log.debug("Loading env data for %s" % env_dir)
            res = activate_func(mgr, env_dir)
            # mgr.log.info("PATH: %s" % res['PATH'])
            return res
        kspec = EnvironmentLoadingKernelSpec(loader, **kspec_dict)
        env_data.update({kernel_name: (resource_dir, kspec)})
    return env_data
|
java
|
/**
 * Sets the window title to "&lt;title&gt; - &lt;program&gt; &lt;version&gt;".
 * The "&lt;title&gt; - " prefix is omitted when no title is supplied.
 *
 * @param title the context-specific title part; may be null or empty
 * @deprecated retained for compatibility with the overridden API
 */
@Override
@Deprecated
public void setTitle(String title) {
    final StringBuilder fullTitle = new StringBuilder();
    if (title != null && !title.isEmpty()) {
        fullTitle.append(title).append(" - ");
    }
    // Always append the program name and version suffix.
    fullTitle.append(Constant.PROGRAM_NAME).append(' ').append(Constant.PROGRAM_VERSION);
    super.setTitle(fullTitle.toString());
}
|
python
|
def construct_context(self, request):
    """
    Builds context with various required variables.

    :param request: the current HTTP request; ``request.user`` is exposed
        to the template.
    :return: dict of template context variables (form, media, admin opts,
        changelist URL, and optionally an ``adminform``).
    """
    opts = self.model._meta
    app_label = opts.app_label
    object_name = opts.object_name.lower()
    form = self.construct_form(request)
    media = self.media(form)
    context = {
        'user': request.user,
        'title': '%s %s' % (self.label, opts.verbose_name_plural.lower()),
        'tool': self,
        'opts': opts,
        'app_label': app_label,
        'media': media,
        'form': form,
        # Link back to the model's admin changelist.
        'changelist_url': reverse('admin:%s_%s_changelist' % (
            app_label, object_name
        ))
    }
    # Pass along fieldset if specified.
    if hasattr(form, 'fieldsets'):
        admin_form = helpers.AdminForm(form, form.fieldsets, {})
        context['adminform'] = admin_form
    return context
|
python
|
def add_nic(self, nic, sync=True):
    """
    add a nic to this OS instance.
    :param nic: the nic to add on this OS instance
    :param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
    add the nic object on list to be added on next save().
    :return:
    """
    LOGGER.debug("OSInstance.add_nic")
    if not sync:
        # Deferred mode: queue the NIC for the next save().
        self.nic_2_add.append(nic)
    else:
        # Persist the NIC first so it has a server-side id.
        if nic.id is None:
            nic.save()
        if self.id is not None and nic.id is not None:
            params = {
                'id': self.id,
                'nicID': nic.id
            }
            args = {'http_operation': 'GET', 'operation_path': 'update/nics/add', 'parameters': params}
            response = OSInstanceService.requester.call(args)
            # rc != 0 signals a server-side failure; log but do not raise.
            if response.rc != 0:
                LOGGER.warning(
                    'OSInstance.add_nic - Problem while updating OS instance ' + self.name +
                    '. Reason: ' + str(response.response_content) + '-' + str(response.error_message) +
                    " (" + str(response.rc) + ")"
                )
            else:
                # Keep the local bidirectional link in sync with the server.
                self.nic_ids.append(nic.id)
                nic.nic_osi_id = self.id
        else:
            LOGGER.warning(
                'OSInstance.add_nic - Problem while updating OS instance ' + self.name +
                '. Reason: NIC ' + nic.name + ' id is None'
            )
|
java
|
/**
 * Sets or clears the EMPTY flag bit in the flags byte of {@code wmem}.
 *
 * @param wmem  the target memory whose flags byte is updated in place
 * @param empty true to set the EMPTY flag, false to clear it
 */
static void insertEmptyFlag(final WritableMemory wmem, final boolean empty) {
    final int oldFlags = wmem.getByte(FLAGS_BYTE);
    // Set or clear only the EMPTY bit, leaving the other flag bits intact.
    final int newFlags = empty ? (oldFlags | EMPTY_FLAG_MASK) : (oldFlags & ~EMPTY_FLAG_MASK);
    wmem.putByte(FLAGS_BYTE, (byte) newFlags);
}
|
java
|
/**
 * Returns true if no character of {@code string} satisfies the predicate.
 *
 * @param string    the string whose characters are tested
 * @param predicate the per-character predicate
 * @return true when no character satisfies {@code predicate}
 * @deprecated delegate; use {@link StringIterate#noneSatisfyChar(String, CharPredicate)} directly.
 */
@Deprecated
public static boolean noneSatisfy(String string, CharPredicate predicate)
{
    return StringIterate.noneSatisfyChar(string, predicate);
}
|
java
|
/**
 * Resolves a field that references other resource(s) by path.
 * Collection-typed fields resolve each stored path into a collection of
 * resources; scalar fields resolve a single path (optionally with an
 * appended sub-path) into one resource.
 *
 * @param field the field metadata/value holder to resolve
 * @return the referenced resource(s), or null when no path is stored
 */
private Object resolveReferenceValueOfField(FieldData field) {
    Object value = null;
    // Regardless of its path, the field references another resource.
    // fetch the field value (the path(s) to the referenced resource(s)) and resolve these resources.
    if (field.metaData.isCollectionType()) {
        String[] referencedResourcePaths = resolvePropertyTypedValue(field, String[].class);
        if (referencedResourcePaths != null) {
            value = createCollectionOfReferences(field, referencedResourcePaths);
        }
    } else {
        String referencedResourcePath = resolvePropertyTypedValue(field, String.class);
        if (referencedResourcePath != null) {
            // Optionally extend the stored path before resolving it.
            if (field.metaData.isAppendPathPresentOnReference()) {
                referencedResourcePath += field.metaData.getAppendPathOnReference();
            }
            value = resolveResource(referencedResourcePath, field.metaData.getType());
        }
    }
    return value;
}
|
java
|
/**
 * Sets the HTML body of this e-mail.
 *
 * @param aHtml the HTML message body; must be non-empty
 * @return this instance, for call chaining
 * @throws EmailException when {@code aHtml} is empty/missing
 */
public HtmlEmailImpl setHtmlMsg(String aHtml) throws EmailException {
    // Reject missing bodies up front.
    if (StringUtil.isEmpty(aHtml)) {
        throw new EmailException("Invalid message supplied");
    }
    this.html = aHtml;
    return this;
}
|
java
|
/**
 * Factory for an {@link Attribute} with custom getter/setter definitions.
 *
 * @param name      the attribute name
 * @param customGet custom getter definition (semantics defined by Attribute)
 * @param customSet custom setter definition (semantics defined by Attribute)
 * @return a new Attribute instance
 */
public static Attribute attribute(String name, String customGet, String customSet){
    return new Attribute(name, customGet, customSet);
}
|
java
|
/**
 * Deletes the named instance group manager by building the request object
 * and delegating to the request-based overload.
 *
 * @param instanceGroupManager the name/identifier of the manager to delete
 * @return the resulting {@link Operation}
 */
@BetaApi
public final Operation deleteInstanceGroupManager(String instanceGroupManager) {
    DeleteInstanceGroupManagerHttpRequest request =
        DeleteInstanceGroupManagerHttpRequest.newBuilder()
            .setInstanceGroupManager(instanceGroupManager)
            .build();
    return deleteInstanceGroupManager(request);
}
|
java
|
/**
 * Determines whether two dense ranges can be merged into one.
 * They are mergeable when they overlap, or when {@code current}'s end
 * compares equal (under the comparator) to {@code next}'s begin — i.e.
 * the ranges are adjacent.
 * NOTE(review): assumes {@code current.end()} is comparable against
 * {@code Optional.of(next.begin())} via {@code Order.of} — confirm the
 * end() representation.
 */
private boolean canBeMerged(DenseRange<T> current, DenseRange<T> next) {
    return Order.of(comparator, current.end(), Optional.of(next.begin())) == Order.EQ || current.overlaps(next);
}
|
python
|
def pasa(args):
    """
    %prog pasa pasa_db fastafile

    Run EVM in TIGR-only mode.

    Requires the $ANNOT_DEVEL and $EVM environment variables to point at
    the PASA2/EVM installations; shells out to their scripts via sh().
    Returns the path of the generated terminal-exons GFF3 file.
    """
    p = OptionParser(pasa.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    pasa_db, fastafile = args
    termexons = "pasa.terminal_exons.gff3"
    # Regenerate only when the FASTA is newer than the output GFF3.
    if need_update(fastafile, termexons):
        # Build the PASA training-set candidates from the database.
        cmd = "$ANNOT_DEVEL/PASA2/scripts/pasa_asmbls_to_training_set.dbi"
        cmd += ' -M "{0}:mysql.tigr.org" -p "access:access"'.format(pasa_db)
        cmd += ' -g {0}'.format(fastafile)
        sh(cmd)
        # Extract the terminal CDS exons into the output file.
        cmd = "$EVM/PasaUtils/retrieve_terminal_CDS_exons.pl"
        cmd += " trainingSetCandidates.fasta trainingSetCandidates.gff"
        sh(cmd, outfile=termexons)
    return termexons
|
python
|
def _find_set_info(set):
    '''
    Return information about the set

    Runs ``ipset list -t <set>`` and parses the "Key: value" header lines
    into a dict. Returns False when the set does not exist.
    '''
    # Query ipset for the set's header information only (-t).
    cmd = '{0} list -t {1}'.format(_ipset_cmd(), set)
    out = __salt__['cmd.run_all'](cmd, python_shell=False)
    # A non-zero return code means the set does not exist.
    if out['retcode'] > 0:
        return False
    setinfo = {}
    for line in out['stdout'].split('\n'):
        # Only parse "Key: value" lines; the value drops the single space
        # following the colon.
        if ':' not in line:
            continue
        key, value = line.split(':', 1)
        setinfo[key] = value[1:]
    return setinfo
|
java
|
/**
 * Asynchronously updates the tags of a virtual network gateway connection.
 *
 * @param resourceGroupName the resource group containing the connection
 * @param virtualNetworkGatewayConnectionName the connection to update
 * @param serviceCallback invoked when the service call completes
 * @return a {@link ServiceFuture} wrapping the async response
 */
public ServiceFuture<VirtualNetworkGatewayConnectionInner> updateTagsAsync(String resourceGroupName, String virtualNetworkGatewayConnectionName, final ServiceCallback<VirtualNetworkGatewayConnectionInner> serviceCallback) {
    return ServiceFuture.fromResponse(updateTagsWithServiceResponseAsync(resourceGroupName, virtualNetworkGatewayConnectionName), serviceCallback);
}
|
java
|
/**
 * Evaluates whether the (possibly redirected) agent is a human whose body
 * posture matches {@code bodyState}.
 * When {@code bodyId} is set, the agent with that id is looked up and
 * evaluated instead of {@code currentAgent}; a missing agent fails the
 * evaluation. Non-human agents always evaluate to false.
 */
@Override
public boolean simpleEvaluation(Agent currentAgent) {
    Agent agent = currentAgent;
    if (bodyId != null) {
        // Redirect the check to the referenced body agent.
        agent = agent.getAgentsAppState().getAgent(bodyId);
        if(agent == null) {
            logger.log(Level.SEVERE, "Body {0} does not exists!", new Object[]{bodyId});
            return false;
        }
    }
    // Posture comparison only applies to human agents.
    if(agent instanceof HumanAgent) {
        return ((HumanAgent)agent).getBodyPosture().name().equals(bodyState);
    }
    return false;
}
|
java
|
/**
 * Removes separators that lie on the margins of the root area:
 * horizontal separators touching the root's top/bottom edge and vertical
 * separators touching the root's left/right edge are dropped from
 * {@code hsep} / {@code vsep} in place.
 */
protected void filterMarginalSeparators()
{
    // Iterator-based removal is required while traversing the lists.
    for (Iterator<Separator> it = hsep.iterator(); it.hasNext();)
    {
        Separator sep = it.next();
        if (sep.getY1() == root.getY1() || sep.getY2() == root.getY2())
            it.remove();
    }
    for (Iterator<Separator> it = vsep.iterator(); it.hasNext();)
    {
        Separator sep = it.next();
        if (sep.getX1() == root.getX1() || sep.getX2() == root.getX2())
            it.remove();
    }
}
|
java
|
/**
 * Returns the serializable substitute for {@code type}, caching lookups.
 * Falls back to {@code type} itself when no substitute is found.
 * NOTE(review): the check-then-put on {@code serializableTypes} is not
 * atomic; whether that matters depends on the map implementation and the
 * callers' threading — confirm.
 */
private static Class<?> lookupSerializableType(Class<?> type) {
    // Fast path: cached result.
    Class<?> serializableType = serializableTypes.get(type);
    if (serializableType != null) {
        return serializableType;
    }
    // Slow path: compute and cache.
    serializableType = findSerializableType(type);
    if (serializableType != null) {
        serializableTypes.put(type, serializableType);
        return serializableType;
    }
    // No substitute exists; use the type as-is (not cached).
    return type;
}
|
python
|
def sortWithinPartitions(self, *cols, **kwargs):
    """Returns a new :class:`DataFrame` with each partition sorted by the specified column(s).

    :param cols: list of :class:`Column` or column names to sort by.
    :param ascending: boolean or list of boolean (default True).
        Sort ascending vs. descending. Specify list for multiple sort orders.
        If a list is specified, length of the list must equal length of the `cols`.

    >>> df.sortWithinPartitions("age", ascending=False).show()
    +---+-----+
    |age| name|
    +---+-----+
    |  2|Alice|
    |  5|  Bob|
    +---+-----+
    """
    # Delegate column/order translation to _sort_cols, then call the JVM
    # DataFrame and wrap the result back into a Python DataFrame.
    jdf = self._jdf.sortWithinPartitions(self._sort_cols(cols, kwargs))
    return DataFrame(jdf, self.sql_ctx)
|
java
|
/**
 * Adds description attributes to an LDAP entry.
 * Three encodings are used: a null varargs array stores an EMPTY flag of
 * "false"; a single empty string stores an EMPTY flag of "true"; any other
 * input is joined with the configured separator into the string attribute.
 * NOTE(review): the exact meaning of the EMPTY flag values is defined by
 * the reader side — confirm against the corresponding parse code.
 *
 * @param entry        the entry to modify
 * @param descriptions the descriptions to encode; may be null
 * @throws LdapRuntimeException wrapping any {@link LdapException}
 */
private static void addDescriptions(Entry entry, String... descriptions) {
    try {
        entry.add(SchemaConstants.OBJECT_CLASS_ATTRIBUTE, SchemaConstants.DESCRIPTIVE_OBJECT_OC);
        if (descriptions == null) {
            // case 1: no descriptions at all -> EMPTY flag false
            entry.add(SchemaConstants.EMPTY_FLAG_ATTRIBUTE, String.valueOf(false));
        } else if (descriptions.length == 1 && descriptions[0].isEmpty()) {
            // case 2: a single empty description -> EMPTY flag true
            entry.add(SchemaConstants.EMPTY_FLAG_ATTRIBUTE, String.valueOf(true));
        } else {
            // case 3: real descriptions -> joined into one attribute value
            entry.add(SchemaConstants.STRING_ATTRIBUTE,
                StringUtils.join(descriptions, ServerConfig.multipleValueSeparator));
        }
    } catch (LdapException e) {
        throw new LdapRuntimeException(e);
    }
}
|
python
|
def remove_sub(self, sub):
    """
    Remove all references to a specific Subject ID

    :param sub: A Subject ID
    """
    # Detach the subject from every session that maps to it, then drop
    # the subject's own session index.
    for session_id in self.get('sub2sid', sub):
        self.remove('sid2sub', session_id, sub)
    self.delete('sub2sid', sub)
|
python
|
def get_messenger(config):
    """Return an appropriate Messenger.

    If we're in debug mode, or email settings aren't set, return a debug
    version which logs the message instead of attempting to send a real
    email.

    :param config: mapping providing "mode" and the email settings read
        by ``EmailConfig``.
    :return: a ``DebugMessenger`` or ``EmailingMessenger``.
    """
    email_settings = EmailConfig(config)
    # Debug mode always logs instead of emailing.
    if config.get("mode") == "debug":
        return DebugMessenger(email_settings)
    # Invalid/incomplete email settings also fall back to logging.
    problems = email_settings.validate()
    if problems:
        logger.info(problems + " Will log errors instead of emailing them.")
        return DebugMessenger(email_settings)
    return EmailingMessenger(email_settings)
|
java
|
/**
 * Creates a builder for an {@link LTieLongFunction}, passing each produced
 * function to {@code consumer}.
 *
 * @param consumer receives the function instance produced by the builder
 * @param <T> the type of the function's object argument
 * @return a new builder
 */
@Nonnull
public static <T> LTieLongFunctionBuilder<T> tieLongFunction(Consumer<LTieLongFunction<T>> consumer) {
	// Fix: instantiate with the type argument instead of the raw type,
	// avoiding an unchecked-conversion warning.
	return new LTieLongFunctionBuilder<T>(consumer);
}
|
java
|
/**
 * Sets a CLOB parameter by delegating to
 * {@code setCharacterStream(int, Reader, long)} — the driver treats the
 * two identically.
 *
 * @param parameterIndex 1-based index of the parameter to set
 * @param reader         the character stream supplying the CLOB data
 * @param length         number of characters in the stream
 * @throws SQLException propagated from setCharacterStream
 */
public void setClob(final int parameterIndex, final Reader reader, final long length)
    throws SQLException {
    setCharacterStream(parameterIndex, reader, length);
}
|
java
|
/**
 * Marshalls a {@link SkillGroupData} instance into the given protocol
 * marshaller (ARN, name and description bindings).
 *
 * @param skillGroupData     the object to marshall; must not be null
 * @param protocolMarshaller the target marshaller
 * @throws SdkClientException when the input is null or marshalling fails
 */
public void marshall(SkillGroupData skillGroupData, ProtocolMarshaller protocolMarshaller) {
    if (skillGroupData == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(skillGroupData.getSkillGroupArn(), SKILLGROUPARN_BINDING);
        protocolMarshaller.marshall(skillGroupData.getSkillGroupName(), SKILLGROUPNAME_BINDING);
        protocolMarshaller.marshall(skillGroupData.getDescription(), DESCRIPTION_BINDING);
    } catch (Exception e) {
        // Wrap every marshalling failure in the SDK's client exception.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Creates a write-behind {@link MapDataStore} for one partition, wiring
 * in a write-behind queue (coalescing per the map-store config) and the
 * supplied processor.
 *
 * @param mapStoreContext      map store context providing service/config access
 * @param partitionId          the partition this store serves
 * @param writeBehindProcessor processor that flushes queued entries
 * @return the configured write-behind store
 */
public static <K, V> MapDataStore<K, V> createWriteBehindStore(MapStoreContext mapStoreContext, int partitionId,
                                                               WriteBehindProcessor writeBehindProcessor) {
    MapServiceContext mapServiceContext = mapStoreContext.getMapServiceContext();
    NodeEngine nodeEngine = mapServiceContext.getNodeEngine();
    MapStoreConfig mapStoreConfig = mapStoreContext.getMapStoreConfig();
    InternalSerializationService serializationService
        = ((InternalSerializationService) nodeEngine.getSerializationService());
    WriteBehindStore mapDataStore = new WriteBehindStore(mapStoreContext, partitionId, serializationService);
    // Queue type (coalescing vs. bounded) follows the map-store config.
    mapDataStore.setWriteBehindQueue(newWriteBehindQueue(mapServiceContext, mapStoreConfig.isWriteCoalescing()));
    mapDataStore.setWriteBehindProcessor(writeBehindProcessor);
    return (MapDataStore<K, V>) mapDataStore;
}
|
java
|
/**
 * Serializes a {@code Parameter} semantic object: validates that its
 * required "name" feature is present, then feeds it to the sequencer.
 *
 * @param context        the serialization context
 * @param semanticObject the Parameter being serialized
 */
protected void sequence_Parameter(ISerializationContext context, Parameter semanticObject) {
    if (errorAcceptor != null) {
        // Report a diagnostic when the mandatory name feature is missing.
        if (transientValues.isValueTransient(semanticObject, XtextPackage.Literals.PARAMETER__NAME) == ValueTransient.YES)
            errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, XtextPackage.Literals.PARAMETER__NAME));
    }
    SequenceFeeder feeder = createSequencerFeeder(context, semanticObject);
    feeder.accept(grammarAccess.getParameterAccess().getNameIDTerminalRuleCall_0(), semanticObject.getName());
    feeder.finish();
}
|
python
|
def replace(self, photo, photo_file, **kwds):
    """
    Endpoint: /photo/<id>/replace.json

    Uploads the specified photo file to replace an existing photo.

    :param photo: existing photo (object or id) whose image is replaced.
    :param photo_file: path of the local file to upload.
    :param kwds: extra parameters forwarded to the API call.
    :return: a ``Photo`` built from the server's response.
    """
    # Stream the file as multipart form data under the 'photo' field.
    with open(photo_file, 'rb') as in_file:
        result = self._client.post("/photo/%s/replace.json" %
                                   self._extract_id(photo),
                                   files={'photo': in_file},
                                   **kwds)["result"]
    return Photo(self._client, result)
|
python
|
def estimate_completion(self):
    """
    Estimate completion time for a task.

    :returns: deferred that when fired returns a datetime object for the
              estimated, or the actual datetime, or None if we could not
              estimate a time for this task method.

    NOTE(review): this uses ``yield``/``defer.returnValue``, so it is
    presumably decorated with ``@defer.inlineCallbacks`` outside this
    view — confirm at the definition site.
    """
    if self.completion_ts:
        # Task is already complete. Return the exact completion time:
        defer.returnValue(self.completed)
    # Get the timestamps from the descendent task that's doing the work:
    if self.method == 'build' or self.method == 'image':
        subtask_completion = yield self.estimate_descendents()
        defer.returnValue(subtask_completion)
    if self.state == task_states.FREE:
        # Not started yet; estimate from queue behaviour instead.
        est_completion = yield self._estimate_free()
        defer.returnValue(est_completion)
    # Running task: project from start time plus the average duration.
    avg_delta = yield self.estimate_duration()
    if avg_delta is None:
        defer.returnValue(None)
    est_completion = self.started + avg_delta
    defer.returnValue(est_completion)
|
java
|
/**
 * Activates the settings cursor using the given cursor's IO device.
 * The device's current near/far depths are saved so they can be restored
 * later, then the device is transferred to the settings cursor.
 *
 * @param cursor the cursor whose IO device drives the settings cursor
 */
public void enableSettingsCursor(Cursor cursor) {
    menuCursor = cursor;
    IoDevice device = cursor.getIoDevice();
    if (device != null)
    {
        // Remember the original depths for restoration on disable.
        settingsIoDeviceFarDepth = device.getFarDepth();
        settingsIoDeviceNearDepth = device.getNearDepth();
    }
    settingsCursor.transferIoDevice(cursor);
}
|
python
|
def get_pg_info():
    """Check PostgreSQL connection.

    Reads connection settings from ``settings.DATABASES['default']`` and
    attempts a connection. Returns one of:
      - ``{"status": NO_CONFIG}`` when settings are missing,
      - ``{"status": DOWN}`` when settings are unusable or connecting fails,
      - ``{"status": UP, "response_microseconds": ...}`` on success.
    """
    from psycopg2 import connect, OperationalError
    log.debug("entered get_pg_info")
    try:
        conf = settings.DATABASES['default']
        database = conf["NAME"]
        user = conf["USER"]
        host = conf["HOST"]
        port = conf["PORT"]
        password = conf["PASSWORD"]
    except (AttributeError, KeyError):
        log.error("No PostgreSQL connection info found in settings.")
        return {"status": NO_CONFIG}
    except TypeError:
        return {"status": DOWN}
    log.debug("got past getting conf")
    try:
        start = datetime.now()
        connection = connect(
            database=database, user=user, host=host,
            port=port, password=password, connect_timeout=TIMEOUT_SECONDS,
        )
        log.debug("at end of context manager")
        # NOTE(review): .microseconds ignores whole seconds; consider
        # total_seconds() * 1e6 if connections may take longer than 1s.
        micro = (datetime.now() - start).microseconds
        connection.close()
    except (OperationalError, KeyError) as ex:
        # Bug fix: this branch runs when the *connection attempt* fails, not
        # when settings are missing, so log an accurate message here.
        log.error("Could not connect to PostgreSQL with settings: %s. Error: %s",
                  conf, ex)
        return {"status": DOWN}
    log.debug("got to end of postgres check successfully")
    return {"status": UP, "response_microseconds": micro}
|
java
|
/**
 * Creates one {@code Item} node in a write transaction.
 *
 * @return true when the node was created; false when the Neo4j service
 *         was unavailable (the exception is handled, not propagated)
 */
public boolean addItem()
{
    // try-with-resources guarantees the session is closed.
    try ( Session session = driver.session() )
    {
        return session.writeTransaction( new TransactionWork<Boolean>()
        {
            @Override
            public Boolean execute( Transaction tx )
            {
                tx.run( "CREATE (a:Item)" );
                return true;
            }
        } );
    }
    catch ( ServiceUnavailableException ex )
    {
        // Service outage is reported as a failed add, not an error.
        return false;
    }
}
|
java
|
/**
 * Builds {@link AgentProperties} from IaaS user-data properties.
 * Known keys (application name, scoped instance path, domain) are mapped
 * to dedicated fields; every remaining property is collected into the
 * messaging configuration map.
 *
 * @param props the raw user-data properties
 * @return the parsed agent properties (messaging map is unmodifiable)
 * @throws IOException if processing the user data fails
 */
public static AgentProperties readIaasProperties( Properties props ) throws IOException {
    // Deal with files transferred through user data.
    // Store files in the system's temporary directory.
    // In Karaf, this will point to the "data/tmp" directory.
    File msgResourcesDirectory = new File( System.getProperty( "java.io.tmpdir" ), "roboconf-messaging" );
    props = UserDataHelpers.processUserData( props, msgResourcesDirectory );
    // Given #213, we have to replace some characters escaped by AWS (and probably Openstack too).
    AgentProperties result = new AgentProperties();
    result.setApplicationName( updatedField( props, UserDataHelpers.APPLICATION_NAME ));
    result.setScopedInstancePath( updatedField( props, UserDataHelpers.SCOPED_INSTANCE_PATH ));
    result.setDomain( updatedField( props, UserDataHelpers.DOMAIN ));
    final Map<String, String> messagingConfiguration = new LinkedHashMap<> ();
    List<String> toSkip = Arrays.asList( UserDataHelpers.APPLICATION_NAME, UserDataHelpers.DOMAIN, UserDataHelpers.SCOPED_INSTANCE_PATH );
    for( String k : props.stringPropertyNames()) {
        if( ! toSkip.contains( k )) {
            // All other properties are considered messaging-specific.
            messagingConfiguration.put(k, updatedField( props, k ));
        }
    }
    result.setMessagingConfiguration( Collections.unmodifiableMap( messagingConfiguration ));
    return result;
}
|
python
|
def set_web_hook(self, url=None, certificate=None):
    """
    Use this method to specify a url and receive incoming updates via an outgoing webhook. Whenever there is an
    update for the bot, we will send an HTTPS POST request to the specified url, containing a JSON-serialized
    Update. In case of an unsuccessful request, we will give up after a reasonable amount of attempts.

    :param url: the HTTPS webhook URL (None/empty removes the webhook).
    :param certificate: optional public-key certificate for the webhook.
    :return: result of the underlying 'setWebHook' API call.
    """
    payload = dict(url=url, certificate=certificate)
    return self._get('setWebHook', payload)
|
python
|
def input_has_value(step, field_name, value):
    """
    Check that the form input element has given value.

    :param step: the lettuce step object (used for assertion context).
    :param field_name: name/label of the date or text field to locate.
    :param value: expected value of the field's ``value`` attribute.
    """
    with AssertContextManager(step):
        # Search date and text inputs for a field matching the name.
        text_field = find_any_field(world.browser,
                                    DATE_FIELDS + TEXT_FIELDS,
                                    field_name)
        # find_any_field returns False when no matching field exists.
        assert_false(step, text_field is False,
                     'Can not find a field named "%s"' % field_name)
        assert_equals(text_field.get_attribute('value'), value)
|
python
|
def addDependencyInfo(self):
    """ Adds version info about the installed dependencies

    Populates the editor with one line per dependency, updating the
    progress label while working.
    """
    logger.debug("Adding dependency info to the AboutDialog")
    self.progressLabel.setText("Retrieving package info...")
    self.editor.clear()
    # Core runtime info objects first.
    self._addModuleInfo(mi.PythonModuleInfo())
    self._addModuleInfo(mi.QtModuleInfo())
    # NOTE(review): plain module-name strings and ModuleInfo objects are
    # both passed to _addModuleInfo — it presumably accepts both; confirm.
    modules = ['numpy', 'scipy', 'pandas', 'pyqtgraph']
    for module in modules:
        self._addModuleInfo(module)
    self._addModuleInfo(mi.PillowInfo())
    self._addModuleInfo(mi.H5pyModuleInfo())
    self._addModuleInfo(mi.NetCDF4ModuleInfo())
    # Clear the progress text once everything is listed.
    self.progressLabel.setText("")
    logger.debug("Finished adding dependency info to the AboutDialog")
|
java
|
/**
 * Adds or removes a child-target exclusion for the given class type.
 *
 * @param type    the class whose child views are (un)excluded; may be null
 * @param exclude true to add the exclusion, false to remove it
 * @return this transition, for call chaining
 */
@NonNull
public Transition excludeChildren(@Nullable Class type, boolean exclude) {
    mTargetTypeChildExcludes = excludeObject(mTargetTypeChildExcludes, type, exclude);
    return this;
}
|
python
|
def get_user_name(user, full_name=True):
    """Return the user's name as a string.

    :param user: `models.User` object. The user to get the name of.
    :param full_name: (optional) Whether to return full user name, or just first name.
    :return: The user's name, or None when the name attributes are missing.
    """  # noqa
    try:
        if full_name:
            return ' '.join([user.first_name, user.last_name])
        return user.first_name
    except AttributeError:
        # The object lacks first_name/last_name; report "no name".
        return None
|
python
|
def alpha_multiply(self, alpha, data, shape=None):
    """(alpha) can be a scalar or an array.

    Multiplies ``data`` by ``alpha``. A 2-D (mono) input is broadcast to
    a 3-channel image when a 3-D ``shape`` is requested; a 3-D input has
    each of its first three channels scaled into an array of ``shape``
    (default: ``data.shape``).
    """
    if shape is None:
        shape = data.shape
    if data.ndim == 2:
        scaled = alpha * data
        # Mono target shape: return the scaled mono image directly.
        if len(shape) == 2:
            return scaled
        # Otherwise broadcast the mono result into each color channel.
        # (dstack measured slower than explicit channel assignment here)
        out = np.empty(shape)
        for channel in range(3):
            out[:, :, channel] = scaled[:, :]
        return out
    # Color input: scale the three channels independently.
    out = np.empty(shape)
    for channel in range(3):
        out[:, :, channel] = data[:, :, channel] * alpha
    return out
|
java
|
/**
 * Validates the step count of an incoming AJAX request before delegating
 * to the backing component. Requests on the wrong step are handled per
 * HTTP method: GET requests get an error code and rendering is aborted;
 * otherwise the user is redirected to an error page or the page is
 * refreshed ("warp to the future") depending on configuration.
 *
 * @param request the incoming AJAX request
 */
@Override
public void serviceRequest(final Request request) {
    // Get trigger id
    triggerId = request.getParameter(WServlet.AJAX_TRIGGER_PARAM_NAME);
    if (triggerId == null) {
        throw new SystemException("No AJAX trigger id to check step count");
    }
    // Get trigger and its context
    ComponentWithContext trigger = AjaxHelper.getCurrentTriggerAndContext();
    if (trigger == null) {
        throw new IllegalStateException(
                "No component/context available for AJAX trigger " + triggerId + ".");
    }
    // Get expected step count
    UIContext uic = UIContextHolder.getCurrent();
    int expected = uic.getEnvironment().getStep();
    // Step should already be set on the session
    if (expected == 0) {
        throw new SystemException(
                "Step count should already be set on the session before AJAX request.");
    }
    // Get step count on the request
    int got = StepCountUtil.getRequestStep(request);
    // Check we are on the current step
    if (expected == got) {
        // Process Service Request
        getBackingComponent().serviceRequest(request);
    } else { // Invalid step
        LOG.warn("AJAX: Wrong step detected. Expected step " + expected + " but got step " + got);
        // "GET" Ajax requests are just ignored and return an error code
        if ("GET".equals(request.getMethod())) {
            LOG.warn("Error code will be sent in the response for AJAX GET Request.");
            handleErrorCode();
            // Make sure the render phase is not processed
            throw new ActionEscape();
        } else if (StepCountUtil.isErrorRedirect()) { // Redirect to error page
            LOG.warn("User will be redirected to an error page.");
            redirectUrl = StepCountUtil.getErrorUrl();
        } else { // Warp to the future by refreshing the page
            LOG.warn("Warp the user back to the future by refreshing the page.");
            handleWarpToTheFuture(uic);
            redirectUrl = buildApplicationUrl(uic);
        }
    }
}
|
java
|
/**
 * Sets the video redirect assets for this creative.
 *
 * @param videoAssets the assets to store; the array is kept by reference
 */
public void setVideoAssets(com.google.api.ads.admanager.axis.v201811.VideoRedirectAsset[] videoAssets) {
    this.videoAssets = videoAssets;
}
|
python
|
def touch_log(log, cwd='.'):
    """
    Touches the log file. Creates if not exists OR updates the modification date if exists.

    :param log: log file name, relative to ``cwd``.
    :param cwd: directory containing the log file (default: current dir).
    :return: nothing
    """
    # os.path.join builds the path portably (the old '%s/%s' formatting
    # hard-coded the separator and duplicated trailing slashes).
    logfile = os.path.join(cwd, log)
    # Append mode creates the file when missing without truncating it;
    # utime(None) bumps atime/mtime to "now".
    with open(logfile, 'a'):
        os.utime(logfile, None)
|
java
|
/**
 * Adds a function predicate to this criteria.
 *
 * @param function the function to apply; must not be null
 * @return this criteria, for call chaining
 */
public Criteria function(Function function) {
    Assert.notNull(function, "Cannot add 'null' function to criteria.");
    predicates.add(new Predicate(OperationKey.FUNCTION, function));
    return this;
}
|
python
|
def get_file_list(shortFile):
    """
    The function get_file_list expands a short filename to a sorted list
    of filenames. The short filename can contain variables and wildcards.

    URLs (anything containing "://") are returned unchanged in a
    one-element list. A warning is logged when nothing matches.
    """
    # URLs cannot be globbed; pass them through untouched.
    if "://" in shortFile:
        return [shortFile]
    # Expand "~" and environment variables, then resolve wildcards.
    pattern = os.path.expandvars(os.path.expanduser(shortFile))
    matches = glob.glob(pattern)
    if matches:
        # Sort in place for a deterministic, alphabetical result.
        matches.sort()
    else:
        logging.warning("No file matches '%s'.", shortFile)
    return matches
|
java
|
/**
 * Serializes a map to pretty-printed JSON using Jackson.
 *
 * @param map the key/value pairs to serialize
 * @return the pretty-printed JSON string
 * @throws JsonProcessingException if serialization fails
 */
@SuppressWarnings("static-method")
protected String generateJson(Map<String, Object> map) throws JsonProcessingException {
    // A fresh mapper per call keeps this method stateless/thread-safe.
    final ObjectMapper mapper = new ObjectMapper();
    return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(map);
}
|
python
|
def main():
    """Run the merge driver program.

    Parses command-line options, loads the two input GTFS feeds, merges
    them, writes the merged feed and an HTML report, and optionally opens
    the report in a browser.
    """
    usage = \
"""%prog [options] <input GTFS a.zip> <input GTFS b.zip> <output GTFS.zip>

Merges <input GTFS a.zip> and <input GTFS b.zip> into a new GTFS file
<output GTFS.zip>.

For more information see
https://github.com/google/transitfeed/wiki/Merge
"""
    parser = util.OptionParserLongError(
        usage=usage, version='%prog '+transitfeed.__version__)
    parser.add_option('--cutoff_date',
                      dest='cutoff_date',
                      default=None,
                      help='a transition date from the old feed to the new '
                      'feed in the format YYYYMMDD')
    parser.add_option('--largest_stop_distance',
                      dest='largest_stop_distance',
                      default=StopMerger.largest_stop_distance,
                      help='the furthest distance two stops can be apart and '
                      'still be merged, in metres')
    parser.add_option('--largest_shape_distance',
                      dest='largest_shape_distance',
                      default=ShapeMerger.largest_shape_distance,
                      help='the furthest distance the endpoints of two shapes '
                      'can be apart and the shape still be merged, in metres')
    parser.add_option('--html_output_path',
                      dest='html_output_path',
                      default='merge-results.html',
                      help='write the html output to this file')
    parser.add_option('--no_browser',
                      dest='no_browser',
                      action='store_true',
                      help='prevents the merge results from being opened in a '
                      'browser')
    parser.add_option('--latest_version', dest='latest_version',
                      action='store',
                      help='a version number such as 1.2.1 or None to get the '
                      'latest version from the project page. Output a warning if '
                      'merge.py is older than this version.')
    parser.add_option('-m', '--memory_db', dest='memory_db', action='store_true',
                      help='Use in-memory sqlite db instead of a temporary file. '
                      'It is faster but uses more RAM.')
    parser.set_defaults(memory_db=False)
    (options, args) = parser.parse_args()
    if len(args) != 3:
        parser.error('You did not provide all required command line arguments.')
    old_feed_path = os.path.abspath(args[0])
    new_feed_path = os.path.abspath(args[1])
    merged_feed_path = os.path.abspath(args[2])
    if old_feed_path.find("IWantMyCrash") != -1:
        # See tests/testmerge.py
        raise Exception('For testing the merge crash handler.')
    a_schedule = LoadWithoutErrors(old_feed_path, options.memory_db)
    b_schedule = LoadWithoutErrors(new_feed_path, options.memory_db)
    merged_schedule = transitfeed.Schedule(memory_db=options.memory_db)
    accumulator = HTMLProblemAccumulator()
    problem_reporter = MergeProblemReporter(accumulator)
    util.CheckVersion(problem_reporter, options.latest_version)
    feed_merger = FeedMerger(a_schedule, b_schedule, merged_schedule,
                             problem_reporter)
    feed_merger.AddDefaultMergers()
    # Distance thresholds arrive as strings from optparse; coerce to float.
    feed_merger.GetMerger(StopMerger).SetLargestStopDistance(float(
        options.largest_stop_distance))
    feed_merger.GetMerger(ShapeMerger).SetLargestShapeDistance(float(
        options.largest_shape_distance))
    if options.cutoff_date is not None:
        service_period_merger = feed_merger.GetMerger(ServicePeriodMerger)
        service_period_merger.DisjoinCalendars(options.cutoff_date)
    if feed_merger.MergeSchedules():
        feed_merger.GetMergedSchedule().WriteGoogleTransitFeed(merged_feed_path)
    else:
        merged_feed_path = None
    # Bug fix: the Python-2-only builtin file() was used here; open() is the
    # documented spelling and works on both Python 2 and 3.
    output_file = open(options.html_output_path, 'w')
    accumulator.WriteOutput(output_file, feed_merger,
                            old_feed_path, new_feed_path, merged_feed_path)
    output_file.close()
    if not options.no_browser:
        webbrowser.open('file://%s' % os.path.abspath(options.html_output_path))
|
java
|
/**
 * Sets this component's rotation quaternion (w, x, y, z) and mirrors it onto
 * the owning scene object's transform when one is attached.
 */
public void setRotation(float w, float x, float y, float z) {
    componentRotation.set(w, x, y, z);
    if (sceneObject == null) {
        // Not attached yet; only the local copy is updated.
        return;
    }
    sceneObject.getTransform().setRotation(w, x, y, z);
}
|
java
|
/**
 * Marshalls an {@code UnlinkDeveloperIdentityRequest} through the given
 * protocol marshaller, emitting each field via its protocol binding.
 * Any failure is wrapped in an {@code SdkClientException}.
 */
public void marshall(UnlinkDeveloperIdentityRequest request, ProtocolMarshaller marshaller) {
    if (request == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        marshaller.marshall(request.getIdentityId(), IDENTITYID_BINDING);
        marshaller.marshall(request.getIdentityPoolId(), IDENTITYPOOLID_BINDING);
        marshaller.marshall(request.getDeveloperProviderName(), DEVELOPERPROVIDERNAME_BINDING);
        marshaller.marshall(request.getDeveloperUserIdentifier(), DEVELOPERUSERIDENTIFIER_BINDING);
    } catch (Exception e) {
        // Present a single consistent exception type to callers.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def get_substrates(self, material_id, number=50, orient=None):
    """
    Get a substrate list for a material id. The list is in order of
    increasing elastic energy if an elastic tensor is available for
    the material_id; otherwise it is in order of increasing matching area.

    Args:
        material_id (str): Materials Project material_id, e.g. 'mp-123'.
        number (int): number of substrates to return;
            n=0 returns all available matches.
        orient (list): substrate orientation to look for.
    Returns:
        list of dicts with substrate matches
    """
    endpoint = "/materials/{}/substrates?n={}".format(material_id, number)
    if orient:
        # Orientation is encoded as space-separated Miller indices.
        endpoint += "&orient={}".format(" ".join(str(i) for i in orient))
    return self._make_request(endpoint)
|
python
|
def _parse_binding_config(self, binding_config):
    """Parse configured interface -> ACL bindings.

    Bindings are returned as a set of (intf, name, direction) tuples:
        set([(intf1, acl_name, direction),
             (intf2, acl_name, direction),
             ...,
            ])
    """
    bindings = set()
    for acl in binding_config['aclList']:
        acl_name = acl['name']
        # Collect ingress and egress bindings separately, tagged with the
        # matching direction constant.
        bindings.update(
            (intf['name'], acl_name, a_const.INGRESS_DIRECTION)
            for intf in acl['configuredIngressIntfs'])
        bindings.update(
            (intf['name'], acl_name, a_const.EGRESS_DIRECTION)
            for intf in acl['configuredEgressIntfs'])
    return bindings
|
java
|
/**
 * Receives the next ZAP reply, optionally blocking.
 * Returns null (after printing a hint) when reply collection is disabled.
 */
public ZapReply nextReply(boolean wait)
{
    if (repliesEnabled) {
        return ZapReply.recv(replies, wait);
    }
    System.out.println("ZAuth: replies are disabled. Please use replies(true);");
    return null;
}
|
java
|
/**
 * Returns a resized copy of the given image.
 * Delegates to {@code resize(...)} and decodes the result again via
 * {@code read(...)}; the round trip re-encodes the image in the given format.
 * NOTE(review): exact semantics of resize/read are defined elsewhere in this
 * class -- confirm.
 *
 * @param originalImage the source image
 * @param formatName    informal image format name (e.g. "png")
 * @param targetWidth   desired width in pixels
 * @param targetHeight  desired height in pixels
 * @return the resized image
 * @throws IOException if encoding or decoding fails
 */
public static BufferedImage getResized(final BufferedImage originalImage,
    final String formatName, final int targetWidth, final int targetHeight) throws IOException
{
    return read(resize(originalImage, formatName, targetWidth, targetHeight));
}
|
python
|
def decorate_client(api_client, func, name):
    """A helper for decorating :class:`bravado.client.SwaggerClient`.

    :class:`bravado.client.SwaggerClient` can be extended by creating a class
    which wraps all calls to it. This helper is used in a :func:`__getattr__`
    to check if the attr exists on the api_client. If the attr does not exist
    raise :class:`AttributeError`, if it exists and is not callable return it,
    and if it is callable return a partial function calling `func` with `name`.

    Example usage:

    .. code-block:: python

        class SomeClientDecorator(object):
            def __init__(self, api_client, ...):
                self.api_client = api_client

            # First arg should be suffiently unique to not conflict with any
            # of the kwargs
            def wrap_call(self, client_call_name, *args, **kwargs):
                ...

            def __getattr__(self, name):
                return decorate_client(self.api_client, self.wrap_call, name)

    :param api_client: the client which is being decorated
    :type api_client: :class:`bravado.client.SwaggerClient`
    :param func: a callable which accepts `name`, `*args`, `**kwargs`
    :type func: callable
    :param name: the attribute being accessed
    :type name: string
    :returns: the attribute from the `api_client` or a partial of `func`
    :raises: :class:`AttributeError`
    """
    # getattr raises AttributeError for us when the attribute is missing.
    attr = getattr(api_client, name)
    if callable(attr):
        return OperationDecorator(attr, functools.partial(func, name))
    return attr
|
python
|
def add_websocket(
        self,
        path: str,
        endpoint: Optional[str]=None,
        view_func: Optional[Callable]=None,
        defaults: Optional[dict]=None,
        host: Optional[str]=None,
        subdomain: Optional[str]=None,
        *,
        strict_slashes: bool=True,
) -> None:
    """Add a websocket rule to the blueprint.

    This is designed to be used on the blueprint directly, and
    has the same arguments as
    :meth:`~quart.Quart.add_websocket`. An example usage,

    .. code-block:: python

        def route():
            ...

        blueprint = Blueprint(__name__)
        blueprint.add_websocket('/', route)
    """
    # Websockets are GET-only and never get automatic OPTIONS handling.
    methods = {'GET'}
    return self.add_url_rule(
        path, endpoint, view_func, methods,
        provide_automatic_options=False, is_websocket=True,
        defaults=defaults, host=host, subdomain=subdomain,
        strict_slashes=strict_slashes,
    )
|
java
|
/**
 * Creates or updates an authorization rule for a notification hub.
 * Validates every required identifier and client setting up front, wraps the
 * supplied properties in the create-or-update parameter envelope, then issues
 * the REST call and maps the raw response into a typed {@code ServiceResponse}.
 *
 * @param resourceGroupName     resource group containing the namespace
 * @param namespaceName         notification hub namespace name
 * @param notificationHubName   notification hub name
 * @param authorizationRuleName name of the authorization rule to create/update
 * @param properties            shared-access rule properties to apply
 * @return an observable emitting the service response with the created/updated rule
 * @throws IllegalArgumentException if any required argument or client setting is null
 */
public Observable<ServiceResponse<SharedAccessAuthorizationRuleResourceInner>> createOrUpdateAuthorizationRuleWithServiceResponseAsync(String resourceGroupName, String namespaceName, String notificationHubName, String authorizationRuleName, SharedAccessAuthorizationRuleProperties properties) {
    // Fail fast on missing arguments before any network activity.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (namespaceName == null) {
        throw new IllegalArgumentException("Parameter namespaceName is required and cannot be null.");
    }
    if (notificationHubName == null) {
        throw new IllegalArgumentException("Parameter notificationHubName is required and cannot be null.");
    }
    if (authorizationRuleName == null) {
        throw new IllegalArgumentException("Parameter authorizationRuleName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    if (properties == null) {
        throw new IllegalArgumentException("Parameter properties is required and cannot be null.");
    }
    // Structural validation of the properties payload.
    Validator.validate(properties);
    // The REST API expects the properties wrapped in a parameters envelope.
    SharedAccessAuthorizationRuleCreateOrUpdateParameters parameters = new SharedAccessAuthorizationRuleCreateOrUpdateParameters();
    parameters.withProperties(properties);
    return service.createOrUpdateAuthorizationRule(resourceGroupName, namespaceName, notificationHubName, authorizationRuleName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), parameters, this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<SharedAccessAuthorizationRuleResourceInner>>>() {
            @Override
            public Observable<ServiceResponse<SharedAccessAuthorizationRuleResourceInner>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the HTTP response into the typed result.
                    ServiceResponse<SharedAccessAuthorizationRuleResourceInner> clientResponse = createOrUpdateAuthorizationRuleDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    // Surface deserialization failures through the observable.
                    return Observable.error(t);
                }
            }
        });
}
|
python
|
def create_assembly(self, did, wid, name='My Assembly'):
    '''
    Creates a new assembly element in the specified document / workspace.

    Args:
        - did (str): Document ID
        - wid (str): Workspace ID
        - name (str, default='My Assembly')

    Returns:
        - requests.Response: Onshape response data
    '''
    route = '/api/assemblies/d/{}/w/{}'.format(did, wid)
    return self._api.request('post', route, body={'name': name})
|
java
|
/**
 * Updates tags on a VirtualHub, blocking until the service call completes.
 * Thin synchronous wrapper over the async service-response variant.
 *
 * @param resourceGroupName resource group containing the hub
 * @param virtualHubName    name of the VirtualHub
 * @param tags              replacement resource tags
 * @return the updated {@code VirtualHubInner}
 */
public VirtualHubInner beginUpdateTags(String resourceGroupName, String virtualHubName, Map<String, String> tags) {
    return beginUpdateTagsWithServiceResponseAsync(resourceGroupName, virtualHubName, tags).toBlocking().single().body();
}
|
java
|
/**
 * Returns the EClass for EDG, resolving it lazily from the registered
 * Afplib package metamodel (classifier index 238) on first access.
 * NOTE(review): not thread-safe, matching the surrounding generated code.
 */
public EClass getEDG() {
    if (edgEClass != null) {
        return edgEClass;
    }
    edgEClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(238);
    return edgEClass;
}
|
python
|
def run(self):
    """
    Consume messages from the channel on the consuming thread.

    Blocks in ``start_consuming`` until the channel stops; any exception
    raised while consuming is logged and swallowed so the thread exits
    cleanly (matching the original best-effort behavior).
    """
    LOGGER.debug("rabbitmq.Service.run")
    try:
        self.channel.start_consuming()
    except Exception as e:
        # BUG FIX: the caught exception was previously discarded, hiding the
        # failure cause. Also use warning() -- Logger.warn() is deprecated.
        LOGGER.warning(
            "rabbitmq.Service.run - Exception raised while consuming: %s", e)
|
java
|
/**
 * Returns true when the point lies within {@code tolerance} of at least one
 * of the points in the multi-point geometry.
 */
public static boolean isPointNearMultiLatLng(LatLng point, MultiLatLng multiLatLng, double tolerance) {
    for (LatLng candidate : multiLatLng.getLatLngs()) {
        if (isPointNearPoint(point, candidate, tolerance)) {
            return true;
        }
    }
    return false;
}
|
java
|
/**
 * ANTLR-generated lexer rule for DATE tokens.
 * Matches grammar rule: DATE_YEAR_MONTH_ONLY '-' NUM NUM
 * (a year-month prefix followed by a two-digit day), then stamps the
 * resulting token's type and channel into the lexer state.
 */
public final void mDATE() throws RecognitionException {
    try {
        int _type = DATE;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // druidG.g:680:2: ( DATE_YEAR_MONTH_ONLY '-' NUM NUM )
        // druidG.g:680:4: DATE_YEAR_MONTH_ONLY '-' NUM NUM
        {
            // Consume the year-month part, the separator, then two digits.
            mDATE_YEAR_MONTH_ONLY();
            match('-');
            mNUM();
            mNUM();
        }
        state.type = _type;
        state.channel = _channel;
    }
    finally {
        // do for sure before leaving
    }
}
|
python
|
def _create_dock_toggle_action(self):
    """Create the show/hide toggle action for the plugin's dockable window."""
    # pylint: disable=W0201
    dock_icon = resources_path('img', 'icons', 'icon.svg')
    action = QAction(
        QIcon(dock_icon),
        self.tr('Toggle InaSAFE Dock'),
        self.iface.mainWindow())
    action.setObjectName('InaSAFEDockToggle')
    action.setStatusTip(self.tr('Show/hide InaSAFE dock widget'))
    action.setWhatsThis(self.tr('Show/hide InaSAFE dock widget'))
    # The action is a checkable toggle, checked (dock visible) by default.
    action.setCheckable(True)
    action.setChecked(True)
    action.triggered.connect(self.toggle_dock_visibility)
    self.action_dock = action
    self.add_action(self.action_dock)
|
java
|
/**
 * Converts a geolatte geometry to its JTS equivalent, recursing into
 * geometry collections. Throws {@code IllegalArgumentException} for
 * unsupported geometry types.
 */
protected com.vividsolutions.jts.geom.Geometry toJtsGeometryCollection(Geometry<?> src) {
    // Simple geometries convert directly.
    if (src instanceof Point) {
        return toJtsPoint((Point) src);
    }
    if (src instanceof LineString) {
        return toJtsLineString((LineString) src);
    }
    if (src instanceof Polygon) {
        return toJtsPolygon((Polygon) src);
    }
    // Multi-geometries go through their dedicated codecs.
    if (src instanceof MultiPoint) {
        AbstractJtsCodec<com.vividsolutions.jts.geom.MultiPoint, MultiPoint> codec = new MultiPointCodec(this.geometryFactory);
        return codec.fromGeometry((MultiPoint) src);
    }
    if (src instanceof MultiLineString) {
        AbstractJtsCodec<com.vividsolutions.jts.geom.MultiLineString, MultiLineString> codec = new MultiLineStringCodec(this.geometryFactory);
        return codec.fromGeometry((MultiLineString) src);
    }
    if (src instanceof MultiPolygon) {
        AbstractJtsCodec<com.vividsolutions.jts.geom.MultiPolygon, MultiPolygon> codec = new MultiPolygonCodec(this.geometryFactory);
        return codec.fromGeometry((MultiPolygon) src);
    }
    if (src instanceof GeometryCollection) {
        // Recursively convert every member of the collection.
        GeometryCollection geometryCollection = (GeometryCollection) src;
        return this.geometryFactory.createGeometryCollection(StreamSupport.stream(geometryCollection.getGeometries().spliterator(), false)
            .map(this::toJtsGeometryCollection)
            .toArray(com.vividsolutions.jts.geom.Geometry[]::new));
    }
    throw new IllegalArgumentException("Unsupported geometry type: " + src.type());
}
|
java
|
/**
 * Stub implementation: this reasoner reports no data property assertions,
 * so an immutable empty set is always returned regardless of the inputs.
 */
@Override
public Set<OWLLiteral> getDataPropertyValues(OWLNamedIndividual ind,
        OWLDataProperty pe) throws InconsistentOntologyException,
        FreshEntitiesException, ReasonerInterruptedException,
        TimeOutException {
    return Collections.emptySet();
}
|
java
|
/**
 * Finds the single entity whose given attribute equals the given value.
 *
 * @return the matching entity, or null when none exists
 *         (a {@code NonUniqueResultException} still propagates when more
 *         than one row matches, as in the original implementation)
 */
public T findOneByAttribute(String attribute, Object value) {
    CriteriaBuilder builder = getEntityManager().getCriteriaBuilder();
    CriteriaQuery<T> criteria = builder.createQuery(getEntityClass());
    Root<T> root = criteria.from(getEntityClass());
    criteria.where(builder.equal(root.get(attribute), value));
    try {
        return getEntityManager().createQuery(criteria).getSingleResult();
    } catch (NoResultException ignored) {
        // Absence of a match is an expected outcome, not an error.
        return null;
    }
}
|
python
|
def get_subscription_labels(self, userPk):
    """Return a list with all the labels the user is subscribed to.

    Returns an empty list when the request yields no (truthy) response.
    """
    response = self._request('subscriptions/' + str(userPk))
    if not response:
        return []
    return response.json()
|
python
|
def _parse_routes(iface, opts):
    '''
    Filters given options and outputs valid settings for
    the route settings file.
    '''
    # Normalize keys to lowercase.
    normalized = dict((key.lower(), value)
                      for (key, value) in six.iteritems(opts))
    if 'routes' not in normalized:
        _raise_error_routes(iface, 'routes', 'List of routes')
    # Every normalized option is passed through into the result.
    return dict(normalized)
|
python
|
def graph_route(self, request):
    """Given a single run, return the graph definition in protobuf format.

    Query parameters:
        run (required): the run whose graph to fetch.
        tag: optional tag name (defaults to '').
        conceptual: 'true' to request the conceptual graph.
        limit_attr_size: optional int; oversized attributes are filtered out.
        large_attrs_key: key under which filtered attribute names are listed.

    Responds 400 on a missing run, a non-integer limit_attr_size, or a
    ValueError from graph_impl; 404 when no graph is found.
    """
    run = request.args.get('run')
    tag = request.args.get('tag', '')
    conceptual_arg = request.args.get('conceptual', False)
    is_conceptual = conceptual_arg == 'true'
    if run is None:
        return http_util.Respond(
            request, 'query parameter "run" is required', 'text/plain', 400)
    limit_attr_size = request.args.get('limit_attr_size', None)
    if limit_attr_size is not None:
        try:
            limit_attr_size = int(limit_attr_size)
        except ValueError:
            return http_util.Respond(
                request, 'query parameter `limit_attr_size` must be an integer',
                'text/plain', 400)
    large_attrs_key = request.args.get('large_attrs_key', None)
    try:
        result = self.graph_impl(run, tag, is_conceptual, limit_attr_size, large_attrs_key)
    except ValueError as e:
        # BUG FIX: Exception has no `.message` attribute on Python 3; str(e)
        # yields the message portably on both Python 2 and 3.
        return http_util.Respond(request, str(e), 'text/plain', code=400)
    else:
        if result is not None:
            (body, mime_type) = result  # pylint: disable=unpacking-non-sequence
            return http_util.Respond(request, body, mime_type)
        else:
            return http_util.Respond(request, '404 Not Found', 'text/plain',
                                     code=404)
|
python
|
def ImportContractAddr(wallet, contract_hash, pubkey_script_hash):
    """
    Import a verification contract address into the wallet.

    Args:
        wallet (Wallet): a UserWallet instance
        contract_hash (UInt160): hash of the contract to import
        pubkey_script_hash (UInt160): script hash of the owning public key
    Returns:
        neo.SmartContract.Contract.Contract: the created verification
        contract, or None when the contract (or script hash) is missing.
    """
    contract = Blockchain.Default().GetContract(contract_hash)
    if not contract or not pubkey_script_hash:
        print("Could not find contract")
        return
    redeem_script = contract.Code.Script.hex()
    # There has to be at least 1 param, and the first one needs to be a
    # signature param; default to a single signature slot.
    param_list = bytearray(b'\x00')
    if len(contract.Code.ParameterList) > 1:
        # Copy the declared parameter list but force the first entry to be
        # the signature parameter.
        param_list = bytearray(contract.Code.ParameterList)
        param_list[0] = 0
    verification_contract = Contract.Create(redeem_script, param_list, pubkey_script_hash)
    address = verification_contract.Address
    wallet.AddContract(verification_contract)
    print(f"Added contract address {address} to wallet")
    return verification_contract
|
java
|
/**
 * For an INVOKEVIRTUAL on a method that returns StringBuilder/StringBuffer,
 * returns the user value tagged on the receiver object sitting below the
 * call arguments on the operand stack; otherwise null.
 */
@Nullable
private UCPMUserValue callHasInline(int seen) {
    if (seen != Const.INVOKEVIRTUAL) {
        return null;
    }
    String signature = getSigConstantOperand();
    String returnType = SignatureUtils.getReturnSignature(signature);
    boolean returnsBuilder = Values.SIG_JAVA_UTIL_STRINGBUILDER.equals(returnType)
            || Values.SIG_JAVA_UTIL_STRINGBUFFER.equals(returnType);
    if (!returnsBuilder) {
        return null;
    }
    int numParms = SignatureUtils.getNumParameters(signature);
    if (stack.getStackDepth() <= numParms) {
        return null;
    }
    // The receiver sits just below the arguments on the operand stack.
    OpcodeStack.Item receiver = stack.getStackItem(numParms);
    return (UCPMUserValue) receiver.getUserValue();
}
|
java
|
/**
 * Runs every registered validator against the argument, in registration
 * order. The first failing validator aborts the chain by throwing.
 */
@Override
public void validate(T arg) throws ValidationException {
    for (Validator<T> v : validators) {
        v.validate(arg);
    }
}
|
python
|
def setDaemon(self, runnable, isdaemon, noregister=False):
    '''
    If a runnable is a daemon, it will not keep the main loop running.
    The main loop will end when all alive runnables are daemons.
    '''
    # Auto-register unknown runnables unless explicitly suppressed.
    if not noregister and runnable not in self.registerIndex:
        self.register((), runnable)
    # add() and discard() are both idempotent, so no membership check needed.
    (self.daemons.add if isdaemon else self.daemons.discard)(runnable)
|
python
|
def set_pin_retries(ctx, management_key, pin, pin_retries, puk_retries, force):
    """
    Set the number of PIN and PUK retries.
    NOTE: This will reset the PIN and PUK to their factory defaults.
    """
    controller = ctx.obj['controller']
    _ensure_authenticated(
        ctx, controller, pin, management_key, require_pin_and_key=True,
        no_prompt=force)
    click.echo('WARNING: This will reset the PIN and PUK to the factory '
               'defaults!')
    # Interactive confirmation is skipped entirely when --force is given.
    if not force:
        click.confirm(
            'Set PIN and PUK retry counters to: {} {}?'.format(
                pin_retries, puk_retries),
            abort=True, err=True)
    try:
        controller.set_pin_retries(pin_retries, puk_retries)
        click.echo('Default PINs are set.')
        click.echo('PIN: 123456')
        click.echo('PUK: 12345678')
    except Exception as e:
        logger.error('Failed to set PIN retries', exc_info=e)
        ctx.fail('Setting pin retries failed.')
|
python
|
def train():
    """Training loop for language model.

    Relies on module-level state defined elsewhere in this script: `model`,
    `loss`, `args`, `context` (list of devices), `train_data`,
    `train_batch_size`, plus helpers `detach`, `ParallelBigRNN`, `Parallel`.
    Trains with AdaGrad, one data shard per device, and saves a checkpoint
    (parameters + trainer state) after every epoch.
    """
    print(model)
    from_epoch = 0
    model.initialize(mx.init.Xavier(factor_type='out'), ctx=context)
    trainer_params = {'learning_rate': args.lr, 'wd': 0, 'eps': args.eps}
    trainer = gluon.Trainer(model.collect_params(), 'adagrad', trainer_params)
    if args.from_epoch:
        # Resume: checkpoints are named after the *completed* epoch,
        # hence from_epoch - 1.
        from_epoch = args.from_epoch
        checkpoint_name = '%s.%s'%(args.save, format(from_epoch - 1, '02d'))
        model.load_parameters(checkpoint_name)
        trainer.load_states('%s.state'%args.save)
        print('Loaded parameters from checkpoint %s'%(checkpoint_name))
    model.hybridize(static_alloc=True, static_shape=True)
    encoder_params = model.encoder.collect_params().values()
    embedding_params = list(model.embedding.collect_params().values())
    # One worker per device; batches are dispatched to workers and results
    # collected asynchronously.
    parallel_model = ParallelBigRNN(model, loss)
    parallel = Parallel(len(context), parallel_model)
    for epoch in range(from_epoch, args.epochs):
        sys.stdout.flush()
        total_L = 0.0
        start_epoch_time = time.time()
        start_log_interval_time = time.time()
        # One recurrent hidden-state set per device, carried across batches.
        hiddens = [model.begin_state(batch_size=args.batch_size,
                                     func=mx.nd.zeros, ctx=ctx) for ctx in context]
        nbatch = 0
        has_next = True
        train_data_iter = iter(train_data)
        data, target, mask, sample = next(train_data_iter)
        while has_next:
            nbatch += 1
            # Truncated BPTT: block gradient flow across batch boundaries.
            hiddens = detach(hiddens)
            Ls = []
            for _, batch in enumerate(zip(data, target, mask, sample, hiddens)):
                parallel.put(batch)
            for _ in range(len(data)):
                hidden, ls = parallel.get()
                # hidden states are ordered by context id
                index = context.index(hidden[0].context)
                hiddens[index] = hidden
                Ls.append(ls)
            # prefetch the next batch of data
            try:
                data, target, mask, sample = next(train_data_iter)
            except StopIteration:
                has_next = False
            # rescale embedding grad
            for ctx in context:
                x = embedding_params[0].grad(ctx)
                x[:] *= args.batch_size
                encoder_grad = [p.grad(ctx) for p in encoder_params]
                # perform gradient clipping per ctx
                gluon.utils.clip_global_norm(encoder_grad, args.clip)
            trainer.step(len(context))
            total_L += sum([mx.nd.sum(L).asscalar() / args.bptt for L in Ls])
            if nbatch % args.log_interval == 0:
                cur_L = total_L / args.log_interval / len(context)
                # Cap exp() input to avoid overflow in the perplexity readout.
                ppl = math.exp(cur_L) if cur_L < 100 else float('inf')
                print('[Epoch %d Batch %d] loss %.2f, ppl %.2f, '
                      'throughput %.2f samples/s'
                      %(epoch, nbatch, cur_L, ppl,
                        train_batch_size*args.log_interval/(time.time()-start_log_interval_time)))
                total_L = 0.0
                start_log_interval_time = time.time()
                sys.stdout.flush()
        end_epoch_time = time.time()
        print('Epoch %d took %.2f seconds.'%(epoch, end_epoch_time - start_epoch_time))
        # Drain all pending async ops before checkpointing.
        mx.nd.waitall()
        checkpoint_name = '%s.%s'%(args.save, format(epoch, '02d'))
        model.save_parameters(checkpoint_name)
        trainer.save_states('%s.state'%args.save)
|
java
|
/**
 * Writes this document's fields to the serialization stream.
 * The write order (cas, expiry, id, content, mutationToken) defines the wire
 * format and must match the corresponding read method exactly -- do not
 * reorder these calls.
 *
 * @param stream the destination stream
 * @throws IOException if any write fails
 */
protected void writeToSerializedStream(ObjectOutputStream stream) throws IOException {
    stream.writeLong(cas);
    stream.writeInt(expiry);
    stream.writeUTF(id);
    stream.writeObject(content);
    stream.writeObject(mutationToken);
}
|
python
|
def static_dag_launchpoint(job, job_vars):
    """
    Statically define jobs in the pipeline.

    job_vars: tuple Tuple of dictionaries: input_args and ids
    """
    input_args, _ids = job_vars
    if input_args['config_fastq']:
        upstream = job.wrapJobFn(mapsplice, job_vars,
                                 cores=input_args['cpu_count'],
                                 disk='130G').encapsulate()
    else:
        upstream = job.wrapJobFn(merge_fastqs, job_vars, disk='70 G').encapsulate()
    downstream = job.wrapJobFn(consolidate_output, job_vars, upstream.rv())
    # "encapsulate" simplifies the wiring: consolidate_output runs only after
    # the whole encapsulated subtree has finished.
    job.addChild(upstream)
    upstream.addChild(downstream)
|
python
|
def _asyncio_open_serial_windows(path):
    """
    Open a windows named pipe.

    Generator-style coroutine (``yield from``): waits for the named pipe at
    ``path`` to appear, then wraps it.

    :param path: filesystem path of the named pipe
    :returns: An IO like object (a ``WindowsPipe``)
    :raises NodeError: when the pipe does not appear before the wait times out
    """
    try:
        yield from wait_for_named_pipe_creation(path)
    except asyncio.TimeoutError:
        # Translate the low-level timeout into the project's error type.
        raise NodeError('Pipe file "{}" is missing'.format(path))
    return WindowsPipe(path)
|
java
|
/**
 * Replaces this widget's attribute value with one new value per entry in
 * {@code replacementValues}: locates the widget's position among its parent's
 * children, removes the existing value, then inserts a freshly created entity
 * for each replacement value at consecutive indices.
 *
 * @param replacementValues the data view values to insert in place of the
 *                          current value
 */
public void replaceValue(List<CmsDataViewValue> replacementValues) {
    CmsAttributeHandler handler = (CmsAttributeHandler)m_handler;
    Element parent = CmsDomUtil.getAncestor(
        m_widget.getElement(),
        I_CmsLayoutBundle.INSTANCE.form().attributeValue()).getParentElement();
    NodeList<Node> siblings = parent.getChildNodes();
    // Determine m_index: the position of this widget's value element among
    // its parent's children, found by DOM containment.
    for (int j = 0; j < siblings.getLength(); j++) {
        Node node = siblings.getItem(j);
        if (node instanceof Element) {
            Element elem = (Element)node;
            if (elem.isOrHasChild(m_widget.getElement())) {
                m_index = j;
                break;
            }
        }
    }
    // Remove the old value; keep its former parent panel so the new values
    // can be inserted into the same container.
    Panel container = handler.removeAttributeValueAndReturnPrevParent(m_index, true);
    int i = m_index;
    for (CmsDataViewValue value : replacementValues) {
        // One new entity per replacement value, inserted at consecutive slots.
        CmsEntity entity = CmsEntityBackend.getInstance().createEntity(null, m_entity.getTypeName());
        writeValueToEntity(value, entity);
        // handler.addNewAttributeValue(entity);
        handler.insertNewAttributeValue(entity, i, container);
        i += 1;
    }
    handler.updateButtonVisisbility();
}
|
java
|
/**
 * Writes the fixed 8-byte request header: magic bytes 'L' and '1', the
 * message type, a request-flag byte (0 = request), then the payload size as
 * a big-endian int.
 */
private void sendRequestHeader(MessageType type, int size) throws IOException {
    ByteBuffer header = ByteBuffer.allocate(8).order(ByteOrder.BIG_ENDIAN);
    header.put((byte) 'L');
    header.put((byte) '1');
    header.put(type.getValue());
    header.put((byte) 0); // 0 marks this frame as a request
    header.putInt(size);
    header.flip();
    logger.finest("SEND: type=" + (0) + ", command=" + ((int) type.getValue()) + ", size=" + size);
    os.write(header.array());
}
|
python
|
def set_flag(self, user, note=None, status=None):
    """Flags the object.

    :param User user:
    :param str note: User-defined note for this flag.
    :param int status: Optional status integer (the meaning is defined by
        a developer).
    :return: the saved (or pre-existing) flag, or None for unsaved users.
    """
    if not user.id:
        return None
    init_kwargs = {
        'user': user,
        'linked_object': self,
    }
    # Only pass the optional fields through when they were actually given.
    for key, value in (('note', note), ('status', status)):
        if value is not None:
            init_kwargs[key] = value
    flag = get_flag_model()(**init_kwargs)
    try:
        flag.save()
    except IntegrityError:  # Record already exists.
        pass
    return flag
|
java
|
/**
 * Sets the MUC room secret (password) on the configuration answer form.
 *
 * @param secret the room secret to configure
 * @return this manager, for call chaining
 * @throws MucConfigurationNotSupportedException if the service's form does
 *         not expose the room-secret field
 */
public MucConfigFormManager setRoomSecret(String secret)
                throws MucConfigurationNotSupportedException {
    if (answerForm.hasField(MUC_ROOMCONFIG_ROOMSECRET)) {
        answerForm.setAnswer(MUC_ROOMCONFIG_ROOMSECRET, secret);
        return this;
    }
    throw new MucConfigurationNotSupportedException(MUC_ROOMCONFIG_ROOMSECRET);
}
|
java
|
/**
 * Adjusts the selector pool to the requested size, shrinking or growing as
 * needed, then records the new maximum. Synchronized on the pool so resizes
 * are serialized.
 */
public final static void setMaxSelectors(int size) throws IOException {
    synchronized (selectors) {
        if (size != maxSelectors) {
            if (size < maxSelectors) {
                reduce(size);
            } else {
                grow(size);
            }
        }
        maxSelectors = size;
    }
}
|
python
|
def rank(self):
    """
    Returns the item's rank (if it has one)
    as a dict that includes required score, name, and level.
    """
    # A non-empty cached value (including a cached None) short-circuits
    # the lookup.
    if self._rank != {}:
        return self._rank
    if not self.kill_eaters:
        # No eater available, so the item cannot be ranked.
        self._rank = None
        return None
    # The first eater determines the rank.
    levelkey, _typename, count = self.kill_eaters[0]
    default_ranks = [{"level": 0,
                      "required_score": 0,
                      "name": "Strange"}]
    for candidate in self._ranks.get(levelkey, default_ranks):
        self._rank = candidate
        if count < candidate["required_score"]:
            break
    return self._rank
|
java
|
/**
 * Reads an optional Month from a JSON value.
 * Returns null when the value is absent or does not name a valid Month.
 */
private Month readOptionalMonth(JSONValue val) {
    String str = readOptionalString(val);
    if (str == null) {
        return null;
    }
    try {
        return Month.valueOf(str);
    } catch (@SuppressWarnings("unused") IllegalArgumentException ignored) {
        // Unparseable month name -> fall through to the default (null).
        return null;
    }
}
|
java
|
/**
 * Compares two byte arrays lexicographically, treating bytes as unsigned.
 * When one array is a prefix of the other, the longer array compares greater.
 *
 * @return negative, zero, or positive as bytes1 is less than, equal to,
 *         or greater than bytes2
 */
public final int compare( byte[] bytes1, byte[] bytes2) {
    final int limit = Math.min(bytes1.length, bytes2.length);
    for (int i = 0; i < limit; i++) {
        // Mask to compare as unsigned values (0..255).
        int diff = (bytes1[i] & 0xFF) - (bytes2[i] & 0xFF);
        if (diff != 0) {
            return diff;
        }
    }
    // Equal up to the shorter length: the longer array is "bigger".
    return bytes1.length - bytes2.length;
}
|
java
|
/**
 * Queries policy events for a resource group, blocking until the call
 * completes. Thin synchronous wrapper over the async service-response
 * variant.
 *
 * @param subscriptionId    subscription containing the resource group
 * @param resourceGroupName resource group to query
 * @return the policy events query results
 */
public PolicyEventsQueryResultsInner listQueryResultsForResourceGroup(String subscriptionId, String resourceGroupName) {
    return listQueryResultsForResourceGroupWithServiceResponseAsync(subscriptionId, resourceGroupName).toBlocking().single().body();
}
|
java
|
/**
 * Starts the given animation, but only when no other animation is currently
 * running on this view. Null animations are ignored.
 */
@SuppressWarnings("all")
@Override
public void startAnimation(Animation animation) {
    if (animation == null) {
        return;
    }
    Animation current = getAnimation();
    if (current == null || current.hasEnded()) {
        super.startAnimation(animation);
    }
}
|
java
|
/**
 * Request filter that injects the configured query key into the
 * {@code X-Query-Key} header, unless the caller already supplied one.
 */
public void filter(ClientRequestContext request) throws IOException
{
    boolean alreadySet = request.getHeaders().containsKey("X-Query-Key");
    if (!alreadySet) {
        request.getHeaders().add("X-Query-Key", this.querykey);
    }
}
|
python
|
def wait_for_array(self, wait_for, timeout_ms):
    """Waits for one or more events to happen.
    Scriptable version of :py:func:`wait_for` .

    in wait_for of type :class:`GuestSessionWaitForFlag`
        Specifies what to wait for;
        see :py:class:`GuestSessionWaitForFlag` for more information.

    in timeout_ms of type int
        Timeout (in ms) to wait for the operation to complete.
        Pass 0 for an infinite timeout.

    return reason of type :class:`GuestSessionWaitResult`
        The overall wait result;
        see :py:class:`GuestSessionWaitResult` for more information.
    """
    if not isinstance(wait_for, list):
        raise TypeError("wait_for can only be an instance of type list")
    # NOTE(review): only the first 10 entries are type-checked, mirroring
    # the original implementation.
    for flag in wait_for[:10]:
        if isinstance(flag, GuestSessionWaitForFlag):
            continue
        raise TypeError(
            "array can only contain objects of type GuestSessionWaitForFlag")
    if not isinstance(timeout_ms, baseinteger):
        raise TypeError("timeout_ms can only be an instance of type baseinteger")
    raw_reason = self._call("waitForArray",
                            in_p=[wait_for, timeout_ms])
    return GuestSessionWaitResult(raw_reason)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.