language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
|---|---|
python
|
def to_shell_manager(self, mpi_procs=1):
    """
    Returns a new `TaskManager` with the same parameters as self but replace the :class:`QueueAdapter`
    with a :class:`ShellAdapter` with mpi_procs so that we can submit the job without passing through the queue.

    Args:
        mpi_procs: number of MPI processes the shell adapter(s) should use (default 1).

    Returns:
        New manager instance (same class as self) configured for shell submission.
    """
    my_kwargs = copy.deepcopy(self._kwargs)
    # Disable autoparal: the shell manager is meant for small sequential jobs.
    my_kwargs["policy"] = TaskPolicy(autoparal=0)

    # On BlueGene we need at least two qadapters.
    # One for running jobs on the computing nodes and another one
    # for running small jobs on the fronted. These two qadapters
    # will have different enviroments and different executables.
    # If None of the q-adapters has qtype==shell, we change qtype to shell
    # and we return a new Manager for sequential jobs with the same parameters as self.
    # If the list contains a qadapter with qtype == shell, we ignore the remaining qadapters
    # when we build the new Manager.
    has_shell_qad = False
    for d in my_kwargs["qadapters"]:
        if d["queue"]["qtype"] == "shell": has_shell_qad = True
    if has_shell_qad:
        # Keep only the shell adapters; drop the queue-based ones.
        my_kwargs["qadapters"] = [d for d in my_kwargs["qadapters"] if d["queue"]["qtype"] == "shell"]

    for d in my_kwargs["qadapters"]:
        # Force shell submission and pin the core limits to the requested MPI procs.
        d["queue"]["qtype"] = "shell"
        d["limits"]["min_cores"] = mpi_procs
        d["limits"]["max_cores"] = mpi_procs

        # If shell_runner is specified, replace mpi_runner with shell_runner
        # in the script used to run jobs on the frontend.
        # On same machines based on Slurm, indeed, mpirun/mpiexec is not available
        # and jobs should be executed with `srun -n4 exec` when running on the computing nodes
        # or with `exec` when running in sequential on the frontend.
        if "job" in d and "shell_runner" in d["job"]:
            shell_runner = d["job"]["shell_runner"]
            # Normalize "no runner" (None or the literal string "None") to empty string.
            if not shell_runner or shell_runner == "None": shell_runner = ""
            d["job"]["mpi_runner"] = shell_runner

    new = self.__class__(**my_kwargs)
    new.set_mpi_procs(mpi_procs)
    return new
|
java
|
public static MozuUrl getExtendedPropertiesUrl(Boolean draft, String orderId)
{
    // Build the order extended-properties resource URL from its template,
    // substituting both placeholders before resolving the final URL.
    final UrlFormatter urlTemplate =
            new UrlFormatter("/api/commerce/orders/{orderId}/extendedproperties?draft={draft}");
    urlTemplate.formatUrl("draft", draft);
    urlTemplate.formatUrl("orderId", orderId);
    return new MozuUrl(urlTemplate.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
}
|
java
|
public static boolean hasFinishedCopying(File file) {
    // Heuristic: a file still being written/copied typically holds a lock;
    // if we can acquire a shared lock over the whole file, the copy is done.
    FileInputStream fis = null;
    FileLock lock = null;
    try {
        fis = new FileInputStream(file);
        FileChannel fc = fis.getChannel();
        // Shared (read) lock over the entire file; null if someone else holds it.
        lock = fc.tryLock(0L, Long.MAX_VALUE, true);
    } catch (IOException ioe) {
        // File missing or unreadable: treat as "not finished".
        return false;
    } catch (java.nio.channels.OverlappingFileLockException ole) {
        // Bug fix: tryLock throws this unchecked exception when *this* JVM
        // already holds an overlapping lock; report "still copying" instead
        // of letting it propagate to the caller.
        return false;
    } finally {
        // Closing the stream also closes the channel, which releases the lock.
        IOUtils.close(fis);
    }
    return lock != null;
}
|
python
|
def start_agent(self, cfgin=True):
    """
    CLI interface to start 12-factor service.

    Configuration is read as JSON from stdin when ``cfgin`` is true,
    otherwise from the ``REFLEX_MONITOR_CONFIG`` environment variable
    (raw JSON or base64-encoded JSON). Built-in defaults are used for
    anything not supplied.
    """
    default_conf = {
        "threads": {
            "result": {
                "number": 0,
                "function": None
            },
            "worker": {
                "number": 0,
                "function": None
            },
        },
        "interval": {
            "refresh": 900,
            "heartbeat": 300,
            "reporting": 300,
            "test": 60
        },
        "heartbeat-hook": False
    }
    indata = {}
    if cfgin:
        indata = json.load(sys.stdin)
    elif os.environ.get("REFLEX_MONITOR_CONFIG"):
        indata = os.environ.get("REFLEX_MONITOR_CONFIG")
        if indata[0] != "{":
            # Not raw JSON -- assume base64-encoded JSON.
            indata = base64.b64decode(indata)
        # Bug fix: the environment variable holds a JSON *string* (possibly
        # base64-decoded above); it must be parsed so dictlib.union merges
        # two dicts instead of failing on a dict/str union.
        indata = json.loads(indata)
    else:
        self.NOTIFY("Using default configuration")

    # Overlay user-provided settings onto the defaults, then wire in the
    # thread entry points.
    conf = dictlib.union(default_conf, indata)
    conf['threads']['result']['function'] = self.handler_thread
    conf['threads']['worker']['function'] = self.worker_thread

    self.NOTIFY("Starting monitor Agent")
    try:
        self.configure(conf).start()
    except KeyboardInterrupt:
        # Graceful shutdown: signal every stopper that may be running.
        self.thread_stopper.set()
        if self.refresh_stopper:
            self.refresh_stopper.set()
        if self.heartbeat_stopper:
            self.heartbeat_stopper.set()
        if self.reporting_stopper:
            self.reporting_stopper.set()
|
python
|
def interpret_script(shell_script):
    """Make it appear as if commands are typed into the terminal."""
    with CaptureOutput() as capturer:
        # Feed the script line by line to an interactive bash, echoing each
        # line with a green "$ " prompt so the capture reads like a session.
        shell = subprocess.Popen(['bash', '-'], stdin=subprocess.PIPE)
        with open(shell_script) as handle:
            for line in handle:
                sys.stdout.write(ansi_wrap('$', color='green') + ' ' + line)
                sys.stdout.flush()
                # NOTE(review): `line` is str; writing it to a bytes pipe only
                # works on Python 2 (or with universal_newlines on Python 3)
                # -- confirm the targeted Python version.
                shell.stdin.write(line)
                shell.stdin.flush()
        shell.stdin.close()
        # NOTE(review): fixed delay presumably gives bash time to finish all
        # commands before the capture is read; shell.wait() would be more
        # robust -- confirm intent before changing.
        time.sleep(12)
        # Get the text that was shown in the terminal.
        captured_output = capturer.get_text()
    # Store the text that was shown in the terminal.
    filename, extension = os.path.splitext(shell_script)
    transcript_file = '%s.txt' % filename
    logger.info("Updating %s ..", format_path(transcript_file))
    with open(transcript_file, 'w') as handle:
        handle.write(ansi_strip(captured_output))
|
java
|
@SuppressWarnings("unchecked")
private BaseDescr lhsPatternBind(PatternContainerDescrBuilder<?, ?> ce,
                                 final boolean allowOr) throws RecognitionException {
    // Parses one LHS pattern, optionally prefixed by a binding label
    // ("x :" or unification "x :=") and optionally parenthesised. When
    // allowOr is true, the parenthesised form may chain "or" patterns that
    // all share the same binding. Returns the built descriptor (the pattern
    // itself, or the wrapping OrDescr) or null on parse failure.
    PatternDescrBuilder<?> pattern = null;
    CEDescrBuilder<?, OrDescr> or = null;
    BaseDescr result = null;

    // Remember the first token: if the pattern is later wrapped in an OR,
    // the OR's start position is re-anchored to it.
    Token first = input.LT(1);
    pattern = helper.start((DescrBuilder<?, ?>) ce,
                           PatternDescrBuilder.class,
                           null);
    if (pattern != null) {
        result = pattern.getDescr();
    }

    String label = null;
    boolean isUnification = false;
    // "id : ..." is a plain binding, "id := ..." a unification binding;
    // an id that is actually a CE keyword is left for other rules.
    if (input.LA(1) == DRL6Lexer.ID && input.LA(2) == DRL6Lexer.COLON && !helper.validateCEKeyword(1)) {
        label = label(DroolsEditorType.IDENTIFIER_PATTERN);
        if (state.failed)
            return null;
    } else if (input.LA(1) == DRL6Lexer.ID && input.LA(2) == DRL6Lexer.UNIFY && !helper.validateCEKeyword(1)) {
        label = unif(DroolsEditorType.IDENTIFIER_PATTERN);
        if (state.failed)
            return null;
        isUnification = true;
    }

    if (input.LA(1) == DRL6Lexer.LEFT_PAREN) {
        // Parenthesised form: "( pattern (or pattern)* )".
        try {
            match(input,
                  DRL6Lexer.LEFT_PAREN,
                  null,
                  null,
                  DroolsEditorType.SYMBOL);
            if (state.failed)
                return null;

            if (helper.validateCEKeyword(1)) {
                failMismatchedTokenException();
                return null; // in case it is backtracking
            }

            lhsPattern(pattern,
                       label,
                       isUnification);
            if (state.failed)
                return null;

            if (allowOr && helper.validateIdentifierKey(DroolsSoftKeywords.OR) && ce instanceof CEDescrBuilder) {
                if (state.backtracking == 0) {
                    // this is necessary because of the crappy bind with multi-pattern OR syntax
                    or = ((CEDescrBuilder<DescrBuilder<?, ?>, OrDescr>) ce).or();
                    result = or.getDescr();

                    // Re-parent: close the pattern builder, open the OR builder.
                    helper.end(PatternDescrBuilder.class,
                               pattern);
                    helper.start(or,
                                 CEDescrBuilder.class,
                                 null);
                    // adjust real or starting token:
                    helper.setStart(or,
                                    first);

                    // remove original pattern from the parent CE child list:
                    ((ConditionalElementDescr) ce.getDescr()).getDescrs().remove(pattern.getDescr());
                    // add pattern to the OR instead
                    or.getDescr().addDescr(pattern.getDescr());
                }

                // Consume each "or" and parse the following pattern with the
                // same label/unification flag as the first one.
                while (helper.validateIdentifierKey(DroolsSoftKeywords.OR)) {
                    match(input,
                          DRL6Lexer.ID,
                          DroolsSoftKeywords.OR,
                          null,
                          DroolsEditorType.KEYWORD);
                    if (state.failed)
                        return null;

                    pattern = helper.start(or,
                                           PatternDescrBuilder.class,
                                           null);
                    // new pattern, same binding
                    lhsPattern(pattern,
                               label,
                               isUnification);
                    if (state.failed)
                        return null;

                    helper.end(PatternDescrBuilder.class,
                               pattern);
                }
            }

            match(input,
                  DRL6Lexer.RIGHT_PAREN,
                  null,
                  null,
                  DroolsEditorType.SYMBOL);
            if (state.failed)
                return null;
        } finally {
            // Close whichever builder is outermost: the OR wrapper if one
            // was created, otherwise the single pattern.
            if (or != null) {
                helper.end(CEDescrBuilder.class,
                           or);
            } else {
                helper.end(PatternDescrBuilder.class,
                           pattern);
            }
        }
    } else {
        // Bare (non-parenthesised) pattern.
        try {
            lhsPattern(pattern,
                       label,
                       isUnification);
            if (state.failed)
                return null;
        } finally {
            helper.end(PatternDescrBuilder.class,
                       pattern);
        }
    }
    return result;
}
|
python
|
def param_labels(self):
    """Build the GetDist-style LaTeX label for every prior, in id order.

    Each label combines the prior's configured name with a subscript made of
    the owning model class's subscript tag and its 1-based component number.
    """
    class_lookup = self.variable.prior_class_dict
    model_lookup = self.variable.prior_prior_model_dict

    labels = []
    for prior_name, prior in self.variable.prior_tuples_ordered_by_id:
        name_part = self.label_config.label(prior_name)
        owning_model = model_lookup[prior]
        owning_class = class_lookup[prior]
        subscript = "{}{}".format(self.label_config.subscript(owning_class),
                                  owning_model.component_number + 1)
        labels.append("{}_{{\\mathrm{{{}}}}}".format(name_part, subscript))
    return labels
|
java
|
public String convertIfcReflectanceMethodEnumToString(EDataType eDataType, Object instanceValue) {
    // Serialize the enum literal via toString(); a null value stays null.
    if (instanceValue == null) {
        return null;
    }
    return instanceValue.toString();
}
|
java
|
@Override
public RandomVariableDifferentiable mult(RandomVariable randomVariable) {
    // Compute the product of the values, then record the operation on the
    // AAD tape together with its two operands and the operator tag.
    final RandomVariable product = getValues().mult(randomVariable);
    return new RandomVariableDifferentiableAADPathwise(
            product,
            Arrays.asList(this, randomVariable),
            OperatorType.MULT);
}
|
java
|
public static Fraction getFraction(final int whole, final int numerator, final int denominator) {
    // Validate the parts before combining; the check order and the exact
    // messages are part of the established contract.
    if (denominator == 0) {
        throw new ArithmeticException("The denominator must not be zero");
    }
    if (denominator < 0) {
        throw new ArithmeticException("The denominator must not be negative");
    }
    if (numerator < 0) {
        throw new ArithmeticException("The numerator must not be negative");
    }
    // Combine in long arithmetic so the intermediate cannot silently overflow;
    // for a negative whole part the fractional part pushes away from zero.
    final long wholeScaled = whole * (long) denominator;
    final long numeratorValue = (whole < 0) ? wholeScaled - numerator
                                            : wholeScaled + numerator;
    if (numeratorValue < Integer.MIN_VALUE || numeratorValue > Integer.MAX_VALUE) {
        throw new ArithmeticException("Numerator too large to represent as an Integer.");
    }
    return new Fraction((int) numeratorValue, denominator);
}
|
python
|
def _l_cv_weight_factor(self):
    """
    Return multiplier for L-CV weightings in case of enhanced single site analysis.
    Methodology source: Science Report SC050050, eqn. 6.15a and 6.15b
    """
    # NOTE(review): `sqrt(0)` is always 0, so the first term vanishes and b
    # reduces to the constant 0.00115. Eqn 6.15 presumably takes sqrt of some
    # quantity (e.g. a record length) here -- TODO confirm against SC050050.
    b = 0.0047 * sqrt(0) + 0.0023 / 2
    # c decays with the gauged record length (minus one degree of freedom).
    c = 0.02609 / (self.catchment.record_length - 1)
    # Weight factor in (0, 1]; approaches 1 for short records.
    return c / (b + c)
|
python
|
def get_all_descendants(parent):
    """Recursively collect every direct and indirect subclass of *parent*.

    Direct subclasses come first (in definition order), each followed
    recursively by its own descendants. Classes reachable via multiple
    bases may appear more than once.
    """
    direct = parent.__subclasses__()
    result = list(direct)
    for subclass in direct:
        result.extend(get_all_descendants(subclass))
    return result
|
python
|
def on_press(callback, suppress=False):
    """
    Invokes `callback` for every KEY_DOWN event. For details see `hook`.
    """
    def handler(event):
        # Let key-up events pass through untouched (truthy result keeps the
        # event); only key-down events reach the callback.
        if event.event_type == KEY_UP:
            return True
        return callback(event)
    return hook(handler, suppress=suppress)
|
java
|
public static ContextInfo toContextInfo(final ContextBase context, final ExceptionInfo error) {
    // Translate an evaluator-side context into its protocol-buffer form.
    final ContextInfo.Builder builder = ContextInfo.newBuilder();
    builder.setContextId(context.getId());
    builder.setEvaluatorId(context.getEvaluatorId());
    // A root context has no parent; encode that as the empty string.
    builder.setParentId(context.getParentId().orElse(""));
    builder.setEvaluatorDescriptorInfo(toEvaluatorDescriptorInfo(context.getEvaluatorDescriptor()));
    if (error != null) {
        // The exception field is optional; attach it only on failure.
        builder.setException(error);
    }
    return builder.build();
}
|
python
|
def BooleanField(default=NOTHING, required=True, repr=True, cmp=True,
                 key=None):
    """
    Create new bool field on a model.

    :param default: any boolean value
    :param bool required: whether or not the object is invalid if not provided.
    :param bool repr: include this field should appear in object's repr.
    :param bool cmp: include this field in generated comparison.
    :param string key: override name of the value when converted to dict.
    """
    return attrib(
        default=_init_fields.init_default(required, default, None),
        validator=_init_fields.init_validator(required, bool),
        repr=repr,
        cmp=cmp,
        metadata=dict(key=key),
    )
|
python
|
def is_correct(self):
    """Check if this object configuration is correct ::

    * Check if dateranges of timeperiod are valid
    * Call our parent class is_correct checker

    :return: True if the configuration is correct, otherwise False if at least one daterange
    is not correct
    :rtype: bool
    """
    state = True
    # Validate every daterange; keep going so all errors get reported.
    for daterange in self.dateranges:
        good = daterange.is_correct()
        if not good:
            self.add_error("[timeperiod::%s] invalid daterange '%s'"
                           % (self.get_name(), daterange))
        state &= good

    # Warn about non correct entries
    for entry in self.invalid_entries:
        self.add_error("[timeperiod::%s] invalid entry '%s'" % (self.get_name(), entry))

    # The parent check always runs (it may record its own errors).
    parent_ok = super(Timeperiod, self).is_correct()
    return parent_ok and state
|
java
|
public static FailHandler createFailHandlerFromString(final String failHandler) {
    // determine fail handler implementation from string value
    if (failHandler.equalsIgnoreCase(ReliableFailHandler.IDENTIFIER)) {
        return new ReliableFailHandler();
    }
    else if (failHandler.equalsIgnoreCase(UnreliableFailHandler.IDENTIFIER)) {
        return new UnreliableFailHandler();
    }
    else {
        // create fail handler using parameter as identifier or class name
        try {
            return (FailHandler) Class.forName(failHandler).newInstance();
        }
        // Multi-catch: these three reflective failures were handled by three
        // identical catch blocks; collapse them into one (Java 7+).
        catch (final ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            throw new IllegalArgumentException("failed to instantiate FailHandler instance from argument " +
                                               failHandler, e);
        }
        catch (final ClassCastException e) {
            // The class exists but does not implement the expected interface.
            throw new IllegalArgumentException("instance from argument " + failHandler +
                                               " does not implement FailHandler", e);
        }
    }
}
|
python
|
def set_image(self):
    """Parses image element and set values"""
    soup = self.full_soup
    # Drop <item> elements first so a <title>/<url>/<link> nested inside an
    # item can never be mistaken for the channel image's fields.
    for item in soup.findAll('item'):
        item.decompose()
    image = soup.find('image')

    def _tag_text(tag_name):
        # A missing <image> element or a missing child tag both surface as
        # AttributeError; either way the value is simply absent.
        try:
            return image.find(tag_name).string
        except AttributeError:
            return None

    self.image_title = _tag_text('title')
    self.image_url = _tag_text('url')
    self.image_link = _tag_text('link')
    self.image_width = _tag_text('width')
    self.image_height = _tag_text('height')
|
python
|
def initialize(self, params, repetition):
    """
    Initialize experiment parameters and default values from configuration file.
    Called by reset() at the beginning of each experiment and each repetition.

    :param params: dict of configuration values (mostly strings/numbers).
    :param repetition: repetition index, folded into the random seed so each
        repetition is reproducible but distinct.
    """
    self.name = params["name"]
    self.dataDir = params.get("datadir", "data")
    # Seed every RNG in use (torch, numpy, stdlib) for reproducibility.
    self.seed = params.get("seed", 42) + repetition
    torch.manual_seed(self.seed)
    np.random.seed(self.seed)
    random.seed(self.seed)

    # Training
    self.epochs = params.get("epochs", 1)
    self.batch_size = params.get("batch_size", 128)
    self.batches_in_epoch = params.get("batches_in_epoch", 100000)
    self.first_epoch_batch_size = params.get("first_epoch_batch_size",
                                             self.batch_size)
    self.batches_in_first_epoch = params.get("batches_in_first_epoch",
                                             self.batches_in_epoch)

    # Testing
    self.test_batch_size = params.get("test_batch_size", 1000)
    self.test_batches_in_epoch = params.get("test_batches_in_epoch", 100000)
    # Bug fix: materialize as lists. On Python 3, `map` returns a one-shot
    # iterator that would be silently empty on any second iteration.
    self.noise_values = [float(x) for x in
                         params.get("noise_values", "0.0, 0.1").split(", ")]
    self.best_noise_score = 0.0
    self.best_epoch = -1

    # Optimizer
    # SECURITY NOTE: eval() on config values executes arbitrary code; these
    # must only ever come from trusted experiment configuration files.
    self.optimizer_class = eval(params.get("optimizer", "torch.optim.SGD"))
    self.lr = params.get("learning_rate", 0.05)
    self.momentum = params.get("momentum", 0.9)
    self.weight_decay = params.get("weight_decay", 0.0)
    self.optimizer_params = eval(params.get("optimizer_params", "{}"))
    self.lr_scheduler_gamma = params.get("lr_scheduler_gamma", 0.9)
    self.loss_function = eval(params.get("loss_function",
                                         "torch.nn.functional.cross_entropy"))

    # Network parameters
    self.conv1_sparsity = params.get("conv1_sparsity", 1.0)
    self.network_type = params.get("network_type", "sparse")
    self.growth_rate = params.get("growth_rate", 12)
    # Same py3 `map` fix for the remaining comma-separated lists.
    self.nblocks = [int(x) for x in
                    params.get("nblocks", "6, 12, 24, 16").split(", ")]
    self.k_inference_factor = params.get("k_inference_factor", 1.5)
    self.dense_sparsities = [float(x) for x in
                            params.get("dense_sparsities",
                                       "1.0, 1.0, 1.0, 1.0").split(", ")]
    self.transition_sparsities = [float(x) for x in
                                  params.get("transition_sparsities",
                                             "0.1, 0.1, 0.2").split(", ")]
    self.linear_sparsity = params.get("linear_sparsity", 0.0)
    self.linear_weight_sparsity = params.get("linear_weight_sparsity", 0.3)
    self.linear_n = params.get("linear_n", 500)
    self.avg_pool_size = params.get("avg_pool_size", 2)
    self.dense_c1_out_planes = params.get("dense_c1_out_planes", 4 * self.growth_rate)
|
python
|
def time_delta(self, end_datetime=None):
    """
    Get a timedelta object

    Returns a timedelta with a whole number of seconds drawn uniformly
    between "now" and the parsed `end_datetime` bound.
    """
    # Interval endpoints; the _parse_* helpers normalize the accepted forms.
    start_datetime = self._parse_start_datetime('now')
    end_datetime = self._parse_end_datetime(end_datetime)
    # NOTE(review): this assumes _parse_* return numeric (epoch) timestamps,
    # so the difference is an int comparable with 0 -- confirm; if they
    # returned datetimes, sorted([0, seconds]) would raise TypeError.
    seconds = end_datetime - start_datetime
    # sorted() makes randint's bounds valid for negative spans as well.
    ts = self.generator.random.randint(*sorted([0, seconds]))
    return timedelta(seconds=ts)
|
java
|
static private void append(StringBuilder tgt, String pfx, int dgt, long val) {
    // Append pfx followed by val, left-padded with '0' so the numeric part
    // occupies at least dgt digits. No padding is applied when dgt <= 1.
    tgt.append(pfx);
    if (dgt > 1) {
        // Start from the maximum padding and subtract one per extra digit
        // that val has beyond the first.
        int zeros = dgt - 1;
        long rest = val;
        while (rest > 9 && zeros > 0) {
            rest /= 10;
            zeros--;
        }
        while (zeros-- > 0) {
            tgt.append('0');
        }
    }
    tgt.append(val);
}
|
java
|
private File generateJsonFromIndividualESAs(Path jsonDirectory, Map<String, String> shortNameMap) throws IOException, RepositoryException, InstallException {
    // Builds (or extends) a single-file JSON repository describing every
    // individual ESA listed in `data`, writing to <jsonDirectory>/SingleJson.json.
    String dir = jsonDirectory.toString();
    List<File> esas = (List<File>) data.get(INDIVIDUAL_ESAS);
    File singleJson = new File(dir + "/SingleJson.json");
    for (File esa : esas) {
        try {
            // Record the ESA's short name for later lookup by callers.
            populateFeatureNameFromManifest(esa, shortNameMap);
        } catch (IOException e) {
            throw new InstallException(Messages.INSTALL_KERNEL_MESSAGES.getLogMessage("ERROR_ESA_NOT_FOUND", esa.getAbsolutePath()));
        }
        // Reuse the JSON repository if present, otherwise create it empty.
        SingleFileRepositoryConnection mySingleFileRepo = null;
        if (singleJson.exists()) {
            mySingleFileRepo = new SingleFileRepositoryConnection(singleJson);
        } else {
            try {
                mySingleFileRepo = SingleFileRepositoryConnection.createEmptyRepository(singleJson);
            } catch (IOException e) {
                throw new InstallException(Messages.INSTALL_KERNEL_MESSAGES.getLogMessage("ERROR_SINGLE_REPO_CONNECTION_FAILED", dir,
                                                                                          esa.getAbsolutePath()));
            }
        }
        // Parse the ESA into a repository resource and upload it.
        Parser<? extends RepositoryResourceWritable> parser = new EsaParser(true);
        RepositoryResourceWritable resource = parser.parseFileToResource(esa, null, null);
        resource.updateGeneratedFields(true);
        resource.setRepositoryConnection(mySingleFileRepo);
        // Overload the Maven coordinates field with the file path, since the ESA should be installed from that path
        resource.setMavenCoordinates(esa.getAbsolutePath());
        resource.uploadToMassive(new AddThenDeleteStrategy());
    }
    return singleJson;
}
|
python
|
def imm_copy(imm, **kwargs):
    '''
    imm_copy(imm, a=b, c=d...) yields a persisent copy of the immutable object imm that differs from
    imm only in that the parameters a, c, etc. have been changed to have the values b, d, etc.
    If the object imm is persistent and no changes are made, imm is returned. If imm is transient,
    a persistent copy of imm is always made.
    '''
    if not is_imm(imm):
        raise ValueError('Non-immutable given to imm_copy')
    if imm_is_persistent(imm) and len(kwargs) == 0:
        # no changes and copy risk
        return imm
    dup = copy.copy(imm)
    # A transient duplicate is first promoted to persistent.
    if _imm_is_trans(dup): dup = dup.persist()
    # Bypass the immutable __getattribute__ machinery to reach the raw dict.
    dd = object.__getattribute__(dup, '__dict__')
    if _imm_is_persist(dup):
        # we update values directly then recompute checks and invalidate cache
        all_checks = set([])
        all_deps = set([])
        params = _imm_param_data(imm)
        for (p,v) in six.iteritems(kwargs):
            if p not in params:
                raise ValueError('attempt to set non-parameter \'%s\' in imm_copy()' % p)
            # Parameter metadata: transform fn, check fns with their argument
            # lists, and the names of dependent (cached) values.
            (_, tx_fn, arg_lists, check_fns, deps) = params[p]
            for (arg_list, check_fn) in zip(arg_lists, check_fns):
                all_checks.add((tuple(arg_list), check_fn))
            all_deps |= set(deps)
            # Store the (optionally transformed) new value directly.
            dd[p] = v if tx_fn is None else tx_fn(v)
        # now invalidate the deps
        for dep in all_deps:
            if dep in dd:
                del dd[dep]
        # now run the tests
        for (arg_list, check_fn) in all_checks:
            if not check_fn(*[getattr(dup, arg) for arg in arg_list]):
                raise ValueError(
                    'Requirement \'%s%s\' failed when copying immutable' % (check_fn, arg_list))
    else:
        # this is an initial-state immutable...
        # Set values directly, then walk the init -> trans -> persist states,
        # which runs the full transform/check machinery.
        for (p,v) in six.iteritems(kwargs): object.__setattr__(dup, p, v)
        _imm_init_to_trans(dup)
        _imm_trans_to_persist(dup)
    return dup
|
python
|
def group_values(self, group_name):
    """Return all distinct values seen for *group_name*, in first-seen order."""
    position = self.groups.index(group_name)
    seen = []
    for key in self.data_keys:
        value = key[position]
        if value not in seen:
            seen.append(value)
    return seen
|
java
|
@SuppressWarnings("unchecked")
public void addBeanContextServicesListener(BeanContextServicesListener listener) {
    // Null listeners are rejected up front.
    if (listener == null) {
        throw new NullPointerException();
    }
    // Serialize mutations of the shared listener list.
    synchronized (bcsListeners) {
        bcsListeners.add(listener);
    }
}
|
python
|
def guesstype(timestr):
    """Tries to guess whether a string represents a time or a time delta and
    returns the appropriate object.

    :param timestr (required)
        The string to be analyzed
    """
    # Pad with spaces so the relative markers only match as whole words.
    padded = " {} ".format(timestr)
    # "in ..." / "... ago" always denote an absolute time.
    if " in " in padded or " ago " in padded:
        return Chronyk(timestr)
    # A bare unit word ("3 hours", "two weeks") denotes a delta.
    units = ("second", "minute", "hour", "day", "week", "month", "year")
    if any(unit in padded for unit in units):
        return ChronykDelta(timestr)
    # Default: treat as an absolute time.
    return Chronyk(timestr)
|
python
|
def _sign(self, data):
    """
    Compute a signature string according to the CloudStack
    signature method (hmac/sha1).
    """
    # Python2/3 urlencode aren't good enough for this task: build the
    # canonical query string by hand from the key-sorted items.
    pairs = ["=".join((key, cs_encode(value)))
             for key, value in sorted(data.items())]
    params = "&".join(pairs)
    # CloudStack signs the *lower-cased* query string with HMAC-SHA1.
    mac = hmac.new(
        self.secret.encode('utf-8'),
        msg=params.lower().encode('utf-8'),
        digestmod=hashlib.sha1)
    return base64.b64encode(mac.digest()).decode('utf-8').strip()
|
java
|
public void sendUnreadMessagesAck(ArrayList<AVIMMessage> messages, String conversationId) {
    // Only ack when the client is configured for unread-count-only push and
    // there is actually something to acknowledge.
    if (AVIMOptions.getGlobalOptions().isOnlyPushCount() && null != messages && messages.size() > 0) {
        // Ack with the newest timestamp among the given messages.
        Long largestTimeStamp = 0L;
        for (AVIMMessage message : messages) {
            if (largestTimeStamp < message.getTimestamp()) {
                largestTimeStamp = message.getTimestamp();
            }
        }
        AVConnectionManager.getInstance().sendPacket(ConversationAckPacket.getConversationAckPacket(getSelfPeerId(),
                conversationId, largestTimeStamp));
    }
}
|
java
|
public WasInformedBy newWasInformedBy(QualifiedName id, QualifiedName a2, QualifiedName a1, Collection<Attribute> attributes) {
    // Delegate to the attribute-free factory, then attach the attributes.
    final WasInformedBy wib = newWasInformedBy(id, a2, a1);
    setAttributes(wib, attributes);
    return wib;
}
|
java
|
public void marshall(DeleteApiKeyRequest deleteApiKeyRequest, ProtocolMarshaller protocolMarshaller) {
    // Serialize the DeleteApiKey request fields into the protocol stream.
    if (deleteApiKeyRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Field-by-field marshalling against the pre-declared bindings.
        protocolMarshaller.marshall(deleteApiKeyRequest.getApiId(), APIID_BINDING);
        protocolMarshaller.marshall(deleteApiKeyRequest.getId(), ID_BINDING);
    } catch (Exception e) {
        // Wrap any failure in the SDK's client-side exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def conv_stack(name, x, mid_channels, output_channels, dilations=None,
               activation="relu", dropout=0.0):
    """3-layer convolutional stack.

    Args:
        name: variable scope.
        x: 5-D Tensor.
        mid_channels: Number of output channels of the first layer.
        output_channels: Number of output channels.
        dilations: Dilations to apply in the first 3x3 layer and the last 3x3 layer.
            By default, apply no dilations.
        activation: relu or gatu.
            If relu, the second layer is relu(W*x)
            If gatu, the second layer is tanh(W1*x) * sigmoid(W2*x)
        dropout: float, 0.0
    Returns:
        output: output of 3 layer conv network.
    """
    with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
        # First two layers with the requested activation (and dropout).
        x = conv_block("conv_block", x, mid_channels=mid_channels,
                       dilations=dilations, activation=activation,
                       dropout=dropout)
        # Final layer. Zero-initialized and without actnorm -- presumably so
        # the stack's initial output is zero (Glow-style init); confirm in
        # the `conv` helper.
        x = conv("zeros", x, apply_actnorm=False, conv_init="zeros",
                 output_channels=output_channels, dilations=dilations)
        return x
|
java
|
@Override
public void commitJob(JobContext context) throws IOException {
    // Let the wrapped committer finalize its GCS output first.
    super.commitJob(context);

    // Get the destination configuration information.
    Configuration conf = context.getConfiguration();
    TableReference destTable = BigQueryOutputConfiguration.getTableReference(conf);
    String destProjectId = BigQueryOutputConfiguration.getProjectId(conf);
    String writeDisposition = BigQueryOutputConfiguration.getWriteDisposition(conf);
    Optional<BigQueryTableSchema> destSchema = BigQueryOutputConfiguration.getTableSchema(conf);
    String kmsKeyName = BigQueryOutputConfiguration.getKmsKeyName(conf);
    BigQueryFileFormat outputFileFormat = BigQueryOutputConfiguration.getFileFormat(conf);
    List<String> sourceUris = getOutputFileURIs();

    try {
        // Load the committed files from GCS into BigQuery; a null schema lets
        // BigQuery determine it. The trailing boolean flag's semantics live
        // in importFromGcs -- presumably "await completion"; confirm there.
        getBigQueryHelper()
            .importFromGcs(
                destProjectId,
                destTable,
                destSchema.isPresent() ? destSchema.get().get() : null,
                kmsKeyName,
                outputFileFormat,
                writeDisposition,
                sourceUris,
                true);
    } catch (InterruptedException e) {
        throw new IOException("Failed to import GCS into BigQuery", e);
    }

    // Remove intermediate output now that the load has been issued.
    cleanup(context);
}
|
python
|
def options(self, parser, env):
    """
    Add options to command line.
    """
    super(LeakDetectorPlugin, self).options(parser, env)

    def _add(name, dest, help_text="", **kwargs):
        # Shorthand so each option reads as one compact declaration.
        parser.add_option(name, dest=dest, help=help_text, **kwargs)

    _add("--leak-detector-level", "leak_detector_level",
         "Level at which to detect leaks and report memory deltas "
         "(0=None, 1=Dir, 2=Module, 3=TestCaseClass, 4=Test)",
         action="store",
         default=env.get('NOSE_LEAK_DETECTOR_LEVEL'))
    _add("--leak-detector-report-delta", "leak_detector_report_delta",
         action="store_true",
         default=env.get('NOSE_LEAK_DETECTOR_REPORT_DELTA'))
    _add("--leak-detector-patch-mock", "leak_detector_patch_mock",
         action="store_true",
         default=env.get('NOSE_LEAK_DETECTOR_PATCH_MOCK', True))
    _add("--leak-detector-add-traceback", "leak_detector_save_traceback",
         action="store_true",
         default=env.get('NOSE_LEAK_DETECTOR_SAVE_TRACEBACK', False))

    # Comma-separated patterns from the environment; fall back to the single
    # built-in marker when the variable is empty or unset.
    ignore_default = (list(filter(operator.truth,
                                  env.get('NOSE_LEAK_DETECTOR_IGNORE_PATTERNS',
                                          '').split(','))) or
                      ['NOSE_LEAK_DETECTOR_IGNORE'])
    _add("--leak-detector-ignore-pattern", "leak_detector_ignore_patterns",
         action="append",
         default=ignore_default)
|
python
|
def invoice_items(self, **params):
    """Return a deferred."""
    # Scope the query to this customer (overriding any caller-supplied
    # 'customer') before delegating to the collection API.
    query = dict(params, customer=self.id)
    return InvoiceItem.all(self.api_key, **query)
|
java
|
public <T> T parse(JsonObject obj, Class<T> clazz) {
    // Thin adapter over Gson's tree-to-object deserialization.
    final T parsed = gson.fromJson(obj, clazz);
    return parsed;
}
|
python
|
def _get_dependencies_from_json(ireq, sources):
    """Retrieves dependencies for the install requirement from the JSON API.

    :param ireq: A single InstallRequirement
    :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
    :param sources: iterable of index source dicts (each may carry a "url").
    :return: A set of dependency lines for generating new InstallRequirements.
    :rtype: set(str) or None
    """
    if os.environ.get("PASSA_IGNORE_JSON_API"):
        return
    # It is technically possible to parse extras out of the JSON API's
    # requirement format, but it is such a chore let's just use the simple API.
    if ireq.extras:
        return
    try:
        version = get_pinned_version(ireq)
    except ValueError:
        # Unpinned requirement: the JSON API needs an exact version.
        return
    # Only simple-API indexes can be rewritten into JSON API endpoints.
    url_prefixes = [
        proc_url[:-7]  # Strip "/simple".
        for proc_url in (
            raw_url.rstrip("/")
            for raw_url in (source.get("url", "") for source in sources)
        )
        if proc_url.endswith("/simple")
    ]
    session = requests.session()
    # Bug fix: close the session on *every* exit path. The original only
    # closed it after the loop, leaking the connection pool whenever
    # dependencies were found and returned from inside the loop.
    try:
        for prefix in url_prefixes:
            url = "{prefix}/pypi/{name}/{version}/json".format(
                prefix=prefix,
                name=packaging.utils.canonicalize_name(ireq.name),
                version=version,
            )
            try:
                dependencies = _get_dependencies_from_json_url(url, session)
                if dependencies is not None:
                    return dependencies
            except Exception as e:
                # Best effort: fall through to the next index on any failure.
                print("unable to read dependencies via {0} ({1})".format(url, e))
    finally:
        session.close()
    return
|
python
|
def filter_step(G, covY, pred, yt):
    """Filtering step of Kalman filter.

    Parameters
    ----------
    G: (dy, dx) numpy array
        mean of Y_t | X_t is G * X_t
    covY: (dy, dy) numpy array
        covariance of Y_t | X_t
    pred: MeanAndCov object
        predictive distribution at time t
    yt: numpy array
        observation at time t

    Returns
    -------
    filt: MeanAndCov object
        filtering distribution at time t
    logpyt: float
        log density of Y_t | Y_{0:t-1}
    """
    # data prediction: moments of Y_t | Y_{0:t-1}
    data_pred_mean = np.matmul(pred.mean, G.T)
    data_pred_cov = dotdot(G, pred.cov, G.T) + covY
    # Univariate observations use the scalar Normal density.
    if covY.shape[0] == 1:
        logpyt = dists.Normal(loc=data_pred_mean,
                              scale=np.sqrt(data_pred_cov)).logpdf(yt)
    else:
        logpyt = dists.MvNormal(loc=data_pred_mean,
                                cov=data_pred_cov).logpdf(yt)
    # filter: Kalman gain, then the standard mean/covariance update
    residual = yt - data_pred_mean
    gain = dotdot(pred.cov, G.T, inv(data_pred_cov))
    filt_mean = pred.mean + np.matmul(residual, gain.T)
    filt_cov = pred.cov - dotdot(gain, G, pred.cov)
    return MeanAndCov(mean=filt_mean, cov=filt_cov), logpyt
|
java
|
@Override
public void destroy(Exception e) {
    // Tear down the HTTP/2 connection: wake any stream waiting on a window
    // update, destroy the per-stream links, then drop all connection state.
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, "destroy entry");
    }
    H2StreamProcessor stream;
    for (Integer i : streamTable.keySet()) {
        stream = streamTable.get(i);
        // notify streams waiting for a window update
        synchronized (stream) {
            stream.notifyAll();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "destroying " + stream + ", " + stream.getId());
        }
        // Stream 0 is the connection-level stream; it has no inbound link.
        if (stream.getId() != 0) {
            stream.getWrappedInboundLink().destroy(e);
        }
    }
    // Null out references so the connection state can be garbage collected.
    initialVC = null;
    frameReadProcessor = null;
    h2MuxReadCallback = null;
    h2MuxTCPConnectionContext = null;
    h2MuxTCPReadContext = null;
    h2MuxTCPWriteContext = null;
    localConnectionSettings = null;
    remoteConnectionSettings = null;
    readContextTable = null;
    writeContextTable = null;
    super.destroy(e);
}
|
java
|
private void fvswap(int[] fmap, int yyp1, int yyp2, int yyn) {
    // Swap yyn consecutive fmap entries starting at yyp1 with the yyn
    // entries starting at yyp2, element by element.
    for (int offset = 0; offset < yyn; offset++) {
        fswap(fmap, yyp1 + offset, yyp2 + offset);
    }
}
|
python
|
def _remote_download(self, url):
    """To download the remote plugin package,
    there are four methods of setting filename according to priority,
    each of which stops setting when a qualified filename is obtained,
    and an exception is triggered when a qualified valid filename is ultimately unavailable.
    1. Add url `plugin_filename` query parameters
    2. The file name is resolved in the url, eg: http://xx.xx.com/plugin-v0.0.1.tar.gz
    3. Parse the Content-Disposition in the return header
    4. Parse the Content-Type in the return header

    :raises InstallError: on an invalid URL, a failed open, or when no
        valid filename could be derived from any of the four methods.
    """
    #: Try to set filename in advance based on the previous two steps
    if self.__isValidUrl(url):
        filename = self.__getFilename(url, scene=1)
        if not filename:
            filename = self.__getFilename(url, scene=2)
        #: fix UnboundLocalError
        f = None
        try:
            f = urllib2.urlopen(url, timeout=10)
        except (AttributeError, ValueError, urllib2.URLError):
            raise InstallError("Open URL Error")
        else:
            # Fall back to the response headers (scenes 3 and 4).
            if not filename:
                filename = self.__getFilename(f, scene=3)
            if not filename:
                filename = self.__getFilename(f, scene=4)
            if filename and self.__isValidFilename(filename):
                suffix = self.__getFilenameSuffix(filename)
                # Spool the download into a temp file, unpack it by archive
                # type, and always remove the temp file afterwards.
                with NamedTemporaryFile(mode='w+b', prefix='fpk-', suffix=suffix, delete=False) as fp:
                    fp.write(f.read())
                    filename = fp.name
                try:
                    self.__unpack_tgz(filename) if self.__isValidTGZ(suffix) else self.__unpack_zip(filename)
                finally:
                    os.remove(filename)
            else:
                raise InstallError("Invalid Filename")
        finally:
            # Always release the HTTP connection.
            if f is not None:
                f.close()
    else:
        raise InstallError("Invalid URL")
|
python
|
def detectFileEncoding(self, fileName):
    '''
    Detect content encoding of specific file.

    It will return None if it can't determine the encoding.
    '''
    try:
        import chardet
    except ImportError:
        # Detection is optional; without chardet we simply don't guess.
        return

    # 2048 bytes is enough for a confident guess and keeps large files cheap.
    with open(fileName, 'rb') as inputFile:
        raw = inputFile.read(2048)

    result = chardet.detect(raw)
    encoding = result['encoding']
    # Bug fix: chardet may report None as the encoding (e.g. for empty
    # input); guard before calling .lower() on it.
    if result['confidence'] > 0.9 and encoding:
        if encoding.lower() == 'ascii':
            # UTF-8 files can be falsely detected as ASCII files if they
            # don't contain non-ASCII characters in first 2048 bytes.
            # We map ASCII to UTF-8 to avoid such situations.
            return 'utf-8'
        return encoding
|
python
|
def file_observer(population, num_generations, num_evaluations, args):
    """Print the output of the evolutionary computation to a file.

    This function saves the results of the evolutionary computation
    to two files. The first file, which by default is named
    'inspyred-statistics-file-<timestamp>.csv', contains the basic
    generational statistics of the population throughout the run
    (worst, best, median, and average fitness and standard deviation
    of the fitness values). The second file, which by default is named
    'inspyred-individuals-file-<timestamp>.csv', contains every individual
    during each generation of the run. Both files may be passed to the
    function as keyword arguments (see below).

    The format of each line of the statistics file is as follows::

        generation number, population size, worst, best, median, average, standard deviation

    The format of each line of the individuals file is as follows::

        generation number, individual number, fitness, string representation of candidate

    .. note::
       This function makes use of the ``inspyred.ec.analysis.fitness_statistics``
       function, so it is subject to the same requirements.

    .. Arguments:
       population -- the population of Individuals
       num_generations -- the number of elapsed generations
       num_evaluations -- the number of candidate solution evaluations
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *statistics_file* -- a file object (default: see text)
    - *individuals_file* -- a file object (default: see text)
    """
    def _get_or_open(key, prefix):
        # Reuse a caller-supplied file object, or open (and remember in
        # args) a timestamped default.
        try:
            return args[key]
        except KeyError:
            handle = open('{0}-{1}.csv'.format(prefix,
                                               time.strftime('%m%d%Y-%H%M%S')), 'w')
            args[key] = handle
            return handle

    statistics_file = _get_or_open('statistics_file', 'inspyred-statistics-file')
    individuals_file = _get_or_open('individuals_file', 'inspyred-individuals-file')

    stats = inspyred.ec.analysis.fitness_statistics(population)
    statistics_file.write('{0}, {1}, {2}, {3}, {4}, {5}, {6}\n'.format(
        num_generations, len(population), stats['worst'], stats['best'],
        stats['median'], stats['mean'], stats['std']))
    for index, individual in enumerate(population):
        individuals_file.write('{0}, {1}, {2}, {3}\n'.format(
            num_generations, index, individual.fitness, str(individual.candidate)))
    statistics_file.flush()
    individuals_file.flush()
|
java
|
/**
 * Handles a GET_BLOCK_CRC request: reads the request header from {@code in},
 * locates the replica, and replies on the client socket with an OP_STATUS
 * code followed (on success) by the block-level CRC.
 *
 * @param in stream positioned just after the op code
 * @param versionAndOpcode the version/opcode header already consumed by the caller
 * @throws IOException if the block is unknown or any stream operation fails
 */
void getBlockCrc(DataInputStream in, VersionAndOpcode versionAndOpcode)
    throws IOException {
  // header
  BlockChecksumHeader blockChecksumHeader =
      new BlockChecksumHeader(versionAndOpcode);
  blockChecksumHeader.readFields(in);
  final int namespaceId = blockChecksumHeader.getNamespaceId();
  // Length 0 here: only id + generation stamp identify the replica.
  final Block block = new Block(blockChecksumHeader.getBlockId(), 0,
      blockChecksumHeader.getGenStamp());
  DataOutputStream out = null;
  ReplicaToRead ri = datanode.data.getReplicaToRead(namespaceId, block);
  if (ri == null) {
    throw new IOException("Unknown block");
  }
  updateCurrentThreadName("getting CRC checksum for block " + block);
  try {
    //write reply
    out = new DataOutputStream(
        NetUtils.getOutputStream(s, datanode.socketWriteTimeout));
    int blockCrc;
    if (ri.hasBlockCrcInfo()) {
      // There is actually a short window that the block is reopened
      // and we got exception when call getBlockCrc but it's OK. It's
      // only happens for append(). So far we don't optimize for this
      // use case. We can do it later when necessary.
      //
      blockCrc = ri.getBlockCrc();
    } else {
      // CRC not cached on the replica: recompute from the checksum data,
      // choosing the reader that matches the on-disk checksum layout.
      try {
        if (ri.isInlineChecksum()) {
          blockCrc = BlockInlineChecksumReader.getBlockCrc(datanode, ri,
              namespaceId, block);
        } else {
          blockCrc = BlockWithChecksumFileReader.getBlockCrc(datanode, ri,
              namespaceId, block);
        }
      } catch (IOException ioe) {
        // Report the failure to the client before propagating it.
        LOG.warn("Exception when getting Block CRC", ioe);
        out.writeShort(DataTransferProtocol.OP_STATUS_ERROR);
        out.flush();
        throw ioe;
      }
    }
    out.writeShort(DataTransferProtocol.OP_STATUS_SUCCESS);
    out.writeLong(blockCrc);
    out.flush();
  } finally {
    IOUtils.closeStream(out);
  }
}
|
python
|
def _load_plugins(self):
    """Load the bundled plugin modules, then any user-defined ones.

    Raises ``errors.PluginImport`` when the base plugin package cannot be
    imported and ``errors.UserPluginImport`` when importing modules from
    the user's plugin directory fails.
    """
    # Base plugins ship with the package; failing to import them is fatal.
    try:
        __import__('focus.plugin.modules')
    except ImportError as err:
        raise errors.PluginImport(unicode(err))

    # User plugins live under <data_dir>/plugins; any failure there is
    # surfaced as a distinct error type.
    try:
        _import_modules(os.path.join(self._data_dir, 'plugins'))
    except Exception as err:
        raise errors.UserPluginImport(unicode(err))
|
java
|
/**
 * Returns {@code true} iff every given {@link Optional} holds a value.
 * An empty argument list vacuously yields {@code true}.
 */
public static boolean isPresentAll(Optional<?>... optionals) {
    for (Optional<?> candidate : optionals) {
        if (candidate.isPresent()) {
            continue;
        }
        return false;
    }
    return true;
}
|
java
|
/**
 * Runs a sorted, paged query through the underlying data-access layer.
 *
 * @param query the query to execute
 * @param sort the sort specification
 * @param skip number of results to skip (paging offset)
 * @param limit maximum number of results to return
 * @param params positional query parameters
 * @return the matching entities in sorted order
 */
protected final List<T> findSortedByQuery(String query, String sort, Integer skip, Integer limit, Object... params) {
    return dataAccess.findSortedByQuery(query, sort, skip, limit, params);
}
|
java
|
/**
 * Looks up the factory registered for the given class.
 *
 * @param classDefinition the class whose factory is requested
 * @return the registered {@link Provider}, or {@code null} if none is bound
 */
@Nullable
public <T> Provider<T> getFactory(Class<T> classDefinition) {
    // The map is keyed by raw Class, so the cast to Provider<T> is unchecked
    // but safe by registration convention.
    //noinspection unchecked
    return factories.getRootValue().get(classDefinition);
}
|
java
|
/**
 * Extracts one {@link Subunit} per protein chain of the structure, then
 * discards subunits shorter than an adjusted minimum sequence length.
 *
 * @param structure the structure to decompose
 * @param absMinLen absolute minimum subunit length
 * @param fraction fractional threshold used when adjusting the minimum
 * @param minLen baseline minimum subunit length
 * @return the surviving subunits, in chain order
 */
public static List<Subunit> extractSubunits(Structure structure,
        int absMinLen, double fraction, int minLen) {

    List<Subunit> subunits = new ArrayList<Subunit>();
    for (Chain chain : structure.getPolyChains()) {
        // Only protein chains contribute subunits.
        if (!chain.isProtein()) {
            continue;
        }
        Atom[] representatives = StructureTools.getRepresentativeAtomArray(chain);
        logger.debug("Chain " + chain.getId() + "; CA Atoms: " + representatives.length + "; SEQRES: " + chain.getSeqResSequence());
        if (representatives.length == 0) {
            continue;
        }
        subunits.add(new Subunit(representatives, chain.getId(), null, structure));
    }

    // Calculate the minimum length of a Subunit.
    int adjustedMinLen = calcAdjustedMinimumSequenceLength(subunits,
            absMinLen, fraction, minLen);
    logger.debug("Adjusted minimum sequence length: " + adjustedMinLen);

    // Drop subunits that fall below the adjusted threshold.
    for (Iterator<Subunit> it = subunits.iterator(); it.hasNext();) {
        if (it.next().size() < adjustedMinLen) {
            it.remove();
        }
    }
    return subunits;
}
|
python
|
def write(self, arg, **kwargs):
    """Write instance to file.

    ``arg`` may be an already-open (seekable) file object, which is used
    as-is, or a filesystem path, which is opened in binary mode and
    closed here. Extra keyword arguments are forwarded to ``_tofile``.
    """
    if not hasattr(arg, 'seek'):
        # Treat the argument as a path.
        with open(arg, 'wb') as stream:
            self._tofile(stream, **kwargs)
    else:
        self._tofile(arg, **kwargs)
|
java
|
/**
 * Executes a SQL statement containing named parameters and maps the
 * result set with the given handler.
 *
 * @param conn the JDBC connection to use
 * @param sql the SQL text with named parameters
 * @param rsh the result-set handler producing the return value
 * @param paramMap named parameter values
 * @return the handler's result
 * @throws SQLException if the query fails
 */
public static <T> T query(Connection conn, String sql, RsHandler<T> rsh, Map<String, Object> paramMap) throws SQLException {
    // Translate named parameters into positional form before delegating.
    NamedSql parsed = new NamedSql(sql, paramMap);
    return query(conn, parsed.getSql(), rsh, parsed.getParams());
}
|
python
|
def on_message(self, unused_channel, basic_deliver, properties, body):
    """Called on receipt of a message from a queue.

    Processes the message using the self._process method or function and
    positively acknowledges the queue if successful. If processing is not
    successful, the message can either be rejected, quarantined or
    negatively acknowledged, depending on the failure mode.

    :param basic_deliver: AMQP basic.deliver method
    :param properties: Message properties
    :param body: Message body
    :returns: None
    """
    # Optionally validate that the message carries a tx_id header before
    # doing any work; messages without one are rejected outright.
    if self.check_tx_id:
        try:
            tx_id = self.tx_id(properties)
            logger.info('Received message',
                        queue=self._queue,
                        delivery_tag=basic_deliver.delivery_tag,
                        app_id=properties.app_id,
                        tx_id=tx_id)
        except KeyError as e:
            # Headers present but no tx_id key.
            self.reject_message(basic_deliver.delivery_tag)
            logger.error("Bad message properties - no tx_id",
                         action="rejected",
                         exception=str(e))
            return None
        except TypeError as e:
            # No headers object at all on the properties.
            self.reject_message(basic_deliver.delivery_tag)
            logger.error("Bad message properties - no headers",
                         action="rejected",
                         exception=str(e))
            return None
    else:
        logger.debug("check_tx_id is False. Not checking tx_id for message.",
                     delivery_tag=basic_deliver.delivery_tag)
        tx_id = None
    try:
        # A TypeError from process() indicates a wrong call signature, a
        # programming error, and is treated as quarantinable.
        try:
            self.process(body.decode("utf-8"), tx_id)
        except TypeError:
            logger.error('Incorrect call to process method')
            raise QuarantinableError
        self.acknowledge_message(basic_deliver.delivery_tag,
                                 tx_id=tx_id)
    except (QuarantinableError, BadMessageError) as e:
        # Throw it into the quarantine queue to be dealt with
        try:
            self.quarantine_publisher.publish_message(body, headers={'tx_id': tx_id})
            self.reject_message(basic_deliver.delivery_tag, tx_id=tx_id)
            logger.error("Quarantinable error occured",
                         action="quarantined",
                         exception=str(e),
                         tx_id=tx_id)
        except PublishMessageError:
            # Quarantining failed: keep the message on the queue (requeue)
            # so it can be retried rather than lost.
            logger.error("Unable to publish message to quarantine queue. Rejecting message and requeuing.")
            self.reject_message(basic_deliver.delivery_tag,
                                requeue=True,
                                tx_id=tx_id)
    except RetryableError as e:
        # Transient failure: nack so the broker can redeliver.
        self.nack_message(basic_deliver.delivery_tag, tx_id=tx_id)
        logger.error("Failed to process",
                     action="nack",
                     exception=str(e),
                     tx_id=tx_id)
    except Exception as e:
        # Unknown failure: nack and log the full traceback.
        self.nack_message(basic_deliver.delivery_tag, tx_id=tx_id)
        logger.exception("Unexpected exception occurred")
        logger.error("Failed to process",
                     action="nack",
                     exception=str(e),
                     tx_id=tx_id)
|
java
|
/**
 * Plays all actions through a single source created by the given factory.
 * If the source cannot be initialized the run is aborted; otherwise the
 * source is guaranteed to be closed when playing finishes.
 */
@Override
public void play(SourceFactory sourceFactory, Sink<Event> sink) {
    LOG.info("Actions will be played using one source.");
    final Source source;
    try {
        source = sourceFactory.create();
    } catch (InitializationFailedException e) {
        LOG.error("Source initialization failed. Can not proceed with playing, stopping...", e);
        return;
    }
    try {
        play(sink, source);
    } finally {
        // Always release the source, even if playing throws.
        source.close();
    }
}
|
python
|
def _handle_invalid_tag_start(self):
    """Handle the (possible) start of an implicitly closing single tag.

    Attempts to parse the upcoming text as such a tag; if the tag name is
    not one that may appear only as a single tag, the route is abandoned
    and the literal "</" is emitted as plain text instead.
    """
    # Remember where to rewind to if this turns out not to be a tag.
    reset = self._head + 1
    self._head += 2
    try:
        # Only single-only tags qualify; anything else invalidates this route.
        if not is_single_only(self.tag_splitter.split(self._read())[0]):
            raise BadRoute()
        tag = self._really_parse_tag()
    except BadRoute:
        # Not a valid tag start: rewind and emit "</" literally.
        self._head = reset
        self._emit_text("</")
    else:
        tag[0].invalid = True  # Set flag of TagOpenOpen
        self._emit_all(tag)
|
python
|
def _handle_ctrl_c(self, *args):
"""Handle the keyboard interrupts."""
if self.anybar: self.anybar.change("exclamation")
if self._stop:
print("\nForced shutdown...")
raise SystemExit
if not self._stop:
hline = 42 * '='
print(
'\n' + hline + "\nGot CTRL+C, waiting for current cycle...\n"
"Press CTRL+C again if you're in hurry!\n" + hline
)
self._stop = True
|
python
|
def get_instance(self, payload):
    """
    Build an instance of OutgoingCallerIdInstance

    :param dict payload: Payload response from the API

    :returns: twilio.rest.api.v2010.account.outgoing_caller_id.OutgoingCallerIdInstance
    :rtype: twilio.rest.api.v2010.account.outgoing_caller_id.OutgoingCallerIdInstance
    """
    # The account SID is carried in this list/page's resolved solution.
    account_sid = self._solution['account_sid']
    return OutgoingCallerIdInstance(self._version, payload, account_sid=account_sid)
|
java
|
/**
 * Returns a pseudo-random int that this instance has never returned
 * before, recording it so it is not handed out again.
 * NOTE(review): draws repeatedly until an unseen value appears, so the
 * loop degrades as previousRandomInts grows.
 */
public int getRandomInt() {
    int candidate = randomGenerator.nextInt();
    while (this.previousRandomInts.contains(candidate)) {
        candidate = randomGenerator.nextInt();
    }
    this.previousRandomInts.add(candidate);
    return candidate;
}
|
python
|
def nla_put_string(msg, attrtype, value):
    """Add string attribute to Netlink message.

    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L674

    Positional arguments:
    msg -- Netlink message (nl_msg class instance).
    attrtype -- attribute type (integer).
    value -- bytes() or bytearray() value (e.g. 'Test'.encode('ascii')).

    Returns:
    0 on success or a negative error code.
    """
    # Netlink strings are NUL-terminated: append the terminator byte.
    payload = bytearray(value)
    payload.append(0)
    return nla_put(msg, attrtype, len(payload), payload)
|
python
|
def get_objective(self):
    """Gets the related objective.

    return: (osid.learning.Objective) - the related objective
    raise:  OperationFailed - unable to complete request
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.learning.Activity.get_objective
    if not bool(self._my_map['objectiveId']):
        raise errors.IllegalState('objective empty')

    provider = self._get_provider_manager('LEARNING')
    if not provider.supports_objective_lookup():
        raise errors.OperationFailed('Learning does not support Objective lookup')

    # Use a federated view so the objective is found in any bank.
    session = provider.get_objective_lookup_session(proxy=getattr(self, "_proxy", None))
    session.use_federated_objective_bank_view()
    return session.get_objective(self.get_objective_id())
|
java
|
/**
 * Binds a String value to the named parameter by resolving its index and
 * delegating to the index-based overload.
 *
 * @param parameterName the name of the parameter to set
 * @param x the value to bind
 * @throws SQLException if the name is unknown or binding fails
 */
public synchronized void setString(String parameterName, String x) throws SQLException {
    setString(findParameterIndex(parameterName), x);
}
|
java
|
/**
 * Launches the given command line as a child process that inherits this
 * JVM's standard input/output/error streams.
 *
 * @param cmdParts the command and its arguments
 * @return the started process
 * @throws IOException if the process cannot be started
 */
public static Process runProcess(String... cmdParts) throws IOException {
    ProcessBuilder builder = new ProcessBuilder(buildCommandline(cmdParts));
    builder.inheritIO();
    return builder.start();
}
|
python
|
def layer_sort(hmap):
    """
    Find a global ordering for layers in a HoloMap of CompositeOverlay
    types.
    """
    dependencies = {}
    for overlay in hmap:
        specs = [get_overlay_spec(overlay, key, layer)
                 for key, layer in overlay.data.items()]
        if len(specs) == 1 and specs[0] not in dependencies:
            # A lone layer with no recorded constraints yet.
            dependencies[specs[0]] = []
        else:
            # Each spec after the first depends on its predecessor,
            # i.e. zip pairs (specs[i+1], specs[i]).
            dependencies.update({succ: [] if succ == pred else [pred]
                                 for succ, pred in zip(specs[1:], specs)})
    # Topologically sort the dependency groups, ordering within each group.
    return [item for group in sort_topologically(dependencies)
            for item in sorted(group)]
|
java
|
/**
 * Computes the list of resources that are not mapped to any bundle, by
 * building a temporary bundle over every available resource and adding
 * each unmapped path to the bundle mapping.
 *
 * @return the accumulated bundle mapping (orphan paths)
 * @throws DuplicateBundlePathException if a path is mapped twice
 */
public List<String> getOrphansList() throws DuplicateBundlePathException {
    // Build a temporary bundle that covers every available resource.
    JoinableResourceBundleImpl tempBundle = new JoinableResourceOrphanBundleImpl("orphansTemp", "orphansTemp",
            this.resourceExtension, new InclusionPattern(), Collections.singletonList(this.baseDir), rsHandler,
            generatorRegistry);

    // Register license files that are not yet mapped.
    for (String licensePath : tempBundle.getLicensesPathList()) {
        addFileIfNotMapped(licensePath);
    }

    // Register every resource path that is not yet mapped.
    for (BundlePath bundlePath : tempBundle.getItemPathList()) {
        addFileIfNotMapped(bundlePath.getPath());
    }
    return this.bundleMapping;
}
|
python
|
def get_gtf_db(gtf, in_memory=False):
    """
    create a gffutils DB, in memory if we don't have write permissions
    """
    db_file = gtf + ".db"
    if file_exists(db_file):
        return gffutils.FeatureDB(db_file)

    # No usable on-disk DB yet; fall back to in-memory when the target
    # directory is not writable.
    if not os.access(os.path.dirname(db_file), os.W_OK | os.X_OK):
        in_memory = True
    if in_memory:
        db_file = ":memory:"

    if in_memory or not file_exists(db_file):
        disable_extent = not guess_infer_extent(gtf)
        db = gffutils.create_db(gtf, dbfn=db_file,
                                disable_infer_genes=disable_extent,
                                disable_infer_transcripts=disable_extent)
    if in_memory:
        return db
    return gffutils.FeatureDB(db_file)
|
java
|
/**
 * Returns a variant of this parameterized type whose class/param type
 * arguments are widened to upper-bounded wildcards ({@code ? extends T}).
 * Returns {@code this} unchanged when no argument needed widening.
 */
@Override
public BaseType extendGenericType()
{
  BaseType []sourceParams = getParameters();
  BaseType []wildcardParams = new BaseType[sourceParams.length];
  boolean changed = false;

  for (int i = 0; i < wildcardParams.length; i++) {
    BaseType param = sourceParams[i];

    if (param instanceof ClassType || param instanceof ParamType) {
      // Wrap the argument as an upper-bounded wildcard (? extends param);
      // the lower-bound list is deliberately empty.
      BaseType []upperBounds = new BaseType[] { param };
      BaseType []lowerBounds = new BaseType[] {};
      wildcardParams[i] = new WildcardTypeImpl(lowerBounds, upperBounds);
      changed = true;
    }
    else {
      wildcardParams[i] = param;
    }
  }

  return changed ? new ParamType(_type, wildcardParams, _paramMap) : this;
}
|
java
|
/**
 * Generates a random initially-connected DFA with boolean (accepting)
 * state properties, optionally minimized.
 *
 * @param rand the randomness source
 * @param numStates the number of states to generate
 * @param inputs the input alphabet
 * @param minimize whether to minimize the generated DFA
 * @return the generated (and possibly minimized) DFA
 */
@Nonnull
public static <I> CompactDFA<I> randomICDFA(Random rand,
                                            @Nonnegative int numStates,
                                            Alphabet<I> inputs,
                                            boolean minimize) {
    RandomICAutomatonGenerator<Boolean, Void> generator =
            new RandomICAutomatonGenerator<Boolean, Void>().withStateProperties(Random::nextBoolean);
    CompactDFA<I> result = generator.generateICDeterministicAutomaton(numStates,
                                                                      inputs,
                                                                      new CompactDFA.Creator<>(),
                                                                      rand);
    return minimize ? DFAs.minimize(result) : result;
}
|
python
|
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'name') and self.name is not None:
_dict['name'] = self.name
if hasattr(self, 'classifier_id') and self.classifier_id is not None:
_dict['classifier_id'] = self.classifier_id
if hasattr(self, 'classes') and self.classes is not None:
_dict['classes'] = [x._to_dict() for x in self.classes]
return _dict
|
python
|
def dump_in_memory_result(self, result, output_path):
    """Recursively dumps the result of our processing into files within the
    given output path.

    Args:
        result: The in-memory result of our processing — a (possibly
            nested) dict; sub-dicts become subfolders, leaves become file
            contents.
        output_path: Full path to the folder into which to dump the files.

    Returns:
        The number of files generated (integer).
    """
    file_count = 0
    logger.debug("Dumping in-memory processing results to output folder: %s", output_path)
    for name, content in iteritems(result):
        target = os.path.join(output_path, name)
        if isinstance(content, dict):
            # Sub-dict: recurse into a subfolder of the same name.
            file_count += self.dump_in_memory_result(content, target)
            continue
        # Create the destination folder lazily, only when a file is written.
        if not os.path.isdir(output_path):
            os.makedirs(output_path)
        logger.debug("Writing output file: %s", target)
        # dump the contents of the file
        with open(target, 'wt', encoding=self.config.encoding) as handle:
            handle.write(content)
        file_count += 1
    return file_count
|
java
|
/**
 * SAX callback for a comment event: appends a matching Comment node to
 * the document being built.
 *
 * @param ch character buffer holding the comment text
 * @param start offset of the comment text in {@code ch}
 * @param length number of characters of comment text
 * @throws SAXException if appending the node fails
 */
public void comment(char ch[], int start, int length)
        throws SAXException {
    String text = new String(ch, start, length);
    append(m_doc.createComment(text));
}
|
python
|
def read_raid_configuration(self, raid_config=None):
    """Read the logical drives from the system.

    :param raid_config: None in case of post-delete read, or, for a
        post-create read, a dictionary containing the target raid
        configuration. The expected data structure is::

            raid_config = {'logical_disks': [{'raid_level': 1,
                'size_gb': 100, 'physical_disks': ['6I:1:5'],
                'controller': 'HPE Smart Array P408i-a SR Gen10'},
                <info-for-logical-disk-2>]}

    :raises: IloError, on an error from iLO.
    :returns: A dictionary containing list of logical disks
    """
    # Delegate to the sushy system resource for the ProLiant system.
    system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    return system.read_raid(raid_config=raid_config)
|
java
|
/**
 * Counts the distinct primary keys of entities matching the given
 * criteria.
 *
 * @param criteria the selection criteria
 * @return the number of distinct matching entities
 */
public Long count(Criteria<T, T> criteria) {
    // Resolve the entity's id attribute from the JPA metamodel.
    SingularAttribute<? super T, PK> idAttribute =
            getEntityManager().getMetamodel().entity(entityClass).getId(entityKey);
    return criteria.select(Long.class, countDistinct(idAttribute)).getSingleResult();
}
|
java
|
/**
 * Creates a {@link PropertiesBuilder} seeded from the given implementation
 * model's properties, or an unseeded builder when the model is null.
 *
 * @param implementationModel the model to read properties from (may be null)
 * @return a new builder
 */
public static PropertiesBuilder builder(KnowledgeComponentImplementationModel implementationModel) {
    PropertiesModel propertiesModel =
            (implementationModel != null) ? implementationModel.getProperties() : null;
    return new PropertiesBuilder(propertiesModel);
}
|
java
|
/**
 * Custom serialization: writes the default fields, then an array length
 * chosen for 1.5 stream compatibility, then the queue elements in array
 * order. Changing this format would break cross-version deserialization.
 *
 * @param s the stream to write to
 * @throws java.io.IOException if the underlying stream fails
 */
private void writeObject(java.io.ObjectOutputStream s)
    throws java.io.IOException {
    // Write out element count, and any hidden stuff
    s.defaultWriteObject();
    // Write out array length, for compatibility with 1.5 version
    s.writeInt(Math.max(2, size + 1));
    // Write out all elements in the "proper order".
    for (int i = 0; i < size; i++)
        s.writeObject(queue[i]);
}
|
java
|
/**
 * Appends the given recorder statuses to this result, lazily creating the
 * backing list on first use.
 *
 * @param configurationRecordersStatus the statuses to add
 * @return this object, to allow method chaining
 */
public DescribeConfigurationRecorderStatusResult withConfigurationRecordersStatus(ConfigurationRecorderStatus... configurationRecordersStatus) {
    if (this.configurationRecordersStatus == null) {
        setConfigurationRecordersStatus(new com.amazonaws.internal.SdkInternalList<ConfigurationRecorderStatus>(configurationRecordersStatus.length));
    }
    for (ConfigurationRecorderStatus status : configurationRecordersStatus) {
        this.configurationRecordersStatus.add(status);
    }
    return this;
}
|
java
|
/**
 * Writes {@code length} to {@code out} as a 4-byte big-endian integer,
 * reusing {@code sharedByteBuffer} and restoring the buffer's configured
 * byte order afterwards.
 * NOTE(review): relies on exclusive access to sharedByteBuffer — confirm
 * callers serialize writes (not thread-safe by itself).
 *
 * @param length the value to encode
 * @param out the destination stream
 * @throws IOException if writing to the stream fails
 */
private void writeLengthToStream(int length, OutputStream out) throws IOException {
    sharedByteBuffer.clear();
    // Big-endian on the wire regardless of the buffer's working order.
    sharedByteBuffer.order(ByteOrder.BIG_ENDIAN).putInt(length);
    sharedByteBuffer.flip();
    out.write(sharedByteBuffer.array(), 0, Ints.BYTES);
    // Restore the buffer's configured byte order for other users.
    sharedByteBuffer.order(byteOrder);
}
|
python
|
def confirm_push(self, coord, version):
    """Ask the user if a push should be done for a particular version of a
    particular coordinate. Return True if the push should be done"""
    # Non-interactive modes (prompting disabled, or stdin not a TTY)
    # implicitly confirm the push.
    if not self.get_options().prompt:
        return True
    try:
        interactive = os.isatty(sys.stdin.fileno())
    except ValueError:
        # In tests, sys.stdin might not have a fileno
        interactive = False
    if not interactive:
        return True
    answer = input('\nPublish {} with revision {} ? [y|N] '.format(
        coord, version
    ))
    print('\n')
    return answer.strip().lower() == 'y'
|
java
|
/**
 * Clamps the magnitude of {@code value}: magnitudes below {@code absMin}
 * snap to 0, magnitudes above {@code absMax} saturate while keeping the
 * sign, and everything else passes through unchanged.
 */
private float clampMag(float value, float absMin, float absMax) {
    final float magnitude = Math.abs(value);
    if (magnitude < absMin) {
        return 0;
    }
    if (magnitude > absMax) {
        return value > 0 ? absMax : -absMax;
    }
    return value;
}
|
java
|
/**
 * Convenience overload: reports an empty element with no qualified name
 * and no attributes.
 *
 * @param uri the element's namespace URI
 * @param localName the element's local name
 * @throws SAXException if a downstream handler throws
 */
public void emptyElement (String uri, String localName)
    throws SAXException
{
    emptyElement(uri, localName, "", EMPTY_ATTS);
}
|
python
|
def fast_hamiltonian(Ep, epsilonp, detuning_knob, rm, omega_level, xi, theta,
                     file_name=None):
    r"""Return a fast function that returns a Hamiltonian as an array.

    INPUT:

    -  ``Ep`` - A list with the electric field amplitudes (real or complex).
    -  ``epsilonp`` - A list of the polarization vectors of the fields \
    (real or complex).
    -  ``detuning_knob`` - A list of the detunings of each field (relative \
    to the transition of lowest energy).
    -  ``rm`` - The below-diagonal components
    of the position operator in the cartesian basis:

    .. math::
        \vec{r}^{(-)}_{i j} = [ x_{ij}, y_{ij}, z_{ij} ]
        \hspace{1cm} \forall \hspace{1cm} 0 < j < i

    -  ``omega_level`` - The angular frequencies of each state.
    -  ``xi`` - An array whose ``xi[l, i, j]`` element is 1 if the \
    transition :math:`|i\rangle \rightarrow |j\rangle`\ is driven by field \
    ``l`` and 0 otherwise.
    -  ``theta`` - A list of symbolic expressions representing a phase \
    transformation.
    -  ``file_name`` - A string indicating a file to save the function's \
    code.

    If the arguments Ep, epsilonp, and detuning_knob are symbolic amounts, \
    the returned function will accept numeric values of Ep, epsilonp, and \
    detuning_knob as arguments.

    All quantities should be in SI units.

    EXAMPLES:

    We build an example using states coupled like this:

     --- |4>        --- |5>      --- |6>
      ^              ^            ^
      |              |            |
      |    --- |2>   |  --- |3>   |
    2 |     ^      2 |   ^        | 2
      |   1 |        | 1 |        |
      |     |        |   |        |
     ------------------------------------- |1>

    With the numbers on kets labeling states and the plain numbers labeling
    fields.

    The number of states and fields:

    >>> Ne = 6
    >>> Nl = 2

    We invent some energy levels:

    >>> omega_level = np.array([0.0, 100.0, 100.0, 200.0, 200.0, 300.0])
    >>> omega_level = omega_level*1e6*2*np.pi

    We build the symbol xi, that chooses which laser couples which
    transition.

    >>> xi = np.zeros((Nl, Ne, Ne))
    >>> coup = [[(1, 0), (2, 0)], [(3, 0), (4, 0), (5, 0)]]
    >>> for l in range(Nl):
    ...     for pair in coup[l]:
    ...         xi[l, pair[0], pair[1]] = 1.0
    ...         xi[l, pair[1], pair[0]] = 1.0

    We invent some electric dipole matrix elements:

    >>> from scipy.constants import physical_constants
    >>> a0 = physical_constants["Bohr radius"][0]
    >>> rm = np.zeros((3, Ne, Ne))
    >>> for l in range(Nl):
    ...     for i in range(Ne):
    ...         for j in range(i):
    ...             if xi[l, i, j] != 0:
    ...                 rm[2, i, j] = float(i)*a0

    The phase transformation:

    >>> theta = phase_transformation(Ne, Nl, rm, xi)

    We define the possible arguments:

    >>> from sympy import symbols, pi
    >>> from fast.symbolic import polarization_vector
    >>> detuning_knob = symbols("delta1 delta2")
    >>> detuning_knob_vals = np.array([-1.0, 3.0])*1e6*2*np.pi
    >>> Ep, omega_laser = define_laser_variables(Nl)
    >>> Ep_vals = [1e2, 1e2]
    >>> alpha = symbols("alpha")
    >>> epsilon = polarization_vector(0, pi/2, alpha, 0, 1)
    >>> epsilonp = [epsilon, epsilon]
    >>> epsilonp_vals = [[0.0, 0.0, 1.0], [0.0, 0.0, 1.0]]

    There are 8 ways to call fast_hamiltonian:

    1 .- Get a function of detunings, field amplitudes, polarizations:

    >>> H1 = fast_hamiltonian(Ep, epsilonp, detuning_knob, rm,
    ...                       omega_level, xi, theta)

    2 .- Get a function of field amplitudes, polarizations:

    >>> H2 = fast_hamiltonian(Ep, epsilonp, detuning_knob_vals, rm,
    ...                       omega_level, xi, theta)

    3 .- Get a function of detunings, polarizations:

    >>> H3 = fast_hamiltonian(Ep_vals, epsilonp, detuning_knob, rm,
    ...                       omega_level, xi, theta)

    4 .- Get a function of detunings, field amplitudes:

    >>> H4 = fast_hamiltonian(Ep, epsilonp_vals, detuning_knob, rm,
    ...                       omega_level, xi, theta)

    5 .- Get a function of detunings:

    >>> H5 = fast_hamiltonian(Ep_vals, epsilonp_vals, detuning_knob, rm,
    ...                       omega_level, xi, theta)

    6 .- Get a function of field amplitudes:

    >>> H6 = fast_hamiltonian(Ep, epsilonp_vals, detuning_knob_vals, rm,
    ...                       omega_level, xi, theta)

    7 .- Get a function of polarizations:

    >>> H7 = fast_hamiltonian(Ep_vals, epsilonp, detuning_knob_vals, rm,
    ...                       omega_level, xi, theta)

    8 .- Get a function of nothing:

    >>> H8 = fast_hamiltonian(Ep_vals, epsilonp_vals, detuning_knob_vals, rm,
    ...                       omega_level, xi, theta)

    We test all of these combinations.

    >>> print(H1(Ep_vals, epsilonp_vals, detuning_knob_vals) \
    ...       /hbar_num/2/np.pi*1e-6)
    [[  0.    +0.j   0.6398+0.j   1.2795+0.j   1.9193+0.j   2.5591+0.j   3.1989+0.j]
     [  0.6398+0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.2795+0.j   0.    +0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.9193+0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j   0.    +0.j]
     [  2.5591+0.j   0.    +0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j]
     [  3.1989+0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j  97.    +0.j]]

    >>> print(H2(Ep_vals, epsilonp_vals)/hbar_num/2/np.pi*1e-6)
    [[  0.    +0.j   0.6398+0.j   1.2795+0.j   1.9193+0.j   2.5591+0.j   3.1989+0.j]
     [  0.6398+0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.2795+0.j   0.    +0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.9193+0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j   0.    +0.j]
     [  2.5591+0.j   0.    +0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j]
     [  3.1989+0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j  97.    +0.j]]

    >>> print(H3(epsilonp_vals, detuning_knob_vals)/hbar_num/2/np.pi*1e-6)
    [[  0.    +0.j   0.6398+0.j   1.2795+0.j   1.9193+0.j   2.5591+0.j   3.1989+0.j]
     [  0.6398+0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.2795+0.j   0.    +0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.9193+0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j   0.    +0.j]
     [  2.5591+0.j   0.    +0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j]
     [  3.1989+0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j  97.    +0.j]]

    >>> print(H4(Ep_vals, detuning_knob_vals)/hbar_num/2/np.pi*1e-6)
    [[  0.    +0.j   0.6398+0.j   1.2795+0.j   1.9193+0.j   2.5591+0.j   3.1989+0.j]
     [  0.6398+0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.2795+0.j   0.    +0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.9193+0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j   0.    +0.j]
     [  2.5591+0.j   0.    +0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j]
     [  3.1989+0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j  97.    +0.j]]

    >>> print(H5(detuning_knob_vals)/hbar_num/2/np.pi*1e-6)
    [[  0.    +0.j   0.6398+0.j   1.2795+0.j   1.9193+0.j   2.5591+0.j   3.1989+0.j]
     [  0.6398+0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.2795+0.j   0.    +0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.9193+0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j   0.    +0.j]
     [  2.5591+0.j   0.    +0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j]
     [  3.1989+0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j  97.    +0.j]]

    >>> print(H6(Ep_vals)/hbar_num/2/np.pi*1e-6)
    [[  0.    +0.j   0.6398+0.j   1.2795+0.j   1.9193+0.j   2.5591+0.j   3.1989+0.j]
     [  0.6398+0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.2795+0.j   0.    +0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.9193+0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j   0.    +0.j]
     [  2.5591+0.j   0.    +0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j]
     [  3.1989+0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j  97.    +0.j]]

    >>> print(H7(epsilonp_vals)/hbar_num/2/np.pi*1e-6)
    [[  0.    +0.j   0.6398+0.j   1.2795+0.j   1.9193+0.j   2.5591+0.j   3.1989+0.j]
     [  0.6398+0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.2795+0.j   0.    +0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.9193+0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j   0.    +0.j]
     [  2.5591+0.j   0.    +0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j]
     [  3.1989+0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j  97.    +0.j]]

    >>> print(H8()/hbar_num/2/np.pi*1e-6)
    [[  0.    +0.j   0.6398+0.j   1.2795+0.j   1.9193+0.j   2.5591+0.j   3.1989+0.j]
     [  0.6398+0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.2795+0.j   0.    +0.j   1.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j]
     [  1.9193+0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j   0.    +0.j]
     [  2.5591+0.j   0.    +0.j   0.    +0.j   0.    +0.j  -3.    +0.j   0.    +0.j]
     [  3.1989+0.j   0.    +0.j   0.    +0.j   0.    +0.j   0.    +0.j  97.    +0.j]]

    """
    # We determine which arguments are constants.
    if True:
        Nl = len(Ep)
        Ne = np.array(rm[0]).shape[0]
        # NOTE(review): the bare excepts below are intentional EAFP — a
        # symbolic argument fails numeric conversion and is then treated
        # as a runtime variable of the generated function.
        try:
            Ep = np.array([complex(Ep[l]) for l in range(Nl)])
            variable_Ep = False
        except:
            variable_Ep = True
        try:
            epsilonp = [np.array([complex(epsilonp[l][i]) for i in range(3)])
                        for l in range(Nl)]
            variable_epsilonp = False
        except:
            variable_epsilonp = True
        try:
            detuning_knob = np.array([float(detuning_knob[l])
                                      for l in range(Nl)])
            variable_detuning_knob = False
        except:
            variable_detuning_knob = True
        # We convert rm to a numpy array
        rm = np.array([[[complex(rm[k][i, j])
                       for j in range(Ne)] for i in range(Ne)]
                       for k in range(3)])
    # We establish the arguments of the output function.
    if True:
        code = ""
        code += "def hamiltonian("
        if variable_Ep: code += "Ep, "
        if variable_epsilonp: code += "epsilonp, "
        if variable_detuning_knob: code += "detuning_knob, "
        if code[-2:] == ", ":
            code = code[:-2]
        code += "):\n"
        code += '    r"""A fast calculation of the hamiltonian."""\n'
        code += "    H = np.zeros(("+str(Ne)+", "+str(Ne)+"), complex)\n\n"
    # We get the code for the below-diagonal elements
    # (Rabi frequencies).
    if True:
        code += "    # We calculate the below-diagonal elements.\n"
        for i in range(Ne):
            for j in range(i):
                for l in range(Nl):
                    if xi[l, i, j] == 1.0:
                        # We get the below-diagonal terms.
                        code += "    H["+str(i)+", "+str(j)+"] = "
                        # We get the code for Ep.
                        if variable_Ep:
                            code += "0.5*Ep["+str(l)+"]"
                        else:
                            code += str(0.5*Ep[l])
                        # We get the code for epsilonp dot rm
                        rmij = rm[:, i, j]
                        if variable_epsilonp:
                            code += "*cartesian_dot_product("
                            code += "epsilonp["+str(l)+"],"
                            code += str(list(rmij*e_num))+" )"
                        else:
                            dp = cartesian_dot_product(epsilonp[l], rmij)
                            dp = dp*e_num
                            code += "*("+str(dp)+")"
                        code += "\n"
    # We get the code for the above-diagonal elements
    # (Conjugate Rabi frequencies).
    if True:
        code += "\n"
        code += """    # We calculate the above-diagonal elements.\n"""
        code += """    for i in range("""+str(Ne)+"""):\n"""
        code += """        for j in range(i+1, """+str(Ne)+"""):\n"""
        code += """            H[i, j] = H[j, i].conjugate()\n\n"""
    # We get the code for the diagonal elements (detunings).
    if True:
        code += "    # We calculate the diagonal elements.\n"
        # We build the degeneration simplification and is inverse (to avoid
        # large combinatorics).
        aux = define_simplification(omega_level, xi, Nl)
        u, invu, omega_levelu, Neu, xiu = aux
        # For each field we find the smallest transition frequency, and its
        # simplified indices.
        omega_min, iu0, ju0 = find_omega_min(omega_levelu, Neu, Nl, xiu)
        #####################################
        # We get the code to calculate the non degenerate detunings.
        pairs = detunings_indices(Neu, Nl, xiu)
        if not variable_detuning_knob:
            # Constant detunings are baked into the generated source.
            code += "    detuning_knob = np.zeros("+str(Nl)+")\n"
            for l in range(Nl):
                code += "    detuning_knob["+str(l)+"] = " +\
                    str(detuning_knob[l])+"\n"
        code_det = detunings_code(Neu, Nl, pairs, omega_levelu, iu0, ju0)
        code += code_det
        code += "\n"
        #####################################
        # We find the coefficients a_l that multiply omega_laser_l in
        #     H_ii = omega_level_iu + theta_iu = \sum_i a_i varpi_i + remainder
        _omega_level, omega, gamma = define_frequencies(Ne)
        _omega_levelu, omega, gamma = define_frequencies(Neu)
        E0, omega_laser = define_laser_variables(Nl)
        # So we build all combinations.
        combs = detunings_combinations(pairs)
        for i in range(Ne):
            _Hii = theta[i] + _omega_levelu[u(i)]
            aux = (_Hii, combs, omega_laser,
                   _omega_levelu, omega_levelu, iu0, ju0)
            assign = detunings_rewrite(*aux)
            if assign != "":
                code += "    H["+str(i)+", "+str(i)+"] = "+assign+"\n"
        code += "\n"
        code += """    for i in range("""+str(Ne)+"""):\n"""
        code += """        H[i, i] = H[i, i]*"""+str(hbar_num)+"\n"
        code += "    return H\n"
    if file_name is not None:
        # Python 2: the file() builtin; this module targets py2.
        f = file(file_name, "w")
        f.write(code)
        f.close()
    hamiltonian = code
    # Python 2 exec statement: executing the generated source rebinds the
    # local name `hamiltonian` to the freshly defined function.
    exec hamiltonian
    return hamiltonian
|
java
|
/**
 * Returns the last notification queue entry for the given template id,
 * ordered by the supplied comparator.
 *
 * @param commerceNotificationTemplateId the template id to match
 * @param orderByComparator the ordering to apply
 * @return the last matching entry (never {@code null})
 * @throws NoSuchNotificationQueueEntryException if no entry matches
 */
@Override
public CommerceNotificationQueueEntry findByCommerceNotificationTemplateId_Last(
    long commerceNotificationTemplateId,
    OrderByComparator<CommerceNotificationQueueEntry> orderByComparator)
    throws NoSuchNotificationQueueEntryException {
    CommerceNotificationQueueEntry commerceNotificationQueueEntry = fetchByCommerceNotificationTemplateId_Last(commerceNotificationTemplateId,
            orderByComparator);
    if (commerceNotificationQueueEntry != null) {
        return commerceNotificationQueueEntry;
    }
    // No match: build a descriptive "no such entity" message.
    StringBundler msg = new StringBundler(4);
    msg.append(_NO_SUCH_ENTITY_WITH_KEY);
    msg.append("commerceNotificationTemplateId=");
    msg.append(commerceNotificationTemplateId);
    msg.append("}");
    throw new NoSuchNotificationQueueEntryException(msg.toString());
}
|
java
|
/**
 * Reports a compile-time error attached to the given element and bumps
 * the error counter.
 *
 * @param msg the error message to emit
 * @param e the program element the error refers to
 */
void reportError(String msg, Element e) {
    messager.printMessage(Diagnostic.Kind.ERROR, msg, e);
    errorCount++;
}
|
java
|
/**
 * Creates a new bitmap from a sub-region of {@code source}, optionally
 * transformed by {@code matrix}, mirroring
 * {@code Bitmap.createBitmap(Bitmap, int, int, int, int, Matrix, boolean)}
 * but returning a pooled, reference-counted bitmap.
 *
 * @param source the source bitmap (must not be null)
 * @param x left edge of the source region
 * @param y top edge of the source region
 * @param width width of the source region
 * @param height height of the source region
 * @param matrix optional transform applied to the copied pixels
 * @param filter whether to bilinear-filter when the matrix scales/rotates
 * @param callerContext optional context attributed to the allocation
 * @return a closeable reference to the new bitmap
 */
public CloseableReference<Bitmap> createBitmap(
    Bitmap source,
    int x,
    int y,
    int width,
    int height,
    @Nullable Matrix matrix,
    boolean filter,
    @Nullable Object callerContext) {
  Preconditions.checkNotNull(source, "Source bitmap cannot be null");
  checkXYSign(x, y);
  checkWidthHeight(width, height);
  checkFinalImageBounds(source, x, y, width, height);
  // assigned because matrix can modify the final width, height
  int newWidth = width;
  int newHeight = height;
  Canvas canvas;
  CloseableReference<Bitmap> bitmapRef;
  Paint paint;
  Rect srcRectangle = new Rect(x, y, x + width, y + height);
  RectF dstRectangle = new RectF(0, 0, width, height);
  Bitmap.Config newConfig = getSuitableBitmapConfig(source);
  if (matrix == null || matrix.isIdentity()) {
    // No transform: a plain region copy, so no Paint is required.
    bitmapRef = createBitmap(newWidth, newHeight, newConfig, source.hasAlpha(), callerContext);
    setPropertyFromSourceBitmap(source, bitmapRef.get());
    canvas = new Canvas(bitmapRef.get());
    paint = null; // not needed
  } else {
    // A transform that doesn't map rects to rects (rotation/skew)
    // needs alpha and anti-aliasing for clean edges.
    boolean transformed = !matrix.rectStaysRect();
    RectF deviceRectangle = new RectF();
    matrix.mapRect(deviceRectangle, dstRectangle);
    // The mapped rectangle determines the final bitmap dimensions.
    newWidth = Math.round(deviceRectangle.width());
    newHeight = Math.round(deviceRectangle.height());
    bitmapRef =
        createBitmap(
            newWidth,
            newHeight,
            transformed ? Bitmap.Config.ARGB_8888 : newConfig,
            transformed || source.hasAlpha(),
            callerContext);
    setPropertyFromSourceBitmap(source, bitmapRef.get());
    canvas = new Canvas(bitmapRef.get());
    // Shift so the mapped region's top-left lands at (0, 0), then apply
    // the caller's transform for the draw below.
    canvas.translate(-deviceRectangle.left, -deviceRectangle.top);
    canvas.concat(matrix);
    paint = new Paint();
    paint.setFilterBitmap(filter);
    if (transformed) {
      paint.setAntiAlias(true);
    }
  }
  canvas.drawBitmap(source, srcRectangle, dstRectangle, paint);
  // Detach the bitmap from the canvas before handing it out.
  canvas.setBitmap(null);
  return bitmapRef;
}
|
java
|
/**
 * For each specified function, attempts to find a generator seed that produces
 * fewer test cases, returning an updated generator set when any seed improved.
 *
 * @param inputDef the system input definition
 * @param genDef the current generator definitions (not modified)
 * @param baseDef optional base test definitions, may be null
 * @param options reducer options; {@code getFunction()} selects a single
 *        function, or all functions when null
 * @return the updated generator set, or empty when no generator changed
 * @throws RuntimeException when the named function is not defined
 */
public static Optional<GeneratorSet> reduce( SystemInputDef inputDef, GeneratorSet genDef, SystemTestDef baseDef, ReducerOptions options)
  {
  // Create a new set of generators to be updated.
  GeneratorSet genDefNew = genDef.cloneOf();
  // Identify functions to reduce.
  String function = options.getFunction();
  Stream<FunctionInputDef> functionInputDefs;
  if( function == null)
    {
    // No function named: reduce every function in the input definition.
    functionInputDefs = toStream( inputDef.getFunctionInputDefs());
    }
  else if( inputDef.getFunctionInputDef( function) == null)
    {
    throw new RuntimeException( "Function=" + function + " is not defined");
    }
  else
    {
    functionInputDefs = Stream.of( inputDef.getFunctionInputDef( function));
    }
  // For each of the specified function(s), find a seed that generates minimum test cases.
  // Functions whose reduction found no improvement map to null and are filtered out.
  Map<String,ITestCaseGenerator> generatorsNew =
    functionInputDefs
    .map( functionInputDef ->
      new SimpleEntry<String,ITestCaseGenerator>(
        functionInputDef.getName(),
        reduce(
          functionInputDef,
          genDefNew.getGenerator( functionInputDef.getName()),
          baseDef == null? null : baseDef.getFunctionTestDef( functionInputDef.getName()),
          options)
        .orElse( null)))
    .filter( e -> e.getValue() != null)
    .collect( toMap( SimpleEntry::getKey, SimpleEntry::getValue));
  if( generatorsNew.isEmpty())
    {
    logger_.info( "Generator definitions not changed");
    return Optional.empty();
    }
  else
    {
    // Install the improved generators into the cloned set and return it.
    generatorsNew.forEach( (f, g) -> genDefNew.setGenerator( f, g));
    return Optional.of( genDefNew);
    }
  }
|
python
|
def namespace_splitter(self, value):
        """
        Setter for **self.__namespace_splitter** attribute.

        The splitter must be a single non-alphanumeric unicode character
        (e.g. ``|``), or ``None`` to unset it.

        :param value: Attribute value.
        :type value: unicode

        NOTE(review): validation relies on ``assert`` statements, which are
        stripped when Python runs with ``-O``; invalid values would then be
        accepted silently.
        """
        if value is not None:
            # Python 2 codebase: only the `unicode` type is accepted.
            assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format(
                "namespace_splitter", value)
            # Exactly one character...
            assert len(value) == 1, "'{0}' attribute: '{1}' has multiples characters!".format("namespace_splitter",
                                                                                              value)
            # ...that is not alphanumeric, so it cannot collide with names.
            assert not re.search(r"\w", value), "'{0}' attribute: '{1}' is an alphanumeric character!".format(
                "namespace_splitter", value)
        self.__namespace_splitter = value
|
python
|
def query_form_data(self):
        """
        Return the form_data stored for an existing slice, as a JSON string.

        Reads the ``slice_id`` request argument; when it is absent or no
        matching slice exists, an empty form_data dict is returned (after
        time-range normalization).
        """
        requested_id = request.args.get('slice_id')
        slice_obj = None
        if requested_id:
            slice_obj = (
                db.session.query(models.Slice)
                .filter_by(id=requested_id)
                .one_or_none()
            )
        # Copy so that time-range normalization never mutates the ORM object.
        form_data = slice_obj.form_data.copy() if slice_obj else {}
        update_time_range(form_data)
        return json.dumps(form_data)
|
java
|
public DeleteInvitationsResult withUnprocessedAccounts(Result... unprocessedAccounts) {
    // Lazily create the backing list, pre-sized for the supplied varargs.
    if (this.unprocessedAccounts == null) {
        setUnprocessedAccounts(new java.util.ArrayList<Result>(unprocessedAccounts.length));
    }
    // Bulk-append preserves the argument order, same as an element loop.
    java.util.Collections.addAll(this.unprocessedAccounts, unprocessedAccounts);
    // Fluent builder: return this so calls can be chained.
    return this;
}
|
java
|
public static void setExpirationTimes(long operationTTLMillis, long operationMaxIdleMillis, Record record,
                                      MapConfig mapConfig, boolean consultMapConfig) {
    // Resolve the effective TTL and max-idle from the operation-supplied
    // values, the record's current values and (when consultMapConfig is set)
    // the map configuration, stamp them on the record, then recompute the
    // record's absolute expiration time from the updated values.
    record.setTtl(pickTTLMillis(operationTTLMillis, record.getTtl(), mapConfig, consultMapConfig));
    record.setMaxIdle(pickMaxIdleMillis(operationMaxIdleMillis, record.getMaxIdle(), mapConfig, consultMapConfig));
    setExpirationTime(record);
}
|
java
|
public org.inferred.freebuilder.processor.property.Property.Builder addAccessorAnnotations(
    Excerpt element) {
  // Copy-on-write: the field may still hold the immutable snapshot; replace
  // it with a mutable copy before the first modification.
  if (accessorAnnotations instanceof ImmutableList) {
    accessorAnnotations = new ArrayList<>(accessorAnnotations);
  }
  // Reject null after the copy, matching the original evaluation order.
  Excerpt nonNullElement = Objects.requireNonNull(element);
  accessorAnnotations.add(nonNullElement);
  return (org.inferred.freebuilder.processor.property.Property.Builder) this;
}
|
java
|
private boolean notAllowedEnd(String endTag) {
    // Update the sticky errorState field: it remains true only while the end
    // tag is present in BOTH endElements and forbiddenIdEndElements; once it
    // becomes false it can never become true again.
    // NOTE(review): the &amp;&amp;-chain looks suspicious — if the intent was
    // "flag an error when a forbidden end tag appears", an || / independent
    // check would be expected. Confirm against callers before changing.
    errorState = errorState && endElements.containsKey(endTag)
        && forbiddenIdEndElements.containsKey(endTag);
    return errorState;
  }
|
java
|
@Override
public TopicConnection createTopicConnection() throws JMSException
{
    // Pull the optional JNDI credentials from the environment properties
    // (both default to null, i.e. an anonymous connection) and delegate to
    // the credentialed factory method. Java evaluates arguments left to
    // right, so the principal is still read before the credentials.
    return createTopicConnection(
        getStringProperty(Context.SECURITY_PRINCIPAL, null),
        getStringProperty(Context.SECURITY_CREDENTIALS, null));
}
|
java
|
public ExtendedSwidProcessor setExtendedInformation(final JAXBElement<Object>... extendedInformationList) {
    // Collect every supplied extension element into one container and append
    // it to the SWID tag. Note: the (possibly empty) container is added even
    // when no elements were supplied, matching existing behavior.
    ExtendedInformationComplexType extendedInfo = new ExtendedInformationComplexType();
    for (JAXBElement<Object> extension : extendedInformationList) {
        extendedInfo.getAny().add(extension);
    }
    swidTag.getExtendedInformation().add(extendedInfo);
    // Fluent interface: return this processor for chaining.
    return this;
}
|
java
|
/**
 * Executes the request and parses the "playlists" field of the JSON
 * response into a paging object of simplified playlists.
 *
 * @return a {@code Paging<PlaylistSimplified>} built from the response
 * @throws IOException on network / I-O failure while fetching the JSON
 * @throws SpotifyWebApiException on an error response from the Web API
 */
@SuppressWarnings("unchecked")
public Paging<PlaylistSimplified> execute() throws
    IOException,
    SpotifyWebApiException {
    return new PlaylistSimplified.JsonUtil().createModelObjectPaging(getJson(), "playlists");
  }
|
python
|
def run(self):
        """Run GapFill command.

        Builds an extended model (database, exchange and transport
        reactions), then searches for a minimal-penalty set of added
        reactions that unblocks the requested compounds, printing a
        tab-separated report of model reactions, added reactions, and
        reactions whose bounds were removed.
        """
        # Load compound information
        # Map a compound id to its human-readable name when the model has one.
        # NOTE(review): the parameter shadows the builtin `id`; kept as-is.
        def compound_name(id):
            if id not in self._model.compounds:
                return id
            return self._model.compounds[id].properties.get('name', id)
        # Calculate penalty if penalty file exists
        # File format: "<reaction-id> <penalty>" per line, '#' starts a comment.
        penalties = {}
        if self._args.penalty is not None:
            for line in self._args.penalty:
                line, _, comment = line.partition('#')
                line = line.strip()
                if line == '':
                    continue
                rxnid, penalty = line.split(None, 1)
                penalties[rxnid] = float(penalty)
        # The "core" is everything already in the metabolic model.
        core = set(self._mm.reactions)
        solver = self._get_solver(integer=True)
        default_comp = self._model.default_compartment
        epsilon = self._args.epsilon
        v_max = float(self._model.default_flux_limit)
        # Compounds to unblock; ids without a compartment get the default one.
        blocked = set()
        for compound in self._args.compound:
            if compound.compartment is None:
                compound = compound.in_compartment(default_comp)
            blocked.add(compound)
        if len(blocked) > 0:
            logger.info('Unblocking compounds: {}...'.format(
                ', '.join(text_type(c) for c in sorted(blocked))))
        else:
            # No --compound given: unblock every compound in the model.
            logger.info(
                'Unblocking all compounds in model. Use --compound option to'
                ' unblock specific compounds.')
            blocked = set(self._mm.compounds)
        # Never propose changes to the biomass reaction itself.
        exclude = set()
        if self._model.biomass_reaction is not None:
            exclude.add(self._model.biomass_reaction)
        # Add exchange and transport reactions to database
        model_complete, weights = create_extended_model(
            self._model,
            db_penalty=self._args.db_penalty,
            ex_penalty=self._args.ex_penalty,
            tp_penalty=self._args.tp_penalty,
            penalties=penalties)
        implicit_sinks = not self._args.no_implicit_sinks
        logger.info('Searching for reactions to fill gaps')
        try:
            added_reactions, no_bounds_reactions = gapfill(
                model_complete, core, blocked, exclude, solver=solver,
                epsilon=epsilon, v_max=v_max, weights=weights,
                implicit_sinks=implicit_sinks,
                allow_bounds_expansion=self._args.allow_bounds_expansion)
        except GapFillError as e:
            # Reports the failure (with a hint about epsilon) and exits.
            self._log_epsilon_and_fail(epsilon, e)
        # Report: existing model reactions first (penalty column is 0)...
        for reaction_id in sorted(self._mm.reactions):
            rx = self._mm.get_reaction(reaction_id)
            rxt = rx.translated_compounds(compound_name)
            print('{}\t{}\t{}\t{}'.format(reaction_id, 'Model', 0, rxt))
        # ...then reactions the gap-filler proposes to add...
        for rxnid in sorted(added_reactions):
            rx = model_complete.get_reaction(rxnid)
            rxt = rx.translated_compounds(compound_name)
            print('{}\t{}\t{}\t{}'.format(
                rxnid, 'Add', weights.get(rxnid, 1), rxt))
        # ...then reactions whose flux bounds should be removed.
        for rxnid in sorted(no_bounds_reactions):
            rx = model_complete.get_reaction(rxnid)
            rxt = rx.translated_compounds(compound_name)
            print('{}\t{}\t{}\t{}'.format(
                rxnid, 'Remove bounds', weights.get(rxnid, 1), rxt))
|
java
|
private void readAssignments(Project project)
{
   // The assignments section is optional in an MSPDI file; nothing to do
   // when it is absent.
   Project.Assignments assignments = project.getAssignments();
   if (assignments == null)
   {
      return;
   }

   // Shared helpers reused while reading each assignment's timephased data.
   SplitTaskFactory splitFactory = new SplitTaskFactory();
   TimephasedWorkNormaliser normaliser = new MSPDITimephasedWorkNormaliser();

   for (Project.Assignments.Assignment assignment : assignments.getAssignment())
   {
      readAssignment(assignment, splitFactory, normaliser);
   }
}
|
python
|
def insert_multiple(self, documents):
        """
        Insert multiple documents into the table.

        Each document is assigned an ID via ``_get_doc_id`` and stored as a
        plain ``dict`` copy; the table data is written back once at the end.

        :param documents: a list of documents to insert
        :returns: a list containing the inserted documents' IDs
        """
        table = self._read()
        inserted_ids = []
        for document in documents:
            new_id = self._get_doc_id(document)
            inserted_ids.append(new_id)
            table[new_id] = dict(document)
        self._write(table)
        return inserted_ids
|
python
|
def fetch_all_first_values(session: Session,
                           select_statement: Select) -> List[Any]:
    """
    Returns a list of the first values in each row returned by a ``SELECT``
    query.

    A Core version of this sort of thing:
    http://xion.io/post/code/sqlalchemy-query-values.html

    Args:
        session: SQLAlchemy :class:`Session` object
        select_statement: SQLAlchemy :class:`Select` object

    Returns:
        a list of the first value of each result row

    Raises:
        MultipleResultsFound: when row access raises :exc:`ValueError`
    """
    result_proxy = session.execute(select_statement)  # type: ResultProxy
    first_values = []
    try:
        for row in result_proxy:
            first_values.append(row[0])
    except ValueError as e:
        # Re-raise with SQLAlchemy's exception type for consistency.
        raise MultipleResultsFound(str(e))
    return first_values
|
python
|
def parse_command(self, string):
        """Parse out any possible valid command from an input string.

        Returns an ``(event, args)`` pair, or ``(None, None)`` when the
        first word is not a registered command. The remainder of the
        string is tokenized shell-style via :func:`shlex.split`.
        """
        head, _, remainder = string.partition(" ")
        # Commands are case-insensitive, stored as lowercase
        lowered = head.lower()
        if lowered not in self.commands:
            return None, None
        entry = self.commands[lowered]
        return entry["event"], shlex.split(remainder.strip())
|
java
|
/**
 * Reads name servers and search domains from the "dns.server" and
 * "dns.search" system properties (comma-separated lists) and installs
 * them via configureFromLists.
 *
 * @return true when both the server list and the search list ended up
 *         populated, i.e. the properties supplied a usable configuration
 */
private boolean
findProperty() {
	String prop;
	List lserver = new ArrayList(0);
	List lsearch = new ArrayList(0);
	StringTokenizer st;
	// Comma-separated list of name servers, e.g. "8.8.8.8,1.1.1.1".
	prop = System.getProperty("dns.server");
	if (prop != null) {
		st = new StringTokenizer(prop, ",");
		while (st.hasMoreTokens())
			addServer(st.nextToken(), lserver);
	}
	// Comma-separated list of search domains.
	prop = System.getProperty("dns.search");
	if (prop != null) {
		st = new StringTokenizer(prop, ",");
		while (st.hasMoreTokens())
			addSearch(st.nextToken(), lsearch);
	}
	// Installs the lists into the servers / searchlist fields checked below.
	configureFromLists(lserver, lsearch);
	return (servers != null && searchlist != null);
}
|
python
|
def set_note(self, name='C', octave=4, dynamics=None):
        """Set the note to name in octave with dynamics.

        ``name`` is either a bare note name (e.g. ``'C#'``), in which case
        the ``octave`` argument is used, or a ``'name-octave'`` string
        (e.g. ``'C#-5'``) whose octave part overrides ``octave``.

        Return the object if it succeeded, raise a NoteFormatError
        otherwise.
        """
        # Fix: the default used to be the mutable literal ``{}``, which is
        # created once and shared by every call (and thus every Note that
        # mutates its dynamics). Use None as a sentinel instead.
        if dynamics is None:
            dynamics = {}
        parts = name.split('-')
        if len(parts) == 1:
            note_name = name
        elif len(parts) == 2:
            note_name = parts[0]
        else:
            # Fix: strings with more than one dash previously fell through to
            # ``return False``, contradicting the documented contract above.
            raise NoteFormatError("The string '%s' is not a valid "
                                  "representation of a note in mingus" % name)
        if not notes.is_valid_note(note_name):
            raise NoteFormatError("The string '%s' is not a valid "
                                  "representation of a note in mingus" % name)
        self.name = note_name
        # The octave embedded in a 'name-octave' string arrives as text.
        self.octave = octave if len(parts) == 1 else int(parts[1])
        self.dynamics = dynamics
        return self
|
java
|
/**
 * Increments the channel payment by {@code size}, delegating to the
 * channel client with no custom info and no user key.
 *
 * @param size amount to add to the current payment
 * @return a future that completes with the server's acknowledgement
 * @throws ValueOutOfRangeException if the channel cannot pay that amount
 * @throws IllegalStateException if the channel is not in a payable state
 */
public ListenableFuture<PaymentIncrementAck> incrementPayment(Coin size) throws ValueOutOfRangeException, IllegalStateException {
    return channelClient.incrementPayment(size, null, null);
  }
|
java
|
/**
 * Drains the current scanner into {@code results} when no fetch size is
 * configured (i.e. unpaginated reads); otherwise leaves {@code results}
 * untouched so the paginated iterator path can be used instead.
 *
 * @param tableName name of the HBase table the rows belong to
 * @param results list to append wrapped rows to (also returned)
 * @return the same {@code results} list, possibly extended
 * @throws IOException on HBase scan failure
 */
private List<HBaseDataWrapper> scanResults(final String tableName, List<HBaseDataWrapper> results)
        throws IOException
    {
        if (fetchSize == null)
        {
            // Wrap every scanned row (row key + cells) for the caller.
            for (Result result : scanner)
            {
                HBaseDataWrapper data = new HBaseDataWrapper(tableName, result.getRow());
                data.setColumns(result.listCells());
                results.add(data);
            }
            // Scan fully consumed: drop the scanner and iterator state.
            scanner = null;
            resultsIter = null;
        }
        return results;
    }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.