| function (string, lengths 11 to 56k) | repo_name (string, lengths 5 to 60) | features (list of int) |
|---|---|---|
def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8,
weight_decay=0, amsgrad=False):
defaults = dict(lr=lr, betas=betas, eps=eps,
weight_decay=weight_decay, amsgrad=amsgrad)
super(Adam, self).__init__(params, defaults) | mlperf/training_results_v0.7 | [
11,
25,
11,
1,
1606268455
] |
def __init__(
self,
cfg,
confidence_threshold=0.7,
show_mask_heatmaps=False,
masks_per_dim=2,
min_image_size=224, | mlperf/training_results_v0.7 | [
11,
25,
11,
1,
1606268455
] |
def build_transform(self):
"""
Creates a basic transformation that was used to train the models
"""
cfg = self.cfg
# we are loading images with OpenCV, so we don't need to convert them
# to BGR, they are already! So all we need to do is to normalize
# by 255 if we want to convert to BGR255 format, or flip the channels
# if we want it to be in RGB in [0-1] range.
if cfg.INPUT.TO_BGR255:
to_bgr_transform = T.Lambda(lambda x: x * 255)
else:
to_bgr_transform = T.Lambda(lambda x: x[[2, 1, 0]])
normalize_transform = T.Normalize(
mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL_STD
)
transform = T.Compose(
[
T.ToPILImage(),
T.Resize(self.min_image_size),
T.ToTensor(),
to_bgr_transform,
normalize_transform,
]
)
return transform | mlperf/training_results_v0.7 | [
11,
25,
11,
1,
1606268455
] |
def compute_prediction(self, original_image):
"""
Arguments:
original_image (np.ndarray): an image as returned by OpenCV
Returns:
prediction (BoxList): the detected objects. Additional information
of the detection properties can be found in the fields of
the BoxList via `prediction.fields()`
"""
# apply pre-processing to image
image = self.transforms(original_image)
# convert to an ImageList, padded so that it is divisible by
# cfg.DATALOADER.SIZE_DIVISIBILITY
image_list = to_image_list(image, self.cfg.DATALOADER.SIZE_DIVISIBILITY)
image_list = image_list.to(self.device)
# compute predictions
with torch.no_grad():
predictions = self.model(image_list)
predictions = [o.to(self.cpu_device) for o in predictions]
# always single image is passed at a time
prediction = predictions[0]
# reshape prediction (a BoxList) into the original image size
height, width = original_image.shape[:-1]
prediction = prediction.resize((width, height))
if prediction.has_field("mask"):
# if we have masks, paste the masks in the right position
# in the image, as defined by the bounding boxes
masks = prediction.get_field("mask")
# always single image is passed at a time
masks = self.masker([masks], [prediction])[0]
prediction.add_field("mask", masks)
return prediction | mlperf/training_results_v0.7 | [
11,
25,
11,
1,
1606268455
] |
def compute_colors_for_labels(self, labels):
"""
Simple function that adds fixed colors depending on the class
"""
colors = labels[:, None] * self.palette
colors = (colors % 255).numpy().astype("uint8")
return colors | mlperf/training_results_v0.7 | [
11,
25,
11,
1,
1606268455
] |
def overlay_mask(self, image, predictions):
"""
Adds the instances contours for each predicted object.
Each label has a different color.
Arguments:
image (np.ndarray): an image as returned by OpenCV
predictions (BoxList): the result of the computation by the model.
It should contain the field `mask` and `labels`.
"""
masks = predictions.get_field("mask").numpy()
labels = predictions.get_field("labels")
colors = self.compute_colors_for_labels(labels).tolist()
for mask, color in zip(masks, colors):
thresh = mask[0, :, :, None]
contours, hierarchy = cv2_util.findContours(
thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE
)
image = cv2.drawContours(image, contours, -1, color, 3)
composite = image
return composite | mlperf/training_results_v0.7 | [
11,
25,
11,
1,
1606268455
] |
def create_mask_montage(self, image, predictions):
"""
Create a montage showing the probability heatmaps for each one of the
detected objects
Arguments:
image (np.ndarray): an image as returned by OpenCV
predictions (BoxList): the result of the computation by the model.
It should contain the field `mask`.
"""
masks = predictions.get_field("mask")
masks_per_dim = self.masks_per_dim
masks = L.interpolate(
masks.float(), scale_factor=1 / masks_per_dim
).byte()
height, width = masks.shape[-2:]
max_masks = masks_per_dim ** 2
masks = masks[:max_masks]
# handle case where we have less detections than max_masks
if len(masks) < max_masks:
masks_padded = torch.zeros(max_masks, 1, height, width, dtype=torch.uint8)
masks_padded[: len(masks)] = masks
masks = masks_padded
masks = masks.reshape(masks_per_dim, masks_per_dim, height, width)
result = torch.zeros(
(masks_per_dim * height, masks_per_dim * width), dtype=torch.uint8
)
for y in range(masks_per_dim):
start_y = y * height
end_y = (y + 1) * height
for x in range(masks_per_dim):
start_x = x * width
end_x = (x + 1) * width
result[start_y:end_y, start_x:end_x] = masks[y, x]
return cv2.applyColorMap(result.numpy(), cv2.COLORMAP_JET) | mlperf/training_results_v0.7 | [
11,
25,
11,
1,
1606268455
] |
def __init__(
self,
*,
application_name: str,
attach_log: bool = False,
namespace: Optional[str] = None,
kubernetes_conn_id: str = "kubernetes_default",
api_group: str = 'sparkoperator.k8s.io',
api_version: str = 'v1beta2',
**kwargs, | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def _log_driver(self, application_state: str, response: dict) -> None:
if not self.attach_log:
return
status_info = response["status"]
if "driverInfo" not in status_info:
return
driver_info = status_info["driverInfo"]
if "podName" not in driver_info:
return
driver_pod_name = driver_info["podName"]
namespace = response["metadata"]["namespace"]
log_method = self.log.error if application_state in self.FAILURE_STATES else self.log.info
try:
log = ""
for line in self.hook.get_pod_logs(driver_pod_name, namespace=namespace):
log += line.decode()
log_method(log)
except client.rest.ApiException as e:
self.log.warning(
"Could not read logs for pod %s. It may have been disposed.\n"
"Make sure timeToLiveSeconds is set on your SparkApplication spec.\n"
"underlying exception: %s",
driver_pod_name,
e,
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def create_string_buffer(init, size=None):
"""create_string_buffer(aBytes) -> character array
create_string_buffer(anInteger) -> character array
create_string_buffer(aString, anInteger) -> character array
"""
if isinstance(init, bytes):
if size is None:
size = len(init)+1
buftype = c_char * size
buf = buftype()
buf.value = init
return buf
elif isinstance(init, int):
buftype = c_char * init
buf = buftype()
return buf
raise TypeError(init) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def CFUNCTYPE(restype, *argtypes, **kw):
"""CFUNCTYPE(restype, *argtypes,
use_errno=False, use_last_error=False) -> function prototype.
restype: the result type
argtypes: a sequence specifying the argument types
The function prototype can be called in different ways to create a
callable object:
prototype(integer address) -> foreign function
prototype(callable) -> create and return a C callable function from callable
prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method
prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal
prototype((function name, dll object)[, paramflags]) -> foreign function exported by name
"""
flags = _FUNCFLAG_CDECL
if kw.pop("use_errno", False):
flags |= _FUNCFLAG_USE_ERRNO
if kw.pop("use_last_error", False):
flags |= _FUNCFLAG_USE_LASTERROR
if kw:
raise ValueError("unexpected keyword argument(s) %s" % kw.keys())
try:
return _c_functype_cache[(restype, argtypes, flags)]
except KeyError:
class CFunctionType(_CFuncPtr):
_argtypes_ = argtypes
_restype_ = restype
_flags_ = flags
_c_functype_cache[(restype, argtypes, flags)] = CFunctionType
return CFunctionType | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def WINFUNCTYPE(restype, *argtypes, **kw):
# docstring set later (very similar to CFUNCTYPE.__doc__)
flags = _FUNCFLAG_STDCALL
if kw.pop("use_errno", False):
flags |= _FUNCFLAG_USE_ERRNO
if kw.pop("use_last_error", False):
flags |= _FUNCFLAG_USE_LASTERROR
if kw:
raise ValueError("unexpected keyword argument(s) %s" % kw.keys())
try:
return _win_functype_cache[(restype, argtypes, flags)]
except KeyError:
class WinFunctionType(_CFuncPtr):
_argtypes_ = argtypes
_restype_ = restype
_flags_ = flags
_win_functype_cache[(restype, argtypes, flags)] = WinFunctionType
return WinFunctionType | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def _check_size(typ, typecode=None):
# Check if sizeof(ctypes_type) against struct.calcsize. This
# should protect somewhat against a misconfigured libffi.
from struct import calcsize
if typecode is None:
# Most _type_ codes are the same as used in struct
typecode = typ._type_
actual, required = sizeof(typ), calcsize(typecode)
if actual != required:
raise SystemError("sizeof(%s) wrong: %d instead of %d" % \
(typ, actual, required)) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def __repr__(self):
try:
return super().__repr__()
except ValueError:
return "%s(<NULL>)" % type(self).__name__ | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, c_void_p.from_buffer(self).value) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, c_void_p.from_buffer(self).value) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def _reset_cache():
_pointer_type_cache.clear()
_c_functype_cache.clear()
if _os.name in ("nt", "ce"):
_win_functype_cache.clear()
# _SimpleCData.c_wchar_p_from_param
POINTER(c_wchar).from_param = c_wchar_p.from_param
# _SimpleCData.c_char_p_from_param
POINTER(c_char).from_param = c_char_p.from_param
_pointer_type_cache[None] = c_void_p
# XXX for whatever reasons, creating the first instance of a callback
# function is needed for the unittests on Win64 to succeed. This MAY
# be a compiler bug, since the problem occurs only when _ctypes is
# compiled with the MS SDK compiler. Or an uninitialized variable?
CFUNCTYPE(c_int)(lambda: None) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def SetPointerType(pointer, cls):
if _pointer_type_cache.get(cls, None) is not None:
raise RuntimeError("This type already exists in the cache")
if id(pointer) not in _pointer_type_cache:
raise RuntimeError("What's this???")
pointer.set_type(cls)
_pointer_type_cache[cls] = pointer
del _pointer_type_cache[id(pointer)] | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def ARRAY(typ, len):
return typ * len | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def __init__(self, name, mode=DEFAULT_MODE, handle=None,
use_errno=False,
use_last_error=False):
self._name = name
flags = self._func_flags_
if use_errno:
flags |= _FUNCFLAG_USE_ERRNO
if use_last_error:
flags |= _FUNCFLAG_USE_LASTERROR
class _FuncPtr(_CFuncPtr):
_flags_ = flags
_restype_ = self._func_restype_
self._FuncPtr = _FuncPtr
if handle is None:
self._handle = _dlopen(self._name, mode)
else:
self._handle = handle | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
raise AttributeError(name)
func = self.__getitem__(name)
setattr(self, name, func)
return func | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def __init__(self, dlltype):
self._dlltype = dlltype | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def __getitem__(self, name):
return getattr(self, name) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def WinError(code=None, descr=None):
if code is None:
code = GetLastError()
if descr is None:
descr = FormatError(code).strip()
return OSError(None, descr, None, code) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def PYFUNCTYPE(restype, *argtypes):
class CFunctionType(_CFuncPtr):
_argtypes_ = argtypes
_restype_ = restype
_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
return CFunctionType | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def cast(obj, typ):
return _cast(obj, obj, typ) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def string_at(ptr, size=-1):
"""string_at(addr[, size]) -> string
Return the string at addr."""
return _string_at(ptr, size) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def wstring_at(ptr, size=-1):
"""wstring_at(addr[, size]) -> string
Return the string at addr."""
return _wstring_at(ptr, size) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def DllGetClassObject(rclsid, riid, ppv):
try:
ccom = __import__("comtypes.server.inprocserver", globals(), locals(), ['*'])
except ImportError:
return -2147221231 # CLASS_E_CLASSNOTAVAILABLE
else:
return ccom.DllGetClassObject(rclsid, riid, ppv) | Microvellum/Fluid-Designer | [
69,
30,
69,
37,
1461884765
] |
def test_seq_ex_in_sequence_categorical_column_with_identity(self):
self._test_parsed_sequence_example(
'int_list', sfc.sequence_categorical_column_with_identity,
10, [3, 6], [2, 4, 6]) | tensorflow/tensorflow | [
171949,
87931,
171949,
2300,
1446859160
] |
def test_seq_ex_in_sequence_categorical_column_with_vocabulary_list(self):
self._test_parsed_sequence_example(
'bytes_list', sfc.sequence_categorical_column_with_vocabulary_list,
list(string.ascii_lowercase), [3, 4],
[compat.as_bytes(x) for x in 'acg']) | tensorflow/tensorflow | [
171949,
87931,
171949,
2300,
1446859160
] |
def _test_parsed_sequence_example(
self, col_name, col_fn, col_arg, shape, values):
"""Helper function to check that each FeatureColumn parses correctly.
Args:
col_name: string, name to give to the feature column. Should match
the name that the column will parse out of the features dict.
col_fn: function used to create the feature column. For example,
sequence_numeric_column.
col_arg: second arg that the target feature column is expecting.
shape: the expected dense_shape of the feature after parsing into
a SparseTensor.
values: the expected values at index [0, 2, 6] of the feature
after parsing into a SparseTensor.
"""
example = _make_sequence_example()
columns = [
fc.categorical_column_with_identity('int_ctx', num_buckets=100),
fc.numeric_column('float_ctx'),
col_fn(col_name, col_arg)
]
context, seq_features = parsing_ops.parse_single_sequence_example(
example.SerializeToString(),
context_features=fc.make_parse_example_spec_v2(columns[:2]),
sequence_features=fc.make_parse_example_spec_v2(columns[2:]))
with self.cached_session() as sess:
ctx_result, seq_result = sess.run([context, seq_features])
self.assertEqual(list(seq_result[col_name].dense_shape), shape)
self.assertEqual(
list(seq_result[col_name].values[[0, 2, 6]]), values)
self.assertEqual(list(ctx_result['int_ctx'].dense_shape), [1])
self.assertEqual(ctx_result['int_ctx'].values[0], 5)
self.assertEqual(list(ctx_result['float_ctx'].shape), [1])
self.assertAlmostEqual(ctx_result['float_ctx'][0], 123.6, places=1) | tensorflow/tensorflow | [
171949,
87931,
171949,
2300,
1446859160
] |
def _make_sequence_example():
example = example_pb2.SequenceExample()
return text_format.Parse(_SEQ_EX_PROTO, example) | tensorflow/tensorflow | [
171949,
87931,
171949,
2300,
1446859160
] |
def processPyPath(ServerConfig):
"""Use ServerConfig to add to the python path."""
if ServerConfig.get('pypath_append'):
path_append = ServerConfig['pypath_append'].split(':')
#expand all ~'s in the list
path_append = [os.path.expanduser(path) for path in path_append]
sys.path.extend(path_append) | sparkslabs/kamaelia_ | [
13,
3,
13,
2,
1348148442
] |
def normalizeUrlList(url_list):
"""Add necessary default entries that the user did not enter."""
for dict in url_list:
if not dict.get('kp.app_object'):
dict['kp.app_object'] = 'application' | sparkslabs/kamaelia_ | [
13,
3,
13,
2,
1348148442
] |
def normalizeWsgiVars(WsgiConfig):
"""Put WSGI config data in a state that the server expects."""
WsgiConfig['wsgi_ver'] = tuple(WsgiConfig['wsgi_ver'].split('.')) | sparkslabs/kamaelia_ | [
13,
3,
13,
2,
1348148442
] |
def initializeLogger(consolename='kamaelia'):
"""This sets up the logging system."""
formatter = logging.Formatter('%(levelname)s/%(name)s: %(message)s') | sparkslabs/kamaelia_ | [
13,
3,
13,
2,
1348148442
] |
def __init__(self, name, rng=None):
"""
Args:
name: Name of the used fuzzer.
rng: Random number generator for generating experiments.
random_seed: Random-seed used for d8 throughout one fuzz session.
"""
self.name = name
self.rng = rng or random.Random() | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def ceiling_fan(name: str):
"""Create a ceiling fan with given name."""
return {
"name": name,
"type": DeviceType.CEILING_FAN,
"actions": ["SetSpeed", "SetDirection"],
} | tchellomello/home-assistant | [
7,
1,
7,
6,
1467778429
] |
def get_input_function():
"""A function to get test inputs. Returns an image with one box."""
image = tf.random_uniform([32, 32, 3], dtype=tf.float32)
class_label = tf.random_uniform(
[1], minval=0, maxval=NUMBER_OF_CLASSES, dtype=tf.int32)
box_label = tf.random_uniform(
[1, 4], minval=0.4, maxval=0.6, dtype=tf.float32)
return {
fields.InputDataFields.image: image,
fields.InputDataFields.groundtruth_classes: class_label,
fields.InputDataFields.groundtruth_boxes: box_label
} | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def __init__(self):
super(FakeDetectionModel, self).__init__(num_classes=NUMBER_OF_CLASSES)
self._classification_loss = losses.WeightedSigmoidClassificationLoss(
anchorwise_output=True)
self._localization_loss = losses.WeightedSmoothL1LocalizationLoss(
anchorwise_output=True) | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def predict(self, preprocessed_inputs):
"""Prediction tensors from inputs tensor.
Args:
preprocessed_inputs: a [batch, 28, 28, channels] float32 tensor.
Returns:
prediction_dict: a dictionary holding prediction tensors to be
passed to the Loss or Postprocess functions.
"""
flattened_inputs = tf.contrib.layers.flatten(preprocessed_inputs)
class_prediction = tf.contrib.layers.fully_connected(
flattened_inputs, self._num_classes)
box_prediction = tf.contrib.layers.fully_connected(flattened_inputs, 4)
return {
'class_predictions_with_background': tf.reshape(
class_prediction, [-1, 1, self._num_classes]),
'box_encodings': tf.reshape(box_prediction, [-1, 1, 4])
} | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def loss(self, prediction_dict):
"""Compute scalar loss tensors with respect to provided groundtruth.
Calling this function requires that groundtruth tensors have been
provided via the provide_groundtruth function.
Args:
prediction_dict: a dictionary holding predicted tensors
Returns:
a dictionary mapping strings (loss names) to scalar tensors representing
loss values.
"""
batch_reg_targets = tf.stack(
self.groundtruth_lists(fields.BoxListFields.boxes))
batch_cls_targets = tf.stack(
self.groundtruth_lists(fields.BoxListFields.classes))
weights = tf.constant(
1.0, dtype=tf.float32,
shape=[len(self.groundtruth_lists(fields.BoxListFields.boxes)), 1])
location_losses = self._localization_loss(
prediction_dict['box_encodings'], batch_reg_targets,
weights=weights)
cls_losses = self._classification_loss(
prediction_dict['class_predictions_with_background'], batch_cls_targets,
weights=weights)
loss_dict = {
'localization_loss': tf.reduce_sum(location_losses),
'classification_loss': tf.reduce_sum(cls_losses),
}
return loss_dict | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def restore(unused_sess):
return | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def test_configure_trainer_and_train_two_steps(self):
train_config_text_proto = """
optimizer {
adam_optimizer {
learning_rate {
constant_learning_rate {
learning_rate: 0.01
}
}
}
}
data_augmentation_options {
random_adjust_brightness {
max_delta: 0.2
}
}
data_augmentation_options {
random_adjust_contrast {
min_delta: 0.7
max_delta: 1.1
}
}
num_steps: 2
"""
train_config = train_pb2.TrainConfig()
text_format.Merge(train_config_text_proto, train_config)
train_dir = self.get_temp_dir()
trainer.train(create_tensor_dict_fn=get_input_function,
create_model_fn=FakeDetectionModel,
train_config=train_config,
master='',
task=0,
num_clones=1,
worker_replicas=1,
clone_on_cpu=True,
ps_tasks=0,
worker_job_name='worker',
is_chief=True,
train_dir=train_dir) | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def read_golden_file(self, extension):
return file(
os.path.join(
os.path.dirname(__file__),
'unexpire_test.' + extension + '.expected')).read() | nwjs/chromium.src | [
136,
133,
136,
45,
1453904223
] |
def testHFile(self):
h = generate_unexpire_flags.gen_features_header('foobar', 123)
golden_h = self.read_golden_file('h')
self.assertEquals(golden_h, h) | nwjs/chromium.src | [
136,
133,
136,
45,
1453904223
] |
def test_silence(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated(date, instructions)
def _fn():
pass
_fn()
self.assertEqual(1, mock_warning.call_count)
with deprecation.silence():
_fn()
self.assertEqual(1, mock_warning.call_count)
_fn()
self.assertEqual(2, mock_warning.call_count) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_deprecated_illegal_args(self):
instructions = "This is how you update..."
with self.assertRaisesRegexp(ValueError, "YYYY-MM-DD"):
deprecation.deprecated("", instructions)
with self.assertRaisesRegexp(ValueError, "YYYY-MM-DD"):
deprecation.deprecated("07-04-2016", instructions)
date = "2016-07-04"
with self.assertRaisesRegexp(ValueError, "instructions"):
deprecation.deprecated(date, None)
with self.assertRaisesRegexp(ValueError, "instructions"):
deprecation.deprecated(date, "") | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_no_date(self, mock_warning):
date = None
instructions = "This is how you update..."
@deprecation.deprecated(date, instructions)
def _fn(arg0, arg1):
"""fn doc.
Args:
arg0: Arg 0.
arg1: Arg 1.
Returns:
Sum of args.
"""
return arg0 + arg1
self.assertEqual(
"fn doc. (deprecated)"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed in a future version."
"\nInstructions for updating:\n%s"
"\n"
"\nArgs:"
"\n arg0: Arg 0."
"\n arg1: Arg 1."
"\n"
"\nReturns:"
"\n Sum of args." % instructions, _fn.__doc__)
# Assert calling new fn issues log warning.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(
args[0], r"deprecated and will be removed")
self._assert_subset(set(["in a future version", instructions]),
set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_with_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated(date, instructions)
def _fn(arg0, arg1):
"""fn doc.
Args:
arg0: Arg 0.
arg1: Arg 1.
Returns:
Sum of args.
"""
return arg0 + arg1
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"fn doc. (deprecated)"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
"\nInstructions for updating:\n%s"
"\n"
"\nArgs:"
"\n arg0: Arg 0."
"\n arg1: Arg 1."
"\n"
"\nReturns:"
"\n Sum of args." % (date, instructions), _fn.__doc__)
# Assert calling new fn issues log warning.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_with_one_line_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated(date, instructions)
def _fn(arg0, arg1):
"""fn doc."""
return arg0 + arg1
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"fn doc. (deprecated)"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
"\nInstructions for updating:\n%s" % (date, instructions), _fn.__doc__)
# Assert calling new fn issues log warning.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_no_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated(date, instructions)
def _fn(arg0, arg1):
return arg0 + arg1
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"DEPRECATED FUNCTION"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
"\nInstructions for updating:"
"\n%s" % (date, instructions), _fn.__doc__)
# Assert calling new fn issues log warning.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_instance_fn_with_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
class _Object(object):
def __init(self):
pass
@deprecation.deprecated(date, instructions)
def _fn(self, arg0, arg1):
"""fn doc.
Args:
arg0: Arg 0.
arg1: Arg 1.
Returns:
Sum of args.
"""
return arg0 + arg1
# Assert function docs are properly updated.
self.assertEqual(
"fn doc. (deprecated)"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
"\nInstructions for updating:\n%s"
"\n"
"\nArgs:"
"\n arg0: Arg 0."
"\n arg1: Arg 1."
"\n"
"\nReturns:"
"\n Sum of args." % (date, instructions),
getattr(_Object, "_fn").__doc__)
# Assert calling new fn issues log warning.
self.assertEqual(3, _Object()._fn(1, 2))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_instance_fn_with_one_line_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
class _Object(object):
def __init(self):
pass
@deprecation.deprecated(date, instructions)
def _fn(self, arg0, arg1):
"""fn doc."""
return arg0 + arg1
# Assert function docs are properly updated.
self.assertEqual(
"fn doc. (deprecated)"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
"\nInstructions for updating:\n%s" % (date, instructions),
getattr(_Object, "_fn").__doc__)
# Assert calling new fn issues log warning.
self.assertEqual(3, _Object()._fn(1, 2))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_instance_fn_no_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
class _Object(object):
def __init(self):
pass
@deprecation.deprecated(date, instructions)
def _fn(self, arg0, arg1):
return arg0 + arg1
# Assert function docs are properly updated.
self.assertEqual(
"DEPRECATED FUNCTION"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
"\nInstructions for updating:"
"\n%s" % (date, instructions), getattr(_Object, "_fn").__doc__)
# Assert calling new fn issues log warning.
self.assertEqual(3, _Object()._fn(1, 2))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def __init(self):
pass | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def _prop(self):
return "prop_wrong_order" | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_prop_with_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
class _Object(object):
def __init(self):
pass
@property
@deprecation.deprecated(date, instructions)
def _prop(self):
"""prop doc.
Returns:
String.
"""
return "prop_with_doc"
# Assert function docs are properly updated.
self.assertEqual(
"prop doc. (deprecated)"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
"\nInstructions for updating:"
"\n%s"
"\n"
"\nReturns:"
"\n String." % (date, instructions), getattr(_Object, "_prop").__doc__)
# Assert calling new fn issues log warning.
self.assertEqual("prop_with_doc", _Object()._prop)
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_prop_no_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
class _Object(object):
def __init(self):
pass
@property
@deprecation.deprecated(date, instructions)
def _prop(self):
return "prop_no_doc"
# Assert function docs are properly updated.
self.assertEqual(
"DEPRECATED FUNCTION"
"\n"
"\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
"\nInstructions for updating:"
"\n%s" % (date, instructions), getattr(_Object, "_prop").__doc__)
# Assert calling new fn issues log warning.
self.assertEqual("prop_no_doc", _Object()._prop)
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def _assert_subset(self, expected_subset, actual_set):
self.assertTrue(
actual_set.issuperset(expected_subset),
msg="%s is not a superset of %s." % (actual_set, expected_subset)) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_deprecated_missing_args(self):
date = "2016-07-04"
instructions = "This is how you update..."
def _fn(arg0, arg1, deprecated=None):
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert calls without the deprecated argument log nothing.
with self.assertRaisesRegexp(ValueError, "not present.*\\['missing'\\]"):
deprecation.deprecated_args(date, instructions, "missing")(_fn) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_with_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_args(date, instructions, "deprecated")
def _fn(arg0, arg1, deprecated=True):
"""fn doc.
Args:
arg0: Arg 0.
arg1: Arg 1.
deprecated: Deprecated!
Returns:
Sum of args.
"""
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"fn doc. (deprecated arguments)"
"\n"
"\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
"\nInstructions for updating:\n%s"
"\n"
"\nArgs:"
"\n arg0: Arg 0."
"\n arg1: Arg 1."
"\n deprecated: Deprecated!"
"\n"
"\nReturns:"
"\n Sum of args." % (date, instructions), _fn.__doc__)
# Assert calls without the deprecated argument log nothing.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(0, mock_warning.call_count)
# Assert calls with the deprecated argument log a warning.
self.assertEqual(3, _fn(1, 2, True))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_with_one_line_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_args(date, instructions, "deprecated")
def _fn(arg0, arg1, deprecated=True):
"""fn doc."""
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"fn doc. (deprecated arguments)"
"\n"
"\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
"\nInstructions for updating:\n%s" % (date, instructions), _fn.__doc__)
# Assert calls without the deprecated argument log nothing.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(0, mock_warning.call_count)
# Assert calls with the deprecated argument log a warning.
self.assertEqual(3, _fn(1, 2, True))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_no_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_args(date, instructions, "deprecated")
def _fn(arg0, arg1, deprecated=True):
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"DEPRECATED FUNCTION ARGUMENTS"
"\n"
"\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
"\nInstructions for updating:"
"\n%s" % (date, instructions), _fn.__doc__)
# Assert calls without the deprecated argument log nothing.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(0, mock_warning.call_count)
# Assert calls with the deprecated argument log a warning.
self.assertEqual(3, _fn(1, 2, True))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_varargs(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_args(date, instructions, "deprecated")
def _fn(arg0, arg1, *deprecated):
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert calls without the deprecated argument log nothing.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(0, mock_warning.call_count)
# Assert calls with the deprecated argument log a warning.
self.assertEqual(3, _fn(1, 2, True, False))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_kwargs(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_args(date, instructions, "deprecated")
def _fn(arg0, arg1, **deprecated):
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert calls without the deprecated argument log nothing.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(0, mock_warning.call_count)
# Assert calls with the deprecated argument log a warning.
self.assertEqual(3, _fn(1, 2, a=True, b=False))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_positional_and_named(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_args(date, instructions, "d1", "d2")
def _fn(arg0, d1=None, arg1=2, d2=None):
return arg0 + arg1 if d1 else arg1 + arg0 if d2 else arg0 * arg1
# Assert calls without the deprecated arguments log nothing.
self.assertEqual(2, _fn(1, arg1=2))
self.assertEqual(0, mock_warning.call_count)
# Assert calls with the deprecated arguments log warnings.
self.assertEqual(2, _fn(1, None, 2, d2=False))
self.assertEqual(2, mock_warning.call_count)
(args1, _) = mock_warning.call_args_list[0]
self.assertRegexpMatches(args1[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions, "d1"]),
set(args1[1:]))
(args2, _) = mock_warning.call_args_list[1]
self.assertRegexpMatches(args2[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions, "d2"]),
set(args2[1:])) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_positional_and_named_with_ok_vals(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_args(date, instructions, ("d1", None),
("d2", "my_ok_val"))
def _fn(arg0, d1=None, arg1=2, d2=None):
return arg0 + arg1 if d1 else arg1 + arg0 if d2 else arg0 * arg1
# Assert calls without the deprecated arguments log nothing.
self.assertEqual(2, _fn(1, arg1=2))
self.assertEqual(0, mock_warning.call_count)
# Assert calls with the deprecated arguments log warnings.
self.assertEqual(2, _fn(1, False, 2, d2=False))
self.assertEqual(2, mock_warning.call_count)
(args1, _) = mock_warning.call_args_list[0]
self.assertRegexpMatches(args1[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions, "d1"]),
set(args1[1:]))
(args2, _) = mock_warning.call_args_list[1]
self.assertRegexpMatches(args2[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions, "d2"]),
set(args2[1:]))
# Assert calls with the deprecated arguments don't log warnings if
# the value matches the 'ok_val'.
mock_warning.reset_mock()
self.assertEqual(3, _fn(1, None, 2, d2="my_ok_val"))
self.assertEqual(0, mock_warning.call_count) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def _assert_subset(self, expected_subset, actual_set):
self.assertTrue(
actual_set.issuperset(expected_subset),
msg="%s is not a superset of %s." % (actual_set, expected_subset)) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_with_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_arg_values(date, instructions, deprecated=True)
def _fn(arg0, arg1, deprecated=True):
"""fn doc.
Args:
arg0: Arg 0.
arg1: Arg 1.
deprecated: Deprecated!
Returns:
Sum of args.
"""
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"fn doc. (deprecated arguments)"
"\n"
"\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
"\nInstructions for updating:\n%s"
"\n"
"\nArgs:"
"\n arg0: Arg 0."
"\n arg1: Arg 1."
"\n deprecated: Deprecated!"
"\n"
"\nReturns:"
"\n Sum of args." % (date, instructions), _fn.__doc__)
# Assert calling new fn with non-deprecated value logs nothing.
self.assertEqual(3, _fn(1, 2, deprecated=False))
self.assertEqual(0, mock_warning.call_count)
# Assert calling new fn with deprecated value issues log warning.
self.assertEqual(3, _fn(1, 2, deprecated=True))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:]))
# Assert calling new fn with default deprecated value issues log warning.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(2, mock_warning.call_count) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_with_one_line_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_arg_values(date, instructions, deprecated=True)
def _fn(arg0, arg1, deprecated=True):
"""fn doc."""
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"fn doc. (deprecated arguments)"
"\n"
"\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
"\nInstructions for updating:\n%s" % (date, instructions), _fn.__doc__)
# Assert calling new fn with non-deprecated value logs nothing.
self.assertEqual(3, _fn(1, 2, deprecated=False))
self.assertEqual(0, mock_warning.call_count)
# Assert calling new fn with deprecated value issues log warning.
self.assertEqual(3, _fn(1, 2, deprecated=True))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:]))
# Assert calling new fn with default deprecated value issues log warning.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(2, mock_warning.call_count) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def test_static_fn_no_doc(self, mock_warning):
date = "2016-07-04"
instructions = "This is how you update..."
@deprecation.deprecated_arg_values(date, instructions, deprecated=True)
def _fn(arg0, arg1, deprecated=True):
return arg0 + arg1 if deprecated else arg1 + arg0
# Assert function docs are properly updated.
self.assertEqual("_fn", _fn.__name__)
self.assertEqual(
"DEPRECATED FUNCTION ARGUMENTS"
"\n"
"\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
"\nInstructions for updating:"
"\n%s" % (date, instructions), _fn.__doc__)
# Assert calling new fn with non-deprecated value logs nothing.
self.assertEqual(3, _fn(1, 2, deprecated=False))
self.assertEqual(0, mock_warning.call_count)
# Assert calling new fn issues log warning.
self.assertEqual(3, _fn(1, 2, deprecated=True))
self.assertEqual(1, mock_warning.call_count)
(args, _) = mock_warning.call_args
self.assertRegexpMatches(args[0], r"deprecated and will be removed")
self._assert_subset(set(["after " + date, instructions]), set(args[1:]))
# Assert calling new fn with default deprecated value issues log warning.
self.assertEqual(3, _fn(1, 2))
self.assertEqual(2, mock_warning.call_count) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def testDeprecatedArgumentLookup(self):
good_value = 3
self.assertEqual(
deprecation.deprecated_argument_lookup("val_new", good_value, "val_old",
None), good_value)
self.assertEqual(
deprecation.deprecated_argument_lookup("val_new", None, "val_old",
good_value), good_value)
with self.assertRaisesRegexp(ValueError,
"Cannot specify both 'val_old' and 'val_new'"):
self.assertEqual(
deprecation.deprecated_argument_lookup("val_new", good_value,
"val_old", good_value),
good_value) | npuichigo/ttsflow | [
16,
6,
16,
1,
1500635633
] |
def touch(path):
with open(path, 'a'):
os.utime(path, None) | chrisspen/homebot | [
8,
5,
8,
23,
1471872070
] |
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list_all(
self,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {}) | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def instrument(graph, **kwargs):
track_subsections(graph, **kwargs)
# Construct a fresh Timer object
profiler = kwargs['profiler']
timer = Timer(profiler.name, list(profiler.all_sections))
instrument_sections(graph, timer=timer, **kwargs) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def track_subsections(iet, **kwargs):
"""
Add custom Sections to the `profiler`. Custom Sections include:
* MPI Calls (e.g., HaloUpdateCall and HaloUpdateWait)
* Busy-waiting on While(lock) (e.g., from host-device orchestration)
"""
profiler = kwargs['profiler']
sregistry = kwargs['sregistry']
name_mapper = {
HaloUpdateCall: 'haloupdate',
HaloWaitCall: 'halowait',
RemainderCall: 'remainder',
HaloUpdateList: 'haloupdate',
HaloWaitList: 'halowait',
BusyWait: 'busywait'
}
mapper = {}
for NodeType in [MPIList, MPICall, BusyWait]:
for k, v in MapNodes(Section, NodeType).visit(iet).items():
for i in v:
if i in mapper or not any(issubclass(i.__class__, n)
for n in profiler.trackable_subsections):
continue
name = sregistry.make_name(prefix=name_mapper[i.__class__])
mapper[i] = Section(name, body=i, is_subsection=True)
profiler.track_subsection(k.name, name)
iet = Transformer(mapper).visit(iet)
return iet, {} | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _send_request(
self,
request, # type: HttpRequest
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def close(self):
# type: () -> None
self._client.close() | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, plotly_name="colorbar", parent_name="bar.marker", **kwargs):
super(ColorbarValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "ColorBar"),
data_docs=kwargs.pop(
"data_docs",
"""
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in *pixels. Use `len` to set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this
number. This only has an effect when
`tickformat` is "SI" or "B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
orientation
Sets the orientation of the colorbar.
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see: h
ttps://github.com/d3/d3-format/tree/v1.4.5#d3-f
ormat. And for dates see:
https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two
items to d3's date formatter: "%h" for half of
the year as a decimal number as well as "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.bar.mar
ker.colorbar.Tickformatstop` instances or dicts
with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.bar.marker.colorbar.tickformatstopdefaults),
sets the default property values to use for
elements of bar.marker.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of
the axis. The default value for inside tick
labels is *hide past domain*. In other cases
the default is *hide past div*.
ticklabelposition
Determines where tick labels are drawn relative
to the ticks. Left and right options are used
when `orientation` is "h", top and bottom when
`orientation` is "v".
ticklabelstep
Sets the spacing between tick labels as
compared to the spacing between ticks. A value
of 1 (default) means each tick gets a label. A
value of 2 means shows every 2nd label. A
larger value n means only every nth tick is
labeled. `tick0` determines which labels are
shown. Not implemented for axes with `type`
"log" or "multicategory", or when `tickmode` is
"array".
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud
for `ticktext`.
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud
for `tickvals`.
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.bar.marker.colorba
r.Title` instance or dict with compatible
properties
titlefont
Deprecated: Please use
bar.marker.colorbar.title.font instead. Sets
this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
bar.marker.colorbar.title.side instead.
Determines the location of color bar's title
with respect to the color bar. Defaults to
"top" when `orientation` if "v" and defaults
to "right" when `orientation` if "h". Note that
the title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction). Defaults to 1.02 when `orientation`
is "v" and 0.5 when `orientation` is "h".
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar. Defaults to "left" when `orientation` is
"v" and "center" when `orientation` is "h".
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction). Defaults to 0.5 when `orientation`
is "v" and 1.02 when `orientation` is "h".
yanchor
Sets this color bar's vertical position anchor
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
Defaults to "middle" when `orientation` is "v"
and "bottom" when `orientation` is "h".
ypad
Sets the amount of padding (in px) along the y
direction. | plotly/plotly.py | [
13052,
2308,
13052,
1319,
1385013188
] |
def __init__(self, customerCount=0, pfixedPct=0.0, qfixedPct=0.0, qfixed=0.0, pfixed=0.0, LoadResponse=None, *args, **kw_args):
"""Initialises a new 'EnergyConsumer' instance.
@param customerCount: Number of individual customers represented by this Demand
@param pfixedPct: Fixed active power as per cent of load group fixed active power. Load sign convention is used, i.e. positive sign means flow out from a node.
@param qfixedPct: Fixed reactive power as per cent of load group fixed reactive power. Load sign convention is used, i.e. positive sign means flow out from a node.
@param qfixed: Reactive power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node.
@param pfixed: Active power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node.
@param LoadResponse: The load response characteristic of this load.
"""
#: Number of individual customers represented by this Demand
self.customerCount = customerCount
#: Fixed active power as per cent of load group fixed active power. Load sign convention is used, i.e. positive sign means flow out from a node.
self.pfixedPct = pfixedPct
#: Fixed reactive power as per cent of load group fixed reactive power. Load sign convention is used, i.e. positive sign means flow out from a node.
self.qfixedPct = qfixedPct
#: Reactive power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node.
self.qfixed = qfixed
#: Active power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node.
self.pfixed = pfixed
self._LoadResponse = None
self.LoadResponse = LoadResponse
super(EnergyConsumer, self).__init__(*args, **kw_args) | rwl/PyCIM | [
68,
33,
68,
7,
1238978196
] |
def getLoadResponse(self):
"""The load response characteristic of this load.
"""
return self._LoadResponse | rwl/PyCIM | [
68,
33,
68,
7,
1238978196
] |
def __init__(self, base_url):
self.base_url = base_url | mattvonrocketstein/smash | [
12,
1,
12,
10,
1321798817
] |
def list(self):
return self._req('GET', 'api/kernelspecs') | mattvonrocketstein/smash | [
12,
1,
12,
10,
1321798817
] |
def kernel_resource(self, name, path):
return self._req('GET', url_path_join('kernelspecs', name, path)) | mattvonrocketstein/smash | [
12,
1,
12,
10,
1321798817
] |
def setUp(self):
ipydir = self.ipython_dir.name
sample_kernel_dir = pjoin(ipydir, 'kernels', 'sample')
try:
os.makedirs(sample_kernel_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(pjoin(sample_kernel_dir, 'kernel.json'), 'w') as f:
json.dump(sample_kernel_json, f)
with io.open(pjoin(sample_kernel_dir, 'resource.txt'), 'w',
encoding='utf-8') as f:
f.write(some_resource)
self.ks_api = KernelSpecAPI(self.base_url()) | mattvonrocketstein/smash | [
12,
1,
12,
10,
1321798817
] |
def test_list_kernelspecs(self):
model = self.ks_api.list().json()
assert isinstance(model, dict)
self.assertEqual(model['default'], NATIVE_KERNEL_NAME)
specs = model['kernelspecs']
assert isinstance(specs, dict)
# 2: the sample kernelspec created in setUp, and the native Python
# kernel
self.assertGreaterEqual(len(specs), 2)
def is_sample_kernelspec(s):
return s['name'] == 'sample' and s['display_name'] == 'Test kernel'
def is_default_kernelspec(s):
return s['name'] == NATIVE_KERNEL_NAME and s['display_name'].startswith("IPython")
assert any(is_sample_kernelspec(s) for s in specs.values()), specs
assert any(is_default_kernelspec(s) for s in specs.values()), specs | mattvonrocketstein/smash | [
12,
1,
12,
10,
1321798817
] |
def test_get_nonexistant_kernelspec(self):
with assert_http_error(404):
self.ks_api.kernel_spec_info('nonexistant') | mattvonrocketstein/smash | [
12,
1,
12,
10,
1321798817
] |
def __init__(self):
"""
Default constructor
"""
self.targets = []
self.effects = EffectsCollection()
self.spirit = 0 | tuturto/pyherc | [
43,
2,
43,
69,
1327858418
] |
def add_effect_handle(self, handle):
"""
Add effect handle
:param handle: effect handle to add
:type handle: EffectHandle
"""
self.effects.add_effect_handle(handle) | tuturto/pyherc | [
43,
2,
43,
69,
1327858418
] |
def get_effect_handles(self, trigger=None):
"""
Get effect handles
:param trigger: optional trigger type
:type trigger: string
:returns: effect handles
:rtype: [EffectHandle]
"""
return self.effects.get_effect_handles(trigger) | tuturto/pyherc | [
43,
2,
43,
69,
1327858418
] |
def remove_effect_handle(self, handle):
"""
Remove given handle
:param handle: handle to remove
:type handle: EffectHandle
"""
self.effects.remove_effect_handle(handle) | tuturto/pyherc | [
43,
2,
43,
69,
1327858418
] |
def build_ranking(
cls,
year: Year,
rank: int,
team_key: TeamKey,
wins: int,
losses: int,
ties: int,
qual_average: Optional[float],
matches_played: int,
dq: int,
sort_orders: List[float], | the-blue-alliance/the-blue-alliance | [
334,
153,
334,
422,
1283632451
] |
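
Each row above follows the three-column schema declared in the table header: a `function` source string, a `repo_name` string, and a `features` list of five integers. As a minimal, hypothetical sketch (the `Row` class and its field names are illustrative assumptions, not part of the dataset itself), one row could be modeled as follows, using the values copied from the first row of the table:

```python
from dataclasses import dataclass
from typing import List


@dataclass
class Row:
    """One record: a function's source code, its repository, and a feature vector."""
    function: str        # raw source of the function (truncated here for brevity)
    repo_name: str       # e.g. "mlperf/training_results_v0.7"
    features: List[int]  # the five integers shown in the third column


# Built from the first row of the table; the function text is shortened.
sample = Row(
    function="def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, ...",
    repo_name="mlperf/training_results_v0.7",
    features=[11, 25, 11, 1, 1606268455],
)

print(sample.repo_name, sample.features[-1])  # -> mlperf/training_results_v0.7 1606268455
```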