id | original | modified |
|---|---|---|
35,844 | def test_msg_data_assignment(get_contract):
code = """
@external
def foo() -> Bytes[4]:
x: Bytes[4] = msg.data[4]
return x
"""
contract = get_contract(code)
assert contract.foo().hex() == "c2985578" # fn sig
| def test_msg_data_assignment(get_contract):
code = """
@external
def foo() -> Bytes[4]:
x: Bytes[4] = slice(msg.data, 0, 4) # Throws if `len(msg.data) < 4`
return x
"""
contract = get_contract(code)
assert contract.foo().hex() == "c2985578" # fn sig
|
9,628 | def privileges_get(cursor, user, host):
""" MySQL doesn't have a better method of getting privileges aside from the
SHOW GRANTS query syntax, which requires us to then parse the returned string.
Here's an example of the string that is returned from MySQL:
GRANT USAGE ON *.* TO 'user'@'localhost' IDENT... | def privileges_get(cursor, user, host):
""" MySQL doesn't have a better method of getting privileges aside from the
SHOW GRANTS query syntax, which requires us to then parse the returned string.
Here's an example of the string that is returned from MySQL:
GRANT USAGE ON *.* TO 'user'@'localhost' IDENT... |
28,598 | def plot_pair(
data,
group="posterior",
var_names: Optional[List[str]] = None,
filter_vars: Optional[str] = None,
coords=None,
marginals=False,
figsize=None,
textsize=None,
kind: Union[str, List[str]] = "scatter",
gridsize="auto",
contour: Optional[bool] = None,
plot_kwar... | def plot_pair(
data,
group="posterior",
var_names: Optional[List[str]] = None,
filter_vars: Optional[str] = None,
coords=None,
marginals=False,
figsize=None,
textsize=None,
kind: Union[str, List[str]] = "scatter",
gridsize="auto",
contour: Optional[bool] = None,
plot_kwar... |
29,029 | def process_ssh_key(keypair: Mapping, credentials: Credentials):
if len(credentials.identities) != 1:
raise SSHKeyProcessingError(
f"SSH credentials have {len(credentials.identities)}" f" users associated with " f"it!"
)
if not _contains_both_keys(keypair):
raise SSHKeyProce... | def process_ssh_key(keypair: Mapping, credentials: Credentials):
if len(credentials.identities) != 1:
raise SSHKeyProcessingError(
f"SSH credentials have {len(credentials.identities)}" f" users associated with " f"it!"
)
if not _contains_both_keys(keypair):
raise SSHKeyProce... |
34,644 | def _convert_lookup_tables_to_regex(
training_data: TrainingData, pattern_names: Optional[List[Text]] = None
) -> List[Dict[Text, Text]]:
"""Convert the lookup tables from the training data to regex patterns.
Args:
training_data: The training data.
pattern_names: List of pattern names to us... | def _convert_lookup_tables_to_regex(
training_data: TrainingData, pattern_names: Optional[List[Text]] = None
) -> List[Dict[Text, Text]]:
"""Convert the lookup tables from the training data to regex patterns.
Args:
training_data: The training data.
pattern_names: List of pattern names to us... |
46,574 | def invalid_cases():
rng = Random(1234)
for (name, (typ, offsets)) in PRESET_CONTAINERS.items():
# using mode_max_count, so that the extra byte cannot be picked up as normal list content
yield f'{name}_extra_byte', \
invalid_test_case(lambda: serialize(
container_... | def invalid_cases():
rng = Random(1234)
for (name, (typ, offsets)) in PRESET_CONTAINERS.items():
# using mode_max_count, so that the extra byte cannot be picked up as normal list content
yield f'{name}_extra_byte', \
invalid_test_case(lambda: serialize(
container_... |
27,966 | def check_config_file(args):
"""
LOG and check about the config file usage.
If a config file is set but does not exists the program will
exit.
LOG is not initialized in the process_config_file function yet
so we can not log the usage there. Using print will
always print out the config file ... | def check_config_file(args):
"""
LOG and check about the config file usage.
If a config file is set but does not exists the program will
exit.
LOG is not initialized in the process_config_file function yet
so we can not log the usage there. Using print will
always print out the config file ... |
22,602 | def forward(model: Model[InT, OutT], X: InT, is_train: bool) -> Tuple[OutT, Callable]:
nO = model.get_dim("nO")
nP = model.get_dim("nP")
nI = model.get_dim("nI")
b = model.get_param("b")
W = cast(Array2d, model.get_param("W"))
W = W.reshape((nO * nP, nI))
Y = model.ops.gemm(X, W, trans2=True... | def forward(model: Model[InT, OutT], X: InT, is_train: bool) -> Tuple[OutT, Callable]:
nO = model.get_dim("nO")
nP = model.get_dim("nP")
nI = model.get_dim("nI")
b = model.get_param("b")
W = model.get_param("W")
W = W.reshape((nO * nP, nI))
Y = model.ops.gemm(X, W, trans2=True)
Y += b.re... |
3,904 | def extrema_bounding(G, compute="diameter"):
"""Compute requested extreme distance metric of undirected graph G
Computation is based on smart lower and upper bounds, and in practice
linear in the number of nodes, rather than quadratic (except for some
border cases such as complete graphs or circle shap... | def extrema_bounding(G, compute="diameter"):
"""Compute requested extreme distance metric of undirected graph G
Computation is based on smart lower and upper bounds, and in practice
linear in the number of nodes, rather than quadratic (except for some
border cases such as complete graphs or circle shap... |
20,007 | def analyze_color(rgb_img, mask, hist_plot_type=None):
"""Analyze the color properties of an image object
Inputs:
rgb_img = RGB image data
mask = Binary mask made from selected contours
hist_plot_type = 'None', 'all', 'rgb','lab' or 'hsv'
Returns:
analysis_image ... | def analyze_color(rgb_img, mask, hist_plot_type=None):
"""Analyze the color properties of an image object
Inputs:
rgb_img = RGB image data
mask = Binary mask made from selected contours
hist_plot_type = 'None', 'all', 'rgb','lab' or 'hsv'
Returns:
analysis_image ... |
57,841 | def domain_command(client, args):
domain = args.get('domain')
domain_details = client.domain_details(hostname=domain)
domain_subdomains = client.domain_subdomains(hostname=domain)
domain_whois = client.get_whois(query_type="domain", hostname=domain)
domain_tags = client.domain_tags(hostname=domain)
... | def domain_command(client, args):
domain = args.get('domain')
domain_details = client.domain_details(hostname=domain)
domain_subdomains = client.domain_subdomains(hostname=domain)
domain_whois = client.get_whois(query_type="domain", hostname=domain)
domain_tags = client.domain_tags(hostname=domain)
... |
25,991 | def validate_scale_unit_ranges(namespace):
unit_num = namespace.scale_units
err_msg = "The number of --scale-units should in range [2, 50]."
if unit_num and (unit_num < 2 or unit_num > 50):
raise InvalidArgumentValueError(err_msg)
| def validate_scale_unit_ranges(namespace):
unit_num = namespace.scale_units
err_msg = "The number of --scale-units should in range [2, 50]."
if unit_num is not None and (unit_num < 2 or unit_num > 50):
raise InvalidArgumentValueError(err_msg)
|
12,425 | def handle(name, cfg, cloud, log, _args):
if "autoinstall" not in cfg:
LOG.debug(
"Skipping module named %s, no 'autoinstall' key in configuration",
name,
)
return
snap_list, _ = subp(["snap", "list"])
installer_present = None
for snap_name in LIVE_INSTA... | def handle(name, cfg, cloud, log, _args):
if "autoinstall" not in cfg:
LOG.debug(
"Skipping module named %s, no 'autoinstall' key in configuration",
name,
)
return
snap_list, _ = subp(["snap", "list"])
installer_present = None
for snap_name in LIVE_INSTA... |
48,468 | def main():
# initialize
module = AnsibleModule(
argument_spec=dict(
name=dict(type='str', aliases=['service', 'unit']),
state=dict(type='str', choices=['reloaded', 'restarted', 'started', 'stopped']),
enabled=dict(type='bool'),
force=dict(type='bool'),
... | def main():
# initialize
module = AnsibleModule(
argument_spec=dict(
name=dict(type='str', aliases=['service', 'unit']),
state=dict(type='str', choices=['reloaded', 'restarted', 'started', 'stopped']),
enabled=dict(type='bool'),
force=dict(type='bool'),
... |
22,289 | def stream_to_open_named_file(stream, fd, filename, source_encoding=None, source_error='strict', target_encoding=None, target_error='strict'):
"""Writes a stream to the provided file descriptor, returns the file name. Closes file descriptor"""
# signature and behavor is somewhat odd, due to backwards compatibil... | def stream_to_open_named_file(stream, fd, filename, source_encoding=None, source_error='strict', target_encoding=None, target_error='strict'):
"""Writes a stream to the provided file descriptor, returns the file name. Closes file descriptor"""
# signature and behavor is somewhat odd, due to backwards compatibil... |
25,175 | def infer_typing_newtype(node, context_itton=None):
"""Infer a typing.TypeVar(...) or typing.NewType(...) call"""
try:
func = next(node.func.infer(context=context_itton))
except (InferenceError, StopIteration) as exc:
raise UseInferenceDefault from exc
if func.qname() != "typing.NewType... | def infer_typing_newtype(node: nodes.Call, context_itton: Optional[InferenceContext] =None):
"""Infer a typing.TypeVar(...) or typing.NewType(...) call"""
try:
func = next(node.func.infer(context=context_itton))
except (InferenceError, StopIteration) as exc:
raise UseInferenceDefault from ex... |
32,204 | def fetch_incidents(client: Client, max_results: int, last_run: Dict[str, str],
first_fetch_time: str, query: Optional[str], mirror_direction: str,
mirror_tag: List[str]) -> Tuple[Dict[str, str], List[dict]]:
"""This function retrieves new incidents every interval (default is... | def fetch_incidents(client: Client, max_results: int, last_run: Dict[str, str],
first_fetch_time: str, query: Optional[str], mirror_direction: str,
mirror_tag: List[str]) -> Tuple[Dict[str, str], List[dict]]:
"""This function retrieves new incidents every interval (default is... |
17,393 | def broadcast_dimension_size(
variables: List[Variable],
) -> Dict[Hashable, int]:
"""Extract dimension sizes from a dictionary of variables.
Raises ValueError if any dimensions have different sizes.
"""
dims: Dict[Hashable, int] = {}
for var in variables:
for dim, size in zip(var.dims,... | def broadcast_dimension_size(
variables: List[Variable]
) -> Dict[Hashable, int]:
"""Extract dimension sizes from a dictionary of variables.
Raises ValueError if any dimensions have different sizes.
"""
dims: Dict[Hashable, int] = {}
for var in variables:
for dim, size in zip(var.dims, ... |
2,965 | def read_gbq(
query,
project_id=None,
index_col=None,
col_order=None,
reauth=False,
auth_local_webserver=False,
dialect=None,
location=None,
configuration=None,
credentials=None,
use_bqstorage_api=None,
private_key=None,
verbose=None,
progress_bar_type="tqdm",
):
... | def read_gbq(
query,
project_id=None,
index_col=None,
col_order=None,
reauth=False,
auth_local_webserver=False,
dialect=None,
location=None,
configuration=None,
credentials=None,
use_bqstorage_api=None,
private_key=None,
verbose=None,
progress_bar_type=None,
):
... |
46,294 | def is_diagonal(matrix, tol=1e-8):
"""Determine whether affine is a diagonal matrix.
Parameters
----------
matrix : 2-D array
The matrix to test.
tol : float, optional
Consider any entries with magnitude < `tol` as 0.
Returns
-------
is_diag : bool
Boolean indic... | def is_diagonal(matrix, tol=1e-8):
"""Determine whether affine is a diagonal matrix.
Parameters
----------
matrix : 2-D array
The matrix to test.
tol : float, optional
Consider any entries with magnitude < `tol` as 0.
Returns
-------
is_diag : bool
True if matri... |
19,202 | def patchelf_root_spec():
"""Return the root spec used to bootstrap patchelf"""
# TODO: patchelf is restricted to v0.13 since earlier versions have
# TODO: bugs that we don't to deal with, while v0.14 requires a C++17
# TODO: which may not be available on all platforms.
return _root_spec('patchelf@0... | def patchelf_root_spec():
"""Return the root spec used to bootstrap patchelf"""
# TODO: patchelf is restricted to v0.13 since earlier versions have
# TODO: bugs that we don't to deal with, while v0.14 requires a C++17
# TODO: which may not be available on all platforms.
return _root_spec('patchelf@0... |
34,225 | def configure_file_logging(logger_obj: logging.Logger, log_file: Optional[Text]):
if not log_file:
return
formatter = logging.Formatter("%(asctime)s [%(levelname)-5.5s] %(message)s")
file_handler = logging.FileHandler(log_file, encoding='utf-8')
file_handler.setLevel(logger_obj.level)
file... | def configure_file_logging(logger_obj: logging.Logger, log_file: Optional[Text]):
if not log_file:
return
formatter = logging.Formatter("%(asctime)s [%(levelname)-5.5s] %(message)s")
file_handler = logging.FileHandler(log_file, encoding="utf-8")
file_handler.setLevel(logger_obj.level)
file... |
36,568 | def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
dry_run=0, owner=None, group=None, logger=None):
"""Create an archive file (eg. zip or tar).
'base_name' is the name of the file to create, minus any format-specific
extension; 'format' is the archive format: one ... | def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
dry_run=0, owner=None, group=None, logger=None):
"""Create an archive file (eg. zip or tar).
'base_name' is the name of the file to create, minus any format-specific
extension; 'format' is the archive format: one ... |
35,871 | def main():
parser = get_sendpayment_parser()
(options, args) = parser.parse_args()
load_program_config(config_path=options.datadir)
if options.schedule == '':
if ((len(args) < 2) or
(btc.is_bip21_uri(args[1]) and len(args) != 2) or
(not btc.is_bip21_uri(args[1]) and len... | def main():
parser = get_sendpayment_parser()
(options, args) = parser.parse_args()
load_program_config(config_path=options.datadir)
if options.schedule == '':
if ((len(args) < 2) or
(btc.is_bip21_uri(args[1]) and len(args) != 2) or
(not btc.is_bip21_uri(args[1]) and len... |
32,266 | def get_modified_remote_data_command(client: Client, args: Dict[str, Any], params: Dict) -> GetModifiedRemoteDataResponse:
remote_args = GetModifiedRemoteDataArgs(args)
query_date = dateparser.parse(remote_args.last_update,
settings={'TIMEZONE': 'UTC'}).strftime('%Y-%m-%dT%H:%M... | def get_modified_remote_data_command(client: Client, args: Dict[str, Any], params: Dict) -> GetModifiedRemoteDataResponse:
remote_args = GetModifiedRemoteDataArgs(args)
query_date = dateparser.parse(remote_args.last_update,
settings={'TIMEZONE': 'UTC'}).strftime(DATE_FORMAT) #... |
10,201 | def main():
argument_spec = RabbitClient.rabbitmq_argument_spec()
argument_spec.update(
exchange=dict(type='str', default=''),
routing_key=dict(type='str', required=False),
body=dict(type='str', required=False),
src=dict(aliases=['file'], type='path', required=False),
con... | def main():
argument_spec = RabbitClient.rabbitmq_argument_spec()
argument_spec.update(
exchange=dict(type='str', default=''),
routing_key=dict(type='str', required=False),
body=dict(type='str', required=False),
src=dict(aliases=['file'], type='path', required=False),
con... |
5,740 | def van_der_corput(
n: IntNumber,
base: IntNumber = 2,
*,
start_index: IntNumber = 0,
scramble: bool = False,
seed: SeedType = None) -> np.ndarray:
"""Van der Corput sequence.
Pseudo-random number generator based on a b-adic expansion.
Scrambling uses permut... | def van_der_corput(
n: IntNumber,
base: IntNumber = 2,
*,
start_index: IntNumber = 0,
scramble: bool = False,
seed: SeedType = None) -> np.ndarray:
"""Van der Corput sequence.
Pseudo-random number generator based on a b-adic expansion.
Scrambling uses permut... |
33,022 | def pois_from_address(address, distance, tags=None, return_query=False, **kwargs):
"""
Get OSM points of Interests within some distance north, south, east, and west of
an address.
Parameters
----------
address : string
the address to geocode to a lat-long point
distance : numeric
... | def pois_from_address(address, distance, tags=None, return_query=False, **kwargs):
"""
Get OSM points of Interests within some distance north, south, east, and west of
an address.
Parameters
----------
address : string
the address to geocode to a lat-long point
distance : numeric
... |
32,291 | def check_spyware_profiles(
topology: Topology,
device_filter_string: str = None,
minimum_block_severities: str = "critical,high",
minimum_alert_severities: str = "medium,low"
) -> ConfigurationHygieneCheckResult:
"""
Checks the configured Anti-spyware profiles to ensure at least... | def check_spyware_profiles(
topology: Topology,
device_filter_string: Optional[str] = None,
minimum_block_severities: str = "critical,high",
minimum_alert_severities: str = "medium,low"
) -> ConfigurationHygieneCheckResult:
"""
Checks the configured Anti-spyware profiles to ensur... |
43,667 | def excitations_to_wires(singles, doubles, wires=None):
r"""Map the indices representing the single and double excitations
generated with the function :func:`~.excitations` to the wires that
the Unitary Coupled-Cluster (UCCSD) template will act on.
Args:
singles (list[list[int]]): List with the... | def excitations_to_wires(singles, doubles, wires=None):
r"""Map the indices representing the single and double excitations
generated with the function :func:`~.excitations` to the wires that
the Unitary Coupled-Cluster (UCCSD) template will act on.
Args:
singles (list[list[int]]): List with the... |
57,819 | def create_process_command(
credentials: Dict, sensor_id: int,
command_string: str,
wait_timeout: int = 30,
wait_for_output: bool = True,
wait_for_completion: bool = True,
**additional_params):
# additional_param may include: remote_output_file_name: str, working_dire... | def create_process_command(
credentials: Dict, sensor_id: int,
command_string: str,
wait_timeout: int = 30,
wait_for_output: bool = True,
wait_for_completion: bool = True,
**additional_params):
# additional_param may include: remote_output_file_name: str, working_dire... |
34,173 | def add_subparser(
subparsers: argparse._SubParsersAction, parents: List[argparse.ArgumentParser]
):
import rasa.cli.arguments.train as core_cli
train_parser = subparsers.add_parser(
"train",
help="Train a Rasa model using your NLU data and stories.",
parents=parents,
format... | def add_subparser(
subparsers: argparse._SubParsersAction, parents: List[argparse.ArgumentParser]
):
import rasa.cli.arguments.train as core_cli
train_parser = subparsers.add_parser(
"train",
help="Trains a Rasa model using your NLU data and stories.",
parents=parents,
forma... |
34,970 | def conv3d_ndhwc(
Input,
Filter,
stride,
padding,
dilation,
out_dtype="float32",
auto_scheduler_rewritten_layout="",
):
"""Convolution operator in NDHWC layout.
Parameters
----------
Input : tvm.te.Tensor
5-D with shape [batch, in_depth, in_height, in_width, in_chann... | def conv3d_ndhwc(
Input,
Filter,
stride,
padding,
dilation,
out_dtype="float32",
auto_scheduler_rewritten_layout="",
):
"""Convolution operator in NDHWC layout.
Parameters
----------
Input : tvm.te.Tensor
5-D with shape [batch, in_depth, in_height, in_width, in_chann... |
34,629 | def add_subparser(
subparsers: SubParsersAction, parents: List[argparse.ArgumentParser]
) -> None:
"""Add all parsers for training in chunks.
Args:
subparsers: subparser we are going to attach to
parents: Parent parsers, needed to ensure tree structure in argparse
"""
train_parser =... | def add_subparser(
subparsers: SubParsersAction, parents: List[argparse.ArgumentParser]
) -> None:
"""Add all parsers for training in chunks.
Args:
subparsers: subparser we are going to attach to
parents: Parent parsers, needed to ensure tree structure in argparse
"""
train_parser =... |
35,487 | def extract_casync_image(target_slot_number: int, partition: dict, cloudlog):
path = get_partition_path(target_slot_number, partition)
seed_path = path[:-1] + ('b' if path[-1] == 'a' else 'a')
target = casync.parse_caibx(partition['casync_caibx'])
sources = []
# First source is the current partition. Index... | def extract_casync_image(target_slot_number: int, partition: dict, cloudlog):
path = get_partition_path(target_slot_number, partition)
seed_path = path[:-1] + ('b' if path[-1] == 'a' else 'a')
target = casync.parse_caibx(partition['casync_caibx'])
sources = []
# First source is the current partition. Index... |
38,900 | def model_schema(model: Type['BaseModel'], by_alias: bool = True, ref_prefix: Optional[str] = None) -> Dict[str, Any]:
"""
Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level
JSON key.
:param model: a Pydantic model (a class that inherits from BaseMode... | def model_schema(model: Type['BaseModel'], by_alias: bool = True, ref_prefix: Optional[str] = None) -> Dict[str, Any]:
"""
Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level
JSON key.
:param model: a Pydantic model (a class that inherits from BaseMode... |
16,666 | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Find and return switches controlled by telnet commands."""
devices: dict[str, Any] = config.get(CONF_SWITCHES, {})
switches = []
... | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Find and return switches controlled by telnet commands."""
devices: dict[str, Any] = config.get(CONF_SWITCHES, {})
switches = []
... |
30,945 | def get_remote_data_command(id: str, lastUpdate: str) -> GetRemoteDataResponse:
""" Mirror-in data to incident from Jira into demisto 'jira issue' incident.
Notes:
1. Documentation on mirroring - https://xsoar.pan.dev/docs/integrations/mirroring_integration
Args:
id: Remote incident id.
... | def get_remote_data_command(id: str, lastUpdate: str) -> GetRemoteDataResponse:
""" Mirror-in data to incident from Jira into demisto 'jira issue' incident.
Notes:
1. Documentation on mirroring - https://xsoar.pan.dev/docs/integrations/mirroring_integration
Args:
id: Remote incident id.
... |
57,858 | def get_group_command(client, args):
scim = verify_and_load_scim_data(args.get('scim'))
group_id = scim.get('id')
group_name = scim.get('displayName')
if not (group_id or group_name):
return_error("You must supply either 'id' or 'displayName' in the scim data")
if not group_id:
res... | def get_group_command(client, args):
scim = verify_and_load_scim_data(args.get('scim'))
group_id = scim.get('id')
group_name = scim.get('displayName')
if not (group_id or group_name):
return_error("You must supply either 'id' or 'displayName' in the scim data")
if not group_id:
res... |
7,519 | def convert_numpy(numpy_type):
"""Return a tuple containing a function which converts a list into a numpy
array and the type produced by the converter function.
Parameters
----------
numpy_type : numpy data-type
The numpy type required of an array returned by ``converter``. Must be a
... | def convert_numpy(numpy_type):
"""Return a tuple containing a function which converts a list into a numpy
array and the type produced by the converter function.
Parameters
----------
numpy_type : numpy data-type
The numpy type required of an array returned by ``converter``. Must be a
... |
45,924 | def _draw_pixel(
image: torch.Tensor,
x: int,
y: int,
color: torch.Tensor,
):
r"""Draws a pixel into an image.
Args:
image: the input image to where to draw the lines with shape (C,H,W).
x: the x coordinate of the pixel.
y: the y coordinate of the pixel.
color: t... | def _draw_pixel(
image: torch.Tensor,
x: int,
y: int,
color: torch.Tensor,
):
r"""Draws a pixel into an image.
Args:
image: the input image to where to draw the lines with shape :math:`(C,H,W)`.
x: the x coordinate of the pixel.
y: the y coordinate of the pixel.
... |
37,095 | def _to_tuple(values):
"""
Return the input, sorted, and as a tuple.
Args:
values: An integer, a list of ints, or a tuple of ints.
Returns:
tuple: The input values as a sorted tuple.
"""
try:
return tuple(sorted(values))
except TypeError:
return (values,)
| def _to_tuple(values):
"""
Return the input, sorted, and as a tuple.
Args:
values: An integer, a list of ints, or a tuple of ints.
Returns:
tuple: The input values as a sorted tuple.
"""
try:
return tuple(values)
except TypeError:
return (values,)
|
22,364 | def make_same_length(list1, list2):
# If either list is 1 item, we'll append to it until its length is the same as the other.
if len(list1) == 1:
for _i in range(1, len(list2)):
list1.append(list1[0])
elif len(list2) == 1:
for _i in range(1, len(list1)):
list2.append(... | def make_same_length(list1, list2):
# If either list is 1 item, we'll append to it until its length is the same as the other.
if len(list1) == 1:
for _i in range(1, len(list2)):
list1.append(list1[0])
elif len(list2) == 1:
for _ in range(1, len(list1)):
list2.append(l... |
31,093 | def create_incident_from_saved_data(field_mapping, incident_result=False):
created_incident = {}
demisto_incident = demisto.incident()
custom_fields = demisto_incident.get('CustomFields', {})
if incident_result: # need to return the incident with extra fields
fields_to_create = ['xdralerts', '... | def create_incident_from_saved_data(field_mapping, include_extra_data=False):
created_incident = {}
demisto_incident = demisto.incident()
custom_fields = demisto_incident.get('CustomFields', {})
if include_extra_data:  # need to return the incident with extra fields
fields_to_create = ['xdralerts'... |
1,715 | def load_files(container_path, description=None, categories=None,
load_content=True, shuffle=True, encoding=None,
decode_error='strict', random_state=0):
"""Load text files with categories as subfolder names.
Individual samples are assumed to be files stored a two levels folder
... | def load_files(container_path, description=None, categories=None,
load_content=True, shuffle=True, encoding=None,
decode_error='strict', random_state=0):
"""Load text files with categories as subfolder names.
Individual samples are assumed to be files stored a two levels folder
... |
4,603 | def test_invalid_filetype(tmp_path):
"""Invalid file types/associated files for load method."""
bad_nii, bad_conf = create_tmp_filepath(tmp_path, copy_confounds=True,
old_deriveative_suffix=False)
conf, _ = load_confounds(bad_nii)
# more than one legal filena... | def test_invalid_filetype(tmp_path):
"""Invalid file types/associated files for load method."""
bad_nii, bad_conf = create_tmp_filepath(tmp_path, copy_confounds=True,
old_derivative_suffix=False)
conf, _ = load_confounds(bad_nii)
# more than one legal filenam... |
32,871 | def _attempt_patch_module(module):
# type: (str) -> bool
"""_patch_module will attempt to monkey patch the module.
Returns if the module got patched.
Can also raise errors if it fails.
"""
path = "ddtrace.contrib.%s" % module
with _LOCK:
if module in _PATCHED_MODULES and module not ... | def _attempt_patch_module(module):
# type: (str) -> bool
"""_patch_module will attempt to monkey patch the module.
Returns if the module got patched.
Can also raise errors if it fails.
"""
path = "ddtrace.contrib.%s" % module
with _LOCK:
if module in _PATCHED_MODULES and module not ... |
36,523 | def get_gdb_version():
try:
cmd = ["gdb", "-nx", "--version"]
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
with proc:
version, ... | def get_gdb_version():
try:
cmd = ["gdb", "-nx", "--version"]
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
with proc:
version, ... |
52,488 | def service(service_name, service_option):
if os.path.basename(which('systemctl') or '') == 'systemctl' and is_running_systemd():
status = exec_cmd(f"sudo systemctl {service_option} {service_name}", _raise=False)
if service_option == "status":
return status == 0
if service_option == "reload":
if status == ... | def service(service_name, service_option):
if os.path.basename(which('systemctl') or '') == 'systemctl' and is_running_systemd():
status = exec_cmd(f"sudo systemctl {service_option} {service_name}", _raise=False)
if service_option == "status":
return status == 0
if service_option == "reload":
if status != ... |
14,287 | def deprecated(
msg: str, category: Type[Warning] = DeprecationWarning
) -> Callable[[AnyCallableT], AnyCallableT]:
"""Emits a DeprecationWarning when the decorated function is called.
This decorator works on normal functions, methods, and properties.
Usage on properties requires the ``@property`` deco... | def deprecated(
msg: str, category: Type[Warning] = DeprecationWarning
) -> Callable[[AnyCallableT], AnyCallableT]:
"""Emits a DeprecationWarning when the decorated function is called.
This decorator works on normal functions, methods, and properties.
Usage on properties requires the ``@property`` deco... |
12,049 | def _generate_cubes(
header, column_headings, coords, data_arrays, cell_methods=None
):
"""
Yield :class:`iris.cube.Cube` instances given
the headers, column headings, coords and data_arrays extracted
from a NAME file.
"""
for i, data_array in enumerate(data_arrays):
# Turn the dict... | def _generate_cubes(
header, column_headings, coords, data_arrays, cell_methods=None
):
"""
Yield :class:`iris.cube.Cube` instances given
the headers, column headings, coords and data_arrays extracted
from a NAME file.
"""
for i, data_array in enumerate(data_arrays):
# Turn the dict... |
8,919 | def _join(bot, channel, key=None, save=True):
if not channel:
return
if not key:
bot.join(channel)
else:
bot.join(channel, key)
if save:
channels = dict(_get_config_channels(bot.config.core.channels))
# save only if channel is new or key has been changed
... | def _join(bot, channel, key=None, save=True):
if not channel:
return
if not key:
bot.join(channel)
else:
bot.join(channel, key)
if save:
channels = dict(_get_config_channels(bot.config.core.channels))
# save only if channel is new or key has been changed
... |
31,023 | def panorama_route_lookup(dest_ip: str, virtual_router=None):
"""
Given the provided ip address, looks up the outgoing interface and zone on the firewall.
"""
if not VSYS:
raise Exception("The 'panorama-route-lookup' command is only relevant for a Firewall instance.")
response = panorama_ge... | def panorama_route_lookup(dest_ip: str, virtual_router=None):
"""
Given the provided ip address, looks up the outgoing interface and zone on the firewall.
"""
if not VSYS:
raise Exception("The 'panorama-route-lookup' command is only relevant for a Firewall instance.")
response = panorama_ge... |
2,589 | def test_lof_performance(global_dtype):
# Generate train/test data
rng = check_random_state(2)
X = 0.3 * rng.randn(120, 2).astype(global_dtype)
X_train = X[:100]
# Generate some abnormal novel observations
X_outliers = rng.uniform(low=-4, high=4, size=(20, 2))
X_test = np.r_[X[100:], X_outl... | def test_lof_performance(global_dtype):
# Generate train/test data
rng = check_random_state(2)
X = 0.3 * rng.randn(120, 2).astype(global_dtype, copy=False)
X_train = X[:100]
# Generate some abnormal novel observations
X_outliers = rng.uniform(low=-4, high=4, size=(20, 2))
X_test = np.r_[X[1... |
42,084 | def _get_optimization_history_plot(
studies: List[Study],
target: Optional[Callable[[FrozenTrial], float]],
target_name: str,
error_bar: bool,
) -> "go.Figure":
layout = go.Layout(
title="Optimization History Plot",
xaxis={"title": "trial number"},
yaxis={"title": target_nam... | def _get_optimization_history_plot(
studies: List[Study],
target: Optional[Callable[[FrozenTrial], float]],
target_name: str,
error_bar: bool,
) -> "go.Figure":
layout = go.Layout(
title="Optimization History Plot",
xaxis={"title": "Trial Number"},
yaxis={"title": target_nam... |
43,270 | def get_seed(seed):
"""
Convenience function to use the global seed by default if the provided seed is None.
Args:
seed (int, optional): seed value
Returns:
seed if not None, otherwise the global seed
"""
if seed is None:
return _sg_seed
else:
return seed
| def get_seed(seed=None):
"""
Convenience function to use the global seed by default if the provided seed is None.
Args:
seed (int, optional): seed value
Returns:
seed if not None, otherwise the global seed
"""
if seed is None:
return _sg_seed
else:
return s... |
30,866 | def update_output(output, simple_values, element_values, info_dict):
for info in info_dict:
info_type = info.get('type', '')
if info_type == 'simple':
field = dict_safe_get(simple_values, [info.get('field')], {}, dict)
output[info['header']] = dict_safe_get(field, ['values',... | def update_output(output, simple_values, element_values, info_dict):
for info in info_dict:
info_type = info.get('type', '')
if info_type == 'simple':
output[info['header']] = dict_safe_get(simple_values, [info.get('field'), 'values', 0])
elif info_type == 'element':
... |
38,968 | def make_literal_validator(type_: Any) -> Callable[[Any], Any]:
permitted_choices: Tuple[Any, ...] = all_literal_values(type_)
allowed_choices_set = set(permitted_choices)
def literal_validator(v: Any) -> Any:
if v not in allowed_choices_set:
raise errors.WrongConstantError(given=v, per... | def make_literal_validator(type_: Any) -> Callable[[Any], Any]:
permitted_choices = all_literal_values(type_)
allowed_choices_set = set(permitted_choices)
def literal_validator(v: Any) -> Any:
if v not in allowed_choices_set:
raise errors.WrongConstantError(given=v, permitted=permitted_... |
37,360 | def _match_Num_or_Parameter(node: ast.AST) -> bool:
"""Match number of circuit Parameter Expression."""
if isinstance(node, ast.Num):
return True
elif (isinstance(node, ast.Constant) and
isinstance(node.value, circuit.ParameterExpression)):
return True
return False
| def _match_num_or_parameter(node: ast.AST) -> bool:
"""Match number of circuit Parameter Expression."""
if isinstance(node, ast.Num):
return True
elif (isinstance(node, ast.Constant) and
isinstance(node.value, circuit.ParameterExpression)):
return True
return False
|
44,514 | def map_resource_to_metadata_type(
mb_sdk_type: aiplatform.base.AiPlatformResourceNoun
) -> Tuple[str, str]:
"""Maps an MB SDK type to Metadata type.
Returns:
Tuple of component parameter name and metadata type.
ie aiplatform.Model -> "model", "Model"
"""
# type should always be in... | def map_resource_to_metadata_type(
mb_sdk_type: aiplatform.base.AiPlatformResourceNoun
) -> Tuple[str, str]:
"""Maps an MB SDK type to Metadata type.
Returns:
Tuple of component parameter name and metadata type.
ie aiplatform.Model -> "model", "Model"
"""
# type should always be in... |
40,595 | def annuity(n, r):
"""Calculate the annuity factor for an asset with lifetime n years and
discount rate of r, e.g. annuity(20, 0.05) * 20 = 1.6"""
if isinstance(r, pd.Series):
return pd.Series(1/n, index=r.index).where(r == 0, r/(1. - 1./(1.+r)**n))
elif r > 0:
return r / (1. - 1./(1.+r... | def calculate_annuity(n, r):
"""Calculate the annuity factor for an asset with lifetime n years and
discount rate of r, e.g. calculate_annuity(20, 0.05) * 20 = 1.6"""
if isinstance(r, pd.Series):
return pd.Series(1/n, index=r.index).where(r == 0, r/(1. - 1./(1.+r)**n))
elif r > 0:
return r / (1. ... |
22,239 | def install_update_trigger(migrate_engine):
"""Installs trigger on database table to update history table
when contents have changed. Installs a function and a trigger
for postgres, other sql variants only require the trigger def
"""
pg_create_trigger = DDL("""
CREATE FUNCTION update_histor... | def install_update_trigger(migrate_engine):
"""Installs trigger on database table to update history table
when contents have changed. Installs a function and a trigger
for postgres, other sql variants only require the trigger def
"""
pg_create_trigger = DDL("""
CREATE FUNCTION update_histor... |
36,246 | def sam(
adata: AnnData,
max_iter: int = 10,
num_norm_avg: int = 50,
k: int = 20,
distance: str = 'correlation',
standardization: Optional[str] = 'Normalizer',
weight_pcs: bool = True,
npcs: Optional[int] = None,
n_genes: Optional[int] = None,
projection: Optional[str] = 'umap',
... | def sam(
adata: AnnData,
max_iter: int = 10,
num_norm_avg: int = 50,
k: int = 20,
distance: str = 'correlation',
standardization: Literal['Normalizer', 'StandardScaler'] = 'Normalizer',
weight_pcs: bool = True,
npcs: Optional[int] = None,
n_genes: Optional[int] = None,
projection... |
31,210 | def main():
"""
PARSE AND VALIDATE INTEGRATION PARAMS
"""
token = demisto.params().get('token')
# get the service API url
base_url = urljoin(demisto.params()['url'], '/api/rest')
verify_certificate = not demisto.params().get('insecure', False)
# How much time before the first fetc... | def main():
"""
PARSE AND VALIDATE INTEGRATION PARAMS
"""
token = demisto.params().get('token')
# get the service API url
base_url = urljoin(demisto.params()['url'], '/api/rest')
verify_certificate = not demisto.params().get('insecure', False)
# How much time before the first fetc... |
41,841 | def dump_best_config(input_config_file: str, output_config_file: str, study: optuna.Study) -> None:
"""Save jsonnet after updating with parameters from the best trial in the study.
Args:
input_config_file:
Input configuration file (Jsonnet) specified in
:class:`~optuna.integrati... | def dump_best_config(input_config_file: str, output_config_file: str, study: optuna.Study) -> None:
"""Save jsonnet after updating with parameters from the best trial in the study.
Args:
input_config_file:
Input configuration file (Jsonnet) specified in
:class:`~optuna.integrati... |
22,370 | def print_folders(pad, folder):
# For debugging...
pad_str = ''
for _i in range(1, pad):
pad_str += ' '
print(f'{pad_str} id: {folder.id} key: {folder.key}')
for repository_dependency in folder.repository_dependencies:
print(f' {pad_str}{repository_dependency.listify}')
for su... | def print_folders(pad, folder):
# For debugging...
pad_str = ' ' * pad
print(f'{pad_str} id: {folder.id} key: {folder.key}')
for repository_dependency in folder.repository_dependencies:
print(f' {pad_str}{repository_dependency.listify}')
... |
14,214 | def load_entry():
"""Gather entry point information by parsing :envar:`COCOTB_ENTRY_POINT`."""
entry_point_str = os.environ.get("COCOTB_ENTRY_POINT", "cocotb:_initialise_testbench")
try:
if ":" not in entry_point_str:
raise ValueError("Invalid COCOTB_ENTRY_POINT, missing entry function (... | def load_entry():
"""Gather entry point information by parsing :envvar:`COCOTB_ENTRY_POINT`."""
entry_point_str = os.environ.get("COCOTB_ENTRY_POINT", "cocotb:_initialise_testbench")
try:
if ":" not in entry_point_str:
raise ValueError("Invalid COCOTB_ENTRY_POINT, missing entry function ... |
9,872 | def main():
""" This section is for arguments parsing """
state_map = dict(
present='vrouter-bgp-add',
absent='vrouter-bgp-remove',
update='vrouter-bgp-modify'
)
argument_spec = dict(
pn_cliswitch=dict(required=False, type='str'),
state=dict(required=False, type... | def main():
""" This section is for arguments parsing """
state_map = dict(
present='vrouter-bgp-add',
absent='vrouter-bgp-remove',
update='vrouter-bgp-modify'
)
argument_spec = dict(
pn_cliswitch=dict(required=False, type='str'),
state=dict(required=False, type... |
31,903 | def start_quick_scan(client, data_args):
# get computer group ID from computer group name
computer_group_name = data_args.get('computer-group-name')
raw_response = client.do_request('GET', f"/api/v2/groups/by-name/{computer_group_name}")
raw_response_data = raw_response.get('data')
if not raw_respon... | def start_quick_scan(client, data_args):
# get computer group ID from computer group name
computer_group_name = data_args.get('computer-group-name')
raw_response = client.do_request('GET', f"/api/v2/groups/by-name/{computer_group_name}")
raw_response_data = raw_response.get('data')
if not raw_respon... |
13,747 | def get_course_enrollment_details(course_id, include_expired=False):
"""Get the course modes for course. Also get enrollment start and end date, invite only, etc.
Given a course_id, return a serializable dictionary of properties describing course enrollment information.
Args:
course_id (str): The ... | def get_course_enrollment_details(course_id, include_expired=False):
"""Get the course modes for course. Also get enrollment start and end date, invite only, etc.
Given a course_id, return a serializable dictionary of properties describing course enrollment information.
Args:
course_id (str): The ... |
39,875 | def confirm_staged_stake(stakeholder, value, duration):
click.confirm(f"""
* Ursula Node Operator Notice *
-------------------------------
By agreeing to stake {str(value)}:
- Staked tokens will be locked, and unavailable for transactions for the stake duration.
- You are obligated to maintain a networked and a... | def confirm_staged_stake(stakeholder, value, duration):
click.confirm(f"""
* Ursula Node Operator Notice *
-------------------------------
By agreeing to stake {str(value)}:
- Staked tokens will be locked, and unavailable for transactions for the stake duration.
- You are obligated to maintain a networked and a... |
30,413 | def check_base_branch(pr_num):
print_color('Starting to fetch the base branch of PR num {}'.format(pr_num), LOG_COLORS.GREEN)
base_branch = get_base_branch(pr_num)
print_color('Finished to fetch the base branch of PR num {}'.format(pr_num), LOG_COLORS.GREEN)
if base_branch == 'master':
print_err... | def check_base_branch(pr_num):
print_color('Starting to fetch the base branch of PR num {}'.format(pr_num), LOG_COLORS.GREEN)
base_branch = get_base_branch(pr_num)
print_color('Finished to fetch the base branch of PR num {}'.format(pr_num), LOG_COLORS.GREEN)
if base_branch == 'master':
print_err... |
29,810 | def info_mark():
"""
:return: string that can print an info symbol
"""
return PaastaColors.blue("\u2139")
| def info_mark() -> str:
"""
:return: string that can print an info symbol
"""
return PaastaColors.blue("\u2139")
|
27,942 | def evaluate(model, X_test, Y_test, eval_size, batch_size):
N_test = X_test.shape[0] if eval_size is None else eval_size
if N_test > X_test.shape[0]:
raise ValueError(
'Test size can be no larger than {}'.format(X_test.shape[0]))
with chx.no_backprop_mode():
total_loss = chx.ar... | def evaluate(model, X_test, Y_test, eval_size, batch_size):
N_test = X_test.shape[0] if eval_size is None else eval_size
if N_test > X_test.shape[0]:
raise ValueError(
'Test size can be no larger than {}'.format(X_test.shape[0]))
with chx.no_backprop_mode():
total_loss = chx.ar... |
34,309 | def add_confused_intents_to_report(
report: Dict, target_intents: Iterable[Any], predicted_intents: Iterable[Any]
) -> Dict:
from sklearn.metrics import confusion_matrix
from sklearn.utils.multiclass import unique_labels
cnf_matrix = confusion_matrix(target_intents, predicted_intents)
indices = n... | def add_confused_intents_to_report(
report: Dict, target_intents: Iterable[Any], predicted_intents: Iterable[Any]
) -> Dict[Text, Dict[Text, Union[Dict, float]]]:
from sklearn.metrics import confusion_matrix
from sklearn.utils.multiclass import unique_labels
cnf_matrix = confusion_matrix(target_intent... |
31,073 | def install_packs(client: demisto_client,
host: str,
packs_to_install: list,
request_timeout: int = 999999,
is_nightly: bool = False):
""" Make a packs installation request.
Args:
client (demisto_client): The configured client to u... | def install_packs(client: demisto_client,
host: str,
packs_to_install: list,
request_timeout: int = 999999,
is_nightly: bool = False):
""" Make a packs installation request.
Args:
client (demisto_client): The configured client to u... |
21,246 | def _make_glob_from_args(
reporter: Optional[str],
volumes: Optional[range],
page: Optional[str],
) -> List[str]:
"""Make list of glob paths
:param reporter: The reporter to filter if any
:param volumes: The volumes of the reporter to filter to, if any
:return: A list of glob paths
"""
... | def _make_glob_from_args(
reporter: Optional[str],
volumes: Optional[range],
page: Optional[str],
) -> List[str]:
"""Make list of glob paths
:param reporter: The reporter to filter if any
:param volumes: The volumes of the reporter to filter to, if any
:return: A list of glob paths
"""
... |
520 | def ignore_couch_changes_for_sql_domains(change):
if not change.metadata or not change.metadata.domain:
return False
if change.metadata.data_source_type == SOURCE_COUCH and should_use_sql_backend(change.metadata.domain):
return True
| def is_couch_change_for_sql_domain(change):
if not change.metadata or not change.metadata.domain:
return False
if change.metadata.data_source_type == SOURCE_COUCH and should_use_sql_backend(change.metadata.domain):
return True
|
19,497 | def _develop_specs_from_env(spec, env):
dev_info = env.dev_specs.get(spec.name, {}) if env else {}
if not dev_info:
return
path = os.path.normpath(os.path.join(env.path, dev_info["path"]))
if "dev_path" in spec.variants:
error_msg = (
"The dev_path for spec {name} is not co... | def _develop_specs_from_env(spec, env):
dev_info = env.dev_specs.get(spec.name, {}) if env else {}
if not dev_info:
return
path = os.path.normpath(os.path.join(env.path, dev_info["path"]))
if "dev_path" in spec.variants:
error_msg = (
"The dev_path for spec {name} is not co... |
5,468 | def global_contribution_form(request):
"""Adds contribution form to the context."""
if enabled(request):
initial_data = {}
if hasattr(request, 'user'):
initial_data = {
'name': request.user.get_full_name() or request.user.username,
'email': request.use... | def global_contribution_form(request):
"""Adds contribution form to the context."""
if enabled(request):
initial_data = {}
if hasattr(request, 'user'):
initial_data = {
'name': request.user.get_full_name() or request.user.username,
'email': request.use... |
22,040 | def download_google_bigquery_table(project, dataset, table, columns=None, condition=None, export=None, client_project=None, credentials=None):
'''Download (stream) an entire Google BigQuery table locally.
:param str project: The Google BigQuery project that owns the table.
:param str dataset: The dataset t... | def from_table(project, dataset, table, columns=None, condition=None, export=None, client_project=None, credentials=None):
'''Download (stream) an entire Google BigQuery table locally.
:param str project: The Google BigQuery project that owns the table.
:param str dataset: The dataset the table is part of.... |
54,828 | def chop_in_blocks_vector_multi(v, idtodelete):
"""
Splits an array of vectors into two arrays of vectors, where
idtodelete specifies which elements of the vectors go into vb
"""
idtokeep = np.sort(list(set(np.arange(len(v[0]))) - set(idtodelete)))
va = v[:, idtokeep]
vb = v[:, idtodelete]
... | def chop_in_blocks_vector_multi(v, idtodelete):
"""
Splits an array of vectors into two arrays of vectors, where
idtodelete specifies which elements of the vectors go into vb
"""
index_diff_set = set(np.arange(len(v[0]))) - set(idtodelete)
idtokeep = np.sort(list(index_diff_set))
va = v[:, i... |
27,962 | def main(args):
"""
Perform analysis on the given logfiles and store the results in a machine-
readable format.
"""
logger.setup_logger(args.verbose if 'verbose' in args else None)
if len(args.logfile) != 1:
LOG.warning("Only one log file can be processed right now!")
sys.exit(1... | def main(args):
"""
Perform analysis on the given logfiles and store the results in a machine-
readable format.
"""
logger.setup_logger(args.verbose if 'verbose' in args else None)
if len(args.logfile) != 1:
LOG.warning("Only one log file can be processed right now!")
sys.exit(1... |
32,448 | def main() -> None: # pragma: no cover
params = demisto.params()
url = params.get('url')
api_version = params.get('api_version')
token = demisto.params().get('credentials', {}).get('password')
base_url = urljoin(url, f'/api/{api_version}/')
verify_certificate = not demisto.params().get('insecu... | def main() -> None: # pragma: no cover
params = demisto.params()
url = params.get('url')
api_version = params.get('api_version')
token = demisto.params().get('credentials', {}).get('password')
base_url = urljoin(url, f'/api/{api_version}/')
verify_certificate = not demisto.params().get('insecu... |
47,113 | def load_tf_weights_in_gpt_neo(model, config, gpt_neo_checkpoint_path):
"""Load tf checkpoints in a pytorch model"""
try:
import re
import tensorflow as tf
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Plea... | def load_tf_weights_in_gpt_neo(model, config, gpt_neo_checkpoint_path):
"""Load tf checkpoints in a pytorch model"""
try:
import re
import tensorflow as tf
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Plea... |
25,005 | def _is_part_of_assignment_target(node: nodes.NodeNG) -> bool:
"""Check whether use of a variable is happening as part of the left-hand
side of an assignment.
This requires recursive checking, because destructuring assignment can have
arbitrarily nested tuples and lists to unpack.
"""
if isinst... | def _is_part_of_assignment_target(node: nodes.NodeNG) -> bool:
"""Check whether use of a variable is happening as part of the left-hand
side of an assignment.
This requires recursive checking, because destructuring assignment can have
arbitrarily nested tuples and lists to unpack.
"""
if isinst... |
14,540 | def validate_nslr_data(eye_positions: np.ndarray, eye_timestamps: np.ndarray):
def has_nan(arr: np.ndarray):
return np.any(np.isnan(arr))
def is_monotonic(arr: np.ndarray):
return np.all(arr[:-1] <= arr[1:])
def is_unique(arr: np.ndarray):
return arr.shape == np.unique(arr, axis=0)... | def validate_nslr_data(eye_positions: np.ndarray, eye_timestamps: np.ndarray):
def has_nan(arr: np.ndarray):
return np.any(np.isnan(arr))
def is_monotonic(arr: np.ndarray):
return np.all(arr[:-1] <= arr[1:])
def is_unique(arr: np.ndarray):
return arr.shape == np.unique(arr, axis=0)... |
43,366 | def bloch_messiah(cov):
r"""Performs the Bloch-Messiah decomposition of single mode
Gaussian state.
Args:
cov (array): :math:`2\times 2` covariance matrix.
Returns:
tuple: mean photon number, rotation angle, and
squeezing magnitude of the Gaussian state.
"""
det = np.li... | def bloch_messiah(cov):
r"""Performs the Bloch-Messiah decomposition of a single-mode
Gaussian state.
Args:
cov (array): :math:`2\times 2` covariance matrix.
Returns:
tuple: mean photon number, rotation angle, and
squeezing magnitude of the Gaussian state.
"""
det = np.... |
467 | def send_HTML_email(subject, recipient, html_content, text_content=None,
cc=None, email_from=settings.DEFAULT_FROM_EMAIL,
file_attachments=None, bcc=None,
smtp_exception_skip_list=None, messaging_event_id=None):
recipients = list(recipient) if not isinstan... | def send_HTML_email(subject, recipient, html_content, text_content=None,
cc=None, email_from=settings.DEFAULT_FROM_EMAIL,
file_attachments=None, bcc=None,
smtp_exception_skip_list=None, messaging_event_id=None):
recipients = list(recipient) if not isinstan... |
31,584 | def get_ip_neighbors_command(client, args):
ipaddress = args.get('ipaddress')
res = client.get_ip_neighbors(ipaddress=ipaddress)
readable_output = tableToMarkdown(
f"IP neighbors for {ipaddress}:",
[{
"IP": x.get('ip', ''),
"Hostnames": x.get('hostnames', None),
... | def get_ip_neighbors_command(client, args):
ipaddress = args.get('ipaddress')
res = client.get_ip_neighbors(ipaddress=ipaddress)
readable_output = tableToMarkdown(
f"IP neighbors for {ipaddress}:",
[{
"IP": x.get('ip', ''),
"Hostnames": x.get('hostnames', None),
... |
58,118 | def generate_dbotscore(response: Dict) -> List:
"""Creates CommandResult object based on the contents of 'response' argument
and provides DBotScore objects.
Parameters
----------
response : dict
Object returned by ANYRUN API call in 'get_report' function.
Returns
-------
Li... | def generate_dbotscore(response: Dict) -> List:
"""Creates CommandResult object based on the contents of 'response' argument
and provides DBotScore objects.
Parameters
----------
response : dict
Object returned by ANYRUN API call in 'get_report' function.
Returns
-------
Li... |
58,294 | def is_rpm_distribution(d: Distribution) -> bool:
return d in [
Distribution.fedora,
Distribution.mageia,
Distribution.centos,
Distribution.centos_epel,
Distribution.openmandriva,
Distribution.rocky,
Distribution.rocky_epel,
Distribution.alma,
... | def is_rpm_distribution(d: Distribution) -> bool:
return d in (
Distribution.fedora,
Distribution.mageia,
Distribution.centos,
Distribution.centos_epel,
Distribution.openmandriva,
Distribution.rocky,
Distribution.rocky_epel,
Distribution.alma,
... |
30,777 | def fetch_incidents():
last_run = demisto.getLastRun()
last_incidents_ids = []
if last_run:
last_fetch = last_run.get('time')
last_fetch = datetime.strptime(last_fetch, TIME_FORMAT)
last_incidents_ids = last_run.get('last_event_ids')
else:
# first time fetching
l... | def fetch_incidents():
last_run = demisto.getLastRun()
last_incidents_ids = []
if last_run:
last_fetch = last_run.get('time')
last_fetch = datetime.strptime(last_fetch, TIME_FORMAT)
last_incidents_ids = last_run.get('last_event_ids')
else:
# first time fetching
l... |
31,872 | def tc_update_indicator_command():
args = demisto.args()
indicator = args['indicator']
rating = args.get('rating')
confidence = args.get('confidence')
size = args.get('size')
dns_active = args.get('dnsActive')
whois_active = args.get('whoisActive')
false_positive = args.get('falsePositiv... | def tc_update_indicator_command():
args = demisto.args()
indicator = args['indicator']
rating = args.get('rating')
confidence = args.get('confidence')
size = args.get('size')
dns_active = args.get('dnsActive')
whois_active = args.get('whoisActive')
false_positive = args.get('falsePositiv... |
33,283 | def convert_otio_to_svg(timeline, filepath):
renderer = SVGRenderer(2406.0, 1054.0)
image_margin = 10.0
font_size = 15
total_duration = 0
min_time = 0.0
max_time = 0.0
all_clips_data = []
clip_count = -1
for curr_clip in timeline.tracks[0]:
if isinstance(curr_clip, otio.schem... | def convert_otio_to_svg(timeline, filepath):
renderer = SVGRenderer(2406.0, 1054.0)
image_margin = 10.0
font_size = 15
total_duration = 0
min_time = 0.0
max_time = 0.0
all_clips_data = []
clip_count = -1
for curr_clip in timeline.tracks[0].each_clip():
avlbl_start = total... |
5,418 | def build_interface(iface, iface_type, enabled, **settings):
"""
Build an interface script for a network interface.
CLI Example:
.. code-block:: bash
salt '*' ip.build_interface eth0 eth <settings>
"""
iface_type = iface_type.lower()
if iface_type not in _IFACE_TYPES:
_ra... | def build_interface(iface, iface_type, enabled, **settings):
"""
Build an interface script for a network interface.
CLI Example:
.. code-block:: bash
salt '*' ip.build_interface eth0 eth <settings>
"""
iface_type = iface_type.lower()
if iface_type not in _IFACE_TYPES:
_ra... |
31,148 | def main():
SESSION.proxies = handle_proxy()
client = SixgillEnrichClient(
demisto.params()["client_id"], demisto.params()["client_secret"], CHANNEL_CODE, demisto, SESSION, VERIFY
)
command = demisto.command()
demisto.info(f"Command being called is {command}")
commands: Dict[str, Calla... | def main():
SESSION.proxies = handle_proxy()
client = SixgillEnrichClient(
demisto.params()["client_id"], demisto.params()["client_secret"], CHANNEL_CODE, demisto, SESSION, VERIFY
)
command = demisto.command()
demisto.info(f"Command being called is {command}")
commands: Dict[str, Calla... |
35,248 | def real_if_close(x, tol=100):
"""If input is complex with all imaginary parts close to zero, return real
parts.
“Close to zero” is defined as tol * (machine epsilon of the type for x).
.. seealso:: :func:`numpy.real_if_close`
"""
x = cupy.asanyarray(x)
if not issubclass(x.dtype.type, cupy... | def real_if_close(x, tol=100):
"""If input is complex with all imaginary parts close to zero, return real
parts.
“Close to zero” is defined as tol * (machine epsilon of the type for x).
.. seealso:: :func:`numpy.real_if_close`
"""
x = cupy.asanyarray(x)
if not issubclass(x.dtype.type, cupy... |
43,296 | def _chebyshev(one_hot_encoded_row, laplacian, coeffs, deg, max_eig):
"""
This function calculates one column of the Chebyshev approximation of exp(-scale * laplacian) for
all scales.
Args:
one_hot_encoded_row (SparseTensor): a sparse tensor indicating which column (node) to calculate.
... | def _chebyshev(one_hot_encoded_row, laplacian, coeffs, deg, max_eig):
"""
This function calculates one column of the Chebyshev approximation of exp(-scale * laplacian) for
all scales.
Args:
one_hot_encoded_row (SparseTensor): a sparse tensor indicating which column (node) to calculate.
... |
9,890 | def main():
argument_spec = postgres_common_argument_spec()
argument_spec.update(
copy_to=dict(type='path', aliases=['to']),
copy_from=dict(type='path', aliases=['from']),
src=dict(type='str', aliases=['source']),
dst=dict(type='str', aliases=['destination']),
columns=dic... | def main():
argument_spec = postgres_common_argument_spec()
argument_spec.update(
copy_to=dict(type='path', aliases=['to']),
copy_from=dict(type='path', aliases=['from']),
src=dict(type='str', aliases=['source']),
dst=dict(type='str', aliases=['destination']),
columns=dic... |