| code | docstring | source |
|---|---|---|
def create_audit_event(self, code='AUDIT'):
    """Create a generic auditing Event recording changes between saves
    (or the initial data on create).

    Args:
        code (str): The code to set on the new Event.

    Returns:
        Event: A new event with the relevant info inserted into it.
    """
    model_name = self.__class__.__name__
    event = self._meta.event_model(code=code, model=model_name)
    # Attribute the event to the acting user when a request context exists.
    if current_user:
        event.created_by = current_user.get_id()
    for populate in (self.copy_foreign_keys, self.populate_audit_fields):
        populate(event)
    return event
def read(self, length=-1):
    """Read from the FIFO.

    Reads up to ``length`` bytes from the FIFO. If ``length`` is negative
    or omitted, everything currently buffered is returned. If the FIFO is
    empty, an empty string/bytes object is returned.

    Args:
        length: The amount of data to read from the FIFO.

    Returns:
        The data read from the buffer.
    """
    if length < 0 or length >= len(self):
        # Drain everything that is currently buffered.
        data = self.buf[self.pos:]
        self.clear()
        return data
    end = self.pos + length
    data = self.buf[self.pos:end]
    self.pos = end
    self.__discard()
    return data
def run_gpu_or_tpu(func: _F) -> _F:
if tf_inspect.isclass(func):
raise ValueError('`run_gpu_or_tpu` only supports test methods.')
def decorated(self: 'TensorFlowTestCase', *args, **kwargs):
if config.list_physical_devices('GPU'):
return func(self, 'GPU', *args, **kwargs)
if ... | Execute the decorated test only if a physical GPU or TPU is available.
This function is intended to be applied to tests that require the presence
of a physical GPU or TPU. It complies with the following rules:
- If a GPU is available, the test will run on the GPU.
- If a GPU is absent and a TPU is available, the test ... | github-repos |
def FromDBInstance(db_token):
hash_ar = bytearray(binascii.unhexlify(db_token.ContractHash))
hash_ar.reverse()
hash = UInt160(data=hash_ar)
token = NEP5Token(script=None)
token.SetScriptHash(hash)
token.name = db_token.Name
token.symbol = db_token.Symbol
token.decimals = db_token.Decimal... | Get a NEP5Token instance from a database token.
Args:
db_token (neo.Implementations.Wallets.peewee.Models.NEP5Token):
Returns:
NEP5Token: self. | codesearchnet |
def AddArguments(cls, argument_group):
storage_formats = sorted(definitions.STORAGE_FORMATS)
argument_group.add_argument(
'--storage_format', '--storage-format', action='store',
choices=storage_formats, dest='storage_format', type=str,
metavar='FORMAT', default=definitions.DEFAULT_... | Adds command line arguments to an argument group.
This function takes an argument parser or an argument group object and adds
to it all the command line arguments this helper supports.
Args:
argument_group (argparse._ArgumentGroup|argparse.ArgumentParser):
argparse group. | juraj-google-style |
def map_across_full_axis(self, axis, map_func):
num_splits = self._compute_num_partitions()
preprocessed_map_func = self.preprocess_func(map_func)
partitions = self.column_partitions if not axis else self.row_partitions
... | Applies `map_func` to every partition.
Note: This method should be used in the case that `map_func` relies on
some global information about the axis.
Args:
axis: The axis to perform the map across (0 - index, 1 - columns).
map_func: The function to apply.
Returns:
A new BaseFrameManager object, the type of object th... | juraj-google-style |
def add_line(self, start, end, color=(0.5, 0.5, 0.5), width=1):
source = vtk.vtkLineSource()
source.SetPoint1(start)
source.SetPoint2(end)
vertexIDs = vtk.vtkStringArray()
vertexIDs.SetNumberOfComponents(1)
vertexIDs.SetName("VertexIDs")
vertexI... | Adds a line.
Args:
start: Starting coordinates for line.
end: Ending coordinates for line.
color: Color for text as RGB. Defaults to grey.
width: Width of line. Defaults to 1. | juraj-google-style |
def sget_timestamp(self, cycle, step, dataset_number=None):
dataset_number = self._validate_dataset_number(dataset_number)
if dataset_number is None:
self._report_empty_dataset()
return
cycle_index_header = self.headers_normal.cycle_index_txt
timestamp_h... | Returns timestamp for cycle, step.
Convinience function; same as issuing
dfdata[(dfdata[cycle_index_header] == cycle) &
(dfdata[step_index_header] == step)][timestamp_header]
Args:
cycle: cycle number
step: step number
dataset_number: the dataset number (automatic selection if None)
Returns:
pandas.Series | juraj-google-style |
def condense(input_string):
try:
assert isinstance(input_string, basestring)
except AssertionError:
raise TypeError
removed_leading_whitespace = re.sub('>\s+', '>', input_string).strip()
removed_trailing_whitespace = re.sub('\s+<', '<', removed_leading_whitespace).strip()
return... | Trims leadings and trailing whitespace between tags in an html document
Args:
input_string: A (possible unicode) string representing HTML.
Returns:
A (possibly unicode) string representing HTML.
Raises:
TypeError: Raised if input_string isn't a unicode string or string. | juraj-google-style |
def DataRefreshRequired(self, path=None, last=None):
if (last is None):
if (path is None):
raise type_info.TypeValueError("Either 'path' or 'last' must be supplied as an argument.")
fd = aff4.FACTORY.Open(self.root.Add(path), token=self.token)
stat_obj = fd.Get(fd.Schema.STAT)
... | True if we need to update this path from the client.
Args:
path: The path relative to the root to check freshness of.
last: An aff4:last attribute to check freshness of.
At least one of path or last must be supplied.
Returns:
True if the path hasn't been updated in the last
self.max_age_before_refresh seconds, else ... | codesearchnet |
def start(self, **kwargs):
    """Start this container.

    Similar to the ``docker start`` command, but does not support attach
    options.

    Raises:
        :py:class:`docker.errors.APIError`: If the server returns an error.
    """
    api = self.client.api
    return api.start(self.id, **kwargs)
def _make_query_from_terms(self, terms):
match_query = ''
expanded_terms = self._expand_terms(terms)
if expanded_terms['doc']:
match_query = self.backend._and_join(expanded_terms['doc'])
if expanded_terms['keywords']:
if match_query:
ma... | Creates a query for partition from decomposed search terms.
Args:
terms (dict or unicode or string):
Returns:
tuple of (str, dict): First element is str with FTS query, second is parameters of the query. | juraj-google-style |
def get_continent(self, callsign, timestamp=timestamp_now):
    """Return the continent identifier of a callsign.

    Args:
        callsign (str): Amateur Radio callsign.
        timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC).

    Returns:
        str: Continent identifier (e.g. EU: Europe, NA: North America).

    Raises:
        KeyError: No continent found for callsign.
    """
    record = self.get_all(callsign, timestamp)
    return record[const.CONTINENT]
def parse(response_text: str, *, batch: bool, validate_against_schema: bool=True) -> Union[(JSONRPCResponse, List[JSONRPCResponse])]:
if (not response_text):
if batch:
return []
else:
return NotificationResponse()
deserialized = deserialize(response_text)
if validate_... | Parses response text, returning JSONRPCResponse objects.
Args:
response_text: JSON-RPC response string.
batch: If the response_text is an empty string, this determines how to parse.
validate_against_schema: Validate against the json-rpc schema.
Returns:
Either a JSONRPCResponse, or a list of them.
Raises:
json.JSOND... | codesearchnet |
def from_stream(credential_filename):
if credential_filename and os.path.isfile(credential_filename):
try:
return _get_application_default_credential_from_file(
credential_filename)
except (ApplicationDefaultCredentialsError, ValueError) as er... | Create a Credentials object by reading information from a file.
It returns an object of type GoogleCredentials.
Args:
credential_filename: the path to the file from where the
credentials are to be read
Raises:
ApplicationDefaultCredentialsError: raised when the credentials
fail to be retrieved. | juraj-google-style |
def _to_json(self, strip, to_serialize=None):
curr_type = self.__class__
if (to_serialize is None):
to_serialize = copy.copy(self.__dict__)
else:
to_serialize = copy.copy(to_serialize)
for member in strip:
if (member in to_serialize):
del to_serialize[member]
to_s... | Utility function that creates JSON repr. of a Credentials object.
Args:
strip: array, An array of names of members to exclude from the
JSON.
to_serialize: dict, (Optional) The properties for this object
that will be serialized. This allows callers to
modify before serializing.
Returns:
string, a JSON representation o... | codesearchnet |
def insert_tile(self, tile_info):
    """Add or replace an entry in the tile cache.

    Args:
        tile_info (TileInfo): The newly registered tile.
    """
    slot = tile_info.slot
    for index, existing in enumerate(self.registered_tiles):
        if existing.slot == slot:
            # A tile already occupies this slot; overwrite it in place.
            self.registered_tiles[index] = tile_info
            return
    # No tile in that slot yet -- register a new entry.
    self.registered_tiles.append(tile_info)
def _create_or_get_tensor_history_values_cache(self, cache_name, graph, shape=None, dtype=dtypes.float32):
if graph is None:
raise ValueError('Invalid graph.')
if graph not in self._history_value_cache:
self._history_value_cache[graph] = {}
if cache_name not in self._history_value_cache[grap... | Creates a variable as the cache to store historic intermediate tensor values.
Args:
cache_name: Name to be given to the cache (an instance of tf.variable).
graph: Tensorflow graph.
shape: A list of dimensions.
dtype: Data type of created cache.
Returns:
A ref to newly created or existing cache with the given dimension... | github-repos |
def dotd(A, B):
    r"""Diagonal of :math:`\mathrm A\mathrm B^\intercal`.

    If ``A`` is :math:`n\times p` and ``B`` is :math:`p\times n`, it is done
    in :math:`O(pn)`.

    Args:
        A (array_like): Left matrix.
        B (array_like): Right matrix.

    Returns:
        :class:`numpy.ndarray`: Resulting diagonal.
    """
    # Fix: the raw-docstring prefix had been detached as a stray bare `r`
    # statement (a NameError at call time); it is reattached above.
    A = asarray(A, float)
    B = asarray(B, float)
    if A.ndim == 1 and B.ndim == 1:
        # Vector inputs degenerate to an ordinary inner product.
        return dot(A, B)
    # Row-wise products avoid forming the full matrix product.
    out = empty((A.shape[0],), float)
    out[:] = sum(A * B.T, axis=1)
    return out
def parse_uri(self, uri=None):
if not uri:
return rdflib.term.URIRef(self.root)
elif type(uri) == str:
if type(uri) == str and not uri.startswith('http'):
return rdflib.term.URIRef("%s%s" % (self.root, uri))
else:
return rdflib.term.URIRef(uri)
elif type(uri) == rdflib.t... | parses and cleans up possible uri inputs, return instance of rdflib.term.URIRef
Args:
uri (rdflib.term.URIRef,str): input URI
Returns:
rdflib.term.URIRef | juraj-google-style |
def download_and_install(uri, name=DEFAULT_MODULE_NAME, cache=True):
should_use_cache = (cache and exists(name))
if (not should_use_cache):
with _files.tmpdir() as tmpdir:
if uri.startswith('s3:
dst = os.path.join(tmpdir, 'tar_file')
_files.s3_download(uri, ds... | Download, prepare and install a compressed tar file from S3 or local directory as a module.
The SageMaker Python SDK saves the user provided scripts as compressed tar files in S3.
This function downloads this compressed file and, if provided, transforms it
into a module before installing it.
This method is the predec... | codesearchnet |
def generate_sigproc_header(f):
header_string = b''
header_string += to_sigproc_keyword(b'HEADER_START')
for keyword in f.header.keys():
if keyword == b'src_raj':
header_string += to_sigproc_keyword(b'src_raj') + to_sigproc_angle(f.header[b'src_raj'])
elif keyword == b'sr... | Generate a serialzed sigproc header which can be written to disk.
Args:
f (Filterbank object): Filterbank object for which to generate header
Returns:
header_str (str): Serialized string corresponding to header | juraj-google-style |
def get_size(fileobj):
    """Return the size of the file.

    The current position is preserved if no error occurs.

    Args:
        fileobj (fileobj): A seekable file-like object.

    Returns:
        int: The size of the file.

    Raises:
        IOError
    """
    saved = fileobj.tell()
    try:
        # Seek to the end; the resulting offset is the total size.
        fileobj.seek(0, 2)
        return fileobj.tell()
    finally:
        fileobj.seek(saved, 0)
def get_max_recv_data_size(self, target):
    """Return the maximum number of data bytes for receiving.

    The maximum number of data bytes acceptable for receiving with either
    :meth:`send_cmd_recv_rsp` or :meth:`send_rsp_recv_cmd`, reflecting the
    local device capabilities in the mode determined by *target*.

    Raises:
        NotImplementedError: Always; concrete drivers must override this.
    """
    cls = self.__class__
    cname = cls.__module__ + '.' + cls.__name__
    fname = 'get_max_recv_data_size'
    raise NotImplementedError('%s.%s() is required' % (cname, fname))
def scores2recos(self, scores, candidates, rev=False):
    """Build a recommendation list from per-item scores.

    Args:
        scores (numpy array; (n_target_items,)): Scores for the target
            items. A smaller score indicates a more promising item.
        candidates (numpy array; (n_target_items,)): Target items' indices;
            only these are considered as recommendation candidates.
        rev (bool): Sort in descending score order when True.

    Returns:
        tuple: (sorted candidates, sorted scores).
    """
    order = np.argsort(scores)
    if rev:
        # Descending order: largest scores first.
        order = order[::-1]
    return (candidates[order], scores[order])
def convert_outlook_msg(msg_bytes):
if (not is_outlook_msg(msg_bytes)):
raise ValueError('The supplied bytes are not an Outlook MSG file')
orig_dir = os.getcwd()
tmp_dir = tempfile.mkdtemp()
os.chdir(tmp_dir)
with open('sample.msg', 'wb') as msg_file:
msg_file.write(msg_bytes)
tr... | Uses the ``msgconvert`` Perl utility to convert an Outlook MS file to
standard RFC 822 format
Args:
msg_bytes (bytes): the content of the .msg file
Returns:
A RFC 822 string | codesearchnet |
def check_for_wdiff():
    """Check that the ``wdiff`` command can be found on the PATH.

    Raises:
        WdiffNotFoundError: if ``wdiff`` is not found.
    """
    with open(os.devnull, 'wb') as devnull:
        # `which` exits non-zero when the command is absent.
        proc = sub.Popen(['which', CMD_WDIFF], stdout=devnull)
        proc.wait()
    if proc.returncode != 0:
        msg = "the `{}` command can't be found".format(CMD_WDIFF)
        raise WdiffNotFoundError(msg)
def start(self, extra_args='', tag=''):
if self.started:
return
utils.create_dir(self.log_path)
if tag:
tag = (tag + ',')
out_file_name = 'IPerfServer,{},{}{}.log'.format(self.port, tag, len(self.log_files))
full_out_path = os.path.join(self.log_path, out_file_name)
cmd = ('%s %s... | Starts iperf server on specified port.
Args:
extra_args: A string representing extra arguments to start iperf
server with.
tag: Appended to log file name to identify logs from different
iperf runs. | codesearchnet |
def find_container_traits(cls_or_string):
if utils.is_str(cls_or_string):
if not templates.is_instantiation(cls_or_string):
return None
name = templates.name(cls_or_string)
if name.startswith('std::'):
name = name[len('std::'):]
if name.startswith('std::... | Find the container traits type of a declaration.
Args:
cls_or_string (str | declarations.declaration_t): a string
Returns:
declarations.container_traits: a container traits | juraj-google-style |
def self(self) -> 'EFBChat':
    """Mark this chat as yourself.

    Here "yourself" means the user behind the master channel; every
    channel should relate this to the corresponding target.

    Returns:
        EFBChat: This object.
    """
    self.chat_uid = EFBChat.SELF_ID
    self.chat_type = ChatType.User
    self.chat_name = 'You'
    self.chat_alias = None
    return self
def _DisableNetworkManager(self, interfaces, logger):
for interface in interfaces:
interface_config = os.path.join(
self.network_path, 'ifcfg-%s' % interface)
if os.path.exists(interface_config):
self._ModifyInterface(
interface_config, 'DEVICE', interface, replace=Fal... | Disable network manager management on a list of network interfaces.
Args:
interfaces: list of string, the output device names enable.
logger: logger object, used to write to SysLog and serial port. | juraj-google-style |
def all_near_zero_mod(a: Union[(float, complex, Iterable[float], np.ndarray)], period: float, *, atol: float=1e-08) -> bool:
    """Check whether every element is near a multiple of the period.

    Args:
        a: Tensor of elements that could all be near multiples of the period.
        period: The period, e.g. 2 pi when working in radians.
        atol: Absolute tolerance.
    """
    # Shift into [-period/2, period/2) so multiples of the period map to ~0.
    half = period / 2
    residues = (np.asarray(a) + half) % period - half
    return np.all(np.less_equal(np.abs(residues), atol))
def send_state_event(self, event_type, content, state_key=""):
    """Send a state event to the room.

    Args:
        event_type (str): The type of event that you are sending.
        content: An object with the content of the message.
        state_key (str, optional): A unique key to identify the state.
    """
    api = self.client.api
    return api.send_state_event(self.room_id, event_type, content, state_key)
def altcode(msg):
if df(msg) not in [0, 4, 16, 20]:
raise RuntimeError("Message must be Downlink Format 0, 4, 16, or 20.")
mbin = hex2bin(msg)
mbit = mbin[25]
qbit = mbin[27]
if mbit == '0':
if qbit == '1':
vbin = mbin[19:25] + mbin[26] ... | Computes the altitude from DF4 or DF20 message, bit 20-32.
credit: @fbyrkjeland
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
int: altitude in ft | juraj-google-style |
def to_dict(mapreduce_yaml):
all_configs = []
for config in mapreduce_yaml.mapreduce:
out = {
"name": config.name,
"mapper_input_reader": config.mapper.input_reader,
"mapper_handler": config.mapper.handler,
}
if config.mapper.params_validator:
out["ma... | Converts a MapReduceYaml file into a JSON-encodable dictionary.
For use in user-visible UI and internal methods for interfacing with
user code (like param validation). as a list
Args:
mapreduce_yaml: The Pyton representation of the mapreduce.yaml document.
Returns:
A list of configuration dictionaries. | juraj-google-style |
def _get_colors(n):
    """Return n unique, "evenly" spaced hex colors for project backgrounds.

    Args:
        n (int): The number of unique colors wanted.

    Returns:
        list of str: The colors in hex form.
    """
    import matplotlib.pyplot as plt
    from matplotlib.colors import rgb2hex
    from numpy import linspace

    cmap = plt.get_cmap('nipy_spectral')
    # Sample the colormap away from its extremes for better contrast.
    positions = linspace(0.05, 0.95, n)
    return [rgb2hex(cmap(pos)) for pos in positions]
def format_unitary(mat, decimals=None):
    """Format a unitary coming from the backend for the Qiskit user.

    Args:
        mat (list[list]): A list of lists of [re, im] complex numbers.
        decimals (int): The number of decimals for rounding; if None,
            no rounding is done.

    Returns:
        list[list[complex]]: A matrix of complex numbers.
    """
    dim = len(mat)
    out = np.zeros((dim, dim), dtype=complex)
    # Each row is formatted exactly like a statevector.
    for row_index, row in enumerate(mat):
        out[row_index] = format_statevector(row, decimals)
    return out
def run(self):
    """Run the dynamically generated test suite.

    Simply returns the test-suite class created during initialization; a
    test runner (e.g. ``unittest.main()``) can then discover and run the
    tests within it.

    Returns:
        The dynamically created ``unittest.TestCase`` subclass.
    """
    suite = self._test_suite
    return suite
def _GetConfigValue(self, config_parser, section_name, value_name):
try:
return config_parser.get(section_name, value_name)
except configparser.NoOptionError:
return None | Retrieves a value from the config parser.
Args:
config_parser (ConfigParser): configuration parser.
section_name (str): name of the section that contains the value.
value_name (str): name of the value.
Returns:
object: configuration value or None if the value does not exists. | codesearchnet |
def copy_pkg(self, filename, _):
    """Copy a package to the repo's ``Packages`` subdirectory.

    Args:
        filename: Path of the file to copy.
        _: Ignored; kept for compatibility with JDS repos.
    """
    destination = os.path.join(
        self.connection['mount_point'], 'Packages', os.path.basename(filename))
    self._copy(filename, destination)
def run(data, base_logdir, session_id, group_id, hparams):
model = model_fn(hparams=hparams, seed=session_id)
logdir = os.path.join(base_logdir, session_id)
callback = tf.keras.callbacks.TensorBoard(
logdir,
update_freq=flags.FLAGS.summary_freq,
profile_batch=0,
)
hparams_callback = hp... | Run a training/validation session.
Flags must have been parsed for this function to behave.
Args:
data: The data as loaded by `prepare_data()`.
base_logdir: The top-level logdir to which to write summary data.
session_id: A unique string ID for this session.
group_id: The string ID of the session group that includes ... | juraj-google-style |
def predict_features(self, df_features, df_target, idx=0, C=.1, **kwargs):
    """For one variable, predict its neighbouring nodes.

    Args:
        df_features (pandas.DataFrame): Feature columns.
        df_target (pandas.Series): Target variable.
        idx (int): (optional) for printing purposes.
        C (float): Penalty parameter of the error term.
        kwargs (dict): Additional options for algorithms.

    Returns:
        Scores of each feature relative to the target.
    """
    model = LinearSVR(C=C)
    model.fit(df_features.values, df_target.values)
    return np.abs(model.coef_)
def _add_step(self, step):
self._closed()
self.has_workflow_step = self.has_workflow_step or step.is_workflow
self.wf_steps[step.name_in_workflow] = step | Add a step to the workflow.
Args:
step (Step): a step from the steps library. | juraj-google-style |
def ensure_app_data_dir(appname, *args):
    """Call :func:`get_app_data_dir` and ensure the directory exists.

    Args:
        appname (str): The name of the application.
        *args: Any other subdirectories may be specified.

    SeeAlso:
        get_app_data_dir

    Example:
        >>> import ubelt as ub
        >>> dpath = ub.ensure_app_data_dir('ubelt')
        >>> assert exists(dpath)
    """
    from ubelt import util_path
    target = get_app_data_dir(appname, *args)
    util_path.ensuredir(target)
    return target
def GetFailedTasks(self):
    """Retrieve all failed tasks.

    Failed tasks are tasks that were abandoned and have no retry task once
    the foreman is done processing.

    Returns:
        list[Task]: tasks.
    """
    with self._lock:
        failed = []
        for task in self._tasks_abandoned.values():
            if not task.has_retry:
                failed.append(task)
        return failed
def decorate(fn):
if (not isfunction(fn)):
raise TypeError('paco: fn must be a callable object')
@functools.wraps(fn)
def decorator(*args, **kw):
for arg in args:
if iscoro_or_corofunc(arg):
return fn(*args, **kw)
if (len(args) and (args[0] is None)):
... | Generic decorator for coroutines helper functions allowing
multiple variadic initialization arguments.
This function is intended to be used internally.
Arguments:
fn (function): target function to decorate.
Raises:
TypeError: if function or coroutine function is not provided.
Returns:
function: decorated function. | codesearchnet |
def easeInOutCubic(n):
    """Cubic tween: accelerate, reach the midpoint, then decelerate.

    Args:
        n (float): The time progress, starting at 0.0 and ending at 1.0.

    Returns:
        float: The line progress, starting at 0.0 and ending at 1.0.
        Suitable for passing to getPointOnLine().
    """
    _checkRange(n)
    t = 2 * n
    if t < 1:
        # First half: plain cubic acceleration.
        return 0.5 * t ** 3
    t -= 2
    # Second half: mirrored cubic deceleration.
    return 0.5 * (t ** 3 + 2)
def recipe_fred_series_to_bigquery(config, auth, fred_api_key, fred_series_id, fred_units, fred_frequency, fred_aggregation_method, project, dataset):
fred(config, {'auth': auth, 'api_key': fred_api_key, 'frequency': fred_frequency, 'series': [{'series_id': fred_series_id, 'units': fred_units, 'aggregation_method':... | Download federal reserve series.
Args:
auth (authentication) - Credentials used for writing data.
fred_api_key (string) - 32 character alpha-numeric lowercase string.
fred_series_id (string) - Series ID to pull data from.
fred_units (choice) - A key that indicates a data value transformation.
fred_frequency (choice) -... | github-repos |
def show_constant(val: types.BaseValue) -> str:
    """Pretty-print a value if it is a constant.

    Recurses into a constant, printing the underlying Python value for
    constants and just using "..." for everything else (e.g., Variables).
    Useful for error messages that show exact values without leaking
    implementation details.
    """
    def render(v):
        if isinstance(v, types.PythonConstant):
            return v.str_of_constant(render)
        return '...'
    return render(val)
def get_player_stats(self, player_key, board_key):
    """Call the Player Stats API.

    Args:
        player_key: Key of the player.
        board_key: Key of the board.

    Returns:
        json data
    """
    parts = (self.api_path, 'player/', player_key, '/league/', board_key, '/stats/')
    player_stats_url = ''.join(parts)
    return self.get_response(player_stats_url)
class XLNetPoolerEndLogits(nn.Module):
def __init__(self, config: XLNetConfig):
super().__init__()
self.dense_0 = nn.Linear(config.hidden_size * 2, config.hidden_size)
self.activation = nn.Tanh()
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
se... | Compute SQuAD end logits from sequence hidden states.
Args:
config ([`XLNetConfig`]):
The config used by the model, will be used to grab the `hidden_size` of the model and the `layer_norm_eps`
to use. | github-repos |
def update_ref(profile, ref, sha):
    """Point a ref to a new SHA.

    Args:
        profile: A profile generated from
            ``simplygithub.authentication.profile``; tells this module the
            ``repo`` to connect to and the ``token`` to connect with.
        ref: The ref to update, e.g. ``heads/my-feature-branch``.
        sha: The SHA of the commit to point the ref to.
    """
    payload = {"sha": sha}
    resource = "/refs/" + ref
    response = api.patch_request(profile, resource, payload)
    return prepare(response)
def _format_param_val(self, param_val):
if isinstance(param_val, list):
return ' '.join((str(x) for x in param_val))
else:
return str(param_val) | Internal method to format values in the packmol parameter dictionaries
Args:
param_val:
Some object to turn into String
Returns:
string representation of the object | codesearchnet |
def Eq(left: str, right: str) -> BooleanTerm:
    """Create an equality or its simplified equivalent.

    Ensures the lexicographically larger operand ends up first (for
    ``left == right`` it just returns TRUE).

    Args:
        left: Left side of the equality; may end up on the right after
            sorting.
        right: Right side of the equality; may end up on the left.
    """
    if left == right:
        return TRUE
    # Normalize operand order so equal terms compare identical.
    lo, hi = sorted((left, right))
    return _Eq(hi, lo)
def get_attrs(obj: object) -> dict[str, object]:
    """Parse all attributes from an object.

    Limitation: descriptors are resolved, so all properties are executed
    (some can have side effects or take a long time to compute).

    Args:
        obj: Object to inspect.

    Returns:
        Dict mapping attribute name to value.
    """
    attrs: dict[str, object] = {}
    # Both listings are scanned; the first occurrence of a name wins.
    for name in dir(obj) + object.__dir__(obj):
        if name in attrs:
            continue
        try:
            value = getattr(obj, name)
        except Exception as exc:
            # Wrap so one failing descriptor doesn't abort the whole scan.
            value = ExceptionWrapper(exc)
        attrs[name] = value
    return attrs
def grating_coupler_period(wavelength, n_eff, n_clad, incidence_angle_deg, diffration_order=1):
    """Calculate the period needed for a grating coupler.

    Args:
        wavelength (float): The target wavelength for the grating coupler.
        n_eff (float): The effective index of the mode of a waveguide with
            the width of the grating coupler.
        n_clad (float): The refractive index of the cladding.
        incidence_angle_deg (float): The incidence angle in degrees.
        diffration_order (int): The diffraction order to phase-match.

    Returns:
        float: The grating period.
    """
    free_space_k = 2.0 * np.pi / wavelength
    propagation_const = n_eff.real * free_space_k
    # Phase matching: beta - k0 * n_inc * sin(theta) = m * 2*pi / period
    transverse = free_space_k * n_clad * np.sin(np.radians(incidence_angle_deg))
    return 2.0 * np.pi * diffration_order / (propagation_const - transverse)
def add_spectrum(self, label, spectrum, color=None):
    """Add a Spectrum for plotting.

    Args:
        label (str): Label for the Spectrum. Must be unique.
        spectrum: Spectrum object.
        color (str): Passed on to matplotlib, e.g. "k--" indicates a
            dashed black line. If None, a color is chosen from the
            default color cycle.
    """
    self._spectra[label] = spectrum
    if not color:
        # Pick the next palette entry based on how many spectra are stored.
        color = self.colors_cycle[len(self._spectra) % len(self.colors_cycle)]
    self.colors.append(color)
def files(self, request, id):
    """Return the files in a gist.

    Arguments:
        request: An initial request object.
        id: The gist identifier.

    Returns:
        The gist's files.
    """
    gist_data = self.send(request, id).json()
    return gist_data['files']
def ReadFromDirectory(self, artifacts_reader, path, extension='yaml'):
    """Read artifact definitions into the registry from files in a directory.

    This function does not recurse into subdirectories.

    Args:
        artifacts_reader (ArtifactsReader): an artifacts reader.
        path (str): path of the directory to read from.
        extension (Optional[str]): extension of the filenames to read.

    Raises:
        KeyError: if a duplicate definition is registered.
    """
    definitions = artifacts_reader.ReadDirectory(path, extension=extension)
    for definition in definitions:
        self.RegisterDefinition(definition)
def install(self, updates):
if (updates.count() == 0):
ret = {'Success': False, 'Updates': 'Nothing to install'}
return ret
installer = self._session.CreateUpdateInstaller()
self._session.ClientApplicationID = 'Salt: Install Update'
with salt.utils.winapi.Com():
install_list = wi... | Install the updates passed in the updates collection. Load the updates
collection using the ``search`` or ``available`` functions. If the
updates need to be downloaded, use the ``download`` function.
Args:
updates (Updates): An instance of the Updates class containing a
the updates to be installed.
Returns:
dict: A ... | codesearchnet |
async def get(self, request):
ticket = await self.get_ticket(request)
if ticket is None:
return None
try:
now = time.time()
fields = self._ticket.validate(ticket, self._get_ip(request), now)
if (self._reissue_ti... | Gets the user_id for the request.
Gets the ticket for the request using the get_ticket() function, and
authenticates the ticket.
Args:
request: aiohttp Request object.
Returns:
The userid for the request, or None if the ticket is not
authenticated. | juraj-google-style |
def _encode_required_fields(self, builder: expressions.Builder) -> List[validation_pb2.SqlRequirement]:
if not isinstance(builder.return_type, _fhir_path_data_types.StructureDataType):
return []
if builder.return_type.element_type == 'Extension':
return []
encoded_requirements: List[validati... | Returns `SqlRequirement`s for all required fields in `ElementDefinition`.
Args:
builder: The builder containing the element to encode required fields for.
Returns:
A list of `SqlRequirement`s representing requirements generated from
required fields on the element. | github-repos |
def _get_userprofile_from_registry(user, sid):
profile_dir = __utils__['reg.read_value']('HKEY_LOCAL_MACHINE', 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\{0}'.format(sid), 'ProfileImagePath')['vdata']
log.debug('user %s with sid=%s profile is located at "%s"', user, sid, profile_dir)
ret... | In case net user doesn't return the userprofile we can get it from the
registry
Args:
user (str): The user name, used in debug message
sid (str): The sid to lookup in the registry
Returns:
str: Profile directory | codesearchnet |
def _code_search(query, github_user=None):
github_client = temple.utils.GithubClient()
headers = {'Accept': 'application/vnd.github.v3.text-match+json'}
resp = github_client.get('/search/code', params={'q': query, 'per_page': 100}, headers=headers)
if ((resp.status_code == requests.codes.unprocessable_e... | Performs a Github API code search
Args:
query (str): The query sent to Github's code search
github_user (str, optional): The Github user being searched in the query string
Returns:
dict: A dictionary of repository information keyed on the git SSH url
Raises:
`InvalidGithubUserError`: When ``github_user`` is invalid | codesearchnet |
def _prefix_from_ip_int(self, ip_int):
trailing_zeroes = _count_righthand_zero_bits(ip_int,
self._max_prefixlen)
prefixlen = self._max_prefixlen - trailing_zeroes
leading_ones = ip_int >> trailing_zeroes
all_ones = (1 << prefi... | Return prefix length from the bitwise netmask.
Args:
ip_int: An integer, the netmask in expanded bitwise format
Returns:
An integer, the prefix length.
Raises:
ValueError: If the input intermingles zeroes & ones | juraj-google-style |
def __init__(self, connect_func, max_size=10):
self.connect_func = connect_func
self.limiter = threading.BoundedSemaphore(max_size)
self.idle_conns = []
self.closed = False | Creates a ConnectionPool.
Args:
connect_func: A closure which returns a new connection to the underlying
database, i.e. a MySQLdb.Connection. Should raise or block if the
database is unavailable.
max_size: The maximum number of simultaneous connections. | juraj-google-style |
def top_kth_iterative(x, k):
def next_x(cur_x, _):
top_x = tf.reduce_max(cur_x, axis=-1, keep_dims=True)
return cur_x * to_float(cur_x < top_x)
fin_x = tf.foldl(next_x, tf.range(k - 1), initializer=tf.stop_gradient(x),
parallel_iterations=2, back_prop=Fals... | Compute the k-th top element of x on the last axis iteratively.
This assumes values in x are non-negative, rescale if needed.
It is often faster than tf.nn.top_k for small k, especially if k < 30.
Note: this does not support back-propagation, it stops gradients!
Args:
x: a Tensor of non-negative numbers of type float... | juraj-google-style |
def bind(self, devices_to_bind):
if self.entity_api_key == "":
return {'status': 'failure', 'response': 'No API key found in request'}
url = self.base_url + "api/0.1.0/subscribe/bind"
headers = {"apikey": self.entity_api_key}
data = {
"exchange": "amq.top... | This function allows an entity to list the devices to subscribe for data. This function must be called
at least once, before doing a subscribe. Subscribe function will listen to devices that are bound here.
Args:
devices_to_bind (list): an array of devices to listen to.
Example bind(["test100","testDemo"]) | juraj-google-style |
def run(self, dag):
if self.layout is None:
if self.property_set["layout"]:
self.layout = self.property_set["layout"]
else:
self.layout = Layout.generate_trivial_layout(*dag.qregs.values())
self.property_set['is_swap_mapped'] = True
... | If `dag` is mapped to `coupling_map`, the property
`is_swap_mapped` is set to True (or to False otherwise).
Args:
dag (DAGCircuit): DAG to map. | juraj-google-style |
def Normalize(self, fraction=1.0):
if self.log:
raise ValueError('Pmf is under a log transform')
total = self.Total()
if (total == 0.0):
raise ValueError('total probability is zero.')
logging.warning('Normalize: total probability is zero.')
return total
factor = (float(fr... | Normalizes this PMF so the sum of all probs is fraction.
Args:
fraction: what the total should be after normalization
Returns: the total probability before normalizing | codesearchnet |
def _get_bucket_attribute(bucket, query_param, xml_response_tag, retry_params=None, _account_id=None):
api = storage_api._get_storage_api(retry_params=retry_params, account_id=_account_id)
common.validate_bucket_path(bucket)
(status, headers, content) = api.get_bucket(('%s?%s' % (bucket, query_param)))
... | Helper method to request a bucket parameter and parse the response.
Args:
bucket: A Google Cloud Storage bucket of form '/bucket'.
query_param: The query parameter to include in the get bucket request.
xml_response_tag: The expected tag in the xml response.
retry_params: An api_utils.RetryParams for this call to GCS. ... | codesearchnet |
def create_sequence_pretty_tensor(sequence_input, shape=None, save_state=True):
inputs = prettytensor.wrap_sequence(sequence_input.inputs, tensor_shape=shape)
targets = prettytensor.wrap_sequence(sequence_input.targets)
if save_state:
bookkeeper.set_recurrent_state_saver(sequence_input)
return inputs, ta... | Creates a PrettyTensor object for the given sequence.
The first dimension is treated as a time-dimension * batch and a default is
set for `unroll` and `state_saver`.
TODO(eiderman): Remove shape.
Args:
sequence_input: A SequenceInput or StateSavingSequenceInput
shape: The shape of each item in the sequence (includin... | juraj-google-style |
def pow(self, other, axis="columns", level=None, fill_value=None):
return self._binary_op(
"pow", other, axis=axis, level=level, fill_value=fill_value
) | Pow this DataFrame against another DataFrame/Series/scalar.
Args:
other: The object to use to apply the pow against this.
axis: The axis to pow over.
level: The Multilevel index level to apply pow over.
fill_value: The value to fill NaNs with.
Returns:
A new DataFrame with the Pow applied. | juraj-google-style |
def read(self, key):
key = quote(key, safe='~')
url = '/internal/playbooks/keyValue/{}'.format(key)
r = self.tcex.session.get(url)
data = r.content
if ((data is not None) and (not isinstance(data, str))):
data = str(r.content, 'utf-8')
return data | Read data from remote KV store for the provided key.
Args:
key (string): The key to read in remote KV store.
Returns:
(any): The response data from the remote KV store. | codesearchnet |
def __init__(self, prefs, g, kappa=2.0, omega=0.5, beta=1.0, mu=1.0,
freeparams=['kappa', 'omega', 'beta', 'mu']):
_checkParam('g', g, self.PARAMLIMITS, self.PARAMTYPES)
assert abs(1 - g.sum()) <= ALMOST_ZERO, "g doesn't sum to 1"
self.g = g.copy()
self.g /= self.g.... | Initialize an `ExpCM_empirical_phi` object.
Args:
`prefs`, `kappa`, `omega`, `beta`, `mu`, `freeparams`
Same meaning as for an `ExpCM`
`g`
Has the meaning described in the main class doc string. | juraj-google-style |
def get_section_header(self, section):
self._ensure_section_headers_loaded()
if (type(section) is int):
return self._section_headers_by_index[section]
else:
return self._section_headers_by_name[section] | Get a specific section header by index or name.
Args:
section(int or str): The index or name of the section header to return.
Returns:
:class:`~ELF.SectionHeader`: The section header.
Raises:
KeyError: The requested section header does not exist. | codesearchnet |
def export_warnings(self, export_file):
warn_filepath = op.dirname(export_file)
warn_filename = op.splitext(op.basename(export_file))[0]
self._add_entry(templates.EXPORT_WARNINGS.format(warnings_export_path=warn_filepath, warnings_export_file=warn_filename)) | Append an export warnings entry to the journal.
This instructs Revit to export warnings from the opened model.
Currently Revit will stop journal execution if the model does not
have any warnings and the export warnings UI button is disabled.
Args:
export_file (str): full path of the ouput html file | codesearchnet |
def rank_dated_files(pattern, dir, descending=True):
files = glob.glob(op.join(dir, pattern))
return sorted(files, reverse=descending) | Search a directory for files that match a pattern. Return an ordered list of these files by filename.
Args:
pattern: The glob pattern to search for.
dir: Path to directory where the files will be searched for.
descending: Default True, will sort alphabetically by descending order.
Returns:
list: Rank-ordered list by ... | codesearchnet |
def nextindx(self):
indx = 0
with s_lmdbslab.Scan(self.slab, self.db) as curs:
last_key = curs.last_key()
if (last_key is not None):
indx = (s_common.int64un(last_key) + 1)
return indx | Determine the next insert offset according to storage.
Returns:
int: The next insert offset. | codesearchnet |
def _preprocess_numpy_input(x, data_format, mode):
if not issubclass(x.dtype.type, np.floating):
x = x.astype(backend.floatx(), copy=False)
if mode == 'tf':
x /= 127.5
x -= 1.0
return x
elif mode == 'torch':
x /= 255.0
mean = [0.485, 0.456, 0.406]
std ... | Preprocesses a NumPy array encoding a batch of images.
Args:
x: Input array, 3D or 4D.
data_format: Data format of the image array.
mode: One of "caffe", "tf" or "torch".
- caffe: will convert the images from RGB to BGR,
then will zero-center each color channel with
respect to the ImageNet dataset,
without scaling.
- ... | github-repos |
async def post(self):
logging.debug('\n\n[+] -- Account debugging. ')
if settings.SIGNATURE_VERIFICATION:
super().verify()
try:
data = json.loads(self.request.body)
except:
self.set_status(400)
self.write({'error': 400, 'reason': 'Unexpected data format. JSON required'})
... | Creates new account
Accepts:
- message (signed dict):
- "device_id" - str
- "email" - str
- "phone" - str
- "public_key" - str
- "signature" - str
Returns:
dictionary with following fields:
- "device_id" - str
- "phone" - str
- "public_key" - str
- "count" - int ( wallets amount )
- "level" - int (2 by default)
- "n... | codesearchnet |
def set_membership(self, room_id, user_id, membership, reason="", profile=None,
timestamp=None):
if profile is None:
profile = {}
body = {
"membership": membership,
"reason": reason
}
if 'displayname' in profile:
... | Perform PUT /rooms/$room_id/state/m.room.member/$user_id
Args:
room_id (str): The room ID
user_id (str): The user ID
membership (str): New membership value
reason (str): The reason
timestamp (int): Set origin_server_ts (For application services only) | juraj-google-style |
def __sid_to_username(sid):
if sid is None or sid == '':
return ''
try:
sid_bin = win32security.GetBinarySid(sid)
except pywintypes.error as exc:
raise ValueError(
'pkg: Software owned by {0} is not valid: [{1}] {2}'.format(sid... | Provided with a valid Windows Security Identifier (SID) and returns a Username
Args:
sid (str): Security Identifier (SID).
Returns:
str: Username in the format of username@realm or username@computer. | juraj-google-style |
def set_hasher(self, hash, rounds=None):
hash = hash.replace('-', '_')
if (hash not in VALID_HASHERS):
raise WrongHashAlgorithm(WRONG_HASH_MESSAGE)
hasher = getattr(ph, hash)
utils.test_hasher(hasher)
default_rounds = getattr(hasher, 'default_rounds', 1)
min_rounds = getattr(hasher, 'min... | Updates the has algorithm and, optionally, the number of rounds
to use.
Raises:
`~WrongHashAlgorithm` if new algorithm isn't one of the three
recomended options. | codesearchnet |
def _open_script_interface(self, connection_id, callback):
try:
context = self.connections.get_context(connection_id)
except ArgumentError:
callback(connection_id, self.id, False, "Could not find connection information")
return
success = HighSpeedCh... | Enable script streaming interface for this IOTile device
Args:
connection_id (int): The unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason) | juraj-google-style |
def _get_values(self, data_blob, dtype_enum, shape_string):
buf = np.frombuffer(data_blob, dtype=tf.DType(dtype_enum).as_numpy_dtype)
return buf.reshape([int(i) for i in shape_string.split(',')]).tolist() | Obtains values for histogram data given blob and dtype enum.
Args:
data_blob: The blob obtained from the database.
dtype_enum: The enum representing the dtype.
shape_string: A comma-separated string of numbers denoting shape.
Returns:
The histogram values as a list served to the frontend. | juraj-google-style |
def send_log_messages(self, messages: List[LogMessage]) -> None:
pass | Sends multiple log messages to be handled.
Args:
* messages: list of LogMessage dictionaries
Returns:
* None | github-repos |
def resolve_backend_name(name, backends, deprecated, aliased):
available = [backend.name() for backend in backends]
resolved_name = deprecated.get(name, aliased.get(name, name))
if isinstance(resolved_name, list):
resolved_name = next((b for b in resolved_name if (b in available)), '')
if (resol... | Resolve backend name from a deprecated name or an alias.
A group will be resolved in order of member priorities, depending on
availability.
Args:
name (str): name of backend to resolve
backends (list[BaseBackend]): list of available backends.
deprecated (dict[str: str]): dict of deprecated names.
aliased (dict[str: l... | codesearchnet |
def _reconstruct_sequence_inputs(op_def, inputs, attrs) -> list[Union[tensor_lib.Tensor, list[tensor_lib.Tensor]]]:
grouped_inputs = []
i = 0
for input_arg in op_def.input_arg:
if input_arg.number_attr:
input_len = attrs[input_arg.number_attr].i
is_sequence = True
eli... | Regroups a flat list of input tensors into scalar and sequence inputs.
Args:
op_def: The `op_def_pb2.OpDef` (for knowing the input types)
inputs: a list of input `Tensor`s to the op.
attrs: mapping from attr name to `attr_value_pb2.AttrValue` (these define
how long each sequence is)
Returns:
A list of `Tensor`s (corr... | github-repos |
def get_create_agent(agent_kwargs):
def create_agent(sess, environment, summary_writer=None):
'Creates a DQN agent.\n\n Simplified version of `dopamine.discrete_domains.train.create_agent`\n\n Args:\n sess: a session\n environment: an environment\n summary_writer: a summary writer.\n\n... | Factory for dopamine agent initialization.
Args:
agent_kwargs: dict of BatchDQNAgent parameters
Returns:
Function(sess, environment, summary_writer) -> BatchDQNAgent instance. | codesearchnet |
def with_attributes(name, checkpointable_objects=None, functions=None, copy_from=None):
checkpointable_objects = checkpointable_objects or []
functions = functions or []
if copy_from is not None:
for cls in copy_from:
checkpointable_objects.extend(cls.all_checkpointable_objects)
... | Creates a subclass with all attributes as specified in the arguments.
Args:
name: Name of subclass
checkpointable_objects: List of checkpointable objects to be serialized
in the SavedModel.
functions: List of functions to be serialized in the SavedModel.
copy_from: List of other SerializedAttributes subclasses. The re... | github-repos |
def _ParseRecordLogline(self, parser_mediator, structure):
date_time = dfdatetime_time_elements.TimeElementsInMilliseconds()
try:
datetime_iso8601 = self._GetISO8601String(structure.date_time)
date_time.CopyFromStringISO8601(datetime_iso8601)
except ValueError:
parser_mediator.Produc... | Parses a logline record structure and produces events.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
structure (pyparsing.ParseResults): structure of tokens derived from
a line of a text file. | juraj-google-style |
def _shard_num_args(self, constant_dict: Dict[(str, Any)]=None) -> List[Dict[(str, Any)]]:
args = []
for shard_num in range(self._num_shards):
append_dict = (dict(constant_dict) if constant_dict else {})
append_dict['shard_num'] = shard_num
append_dict['num_shards'] = self._num_shards
... | Helper that returns a list of dicts including a num_shard entry.
The dict for each entry also includes shared_mem_dict, the number of
shards, the number of shard qubits, and the supplied constant dict.
Args:
constant_dict: Dictionary that will be updated to every element of
the returned list of dictionaries.
Returns... | codesearchnet |
def _async_open(self, session_id, proto_version):
try:
(yield self.application_context.create_session_if_needed(session_id, self.request))
session = self.application_context.get_session(session_id)
protocol = Protocol(proto_version)
self.receiver = Receiver(protocol)
log.debu... | Perform the specific steps needed to open a connection to a Bokeh session
Specifically, this method coordinates:
* Getting a session for a session ID (creating a new one if needed)
* Creating a protocol receiver and hander
* Opening a new ServerConnection and sending it an ACK
Args:
session_id (str) :
A session ID t... | codesearchnet |
def _concatenate_inner(self, direction):
tmp_bucket = []
source_chunks = (self if direction else self[::(- 1)])
target_chunks = ChunkList()
for chunk in source_chunks:
if ((chunk.dependency == direction) or ((direction is False) and chunk.is_space())):
tmp_bucket.append(chunk)
... | Concatenates chunks based on each chunk's dependency.
Args:
direction (bool): Direction of concatenation process. True for forward. | codesearchnet |
def get_strategy() -> 'StrategyBase':
return _get_per_thread_mode().strategy | Returns the current `tf.distribute.Strategy` object.
Typically only used in a cross-replica context:
```
if tf.distribute.in_cross_replica_context():
strategy = tf.distribute.get_strategy()
...
```
Returns:
A `tf.distribute.Strategy` object. Inside a `with strategy.scope()` block,
it returns `strategy`, otherwise it... | github-repos |
def _transpile_circuit(circuit_config_tuple):
(circuit, transpile_config) = circuit_config_tuple
if transpile_config.pass_manager:
pass_manager = transpile_config.pass_manager
elif transpile_config.coupling_map:
pass_manager = default_pass_manager(transpile_config.basis_gates, transpile_conf... | Select a PassManager and run a single circuit through it.
Args:
circuit_config_tuple (tuple):
circuit (QuantumCircuit): circuit to transpile
transpile_config (TranspileConfig): configuration dictating how to transpile
Returns:
QuantumCircuit: transpiled circuit | codesearchnet |
def _avro_rows(block, avro_schema):
blockio = six.BytesIO(block.avro_rows.serialized_binary_rows)
while True:
try:
(yield fastavro.schemaless_reader(blockio, avro_schema))
except StopIteration:
break | Parse all rows in a stream block.
Args:
block ( \
~google.cloud.bigquery_storage_v1beta1.types.ReadRowsResponse \
):
A block containing Avro bytes to parse into rows.
avro_schema (fastavro.schema):
A parsed Avro schema, used to deserialized the bytes in the
block.
Returns:
Iterable[Mapping]:
A sequence of rows, repre... | codesearchnet |
def can_fetch(self, request: Request, file=None) -> bool:
try:
return self.can_fetch_pool(request)
except NotInPoolError:
pass
(yield from self.fetch_robots_txt(request, file=file))
return self.can_fetch_pool(request) | Return whether the request can fetched.
Args:
request: Request.
file: A file object to where the robots.txt contents are written.
Coroutine. | codesearchnet |
def append(
self,
moment_or_operation_tree: Union[ops.Moment, ops.OP_TREE],
strategy: InsertStrategy = InsertStrategy.EARLIEST):
self.insert(len(self._moments), moment_or_operation_tree, strategy) | Appends operations onto the end of the circuit.
Moments within the operation tree are appended intact.
Args:
moment_or_operation_tree: The moment or operation tree to append.
strategy: How to pick/create the moment to put operations into. | juraj-google-style |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.