Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
2,000 | def yesterday(date=None):
if not date:
return _date - datetime.timedelta(days=1)
else:
current_date = parse(date)
return current_date - datetime.timedelta(days=1) | yesterday once more |
2,001 | def safe_size_check(checked_path, error_detail, max_bytes=500000000):
actual_size = 0
for dirpath, dirnames, filenames in os.walk(checked_path):
for f in filenames:
fp = os.path.join(dirpath, f)
actual_size += os.path.getsize(fp)
assert actual_size <= max_bytes, "Path {... | Determines if a particular path is larger than expected. Useful before any recursive remove. |
2,002 | def valuecounter(table, *field, **kwargs):
missing = kwargs.get(, None)
counter = Counter()
for v in values(table, field, missing=missing):
try:
counter[v] += 1
except IndexError:
pass
return counter | Find distinct values for the given field and count the number of
occurrences. Returns a :class:`dict` mapping values to counts. E.g.::
>>> import petl as etl
>>> table = [['foo', 'bar'],
... ['a', True],
... ['b'],
... ['b', True],
... ... |
2,003 | def groups_roles(self, room_id=None, room_name=None, **kwargs):
if room_id:
return self.__call_api_get(, roomId=room_id, kwargs=kwargs)
elif room_name:
return self.__call_api_get(, roomName=room_name, kwargs=kwargs)
else:
raise RocketMissingParamExcep... | Lists all user’s roles in the private group. |
2,004 | def register(im1, im2, params, exact_params=False, verbose=1):
tempdir = get_tempdir()
_clear_temp_dir()
refIm = im1
if isinstance(im1, (tuple,list)):
refIm = im1[0]
if not exact_params:
params = _compile_params(params, refIm)
if isinstance(para... | register(im1, im2, params, exact_params=False, verbose=1)
Perform the registration of `im1` to `im2`, using the given
parameters. Returns `(im1_deformed, field)`, where `field` is a
tuple with arrays describing the deformation for each dimension
(x-y-z order, in world units).
Parameters:
... |
2,005 | def reftrack_uptodate_data(rt, role):
uptodate = rt.uptodate()
if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
if uptodate:
return "Yes"
else:
return "No"
if role == QtCore.Qt.ForegroundRole:
if uptodate:
return QtGui.QColor(*U... | Return the data for the uptodate status
:param rt: the :class:`jukeboxcore.reftrack.Reftrack` holds the data
:type rt: :class:`jukeboxcore.reftrack.Reftrack`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the uptodate status
:rtype: depending on role
:rais... |
2,006 | def _process_file(input_file, output_file, apikey):
bytes_ = read_binary(input_file)
compressed = shrink(bytes_, apikey)
if compressed.success and compressed.bytes:
write_binary(output_file, compressed.bytes)
else:
if compressed.errno in FATAL_ERRORS:
raise StopProcessi... | Shrinks input_file to output_file.
This function should be used only inside process_directory.
It takes input_file, tries to shrink it and if shrink was successful
save compressed image to output_file. Otherwise raise exception.
@return compressed: PNGResponse |
2,007 | def _build_migrated_variables(checkpoint_reader, name_value_fn):
names_to_shapes = checkpoint_reader.get_variable_to_shape_map()
new_name_to_variable = {}
name_to_new_name = {}
for name in names_to_shapes:
value = checkpoint_reader.get_tensor(name)
new_name, new_value = name_value_fn(name, value)
... | Builds the TensorFlow variables of the migrated checkpoint.
Args:
checkpoint_reader: A `tf.train.NewCheckPointReader` of the checkpoint to
be read from.
name_value_fn: Function taking two arguments, `name` and `value`, which
returns the pair of new name and value for that a variable of that name.... |
2,008 | def fasper(x, y, ofac, hifac, n_threads, MACC=4):
n = long(len(x))
if n != len(y):
print()
return
nout = int(0.5*ofac*hifac*n)
nfreqt = long(ofac*hifac*n*MACC)
nfreq = 64
while nfreq < nfreqt:
nfreq = 2*nfreq
nd... | Given abscissas x (which need not be equally spaced) and ordinates
y, and given a desired oversampling factor ofac (a typical value
being 4 or larger). this routine creates an array wk1 with a
sequence of nout increasing frequencies (not angular frequencies)
up to hifac times the "average" Nyquist frequ... |
2,009 | def clip_polygon(self, points):
self.gsave()
self._path_polygon(points)
self.__clip_stack.append(self.__clip_box)
self.__clip_box = _intersect_box(self.__clip_box, _compute_bounding_box(points))
self.clip_sub() | Create a polygonal clip region. You must call endclip() after
you completed drawing. See also the polygon method. |
2,010 | def get_sections(self, gradebook_id=, simple=False):
params = dict(includeMembers=)
section_data = self.get(
.format(
gradebookId=gradebook_id or self.gradebook_id
),
params=params
)
if simple:
sections = self.unr... | Get the sections for a gradebook.
Return a dictionary of types of sections containing a list of that
type for a given gradebook. Specified by a gradebookid.
If simple=True, a list of dictionaries is provided for each
section regardless of type. The dictionary only contains one
... |
2,011 | def send_message(self, message):
if self._error:
raise compat.saved_exc(self._error)
elif self._transport is None:
raise JsonRpcError()
self._version.check_message(message)
self._writer.write(serialize(message)) | Send a raw JSON-RPC message.
The *message* argument must be a dictionary containing a valid JSON-RPC
message according to the version passed into the constructor. |
2,012 | def write(self):
self._assure_writable("write")
if not self._dirty:
return
if isinstance(self._file_or_files, (list, tuple)):
raise AssertionError("Cannot write back if there is not exactly a single file to write to, have %i files"
... | Write changes to our file, if there are changes at all
:raise IOError: if this is a read-only writer instance or if we could not obtain
a file lock |
2,013 | def asDigraph(self):
from ._visualize import makeDigraph
return makeDigraph(
self._automaton,
stateAsString=lambda state: state.method.__name__,
inputAsString=lambda input: input.method.__name__,
outputAsString=lambda output: output.method.__name_... | Generate a L{graphviz.Digraph} that represents this machine's
states and transitions.
@return: L{graphviz.Digraph} object; for more information, please
see the documentation for
U{graphviz<https://graphviz.readthedocs.io/>} |
2,014 | def issue(self, issue_instance_id):
with self.db.make_session() as session:
selected_issue = (
session.query(IssueInstance)
.filter(IssueInstance.id == issue_instance_id)
.scalar()
)
if selected_issue is None:
... | Select an issue.
Parameters:
issue_instance_id: int id of the issue instance to select
Note: We are selecting issue instances, even though the command is called
issue. |
def get_permissions(self, user_id):
    """Fetch the permissions object from the graph.

    Queries ``{version}/{user_id}/permissions`` and collects the names of
    all permissions whose status is "granted".

    :param user_id: id of the user whose permissions are fetched
    :return: set of granted permission names
    """
    path = "{0}/{1}/permissions".format(self.version, user_id)
    entries = self.request(path, {})["data"]
    granted = set()
    for entry in entries:
        if entry["status"] == "granted":
            granted.add(entry["permission"])
    return granted
def addSourceId(self, value):
    """Add a SourceId to External_Info.

    :param value: the Source_Id instance to append to ``self.source_ids``
    :raises TypeError: if *value* is not a Source_Id
    """
    if isinstance(value, Source_Id):
        self.source_ids.append(value)
    else:
        # Bug fix: the original raised a (TypeError, message) tuple —
        # invalid in Python 3 — and referenced an undefined name
        # `source_id`; raise a proper TypeError on `value` instead.
        raise TypeError("value must be a Source_Id, got %s" % type(value))
2,017 | def remove_user_from_acl(self, name, user):
if name not in self._acl:
return False
if user in self._acl[name][]:
self._acl[name][].remove(user)
if user in self._acl[name][]:
self._acl[name][].remove(user)
return True | Remove a user from the given acl (both allow and deny). |
2,018 | def make_data(n,width):
x = dict([(i,100*random.random()) for i in range(1,n+1)])
y = dict([(i,100*random.random()) for i in range(1,n+1)])
c = {}
for i in range(1,n+1):
for j in range(1,n+1):
if j != i:
c[i,j] = distance(x[i],y[i],x[j],y[j])
e = {1:0}
l... | make_data: compute matrix distance and time windows. |
def _events(self):
    """Get the monitoring events from the daemon.

    This is used by the arbiter to get the monitoring events from all
    its satellites.

    :return: Events list serialized
    :rtype: list
    """
    app = self.app
    with app.events_lock:
        events = app.get_events()
    return serialize(events, True)
2,020 | def get_layers_output(self, dataset):
layers_out = []
with self.tf_graph.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
for l in self.layer_nodes:
layers_out.append(l.ev... | Get output from each layer of the network.
:param dataset: input data
:return: list of np array, element i is the output of layer i |
2,021 | def symbol(self, index):
if isinstance(index, str):
return index
elif (index < 0) or (index >= self.symtab.table_len):
self.error("symbol table index out of range")
sym = self.symtab.table[index]
if sym.kind == SharedData.KINDS.LO... | Generates symbol name from index |
def readSB(self, bits):
    """Read a signed int using the specified number of bits.

    The raw value is shifted to the top of a 32-bit word and shifted
    back arithmetically so the high bit propagates as the sign.
    """
    pad = 32 - bits
    raw = self.readbits(bits)
    return int32(raw << pad) >> pad
2,023 | def traverse_imports(names):
pending = [names]
while pending:
node = pending.pop()
if node.type == token.NAME:
yield node.value
elif node.type == syms.dotted_name:
yield "".join([ch.value for ch in node.children])
elif node.type == syms.dotted_as_name... | Walks over all the names imported in a dotted_as_names node. |
2,024 | def constraint_matrices(model, array_type=, include_vars=False,
zero_tol=1e-6):
if array_type not in (, ) and not dok_matrix:
raise ValueError()
array_builder = {
: np.array, : dok_matrix, : lil_matrix,
: pd.DataFrame,
}[array_type]
Problem = namedt... | Create a matrix representation of the problem.
This is used for alternative solution approaches that do not use optlang.
The function will construct the equality matrix, inequality matrix and
bounds for the complete problem.
Notes
-----
To accomodate non-zero equalities the problem will add th... |
2,025 | def create_from_pytz(cls, tz_info):
zone_name = tz_info.zone
utc_transition_times_list_raw = getattr(tz_info,
,
None)
utc_transition_times_list = [tuple(utt.timetuple())
... | Create an instance using the result of the timezone() call in
"pytz". |
2,026 | def get_snapshots(self):
ec2 = self.get_ec2_connection()
rs = ec2.get_all_snapshots()
all_vols = [self.volume_id] + self.past_volume_ids
snaps = []
for snapshot in rs:
if snapshot.volume_id in all_vols:
if snapshot.progress == :
... | Returns a list of all completed snapshots for this volume ID. |
2,027 | def remote(*args, **kwargs):
worker = get_global_worker()
if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
return make_decorator(worker=worker)(args[0])
error_string = ("The @ray.remote decorator must be applied either "
"with no arguments and no... | Define a remote function or an actor class.
This can be used with no arguments to define a remote function or actor as
follows:
.. code-block:: python
@ray.remote
def f():
return 1
@ray.remote
class Foo(object):
def method(self):
re... |
2,028 | def initialize():
global is_initialized
yaml.add_multi_constructor(, multi_constructor)
yaml.add_multi_constructor(, multi_constructor_pkl)
yaml.add_multi_constructor(, multi_constructor_import)
yaml.add_multi_constructor(, multi_constructor_include)
def import_constructor(loader, nod... | Initialize the configuration system by installing YAML handlers.
Automatically done on first call to load() specified in this file. |
def all(self, data=None, **kwargs):
    """Fetch all Virtual Account entities.

    Args:
        data: optional dict of query parameters; a fresh empty dict is
            used when omitted.
        **kwargs: extra options forwarded to the parent implementation.

    Returns:
        Dictionary of Virtual Account data.
    """
    # Bug fix: the original used a mutable default argument (data={})
    # that is shared across calls; build a fresh dict per call instead.
    if data is None:
        data = {}
    return super(VirtualAccount, self).all(data, **kwargs)
2,030 | def receive_response(self, transaction):
host, port = transaction.response.source
key_token = hash(str(host) + str(port) + str(transaction.response.token))
if key_token in self._block1_sent and transaction.response.block1 is not None:
item = self._block1_sent[key_token]
... | Handles the Blocks option in a incoming response.
:type transaction: Transaction
:param transaction: the transaction that owns the response
:rtype : Transaction
:return: the edited transaction |
2,031 | def _request(self, *args, **kwargs):
self._amend_request_kwargs(kwargs)
_response = self._requests_session.request(*args, **kwargs)
try:
_response.raise_for_status()
except HTTPError as e:
if e.response is not None:
raise_from(Con... | Make requests using configured :class:`requests.Session`.
Any error details will be extracted to an :class:`HTTPError`
which will contain relevant error details when printed. |
2,032 | def _fail_with_undefined_error(self, *args, **kwargs):
if self._undefined_hint is None:
if self._undefined_obj is missing:
hint = % self._undefined_name
elif not isinstance(self._undefined_name, basestring):
hint = % (
object... | Regular callback function for undefined objects that raises an
`UndefinedError` on call. |
2,033 | def remove_group(self, group = None):
if group is None:
raise KPError("Need group to remove a group")
elif type(group) is not v1Group:
raise KPError("group must be v1Group")
children = []
entries = []
if group in self.groups:
... | This method removes a group.
The group needed to remove the group.
group must be a v1Group. |
2,034 | def _FormatExpression(self, frame, expression):
rc, value = _EvaluateExpression(frame, expression)
if not rc:
message = _FormatMessage(value[][],
value[].get())
return + message +
return self._FormatValue(value) | Evaluates a single watched expression and formats it into a string form.
If expression evaluation fails, returns error message string.
Args:
frame: Python stack frame in which the expression is evaluated.
expression: string expression to evaluate.
Returns:
Formatted expression value tha... |
2,035 | def hash160(msg_bytes):
h = hashlib.new()
if in riemann.get_current_network_name():
h.update(blake256(msg_bytes))
return h.digest()
h.update(sha256(msg_bytes))
return h.digest() | byte-like -> bytes |
def GetOptionBool(self, section, option):
    """Get the value of an option in the config file.

    Args:
      section: string, the section of the config file to check.
      option: string, the option to retrieve the value of.

    Returns:
      bool, True if the option is enabled or not set.
    """
    if self.config.has_option(section, option):
        return self.config.getboolean(section, option)
    # An option that is absent from the config is treated as enabled.
    return True
2,037 | def get_files_by_path(path):
if os.path.isfile(path):
return [path]
if os.path.isdir(path):
return get_morph_files(path)
raise IOError( % path) | Get a file or set of files from a file path
Return list of files with path |
2,038 | def _get_data_from_rawfile(path_to_data, raw_data_id):
loaded = pickle.load(open(path_to_data, "rb"))
raw_datasets = loaded[]
for raw_dataset in raw_datasets:
if raw_dataset[].raw_data_id == raw_data_id:
return raw_dataset[]
return None | Get a HandwrittenData object that has ``raw_data_id`` from a pickle file
``path_to_data``.
:returns: The HandwrittenData object if ``raw_data_id`` is in
path_to_data, otherwise ``None``. |
2,039 | def components(self, visible=True):
if self._on:
self._quality.append_on_chord(self.on, self.root)
return self._quality.get_components(root=self._root, visible=visible) | Return the component notes of chord
:param bool visible: returns the name of notes if True else list of int
:rtype: list[(str or int)]
:return: component notes of chord |
2,040 | def make_mask(filename, ext, trail_coords, sublen=75, subwidth=200, order=3,
sigma=4, pad=10, plot=False, verbose=False):
if not HAS_OPDEP:
raise ImportError()
if verbose:
t_beg = time.time()
fname = .format(filename, ext)
image = fits.getdata(filename, ext)
dx ... | Create DQ mask for an image for a given satellite trail.
This mask can be added to existing DQ data using :func:`update_dq`.
.. note::
Unlike :func:`detsat`, multiprocessing is not available for
this function.
Parameters
----------
filename : str
FITS image filename.
... |
2,041 | def network_info(name=None, **kwargs):
*
result = {}
conn = __get_conn(**kwargs)
def _net_get_leases(net):
leases = net.DHCPLeases()
for lease in leases:
if lease[] == libvirt.VIR_IP_ADDR_TYPE_IPV4:
lease[] =
elif lease[] == libvirt.VIR_... | Return informations on a virtual network provided its name.
:param name: virtual network name
:param connection: libvirt connection URI, overriding defaults
:param username: username to connect with, overriding defaults
:param password: password to connect with, overriding defaults
If no name is p... |
2,042 | def create_router(self, name, tenant_id, subnet_lst):
try:
body = {: {: name, : tenant_id,
: True}}
router = self.neutronclient.create_router(body=body)
rout_dict = router.get()
rout_id = rout_dict.get()
except Excep... | Create a openstack router and add the interfaces. |
2,043 | def _validate_response(url, response):
if response[] not in [GooglePlaces.RESPONSE_STATUS_OK,
GooglePlaces.RESPONSE_STATUS_ZERO_RESULTS]:
error_detail = ( %
(url, response[]))
raise GooglePlacesError(error_detail) | Validates that the response from Google was successful. |
2,044 | def complete_pool_name(arg):
search_string =
if arg is not None:
search_string += arg
res = Pool.search({
: ,
: ,
: search_string
})
ret = []
for p in res[]:
ret.append(p.name)
return ret | Returns list of matching pool names |
def get(cls, user_id, db_session=None):
    """Fetch a row using its primary key.

    Will use the existing object in the session if already present.

    :param user_id: primary key of the row to fetch
    :param db_session: optional session; resolved via ``get_db_session``
    :return: the model instance, or None if not found
    """
    session = get_db_session(db_session)
    query = session.query(cls.model)
    return query.get(user_id)
2,046 | def main():
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument(
, , help=, action=
)
args = parser.parse_args()
generator = SignatureGenerator(debug=args.verbose)
crash_data = json.loads(sys.stdin.read())
ret = generator.generate(crash_data)
pr... | Takes crash data via stdin and generates a Socorro signature |
2,047 | def font_size_splitter(font_map):
small_font = []
medium_font = []
large_font = []
xlarge_font = []
fonts = set(font_map.keys()) - set(RANDOM_FILTERED_FONTS)
for font in fonts:
length = max(map(len, font_map[font][0].values()))
if length <= FONT_SMALL_THRESHOLD:
... | Split fonts to 4 category (small,medium,large,xlarge) by maximum length of letter in each font.
:param font_map: input fontmap
:type font_map : dict
:return: splitted fonts as dict |
2,048 | def is_promisc(ip, fake_bcast="ff:ff:00:00:00:00", **kargs):
responses = srp1(Ether(dst=fake_bcast) / ARP(op="who-has", pdst=ip), type=ETH_P_ARP, iface_hint=ip, timeout=1, verbose=0, **kargs)
return responses is not None | Try to guess if target is in Promisc mode. The target is provided by its ip. |
2,049 | def json_decode(data_type, serialized_obj, caller_permissions=None,
alias_validators=None, strict=True, old_style=False):
try:
deserialized_obj = json.loads(serialized_obj)
except ValueError:
raise bv.ValidationError()
else:
return json_compat_obj_decode(
... | Performs the reverse operation of json_encode.
Args:
data_type (Validator): Validator for serialized_obj.
serialized_obj (str): The JSON string to deserialize.
caller_permissions (list): The list of raw-string caller permissions
with which to serialize.
alias_validators ... |
def add_highlight(self, artist, *args, **kwargs):
    """Create, add, and return a highlighting artist.

    This method should be called with an "unpacked" `Selection`,
    possibly with some fields set to None.  The instance's
    ``highlight_kwargs`` entry takes precedence over a caller-supplied
    one (it is the first mapping in the ChainMap).  It is up to the
    caller to register the artist with the proper `Selection`.
    """
    merged = ChainMap({"highlight_kwargs": self.highlight_kwargs}, kwargs)
    highlight = _pick_info.make_highlight(artist, *args, **merged)
    if highlight:
        artist.axes.add_artist(highlight)
    return highlight
2,051 | def dump(self):
print("pagesize=%08x, reccount=%08x, pagecount=%08x" % (self.pagesize, self.reccount, self.pagecount))
self.dumpfree()
self.dumptree(self.firstindex) | raw dump of all records in the b-tree |
2,052 | def replace_volume_attachment(self, name, body, **kwargs):
kwargs[] = True
if kwargs.get():
return self.replace_volume_attachment_with_http_info(name, body, **kwargs)
else:
(data) = self.replace_volume_attachment_with_http_info(name, body, **kwargs)
r... | replace the specified VolumeAttachment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_volume_attachment(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool... |
2,053 | def get_subgraph(graph,
seed_method: Optional[str] = None,
seed_data: Optional[Any] = None,
expand_nodes: Optional[List[BaseEntity]] = None,
remove_nodes: Optional[List[BaseEntity]] = None,
):
if seed_method == SEED_TYPE_... | Run a pipeline query on graph with multiple sub-graph filters and expanders.
Order of Operations:
1. Seeding by given function name and data
2. Add nodes
3. Remove nodes
:param pybel.BELGraph graph: A BEL graph
:param seed_method: The name of the get_subgraph_by_* function to use
:param s... |
2,054 | def _edge_list_to_dataframe(ls, src_column_name, dst_column_name):
assert HAS_PANDAS,
cols = reduce(set.union, (set(e.attr.keys()) for e in ls))
df = pd.DataFrame({
src_column_name: [e.src_vid for e in ls],
dst_column_name: [e.dst_vid for e in ls]})
for c in cols:
df[c] = [... | Convert a list of edges into dataframe. |
2,055 | def get_storage_hash(storage):
if isinstance(storage, LazyObject):
if storage._wrapped is None:
storage._setup()
storage = storage._wrapped
if not isinstance(storage, six.string_types):
storage_cls = storage.__class__
storage = % (storage_cls.__module__, st... | Return a hex string hash for a storage object (or string containing
'full.path.ClassName' referring to a storage object). |
2,056 | def list(self, id, seq):
schema = CaptureSchema(exclude=(, ))
resp = self.service.list(self._base(id, seq))
return self.service.decode(schema, resp, many=True) | Get a list of captures.
:param id: Result ID as an int.
:param seq: TestResult sequence ID as an int.
:return: :class:`captures.Capture <captures.Capture>` list |
2,057 | def getColorHSV(name):
try:
x = getColorInfoList()[getColorList().index(name.upper())]
except:
return (-1, -1, -1)
r = x[1] / 255.
g = x[2] / 255.
b = x[3] / 255.
cmax = max(r, g, b)
V = round(cmax * 100, 1)
cmin = min(r, g, b)
delta = cmax - cmin
if del... | Retrieve the hue, saturation, value triple of a color name.
Returns:
a triple (degree, percent, percent). If not found (-1, -1, -1) is returned. |
2,058 | def package_locations(self, package_keyname):
mask = "mask[description, keyname, locations]"
package = self.get_package_by_key(package_keyname, mask=)
regions = self.package_svc.getRegions(id=package[], mask=mask)
return regions | List datacenter locations for a package keyname
:param str package_keyname: The package for which to get the items.
:returns: List of locations a package is orderable in |
2,059 | def get_permission_requests(parser, token):
return PermissionsForObjectNode.handle_token(parser, token,
approved=False,
name=) | Retrieves all permissions requests associated with the given obj and user
and assigns the result to a context variable.
Syntax::
{% get_permission_requests obj %}
{% for perm in permissions %}
{{ perm }}
{% endfor %}
{% get_permission_requests obj as "my_permission... |
2,060 | def error(self, error):
if self.direction not in [, , ] and error is not None:
raise ValueError("error only accepted for x, y, z dimensions")
if isinstance(error, u.Quantity):
error = error.to(self.unit).value
self._error = error | set the error |
2,061 | def _get_stats_columns(cls, table, relation_type):
column_names = cls._get_stats_column_names()
clustering_value = None
if table.clustering_fields is not None:
clustering_value = .join(table.clustering_fields)
column_values = (
... | Given a table, return an iterator of key/value pairs for stats
column names/values. |
2,062 | def _match_type(self, i):
self.col_match = self.RE_TYPE.match(self._source[i])
if self.col_match is not None:
self.section = "types"
self.el_type = CustomType
self.el_name = self.col_match.group("name")
return True
else:
... | Looks at line 'i' to see if the line matches a module user type def. |
def distinct_words(string_matrix: List[List[str]]) -> Set[str]:
    """Return the set of distinct words appearing in *string_matrix*.

    Diagnostic function.

    :param string_matrix: list of sentences, each a list of word strings
    :return: set of unique words

    >>> dl = distinct_words([['the', 'quick', 'brown'], ['here', 'lies', 'the', 'fox']])
    >>> sorted(dl)
    ['brown', 'fox', 'here', 'lies', 'quick', 'the']
    """
    words: Set[str] = set()
    for sentence in string_matrix:
        words.update(sentence)
    return words
2,064 | def get_slopes(data, s_freq, level=, smooth=0.05):
data = negative(data)
nan_array = empty((5,))
nan_array[:] = nan
idx_trough = data.argmin()
idx_peak = data.argmax()
if idx_trough >= idx_peak:
return nan_array, nan_array
zero_crossings_0 = where(diff(sign(data[:idx_trou... | Get the slopes (average and/or maximum) for each quadrant of a slow
wave, as well as the combination of quadrants 2 and 3.
Parameters
----------
data : ndarray
raw data as vector
s_freq : int
sampling frequency
level : str
if 'average', returns average slopes (uV / s). i... |
2,065 | def health(self, index=None, params=None):
return self.transport.perform_request(, _make_path(,
, index), params=params) | Get a very simple status on the health of the cluster.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-health.html>`_
:arg index: Limit the information returned to a specific index
:arg level: Specify the level of detail for returned information,
default 'cl... |
def should_include_file_in_search(file_name, extensions, exclude_dirs):
    """Whether or not a filename matches a search criteria.

    Args:
        file_name (str): A file path to check.
        extensions (list): File extensions the file should match (any of).
        exclude_dirs (list): Directory prefixes to exclude, or None.

    Returns:
        bool: True if the file should be included in the search.
    """
    if exclude_dirs is not None:
        for directory in exclude_dirs:
            if file_name.startswith(directory):
                return False
    return any(file_name.endswith(ext) for ext in extensions)
2,067 | def setdim(P, dim=None):
P = P.copy()
ldim = P.dim
if not dim:
dim = ldim+1
if dim==ldim:
return P
P.dim = dim
if dim>ldim:
key = numpy.zeros(dim, dtype=int)
for lkey in P.keys:
key[:ldim] = lkey
P.A[tuple(key)] = P.A.pop(lkey)
... | Adjust the dimensions of a polynomial.
Output the results into Poly object
Args:
P (Poly) : Input polynomial
dim (int) : The dimensions of the output polynomial. If omitted,
increase polynomial with one dimension. If the new dim is
smaller then P's dimensions, v... |
2,068 | def slice_around_gaps (values, maxgap):
if not (maxgap > 0):
raise ValueError ( % maxgap)
values = np.asarray (values)
delta = values[1:] - values[:-1]
if np.any (delta < 0):
raise ValueError ()
whgap = np.where (delta > maxgap)[0] + 1
prev_idx = None
for ga... | Given an ordered array of values, generate a set of slices that traverse
all of the values. Within each slice, no gap between adjacent values is
larger than `maxgap`. In other words, these slices break the array into
chunks separated by gaps of size larger than maxgap. |
2,069 | def _check_channel_state_for_update(
self,
channel_identifier: ChannelID,
closer: Address,
update_nonce: Nonce,
block_identifier: BlockSpecification,
) -> Optional[str]:
msg = None
closer_details = self._detail_participant(
... | Check the channel state on chain to see if it has been updated.
Compare the nonce, we are about to update the contract with, with the
updated nonce in the onchain state and, if it's the same, return a
message with which the caller should raise a RaidenRecoverableError.
If all is okay r... |
2,070 | def perform_remote_action(i):
import urllib
try: import urllib.request as urllib2
except: import urllib2
try: from urllib.parse import urlencode
except: from urllib import urlencode
rr={:0}
act=i.get(,)
o=i.get(,)
if o==:
i[]=
i[]=
... | Input: { See 'perform_action' function }
Output: { See 'perform_action' function } |
2,071 | def get_term_pillar(filter_name,
term_name,
pillar_key=,
pillarenv=None,
saltenv=None):
return __salt__[](filter_name,
term_name,
pillar_... | Helper that can be used inside a state SLS,
in order to get the term configuration given its name,
under a certain filter uniquely identified by its name.
filter_name
The name of the filter.
term_name
The name of the term.
pillar_key: ``acl``
The root key of the whole poli... |
2,072 | def check(self):
status = True
synced = True
xbin = self.xbin.value()
ybin = self.ybin.value()
nwin = self.nwin.value()
g = get_root(self).globals
for xsw, ysw, nxw, nyw in \
zip(self.xs[:nwin], self.ys[:nwin],
... | Checks the values of the windows. If any problems are found,
it flags them by changing the background colour. Only active
windows are checked.
Returns status, flag for whether parameters are viable. |
2,073 | def read_csv(filename, delimiter=",", skip=0, guess_type=True, has_header=True, use_types={}):
with open(filename, ) as f:
if has_header:
header = f.readline().strip().split(delimiter)
else:
header = None
for i in range(skip):
f.readline()
... | Read a CSV file
Usage
-----
>>> data = read_csv(filename, delimiter=delimiter, skip=skip,
guess_type=guess_type, has_header=True, use_types={})
# Use specific types
>>> types = {"sepal.length": int, "petal.width": float}
>>> data = read_csv(filename, guess_type=guess_type, use... |
2,074 | def osd_page_handler(config=None, identifier=None, prefix=None, **args):
template_dir = os.path.join(os.path.dirname(__file__), )
with open(os.path.join(template_dir, ), ) as f:
template = f.read()
d = dict(prefix=prefix,
identifier=identifier,
api_version=config.api_v... | Flask handler to produce HTML response for OpenSeadragon view of identifier.
Arguments:
config - Config object for this IIIF handler
identifier - identifier of image/generator
prefix - path prefix
**args - other aguments ignored |
2,075 | def ref2names2commdct(ref2names, commdct):
for comm in commdct:
for cdct in comm:
try:
refs = cdct[][0]
validobjects = ref2names[refs]
cdct.update({:validobjects})
except KeyError as e:
continue
return commdct | embed ref2names into commdct |
2,076 | def create(cls, name, division, api=None):
division = Transform.to_division(division)
api = api if api else cls._API
data = {
: name,
: division
}
extra = {
: cls.__name__,
: data
}
logger.info(, extra=ext... | Create team within a division
:param name: Team name.
:param division: Parent division.
:param api: Api instance.
:return: Team object. |
2,077 | def deref(self, ctx):
if self in ctx.call_nodes:
raise CyclicReferenceError(ctx, self)
if self in ctx.cached_results:
return ctx.cached_results[self]
try:
ctx.call_nodes.add(self)
ctx.call_stack.append(self)
result = self.ev... | Returns the value this reference is pointing to. This method uses 'ctx' to resolve the reference and return
the value this reference references.
If the call was already made, it returns a cached result.
It also makes sure there's no cyclic reference, and if so raises CyclicReferenceError. |
def visit_ellipsis(self, node, parent):
    """Visit an Ellipsis node by returning a fresh instance of it."""
    lineno = getattr(node, "lineno", None)
    col_offset = getattr(node, "col_offset", None)
    return nodes.Ellipsis(lineno, col_offset, parent)
def program_files(self, executable):
    """Determine the file paths to be adopted.

    :param executable: the tool executable (unused here; the path set
        depends only on the detected tool version)
    :return: list of required paths for the detected major version
    :raises ValueError: if the detected version is older than 6
    """
    version = self._get_version()
    if version == 6:
        return self.REQUIRED_PATHS_6
    if version > 6:
        return self.REQUIRED_PATHS_7_1
    # Bug fix: the original fell through with `paths` unbound
    # (UnboundLocalError) for versions < 6; fail with a clear error.
    raise ValueError("Unsupported tool version: %s" % version)
2,080 | def _match_processes(self, pid, name, cur_process):
cur_pid, cur_name = self._get_tuple(cur_process.split())
pid_match = False
if not pid:
pid_match = True
elif pid == cur_pid:
pid_match = True
name_match = False
if not name:
name_match = True
elif name == cur_name:
... | Determine whether user-specified "pid/processes" contain this process
:param pid: The user input of pid
:param name: The user input of process name
:param process: current process info
:return: True or Not; (if both pid/process are given, then both of them need to match) |
2,081 | def get(self, name, param=None):
if name not in self.attribs:
raise exceptions.SoftLayerError()
call_details = self.attribs[name]
if call_details.get():
if not param:
raise exceptions.SoftLayerError(
)
params = tuple... | Retreive a metadata attribute.
:param string name: name of the attribute to retrieve. See `attribs`
:param param: Required parameter for some attributes |
def getlocals(back=2):
    """Get the local variables some levels back up the call stack.

    :param back: number of frames to walk, counting this function's own
        frame as step 1 (so ``back=2`` is the caller's frame);
        -1 walks all the way to the top of the stack.
    :return: the ``f_locals`` dict of the reached frame.
    """
    import inspect
    fr = inspect.currentframe()
    # Bug fix: the original left `fr1` unbound when back == 0, raising
    # NameError at the return; start it at the current frame instead.
    # The bare `try/except: pass` around the walk was also removed —
    # nothing in the loop can raise.
    fr1 = fr
    while fr is not None and back != 0:
        fr1 = fr
        fr = fr.f_back
        back -= 1
    return fr1.f_locals
2,083 | def network(n):
tpm(n.tpm)
connectivity_matrix(n.cm)
if n.cm.shape[0] != n.size:
raise ValueError("Connectivity matrix must be NxN, where N is the "
"number of nodes in the network.")
return True | Validate a |Network|.
Checks the TPM and connectivity matrix. |
2,084 | def validate(self):
if not isinstance(self.location, Location):
raise TypeError(u.format(
type(self.location).__name__, self.location))
if not self.location.field:
raise ValueError(u
u.format(self.location))
if not i... | Validate that the OutputContextField is correctly representable. |
2,085 | def sign_execute_deposit(deposit_params, key_pair):
signature = sign_transaction(transaction=deposit_params[],
private_key_hex=private_key_to_hex(key_pair=key_pair))
return {: signature} | Function to execute the deposit request by signing the transaction generated by the create deposit function.
Execution of this function is as follows::
sign_execute_deposit(deposit_details=create_deposit, key_pair=key_pair)
The expected return result for this function is as follows::
{
... |
2,086 | def compile_file_into_spirv(filepath, stage, optimization=,
warnings_as_errors=False):
with open(filepath, ) as f:
content = f.read()
return compile_into_spirv(content, stage, filepath,
optimization=optimization,
... | Compile shader file into Spir-V binary.
This function uses shaderc to compile your glsl file code into Spir-V
code.
Args:
filepath (strs): Absolute path to your shader file
stage (str): Pipeline stage in ['vert', 'tesc', 'tese', 'geom',
'frag', 'comp']
optimiza... |
2,087 | def _dstr(degrees, places=1, signed=False):
r
if isnan(degrees):
return
sgn, d, m, s, etc = _sexagesimalize_to_int(degrees, places)
sign = if sgn < 0.0 else if signed else
return %02d.%0*d"' % (sign, d, m, s, places, etc) | r"""Convert floating point `degrees` into a sexagesimal string.
>>> _dstr(181.875)
'181deg 52\' 30.0"'
>>> _dstr(181.875, places=3)
'181deg 52\' 30.000"'
>>> _dstr(181.875, signed=True)
'+181deg 52\' 30.0"'
>>> _dstr(float('nan'))
'nan' |
2,088 | def isSet(self, param):
param = self._resolveParam(param)
return param in self._paramMap | Checks whether a param is explicitly set by user. |
2,089 | def send_event_to_salt(self, result):
s a dictionary which has the final data and topic.
senddatatopic__rolemastersock_direvent.fire_master'](data=data, tag=topic) | This function identifies whether the engine is running on the master
or the minion and sends the data to the master event bus accordingly.
:param result: It's a dictionary which has the final data and topic. |
2,090 | def unhex(s):
bits = 0
for c in s:
if <= c <= :
i = ord()
elif <= c <= :
i = ord()-10
elif <= c <= :
i = ord()-10
else:
break
bits = bits*16 + (ord(c) - i)
return bits | Get the integer value of a hexadecimal number. |
2,091 | def save_config(self):
if not self.opts[][1]:
if logger.isEnabledFor(logging.INFO):
logger.info()
return 1
txt =utf-8dark window
copyfile(self.config_file, self.config_file + )
if self.opts[][1] is None:
self.opts[][1] =
... | Save config file
Creates config.restore (back up file)
Returns:
-1: Error saving config
0: Config saved successfully
1: Config not saved (not modified |
2,092 | def check_cgroup_availability_in_thread(options):
thread = _CheckCgroupsThread(options)
thread.start()
thread.join()
if thread.error:
raise thread.error | Run check_cgroup_availability() in a separate thread to detect the following problem:
If "cgexec --sticky" is used to tell cgrulesengd to not interfere
with our child processes, the sticky flag unfortunately works only
for processes spawned by the main thread, not those spawned by other threads
(and thi... |
2,093 | def run():
global WORKBENCH
args = client_helper.grab_server_args()
WORKBENCH = zerorpc.Client(timeout=300, heartbeat=60)
WORKBENCH.connect(+args[]++args[])
data_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), )
file_list = [os.path.join(data_path... | This client pulls PCAP files for building report.
Returns:
A list with `view_pcap` , `meta` and `filename` objects. |
2,094 | def __ensure_provisioning_writes(
table_name, table_key, gsi_name, gsi_key, num_consec_write_checks):
if not get_gsi_option(table_key, gsi_key, ):
logger.info(
.format(
table_name, gsi_name))
return False, dynamodb.get_provisioned_gsi_write_units... | Ensure that provisioning of writes is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type table_key: str
:param table_key: Table configuration option key name
:type gsi_name: str
:param gsi_name: Name of the GSI
:type gsi_key: str
:param gsi_key: Configurat... |
2,095 | def im2mat(I):
return I.reshape((I.shape[0] * I.shape[1], I.shape[2])) | Converts and image to matrix (one pixel per line) |
2,096 | def _rds_cluster_tags(model, dbs, session_factory, generator, retry):
client = local_session(session_factory).client()
def process_tags(db):
try:
db[] = retry(
client.list_tags_for_resource,
ResourceName=generator(db[model.id]))[]
return db
... | Augment rds clusters with their respective tags. |
2,097 | def revoke(self, auth, codetype, code, defer=False):
return self._call(, auth, [codetype, code], defer) | Given an activation code, the associated entity is revoked after which the activation
code can no longer be used.
Args:
auth: Takes the owner's cik
codetype: The type of code to revoke (client | share)
code: Code specified by <codetype> (cik | share-activation-code) |
2,098 | def _get_model_parameters_estimations(self, error_model):
if error_model.dependance == NIDM_INDEPEDENT_ERROR:
if error_model.variance_homo:
estimation_method = STATO_OLS
else:
estimation_method = STATO_WLS
else:
estimation_meth... | Infer model estimation method from the 'error_model'. Return an object
of type ModelParametersEstimation. |
2,099 | def order_assets(self, asset_ids, composition_id):
if (not isinstance(composition_id, ABCId) and
composition_id.get_identifier_namespace() != ):
raise errors.InvalidArgument()
composition_map, collection = self._get_composition_collection(composition_id)
comp... | Reorders a set of assets in a composition.
arg: asset_ids (osid.id.Id[]): ``Ids`` for a set of
``Assets``
arg: composition_id (osid.id.Id): ``Id`` of the
``Composition``
raise: NotFound - ``composition_id`` not found or, an
``asset_id`` not... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.