docstring stringlengths 52 499 | function stringlengths 67 35.2k | __index_level_0__ int64 52.6k 1.16M |
|---|---|---|
Run tox.
Build package and run unit tests against several pythons.
Args:
args: Optional arguments passed to tox.
Example:
fab tox:'-e py36 -r' | def tox(args=''):
basedir = dirname(__file__)
latest_pythons = _determine_latest_pythons()
# e.g. highest_minor_python: '3.6'
highest_minor_python = _highest_minor(latest_pythons)
_local_needs_pythons(flo('cd {basedir} && '
'python{highest_minor_python} -m tox {... | 767,330 |
Instantiate an instance of the Slack API
Args:
token: {str} (required) API token, read from SLACK_TOKEN env var
auth_test: {bool} verify this token
verify: {bool} verify all API calls return with a True 'ok'
lazy: {bool} Don't populate properties until called | def __init__(self, token=None, auth_test=False, verify=True, lazy=False):
try:
self.token = token if token else os.environ['SLACK_TOKEN']
except KeyError:
raise ValueError('If not providing a token, must set SLACK_TOKEN envvar')
if auth_test:
respons... | 768,476 |
Low-level method to call the Slack API.
Args:
method: {str} method name to call
params: {dict} GET parameters
The token will always be added | def _call_api(self, method, params=None):
url = self.url.format(method=method)
if not params:
params = {'token': self.token}
else:
params['token'] = self.token
logger.debug('Send request to %s', url)
response = requests.get(url, params=params).jso... | 768,477 |
Checks the format of the sakefile dictionary
to ensure it conforms to specification
Args:
A dictionary that is the parsed Sakefile (from sake.py)
The setting dictionary (for print functions)
Returns:
True if the Sakefile is conformant
False if not | def check_integrity(sakefile, settings):
sprint = settings["sprint"]
error = settings["error"]
sprint("Call to check_integrity issued", level="verbose")
if not sakefile:
error("Sakefile is empty")
return False
# checking for duplicate targets
if len(sakefile.keys()) != len(s... | 768,483 |
Checks the integrity of a specific target. Gets called
multiple times from check_integrity()
Args:
The target name
The dictionary values of that target
A boolean representing whether it is a meta-target
A boolean representing whether it is the "all" target
A string repre... | def check_target_integrity(key, values, meta=False, all=False, parent=None):
# logic to audit "all" target
if all:
if not values:
print("Warning: target 'all' is empty")
# will check if it has unrecognized target later
return True
errmes = "target '{}' is not allow... | 768,484 |
Takes sha1 hash of all dependencies and outputs of all targets
Args:
The graph we are going to build
The settings dictionary
Returns:
A dictionary where the keys are the filenames and the
value is the sha1 hash | def take_shas_of_all_files(G, settings):
global ERROR_FN
sprint = settings["sprint"]
error = settings["error"]
ERROR_FN = error
sha_dict = {}
all_files = []
for target in G.nodes(data=True):
sprint("About to take shas of files in target '{}'".format(target[0]),
le... | 768,490 |
Determines if a target needs to run. This can happen in two ways:
(a) If a dependency of the target has changed
(b) If an output of the target is missing
Args:
The graph we are going to build
The name of the target
The dictionary of the current shas held in memory
The dictio... | def needs_to_run(G, target, in_mem_shas, from_store, settings):
force = settings["force"]
sprint = settings["sprint"]
if(force):
sprint("Target rebuild is being forced so {} needs to run".format(target),
level="verbose")
return True
node_dict = get_the_node_dict(G, t... | 768,491 |
Runs the commands supplied as an argument
It will exit the program if the commands return a
non-zero code
Args:
the commands to run
The settings dictionary | def run_commands(commands, settings):
sprint = settings["sprint"]
quiet = settings["quiet"]
error = settings["error"]
enhanced_errors = True
the_shell = None
if settings["no_enhanced_errors"]:
enhanced_errors = False
if "shell" in settings:
the_shell = settings["shell"]
... | 768,492 |
Wrapper function that sends to commands in a target's 'formula'
to run_commands()
Args:
The graph we are going to build
The target to run
def run_the_target(G, target, settings):
    """Send the commands in a target's 'formula' to run_commands().

    Args:
        G: the graph we are going to build
        target: the target to run
        settings: the settings dictionary
    """
    settings["sprint"]("Running target {}".format(target))
    node_dict = get_the_node_dict(G, target)
    run_commands(node_dict["formula"], settings)
This is the master function that performs the building.
Args:
A graph (often a subgraph)
The settings dictionary
An optional list of files to not update the shas of
(needed when building specific targets)
Returns:
0 if successful
UN-success results in a fatal ... | def build_this_graph(G, settings, dont_update_shas_of=None):
verbose = settings["verbose"]
quiet = settings["quiet"]
force = settings["force"]
recon = settings["recon"]
parallel = settings["parallel"]
error = settings["error"]
sprint = settings["sprint"]
if not dont_update_shas_of:... | 768,501 |
Returns the prettily formatted help strings (for printing)
Args:
A dictionary that is the parsed Sakefile (from sake.py)
NOTE:
the list sorting in this function is required for this
function to be deterministic | def get_help(sakefile):
full_string = "You can 'sake' one of the following...\n\n"
errmes = "target '{}' is not allowed to not have help message\n"
outerlines = []
for target in sakefile:
if target == "all":
# this doesn't have a help message
continue
middle_... | 768,517 |
Function to help construct_graph() identify dependencies
Args:
A dependency
A flag indication verbosity
A (populated) NetworkX DiGraph
Returns:
A list of targets that build given dependency | def check_for_dep_in_outputs(dep, verbose, G):
if verbose:
print("checking dep {}".format(dep))
ret_list = []
for node in G.nodes(data=True):
if "output" not in node[1]:
continue
for out in node[1]['output']:
if fnmatch.fnmatch(out, dep):
... | 768,520 |
Takes the sakefile dictionary and builds a NetworkX graph
Args:
A dictionary that is the parsed Sakefile (from sake.py)
The settings dictionary
Returns:
A NetworkX graph | def construct_graph(sakefile, settings):
verbose = settings["verbose"]
sprint = settings["sprint"]
G = nx.DiGraph()
sprint("Going to construct Graph", level="verbose")
for target in sakefile:
if target == "all":
# we don't want this node
continue
if "form... | 768,525 |
Removes all the output files from all targets. Takes
the graph as the only argument
Args:
The networkx graph object
The settings dictionary
Returns:
0 if successful
1 if removing even one file failed | def clean_all(G, settings):
quiet = settings["quiet"]
recon = settings["recon"]
sprint = settings["sprint"]
error = settings["error"]
all_outputs = []
for node in G.nodes(data=True):
if "output" in node[1]:
for item in get_all_outputs(node[1]):
all_output... | 768,528 |
Writes the graph G in dot file format for graphviz visualization.
Args:
a Networkx graph
A filename to name the dot files | def write_dot_file(G, filename):
with io.open(filename, "w") as fh:
fh.write("strict digraph DependencyDiagram {\n")
edge_list = G.edges()
node_list = set(G.nodes())
if edge_list:
for edge in sorted(edge_list):
source, targ = edge
node... | 768,529 |
Uses networkX to draw a graphviz dot file either (a) calls the
graphviz command "dot" to turn it into a SVG and remove the
dotfile (default), or (b) if no_graphviz is True, just output
the graphviz dot file
Args:
a NetworkX DiGraph
the settings dictionary
a filename (a default i... | def visualize(G, settings, filename="dependencies", no_graphviz=False):
error = settings["error"]
if no_graphviz:
write_dot_file(G, filename)
return 0
write_dot_file(G, "tempdot")
renderer = "svg"
if re.search("\.jpg$", filename, re.IGNORECASE):
renderer = "jpg"
elif... | 768,530 |
High-level function for creating messages. Return packed bytes.
Args:
text: {str}
channel: {str} Either name or ID | def make_message(self, text, channel):
try:
channel_id = self.slack.channel_from_name(channel)['id']
except ValueError:
channel_id = channel
return pack({
'text': text,
'type': 'message',
'channel': channel_id,
'id'... | 768,541 |
This function returns the fields for a schema that matches the provided
nautilus model.
Args:
model (nautilus.model.BaseModel): The model to base the field list on
Returns:
(dict<field_name: str, graphqlType>): A mapping of field names to
def fields_for_model(model):
    """Return the schema fields that match the provided nautilus model.

    Args:
        model (nautilus.model.BaseModel): the model to base the field
            list on.

    Returns:
        (dict): mapping of lowercase field names to graphql types.
    """
    # build the attribute arguments (no filters applied)
    fields = {}
    for model_field in model.fields():
        fields[model_field.name.lower()] = convert_peewee_field(model_field)
    return fields
This factory returns an action handler that creates a new instance of
the specified model when a create action is received, assuming the
action follows nautilus conventions.
Args:
Model (nautilus.BaseModel): The model to create when the action
received.
Retur... | def create_handler(Model, name=None, **kwds):
async def action_handler(service, action_type, payload, props, notify=True, **kwds):
# if the payload represents a new instance of `Model`
if action_type == get_crud_action('create', name or Model):
# print('handling create for ' + name ... | 768,966 |
Creates a comment block
Args:
text (str): content of comment without #
comment_prefix (str): character indicating start of comment
Returns:
self for chaining | def comment(self, text, comment_prefix='#'):
comment = Comment(self._container)
if not text.startswith(comment_prefix):
text = "{} {}".format(comment_prefix, text)
if not text.endswith('\n'):
text = "{}{}".format(text, '\n')
comment.add_line(text)
... | 768,978 |
Creates a section block
Args:
section (str or :class:`Section`): name of section or object
Returns:
self for chaining | def section(self, section):
if not isinstance(self._container, ConfigUpdater):
raise ValueError("Sections can only be added at section level!")
if isinstance(section, str):
# create a new section
section = Section(section, container=self._container)
e... | 768,979 |
Creates a vertical space of newlines
Args:
newlines (int): number of empty lines
Returns:
def space(self, newlines=1):
    """Create a vertical space of blank lines.

    Args:
        newlines (int): number of empty lines

    Returns:
        self for chaining
    """
    blank = Space()
    for _ in range(newlines):
        blank.add_line('\n')
    self._container.structure.insert(self._idx, blank)
    self._idx += 1
    return self
Creates a new option inside a section
Args:
key (str): key of the option
value (str or None): value of the option
**kwargs: are passed to the constructor of :class:`Option`
Returns:
self for chaining | def option(self, key, value=None, **kwargs):
if not isinstance(self._container, Section):
raise ValueError("Options can only be added inside a section!")
option = Option(key, value, container=self._container, **kwargs)
option.value = value
self._container.structure.i... | 768,981 |
Add a Comment object to the section
Used during initial parsing mainly
Args:
def add_comment(self, line):
    """Add a Comment object to the section (used mainly during parsing).

    Args:
        line (str): one line in the comment
    """
    # start a fresh Comment block unless we are already inside one
    if not isinstance(self.last_item, Comment):
        self._structure.append(Comment(self._structure))
    self.last_item.add_line(line)
    return self
Add a Space object to the section
Used during initial parsing mainly
Args:
def add_space(self, line):
    """Add a Space object to the section (used mainly during parsing).

    Args:
        line (str): one line that defines the space, maybe whitespaces
    """
    # start a fresh Space block unless we are already inside one
    if not isinstance(self.last_item, Space):
        self._structure.append(Space(self._structure))
    self.last_item.add_line(line)
    return self
Set an option for chaining.
Args:
option (str): option name
def set(self, option, value=None):
    """Set an option's value, creating the option if needed.

    Args:
        option (str): option name
        value (str): value, default None

    Returns:
        self for chaining
    """
    key = self._container.optionxform(option)
    if key in self.options():
        self.__getitem__(key).value = value
    else:
        self.__setitem__(key, value)
    return self
Sets the value to a given list of options, e.g. multi-line values
Args:
values (list): list of values
separator (str): separator for values, default: line separator
indent (str): indentation depth in case of line separator | def set_values(self, values, separator='\n', indent=4*' '):
self._updated = True
self._multiline_value_joined = True
self._values = values
if separator == '\n':
values.insert(0, '')
separator = separator + indent
self._value = separator.join(value... | 768,999 |
Read and parse a filename.
Args:
filename (str): path to file
def read(self, filename, encoding=None):
    """Read and parse the configuration file at `filename`.

    Args:
        filename (str): path to file
        encoding (str): encoding of file, default None
    """
    with open(filename, encoding=encoding) as stream:
        self._read(stream, filename)
    # only record the path once parsing succeeded
    self._filename = os.path.abspath(filename)
Call ConfigParser to validate config
Args:
kwargs: are passed to :class:`configparser.ConfigParser` | def validate_format(self, **kwargs):
args = dict(
dict_type=self._dict,
allow_no_value=self._allow_no_value,
inline_comment_prefixes=self._inline_comment_prefixes,
strict=self._strict,
empty_lines_in_values=self._empty_lines_in_values
... | 769,009 |
Create a new section in the configuration.
Raise DuplicateSectionError if a section by the specified name
already exists. Raise ValueError if name is DEFAULT.
Args:
section (str or :class:`Section`): name or Section type | def add_section(self, section):
if section in self.sections():
raise DuplicateSectionError(section)
if isinstance(section, str):
# create a new section
section = Section(section, container=self)
elif not isinstance(section, Section):
raise... | 769,013 |
Returns list of configuration options for the named section.
Args:
section (str): name of section
Returns:
def options(self, section):
    """Return the list of option names for the named section.

    Args:
        section (str): name of section

    Returns:
        list: list of option names

    Raises:
        NoSectionError: if the section does not exist.
    """
    if self.has_section(section):
        return self.__getitem__(section).options()
    raise NoSectionError(section) from None
Gets an option value for a given section.
Args:
section (str): section name
option (str): option name
Returns:
:class:`Option`: Option object holding key/value pair | def get(self, section, option):
if not self.has_section(section):
raise NoSectionError(section) from None
section = self.__getitem__(section)
option = self.optionxform(option)
try:
value = section[option]
except KeyError:
raise NoOpti... | 769,015 |
Return a list of (name, value) tuples for options or sections.
If section is given, return a list of tuples with (name, value) for
each option in the section. Otherwise, return a list of tuples with
(section_name, section_type) for each section.
Args:
def items(self, section=_UNSET):
    """Return a list of (name, value) tuples for options or sections.

    If `section` is given, return (key, Option) tuples for each option
    in that section. Otherwise, return (name, Section) tuples for each
    section.

    Args:
        section (str): optional section name

    Returns:
        list: list of tuples
    """
    if section is _UNSET:
        return [(sect.name, sect) for sect in self.sections_blocks()]
    sect = self.__getitem__(section)
    return [(opt.key, opt) for opt in sect.option_blocks()]
Checks for the existence of a given option in a given section.
Args:
section (str): name of section
option (str): name of option
Returns:
def has_option(self, section, option):
    """Check for the existence of a given option in a given section.

    Args:
        section (str): name of section
        option (str): name of option

    Returns:
        bool: whether the option exists in the given section
    """
    if section not in self.sections():
        return False
    return self.optionxform(option) in self[section]
Set an option.
Args:
section (str): section name
option (str): option name
value (str): value, default None | def set(self, section, option, value=None):
try:
section = self.__getitem__(section)
except KeyError:
raise NoSectionError(section) from None
option = self.optionxform(option)
if option in section:
section[option].value = value
else:
... | 769,018 |
Remove an option.
Args:
section (str): section name
option (str): option name
Returns:
bool: whether the option was actually removed | def remove_option(self, section, option):
try:
section = self.__getitem__(section)
except KeyError:
raise NoSectionError(section) from None
option = self.optionxform(option)
existed = option in section.options()
if existed:
del section... | 769,019 |
Remove a file section.
Args:
name: name of the section
Returns:
def remove_section(self, name):
    """Remove a file section.

    Args:
        name: name of the section

    Returns:
        bool: whether the section was actually removed
    """
    if not self.has_section(name):
        return False
    del self._structure[self._get_section_idx(name)]
    return True
This function renders the template designated by the argument to the
designated directory using the given context.
Args:
template (string) : the source template to use (relative to ./templates)
out_dir (string) : the name of the output directory
context (dict) : the ... | def render_template(template, out_dir='.', context=None):
# the directory containing templates
template_directory = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..',
'templates',
t... | 769,024 |
This factory returns an action handler that deletes a new instance of
the specified model when a delete action is recieved, assuming the
action follows nautilus convetions.
Args:
Model (nautilus.BaseModel): The model to delete when the action
received.
Retur... | def delete_handler(Model, name=None, **kwds):
# necessary imports
from nautilus.database import db
async def action_handler(service, action_type, payload, props, notify=True, **kwds):
# if the payload represents a new instance of `model`
if action_type == get_crud_action('delete', name... | 769,029 |
This factory returns an action handler that responds to read requests
by resolving the payload as a graphql query against the internal schema.
Args:
Model (nautilus.BaseModel): The model to delete when the action
received.
Returns:
function(type, payloa... | def read_handler(Model, name=None, **kwds):
async def action_handler(service, action_type, payload, props, **kwds):
# if the payload represents a new instance of `model`
if action_type == get_crud_action('read', name or Model):
# the props of the message
message_props = ... | 769,030 |
This action handler factory returns an action handler that
responds to actions with CRUD types (following nautilus conventions)
and performs the necessary mutation on the model's database.
Args:
Model (nautilus.BaseModel): The model to delete when the action
receive... | def crud_handler(Model, name=None, **kwds):
# import the necessary modules
from nautilus.network.events import combine_action_handlers
from . import update_handler, create_handler, delete_handler, read_handler
# combine them into one handler
return combine_action_handlers(
create_hand... | 769,031 |
This factory returns an action handler that updates a new instance of
the specified model when an update action is received, assuming the
action follows nautilus conventions.
Args:
Model (nautilus.BaseModel): The model to update when the action
received.
Retur... | def update_handler(Model, name=None, **kwds):
async def action_handler(service, action_type, payload, props, notify=True, **kwds):
# if the payload represents a new instance of `Model`
if action_type == get_crud_action('update', name or Model):
try:
# the props of th... | 769,047 |
This function starts the service's network intefaces.
Args:
port (int): The port for the http server. | def run(self, host="localhost", port=8000, shutdown_timeout=60.0, **kwargs):
print("Running service on http://localhost:%i. " % port + \
"Press Ctrl+C to terminate.")
# apply the configuration to the service config
self.config.port = port
... | 769,073 |
This method provides a programatic way of added invidual routes
to the http server.
Args:
url (str): the url to be handled by the request_handler
def add_http_endpoint(self, url, request_handler):
    """Programmatically add an individual route to the http server.

    Args:
        url (str): the url to be handled by the request_handler
        request_handler (nautilus.network.RequestHandler): the handler
            for requests to `url`.
    """
    # '*' registers the handler for every HTTP method on this route
    self.app.router.add_route('*', url, request_handler)
This function is used to provide a sessionToken for later requests.
Args:
uid (str): The | async def register_user(self, password, **kwds):
# so make one
user = await self._create_remote_user(password=password, **kwds)
# if there is no pk field
if not 'pk' in user:
# make sure the user has a pk field
user['pk'] = user['id']
# the query... | 769,094 |
This function checks if there is a user with the same uid in the
remote user service
Args:
**kwds : the filters of the user to check for
Returns:
(bool): wether or not there is a matching user | async def _check_for_matching_user(self, **user_filters):
# there is a matching user if there are no errors and no results from
user_data = self._get_matching_user(user_filters)
# return true if there were no errors and at lease one result
return not user_data['errors'] and le... | 769,100 |
This method creates a service record in the remote user service
with the given email.
Args:
uid (str): the user identifier to create
Returns:
(dict): a summary of the user that was created | async def _create_remote_user(self, **payload):
# the action for reading user entries
read_action = get_crud_action(method='create', model='user')
# see if there is a matching user
user_data = await self.event_broker.ask(
action_type=read_action,
payload... | 769,101 |
Load datamat at path.
Parameters:
path : string
def load(path, variable='Datamat'):
    """Load a datamat stored at `path`.

    Parameters:
        path : string
            Absolute path of the file to load from.
        variable : string
            Name of the HDF5 group that holds the datamat.
    """
    datafile = h5py.File(path, 'r')
    try:
        # build the datamat before the file handle is released
        return fromhdf5(datafile[variable])
    finally:
        datafile.close()
Saves Datamat to path.
Parameters:
path : string
Absolute path of the file to save to. | def save(self, path):
f = h5py.File(path, 'w')
try:
fm_group = f.create_group('Datamat')
for field in self.fieldnames():
try:
fm_group.create_dataset(field, data = self.__dict__[field])
except (TypeError,) as e:
... | 769,778 |
Returns an iterator that iterates over unique values of field
Parameters:
field : string
Filters the datamat for every unique value in field and yields
the filtered datamat.
Returns:
def by_field(self, field):
    """Iterate over datamats filtered by each unique value of `field`.

    Parameters:
        field : string
            The datamat is filtered for every unique value in this
            field and the filtered datamat is yielded.

    Yields:
        Datamat filtered according to one of the unique values of
        `field`.
    """
    data = self.__dict__[field]
    for value in np.unique(data):
        yield self.filter(data == value)
Add a new field to the datamat.
Parameters:
name : string
Name of the new field
data : list
def add_field(self, name, data):
    """Add a new field to the datamat.

    Parameters:
        name : string
            Name of the new field
        data : list
            Data for the new field, must be same length as all other
            fields.

    Raises:
        ValueError: if a field with that name already exists, or if
            `data` does not have the expected length.
    """
    if name in self._fields:
        # previously raised a bare ValueError with no message
        raise ValueError("field '%s' already exists" % name)
    if len(data) != self._num_fix:
        raise ValueError(
            "data for field '%s' must have length %d, got %d"
            % (name, self._num_fix, len(data)))
    self._fields.append(name)
    self.__dict__[name] = data
Remove a field from the datamat.
Parameters:
name : string
def rm_field(self, name):
    """Remove a field from the datamat.

    Parameters:
        name : string
            Name of the field to be removed

    Raises:
        ValueError: if no field with that name exists.
    """
    if name not in self._fields:
        # previously raised a bare ValueError with no message
        raise ValueError("no field named '%s'" % name)
    self._fields.remove(name)
    del self.__dict__[name]
Computes the distribution of angle and length
combinations that were made as first saccades
Parameters:
fm : ocupy.fixmat
The fixation data to be analysed | def firstSacDist(fm):
ang, leng, ad, ld = anglendiff(fm, return_abs=True)
y_arg = leng[0][np.roll(fm.fix == min(fm.fix), 1)]/fm.pixels_per_degree
x_arg = reshift(ang[0][np.roll(fm.fix == min(fm.fix), 1)])
bins = [list(range(int(ceil(np.nanmax(y_arg)))+1)), np.linspace(-180, 180, 361)]
return ... | 769,797 |
Computes the distribution of trajectory lengths, i.e.
the number of saccades that were made as a part of one trajectory
Parameters:
fm : ocupy.fixmat
def trajLenDist(fm):
    """Compute the distribution of trajectory lengths.

    A trajectory length is the number of saccades that were made as
    part of one trajectory.

    Parameters:
        fm : ocupy.fixmat
            The fixation data to be analysed

    Returns:
        (cumsum, borders): cumulative distribution over trajectory
        lengths and the histogram bin borders.
    """
    first_fix = fm.fix == min(fm.fix)
    # the entry rolled in front of each trajectory start is the final
    # fixation number of the previous trajectory, i.e. its length
    traj_len = np.roll(fm.fix, 1)[first_fix]
    longest = max(traj_len)
    counts, borders = np.histogram(
        traj_len, bins=np.linspace(-0.5, longest + 0.5, longest + 2))
    cumsum = np.cumsum(counts.astype(float) / counts.sum())
    return cumsum, borders
Prepares the data to be replicated. Calculates the second-order length
and angle dependencies between saccades and stores them in a fitted
histogram.
Parameters:
fit : function, optional
The method to use for fitting the histogram
full_H1 : twod... | def initializeData(self, fit = None, full_H1=None, max_length = 40,
in_deg = True):
a, l, ad, ld = anglendiff(self.fm, roll=1, return_abs = True)
if in_deg:
self.fm.pixels_per_degree = 1
samples = np.zeros([3, len(l[0])])
samples[0] = l[0]/se... | 769,802 |
Calculates the coordinates after a specific saccade was made.
Parameters:
(x,y) : tuple of floats or ints
The coordinates before the saccade was made
angle : float or int
The angle that the next saccade encloses with the
horizonta... | def _calc_xy(self, xxx_todo_changeme, angle, length):
(x, y) = xxx_todo_changeme
return (x+(cos(radians(angle))*length),
y+(sin(radians(angle))*length)) | 769,803 |
Generates a given number of trajectories, using the method sample().
Returns a fixmat with the generated data.
Parameters:
num_samples : int, optional
The number of trajectories that shall be generated. | def sample_many(self, num_samples = 2000):
x = []
y = []
fix = []
sample = []
# XXX: Delete ProgressBar
pbar = ProgressBar(widgets=[Percentage(),Bar()], maxval=num_samples).start()
for s in range(0, num_samples):
for i, ... | 769,806 |
Load fixmat at path.
Parameters:
path : string
Absolute path of the file to load from. | def load(path):
f = h5py.File(path,'r')
if 'Fixmat' in f:
fm_group = f['Fixmat']
else:
fm_group = f['Datamat']
fields = {}
params = {}
for field, value in list(fm_group.items()):
fields[field] = np.array(value)
for param, value in list(fm_group.attrs.items()):
... | 769,809 |
Computes the relative bias, i.e. the distribution of saccade angles
and amplitudes.
Parameters:
fm : DataMat
The fixation data to use
scale_factor : double
Returns:
2D probability distribution of saccade angles and amplitudes. | def relative_bias(fm, scale_factor = 1, estimator = None):
assert 'fix' in fm.fieldnames(), "Can not work without fixation numbers"
excl = fm.fix - np.roll(fm.fix, 1) != 1
# Now calculate the direction where the NEXT fixation goes to
diff_x = (np.roll(fm.x, 1) - fm.x)[~excl]
diff_y = (np.rol... | 769,811 |
Loads a single fixmat (fixmatfile).
Parameters:
fixmatfile : string
The matlab fixmat that should be loaded.
categories : instance of stimuli.Categories, optional
Links data in categories to data in fixmat. | def FixmatFactory(fixmatfile, categories = None, var_name = 'fixmat', field_name='x'):
try:
data = loadmat(fixmatfile, struct_as_record = False)
keys = list(data.keys())
data = data[var_name][0][0]
except KeyError:
raise RuntimeError('%s is not a field of the matlab structur... | 769,813 |
Constructs an categories object for all image / category
combinations in the fixmat.
Parameters:
fm: FixMat
Used for extracting valid category/image combination.
loader: loader
Loader that accesses the stimuli for this fixmat
Returns:
Categories object | def FixmatStimuliFactory(fm, loader):
# Find all feature names
features = []
if loader.ftrpath:
assert os.access(loader.ftrpath, os.R_OK)
features = os.listdir(os.path.join(loader.ftrpath, str(fm.category[0])))
# Find all images in all categories
img_per_cat = {}
for ... | 769,831 |
Computes Chao-Shen corrected KL-divergence between prediction
and fdm made from fixations in fm.
Parameters :
prediction : np.ndarray
a fixation density map
fm : FixMat object | def kldiv_cs_model(prediction, fm):
# compute histogram of fixations needed for ChaoShen corrected kl-div
# image category must exist (>-1) and image_size must be non-empty
assert(len(fm.image_size) == 2 and (fm.image_size[0] > 0) and
(fm.image_size[1] > 0))
assert(-1 not in fm.category)
... | 769,915 |
approximates the area under the roc curve for sets of actuals and controls.
Uses all values appearing in actuals as thresholds and lower sum
interpolation. Also returns arrays of the true positive rate and the false
positive rate that can be used for plotting the roc curve.
Parameters:
actuals ... | def fast_roc(actuals, controls):
assert(type(actuals) is np.ndarray)
assert(type(controls) is np.ndarray)
actuals = np.ravel(actuals)
controls = np.ravel(controls)
if np.isnan(actuals).any():
raise RuntimeError('NaN found in actuals')
if np.isnan(controls).any():
raise Runt... | 769,921 |
Histogram based implementation of AUC unde ROC curve.
Parameters:
actuals : list
A list of numeric values for positive observations.
controls : list
A list of numeric values for negative observations. | def faster_roc(actuals, controls):
assert(type(actuals) is np.ndarray)
assert(type(controls) is np.ndarray)
if len(actuals)<500:
raise RuntimeError('This method might be incorrect when '+
'not enough actuals are present. Needs to be checked before '+
'proceeding... | 769,922 |
Insert event in queue, and keep it sorted assuming queue is sorted.
If event is already in queue, insert it to the right of the rightmost
event (to keep FIFO order).
Optional args lo (default 0) and hi (default len(a)) bound the
slice of a to be searched.
Args:
event: a (time in sec since u... | def insort_event_right(self, event, lo=0, hi=None):
if lo < 0:
raise ValueError('lo must be non-negative')
if hi is None:
hi = len(self.queue)
while lo < hi:
mid = (lo + hi) // 2
if event[0] < self.queue[mid][0]:
hi = mid
else:
lo = mid + 1
self.queue.... | 770,403 |
Helper for GQL parsing to extract values from GQL expressions.
This can extract the value from a GQL literal, return a Parameter
for a GQL bound parameter (:1 or :foo), and interprets casts like
KEY(...) and plain lists of values like (1, 2, 3).
Args:
func: A string indicating what kind of thing this is.
... | def _args_to_val(func, args):
from .google_imports import gql # Late import, to avoid name conflict.
vals = []
for arg in args:
if isinstance(arg, (int, long, basestring)):
val = Parameter(arg)
elif isinstance(arg, gql.Literal):
val = arg.Get()
else:
raise TypeError('Unexpected a... | 770,410 |
Helper for FQL parsing to turn a property name into a property object.
Args:
modelclass: The model class specified in the query.
name: The property name. This may contain dots which indicate
sub-properties of structured properties.
Returns:
A Property object.
Raises:
KeyError if the prop... | def _get_prop_from_modelclass(modelclass, name):
if name == '__key__':
return modelclass._key
parts = name.split('.')
part, more = parts[0], parts[1:]
prop = modelclass._properties.get(part)
if prop is None:
if issubclass(modelclass, model.Expando):
prop = model.GenericProperty(part)
els... | 770,411 |
Parse a GQL query string.
Args:
query_string: Full GQL query, e.g. 'SELECT * FROM Kind WHERE prop = 1'.
*args, **kwds: If present, used to call bind().
Returns:
def gql(query_string, *args, **kwds):
    """Parse a GQL query string.

    Args:
        query_string: Full GQL query, e.g.
            'SELECT * FROM Kind WHERE prop = 1'.
        *args, **kwds: If present, used to call bind().

    Returns:
        An instance of query_class.
    """
    query = _gql(query_string)
    if not (args or kwds):
        return query
    return query._bind(args, kwds)
Parse a GQL query string (internal version).
Args:
query_string: Full GQL query, e.g. 'SELECT * FROM Kind WHERE prop = 1'.
query_class: Optional class to use, default Query.
Returns:
An instance of query_class. | def _gql(query_string, query_class=Query):
from .google_imports import gql # Late import, to avoid name conflict.
gql_qry = gql.GQL(query_string)
kind = gql_qry.kind()
if kind is None:
# The query must be lacking a "FROM <kind>" class. Let Expando
# stand in for the model class (it won't actually b... | 770,413 |
Constructor.
Args:
key: The Parameter key, must be either an integer or a string. | def __init__(self, key):
if not isinstance(key, (int, long, basestring)):
raise TypeError('Parameter key must be an integer or string, not %s' %
(key,))
self.__key = key | 770,419 |
Constructor.
Args:
kind: Optional kind string.
ancestor: Optional ancestor Key.
filters: Optional Node representing a filter expression tree.
orders: Optional datastore_query.Order object.
app: Optional app id.
namespace: Optional namespace.
default_options: Optional QueryO... | def __init__(self, kind=None, ancestor=None, filters=None, orders=None,
app=None, namespace=None, default_options=None,
projection=None, group_by=None):
# TODO(arfuller): Accept projection=Model.key to mean keys_only.
# TODO(arfuller): Consider adding incremental function
... | 770,444 |
An auto-batching wrapper for memcache.get() or .get_multi().
Args:
key: Key to set. This must be a string; no prefix is applied.
for_cas: If True, request and store CAS ids on the Context.
namespace: Optional namespace.
deadline: Optional deadline for the RPC.
Returns:
A Future ... | def memcache_get(self, key, for_cas=False, namespace=None, use_cache=False,
deadline=None):
if not isinstance(key, basestring):
raise TypeError('key must be a string; received %r' % key)
if not isinstance(for_cas, bool):
raise TypeError('for_cas must be a bool; received %r' %... | 770,482 |
Init.
Args:
todo_tasklet: the tasklet that actually fires RPC and waits on a MultiRPC.
It should take a list of (future, arg) pairs and an "options" as
arguments. "options" are rpc options.
limit: max number of items to batch for each distinct value of "options". | def __init__(self, todo_tasklet, limit):
self._todo_tasklet = todo_tasklet
self._limit = limit
# A map from "options" to a list of (future, arg) tuple.
# future is the future return from a single async operations.
self._queues = {}
self._running = [] # A list of in-flight todo_tasklet futu... | 770,514 |
Adds an arg and gets back a future.
Args:
arg: one argument for _todo_tasklet.
options: rpc options.
Return:
An instance of future, representing the result of running
_todo_tasklet without batching. | def add(self, arg, options=None):
fut = tasklets.Future('%s.add(%s, %s)' % (self, arg, options))
todo = self._queues.get(options)
if todo is None:
utils.logging_debug('AutoBatcher(%s): creating new queue for %r',
self._todo_tasklet.__name__, options)
if not self._q... | 770,516 |
Passes exception along.
Args:
batch_fut: the batch future returned by running todo_tasklet.
todo: (fut, option) pair. fut is the future return by each add() call.
If the batch fut was successful, it has already called fut.set_result()
on other individual futs. This method only handles when the... | def _finished_callback(self, batch_fut, todo):
self._running.remove(batch_fut)
err = batch_fut.get_exception()
if err is not None:
tb = batch_fut.get_traceback()
for (fut, _) in todo:
if not fut.done():
fut.set_exception(err, tb) | 770,519 |
Return all namespaces in the specified range.
Args:
start: only return namespaces >= start if start is not None.
end: only return namespaces < end if end is not None.
Returns:
A list of namespace names between the (optional) start and end values. | def get_namespaces(start=None, end=None):
q = Namespace.query()
if start is not None:
q = q.filter(Namespace.key >= Namespace.key_for_namespace(start))
if end is not None:
q = q.filter(Namespace.key < Namespace.key_for_namespace(end))
return [x.namespace_name for x in q] | 770,523 |
Return all kinds in the specified range, for the current namespace.
Args:
start: only return kinds >= start if start is not None.
end: only return kinds < end if end is not None.
Returns:
A list of kind names between the (optional) start and end values. | def get_kinds(start=None, end=None):
q = Kind.query()
if start is not None and start != '':
q = q.filter(Kind.key >= Kind.key_for_kind(start))
if end is not None:
if end == '':
return []
q = q.filter(Kind.key < Kind.key_for_kind(end))
return [x.kind_name for x in q] | 770,524 |
Return all properties of kind in the specified range.
NOTE: This function does not return unindexed properties.
Args:
kind: name of kind whose properties you want.
start: only return properties >= start if start is not None.
end: only return properties < end if end is not None.
Returns:
A list ... | def get_properties_of_kind(kind, start=None, end=None):
q = Property.query(ancestor=Property.key_for_kind(kind))
if start is not None and start != '':
q = q.filter(Property.key >= Property.key_for_property(kind, start))
if end is not None:
if end == '':
return []
q = q.filter(Property.key < P... | 770,525 |
Return all representations of properties of kind in the specified range.
NOTE: This function does not return unindexed properties.
Args:
kind: name of kind whose properties you want.
start: only return properties >= start if start is not None.
end: only return properties < end if end is not None.
R... | def get_representations_of_kind(kind, start=None, end=None):
q = Property.query(ancestor=Property.key_for_kind(kind))
if start is not None and start != '':
q = q.filter(Property.key >= Property.key_for_property(kind, start))
if end is not None:
if end == '':
return {}
q = q.filter(Property.ke... | 770,526 |
Return the version of the entity group containing key.
Args:
key: a key for an entity group whose __entity_group__ key you want.
Returns:
The version of the entity group containing key. This version is
guaranteed to increase on every change to the entity group. The version
may increase even in the... | def get_entity_group_version(key):
eg = EntityGroup.key_for_entity_group(key).get()
if eg:
return eg.version
else:
return None | 770,527 |
Return the Key for a namespace.
Args:
namespace: A string giving the namespace whose key is requested.
Returns:
The Key for the namespace. | def key_for_namespace(cls, namespace):
if namespace:
return model.Key(cls.KIND_NAME, namespace)
else:
return model.Key(cls.KIND_NAME, cls.EMPTY_NAMESPACE_ID) | 770,528 |
Return the __property__ key for property of kind.
Args:
kind: kind whose key is requested.
property: property whose key is requested.
Returns:
The key for property of kind. | def key_for_property(cls, kind, property):
return model.Key(Kind.KIND_NAME, kind, Property.KIND_NAME, property) | 770,529 |
Return the kind specified by a given __property__ key.
Args:
key: key whose kind name is requested.
Returns:
The kind specified by key. | def key_to_kind(cls, key):
if key.kind() == Kind.KIND_NAME:
return key.id()
else:
return key.parent().id() | 770,530 |
Return the key for the entity group containing key.
Args:
key: a key for an entity group whose __entity_group__ key you want.
Returns:
The __entity_group__ key for the entity group containing key. | def key_for_entity_group(cls, key):
return model.Key(cls.KIND_NAME, cls.ID, parent=key.root()) | 770,531 |
Helper to construct a ContextOptions object from keyword arguments.
Args:
ctx_options: A dict of keyword arguments.
config_cls: Optional Configuration class to use, default ContextOptions.
Note that either 'options' or 'config' can be used to pass another
Configuration object, but not both. If another ... | def _make_ctx_options(ctx_options, config_cls=ContextOptions):
if not ctx_options:
return None
for key in list(ctx_options):
translation = _OPTION_TRANSLATIONS.get(key)
if translation:
if translation in ctx_options:
raise ValueError('Cannot specify %s and %s at the same time' %
... | 770,534 |
Set the context cache policy function.
Args:
func: A function that accepts a Key instance as argument and returns
a bool indicating if it should be cached. May be None. | def set_cache_policy(self, func):
if func is None:
func = self.default_cache_policy
elif isinstance(func, bool):
func = lambda unused_key, flag=func: flag
self._cache_policy = func | 770,543 |
Return whether to use the context cache for this key.
Args:
key: Key instance.
options: ContextOptions instance, or None.
Returns:
True if the key should be cached, False otherwise. | def _use_cache(self, key, options=None):
flag = ContextOptions.use_cache(options)
if flag is None:
flag = self._cache_policy(key)
if flag is None:
flag = ContextOptions.use_cache(self._conn.config)
if flag is None:
flag = True
return flag | 770,544 |
Set the memcache policy function.
Args:
func: A function that accepts a Key instance as argument and returns
a bool indicating if it should be cached. May be None. | def set_memcache_policy(self, func):
if func is None:
func = self.default_memcache_policy
elif isinstance(func, bool):
func = lambda unused_key, flag=func: flag
self._memcache_policy = func | 770,545 |
Return whether to use memcache for this key.
Args:
key: Key instance.
options: ContextOptions instance, or None.
Returns:
True if the key should be cached in memcache, False otherwise. | def _use_memcache(self, key, options=None):
flag = ContextOptions.use_memcache(options)
if flag is None:
flag = self._memcache_policy(key)
if flag is None:
flag = ContextOptions.use_memcache(self._conn.config)
if flag is None:
flag = True
return flag | 770,546 |
Default datastore policy.
This defers to _use_datastore on the Model class.
Args:
key: Key instance.
Returns:
A bool or None. | def default_datastore_policy(key):
flag = None
if key is not None:
modelclass = model.Model._kind_map.get(key.kind())
if modelclass is not None:
policy = getattr(modelclass, '_use_datastore', None)
if policy is not None:
if isinstance(policy, bool):
flag = ... | 770,547 |
Set the context datastore policy function.
Args:
func: A function that accepts a Key instance as argument and returns
a bool indicating if it should use the datastore. May be None. | def set_datastore_policy(self, func):
if func is None:
func = self.default_datastore_policy
elif isinstance(func, bool):
func = lambda unused_key, flag=func: flag
self._datastore_policy = func | 770,548 |
Return whether to use the datastore for this key.
Args:
key: Key instance.
options: ContextOptions instance, or None.
Returns:
True if the datastore should be used, False otherwise. | def _use_datastore(self, key, options=None):
flag = ContextOptions.use_datastore(options)
if flag is None:
flag = self._datastore_policy(key)
if flag is None:
flag = ContextOptions.use_datastore(self._conn.config)
if flag is None:
flag = True
return flag | 770,549 |
Default memcache timeout policy.
This defers to _memcache_timeout on the Model class.
Args:
key: Key instance.
Returns:
Memcache timeout to use (integer), or None. | def default_memcache_timeout_policy(key):
timeout = None
if key is not None and isinstance(key, model.Key):
modelclass = model.Model._kind_map.get(key.kind())
if modelclass is not None:
policy = getattr(modelclass, '_memcache_timeout', None)
if policy is not None:
if i... | 770,550 |
Set the policy function for memcache timeout (expiration).
Args:
func: A function that accepts a key instance as argument and returns
an integer indicating the desired memcache timeout. May be None.
If the function returns 0 it implies the default timeout. | def set_memcache_timeout_policy(self, func):
if func is None:
func = self.default_memcache_timeout_policy
elif isinstance(func, (int, long)):
func = lambda unused_key, flag=func: flag
self._memcache_timeout_policy = func | 770,551 |
Returns a cached Model instance given the entity key if available.
Args:
key: Key instance.
Returns:
A Model instance if the key exists in the cache. | def _load_from_cache_if_available(self, key):
if key in self._cache:
entity = self._cache[key] # May be None, meaning "doesn't exist".
if entity is None or entity._key == key:
# If entity's key didn't change later, it is ok.
# See issue 13. http://goo.gl/jxjOP
raise taskle... | 770,553 |
Return a Model instance given the entity key.
It will use the context cache if the cache policy for the given
key is enabled.
Args:
key: Key instance.
**ctx_options: Context options.
Returns:
A Model instance if the key exists in the datastore; None otherwise. | def get(self, key, **ctx_options):
options = _make_ctx_options(ctx_options)
use_cache = self._use_cache(key, options)
if use_cache:
self._load_from_cache_if_available(key)
use_datastore = self._use_datastore(key, options)
if (use_datastore and
isinstance(self._conn, datastore_rpc... | 770,554 |
Marks a task as done.
Args:
task_id: The integer id of the task to update.
Raises:
ValueError: if the requested task doesn't exist. | def mark_done(task_id):
task = Task.get_by_id(task_id)
if task is None:
raise ValueError('Task with id %d does not exist' % task_id)
task.done = True
task.put() | 770,582 |
Converts a list of tasks to a list of string representations.
Args:
tasks: A list of the tasks to convert.
Returns:
A list of string formatted tasks. | def format_tasks(tasks):
return ['%d : %s (%s)' % (task.key.id(),
task.description,
('done' if task.done
else 'created %s' % task.created))
for task in tasks] | 770,583 |
Accepts a string command and performs an action.
Args:
command: the command to run as a string. | def handle_command(command):
try:
cmds = command.split(None, 1)
cmd = cmds[0]
if cmd == 'new':
add_task(get_arg(cmds))
elif cmd == 'done':
mark_done(int(get_arg(cmds)))
elif cmd == 'list':
for task in format_tasks(list_tasks()):
print task
elif cmd == 'delete':
... | 770,584 |
Parse a BlobInfo record from file upload field_storage.
Args:
field_storage: cgi.FieldStorage that represents uploaded blob.
Returns:
BlobInfo record as parsed from the field-storage instance.
None if there was no field_storage.
Raises:
BlobInfoParseError when provided field_storage does not co... | def parse_blob_info(field_storage):
if field_storage is None:
return None
field_name = field_storage.name
def get_value(dct, name):
value = dct.get(name, None)
if value is None:
raise BlobInfoParseError(
'Field %s has no %s.' % (field_name, name))
return value
filename = ge... | 770,590 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.