id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
16,700
payu-org/payu
payu/manifest.py
PayuManifest.make_link
def make_link(self, filepath):
    """
    Payu integration function for creating symlinks in work
    directories which point back to the original file.

    :param filepath: path, relative to the work directory, of the
        file to link (or copy) into place.
    """
    # Check file exists. It may have been deleted but still in manifest
    if not os.path.exists(self.fullpath(filepath)):
        print('File not found: {filepath}'.format(
            filepath=self.fullpath(filepath)))
        if self.contains(filepath):
            print('removing from manifest')
            self.delete(filepath)
            self.needsync = True
    else:
        try:
            destdir = os.path.dirname(filepath)
            # Make destination directory if not already exists
            # Necessary because sometimes this is called before
            # individual model setup.  Guard against an empty dirname
            # (filepath in the current directory): os.makedirs('')
            # would raise.
            if destdir and not os.path.exists(destdir):
                os.makedirs(destdir)
            if self.copy_file(filepath):
                shutil.copy(self.fullpath(filepath), filepath)
                # Copies are read-only for everyone, writable only by
                # the owner.
                perm = (stat.S_IRUSR | stat.S_IRGRP
                        | stat.S_IROTH | stat.S_IWUSR)
                os.chmod(filepath, perm)
            else:
                make_symlink(self.fullpath(filepath), filepath)
        except Exception:
            # BUG FIX: copy_file is a method, so the previous
            # `if self.copy_file` truthiness test was always True and
            # errors were always reported as 'copying'.  Call it with
            # the filepath instead.
            action = 'copying' if self.copy_file(filepath) else 'linking'
            print('payu: error: {action} orig: {orig} '
                  'local: {local}'.format(action=action,
                                          orig=self.fullpath(filepath),
                                          local=filepath))
            raise
python
def make_link(self, filepath): # Check file exists. It may have been deleted but still in manifest if not os.path.exists(self.fullpath(filepath)): print('File not found: {filepath}'.format( filepath=self.fullpath(filepath))) if self.contains(filepath): print('removing from manifest') self.delete(filepath) self.needsync = True else: try: destdir = os.path.dirname(filepath) # Make destination directory if not already exists # Necessary because sometimes this is called before # individual model setup if not os.path.exists(destdir): os.makedirs(destdir) if self.copy_file(filepath): shutil.copy(self.fullpath(filepath), filepath) perm = (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR) os.chmod(filepath, perm) else: make_symlink(self.fullpath(filepath), filepath) except Exception: action = 'copying' if self.copy_file else 'linking' print('payu: error: {action} orig: {orig} ' 'local: {local}'.format(action=action, orig=self.fullpath(filepath), local=filepath)) raise
[ "def", "make_link", "(", "self", ",", "filepath", ")", ":", "# Check file exists. It may have been deleted but still in manifest", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "fullpath", "(", "filepath", ")", ")", ":", "print", "(", "'File n...
Payu integration function for creating symlinks in work directories which point back to the original file.
[ "Payu", "integration", "function", "for", "creating", "symlinks", "in", "work", "directories", "which", "point", "back", "to", "the", "original", "file", "." ]
1442a9a226012eff248b8097cc1eaabc3e224867
https://github.com/payu-org/payu/blob/1442a9a226012eff248b8097cc1eaabc3e224867/payu/manifest.py#L183-L217
16,701
payu-org/payu
payu/manifest.py
Manifest.add_filepath
def add_filepath(self, manifest, filepath, fullpath, copy=False):
    """
    Wrapper to the add_filepath function in PayuManifest. Prevents
    outside code from directly calling anything in PayuManifest.
    """
    target = self.manifests[manifest]
    normalised = os.path.normpath(filepath)
    added = target.add_filepath(normalised, fullpath, copy)
    if added:
        # Only link if filepath was added
        target.make_link(normalised)
python
def add_filepath(self, manifest, filepath, fullpath, copy=False): filepath = os.path.normpath(filepath) if self.manifests[manifest].add_filepath(filepath, fullpath, copy): # Only link if filepath was added self.manifests[manifest].make_link(filepath)
[ "def", "add_filepath", "(", "self", ",", "manifest", ",", "filepath", ",", "fullpath", ",", "copy", "=", "False", ")", ":", "filepath", "=", "os", ".", "path", ".", "normpath", "(", "filepath", ")", "if", "self", ".", "manifests", "[", "manifest", "]",...
Wrapper to the add_filepath function in PayuManifest. Prevents outside code from directly calling anything in PayuManifest.
[ "Wrapper", "to", "the", "add_filepath", "function", "in", "PayuManifest", ".", "Prevents", "outside", "code", "from", "directly", "calling", "anything", "in", "PayuManifest", "." ]
1442a9a226012eff248b8097cc1eaabc3e224867
https://github.com/payu-org/payu/blob/1442a9a226012eff248b8097cc1eaabc3e224867/payu/manifest.py#L393-L401
16,702
payu-org/payu
payu/runlog.py
commit_hash
def commit_hash(dir='.'):
    """
    Return commit hash for HEAD of checked out branch of the
    specified directory, or None if the lookup fails.
    """
    git_cmd = ['git', 'rev-parse', 'HEAD']
    try:
        # Discard git's stderr chatter; only stdout matters here.
        with open(os.devnull, 'w') as sink:
            output = subprocess.check_output(git_cmd, cwd=dir, stderr=sink)
    except subprocess.CalledProcessError:
        # Not a git repository (or no commits yet).
        return None
    if sys.version_info.major > 2:
        # check_output returns bytes on Python 3.
        output = output.decode('ascii')
    return output.strip()
python
def commit_hash(dir='.'): cmd = ['git', 'rev-parse', 'HEAD'] try: with open(os.devnull, 'w') as devnull: revision_hash = subprocess.check_output( cmd, cwd=dir, stderr=devnull ) if sys.version_info.major > 2: revision_hash = revision_hash.decode('ascii') return revision_hash.strip() except subprocess.CalledProcessError: return None
[ "def", "commit_hash", "(", "dir", "=", "'.'", ")", ":", "cmd", "=", "[", "'git'", ",", "'rev-parse'", ",", "'HEAD'", "]", "try", ":", "with", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "as", "devnull", ":", "revision_hash", "=", "subprocess"...
Return commit hash for HEAD of checked out branch of the specified directory.
[ "Return", "commit", "hash", "for", "HEAD", "of", "checked", "out", "branch", "of", "the", "specified", "directory", "." ]
1442a9a226012eff248b8097cc1eaabc3e224867
https://github.com/payu-org/payu/blob/1442a9a226012eff248b8097cc1eaabc3e224867/payu/runlog.py#L283-L304
16,703
payu-org/payu
payu/runlog.py
Runlog.create_manifest
def create_manifest(self):
    """Construct the list of files to be tracked by the runlog."""
    self.manifest = []

    config_path = os.path.join(self.expt.control_path,
                               DEFAULT_CONFIG_FNAME)
    if os.path.isfile(config_path):
        self.manifest.append(config_path)

    for model in self.expt.models:
        # Track both required and optional configuration files.
        for fname in model.config_files + model.optional_config_files:
            self.manifest.append(os.path.join(model.control_path, fname))

    # Add file manifests to runlog manifest
    self.manifest.extend(mf.path for mf in self.expt.manifest)
python
def create_manifest(self): config_path = os.path.join(self.expt.control_path, DEFAULT_CONFIG_FNAME) self.manifest = [] if os.path.isfile(config_path): self.manifest.append(config_path) for model in self.expt.models: config_files = model.config_files + model.optional_config_files self.manifest.extend(os.path.join(model.control_path, f) for f in config_files) # Add file manifests to runlog manifest for mf in self.expt.manifest: self.manifest.append(mf.path)
[ "def", "create_manifest", "(", "self", ")", ":", "config_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "expt", ".", "control_path", ",", "DEFAULT_CONFIG_FNAME", ")", "self", ".", "manifest", "=", "[", "]", "if", "os", ".", "path", ".", ...
Construct the list of files to be tracked by the runlog.
[ "Construct", "the", "list", "of", "files", "to", "be", "tracked", "by", "the", "runlog", "." ]
1442a9a226012eff248b8097cc1eaabc3e224867
https://github.com/payu-org/payu/blob/1442a9a226012eff248b8097cc1eaabc3e224867/payu/runlog.py#L53-L71
16,704
payu-org/payu
payu/runlog.py
Runlog.push
def push(self):
    """Push the changes to the remote repository.

    Usage: payu push

    This command pushes local runlog changes to the remote runlog
    repository, currently named `payu`, using the SSH key associated
    with this experiment.  For an experiment `test`, it is equivalent
    to the following command::

        ssh-agent bash -c "
            ssh-add $HOME/.ssh/payu/id_rsa_payu_test
            git push --all payu
        "
    """
    expt_name = self.config.get('name', self.expt.name)
    ssh_key = self.config.get('sshid', 'id_rsa_payu_' + expt_name)
    ssh_key_path = os.path.join(
        os.path.expanduser('~'), '.ssh', 'payu', ssh_key)

    if not os.path.isfile(ssh_key_path):
        print('payu: error: Github SSH key {key} not found.'
              ''.format(key=ssh_key_path))
        print('payu: error: Run `payu ghsetup` to generate a new key.')
        sys.exit(-1)

    # Run the push under a throwaway ssh-agent holding only this key.
    cmd = ('ssh-agent bash -c "ssh-add {key}; git push --all payu"'
           ''.format(key=ssh_key_path))
    subprocess.check_call(shlex.split(cmd), cwd=self.expt.control_path)
python
def push(self): expt_name = self.config.get('name', self.expt.name) default_ssh_key = 'id_rsa_payu_' + expt_name ssh_key = self.config.get('sshid', default_ssh_key) ssh_key_path = os.path.join(os.path.expanduser('~'), '.ssh', 'payu', ssh_key) if not os.path.isfile(ssh_key_path): print('payu: error: Github SSH key {key} not found.' ''.format(key=ssh_key_path)) print('payu: error: Run `payu ghsetup` to generate a new key.') sys.exit(-1) cmd = ('ssh-agent bash -c "ssh-add {key}; git push --all payu"' ''.format(key=ssh_key_path)) subprocess.check_call(shlex.split(cmd), cwd=self.expt.control_path)
[ "def", "push", "(", "self", ")", ":", "expt_name", "=", "self", ".", "config", ".", "get", "(", "'name'", ",", "self", ".", "expt", ".", "name", ")", "default_ssh_key", "=", "'id_rsa_payu_'", "+", "expt_name", "ssh_key", "=", "self", ".", "config", "."...
Push the changes to the remote repository. Usage: payu push This command pushes local runlog changes to the remote runlog repository, currently named `payu`, using the SSH key associated with this experiment. For an experiment `test`, it is equivalent to the following command:: ssh-agent bash -c " ssh-add $HOME/.ssh/payu/id_rsa_payu_test git push --all payu "
[ "Push", "the", "changes", "to", "the", "remote", "repository", "." ]
1442a9a226012eff248b8097cc1eaabc3e224867
https://github.com/payu-org/payu/blob/1442a9a226012eff248b8097cc1eaabc3e224867/payu/runlog.py#L107-L138
16,705
mozilla/python_moztelemetry
moztelemetry/shared_telemetry_utils.py
add_expiration_postfix
def add_expiration_postfix(expiration):
    """ Formats the expiration version and adds a version postfix if needed.

    :param expiration: the expiration version string.
    :return: the modified expiration string.
    """
    # Bare major version ("55") -> first nightly of that version.
    if re.match(r'^[1-9][0-9]*$', expiration):
        return '{}.0a1'.format(expiration)
    # Major.0 ("55.0") -> first nightly of that version.
    if re.match(r'^[1-9][0-9]*\.0$', expiration):
        return '{}a1'.format(expiration)
    # Anything else ("never", "55.2", "0", ...) is left untouched.
    return expiration
python
def add_expiration_postfix(expiration): if re.match(r'^[1-9][0-9]*$', expiration): return expiration + ".0a1" if re.match(r'^[1-9][0-9]*\.0$', expiration): return expiration + "a1" return expiration
[ "def", "add_expiration_postfix", "(", "expiration", ")", ":", "if", "re", ".", "match", "(", "r'^[1-9][0-9]*$'", ",", "expiration", ")", ":", "return", "expiration", "+", "\".0a1\"", "if", "re", ".", "match", "(", "r'^[1-9][0-9]*\\.0$'", ",", "expiration", ")"...
Formats the expiration version and adds a version postfix if needed. :param expiration: the expiration version string. :return: the modified expiration string.
[ "Formats", "the", "expiration", "version", "and", "adds", "a", "version", "postfix", "if", "needed", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/shared_telemetry_utils.py#L123-L135
16,706
mozilla/python_moztelemetry
moztelemetry/shared_telemetry_utils.py
load_yaml_file
def load_yaml_file(filename):
    """ Load a YAML file from disk, throw a ParserError on failure.

    :param filename: path of the YAML file to load.
    :return: the parsed content of the file.
    :raises ParserError: if the file cannot be opened or parsed.
    """
    try:
        with open(filename, 'r') as f:
            return yaml.safe_load(f)
    except IOError as e:
        # BUG FIX: exceptions have no `.message` attribute on Python 3
        # (it was removed after Python 2); use str(e) for a portable
        # description.
        raise ParserError('Error opening ' + filename + ': ' + str(e))
    except ValueError as e:
        raise ParserError('Error parsing processes in {}: {}'
                          .format(filename, str(e)))
python
def load_yaml_file(filename): try: with open(filename, 'r') as f: return yaml.safe_load(f) except IOError as e: raise ParserError('Error opening ' + filename + ': ' + e.message) except ValueError as e: raise ParserError('Error parsing processes in {}: {}' .format(filename, e.message))
[ "def", "load_yaml_file", "(", "filename", ")", ":", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "return", "yaml", ".", "safe_load", "(", "f", ")", "except", "IOError", "as", "e", ":", "raise", "ParserError", "(", "'E...
Load a YAML file from disk, throw a ParserError on failure.
[ "Load", "a", "YAML", "file", "from", "disk", "throw", "a", "ParserError", "on", "failure", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/shared_telemetry_utils.py#L138-L147
16,707
mozilla/python_moztelemetry
moztelemetry/shared_telemetry_utils.py
StringTable.writeDefinition
def writeDefinition(self, f, name):
    """Writes the string table to a file as a C const char array.

    This writes out the string table as one single C char array for
    memory size reasons, separating the individual strings with '\\0'
    characters. This way we can index directly into the string array
    and avoid the additional storage costs for the pointers to them
    (and potential extra relocations for those).

    :param f: the output stream.
    :param name: the name of the output array.
    :raises ValueError: if a string contains the sequence '*/', which
        would prematurely terminate the generated C comment.
    """
    # BUG FIX: dict.items() returns a view on Python 3, which has no
    # .sort(); use sorted() to order entries by their table offset.
    entries = sorted(self.table.items(), key=lambda x: x[1])

    # Avoid null-in-string warnings with GCC and potentially
    # overlong string constants; write everything out the long way.
    def explodeToCharArray(string):
        def toCChar(s):
            if s == "'":
                return "'\\''"
            else:
                return "'%s'" % s
        return ", ".join(map(toCChar, string))

    f.write("const char %s[] = {\n" % name)
    for (string, offset) in entries:
        if "*/" in string:
            raise ValueError("String in string table contains unexpected sequence '*/': %s" % string)
        e = explodeToCharArray(string)
        if e:
            # Reuse the already-exploded form instead of recomputing it.
            f.write(" /* %5d - \"%s\" */ %s, '\\0',\n" % (offset, string, e))
        else:
            f.write(" /* %5d - \"%s\" */ '\\0',\n" % (offset, string))
    f.write("};\n\n")
python
def writeDefinition(self, f, name): entries = self.table.items() entries.sort(key=lambda x: x[1]) # Avoid null-in-string warnings with GCC and potentially # overlong string constants; write everything out the long way. def explodeToCharArray(string): def toCChar(s): if s == "'": return "'\\''" else: return "'%s'" % s return ", ".join(map(toCChar, string)) f.write("const char %s[] = {\n" % name) for (string, offset) in entries: if "*/" in string: raise ValueError("String in string table contains unexpected sequence '*/': %s" % string) e = explodeToCharArray(string) if e: f.write(" /* %5d - \"%s\" */ %s, '\\0',\n" % (offset, string, explodeToCharArray(string))) else: f.write(" /* %5d - \"%s\" */ '\\0',\n" % (offset, string)) f.write("};\n\n")
[ "def", "writeDefinition", "(", "self", ",", "f", ",", "name", ")", ":", "entries", "=", "self", ".", "table", ".", "items", "(", ")", "entries", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ")", "# Avoid null-in-string warnin...
Writes the string table to a file as a C const char array. This writes out the string table as one single C char array for memory size reasons, separating the individual strings with '\0' characters. This way we can index directly into the string array and avoid the additional storage costs for the pointers to them (and potential extra relocations for those). :param f: the output stream. :param name: the name of the output array.
[ "Writes", "the", "string", "table", "to", "a", "file", "as", "a", "C", "const", "char", "array", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/shared_telemetry_utils.py#L74-L110
16,708
Karaage-Cluster/karaage
karaage/templatetags/karaage_tags.py
comments
def comments(context, obj):
    """ Render comments for obj. """
    ct = ContentType.objects.get_for_model(obj.__class__)
    # Comments are LogEntry rows flagged COMMENT for this object.
    return {
        'obj': obj,
        'comment_list': LogEntry.objects.filter(
            content_type=ct,
            object_id=obj.pk,
            action_flag=COMMENT,
        ),
        'is_admin': context['is_admin'],
    }
python
def comments(context, obj): content_type = ContentType.objects.get_for_model(obj.__class__) comment_list = LogEntry.objects.filter( content_type=content_type, object_id=obj.pk, action_flag=COMMENT ) return { 'obj': obj, 'comment_list': comment_list, 'is_admin': context['is_admin'], }
[ "def", "comments", "(", "context", ",", "obj", ")", ":", "content_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "obj", ".", "__class__", ")", "comment_list", "=", "LogEntry", ".", "objects", ".", "filter", "(", "content_type", "=", ...
Render comments for obj.
[ "Render", "comments", "for", "obj", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/templatetags/karaage_tags.py#L98-L110
16,709
Karaage-Cluster/karaage
karaage/machines/xmlrpc.py
get_disk_quota
def get_disk_quota(username, machine_name=None):
    """ Returns disk quota for username in KB """
    try:
        account = Account.objects.get(
            username=username, date_deleted__isnull=True)
    except Account.DoesNotExist:
        return 'Account not found'

    quota = account.get_disk_quota()
    # 1048576 == 1024**2; presumably scales GB to KB — confirm against
    # Account.get_disk_quota's units.
    return False if quota is None else quota * 1048576
python
def get_disk_quota(username, machine_name=None): try: ua = Account.objects.get( username=username, date_deleted__isnull=True) except Account.DoesNotExist: return 'Account not found' result = ua.get_disk_quota() if result is None: return False return result * 1048576
[ "def", "get_disk_quota", "(", "username", ",", "machine_name", "=", "None", ")", ":", "try", ":", "ua", "=", "Account", ".", "objects", ".", "get", "(", "username", "=", "username", ",", "date_deleted__isnull", "=", "True", ")", "except", "Account", ".", ...
Returns disk quota for username in KB
[ "Returns", "disk", "quota", "for", "username", "in", "KB" ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/machines/xmlrpc.py#L25-L41
16,710
mozilla/python_moztelemetry
moztelemetry/standards.py
snap_to_beginning_of_week
def snap_to_beginning_of_week(day, weekday_start="Sunday"):
    """ Get the first day of the current week.

    :param day: The input date to snap.
    :param weekday_start: Either "Monday" or "Sunday", indicating the
        first day of the week.
    :returns: A date representing the first day of the current week.
    """
    # BUG FIX: the original used `weekday_start is "Sunday"`, an
    # identity check that only works when the interpreter happens to
    # intern both strings; compare by value instead.
    if weekday_start == "Sunday":
        # date.weekday() is 0 for Monday, so shift by one day to make
        # Sunday the origin.
        delta_days = (day.weekday() + 1) % 7
    else:
        delta_days = day.weekday()
    return day - timedelta(days=delta_days)
python
def snap_to_beginning_of_week(day, weekday_start="Sunday"): delta_days = ((day.weekday() + 1) % 7) if weekday_start is "Sunday" else day.weekday() return day - timedelta(days=delta_days)
[ "def", "snap_to_beginning_of_week", "(", "day", ",", "weekday_start", "=", "\"Sunday\"", ")", ":", "delta_days", "=", "(", "(", "day", ".", "weekday", "(", ")", "+", "1", ")", "%", "7", ")", "if", "weekday_start", "is", "\"Sunday\"", "else", "day", ".", ...
Get the first day of the current week. :param day: The input date to snap. :param weekday_start: Either "Monday" or "Sunday", indicating the first day of the week. :returns: A date representing the first day of the current week.
[ "Get", "the", "first", "day", "of", "the", "current", "week", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/standards.py#L96-L104
16,711
mozilla/python_moztelemetry
moztelemetry/standards.py
get_last_week_range
def get_last_week_range(weekday_start="Sunday"):
    """ Gets the date for the first and the last day of the previous
    complete week.

    :param weekday_start: Either "Monday" or "Sunday", indicating the
        first day of the week.
    :returns: A tuple containing two date objects, for the first and
        the last day of the week respectively.
    """
    # Get the first day of the past complete week.
    week_start = (snap_to_beginning_of_week(date.today(), weekday_start)
                  - timedelta(weeks=1))
    return (week_start, week_start + timedelta(days=6))
python
def get_last_week_range(weekday_start="Sunday"): today = date.today() # Get the first day of the past complete week. start_of_week = snap_to_beginning_of_week(today, weekday_start) - timedelta(weeks=1) end_of_week = start_of_week + timedelta(days=6) return (start_of_week, end_of_week)
[ "def", "get_last_week_range", "(", "weekday_start", "=", "\"Sunday\"", ")", ":", "today", "=", "date", ".", "today", "(", ")", "# Get the first day of the past complete week.", "start_of_week", "=", "snap_to_beginning_of_week", "(", "today", ",", "weekday_start", ")", ...
Gets the date for the first and the last day of the previous complete week. :param weekday_start: Either "Monday" or "Sunday", indicating the first day of the week. :returns: A tuple containing two date objects, for the first and the last day of the week respectively.
[ "Gets", "the", "date", "for", "the", "first", "and", "the", "last", "day", "of", "the", "previous", "complete", "week", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/standards.py#L116-L127
16,712
mozilla/python_moztelemetry
moztelemetry/standards.py
get_last_month_range
def get_last_month_range():
    """ Gets the date for the first and the last day of the previous
    complete month.

    :returns: A tuple containing two date objects, for the first and
        the last day of the month respectively.
    """
    today = date.today()
    # The day before the start of this month is the last day of the
    # previous month; snapping that back gives its first day.
    last_day = snap_to_beginning_of_month(today) - timedelta(days=1)
    first_day = snap_to_beginning_of_month(last_day)
    return (first_day, last_day)
python
def get_last_month_range(): today = date.today() # Get the last day for the previous month. end_of_last_month = snap_to_beginning_of_month(today) - timedelta(days=1) start_of_last_month = snap_to_beginning_of_month(end_of_last_month) return (start_of_last_month, end_of_last_month)
[ "def", "get_last_month_range", "(", ")", ":", "today", "=", "date", ".", "today", "(", ")", "# Get the last day for the previous month.", "end_of_last_month", "=", "snap_to_beginning_of_month", "(", "today", ")", "-", "timedelta", "(", "days", "=", "1", ")", "star...
Gets the date for the first and the last day of the previous complete month. :returns: A tuple containing two date objects, for the first and the last day of the month respectively.
[ "Gets", "the", "date", "for", "the", "first", "and", "the", "last", "day", "of", "the", "previous", "complete", "month", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/standards.py#L130-L140
16,713
mozilla/python_moztelemetry
moztelemetry/standards.py
read_main_summary
def read_main_summary(spark,
                      submission_date_s3=None,
                      sample_id=None,
                      mergeSchema=True,
                      path='s3://telemetry-parquet/main_summary/v4'):
    """ Efficiently read main_summary parquet data.

    Read data from the given path, optionally filtering to a specified
    set of partition values first. This can save a time, particularly
    if `mergeSchema` is True.

    Args:
        spark: Spark session
        submission_date_s3: Optional list of values to filter the
            `submission_date_s3` partition. Default is to read all
            partitions. Each value should be in the form `YYYYMMDD`.
        sample_id: Optional list of values to filter the `sample_id`
            partition. Default is to read all partitions.
        mergeSchema (bool): Determines whether or not to merge the
            schemas of the resulting parquet files (ie. whether to
            support schema evolution or not). Default is to merge
            schemas.
        path (str): Location (disk or S3) from which to read data.
            Default is to read from the "production" location on S3.

    Returns:
        A DataFrame loaded from the specified partitions.
    """
    base_path = path
    # Specifying basePath retains the partition fields even
    # if we read a bunch of paths separately.
    reader = spark.read.option("basePath", base_path)
    if mergeSchema:
        reader = reader.option("mergeSchema", "true")

    have_dates = submission_date_s3 is not None
    have_samples = sample_id is not None

    if have_dates:
        # Enumerate the requested partition directories explicitly so
        # only those are scanned.
        paths = []
        for sd in submission_date_s3:
            if have_samples:
                for si in sample_id:
                    paths.append(
                        "{}/submission_date_s3={}/sample_id={}/".format(
                            base_path, sd, si))
            else:
                paths.append(
                    "{}/submission_date_s3={}/".format(base_path, sd))
        return reader.parquet(*paths)

    if have_samples:
        # Ugh, why? We would have to iterate the entire path to identify
        # all the submission_date_s3 partitions, which may end up being
        # slower.
        data = reader.parquet(base_path)
        criteria = "sample_id IN ({})".format(
            ",".join("{}".format(s) for s in sample_id))
        return data.where(criteria)

    # Neither partition is filtered.
    return reader.parquet(base_path)
python
def read_main_summary(spark, submission_date_s3=None, sample_id=None, mergeSchema=True, path='s3://telemetry-parquet/main_summary/v4'): base_path = path # Specifying basePath retains the partition fields even # if we read a bunch of paths separately. reader = spark.read.option("basePath", base_path) if mergeSchema: reader = reader.option("mergeSchema", "true") if submission_date_s3 is not None and sample_id is None: paths = ["{}/submission_date_s3={}/".format(base_path, s) for s in submission_date_s3] return reader.parquet(*paths) if submission_date_s3 is not None and sample_id is not None: paths = [] for sd in submission_date_s3: for si in sample_id: paths.append("{}/submission_date_s3={}/sample_id={}/".format( base_path, sd, si)) return reader.parquet(*paths) if submission_date_s3 is None and sample_id is not None: # Ugh, why? We would have to iterate the entire path to identify # all the submission_date_s3 partitions, which may end up being # slower. data = reader.parquet(base_path) sids = ["{}".format(s) for s in sample_id] criteria = "sample_id IN ({})".format(",".join(sids)) return data.where(criteria) # Neither partition is filtered. return reader.parquet(base_path)
[ "def", "read_main_summary", "(", "spark", ",", "submission_date_s3", "=", "None", ",", "sample_id", "=", "None", ",", "mergeSchema", "=", "True", ",", "path", "=", "'s3://telemetry-parquet/main_summary/v4'", ")", ":", "base_path", "=", "path", "# Specifying basePath...
Efficiently read main_summary parquet data. Read data from the given path, optionally filtering to a specified set of partition values first. This can save a time, particularly if `mergeSchema` is True. Args: spark: Spark session submission_date_s3: Optional list of values to filter the `submission_date_s3` partition. Default is to read all partitions. Each value should be in the form `YYYYMMDD`. sample_id: Optional list of values to filter the `sample_id` partition. Default is to read all partitions. mergeSchema (bool): Determines whether or not to merge the schemas of the resulting parquet files (ie. whether to support schema evolution or not). Default is to merge schemas. path (str): Location (disk or S3) from which to read data. Default is to read from the "production" location on S3. Returns: A DataFrame loaded from the specified partitions.
[ "Efficiently", "read", "main_summary", "parquet", "data", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/standards.py#L143-L202
16,714
mozilla/python_moztelemetry
moztelemetry/standards.py
sampler
def sampler(dataframe, modulo, column="client_id", sample_id=42):
    """ Collect a sample of clients given an input column

    Filter dataframe based on the modulus of the CRC32 of a given
    string column matching a given sample_id. if dataframe has already
    been filtered by sample_id, then modulo should be a multiple of
    100, column should be "client_id", and the given sample_id should
    match the value previously used, optionally plus multiples of 100.

    Args:
        dataframe: A Dataframe to be sampled
        modulo (int): selects a 1/modulo sampling of dataframe
        column (str): name of a string column to sample on
        sample_id (int): modulus result to select for sampling

    Returns:
        A DataFrame sampled on the given inputs.
    """
    # Bucket each row by CRC32 of the key column (masked to an
    # unsigned 32-bit value), keep only the requested bucket, then
    # drop the scratch column.
    bucketize = udf(lambda key: (crc32(key or "") & 0xffffffff) % modulo)
    tagged = dataframe.withColumn("sampler", bucketize(column))
    return tagged.where("sampler = %s" % sample_id).drop("sampler")
python
def sampler(dataframe, modulo, column="client_id", sample_id=42): return dataframe \ .withColumn( "sampler", udf(lambda key: (crc32(key or "") & 0xffffffff) % modulo)(column), ).where("sampler = %s" % sample_id).drop("sampler")
[ "def", "sampler", "(", "dataframe", ",", "modulo", ",", "column", "=", "\"client_id\"", ",", "sample_id", "=", "42", ")", ":", "return", "dataframe", ".", "withColumn", "(", "\"sampler\"", ",", "udf", "(", "lambda", "key", ":", "(", "crc32", "(", "key", ...
Collect a sample of clients given an input column Filter dataframe based on the modulus of the CRC32 of a given string column matching a given sample_id. if dataframe has already been filtered by sample_id, then modulo should be a multiple of 100, column should be "client_id", and the given sample_id should match the value previously used, optionally plus multiples of 100. Args: dataframe: A Dataframe to be sampled modulo (int): selects a 1/modulo sampling of dataframe column (str): name of a string column to sample on sample_id (int): modulus result to select for sampling Returns: A DataFrame sampled on the given inputs.
[ "Collect", "a", "sample", "of", "clients", "given", "an", "input", "column" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/standards.py#L205-L227
16,715
Karaage-Cluster/karaage
karaage/plugins/kgusage/views.py
progress
def progress(request):
    """ Check status of task. """
    if 'delete' in request.GET:
        # Flush every cached usage table.
        for cache_model in (models.MachineCache, models.InstituteCache,
                            models.PersonCache, models.ProjectCache):
            cache_model.objects.all().delete()
        return render(
            template_name='main.html',
            context={'content': 'Deleted'},
            request=request)

    if request.method == 'POST' and 'task_id' in request.POST:
        result = Task.AsyncResult(request.POST['task_id'])
        # A failed task carries no useful info payload.
        value = {
            'info': {} if result.failed() else result.info,
            'ready': result.ready(),
        }
        return HttpResponse(
            json.dumps(value), content_type="application/json")

    return None
python
def progress(request): if 'delete' in request.GET: models.MachineCache.objects.all().delete() models.InstituteCache.objects.all().delete() models.PersonCache.objects.all().delete() models.ProjectCache.objects.all().delete() return render( template_name='main.html', context={'content': 'Deleted'}, request=request) if request.method == 'POST': if 'task_id' in request.POST: result = Task.AsyncResult(request.POST['task_id']) if result.failed(): value = { 'info': {}, 'ready': result.ready(), } else: value = { 'info': result.info, 'ready': result.ready(), } return HttpResponse( json.dumps(value), content_type="application/json") return None
[ "def", "progress", "(", "request", ")", ":", "if", "'delete'", "in", "request", ".", "GET", ":", "models", ".", "MachineCache", ".", "objects", ".", "all", "(", ")", ".", "delete", "(", ")", "models", ".", "InstituteCache", ".", "objects", ".", "all", ...
Check status of task.
[ "Check", "status", "of", "task", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgusage/views.py#L56-L83
16,716
Karaage-Cluster/karaage
karaage/plugins/kgusage/views.py
synchronise
def synchronise(func): """ If task already queued, running, or finished, don't restart. """ def inner(request, *args): lock_id = '%s-%s-built-%s' % ( datetime.date.today(), func.__name__, ",".join([str(a) for a in args])) if cache.add(lock_id, 'true', LOCK_EXPIRE): result = func(request, *args) cache.set(lock_id, result.task_id) else: task_id = cache.get(lock_id) if not task_id: return None cache.set(lock_id, "") result = Task.AsyncResult(task_id) if result.ready(): result.forget() return None return result return inner
python
def synchronise(func): def inner(request, *args): lock_id = '%s-%s-built-%s' % ( datetime.date.today(), func.__name__, ",".join([str(a) for a in args])) if cache.add(lock_id, 'true', LOCK_EXPIRE): result = func(request, *args) cache.set(lock_id, result.task_id) else: task_id = cache.get(lock_id) if not task_id: return None cache.set(lock_id, "") result = Task.AsyncResult(task_id) if result.ready(): result.forget() return None return result return inner
[ "def", "synchronise", "(", "func", ")", ":", "def", "inner", "(", "request", ",", "*", "args", ")", ":", "lock_id", "=", "'%s-%s-built-%s'", "%", "(", "datetime", ".", "date", ".", "today", "(", ")", ",", "func", ".", "__name__", ",", "\",\"", ".", ...
If task already queued, running, or finished, don't restart.
[ "If", "task", "already", "queued", "running", "or", "finished", "don", "t", "restart", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgusage/views.py#L86-L107
16,717
mozilla/python_moztelemetry
moztelemetry/dataset.py
_group_by_size_greedy
def _group_by_size_greedy(obj_list, tot_groups): """Partition a list of objects in even buckets The idea is to choose the bucket for an object in a round-robin fashion. The list of objects is sorted to also try to keep the total size in bytes as balanced as possible. :param obj_list: a list of dict-like objects with a 'size' property :param tot_groups: number of partitions to split the data into. :return: a list of lists, one for each partition. """ sorted_list = sorted(obj_list, key=lambda x: x['size'], reverse=True) groups = [[] for _ in range(tot_groups)] for index, obj in enumerate(sorted_list): current_group = groups[index % len(groups)] current_group.append(obj) return groups
python
def _group_by_size_greedy(obj_list, tot_groups): sorted_list = sorted(obj_list, key=lambda x: x['size'], reverse=True) groups = [[] for _ in range(tot_groups)] for index, obj in enumerate(sorted_list): current_group = groups[index % len(groups)] current_group.append(obj) return groups
[ "def", "_group_by_size_greedy", "(", "obj_list", ",", "tot_groups", ")", ":", "sorted_list", "=", "sorted", "(", "obj_list", ",", "key", "=", "lambda", "x", ":", "x", "[", "'size'", "]", ",", "reverse", "=", "True", ")", "groups", "=", "[", "[", "]", ...
Partition a list of objects in even buckets The idea is to choose the bucket for an object in a round-robin fashion. The list of objects is sorted to also try to keep the total size in bytes as balanced as possible. :param obj_list: a list of dict-like objects with a 'size' property :param tot_groups: number of partitions to split the data into. :return: a list of lists, one for each partition.
[ "Partition", "a", "list", "of", "objects", "in", "even", "buckets" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/dataset.py#L27-L42
16,718
mozilla/python_moztelemetry
moztelemetry/dataset.py
_group_by_equal_size
def _group_by_equal_size(obj_list, tot_groups, threshold=pow(2, 32)): """Partition a list of objects evenly and by file size Files are placed according to largest file in the smallest bucket. If the file is larger than the given threshold, then it is placed in a new bucket by itself. :param obj_list: a list of dict-like objects with a 'size' property :param tot_groups: number of partitions to split the data :param threshold: the maximum size of each bucket :return: a list of lists, one for each partition """ sorted_obj_list = sorted([(obj['size'], obj) for obj in obj_list], reverse=True) groups = [(random.random(), []) for _ in range(tot_groups)] if tot_groups <= 1: groups = _group_by_size_greedy(obj_list, tot_groups) return groups heapq.heapify(groups) for obj in sorted_obj_list: if obj[0] > threshold: heapq.heappush(groups, (obj[0], [obj[1]])) else: size, files = heapq.heappop(groups) size += obj[0] files.append(obj[1]) heapq.heappush(groups, (size, files)) groups = [group[1] for group in groups] return groups
python
def _group_by_equal_size(obj_list, tot_groups, threshold=pow(2, 32)): sorted_obj_list = sorted([(obj['size'], obj) for obj in obj_list], reverse=True) groups = [(random.random(), []) for _ in range(tot_groups)] if tot_groups <= 1: groups = _group_by_size_greedy(obj_list, tot_groups) return groups heapq.heapify(groups) for obj in sorted_obj_list: if obj[0] > threshold: heapq.heappush(groups, (obj[0], [obj[1]])) else: size, files = heapq.heappop(groups) size += obj[0] files.append(obj[1]) heapq.heappush(groups, (size, files)) groups = [group[1] for group in groups] return groups
[ "def", "_group_by_equal_size", "(", "obj_list", ",", "tot_groups", ",", "threshold", "=", "pow", "(", "2", ",", "32", ")", ")", ":", "sorted_obj_list", "=", "sorted", "(", "[", "(", "obj", "[", "'size'", "]", ",", "obj", ")", "for", "obj", "in", "obj...
Partition a list of objects evenly and by file size Files are placed according to largest file in the smallest bucket. If the file is larger than the given threshold, then it is placed in a new bucket by itself. :param obj_list: a list of dict-like objects with a 'size' property :param tot_groups: number of partitions to split the data :param threshold: the maximum size of each bucket :return: a list of lists, one for each partition
[ "Partition", "a", "list", "of", "objects", "evenly", "and", "by", "file", "size" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/dataset.py#L45-L72
16,719
mozilla/python_moztelemetry
moztelemetry/dataset.py
Dataset.select
def select(self, *properties, **aliased_properties): """Specify which properties of the dataset must be returned Property extraction is based on `JMESPath <http://jmespath.org>`_ expressions. This method returns a new Dataset narrowed down by the given selection. :param properties: JMESPath to use for the property extraction. The JMESPath string will be used as a key in the output dictionary. :param aliased_properties: Same as properties, but the output dictionary will contain the parameter name instead of the JMESPath string. """ if not (properties or aliased_properties): return self merged_properties = dict(zip(properties, properties)) merged_properties.update(aliased_properties) for prop_name in (merged_properties.keys()): if prop_name in self.selection: raise Exception('The property {} has already been selected'.format(prop_name)) new_selection = self.selection.copy() new_selection.update(merged_properties) return self._copy(selection=new_selection)
python
def select(self, *properties, **aliased_properties): if not (properties or aliased_properties): return self merged_properties = dict(zip(properties, properties)) merged_properties.update(aliased_properties) for prop_name in (merged_properties.keys()): if prop_name in self.selection: raise Exception('The property {} has already been selected'.format(prop_name)) new_selection = self.selection.copy() new_selection.update(merged_properties) return self._copy(selection=new_selection)
[ "def", "select", "(", "self", ",", "*", "properties", ",", "*", "*", "aliased_properties", ")", ":", "if", "not", "(", "properties", "or", "aliased_properties", ")", ":", "return", "self", "merged_properties", "=", "dict", "(", "zip", "(", "properties", ",...
Specify which properties of the dataset must be returned Property extraction is based on `JMESPath <http://jmespath.org>`_ expressions. This method returns a new Dataset narrowed down by the given selection. :param properties: JMESPath to use for the property extraction. The JMESPath string will be used as a key in the output dictionary. :param aliased_properties: Same as properties, but the output dictionary will contain the parameter name instead of the JMESPath string.
[ "Specify", "which", "properties", "of", "the", "dataset", "must", "be", "returned" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/dataset.py#L174-L197
16,720
mozilla/python_moztelemetry
moztelemetry/dataset.py
Dataset.where
def where(self, **kwargs): """Return a new Dataset refined using the given condition :param kwargs: a map of `dimension` => `condition` to filter the elements of the dataset. `condition` can either be an exact value or a callable returning a boolean value. If `condition` is a value, it is converted to a string, then sanitized. If `condition` is a callable, note that it will be passed sanitized values -- i.e., characters outside [a-zA-Z0-9_.] are converted to `_`. """ clauses = copy(self.clauses) for dimension, condition in kwargs.items(): if dimension in self.clauses: raise Exception('There should be only one clause for {}'.format(dimension)) if dimension not in self.schema: raise Exception('The dimension {} doesn\'t exist'.format(dimension)) if isfunction(condition) or isinstance(condition, functools.partial): clauses[dimension] = condition else: clauses[dimension] = functools.partial((lambda x, y: x == y), self._sanitize_dimension(str(condition))) return self._copy(clauses=clauses)
python
def where(self, **kwargs): clauses = copy(self.clauses) for dimension, condition in kwargs.items(): if dimension in self.clauses: raise Exception('There should be only one clause for {}'.format(dimension)) if dimension not in self.schema: raise Exception('The dimension {} doesn\'t exist'.format(dimension)) if isfunction(condition) or isinstance(condition, functools.partial): clauses[dimension] = condition else: clauses[dimension] = functools.partial((lambda x, y: x == y), self._sanitize_dimension(str(condition))) return self._copy(clauses=clauses)
[ "def", "where", "(", "self", ",", "*", "*", "kwargs", ")", ":", "clauses", "=", "copy", "(", "self", ".", "clauses", ")", "for", "dimension", ",", "condition", "in", "kwargs", ".", "items", "(", ")", ":", "if", "dimension", "in", "self", ".", "clau...
Return a new Dataset refined using the given condition :param kwargs: a map of `dimension` => `condition` to filter the elements of the dataset. `condition` can either be an exact value or a callable returning a boolean value. If `condition` is a value, it is converted to a string, then sanitized. If `condition` is a callable, note that it will be passed sanitized values -- i.e., characters outside [a-zA-Z0-9_.] are converted to `_`.
[ "Return", "a", "new", "Dataset", "refined", "using", "the", "given", "condition" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/dataset.py#L231-L251
16,721
mozilla/python_moztelemetry
moztelemetry/dataset.py
Dataset.summaries
def summaries(self, sc, limit=None): """Summary of the files contained in the current dataset Every item in the summary is a dict containing a key name and the corresponding size of the key item in bytes, e.g.:: {'key': 'full/path/to/my/key', 'size': 200} :param limit: Max number of objects to retrieve :return: An iterable of summaries """ clauses = copy(self.clauses) schema = self.schema if self.prefix: schema = ['prefix'] + schema # Add a clause for the prefix that always returns True, in case # the output is not filtered at all (so that we do a scan/filter # on the prefix directory) clauses['prefix'] = lambda x: True with futures.ThreadPoolExecutor(self.max_concurrency) as executor: scanned = self._scan(schema, [self.prefix], clauses, executor) keys = sc.parallelize(scanned).flatMap(self.store.list_keys) return keys.take(limit) if limit else keys.collect()
python
def summaries(self, sc, limit=None): clauses = copy(self.clauses) schema = self.schema if self.prefix: schema = ['prefix'] + schema # Add a clause for the prefix that always returns True, in case # the output is not filtered at all (so that we do a scan/filter # on the prefix directory) clauses['prefix'] = lambda x: True with futures.ThreadPoolExecutor(self.max_concurrency) as executor: scanned = self._scan(schema, [self.prefix], clauses, executor) keys = sc.parallelize(scanned).flatMap(self.store.list_keys) return keys.take(limit) if limit else keys.collect()
[ "def", "summaries", "(", "self", ",", "sc", ",", "limit", "=", "None", ")", ":", "clauses", "=", "copy", "(", "self", ".", "clauses", ")", "schema", "=", "self", ".", "schema", "if", "self", ".", "prefix", ":", "schema", "=", "[", "'prefix'", "]", ...
Summary of the files contained in the current dataset Every item in the summary is a dict containing a key name and the corresponding size of the key item in bytes, e.g.:: {'key': 'full/path/to/my/key', 'size': 200} :param limit: Max number of objects to retrieve :return: An iterable of summaries
[ "Summary", "of", "the", "files", "contained", "in", "the", "current", "dataset" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/dataset.py#L268-L291
16,722
mozilla/python_moztelemetry
moztelemetry/dataset.py
Dataset.records
def records(self, sc, group_by='greedy', limit=None, sample=1, seed=42, decode=None, summaries=None): """Retrieve the elements of a Dataset :param sc: a SparkContext object :param group_by: specifies a partition strategy for the objects :param limit: maximum number of objects to retrieve :param decode: an optional transformation to apply to the objects retrieved :param sample: percentage of results to return. Useful to return a sample of the dataset. This parameter is ignored when `limit` is set. :param seed: initialize internal state of the random number generator (42 by default). This is used to make the dataset sampling reproducible. It can be set to None to obtain different samples. :param summaries: an iterable containing a summary for each item in the dataset. If None, it will computed calling the summaries dataset. :return: a Spark rdd containing the elements retrieved """ decode = decode or message_parser.parse_heka_message summaries = summaries or self.summaries(sc, limit) # Calculate the sample if summaries is not empty and limit is not set if summaries and limit is None and sample != 1: if sample < 0 or sample > 1: raise ValueError('sample must be between 0 and 1') print( "WARNING: THIS IS NOT A REPRESENTATIVE SAMPLE.\n" "This 'sampling' is based on s3 files and is highly\n" "susceptible to skew. Use only for quicker performance\n" "while prototyping." ) # We want this sample to be reproducible. # See https://bugzilla.mozilla.org/show_bug.cgi?id=1318681 seed_state = random.getstate() try: random.seed(seed) summaries = random.sample(summaries, int(len(summaries) * sample)) finally: random.setstate(seed_state) # Obtain size in MB total_size = functools.reduce(lambda acc, item: acc + item['size'], summaries, 0) total_size_mb = total_size / float(1 << 20) print("fetching %.5fMB in %s files..." 
% (total_size_mb, len(summaries))) if group_by == 'equal_size': groups = _group_by_equal_size(summaries, 10*sc.defaultParallelism) elif group_by == 'greedy': groups = _group_by_size_greedy(summaries, 10*sc.defaultParallelism) else: raise Exception("group_by specification is invalid") self._compile_selection() keys = ( sc.parallelize(groups, len(groups)) .flatMap(lambda x: x) .map(lambda x: x['key']) ) file_handles = keys.map(self.store.get_key) # decode(fp: file-object) -> list[dict] data = file_handles.flatMap(decode) return data.map(self._apply_selection)
python
def records(self, sc, group_by='greedy', limit=None, sample=1, seed=42, decode=None, summaries=None): decode = decode or message_parser.parse_heka_message summaries = summaries or self.summaries(sc, limit) # Calculate the sample if summaries is not empty and limit is not set if summaries and limit is None and sample != 1: if sample < 0 or sample > 1: raise ValueError('sample must be between 0 and 1') print( "WARNING: THIS IS NOT A REPRESENTATIVE SAMPLE.\n" "This 'sampling' is based on s3 files and is highly\n" "susceptible to skew. Use only for quicker performance\n" "while prototyping." ) # We want this sample to be reproducible. # See https://bugzilla.mozilla.org/show_bug.cgi?id=1318681 seed_state = random.getstate() try: random.seed(seed) summaries = random.sample(summaries, int(len(summaries) * sample)) finally: random.setstate(seed_state) # Obtain size in MB total_size = functools.reduce(lambda acc, item: acc + item['size'], summaries, 0) total_size_mb = total_size / float(1 << 20) print("fetching %.5fMB in %s files..." % (total_size_mb, len(summaries))) if group_by == 'equal_size': groups = _group_by_equal_size(summaries, 10*sc.defaultParallelism) elif group_by == 'greedy': groups = _group_by_size_greedy(summaries, 10*sc.defaultParallelism) else: raise Exception("group_by specification is invalid") self._compile_selection() keys = ( sc.parallelize(groups, len(groups)) .flatMap(lambda x: x) .map(lambda x: x['key']) ) file_handles = keys.map(self.store.get_key) # decode(fp: file-object) -> list[dict] data = file_handles.flatMap(decode) return data.map(self._apply_selection)
[ "def", "records", "(", "self", ",", "sc", ",", "group_by", "=", "'greedy'", ",", "limit", "=", "None", ",", "sample", "=", "1", ",", "seed", "=", "42", ",", "decode", "=", "None", ",", "summaries", "=", "None", ")", ":", "decode", "=", "decode", ...
Retrieve the elements of a Dataset :param sc: a SparkContext object :param group_by: specifies a partition strategy for the objects :param limit: maximum number of objects to retrieve :param decode: an optional transformation to apply to the objects retrieved :param sample: percentage of results to return. Useful to return a sample of the dataset. This parameter is ignored when `limit` is set. :param seed: initialize internal state of the random number generator (42 by default). This is used to make the dataset sampling reproducible. It can be set to None to obtain different samples. :param summaries: an iterable containing a summary for each item in the dataset. If None, it will computed calling the summaries dataset. :return: a Spark rdd containing the elements retrieved
[ "Retrieve", "the", "elements", "of", "a", "Dataset" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/dataset.py#L293-L357
16,723
mozilla/python_moztelemetry
moztelemetry/dataset.py
Dataset.dataframe
def dataframe(self, spark, group_by='greedy', limit=None, sample=1, seed=42, decode=None, summaries=None, schema=None, table_name=None): """Convert RDD returned from records function to a dataframe :param spark: a SparkSession object :param group_by: specifies a paritition strategy for the objects :param limit: maximum number of objects to retrieve :param decode: an optional transformation to apply to the objects retrieved :param sample: percentage of results to return. Useful to return a sample of the dataset. This parameter is ignored when 'limit' is set. :param seed: initialize internal state of the random number generator (42 by default). This is used to make the dataset sampling reproducible. It an be set to None to obtain different samples. :param summaries: an iterable containing the summary for each item in the dataset. If None, it will compute calling the summaries dataset. :param schema: a Spark schema that overrides automatic conversion to a dataframe :param table_name: allows resulting dataframe to easily be queried using SparkSQL :return: a Spark DataFrame """ rdd = self.records(spark.sparkContext, group_by, limit, sample, seed, decode, summaries) if not schema: df = rdd.map(lambda d: Row(**d)).toDF() else: df = spark.createDataFrame(rdd, schema=schema) if table_name: df.createOrReplaceTempView(table_name) return df
python
def dataframe(self, spark, group_by='greedy', limit=None, sample=1, seed=42, decode=None, summaries=None, schema=None, table_name=None): rdd = self.records(spark.sparkContext, group_by, limit, sample, seed, decode, summaries) if not schema: df = rdd.map(lambda d: Row(**d)).toDF() else: df = spark.createDataFrame(rdd, schema=schema) if table_name: df.createOrReplaceTempView(table_name) return df
[ "def", "dataframe", "(", "self", ",", "spark", ",", "group_by", "=", "'greedy'", ",", "limit", "=", "None", ",", "sample", "=", "1", ",", "seed", "=", "42", ",", "decode", "=", "None", ",", "summaries", "=", "None", ",", "schema", "=", "None", ",",...
Convert RDD returned from records function to a dataframe :param spark: a SparkSession object :param group_by: specifies a paritition strategy for the objects :param limit: maximum number of objects to retrieve :param decode: an optional transformation to apply to the objects retrieved :param sample: percentage of results to return. Useful to return a sample of the dataset. This parameter is ignored when 'limit' is set. :param seed: initialize internal state of the random number generator (42 by default). This is used to make the dataset sampling reproducible. It an be set to None to obtain different samples. :param summaries: an iterable containing the summary for each item in the dataset. If None, it will compute calling the summaries dataset. :param schema: a Spark schema that overrides automatic conversion to a dataframe :param table_name: allows resulting dataframe to easily be queried using SparkSQL :return: a Spark DataFrame
[ "Convert", "RDD", "returned", "from", "records", "function", "to", "a", "dataframe" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/dataset.py#L359-L385
16,724
mozilla/python_moztelemetry
moztelemetry/dataset.py
Dataset.from_source
def from_source(source_name): """Create a Dataset configured for the given source_name This is particularly convenient when the user doesn't know the list of dimensions or the bucket name, but only the source name. Usage example:: records = Dataset.from_source('telemetry').where( docType='main', submissionDate='20160701', appUpdateChannel='nightly' ) """ meta_bucket = 'net-mozaws-prod-us-west-2-pipeline-metadata' store = S3Store(meta_bucket) try: source = json.loads(store.get_key('sources.json').read().decode('utf-8'))[source_name] except KeyError: raise Exception('Unknown source {}'.format(source_name)) schema = store.get_key('{}/schema.json'.format(source['metadata_prefix'])).read().decode('utf-8') dimensions = [f['field_name'] for f in json.loads(schema)['dimensions']] return Dataset(source['bucket'], dimensions, prefix=source['prefix'])
python
def from_source(source_name): meta_bucket = 'net-mozaws-prod-us-west-2-pipeline-metadata' store = S3Store(meta_bucket) try: source = json.loads(store.get_key('sources.json').read().decode('utf-8'))[source_name] except KeyError: raise Exception('Unknown source {}'.format(source_name)) schema = store.get_key('{}/schema.json'.format(source['metadata_prefix'])).read().decode('utf-8') dimensions = [f['field_name'] for f in json.loads(schema)['dimensions']] return Dataset(source['bucket'], dimensions, prefix=source['prefix'])
[ "def", "from_source", "(", "source_name", ")", ":", "meta_bucket", "=", "'net-mozaws-prod-us-west-2-pipeline-metadata'", "store", "=", "S3Store", "(", "meta_bucket", ")", "try", ":", "source", "=", "json", ".", "loads", "(", "store", ".", "get_key", "(", "'sourc...
Create a Dataset configured for the given source_name This is particularly convenient when the user doesn't know the list of dimensions or the bucket name, but only the source name. Usage example:: records = Dataset.from_source('telemetry').where( docType='main', submissionDate='20160701', appUpdateChannel='nightly' )
[ "Create", "a", "Dataset", "configured", "for", "the", "given", "source_name" ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/dataset.py#L388-L412
16,725
Karaage-Cluster/karaage
karaage/people/emails.py
send_bounced_warning
def send_bounced_warning(person, leader_list): """Sends an email to each project leader for person informing them that person's email has bounced""" context = CONTEXT.copy() context['person'] = person for lp in leader_list: leader = lp['leader'] context['project'] = lp['project'] context['receiver'] = leader to_email = leader.email subject = render_to_string( 'karaage/people/emails/bounced_email_subject.txt', context) body = render_to_string( 'karaage/people/emails/bounced_email_body.txt', context) send_mail( subject.replace('\n', ''), body, settings.ACCOUNTS_EMAIL, [to_email]) log.change( leader, 'Sent email about bounced emails from %s' % person)
python
def send_bounced_warning(person, leader_list): context = CONTEXT.copy() context['person'] = person for lp in leader_list: leader = lp['leader'] context['project'] = lp['project'] context['receiver'] = leader to_email = leader.email subject = render_to_string( 'karaage/people/emails/bounced_email_subject.txt', context) body = render_to_string( 'karaage/people/emails/bounced_email_body.txt', context) send_mail( subject.replace('\n', ''), body, settings.ACCOUNTS_EMAIL, [to_email]) log.change( leader, 'Sent email about bounced emails from %s' % person)
[ "def", "send_bounced_warning", "(", "person", ",", "leader_list", ")", ":", "context", "=", "CONTEXT", ".", "copy", "(", ")", "context", "[", "'person'", "]", "=", "person", "for", "lp", "in", "leader_list", ":", "leader", "=", "lp", "[", "'leader'", "]"...
Sends an email to each project leader for person informing them that person's email has bounced
[ "Sends", "an", "email", "to", "each", "project", "leader", "for", "person", "informing", "them", "that", "person", "s", "email", "has", "bounced" ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/people/emails.py#L42-L64
16,726
Karaage-Cluster/karaage
karaage/people/emails.py
send_reset_password_email
def send_reset_password_email(person): """Sends an email to user allowing them to set their password.""" uid = urlsafe_base64_encode(force_bytes(person.pk)).decode("ascii") token = default_token_generator.make_token(person) url = '%s/persons/reset/%s/%s/' % ( settings.REGISTRATION_BASE_URL, uid, token) context = CONTEXT.copy() context.update({ 'url': url, 'receiver': person, }) to_email = person.email subject, body = render_email('reset_password', context) send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])
python
def send_reset_password_email(person): uid = urlsafe_base64_encode(force_bytes(person.pk)).decode("ascii") token = default_token_generator.make_token(person) url = '%s/persons/reset/%s/%s/' % ( settings.REGISTRATION_BASE_URL, uid, token) context = CONTEXT.copy() context.update({ 'url': url, 'receiver': person, }) to_email = person.email subject, body = render_email('reset_password', context) send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])
[ "def", "send_reset_password_email", "(", "person", ")", ":", "uid", "=", "urlsafe_base64_encode", "(", "force_bytes", "(", "person", ".", "pk", ")", ")", ".", "decode", "(", "\"ascii\"", ")", "token", "=", "default_token_generator", ".", "make_token", "(", "pe...
Sends an email to user allowing them to set their password.
[ "Sends", "an", "email", "to", "user", "allowing", "them", "to", "set", "their", "password", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/people/emails.py#L67-L83
16,727
Karaage-Cluster/karaage
karaage/people/emails.py
send_confirm_password_email
def send_confirm_password_email(person): """Sends an email to user allowing them to confirm their password.""" url = '%s/profile/login/%s/' % ( settings.REGISTRATION_BASE_URL, person.username) context = CONTEXT.copy() context.update({ 'url': url, 'receiver': person, }) to_email = person.email subject, body = render_email('confirm_password', context) send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])
python
def send_confirm_password_email(person): url = '%s/profile/login/%s/' % ( settings.REGISTRATION_BASE_URL, person.username) context = CONTEXT.copy() context.update({ 'url': url, 'receiver': person, }) to_email = person.email subject, body = render_email('confirm_password', context) send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])
[ "def", "send_confirm_password_email", "(", "person", ")", ":", "url", "=", "'%s/profile/login/%s/'", "%", "(", "settings", ".", "REGISTRATION_BASE_URL", ",", "person", ".", "username", ")", "context", "=", "CONTEXT", ".", "copy", "(", ")", "context", ".", "upd...
Sends an email to user allowing them to confirm their password.
[ "Sends", "an", "email", "to", "user", "allowing", "them", "to", "confirm", "their", "password", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/people/emails.py#L86-L100
16,728
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/states.py
StateWaitingForApproval.check_can_approve
def check_can_approve(self, request, application, roles): """ Check the person's authorization. """ try: authorised_persons = self.get_authorised_persons(application) authorised_persons.get(pk=request.user.pk) return True except Person.DoesNotExist: return False
python
def check_can_approve(self, request, application, roles): try: authorised_persons = self.get_authorised_persons(application) authorised_persons.get(pk=request.user.pk) return True except Person.DoesNotExist: return False
[ "def", "check_can_approve", "(", "self", ",", "request", ",", "application", ",", "roles", ")", ":", "try", ":", "authorised_persons", "=", "self", ".", "get_authorised_persons", "(", "application", ")", "authorised_persons", ".", "get", "(", "pk", "=", "reque...
Check the person's authorization.
[ "Check", "the", "person", "s", "authorization", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/states.py#L57-L64
16,729
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/states.py
StateWaitingForApproval.enter_state
def enter_state(self, request, application): """ This is becoming the new current state. """ authorised_persons = self.get_email_persons(application) link, is_secret = self.get_request_email_link(application) emails.send_request_email( self.authorised_text, self.authorised_role, authorised_persons, application, link, is_secret)
python
def enter_state(self, request, application): authorised_persons = self.get_email_persons(application) link, is_secret = self.get_request_email_link(application) emails.send_request_email( self.authorised_text, self.authorised_role, authorised_persons, application, link, is_secret)
[ "def", "enter_state", "(", "self", ",", "request", ",", "application", ")", ":", "authorised_persons", "=", "self", ".", "get_email_persons", "(", "application", ")", "link", ",", "is_secret", "=", "self", ".", "get_request_email_link", "(", "application", ")", ...
This is becoming the new current state.
[ "This", "is", "becoming", "the", "new", "current", "state", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/states.py#L73-L82
16,730
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/states.py
StateWithSteps.add_step
def add_step(self, step, step_id): """ Add a step to the list. The first step added becomes the initial step. """ assert step_id not in self._steps assert step_id not in self._order assert isinstance(step, Step) self._steps[step_id] = step self._order.append(step_id)
python
def add_step(self, step, step_id): assert step_id not in self._steps assert step_id not in self._order assert isinstance(step, Step) self._steps[step_id] = step self._order.append(step_id)
[ "def", "add_step", "(", "self", ",", "step", ",", "step_id", ")", ":", "assert", "step_id", "not", "in", "self", ".", "_steps", "assert", "step_id", "not", "in", "self", ".", "_order", "assert", "isinstance", "(", "step", ",", "Step", ")", "self", ".",...
Add a step to the list. The first step added becomes the initial step.
[ "Add", "a", "step", "to", "the", "list", ".", "The", "first", "step", "added", "becomes", "the", "initial", "step", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/states.py#L381-L389
16,731
Karaage-Cluster/karaage
karaage/datastores/__init__.py
_init_datastores
def _init_datastores(): """ Initialize all datastores. """ global _DATASTORES array = settings.DATASTORES for config in array: cls = _lookup(config['ENGINE']) ds = _get_datastore(cls, DataStore, config) _DATASTORES.append(ds) legacy_settings = getattr(settings, 'MACHINE_CATEGORY_DATASTORES', None) if legacy_settings is not None: warnings.warn( "MACHINE_CATEGORY_DATASTORES is deprecated, " "please change to use DATASTORES", ) for name in ['ldap']: array = settings.MACHINE_CATEGORY_DATASTORES.get(name, []) for config in array: cls = _lookup(config['ENGINE']) ds = _get_datastore(cls, DataStore, config) _DATASTORES.append(ds)
python
def _init_datastores(): global _DATASTORES array = settings.DATASTORES for config in array: cls = _lookup(config['ENGINE']) ds = _get_datastore(cls, DataStore, config) _DATASTORES.append(ds) legacy_settings = getattr(settings, 'MACHINE_CATEGORY_DATASTORES', None) if legacy_settings is not None: warnings.warn( "MACHINE_CATEGORY_DATASTORES is deprecated, " "please change to use DATASTORES", ) for name in ['ldap']: array = settings.MACHINE_CATEGORY_DATASTORES.get(name, []) for config in array: cls = _lookup(config['ENGINE']) ds = _get_datastore(cls, DataStore, config) _DATASTORES.append(ds)
[ "def", "_init_datastores", "(", ")", ":", "global", "_DATASTORES", "array", "=", "settings", ".", "DATASTORES", "for", "config", "in", "array", ":", "cls", "=", "_lookup", "(", "config", "[", "'ENGINE'", "]", ")", "ds", "=", "_get_datastore", "(", "cls", ...
Initialize all datastores.
[ "Initialize", "all", "datastores", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L52-L71
16,732
Karaage-Cluster/karaage
karaage/datastores/__init__.py
get_group_details
def get_group_details(group): """ Get group details. """ result = [] for datastore in _get_datastores(): value = datastore.get_group_details(group) value['datastore'] = datastore.config['DESCRIPTION'] result.append(value) return result
python
def get_group_details(group): result = [] for datastore in _get_datastores(): value = datastore.get_group_details(group) value['datastore'] = datastore.config['DESCRIPTION'] result.append(value) return result
[ "def", "get_group_details", "(", "group", ")", ":", "result", "=", "[", "]", "for", "datastore", "in", "_get_datastores", "(", ")", ":", "value", "=", "datastore", ".", "get_group_details", "(", "group", ")", "value", "[", "'datastore'", "]", "=", "datasto...
Get group details.
[ "Get", "group", "details", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L192-L199
16,733
Karaage-Cluster/karaage
karaage/datastores/__init__.py
set_project_pid
def set_project_pid(project, old_pid, new_pid): """ Project's PID was changed. """ for datastore in _get_datastores(): datastore.save_project(project) datastore.set_project_pid(project, old_pid, new_pid)
python
def set_project_pid(project, old_pid, new_pid): for datastore in _get_datastores(): datastore.save_project(project) datastore.set_project_pid(project, old_pid, new_pid)
[ "def", "set_project_pid", "(", "project", ",", "old_pid", ",", "new_pid", ")", ":", "for", "datastore", "in", "_get_datastores", "(", ")", ":", "datastore", ".", "save_project", "(", "project", ")", "datastore", ".", "set_project_pid", "(", "project", ",", "...
Project's PID was changed.
[ "Project", "s", "PID", "was", "changed", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L228-L232
16,734
Karaage-Cluster/karaage
karaage/datastores/__init__.py
add_accounts_to_group
def add_accounts_to_group(accounts_query, group): """ Add accounts to group. """ query = accounts_query.filter(date_deleted__isnull=True) for account in query: add_account_to_group(account, group)
python
def add_accounts_to_group(accounts_query, group): query = accounts_query.filter(date_deleted__isnull=True) for account in query: add_account_to_group(account, group)
[ "def", "add_accounts_to_group", "(", "accounts_query", ",", "group", ")", ":", "query", "=", "accounts_query", ".", "filter", "(", "date_deleted__isnull", "=", "True", ")", "for", "account", "in", "query", ":", "add_account_to_group", "(", "account", ",", "group...
Add accounts to group.
[ "Add", "accounts", "to", "group", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L265-L271
16,735
Karaage-Cluster/karaage
karaage/datastores/__init__.py
remove_accounts_from_group
def remove_accounts_from_group(accounts_query, group): """ Remove accounts from group. """ query = accounts_query.filter(date_deleted__isnull=True) for account in query: remove_account_from_group(account, group)
python
def remove_accounts_from_group(accounts_query, group): query = accounts_query.filter(date_deleted__isnull=True) for account in query: remove_account_from_group(account, group)
[ "def", "remove_accounts_from_group", "(", "accounts_query", ",", "group", ")", ":", "query", "=", "accounts_query", ".", "filter", "(", "date_deleted__isnull", "=", "True", ")", "for", "account", "in", "query", ":", "remove_account_from_group", "(", "account", ","...
Remove accounts from group.
[ "Remove", "accounts", "from", "group", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L274-L280
16,736
Karaage-Cluster/karaage
karaage/datastores/__init__.py
add_accounts_to_project
def add_accounts_to_project(accounts_query, project): """ Add accounts to project. """ query = accounts_query.filter(date_deleted__isnull=True) for account in query: add_account_to_project(account, project)
python
def add_accounts_to_project(accounts_query, project): query = accounts_query.filter(date_deleted__isnull=True) for account in query: add_account_to_project(account, project)
[ "def", "add_accounts_to_project", "(", "accounts_query", ",", "project", ")", ":", "query", "=", "accounts_query", ".", "filter", "(", "date_deleted__isnull", "=", "True", ")", "for", "account", "in", "query", ":", "add_account_to_project", "(", "account", ",", ...
Add accounts to project.
[ "Add", "accounts", "to", "project", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L283-L289
16,737
Karaage-Cluster/karaage
karaage/datastores/__init__.py
remove_accounts_from_project
def remove_accounts_from_project(accounts_query, project): """ Remove accounts from project. """ query = accounts_query.filter(date_deleted__isnull=True) for account in query: remove_account_from_project(account, project)
python
def remove_accounts_from_project(accounts_query, project): query = accounts_query.filter(date_deleted__isnull=True) for account in query: remove_account_from_project(account, project)
[ "def", "remove_accounts_from_project", "(", "accounts_query", ",", "project", ")", ":", "query", "=", "accounts_query", ".", "filter", "(", "date_deleted__isnull", "=", "True", ")", "for", "account", "in", "query", ":", "remove_account_from_project", "(", "account",...
Remove accounts from project.
[ "Remove", "accounts", "from", "project", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L292-L298
16,738
Karaage-Cluster/karaage
karaage/datastores/__init__.py
add_accounts_to_institute
def add_accounts_to_institute(accounts_query, institute): """ Add accounts to institute. """ query = accounts_query.filter(date_deleted__isnull=True) for account in query: add_account_to_institute(account, institute)
python
def add_accounts_to_institute(accounts_query, institute): query = accounts_query.filter(date_deleted__isnull=True) for account in query: add_account_to_institute(account, institute)
[ "def", "add_accounts_to_institute", "(", "accounts_query", ",", "institute", ")", ":", "query", "=", "accounts_query", ".", "filter", "(", "date_deleted__isnull", "=", "True", ")", "for", "account", "in", "query", ":", "add_account_to_institute", "(", "account", "...
Add accounts to institute.
[ "Add", "accounts", "to", "institute", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L301-L307
16,739
Karaage-Cluster/karaage
karaage/datastores/__init__.py
remove_accounts_from_institute
def remove_accounts_from_institute(accounts_query, institute): """ Remove accounts from institute. """ query = accounts_query.filter(date_deleted__isnull=True) for account in query: remove_account_from_institute(account, institute)
python
def remove_accounts_from_institute(accounts_query, institute): query = accounts_query.filter(date_deleted__isnull=True) for account in query: remove_account_from_institute(account, institute)
[ "def", "remove_accounts_from_institute", "(", "accounts_query", ",", "institute", ")", ":", "query", "=", "accounts_query", ".", "filter", "(", "date_deleted__isnull", "=", "True", ")", "for", "account", "in", "query", ":", "remove_account_from_institute", "(", "acc...
Remove accounts from institute.
[ "Remove", "accounts", "from", "institute", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/__init__.py#L310-L316
16,740
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStoreBase._filter_string
def _filter_string(value): """ Filter the string so MAM doesn't have heart failure.""" if value is None: value = "" # replace whitespace with space value = value.replace("\n", " ") value = value.replace("\t", " ") # CSV seperator value = value.replace("|", " ") # remove leading/trailing whitespace value = value.strip() # hack because MAM doesn't quote sql correctly value = value.replace("\\", "") # Used for stripping non-ascii characters value = ''.join(c for c in value if 31 < ord(c) < 127) return value
python
def _filter_string(value): if value is None: value = "" # replace whitespace with space value = value.replace("\n", " ") value = value.replace("\t", " ") # CSV seperator value = value.replace("|", " ") # remove leading/trailing whitespace value = value.strip() # hack because MAM doesn't quote sql correctly value = value.replace("\\", "") # Used for stripping non-ascii characters value = ''.join(c for c in value if 31 < ord(c) < 127) return value
[ "def", "_filter_string", "(", "value", ")", ":", "if", "value", "is", "None", ":", "value", "=", "\"\"", "# replace whitespace with space", "value", "=", "value", ".", "replace", "(", "\"\\n\"", ",", "\" \"", ")", "value", "=", "value", ".", "replace", "("...
Filter the string so MAM doesn't have heart failure.
[ "Filter", "the", "string", "so", "MAM", "doesn", "t", "have", "heart", "failure", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L57-L78
16,741
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStoreBase.get_user
def get_user(self, username): """ Get the user details from MAM. """ cmd = ["glsuser", "-u", username, "--raw"] results = self._read_output(cmd) if len(results) == 0: return None elif len(results) > 1: logger.error( "Command returned multiple results for '%s'." % username) raise RuntimeError( "Command returned multiple results for '%s'." % username) the_result = results[0] the_name = the_result["Name"] if username.lower() != the_name.lower(): logger.error( "We expected username '%s' but got username '%s'." % (username, the_name)) raise RuntimeError( "We expected username '%s' but got username '%s'." % (username, the_name)) return the_result
python
def get_user(self, username): cmd = ["glsuser", "-u", username, "--raw"] results = self._read_output(cmd) if len(results) == 0: return None elif len(results) > 1: logger.error( "Command returned multiple results for '%s'." % username) raise RuntimeError( "Command returned multiple results for '%s'." % username) the_result = results[0] the_name = the_result["Name"] if username.lower() != the_name.lower(): logger.error( "We expected username '%s' but got username '%s'." % (username, the_name)) raise RuntimeError( "We expected username '%s' but got username '%s'." % (username, the_name)) return the_result
[ "def", "get_user", "(", "self", ",", "username", ")", ":", "cmd", "=", "[", "\"glsuser\"", ",", "\"-u\"", ",", "username", ",", "\"--raw\"", "]", "results", "=", "self", ".", "_read_output", "(", "cmd", ")", "if", "len", "(", "results", ")", "==", "0...
Get the user details from MAM.
[ "Get", "the", "user", "details", "from", "MAM", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L208-L231
16,742
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStoreBase.get_user_balance
def get_user_balance(self, username): """ Get the user balance details from MAM. """ cmd = ["gbalance", "-u", username, "--raw"] results = self._read_output(cmd) if len(results) == 0: return None return results
python
def get_user_balance(self, username): cmd = ["gbalance", "-u", username, "--raw"] results = self._read_output(cmd) if len(results) == 0: return None return results
[ "def", "get_user_balance", "(", "self", ",", "username", ")", ":", "cmd", "=", "[", "\"gbalance\"", ",", "\"-u\"", ",", "username", ",", "\"--raw\"", "]", "results", "=", "self", ".", "_read_output", "(", "cmd", ")", "if", "len", "(", "results", ")", "...
Get the user balance details from MAM.
[ "Get", "the", "user", "balance", "details", "from", "MAM", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L233-L241
16,743
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStoreBase.get_users_in_project
def get_users_in_project(self, projectname): """ Get list of users in project from MAM. """ ds_project = self.get_project(projectname) if ds_project is None: logger.error( "Project '%s' does not exist in MAM" % projectname) raise RuntimeError( "Project '%s' does not exist in MAM" % projectname) user_list = [] if ds_project["Users"] != "": user_list = ds_project["Users"].lower().split(",") return user_list
python
def get_users_in_project(self, projectname): ds_project = self.get_project(projectname) if ds_project is None: logger.error( "Project '%s' does not exist in MAM" % projectname) raise RuntimeError( "Project '%s' does not exist in MAM" % projectname) user_list = [] if ds_project["Users"] != "": user_list = ds_project["Users"].lower().split(",") return user_list
[ "def", "get_users_in_project", "(", "self", ",", "projectname", ")", ":", "ds_project", "=", "self", ".", "get_project", "(", "projectname", ")", "if", "ds_project", "is", "None", ":", "logger", ".", "error", "(", "\"Project '%s' does not exist in MAM\"", "%", "...
Get list of users in project from MAM.
[ "Get", "list", "of", "users", "in", "project", "from", "MAM", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L271-L283
16,744
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStoreBase.get_projects_in_user
def get_projects_in_user(self, username): """ Get list of projects in user from MAM. """ ds_balance = self.get_user_balance(username) if ds_balance is None: return [] project_list = [] for bal in ds_balance: project_list.append(bal["Name"]) return project_list
python
def get_projects_in_user(self, username): ds_balance = self.get_user_balance(username) if ds_balance is None: return [] project_list = [] for bal in ds_balance: project_list.append(bal["Name"]) return project_list
[ "def", "get_projects_in_user", "(", "self", ",", "username", ")", ":", "ds_balance", "=", "self", ".", "get_user_balance", "(", "username", ")", "if", "ds_balance", "is", "None", ":", "return", "[", "]", "project_list", "=", "[", "]", "for", "bal", "in", ...
Get list of projects in user from MAM.
[ "Get", "list", "of", "projects", "in", "user", "from", "MAM", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L285-L294
16,745
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStoreBase.get_account_details
def get_account_details(self, account): """ Get the account details """ result = self.get_user(account.username) if result is None: result = {} return result
python
def get_account_details(self, account): result = self.get_user(account.username) if result is None: result = {} return result
[ "def", "get_account_details", "(", "self", ",", "account", ")", ":", "result", "=", "self", ".", "get_user", "(", "account", ".", "username", ")", "if", "result", "is", "None", ":", "result", "=", "{", "}", "return", "result" ]
Get the account details
[ "Get", "the", "account", "details" ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L390-L395
16,746
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStoreBase.get_project_details
def get_project_details(self, project): """ Get the project details. """ result = self.get_project(project.pid) if result is None: result = {} return result
python
def get_project_details(self, project): result = self.get_project(project.pid) if result is None: result = {} return result
[ "def", "get_project_details", "(", "self", ",", "project", ")", ":", "result", "=", "self", ".", "get_project", "(", "project", ".", "pid", ")", "if", "result", "is", "None", ":", "result", "=", "{", "}", "return", "result" ]
Get the project details.
[ "Get", "the", "project", "details", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L456-L461
16,747
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStoreBase.delete_institute
def delete_institute(self, institute): """ Called when institute is deleted. """ name = institute.name logger.debug("institute_deleted '%s'" % name) # institute deleted self._call(["goldsh", "Organization", "Delete", "Name==%s" % name]) logger.debug("returning") return
python
def delete_institute(self, institute): name = institute.name logger.debug("institute_deleted '%s'" % name) # institute deleted self._call(["goldsh", "Organization", "Delete", "Name==%s" % name]) logger.debug("returning") return
[ "def", "delete_institute", "(", "self", ",", "institute", ")", ":", "name", "=", "institute", ".", "name", "logger", ".", "debug", "(", "\"institute_deleted '%s'\"", "%", "name", ")", "# institute deleted", "self", ".", "_call", "(", "[", "\"goldsh\"", ",", ...
Called when institute is deleted.
[ "Called", "when", "institute", "is", "deleted", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L492-L501
16,748
Karaage-Cluster/karaage
karaage/datastores/mam.py
MamDataStore71.add_account_to_project
def add_account_to_project(self, account, project): """ Add account to project. """ username = account.username projectname = project.pid self._call([ "gchproject", "--add-user", username, "-p", projectname], ignore_errors=[74])
python
def add_account_to_project(self, account, project): username = account.username projectname = project.pid self._call([ "gchproject", "--add-user", username, "-p", projectname], ignore_errors=[74])
[ "def", "add_account_to_project", "(", "self", ",", "account", ",", "project", ")", ":", "username", "=", "account", ".", "username", "projectname", "=", "project", ".", "pid", "self", ".", "_call", "(", "[", "\"gchproject\"", ",", "\"--add-user\"", ",", "use...
Add account to project.
[ "Add", "account", "to", "project", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/datastores/mam.py#L523-L531
16,749
mozilla/python_moztelemetry
moztelemetry/zeppelin.py
show
def show(fig, width=600): """ Renders a Matplotlib figure in Zeppelin. :param fig: a Matplotlib figure :param width: the width in pixel of the rendered figure, defaults to 600 Usage example:: import matplotlib.pyplot as plt from moztelemetry.zeppelin import show fig = plt.figure() plt.plot([1, 2, 3]) show(fig) """ img = StringIO() fig.savefig(img, format='svg') img.seek(0) print("%html <div style='width:{}px'>{}</div>".format(width, img.buf))
python
def show(fig, width=600): img = StringIO() fig.savefig(img, format='svg') img.seek(0) print("%html <div style='width:{}px'>{}</div>".format(width, img.buf))
[ "def", "show", "(", "fig", ",", "width", "=", "600", ")", ":", "img", "=", "StringIO", "(", ")", "fig", ".", "savefig", "(", "img", ",", "format", "=", "'svg'", ")", "img", ".", "seek", "(", "0", ")", "print", "(", "\"%html <div style='width:{}px'>{}...
Renders a Matplotlib figure in Zeppelin. :param fig: a Matplotlib figure :param width: the width in pixel of the rendered figure, defaults to 600 Usage example:: import matplotlib.pyplot as plt from moztelemetry.zeppelin import show fig = plt.figure() plt.plot([1, 2, 3]) show(fig)
[ "Renders", "a", "Matplotlib", "figure", "in", "Zeppelin", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/zeppelin.py#L8-L26
16,750
daler/trackhub
trackhub/__init__.py
default_hub
def default_hub(hub_name, genome, email, short_label=None, long_label=None): """ Returns a fully-connected set of hub components using default filenames. Parameters ---------- hub_name : str Name of the hub genome : str Assembly name (hg38, dm6, etc) email : str Email to include with hub. short_label : str Short label for the hub. If None, defaults to the value of `hub_name` long_label : str Long label for the hub. If None, defaults to the value of `short_label`. """ if short_label is None: short_label = hub_name if long_label is None: long_label = short_label hub = Hub( hub=hub_name, short_label=short_label, long_label=long_label, email=email) genome = Genome(genome) genomes_file = GenomesFile() trackdb = TrackDb() hub.add_genomes_file(genomes_file) genomes_file.add_genome(genome) genome.add_trackdb(trackdb) return hub, genomes_file, genome, trackdb
python
def default_hub(hub_name, genome, email, short_label=None, long_label=None): if short_label is None: short_label = hub_name if long_label is None: long_label = short_label hub = Hub( hub=hub_name, short_label=short_label, long_label=long_label, email=email) genome = Genome(genome) genomes_file = GenomesFile() trackdb = TrackDb() hub.add_genomes_file(genomes_file) genomes_file.add_genome(genome) genome.add_trackdb(trackdb) return hub, genomes_file, genome, trackdb
[ "def", "default_hub", "(", "hub_name", ",", "genome", ",", "email", ",", "short_label", "=", "None", ",", "long_label", "=", "None", ")", ":", "if", "short_label", "is", "None", ":", "short_label", "=", "hub_name", "if", "long_label", "is", "None", ":", ...
Returns a fully-connected set of hub components using default filenames. Parameters ---------- hub_name : str Name of the hub genome : str Assembly name (hg38, dm6, etc) email : str Email to include with hub. short_label : str Short label for the hub. If None, defaults to the value of `hub_name` long_label : str Long label for the hub. If None, defaults to the value of `short_label`.
[ "Returns", "a", "fully", "-", "connected", "set", "of", "hub", "components", "using", "default", "filenames", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/__init__.py#L18-L57
16,751
mozilla/python_moztelemetry
moztelemetry/parse_histograms.py
from_files
def from_files(filenames, strict_type_checks=True): """Return an iterator that provides a sequence of Histograms for the histograms defined in filenames. """ if strict_type_checks: load_whitelist() all_histograms = OrderedDict() for filename in filenames: parser = FILENAME_PARSERS[os.path.basename(filename)] histograms = parser(filename, strict_type_checks) # OrderedDicts are important, because then the iteration order over # the parsed histograms is stable, which makes the insertion into # all_histograms stable, which makes ordering in generated files # stable, which makes builds more deterministic. if not isinstance(histograms, OrderedDict): raise ParserError("Histogram parser did not provide an OrderedDict.") for name, definition in iteritems(histograms): if name in all_histograms: raise ParserError('Duplicate histogram name "%s".' % name) all_histograms[name] = definition # We require that all USE_COUNTER2_* histograms be defined in a contiguous # block. use_counter_indices = filter(lambda x: x[1].startswith("USE_COUNTER2_"), enumerate(iterkeys(all_histograms))) if use_counter_indices: lower_bound = use_counter_indices[0][0] upper_bound = use_counter_indices[-1][0] n_counters = upper_bound - lower_bound + 1 if n_counters != len(use_counter_indices): raise ParserError("Use counter histograms must be defined in a contiguous block.") # Check that histograms that were removed from Histograms.json etc. # are also removed from the whitelists. if whitelists is not None: all_whitelist_entries = itertools.chain.from_iterable(whitelists.itervalues()) orphaned = set(all_whitelist_entries) - set(iterkeys(all_histograms)) if len(orphaned) > 0: msg = 'The following entries are orphaned and should be removed from ' \ 'histogram-whitelists.json:\n%s' raise ParserError(msg % (', '.join(sorted(orphaned)))) for name, definition in iteritems(all_histograms): yield Histogram(name, definition, strict_type_checks=strict_type_checks)
python
def from_files(filenames, strict_type_checks=True): if strict_type_checks: load_whitelist() all_histograms = OrderedDict() for filename in filenames: parser = FILENAME_PARSERS[os.path.basename(filename)] histograms = parser(filename, strict_type_checks) # OrderedDicts are important, because then the iteration order over # the parsed histograms is stable, which makes the insertion into # all_histograms stable, which makes ordering in generated files # stable, which makes builds more deterministic. if not isinstance(histograms, OrderedDict): raise ParserError("Histogram parser did not provide an OrderedDict.") for name, definition in iteritems(histograms): if name in all_histograms: raise ParserError('Duplicate histogram name "%s".' % name) all_histograms[name] = definition # We require that all USE_COUNTER2_* histograms be defined in a contiguous # block. use_counter_indices = filter(lambda x: x[1].startswith("USE_COUNTER2_"), enumerate(iterkeys(all_histograms))) if use_counter_indices: lower_bound = use_counter_indices[0][0] upper_bound = use_counter_indices[-1][0] n_counters = upper_bound - lower_bound + 1 if n_counters != len(use_counter_indices): raise ParserError("Use counter histograms must be defined in a contiguous block.") # Check that histograms that were removed from Histograms.json etc. # are also removed from the whitelists. if whitelists is not None: all_whitelist_entries = itertools.chain.from_iterable(whitelists.itervalues()) orphaned = set(all_whitelist_entries) - set(iterkeys(all_histograms)) if len(orphaned) > 0: msg = 'The following entries are orphaned and should be removed from ' \ 'histogram-whitelists.json:\n%s' raise ParserError(msg % (', '.join(sorted(orphaned)))) for name, definition in iteritems(all_histograms): yield Histogram(name, definition, strict_type_checks=strict_type_checks)
[ "def", "from_files", "(", "filenames", ",", "strict_type_checks", "=", "True", ")", ":", "if", "strict_type_checks", ":", "load_whitelist", "(", ")", "all_histograms", "=", "OrderedDict", "(", ")", "for", "filename", "in", "filenames", ":", "parser", "=", "FIL...
Return an iterator that provides a sequence of Histograms for the histograms defined in filenames.
[ "Return", "an", "iterator", "that", "provides", "a", "sequence", "of", "Histograms", "for", "the", "histograms", "defined", "in", "filenames", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/parse_histograms.py#L598-L644
16,752
mozilla/python_moztelemetry
moztelemetry/parse_histograms.py
Histogram.ranges
def ranges(self): """Return an array of lower bounds for each bucket in the histogram.""" bucket_fns = { 'boolean': linear_buckets, 'flag': linear_buckets, 'count': linear_buckets, 'enumerated': linear_buckets, 'categorical': linear_buckets, 'linear': linear_buckets, 'exponential': exponential_buckets, } if self._kind not in bucket_fns: raise ParserError('Unknown kind "%s" for histogram "%s".' % (self._kind, self._name)) fn = bucket_fns[self._kind] return fn(self.low(), self.high(), self.n_buckets())
python
def ranges(self): bucket_fns = { 'boolean': linear_buckets, 'flag': linear_buckets, 'count': linear_buckets, 'enumerated': linear_buckets, 'categorical': linear_buckets, 'linear': linear_buckets, 'exponential': exponential_buckets, } if self._kind not in bucket_fns: raise ParserError('Unknown kind "%s" for histogram "%s".' % (self._kind, self._name)) fn = bucket_fns[self._kind] return fn(self.low(), self.high(), self.n_buckets())
[ "def", "ranges", "(", "self", ")", ":", "bucket_fns", "=", "{", "'boolean'", ":", "linear_buckets", ",", "'flag'", ":", "linear_buckets", ",", "'count'", ":", "linear_buckets", ",", "'enumerated'", ":", "linear_buckets", ",", "'categorical'", ":", "linear_bucket...
Return an array of lower bounds for each bucket in the histogram.
[ "Return", "an", "array", "of", "lower", "bounds", "for", "each", "bucket", "in", "the", "histogram", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/parse_histograms.py#L193-L209
16,753
Karaage-Cluster/karaage
karaage/common/create_update.py
apply_extra_context
def apply_extra_context(extra_context, context): """ Adds items from extra_context dict to context. If a value in extra_context is callable, then it is called and the result is added to context. """ for key, value in six.iteritems(extra_context): if callable(value): context[key] = value() else: context[key] = value
python
def apply_extra_context(extra_context, context): for key, value in six.iteritems(extra_context): if callable(value): context[key] = value() else: context[key] = value
[ "def", "apply_extra_context", "(", "extra_context", ",", "context", ")", ":", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "extra_context", ")", ":", "if", "callable", "(", "value", ")", ":", "context", "[", "key", "]", "=", "value", ...
Adds items from extra_context dict to context. If a value in extra_context is callable, then it is called and the result is added to context.
[ "Adds", "items", "from", "extra_context", "dict", "to", "context", ".", "If", "a", "value", "in", "extra_context", "is", "callable", "then", "it", "is", "called", "and", "the", "result", "is", "added", "to", "context", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/common/create_update.py#L12-L21
16,754
Karaage-Cluster/karaage
karaage/common/create_update.py
get_model_and_form_class
def get_model_and_form_class(model, form_class): """ Returns a model and form class based on the model and form_class parameters that were passed to the generic view. If ``form_class`` is given then its associated model will be returned along with ``form_class`` itself. Otherwise, if ``model`` is given, ``model`` itself will be returned along with a ``ModelForm`` class created from ``model``. """ if form_class: return form_class._meta.model, form_class if model: # The inner Meta class fails if model = model is used for some reason. tmp_model = model # TODO: we should be able to construct a ModelForm without creating # and passing in a temporary inner class. class Meta: model = tmp_model class_name = model.__name__ + 'Form' form_class = ModelFormMetaclass( class_name, (ModelForm,), {'Meta': Meta}) return model, form_class raise GenericViewError("Generic view must be called with either a model or" " form_class argument.")
python
def get_model_and_form_class(model, form_class): if form_class: return form_class._meta.model, form_class if model: # The inner Meta class fails if model = model is used for some reason. tmp_model = model # TODO: we should be able to construct a ModelForm without creating # and passing in a temporary inner class. class Meta: model = tmp_model class_name = model.__name__ + 'Form' form_class = ModelFormMetaclass( class_name, (ModelForm,), {'Meta': Meta}) return model, form_class raise GenericViewError("Generic view must be called with either a model or" " form_class argument.")
[ "def", "get_model_and_form_class", "(", "model", ",", "form_class", ")", ":", "if", "form_class", ":", "return", "form_class", ".", "_meta", ".", "model", ",", "form_class", "if", "model", ":", "# The inner Meta class fails if model = model is used for some reason.", "t...
Returns a model and form class based on the model and form_class parameters that were passed to the generic view. If ``form_class`` is given then its associated model will be returned along with ``form_class`` itself. Otherwise, if ``model`` is given, ``model`` itself will be returned along with a ``ModelForm`` class created from ``model``.
[ "Returns", "a", "model", "and", "form", "class", "based", "on", "the", "model", "and", "form_class", "parameters", "that", "were", "passed", "to", "the", "generic", "view", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/common/create_update.py#L24-L50
16,755
Karaage-Cluster/karaage
karaage/common/create_update.py
redirect
def redirect(post_save_redirect, obj): """ Returns a HttpResponseRedirect to ``post_save_redirect``. ``post_save_redirect`` should be a string, and can contain named string- substitution place holders of ``obj`` field names. If ``post_save_redirect`` is None, then redirect to ``obj``'s URL returned by ``get_absolute_url()``. If ``obj`` has no ``get_absolute_url`` method, then raise ImproperlyConfigured. This function is meant to handle the post_save_redirect parameter to the ``create_object`` and ``update_object`` views. """ if post_save_redirect: return HttpResponseRedirect(post_save_redirect % obj.__dict__) elif hasattr(obj, 'get_absolute_url'): return HttpResponseRedirect(obj.get_absolute_url()) else: raise ImproperlyConfigured( "No URL to redirect to. Either pass a post_save_redirect" " parameter to the generic view or define a get_absolute_url" " method on the Model.")
python
def redirect(post_save_redirect, obj): if post_save_redirect: return HttpResponseRedirect(post_save_redirect % obj.__dict__) elif hasattr(obj, 'get_absolute_url'): return HttpResponseRedirect(obj.get_absolute_url()) else: raise ImproperlyConfigured( "No URL to redirect to. Either pass a post_save_redirect" " parameter to the generic view or define a get_absolute_url" " method on the Model.")
[ "def", "redirect", "(", "post_save_redirect", ",", "obj", ")", ":", "if", "post_save_redirect", ":", "return", "HttpResponseRedirect", "(", "post_save_redirect", "%", "obj", ".", "__dict__", ")", "elif", "hasattr", "(", "obj", ",", "'get_absolute_url'", ")", ":"...
Returns a HttpResponseRedirect to ``post_save_redirect``. ``post_save_redirect`` should be a string, and can contain named string- substitution place holders of ``obj`` field names. If ``post_save_redirect`` is None, then redirect to ``obj``'s URL returned by ``get_absolute_url()``. If ``obj`` has no ``get_absolute_url`` method, then raise ImproperlyConfigured. This function is meant to handle the post_save_redirect parameter to the ``create_object`` and ``update_object`` views.
[ "Returns", "a", "HttpResponseRedirect", "to", "post_save_redirect", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/common/create_update.py#L53-L75
16,756
Karaage-Cluster/karaage
karaage/common/create_update.py
lookup_object
def lookup_object(model, object_id, slug, slug_field): """ Return the ``model`` object with the passed ``object_id``. If ``object_id`` is None, then return the object whose ``slug_field`` equals the passed ``slug``. If ``slug`` and ``slug_field`` are not passed, then raise Http404 exception. """ lookup_kwargs = {} if object_id: lookup_kwargs['%s__exact' % model._meta.pk.name] = object_id elif slug and slug_field: lookup_kwargs['%s__exact' % slug_field] = slug else: raise GenericViewError( "Generic view must be called with either an object_id or a" " slug/slug_field.") try: return model.objects.get(**lookup_kwargs) except ObjectDoesNotExist: raise Http404("No %s found for %s" % (model._meta.verbose_name, lookup_kwargs))
python
def lookup_object(model, object_id, slug, slug_field): lookup_kwargs = {} if object_id: lookup_kwargs['%s__exact' % model._meta.pk.name] = object_id elif slug and slug_field: lookup_kwargs['%s__exact' % slug_field] = slug else: raise GenericViewError( "Generic view must be called with either an object_id or a" " slug/slug_field.") try: return model.objects.get(**lookup_kwargs) except ObjectDoesNotExist: raise Http404("No %s found for %s" % (model._meta.verbose_name, lookup_kwargs))
[ "def", "lookup_object", "(", "model", ",", "object_id", ",", "slug", ",", "slug_field", ")", ":", "lookup_kwargs", "=", "{", "}", "if", "object_id", ":", "lookup_kwargs", "[", "'%s__exact'", "%", "model", ".", "_meta", ".", "pk", ".", "name", "]", "=", ...
Return the ``model`` object with the passed ``object_id``. If ``object_id`` is None, then return the object whose ``slug_field`` equals the passed ``slug``. If ``slug`` and ``slug_field`` are not passed, then raise Http404 exception.
[ "Return", "the", "model", "object", "with", "the", "passed", "object_id", ".", "If", "object_id", "is", "None", "then", "return", "the", "object", "whose", "slug_field", "equals", "the", "passed", "slug", ".", "If", "slug", "and", "slug_field", "are", "not",...
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/common/create_update.py#L78-L98
16,757
Karaage-Cluster/karaage
karaage/common/create_update.py
create_object
def create_object( request, model=None, template_name=None, template_loader=loader, extra_context=None, post_save_redirect=None, login_required=False, context_processors=None, form_class=None): """ Generic object-creation function. Templates: ``<app_label>/<model_name>_form.html`` Context: form the form for the object """ if extra_context is None: extra_context = {} if login_required and not request.user.is_authenticated: return redirect_to_login(request.path) model, form_class = get_model_and_form_class(model, form_class) if request.method == 'POST': form = form_class(request.POST, request.FILES) if form.is_valid(): new_object = form.save() msg = ugettext("The %(verbose_name)s was created successfully.") %\ {"verbose_name": model._meta.verbose_name} messages.success(request, msg, fail_silently=True) return redirect(post_save_redirect, new_object) else: form = form_class() # Create the template, context, response if not template_name: template_name = "%s/%s_form.html" % ( model._meta.app_label, model._meta.object_name.lower()) t = template_loader.get_template(template_name) c = { 'form': form, } apply_extra_context(extra_context, c) return HttpResponse(t.render(context=c, request=request))
python
def create_object( request, model=None, template_name=None, template_loader=loader, extra_context=None, post_save_redirect=None, login_required=False, context_processors=None, form_class=None): if extra_context is None: extra_context = {} if login_required and not request.user.is_authenticated: return redirect_to_login(request.path) model, form_class = get_model_and_form_class(model, form_class) if request.method == 'POST': form = form_class(request.POST, request.FILES) if form.is_valid(): new_object = form.save() msg = ugettext("The %(verbose_name)s was created successfully.") %\ {"verbose_name": model._meta.verbose_name} messages.success(request, msg, fail_silently=True) return redirect(post_save_redirect, new_object) else: form = form_class() # Create the template, context, response if not template_name: template_name = "%s/%s_form.html" % ( model._meta.app_label, model._meta.object_name.lower()) t = template_loader.get_template(template_name) c = { 'form': form, } apply_extra_context(extra_context, c) return HttpResponse(t.render(context=c, request=request))
[ "def", "create_object", "(", "request", ",", "model", "=", "None", ",", "template_name", "=", "None", ",", "template_loader", "=", "loader", ",", "extra_context", "=", "None", ",", "post_save_redirect", "=", "None", ",", "login_required", "=", "False", ",", ...
Generic object-creation function. Templates: ``<app_label>/<model_name>_form.html`` Context: form the form for the object
[ "Generic", "object", "-", "creation", "function", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/common/create_update.py#L101-L140
16,758
Karaage-Cluster/karaage
karaage/common/create_update.py
update_object
def update_object( request, model=None, object_id=None, slug=None, slug_field='slug', template_name=None, template_loader=loader, extra_context=None, post_save_redirect=None, login_required=False, context_processors=None, template_object_name='object', form_class=None): """ Generic object-update function. Templates: ``<app_label>/<model_name>_form.html`` Context: form the form for the object object the original object being edited """ if extra_context is None: extra_context = {} if login_required and not request.user.is_authenticated: return redirect_to_login(request.path) model, form_class = get_model_and_form_class(model, form_class) obj = lookup_object(model, object_id, slug, slug_field) if request.method == 'POST': form = form_class(request.POST, request.FILES, instance=obj) if form.is_valid(): obj = form.save() msg = ugettext("The %(verbose_name)s was updated successfully.") %\ {"verbose_name": model._meta.verbose_name} messages.success(request, msg, fail_silently=True) return redirect(post_save_redirect, obj) else: form = form_class(instance=obj) if not template_name: template_name = "%s/%s_form.html" % ( model._meta.app_label, model._meta.object_name.lower()) t = template_loader.get_template(template_name) c = { 'form': form, template_object_name: obj, } apply_extra_context(extra_context, c) response = HttpResponse(t.render(context=c, request=request)) return response
python
def update_object( request, model=None, object_id=None, slug=None, slug_field='slug', template_name=None, template_loader=loader, extra_context=None, post_save_redirect=None, login_required=False, context_processors=None, template_object_name='object', form_class=None): if extra_context is None: extra_context = {} if login_required and not request.user.is_authenticated: return redirect_to_login(request.path) model, form_class = get_model_and_form_class(model, form_class) obj = lookup_object(model, object_id, slug, slug_field) if request.method == 'POST': form = form_class(request.POST, request.FILES, instance=obj) if form.is_valid(): obj = form.save() msg = ugettext("The %(verbose_name)s was updated successfully.") %\ {"verbose_name": model._meta.verbose_name} messages.success(request, msg, fail_silently=True) return redirect(post_save_redirect, obj) else: form = form_class(instance=obj) if not template_name: template_name = "%s/%s_form.html" % ( model._meta.app_label, model._meta.object_name.lower()) t = template_loader.get_template(template_name) c = { 'form': form, template_object_name: obj, } apply_extra_context(extra_context, c) response = HttpResponse(t.render(context=c, request=request)) return response
[ "def", "update_object", "(", "request", ",", "model", "=", "None", ",", "object_id", "=", "None", ",", "slug", "=", "None", ",", "slug_field", "=", "'slug'", ",", "template_name", "=", "None", ",", "template_loader", "=", "loader", ",", "extra_context", "=...
Generic object-update function. Templates: ``<app_label>/<model_name>_form.html`` Context: form the form for the object object the original object being edited
[ "Generic", "object", "-", "update", "function", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/common/create_update.py#L143-L188
16,759
Karaage-Cluster/karaage
karaage/common/create_update.py
delete_object
def delete_object( request, model, post_delete_redirect, object_id=None, slug=None, slug_field='slug', template_name=None, template_loader=loader, extra_context=None, login_required=False, context_processors=None, template_object_name='object'): """ Generic object-delete function. The given template will be used to confirm deletetion if this view is fetched using GET; for safty, deletion will only be performed if this view is POSTed. Templates: ``<app_label>/<model_name>_confirm_delete.html`` Context: object the original object being deleted """ if extra_context is None: extra_context = {} if login_required and not request.user.is_authenticated: return redirect_to_login(request.path) obj = lookup_object(model, object_id, slug, slug_field) if request.method == 'POST': obj.delete() msg = ugettext("The %(verbose_name)s was deleted.") %\ {"verbose_name": model._meta.verbose_name} messages.success(request, msg, fail_silently=True) return HttpResponseRedirect(post_delete_redirect) else: if not template_name: template_name = "%s/%s_confirm_delete.html" % ( model._meta.app_label, model._meta.object_name.lower()) t = template_loader.get_template(template_name) c = { template_object_name: obj, } apply_extra_context(extra_context, c) response = HttpResponse(t.render(context=c, request=request)) return response
python
def delete_object( request, model, post_delete_redirect, object_id=None, slug=None, slug_field='slug', template_name=None, template_loader=loader, extra_context=None, login_required=False, context_processors=None, template_object_name='object'): if extra_context is None: extra_context = {} if login_required and not request.user.is_authenticated: return redirect_to_login(request.path) obj = lookup_object(model, object_id, slug, slug_field) if request.method == 'POST': obj.delete() msg = ugettext("The %(verbose_name)s was deleted.") %\ {"verbose_name": model._meta.verbose_name} messages.success(request, msg, fail_silently=True) return HttpResponseRedirect(post_delete_redirect) else: if not template_name: template_name = "%s/%s_confirm_delete.html" % ( model._meta.app_label, model._meta.object_name.lower()) t = template_loader.get_template(template_name) c = { template_object_name: obj, } apply_extra_context(extra_context, c) response = HttpResponse(t.render(context=c, request=request)) return response
[ "def", "delete_object", "(", "request", ",", "model", ",", "post_delete_redirect", ",", "object_id", "=", "None", ",", "slug", "=", "None", ",", "slug_field", "=", "'slug'", ",", "template_name", "=", "None", ",", "template_loader", "=", "loader", ",", "extr...
Generic object-delete function. The given template will be used to confirm deletetion if this view is fetched using GET; for safty, deletion will only be performed if this view is POSTed. Templates: ``<app_label>/<model_name>_confirm_delete.html`` Context: object the original object being deleted
[ "Generic", "object", "-", "delete", "function", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/common/create_update.py#L191-L231
16,760
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
get_url
def get_url(request, application, roles, label=None): """ Retrieve a link that will work for the current user. """ args = [] if label is not None: args.append(label) # don't use secret_token unless we have to if 'is_admin' in roles: # Administrators can access anything without secrets require_secret = False elif 'is_applicant' not in roles: # we never give secrets to anybody but the applicant require_secret = False elif not request.user.is_authenticated: # If applicant is not logged in, we redirect them to secret URL require_secret = True elif request.user != application.applicant: # If logged in as different person, we redirect them to secret # URL. This could happen if the application was open with a different # email address, and the applicant is logged in when accessing it. require_secret = True else: # otherwise redirect them to URL that requires correct login. require_secret = False # return required url if not require_secret: url = reverse( 'kg_application_detail', args=[application.pk, application.state] + args) else: url = reverse( 'kg_application_unauthenticated', args=[application.secret_token, application.state] + args) return url
python
def get_url(request, application, roles, label=None): args = [] if label is not None: args.append(label) # don't use secret_token unless we have to if 'is_admin' in roles: # Administrators can access anything without secrets require_secret = False elif 'is_applicant' not in roles: # we never give secrets to anybody but the applicant require_secret = False elif not request.user.is_authenticated: # If applicant is not logged in, we redirect them to secret URL require_secret = True elif request.user != application.applicant: # If logged in as different person, we redirect them to secret # URL. This could happen if the application was open with a different # email address, and the applicant is logged in when accessing it. require_secret = True else: # otherwise redirect them to URL that requires correct login. require_secret = False # return required url if not require_secret: url = reverse( 'kg_application_detail', args=[application.pk, application.state] + args) else: url = reverse( 'kg_application_unauthenticated', args=[application.secret_token, application.state] + args) return url
[ "def", "get_url", "(", "request", ",", "application", ",", "roles", ",", "label", "=", "None", ")", ":", "args", "=", "[", "]", "if", "label", "is", "not", "None", ":", "args", ".", "append", "(", "label", ")", "# don't use secret_token unless we have to",...
Retrieve a link that will work for the current user.
[ "Retrieve", "a", "link", "that", "will", "work", "for", "the", "current", "user", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L69-L103
16,761
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
get_admin_email_link
def get_admin_email_link(application): """ Retrieve a link that can be emailed to the administrator. """ url = '%s/applications/%d/' % (settings.ADMIN_BASE_URL, application.pk) is_secret = False return url, is_secret
python
def get_admin_email_link(application): url = '%s/applications/%d/' % (settings.ADMIN_BASE_URL, application.pk) is_secret = False return url, is_secret
[ "def", "get_admin_email_link", "(", "application", ")", ":", "url", "=", "'%s/applications/%d/'", "%", "(", "settings", ".", "ADMIN_BASE_URL", ",", "application", ".", "pk", ")", "is_secret", "=", "False", "return", "url", ",", "is_secret" ]
Retrieve a link that can be emailed to the administrator.
[ "Retrieve", "a", "link", "that", "can", "be", "emailed", "to", "the", "administrator", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L106-L110
16,762
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
get_registration_email_link
def get_registration_email_link(application): """ Retrieve a link that can be emailed to the logged other users. """ url = '%s/applications/%d/' % ( settings.REGISTRATION_BASE_URL, application.pk) is_secret = False return url, is_secret
python
def get_registration_email_link(application): url = '%s/applications/%d/' % ( settings.REGISTRATION_BASE_URL, application.pk) is_secret = False return url, is_secret
[ "def", "get_registration_email_link", "(", "application", ")", ":", "url", "=", "'%s/applications/%d/'", "%", "(", "settings", ".", "REGISTRATION_BASE_URL", ",", "application", ".", "pk", ")", "is_secret", "=", "False", "return", "url", ",", "is_secret" ]
Retrieve a link that can be emailed to the logged other users.
[ "Retrieve", "a", "link", "that", "can", "be", "emailed", "to", "the", "logged", "other", "users", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L113-L118
16,763
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
get_email_link
def get_email_link(application): """ Retrieve a link that can be emailed to the applicant. """ # don't use secret_token unless we have to if (application.content_type.model == 'person' and application.applicant.has_usable_password()): url = '%s/applications/%d/' % ( settings.REGISTRATION_BASE_URL, application.pk) is_secret = False else: url = '%s/applications/%s/' % ( settings.REGISTRATION_BASE_URL, application.secret_token) is_secret = True return url, is_secret
python
def get_email_link(application): # don't use secret_token unless we have to if (application.content_type.model == 'person' and application.applicant.has_usable_password()): url = '%s/applications/%d/' % ( settings.REGISTRATION_BASE_URL, application.pk) is_secret = False else: url = '%s/applications/%s/' % ( settings.REGISTRATION_BASE_URL, application.secret_token) is_secret = True return url, is_secret
[ "def", "get_email_link", "(", "application", ")", ":", "# don't use secret_token unless we have to", "if", "(", "application", ".", "content_type", ".", "model", "==", "'person'", "and", "application", ".", "applicant", ".", "has_usable_password", "(", ")", ")", ":"...
Retrieve a link that can be emailed to the applicant.
[ "Retrieve", "a", "link", "that", "can", "be", "emailed", "to", "the", "applicant", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L121-L133
16,764
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
StateMachine.start
def start(self, request, application, extra_roles=None): """ Continue the state machine at first state. """ # Get the authentication of the current user roles = self._get_roles_for_request(request, application) if extra_roles is not None: roles.update(extra_roles) # Ensure current user is authenticated. If user isn't applicant, # leader, delegate or admin, they probably shouldn't be here. if 'is_authorised' not in roles: return HttpResponseForbidden('<h1>Access Denied</h1>') # Go to first state. return self._next(request, application, roles, self._first_state)
python
def start(self, request, application, extra_roles=None): # Get the authentication of the current user roles = self._get_roles_for_request(request, application) if extra_roles is not None: roles.update(extra_roles) # Ensure current user is authenticated. If user isn't applicant, # leader, delegate or admin, they probably shouldn't be here. if 'is_authorised' not in roles: return HttpResponseForbidden('<h1>Access Denied</h1>') # Go to first state. return self._next(request, application, roles, self._first_state)
[ "def", "start", "(", "self", ",", "request", ",", "application", ",", "extra_roles", "=", "None", ")", ":", "# Get the authentication of the current user", "roles", "=", "self", ".", "_get_roles_for_request", "(", "request", ",", "application", ")", "if", "extra_r...
Continue the state machine at first state.
[ "Continue", "the", "state", "machine", "at", "first", "state", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L158-L171
16,765
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
StateMachine.process
def process( self, request, application, expected_state, label, extra_roles=None): """ Process the view request at the current state. """ # Get the authentication of the current user roles = self._get_roles_for_request(request, application) if extra_roles is not None: roles.update(extra_roles) # Ensure current user is authenticated. If user isn't applicant, # leader, delegate or admin, they probably shouldn't be here. if 'is_authorised' not in roles: return HttpResponseForbidden('<h1>Access Denied</h1>') # If user didn't supply state on URL, redirect to full URL. if expected_state is None: url = get_url(request, application, roles, label) return HttpResponseRedirect(url) # Check that the current state is valid. if application.state not in self._config: raise RuntimeError("Invalid current state '%s'" % application.state) # If state user expected is different to state we are in, warn user # and jump to expected state. if expected_state != application.state: # post data will be lost if request.method == "POST": messages.warning( request, "Discarding request and jumping to current state.") # note we discard the label, it probably isn't relevant for new # state url = get_url(request, application, roles) return HttpResponseRedirect(url) # Get the current state for this application state_config = self._config[application.state] # Finally do something instance = load_state_instance(state_config) if request.method == "GET": # if method is GET, state does not ever change. 
response = instance.get_next_config(request, application, label, roles) assert isinstance(response, HttpResponse) return response elif request.method == "POST": # if method is POST, it can return a HttpResponse or a string response = instance.get_next_config(request, application, label, roles) if isinstance(response, HttpResponse): # If it returned a HttpResponse, state not changed, just # display return response else: # If it returned a string, lookit up in the actions for this # state next_config = response # Go to the next state return self._next(request, application, roles, next_config) else: # Shouldn't happen, user did something weird return HttpResponseBadRequest("<h1>Bad Request</h1>")
python
def process( self, request, application, expected_state, label, extra_roles=None): # Get the authentication of the current user roles = self._get_roles_for_request(request, application) if extra_roles is not None: roles.update(extra_roles) # Ensure current user is authenticated. If user isn't applicant, # leader, delegate or admin, they probably shouldn't be here. if 'is_authorised' not in roles: return HttpResponseForbidden('<h1>Access Denied</h1>') # If user didn't supply state on URL, redirect to full URL. if expected_state is None: url = get_url(request, application, roles, label) return HttpResponseRedirect(url) # Check that the current state is valid. if application.state not in self._config: raise RuntimeError("Invalid current state '%s'" % application.state) # If state user expected is different to state we are in, warn user # and jump to expected state. if expected_state != application.state: # post data will be lost if request.method == "POST": messages.warning( request, "Discarding request and jumping to current state.") # note we discard the label, it probably isn't relevant for new # state url = get_url(request, application, roles) return HttpResponseRedirect(url) # Get the current state for this application state_config = self._config[application.state] # Finally do something instance = load_state_instance(state_config) if request.method == "GET": # if method is GET, state does not ever change. 
response = instance.get_next_config(request, application, label, roles) assert isinstance(response, HttpResponse) return response elif request.method == "POST": # if method is POST, it can return a HttpResponse or a string response = instance.get_next_config(request, application, label, roles) if isinstance(response, HttpResponse): # If it returned a HttpResponse, state not changed, just # display return response else: # If it returned a string, lookit up in the actions for this # state next_config = response # Go to the next state return self._next(request, application, roles, next_config) else: # Shouldn't happen, user did something weird return HttpResponseBadRequest("<h1>Bad Request</h1>")
[ "def", "process", "(", "self", ",", "request", ",", "application", ",", "expected_state", ",", "label", ",", "extra_roles", "=", "None", ")", ":", "# Get the authentication of the current user", "roles", "=", "self", ".", "_get_roles_for_request", "(", "request", ...
Process the view request at the current state.
[ "Process", "the", "view", "request", "at", "the", "current", "state", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L173-L237
16,766
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
StateMachine._get_roles_for_request
def _get_roles_for_request(request, application): """ Check the authentication of the current user. """ roles = application.get_roles_for_person(request.user) if common.is_admin(request): roles.add("is_admin") roles.add('is_authorised') return roles
python
def _get_roles_for_request(request, application): roles = application.get_roles_for_person(request.user) if common.is_admin(request): roles.add("is_admin") roles.add('is_authorised') return roles
[ "def", "_get_roles_for_request", "(", "request", ",", "application", ")", ":", "roles", "=", "application", ".", "get_roles_for_person", "(", "request", ".", "user", ")", "if", "common", ".", "is_admin", "(", "request", ")", ":", "roles", ".", "add", "(", ...
Check the authentication of the current user.
[ "Check", "the", "authentication", "of", "the", "current", "user", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L243-L251
16,767
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
StateMachine._next
def _next(self, request, application, roles, next_config): """ Continue the state machine at given state. """ # we only support state changes for POST requests if request.method == "POST": key = None # If next state is a transition, process it while True: # We do not expect to get a direct state transition here. assert next_config['type'] in ['goto', 'transition'] while next_config['type'] == 'goto': key = next_config['key'] next_config = self._config[key] instance = load_instance(next_config) if not isinstance(instance, Transition): break next_config = instance.get_next_config(request, application, roles) # lookup next state assert key is not None state_key = key # enter that state instance.enter_state(request, application) application.state = state_key application.save() # log details log.change(application.application_ptr, "state: %s" % instance.name) # redirect to this new state url = get_url(request, application, roles) return HttpResponseRedirect(url) else: return HttpResponseBadRequest("<h1>Bad Request</h1>")
python
def _next(self, request, application, roles, next_config): # we only support state changes for POST requests if request.method == "POST": key = None # If next state is a transition, process it while True: # We do not expect to get a direct state transition here. assert next_config['type'] in ['goto', 'transition'] while next_config['type'] == 'goto': key = next_config['key'] next_config = self._config[key] instance = load_instance(next_config) if not isinstance(instance, Transition): break next_config = instance.get_next_config(request, application, roles) # lookup next state assert key is not None state_key = key # enter that state instance.enter_state(request, application) application.state = state_key application.save() # log details log.change(application.application_ptr, "state: %s" % instance.name) # redirect to this new state url = get_url(request, application, roles) return HttpResponseRedirect(url) else: return HttpResponseBadRequest("<h1>Bad Request</h1>")
[ "def", "_next", "(", "self", ",", "request", ",", "application", ",", "roles", ",", "next_config", ")", ":", "# we only support state changes for POST requests", "if", "request", ".", "method", "==", "\"POST\"", ":", "key", "=", "None", "# If next state is a transit...
Continue the state machine at given state.
[ "Continue", "the", "state", "machine", "at", "given", "state", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L253-L291
16,768
Karaage-Cluster/karaage
karaage/plugins/kgapplications/views/base.py
State.get_next_action
def get_next_action(self, request, application, label, roles): """ Django view method. We provide a default detail view for applications. """ # We only provide a view for when no label provided if label is not None: return HttpResponseBadRequest("<h1>Bad Request</h1>") # only certain actions make sense for default view actions = self.get_actions(request, application, roles) # process the request in default view if request.method == "GET": context = self.context context.update({ 'application': application, 'actions': actions, 'state': self.name, 'roles': roles}) return render( template_name='kgapplications/common_detail.html', context=context, request=request) elif request.method == "POST": for action in actions: if action in request.POST: return action # we don't know how to handle this request. return HttpResponseBadRequest("<h1>Bad Request</h1>")
python
def get_next_action(self, request, application, label, roles): # We only provide a view for when no label provided if label is not None: return HttpResponseBadRequest("<h1>Bad Request</h1>") # only certain actions make sense for default view actions = self.get_actions(request, application, roles) # process the request in default view if request.method == "GET": context = self.context context.update({ 'application': application, 'actions': actions, 'state': self.name, 'roles': roles}) return render( template_name='kgapplications/common_detail.html', context=context, request=request) elif request.method == "POST": for action in actions: if action in request.POST: return action # we don't know how to handle this request. return HttpResponseBadRequest("<h1>Bad Request</h1>")
[ "def", "get_next_action", "(", "self", ",", "request", ",", "application", ",", "label", ",", "roles", ")", ":", "# We only provide a view for when no label provided", "if", "label", "is", "not", "None", ":", "return", "HttpResponseBadRequest", "(", "\"<h1>Bad Request...
Django view method. We provide a default detail view for applications.
[ "Django", "view", "method", ".", "We", "provide", "a", "default", "detail", "view", "for", "applications", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/views/base.py#L326-L355
16,769
mozilla/python_moztelemetry
moztelemetry/histogram.py
Histogram.get_value
def get_value(self, only_median=False, autocast=True): """ Returns a scalar for flag and count histograms. Otherwise it returns either the raw histogram represented as a pandas Series or just the median if only_median is True. If autocast is disabled the underlying pandas series is always returned as is. """ if not autocast: return self.buckets if self.kind in ["exponential", "linear", "enumerated", "boolean"]: return float(self.percentile(50)) if only_median else self.buckets elif self.kind == "categorical" and not only_median: return self.buckets elif self.kind == "count": return int(self.buckets[0]) elif self.kind == "flag": return self.buckets[1] == 1 else: assert(False)
python
def get_value(self, only_median=False, autocast=True): if not autocast: return self.buckets if self.kind in ["exponential", "linear", "enumerated", "boolean"]: return float(self.percentile(50)) if only_median else self.buckets elif self.kind == "categorical" and not only_median: return self.buckets elif self.kind == "count": return int(self.buckets[0]) elif self.kind == "flag": return self.buckets[1] == 1 else: assert(False)
[ "def", "get_value", "(", "self", ",", "only_median", "=", "False", ",", "autocast", "=", "True", ")", ":", "if", "not", "autocast", ":", "return", "self", ".", "buckets", "if", "self", ".", "kind", "in", "[", "\"exponential\"", ",", "\"linear\"", ",", ...
Returns a scalar for flag and count histograms. Otherwise it returns either the raw histogram represented as a pandas Series or just the median if only_median is True. If autocast is disabled the underlying pandas series is always returned as is.
[ "Returns", "a", "scalar", "for", "flag", "and", "count", "histograms", ".", "Otherwise", "it", "returns", "either", "the", "raw", "histogram", "represented", "as", "a", "pandas", "Series", "or", "just", "the", "median", "if", "only_median", "is", "True", "."...
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/histogram.py#L178-L198
16,770
mozilla/python_moztelemetry
moztelemetry/histogram.py
Histogram.percentile
def percentile(self, percentile): """ Returns the nth percentile of the histogram. """ assert(percentile >= 0 and percentile <= 100) assert(self.kind in ["exponential", "linear", "enumerated", "boolean"]) fraction = percentile / 100 to_count = fraction * self.buckets.sum() percentile_bucket = 0 for percentile_bucket in range(len(self.buckets)): freq = self.buckets.values[percentile_bucket] if to_count - freq <= 0: break to_count -= freq percentile_lower_boundary = self.buckets.index[percentile_bucket] percentile_frequency = self.buckets.values[percentile_bucket] if percentile_bucket == len(self.buckets) - 1 or percentile_frequency == 0: return percentile_lower_boundary width = self.buckets.index[percentile_bucket + 1] - self.buckets.index[percentile_bucket] return percentile_lower_boundary + width * to_count / percentile_frequency
python
def percentile(self, percentile): assert(percentile >= 0 and percentile <= 100) assert(self.kind in ["exponential", "linear", "enumerated", "boolean"]) fraction = percentile / 100 to_count = fraction * self.buckets.sum() percentile_bucket = 0 for percentile_bucket in range(len(self.buckets)): freq = self.buckets.values[percentile_bucket] if to_count - freq <= 0: break to_count -= freq percentile_lower_boundary = self.buckets.index[percentile_bucket] percentile_frequency = self.buckets.values[percentile_bucket] if percentile_bucket == len(self.buckets) - 1 or percentile_frequency == 0: return percentile_lower_boundary width = self.buckets.index[percentile_bucket + 1] - self.buckets.index[percentile_bucket] return percentile_lower_boundary + width * to_count / percentile_frequency
[ "def", "percentile", "(", "self", ",", "percentile", ")", ":", "assert", "(", "percentile", ">=", "0", "and", "percentile", "<=", "100", ")", "assert", "(", "self", ".", "kind", "in", "[", "\"exponential\"", ",", "\"linear\"", ",", "\"enumerated\"", ",", ...
Returns the nth percentile of the histogram.
[ "Returns", "the", "nth", "percentile", "of", "the", "histogram", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/histogram.py#L204-L226
16,771
daler/trackhub
trackhub/track.py
BaseTrack.tracktype
def tracktype(self, tracktype): """ When setting the track type, the valid parameters for this track type need to be set as well. """ self._tracktype = tracktype if tracktype is not None: if 'bed' in tracktype.lower(): tracktype = 'bigBed' elif 'wig' in tracktype.lower(): tracktype = 'bigWig' self.params.update(constants.track_typespecific_fields[tracktype])
python
def tracktype(self, tracktype): self._tracktype = tracktype if tracktype is not None: if 'bed' in tracktype.lower(): tracktype = 'bigBed' elif 'wig' in tracktype.lower(): tracktype = 'bigWig' self.params.update(constants.track_typespecific_fields[tracktype])
[ "def", "tracktype", "(", "self", ",", "tracktype", ")", ":", "self", ".", "_tracktype", "=", "tracktype", "if", "tracktype", "is", "not", "None", ":", "if", "'bed'", "in", "tracktype", ".", "lower", "(", ")", ":", "tracktype", "=", "'bigBed'", "elif", ...
When setting the track type, the valid parameters for this track type need to be set as well.
[ "When", "setting", "the", "track", "type", "the", "valid", "parameters", "for", "this", "track", "type", "need", "to", "be", "set", "as", "well", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L245-L256
16,772
daler/trackhub
trackhub/track.py
BaseTrack.add_subgroups
def add_subgroups(self, subgroups): """ Update the subgroups for this track. Note that in contrast to :meth:`CompositeTrack`, which takes a list of :class:`SubGroupDefinition` objects representing the allowed subgroups, this method takes a single dictionary indicating the particular subgroups for this track. Parameters ---------- subgroups : dict Dictionary of subgroups, e.g., {'celltype': 'K562', 'treatment': 'a'}. Each key must match a SubGroupDefinition name in the composite's subgroups list. Each value must match a key in that SubGroupDefinition.mapping dictionary. """ if subgroups is None: subgroups = {} assert isinstance(subgroups, dict) self.subgroups.update(subgroups)
python
def add_subgroups(self, subgroups): if subgroups is None: subgroups = {} assert isinstance(subgroups, dict) self.subgroups.update(subgroups)
[ "def", "add_subgroups", "(", "self", ",", "subgroups", ")", ":", "if", "subgroups", "is", "None", ":", "subgroups", "=", "{", "}", "assert", "isinstance", "(", "subgroups", ",", "dict", ")", "self", ".", "subgroups", ".", "update", "(", "subgroups", ")" ...
Update the subgroups for this track. Note that in contrast to :meth:`CompositeTrack`, which takes a list of :class:`SubGroupDefinition` objects representing the allowed subgroups, this method takes a single dictionary indicating the particular subgroups for this track. Parameters ---------- subgroups : dict Dictionary of subgroups, e.g., {'celltype': 'K562', 'treatment': 'a'}. Each key must match a SubGroupDefinition name in the composite's subgroups list. Each value must match a key in that SubGroupDefinition.mapping dictionary.
[ "Update", "the", "subgroups", "for", "this", "track", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L300-L321
16,773
daler/trackhub
trackhub/track.py
BaseTrack._str_subgroups
def _str_subgroups(self): """ helper function to render subgroups as a string """ if not self.subgroups: return "" return ['subGroups %s' % ' '.join(['%s=%s' % (k, v) for (k, v) in self.subgroups.items()])]
python
def _str_subgroups(self): if not self.subgroups: return "" return ['subGroups %s' % ' '.join(['%s=%s' % (k, v) for (k, v) in self.subgroups.items()])]
[ "def", "_str_subgroups", "(", "self", ")", ":", "if", "not", "self", ".", "subgroups", ":", "return", "\"\"", "return", "[", "'subGroups %s'", "%", "' '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ...
helper function to render subgroups as a string
[ "helper", "function", "to", "render", "subgroups", "as", "a", "string" ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L363-L371
16,774
daler/trackhub
trackhub/track.py
CompositeTrack.add_subgroups
def add_subgroups(self, subgroups): """ Add a list of SubGroupDefinition objects to this composite. Note that in contrast to :meth:`BaseTrack`, which takes a single dictionary indicating the particular subgroups for the track, this method takes a list of :class:`SubGroupDefinition` objects representing the allowed subgroups for the composite. :param subgroups: List of SubGroupDefinition objects. """ if subgroups is None: subgroups = {} _subgroups = {} for sg in subgroups: assert isinstance(sg, SubGroupDefinition) _subgroups[sg.name] = sg self.subgroups = _subgroups
python
def add_subgroups(self, subgroups): if subgroups is None: subgroups = {} _subgroups = {} for sg in subgroups: assert isinstance(sg, SubGroupDefinition) _subgroups[sg.name] = sg self.subgroups = _subgroups
[ "def", "add_subgroups", "(", "self", ",", "subgroups", ")", ":", "if", "subgroups", "is", "None", ":", "subgroups", "=", "{", "}", "_subgroups", "=", "{", "}", "for", "sg", "in", "subgroups", ":", "assert", "isinstance", "(", "sg", ",", "SubGroupDefiniti...
Add a list of SubGroupDefinition objects to this composite. Note that in contrast to :meth:`BaseTrack`, which takes a single dictionary indicating the particular subgroups for the track, this method takes a list of :class:`SubGroupDefinition` objects representing the allowed subgroups for the composite. :param subgroups: List of SubGroupDefinition objects.
[ "Add", "a", "list", "of", "SubGroupDefinition", "objects", "to", "this", "composite", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L454-L472
16,775
daler/trackhub
trackhub/track.py
CompositeTrack.add_view
def add_view(self, view): """ Add a ViewTrack object to this composite. :param view: A ViewTrack object. """ self.add_child(view) self.views.append(view)
python
def add_view(self, view): self.add_child(view) self.views.append(view)
[ "def", "add_view", "(", "self", ",", "view", ")", ":", "self", ".", "add_child", "(", "view", ")", "self", ".", "views", ".", "append", "(", "view", ")" ]
Add a ViewTrack object to this composite. :param view: A ViewTrack object.
[ "Add", "a", "ViewTrack", "object", "to", "this", "composite", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L481-L490
16,776
daler/trackhub
trackhub/track.py
CompositeTrack._str_subgroups
def _str_subgroups(self): """ renders subgroups to a list of strings """ s = [] i = 0 # if there are any views, there must be a subGroup1 view View tag=val # as the first one. So create it automatically here if len(self.views) > 0: mapping = dict((i.view, i.view) for i in self.views) view_subgroup = SubGroupDefinition( name='view', label='Views', mapping=mapping) i += 1 s.append('subGroup%s %s' % (i, view_subgroup)) for subgroup in self.subgroups.values(): i += 1 s.append('subGroup%s %s' % (i, subgroup)) return s
python
def _str_subgroups(self): s = [] i = 0 # if there are any views, there must be a subGroup1 view View tag=val # as the first one. So create it automatically here if len(self.views) > 0: mapping = dict((i.view, i.view) for i in self.views) view_subgroup = SubGroupDefinition( name='view', label='Views', mapping=mapping) i += 1 s.append('subGroup%s %s' % (i, view_subgroup)) for subgroup in self.subgroups.values(): i += 1 s.append('subGroup%s %s' % (i, subgroup)) return s
[ "def", "_str_subgroups", "(", "self", ")", ":", "s", "=", "[", "]", "i", "=", "0", "# if there are any views, there must be a subGroup1 view View tag=val", "# as the first one. So create it automatically here", "if", "len", "(", "self", ".", "views", ")", ">", "0", "...
renders subgroups to a list of strings
[ "renders", "subgroups", "to", "a", "list", "of", "strings" ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L492-L514
16,777
daler/trackhub
trackhub/track.py
ViewTrack.add_tracks
def add_tracks(self, subtracks): """ Add one or more tracks to this view. subtracks : Track or iterable of Tracks A single Track instance or an iterable of them. """ if isinstance(subtracks, Track): subtracks = [subtracks] for subtrack in subtracks: subtrack.subgroups['view'] = self.view self.add_child(subtrack) self.subtracks.append(subtrack)
python
def add_tracks(self, subtracks): if isinstance(subtracks, Track): subtracks = [subtracks] for subtrack in subtracks: subtrack.subgroups['view'] = self.view self.add_child(subtrack) self.subtracks.append(subtrack)
[ "def", "add_tracks", "(", "self", ",", "subtracks", ")", ":", "if", "isinstance", "(", "subtracks", ",", "Track", ")", ":", "subtracks", "=", "[", "subtracks", "]", "for", "subtrack", "in", "subtracks", ":", "subtrack", ".", "subgroups", "[", "'view'", "...
Add one or more tracks to this view. subtracks : Track or iterable of Tracks A single Track instance or an iterable of them.
[ "Add", "one", "or", "more", "tracks", "to", "this", "view", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L570-L582
16,778
daler/trackhub
trackhub/track.py
SuperTrack.add_tracks
def add_tracks(self, subtracks): """ Add one or more tracks. subtrack : Track or iterable of Tracks """ if isinstance(subtracks, BaseTrack): subtracks = [subtracks] for subtrack in subtracks: self.add_child(subtrack) self.subtracks.append(subtrack)
python
def add_tracks(self, subtracks): if isinstance(subtracks, BaseTrack): subtracks = [subtracks] for subtrack in subtracks: self.add_child(subtrack) self.subtracks.append(subtrack)
[ "def", "add_tracks", "(", "self", ",", "subtracks", ")", ":", "if", "isinstance", "(", "subtracks", ",", "BaseTrack", ")", ":", "subtracks", "=", "[", "subtracks", "]", "for", "subtrack", "in", "subtracks", ":", "self", ".", "add_child", "(", "subtrack", ...
Add one or more tracks. subtrack : Track or iterable of Tracks
[ "Add", "one", "or", "more", "tracks", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L624-L634
16,779
mozilla/python_moztelemetry
moztelemetry/parse_scalars.py
load_scalars
def load_scalars(filename, strict_type_checks=True): """Parses a YAML file containing the scalar definition. :param filename: the YAML file containing the scalars definition. :raises ParserError: if the scalar file cannot be opened or parsed. """ # Parse the scalar definitions from the YAML file. scalars = None try: with open(filename, 'r') as f: scalars = yaml.safe_load(f) except IOError as e: raise ParserError('Error opening ' + filename + ': ' + e.message) except ValueError as e: raise ParserError('Error parsing scalars in {}: {}' '.\nSee: {}'.format(filename, e.message, BASE_DOC_URL)) scalar_list = [] # Scalars are defined in a fixed two-level hierarchy within the definition file. # The first level contains the category name, while the second level contains the # probe name (e.g. "category.name: probe: ..."). for category_name in scalars: category = scalars[category_name] # Make sure that the category has at least one probe in it. if not category or len(category) == 0: raise ParserError('Category "{}" must have at least one probe in it' + '.\nSee: {}'.format(category_name, BASE_DOC_URL)) for probe_name in category: # We found a scalar type. Go ahead and parse it. scalar_info = category[probe_name] scalar_list.append( ScalarType(category_name, probe_name, scalar_info, strict_type_checks)) return scalar_list
python
def load_scalars(filename, strict_type_checks=True): # Parse the scalar definitions from the YAML file. scalars = None try: with open(filename, 'r') as f: scalars = yaml.safe_load(f) except IOError as e: raise ParserError('Error opening ' + filename + ': ' + e.message) except ValueError as e: raise ParserError('Error parsing scalars in {}: {}' '.\nSee: {}'.format(filename, e.message, BASE_DOC_URL)) scalar_list = [] # Scalars are defined in a fixed two-level hierarchy within the definition file. # The first level contains the category name, while the second level contains the # probe name (e.g. "category.name: probe: ..."). for category_name in scalars: category = scalars[category_name] # Make sure that the category has at least one probe in it. if not category or len(category) == 0: raise ParserError('Category "{}" must have at least one probe in it' + '.\nSee: {}'.format(category_name, BASE_DOC_URL)) for probe_name in category: # We found a scalar type. Go ahead and parse it. scalar_info = category[probe_name] scalar_list.append( ScalarType(category_name, probe_name, scalar_info, strict_type_checks)) return scalar_list
[ "def", "load_scalars", "(", "filename", ",", "strict_type_checks", "=", "True", ")", ":", "# Parse the scalar definitions from the YAML file.", "scalars", "=", "None", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "scalars", "=", ...
Parses a YAML file containing the scalar definition. :param filename: the YAML file containing the scalars definition. :raises ParserError: if the scalar file cannot be opened or parsed.
[ "Parses", "a", "YAML", "file", "containing", "the", "scalar", "definition", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/parse_scalars.py#L294-L331
16,780
mozilla/python_moztelemetry
moztelemetry/parse_scalars.py
ScalarType.validate_values
def validate_values(self, definition): """This function checks that the fields have the correct values. :param definition: the dictionary containing the scalar properties. :raises ParserError: if a scalar definition field contains an unexpected value. """ if not self._strict_type_checks: return # Validate the scalar kind. scalar_kind = definition.get('kind') if scalar_kind not in SCALAR_TYPES_MAP.keys(): raise ParserError(self._name + ' - unknown scalar kind: ' + scalar_kind + '.\nSee: {}'.format(BASE_DOC_URL)) # Validate the collection policy. collection_policy = definition.get('release_channel_collection', None) if collection_policy and collection_policy not in ['opt-in', 'opt-out']: raise ParserError(self._name + ' - unknown collection policy: ' + collection_policy + '.\nSee: {}#optional-fields'.format(BASE_DOC_URL)) # Validate the cpp_guard. cpp_guard = definition.get('cpp_guard') if cpp_guard and re.match(r'\W', cpp_guard): raise ParserError(self._name + ' - invalid cpp_guard: ' + cpp_guard + '.\nSee: {}#optional-fields'.format(BASE_DOC_URL)) # Validate record_in_processes. record_in_processes = definition.get('record_in_processes', []) for proc in record_in_processes: if not utils.is_valid_process_name(proc): raise ParserError(self._name + ' - unknown value in record_in_processes: ' + proc + '.\nSee: {}'.format(BASE_DOC_URL)) # Validate the expiration version. # Historical versions of Scalars.json may contain expiration versions # using the deprecated format 'N.Na1'. Those scripts set # self._strict_type_checks to false. expires = definition.get('expires') if not utils.validate_expiration_version(expires) and self._strict_type_checks: raise ParserError('{} - invalid expires: {}.\nSee: {}#required-fields' .format(self._name, expires, BASE_DOC_URL))
python
def validate_values(self, definition): if not self._strict_type_checks: return # Validate the scalar kind. scalar_kind = definition.get('kind') if scalar_kind not in SCALAR_TYPES_MAP.keys(): raise ParserError(self._name + ' - unknown scalar kind: ' + scalar_kind + '.\nSee: {}'.format(BASE_DOC_URL)) # Validate the collection policy. collection_policy = definition.get('release_channel_collection', None) if collection_policy and collection_policy not in ['opt-in', 'opt-out']: raise ParserError(self._name + ' - unknown collection policy: ' + collection_policy + '.\nSee: {}#optional-fields'.format(BASE_DOC_URL)) # Validate the cpp_guard. cpp_guard = definition.get('cpp_guard') if cpp_guard and re.match(r'\W', cpp_guard): raise ParserError(self._name + ' - invalid cpp_guard: ' + cpp_guard + '.\nSee: {}#optional-fields'.format(BASE_DOC_URL)) # Validate record_in_processes. record_in_processes = definition.get('record_in_processes', []) for proc in record_in_processes: if not utils.is_valid_process_name(proc): raise ParserError(self._name + ' - unknown value in record_in_processes: ' + proc + '.\nSee: {}'.format(BASE_DOC_URL)) # Validate the expiration version. # Historical versions of Scalars.json may contain expiration versions # using the deprecated format 'N.Na1'. Those scripts set # self._strict_type_checks to false. expires = definition.get('expires') if not utils.validate_expiration_version(expires) and self._strict_type_checks: raise ParserError('{} - invalid expires: {}.\nSee: {}#required-fields' .format(self._name, expires, BASE_DOC_URL))
[ "def", "validate_values", "(", "self", ",", "definition", ")", ":", "if", "not", "self", ".", "_strict_type_checks", ":", "return", "# Validate the scalar kind.", "scalar_kind", "=", "definition", ".", "get", "(", "'kind'", ")", "if", "scalar_kind", "not", "in",...
This function checks that the fields have the correct values. :param definition: the dictionary containing the scalar properties. :raises ParserError: if a scalar definition field contains an unexpected value.
[ "This", "function", "checks", "that", "the", "fields", "have", "the", "correct", "values", "." ]
09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb
https://github.com/mozilla/python_moztelemetry/blob/09ddf1ec7d953a4308dfdcb0ed968f27bd5921bb/moztelemetry/parse_scalars.py#L158-L200
16,781
daler/trackhub
trackhub/base.py
HubComponent.add_child
def add_child(self, child): """ Adds self as parent to child, and then adds child. """ child.parent = self self.children.append(child) return child
python
def add_child(self, child): child.parent = self self.children.append(child) return child
[ "def", "add_child", "(", "self", ",", "child", ")", ":", "child", ".", "parent", "=", "self", "self", ".", "children", ".", "append", "(", "child", ")", "return", "child" ]
Adds self as parent to child, and then adds child.
[ "Adds", "self", "as", "parent", "to", "child", "and", "then", "adds", "child", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/base.py#L59-L65
16,782
daler/trackhub
trackhub/base.py
HubComponent.add_parent
def add_parent(self, parent): """ Adds self as child of parent, then adds parent. """ parent.add_child(self) self.parent = parent return parent
python
def add_parent(self, parent): parent.add_child(self) self.parent = parent return parent
[ "def", "add_parent", "(", "self", ",", "parent", ")", ":", "parent", ".", "add_child", "(", "self", ")", "self", ".", "parent", "=", "parent", "return", "parent" ]
Adds self as child of parent, then adds parent.
[ "Adds", "self", "as", "child", "of", "parent", "then", "adds", "parent", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/base.py#L67-L73
16,783
daler/trackhub
trackhub/base.py
HubComponent.root
def root(self, cls=None, level=0): """ Returns the top-most HubComponent in the hierarchy. If `cls` is not None, then return the top-most attribute HubComponent that is an instance of class `cls`. For a fully-constructed track hub (and `cls=None`), this should return a a Hub object for every component in the hierarchy. """ if cls is None: if self.parent is None: return self, level else: if isinstance(self, cls): if not isinstance(self.parent, cls): return self, level if self.parent is None: return None, None return self.parent.root(cls, level - 1)
python
def root(self, cls=None, level=0): if cls is None: if self.parent is None: return self, level else: if isinstance(self, cls): if not isinstance(self.parent, cls): return self, level if self.parent is None: return None, None return self.parent.root(cls, level - 1)
[ "def", "root", "(", "self", ",", "cls", "=", "None", ",", "level", "=", "0", ")", ":", "if", "cls", "is", "None", ":", "if", "self", ".", "parent", "is", "None", ":", "return", "self", ",", "level", "else", ":", "if", "isinstance", "(", "self", ...
Returns the top-most HubComponent in the hierarchy. If `cls` is not None, then return the top-most attribute HubComponent that is an instance of class `cls`. For a fully-constructed track hub (and `cls=None`), this should return a a Hub object for every component in the hierarchy.
[ "Returns", "the", "top", "-", "most", "HubComponent", "in", "the", "hierarchy", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/base.py#L75-L96
16,784
daler/trackhub
trackhub/base.py
HubComponent.leaves
def leaves(self, cls, level=0, intermediate=False): """ Returns an iterator of the HubComponent leaves that are of class `cls`. If `intermediate` is True, then return any intermediate classes as well. """ if intermediate: if isinstance(self, cls): yield self, level elif len(self.children) == 0: if isinstance(self, cls): yield self, level else: raise StopIteration for child in self.children: for leaf, _level in child.leaves(cls, level + 1, intermediate=intermediate): yield leaf, _level
python
def leaves(self, cls, level=0, intermediate=False): if intermediate: if isinstance(self, cls): yield self, level elif len(self.children) == 0: if isinstance(self, cls): yield self, level else: raise StopIteration for child in self.children: for leaf, _level in child.leaves(cls, level + 1, intermediate=intermediate): yield leaf, _level
[ "def", "leaves", "(", "self", ",", "cls", ",", "level", "=", "0", ",", "intermediate", "=", "False", ")", ":", "if", "intermediate", ":", "if", "isinstance", "(", "self", ",", "cls", ")", ":", "yield", "self", ",", "level", "elif", "len", "(", "sel...
Returns an iterator of the HubComponent leaves that are of class `cls`. If `intermediate` is True, then return any intermediate classes as well.
[ "Returns", "an", "iterator", "of", "the", "HubComponent", "leaves", "that", "are", "of", "class", "cls", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/base.py#L98-L116
16,785
daler/trackhub
trackhub/base.py
HubComponent.render
def render(self, staging=None): """ Renders the object to file, returning a list of created files. Calls validation code, and, as long as each child is also a subclass of :class:`HubComponent`, the rendering is recursive. """ self.validate() created_files = OrderedDict() if staging is None: staging = tempfile.mkdtemp() this = self._render(staging) if this: created_files[repr(self)] = this for child in self.children: created_files[repr(child)] = child.render(staging) return created_files
python
def render(self, staging=None): self.validate() created_files = OrderedDict() if staging is None: staging = tempfile.mkdtemp() this = self._render(staging) if this: created_files[repr(self)] = this for child in self.children: created_files[repr(child)] = child.render(staging) return created_files
[ "def", "render", "(", "self", ",", "staging", "=", "None", ")", ":", "self", ".", "validate", "(", ")", "created_files", "=", "OrderedDict", "(", ")", "if", "staging", "is", "None", ":", "staging", "=", "tempfile", ".", "mkdtemp", "(", ")", "this", "...
Renders the object to file, returning a list of created files. Calls validation code, and, as long as each child is also a subclass of :class:`HubComponent`, the rendering is recursive.
[ "Renders", "the", "object", "to", "file", "returning", "a", "list", "of", "created", "files", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/base.py#L118-L134
16,786
Karaage-Cluster/karaage
karaage/plugins/kgapplications/emails.py
send_request_email
def send_request_email( authorised_text, authorised_role, authorised_persons, application, link, is_secret): """Sends an email to admin asking to approve user application""" context = CONTEXT.copy() context['requester'] = application.applicant context['link'] = link context['is_secret'] = is_secret context['application'] = application context['authorised_text'] = authorised_text _send_request_email( context, authorised_role, authorised_persons, "common_request")
python
def send_request_email( authorised_text, authorised_role, authorised_persons, application, link, is_secret): context = CONTEXT.copy() context['requester'] = application.applicant context['link'] = link context['is_secret'] = is_secret context['application'] = application context['authorised_text'] = authorised_text _send_request_email( context, authorised_role, authorised_persons, "common_request")
[ "def", "send_request_email", "(", "authorised_text", ",", "authorised_role", ",", "authorised_persons", ",", "application", ",", "link", ",", "is_secret", ")", ":", "context", "=", "CONTEXT", ".", "copy", "(", ")", "context", "[", "'requester'", "]", "=", "app...
Sends an email to admin asking to approve user application
[ "Sends", "an", "email", "to", "admin", "asking", "to", "approve", "user", "application" ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/emails.py#L66-L79
16,787
Karaage-Cluster/karaage
karaage/plugins/kgapplications/emails.py
send_invite_email
def send_invite_email(application, link, is_secret): """ Sends an email inviting someone to create an account""" if not application.applicant.email: return context = CONTEXT.copy() context['receiver'] = application.applicant context['application'] = application context['link'] = link context['is_secret'] = is_secret to_email = application.applicant.email subject, body = render_email('common_invite', context) send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])
python
def send_invite_email(application, link, is_secret): if not application.applicant.email: return context = CONTEXT.copy() context['receiver'] = application.applicant context['application'] = application context['link'] = link context['is_secret'] = is_secret to_email = application.applicant.email subject, body = render_email('common_invite', context) send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])
[ "def", "send_invite_email", "(", "application", ",", "link", ",", "is_secret", ")", ":", "if", "not", "application", ".", "applicant", ".", "email", ":", "return", "context", "=", "CONTEXT", ".", "copy", "(", ")", "context", "[", "'receiver'", "]", "=", ...
Sends an email inviting someone to create an account
[ "Sends", "an", "email", "inviting", "someone", "to", "create", "an", "account" ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/emails.py#L98-L113
16,788
Karaage-Cluster/karaage
karaage/plugins/kgapplications/emails.py
send_approved_email
def send_approved_email( application, created_person, created_account, link, is_secret): """Sends an email informing person application is approved""" if not application.applicant.email: return context = CONTEXT.copy() context['receiver'] = application.applicant context['application'] = application context['created_person'] = created_person context['created_account'] = created_account context['link'] = link context['is_secret'] = is_secret subject, body = render_email('common_approved', context) to_email = application.applicant.email send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])
python
def send_approved_email( application, created_person, created_account, link, is_secret): if not application.applicant.email: return context = CONTEXT.copy() context['receiver'] = application.applicant context['application'] = application context['created_person'] = created_person context['created_account'] = created_account context['link'] = link context['is_secret'] = is_secret subject, body = render_email('common_approved', context) to_email = application.applicant.email send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])
[ "def", "send_approved_email", "(", "application", ",", "created_person", ",", "created_account", ",", "link", ",", "is_secret", ")", ":", "if", "not", "application", ".", "applicant", ".", "email", ":", "return", "context", "=", "CONTEXT", ".", "copy", "(", ...
Sends an email informing person application is approved
[ "Sends", "an", "email", "informing", "person", "application", "is", "approved" ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgapplications/emails.py#L116-L132
16,789
Karaage-Cluster/karaage
karaage/people/models.py
_add_person_to_group
def _add_person_to_group(person, group): """ Call datastores after adding a person to a group. """ from karaage.datastores import add_accounts_to_group from karaage.datastores import add_accounts_to_project from karaage.datastores import add_accounts_to_institute a_list = person.account_set add_accounts_to_group(a_list, group) for project in group.project_set.all(): add_accounts_to_project(a_list, project) for institute in group.institute_set.all(): add_accounts_to_institute(a_list, institute)
python
def _add_person_to_group(person, group): from karaage.datastores import add_accounts_to_group from karaage.datastores import add_accounts_to_project from karaage.datastores import add_accounts_to_institute a_list = person.account_set add_accounts_to_group(a_list, group) for project in group.project_set.all(): add_accounts_to_project(a_list, project) for institute in group.institute_set.all(): add_accounts_to_institute(a_list, institute)
[ "def", "_add_person_to_group", "(", "person", ",", "group", ")", ":", "from", "karaage", ".", "datastores", "import", "add_accounts_to_group", "from", "karaage", ".", "datastores", "import", "add_accounts_to_project", "from", "karaage", ".", "datastores", "import", ...
Call datastores after adding a person to a group.
[ "Call", "datastores", "after", "adding", "a", "person", "to", "a", "group", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/people/models.py#L461-L472
16,790
Karaage-Cluster/karaage
karaage/people/models.py
_remove_person_from_group
def _remove_person_from_group(person, group): """ Call datastores after removing a person from a group. """ from karaage.datastores import remove_accounts_from_group from karaage.datastores import remove_accounts_from_project from karaage.datastores import remove_accounts_from_institute a_list = person.account_set remove_accounts_from_group(a_list, group) for project in group.project_set.all(): remove_accounts_from_project(a_list, project) for institute in group.institute_set.all(): remove_accounts_from_institute(a_list, institute)
python
def _remove_person_from_group(person, group): from karaage.datastores import remove_accounts_from_group from karaage.datastores import remove_accounts_from_project from karaage.datastores import remove_accounts_from_institute a_list = person.account_set remove_accounts_from_group(a_list, group) for project in group.project_set.all(): remove_accounts_from_project(a_list, project) for institute in group.institute_set.all(): remove_accounts_from_institute(a_list, institute)
[ "def", "_remove_person_from_group", "(", "person", ",", "group", ")", ":", "from", "karaage", ".", "datastores", "import", "remove_accounts_from_group", "from", "karaage", ".", "datastores", "import", "remove_accounts_from_project", "from", "karaage", ".", "datastores",...
Call datastores after removing a person from a group.
[ "Call", "datastores", "after", "removing", "a", "person", "from", "a", "group", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/people/models.py#L475-L486
16,791
daler/trackhub
trackhub/helpers.py
dimensions_from_subgroups
def dimensions_from_subgroups(s): """ Given a sorted list of subgroups, return a string appropriate to provide as a composite track's `dimensions` arg. Parameters ---------- s : list of SubGroup objects (or anything with a `name` attribute) """ letters = 'XYABCDEFGHIJKLMNOPQRSTUVWZ' return ' '.join(['dim{0}={1}'.format(dim, sg.name) for dim, sg in zip(letters, s)])
python
def dimensions_from_subgroups(s): letters = 'XYABCDEFGHIJKLMNOPQRSTUVWZ' return ' '.join(['dim{0}={1}'.format(dim, sg.name) for dim, sg in zip(letters, s)])
[ "def", "dimensions_from_subgroups", "(", "s", ")", ":", "letters", "=", "'XYABCDEFGHIJKLMNOPQRSTUVWZ'", "return", "' '", ".", "join", "(", "[", "'dim{0}={1}'", ".", "format", "(", "dim", ",", "sg", ".", "name", ")", "for", "dim", ",", "sg", "in", "zip", ...
Given a sorted list of subgroups, return a string appropriate to provide as a composite track's `dimensions` arg. Parameters ---------- s : list of SubGroup objects (or anything with a `name` attribute)
[ "Given", "a", "sorted", "list", "of", "subgroups", "return", "a", "string", "appropriate", "to", "provide", "as", "a", "composite", "track", "s", "dimensions", "arg", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/helpers.py#L12-L22
16,792
daler/trackhub
trackhub/helpers.py
filter_composite_from_subgroups
def filter_composite_from_subgroups(s): """ Given a sorted list of subgroups, return a string appropriate to provide as the a composite track's `filterComposite` argument >>> import trackhub >>> trackhub.helpers.filter_composite_from_subgroups(['cell', 'ab', 'lab', 'knockdown']) 'dimA dimB' Parameters ---------- s : list A list representing the ordered subgroups, ideally the same list provided to `dimensions_from_subgroups`. The values are not actually used, just the number of items. """ dims = [] for letter, sg in zip('ABCDEFGHIJKLMNOPQRSTUVWZ', s[2:]): dims.append('dim{0}'.format(letter)) if dims: return ' '.join(dims)
python
def filter_composite_from_subgroups(s): dims = [] for letter, sg in zip('ABCDEFGHIJKLMNOPQRSTUVWZ', s[2:]): dims.append('dim{0}'.format(letter)) if dims: return ' '.join(dims)
[ "def", "filter_composite_from_subgroups", "(", "s", ")", ":", "dims", "=", "[", "]", "for", "letter", ",", "sg", "in", "zip", "(", "'ABCDEFGHIJKLMNOPQRSTUVWZ'", ",", "s", "[", "2", ":", "]", ")", ":", "dims", ".", "append", "(", "'dim{0}'", ".", "forma...
Given a sorted list of subgroups, return a string appropriate to provide as the a composite track's `filterComposite` argument >>> import trackhub >>> trackhub.helpers.filter_composite_from_subgroups(['cell', 'ab', 'lab', 'knockdown']) 'dimA dimB' Parameters ---------- s : list A list representing the ordered subgroups, ideally the same list provided to `dimensions_from_subgroups`. The values are not actually used, just the number of items.
[ "Given", "a", "sorted", "list", "of", "subgroups", "return", "a", "string", "appropriate", "to", "provide", "as", "the", "a", "composite", "track", "s", "filterComposite", "argument" ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/helpers.py#L25-L45
16,793
daler/trackhub
trackhub/helpers.py
hex2rgb
def hex2rgb(h): """ Convert hex colors to RGB tuples Parameters ---------- h : str String hex color value >>> hex2rgb("#ff0033") '255,0,51' """ if not h.startswith('#') or len(h) != 7: raise ValueError("Does not look like a hex color: '{0}'".format(h)) return ','.join(map(str, ( int(h[1:3], 16), int(h[3:5], 16), int(h[5:7], 16), )))
python
def hex2rgb(h): if not h.startswith('#') or len(h) != 7: raise ValueError("Does not look like a hex color: '{0}'".format(h)) return ','.join(map(str, ( int(h[1:3], 16), int(h[3:5], 16), int(h[5:7], 16), )))
[ "def", "hex2rgb", "(", "h", ")", ":", "if", "not", "h", ".", "startswith", "(", "'#'", ")", "or", "len", "(", "h", ")", "!=", "7", ":", "raise", "ValueError", "(", "\"Does not look like a hex color: '{0}'\"", ".", "format", "(", "h", ")", ")", "return"...
Convert hex colors to RGB tuples Parameters ---------- h : str String hex color value >>> hex2rgb("#ff0033") '255,0,51'
[ "Convert", "hex", "colors", "to", "RGB", "tuples" ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/helpers.py#L48-L66
16,794
daler/trackhub
trackhub/helpers.py
sanitize
def sanitize(s, strict=True): """ Sanitize a string. Spaces are converted to underscore; if strict=True they are then removed. Parameters ---------- s : str String to sanitize strict : bool If True, only alphanumeric characters are allowed. If False, a limited set of additional characters (-._) will be allowed. """ allowed = ''.join( [ 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', 'abcdefghijklmnopqrstuvwxyz', '0123456789', ] ) if not strict: allowed += '-_.' s = str(s).replace(' ', '_') return ''.join([i for i in s if i in allowed])
python
def sanitize(s, strict=True): allowed = ''.join( [ 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', 'abcdefghijklmnopqrstuvwxyz', '0123456789', ] ) if not strict: allowed += '-_.' s = str(s).replace(' ', '_') return ''.join([i for i in s if i in allowed])
[ "def", "sanitize", "(", "s", ",", "strict", "=", "True", ")", ":", "allowed", "=", "''", ".", "join", "(", "[", "'ABCDEFGHIJKLMNOPQRSTUVWXYZ'", ",", "'abcdefghijklmnopqrstuvwxyz'", ",", "'0123456789'", ",", "]", ")", "if", "not", "strict", ":", "allowed", ...
Sanitize a string. Spaces are converted to underscore; if strict=True they are then removed. Parameters ---------- s : str String to sanitize strict : bool If True, only alphanumeric characters are allowed. If False, a limited set of additional characters (-._) will be allowed.
[ "Sanitize", "a", "string", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/helpers.py#L69-L97
16,795
daler/trackhub
trackhub/helpers.py
auto_track_url
def auto_track_url(track): """ Automatically sets the bigDataUrl for `track`. Requirements: * the track must be fully connected, such that its root is a Hub object * the root Hub object must have the Hub.url attribute set * the track must have the `source` attribute set """ hub = track.root(cls=Hub) if hub is None: raise ValueError( "track is not fully connected because the root is %s" % repr(hub)) if hub.url is None: raise ValueError("hub.url is not set") if track.source is None: raise ValueError("track.source is not set")
python
def auto_track_url(track): hub = track.root(cls=Hub) if hub is None: raise ValueError( "track is not fully connected because the root is %s" % repr(hub)) if hub.url is None: raise ValueError("hub.url is not set") if track.source is None: raise ValueError("track.source is not set")
[ "def", "auto_track_url", "(", "track", ")", ":", "hub", "=", "track", ".", "root", "(", "cls", "=", "Hub", ")", "if", "hub", "is", "None", ":", "raise", "ValueError", "(", "\"track is not fully connected because the root is %s\"", "%", "repr", "(", "hub", ")...
Automatically sets the bigDataUrl for `track`. Requirements: * the track must be fully connected, such that its root is a Hub object * the root Hub object must have the Hub.url attribute set * the track must have the `source` attribute set
[ "Automatically", "sets", "the", "bigDataUrl", "for", "track", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/helpers.py#L100-L118
16,796
daler/trackhub
trackhub/helpers.py
print_rendered_results
def print_rendered_results(results_dict): """ Pretty-prints the rendered results dictionary. Rendered results can be multiply-nested dictionaries; this uses JSON serialization to print a nice representation. """ class _HubComponentEncoder(json.JSONEncoder): def default(self, o): if isinstance(o, base.HubComponent): return repr(o) return json.JSONEncoder.default(self, o) formatted = json.dumps(results_dict, indent=4, cls=_HubComponentEncoder) # the returned string contains lines with trailing spaces, which causes # doctests to fail. So fix that here. for s in formatted.splitlines(): print(s.rstrip())
python
def print_rendered_results(results_dict): class _HubComponentEncoder(json.JSONEncoder): def default(self, o): if isinstance(o, base.HubComponent): return repr(o) return json.JSONEncoder.default(self, o) formatted = json.dumps(results_dict, indent=4, cls=_HubComponentEncoder) # the returned string contains lines with trailing spaces, which causes # doctests to fail. So fix that here. for s in formatted.splitlines(): print(s.rstrip())
[ "def", "print_rendered_results", "(", "results_dict", ")", ":", "class", "_HubComponentEncoder", "(", "json", ".", "JSONEncoder", ")", ":", "def", "default", "(", "self", ",", "o", ")", ":", "if", "isinstance", "(", "o", ",", "base", ".", "HubComponent", "...
Pretty-prints the rendered results dictionary. Rendered results can be multiply-nested dictionaries; this uses JSON serialization to print a nice representation.
[ "Pretty", "-", "prints", "the", "rendered", "results", "dictionary", "." ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/helpers.py#L134-L150
16,797
daler/trackhub
trackhub/helpers.py
example_bigbeds
def example_bigbeds(): """ Returns list of example bigBed files """ hits = [] d = data_dir() for fn in os.listdir(d): fn = os.path.join(d, fn) if os.path.splitext(fn)[-1] == '.bigBed': hits.append(os.path.abspath(fn)) return hits
python
def example_bigbeds(): hits = [] d = data_dir() for fn in os.listdir(d): fn = os.path.join(d, fn) if os.path.splitext(fn)[-1] == '.bigBed': hits.append(os.path.abspath(fn)) return hits
[ "def", "example_bigbeds", "(", ")", ":", "hits", "=", "[", "]", "d", "=", "data_dir", "(", ")", "for", "fn", "in", "os", ".", "listdir", "(", "d", ")", ":", "fn", "=", "os", ".", "path", ".", "join", "(", "d", ",", "fn", ")", "if", "os", "....
Returns list of example bigBed files
[ "Returns", "list", "of", "example", "bigBed", "files" ]
e4655f79177822529f80b923df117e38e28df702
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/helpers.py#L161-L171
16,798
Karaage-Cluster/karaage
karaage/plugins/kgusage/graphs.py
get_colour
def get_colour(index): """ get color number index. """ colours = [ 'red', 'blue', 'green', 'pink', 'yellow', 'magenta', 'orange', 'cyan', ] default_colour = 'purple' if index < len(colours): return colours[index] else: return default_colour
python
def get_colour(index): colours = [ 'red', 'blue', 'green', 'pink', 'yellow', 'magenta', 'orange', 'cyan', ] default_colour = 'purple' if index < len(colours): return colours[index] else: return default_colour
[ "def", "get_colour", "(", "index", ")", ":", "colours", "=", "[", "'red'", ",", "'blue'", ",", "'green'", ",", "'pink'", ",", "'yellow'", ",", "'magenta'", ",", "'orange'", ",", "'cyan'", ",", "]", "default_colour", "=", "'purple'", "if", "index", "<", ...
get color number index.
[ "get", "color", "number", "index", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgusage/graphs.py#L32-L42
16,799
Karaage-Cluster/karaage
karaage/plugins/kgusage/graphs.py
get_project_trend_graph_url
def get_project_trend_graph_url(project, start, end): """Generates a bar graph for a project. """ filename = get_project_trend_graph_filename(project, start, end) urls = { 'graph_url': urlparse.urljoin(GRAPH_URL, filename + ".png"), 'data_url': urlparse.urljoin(GRAPH_URL, filename + ".csv"), } return urls
python
def get_project_trend_graph_url(project, start, end): filename = get_project_trend_graph_filename(project, start, end) urls = { 'graph_url': urlparse.urljoin(GRAPH_URL, filename + ".png"), 'data_url': urlparse.urljoin(GRAPH_URL, filename + ".csv"), } return urls
[ "def", "get_project_trend_graph_url", "(", "project", ",", "start", ",", "end", ")", ":", "filename", "=", "get_project_trend_graph_filename", "(", "project", ",", "start", ",", "end", ")", "urls", "=", "{", "'graph_url'", ":", "urlparse", ".", "urljoin", "(",...
Generates a bar graph for a project.
[ "Generates", "a", "bar", "graph", "for", "a", "project", "." ]
2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9
https://github.com/Karaage-Cluster/karaage/blob/2f4c8b4e2d728b3fcbb151160c49000f1c04f5c9/karaage/plugins/kgusage/graphs.py#L102-L111