Dataset schema:

Column            Type           Min      Max
repo              stringlengths  7        55
path              stringlengths  4        223
func_name         stringlengths  1        134
original_string   stringlengths  75       104k
language          stringclasses  1 value
code              stringlengths  75       104k
code_tokens       listlengths    19       28.4k
docstring         stringlengths  1        46.9k
docstring_tokens  listlengths    1        1.97k
sha               stringlengths  40       40
url               stringlengths  87       315
partition         stringclasses  1 value
nion-software/nionswift
nion/swift/model/Utility.py
clean_list
def clean_list(l0, clean_item_fn=None):
    """ Return a json-clean list. Will log info message for failures. """
    clean_item_fn = clean_item_fn if clean_item_fn else clean_item
    l = list()
    for index, item in enumerate(l0):
        cleaned_item = clean_item_fn(item)
        l.append(cleaned_item)
    return l
python
[ "def", "clean_list", "(", "l0", ",", "clean_item_fn", "=", "None", ")", ":", "clean_item_fn", "=", "clean_item_fn", "if", "clean_item_fn", "else", "clean_item", "l", "=", "list", "(", ")", "for", "index", ",", "item", "in", "enumerate", "(", "l0", ")", ":", "cleaned_item", "=", "clean_item_fn", "(", "item", ")", "l", ".", "append", "(", "cleaned_item", ")", "return", "l" ]
Return a json-clean list. Will log info message for failures.
[ "Return", "a", "json", "-", "clean", "list", ".", "Will", "log", "info", "message", "for", "failures", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Utility.py#L115-L124
train
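A minimal usage sketch, not part of the record, assuming the nionswift package is importable as below: when no clean_item_fn is given, clean_list falls back to the module's clean_item, so numpy scalars inside a list are coerced to plain Python values.

import numpy
from nion.swift.model import Utility

# assumed import path, matching the record's nion/swift/model/Utility.py
mixed = [numpy.float32(1.5), numpy.int64(7), "text", None]
print(Utility.clean_list(mixed))  # expected: [1.5, 7, 'text', None]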
nion-software/nionswift
nion/swift/model/Utility.py
clean_tuple
def clean_tuple(t0, clean_item_fn=None):
    """ Return a json-clean tuple. Will log info message for failures. """
    clean_item_fn = clean_item_fn if clean_item_fn else clean_item
    l = list()
    for index, item in enumerate(t0):
        cleaned_item = clean_item_fn(item)
        l.append(cleaned_item)
    return tuple(l)
python
[ "def", "clean_tuple", "(", "t0", ",", "clean_item_fn", "=", "None", ")", ":", "clean_item_fn", "=", "clean_item_fn", "if", "clean_item_fn", "else", "clean_item", "l", "=", "list", "(", ")", "for", "index", ",", "item", "in", "enumerate", "(", "t0", ")", ":", "cleaned_item", "=", "clean_item_fn", "(", "item", ")", "l", ".", "append", "(", "cleaned_item", ")", "return", "tuple", "(", "l", ")" ]
Return a json-clean tuple. Will log info message for failures.
[ "Return", "a", "json", "-", "clean", "tuple", ".", "Will", "log", "info", "message", "for", "failures", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Utility.py#L127-L136
train
nion-software/nionswift
nion/swift/model/Utility.py
clean_item
def clean_item(i):
    """ Return a json-clean item or None. Will log info message for failure. """
    itype = type(i)
    if itype == dict:
        return clean_dict(i)
    elif itype == list:
        return clean_list(i)
    elif itype == tuple:
        return clean_tuple(i)
    elif itype == numpy.float32:
        return float(i)
    elif itype == numpy.float64:
        return float(i)
    elif itype == numpy.int16:
        return int(i)
    elif itype == numpy.uint16:
        return int(i)
    elif itype == numpy.int32:
        return int(i)
    elif itype == numpy.uint32:
        return int(i)
    elif itype == numpy.int64:
        return int(i)
    elif itype == numpy.uint64:
        return int(i)
    elif itype == float:
        return i
    elif itype == str:
        return i
    elif itype == int:
        return i
    elif itype == bool:
        return i
    elif itype == type(None):
        return i
    logging.info("[1] Unable to handle type %s", itype)
    import traceback
    traceback.print_stack()
    return None
python
[ "def", "clean_item", "(", "i", ")", ":", "itype", "=", "type", "(", "i", ")", "if", "itype", "==", "dict", ":", "return", "clean_dict", "(", "i", ")", "elif", "itype", "==", "list", ":", "return", "clean_list", "(", "i", ")", "elif", "itype", "==", "tuple", ":", "return", "clean_tuple", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "float32", ":", "return", "float", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "float64", ":", "return", "float", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "int16", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "uint16", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "int32", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "uint32", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "int64", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "uint64", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "float", ":", "return", "i", "elif", "itype", "==", "str", ":", "return", "i", "elif", "itype", "==", "int", ":", "return", "i", "elif", "itype", "==", "bool", ":", "return", "i", "elif", "itype", "==", "type", "(", "None", ")", ":", "return", "i", "logging", ".", "info", "(", "\"[1] Unable to handle type %s\"", ",", "itype", ")", "import", "traceback", "traceback", ".", "print_stack", "(", ")", "return", "None" ]
Return a json-clean item or None. Will log info message for failure.
[ "Return", "a", "json", "-", "clean", "item", "or", "None", ".", "Will", "log", "info", "message", "for", "failure", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Utility.py#L139-L179
train
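A hypothetical check of the dispatch above, under the same import assumption: numpy scalars come back as Python builtins, while an unsupported type such as a set logs an info message, prints a stack trace, and yields None.

import numpy
from nion.swift.model import Utility

print(type(Utility.clean_item(numpy.float64(3.25))))  # <class 'float'>
print(Utility.clean_item({1, 2, 3}))                  # None: sets are not handled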
nion-software/nionswift
nion/swift/model/Utility.py
clean_item_no_list
def clean_item_no_list(i):
    """ Return a json-clean item or None. Will log info message for failure. """
    itype = type(i)
    if itype == dict:
        return clean_dict(i, clean_item_no_list)
    elif itype == list:
        return clean_tuple(i, clean_item_no_list)
    elif itype == tuple:
        return clean_tuple(i, clean_item_no_list)
    elif itype == numpy.float32:
        return float(i)
    elif itype == numpy.float64:
        return float(i)
    elif itype == numpy.int16:
        return int(i)
    elif itype == numpy.uint16:
        return int(i)
    elif itype == numpy.int32:
        return int(i)
    elif itype == numpy.uint32:
        return int(i)
    elif itype == float:
        return i
    elif itype == str:
        return i
    elif itype == int:
        return i
    elif itype == bool:
        return i
    elif itype == type(None):
        return i
    logging.info("[2] Unable to handle type %s", itype)
    return None
python
[ "def", "clean_item_no_list", "(", "i", ")", ":", "itype", "=", "type", "(", "i", ")", "if", "itype", "==", "dict", ":", "return", "clean_dict", "(", "i", ",", "clean_item_no_list", ")", "elif", "itype", "==", "list", ":", "return", "clean_tuple", "(", "i", ",", "clean_item_no_list", ")", "elif", "itype", "==", "tuple", ":", "return", "clean_tuple", "(", "i", ",", "clean_item_no_list", ")", "elif", "itype", "==", "numpy", ".", "float32", ":", "return", "float", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "float64", ":", "return", "float", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "int16", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "uint16", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "int32", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "numpy", ".", "uint32", ":", "return", "int", "(", "i", ")", "elif", "itype", "==", "float", ":", "return", "i", "elif", "itype", "==", "str", ":", "return", "i", "elif", "itype", "==", "int", ":", "return", "i", "elif", "itype", "==", "bool", ":", "return", "i", "elif", "itype", "==", "type", "(", "None", ")", ":", "return", "i", "logging", ".", "info", "(", "\"[2] Unable to handle type %s\"", ",", "itype", ")", "return", "None" ]
Return a json-clean item or None. Will log info message for failure.
[ "Return", "a", "json", "-", "clean", "item", "or", "None", ".", "Will", "log", "info", "message", "for", "failure", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Utility.py#L182-L216
train
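One behavioral difference worth noting, shown in a short sketch under the same assumed import: clean_item preserves lists, while clean_item_no_list routes its list branch through clean_tuple, so lists come back as tuples.

from nion.swift.model import Utility

print(Utility.clean_item([1, 2]))          # [1, 2]
print(Utility.clean_item_no_list([1, 2]))  # (1, 2)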
nion-software/nionswift
nion/swift/model/Utility.py
sample_stack_all
def sample_stack_all(count=10, interval=0.1):
    """Sample the stack in a thread and print it at regular intervals."""

    def print_stack_all(l, ll):
        l1 = list()
        l1.append("*** STACKTRACE - START ***")
        code = []
        for threadId, stack in sys._current_frames().items():
            sub_code = []
            sub_code.append("# ThreadID: %s" % threadId)
            for filename, lineno, name, line in traceback.extract_stack(stack):
                sub_code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
                if line:
                    sub_code.append(" %s" % (line.strip()))
            if not "in select" in sub_code[-2] and \
               not "in wait" in sub_code[-2] and \
               not "in print_stack_all" in sub_code[-2] and \
               not "in sample_stack_all" in sub_code[-2] and \
               not "in checkcache" in sub_code[-2] and \
               not "do_sleep" in sub_code[-2] and \
               not "sleep" in sub_code[-1] and \
               not any(["in do_sample" in s for s in sub_code]):
                code.extend(sub_code)
        for line in code:
            l1.append(line)
        l1.append("*** STACKTRACE - END ***")
        with l:
            ll.extend(l1)

    def do_sample():
        l = threading.RLock()
        ll = list()
        for i in range(count):
            print_stack_all(l, ll)
            time.sleep(interval)
        with l:
            print("\n".join(ll))

    threading.Thread(target=do_sample).start()
python
[ "def", "sample_stack_all", "(", "count", "=", "10", ",", "interval", "=", "0.1", ")", ":", "def", "print_stack_all", "(", "l", ",", "ll", ")", ":", "l1", "=", "list", "(", ")", "l1", ".", "append", "(", "\"*** STACKTRACE - START ***\"", ")", "code", "=", "[", "]", "for", "threadId", ",", "stack", "in", "sys", ".", "_current_frames", "(", ")", ".", "items", "(", ")", ":", "sub_code", "=", "[", "]", "sub_code", ".", "append", "(", "\"# ThreadID: %s\"", "%", "threadId", ")", "for", "filename", ",", "lineno", ",", "name", ",", "line", "in", "traceback", ".", "extract_stack", "(", "stack", ")", ":", "sub_code", ".", "append", "(", "'File: \"%s\", line %d, in %s'", "%", "(", "filename", ",", "lineno", ",", "name", ")", ")", "if", "line", ":", "sub_code", ".", "append", "(", "\" %s\"", "%", "(", "line", ".", "strip", "(", ")", ")", ")", "if", "not", "\"in select\"", "in", "sub_code", "[", "-", "2", "]", "and", "not", "\"in wait\"", "in", "sub_code", "[", "-", "2", "]", "and", "not", "\"in print_stack_all\"", "in", "sub_code", "[", "-", "2", "]", "and", "not", "\"in sample_stack_all\"", "in", "sub_code", "[", "-", "2", "]", "and", "not", "\"in checkcache\"", "in", "sub_code", "[", "-", "2", "]", "and", "not", "\"do_sleep\"", "in", "sub_code", "[", "-", "2", "]", "and", "not", "\"sleep\"", "in", "sub_code", "[", "-", "1", "]", "and", "not", "any", "(", "[", "\"in do_sample\"", "in", "s", "for", "s", "in", "sub_code", "]", ")", ":", "code", ".", "extend", "(", "sub_code", ")", "for", "line", "in", "code", ":", "l1", ".", "append", "(", "line", ")", "l1", ".", "append", "(", "\"*** STACKTRACE - END ***\"", ")", "with", "l", ":", "ll", ".", "extend", "(", "l1", ")", "def", "do_sample", "(", ")", ":", "l", "=", "threading", ".", "RLock", "(", ")", "ll", "=", "list", "(", ")", "for", "i", "in", "range", "(", "count", ")", ":", "print_stack_all", "(", "l", ",", "ll", ")", "time", ".", "sleep", "(", "interval", ")", "with", "l", ":", "print", "(", "\"\\n\"", ".", "join", "(", "ll", ")", ")", "threading", ".", "Thread", "(", "target", "=", "do_sample", ")", ".", "start", "(", ")" ]
Sample the stack in a thread and print it at regular intervals.
[ "Sample", "the", "stack", "in", "a", "thread", "and", "print", "it", "at", "regular", "intervals", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Utility.py#L312-L350
train
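A hedged usage sketch (the busy worker and timings are made up): start a thread with a recognizable stack, sample five times at 0.2 s intervals, and keep the main thread alive long enough for the sampler to print its merged trace. Note the sampler filters out frames whose last line mentions "sleep", so the worker spins instead of sleeping.

import threading
import time
from nion.swift.model import Utility

def busy_worker():
    # a frame the sampler can catch (sleeping threads are filtered out)
    x = 0
    for i in range(10 ** 8):
        x += i

threading.Thread(target=busy_worker).start()
Utility.sample_stack_all(count=5, interval=0.2)
time.sleep(1.5)  # let the sampling thread finish and print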
mgraffg/EvoDAG
EvoDAG/gp.py
Individual.decision_function
def decision_function(self, X):
    "Decision function i.e. the raw data of the prediction"
    self._X = Model.convert_features(X)
    self._eval()
    return self._ind[0].hy
python
[ "def", "decision_function", "(", "self", ",", "X", ")", ":", "self", ".", "_X", "=", "Model", ".", "convert_features", "(", "X", ")", "self", ".", "_eval", "(", ")", "return", "self", ".", "_ind", "[", "0", "]", ".", "hy" ]
Decision function i.e. the raw data of the prediction
[ "Decision", "function", "i", ".", "e", ".", "the", "raw", "data", "of", "the", "prediction" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/gp.py#L37-L41
train
mgraffg/EvoDAG
EvoDAG/gp.py
Individual._eval
def _eval(self):
    "Evaluates a individual using recursion and self._pos as pointer"
    pos = self._pos
    self._pos += 1
    node = self._ind[pos]
    if isinstance(node, Function):
        args = [self._eval() for x in range(node.nargs)]
        node.eval(args)
        for x in args:
            x.hy = None
            x.hy_test = None
    else:
        node.eval(self._X)
    return node
python
[ "def", "_eval", "(", "self", ")", ":", "pos", "=", "self", ".", "_pos", "self", ".", "_pos", "+=", "1", "node", "=", "self", ".", "_ind", "[", "pos", "]", "if", "isinstance", "(", "node", ",", "Function", ")", ":", "args", "=", "[", "self", ".", "_eval", "(", ")", "for", "x", "in", "range", "(", "node", ".", "nargs", ")", "]", "node", ".", "eval", "(", "args", ")", "for", "x", "in", "args", ":", "x", ".", "hy", "=", "None", "x", ".", "hy_test", "=", "None", "else", ":", "node", ".", "eval", "(", "self", ".", "_X", ")", "return", "node" ]
Evaluates a individual using recursion and self._pos as pointer
[ "Evaluates", "a", "individual", "using", "recursion", "and", "self", ".", "_pos", "as", "pointer" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/gp.py#L43-L56
train
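The recursion above walks self._ind in prefix order, with self._pos as a shared cursor. A self-contained toy version of the same idea, using (function, nargs) tuples in place of EvoDAG's Function nodes:

def eval_prefix(ind, pos=0):
    node = ind[pos]
    pos += 1
    if isinstance(node, tuple):  # plays the role of a Function node
        fn, nargs = node
        args = []
        for _ in range(nargs):
            value, pos = eval_prefix(ind, pos)
            args.append(value)
        return fn(args), pos
    return node, pos  # terminal: a plain value

# prefix encoding of (3 + 4) * 2
program = [(lambda a: a[0] * a[1], 2), (lambda a: a[0] + a[1], 2), 3, 4, 2]
print(eval_prefix(program)[0])  # 14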
mgraffg/EvoDAG
EvoDAG/gp.py
Population.create_random_ind_full
def create_random_ind_full(self, depth=0):
    "Random individual using full method"
    lst = []
    self._create_random_ind_full(depth=depth, output=lst)
    return lst
python
[ "def", "create_random_ind_full", "(", "self", ",", "depth", "=", "0", ")", ":", "lst", "=", "[", "]", "self", ".", "_create_random_ind_full", "(", "depth", "=", "depth", ",", "output", "=", "lst", ")", "return", "lst" ]
Random individual using full method
[ "Random", "individual", "using", "full", "method" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/gp.py#L82-L86
train
mgraffg/EvoDAG
EvoDAG/gp.py
Population.grow_use_function
def grow_use_function(self, depth=0):
    "Select either function or terminal in grow method"
    if depth == 0:
        return False
    if depth == self._depth:
        return True
    return np.random.random() < 0.5
python
[ "def", "grow_use_function", "(", "self", ",", "depth", "=", "0", ")", ":", "if", "depth", "==", "0", ":", "return", "False", "if", "depth", "==", "self", ".", "_depth", ":", "return", "True", "return", "np", ".", "random", ".", "random", "(", ")", "<", "0.5" ]
Select either function or terminal in grow method
[ "Select", "either", "function", "or", "terminal", "in", "grow", "method" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/gp.py#L98-L104
train
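Read with create_random_ind_grow below, depth appears to count down from the root (where depth == self._depth forces a function) to the leaf level (depth == 0 forces a terminal); in between the choice is a fair coin. A stand-alone sketch of that rule, with the depth bookkeeping made explicit:

import numpy as np

def grow_use_function(depth, max_depth):
    if depth == 0:          # leaf level: must pick a terminal
        return False
    if depth == max_depth:  # root: must pick a function
        return True
    return np.random.random() < 0.5  # interior: 50/50

print(grow_use_function(3, 3), grow_use_function(0, 3))  # True False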
mgraffg/EvoDAG
EvoDAG/gp.py
Population.create_random_ind_grow
def create_random_ind_grow(self, depth=0):
    "Random individual using grow method"
    lst = []
    self._depth = depth
    self._create_random_ind_grow(depth=depth, output=lst)
    return lst
python
[ "def", "create_random_ind_grow", "(", "self", ",", "depth", "=", "0", ")", ":", "lst", "=", "[", "]", "self", ".", "_depth", "=", "depth", "self", ".", "_create_random_ind_grow", "(", "depth", "=", "depth", ",", "output", "=", "lst", ")", "return", "lst" ]
Random individual using grow method
[ "Random", "individual", "using", "grow", "method" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/gp.py#L106-L111
train
mgraffg/EvoDAG
EvoDAG/gp.py
Population.create_population
def create_population(self, popsize=1000, min_depth=2, max_depth=4, X=None):
    "Creates random population using ramped half-and-half method"
    import itertools
    args = [x for x in itertools.product(range(min_depth, max_depth + 1),
                                         [True, False])]
    index = 0
    output = []
    while len(output) < popsize:
        depth, full = args[index]
        index += 1
        if index >= len(args):
            index = 0
        if full:
            ind = self.create_random_ind_full(depth=depth)
        else:
            ind = self.create_random_ind_grow(depth=depth)
        flag = True
        if X is not None:
            x = Individual(ind)
            x.decision_function(X)
            flag = x.individual[0].isfinite()
        l_vars = (flag, len(output), full, depth, len(ind))
        l_str = " flag: %s len(output): %s full: %s depth: %s len(ind): %s"
        self._logger.debug(l_str % l_vars)
        if flag:
            output.append(ind)
    return output
python
[ "def", "create_population", "(", "self", ",", "popsize", "=", "1000", ",", "min_depth", "=", "2", ",", "max_depth", "=", "4", ",", "X", "=", "None", ")", ":", "import", "itertools", "args", "=", "[", "x", "for", "x", "in", "itertools", ".", "product", "(", "range", "(", "min_depth", ",", "max_depth", "+", "1", ")", ",", "[", "True", ",", "False", "]", ")", "]", "index", "=", "0", "output", "=", "[", "]", "while", "len", "(", "output", ")", "<", "popsize", ":", "depth", ",", "full", "=", "args", "[", "index", "]", "index", "+=", "1", "if", "index", ">=", "len", "(", "args", ")", ":", "index", "=", "0", "if", "full", ":", "ind", "=", "self", ".", "create_random_ind_full", "(", "depth", "=", "depth", ")", "else", ":", "ind", "=", "self", ".", "create_random_ind_grow", "(", "depth", "=", "depth", ")", "flag", "=", "True", "if", "X", "is", "not", "None", ":", "x", "=", "Individual", "(", "ind", ")", "x", ".", "decision_function", "(", "X", ")", "flag", "=", "x", ".", "individual", "[", "0", "]", ".", "isfinite", "(", ")", "l_vars", "=", "(", "flag", ",", "len", "(", "output", ")", ",", "full", ",", "depth", ",", "len", "(", "ind", ")", ")", "l_str", "=", "\" flag: %s len(output): %s full: %s depth: %s len(ind): %s\"", "self", ".", "_logger", ".", "debug", "(", "l_str", "%", "l_vars", ")", "if", "flag", ":", "output", ".", "append", "(", "ind", ")", "return", "output" ]
Creates random population using ramped half-and-half method
[ "Creates", "random", "population", "using", "ramped", "half", "-", "and", "-", "half", "method" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/gp.py#L123-L152
train
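For concreteness, the (depth, full) schedule that create_population cycles through for the defaults min_depth=2, max_depth=4; each depth on the ramp alternates the full and grow constructors:

import itertools

args = list(itertools.product(range(2, 4 + 1), [True, False]))
print(args)
# [(2, True), (2, False), (3, True), (3, False), (4, True), (4, False)]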
mgraffg/EvoDAG
EvoDAG/model.py
Model.decision_function
def decision_function(self, X, **kwargs):
    "Decision function i.e. the raw data of the prediction"
    if X is None:
        return self._hy_test
    X = self.convert_features(X)
    if len(X) < self.nvar:
        _ = 'Number of variables differ, trained with %s given %s' % (self.nvar, len(X))
        raise RuntimeError(_)
    hist = self._hist
    for node in hist:
        if node.height:
            node.eval(hist)
        else:
            node.eval(X)
        node.normalize()
    r = node.hy
    for i in hist[:-1]:
        i.hy = None
        i.hy_test = None
    gc.collect()
    return r
python
[ "def", "decision_function", "(", "self", ",", "X", ",", "*", "*", "kwargs", ")", ":", "if", "X", "is", "None", ":", "return", "self", ".", "_hy_test", "X", "=", "self", ".", "convert_features", "(", "X", ")", "if", "len", "(", "X", ")", "<", "self", ".", "nvar", ":", "_", "=", "'Number of variables differ, trained with %s given %s'", "%", "(", "self", ".", "nvar", ",", "len", "(", "X", ")", ")", "raise", "RuntimeError", "(", "_", ")", "hist", "=", "self", ".", "_hist", "for", "node", "in", "hist", ":", "if", "node", ".", "height", ":", "node", ".", "eval", "(", "hist", ")", "else", ":", "node", ".", "eval", "(", "X", ")", "node", ".", "normalize", "(", ")", "r", "=", "node", ".", "hy", "for", "i", "in", "hist", "[", ":", "-", "1", "]", ":", "i", ".", "hy", "=", "None", "i", ".", "hy_test", "=", "None", "gc", ".", "collect", "(", ")", "return", "r" ]
Decision function i.e. the raw data of the prediction
[ "Decision", "function", "i", ".", "e", ".", "the", "raw", "data", "of", "the", "prediction" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/model.py#L154-L174
train
mgraffg/EvoDAG
EvoDAG/model.py
Ensemble.fitness_vs
def fitness_vs(self):
    "Median Fitness in the validation set"
    l = [x.fitness_vs for x in self.models]
    return np.median(l)
python
[ "def", "fitness_vs", "(", "self", ")", ":", "l", "=", "[", "x", ".", "fitness_vs", "for", "x", "in", "self", ".", "models", "]", "return", "np", ".", "median", "(", "l", ")" ]
Median Fitness in the validation set
[ "Median", "Fitness", "in", "the", "validation", "set" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/model.py#L335-L338
train
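A tiny numeric illustration (the per-model fitnesses are invented): the ensemble's validation fitness is the member-wise median.

import numpy as np

member_fitness_vs = [0.61, 0.74, 0.69]  # hypothetical per-model values
print(np.median(member_fitness_vs))     # 0.69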
mgraffg/EvoDAG
EvoDAG/model.py
Ensemble.graphviz
def graphviz(self, directory, **kwargs):
    "Directory to store the graphviz models"
    import os
    if not os.path.isdir(directory):
        os.mkdir(directory)
    output = os.path.join(directory, 'evodag-%s')
    for k, m in enumerate(self.models):
        m.graphviz(output % k, **kwargs)
python
[ "def", "graphviz", "(", "self", ",", "directory", ",", "*", "*", "kwargs", ")", ":", "import", "os", "if", "not", "os", ".", "path", ".", "isdir", "(", "directory", ")", ":", "os", ".", "mkdir", "(", "directory", ")", "output", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "'evodag-%s'", ")", "for", "k", ",", "m", "in", "enumerate", "(", "self", ".", "models", ")", ":", "m", ".", "graphviz", "(", "output", "%", "k", ",", "*", "*", "kwargs", ")" ]
Directory to store the graphviz models
[ "Directory", "to", "store", "the", "graphviz", "models" ]
e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5
https://github.com/mgraffg/EvoDAG/blob/e11fa1fd1ca9e69cca92696c86661a3dc7b3a1d5/EvoDAG/model.py#L438-L445
train
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleMemberProduct.load_data
def load_data(self, num_samples=1000, percentiles=None):
    """
    Args:
        num_samples: Number of random samples at each grid point
        percentiles: Which percentiles to extract from the random samples

    Returns:
    """
    self.percentiles = percentiles
    self.num_samples = num_samples
    if self.model_name.lower() in ["wrf"]:
        mo = ModelOutput(self.ensemble_name, self.member, self.run_date, self.variable,
                         self.start_date, self.end_date, self.path, self.map_file,
                         self.single_step)
        mo.load_data()
        self.data = mo.data[:]
        if mo.units == "m":
            self.data *= 1000
            self.units = "mm"
        else:
            self.units = mo.units
    else:
        if self.track_forecasts is None:
            self.load_track_data()
        self.units = "mm"
        self.data = np.zeros((self.forecast_hours.size,
                              self.mapping_data["lon"].shape[0],
                              self.mapping_data["lon"].shape[1]), dtype=np.float32)
        if self.percentiles is not None:
            self.percentile_data = np.zeros([len(self.percentiles)] + list(self.data.shape))
        full_condition_name = "condition_" + self.condition_model_name.replace(" ", "-")
        dist_model_name = "dist" + "_" + self.model_name.replace(" ", "-")
        for track_forecast in self.track_forecasts:
            times = track_forecast["properties"]["times"]
            for s, step in enumerate(track_forecast["features"]):
                forecast_params = step["properties"][dist_model_name]
                if self.condition_model_name is not None:
                    condition = step["properties"][full_condition_name]
                else:
                    condition = None
                forecast_time = self.run_date + timedelta(hours=times[s])
                if forecast_time in self.times:
                    t = np.where(self.times == forecast_time)[0][0]
                    mask = np.array(step["properties"]["masks"], dtype=int).ravel()
                    rankings = np.argsort(np.array(step["properties"]["timesteps"]).ravel()[mask == 1])
                    i = np.array(step["properties"]["i"], dtype=int).ravel()[mask == 1][rankings]
                    j = np.array(step["properties"]["j"], dtype=int).ravel()[mask == 1][rankings]
                    if rankings.size > 0 and forecast_params[0] > 0.1 and 1 < forecast_params[2] < 100:
                        raw_samples = np.sort(gamma.rvs(forecast_params[0], loc=forecast_params[1],
                                                        scale=forecast_params[2],
                                                        size=(num_samples, rankings.size)), axis=1)
                        if self.percentiles is None:
                            samples = raw_samples.mean(axis=0)
                            if condition >= self.condition_threshold:
                                self.data[t, i, j] = samples
                        else:
                            for p, percentile in enumerate(self.percentiles):
                                if percentile != "mean":
                                    if condition >= self.condition_threshold:
                                        self.percentile_data[p, t, i, j] = np.percentile(raw_samples, percentile, axis=0)
                                else:
                                    if condition >= self.condition_threshold:
                                        self.percentile_data[p, t, i, j] = np.mean(raw_samples, axis=0)
                            samples = raw_samples.mean(axis=0)
                            if condition >= self.condition_threshold:
                                self.data[t, i, j] = samples
python
[ "def", "load_data", "(", "self", ",", "num_samples", "=", "1000", ",", "percentiles", "=", "None", ")", ":", "self", ".", "percentiles", "=", "percentiles", "self", ".", "num_samples", "=", "num_samples", "if", "self", ".", "model_name", ".", "lower", "(", ")", "in", "[", "\"wrf\"", "]", ":", "mo", "=", "ModelOutput", "(", "self", ".", "ensemble_name", ",", "self", ".", "member", ",", "self", ".", "run_date", ",", "self", ".", "variable", ",", "self", ".", "start_date", ",", "self", ".", "end_date", ",", "self", ".", "path", ",", "self", ".", "map_file", ",", "self", ".", "single_step", ")", "mo", ".", "load_data", "(", ")", "self", ".", "data", "=", "mo", ".", "data", "[", ":", "]", "if", "mo", ".", "units", "==", "\"m\"", ":", "self", ".", "data", "*=", "1000", "self", ".", "units", "=", "\"mm\"", "else", ":", "self", ".", "units", "=", "mo", ".", "units", "else", ":", "if", "self", ".", "track_forecasts", "is", "None", ":", "self", ".", "load_track_data", "(", ")", "self", ".", "units", "=", "\"mm\"", "self", ".", "data", "=", "np", ".", "zeros", "(", "(", "self", ".", "forecast_hours", ".", "size", ",", "self", ".", "mapping_data", "[", "\"lon\"", "]", ".", "shape", "[", "0", "]", ",", "self", ".", "mapping_data", "[", "\"lon\"", "]", ".", "shape", "[", "1", "]", ")", ",", "dtype", "=", "np", ".", "float32", ")", "if", "self", ".", "percentiles", "is", "not", "None", ":", "self", ".", "percentile_data", "=", "np", ".", "zeros", "(", "[", "len", "(", "self", ".", "percentiles", ")", "]", "+", "list", "(", "self", ".", "data", ".", "shape", ")", ")", "full_condition_name", "=", "\"condition_\"", "+", "self", ".", "condition_model_name", ".", "replace", "(", "\" \"", ",", "\"-\"", ")", "dist_model_name", "=", "\"dist\"", "+", "\"_\"", "+", "self", ".", "model_name", ".", "replace", "(", "\" \"", ",", "\"-\"", ")", "for", "track_forecast", "in", "self", ".", "track_forecasts", ":", "times", "=", "track_forecast", "[", "\"properties\"", "]", "[", "\"times\"", "]", "for", "s", ",", "step", "in", "enumerate", "(", "track_forecast", "[", "\"features\"", "]", ")", ":", "forecast_params", "=", "step", "[", "\"properties\"", "]", "[", "dist_model_name", "]", "if", "self", ".", "condition_model_name", "is", "not", "None", ":", "condition", "=", "step", "[", "\"properties\"", "]", "[", "full_condition_name", "]", "else", ":", "condition", "=", "None", "forecast_time", "=", "self", ".", "run_date", "+", "timedelta", "(", "hours", "=", "times", "[", "s", "]", ")", "if", "forecast_time", "in", "self", ".", "times", ":", "t", "=", "np", ".", "where", "(", "self", ".", "times", "==", "forecast_time", ")", "[", "0", "]", "[", "0", "]", "mask", "=", "np", ".", "array", "(", "step", "[", "\"properties\"", "]", "[", "\"masks\"", "]", ",", "dtype", "=", "int", ")", ".", "ravel", "(", ")", "rankings", "=", "np", ".", "argsort", "(", "np", ".", "array", "(", "step", "[", "\"properties\"", "]", "[", "\"timesteps\"", "]", ")", ".", "ravel", "(", ")", "[", "mask", "==", "1", "]", ")", "i", "=", "np", ".", "array", "(", "step", "[", "\"properties\"", "]", "[", "\"i\"", "]", ",", "dtype", "=", "int", ")", ".", "ravel", "(", ")", "[", "mask", "==", "1", "]", "[", "rankings", "]", "j", "=", "np", ".", "array", "(", "step", "[", "\"properties\"", "]", "[", "\"j\"", "]", ",", "dtype", "=", "int", ")", ".", "ravel", "(", ")", "[", "mask", "==", "1", "]", "[", "rankings", "]", "if", "rankings", ".", "size", ">", "0", "and", "forecast_params", "[", "0", "]", ">", "0.1", "and", "1", "<", "forecast_params", "[", "2", "]", 
"<", "100", ":", "raw_samples", "=", "np", ".", "sort", "(", "gamma", ".", "rvs", "(", "forecast_params", "[", "0", "]", ",", "loc", "=", "forecast_params", "[", "1", "]", ",", "scale", "=", "forecast_params", "[", "2", "]", ",", "size", "=", "(", "num_samples", ",", "rankings", ".", "size", ")", ")", ",", "axis", "=", "1", ")", "if", "self", ".", "percentiles", "is", "None", ":", "samples", "=", "raw_samples", ".", "mean", "(", "axis", "=", "0", ")", "if", "condition", ">=", "self", ".", "condition_threshold", ":", "self", ".", "data", "[", "t", ",", "i", ",", "j", "]", "=", "samples", "else", ":", "for", "p", ",", "percentile", "in", "enumerate", "(", "self", ".", "percentiles", ")", ":", "if", "percentile", "!=", "\"mean\"", ":", "if", "condition", ">=", "self", ".", "condition_threshold", ":", "self", ".", "percentile_data", "[", "p", ",", "t", ",", "i", ",", "j", "]", "=", "np", ".", "percentile", "(", "raw_samples", ",", "percentile", ",", "axis", "=", "0", ")", "else", ":", "if", "condition", ">=", "self", ".", "condition_threshold", ":", "self", ".", "percentile_data", "[", "p", ",", "t", ",", "i", ",", "j", "]", "=", "np", ".", "mean", "(", "raw_samples", ",", "axis", "=", "0", ")", "samples", "=", "raw_samples", ".", "mean", "(", "axis", "=", "0", ")", "if", "condition", ">=", "self", ".", "condition_threshold", ":", "self", ".", "data", "[", "t", ",", "i", ",", "j", "]", "=", "samples" ]
Args:
    num_samples: Number of random samples at each grid point
    percentiles: Which percentiles to extract from the random samples

Returns:
[ "Args", ":", "num_samples", ":", "Number", "of", "random", "samples", "at", "each", "grid", "point", "percentiles", ":", "Which", "percentiles", "to", "extract", "from", "the", "random", "samples" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L63-L130
train
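A reduced, self-contained sketch of the sampling step inside load_data (the forecast_params values and grid size are invented): draw num_samples gamma variates per masked grid point, sort within each draw, then collapse to a mean or a percentile as the method does.

import numpy as np
from scipy.stats import gamma

forecast_params = (2.0, 0.0, 10.0)  # hypothetical (shape, loc, scale)
num_samples, n_points = 1000, 5
raw_samples = np.sort(gamma.rvs(forecast_params[0], loc=forecast_params[1],
                                scale=forecast_params[2],
                                size=(num_samples, n_points)), axis=1)
print(raw_samples.mean(axis=0))                # per-point mean
print(np.percentile(raw_samples, 90, axis=0))  # per-point 90th percentile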
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleMemberProduct.neighborhood_probability
def neighborhood_probability(self, threshold, radius):
    """
    Calculate a probability based on the number of grid points in an area that
    exceed a threshold.

    Args:
        threshold:
        radius:

    Returns:
    """
    weights = disk(radius, dtype=np.uint8)
    thresh_data = np.zeros(self.data.shape[1:], dtype=np.uint8)
    neighbor_prob = np.zeros(self.data.shape, dtype=np.float32)
    for t in np.arange(self.data.shape[0]):
        thresh_data[self.data[t] >= threshold] = 1
        maximized = fftconvolve(thresh_data, weights, mode="same")
        maximized[maximized > 1] = 1
        maximized[maximized < 1] = 0
        neighbor_prob[t] = fftconvolve(maximized, weights, mode="same")
        thresh_data[:] = 0
    neighbor_prob[neighbor_prob < 1] = 0
    neighbor_prob /= weights.sum()
    return neighbor_prob
python
[ "def", "neighborhood_probability", "(", "self", ",", "threshold", ",", "radius", ")", ":", "weights", "=", "disk", "(", "radius", ",", "dtype", "=", "np", ".", "uint8", ")", "thresh_data", "=", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", "[", "1", ":", "]", ",", "dtype", "=", "np", ".", "uint8", ")", "neighbor_prob", "=", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", ",", "dtype", "=", "np", ".", "float32", ")", "for", "t", "in", "np", ".", "arange", "(", "self", ".", "data", ".", "shape", "[", "0", "]", ")", ":", "thresh_data", "[", "self", ".", "data", "[", "t", "]", ">=", "threshold", "]", "=", "1", "maximized", "=", "fftconvolve", "(", "thresh_data", ",", "weights", ",", "mode", "=", "\"same\"", ")", "maximized", "[", "maximized", ">", "1", "]", "=", "1", "maximized", "[", "maximized", "<", "1", "]", "=", "0", "neighbor_prob", "[", "t", "]", "=", "fftconvolve", "(", "maximized", ",", "weights", ",", "mode", "=", "\"same\"", ")", "thresh_data", "[", ":", "]", "=", "0", "neighbor_prob", "[", "neighbor_prob", "<", "1", "]", "=", "0", "neighbor_prob", "/=", "weights", ".", "sum", "(", ")", "return", "neighbor_prob" ]
Calculate a probability based on the number of grid points in an area that
exceed a threshold.

Args:
    threshold:
    radius:

Returns:
[ "Calculate", "a", "probability", "based", "on", "the", "number", "of", "grid", "points", "in", "an", "area", "that", "exceed", "a", "threshold", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L221-L244
train
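A stand-alone sketch of the neighborhood logic, assuming the apparent imports (scipy.signal.fftconvolve and skimage.morphology.disk) and a synthetic field: threshold the field, dilate exceedances with a disk footprint, convolve again, and normalize by the footprint area. The 0.5 cutoffs below guard against FFT round-off rather than reproducing the exact comparisons above.

import numpy as np
from scipy.signal import fftconvolve
from skimage.morphology import disk

field = np.random.gamma(2.0, 10.0, size=(50, 50)).astype(np.float32)  # fake data
weights = disk(3).astype(np.uint8)
thresh = (field >= 25).astype(np.float32)
# dilate: any point within the disk of an exceedance becomes 1
maximized = (fftconvolve(thresh, weights, mode="same") > 0.5).astype(np.float32)
# fractional coverage of the dilated exceedance field within the disk
neighbor_prob = fftconvolve(maximized, weights, mode="same")
neighbor_prob[neighbor_prob < 0.5] = 0
neighbor_prob /= weights.sum()
print(neighbor_prob.min(), neighbor_prob.max())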
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleMemberProduct.encode_grib2_percentile
def encode_grib2_percentile(self):
    """
    Encodes member percentile data to GRIB2 format.

    Returns:
        Series of GRIB2 messages
    """
    lscale = 1e6
    grib_id_start = [7, 0, 14, 14, 2]
    gdsinfo = np.array([0, np.product(self.data.shape[-2:]), 0, 0, 30], dtype=np.int32)
    lon_0 = self.proj_dict["lon_0"]
    sw_lon = self.grid_dict["sw_lon"]
    if lon_0 < 0:
        lon_0 += 360
    if sw_lon < 0:
        sw_lon += 360
    gdtmp1 = [1, 0, self.proj_dict['a'], 0, float(self.proj_dict['a']), 0,
              float(self.proj_dict['b']), self.data.shape[-1], self.data.shape[-2],
              self.grid_dict["sw_lat"] * lscale, sw_lon * lscale, 0,
              self.proj_dict["lat_0"] * lscale, lon_0 * lscale,
              self.grid_dict["dx"] * 1e3, self.grid_dict["dy"] * 1e3,
              0b00000000, 0b01000000,
              self.proj_dict["lat_1"] * lscale, self.proj_dict["lat_2"] * lscale,
              -90 * lscale, 0]
    pdtmp1 = np.array([1,                 # parameter category Moisture
                       31,                # parameter number Hail
                       4,                 # Type of generating process Ensemble Forecast
                       0,                 # Background generating process identifier
                       31,                # Generating process or model from NCEP
                       0,                 # Hours after reference time data cutoff
                       0,                 # Minutes after reference time data cutoff
                       1,                 # Forecast time units Hours
                       0,                 # Forecast time
                       1,                 # Type of first fixed surface Ground
                       1,                 # Scale value of first fixed surface
                       0,                 # Value of first fixed surface
                       1,                 # Type of second fixed surface
                       1,                 # Scale value of 2nd fixed surface
                       0,                 # Value of 2nd fixed surface
                       0,                 # Derived forecast type
                       self.num_samples   # Number of ensemble members
                       ], dtype=np.int32)
    grib_objects = pd.Series(index=self.times, data=[None] * self.times.size, dtype=object)
    drtmp1 = np.array([0, 0, 4, 8, 0], dtype=np.int32)
    for t, time in enumerate(self.times):
        time_list = list(self.run_date.utctimetuple()[0:6])
        if grib_objects[time] is None:
            grib_objects[time] = Grib2Encode(0, np.array(grib_id_start + time_list + [2, 1],
                                                         dtype=np.int32))
            grib_objects[time].addgrid(gdsinfo, gdtmp1)
        pdtmp1[8] = (time.to_pydatetime() - self.run_date).total_seconds() / 3600.0
        data = self.percentile_data[:, t] / 1000.0
        masked_data = np.ma.array(data, mask=data <= 0)
        for p, percentile in enumerate(self.percentiles):
            print("GRIB {3} Percentile {0}. Max: {1} Min: {2}".format(percentile,
                                                                      masked_data[p].max(),
                                                                      masked_data[p].min(),
                                                                      time))
            if percentile in range(1, 100):
                pdtmp1[-2] = percentile
                grib_objects[time].addfield(6, pdtmp1[:-1], 0, drtmp1, masked_data[p])
            else:
                pdtmp1[-2] = 0
                grib_objects[time].addfield(2, pdtmp1, 0, drtmp1, masked_data[p])
    return grib_objects
python
[ "def", "encode_grib2_percentile", "(", "self", ")", ":", "lscale", "=", "1e6", "grib_id_start", "=", "[", "7", ",", "0", ",", "14", ",", "14", ",", "2", "]", "gdsinfo", "=", "np", ".", "array", "(", "[", "0", ",", "np", ".", "product", "(", "self", ".", "data", ".", "shape", "[", "-", "2", ":", "]", ")", ",", "0", ",", "0", ",", "30", "]", ",", "dtype", "=", "np", ".", "int32", ")", "lon_0", "=", "self", ".", "proj_dict", "[", "\"lon_0\"", "]", "sw_lon", "=", "self", ".", "grid_dict", "[", "\"sw_lon\"", "]", "if", "lon_0", "<", "0", ":", "lon_0", "+=", "360", "if", "sw_lon", "<", "0", ":", "sw_lon", "+=", "360", "gdtmp1", "=", "[", "1", ",", "0", ",", "self", ".", "proj_dict", "[", "'a'", "]", ",", "0", ",", "float", "(", "self", ".", "proj_dict", "[", "'a'", "]", ")", ",", "0", ",", "float", "(", "self", ".", "proj_dict", "[", "'b'", "]", ")", ",", "self", ".", "data", ".", "shape", "[", "-", "1", "]", ",", "self", ".", "data", ".", "shape", "[", "-", "2", "]", ",", "self", ".", "grid_dict", "[", "\"sw_lat\"", "]", "*", "lscale", ",", "sw_lon", "*", "lscale", ",", "0", ",", "self", ".", "proj_dict", "[", "\"lat_0\"", "]", "*", "lscale", ",", "lon_0", "*", "lscale", ",", "self", ".", "grid_dict", "[", "\"dx\"", "]", "*", "1e3", ",", "self", ".", "grid_dict", "[", "\"dy\"", "]", "*", "1e3", ",", "0b00000000", ",", "0b01000000", ",", "self", ".", "proj_dict", "[", "\"lat_1\"", "]", "*", "lscale", ",", "self", ".", "proj_dict", "[", "\"lat_2\"", "]", "*", "lscale", ",", "-", "90", "*", "lscale", ",", "0", "]", "pdtmp1", "=", "np", ".", "array", "(", "[", "1", ",", "# parameter category Moisture", "31", ",", "# parameter number Hail", "4", ",", "# Type of generating process Ensemble Forecast", "0", ",", "# Background generating process identifier", "31", ",", "# Generating process or model from NCEP", "0", ",", "# Hours after reference time data cutoff", "0", ",", "# Minutes after reference time data cutoff", "1", ",", "# Forecast time units Hours", "0", ",", "# Forecast time", "1", ",", "# Type of first fixed surface Ground", "1", ",", "# Scale value of first fixed surface", "0", ",", "# Value of first fixed surface", "1", ",", "# Type of second fixed surface", "1", ",", "# Scale value of 2nd fixed surface", "0", ",", "# Value of 2nd fixed surface", "0", ",", "# Derived forecast type", "self", ".", "num_samples", "# Number of ensemble members", "]", ",", "dtype", "=", "np", ".", "int32", ")", "grib_objects", "=", "pd", ".", "Series", "(", "index", "=", "self", ".", "times", ",", "data", "=", "[", "None", "]", "*", "self", ".", "times", ".", "size", ",", "dtype", "=", "object", ")", "drtmp1", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "4", ",", "8", ",", "0", "]", ",", "dtype", "=", "np", ".", "int32", ")", "for", "t", ",", "time", "in", "enumerate", "(", "self", ".", "times", ")", ":", "time_list", "=", "list", "(", "self", ".", "run_date", ".", "utctimetuple", "(", ")", "[", "0", ":", "6", "]", ")", "if", "grib_objects", "[", "time", "]", "is", "None", ":", "grib_objects", "[", "time", "]", "=", "Grib2Encode", "(", "0", ",", "np", ".", "array", "(", "grib_id_start", "+", "time_list", "+", "[", "2", ",", "1", "]", ",", "dtype", "=", "np", ".", "int32", ")", ")", "grib_objects", "[", "time", "]", ".", "addgrid", "(", "gdsinfo", ",", "gdtmp1", ")", "pdtmp1", "[", "8", "]", "=", "(", "time", ".", "to_pydatetime", "(", ")", "-", "self", ".", "run_date", ")", ".", "total_seconds", "(", ")", "/", "3600.0", "data", "=", "self", ".", "percentile_data", "[", ":", ",", "t", "]", "/", 
"1000.0", "masked_data", "=", "np", ".", "ma", ".", "array", "(", "data", ",", "mask", "=", "data", "<=", "0", ")", "for", "p", ",", "percentile", "in", "enumerate", "(", "self", ".", "percentiles", ")", ":", "print", "(", "\"GRIB {3} Percentile {0}. Max: {1} Min: {2}\"", ".", "format", "(", "percentile", ",", "masked_data", "[", "p", "]", ".", "max", "(", ")", ",", "masked_data", "[", "p", "]", ".", "min", "(", ")", ",", "time", ")", ")", "if", "percentile", "in", "range", "(", "1", ",", "100", ")", ":", "pdtmp1", "[", "-", "2", "]", "=", "percentile", "grib_objects", "[", "time", "]", ".", "addfield", "(", "6", ",", "pdtmp1", "[", ":", "-", "1", "]", ",", "0", ",", "drtmp1", ",", "masked_data", "[", "p", "]", ")", "else", ":", "pdtmp1", "[", "-", "2", "]", "=", "0", "grib_objects", "[", "time", "]", ".", "addfield", "(", "2", ",", "pdtmp1", ",", "0", ",", "drtmp1", ",", "masked_data", "[", "p", "]", ")", "return", "grib_objects" ]
Encodes member percentile data to GRIB2 format.

Returns:
    Series of GRIB2 messages
[ "Encodes", "member", "percentile", "data", "to", "GRIB2", "format", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L273-L335
train
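Two small conversions the encoder depends on, isolated into a runnable snippet (the longitude and dates are made up): GRIB2 grid templates want longitudes in [0, 360), and pdtmp1[8] carries the forecast lead time in whole hours from the run date.

from datetime import datetime

lon_0 = -101.0
if lon_0 < 0:
    lon_0 += 360
print(lon_0)  # 259.0

run_date = datetime(2016, 5, 1, 0)
valid_time = datetime(2016, 5, 1, 18)
print((valid_time - run_date).total_seconds() / 3600.0)  # 18.0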
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleMemberProduct.encode_grib2_data
def encode_grib2_data(self):
    """
    Encodes member percentile data to GRIB2 format.

    Returns:
        Series of GRIB2 messages
    """
    lscale = 1e6
    grib_id_start = [7, 0, 14, 14, 2]
    gdsinfo = np.array([0, np.product(self.data.shape[-2:]), 0, 0, 30], dtype=np.int32)
    lon_0 = self.proj_dict["lon_0"]
    sw_lon = self.grid_dict["sw_lon"]
    if lon_0 < 0:
        lon_0 += 360
    if sw_lon < 0:
        sw_lon += 360
    gdtmp1 = [1, 0, self.proj_dict['a'], 0, float(self.proj_dict['a']), 0,
              float(self.proj_dict['b']), self.data.shape[-1], self.data.shape[-2],
              self.grid_dict["sw_lat"] * lscale, sw_lon * lscale, 0,
              self.proj_dict["lat_0"] * lscale, lon_0 * lscale,
              self.grid_dict["dx"] * 1e3, self.grid_dict["dy"] * 1e3,
              0b00000000, 0b01000000,
              self.proj_dict["lat_1"] * lscale, self.proj_dict["lat_2"] * lscale,
              -90 * lscale, 0]
    pdtmp1 = np.array([1,   # parameter category Moisture
                       31,  # parameter number Hail
                       4,   # Type of generating process Ensemble Forecast
                       0,   # Background generating process identifier
                       31,  # Generating process or model from NCEP
                       0,   # Hours after reference time data cutoff
                       0,   # Minutes after reference time data cutoff
                       1,   # Forecast time units Hours
                       0,   # Forecast time
                       1,   # Type of first fixed surface Ground
                       1,   # Scale value of first fixed surface
                       0,   # Value of first fixed surface
                       1,   # Type of second fixed surface
                       1,   # Scale value of 2nd fixed surface
                       0,   # Value of 2nd fixed surface
                       0,   # Derived forecast type
                       1    # Number of ensemble members
                       ], dtype=np.int32)
    grib_objects = pd.Series(index=self.times, data=[None] * self.times.size, dtype=object)
    drtmp1 = np.array([0, 0, 4, 8, 0], dtype=np.int32)
    for t, time in enumerate(self.times):
        time_list = list(self.run_date.utctimetuple()[0:6])
        if grib_objects[time] is None:
            grib_objects[time] = Grib2Encode(0, np.array(grib_id_start + time_list + [2, 1],
                                                         dtype=np.int32))
            grib_objects[time].addgrid(gdsinfo, gdtmp1)
        pdtmp1[8] = (time.to_pydatetime() - self.run_date).total_seconds() / 3600.0
        data = self.data[t] / 1000.0
        data[np.isnan(data)] = 0
        masked_data = np.ma.array(data, mask=data <= 0)
        pdtmp1[-2] = 0
        grib_objects[time].addfield(1, pdtmp1, 0, drtmp1, masked_data)
    return grib_objects
python
[ "def", "encode_grib2_data", "(", "self", ")", ":", "lscale", "=", "1e6", "grib_id_start", "=", "[", "7", ",", "0", ",", "14", ",", "14", ",", "2", "]", "gdsinfo", "=", "np", ".", "array", "(", "[", "0", ",", "np", ".", "product", "(", "self", ".", "data", ".", "shape", "[", "-", "2", ":", "]", ")", ",", "0", ",", "0", ",", "30", "]", ",", "dtype", "=", "np", ".", "int32", ")", "lon_0", "=", "self", ".", "proj_dict", "[", "\"lon_0\"", "]", "sw_lon", "=", "self", ".", "grid_dict", "[", "\"sw_lon\"", "]", "if", "lon_0", "<", "0", ":", "lon_0", "+=", "360", "if", "sw_lon", "<", "0", ":", "sw_lon", "+=", "360", "gdtmp1", "=", "[", "1", ",", "0", ",", "self", ".", "proj_dict", "[", "'a'", "]", ",", "0", ",", "float", "(", "self", ".", "proj_dict", "[", "'a'", "]", ")", ",", "0", ",", "float", "(", "self", ".", "proj_dict", "[", "'b'", "]", ")", ",", "self", ".", "data", ".", "shape", "[", "-", "1", "]", ",", "self", ".", "data", ".", "shape", "[", "-", "2", "]", ",", "self", ".", "grid_dict", "[", "\"sw_lat\"", "]", "*", "lscale", ",", "sw_lon", "*", "lscale", ",", "0", ",", "self", ".", "proj_dict", "[", "\"lat_0\"", "]", "*", "lscale", ",", "lon_0", "*", "lscale", ",", "self", ".", "grid_dict", "[", "\"dx\"", "]", "*", "1e3", ",", "self", ".", "grid_dict", "[", "\"dy\"", "]", "*", "1e3", ",", "0b00000000", ",", "0b01000000", ",", "self", ".", "proj_dict", "[", "\"lat_1\"", "]", "*", "lscale", ",", "self", ".", "proj_dict", "[", "\"lat_2\"", "]", "*", "lscale", ",", "-", "90", "*", "lscale", ",", "0", "]", "pdtmp1", "=", "np", ".", "array", "(", "[", "1", ",", "# parameter category Moisture", "31", ",", "# parameter number Hail", "4", ",", "# Type of generating process Ensemble Forecast", "0", ",", "# Background generating process identifier", "31", ",", "# Generating process or model from NCEP", "0", ",", "# Hours after reference time data cutoff", "0", ",", "# Minutes after reference time data cutoff", "1", ",", "# Forecast time units Hours", "0", ",", "# Forecast time", "1", ",", "# Type of first fixed surface Ground", "1", ",", "# Scale value of first fixed surface", "0", ",", "# Value of first fixed surface", "1", ",", "# Type of second fixed surface", "1", ",", "# Scale value of 2nd fixed surface", "0", ",", "# Value of 2nd fixed surface", "0", ",", "# Derived forecast type", "1", "# Number of ensemble members", "]", ",", "dtype", "=", "np", ".", "int32", ")", "grib_objects", "=", "pd", ".", "Series", "(", "index", "=", "self", ".", "times", ",", "data", "=", "[", "None", "]", "*", "self", ".", "times", ".", "size", ",", "dtype", "=", "object", ")", "drtmp1", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "4", ",", "8", ",", "0", "]", ",", "dtype", "=", "np", ".", "int32", ")", "for", "t", ",", "time", "in", "enumerate", "(", "self", ".", "times", ")", ":", "time_list", "=", "list", "(", "self", ".", "run_date", ".", "utctimetuple", "(", ")", "[", "0", ":", "6", "]", ")", "if", "grib_objects", "[", "time", "]", "is", "None", ":", "grib_objects", "[", "time", "]", "=", "Grib2Encode", "(", "0", ",", "np", ".", "array", "(", "grib_id_start", "+", "time_list", "+", "[", "2", ",", "1", "]", ",", "dtype", "=", "np", ".", "int32", ")", ")", "grib_objects", "[", "time", "]", ".", "addgrid", "(", "gdsinfo", ",", "gdtmp1", ")", "pdtmp1", "[", "8", "]", "=", "(", "time", ".", "to_pydatetime", "(", ")", "-", "self", ".", "run_date", ")", ".", "total_seconds", "(", ")", "/", "3600.0", "data", "=", "self", ".", "data", "[", "t", "]", "/", "1000.0", "data", "[", "np", ".", "isnan", "(", 
"data", ")", "]", "=", "0", "masked_data", "=", "np", ".", "ma", ".", "array", "(", "data", ",", "mask", "=", "data", "<=", "0", ")", "pdtmp1", "[", "-", "2", "]", "=", "0", "grib_objects", "[", "time", "]", ".", "addfield", "(", "1", ",", "pdtmp1", ",", "0", ",", "drtmp1", ",", "masked_data", ")", "return", "grib_objects" ]
Encodes member percentile data to GRIB2 format.

Returns:
    Series of GRIB2 messages
[ "Encodes", "member", "percentile", "data", "to", "GRIB2", "format", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L337-L391
train
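A minimal usage sketch for the entry above, assuming the ncepgrib2-style Grib2Encode API it relies on: each message in the returned pandas Series still needs end() called before its raw bytes can be written. The function and path names here are illustrative, not part of the dataset.

def write_grib_series(grib_objects, out_path):
    # grib_objects: pd.Series of Grib2Encode objects indexed by forecast time
    with open(out_path, "wb") as grib_file:
        for forecast_time, grbe in grib_objects.items():
            grbe.end()                 # finalize the message (adds section 8)
            grib_file.write(grbe.msg)  # raw encoded GRIB2 bytes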
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleProducts.load_data
def load_data(self):
    """
    Loads data from each ensemble member.
    """
    for m, member in enumerate(self.members):
        mo = ModelOutput(self.ensemble_name, member, self.run_date, self.variable,
                         self.start_date, self.end_date, self.path, self.map_file, self.single_step)
        mo.load_data()
        if self.data is None:
            self.data = np.zeros((len(self.members), mo.data.shape[0],
                                  mo.data.shape[1], mo.data.shape[2]), dtype=np.float32)
        if mo.units == "m":
            self.data[m] = mo.data * 1000
            self.units = "mm"
        else:
            self.data[m] = mo.data
        if self.units == "":
            self.units = mo.units
        del mo.data
        del mo
python
def load_data(self):
    """
    Loads data from each ensemble member.
    """
    for m, member in enumerate(self.members):
        mo = ModelOutput(self.ensemble_name, member, self.run_date, self.variable,
                         self.start_date, self.end_date, self.path, self.map_file, self.single_step)
        mo.load_data()
        if self.data is None:
            self.data = np.zeros((len(self.members), mo.data.shape[0],
                                  mo.data.shape[1], mo.data.shape[2]), dtype=np.float32)
        if mo.units == "m":
            self.data[m] = mo.data * 1000
            self.units = "mm"
        else:
            self.data[m] = mo.data
        if self.units == "":
            self.units = mo.units
        del mo.data
        del mo
[ "def", "load_data", "(", "self", ")", ":", "for", "m", ",", "member", "in", "enumerate", "(", "self", ".", "members", ")", ":", "mo", "=", "ModelOutput", "(", "self", ".", "ensemble_name", ",", "member", ",", "self", ".", "run_date", ",", "self", ".", "variable", ",", "self", ".", "start_date", ",", "self", ".", "end_date", ",", "self", ".", "path", ",", "self", ".", "map_file", ",", "self", ".", "single_step", ")", "mo", ".", "load_data", "(", ")", "if", "self", ".", "data", "is", "None", ":", "self", ".", "data", "=", "np", ".", "zeros", "(", "(", "len", "(", "self", ".", "members", ")", ",", "mo", ".", "data", ".", "shape", "[", "0", "]", ",", "mo", ".", "data", ".", "shape", "[", "1", "]", ",", "mo", ".", "data", ".", "shape", "[", "2", "]", ")", ",", "dtype", "=", "np", ".", "float32", ")", "if", "mo", ".", "units", "==", "\"m\"", ":", "self", ".", "data", "[", "m", "]", "=", "mo", ".", "data", "*", "1000", "self", ".", "units", "=", "\"mm\"", "else", ":", "self", ".", "data", "[", "m", "]", "=", "mo", ".", "data", "if", "self", ".", "units", "==", "\"\"", ":", "self", ".", "units", "=", "mo", ".", "units", "del", "mo", ".", "data", "del", "mo" ]
Loads data from each ensemble member.
[ "Loads", "data", "from", "each", "ensemble", "member", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L439-L458
train
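The loader above stacks members along the first axis, giving a (member, time, y, x) array. A standalone illustration of that layout with made-up sizes:

import numpy as np

data = np.zeros((10, 24, 200, 300), dtype=np.float32)  # 10 members, 24 hours
member_3 = data[3]             # (24, 200, 300): all hours for one member
period_max = data.max(axis=1)  # (10, 200, 300): per-member maximum over time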
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleProducts.point_consensus
def point_consensus(self, consensus_type):
    """
    Calculate grid-point statistics across ensemble members.

    Args:
        consensus_type: mean, std, median, max, or percentile_nn

    Returns:
        EnsembleConsensus containing point statistic
    """
    if "mean" in consensus_type:
        consensus_data = np.mean(self.data, axis=0)
    elif "std" in consensus_type:
        consensus_data = np.std(self.data, axis=0)
    elif "median" in consensus_type:
        consensus_data = np.median(self.data, axis=0)
    elif "max" in consensus_type:
        consensus_data = np.max(self.data, axis=0)
    elif "percentile" in consensus_type:
        percentile = int(consensus_type.split("_")[1])
        consensus_data = np.percentile(self.data, percentile, axis=0)
    else:
        consensus_data = np.zeros(self.data.shape[1:])
    consensus = EnsembleConsensus(consensus_data, consensus_type, self.ensemble_name,
                                  self.run_date, self.variable,
                                  self.start_date, self.end_date, self.units)
    return consensus
python
def point_consensus(self, consensus_type):
    """
    Calculate grid-point statistics across ensemble members.

    Args:
        consensus_type: mean, std, median, max, or percentile_nn

    Returns:
        EnsembleConsensus containing point statistic
    """
    if "mean" in consensus_type:
        consensus_data = np.mean(self.data, axis=0)
    elif "std" in consensus_type:
        consensus_data = np.std(self.data, axis=0)
    elif "median" in consensus_type:
        consensus_data = np.median(self.data, axis=0)
    elif "max" in consensus_type:
        consensus_data = np.max(self.data, axis=0)
    elif "percentile" in consensus_type:
        percentile = int(consensus_type.split("_")[1])
        consensus_data = np.percentile(self.data, percentile, axis=0)
    else:
        consensus_data = np.zeros(self.data.shape[1:])
    consensus = EnsembleConsensus(consensus_data, consensus_type, self.ensemble_name,
                                  self.run_date, self.variable,
                                  self.start_date, self.end_date, self.units)
    return consensus
[ "def", "point_consensus", "(", "self", ",", "consensus_type", ")", ":", "if", "\"mean\"", "in", "consensus_type", ":", "consensus_data", "=", "np", ".", "mean", "(", "self", ".", "data", ",", "axis", "=", "0", ")", "elif", "\"std\"", "in", "consensus_type", ":", "consensus_data", "=", "np", ".", "std", "(", "self", ".", "data", ",", "axis", "=", "0", ")", "elif", "\"median\"", "in", "consensus_type", ":", "consensus_data", "=", "np", ".", "median", "(", "self", ".", "data", ",", "axis", "=", "0", ")", "elif", "\"max\"", "in", "consensus_type", ":", "consensus_data", "=", "np", ".", "max", "(", "self", ".", "data", ",", "axis", "=", "0", ")", "elif", "\"percentile\"", "in", "consensus_type", ":", "percentile", "=", "int", "(", "consensus_type", ".", "split", "(", "\"_\"", ")", "[", "1", "]", ")", "consensus_data", "=", "np", ".", "percentile", "(", "self", ".", "data", ",", "percentile", ",", "axis", "=", "0", ")", "else", ":", "consensus_data", "=", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", "[", "1", ":", "]", ")", "consensus", "=", "EnsembleConsensus", "(", "consensus_data", ",", "consensus_type", ",", "self", ".", "ensemble_name", ",", "self", ".", "run_date", ",", "self", ".", "variable", ",", "self", ".", "start_date", ",", "self", ".", "end_date", ",", "self", ".", "units", ")", "return", "consensus" ]
Calculate grid-point statistics across ensemble members.

Args:
    consensus_type: mean, std, median, max, or percentile_nn

Returns:
    EnsembleConsensus containing point statistic
[ "Calculate", "grid", "-", "point", "statistics", "across", "ensemble", "members", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L460-L485
train
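The "percentile_nn" consensus type encodes its parameter in the name itself. A self-contained sketch of the same parsing, with random stand-in data:

import numpy as np

def percentile_consensus(consensus_type, data):
    percentile = int(consensus_type.split("_")[1])  # "percentile_90" -> 90
    return np.percentile(data, percentile, axis=0)

ens = np.random.rand(10, 4, 5)                   # (member, y, x), made-up values
p90 = percentile_consensus("percentile_90", ens)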
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleProducts.point_probability
def point_probability(self, threshold):
    """
    Determine the probability of exceeding a threshold at a grid point based on the ensemble
    forecasts at that point.

    Args:
        threshold: member values >= threshold are counted as 1, otherwise 0.

    Returns:
        EnsembleConsensus
    """
    point_prob = np.zeros(self.data.shape[1:])
    for t in range(self.data.shape[1]):
        point_prob[t] = np.where(self.data[:, t] >= threshold, 1.0, 0.0).mean(axis=0)
    return EnsembleConsensus(point_prob, "point_probability", self.ensemble_name,
                             self.run_date,
                             self.variable + "_{0:0.2f}_{1}".format(threshold,
                                                                    self.units.replace(" ", "_")),
                             self.start_date, self.end_date, "")
python
def point_probability(self, threshold):
    """
    Determine the probability of exceeding a threshold at a grid point based on the ensemble
    forecasts at that point.

    Args:
        threshold: member values >= threshold are counted as 1, otherwise 0.

    Returns:
        EnsembleConsensus
    """
    point_prob = np.zeros(self.data.shape[1:])
    for t in range(self.data.shape[1]):
        point_prob[t] = np.where(self.data[:, t] >= threshold, 1.0, 0.0).mean(axis=0)
    return EnsembleConsensus(point_prob, "point_probability", self.ensemble_name,
                             self.run_date,
                             self.variable + "_{0:0.2f}_{1}".format(threshold,
                                                                    self.units.replace(" ", "_")),
                             self.start_date, self.end_date, "")
[ "def", "point_probability", "(", "self", ",", "threshold", ")", ":", "point_prob", "=", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", "[", "1", ":", "]", ")", "for", "t", "in", "range", "(", "self", ".", "data", ".", "shape", "[", "1", "]", ")", ":", "point_prob", "[", "t", "]", "=", "np", ".", "where", "(", "self", ".", "data", "[", ":", ",", "t", "]", ">=", "threshold", ",", "1.0", ",", "0.0", ")", ".", "mean", "(", "axis", "=", "0", ")", "return", "EnsembleConsensus", "(", "point_prob", ",", "\"point_probability\"", ",", "self", ".", "ensemble_name", ",", "self", ".", "run_date", ",", "self", ".", "variable", "+", "\"_{0:0.2f}_{1}\"", ".", "format", "(", "threshold", ",", "self", ".", "units", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", ")", ",", "self", ".", "start_date", ",", "self", ".", "end_date", ",", "\"\"", ")" ]
Determine the probability of exceeding a threshold at a grid point based on the ensemble forecasts at that point.

Args:
    threshold: member values >= threshold are counted as 1, otherwise 0.

Returns:
    EnsembleConsensus
[ "Determine", "the", "probability", "of", "exceeding", "a", "threshold", "at", "a", "grid", "point", "based", "on", "the", "ensemble", "forecasts", "at", "that", "point", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L487-L504
train
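Because each member contributes 0 or 1, the point probability is just the ensemble relative frequency of exceedance; the time loop above is equivalent to one vectorized mean over the member axis:

import numpy as np

data = np.random.gamma(2.0, 10.0, size=(10, 24, 50, 60))  # (member, t, y, x)
point_prob = (data >= 25.0).mean(axis=0)                  # (t, y, x), values in [0, 1]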
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleProducts.neighborhood_probability
def neighborhood_probability(self, threshold, radius, sigmas=None):
    """
    Hourly probability of exceeding a threshold based on model values within a specified radius
    of a point.

    Args:
        threshold (float): threshold value whose exceedance probability is calculated.
        radius (int): distance from point in number of grid points to include in neighborhood calculation.
        sigmas (array of ints): Radii for Gaussian filter used to smooth neighborhood probabilities.

    Returns:
        list of EnsembleConsensus objects containing neighborhood probabilities for each forecast hour.
    """
    if sigmas is None:
        sigmas = [0]
    weights = disk(radius)
    filtered_prob = []
    for sigma in sigmas:
        filtered_prob.append(EnsembleConsensus(np.zeros(self.data.shape[1:], dtype=np.float32),
                                               "neighbor_prob_r_{0:d}_s_{1:d}".format(radius, sigma),
                                               self.ensemble_name, self.run_date,
                                               self.variable + "_{0:0.2f}".format(threshold),
                                               self.start_date, self.end_date, ""))
    thresh_data = np.zeros(self.data.shape[2:], dtype=np.uint8)
    neighbor_prob = np.zeros(self.data.shape[2:], dtype=np.float32)
    for t in range(self.data.shape[1]):
        for m in range(self.data.shape[0]):
            thresh_data[self.data[m, t] >= threshold] = 1
            maximized = fftconvolve(thresh_data, weights, mode="same")
            maximized[maximized > 1] = 1
            maximized[maximized < 1] = 0
            neighbor_prob += fftconvolve(maximized, weights, mode="same")
            neighbor_prob[neighbor_prob < 1] = 0
            thresh_data[:] = 0
        neighbor_prob /= (self.data.shape[0] * float(weights.sum()))
        for s, sigma in enumerate(sigmas):
            if sigma > 0:
                filtered_prob[s].data[t] = gaussian_filter(neighbor_prob, sigma=sigma)
            else:
                filtered_prob[s].data[t] = neighbor_prob
        neighbor_prob[:] = 0
    return filtered_prob
python
def neighborhood_probability(self, threshold, radius, sigmas=None):
    """
    Hourly probability of exceeding a threshold based on model values within a specified radius
    of a point.

    Args:
        threshold (float): threshold value whose exceedance probability is calculated.
        radius (int): distance from point in number of grid points to include in neighborhood calculation.
        sigmas (array of ints): Radii for Gaussian filter used to smooth neighborhood probabilities.

    Returns:
        list of EnsembleConsensus objects containing neighborhood probabilities for each forecast hour.
    """
    if sigmas is None:
        sigmas = [0]
    weights = disk(radius)
    filtered_prob = []
    for sigma in sigmas:
        filtered_prob.append(EnsembleConsensus(np.zeros(self.data.shape[1:], dtype=np.float32),
                                               "neighbor_prob_r_{0:d}_s_{1:d}".format(radius, sigma),
                                               self.ensemble_name, self.run_date,
                                               self.variable + "_{0:0.2f}".format(threshold),
                                               self.start_date, self.end_date, ""))
    thresh_data = np.zeros(self.data.shape[2:], dtype=np.uint8)
    neighbor_prob = np.zeros(self.data.shape[2:], dtype=np.float32)
    for t in range(self.data.shape[1]):
        for m in range(self.data.shape[0]):
            thresh_data[self.data[m, t] >= threshold] = 1
            maximized = fftconvolve(thresh_data, weights, mode="same")
            maximized[maximized > 1] = 1
            maximized[maximized < 1] = 0
            neighbor_prob += fftconvolve(maximized, weights, mode="same")
            neighbor_prob[neighbor_prob < 1] = 0
            thresh_data[:] = 0
        neighbor_prob /= (self.data.shape[0] * float(weights.sum()))
        for s, sigma in enumerate(sigmas):
            if sigma > 0:
                filtered_prob[s].data[t] = gaussian_filter(neighbor_prob, sigma=sigma)
            else:
                filtered_prob[s].data[t] = neighbor_prob
        neighbor_prob[:] = 0
    return filtered_prob
[ "def", "neighborhood_probability", "(", "self", ",", "threshold", ",", "radius", ",", "sigmas", "=", "None", ")", ":", "if", "sigmas", "is", "None", ":", "sigmas", "=", "[", "0", "]", "weights", "=", "disk", "(", "radius", ")", "filtered_prob", "=", "[", "]", "for", "sigma", "in", "sigmas", ":", "filtered_prob", ".", "append", "(", "EnsembleConsensus", "(", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", "[", "1", ":", "]", ",", "dtype", "=", "np", ".", "float32", ")", ",", "\"neighbor_prob_r_{0:d}_s_{1:d}\"", ".", "format", "(", "radius", ",", "sigma", ")", ",", "self", ".", "ensemble_name", ",", "self", ".", "run_date", ",", "self", ".", "variable", "+", "\"_{0:0.2f}\"", ".", "format", "(", "threshold", ")", ",", "self", ".", "start_date", ",", "self", ".", "end_date", ",", "\"\"", ")", ")", "thresh_data", "=", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", "[", "2", ":", "]", ",", "dtype", "=", "np", ".", "uint8", ")", "neighbor_prob", "=", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", "[", "2", ":", "]", ",", "dtype", "=", "np", ".", "float32", ")", "for", "t", "in", "range", "(", "self", ".", "data", ".", "shape", "[", "1", "]", ")", ":", "for", "m", "in", "range", "(", "self", ".", "data", ".", "shape", "[", "0", "]", ")", ":", "thresh_data", "[", "self", ".", "data", "[", "m", ",", "t", "]", ">=", "threshold", "]", "=", "1", "maximized", "=", "fftconvolve", "(", "thresh_data", ",", "weights", ",", "mode", "=", "\"same\"", ")", "maximized", "[", "maximized", ">", "1", "]", "=", "1", "maximized", "[", "maximized", "<", "1", "]", "=", "0", "neighbor_prob", "+=", "fftconvolve", "(", "maximized", ",", "weights", ",", "mode", "=", "\"same\"", ")", "neighbor_prob", "[", "neighbor_prob", "<", "1", "]", "=", "0", "thresh_data", "[", ":", "]", "=", "0", "neighbor_prob", "/=", "(", "self", ".", "data", ".", "shape", "[", "0", "]", "*", "float", "(", "weights", ".", "sum", "(", ")", ")", ")", "for", "s", ",", "sigma", "in", "enumerate", "(", "sigmas", ")", ":", "if", "sigma", ">", "0", ":", "filtered_prob", "[", "s", "]", ".", "data", "[", "t", "]", "=", "gaussian_filter", "(", "neighbor_prob", ",", "sigma", "=", "sigma", ")", "else", ":", "filtered_prob", "[", "s", "]", ".", "data", "[", "t", "]", "=", "neighbor_prob", "neighbor_prob", "[", ":", "]", "=", "0", "return", "filtered_prob" ]
Hourly probability of exceeding a threshold based on model values within a specified radius of a point.

Args:
    threshold (float): threshold value whose exceedance probability is calculated.
    radius (int): distance from point in number of grid points to include in neighborhood calculation.
    sigmas (array of ints): Radii for Gaussian filter used to smooth neighborhood probabilities.

Returns:
    list of EnsembleConsensus objects containing neighborhood probabilities for each forecast hour.
[ "Hourly", "probability", "of", "exceeding", "a", "threshold", "based", "on", "model", "values", "within", "a", "specified", "radius", "of", "a", "point", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L506-L546
train
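The core neighborhood step, isolated for a single binary field: convolve with a disk footprint, clip to get "any exceedance within the radius", then convolve again and normalize to a fraction. A sketch assuming the same scipy/scikit-image functions the module imports:

import numpy as np
from scipy.signal import fftconvolve
from skimage.morphology import disk

exceed = np.zeros((50, 60), dtype=np.uint8)
exceed[20:25, 30:35] = 1                # made-up exceedance points
weights = disk(5)                       # circular footprint, radius 5 grid cells
hit = fftconvolve(exceed, weights, mode="same")
maximized = (hit > 0.5).astype(float)   # any exceedance within the radius
frac = fftconvolve(maximized, weights, mode="same") / weights.sum()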
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleProducts.period_max_neighborhood_probability
def period_max_neighborhood_probability(self, threshold, radius, sigmas=None):
    """
    Calculates the neighborhood probability of exceeding a threshold at any time over the period loaded.

    Args:
        threshold (float): splitting threshold for probability calculations
        radius (int): distance from point in number of grid points to include in neighborhood calculation.
        sigmas (array of ints): Radii for Gaussian filter used to smooth neighborhood probabilities.

    Returns:
        list of EnsembleConsensus objects
    """
    if sigmas is None:
        sigmas = [0]
    weights = disk(radius)
    neighborhood_prob = np.zeros(self.data.shape[2:], dtype=np.float32)
    thresh_data = np.zeros(self.data.shape[2:], dtype=np.uint8)
    for m in range(self.data.shape[0]):
        thresh_data[self.data[m].max(axis=0) >= threshold] = 1
        maximized = fftconvolve(thresh_data, weights, mode="same")
        maximized[maximized > 1] = 1
        neighborhood_prob += fftconvolve(maximized, weights, mode="same")
        neighborhood_prob[neighborhood_prob < 1] = 0
    neighborhood_prob /= (self.data.shape[0] * float(weights.sum()))
    consensus_probs = []
    for sigma in sigmas:
        if sigma > 0:
            filtered_prob = gaussian_filter(neighborhood_prob, sigma=sigma)
        else:
            filtered_prob = neighborhood_prob
        ec = EnsembleConsensus(filtered_prob,
                               "neighbor_prob_{0:02d}-hour_r_{1:d}_s_{2:d}".format(self.data.shape[1],
                                                                                   radius, sigma),
                               self.ensemble_name, self.run_date,
                               self.variable + "_{0:0.2f}".format(float(threshold)),
                               self.start_date, self.end_date, "")
        consensus_probs.append(ec)
    return consensus_probs
python
def period_max_neighborhood_probability(self, threshold, radius, sigmas=None):
    """
    Calculates the neighborhood probability of exceeding a threshold at any time over the period loaded.

    Args:
        threshold (float): splitting threshold for probability calculations
        radius (int): distance from point in number of grid points to include in neighborhood calculation.
        sigmas (array of ints): Radii for Gaussian filter used to smooth neighborhood probabilities.

    Returns:
        list of EnsembleConsensus objects
    """
    if sigmas is None:
        sigmas = [0]
    weights = disk(radius)
    neighborhood_prob = np.zeros(self.data.shape[2:], dtype=np.float32)
    thresh_data = np.zeros(self.data.shape[2:], dtype=np.uint8)
    for m in range(self.data.shape[0]):
        thresh_data[self.data[m].max(axis=0) >= threshold] = 1
        maximized = fftconvolve(thresh_data, weights, mode="same")
        maximized[maximized > 1] = 1
        neighborhood_prob += fftconvolve(maximized, weights, mode="same")
        neighborhood_prob[neighborhood_prob < 1] = 0
    neighborhood_prob /= (self.data.shape[0] * float(weights.sum()))
    consensus_probs = []
    for sigma in sigmas:
        if sigma > 0:
            filtered_prob = gaussian_filter(neighborhood_prob, sigma=sigma)
        else:
            filtered_prob = neighborhood_prob
        ec = EnsembleConsensus(filtered_prob,
                               "neighbor_prob_{0:02d}-hour_r_{1:d}_s_{2:d}".format(self.data.shape[1],
                                                                                   radius, sigma),
                               self.ensemble_name, self.run_date,
                               self.variable + "_{0:0.2f}".format(float(threshold)),
                               self.start_date, self.end_date, "")
        consensus_probs.append(ec)
    return consensus_probs
[ "def", "period_max_neighborhood_probability", "(", "self", ",", "threshold", ",", "radius", ",", "sigmas", "=", "None", ")", ":", "if", "sigmas", "is", "None", ":", "sigmas", "=", "[", "0", "]", "weights", "=", "disk", "(", "radius", ")", "neighborhood_prob", "=", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", "[", "2", ":", "]", ",", "dtype", "=", "np", ".", "float32", ")", "thresh_data", "=", "np", ".", "zeros", "(", "self", ".", "data", ".", "shape", "[", "2", ":", "]", ",", "dtype", "=", "np", ".", "uint8", ")", "for", "m", "in", "range", "(", "self", ".", "data", ".", "shape", "[", "0", "]", ")", ":", "thresh_data", "[", "self", ".", "data", "[", "m", "]", ".", "max", "(", "axis", "=", "0", ")", ">=", "threshold", "]", "=", "1", "maximized", "=", "fftconvolve", "(", "thresh_data", ",", "weights", ",", "mode", "=", "\"same\"", ")", "maximized", "[", "maximized", ">", "1", "]", "=", "1", "neighborhood_prob", "+=", "fftconvolve", "(", "maximized", ",", "weights", ",", "mode", "=", "\"same\"", ")", "neighborhood_prob", "[", "neighborhood_prob", "<", "1", "]", "=", "0", "neighborhood_prob", "/=", "(", "self", ".", "data", ".", "shape", "[", "0", "]", "*", "float", "(", "weights", ".", "sum", "(", ")", ")", ")", "consensus_probs", "=", "[", "]", "for", "sigma", "in", "sigmas", ":", "if", "sigma", ">", "0", ":", "filtered_prob", "=", "gaussian_filter", "(", "neighborhood_prob", ",", "sigma", "=", "sigma", ")", "else", ":", "filtered_prob", "=", "neighborhood_prob", "ec", "=", "EnsembleConsensus", "(", "filtered_prob", ",", "\"neighbor_prob_{0:02d}-hour_r_{1:d}_s_{2:d}\"", ".", "format", "(", "self", ".", "data", ".", "shape", "[", "1", "]", ",", "radius", ",", "sigma", ")", ",", "self", ".", "ensemble_name", ",", "self", ".", "run_date", ",", "self", ".", "variable", "+", "\"_{0:0.2f}\"", ".", "format", "(", "float", "(", "threshold", ")", ")", ",", "self", ".", "start_date", ",", "self", ".", "end_date", ",", "\"\"", ")", "consensus_probs", ".", "append", "(", "ec", ")", "return", "consensus_probs" ]
Calculates the neighborhood probability of exceeding a threshold at any time over the period loaded.

Args:
    threshold (float): splitting threshold for probability calculations
    radius (int): distance from point in number of grid points to include in neighborhood calculation.
    sigmas (array of ints): Radii for Gaussian filter used to smooth neighborhood probabilities.

Returns:
    list of EnsembleConsensus objects
[ "Calculates", "the", "neighborhood", "probability", "of", "exceeding", "a", "threshold", "at", "any", "time", "over", "the", "period", "loaded", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L548-L585
train
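The only structural difference from the hourly version is collapsing the time axis first, so a single exceedance at any hour counts. In isolation:

import numpy as np

data = np.random.rand(10, 24, 50, 60)  # (member, t, y, x), made-up values
period_max = data.max(axis=1)          # (member, y, x): max over all hours
exceed_any_hour = period_max >= 0.99   # per-member binary exceedance field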
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
MachineLearningEnsembleProducts.load_data
def load_data(self, grid_method="gamma", num_samples=1000, condition_threshold=0.5, zero_inflate=False,
              percentile=None):
    """
    Reads the track forecasts and converts them to grid point values based on random sampling.

    Args:
        grid_method: "gamma" by default
        num_samples: Number of samples drawn from predicted pdf
        condition_threshold: Objects are not written to the grid if condition model probability is below this threshold.
        zero_inflate: Whether to sample zeros from a Bernoulli sampler based on the condition model probability
        percentile: If None, outputs the mean of the samples at each grid point, otherwise outputs the specified percentile from 0 to 100.

    Returns:
        0 if tracks are successfully sampled onto the grid. If no tracks are found, returns -1.
    """
    self.percentile = percentile
    if self.track_forecasts == {}:
        self.load_track_forecasts()
    if self.track_forecasts == {}:
        return -1
    if self.data is None:
        self.data = np.zeros((len(self.members), self.times.size,
                              self.grid_shape[0], self.grid_shape[1]), dtype=np.float32)
    else:
        self.data[:] = 0
    if grid_method in ["mean", "median", "samples"]:
        for m, member in enumerate(self.members):
            print("Sampling " + member)
            for track_forecast in self.track_forecasts[member]:
                times = track_forecast["properties"]["times"]
                for s, step in enumerate(track_forecast["features"]):
                    forecast_pdf = np.array(step['properties'][self.variable + "_" +
                                                               self.ensemble_name.replace(" ", "-")])
                    forecast_time = self.run_date + timedelta(hours=times[s])
                    t = np.where(self.times == forecast_time)[0][0]
                    mask = np.array(step['properties']["masks"], dtype=int)
                    i = np.array(step['properties']["i"], dtype=int)
                    i = i[mask == 1]
                    j = np.array(step['properties']["j"], dtype=int)
                    j = j[mask == 1]
                    if grid_method == "samples":
                        intensities = np.array(step["properties"]["timesteps"], dtype=float)[mask == 1]
                        rankings = np.argsort(intensities)
                        samples = np.random.choice(self.forecast_bins, size=intensities.size,
                                                   replace=True, p=forecast_pdf)
                        self.data[m, t, i[rankings], j[rankings]] = samples
                    else:
                        if grid_method == "mean":
                            forecast_value = np.sum(forecast_pdf * self.forecast_bins)
                        elif grid_method == "median":
                            forecast_cdf = np.cumsum(forecast_pdf)
                            forecast_value = self.forecast_bins[np.argmin(np.abs(forecast_cdf - 0.5))]
                        else:
                            forecast_value = 0
                        self.data[m, t, i, j] = forecast_value
    if grid_method in ["gamma"]:
        full_condition_name = "condition_" + self.condition_model_name.replace(" ", "-")
        dist_model_name = self.variable + "_" + self.ensemble_name.replace(" ", "-")
        for m, member in enumerate(self.members):
            for track_forecast in self.track_forecasts[member]:
                times = track_forecast["properties"]["times"]
                for s, step in enumerate(track_forecast["features"]):
                    forecast_params = step["properties"][dist_model_name]
                    if self.condition_model_name is not None:
                        condition = step["properties"][full_condition_name]
                    else:
                        condition = None
                    forecast_time = self.run_date + timedelta(hours=times[s])
                    if forecast_time in self.times:
                        t = np.where(self.times == forecast_time)[0][0]
                        mask = np.array(step["properties"]["masks"], dtype=int)
                        rankings = np.argsort(step["properties"]["timesteps"])[mask == 1]
                        i = np.array(step["properties"]["i"], dtype=int)[mask == 1][rankings]
                        j = np.array(step["properties"]["j"], dtype=int)[mask == 1][rankings]
                        if rankings.size > 0:
                            raw_samples = np.sort(gamma.rvs(forecast_params[0], loc=forecast_params[1],
                                                            scale=forecast_params[2],
                                                            size=(num_samples, rankings.size)), axis=1)
                            if zero_inflate:
                                raw_samples *= bernoulli.rvs(condition, size=(num_samples, rankings.size))
                            if percentile is None:
                                samples = raw_samples.mean(axis=0)
                            else:
                                samples = np.percentile(raw_samples, percentile, axis=0)
                            if condition is None or condition >= condition_threshold:
                                self.data[m, t, i, j] = samples
    return 0
python
def load_data(self, grid_method="gamma", num_samples=1000, condition_threshold=0.5, zero_inflate=False,
              percentile=None):
    """
    Reads the track forecasts and converts them to grid point values based on random sampling.

    Args:
        grid_method: "gamma" by default
        num_samples: Number of samples drawn from predicted pdf
        condition_threshold: Objects are not written to the grid if condition model probability is below this threshold.
        zero_inflate: Whether to sample zeros from a Bernoulli sampler based on the condition model probability
        percentile: If None, outputs the mean of the samples at each grid point, otherwise outputs the specified percentile from 0 to 100.

    Returns:
        0 if tracks are successfully sampled onto the grid. If no tracks are found, returns -1.
    """
    self.percentile = percentile
    if self.track_forecasts == {}:
        self.load_track_forecasts()
    if self.track_forecasts == {}:
        return -1
    if self.data is None:
        self.data = np.zeros((len(self.members), self.times.size,
                              self.grid_shape[0], self.grid_shape[1]), dtype=np.float32)
    else:
        self.data[:] = 0
    if grid_method in ["mean", "median", "samples"]:
        for m, member in enumerate(self.members):
            print("Sampling " + member)
            for track_forecast in self.track_forecasts[member]:
                times = track_forecast["properties"]["times"]
                for s, step in enumerate(track_forecast["features"]):
                    forecast_pdf = np.array(step['properties'][self.variable + "_" +
                                                               self.ensemble_name.replace(" ", "-")])
                    forecast_time = self.run_date + timedelta(hours=times[s])
                    t = np.where(self.times == forecast_time)[0][0]
                    mask = np.array(step['properties']["masks"], dtype=int)
                    i = np.array(step['properties']["i"], dtype=int)
                    i = i[mask == 1]
                    j = np.array(step['properties']["j"], dtype=int)
                    j = j[mask == 1]
                    if grid_method == "samples":
                        intensities = np.array(step["properties"]["timesteps"], dtype=float)[mask == 1]
                        rankings = np.argsort(intensities)
                        samples = np.random.choice(self.forecast_bins, size=intensities.size,
                                                   replace=True, p=forecast_pdf)
                        self.data[m, t, i[rankings], j[rankings]] = samples
                    else:
                        if grid_method == "mean":
                            forecast_value = np.sum(forecast_pdf * self.forecast_bins)
                        elif grid_method == "median":
                            forecast_cdf = np.cumsum(forecast_pdf)
                            forecast_value = self.forecast_bins[np.argmin(np.abs(forecast_cdf - 0.5))]
                        else:
                            forecast_value = 0
                        self.data[m, t, i, j] = forecast_value
    if grid_method in ["gamma"]:
        full_condition_name = "condition_" + self.condition_model_name.replace(" ", "-")
        dist_model_name = self.variable + "_" + self.ensemble_name.replace(" ", "-")
        for m, member in enumerate(self.members):
            for track_forecast in self.track_forecasts[member]:
                times = track_forecast["properties"]["times"]
                for s, step in enumerate(track_forecast["features"]):
                    forecast_params = step["properties"][dist_model_name]
                    if self.condition_model_name is not None:
                        condition = step["properties"][full_condition_name]
                    else:
                        condition = None
                    forecast_time = self.run_date + timedelta(hours=times[s])
                    if forecast_time in self.times:
                        t = np.where(self.times == forecast_time)[0][0]
                        mask = np.array(step["properties"]["masks"], dtype=int)
                        rankings = np.argsort(step["properties"]["timesteps"])[mask == 1]
                        i = np.array(step["properties"]["i"], dtype=int)[mask == 1][rankings]
                        j = np.array(step["properties"]["j"], dtype=int)[mask == 1][rankings]
                        if rankings.size > 0:
                            raw_samples = np.sort(gamma.rvs(forecast_params[0], loc=forecast_params[1],
                                                            scale=forecast_params[2],
                                                            size=(num_samples, rankings.size)), axis=1)
                            if zero_inflate:
                                raw_samples *= bernoulli.rvs(condition, size=(num_samples, rankings.size))
                            if percentile is None:
                                samples = raw_samples.mean(axis=0)
                            else:
                                samples = np.percentile(raw_samples, percentile, axis=0)
                            if condition is None or condition >= condition_threshold:
                                self.data[m, t, i, j] = samples
    return 0
[ "def", "load_data", "(", "self", ",", "grid_method", "=", "\"gamma\"", ",", "num_samples", "=", "1000", ",", "condition_threshold", "=", "0.5", ",", "zero_inflate", "=", "False", ",", "percentile", "=", "None", ")", ":", "self", ".", "percentile", "=", "percentile", "if", "self", ".", "track_forecasts", "==", "{", "}", ":", "self", ".", "load_track_forecasts", "(", ")", "if", "self", ".", "track_forecasts", "==", "{", "}", ":", "return", "-", "1", "if", "self", ".", "data", "is", "None", ":", "self", ".", "data", "=", "np", ".", "zeros", "(", "(", "len", "(", "self", ".", "members", ")", ",", "self", ".", "times", ".", "size", ",", "self", ".", "grid_shape", "[", "0", "]", ",", "self", ".", "grid_shape", "[", "1", "]", ")", ",", "dtype", "=", "np", ".", "float32", ")", "else", ":", "self", ".", "data", "[", ":", "]", "=", "0", "if", "grid_method", "in", "[", "\"mean\"", ",", "\"median\"", ",", "\"samples\"", "]", ":", "for", "m", ",", "member", "in", "enumerate", "(", "self", ".", "members", ")", ":", "print", "(", "\"Sampling \"", "+", "member", ")", "for", "track_forecast", "in", "self", ".", "track_forecasts", "[", "member", "]", ":", "times", "=", "track_forecast", "[", "\"properties\"", "]", "[", "\"times\"", "]", "for", "s", ",", "step", "in", "enumerate", "(", "track_forecast", "[", "\"features\"", "]", ")", ":", "forecast_pdf", "=", "np", ".", "array", "(", "step", "[", "'properties'", "]", "[", "self", ".", "variable", "+", "\"_\"", "+", "self", ".", "ensemble_name", ".", "replace", "(", "\" \"", ",", "\"-\"", ")", "]", ")", "forecast_time", "=", "self", ".", "run_date", "+", "timedelta", "(", "hours", "=", "times", "[", "s", "]", ")", "t", "=", "np", ".", "where", "(", "self", ".", "times", "==", "forecast_time", ")", "[", "0", "]", "[", "0", "]", "mask", "=", "np", ".", "array", "(", "step", "[", "'properties'", "]", "[", "\"masks\"", "]", ",", "dtype", "=", "int", ")", "i", "=", "np", ".", "array", "(", "step", "[", "'properties'", "]", "[", "\"i\"", "]", ",", "dtype", "=", "int", ")", "i", "=", "i", "[", "mask", "==", "1", "]", "j", "=", "np", ".", "array", "(", "step", "[", "'properties'", "]", "[", "\"j\"", "]", ",", "dtype", "=", "int", ")", "j", "=", "j", "[", "mask", "==", "1", "]", "if", "grid_method", "==", "\"samples\"", ":", "intensities", "=", "np", ".", "array", "(", "step", "[", "\"properties\"", "]", "[", "\"timesteps\"", "]", ",", "dtype", "=", "float", ")", "[", "mask", "==", "1", "]", "rankings", "=", "np", ".", "argsort", "(", "intensities", ")", "samples", "=", "np", ".", "random", ".", "choice", "(", "self", ".", "forecast_bins", ",", "size", "=", "intensities", ".", "size", ",", "replace", "=", "True", ",", "p", "=", "forecast_pdf", ")", "self", ".", "data", "[", "m", ",", "t", ",", "i", "[", "rankings", "]", ",", "j", "[", "rankings", "]", "]", "=", "samples", "else", ":", "if", "grid_method", "==", "\"mean\"", ":", "forecast_value", "=", "np", ".", "sum", "(", "forecast_pdf", "*", "self", ".", "forecast_bins", ")", "elif", "grid_method", "==", "\"median\"", ":", "forecast_cdf", "=", "np", ".", "cumsum", "(", "forecast_pdf", ")", "forecast_value", "=", "self", ".", "forecast_bins", "[", "np", ".", "argmin", "(", "np", ".", "abs", "(", "forecast_cdf", "-", "0.5", ")", ")", "]", "else", ":", "forecast_value", "=", "0", "self", ".", "data", "[", "m", ",", "t", ",", "i", ",", "j", "]", "=", "forecast_value", "if", "grid_method", "in", "[", "\"gamma\"", "]", ":", "full_condition_name", "=", "\"condition_\"", "+", "self", ".", "condition_model_name", ".", 
"replace", "(", "\" \"", ",", "\"-\"", ")", "dist_model_name", "=", "self", ".", "variable", "+", "\"_\"", "+", "self", ".", "ensemble_name", ".", "replace", "(", "\" \"", ",", "\"-\"", ")", "for", "m", ",", "member", "in", "enumerate", "(", "self", ".", "members", ")", ":", "for", "track_forecast", "in", "self", ".", "track_forecasts", "[", "member", "]", ":", "times", "=", "track_forecast", "[", "\"properties\"", "]", "[", "\"times\"", "]", "for", "s", ",", "step", "in", "enumerate", "(", "track_forecast", "[", "\"features\"", "]", ")", ":", "forecast_params", "=", "step", "[", "\"properties\"", "]", "[", "dist_model_name", "]", "if", "self", ".", "condition_model_name", "is", "not", "None", ":", "condition", "=", "step", "[", "\"properties\"", "]", "[", "full_condition_name", "]", "else", ":", "condition", "=", "None", "forecast_time", "=", "self", ".", "run_date", "+", "timedelta", "(", "hours", "=", "times", "[", "s", "]", ")", "if", "forecast_time", "in", "self", ".", "times", ":", "t", "=", "np", ".", "where", "(", "self", ".", "times", "==", "forecast_time", ")", "[", "0", "]", "[", "0", "]", "mask", "=", "np", ".", "array", "(", "step", "[", "\"properties\"", "]", "[", "\"masks\"", "]", ",", "dtype", "=", "int", ")", "rankings", "=", "np", ".", "argsort", "(", "step", "[", "\"properties\"", "]", "[", "\"timesteps\"", "]", ")", "[", "mask", "==", "1", "]", "i", "=", "np", ".", "array", "(", "step", "[", "\"properties\"", "]", "[", "\"i\"", "]", ",", "dtype", "=", "int", ")", "[", "mask", "==", "1", "]", "[", "rankings", "]", "j", "=", "np", ".", "array", "(", "step", "[", "\"properties\"", "]", "[", "\"j\"", "]", ",", "dtype", "=", "int", ")", "[", "mask", "==", "1", "]", "[", "rankings", "]", "if", "rankings", ".", "size", ">", "0", ":", "raw_samples", "=", "np", ".", "sort", "(", "gamma", ".", "rvs", "(", "forecast_params", "[", "0", "]", ",", "loc", "=", "forecast_params", "[", "1", "]", ",", "scale", "=", "forecast_params", "[", "2", "]", ",", "size", "=", "(", "num_samples", ",", "rankings", ".", "size", ")", ")", ",", "axis", "=", "1", ")", "if", "zero_inflate", ":", "raw_samples", "*=", "bernoulli", ".", "rvs", "(", "condition", ",", "size", "=", "(", "num_samples", ",", "rankings", ".", "size", ")", ")", "if", "percentile", "is", "None", ":", "samples", "=", "raw_samples", ".", "mean", "(", "axis", "=", "0", ")", "else", ":", "samples", "=", "np", ".", "percentile", "(", "raw_samples", ",", "percentile", ",", "axis", "=", "0", ")", "if", "condition", "is", "None", "or", "condition", ">=", "condition_threshold", ":", "self", ".", "data", "[", "m", ",", "t", ",", "i", ",", "j", "]", "=", "samples", "return", "0" ]
Reads the track forecasts and converts them to grid point values based on random sampling.

Args:
    grid_method: "gamma" by default
    num_samples: Number of samples drawn from predicted pdf
    condition_threshold: Objects are not written to the grid if condition model probability is below this threshold.
    zero_inflate: Whether to sample zeros from a Bernoulli sampler based on the condition model probability
    percentile: If None, outputs the mean of the samples at each grid point, otherwise outputs the specified percentile from 0 to 100.

Returns:
    0 if tracks are successfully sampled onto the grid. If no tracks are found, returns -1.
[ "Reads", "the", "track", "forecasts", "and", "converts", "them", "to", "grid", "point", "values", "based", "on", "random", "sampling", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L631-L721
train
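The "gamma" grid method's sampling core, reduced to one object with made-up distribution parameters (shape, loc, scale); this mirrors the scipy.stats calls above but is only a sketch:

import numpy as np
from scipy.stats import gamma, bernoulli

num_samples, num_points = 1000, 8
raw = np.sort(gamma.rvs(2.0, loc=0.0, scale=10.0,
                        size=(num_samples, num_points)), axis=1)
raw = raw * bernoulli.rvs(0.7, size=(num_samples, num_points))  # zero inflation
grid_values = raw.mean(axis=0)  # or np.percentile(raw, 90, axis=0)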
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
MachineLearningEnsembleProducts.write_grib2
def write_grib2(self, path):
    """
    Writes data to grib2 file. Currently, grib codes are set by hand to hail.

    Args:
        path: Path to directory containing grib2 files.
    """
    if self.percentile is None:
        var_type = "mean"
    else:
        var_type = "p{0:02d}".format(self.percentile)
    lscale = 1e6
    grib_id_start = [7, 0, 14, 14, 2]
    gdsinfo = np.array([0, np.product(self.data.shape[-2:]), 0, 0, 30], dtype=np.int32)
    lon_0 = self.proj_dict["lon_0"]
    sw_lon = self.grid_dict["sw_lon"]
    if lon_0 < 0:
        lon_0 += 360
    if sw_lon < 0:
        sw_lon += 360
    gdtmp1 = np.array([7, 1, self.proj_dict['a'], 1, self.proj_dict['a'], 1, self.proj_dict['b'],
                       self.data.shape[-2], self.data.shape[-1], self.grid_dict["sw_lat"] * lscale,
                       sw_lon * lscale, 0, self.proj_dict["lat_0"] * lscale,
                       lon_0 * lscale, self.grid_dict["dx"] * 1e3, self.grid_dict["dy"] * 1e3,
                       0, self.proj_dict["lat_1"] * lscale, self.proj_dict["lat_2"] * lscale,
                       0, 0], dtype=np.int32)
    pdtmp1 = np.array([1, 31, 2, 0, 116, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 192, 0, self.data.shape[0]],
                      dtype=np.int32)
    for m, member in enumerate(self.members):
        pdtmp1[-2] = m
        for t, time in enumerate(self.times):
            time_list = list(time.utctimetuple()[0:6])
            grbe = Grib2Encode(0, np.array(grib_id_start + time_list + [2, 1], dtype=np.int32))
            grbe.addgrid(gdsinfo, gdtmp1)
            pdtmp1[8] = (time.to_pydatetime() - self.run_date).total_seconds() / 3600.0
            drtmp1 = np.array([0, 0, 4, 8, 0], dtype=np.int32)
            data = self.data[m, t].astype(np.float32) / 1000.0
            masked_data = np.ma.array(data, mask=data <= 0)
            grbe.addfield(1, pdtmp1, 0, drtmp1, masked_data)
            grbe.end()
            filename = path + "{0}_{1}_mlhail_{2}_{3}.grib2".format(self.ensemble_name.replace(" ", "-"),
                                                                    member, var_type,
                                                                    time.to_datetime().strftime("%Y%m%d%H%M"))
            print("Writing to " + filename)
            grib_file = open(filename, "wb")
            grib_file.write(grbe.msg)
            grib_file.close()
    return
python
def write_grib2(self, path):
    """
    Writes data to grib2 file. Currently, grib codes are set by hand to hail.

    Args:
        path: Path to directory containing grib2 files.
    """
    if self.percentile is None:
        var_type = "mean"
    else:
        var_type = "p{0:02d}".format(self.percentile)
    lscale = 1e6
    grib_id_start = [7, 0, 14, 14, 2]
    gdsinfo = np.array([0, np.product(self.data.shape[-2:]), 0, 0, 30], dtype=np.int32)
    lon_0 = self.proj_dict["lon_0"]
    sw_lon = self.grid_dict["sw_lon"]
    if lon_0 < 0:
        lon_0 += 360
    if sw_lon < 0:
        sw_lon += 360
    gdtmp1 = np.array([7, 1, self.proj_dict['a'], 1, self.proj_dict['a'], 1, self.proj_dict['b'],
                       self.data.shape[-2], self.data.shape[-1], self.grid_dict["sw_lat"] * lscale,
                       sw_lon * lscale, 0, self.proj_dict["lat_0"] * lscale,
                       lon_0 * lscale, self.grid_dict["dx"] * 1e3, self.grid_dict["dy"] * 1e3,
                       0, self.proj_dict["lat_1"] * lscale, self.proj_dict["lat_2"] * lscale,
                       0, 0], dtype=np.int32)
    pdtmp1 = np.array([1, 31, 2, 0, 116, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 192, 0, self.data.shape[0]],
                      dtype=np.int32)
    for m, member in enumerate(self.members):
        pdtmp1[-2] = m
        for t, time in enumerate(self.times):
            time_list = list(time.utctimetuple()[0:6])
            grbe = Grib2Encode(0, np.array(grib_id_start + time_list + [2, 1], dtype=np.int32))
            grbe.addgrid(gdsinfo, gdtmp1)
            pdtmp1[8] = (time.to_pydatetime() - self.run_date).total_seconds() / 3600.0
            drtmp1 = np.array([0, 0, 4, 8, 0], dtype=np.int32)
            data = self.data[m, t].astype(np.float32) / 1000.0
            masked_data = np.ma.array(data, mask=data <= 0)
            grbe.addfield(1, pdtmp1, 0, drtmp1, masked_data)
            grbe.end()
            filename = path + "{0}_{1}_mlhail_{2}_{3}.grib2".format(self.ensemble_name.replace(" ", "-"),
                                                                    member, var_type,
                                                                    time.to_datetime().strftime("%Y%m%d%H%M"))
            print("Writing to " + filename)
            grib_file = open(filename, "wb")
            grib_file.write(grbe.msg)
            grib_file.close()
    return
[ "def", "write_grib2", "(", "self", ",", "path", ")", ":", "if", "self", ".", "percentile", "is", "None", ":", "var_type", "=", "\"mean\"", "else", ":", "var_type", "=", "\"p{0:02d}\"", ".", "format", "(", "self", ".", "percentile", ")", "lscale", "=", "1e6", "grib_id_start", "=", "[", "7", ",", "0", ",", "14", ",", "14", ",", "2", "]", "gdsinfo", "=", "np", ".", "array", "(", "[", "0", ",", "np", ".", "product", "(", "self", ".", "data", ".", "shape", "[", "-", "2", ":", "]", ")", ",", "0", ",", "0", ",", "30", "]", ",", "dtype", "=", "np", ".", "int32", ")", "lon_0", "=", "self", ".", "proj_dict", "[", "\"lon_0\"", "]", "sw_lon", "=", "self", ".", "grid_dict", "[", "\"sw_lon\"", "]", "if", "lon_0", "<", "0", ":", "lon_0", "+=", "360", "if", "sw_lon", "<", "0", ":", "sw_lon", "+=", "360", "gdtmp1", "=", "np", ".", "array", "(", "[", "7", ",", "1", ",", "self", ".", "proj_dict", "[", "'a'", "]", ",", "1", ",", "self", ".", "proj_dict", "[", "'a'", "]", ",", "1", ",", "self", ".", "proj_dict", "[", "'b'", "]", ",", "self", ".", "data", ".", "shape", "[", "-", "2", "]", ",", "self", ".", "data", ".", "shape", "[", "-", "1", "]", ",", "self", ".", "grid_dict", "[", "\"sw_lat\"", "]", "*", "lscale", ",", "sw_lon", "*", "lscale", ",", "0", ",", "self", ".", "proj_dict", "[", "\"lat_0\"", "]", "*", "lscale", ",", "lon_0", "*", "lscale", ",", "self", ".", "grid_dict", "[", "\"dx\"", "]", "*", "1e3", ",", "self", ".", "grid_dict", "[", "\"dy\"", "]", "*", "1e3", ",", "0", ",", "self", ".", "proj_dict", "[", "\"lat_1\"", "]", "*", "lscale", ",", "self", ".", "proj_dict", "[", "\"lat_2\"", "]", "*", "lscale", ",", "0", ",", "0", "]", ",", "dtype", "=", "np", ".", "int32", ")", "pdtmp1", "=", "np", ".", "array", "(", "[", "1", ",", "31", ",", "2", ",", "0", ",", "116", ",", "0", ",", "0", ",", "1", ",", "0", ",", "1", ",", "1", ",", "1", ",", "1", ",", "1", ",", "1", ",", "192", ",", "0", ",", "self", ".", "data", ".", "shape", "[", "0", "]", "]", ",", "dtype", "=", "np", ".", "int32", ")", "for", "m", ",", "member", "in", "enumerate", "(", "self", ".", "members", ")", ":", "pdtmp1", "[", "-", "2", "]", "=", "m", "for", "t", ",", "time", "in", "enumerate", "(", "self", ".", "times", ")", ":", "time_list", "=", "list", "(", "time", ".", "utctimetuple", "(", ")", "[", "0", ":", "6", "]", ")", "grbe", "=", "Grib2Encode", "(", "0", ",", "np", ".", "array", "(", "grib_id_start", "+", "time_list", "+", "[", "2", ",", "1", "]", ",", "dtype", "=", "np", ".", "int32", ")", ")", "grbe", ".", "addgrid", "(", "gdsinfo", ",", "gdtmp1", ")", "pdtmp1", "[", "8", "]", "=", "(", "time", ".", "to_pydatetime", "(", ")", "-", "self", ".", "run_date", ")", ".", "total_seconds", "(", ")", "/", "3600.0", "drtmp1", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "4", ",", "8", ",", "0", "]", ",", "dtype", "=", "np", ".", "int32", ")", "data", "=", "self", ".", "data", "[", "m", ",", "t", "]", ".", "astype", "(", "np", ".", "float32", ")", "/", "1000.0", "masked_data", "=", "np", ".", "ma", ".", "array", "(", "data", ",", "mask", "=", "data", "<=", "0", ")", "grbe", ".", "addfield", "(", "1", ",", "pdtmp1", ",", "0", ",", "drtmp1", ",", "masked_data", ")", "grbe", ".", "end", "(", ")", "filename", "=", "path", "+", "\"{0}_{1}_mlhail_{2}_{3}.grib2\"", ".", "format", "(", "self", ".", "ensemble_name", ".", "replace", "(", "\" \"", ",", "\"-\"", ")", ",", "member", ",", "var_type", ",", "time", ".", "to_datetime", "(", ")", ".", "strftime", "(", "\"%Y%m%d%H%M\"", ")", ")", "print", "(", "\"Writing to \"", "+", 
"filename", ")", "grib_file", "=", "open", "(", "filename", ",", "\"wb\"", ")", "grib_file", ".", "write", "(", "grbe", ".", "msg", ")", "grib_file", ".", "close", "(", ")", "return" ]
Writes data to grib2 file. Currently, grib codes are set by hand to hail.

Args:
    path: Path to directory containing grib2 files.
[ "Writes", "data", "to", "grib2", "file", ".", "Currently", "grib", "codes", "are", "set", "by", "hand", "to", "hail", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L723-L773
train
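The output name packs ensemble, member, product type, and valid time into one pattern; a standalone illustration with made-up values:

ensemble_name, member, var_type = "NCAR Ensemble", "mem01", "p90"
valid_time = "201605271200"
filename = "/out/{0}_{1}_mlhail_{2}_{3}.grib2".format(
    ensemble_name.replace(" ", "-"), member, var_type, valid_time)
# -> /out/NCAR-Ensemble_mem01_mlhail_p90_201605271200.grib2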
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleConsensus.init_file
def init_file(self, filename, time_units="seconds since 1970-01-01T00:00"):
    """
    Initializes netCDF file for writing.

    Args:
        filename: Name of the netCDF file
        time_units: Units for the time variable in format "<time> since <date string>"

    Returns:
        Dataset object
    """
    if os.access(filename, os.R_OK):
        out_data = Dataset(filename, "r+")
    else:
        out_data = Dataset(filename, "w")
        if len(self.data.shape) == 2:
            for d, dim in enumerate(["y", "x"]):
                out_data.createDimension(dim, self.data.shape[d])
        else:
            for d, dim in enumerate(["y", "x"]):
                out_data.createDimension(dim, self.data.shape[d + 1])
        out_data.createDimension("time", len(self.times))
        time_var = out_data.createVariable("time", "i8", ("time",))
        time_var[:] = date2num(self.times.to_pydatetime(), time_units)
        time_var.units = time_units
        out_data.Conventions = "CF-1.6"
    return out_data
python
def init_file(self, filename, time_units="seconds since 1970-01-01T00:00"):
    """
    Initializes netCDF file for writing.

    Args:
        filename: Name of the netCDF file
        time_units: Units for the time variable in format "<time> since <date string>"

    Returns:
        Dataset object
    """
    if os.access(filename, os.R_OK):
        out_data = Dataset(filename, "r+")
    else:
        out_data = Dataset(filename, "w")
        if len(self.data.shape) == 2:
            for d, dim in enumerate(["y", "x"]):
                out_data.createDimension(dim, self.data.shape[d])
        else:
            for d, dim in enumerate(["y", "x"]):
                out_data.createDimension(dim, self.data.shape[d + 1])
        out_data.createDimension("time", len(self.times))
        time_var = out_data.createVariable("time", "i8", ("time",))
        time_var[:] = date2num(self.times.to_pydatetime(), time_units)
        time_var.units = time_units
        out_data.Conventions = "CF-1.6"
    return out_data
[ "def", "init_file", "(", "self", ",", "filename", ",", "time_units", "=", "\"seconds since 1970-01-01T00:00\"", ")", ":", "if", "os", ".", "access", "(", "filename", ",", "os", ".", "R_OK", ")", ":", "out_data", "=", "Dataset", "(", "filename", ",", "\"r+\"", ")", "else", ":", "out_data", "=", "Dataset", "(", "filename", ",", "\"w\"", ")", "if", "len", "(", "self", ".", "data", ".", "shape", ")", "==", "2", ":", "for", "d", ",", "dim", "in", "enumerate", "(", "[", "\"y\"", ",", "\"x\"", "]", ")", ":", "out_data", ".", "createDimension", "(", "dim", ",", "self", ".", "data", ".", "shape", "[", "d", "]", ")", "else", ":", "for", "d", ",", "dim", "in", "enumerate", "(", "[", "\"y\"", ",", "\"x\"", "]", ")", ":", "out_data", ".", "createDimension", "(", "dim", ",", "self", ".", "data", ".", "shape", "[", "d", "+", "1", "]", ")", "out_data", ".", "createDimension", "(", "\"time\"", ",", "len", "(", "self", ".", "times", ")", ")", "time_var", "=", "out_data", ".", "createVariable", "(", "\"time\"", ",", "\"i8\"", ",", "(", "\"time\"", ",", ")", ")", "time_var", "[", ":", "]", "=", "date2num", "(", "self", ".", "times", ".", "to_pydatetime", "(", ")", ",", "time_units", ")", "time_var", ".", "units", "=", "time_units", "out_data", ".", "Conventions", "=", "\"CF-1.6\"", "return", "out_data" ]
Initializes netCDF file for writing.

Args:
    filename: Name of the netCDF file
    time_units: Units for the time variable in format "<time> since <date string>"

Returns:
    Dataset object
[ "Initializes", "netCDF", "file", "for", "writing" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L793-L818
train
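The open-or-create pattern above, extracted into a self-contained netCDF4 sketch (dimension sizes are placeholders):

import os
from netCDF4 import Dataset

def open_or_create(filename, ny, nx):
    if os.access(filename, os.R_OK):
        return Dataset(filename, "r+")  # reuse dimensions already defined
    out_data = Dataset(filename, "w")
    out_data.createDimension("y", ny)
    out_data.createDimension("x", nx)
    return out_data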
djgagne/hagelslag
hagelslag/processing/EnsembleProducts.py
EnsembleConsensus.write_to_file
def write_to_file(self, out_data):
    """
    Outputs data to a netCDF file. If the file does not exist, it will be created. Otherwise,
    additional variables are appended to the current file.

    Args:
        out_data: netCDF Dataset object to write to (as returned by init_file)
    """
    full_var_name = self.consensus_type + "_" + self.variable
    if "-hour" in self.consensus_type:
        if full_var_name not in out_data.variables.keys():
            var = out_data.createVariable(full_var_name, "f4", ("y", "x"),
                                          zlib=True, least_significant_digit=3, shuffle=True)
        else:
            var = out_data.variables[full_var_name]
        var.coordinates = "y x"
    else:
        if full_var_name not in out_data.variables.keys():
            var = out_data.createVariable(full_var_name, "f4", ("time", "y", "x"),
                                          zlib=True, least_significant_digit=3, shuffle=True)
        else:
            var = out_data.variables[full_var_name]
        var.coordinates = "time y x"
    var[:] = self.data
    var.units = self.units
    var.long_name = self.consensus_type + "_" + self.variable
    return
python
def write_to_file(self, out_data):
    """
    Outputs data to a netCDF file. If the file does not exist, it will be created. Otherwise,
    additional variables are appended to the current file.

    Args:
        out_data: netCDF Dataset object to write to (as returned by init_file)
    """
    full_var_name = self.consensus_type + "_" + self.variable
    if "-hour" in self.consensus_type:
        if full_var_name not in out_data.variables.keys():
            var = out_data.createVariable(full_var_name, "f4", ("y", "x"),
                                          zlib=True, least_significant_digit=3, shuffle=True)
        else:
            var = out_data.variables[full_var_name]
        var.coordinates = "y x"
    else:
        if full_var_name not in out_data.variables.keys():
            var = out_data.createVariable(full_var_name, "f4", ("time", "y", "x"),
                                          zlib=True, least_significant_digit=3, shuffle=True)
        else:
            var = out_data.variables[full_var_name]
        var.coordinates = "time y x"
    var[:] = self.data
    var.units = self.units
    var.long_name = self.consensus_type + "_" + self.variable
    return
[ "def", "write_to_file", "(", "self", ",", "out_data", ")", ":", "full_var_name", "=", "self", ".", "consensus_type", "+", "\"_\"", "+", "self", ".", "variable", "if", "\"-hour\"", "in", "self", ".", "consensus_type", ":", "if", "full_var_name", "not", "in", "out_data", ".", "variables", ".", "keys", "(", ")", ":", "var", "=", "out_data", ".", "createVariable", "(", "full_var_name", ",", "\"f4\"", ",", "(", "\"y\"", ",", "\"x\"", ")", ",", "zlib", "=", "True", ",", "least_significant_digit", "=", "3", ",", "shuffle", "=", "True", ")", "else", ":", "var", "=", "out_data", ".", "variables", "[", "full_var_name", "]", "var", ".", "coordinates", "=", "\"y x\"", "else", ":", "if", "full_var_name", "not", "in", "out_data", ".", "variables", ".", "keys", "(", ")", ":", "var", "=", "out_data", ".", "createVariable", "(", "full_var_name", ",", "\"f4\"", ",", "(", "\"time\"", ",", "\"y\"", ",", "\"x\"", ")", ",", "zlib", "=", "True", ",", "least_significant_digit", "=", "3", ",", "shuffle", "=", "True", ")", "else", ":", "var", "=", "out_data", ".", "variables", "[", "full_var_name", "]", "var", ".", "coordinates", "=", "\"time y x\"", "var", "[", ":", "]", "=", "self", ".", "data", "var", ".", "units", "=", "self", ".", "units", "var", ".", "long_name", "=", "self", ".", "consensus_type", "+", "\"_\"", "+", "self", ".", "variable", "return" ]
Outputs data to a netCDF file. If the file does not exist, it will be created. Otherwise, additional variables are appended to the current file.

Args:
    out_data: netCDF Dataset object to write to (as returned by init_file)
[ "Outputs", "data", "to", "a", "netCDF", "file", ".", "If", "the", "file", "does", "not", "exist", "it", "will", "be", "created", ".", "Otherwise", "additional", "variables", "are", "appended", "to", "the", "current", "file" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/EnsembleProducts.py#L820-L846
train
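The get-or-create variable pattern used above, in isolation (variable name and dims are placeholders):

def get_or_create_var(out_data, name, dims):
    # out_data: an open netCDF4 Dataset; returns an existing or new variable
    if name not in out_data.variables.keys():
        return out_data.createVariable(name, "f4", dims, zlib=True,
                                       least_significant_digit=3, shuffle=True)
    return out_data.variables[name]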
nion-software/nionswift
nion/swift/Workspace.py
Workspace.restore
def restore(self, workspace_uuid):
    """
    Restore the workspace to the given workspace_uuid.

    If workspace_uuid is None then create a new workspace and use it.
    """
    workspace = next((workspace for workspace in self.document_model.workspaces
                      if workspace.uuid == workspace_uuid), None)
    if workspace is None:
        workspace = self.new_workspace()
    self._change_workspace(workspace)
python
def restore(self, workspace_uuid):
    """
    Restore the workspace to the given workspace_uuid.

    If workspace_uuid is None then create a new workspace and use it.
    """
    workspace = next((workspace for workspace in self.document_model.workspaces
                      if workspace.uuid == workspace_uuid), None)
    if workspace is None:
        workspace = self.new_workspace()
    self._change_workspace(workspace)
[ "def", "restore", "(", "self", ",", "workspace_uuid", ")", ":", "workspace", "=", "next", "(", "(", "workspace", "for", "workspace", "in", "self", ".", "document_model", ".", "workspaces", "if", "workspace", ".", "uuid", "==", "workspace_uuid", ")", ",", "None", ")", "if", "workspace", "is", "None", ":", "workspace", "=", "self", ".", "new_workspace", "(", ")", "self", ".", "_change_workspace", "(", "workspace", ")" ]
Restore the workspace to the given workspace_uuid. If workspace_uuid is None then create a new workspace and use it.
[ "Restore", "the", "workspace", "to", "the", "given", "workspace_uuid", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Workspace.py#L338-L347
train
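restore() relies on next() with a default to avoid StopIteration when no workspace matches. The idiom in isolation:

def find_first(items, predicate, default=None):
    return next((item for item in items if predicate(item)), default)

print(find_first([1, 4, 9], lambda n: n > 3))  # 4
print(find_first([1, 2], lambda n: n > 3))     # None instead of StopIteration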
nion-software/nionswift
nion/swift/Workspace.py
Workspace.new_workspace
def new_workspace(self, name=None, layout=None, workspace_id=None, index=None) -> WorkspaceLayout.WorkspaceLayout:
    """ Create a new workspace, insert into document_model, and return it. """
    workspace = WorkspaceLayout.WorkspaceLayout()
    self.document_model.insert_workspace(index if index is not None else len(self.document_model.workspaces),
                                         workspace)
    d = create_image_desc()
    d["selected"] = True
    workspace.layout = layout if layout is not None else d
    workspace.name = name if name is not None else _("Workspace")
    if workspace_id:
        workspace.workspace_id = workspace_id
    return workspace
python
def new_workspace(self, name=None, layout=None, workspace_id=None, index=None) -> WorkspaceLayout.WorkspaceLayout:
    """ Create a new workspace, insert into document_model, and return it. """
    workspace = WorkspaceLayout.WorkspaceLayout()
    self.document_model.insert_workspace(index if index is not None else len(self.document_model.workspaces),
                                         workspace)
    d = create_image_desc()
    d["selected"] = True
    workspace.layout = layout if layout is not None else d
    workspace.name = name if name is not None else _("Workspace")
    if workspace_id:
        workspace.workspace_id = workspace_id
    return workspace
[ "def", "new_workspace", "(", "self", ",", "name", "=", "None", ",", "layout", "=", "None", ",", "workspace_id", "=", "None", ",", "index", "=", "None", ")", "->", "WorkspaceLayout", ".", "WorkspaceLayout", ":", "workspace", "=", "WorkspaceLayout", ".", "WorkspaceLayout", "(", ")", "self", ".", "document_model", ".", "insert_workspace", "(", "index", "if", "index", "is", "not", "None", "else", "len", "(", "self", ".", "document_model", ".", "workspaces", ")", ",", "workspace", ")", "d", "=", "create_image_desc", "(", ")", "d", "[", "\"selected\"", "]", "=", "True", "workspace", ".", "layout", "=", "layout", "if", "layout", "is", "not", "None", "else", "d", "workspace", ".", "name", "=", "name", "if", "name", "is", "not", "None", "else", "_", "(", "\"Workspace\"", ")", "if", "workspace_id", ":", "workspace", ".", "workspace_id", "=", "workspace_id", "return", "workspace" ]
Create a new workspace, insert into document_model, and return it.
[ "Create", "a", "new", "workspace", "insert", "into", "document_model", "and", "return", "it", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Workspace.py#L363-L373
train
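The insert-position default (append when index is None) as a standalone idiom:

def insert_at(seq, item, index=None):
    seq.insert(index if index is not None else len(seq), item)

items = ["a", "c"]
insert_at(items, "b", 1)  # ["a", "b", "c"]
insert_at(items, "d")     # appended: ["a", "b", "c", "d"]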
nion-software/nionswift
nion/swift/Workspace.py
Workspace.ensure_workspace
def ensure_workspace(self, name, layout, workspace_id):
    """Looks for a workspace with workspace_id.

    If none is found, create a new one, add it, and change to it.
    """
    workspace = next((workspace for workspace in self.document_model.workspaces
                      if workspace.workspace_id == workspace_id), None)
    if not workspace:
        workspace = self.new_workspace(name=name, layout=layout, workspace_id=workspace_id)
    self._change_workspace(workspace)
python
def ensure_workspace(self, name, layout, workspace_id): """Looks for a workspace with workspace_id. If none is found, create a new one, add it, and change to it. """ workspace = next((workspace for workspace in self.document_model.workspaces if workspace.workspace_id == workspace_id), None) if not workspace: workspace = self.new_workspace(name=name, layout=layout, workspace_id=workspace_id) self._change_workspace(workspace)
[ "def", "ensure_workspace", "(", "self", ",", "name", ",", "layout", ",", "workspace_id", ")", ":", "workspace", "=", "next", "(", "(", "workspace", "for", "workspace", "in", "self", ".", "document_model", ".", "workspaces", "if", "workspace", ".", "workspace_id", "==", "workspace_id", ")", ",", "None", ")", "if", "not", "workspace", ":", "workspace", "=", "self", ".", "new_workspace", "(", "name", "=", "name", ",", "layout", "=", "layout", ",", "workspace_id", "=", "workspace_id", ")", "self", ".", "_change_workspace", "(", "workspace", ")" ]
Looks for a workspace with workspace_id. If none is found, create a new one, add it, and change to it.
[ "Looks", "for", "a", "workspace", "with", "workspace_id", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Workspace.py#L375-L383
train
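The lookup-or-create pattern used by ensure_workspace stands on its own; below is a minimal, self-contained sketch of the same idiom with plain objects (Item and items are hypothetical names for illustration, not part of nionswift):

    # Find an object by id, creating and registering it when missing --
    # the same shape as ensure_workspace's next(..., None) lookup.
    class Item:
        def __init__(self, item_id):
            self.item_id = item_id

    items = [Item("a"), Item("b")]
    wanted_id = "c"
    item = next((i for i in items if i.item_id == wanted_id), None)
    if not item:
        item = Item(wanted_id)
        items.append(item)
    print(item.item_id)  # "c"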
nion-software/nionswift
nion/swift/Workspace.py
Workspace.create_workspace
def create_workspace(self) -> None: """ Pose a dialog to name and create a workspace. """ def create_clicked(text): if text: command = Workspace.CreateWorkspaceCommand(self, text) command.perform() self.document_controller.push_undo_command(command) self.pose_get_string_message_box(caption=_("Enter a name for the workspace"), text=_("Workspace"), accepted_fn=create_clicked, accepted_text=_("Create"), message_box_id="create_workspace")
python
def create_workspace(self) -> None: """ Pose a dialog to name and create a workspace. """ def create_clicked(text): if text: command = Workspace.CreateWorkspaceCommand(self, text) command.perform() self.document_controller.push_undo_command(command) self.pose_get_string_message_box(caption=_("Enter a name for the workspace"), text=_("Workspace"), accepted_fn=create_clicked, accepted_text=_("Create"), message_box_id="create_workspace")
[ "def", "create_workspace", "(", "self", ")", "->", "None", ":", "def", "create_clicked", "(", "text", ")", ":", "if", "text", ":", "command", "=", "Workspace", ".", "CreateWorkspaceCommand", "(", "self", ",", "text", ")", "command", ".", "perform", "(", ")", "self", ".", "document_controller", ".", "push_undo_command", "(", "command", ")", "self", ".", "pose_get_string_message_box", "(", "caption", "=", "_", "(", "\"Enter a name for the workspace\"", ")", ",", "text", "=", "_", "(", "\"Workspace\"", ")", ",", "accepted_fn", "=", "create_clicked", ",", "accepted_text", "=", "_", "(", "\"Create\"", ")", ",", "message_box_id", "=", "\"create_workspace\"", ")" ]
Pose a dialog to name and create a workspace.
[ "Pose", "a", "dialog", "to", "name", "and", "create", "a", "workspace", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Workspace.py#L549-L560
train
nion-software/nionswift
nion/swift/Workspace.py
Workspace.rename_workspace
def rename_workspace(self) -> None: """ Pose a dialog to rename the workspace. """ def rename_clicked(text): if len(text) > 0: command = Workspace.RenameWorkspaceCommand(self, text) command.perform() self.document_controller.push_undo_command(command) self.pose_get_string_message_box(caption=_("Enter new name for workspace"), text=self.__workspace.name, accepted_fn=rename_clicked, accepted_text=_("Rename"), message_box_id="rename_workspace")
python
def rename_workspace(self) -> None: """ Pose a dialog to rename the workspace. """ def rename_clicked(text): if len(text) > 0: command = Workspace.RenameWorkspaceCommand(self, text) command.perform() self.document_controller.push_undo_command(command) self.pose_get_string_message_box(caption=_("Enter new name for workspace"), text=self.__workspace.name, accepted_fn=rename_clicked, accepted_text=_("Rename"), message_box_id="rename_workspace")
[ "def", "rename_workspace", "(", "self", ")", "->", "None", ":", "def", "rename_clicked", "(", "text", ")", ":", "if", "len", "(", "text", ")", ">", "0", ":", "command", "=", "Workspace", ".", "RenameWorkspaceCommand", "(", "self", ",", "text", ")", "command", ".", "perform", "(", ")", "self", ".", "document_controller", ".", "push_undo_command", "(", "command", ")", "self", ".", "pose_get_string_message_box", "(", "caption", "=", "_", "(", "\"Enter new name for workspace\"", ")", ",", "text", "=", "self", ".", "__workspace", ".", "name", ",", "accepted_fn", "=", "rename_clicked", ",", "accepted_text", "=", "_", "(", "\"Rename\"", ")", ",", "message_box_id", "=", "\"rename_workspace\"", ")" ]
Pose a dialog to rename the workspace.
[ "Pose", "a", "dialog", "to", "rename", "the", "workspace", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Workspace.py#L562-L573
train
nion-software/nionswift
nion/swift/Workspace.py
Workspace.remove_workspace
def remove_workspace(self): """ Pose a dialog to confirm removal then remove workspace. """ def confirm_clicked(): if len(self.document_model.workspaces) > 1: command = Workspace.RemoveWorkspaceCommand(self) command.perform() self.document_controller.push_undo_command(command) caption = _("Remove workspace named '{0}'?").format(self.__workspace.name) self.pose_confirmation_message_box(caption, confirm_clicked, accepted_text=_("Remove Workspace"), message_box_id="remove_workspace")
python
def remove_workspace(self): """ Pose a dialog to confirm removal then remove workspace. """ def confirm_clicked(): if len(self.document_model.workspaces) > 1: command = Workspace.RemoveWorkspaceCommand(self) command.perform() self.document_controller.push_undo_command(command) caption = _("Remove workspace named '{0}'?").format(self.__workspace.name) self.pose_confirmation_message_box(caption, confirm_clicked, accepted_text=_("Remove Workspace"), message_box_id="remove_workspace")
[ "def", "remove_workspace", "(", "self", ")", ":", "def", "confirm_clicked", "(", ")", ":", "if", "len", "(", "self", ".", "document_model", ".", "workspaces", ")", ">", "1", ":", "command", "=", "Workspace", ".", "RemoveWorkspaceCommand", "(", "self", ")", "command", ".", "perform", "(", ")", "self", ".", "document_controller", ".", "push_undo_command", "(", "command", ")", "caption", "=", "_", "(", "\"Remove workspace named '{0}'?\"", ")", ".", "format", "(", "self", ".", "__workspace", ".", "name", ")", "self", ".", "pose_confirmation_message_box", "(", "caption", ",", "confirm_clicked", ",", "accepted_text", "=", "_", "(", "\"Remove Workspace\"", ")", ",", "message_box_id", "=", "\"remove_workspace\"", ")" ]
Pose a dialog to confirm removal then remove workspace.
[ "Pose", "a", "dialog", "to", "confirm", "removal", "then", "remove", "workspace", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Workspace.py#L575-L586
train
nion-software/nionswift
nion/swift/Workspace.py
Workspace.clone_workspace
def clone_workspace(self) -> None: """ Pose a dialog to name and clone a workspace. """ def clone_clicked(text): if text: command = Workspace.CloneWorkspaceCommand(self, text) command.perform() self.document_controller.push_undo_command(command) self.pose_get_string_message_box(caption=_("Enter a name for the workspace"), text=self.__workspace.name, accepted_fn=clone_clicked, accepted_text=_("Clone"), message_box_id="clone_workspace")
python
def clone_workspace(self) -> None: """ Pose a dialog to name and clone a workspace. """ def clone_clicked(text): if text: command = Workspace.CloneWorkspaceCommand(self, text) command.perform() self.document_controller.push_undo_command(command) self.pose_get_string_message_box(caption=_("Enter a name for the workspace"), text=self.__workspace.name, accepted_fn=clone_clicked, accepted_text=_("Clone"), message_box_id="clone_workspace")
[ "def", "clone_workspace", "(", "self", ")", "->", "None", ":", "def", "clone_clicked", "(", "text", ")", ":", "if", "text", ":", "command", "=", "Workspace", ".", "CloneWorkspaceCommand", "(", "self", ",", "text", ")", "command", ".", "perform", "(", ")", "self", ".", "document_controller", ".", "push_undo_command", "(", "command", ")", "self", ".", "pose_get_string_message_box", "(", "caption", "=", "_", "(", "\"Enter a name for the workspace\"", ")", ",", "text", "=", "self", ".", "__workspace", ".", "name", ",", "accepted_fn", "=", "clone_clicked", ",", "accepted_text", "=", "_", "(", "\"Clone\"", ")", ",", "message_box_id", "=", "\"clone_workspace\"", ")" ]
Pose a dialog to name and clone a workspace.
[ "Pose", "a", "dialog", "to", "name", "and", "clone", "a", "workspace", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Workspace.py#L588-L599
train
nion-software/nionswift
nion/swift/Workspace.py
Workspace.__replace_displayed_display_item
def __replace_displayed_display_item(self, display_panel, display_item, d=None) -> Undo.UndoableCommand: """ Used in drag/drop support. """ self.document_controller.replaced_display_panel_content = display_panel.save_contents() command = DisplayPanel.ReplaceDisplayPanelCommand(self) if display_item: display_panel.set_display_panel_display_item(display_item, detect_controller=True) elif d is not None: display_panel.change_display_panel_content(d) display_panel.request_focus() self.__sync_layout() return command
python
def __replace_displayed_display_item(self, display_panel, display_item, d=None) -> Undo.UndoableCommand: """ Used in drag/drop support. """ self.document_controller.replaced_display_panel_content = display_panel.save_contents() command = DisplayPanel.ReplaceDisplayPanelCommand(self) if display_item: display_panel.set_display_panel_display_item(display_item, detect_controller=True) elif d is not None: display_panel.change_display_panel_content(d) display_panel.request_focus() self.__sync_layout() return command
[ "def", "__replace_displayed_display_item", "(", "self", ",", "display_panel", ",", "display_item", ",", "d", "=", "None", ")", "->", "Undo", ".", "UndoableCommand", ":", "self", ".", "document_controller", ".", "replaced_display_panel_content", "=", "display_panel", ".", "save_contents", "(", ")", "command", "=", "DisplayPanel", ".", "ReplaceDisplayPanelCommand", "(", "self", ")", "if", "display_item", ":", "display_panel", ".", "set_display_panel_display_item", "(", "display_item", ",", "detect_controller", "=", "True", ")", "elif", "d", "is", "not", "None", ":", "display_panel", ".", "change_display_panel_content", "(", "d", ")", "display_panel", ".", "request_focus", "(", ")", "self", ".", "__sync_layout", "(", ")", "return", "command" ]
Used in drag/drop support.
[ "Used", "in", "drag", "/", "drop", "support", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Workspace.py#L792-L802
train
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
bootstrap
def bootstrap(score_objs, n_boot=1000): """ Given a set of DistributedROC or DistributedReliability objects, this function performs a bootstrap resampling of the objects and returns n_boot aggregations of them. Args: score_objs: A list of DistributedROC or DistributedReliability objects. Objects must have an __add__ method n_boot (int): Number of bootstrap samples Returns: An array of DistributedROC or DistributedReliability """ all_samples = np.random.choice(score_objs, size=(n_boot, len(score_objs)), replace=True) return all_samples.sum(axis=1)
python
def bootstrap(score_objs, n_boot=1000): """ Given a set of DistributedROC or DistributedReliability objects, this function performs a bootstrap resampling of the objects and returns n_boot aggregations of them. Args: score_objs: A list of DistributedROC or DistributedReliability objects. Objects must have an __add__ method n_boot (int): Number of bootstrap samples Returns: An array of DistributedROC or DistributedReliability """ all_samples = np.random.choice(score_objs, size=(n_boot, len(score_objs)), replace=True) return all_samples.sum(axis=1)
[ "def", "bootstrap", "(", "score_objs", ",", "n_boot", "=", "1000", ")", ":", "all_samples", "=", "np", ".", "random", ".", "choice", "(", "score_objs", ",", "size", "=", "(", "n_boot", ",", "len", "(", "score_objs", ")", ")", ",", "replace", "=", "True", ")", "return", "all_samples", ".", "sum", "(", "axis", "=", "1", ")" ]
Given a set of DistributedROC or DistributedReliability objects, this function performs a bootstrap resampling of the objects and returns n_boot aggregations of them. Args: score_objs: A list of DistributedROC or DistributedReliability objects. Objects must have an __add__ method n_boot (int): Number of bootstrap samples Returns: An array of DistributedROC or DistributedReliability
[ "Given", "a", "set", "of", "DistributedROC", "or", "DistributedReliability", "objects", "this", "function", "performs", "a", "bootstrap", "resampling", "of", "the", "objects", "and", "returns", "n_boot", "aggregations", "of", "them", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L537-L550
train
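A hedged usage sketch for bootstrap: it assumes DistributedROC's constructor accepts thresholds and obs_threshold keyword arguments (from_str below suggests as much, but verify against the actual __init__):

    import numpy as np
    from hagelslag.evaluation.ProbabilityMetrics import DistributedROC, bootstrap

    np.random.seed(0)
    daily_rocs = []
    for day in range(30):  # one aggregated score object per forecast day
        roc = DistributedROC(thresholds=np.arange(0, 1.1, 0.1), obs_threshold=0.5)  # assumed signature
        roc.update(np.random.random(200), np.random.random(200))
        daily_rocs.append(roc)
    # Resample days with replacement; each row of samples is combined via __add__.
    boot_rocs = bootstrap(daily_rocs, n_boot=500)
    aucs = np.array([b.auc() for b in boot_rocs])
    print(np.percentile(aucs, [2.5, 97.5]))  # bootstrap confidence interval on AUC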
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedROC.update
def update(self, forecasts, observations): """ Update the ROC curve with a set of forecasts and observations Args: forecasts: 1D array of forecast values observations: 1D array of observation values. """ for t, threshold in enumerate(self.thresholds): tp = np.count_nonzero((forecasts >= threshold) & (observations >= self.obs_threshold)) fp = np.count_nonzero((forecasts >= threshold) & (observations < self.obs_threshold)) fn = np.count_nonzero((forecasts < threshold) & (observations >= self.obs_threshold)) tn = np.count_nonzero((forecasts < threshold) & (observations < self.obs_threshold)) self.contingency_tables.iloc[t] += [tp, fp, fn, tn]
python
def update(self, forecasts, observations): """ Update the ROC curve with a set of forecasts and observations Args: forecasts: 1D array of forecast values observations: 1D array of observation values. """ for t, threshold in enumerate(self.thresholds): tp = np.count_nonzero((forecasts >= threshold) & (observations >= self.obs_threshold)) fp = np.count_nonzero((forecasts >= threshold) & (observations < self.obs_threshold)) fn = np.count_nonzero((forecasts < threshold) & (observations >= self.obs_threshold)) tn = np.count_nonzero((forecasts < threshold) & (observations < self.obs_threshold)) self.contingency_tables.iloc[t] += [tp, fp, fn, tn]
[ "def", "update", "(", "self", ",", "forecasts", ",", "observations", ")", ":", "for", "t", ",", "threshold", "in", "enumerate", "(", "self", ".", "thresholds", ")", ":", "tp", "=", "np", ".", "count_nonzero", "(", "(", "forecasts", ">=", "threshold", ")", "&", "(", "observations", ">=", "self", ".", "obs_threshold", ")", ")", "fp", "=", "np", ".", "count_nonzero", "(", "(", "forecasts", ">=", "threshold", ")", "&", "(", "observations", "<", "self", ".", "obs_threshold", ")", ")", "fn", "=", "np", ".", "count_nonzero", "(", "(", "forecasts", "<", "threshold", ")", "&", "(", "observations", ">=", "self", ".", "obs_threshold", ")", ")", "tn", "=", "np", ".", "count_nonzero", "(", "(", "forecasts", "<", "threshold", ")", "&", "(", "observations", "<", "self", ".", "obs_threshold", ")", ")", "self", ".", "contingency_tables", ".", "iloc", "[", "t", "]", "+=", "[", "tp", ",", "fp", ",", "fn", ",", "tn", "]" ]
Update the ROC curve with a set of forecasts and observations Args: forecasts: 1D array of forecast values observations: 1D array of observation values.
[ "Update", "the", "ROC", "curve", "with", "a", "set", "of", "forecasts", "and", "observations" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L76-L92
train
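A small sketch of how update fills the per-threshold contingency tables (constructor signature assumed, as above):

    import numpy as np
    from hagelslag.evaluation.ProbabilityMetrics import DistributedROC

    roc = DistributedROC(thresholds=np.arange(0, 1.1, 0.1), obs_threshold=1.0)  # assumed signature
    forecasts = np.array([0.15, 0.45, 0.80, 0.95])
    observations = np.array([0.0, 1.0, 0.0, 2.0])  # values >= obs_threshold count as events
    roc.update(forecasts, observations)
    print(roc.contingency_tables)  # one TP/FP/FN/TN row per probability threshold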
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedROC.merge
def merge(self, other_roc): """ Ingest the values of another DistributedROC object into this one and update the statistics in place. Args: other_roc: another DistributedROC object. """ if other_roc.thresholds.size == self.thresholds.size and np.all(other_roc.thresholds == self.thresholds): self.contingency_tables += other_roc.contingency_tables else: print("Input table thresholds do not match.")
python
def merge(self, other_roc): """ Ingest the values of another DistributedROC object into this one and update the statistics in place. Args: other_roc: another DistributedROC object. """ if other_roc.thresholds.size == self.thresholds.size and np.all(other_roc.thresholds == self.thresholds): self.contingency_tables += other_roc.contingency_tables else: print("Input table thresholds do not match.")
[ "def", "merge", "(", "self", ",", "other_roc", ")", ":", "if", "other_roc", ".", "thresholds", ".", "size", "==", "self", ".", "thresholds", ".", "size", "and", "np", ".", "all", "(", "other_roc", ".", "thresholds", "==", "self", ".", "thresholds", ")", ":", "self", ".", "contingency_tables", "+=", "other_roc", ".", "contingency_tables", "else", ":", "print", "(", "\"Input table thresholds do not match.\"", ")" ]
Ingest the values of another DistributedROC object into this one and update the statistics in place. Args: other_roc: another DistributedROC object.
[ "Ingest", "the", "values", "of", "another", "DistributedROC", "object", "into", "this", "one", "and", "update", "the", "statistics", "inplace", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L108-L118
train
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedROC.roc_curve
def roc_curve(self): """ Generate a ROC curve from the contingency table by calculating the probability of detection (TP/(TP+FN)) and the probability of false detection (FP/(FP+TN)). Returns: A pandas.DataFrame containing the POD, POFD, and the corresponding probability thresholds. """ pod = self.contingency_tables["TP"].astype(float) / (self.contingency_tables["TP"] + self.contingency_tables["FN"]) pofd = self.contingency_tables["FP"].astype(float) / (self.contingency_tables["FP"] + self.contingency_tables["TN"]) return pd.DataFrame({"POD": pod, "POFD": pofd, "Thresholds": self.thresholds}, columns=["POD", "POFD", "Thresholds"])
python
def roc_curve(self): """ Generate a ROC curve from the contingency table by calculating the probability of detection (TP/(TP+FN)) and the probability of false detection (FP/(FP+TN)). Returns: A pandas.DataFrame containing the POD, POFD, and the corresponding probability thresholds. """ pod = self.contingency_tables["TP"].astype(float) / (self.contingency_tables["TP"] + self.contingency_tables["FN"]) pofd = self.contingency_tables["FP"].astype(float) / (self.contingency_tables["FP"] + self.contingency_tables["TN"]) return pd.DataFrame({"POD": pod, "POFD": pofd, "Thresholds": self.thresholds}, columns=["POD", "POFD", "Thresholds"])
[ "def", "roc_curve", "(", "self", ")", ":", "pod", "=", "self", ".", "contingency_tables", "[", "\"TP\"", "]", ".", "astype", "(", "float", ")", "/", "(", "self", ".", "contingency_tables", "[", "\"TP\"", "]", "+", "self", ".", "contingency_tables", "[", "\"FN\"", "]", ")", "pofd", "=", "self", ".", "contingency_tables", "[", "\"FP\"", "]", ".", "astype", "(", "float", ")", "/", "(", "self", ".", "contingency_tables", "[", "\"FP\"", "]", "+", "self", ".", "contingency_tables", "[", "\"TN\"", "]", ")", "return", "pd", ".", "DataFrame", "(", "{", "\"POD\"", ":", "pod", ",", "\"POFD\"", ":", "pofd", ",", "\"Thresholds\"", ":", "self", ".", "thresholds", "}", ",", "columns", "=", "[", "\"POD\"", ",", "\"POFD\"", ",", "\"Thresholds\"", "]", ")" ]
Generate a ROC curve from the contingency table by calculating the probability of detection (TP/(TP+FN)) and the probability of false detection (FP/(FP+TN)). Returns: A pandas.DataFrame containing the POD, POFD, and the corresponding probability thresholds.
[ "Generate", "a", "ROC", "curve", "from", "the", "contingency", "table", "by", "calculating", "the", "probability", "of", "detection", "(", "TP", "/", "(", "TP", "+", "FN", "))", "and", "the", "probability", "of", "false", "detection", "(", "FP", "/", "(", "FP", "+", "TN", "))", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L120-L133
train
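The POD/POFD definitions can be checked on a toy contingency table; this standalone snippet mirrors the arithmetic above without needing the class:

    import pandas as pd

    ct = pd.DataFrame({"TP": [30, 20], "FP": [40, 10], "FN": [5, 15], "TN": [25, 55]})
    pod = ct["TP"].astype(float) / (ct["TP"] + ct["FN"])   # probability of detection
    pofd = ct["FP"].astype(float) / (ct["FP"] + ct["TN"])  # probability of false detection
    print(pod.values)   # [0.857... 0.571...]
    print(pofd.values)  # [0.615... 0.153...]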
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedROC.performance_curve
def performance_curve(self): """ Calculate the Probability of Detection and False Alarm Ratio in order to output a performance diagram. Returns: pandas.DataFrame containing POD, FAR, and probability thresholds. """ pod = self.contingency_tables["TP"] / (self.contingency_tables["TP"] + self.contingency_tables["FN"]) far = self.contingency_tables["FP"] / (self.contingency_tables["FP"] + self.contingency_tables["TP"]) far[(self.contingency_tables["FP"] + self.contingency_tables["TP"]) == 0] = np.nan return pd.DataFrame({"POD": pod, "FAR": far, "Thresholds": self.thresholds}, columns=["POD", "FAR", "Thresholds"])
python
def performance_curve(self): """ Calculate the Probability of Detection and False Alarm Ratio in order to output a performance diagram. Returns: pandas.DataFrame containing POD, FAR, and probability thresholds. """ pod = self.contingency_tables["TP"] / (self.contingency_tables["TP"] + self.contingency_tables["FN"]) far = self.contingency_tables["FP"] / (self.contingency_tables["FP"] + self.contingency_tables["TP"]) far[(self.contingency_tables["FP"] + self.contingency_tables["TP"]) == 0] = np.nan return pd.DataFrame({"POD": pod, "FAR": far, "Thresholds": self.thresholds}, columns=["POD", "FAR", "Thresholds"])
[ "def", "performance_curve", "(", "self", ")", ":", "pod", "=", "self", ".", "contingency_tables", "[", "\"TP\"", "]", "/", "(", "self", ".", "contingency_tables", "[", "\"TP\"", "]", "+", "self", ".", "contingency_tables", "[", "\"FN\"", "]", ")", "far", "=", "self", ".", "contingency_tables", "[", "\"FP\"", "]", "/", "(", "self", ".", "contingency_tables", "[", "\"FP\"", "]", "+", "self", ".", "contingency_tables", "[", "\"TP\"", "]", ")", "far", "[", "(", "self", ".", "contingency_tables", "[", "\"FP\"", "]", "+", "self", ".", "contingency_tables", "[", "\"TP\"", "]", ")", "==", "0", "]", "=", "np", ".", "nan", "return", "pd", ".", "DataFrame", "(", "{", "\"POD\"", ":", "pod", ",", "\"FAR\"", ":", "far", ",", "\"Thresholds\"", ":", "self", ".", "thresholds", "}", ",", "columns", "=", "[", "\"POD\"", ",", "\"FAR\"", ",", "\"Thresholds\"", "]", ")" ]
Calculate the Probability of Detection and False Alarm Ratio in order to output a performance diagram. Returns: pandas.DataFrame containing POD, FAR, and probability thresholds.
[ "Calculate", "the", "Probability", "of", "Detection", "and", "False", "Alarm", "Ratio", "in", "order", "to", "output", "a", "performance", "diagram", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L135-L146
train
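A standalone illustration of the FAR calculation and its NaN guard for thresholds where nothing was forecast:

    import numpy as np
    import pandas as pd

    ct = pd.DataFrame({"TP": [3, 0], "FP": [1, 0], "FN": [2, 5]})
    far = ct["FP"] / (ct["FP"] + ct["TP"])
    far[(ct["FP"] + ct["TP"]) == 0] = np.nan  # FAR is undefined with no positive forecasts
    print(far.values)  # [0.25  nan]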
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedROC.auc
def auc(self): """ Calculate the Area Under the ROC Curve (AUC). """ roc_curve = self.roc_curve() return np.abs(np.trapz(roc_curve['POD'], x=roc_curve['POFD']))
python
def auc(self): """ Calculate the Area Under the ROC Curve (AUC). """ roc_curve = self.roc_curve() return np.abs(np.trapz(roc_curve['POD'], x=roc_curve['POFD']))
[ "def", "auc", "(", "self", ")", ":", "roc_curve", "=", "self", ".", "roc_curve", "(", ")", "return", "np", ".", "abs", "(", "np", ".", "trapz", "(", "roc_curve", "[", "'POD'", "]", ",", "x", "=", "roc_curve", "[", "'POFD'", "]", ")", ")" ]
Calculate the Area Under the ROC Curve (AUC).
[ "Calculate", "the", "Area", "Under", "the", "ROC", "Curve", "(", "AUC", ")", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L148-L153
train
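A quick numeric check of the trapezoidal AUC; the abs() is needed because POFD decreases as the threshold increases, so x runs in reverse order:

    import numpy as np

    pofd = np.array([1.0, 0.4, 0.1, 0.0])  # decreasing with threshold
    pod = np.array([1.0, 0.9, 0.5, 0.0])
    print(np.abs(np.trapz(pod, x=pofd)))  # ~0.805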
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedROC.max_csi
def max_csi(self): """ Calculate the maximum Critical Success Index across all probability thresholds Returns: The maximum CSI as a float """ csi = self.contingency_tables["TP"] / (self.contingency_tables["TP"] + self.contingency_tables["FN"] + self.contingency_tables["FP"]) return csi.max()
python
def max_csi(self): """ Calculate the maximum Critical Success Index across all probability thresholds Returns: The maximum CSI as a float """ csi = self.contingency_tables["TP"] / (self.contingency_tables["TP"] + self.contingency_tables["FN"] + self.contingency_tables["FP"]) return csi.max()
[ "def", "max_csi", "(", "self", ")", ":", "csi", "=", "self", ".", "contingency_tables", "[", "\"TP\"", "]", "/", "(", "self", ".", "contingency_tables", "[", "\"TP\"", "]", "+", "self", ".", "contingency_tables", "[", "\"FN\"", "]", "+", "self", ".", "contingency_tables", "[", "\"FP\"", "]", ")", "return", "csi", ".", "max", "(", ")" ]
Calculate the maximum Critical Success Index across all probability thresholds Returns: The maximum CSI as a float
[ "Calculate", "the", "maximum", "Critical", "Success", "Index", "across", "all", "probability", "thresholds" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L155-L164
train
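CSI = TP / (TP + FN + FP); a tiny standalone check of the maximum over thresholds:

    import pandas as pd

    ct = pd.DataFrame({"TP": [30, 20], "FN": [5, 15], "FP": [40, 10]})
    csi = ct["TP"] / (ct["TP"] + ct["FN"] + ct["FP"])
    print(csi.max())  # 0.444..., from the second threshold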
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedROC.get_contingency_tables
def get_contingency_tables(self): """ Create an Array of ContingencyTable objects for each probability threshold. Returns: Array of ContingencyTable objects """ return np.array([ContingencyTable(*ct) for ct in self.contingency_tables.values])
python
def get_contingency_tables(self): """ Create an Array of ContingencyTable objects for each probability threshold. Returns: Array of ContingencyTable objects """ return np.array([ContingencyTable(*ct) for ct in self.contingency_tables.values])
[ "def", "get_contingency_tables", "(", "self", ")", ":", "return", "np", ".", "array", "(", "[", "ContingencyTable", "(", "*", "ct", ")", "for", "ct", "in", "self", ".", "contingency_tables", ".", "values", "]", ")" ]
Create an Array of ContingencyTable objects for each probability threshold. Returns: Array of ContingencyTable objects
[ "Create", "an", "Array", "of", "ContingencyTable", "objects", "for", "each", "probability", "threshold", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L171-L178
train
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedROC.from_str
def from_str(self, in_str): """ Read the DistributedROC string and parse the contingency table values from it. Args: in_str (str): The string output from the __str__ method """ parts = in_str.split(";") for part in parts: var_name, value = part.split(":") if var_name == "Obs_Threshold": self.obs_threshold = float(value) elif var_name == "Thresholds": self.thresholds = np.array(value.split(), dtype=float) self.contingency_tables = pd.DataFrame(columns=self.contingency_tables.columns, data=np.zeros((self.thresholds.size, self.contingency_tables.columns.size))) elif var_name in self.contingency_tables.columns: self.contingency_tables[var_name] = np.array(value.split(), dtype=int)
python
def from_str(self, in_str): """ Read the DistributedROC string and parse the contingency table values from it. Args: in_str (str): The string output from the __str__ method """ parts = in_str.split(";") for part in parts: var_name, value = part.split(":") if var_name == "Obs_Threshold": self.obs_threshold = float(value) elif var_name == "Thresholds": self.thresholds = np.array(value.split(), dtype=float) self.contingency_tables = pd.DataFrame(columns=self.contingency_tables.columns, data=np.zeros((self.thresholds.size, self.contingency_tables.columns.size))) elif var_name in self.contingency_tables.columns: self.contingency_tables[var_name] = np.array(value.split(), dtype=int)
[ "def", "from_str", "(", "self", ",", "in_str", ")", ":", "parts", "=", "in_str", ".", "split", "(", "\";\"", ")", "for", "part", "in", "parts", ":", "var_name", ",", "value", "=", "part", ".", "split", "(", "\":\"", ")", "if", "var_name", "==", "\"Obs_Threshold\"", ":", "self", ".", "obs_threshold", "=", "float", "(", "value", ")", "elif", "var_name", "==", "\"Thresholds\"", ":", "self", ".", "thresholds", "=", "np", ".", "array", "(", "value", ".", "split", "(", ")", ",", "dtype", "=", "float", ")", "self", ".", "contingency_tables", "=", "pd", ".", "DataFrame", "(", "columns", "=", "self", ".", "contingency_tables", ".", "columns", ",", "data", "=", "np", ".", "zeros", "(", "(", "self", ".", "thresholds", ".", "size", ",", "self", ".", "contingency_tables", ".", "columns", ".", "size", ")", ")", ")", "elif", "var_name", "in", "self", ".", "contingency_tables", ".", "columns", ":", "self", ".", "contingency_tables", "[", "var_name", "]", "=", "np", ".", "array", "(", "value", ".", "split", "(", ")", ",", "dtype", "=", "int", ")" ]
Read the DistributedROC string and parse the contingency table values from it. Args: in_str (str): The string output from the __str__ method
[ "Read", "the", "DistributedROC", "string", "and", "parse", "the", "contingency", "table", "values", "from", "it", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L194-L212
train
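A hedged round-trip sketch: the docstring says from_str consumes the __str__ output, so serialization and restoration should compose (constructor signature assumed, as above):

    import numpy as np
    from hagelslag.evaluation.ProbabilityMetrics import DistributedROC

    roc = DistributedROC(thresholds=np.arange(0, 1.1, 0.1), obs_threshold=0.5)  # assumed signature
    roc.update(np.array([0.2, 0.7, 0.9]), np.array([0.0, 1.0, 1.0]))
    serialized = str(roc)  # "Obs_Threshold:...;Thresholds:...;TP:...;..."
    restored = DistributedROC(thresholds=np.arange(0, 1.1, 0.1), obs_threshold=0.5)
    restored.from_str(serialized)  # rebuilds thresholds and contingency tables
    print(restored.auc() == roc.auc())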
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedReliability.update
def update(self, forecasts, observations): """ Update the statistics with a set of forecasts and observations. Args: forecasts (numpy.ndarray): Array of forecast probability values observations (numpy.ndarray): Array of observation values """ for t, threshold in enumerate(self.thresholds[:-1]): self.frequencies.loc[t, "Positive_Freq"] += np.count_nonzero((threshold <= forecasts) & (forecasts < self.thresholds[t+1]) & (observations >= self.obs_threshold)) self.frequencies.loc[t, "Total_Freq"] += np.count_nonzero((threshold <= forecasts) & (forecasts < self.thresholds[t+1]))
python
def update(self, forecasts, observations): """ Update the statistics with a set of forecasts and observations. Args: forecasts (numpy.ndarray): Array of forecast probability values observations (numpy.ndarray): Array of observation values """ for t, threshold in enumerate(self.thresholds[:-1]): self.frequencies.loc[t, "Positive_Freq"] += np.count_nonzero((threshold <= forecasts) & (forecasts < self.thresholds[t+1]) & (observations >= self.obs_threshold)) self.frequencies.loc[t, "Total_Freq"] += np.count_nonzero((threshold <= forecasts) & (forecasts < self.thresholds[t+1]))
[ "def", "update", "(", "self", ",", "forecasts", ",", "observations", ")", ":", "for", "t", ",", "threshold", "in", "enumerate", "(", "self", ".", "thresholds", "[", ":", "-", "1", "]", ")", ":", "self", ".", "frequencies", ".", "loc", "[", "t", ",", "\"Positive_Freq\"", "]", "+=", "np", ".", "count_nonzero", "(", "(", "threshold", "<=", "forecasts", ")", "&", "(", "forecasts", "<", "self", ".", "thresholds", "[", "t", "+", "1", "]", ")", "&", "(", "observations", ">=", "self", ".", "obs_threshold", ")", ")", "self", ".", "frequencies", ".", "loc", "[", "t", ",", "\"Total_Freq\"", "]", "+=", "np", ".", "count_nonzero", "(", "(", "threshold", "<=", "forecasts", ")", "&", "(", "forecasts", "<", "self", ".", "thresholds", "[", "t", "+", "1", "]", ")", ")" ]
Update the statistics with a set of forecasts and observations. Args: forecasts (numpy.ndarray): Array of forecast probability values observations (numpy.ndarray): Array of observation values
[ "Update", "the", "statistics", "with", "a", "set", "of", "forecasts", "and", "observations", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L308-L321
train
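A hedged sketch of accumulating bin frequencies (assuming DistributedReliability's constructor mirrors DistributedROC's thresholds/obs_threshold keywords):

    import numpy as np
    from hagelslag.evaluation.ProbabilityMetrics import DistributedReliability

    rel = DistributedReliability(thresholds=np.arange(0, 1.1, 0.1), obs_threshold=1.0)  # assumed signature
    np.random.seed(1)
    rel.update(np.random.random(1000), np.random.choice([0.0, 1.0], size=1000))
    print(rel.frequencies.head())  # Positive_Freq and Total_Freq per forecast bin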
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedReliability.merge
def merge(self, other_rel): """ Ingest another DistributedReliability and add its contents to the current object. Args: other_rel: a DistributedReliability object. """ if other_rel.thresholds.size == self.thresholds.size and np.all(other_rel.thresholds == self.thresholds): self.frequencies += other_rel.frequencies else: print("Input table thresholds do not match.")
python
def merge(self, other_rel): """ Ingest another DistributedReliability and add its contents to the current object. Args: other_rel: a DistributedReliability object. """ if other_rel.thresholds.size == self.thresholds.size and np.all(other_rel.thresholds == self.thresholds): self.frequencies += other_rel.frequencies else: print("Input table thresholds do not match.")
[ "def", "merge", "(", "self", ",", "other_rel", ")", ":", "if", "other_rel", ".", "thresholds", ".", "size", "==", "self", ".", "thresholds", ".", "size", "and", "np", ".", "all", "(", "other_rel", ".", "thresholds", "==", "self", ".", "thresholds", ")", ":", "self", ".", "frequencies", "+=", "other_rel", ".", "frequencies", "else", ":", "print", "(", "\"Input table thresholds do not match.\"", ")" ]
Ingest another DistributedReliability and add its contents to the current object. Args: other_rel: a DistributedReliability object.
[ "Ingest", "another", "DistributedReliability", "and", "add", "its", "contents", "to", "the", "current", "object", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L340-L350
train
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedReliability.reliability_curve
def reliability_curve(self): """ Calculates the reliability diagram statistics. The key columns are Bin_Start and Positive_Relative_Freq Returns: pandas.DataFrame """ total = self.frequencies["Total_Freq"].sum() curve = pd.DataFrame(columns=["Bin_Start", "Bin_End", "Bin_Center", "Positive_Relative_Freq", "Total_Relative_Freq"]) curve["Bin_Start"] = self.thresholds[:-1] curve["Bin_End"] = self.thresholds[1:] curve["Bin_Center"] = 0.5 * (self.thresholds[:-1] + self.thresholds[1:]) curve["Positive_Relative_Freq"] = self.frequencies["Positive_Freq"] / self.frequencies["Total_Freq"] curve["Total_Relative_Freq"] = self.frequencies["Total_Freq"] / total return curve
python
def reliability_curve(self): """ Calculates the reliability diagram statistics. The key columns are Bin_Start and Positive_Relative_Freq Returns: pandas.DataFrame """ total = self.frequencies["Total_Freq"].sum() curve = pd.DataFrame(columns=["Bin_Start", "Bin_End", "Bin_Center", "Positive_Relative_Freq", "Total_Relative_Freq"]) curve["Bin_Start"] = self.thresholds[:-1] curve["Bin_End"] = self.thresholds[1:] curve["Bin_Center"] = 0.5 * (self.thresholds[:-1] + self.thresholds[1:]) curve["Positive_Relative_Freq"] = self.frequencies["Positive_Freq"] / self.frequencies["Total_Freq"] curve["Total_Relative_Freq"] = self.frequencies["Total_Freq"] / total return curve
[ "def", "reliability_curve", "(", "self", ")", ":", "total", "=", "self", ".", "frequencies", "[", "\"Total_Freq\"", "]", ".", "sum", "(", ")", "curve", "=", "pd", ".", "DataFrame", "(", "columns", "=", "[", "\"Bin_Start\"", ",", "\"Bin_End\"", ",", "\"Bin_Center\"", ",", "\"Positive_Relative_Freq\"", ",", "\"Total_Relative_Freq\"", "]", ")", "curve", "[", "\"Bin_Start\"", "]", "=", "self", ".", "thresholds", "[", ":", "-", "1", "]", "curve", "[", "\"Bin_End\"", "]", "=", "self", ".", "thresholds", "[", "1", ":", "]", "curve", "[", "\"Bin_Center\"", "]", "=", "0.5", "*", "(", "self", ".", "thresholds", "[", ":", "-", "1", "]", "+", "self", ".", "thresholds", "[", "1", ":", "]", ")", "curve", "[", "\"Positive_Relative_Freq\"", "]", "=", "self", ".", "frequencies", "[", "\"Positive_Freq\"", "]", "/", "self", ".", "frequencies", "[", "\"Total_Freq\"", "]", "curve", "[", "\"Total_Relative_Freq\"", "]", "=", "self", ".", "frequencies", "[", "\"Total_Freq\"", "]", "/", "total", "return", "curve" ]
Calculates the reliability diagram statistics. The key columns are Bin_Start and Positive_Relative_Freq Returns: pandas.DataFrame
[ "Calculates", "the", "reliability", "diagram", "statistics", ".", "The", "key", "columns", "are", "Bin_Start", "and", "Positive_Relative_Freq" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L352-L367
train
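The curve arithmetic can be reproduced standalone; the toy frequencies below are illustrative only:

    import numpy as np
    import pandas as pd

    thresholds = np.array([0.0, 0.25, 0.5, 0.75, 1.0])
    freq = pd.DataFrame({"Positive_Freq": [2, 10, 30, 45], "Total_Freq": [40, 35, 50, 50]})
    curve = pd.DataFrame({
        "Bin_Center": 0.5 * (thresholds[:-1] + thresholds[1:]),
        "Positive_Relative_Freq": freq["Positive_Freq"] / freq["Total_Freq"],
    })
    print(curve)  # a well-calibrated forecast tracks the 1:1 line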
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedReliability.brier_score_components
def brier_score_components(self): """ Calculate the components of the Brier score decomposition: reliability, resolution, and uncertainty. """ rel_curve = self.reliability_curve() total = self.frequencies["Total_Freq"].sum() climo_freq = float(self.frequencies["Positive_Freq"].sum()) / self.frequencies["Total_Freq"].sum() reliability = np.sum(self.frequencies["Total_Freq"] * (rel_curve["Bin_Start"] - rel_curve["Positive_Relative_Freq"]) ** 2) / total resolution = np.sum(self.frequencies["Total_Freq"] * (rel_curve["Positive_Relative_Freq"] - climo_freq) ** 2) \ / total uncertainty = climo_freq * (1 - climo_freq) return reliability, resolution, uncertainty
python
def brier_score_components(self): """ Calculate the components of the Brier score decomposition: reliability, resolution, and uncertainty. """ rel_curve = self.reliability_curve() total = self.frequencies["Total_Freq"].sum() climo_freq = float(self.frequencies["Positive_Freq"].sum()) / self.frequencies["Total_Freq"].sum() reliability = np.sum(self.frequencies["Total_Freq"] * (rel_curve["Bin_Start"] - rel_curve["Positive_Relative_Freq"]) ** 2) / total resolution = np.sum(self.frequencies["Total_Freq"] * (rel_curve["Positive_Relative_Freq"] - climo_freq) ** 2) \ / total uncertainty = climo_freq * (1 - climo_freq) return reliability, resolution, uncertainty
[ "def", "brier_score_components", "(", "self", ")", ":", "rel_curve", "=", "self", ".", "reliability_curve", "(", ")", "total", "=", "self", ".", "frequencies", "[", "\"Total_Freq\"", "]", ".", "sum", "(", ")", "climo_freq", "=", "float", "(", "self", ".", "frequencies", "[", "\"Positive_Freq\"", "]", ".", "sum", "(", ")", ")", "/", "self", ".", "frequencies", "[", "\"Total_Freq\"", "]", ".", "sum", "(", ")", "reliability", "=", "np", ".", "sum", "(", "self", ".", "frequencies", "[", "\"Total_Freq\"", "]", "*", "(", "rel_curve", "[", "\"Bin_Start\"", "]", "-", "rel_curve", "[", "\"Positive_Relative_Freq\"", "]", ")", "**", "2", ")", "/", "total", "resolution", "=", "np", ".", "sum", "(", "self", ".", "frequencies", "[", "\"Total_Freq\"", "]", "*", "(", "rel_curve", "[", "\"Positive_Relative_Freq\"", "]", "-", "climo_freq", ")", "**", "2", ")", "/", "total", "uncertainty", "=", "climo_freq", "*", "(", "1", "-", "climo_freq", ")", "return", "reliability", ",", "resolution", ",", "uncertainty" ]
Calculate the components of the Brier score decomposition: reliability, resolution, and uncertainty.
[ "Calculate", "the", "components", "of", "the", "Brier", "score", "decomposition", ":", "reliability", "resolution", "and", "uncertainty", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L369-L381
train
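A worked numeric version of the decomposition, with REL = sum(Nk * (pk - obar_k)^2) / N, RES = sum(Nk * (obar_k - obar)^2) / N, and UNC = obar * (1 - obar); the bin values are made up for illustration:

    import numpy as np

    n_k = np.array([40.0, 60.0])     # forecasts falling in each bin (Total_Freq)
    p_k = np.array([0.2, 0.8])       # bin probability (Bin_Start above)
    obar_k = np.array([0.25, 0.7])   # observed relative frequency per bin
    obar = np.sum(n_k * obar_k) / n_k.sum()
    reliability = np.sum(n_k * (p_k - obar_k) ** 2) / n_k.sum()
    resolution = np.sum(n_k * (obar_k - obar) ** 2) / n_k.sum()
    uncertainty = obar * (1 - obar)
    print(reliability - resolution + uncertainty)  # equals the Brier score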
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedReliability.brier_score
def brier_score(self): """ Calculate the Brier Score """ reliability, resolution, uncertainty = self.brier_score_components() return reliability - resolution + uncertainty
python
def brier_score(self): """ Calculate the Brier Score """ reliability, resolution, uncertainty = self.brier_score_components() return reliability - resolution + uncertainty
[ "def", "brier_score", "(", "self", ")", ":", "reliability", ",", "resolution", ",", "uncertainty", "=", "self", ".", "brier_score_components", "(", ")", "return", "reliability", "-", "resolution", "+", "uncertainty" ]
Calculate the Brier Score
[ "Calculate", "the", "Brier", "Score" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L390-L395
train
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedReliability.brier_skill_score
def brier_skill_score(self): """ Calculate the Brier Skill Score """ reliability, resolution, uncertainty = self.brier_score_components() return (resolution - reliability) / uncertainty
python
def brier_skill_score(self): """ Calculate the Brier Skill Score """ reliability, resolution, uncertainty = self.brier_score_components() return (resolution - reliability) / uncertainty
[ "def", "brier_skill_score", "(", "self", ")", ":", "reliability", ",", "resolution", ",", "uncertainty", "=", "self", ".", "brier_score_components", "(", ")", "return", "(", "resolution", "-", "reliability", ")", "/", "uncertainty" ]
Calculate the Brier Skill Score
[ "Calculate", "the", "Brier", "Skill", "Score" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L397-L402
train
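Reading the formula: skill is positive whenever resolution exceeds the reliability penalty, relative to climatological uncertainty. Illustrative numbers:

    reliability, resolution, uncertainty = 0.01, 0.05, 0.21
    print((resolution - reliability) / uncertainty)  # ~0.19, modest positive skill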
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedCRPS.update
def update(self, forecasts, observations): """ Update the statistics with forecasts and observations. Args: forecasts: 2D array of forecast CDF values, one row per forecast and one column per threshold observations: 1D array of observed values, or 2D array of observation CDFs """ if len(observations.shape) == 1: obs_cdfs = np.zeros((observations.size, self.thresholds.size)) for o, observation in enumerate(observations): obs_cdfs[o, self.thresholds >= observation] = 1 else: obs_cdfs = observations self.errors["F_2"] += np.sum(forecasts ** 2, axis=0) self.errors["F_O"] += np.sum(forecasts * obs_cdfs, axis=0) self.errors["O_2"] += np.sum(obs_cdfs ** 2, axis=0) self.errors["O"] += np.sum(obs_cdfs, axis=0) self.num_forecasts += forecasts.shape[0]
python
def update(self, forecasts, observations): """ Update the statistics with forecasts and observations. Args: forecasts: 2D array of forecast CDF values, one row per forecast and one column per threshold observations: 1D array of observed values, or 2D array of observation CDFs """ if len(observations.shape) == 1: obs_cdfs = np.zeros((observations.size, self.thresholds.size)) for o, observation in enumerate(observations): obs_cdfs[o, self.thresholds >= observation] = 1 else: obs_cdfs = observations self.errors["F_2"] += np.sum(forecasts ** 2, axis=0) self.errors["F_O"] += np.sum(forecasts * obs_cdfs, axis=0) self.errors["O_2"] += np.sum(obs_cdfs ** 2, axis=0) self.errors["O"] += np.sum(obs_cdfs, axis=0) self.num_forecasts += forecasts.shape[0]
[ "def", "update", "(", "self", ",", "forecasts", ",", "observations", ")", ":", "if", "len", "(", "observations", ".", "shape", ")", "==", "1", ":", "obs_cdfs", "=", "np", ".", "zeros", "(", "(", "observations", ".", "size", ",", "self", ".", "thresholds", ".", "size", ")", ")", "for", "o", ",", "observation", "in", "enumerate", "(", "observations", ")", ":", "obs_cdfs", "[", "o", ",", "self", ".", "thresholds", ">=", "observation", "]", "=", "1", "else", ":", "obs_cdfs", "=", "observations", "self", ".", "errors", "[", "\"F_2\"", "]", "+=", "np", ".", "sum", "(", "forecasts", "**", "2", ",", "axis", "=", "0", ")", "self", ".", "errors", "[", "\"F_O\"", "]", "+=", "np", ".", "sum", "(", "forecasts", "*", "obs_cdfs", ",", "axis", "=", "0", ")", "self", ".", "errors", "[", "\"O_2\"", "]", "+=", "np", ".", "sum", "(", "obs_cdfs", "**", "2", ",", "axis", "=", "0", ")", "self", ".", "errors", "[", "\"O\"", "]", "+=", "np", ".", "sum", "(", "obs_cdfs", ",", "axis", "=", "0", ")", "self", ".", "num_forecasts", "+=", "forecasts", ".", "shape", "[", "0", "]" ]
Update the statistics with forecasts and observations. Args: forecasts: 2D array of forecast CDF values, one row per forecast and one column per threshold observations: 1D array of observed values, or 2D array of observation CDFs
[ "Update", "the", "statistics", "with", "forecasts", "and", "observations", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L455-L473
train
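A standalone illustration of how a scalar observation becomes a step-function CDF over the thresholds:

    import numpy as np

    thresholds = np.array([0.0, 10.0, 20.0, 30.0])
    observation = 12.0
    obs_cdf = (thresholds >= observation).astype(float)  # steps to 1 at the observed value
    print(obs_cdf)  # [0. 0. 1. 1.]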
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedCRPS.crps
def crps(self): """ Calculates the continuous ranked probability score. """ return np.sum(self.errors["F_2"].values - self.errors["F_O"].values * 2.0 + self.errors["O_2"].values) / \ (self.thresholds.size * self.num_forecasts)
python
def crps(self): """ Calculates the continuous ranked probability score. """ return np.sum(self.errors["F_2"].values - self.errors["F_O"].values * 2.0 + self.errors["O_2"].values) / \ (self.thresholds.size * self.num_forecasts)
[ "def", "crps", "(", "self", ")", ":", "return", "np", ".", "sum", "(", "self", ".", "errors", "[", "\"F_2\"", "]", ".", "values", "-", "self", ".", "errors", "[", "\"F_O\"", "]", ".", "values", "*", "2.0", "+", "self", ".", "errors", "[", "\"O_2\"", "]", ".", "values", ")", "/", "(", "self", ".", "thresholds", ".", "size", "*", "self", ".", "num_forecasts", ")" ]
Calculates the continuous ranked probability score.
[ "Calculates", "the", "continuous", "ranked", "probability", "score", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L488-L493
train
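The accumulated F_2, F_O, and O_2 sums are just the expansion of mean((F - O)^2); a standalone check:

    import numpy as np

    f = np.array([[0.1, 0.6, 0.9, 1.0]])  # one forecast CDF over four thresholds
    o = np.array([[0.0, 0.0, 1.0, 1.0]])  # matching observation CDF
    direct = np.mean((f - o) ** 2)
    expanded = (np.sum(f ** 2) - 2.0 * np.sum(f * o) + np.sum(o ** 2)) / f.size
    print(np.isclose(direct, expanded))  # True: the class stores the three sums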
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedCRPS.crps_climo
def crps_climo(self): """ Calculate the climatological CRPS. """ o_bar = self.errors["O"].values / float(self.num_forecasts) crps_c = np.sum(self.num_forecasts * (o_bar ** 2) - o_bar * self.errors["O"].values * 2.0 + self.errors["O_2"].values) / float(self.thresholds.size * self.num_forecasts) return crps_c
python
def crps_climo(self): """ Calculate the climatological CRPS. """ o_bar = self.errors["O"].values / float(self.num_forecasts) crps_c = np.sum(self.num_forecasts * (o_bar ** 2) - o_bar * self.errors["O"].values * 2.0 + self.errors["O_2"].values) / float(self.thresholds.size * self.num_forecasts) return crps_c
[ "def", "crps_climo", "(", "self", ")", ":", "o_bar", "=", "self", ".", "errors", "[", "\"O\"", "]", ".", "values", "/", "float", "(", "self", ".", "num_forecasts", ")", "crps_c", "=", "np", ".", "sum", "(", "self", ".", "num_forecasts", "*", "(", "o_bar", "**", "2", ")", "-", "o_bar", "*", "self", ".", "errors", "[", "\"O\"", "]", ".", "values", "*", "2.0", "+", "self", ".", "errors", "[", "\"O_2\"", "]", ".", "values", ")", "/", "float", "(", "self", ".", "thresholds", ".", "size", "*", "self", ".", "num_forecasts", ")", "return", "crps_c" ]
Calculate the climatological CRPS.
[ "Calculate", "the", "climatological", "CRPS", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L495-L502
train
djgagne/hagelslag
hagelslag/evaluation/ProbabilityMetrics.py
DistributedCRPS.crpss
def crpss(self): """ Calculate the continuous ranked probability skill score from existing data. """ crps_f = self.crps() crps_c = self.crps_climo() return 1.0 - float(crps_f) / float(crps_c)
python
def crpss(self): """ Calculate the continuous ranked probability skill score from existing data. """ crps_f = self.crps() crps_c = self.crps_climo() return 1.0 - float(crps_f) / float(crps_c)
[ "def", "crpss", "(", "self", ")", ":", "crps_f", "=", "self", ".", "crps", "(", ")", "crps_c", "=", "self", ".", "crps_climo", "(", ")", "return", "1.0", "-", "float", "(", "crps_f", ")", "/", "float", "(", "crps_c", ")" ]
Calculate the continuous ranked probability skill score from existing data.
[ "Calculate", "the", "continous", "ranked", "probability", "skill", "score", "from", "existing", "data", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/ProbabilityMetrics.py#L504-L510
train
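The skill score reads as improvement over climatology: 1 is perfect, 0 matches climatology, negative is worse. A one-line numeric example:

    print(1.0 - 0.8 / 1.2)  # CRPSS ~ 0.333 when the forecast CRPS is 0.8 vs climatology 1.2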
base4sistemas/satcfe
satcfe/alertas.py
checar
def checar(cliente_sat): """ Checks in sequence the registered alerts (see :func:`registrar`) against the data from the operational status query of the SAT equipment. This method then results in a list of the active alerts. :param cliente_sat: An instance of :class:`satcfe.clientelocal.ClienteSATLocal` or :class:`satcfe.clientesathub.ClienteSATHub` on which the method that queries the operational status of the SAT equipment will be invoked. :rtype: list """ resposta = cliente_sat.consultar_status_operacional() alertas = [] for classe_alerta in AlertaOperacao.alertas_registrados: alerta = classe_alerta(resposta) if alerta.checar(): alertas.append(alerta) return alertas
python
def checar(cliente_sat): """ Checks in sequence the registered alerts (see :func:`registrar`) against the data from the operational status query of the SAT equipment. This method then results in a list of the active alerts. :param cliente_sat: An instance of :class:`satcfe.clientelocal.ClienteSATLocal` or :class:`satcfe.clientesathub.ClienteSATHub` on which the method that queries the operational status of the SAT equipment will be invoked. :rtype: list """ resposta = cliente_sat.consultar_status_operacional() alertas = [] for classe_alerta in AlertaOperacao.alertas_registrados: alerta = classe_alerta(resposta) if alerta.checar(): alertas.append(alerta) return alertas
[ "def", "checar", "(", "cliente_sat", ")", ":", "resposta", "=", "cliente_sat", ".", "consultar_status_operacional", "(", ")", "alertas", "=", "[", "]", "for", "classe_alerta", "in", "AlertaOperacao", ".", "alertas_registrados", ":", "alerta", "=", "classe_alerta", "(", "resposta", ")", "if", "alerta", ".", "checar", "(", ")", ":", "alertas", ".", "append", "(", "alerta", ")", "return", "alertas" ]
Checks in sequence the registered alerts (see :func:`registrar`) against the data from the operational status query of the SAT equipment. This method then results in a list of the active alerts. :param cliente_sat: An instance of :class:`satcfe.clientelocal.ClienteSATLocal` or :class:`satcfe.clientesathub.ClienteSATHub` on which the method that queries the operational status of the SAT equipment will be invoked. :rtype: list
[ "Checa", "em", "sequência", "os", "alertas", "registrados", "(", "veja", ":", "func", ":", "registrar", ")", "contra", "os", "dados", "da", "consulta", "ao", "status", "operacional", "do", "equipamento", "SAT", ".", "Este", "método", "irá", "então", "resultar", "em", "uma", "lista", "dos", "alertas", "ativos", "." ]
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/alertas.py#L375-L395
train
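A hedged wiring sketch: cliente_sat below is a placeholder for an already-configured satcfe.clientelocal.ClienteSATLocal or ClienteSATHub (construction, which requires the vendor library, is omitted), so this fragment is illustrative rather than directly runnable:

    from satcfe.alertas import checar

    # `cliente_sat` is assumed to be a configured ClienteSATLocal/ClienteSATHub.
    for alerta in checar(cliente_sat):
        print(alerta)  # each active alert resulting from the status query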
nion-software/nionswift
nion/swift/model/Metadata.py
has_metadata_value
def has_metadata_value(metadata_source, key: str) -> bool: """Return whether the metadata value for the given key exists. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``. """ desc = session_key_map.get(key) if desc is not None: d = getattr(metadata_source, "session_metadata", dict()) for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None: return desc['path'][-1] in d desc = key_map.get(key) if desc is not None: d = getattr(metadata_source, "metadata", dict()) for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None: return desc['path'][-1] in d raise KeyError()
python
def has_metadata_value(metadata_source, key: str) -> bool: """Return whether the metadata value for the given key exists. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``. """ desc = session_key_map.get(key) if desc is not None: d = getattr(metadata_source, "session_metadata", dict()) for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None: return desc['path'][-1] in d desc = key_map.get(key) if desc is not None: d = getattr(metadata_source, "metadata", dict()) for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None: return desc['path'][-1] in d raise KeyError()
[ "def", "has_metadata_value", "(", "metadata_source", ",", "key", ":", "str", ")", "->", "bool", ":", "desc", "=", "session_key_map", ".", "get", "(", "key", ")", "if", "desc", "is", "not", "None", ":", "d", "=", "getattr", "(", "metadata_source", ",", "\"session_metadata\"", ",", "dict", "(", ")", ")", "for", "k", "in", "desc", "[", "'path'", "]", "[", ":", "-", "1", "]", ":", "d", "=", "d", ".", "setdefault", "(", "k", ",", "dict", "(", ")", ")", "if", "d", "is", "not", "None", "else", "None", "if", "d", "is", "not", "None", ":", "return", "desc", "[", "'path'", "]", "[", "-", "1", "]", "in", "d", "desc", "=", "key_map", ".", "get", "(", "key", ")", "if", "desc", "is", "not", "None", ":", "d", "=", "getattr", "(", "metadata_source", ",", "\"metadata\"", ",", "dict", "(", ")", ")", "for", "k", "in", "desc", "[", "'path'", "]", "[", ":", "-", "1", "]", ":", "d", "=", "d", ".", "setdefault", "(", "k", ",", "dict", "(", ")", ")", "if", "d", "is", "not", "None", "else", "None", "if", "d", "is", "not", "None", ":", "return", "desc", "[", "'path'", "]", "[", "-", "1", "]", "in", "d", "raise", "False" ]
Return whether the metadata value for the given key exists. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``.
[ "Return", "whether", "the", "metadata", "value", "for", "the", "given", "key", "exists", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Metadata.py#L66-L92
train
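A sketch of checking a key against a duck-typed source: the functions only use getattr(..., "session_metadata"/"metadata", dict()), so a plain object works. "session.instrument" is taken from the docstring's own example and may not match the actual predefined key maps, hence the KeyError guard:

    from nion.swift.model import Metadata

    class Source:
        def __init__(self):
            self.session_metadata = {}
            self.metadata = {}

    src = Source()
    try:
        Metadata.set_metadata_value(src, "session.instrument", "STEM-1")
        print(Metadata.has_metadata_value(src, "session.instrument"))
    except KeyError:
        print("not one of the predefined keys")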
nion-software/nionswift
nion/swift/model/Metadata.py
get_metadata_value
def get_metadata_value(metadata_source, key: str) -> typing.Any: """Get the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``. """ desc = session_key_map.get(key) if desc is not None: v = getattr(metadata_source, "session_metadata", dict()) for k in desc['path']: v = v.get(k) if v is not None else None return v desc = key_map.get(key) if desc is not None: v = getattr(metadata_source, "metadata", dict()) for k in desc['path']: v = v.get(k) if v is not None else None return v raise KeyError()
python
def get_metadata_value(metadata_source, key: str) -> typing.Any: """Get the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``. """ desc = session_key_map.get(key) if desc is not None: v = getattr(metadata_source, "session_metadata", dict()) for k in desc['path']: v = v.get(k) if v is not None else None return v desc = key_map.get(key) if desc is not None: v = getattr(metadata_source, "metadata", dict()) for k in desc['path']: v = v.get(k) if v is not None else None return v raise KeyError()
[ "def", "get_metadata_value", "(", "metadata_source", ",", "key", ":", "str", ")", "->", "typing", ".", "Any", ":", "desc", "=", "session_key_map", ".", "get", "(", "key", ")", "if", "desc", "is", "not", "None", ":", "v", "=", "getattr", "(", "metadata_source", ",", "\"session_metadata\"", ",", "dict", "(", ")", ")", "for", "k", "in", "desc", "[", "'path'", "]", ":", "v", "=", "v", ".", "get", "(", "k", ")", "if", "v", "is", "not", "None", "else", "None", "return", "v", "desc", "=", "key_map", ".", "get", "(", "key", ")", "if", "desc", "is", "not", "None", ":", "v", "=", "getattr", "(", "metadata_source", ",", "\"metadata\"", ",", "dict", "(", ")", ")", "for", "k", "in", "desc", "[", "'path'", "]", ":", "v", "=", "v", ".", "get", "(", "k", ")", "if", "v", "is", "not", "None", "else", "None", "return", "v", "raise", "KeyError", "(", ")" ]
Get the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``.
[ "Get", "the", "metadata", "value", "for", "the", "given", "key", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Metadata.py#L94-L118
train
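A usage sketch for the lookup above; SimpleNamespace stands in for a real data item, since the function only reads a metadata (or session_metadata) attribute, and the descriptor shown is a hypothetical key_map entry. Note that a missing value yields None rather than an error; KeyError is reserved for unrecognized keys.

from types import SimpleNamespace

source = SimpleNamespace(metadata={"hardware_source": {"exposure": 0.5}})
desc = {"path": ["hardware_source", "exposure"]}  # hypothetical descriptor

v = getattr(source, "metadata", dict())
for k in desc["path"]:
    v = v.get(k) if v is not None else None  # missing parents yield None
print(v)  # 0.5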
nion-software/nionswift
nion/swift/model/Metadata.py
set_metadata_value
def set_metadata_value(metadata_source, key: str, value: typing.Any) -> None: """Set the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``. """ desc = session_key_map.get(key) if desc is not None: d0 = getattr(metadata_source, "session_metadata", dict()) d = d0 for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None: d[desc['path'][-1]] = value metadata_source.session_metadata = d0 return desc = key_map.get(key) if desc is not None: d0 = getattr(metadata_source, "metadata", dict()) d = d0 for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None: d[desc['path'][-1]] = value metadata_source.metadata = d0 return raise KeyError()
python
def set_metadata_value(metadata_source, key: str, value: typing.Any) -> None: """Set the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``. """ desc = session_key_map.get(key) if desc is not None: d0 = getattr(metadata_source, "session_metadata", dict()) d = d0 for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None: d[desc['path'][-1]] = value metadata_source.session_metadata = d0 return desc = key_map.get(key) if desc is not None: d0 = getattr(metadata_source, "metadata", dict()) d = d0 for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None: d[desc['path'][-1]] = value metadata_source.metadata = d0 return raise KeyError()
[ "def", "set_metadata_value", "(", "metadata_source", ",", "key", ":", "str", ",", "value", ":", "typing", ".", "Any", ")", "->", "None", ":", "desc", "=", "session_key_map", ".", "get", "(", "key", ")", "if", "desc", "is", "not", "None", ":", "d0", "=", "getattr", "(", "metadata_source", ",", "\"session_metadata\"", ",", "dict", "(", ")", ")", "d", "=", "d0", "for", "k", "in", "desc", "[", "'path'", "]", "[", ":", "-", "1", "]", ":", "d", "=", "d", ".", "setdefault", "(", "k", ",", "dict", "(", ")", ")", "if", "d", "is", "not", "None", "else", "None", "if", "d", "is", "not", "None", ":", "d", "[", "desc", "[", "'path'", "]", "[", "-", "1", "]", "]", "=", "value", "metadata_source", ".", "session_metadata", "=", "d0", "return", "desc", "=", "key_map", ".", "get", "(", "key", ")", "if", "desc", "is", "not", "None", ":", "d0", "=", "getattr", "(", "metadata_source", ",", "\"metadata\"", ",", "dict", "(", ")", ")", "d", "=", "d0", "for", "k", "in", "desc", "[", "'path'", "]", "[", ":", "-", "1", "]", ":", "d", "=", "d", ".", "setdefault", "(", "k", ",", "dict", "(", ")", ")", "if", "d", "is", "not", "None", "else", "None", "if", "d", "is", "not", "None", ":", "d", "[", "desc", "[", "'path'", "]", "[", "-", "1", "]", "]", "=", "value", "metadata_source", ".", "metadata", "=", "d0", "return", "raise", "KeyError", "(", ")" ]
Set the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``.
[ "Set", "the", "metadata", "value", "for", "the", "given", "key", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Metadata.py#L120-L152
train
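The write path above hinges on two details worth seeing in isolation: setdefault materializes intermediate dicts so the final assignment always has a parent, and the whole top-level dict is assigned back to the source attribute, presumably so a property setter can observe the change. A minimal sketch with a hypothetical path:

d0 = {}
path = ["hardware_source", "autostem", "high_tension_v"]  # hypothetical path

d = d0
for k in path[:-1]:
    d = d.setdefault(k, dict())  # create missing parents as empty dicts
d[path[-1]] = 140000

print(d0)  # {'hardware_source': {'autostem': {'high_tension_v': 140000}}}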
nion-software/nionswift
nion/swift/model/Metadata.py
delete_metadata_value
def delete_metadata_value(metadata_source, key: str) -> None: """Delete the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``. """ desc = session_key_map.get(key) if desc is not None: d0 = getattr(metadata_source, "session_metadata", dict()) d = d0 for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None and desc['path'][-1] in d: d.pop(desc['path'][-1], None) metadata_source.session_metadata = d0 return desc = key_map.get(key) if desc is not None: d0 = getattr(metadata_source, "metadata", dict()) d = d0 for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None and desc['path'][-1] in d: d.pop(desc['path'][-1], None) metadata_source.metadata = d0 return
python
def delete_metadata_value(metadata_source, key: str) -> None: """Delete the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``. """ desc = session_key_map.get(key) if desc is not None: d0 = getattr(metadata_source, "session_metadata", dict()) d = d0 for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None and desc['path'][-1] in d: d.pop(desc['path'][-1], None) metadata_source.session_metadata = d0 return desc = key_map.get(key) if desc is not None: d0 = getattr(metadata_source, "metadata", dict()) d = d0 for k in desc['path'][:-1]: d = d.setdefault(k, dict()) if d is not None else None if d is not None and desc['path'][-1] in d: d.pop(desc['path'][-1], None) metadata_source.metadata = d0 return
[ "def", "delete_metadata_value", "(", "metadata_source", ",", "key", ":", "str", ")", "->", "None", ":", "desc", "=", "session_key_map", ".", "get", "(", "key", ")", "if", "desc", "is", "not", "None", ":", "d0", "=", "getattr", "(", "metadata_source", ",", "\"session_metadata\"", ",", "dict", "(", ")", ")", "d", "=", "d0", "for", "k", "in", "desc", "[", "'path'", "]", "[", ":", "-", "1", "]", ":", "d", "=", "d", ".", "setdefault", "(", "k", ",", "dict", "(", ")", ")", "if", "d", "is", "not", "None", "else", "None", "if", "d", "is", "not", "None", "and", "desc", "[", "'path'", "]", "[", "-", "1", "]", "in", "d", ":", "d", ".", "pop", "(", "desc", "[", "'path'", "]", "[", "-", "1", "]", ",", "None", ")", "metadata_source", ".", "session_metadata", "=", "d0", "return", "desc", "=", "key_map", ".", "get", "(", "key", ")", "if", "desc", "is", "not", "None", ":", "d0", "=", "getattr", "(", "metadata_source", ",", "\"metadata\"", ",", "dict", "(", ")", ")", "d", "=", "d0", "for", "k", "in", "desc", "[", "'path'", "]", "[", ":", "-", "1", "]", ":", "d", "=", "d", ".", "setdefault", "(", "k", ",", "dict", "(", ")", ")", "if", "d", "is", "not", "None", "else", "None", "if", "d", "is", "not", "None", "and", "desc", "[", "'path'", "]", "[", "-", "1", "]", "in", "d", ":", "d", ".", "pop", "(", "desc", "[", "'path'", "]", "[", "-", "1", "]", ",", "None", ")", "metadata_source", ".", "metadata", "=", "d0", "return" ]
Delete the metadata value for the given key. There are a set of predefined keys that, when used, will be type checked and be interoperable with other applications. Please consult reference documentation for valid keys. If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed by the predefined keys. e.g. 'session.instrument' or 'camera.binning'. Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer using the ``metadata_value`` methods over directly accessing ``metadata``.
[ "Delete", "the", "metadata", "value", "for", "the", "given", "key", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Metadata.py#L154-L185
train
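Taken together, the four accessors support a round trip. A sketch, assuming the module imports as nion.swift.model.Metadata and that 'session.instrument' is one of the predefined session keys the docstrings mention; any object exposing metadata/session_metadata attributes works as the source.

from types import SimpleNamespace
from nion.swift.model import Metadata

source = SimpleNamespace(metadata={}, session_metadata={})

Metadata.set_metadata_value(source, "session.instrument", "STEM-1")
assert Metadata.has_metadata_value(source, "session.instrument")
assert Metadata.get_metadata_value(source, "session.instrument") == "STEM-1"
Metadata.delete_metadata_value(source, "session.instrument")
assert not Metadata.has_metadata_value(source, "session.instrument")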
nion-software/nionswift
nion/swift/LineGraphCanvasItem.py
LineGraphAxes.calculate_y_ticks
def calculate_y_ticks(self, plot_height): """Calculate the y-axis items dependent on the plot height.""" calibrated_data_min = self.calibrated_data_min calibrated_data_max = self.calibrated_data_max calibrated_data_range = calibrated_data_max - calibrated_data_min ticker = self.y_ticker y_ticks = list() for tick_value, tick_label in zip(ticker.values, ticker.labels): if calibrated_data_range != 0.0: y_tick = plot_height - plot_height * (tick_value - calibrated_data_min) / calibrated_data_range else: y_tick = plot_height - plot_height * 0.5 if y_tick >= 0 and y_tick <= plot_height: y_ticks.append((y_tick, tick_label)) return y_ticks
python
def calculate_y_ticks(self, plot_height): """Calculate the y-axis items dependent on the plot height.""" calibrated_data_min = self.calibrated_data_min calibrated_data_max = self.calibrated_data_max calibrated_data_range = calibrated_data_max - calibrated_data_min ticker = self.y_ticker y_ticks = list() for tick_value, tick_label in zip(ticker.values, ticker.labels): if calibrated_data_range != 0.0: y_tick = plot_height - plot_height * (tick_value - calibrated_data_min) / calibrated_data_range else: y_tick = plot_height - plot_height * 0.5 if y_tick >= 0 and y_tick <= plot_height: y_ticks.append((y_tick, tick_label)) return y_ticks
[ "def", "calculate_y_ticks", "(", "self", ",", "plot_height", ")", ":", "calibrated_data_min", "=", "self", ".", "calibrated_data_min", "calibrated_data_max", "=", "self", ".", "calibrated_data_max", "calibrated_data_range", "=", "calibrated_data_max", "-", "calibrated_data_min", "ticker", "=", "self", ".", "y_ticker", "y_ticks", "=", "list", "(", ")", "for", "tick_value", ",", "tick_label", "in", "zip", "(", "ticker", ".", "values", ",", "ticker", ".", "labels", ")", ":", "if", "calibrated_data_range", "!=", "0.0", ":", "y_tick", "=", "plot_height", "-", "plot_height", "*", "(", "tick_value", "-", "calibrated_data_min", ")", "/", "calibrated_data_range", "else", ":", "y_tick", "=", "plot_height", "-", "plot_height", "*", "0.5", "if", "y_tick", ">=", "0", "and", "y_tick", "<=", "plot_height", ":", "y_ticks", ".", "append", "(", "(", "y_tick", ",", "tick_label", ")", ")", "return", "y_ticks" ]
Calculate the y-axis items dependent on the plot height.
[ "Calculate", "the", "y", "-", "axis", "items", "dependent", "on", "the", "plot", "height", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LineGraphCanvasItem.py#L164-L181
train
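The per-tick mapping is plain linear interpolation with the pixel origin at the top of the plot, so the minimum lands at the bottom edge. A standalone sketch with fixed values in place of the ticker output:

plot_height = 200.0
data_min, data_max = 0.0, 10.0

for tick_value in (0.0, 2.5, 5.0, 10.0):  # stand-ins for ticker.values
    y = plot_height - plot_height * (tick_value - data_min) / (data_max - data_min)
    print(tick_value, y)  # 0.0 -> 200.0 (bottom edge), 10.0 -> 0.0 (top edge)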
nion-software/nionswift
nion/swift/LineGraphCanvasItem.py
LineGraphAxes.calculate_x_ticks
def calculate_x_ticks(self, plot_width): """Calculate the x-axis items dependent on the plot width.""" x_calibration = self.x_calibration uncalibrated_data_left = self.__uncalibrated_left_channel uncalibrated_data_right = self.__uncalibrated_right_channel calibrated_data_left = x_calibration.convert_to_calibrated_value(uncalibrated_data_left) if x_calibration is not None else uncalibrated_data_left calibrated_data_right = x_calibration.convert_to_calibrated_value(uncalibrated_data_right) if x_calibration is not None else uncalibrated_data_right calibrated_data_left, calibrated_data_right = min(calibrated_data_left, calibrated_data_right), max(calibrated_data_left, calibrated_data_right) graph_left, graph_right, tick_values, division, precision = Geometry.make_pretty_range(calibrated_data_left, calibrated_data_right) drawn_data_width = self.drawn_right_channel - self.drawn_left_channel x_ticks = list() if drawn_data_width > 0.0: for tick_value in tick_values: label = nice_label(tick_value, precision) data_tick = x_calibration.convert_from_calibrated_value(tick_value) if x_calibration else tick_value x_tick = plot_width * (data_tick - self.drawn_left_channel) / drawn_data_width if x_tick >= 0 and x_tick <= plot_width: x_ticks.append((x_tick, label)) return x_ticks
python
def calculate_x_ticks(self, plot_width): """Calculate the x-axis items dependent on the plot width.""" x_calibration = self.x_calibration uncalibrated_data_left = self.__uncalibrated_left_channel uncalibrated_data_right = self.__uncalibrated_right_channel calibrated_data_left = x_calibration.convert_to_calibrated_value(uncalibrated_data_left) if x_calibration is not None else uncalibrated_data_left calibrated_data_right = x_calibration.convert_to_calibrated_value(uncalibrated_data_right) if x_calibration is not None else uncalibrated_data_right calibrated_data_left, calibrated_data_right = min(calibrated_data_left, calibrated_data_right), max(calibrated_data_left, calibrated_data_right) graph_left, graph_right, tick_values, division, precision = Geometry.make_pretty_range(calibrated_data_left, calibrated_data_right) drawn_data_width = self.drawn_right_channel - self.drawn_left_channel x_ticks = list() if drawn_data_width > 0.0: for tick_value in tick_values: label = nice_label(tick_value, precision) data_tick = x_calibration.convert_from_calibrated_value(tick_value) if x_calibration else tick_value x_tick = plot_width * (data_tick - self.drawn_left_channel) / drawn_data_width if x_tick >= 0 and x_tick <= plot_width: x_ticks.append((x_tick, label)) return x_ticks
[ "def", "calculate_x_ticks", "(", "self", ",", "plot_width", ")", ":", "x_calibration", "=", "self", ".", "x_calibration", "uncalibrated_data_left", "=", "self", ".", "__uncalibrated_left_channel", "uncalibrated_data_right", "=", "self", ".", "__uncalibrated_right_channel", "calibrated_data_left", "=", "x_calibration", ".", "convert_to_calibrated_value", "(", "uncalibrated_data_left", ")", "if", "x_calibration", "is", "not", "None", "else", "uncalibrated_data_left", "calibrated_data_right", "=", "x_calibration", ".", "convert_to_calibrated_value", "(", "uncalibrated_data_right", ")", "if", "x_calibration", "is", "not", "None", "else", "uncalibrated_data_right", "calibrated_data_left", ",", "calibrated_data_right", "=", "min", "(", "calibrated_data_left", ",", "calibrated_data_right", ")", ",", "max", "(", "calibrated_data_left", ",", "calibrated_data_right", ")", "graph_left", ",", "graph_right", ",", "tick_values", ",", "division", ",", "precision", "=", "Geometry", ".", "make_pretty_range", "(", "calibrated_data_left", ",", "calibrated_data_right", ")", "drawn_data_width", "=", "self", ".", "drawn_right_channel", "-", "self", ".", "drawn_left_channel", "x_ticks", "=", "list", "(", ")", "if", "drawn_data_width", ">", "0.0", ":", "for", "tick_value", "in", "tick_values", ":", "label", "=", "nice_label", "(", "tick_value", ",", "precision", ")", "data_tick", "=", "x_calibration", ".", "convert_from_calibrated_value", "(", "tick_value", ")", "if", "x_calibration", "else", "tick_value", "x_tick", "=", "plot_width", "*", "(", "data_tick", "-", "self", ".", "drawn_left_channel", ")", "/", "drawn_data_width", "if", "x_tick", ">=", "0", "and", "x_tick", "<=", "plot_width", ":", "x_ticks", ".", "append", "(", "(", "x_tick", ",", "label", ")", ")", "return", "x_ticks" ]
Calculate the x-axis items dependent on the plot width.
[ "Calculate", "the", "x", "-", "axis", "items", "dependent", "on", "the", "plot", "width", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LineGraphCanvasItem.py#L196-L221
train
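The x-axis version converts in two steps: calibrated tick value back to a channel, then channel to a pixel within the drawn channel range. A sketch assuming a linear calibration of the form calibrated = offset + scale * channel, which is what the convert_* calls imply; the tick values stand in for Geometry.make_pretty_range output.

offset, scale = -2.0, 0.1            # hypothetical calibration
left_channel, right_channel = 0, 1000
plot_width = 500.0

def from_calibrated(value):
    return (value - offset) / scale

for tick_value in (0.0, 25.0, 50.0, 75.0):
    channel = from_calibrated(tick_value)
    x = plot_width * (channel - left_channel) / (right_channel - left_channel)
    if 0 <= x <= plot_width:         # clip ticks that fall outside the plot
        print(tick_value, x)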
nion-software/nionswift
nion/swift/LineGraphCanvasItem.py
LineGraphHorizontalAxisLabelCanvasItem.size_to_content
def size_to_content(self): """ Size the canvas item to the proper height. """ new_sizing = self.copy_sizing() new_sizing.minimum_height = 0 new_sizing.maximum_height = 0 axes = self.__axes if axes and axes.is_valid: if axes.x_calibration and axes.x_calibration.units: new_sizing.minimum_height = self.font_size + 4 new_sizing.maximum_height = self.font_size + 4 self.update_sizing(new_sizing)
python
def size_to_content(self): """ Size the canvas item to the proper height. """ new_sizing = self.copy_sizing() new_sizing.minimum_height = 0 new_sizing.maximum_height = 0 axes = self.__axes if axes and axes.is_valid: if axes.x_calibration and axes.x_calibration.units: new_sizing.minimum_height = self.font_size + 4 new_sizing.maximum_height = self.font_size + 4 self.update_sizing(new_sizing)
[ "def", "size_to_content", "(", "self", ")", ":", "new_sizing", "=", "self", ".", "copy_sizing", "(", ")", "new_sizing", ".", "minimum_height", "=", "0", "new_sizing", ".", "maximum_height", "=", "0", "axes", "=", "self", ".", "__axes", "if", "axes", "and", "axes", ".", "is_valid", ":", "if", "axes", ".", "x_calibration", "and", "axes", ".", "x_calibration", ".", "units", ":", "new_sizing", ".", "minimum_height", "=", "self", ".", "font_size", "+", "4", "new_sizing", ".", "maximum_height", "=", "self", ".", "font_size", "+", "4", "self", ".", "update_sizing", "(", "new_sizing", ")" ]
Size the canvas item to the proper height.
[ "Size", "the", "canvas", "item", "to", "the", "proper", "height", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LineGraphCanvasItem.py#L783-L793
train
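The sizing rule reduces to one line: reserve a text row (font height plus padding) only when there are units to draw, and pin minimum == maximum so the layout cannot stretch it. A paraphrase:

def label_row_height(font_size: int, units: str) -> int:
    # zero height removes the row from the layout entirely
    return font_size + 4 if units else 0

assert label_row_height(12, "nm") == 16
assert label_row_height(12, "") == 0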
nion-software/nionswift
nion/swift/LineGraphCanvasItem.py
LineGraphVerticalAxisScaleCanvasItem.size_to_content
def size_to_content(self, get_font_metrics_fn): """ Size the canvas item to the proper width, the maximum of any label. """ new_sizing = self.copy_sizing() new_sizing.minimum_width = 0 new_sizing.maximum_width = 0 axes = self.__axes if axes and axes.is_valid: # calculate the width based on the label lengths font = "{0:d}px".format(self.font_size) max_width = 0 y_range = axes.calibrated_data_max - axes.calibrated_data_min label = axes.y_ticker.value_label(axes.calibrated_data_max + y_range * 5) max_width = max(max_width, get_font_metrics_fn(font, label).width) label = axes.y_ticker.value_label(axes.calibrated_data_min - y_range * 5) max_width = max(max_width, get_font_metrics_fn(font, label).width) new_sizing.minimum_width = max_width new_sizing.maximum_width = max_width self.update_sizing(new_sizing)
python
def size_to_content(self, get_font_metrics_fn): """ Size the canvas item to the proper width, the maximum of any label. """ new_sizing = self.copy_sizing() new_sizing.minimum_width = 0 new_sizing.maximum_width = 0 axes = self.__axes if axes and axes.is_valid: # calculate the width based on the label lengths font = "{0:d}px".format(self.font_size) max_width = 0 y_range = axes.calibrated_data_max - axes.calibrated_data_min label = axes.y_ticker.value_label(axes.calibrated_data_max + y_range * 5) max_width = max(max_width, get_font_metrics_fn(font, label).width) label = axes.y_ticker.value_label(axes.calibrated_data_min - y_range * 5) max_width = max(max_width, get_font_metrics_fn(font, label).width) new_sizing.minimum_width = max_width new_sizing.maximum_width = max_width self.update_sizing(new_sizing)
[ "def", "size_to_content", "(", "self", ",", "get_font_metrics_fn", ")", ":", "new_sizing", "=", "self", ".", "copy_sizing", "(", ")", "new_sizing", ".", "minimum_width", "=", "0", "new_sizing", ".", "maximum_width", "=", "0", "axes", "=", "self", ".", "__axes", "if", "axes", "and", "axes", ".", "is_valid", ":", "# calculate the width based on the label lengths", "font", "=", "\"{0:d}px\"", ".", "format", "(", "self", ".", "font_size", ")", "max_width", "=", "0", "y_range", "=", "axes", ".", "calibrated_data_max", "-", "axes", ".", "calibrated_data_min", "label", "=", "axes", ".", "y_ticker", ".", "value_label", "(", "axes", ".", "calibrated_data_max", "+", "y_range", "*", "5", ")", "max_width", "=", "max", "(", "max_width", ",", "get_font_metrics_fn", "(", "font", ",", "label", ")", ".", "width", ")", "label", "=", "axes", ".", "y_ticker", ".", "value_label", "(", "axes", ".", "calibrated_data_min", "-", "y_range", "*", "5", ")", "max_width", "=", "max", "(", "max_width", ",", "get_font_metrics_fn", "(", "font", ",", "label", ")", ".", "width", ")", "new_sizing", ".", "minimum_width", "=", "max_width", "new_sizing", ".", "maximum_width", "=", "max_width", "self", ".", "update_sizing", "(", "new_sizing", ")" ]
Size the canvas item to the proper width, the maximum of any label.
[ "Size", "the", "canvas", "item", "to", "the", "proper", "width", "the", "maximum", "of", "any", "label", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LineGraphCanvasItem.py#L869-L892
train
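Here the width comes from measuring candidate labels; probing values well outside the current range (plus or minus five times the range, as in the method) keeps the column wide enough when the data rescales. A sketch with a crude stand-in for the font metrics callback:

import collections

FontMetrics = collections.namedtuple("FontMetrics", ["width"])

def get_font_metrics_fn(font, text):
    return FontMetrics(width=7 * len(text))  # ~7px per character, hypothetical

labels = ["-5.0e+03", "6.0e+03"]  # stand-ins for y_ticker.value_label(...)
max_width = max(get_font_metrics_fn("12px", label).width for label in labels)
print(max_width)  # 56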
nion-software/nionswift
nion/swift/LineGraphCanvasItem.py
LineGraphVerticalAxisLabelCanvasItem.size_to_content
def size_to_content(self): """ Size the canvas item to the proper width. """ new_sizing = self.copy_sizing() new_sizing.minimum_width = 0 new_sizing.maximum_width = 0 axes = self.__axes if axes and axes.is_valid: if axes.y_calibration and axes.y_calibration.units: new_sizing.minimum_width = self.font_size + 4 new_sizing.maximum_width = self.font_size + 4 self.update_sizing(new_sizing)
python
def size_to_content(self): """ Size the canvas item to the proper width. """ new_sizing = self.copy_sizing() new_sizing.minimum_width = 0 new_sizing.maximum_width = 0 axes = self.__axes if axes and axes.is_valid: if axes.y_calibration and axes.y_calibration.units: new_sizing.minimum_width = self.font_size + 4 new_sizing.maximum_width = self.font_size + 4 self.update_sizing(new_sizing)
[ "def", "size_to_content", "(", "self", ")", ":", "new_sizing", "=", "self", ".", "copy_sizing", "(", ")", "new_sizing", ".", "minimum_width", "=", "0", "new_sizing", ".", "maximum_width", "=", "0", "axes", "=", "self", ".", "__axes", "if", "axes", "and", "axes", ".", "is_valid", ":", "if", "axes", ".", "y_calibration", "and", "axes", ".", "y_calibration", ".", "units", ":", "new_sizing", ".", "minimum_width", "=", "self", ".", "font_size", "+", "4", "new_sizing", ".", "maximum_width", "=", "self", ".", "font_size", "+", "4", "self", ".", "update_sizing", "(", "new_sizing", ")" ]
Size the canvas item to the proper width.
[ "Size", "the", "canvas", "item", "to", "the", "proper", "width", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LineGraphCanvasItem.py#L936-L946
train
ajk8/hatchery
hatchery/snippets.py
get_snippet_content
def get_snippet_content(snippet_name, **format_kwargs): """ Load the content from a snippet file which exists in SNIPPETS_ROOT """ filename = snippet_name + '.snippet' snippet_file = os.path.join(SNIPPETS_ROOT, filename) if not os.path.isfile(snippet_file): raise ValueError('could not find snippet with name ' + filename) ret = helpers.get_file_content(snippet_file) if format_kwargs: ret = ret.format(**format_kwargs) return ret
python
def get_snippet_content(snippet_name, **format_kwargs): """ Load the content from a snippet file which exists in SNIPPETS_ROOT """ filename = snippet_name + '.snippet' snippet_file = os.path.join(SNIPPETS_ROOT, filename) if not os.path.isfile(snippet_file): raise ValueError('could not find snippet with name ' + filename) ret = helpers.get_file_content(snippet_file) if format_kwargs: ret = ret.format(**format_kwargs) return ret
[ "def", "get_snippet_content", "(", "snippet_name", ",", "*", "*", "format_kwargs", ")", ":", "filename", "=", "snippet_name", "+", "'.snippet'", "snippet_file", "=", "os", ".", "path", ".", "join", "(", "SNIPPETS_ROOT", ",", "filename", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "snippet_file", ")", ":", "raise", "ValueError", "(", "'could not find snippet with name '", "+", "filename", ")", "ret", "=", "helpers", ".", "get_file_content", "(", "snippet_file", ")", "if", "format_kwargs", ":", "ret", "=", "ret", ".", "format", "(", "*", "*", "format_kwargs", ")", "return", "ret" ]
Load the content from a snippet file which exists in SNIPPETS_ROOT
[ "Load", "the", "content", "from", "a", "snippet", "file", "which", "exists", "in", "SNIPPETS_ROOT" ]
e068c9f5366d2c98225babb03d4cde36c710194f
https://github.com/ajk8/hatchery/blob/e068c9f5366d2c98225babb03d4cde36c710194f/hatchery/snippets.py#L7-L16
train
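A self-contained usage sketch; a temporary directory stands in for SNIPPETS_ROOT and a plain file read replaces helpers.get_file_content, which is all the function needs beyond str.format for the keyword substitution.

import os
import tempfile

root = tempfile.mkdtemp()  # stand-in for SNIPPETS_ROOT
with open(os.path.join(root, "greeting.snippet"), "w") as f:
    f.write("Hello, {name}!")

def get_snippet_content(snippet_name, **format_kwargs):
    snippet_file = os.path.join(root, snippet_name + ".snippet")
    if not os.path.isfile(snippet_file):
        raise ValueError("could not find snippet with name " + snippet_name + ".snippet")
    with open(snippet_file) as f:
        ret = f.read()
    return ret.format(**format_kwargs) if format_kwargs else ret

print(get_snippet_content("greeting", name="hatchery"))  # Hello, hatchery!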
nion-software/nionswift
nion/swift/LinePlotCanvasItem.py
LinePlotCanvasItem.update_display_properties
def update_display_properties(self, display_calibration_info, display_properties: typing.Mapping, display_layers: typing.Sequence[typing.Mapping]) -> None: """Update the display values. Called from display panel. This method saves the display values and data and triggers an update. It should be as fast as possible. As a layer, this canvas item will respond to the update by calling prepare_render on the layer's rendering thread. Prepare render will call prepare_display which will construct new axes and update all of the constituent canvas items such as the axes labels and the graph layers. Each will trigger its own update if its inputs have changed. The inefficiencies in this process are that the layer must re-render on each call to this function. There is also a cost within the constituent canvas items to check whether the axes or their data has changed. When the display is associated with a single data item, the data will be """ # may be called from thread; prevent a race condition with closing. with self.__closing_lock: if self.__closed: return displayed_dimensional_scales = display_calibration_info.displayed_dimensional_scales displayed_dimensional_calibrations = display_calibration_info.displayed_dimensional_calibrations self.__data_scale = displayed_dimensional_scales[-1] if len(displayed_dimensional_scales) > 0 else 1 self.__displayed_dimensional_calibration = displayed_dimensional_calibrations[-1] if len(displayed_dimensional_calibrations) > 0 else Calibration.Calibration(scale=displayed_dimensional_scales[-1]) self.__intensity_calibration = display_calibration_info.displayed_intensity_calibration self.__calibration_style = display_calibration_info.calibration_style self.__y_min = display_properties.get("y_min") self.__y_max = display_properties.get("y_max") self.__y_style = display_properties.get("y_style", "linear") self.__left_channel = display_properties.get("left_channel") self.__right_channel = display_properties.get("right_channel") self.__legend_position = display_properties.get("legend_position") self.__display_layers = display_layers if self.__display_values_list and len(self.__display_values_list) > 0: self.__xdata_list = [display_values.display_data_and_metadata if display_values else None for display_values in self.__display_values_list] xdata0 = self.__xdata_list[0] if xdata0: self.__update_frame(xdata0.metadata) else: self.__xdata_list = list() # update the cursor info self.__update_cursor_info() # mark for update. prepare display will mark children for update if necesssary. self.update()
python
def update_display_properties(self, display_calibration_info, display_properties: typing.Mapping, display_layers: typing.Sequence[typing.Mapping]) -> None: """Update the display values. Called from display panel. This method saves the display values and data and triggers an update. It should be as fast as possible. As a layer, this canvas item will respond to the update by calling prepare_render on the layer's rendering thread. Prepare render will call prepare_display which will construct new axes and update all of the constituent canvas items such as the axes labels and the graph layers. Each will trigger its own update if its inputs have changed. The inefficiencies in this process are that the layer must re-render on each call to this function. There is also a cost within the constituent canvas items to check whether the axes or their data has changed. When the display is associated with a single data item, the data will be """ # may be called from thread; prevent a race condition with closing. with self.__closing_lock: if self.__closed: return displayed_dimensional_scales = display_calibration_info.displayed_dimensional_scales displayed_dimensional_calibrations = display_calibration_info.displayed_dimensional_calibrations self.__data_scale = displayed_dimensional_scales[-1] if len(displayed_dimensional_scales) > 0 else 1 self.__displayed_dimensional_calibration = displayed_dimensional_calibrations[-1] if len(displayed_dimensional_calibrations) > 0 else Calibration.Calibration(scale=displayed_dimensional_scales[-1]) self.__intensity_calibration = display_calibration_info.displayed_intensity_calibration self.__calibration_style = display_calibration_info.calibration_style self.__y_min = display_properties.get("y_min") self.__y_max = display_properties.get("y_max") self.__y_style = display_properties.get("y_style", "linear") self.__left_channel = display_properties.get("left_channel") self.__right_channel = display_properties.get("right_channel") self.__legend_position = display_properties.get("legend_position") self.__display_layers = display_layers if self.__display_values_list and len(self.__display_values_list) > 0: self.__xdata_list = [display_values.display_data_and_metadata if display_values else None for display_values in self.__display_values_list] xdata0 = self.__xdata_list[0] if xdata0: self.__update_frame(xdata0.metadata) else: self.__xdata_list = list() # update the cursor info self.__update_cursor_info() # mark for update. prepare display will mark children for update if necesssary. self.update()
[ "def", "update_display_properties", "(", "self", ",", "display_calibration_info", ",", "display_properties", ":", "typing", ".", "Mapping", ",", "display_layers", ":", "typing", ".", "Sequence", "[", "typing", ".", "Mapping", "]", ")", "->", "None", ":", "# may be called from thread; prevent a race condition with closing.", "with", "self", ".", "__closing_lock", ":", "if", "self", ".", "__closed", ":", "return", "displayed_dimensional_scales", "=", "display_calibration_info", ".", "displayed_dimensional_scales", "displayed_dimensional_calibrations", "=", "display_calibration_info", ".", "displayed_dimensional_calibrations", "self", ".", "__data_scale", "=", "displayed_dimensional_scales", "[", "-", "1", "]", "if", "len", "(", "displayed_dimensional_scales", ")", ">", "0", "else", "1", "self", ".", "__displayed_dimensional_calibration", "=", "displayed_dimensional_calibrations", "[", "-", "1", "]", "if", "len", "(", "displayed_dimensional_calibrations", ")", ">", "0", "else", "Calibration", ".", "Calibration", "(", "scale", "=", "displayed_dimensional_scales", "[", "-", "1", "]", ")", "self", ".", "__intensity_calibration", "=", "display_calibration_info", ".", "displayed_intensity_calibration", "self", ".", "__calibration_style", "=", "display_calibration_info", ".", "calibration_style", "self", ".", "__y_min", "=", "display_properties", ".", "get", "(", "\"y_min\"", ")", "self", ".", "__y_max", "=", "display_properties", ".", "get", "(", "\"y_max\"", ")", "self", ".", "__y_style", "=", "display_properties", ".", "get", "(", "\"y_style\"", ",", "\"linear\"", ")", "self", ".", "__left_channel", "=", "display_properties", ".", "get", "(", "\"left_channel\"", ")", "self", ".", "__right_channel", "=", "display_properties", ".", "get", "(", "\"right_channel\"", ")", "self", ".", "__legend_position", "=", "display_properties", ".", "get", "(", "\"legend_position\"", ")", "self", ".", "__display_layers", "=", "display_layers", "if", "self", ".", "__display_values_list", "and", "len", "(", "self", ".", "__display_values_list", ")", ">", "0", ":", "self", ".", "__xdata_list", "=", "[", "display_values", ".", "display_data_and_metadata", "if", "display_values", "else", "None", "for", "display_values", "in", "self", ".", "__display_values_list", "]", "xdata0", "=", "self", ".", "__xdata_list", "[", "0", "]", "if", "xdata0", ":", "self", ".", "__update_frame", "(", "xdata0", ".", "metadata", ")", "else", ":", "self", ".", "__xdata_list", "=", "list", "(", ")", "# update the cursor info", "self", ".", "__update_cursor_info", "(", ")", "# mark for update. prepare display will mark children for update if necesssary.", "self", ".", "update", "(", ")" ]
Update the display values. Called from display panel. This method saves the display values and data and triggers an update. It should be as fast as possible. As a layer, this canvas item will respond to the update by calling prepare_render on the layer's rendering thread. Prepare render will call prepare_display which will construct new axes and update all of the constituent canvas items such as the axes labels and the graph layers. Each will trigger its own update if its inputs have changed. The inefficiencies in this process are that the layer must re-render on each call to this function. There is also a cost within the constituent canvas items to check whether the axes or their data has changed. When the display is associated with a single data item, the data will be
[ "Update", "the", "display", "values", ".", "Called", "from", "display", "panel", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LinePlotCanvasItem.py#L249-L296
train
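The display_properties and display_layers mappings the method reads have roughly this shape, reconstructed from the .get calls in the body and the layer keys used in prepare_display below; the legend_position value is a hypothetical example.

display_properties = {
    "y_min": 0.0,            # optional; None means auto-range
    "y_max": 1000.0,
    "y_style": "linear",     # default when the key is absent
    "left_channel": 0,
    "right_channel": 2048,
    "legend_position": "top-right",  # hypothetical value; the method only stores it
}

display_layers = [
    {"data_index": 0, "fill_color": "#1E90FF", "label": "survey"},
    {"data_index": 1, "data_row": 3, "stroke_color": "#F00"},
]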
nion-software/nionswift
nion/swift/LinePlotCanvasItem.py
LinePlotCanvasItem.__view_to_intervals
def __view_to_intervals(self, data_and_metadata: DataAndMetadata.DataAndMetadata, intervals: typing.List[typing.Tuple[float, float]]) -> None: """Change the view to encompass the channels and data represented by the given intervals.""" left = None right = None for interval in intervals: left = min(left, interval[0]) if left is not None else interval[0] right = max(right, interval[1]) if right is not None else interval[1] left = left if left is not None else 0.0 right = right if right is not None else 1.0 left_channel = int(max(0.0, left) * data_and_metadata.data_shape[-1]) right_channel = int(min(1.0, right) * data_and_metadata.data_shape[-1]) data_min = numpy.amin(data_and_metadata.data[..., left_channel:right_channel]) data_max = numpy.amax(data_and_metadata.data[..., left_channel:right_channel]) if data_min > 0 and data_max > 0: y_min = 0.0 y_max = data_max * 1.2 elif data_min < 0 and data_max < 0: y_min = data_min * 1.2 y_max = 0.0 else: y_min = data_min * 1.2 y_max = data_max * 1.2 extra = (right - left) * 0.5 display_left_channel = int(max(0.0, left - extra) * data_and_metadata.data_shape[-1]) display_right_channel = int(min(1.0, right + extra) * data_and_metadata.data_shape[-1]) # command = self.delegate.create_change_display_command() self.delegate.update_display_properties({"left_channel": display_left_channel, "right_channel": display_right_channel, "y_min": y_min, "y_max": y_max})
python
def __view_to_intervals(self, data_and_metadata: DataAndMetadata.DataAndMetadata, intervals: typing.List[typing.Tuple[float, float]]) -> None: """Change the view to encompass the channels and data represented by the given intervals.""" left = None right = None for interval in intervals: left = min(left, interval[0]) if left is not None else interval[0] right = max(right, interval[1]) if right is not None else interval[1] left = left if left is not None else 0.0 right = right if right is not None else 1.0 left_channel = int(max(0.0, left) * data_and_metadata.data_shape[-1]) right_channel = int(min(1.0, right) * data_and_metadata.data_shape[-1]) data_min = numpy.amin(data_and_metadata.data[..., left_channel:right_channel]) data_max = numpy.amax(data_and_metadata.data[..., left_channel:right_channel]) if data_min > 0 and data_max > 0: y_min = 0.0 y_max = data_max * 1.2 elif data_min < 0 and data_max < 0: y_min = data_min * 1.2 y_max = 0.0 else: y_min = data_min * 1.2 y_max = data_max * 1.2 extra = (right - left) * 0.5 display_left_channel = int(max(0.0, left - extra) * data_and_metadata.data_shape[-1]) display_right_channel = int(min(1.0, right + extra) * data_and_metadata.data_shape[-1]) # command = self.delegate.create_change_display_command() self.delegate.update_display_properties({"left_channel": display_left_channel, "right_channel": display_right_channel, "y_min": y_min, "y_max": y_max})
[ "def", "__view_to_intervals", "(", "self", ",", "data_and_metadata", ":", "DataAndMetadata", ".", "DataAndMetadata", ",", "intervals", ":", "typing", ".", "List", "[", "typing", ".", "Tuple", "[", "float", ",", "float", "]", "]", ")", "->", "None", ":", "left", "=", "None", "right", "=", "None", "for", "interval", "in", "intervals", ":", "left", "=", "min", "(", "left", ",", "interval", "[", "0", "]", ")", "if", "left", "is", "not", "None", "else", "interval", "[", "0", "]", "right", "=", "max", "(", "right", ",", "interval", "[", "1", "]", ")", "if", "right", "is", "not", "None", "else", "interval", "[", "1", "]", "left", "=", "left", "if", "left", "is", "not", "None", "else", "0.0", "right", "=", "right", "if", "right", "is", "not", "None", "else", "1.0", "left_channel", "=", "int", "(", "max", "(", "0.0", ",", "left", ")", "*", "data_and_metadata", ".", "data_shape", "[", "-", "1", "]", ")", "right_channel", "=", "int", "(", "min", "(", "1.0", ",", "right", ")", "*", "data_and_metadata", ".", "data_shape", "[", "-", "1", "]", ")", "data_min", "=", "numpy", ".", "amin", "(", "data_and_metadata", ".", "data", "[", "...", ",", "left_channel", ":", "right_channel", "]", ")", "data_max", "=", "numpy", ".", "amax", "(", "data_and_metadata", ".", "data", "[", "...", ",", "left_channel", ":", "right_channel", "]", ")", "if", "data_min", ">", "0", "and", "data_max", ">", "0", ":", "y_min", "=", "0.0", "y_max", "=", "data_max", "*", "1.2", "elif", "data_min", "<", "0", "and", "data_max", "<", "0", ":", "y_min", "=", "data_min", "*", "1.2", "y_max", "=", "0.0", "else", ":", "y_min", "=", "data_min", "*", "1.2", "y_max", "=", "data_max", "*", "1.2", "extra", "=", "(", "right", "-", "left", ")", "*", "0.5", "display_left_channel", "=", "int", "(", "max", "(", "0.0", ",", "left", "-", "extra", ")", "*", "data_and_metadata", ".", "data_shape", "[", "-", "1", "]", ")", "display_right_channel", "=", "int", "(", "min", "(", "1.0", ",", "right", "+", "extra", ")", "*", "data_and_metadata", ".", "data_shape", "[", "-", "1", "]", ")", "# command = self.delegate.create_change_display_command()", "self", ".", "delegate", ".", "update_display_properties", "(", "{", "\"left_channel\"", ":", "display_left_channel", ",", "\"right_channel\"", ":", "display_right_channel", ",", "\"y_min\"", ":", "y_min", ",", "\"y_max\"", ":", "y_max", "}", ")" ]
Change the view to encompass the channels and data represented by the given intervals.
[ "Change", "the", "view", "to", "encompass", "the", "channels", "and", "data", "represented", "by", "the", "given", "intervals", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LinePlotCanvasItem.py#L358-L384
train
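The zoom math stands alone: union the fractional intervals, convert to channel indices, then pad the y-range by 20%, anchoring at zero when the data is all one sign. A numpy sketch with the three-way min/max branch collapsed into two conditionals:

import numpy

data = numpy.sin(numpy.linspace(0.0, 6.28, 1000))
intervals = [(0.1, 0.3), (0.25, 0.5)]

left = min(i[0] for i in intervals)
right = max(i[1] for i in intervals)
left_channel = int(max(0.0, left) * data.shape[-1])
right_channel = int(min(1.0, right) * data.shape[-1])

data_min = numpy.amin(data[left_channel:right_channel])
data_max = numpy.amax(data[left_channel:right_channel])
y_min = 0.0 if data_min > 0 else data_min * 1.2  # anchor at zero when all-positive
y_max = 0.0 if data_max < 0 else data_max * 1.2
print(left_channel, right_channel, y_min, y_max)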
nion-software/nionswift
nion/swift/LinePlotCanvasItem.py
LinePlotCanvasItem.__view_to_selected_graphics
def __view_to_selected_graphics(self, data_and_metadata: DataAndMetadata.DataAndMetadata) -> None: """Change the view to encompass the selected graphic intervals.""" all_graphics = self.__graphics graphics = [graphic for graphic_index, graphic in enumerate(all_graphics) if self.__graphic_selection.contains(graphic_index)] intervals = list() for graphic in graphics: if isinstance(graphic, Graphics.IntervalGraphic): intervals.append(graphic.interval) self.__view_to_intervals(data_and_metadata, intervals)
python
def __view_to_selected_graphics(self, data_and_metadata: DataAndMetadata.DataAndMetadata) -> None: """Change the view to encompass the selected graphic intervals.""" all_graphics = self.__graphics graphics = [graphic for graphic_index, graphic in enumerate(all_graphics) if self.__graphic_selection.contains(graphic_index)] intervals = list() for graphic in graphics: if isinstance(graphic, Graphics.IntervalGraphic): intervals.append(graphic.interval) self.__view_to_intervals(data_and_metadata, intervals)
[ "def", "__view_to_selected_graphics", "(", "self", ",", "data_and_metadata", ":", "DataAndMetadata", ".", "DataAndMetadata", ")", "->", "None", ":", "all_graphics", "=", "self", ".", "__graphics", "graphics", "=", "[", "graphic", "for", "graphic_index", ",", "graphic", "in", "enumerate", "(", "all_graphics", ")", "if", "self", ".", "__graphic_selection", ".", "contains", "(", "graphic_index", ")", "]", "intervals", "=", "list", "(", ")", "for", "graphic", "in", "graphics", ":", "if", "isinstance", "(", "graphic", ",", "Graphics", ".", "IntervalGraphic", ")", ":", "intervals", ".", "append", "(", "graphic", ".", "interval", ")", "self", ".", "__view_to_intervals", "(", "data_and_metadata", ",", "intervals", ")" ]
Change the view to encompass the selected graphic intervals.
[ "Change", "the", "view", "to", "encompass", "the", "selected", "graphic", "intervals", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LinePlotCanvasItem.py#L387-L395
train
nion-software/nionswift
nion/swift/LinePlotCanvasItem.py
LinePlotCanvasItem.prepare_display
def prepare_display(self): """Prepare the display. This method gets called by the canvas layout/draw engine after being triggered by a call to `update`. When data or display parameters change, the internal state of the line plot gets updated. This method takes that internal state and updates the child canvas items. This method is always run on a thread and should be fast but doesn't need to be instant. """ displayed_dimensional_calibration = self.__displayed_dimensional_calibration intensity_calibration = self.__intensity_calibration calibration_style = self.__calibration_style y_min = self.__y_min y_max = self.__y_max y_style = self.__y_style left_channel = self.__left_channel right_channel = self.__right_channel scalar_xdata_list = None def calculate_scalar_xdata(xdata_list): scalar_xdata_list = list() for xdata in xdata_list: if xdata: scalar_data = Image.scalar_from_array(xdata.data) scalar_data = Image.convert_to_grayscale(scalar_data) scalar_intensity_calibration = calibration_style.get_intensity_calibration(xdata) scalar_dimensional_calibrations = calibration_style.get_dimensional_calibrations(xdata.dimensional_shape, xdata.dimensional_calibrations) if displayed_dimensional_calibration.units == scalar_dimensional_calibrations[-1].units and intensity_calibration.units == scalar_intensity_calibration.units: # the data needs to have an intensity scale matching intensity_calibration. convert the data to use the common scale. scale = scalar_intensity_calibration.scale / intensity_calibration.scale offset = (scalar_intensity_calibration.offset - intensity_calibration.offset) / intensity_calibration.scale scalar_data = scalar_data * scale + offset scalar_xdata_list.append(DataAndMetadata.new_data_and_metadata(scalar_data, scalar_intensity_calibration, scalar_dimensional_calibrations)) else: scalar_xdata_list.append(None) return scalar_xdata_list data_scale = self.__data_scale xdata_list = self.__xdata_list if data_scale is not None: # update the line graph data left_channel = left_channel if left_channel is not None else 0 right_channel = right_channel if right_channel is not None else data_scale left_channel, right_channel = min(left_channel, right_channel), max(left_channel, right_channel) scalar_data_list = None if y_min is None or y_max is None and len(xdata_list) > 0: scalar_xdata_list = calculate_scalar_xdata(xdata_list) scalar_data_list = [xdata.data if xdata else None for xdata in scalar_xdata_list] calibrated_data_min, calibrated_data_max, y_ticker = LineGraphCanvasItem.calculate_y_axis(scalar_data_list, y_min, y_max, intensity_calibration, y_style) axes = LineGraphCanvasItem.LineGraphAxes(data_scale, calibrated_data_min, calibrated_data_max, left_channel, right_channel, displayed_dimensional_calibration, intensity_calibration, y_style, y_ticker) if scalar_xdata_list is None: if len(xdata_list) > 0: scalar_xdata_list = calculate_scalar_xdata(xdata_list) else: scalar_xdata_list = list() if self.__display_frame_rate_id: Utility.fps_tick("prepare_"+self.__display_frame_rate_id) colors = ('#1E90FF', "#F00", "#0F0", "#00F", "#FF0", "#0FF", "#F0F", "#888", "#800", "#080", "#008", "#CCC", "#880", "#088", "#808", "#964B00") display_layers = self.__display_layers if len(display_layers) == 0: index = 0 for scalar_index, scalar_xdata in enumerate(scalar_xdata_list): if scalar_xdata and scalar_xdata.is_data_1d: if index < 16: display_layers.append({"fill_color": colors[index] if index == 0 else None, "stroke_color": colors[index] if index > 0 else None, "data_index": scalar_index}) index += 1 
if scalar_xdata and scalar_xdata.is_data_2d: for row in range(min(scalar_xdata.data_shape[-1], 16)): if index < 16: display_layers.append({"fill_color": colors[index] if index == 0 else None, "stroke_color": colors[index] if index > 0 else None, "data_index": scalar_index, "data_row": row}) index += 1 display_layer_count = len(display_layers) self.___has_valid_drawn_graph_data = False for index, display_layer in enumerate(display_layers): if index < 16: fill_color = display_layer.get("fill_color") stroke_color = display_layer.get("stroke_color") data_index = display_layer.get("data_index", 0) data_row = display_layer.get("data_row", 0) if 0 <= data_index < len(scalar_xdata_list): scalar_xdata = scalar_xdata_list[data_index] if scalar_xdata: data_row = max(0, min(scalar_xdata.dimensional_shape[0] - 1, data_row)) intensity_calibration = scalar_xdata.intensity_calibration displayed_dimensional_calibration = scalar_xdata.dimensional_calibrations[-1] if scalar_xdata.is_data_2d: scalar_data = scalar_xdata.data[data_row:data_row + 1, :].reshape((scalar_xdata.dimensional_shape[-1],)) scalar_xdata = DataAndMetadata.new_data_and_metadata(scalar_data, intensity_calibration, [displayed_dimensional_calibration]) line_graph_canvas_item = self.__line_graph_stack.canvas_items[display_layer_count - (index + 1)] line_graph_canvas_item.set_fill_color(fill_color) line_graph_canvas_item.set_stroke_color(stroke_color) line_graph_canvas_item.set_axes(axes) line_graph_canvas_item.set_uncalibrated_xdata(scalar_xdata) self.___has_valid_drawn_graph_data = scalar_xdata is not None for index in range(len(display_layers), 16): line_graph_canvas_item = self.__line_graph_stack.canvas_items[index] line_graph_canvas_item.set_axes(None) line_graph_canvas_item.set_uncalibrated_xdata(None) legend_position = self.__legend_position LegendEntry = collections.namedtuple("LegendEntry", ["label", "fill_color", "stroke_color"]) legend_entries = list() for index, display_layer in enumerate(self.__display_layers): data_index = display_layer.get("data_index", None) data_row = display_layer.get("data_row", None) label = display_layer.get("label", str()) if not label: if data_index is not None and data_row is not None: label = "Data {}:{}".format(data_index, data_row) elif data_index is not None: label = "Data {}".format(data_index) else: label = "Unknown" fill_color = display_layer.get("fill_color") stroke_color = display_layer.get("stroke_color") legend_entries.append(LegendEntry(label, fill_color, stroke_color)) self.__update_canvas_items(axes, legend_position, legend_entries) else: for line_graph_canvas_item in self.__line_graph_stack.canvas_items: line_graph_canvas_item.set_axes(None) line_graph_canvas_item.set_uncalibrated_xdata(None) self.__line_graph_xdata_list = list() self.__update_canvas_items(LineGraphCanvasItem.LineGraphAxes(), None, None)
python
def prepare_display(self): """Prepare the display. This method gets called by the canvas layout/draw engine after being triggered by a call to `update`. When data or display parameters change, the internal state of the line plot gets updated. This method takes that internal state and updates the child canvas items. This method is always run on a thread and should be fast but doesn't need to be instant. """ displayed_dimensional_calibration = self.__displayed_dimensional_calibration intensity_calibration = self.__intensity_calibration calibration_style = self.__calibration_style y_min = self.__y_min y_max = self.__y_max y_style = self.__y_style left_channel = self.__left_channel right_channel = self.__right_channel scalar_xdata_list = None def calculate_scalar_xdata(xdata_list): scalar_xdata_list = list() for xdata in xdata_list: if xdata: scalar_data = Image.scalar_from_array(xdata.data) scalar_data = Image.convert_to_grayscale(scalar_data) scalar_intensity_calibration = calibration_style.get_intensity_calibration(xdata) scalar_dimensional_calibrations = calibration_style.get_dimensional_calibrations(xdata.dimensional_shape, xdata.dimensional_calibrations) if displayed_dimensional_calibration.units == scalar_dimensional_calibrations[-1].units and intensity_calibration.units == scalar_intensity_calibration.units: # the data needs to have an intensity scale matching intensity_calibration. convert the data to use the common scale. scale = scalar_intensity_calibration.scale / intensity_calibration.scale offset = (scalar_intensity_calibration.offset - intensity_calibration.offset) / intensity_calibration.scale scalar_data = scalar_data * scale + offset scalar_xdata_list.append(DataAndMetadata.new_data_and_metadata(scalar_data, scalar_intensity_calibration, scalar_dimensional_calibrations)) else: scalar_xdata_list.append(None) return scalar_xdata_list data_scale = self.__data_scale xdata_list = self.__xdata_list if data_scale is not None: # update the line graph data left_channel = left_channel if left_channel is not None else 0 right_channel = right_channel if right_channel is not None else data_scale left_channel, right_channel = min(left_channel, right_channel), max(left_channel, right_channel) scalar_data_list = None if y_min is None or y_max is None and len(xdata_list) > 0: scalar_xdata_list = calculate_scalar_xdata(xdata_list) scalar_data_list = [xdata.data if xdata else None for xdata in scalar_xdata_list] calibrated_data_min, calibrated_data_max, y_ticker = LineGraphCanvasItem.calculate_y_axis(scalar_data_list, y_min, y_max, intensity_calibration, y_style) axes = LineGraphCanvasItem.LineGraphAxes(data_scale, calibrated_data_min, calibrated_data_max, left_channel, right_channel, displayed_dimensional_calibration, intensity_calibration, y_style, y_ticker) if scalar_xdata_list is None: if len(xdata_list) > 0: scalar_xdata_list = calculate_scalar_xdata(xdata_list) else: scalar_xdata_list = list() if self.__display_frame_rate_id: Utility.fps_tick("prepare_"+self.__display_frame_rate_id) colors = ('#1E90FF', "#F00", "#0F0", "#00F", "#FF0", "#0FF", "#F0F", "#888", "#800", "#080", "#008", "#CCC", "#880", "#088", "#808", "#964B00") display_layers = self.__display_layers if len(display_layers) == 0: index = 0 for scalar_index, scalar_xdata in enumerate(scalar_xdata_list): if scalar_xdata and scalar_xdata.is_data_1d: if index < 16: display_layers.append({"fill_color": colors[index] if index == 0 else None, "stroke_color": colors[index] if index > 0 else None, "data_index": scalar_index}) index += 1 
if scalar_xdata and scalar_xdata.is_data_2d: for row in range(min(scalar_xdata.data_shape[-1], 16)): if index < 16: display_layers.append({"fill_color": colors[index] if index == 0 else None, "stroke_color": colors[index] if index > 0 else None, "data_index": scalar_index, "data_row": row}) index += 1 display_layer_count = len(display_layers) self.___has_valid_drawn_graph_data = False for index, display_layer in enumerate(display_layers): if index < 16: fill_color = display_layer.get("fill_color") stroke_color = display_layer.get("stroke_color") data_index = display_layer.get("data_index", 0) data_row = display_layer.get("data_row", 0) if 0 <= data_index < len(scalar_xdata_list): scalar_xdata = scalar_xdata_list[data_index] if scalar_xdata: data_row = max(0, min(scalar_xdata.dimensional_shape[0] - 1, data_row)) intensity_calibration = scalar_xdata.intensity_calibration displayed_dimensional_calibration = scalar_xdata.dimensional_calibrations[-1] if scalar_xdata.is_data_2d: scalar_data = scalar_xdata.data[data_row:data_row + 1, :].reshape((scalar_xdata.dimensional_shape[-1],)) scalar_xdata = DataAndMetadata.new_data_and_metadata(scalar_data, intensity_calibration, [displayed_dimensional_calibration]) line_graph_canvas_item = self.__line_graph_stack.canvas_items[display_layer_count - (index + 1)] line_graph_canvas_item.set_fill_color(fill_color) line_graph_canvas_item.set_stroke_color(stroke_color) line_graph_canvas_item.set_axes(axes) line_graph_canvas_item.set_uncalibrated_xdata(scalar_xdata) self.___has_valid_drawn_graph_data = scalar_xdata is not None for index in range(len(display_layers), 16): line_graph_canvas_item = self.__line_graph_stack.canvas_items[index] line_graph_canvas_item.set_axes(None) line_graph_canvas_item.set_uncalibrated_xdata(None) legend_position = self.__legend_position LegendEntry = collections.namedtuple("LegendEntry", ["label", "fill_color", "stroke_color"]) legend_entries = list() for index, display_layer in enumerate(self.__display_layers): data_index = display_layer.get("data_index", None) data_row = display_layer.get("data_row", None) label = display_layer.get("label", str()) if not label: if data_index is not None and data_row is not None: label = "Data {}:{}".format(data_index, data_row) elif data_index is not None: label = "Data {}".format(data_index) else: label = "Unknown" fill_color = display_layer.get("fill_color") stroke_color = display_layer.get("stroke_color") legend_entries.append(LegendEntry(label, fill_color, stroke_color)) self.__update_canvas_items(axes, legend_position, legend_entries) else: for line_graph_canvas_item in self.__line_graph_stack.canvas_items: line_graph_canvas_item.set_axes(None) line_graph_canvas_item.set_uncalibrated_xdata(None) self.__line_graph_xdata_list = list() self.__update_canvas_items(LineGraphCanvasItem.LineGraphAxes(), None, None)
[ "def", "prepare_display", "(", "self", ")", ":", "displayed_dimensional_calibration", "=", "self", ".", "__displayed_dimensional_calibration", "intensity_calibration", "=", "self", ".", "__intensity_calibration", "calibration_style", "=", "self", ".", "__calibration_style", "y_min", "=", "self", ".", "__y_min", "y_max", "=", "self", ".", "__y_max", "y_style", "=", "self", ".", "__y_style", "left_channel", "=", "self", ".", "__left_channel", "right_channel", "=", "self", ".", "__right_channel", "scalar_xdata_list", "=", "None", "def", "calculate_scalar_xdata", "(", "xdata_list", ")", ":", "scalar_xdata_list", "=", "list", "(", ")", "for", "xdata", "in", "xdata_list", ":", "if", "xdata", ":", "scalar_data", "=", "Image", ".", "scalar_from_array", "(", "xdata", ".", "data", ")", "scalar_data", "=", "Image", ".", "convert_to_grayscale", "(", "scalar_data", ")", "scalar_intensity_calibration", "=", "calibration_style", ".", "get_intensity_calibration", "(", "xdata", ")", "scalar_dimensional_calibrations", "=", "calibration_style", ".", "get_dimensional_calibrations", "(", "xdata", ".", "dimensional_shape", ",", "xdata", ".", "dimensional_calibrations", ")", "if", "displayed_dimensional_calibration", ".", "units", "==", "scalar_dimensional_calibrations", "[", "-", "1", "]", ".", "units", "and", "intensity_calibration", ".", "units", "==", "scalar_intensity_calibration", ".", "units", ":", "# the data needs to have an intensity scale matching intensity_calibration. convert the data to use the common scale.", "scale", "=", "scalar_intensity_calibration", ".", "scale", "/", "intensity_calibration", ".", "scale", "offset", "=", "(", "scalar_intensity_calibration", ".", "offset", "-", "intensity_calibration", ".", "offset", ")", "/", "intensity_calibration", ".", "scale", "scalar_data", "=", "scalar_data", "*", "scale", "+", "offset", "scalar_xdata_list", ".", "append", "(", "DataAndMetadata", ".", "new_data_and_metadata", "(", "scalar_data", ",", "scalar_intensity_calibration", ",", "scalar_dimensional_calibrations", ")", ")", "else", ":", "scalar_xdata_list", ".", "append", "(", "None", ")", "return", "scalar_xdata_list", "data_scale", "=", "self", ".", "__data_scale", "xdata_list", "=", "self", ".", "__xdata_list", "if", "data_scale", "is", "not", "None", ":", "# update the line graph data", "left_channel", "=", "left_channel", "if", "left_channel", "is", "not", "None", "else", "0", "right_channel", "=", "right_channel", "if", "right_channel", "is", "not", "None", "else", "data_scale", "left_channel", ",", "right_channel", "=", "min", "(", "left_channel", ",", "right_channel", ")", ",", "max", "(", "left_channel", ",", "right_channel", ")", "scalar_data_list", "=", "None", "if", "y_min", "is", "None", "or", "y_max", "is", "None", "and", "len", "(", "xdata_list", ")", ">", "0", ":", "scalar_xdata_list", "=", "calculate_scalar_xdata", "(", "xdata_list", ")", "scalar_data_list", "=", "[", "xdata", ".", "data", "if", "xdata", "else", "None", "for", "xdata", "in", "scalar_xdata_list", "]", "calibrated_data_min", ",", "calibrated_data_max", ",", "y_ticker", "=", "LineGraphCanvasItem", ".", "calculate_y_axis", "(", "scalar_data_list", ",", "y_min", ",", "y_max", ",", "intensity_calibration", ",", "y_style", ")", "axes", "=", "LineGraphCanvasItem", ".", "LineGraphAxes", "(", "data_scale", ",", "calibrated_data_min", ",", "calibrated_data_max", ",", "left_channel", ",", "right_channel", ",", "displayed_dimensional_calibration", ",", "intensity_calibration", ",", "y_style", ",", "y_ticker", ")", "if", 
"scalar_xdata_list", "is", "None", ":", "if", "len", "(", "xdata_list", ")", ">", "0", ":", "scalar_xdata_list", "=", "calculate_scalar_xdata", "(", "xdata_list", ")", "else", ":", "scalar_xdata_list", "=", "list", "(", ")", "if", "self", ".", "__display_frame_rate_id", ":", "Utility", ".", "fps_tick", "(", "\"prepare_\"", "+", "self", ".", "__display_frame_rate_id", ")", "colors", "=", "(", "'#1E90FF'", ",", "\"#F00\"", ",", "\"#0F0\"", ",", "\"#00F\"", ",", "\"#FF0\"", ",", "\"#0FF\"", ",", "\"#F0F\"", ",", "\"#888\"", ",", "\"#800\"", ",", "\"#080\"", ",", "\"#008\"", ",", "\"#CCC\"", ",", "\"#880\"", ",", "\"#088\"", ",", "\"#808\"", ",", "\"#964B00\"", ")", "display_layers", "=", "self", ".", "__display_layers", "if", "len", "(", "display_layers", ")", "==", "0", ":", "index", "=", "0", "for", "scalar_index", ",", "scalar_xdata", "in", "enumerate", "(", "scalar_xdata_list", ")", ":", "if", "scalar_xdata", "and", "scalar_xdata", ".", "is_data_1d", ":", "if", "index", "<", "16", ":", "display_layers", ".", "append", "(", "{", "\"fill_color\"", ":", "colors", "[", "index", "]", "if", "index", "==", "0", "else", "None", ",", "\"stroke_color\"", ":", "colors", "[", "index", "]", "if", "index", ">", "0", "else", "None", ",", "\"data_index\"", ":", "scalar_index", "}", ")", "index", "+=", "1", "if", "scalar_xdata", "and", "scalar_xdata", ".", "is_data_2d", ":", "for", "row", "in", "range", "(", "min", "(", "scalar_xdata", ".", "data_shape", "[", "-", "1", "]", ",", "16", ")", ")", ":", "if", "index", "<", "16", ":", "display_layers", ".", "append", "(", "{", "\"fill_color\"", ":", "colors", "[", "index", "]", "if", "index", "==", "0", "else", "None", ",", "\"stroke_color\"", ":", "colors", "[", "index", "]", "if", "index", ">", "0", "else", "None", ",", "\"data_index\"", ":", "scalar_index", ",", "\"data_row\"", ":", "row", "}", ")", "index", "+=", "1", "display_layer_count", "=", "len", "(", "display_layers", ")", "self", ".", "___has_valid_drawn_graph_data", "=", "False", "for", "index", ",", "display_layer", "in", "enumerate", "(", "display_layers", ")", ":", "if", "index", "<", "16", ":", "fill_color", "=", "display_layer", ".", "get", "(", "\"fill_color\"", ")", "stroke_color", "=", "display_layer", ".", "get", "(", "\"stroke_color\"", ")", "data_index", "=", "display_layer", ".", "get", "(", "\"data_index\"", ",", "0", ")", "data_row", "=", "display_layer", ".", "get", "(", "\"data_row\"", ",", "0", ")", "if", "0", "<=", "data_index", "<", "len", "(", "scalar_xdata_list", ")", ":", "scalar_xdata", "=", "scalar_xdata_list", "[", "data_index", "]", "if", "scalar_xdata", ":", "data_row", "=", "max", "(", "0", ",", "min", "(", "scalar_xdata", ".", "dimensional_shape", "[", "0", "]", "-", "1", ",", "data_row", ")", ")", "intensity_calibration", "=", "scalar_xdata", ".", "intensity_calibration", "displayed_dimensional_calibration", "=", "scalar_xdata", ".", "dimensional_calibrations", "[", "-", "1", "]", "if", "scalar_xdata", ".", "is_data_2d", ":", "scalar_data", "=", "scalar_xdata", ".", "data", "[", "data_row", ":", "data_row", "+", "1", ",", ":", "]", ".", "reshape", "(", "(", "scalar_xdata", ".", "dimensional_shape", "[", "-", "1", "]", ",", ")", ")", "scalar_xdata", "=", "DataAndMetadata", ".", "new_data_and_metadata", "(", "scalar_data", ",", "intensity_calibration", ",", "[", "displayed_dimensional_calibration", "]", ")", "line_graph_canvas_item", "=", "self", ".", "__line_graph_stack", ".", "canvas_items", "[", "display_layer_count", "-", "(", "index", "+", "1", ")", "]", 
"line_graph_canvas_item", ".", "set_fill_color", "(", "fill_color", ")", "line_graph_canvas_item", ".", "set_stroke_color", "(", "stroke_color", ")", "line_graph_canvas_item", ".", "set_axes", "(", "axes", ")", "line_graph_canvas_item", ".", "set_uncalibrated_xdata", "(", "scalar_xdata", ")", "self", ".", "___has_valid_drawn_graph_data", "=", "scalar_xdata", "is", "not", "None", "for", "index", "in", "range", "(", "len", "(", "display_layers", ")", ",", "16", ")", ":", "line_graph_canvas_item", "=", "self", ".", "__line_graph_stack", ".", "canvas_items", "[", "index", "]", "line_graph_canvas_item", ".", "set_axes", "(", "None", ")", "line_graph_canvas_item", ".", "set_uncalibrated_xdata", "(", "None", ")", "legend_position", "=", "self", ".", "__legend_position", "LegendEntry", "=", "collections", ".", "namedtuple", "(", "\"LegendEntry\"", ",", "[", "\"label\"", ",", "\"fill_color\"", ",", "\"stroke_color\"", "]", ")", "legend_entries", "=", "list", "(", ")", "for", "index", ",", "display_layer", "in", "enumerate", "(", "self", ".", "__display_layers", ")", ":", "data_index", "=", "display_layer", ".", "get", "(", "\"data_index\"", ",", "None", ")", "data_row", "=", "display_layer", ".", "get", "(", "\"data_row\"", ",", "None", ")", "label", "=", "display_layer", ".", "get", "(", "\"label\"", ",", "str", "(", ")", ")", "if", "not", "label", ":", "if", "data_index", "is", "not", "None", "and", "data_row", "is", "not", "None", ":", "label", "=", "\"Data {}:{}\"", ".", "format", "(", "data_index", ",", "data_row", ")", "elif", "data_index", "is", "not", "None", ":", "label", "=", "\"Data {}\"", ".", "format", "(", "data_index", ")", "else", ":", "label", "=", "\"Unknown\"", "fill_color", "=", "display_layer", ".", "get", "(", "\"fill_color\"", ")", "stroke_color", "=", "display_layer", ".", "get", "(", "\"stroke_color\"", ")", "legend_entries", ".", "append", "(", "LegendEntry", "(", "label", ",", "fill_color", ",", "stroke_color", ")", ")", "self", ".", "__update_canvas_items", "(", "axes", ",", "legend_position", ",", "legend_entries", ")", "else", ":", "for", "line_graph_canvas_item", "in", "self", ".", "__line_graph_stack", ".", "canvas_items", ":", "line_graph_canvas_item", ".", "set_axes", "(", "None", ")", "line_graph_canvas_item", ".", "set_uncalibrated_xdata", "(", "None", ")", "self", ".", "__line_graph_xdata_list", "=", "list", "(", ")", "self", ".", "__update_canvas_items", "(", "LineGraphCanvasItem", ".", "LineGraphAxes", "(", ")", ",", "None", ",", "None", ")" ]
Prepare the display. This method gets called by the canvas layout/draw engine after being triggered by a call to `update`. When data or display parameters change, the internal state of the line plot gets updated. This method takes that internal state and updates the child canvas items. This method is always run on a thread and should be fast but doesn't need to be instant.
[ "Prepare", "the", "display", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LinePlotCanvasItem.py#L402-L538
train
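The intensity conversion inside prepare_display's calculate_scalar_xdata puts every layer's data onto the display's common intensity calibration so all curves share one y axis. A minimal sketch of that conversion, assuming a simple stand-in calibration object with scale and offset attributes (calibrated value = data * scale + offset); this is not the nion.data Calibration API:

import numpy as np

class Calib:
    # stand-in: calibrated value = data * scale + offset
    def __init__(self, offset=0.0, scale=1.0, units=""):
        self.offset = offset
        self.scale = scale
        self.units = units

def to_common_scale(data, layer_calib, common_calib):
    # choose scale/offset so that applying common_calib to the output
    # reproduces the values layer_calib assigned to the input
    scale = layer_calib.scale / common_calib.scale
    offset = (layer_calib.offset - common_calib.offset) / common_calib.scale
    return data * scale + offset

data = np.array([0.0, 1.0, 2.0])
layer = Calib(offset=1.0, scale=2.0, units="counts")   # calibrated values [1, 3, 5]
common = Calib(offset=0.0, scale=1.0, units="counts")
print(to_common_scale(data, layer, common))            # [1. 3. 5.]

This mirrors the scale and offset expressions in the function above; the conversion is only applied when the units of both calibrations match.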
nion-software/nionswift
nion/swift/LinePlotCanvasItem.py
LinePlotCanvasItem.__update_cursor_info
def __update_cursor_info(self): """ Map the mouse to the 1-d position within the line graph. """ if not self.delegate: # allow display to work without delegate return if self.__mouse_in and self.__last_mouse: pos_1d = None axes = self.__axes line_graph_canvas_item = self.line_graph_canvas_item if axes and axes.is_valid and line_graph_canvas_item: mouse = self.map_to_canvas_item(self.__last_mouse, line_graph_canvas_item) plot_rect = line_graph_canvas_item.canvas_bounds if plot_rect.contains_point(mouse): mouse = mouse - plot_rect.origin x = float(mouse.x) / plot_rect.width px = axes.drawn_left_channel + x * (axes.drawn_right_channel - axes.drawn_left_channel) pos_1d = px, self.delegate.cursor_changed(pos_1d)
python
def __update_cursor_info(self): """ Map the mouse to the 1-d position within the line graph. """ if not self.delegate: # allow display to work without delegate return if self.__mouse_in and self.__last_mouse: pos_1d = None axes = self.__axes line_graph_canvas_item = self.line_graph_canvas_item if axes and axes.is_valid and line_graph_canvas_item: mouse = self.map_to_canvas_item(self.__last_mouse, line_graph_canvas_item) plot_rect = line_graph_canvas_item.canvas_bounds if plot_rect.contains_point(mouse): mouse = mouse - plot_rect.origin x = float(mouse.x) / plot_rect.width px = axes.drawn_left_channel + x * (axes.drawn_right_channel - axes.drawn_left_channel) pos_1d = px, self.delegate.cursor_changed(pos_1d)
[ "def", "__update_cursor_info", "(", "self", ")", ":", "if", "not", "self", ".", "delegate", ":", "# allow display to work without delegate", "return", "if", "self", ".", "__mouse_in", "and", "self", ".", "__last_mouse", ":", "pos_1d", "=", "None", "axes", "=", "self", ".", "__axes", "line_graph_canvas_item", "=", "self", ".", "line_graph_canvas_item", "if", "axes", "and", "axes", ".", "is_valid", "and", "line_graph_canvas_item", ":", "mouse", "=", "self", ".", "map_to_canvas_item", "(", "self", ".", "__last_mouse", ",", "line_graph_canvas_item", ")", "plot_rect", "=", "line_graph_canvas_item", ".", "canvas_bounds", "if", "plot_rect", ".", "contains_point", "(", "mouse", ")", ":", "mouse", "=", "mouse", "-", "plot_rect", ".", "origin", "x", "=", "float", "(", "mouse", ".", "x", ")", "/", "plot_rect", ".", "width", "px", "=", "axes", ".", "drawn_left_channel", "+", "x", "*", "(", "axes", ".", "drawn_right_channel", "-", "axes", ".", "drawn_left_channel", ")", "pos_1d", "=", "px", ",", "self", ".", "delegate", ".", "cursor_changed", "(", "pos_1d", ")" ]
Map the mouse to the 1-d position within the line graph.
[ "Map", "the", "mouse", "to", "the", "1", "-", "d", "position", "within", "the", "line", "graph", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/LinePlotCanvasItem.py#L937-L955
train
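The channel computation in __update_cursor_info is a linear map from the mouse's fractional position across the plot rectangle into the drawn channel range. A standalone sketch (names are illustrative, not the nionswift canvas API):

def mouse_to_channel(mouse_x, plot_origin_x, plot_width, left_channel, right_channel):
    # fraction of the way across the plot rect, then linear map into channels
    x = float(mouse_x - plot_origin_x) / plot_width
    return left_channel + x * (right_channel - left_channel)

print(mouse_to_channel(200.0, 0.0, 400.0, 0, 1024))  # 512.0, the center channel

Note that the function above passes the result to cursor_changed as a one-element tuple (pos_1d = px,), which keeps the cursor position shaped like a 1-d index.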
djgagne/hagelslag
hagelslag/processing/TrackProcessing.py
TrackProcessor.find_model_patch_tracks
def find_model_patch_tracks(self): """ Identify storms in gridded model output and extract uniform sized patches around the storm centers of mass. Returns: """ self.model_grid.load_data() tracked_model_objects = [] model_objects = [] if self.model_grid.data is None: print("No model output found") return tracked_model_objects min_orig = self.model_ew.min_thresh max_orig = self.model_ew.max_thresh data_increment_orig = self.model_ew.data_increment self.model_ew.min_thresh = 0 self.model_ew.data_increment = 1 self.model_ew.max_thresh = 100 for h, hour in enumerate(self.hours): # Identify storms at each time step and apply size filter print("Finding {0} objects for run {1} Hour: {2:02d}".format(self.ensemble_member, self.run_date.strftime("%Y%m%d%H"), hour)) if self.mask is not None: model_data = self.model_grid.data[h] * self.mask else: model_data = self.model_grid.data[h] model_data[:self.patch_radius] = 0 model_data[-self.patch_radius:] = 0 model_data[:, :self.patch_radius] = 0 model_data[:, -self.patch_radius:] = 0 scaled_data = np.array(rescale_data(model_data, min_orig, max_orig)) hour_labels = label_storm_objects(scaled_data, "ew", self.model_ew.min_thresh, self.model_ew.max_thresh, min_area=self.size_filter, max_area=self.model_ew.max_size, max_range=self.model_ew.delta, increment=self.model_ew.data_increment, gaussian_sd=self.gaussian_window) model_objects.extend(extract_storm_patches(hour_labels, model_data, self.model_grid.x, self.model_grid.y, [hour], dx=self.model_grid.dx, patch_radius=self.patch_radius)) for model_obj in model_objects[-1]: dims = model_obj.timesteps[-1].shape if h > 0: model_obj.estimate_motion(hour, self.model_grid.data[h-1], dims[1], dims[0]) del scaled_data del model_data del hour_labels tracked_model_objects.extend(track_storms(model_objects, self.hours, self.object_matcher.cost_function_components, self.object_matcher.max_values, self.object_matcher.weights)) self.model_ew.min_thresh = min_orig self.model_ew.max_thresh = max_orig self.model_ew.data_increment = data_increment_orig return tracked_model_objects
python
def find_model_patch_tracks(self): """ Identify storms in gridded model output and extract uniform sized patches around the storm centers of mass. Returns: """ self.model_grid.load_data() tracked_model_objects = [] model_objects = [] if self.model_grid.data is None: print("No model output found") return tracked_model_objects min_orig = self.model_ew.min_thresh max_orig = self.model_ew.max_thresh data_increment_orig = self.model_ew.data_increment self.model_ew.min_thresh = 0 self.model_ew.data_increment = 1 self.model_ew.max_thresh = 100 for h, hour in enumerate(self.hours): # Identify storms at each time step and apply size filter print("Finding {0} objects for run {1} Hour: {2:02d}".format(self.ensemble_member, self.run_date.strftime("%Y%m%d%H"), hour)) if self.mask is not None: model_data = self.model_grid.data[h] * self.mask else: model_data = self.model_grid.data[h] model_data[:self.patch_radius] = 0 model_data[-self.patch_radius:] = 0 model_data[:, :self.patch_radius] = 0 model_data[:, -self.patch_radius:] = 0 scaled_data = np.array(rescale_data(model_data, min_orig, max_orig)) hour_labels = label_storm_objects(scaled_data, "ew", self.model_ew.min_thresh, self.model_ew.max_thresh, min_area=self.size_filter, max_area=self.model_ew.max_size, max_range=self.model_ew.delta, increment=self.model_ew.data_increment, gaussian_sd=self.gaussian_window) model_objects.extend(extract_storm_patches(hour_labels, model_data, self.model_grid.x, self.model_grid.y, [hour], dx=self.model_grid.dx, patch_radius=self.patch_radius)) for model_obj in model_objects[-1]: dims = model_obj.timesteps[-1].shape if h > 0: model_obj.estimate_motion(hour, self.model_grid.data[h-1], dims[1], dims[0]) del scaled_data del model_data del hour_labels tracked_model_objects.extend(track_storms(model_objects, self.hours, self.object_matcher.cost_function_components, self.object_matcher.max_values, self.object_matcher.weights)) self.model_ew.min_thresh = min_orig self.model_ew.max_thresh = max_orig self.model_ew.data_increment = data_increment_orig return tracked_model_objects
[ "def", "find_model_patch_tracks", "(", "self", ")", ":", "self", ".", "model_grid", ".", "load_data", "(", ")", "tracked_model_objects", "=", "[", "]", "model_objects", "=", "[", "]", "if", "self", ".", "model_grid", ".", "data", "is", "None", ":", "print", "(", "\"No model output found\"", ")", "return", "tracked_model_objects", "min_orig", "=", "self", ".", "model_ew", ".", "min_thresh", "max_orig", "=", "self", ".", "model_ew", ".", "max_thresh", "data_increment_orig", "=", "self", ".", "model_ew", ".", "data_increment", "self", ".", "model_ew", ".", "min_thresh", "=", "0", "self", ".", "model_ew", ".", "data_increment", "=", "1", "self", ".", "model_ew", ".", "max_thresh", "=", "100", "for", "h", ",", "hour", "in", "enumerate", "(", "self", ".", "hours", ")", ":", "# Identify storms at each time step and apply size filter", "print", "(", "\"Finding {0} objects for run {1} Hour: {2:02d}\"", ".", "format", "(", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ".", "strftime", "(", "\"%Y%m%d%H\"", ")", ",", "hour", ")", ")", "if", "self", ".", "mask", "is", "not", "None", ":", "model_data", "=", "self", ".", "model_grid", ".", "data", "[", "h", "]", "*", "self", ".", "mask", "else", ":", "model_data", "=", "self", ".", "model_grid", ".", "data", "[", "h", "]", "model_data", "[", ":", "self", ".", "patch_radius", "]", "=", "0", "model_data", "[", "-", "self", ".", "patch_radius", ":", "]", "=", "0", "model_data", "[", ":", ",", ":", "self", ".", "patch_radius", "]", "=", "0", "model_data", "[", ":", ",", "-", "self", ".", "patch_radius", ":", "]", "=", "0", "scaled_data", "=", "np", ".", "array", "(", "rescale_data", "(", "model_data", ",", "min_orig", ",", "max_orig", ")", ")", "hour_labels", "=", "label_storm_objects", "(", "scaled_data", ",", "\"ew\"", ",", "self", ".", "model_ew", ".", "min_thresh", ",", "self", ".", "model_ew", ".", "max_thresh", ",", "min_area", "=", "self", ".", "size_filter", ",", "max_area", "=", "self", ".", "model_ew", ".", "max_size", ",", "max_range", "=", "self", ".", "model_ew", ".", "delta", ",", "increment", "=", "self", ".", "model_ew", ".", "data_increment", ",", "gaussian_sd", "=", "self", ".", "gaussian_window", ")", "model_objects", ".", "extend", "(", "extract_storm_patches", "(", "hour_labels", ",", "model_data", ",", "self", ".", "model_grid", ".", "x", ",", "self", ".", "model_grid", ".", "y", ",", "[", "hour", "]", ",", "dx", "=", "self", ".", "model_grid", ".", "dx", ",", "patch_radius", "=", "self", ".", "patch_radius", ")", ")", "for", "model_obj", "in", "model_objects", "[", "-", "1", "]", ":", "dims", "=", "model_obj", ".", "timesteps", "[", "-", "1", "]", ".", "shape", "if", "h", ">", "0", ":", "model_obj", ".", "estimate_motion", "(", "hour", ",", "self", ".", "model_grid", ".", "data", "[", "h", "-", "1", "]", ",", "dims", "[", "1", "]", ",", "dims", "[", "0", "]", ")", "del", "scaled_data", "del", "model_data", "del", "hour_labels", "tracked_model_objects", ".", "extend", "(", "track_storms", "(", "model_objects", ",", "self", ".", "hours", ",", "self", ".", "object_matcher", ".", "cost_function_components", ",", "self", ".", "object_matcher", ".", "max_values", ",", "self", ".", "object_matcher", ".", "weights", ")", ")", "self", ".", "model_ew", ".", "min_thresh", "=", "min_orig", "self", ".", "model_ew", ".", "max_thresh", "=", "max_orig", "self", ".", "model_ew", ".", "data_increment", "=", "data_increment_orig", "return", "tracked_model_objects" ]
Identify storms in gridded model output and extract uniform sized patches around the storm centers of mass. Returns: List of tracked STObjects containing storm patch information.
[ "Identify", "storms", "in", "gridded", "model", "output", "and", "extract", "uniform", "sized", "patches", "around", "the", "storm", "centers", "of", "mass", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/TrackProcessing.py#L110-L165
train
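find_model_patch_tracks temporarily rescales the field and the enhanced-watershed thresholds onto a common 0-100 range, labels objects, and then restores the original thresholds. A sketch of the rescale step, assuming rescale_data is a linear min-max rescaler onto 0-100 (hagelslag's actual implementation may differ):

import numpy as np

def rescale_data(data, data_min, data_max, out_min=0.0, out_max=100.0):
    # linear min-max rescale of data onto [out_min, out_max]
    frac = (data - data_min) / (data_max - data_min)
    return out_min + frac * (out_max - out_min)

field = np.array([25.0, 50.0, 75.0])
min_orig, max_orig = 25.0, 75.0          # saved watershed thresholds
scaled = rescale_data(field, min_orig, max_orig)
print(scaled)                            # [  0.  50. 100.]
# objects are labeled on the scaled data with thresholds 0/100 and increment 1,
# after which min_thresh/max_thresh/data_increment are restored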
djgagne/hagelslag
hagelslag/processing/TrackProcessing.py
TrackProcessor.find_model_tracks
def find_model_tracks(self): """ Identify storms at each model time step and link them together with object matching. Returns: List of STObjects containing model track information. """ self.model_grid.load_data() model_objects = [] tracked_model_objects = [] if self.model_grid.data is None: print("No model output found") return tracked_model_objects for h, hour in enumerate(self.hours): # Identify storms at each time step and apply size filter print("Finding {0} objects for run {1} Hour: {2:02d}".format(self.ensemble_member, self.run_date.strftime("%Y%m%d%H"), hour)) if self.mask is not None: model_data = self.model_grid.data[h] * self.mask else: model_data = self.model_grid.data[h] # remember orig values min_orig = self.model_ew.min_thresh max_orig = self.model_ew.max_thresh data_increment_orig = self.model_ew.data_increment # scale to int 0-100. scaled_data = np.array(rescale_data( self.model_grid.data[h], min_orig, max_orig)) self.model_ew.min_thresh = 0 self.model_ew.data_increment = 1 self.model_ew.max_thresh = 100 hour_labels = self.model_ew.label(gaussian_filter(scaled_data, self.gaussian_window)) hour_labels[model_data < self.model_ew.min_thresh] = 0 hour_labels = self.model_ew.size_filter(hour_labels, self.size_filter) # Return to orig values self.model_ew.min_thresh = min_orig self.model_ew.max_thresh = max_orig self.model_ew.data_increment = data_increment_orig obj_slices = find_objects(hour_labels) num_slices = len(obj_slices) model_objects.append([]) if num_slices > 0: for s, sl in enumerate(obj_slices): model_objects[-1].append(STObject(self.model_grid.data[h][sl], np.where(hour_labels[sl] == s + 1, 1, 0), self.model_grid.x[sl], self.model_grid.y[sl], self.model_grid.i[sl], self.model_grid.j[sl], hour, hour, dx=self.model_grid.dx)) if h > 0: dims = model_objects[-1][-1].timesteps[0].shape model_objects[-1][-1].estimate_motion(hour, self.model_grid.data[h-1], dims[1], dims[0]) del hour_labels del scaled_data del model_data for h, hour in enumerate(self.hours): past_time_objs = [] for obj in tracked_model_objects: # Potential trackable objects are identified if obj.end_time == hour - 1: past_time_objs.append(obj) # If no objects existed in the last time step, then consider objects in current time step all new if len(past_time_objs) == 0: tracked_model_objects.extend(model_objects[h]) # Match from previous time step with current time step elif len(past_time_objs) > 0 and len(model_objects[h]) > 0: assignments = self.object_matcher.match_objects(past_time_objs, model_objects[h], hour - 1, hour) unpaired = list(range(len(model_objects[h]))) for pair in assignments: past_time_objs[pair[0]].extend(model_objects[h][pair[1]]) unpaired.remove(pair[1]) if len(unpaired) > 0: for up in unpaired: tracked_model_objects.append(model_objects[h][up]) print("Tracked Model Objects: {0:03d} Hour: {1:02d}".format(len(tracked_model_objects), hour)) return tracked_model_objects
python
def find_model_tracks(self): """ Identify storms at each model time step and link them together with object matching. Returns: List of STObjects containing model track information. """ self.model_grid.load_data() model_objects = [] tracked_model_objects = [] if self.model_grid.data is None: print("No model output found") return tracked_model_objects for h, hour in enumerate(self.hours): # Identify storms at each time step and apply size filter print("Finding {0} objects for run {1} Hour: {2:02d}".format(self.ensemble_member, self.run_date.strftime("%Y%m%d%H"), hour)) if self.mask is not None: model_data = self.model_grid.data[h] * self.mask else: model_data = self.model_grid.data[h] # remember orig values min_orig = self.model_ew.min_thresh max_orig = self.model_ew.max_thresh data_increment_orig = self.model_ew.data_increment # scale to int 0-100. scaled_data = np.array(rescale_data( self.model_grid.data[h], min_orig, max_orig)) self.model_ew.min_thresh = 0 self.model_ew.data_increment = 1 self.model_ew.max_thresh = 100 hour_labels = self.model_ew.label(gaussian_filter(scaled_data, self.gaussian_window)) hour_labels[model_data < self.model_ew.min_thresh] = 0 hour_labels = self.model_ew.size_filter(hour_labels, self.size_filter) # Return to orig values self.model_ew.min_thresh = min_orig self.model_ew.max_thresh = max_orig self.model_ew.data_increment = data_increment_orig obj_slices = find_objects(hour_labels) num_slices = len(obj_slices) model_objects.append([]) if num_slices > 0: for s, sl in enumerate(obj_slices): model_objects[-1].append(STObject(self.model_grid.data[h][sl], np.where(hour_labels[sl] == s + 1, 1, 0), self.model_grid.x[sl], self.model_grid.y[sl], self.model_grid.i[sl], self.model_grid.j[sl], hour, hour, dx=self.model_grid.dx)) if h > 0: dims = model_objects[-1][-1].timesteps[0].shape model_objects[-1][-1].estimate_motion(hour, self.model_grid.data[h-1], dims[1], dims[0]) del hour_labels del scaled_data del model_data for h, hour in enumerate(self.hours): past_time_objs = [] for obj in tracked_model_objects: # Potential trackable objects are identified if obj.end_time == hour - 1: past_time_objs.append(obj) # If no objects existed in the last time step, then consider objects in current time step all new if len(past_time_objs) == 0: tracked_model_objects.extend(model_objects[h]) # Match from previous time step with current time step elif len(past_time_objs) > 0 and len(model_objects[h]) > 0: assignments = self.object_matcher.match_objects(past_time_objs, model_objects[h], hour - 1, hour) unpaired = list(range(len(model_objects[h]))) for pair in assignments: past_time_objs[pair[0]].extend(model_objects[h][pair[1]]) unpaired.remove(pair[1]) if len(unpaired) > 0: for up in unpaired: tracked_model_objects.append(model_objects[h][up]) print("Tracked Model Objects: {0:03d} Hour: {1:02d}".format(len(tracked_model_objects), hour)) return tracked_model_objects
[ "def", "find_model_tracks", "(", "self", ")", ":", "self", ".", "model_grid", ".", "load_data", "(", ")", "model_objects", "=", "[", "]", "tracked_model_objects", "=", "[", "]", "if", "self", ".", "model_grid", ".", "data", "is", "None", ":", "print", "(", "\"No model output found\"", ")", "return", "tracked_model_objects", "for", "h", ",", "hour", "in", "enumerate", "(", "self", ".", "hours", ")", ":", "# Identify storms at each time step and apply size filter", "print", "(", "\"Finding {0} objects for run {1} Hour: {2:02d}\"", ".", "format", "(", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ".", "strftime", "(", "\"%Y%m%d%H\"", ")", ",", "hour", ")", ")", "if", "self", ".", "mask", "is", "not", "None", ":", "model_data", "=", "self", ".", "model_grid", ".", "data", "[", "h", "]", "*", "self", ".", "mask", "else", ":", "model_data", "=", "self", ".", "model_grid", ".", "data", "[", "h", "]", "# remember orig values", "min_orig", "=", "self", ".", "model_ew", ".", "min_thresh", "max_orig", "=", "self", ".", "model_ew", ".", "max_thresh", "data_increment_orig", "=", "self", ".", "model_ew", ".", "data_increment", "# scale to int 0-100.", "scaled_data", "=", "np", ".", "array", "(", "rescale_data", "(", "self", ".", "model_grid", ".", "data", "[", "h", "]", ",", "min_orig", ",", "max_orig", ")", ")", "self", ".", "model_ew", ".", "min_thresh", "=", "0", "self", ".", "model_ew", ".", "data_increment", "=", "1", "self", ".", "model_ew", ".", "max_thresh", "=", "100", "hour_labels", "=", "self", ".", "model_ew", ".", "label", "(", "gaussian_filter", "(", "scaled_data", ",", "self", ".", "gaussian_window", ")", ")", "hour_labels", "[", "model_data", "<", "self", ".", "model_ew", ".", "min_thresh", "]", "=", "0", "hour_labels", "=", "self", ".", "model_ew", ".", "size_filter", "(", "hour_labels", ",", "self", ".", "size_filter", ")", "# Return to orig values", "self", ".", "model_ew", ".", "min_thresh", "=", "min_orig", "self", ".", "model_ew", ".", "max_thresh", "=", "max_orig", "self", ".", "model_ew", ".", "data_increment", "=", "data_increment_orig", "obj_slices", "=", "find_objects", "(", "hour_labels", ")", "num_slices", "=", "len", "(", "obj_slices", ")", "model_objects", ".", "append", "(", "[", "]", ")", "if", "num_slices", ">", "0", ":", "for", "s", ",", "sl", "in", "enumerate", "(", "obj_slices", ")", ":", "model_objects", "[", "-", "1", "]", ".", "append", "(", "STObject", "(", "self", ".", "model_grid", ".", "data", "[", "h", "]", "[", "sl", "]", ",", "np", ".", "where", "(", "hour_labels", "[", "sl", "]", "==", "s", "+", "1", ",", "1", ",", "0", ")", ",", "self", ".", "model_grid", ".", "x", "[", "sl", "]", ",", "self", ".", "model_grid", ".", "y", "[", "sl", "]", ",", "self", ".", "model_grid", ".", "i", "[", "sl", "]", ",", "self", ".", "model_grid", ".", "j", "[", "sl", "]", ",", "hour", ",", "hour", ",", "dx", "=", "self", ".", "model_grid", ".", "dx", ")", ")", "if", "h", ">", "0", ":", "dims", "=", "model_objects", "[", "-", "1", "]", "[", "-", "1", "]", ".", "timesteps", "[", "0", "]", ".", "shape", "model_objects", "[", "-", "1", "]", "[", "-", "1", "]", ".", "estimate_motion", "(", "hour", ",", "self", ".", "model_grid", ".", "data", "[", "h", "-", "1", "]", ",", "dims", "[", "1", "]", ",", "dims", "[", "0", "]", ")", "del", "hour_labels", "del", "scaled_data", "del", "model_data", "for", "h", ",", "hour", "in", "enumerate", "(", "self", ".", "hours", ")", ":", "past_time_objs", "=", "[", "]", "for", "obj", "in", "tracked_model_objects", ":", "# Potential 
trackable objects are identified", "if", "obj", ".", "end_time", "==", "hour", "-", "1", ":", "past_time_objs", ".", "append", "(", "obj", ")", "# If no objects existed in the last time step, then consider objects in current time step all new", "if", "len", "(", "past_time_objs", ")", "==", "0", ":", "tracked_model_objects", ".", "extend", "(", "model_objects", "[", "h", "]", ")", "# Match from previous time step with current time step", "elif", "len", "(", "past_time_objs", ")", ">", "0", "and", "len", "(", "model_objects", "[", "h", "]", ")", ">", "0", ":", "assignments", "=", "self", ".", "object_matcher", ".", "match_objects", "(", "past_time_objs", ",", "model_objects", "[", "h", "]", ",", "hour", "-", "1", ",", "hour", ")", "unpaired", "=", "list", "(", "range", "(", "len", "(", "model_objects", "[", "h", "]", ")", ")", ")", "for", "pair", "in", "assignments", ":", "past_time_objs", "[", "pair", "[", "0", "]", "]", ".", "extend", "(", "model_objects", "[", "h", "]", "[", "pair", "[", "1", "]", "]", ")", "unpaired", ".", "remove", "(", "pair", "[", "1", "]", ")", "if", "len", "(", "unpaired", ")", ">", "0", ":", "for", "up", "in", "unpaired", ":", "tracked_model_objects", ".", "append", "(", "model_objects", "[", "h", "]", "[", "up", "]", ")", "print", "(", "\"Tracked Model Objects: {0:03d} Hour: {1:02d}\"", ".", "format", "(", "len", "(", "tracked_model_objects", ")", ",", "hour", ")", ")", "return", "tracked_model_objects" ]
Identify storms at each model time step and link them together with object matching. Returns: List of STObjects containing model track information.
[ "Identify", "storms", "at", "each", "model", "time", "step", "and", "link", "them", "together", "with", "object", "matching", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/TrackProcessing.py#L167-L247
train
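The second loop of find_model_tracks implements a greedy hour-by-hour linker: gather tracks that ended in the previous hour, match them against this hour's objects, extend the matched tracks, and start new tracks from anything unmatched. A self-contained sketch, with a toy nearest-value matcher standing in for ObjectMatcher.match_objects (hypothetical, for illustration):

def link_tracks(objects_by_hour, hours, match_fn):
    # greedy linking: each track is a list of (hour, object) pairs
    tracks = []
    for h, hour in enumerate(hours):
        past = [t for t in tracks if t[-1][0] == hour - 1]
        current = objects_by_hour[h]
        if not past:
            tracks.extend([(hour, obj)] for obj in current)
        elif current:
            unpaired = set(range(len(current)))
            for pi, ci in match_fn(past, current):
                past[pi].append((hour, current[ci]))   # extend matched track in place
                unpaired.discard(ci)
            tracks.extend([(hour, current[ci])] for ci in sorted(unpaired))
    return tracks

def nearest_match(past_tracks, objects):
    # toy matcher: pair each past track with the closest unused object
    pairs, used = [], set()
    for pi, track in enumerate(past_tracks):
        last = track[-1][1]
        best = min((o for o in range(len(objects)) if o not in used),
                   key=lambda o: abs(objects[o] - last), default=None)
        if best is not None:
            pairs.append((pi, best))
            used.add(best)
    return pairs

print(link_tracks([[10.0], [11.0, 30.0], [12.0]], [0, 1, 2], nearest_match))
# [[(0, 10.0), (1, 11.0), (2, 12.0)], [(1, 30.0)]]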
djgagne/hagelslag
hagelslag/processing/TrackProcessing.py
TrackProcessor.find_mrms_tracks
def find_mrms_tracks(self): """ Identify objects from MRMS timesteps and link them together with object matching. Returns: List of STObjects containing MESH track information. """ obs_objects = [] tracked_obs_objects = [] if self.mrms_ew is not None: self.mrms_grid.load_data() if len(self.mrms_grid.data) != len(self.hours): print('Less than 24 hours of observation data found') return tracked_obs_objects for h, hour in enumerate(self.hours): mrms_data = np.zeros(self.mrms_grid.data[h].shape) mrms_data[:] = np.array(self.mrms_grid.data[h]) mrms_data[mrms_data < 0] = 0 hour_labels = self.mrms_ew.size_filter(self.mrms_ew.label(gaussian_filter(mrms_data, self.gaussian_window)), self.size_filter) hour_labels[mrms_data < self.mrms_ew.min_thresh] = 0 obj_slices = find_objects(hour_labels) num_slices = len(obj_slices) obs_objects.append([]) if num_slices > 0: for sl in obj_slices: obs_objects[-1].append(STObject(mrms_data[sl], np.where(hour_labels[sl] > 0, 1, 0), self.model_grid.x[sl], self.model_grid.y[sl], self.model_grid.i[sl], self.model_grid.j[sl], hour, hour, dx=self.model_grid.dx)) if h > 0: dims = obs_objects[-1][-1].timesteps[0].shape obs_objects[-1][-1].estimate_motion(hour, self.mrms_grid.data[h-1], dims[1], dims[0]) for h, hour in enumerate(self.hours): past_time_objs = [] for obj in tracked_obs_objects: if obj.end_time == hour - 1: past_time_objs.append(obj) if len(past_time_objs) == 0: tracked_obs_objects.extend(obs_objects[h]) elif len(past_time_objs) > 0 and len(obs_objects[h]) > 0: assignments = self.object_matcher.match_objects(past_time_objs, obs_objects[h], hour - 1, hour) unpaired = list(range(len(obs_objects[h]))) for pair in assignments: past_time_objs[pair[0]].extend(obs_objects[h][pair[1]]) unpaired.remove(pair[1]) if len(unpaired) > 0: for up in unpaired: tracked_obs_objects.append(obs_objects[h][up]) print("Tracked Obs Objects: {0:03d} Hour: {1:02d}".format(len(tracked_obs_objects), hour)) return tracked_obs_objects
python
def find_mrms_tracks(self): """ Identify objects from MRMS timesteps and link them together with object matching. Returns: List of STObjects containing MESH track information. """ obs_objects = [] tracked_obs_objects = [] if self.mrms_ew is not None: self.mrms_grid.load_data() if len(self.mrms_grid.data) != len(self.hours): print('Less than 24 hours of observation data found') return tracked_obs_objects for h, hour in enumerate(self.hours): mrms_data = np.zeros(self.mrms_grid.data[h].shape) mrms_data[:] = np.array(self.mrms_grid.data[h]) mrms_data[mrms_data < 0] = 0 hour_labels = self.mrms_ew.size_filter(self.mrms_ew.label(gaussian_filter(mrms_data, self.gaussian_window)), self.size_filter) hour_labels[mrms_data < self.mrms_ew.min_thresh] = 0 obj_slices = find_objects(hour_labels) num_slices = len(obj_slices) obs_objects.append([]) if num_slices > 0: for sl in obj_slices: obs_objects[-1].append(STObject(mrms_data[sl], np.where(hour_labels[sl] > 0, 1, 0), self.model_grid.x[sl], self.model_grid.y[sl], self.model_grid.i[sl], self.model_grid.j[sl], hour, hour, dx=self.model_grid.dx)) if h > 0: dims = obs_objects[-1][-1].timesteps[0].shape obs_objects[-1][-1].estimate_motion(hour, self.mrms_grid.data[h-1], dims[1], dims[0]) for h, hour in enumerate(self.hours): past_time_objs = [] for obj in tracked_obs_objects: if obj.end_time == hour - 1: past_time_objs.append(obj) if len(past_time_objs) == 0: tracked_obs_objects.extend(obs_objects[h]) elif len(past_time_objs) > 0 and len(obs_objects[h]) > 0: assignments = self.object_matcher.match_objects(past_time_objs, obs_objects[h], hour - 1, hour) unpaired = list(range(len(obs_objects[h]))) for pair in assignments: past_time_objs[pair[0]].extend(obs_objects[h][pair[1]]) unpaired.remove(pair[1]) if len(unpaired) > 0: for up in unpaired: tracked_obs_objects.append(obs_objects[h][up]) print("Tracked Obs Objects: {0:03d} Hour: {1:02d}".format(len(tracked_obs_objects), hour)) return tracked_obs_objects
[ "def", "find_mrms_tracks", "(", "self", ")", ":", "obs_objects", "=", "[", "]", "tracked_obs_objects", "=", "[", "]", "if", "self", ".", "mrms_ew", "is", "not", "None", ":", "self", ".", "mrms_grid", ".", "load_data", "(", ")", "if", "len", "(", "self", ".", "mrms_grid", ".", "data", ")", "!=", "len", "(", "self", ".", "hours", ")", ":", "print", "(", "'Less than 24 hours of observation data found'", ")", "return", "tracked_obs_objects", "for", "h", ",", "hour", "in", "enumerate", "(", "self", ".", "hours", ")", ":", "mrms_data", "=", "np", ".", "zeros", "(", "self", ".", "mrms_grid", ".", "data", "[", "h", "]", ".", "shape", ")", "mrms_data", "[", ":", "]", "=", "np", ".", "array", "(", "self", ".", "mrms_grid", ".", "data", "[", "h", "]", ")", "mrms_data", "[", "mrms_data", "<", "0", "]", "=", "0", "hour_labels", "=", "self", ".", "mrms_ew", ".", "size_filter", "(", "self", ".", "mrms_ew", ".", "label", "(", "gaussian_filter", "(", "mrms_data", ",", "self", ".", "gaussian_window", ")", ")", ",", "self", ".", "size_filter", ")", "hour_labels", "[", "mrms_data", "<", "self", ".", "mrms_ew", ".", "min_thresh", "]", "=", "0", "obj_slices", "=", "find_objects", "(", "hour_labels", ")", "num_slices", "=", "len", "(", "obj_slices", ")", "obs_objects", ".", "append", "(", "[", "]", ")", "if", "num_slices", ">", "0", ":", "for", "sl", "in", "obj_slices", ":", "obs_objects", "[", "-", "1", "]", ".", "append", "(", "STObject", "(", "mrms_data", "[", "sl", "]", ",", "np", ".", "where", "(", "hour_labels", "[", "sl", "]", ">", "0", ",", "1", ",", "0", ")", ",", "self", ".", "model_grid", ".", "x", "[", "sl", "]", ",", "self", ".", "model_grid", ".", "y", "[", "sl", "]", ",", "self", ".", "model_grid", ".", "i", "[", "sl", "]", ",", "self", ".", "model_grid", ".", "j", "[", "sl", "]", ",", "hour", ",", "hour", ",", "dx", "=", "self", ".", "model_grid", ".", "dx", ")", ")", "if", "h", ">", "0", ":", "dims", "=", "obs_objects", "[", "-", "1", "]", "[", "-", "1", "]", ".", "timesteps", "[", "0", "]", ".", "shape", "obs_objects", "[", "-", "1", "]", "[", "-", "1", "]", ".", "estimate_motion", "(", "hour", ",", "self", ".", "mrms_grid", ".", "data", "[", "h", "-", "1", "]", ",", "dims", "[", "1", "]", ",", "dims", "[", "0", "]", ")", "for", "h", ",", "hour", "in", "enumerate", "(", "self", ".", "hours", ")", ":", "past_time_objs", "=", "[", "]", "for", "obj", "in", "tracked_obs_objects", ":", "if", "obj", ".", "end_time", "==", "hour", "-", "1", ":", "past_time_objs", ".", "append", "(", "obj", ")", "if", "len", "(", "past_time_objs", ")", "==", "0", ":", "tracked_obs_objects", ".", "extend", "(", "obs_objects", "[", "h", "]", ")", "elif", "len", "(", "past_time_objs", ")", ">", "0", "and", "len", "(", "obs_objects", "[", "h", "]", ")", ">", "0", ":", "assignments", "=", "self", ".", "object_matcher", ".", "match_objects", "(", "past_time_objs", ",", "obs_objects", "[", "h", "]", ",", "hour", "-", "1", ",", "hour", ")", "unpaired", "=", "list", "(", "range", "(", "len", "(", "obs_objects", "[", "h", "]", ")", ")", ")", "for", "pair", "in", "assignments", ":", "past_time_objs", "[", "pair", "[", "0", "]", "]", ".", "extend", "(", "obs_objects", "[", "h", "]", "[", "pair", "[", "1", "]", "]", ")", "unpaired", ".", "remove", "(", "pair", "[", "1", "]", ")", "if", "len", "(", "unpaired", ")", ">", "0", ":", "for", "up", "in", "unpaired", ":", "tracked_obs_objects", ".", "append", "(", "obs_objects", "[", "h", "]", "[", "up", "]", ")", "print", "(", "\"Tracked Obs Objects: {0:03d} Hour: {1:02d}\"", ".", 
"format", "(", "len", "(", "tracked_obs_objects", ")", ",", "hour", ")", ")", "return", "tracked_obs_objects" ]
Identify objects from MRMS timesteps and link them together with object matching. Returns: List of STObjects containing MESH track information.
[ "Identify", "objects", "from", "MRMS", "timesteps", "and", "link", "them", "together", "with", "object", "matching", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/TrackProcessing.py#L267-L328
train
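Before labeling, find_mrms_tracks copies the MESH grid, clips negative (missing-data) values to zero, and zeroes any label pixels where the raw field falls below the watershed minimum. A minimal numpy sketch of that cleanup (values are illustrative):

import numpy as np

mesh = np.array([[-999.0, 5.0], [20.0, 35.0]])
data = mesh.copy()
data[data < 0] = 0                    # treat negative/missing values as no hail

labels = np.array([[1, 1], [2, 2]])   # labels from an object identification step
min_thresh = 10.0
labels[data < min_thresh] = 0         # drop label pixels below the minimum threshold
print(labels)                         # [[0 0] [2 2]]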
djgagne/hagelslag
hagelslag/processing/TrackProcessing.py
TrackProcessor.match_tracks
def match_tracks(self, model_tracks, obs_tracks, unique_matches=True, closest_matches=False): """ Match forecast and observed tracks. Args: model_tracks: obs_tracks: unique_matches: closest_matches: Returns: """ if unique_matches: pairings = self.track_matcher.match_tracks(model_tracks, obs_tracks, closest_matches=closest_matches) else: pairings = self.track_matcher.neighbor_matches(model_tracks, obs_tracks) return pairings
python
def match_tracks(self, model_tracks, obs_tracks, unique_matches=True, closest_matches=False): """ Match forecast and observed tracks. Args: model_tracks: obs_tracks: unique_matches: closest_matches: Returns: """ if unique_matches: pairings = self.track_matcher.match_tracks(model_tracks, obs_tracks, closest_matches=closest_matches) else: pairings = self.track_matcher.neighbor_matches(model_tracks, obs_tracks) return pairings
[ "def", "match_tracks", "(", "self", ",", "model_tracks", ",", "obs_tracks", ",", "unique_matches", "=", "True", ",", "closest_matches", "=", "False", ")", ":", "if", "unique_matches", ":", "pairings", "=", "self", ".", "track_matcher", ".", "match_tracks", "(", "model_tracks", ",", "obs_tracks", ",", "closest_matches", "=", "closest_matches", ")", "else", ":", "pairings", "=", "self", ".", "track_matcher", ".", "neighbor_matches", "(", "model_tracks", ",", "obs_tracks", ")", "return", "pairings" ]
Match forecast and observed tracks. Args: model_tracks: List of forecast track STObjects. obs_tracks: List of observed track STObjects. unique_matches: If True, match each forecast track with at most one observed track; otherwise allow neighborhood (one-to-many) matches. closest_matches: If True, prefer the closest match when matching uniquely. Returns: List of (forecast track index, observed track index) pairings.
[ "Match", "forecast", "and", "observed", "tracks", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/TrackProcessing.py#L330-L347
train
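match_tracks is a thin dispatcher over the two matching strategies. A hypothetical call, assuming a constructed TrackProcessor named processor (illustrative only):

pairings = processor.match_tracks(model_tracks, obs_tracks, unique_matches=True, closest_matches=False)
for model_idx, obs_idx in pairings:
    print(model_tracks[model_idx], obs_tracks[obs_idx])

Each pairing is an index pair into the forecast and observed track lists, which is how match_hail_sizes below consumes it.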
djgagne/hagelslag
hagelslag/processing/TrackProcessing.py
TrackProcessor.extract_model_attributes
def extract_model_attributes(self, tracked_model_objects, storm_variables, potential_variables, tendency_variables=None, future_variables=None): """ Extract model attribute data for each model track. Storm variables are those that describe the model storm directly, such as radar reflectivity or updraft helicity. Potential variables describe the surrounding environmental conditions of the storm, and should be extracted from the timestep before the storm arrives to reduce the chance of the storm contaminating the environmental values. Examples of potential variables include CAPE, shear, temperature, and dewpoint. Future variables are fields that occur in the hour after the extracted field. Args: tracked_model_objects: List of STObjects describing each forecasted storm storm_variables: List of storm variable names potential_variables: List of potential variable names. tendency_variables: List of tendency variables """ if tendency_variables is None: tendency_variables = [] if future_variables is None: future_variables = [] model_grids = {} for l_var in ["lon", "lat"]: for model_obj in tracked_model_objects: model_obj.extract_attribute_array(getattr(self.model_grid, l_var), l_var) for storm_var in storm_variables: print("Storm {0} {1} {2}".format(storm_var,self.ensemble_member, self.run_date.strftime("%Y%m%d"))) model_grids[storm_var] = ModelOutput(self.ensemble_name, self.ensemble_member, self.run_date, storm_var, self.start_date - timedelta(hours=1), self.end_date + timedelta(hours=1), self.model_path,self.model_map_file, self.sector_ind_path,self.single_step) model_grids[storm_var].load_data() for model_obj in tracked_model_objects: model_obj.extract_attribute_grid(model_grids[storm_var]) if storm_var not in potential_variables + tendency_variables + future_variables: del model_grids[storm_var] for potential_var in potential_variables: print("Potential {0} {1} {2}".format(potential_var,self.ensemble_member, self.run_date.strftime("%Y%m%d"))) if potential_var not in model_grids.keys(): model_grids[potential_var] = ModelOutput(self.ensemble_name, self.ensemble_member, self.run_date, potential_var, self.start_date - timedelta(hours=1), self.end_date + timedelta(hours=1), self.model_path, self.model_map_file, self.sector_ind_path,self.single_step) model_grids[potential_var].load_data() for model_obj in tracked_model_objects: model_obj.extract_attribute_grid(model_grids[potential_var], potential=True) if potential_var not in tendency_variables + future_variables: del model_grids[potential_var] for future_var in future_variables: print("Future {0} {1} {2}".format(future_var, self.ensemble_member, self.run_date.strftime("%Y%m%d"))) if future_var not in model_grids.keys(): model_grids[future_var] = ModelOutput(self.ensemble_name, self.ensemble_member, self.run_date, future_var, self.start_date - timedelta(hours=1), self.end_date + timedelta(hours=1), self.model_path, self.model_map_file, self.sector_ind_path,self.single_step) model_grids[future_var].load_data() for model_obj in tracked_model_objects: model_obj.extract_attribute_grid(model_grids[future_var], future=True) if future_var not in tendency_variables: del model_grids[future_var] for tendency_var in tendency_variables: print("Tendency {0} {1} {2}".format(tendency_var, self.ensemble_member, self.run_date.strftime("%Y%m%d"))) if tendency_var not in model_grids.keys(): model_grids[tendency_var] = ModelOutput(self.ensemble_name, self.ensemble_member, self.run_date, tendency_var, self.start_date - timedelta(hours=1), self.end_date, 
self.model_path, self.model_map_file, self.sector_ind_path,self.single_step) for model_obj in tracked_model_objects: model_obj.extract_tendency_grid(model_grids[tendency_var]) del model_grids[tendency_var]
python
def extract_model_attributes(self, tracked_model_objects, storm_variables, potential_variables, tendency_variables=None, future_variables=None): """ Extract model attribute data for each model track. Storm variables are those that describe the model storm directly, such as radar reflectivity or updraft helicity. Potential variables describe the surrounding environmental conditions of the storm, and should be extracted from the timestep before the storm arrives to reduce the chance of the storm contaminating the environmental values. Examples of potential variables include CAPE, shear, temperature, and dewpoint. Future variables are fields that occur in the hour after the extracted field. Args: tracked_model_objects: List of STObjects describing each forecasted storm storm_variables: List of storm variable names potential_variables: List of potential variable names. tendency_variables: List of tendency variables """ if tendency_variables is None: tendency_variables = [] if future_variables is None: future_variables = [] model_grids = {} for l_var in ["lon", "lat"]: for model_obj in tracked_model_objects: model_obj.extract_attribute_array(getattr(self.model_grid, l_var), l_var) for storm_var in storm_variables: print("Storm {0} {1} {2}".format(storm_var,self.ensemble_member, self.run_date.strftime("%Y%m%d"))) model_grids[storm_var] = ModelOutput(self.ensemble_name, self.ensemble_member, self.run_date, storm_var, self.start_date - timedelta(hours=1), self.end_date + timedelta(hours=1), self.model_path,self.model_map_file, self.sector_ind_path,self.single_step) model_grids[storm_var].load_data() for model_obj in tracked_model_objects: model_obj.extract_attribute_grid(model_grids[storm_var]) if storm_var not in potential_variables + tendency_variables + future_variables: del model_grids[storm_var] for potential_var in potential_variables: print("Potential {0} {1} {2}".format(potential_var,self.ensemble_member, self.run_date.strftime("%Y%m%d"))) if potential_var not in model_grids.keys(): model_grids[potential_var] = ModelOutput(self.ensemble_name, self.ensemble_member, self.run_date, potential_var, self.start_date - timedelta(hours=1), self.end_date + timedelta(hours=1), self.model_path, self.model_map_file, self.sector_ind_path,self.single_step) model_grids[potential_var].load_data() for model_obj in tracked_model_objects: model_obj.extract_attribute_grid(model_grids[potential_var], potential=True) if potential_var not in tendency_variables + future_variables: del model_grids[potential_var] for future_var in future_variables: print("Future {0} {1} {2}".format(future_var, self.ensemble_member, self.run_date.strftime("%Y%m%d"))) if future_var not in model_grids.keys(): model_grids[future_var] = ModelOutput(self.ensemble_name, self.ensemble_member, self.run_date, future_var, self.start_date - timedelta(hours=1), self.end_date + timedelta(hours=1), self.model_path, self.model_map_file, self.sector_ind_path,self.single_step) model_grids[future_var].load_data() for model_obj in tracked_model_objects: model_obj.extract_attribute_grid(model_grids[future_var], future=True) if future_var not in tendency_variables: del model_grids[future_var] for tendency_var in tendency_variables: print("Tendency {0} {1} {2}".format(tendency_var, self.ensemble_member, self.run_date.strftime("%Y%m%d"))) if tendency_var not in model_grids.keys(): model_grids[tendency_var] = ModelOutput(self.ensemble_name, self.ensemble_member, self.run_date, tendency_var, self.start_date - timedelta(hours=1), self.end_date, 
self.model_path, self.model_map_file, self.sector_ind_path,self.single_step) for model_obj in tracked_model_objects: model_obj.extract_tendency_grid(model_grids[tendency_var]) del model_grids[tendency_var]
[ "def", "extract_model_attributes", "(", "self", ",", "tracked_model_objects", ",", "storm_variables", ",", "potential_variables", ",", "tendency_variables", "=", "None", ",", "future_variables", "=", "None", ")", ":", "if", "tendency_variables", "is", "None", ":", "tendency_variables", "=", "[", "]", "if", "future_variables", "is", "None", ":", "future_variables", "=", "[", "]", "model_grids", "=", "{", "}", "for", "l_var", "in", "[", "\"lon\"", ",", "\"lat\"", "]", ":", "for", "model_obj", "in", "tracked_model_objects", ":", "model_obj", ".", "extract_attribute_array", "(", "getattr", "(", "self", ".", "model_grid", ",", "l_var", ")", ",", "l_var", ")", "for", "storm_var", "in", "storm_variables", ":", "print", "(", "\"Storm {0} {1} {2}\"", ".", "format", "(", "storm_var", ",", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ".", "strftime", "(", "\"%Y%m%d\"", ")", ")", ")", "model_grids", "[", "storm_var", "]", "=", "ModelOutput", "(", "self", ".", "ensemble_name", ",", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ",", "storm_var", ",", "self", ".", "start_date", "-", "timedelta", "(", "hours", "=", "1", ")", ",", "self", ".", "end_date", "+", "timedelta", "(", "hours", "=", "1", ")", ",", "self", ".", "model_path", ",", "self", ".", "model_map_file", ",", "self", ".", "sector_ind_path", ",", "self", ".", "single_step", ")", "model_grids", "[", "storm_var", "]", ".", "load_data", "(", ")", "for", "model_obj", "in", "tracked_model_objects", ":", "model_obj", ".", "extract_attribute_grid", "(", "model_grids", "[", "storm_var", "]", ")", "if", "storm_var", "not", "in", "potential_variables", "+", "tendency_variables", "+", "future_variables", ":", "del", "model_grids", "[", "storm_var", "]", "for", "potential_var", "in", "potential_variables", ":", "print", "(", "\"Potential {0} {1} {2}\"", ".", "format", "(", "potential_var", ",", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ".", "strftime", "(", "\"%Y%m%d\"", ")", ")", ")", "if", "potential_var", "not", "in", "model_grids", ".", "keys", "(", ")", ":", "model_grids", "[", "potential_var", "]", "=", "ModelOutput", "(", "self", ".", "ensemble_name", ",", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ",", "potential_var", ",", "self", ".", "start_date", "-", "timedelta", "(", "hours", "=", "1", ")", ",", "self", ".", "end_date", "+", "timedelta", "(", "hours", "=", "1", ")", ",", "self", ".", "model_path", ",", "self", ".", "model_map_file", ",", "self", ".", "sector_ind_path", ",", "self", ".", "single_step", ")", "model_grids", "[", "potential_var", "]", ".", "load_data", "(", ")", "for", "model_obj", "in", "tracked_model_objects", ":", "model_obj", ".", "extract_attribute_grid", "(", "model_grids", "[", "potential_var", "]", ",", "potential", "=", "True", ")", "if", "potential_var", "not", "in", "tendency_variables", "+", "future_variables", ":", "del", "model_grids", "[", "potential_var", "]", "for", "future_var", "in", "future_variables", ":", "print", "(", "\"Future {0} {1} {2}\"", ".", "format", "(", "future_var", ",", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ".", "strftime", "(", "\"%Y%m%d\"", ")", ")", ")", "if", "future_var", "not", "in", "model_grids", ".", "keys", "(", ")", ":", "model_grids", "[", "future_var", "]", "=", "ModelOutput", "(", "self", ".", "ensemble_name", ",", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ",", "future_var", ",", "self", ".", "start_date", "-", "timedelta", "(", "hours", "=", "1", ")", ",", "self", 
".", "end_date", "+", "timedelta", "(", "hours", "=", "1", ")", ",", "self", ".", "model_path", ",", "self", ".", "model_map_file", ",", "self", ".", "sector_ind_path", ",", "self", ".", "single_step", ")", "model_grids", "[", "future_var", "]", ".", "load_data", "(", ")", "for", "model_obj", "in", "tracked_model_objects", ":", "model_obj", ".", "extract_attribute_grid", "(", "model_grids", "[", "future_var", "]", ",", "future", "=", "True", ")", "if", "future_var", "not", "in", "tendency_variables", ":", "del", "model_grids", "[", "future_var", "]", "for", "tendency_var", "in", "tendency_variables", ":", "print", "(", "\"Tendency {0} {1} {2}\"", ".", "format", "(", "tendency_var", ",", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ".", "strftime", "(", "\"%Y%m%d\"", ")", ")", ")", "if", "tendency_var", "not", "in", "model_grids", ".", "keys", "(", ")", ":", "model_grids", "[", "tendency_var", "]", "=", "ModelOutput", "(", "self", ".", "ensemble_name", ",", "self", ".", "ensemble_member", ",", "self", ".", "run_date", ",", "tendency_var", ",", "self", ".", "start_date", "-", "timedelta", "(", "hours", "=", "1", ")", ",", "self", ".", "end_date", ",", "self", ".", "model_path", ",", "self", ".", "model_map_file", ",", "self", ".", "sector_ind_path", ",", "self", ".", "single_step", ")", "for", "model_obj", "in", "tracked_model_objects", ":", "model_obj", ".", "extract_tendency_grid", "(", "model_grids", "[", "tendency_var", "]", ")", "del", "model_grids", "[", "tendency_var", "]" ]
Extract model attribute data for each model track. Storm variables are those that describe the model storm directly, such as radar reflectivity or updraft helicity. Potential variables describe the surrounding environmental conditions of the storm, and should be extracted from the timestep before the storm arrives to reduce the chance of the storm contaminating the environmental values. Examples of potential variables include CAPE, shear, temperature, and dewpoint. Future variables are fields that occur in the hour after the extracted field. Args: tracked_model_objects: List of STObjects describing each forecasted storm. storm_variables: List of storm variable names. potential_variables: List of potential variable names. tendency_variables: List of tendency variable names. future_variables: List of future variable names.
[ "Extract", "model", "attribute", "data", "for", "each", "model", "track", ".", "Storm", "variables", "are", "those", "that", "describe", "the", "model", "storm", "directly", "such", "as", "radar", "reflectivity", "or", "updraft", "helicity", ".", "Potential", "variables", "describe", "the", "surrounding", "environmental", "conditions", "of", "the", "storm", "and", "should", "be", "extracted", "from", "the", "timestep", "before", "the", "storm", "arrives", "to", "reduce", "the", "chance", "of", "the", "storm", "contaminating", "the", "environmental", "values", ".", "Examples", "of", "potential", "variables", "include", "CAPE", "shear", "temperature", "and", "dewpoint", ".", "Future", "variables", "are", "fields", "that", "occur", "in", "the", "hour", "after", "the", "extracted", "field", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/TrackProcessing.py#L352-L427
train
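The storm/potential/tendency/future split described in the docstring above is easiest to see in a call. A minimal usage sketch, assuming a TrackProcessor instance named proc and hypothetical WRF-style field names (none of these identifiers come from the record itself):

storm_vars = ["REFL_COM", "UP_HELI_MAX"]          # describe the storm object directly
potential_vars = ["MLCAPE", "SHR6"]               # environment, sampled before storm arrival
proc.extract_model_attributes(tracked_model_objects,
                              storm_variables=storm_vars,
                              potential_variables=potential_vars,
                              tendency_variables=["MLCAPE"],     # change over time
                              future_variables=["UP_HELI_MAX"])  # hour after extraction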
djgagne/hagelslag
hagelslag/processing/TrackProcessing.py
TrackProcessor.match_hail_sizes
def match_hail_sizes(model_tracks, obs_tracks, track_pairings): """ Given forecast and observed track pairings, maximum hail sizes are associated with each paired forecast storm track timestep. If the duration of the forecast and observed tracks differ, then interpolation is used for the intermediate timesteps. Args: model_tracks: List of model track STObjects obs_tracks: List of observed STObjects track_pairings: list of tuples containing the indices of the paired (forecast, observed) tracks """ unpaired = list(range(len(model_tracks))) for p, pair in enumerate(track_pairings): model_track = model_tracks[pair[0]] unpaired.remove(pair[0]) obs_track = obs_tracks[pair[1]] obs_hail_sizes = np.array([step[obs_track.masks[t] == 1].max() for t, step in enumerate(obs_track.timesteps)]) if obs_track.times.size > 1 and model_track.times.size > 1: normalized_obs_times = 1.0 / (obs_track.times.max() - obs_track.times.min())\ * (obs_track.times - obs_track.times.min()) normalized_model_times = 1.0 / (model_track.times.max() - model_track.times.min())\ * (model_track.times - model_track.times.min()) hail_interp = interp1d(normalized_obs_times, obs_hail_sizes, kind="nearest", bounds_error=False, fill_value=0) model_track.observations = hail_interp(normalized_model_times) elif obs_track.times.size == 1: model_track.observations = np.ones(model_track.times.shape) * obs_hail_sizes[0] elif model_track.times.size == 1: model_track.observations = np.array([obs_hail_sizes.max()]) print(pair[0], "obs", obs_hail_sizes) print(pair[0], "model", model_track.observations) for u in unpaired: model_tracks[u].observations = np.zeros(model_tracks[u].times.shape)
python
def match_hail_sizes(model_tracks, obs_tracks, track_pairings): """ Given forecast and observed track pairings, maximum hail sizes are associated with each paired forecast storm track timestep. If the duration of the forecast and observed tracks differ, then interpolation is used for the intermediate timesteps. Args: model_tracks: List of model track STObjects obs_tracks: List of observed STObjects track_pairings: list of tuples containing the indices of the paired (forecast, observed) tracks """ unpaired = list(range(len(model_tracks))) for p, pair in enumerate(track_pairings): model_track = model_tracks[pair[0]] unpaired.remove(pair[0]) obs_track = obs_tracks[pair[1]] obs_hail_sizes = np.array([step[obs_track.masks[t] == 1].max() for t, step in enumerate(obs_track.timesteps)]) if obs_track.times.size > 1 and model_track.times.size > 1: normalized_obs_times = 1.0 / (obs_track.times.max() - obs_track.times.min())\ * (obs_track.times - obs_track.times.min()) normalized_model_times = 1.0 / (model_track.times.max() - model_track.times.min())\ * (model_track.times - model_track.times.min()) hail_interp = interp1d(normalized_obs_times, obs_hail_sizes, kind="nearest", bounds_error=False, fill_value=0) model_track.observations = hail_interp(normalized_model_times) elif obs_track.times.size == 1: model_track.observations = np.ones(model_track.times.shape) * obs_hail_sizes[0] elif model_track.times.size == 1: model_track.observations = np.array([obs_hail_sizes.max()]) print(pair[0], "obs", obs_hail_sizes) print(pair[0], "model", model_track.observations) for u in unpaired: model_tracks[u].observations = np.zeros(model_tracks[u].times.shape)
[ "def", "match_hail_sizes", "(", "model_tracks", ",", "obs_tracks", ",", "track_pairings", ")", ":", "unpaired", "=", "list", "(", "range", "(", "len", "(", "model_tracks", ")", ")", ")", "for", "p", ",", "pair", "in", "enumerate", "(", "track_pairings", ")", ":", "model_track", "=", "model_tracks", "[", "pair", "[", "0", "]", "]", "unpaired", ".", "remove", "(", "pair", "[", "0", "]", ")", "obs_track", "=", "obs_tracks", "[", "pair", "[", "1", "]", "]", "obs_hail_sizes", "=", "np", ".", "array", "(", "[", "step", "[", "obs_track", ".", "masks", "[", "t", "]", "==", "1", "]", ".", "max", "(", ")", "for", "t", ",", "step", "in", "enumerate", "(", "obs_track", ".", "timesteps", ")", "]", ")", "if", "obs_track", ".", "times", ".", "size", ">", "1", "and", "model_track", ".", "times", ".", "size", ">", "1", ":", "normalized_obs_times", "=", "1.0", "/", "(", "obs_track", ".", "times", ".", "max", "(", ")", "-", "obs_track", ".", "times", ".", "min", "(", ")", ")", "*", "(", "obs_track", ".", "times", "-", "obs_track", ".", "times", ".", "min", "(", ")", ")", "normalized_model_times", "=", "1.0", "/", "(", "model_track", ".", "times", ".", "max", "(", ")", "-", "model_track", ".", "times", ".", "min", "(", ")", ")", "*", "(", "model_track", ".", "times", "-", "model_track", ".", "times", ".", "min", "(", ")", ")", "hail_interp", "=", "interp1d", "(", "normalized_obs_times", ",", "obs_hail_sizes", ",", "kind", "=", "\"nearest\"", ",", "bounds_error", "=", "False", ",", "fill_value", "=", "0", ")", "model_track", ".", "observations", "=", "hail_interp", "(", "normalized_model_times", ")", "elif", "obs_track", ".", "times", ".", "size", "==", "1", ":", "model_track", ".", "observations", "=", "np", ".", "ones", "(", "model_track", ".", "times", ".", "shape", ")", "*", "obs_hail_sizes", "[", "0", "]", "elif", "model_track", ".", "times", ".", "size", "==", "1", ":", "model_track", ".", "observations", "=", "np", ".", "array", "(", "[", "obs_hail_sizes", ".", "max", "(", ")", "]", ")", "print", "(", "pair", "[", "0", "]", ",", "\"obs\"", ",", "obs_hail_sizes", ")", "print", "(", "pair", "[", "0", "]", ",", "\"model\"", ",", "model_track", ".", "observations", ")", "for", "u", "in", "unpaired", ":", "model_tracks", "[", "u", "]", ".", "observations", "=", "np", ".", "zeros", "(", "model_tracks", "[", "u", "]", ".", "times", ".", "shape", ")" ]
Given forecast and observed track pairings, maximum hail sizes are associated with each paired forecast storm track timestep. If the duration of the forecast and observed tracks differ, then interpolation is used for the intermediate timesteps. Args: model_tracks: List of model track STObjects obs_tracks: List of observed STObjects track_pairings: list of tuples containing the indices of the paired (forecast, observed) tracks
[ "Given", "forecast", "and", "observed", "track", "pairings", "maximum", "hail", "sizes", "are", "associated", "with", "each", "paired", "forecast", "storm", "track", "timestep", ".", "If", "the", "duration", "of", "the", "forecast", "and", "observed", "tracks", "differ", "then", "interpolation", "is", "used", "for", "the", "intermediate", "timesteps", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/TrackProcessing.py#L431-L464
train
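The normalized-time interpolation in match_hail_sizes can be traced with toy numbers (all values below are made up for illustration):

import numpy as np
from scipy.interpolate import interp1d

obs_times = np.array([0, 2, 4])            # observed track: three timesteps
obs_hail = np.array([25.0, 40.0, 32.0])    # max hail size (mm) per step
model_times = np.array([0, 1, 2, 3])       # forecast track: four timesteps

# Both tracks are rescaled to [0, 1] so tracks of different durations align.
norm_obs = (obs_times - obs_times.min()) / (obs_times.max() - obs_times.min())
norm_model = (model_times - model_times.min()) / (model_times.max() - model_times.min())

hail_interp = interp1d(norm_obs, obs_hail, kind="nearest", bounds_error=False, fill_value=0)
print(hail_interp(norm_model))  # [25. 40. 40. 32.]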
djgagne/hagelslag
hagelslag/processing/TrackProcessing.py
TrackProcessor.match_hail_size_step_distributions
def match_hail_size_step_distributions(self, model_tracks, obs_tracks, track_pairings): """ Given a matching set of observed tracks for each model track, Args: model_tracks: obs_tracks: track_pairings: Returns: """ label_columns = ["Matched", "Max_Hail_Size", "Num_Matches", "Shape", "Location", "Scale"] s = 0 for m, model_track in enumerate(model_tracks): model_track.observations = pd.DataFrame(index=model_track.times, columns=label_columns, dtype=np.float64) model_track.observations.loc[:, :] = 0 model_track.observations["Matched"] = model_track.observations["Matched"].astype(np.int32) for t, time in enumerate(model_track.times): model_track.observations.loc[time, "Matched"] = track_pairings.loc[s, "Matched"] if model_track.observations.loc[time, "Matched"] > 0: all_hail_sizes = [] step_pairs = track_pairings.loc[s, "Pairings"] for step_pair in step_pairs: obs_step = obs_tracks[step_pair[0]].timesteps[step_pair[1]].ravel() obs_mask = obs_tracks[step_pair[0]].masks[step_pair[1]].ravel() all_hail_sizes.append(obs_step[(obs_mask == 1) & (obs_step >= self.mrms_ew.min_thresh)]) combined_hail_sizes = np.concatenate(all_hail_sizes) min_hail = combined_hail_sizes.min() - 0.1 model_track.observations.loc[time, "Max_Hail_Size"] = combined_hail_sizes.max() model_track.observations.loc[time, "Num_Matches"] = step_pairs.shape[0] model_track.observations.loc[time, ["Shape", "Location", "Scale"]] = gamma.fit(combined_hail_sizes, floc=min_hail) s += 1
python
def match_hail_size_step_distributions(self, model_tracks, obs_tracks, track_pairings): """ Given a matching set of observed tracks for each model track, Args: model_tracks: obs_tracks: track_pairings: Returns: """ label_columns = ["Matched", "Max_Hail_Size", "Num_Matches", "Shape", "Location", "Scale"] s = 0 for m, model_track in enumerate(model_tracks): model_track.observations = pd.DataFrame(index=model_track.times, columns=label_columns, dtype=np.float64) model_track.observations.loc[:, :] = 0 model_track.observations["Matched"] = model_track.observations["Matched"].astype(np.int32) for t, time in enumerate(model_track.times): model_track.observations.loc[time, "Matched"] = track_pairings.loc[s, "Matched"] if model_track.observations.loc[time, "Matched"] > 0: all_hail_sizes = [] step_pairs = track_pairings.loc[s, "Pairings"] for step_pair in step_pairs: obs_step = obs_tracks[step_pair[0]].timesteps[step_pair[1]].ravel() obs_mask = obs_tracks[step_pair[0]].masks[step_pair[1]].ravel() all_hail_sizes.append(obs_step[(obs_mask == 1) & (obs_step >= self.mrms_ew.min_thresh)]) combined_hail_sizes = np.concatenate(all_hail_sizes) min_hail = combined_hail_sizes.min() - 0.1 model_track.observations.loc[time, "Max_Hail_Size"] = combined_hail_sizes.max() model_track.observations.loc[time, "Num_Matches"] = step_pairs.shape[0] model_track.observations.loc[time, ["Shape", "Location", "Scale"]] = gamma.fit(combined_hail_sizes, floc=min_hail) s += 1
[ "def", "match_hail_size_step_distributions", "(", "self", ",", "model_tracks", ",", "obs_tracks", ",", "track_pairings", ")", ":", "label_columns", "=", "[", "\"Matched\"", ",", "\"Max_Hail_Size\"", ",", "\"Num_Matches\"", ",", "\"Shape\"", ",", "\"Location\"", ",", "\"Scale\"", "]", "s", "=", "0", "for", "m", ",", "model_track", "in", "enumerate", "(", "model_tracks", ")", ":", "model_track", ".", "observations", "=", "pd", ".", "DataFrame", "(", "index", "=", "model_track", ".", "times", ",", "columns", "=", "label_columns", ",", "dtype", "=", "np", ".", "float64", ")", "model_track", ".", "observations", ".", "loc", "[", ":", ",", ":", "]", "=", "0", "model_track", ".", "observations", "[", "\"Matched\"", "]", "=", "model_track", ".", "observations", "[", "\"Matched\"", "]", ".", "astype", "(", "np", ".", "int32", ")", "for", "t", ",", "time", "in", "enumerate", "(", "model_track", ".", "times", ")", ":", "model_track", ".", "observations", ".", "loc", "[", "time", ",", "\"Matched\"", "]", "=", "track_pairings", ".", "loc", "[", "s", ",", "\"Matched\"", "]", "if", "model_track", ".", "observations", ".", "loc", "[", "time", ",", "\"Matched\"", "]", ">", "0", ":", "all_hail_sizes", "=", "[", "]", "step_pairs", "=", "track_pairings", ".", "loc", "[", "s", ",", "\"Pairings\"", "]", "for", "step_pair", "in", "step_pairs", ":", "obs_step", "=", "obs_tracks", "[", "step_pair", "[", "0", "]", "]", ".", "timesteps", "[", "step_pair", "[", "1", "]", "]", ".", "ravel", "(", ")", "obs_mask", "=", "obs_tracks", "[", "step_pair", "[", "0", "]", "]", ".", "masks", "[", "step_pair", "[", "1", "]", "]", ".", "ravel", "(", ")", "all_hail_sizes", ".", "append", "(", "obs_step", "[", "(", "obs_mask", "==", "1", ")", "&", "(", "obs_step", ">=", "self", ".", "mrms_ew", ".", "min_thresh", ")", "]", ")", "combined_hail_sizes", "=", "np", ".", "concatenate", "(", "all_hail_sizes", ")", "min_hail", "=", "combined_hail_sizes", ".", "min", "(", ")", "-", "0.1", "model_track", ".", "observations", ".", "loc", "[", "time", ",", "\"Max_Hail_Size\"", "]", "=", "combined_hail_sizes", ".", "max", "(", ")", "model_track", ".", "observations", ".", "loc", "[", "time", ",", "\"Num_Matches\"", "]", "=", "step_pairs", ".", "shape", "[", "0", "]", "model_track", ".", "observations", ".", "loc", "[", "time", ",", "[", "\"Shape\"", ",", "\"Location\"", ",", "\"Scale\"", "]", "]", "=", "gamma", ".", "fit", "(", "combined_hail_sizes", ",", "floc", "=", "min_hail", ")", "s", "+=", "1" ]
Given a matching set of observed tracks for each model track, collect the observed hail sizes for every matched timestep and fit a gamma distribution to them. Args: model_tracks: List of model track STObjects. obs_tracks: List of observed track STObjects. track_pairings: DataFrame of track pairings with "Matched" and "Pairings" columns. Returns: None. Results are written to each model track's observations DataFrame (Matched, Max_Hail_Size, Num_Matches, Shape, Location, Scale).
[ "Given", "a", "matching", "set", "of", "observed", "tracks", "for", "each", "model", "track", "Args", ":", "model_tracks", ":", "obs_tracks", ":", "track_pairings", ":" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/TrackProcessing.py#L505-L538
train
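The per-timestep distribution fit at the end of match_hail_size_step_distributions reduces all matched observed pixels to three gamma parameters. A stand-alone sketch with made-up sizes:

import numpy as np
from scipy.stats import gamma

hail_sizes = np.array([25.0, 27.5, 30.0, 33.0, 40.0, 45.0])  # mm, matched obs pixels
min_hail = hail_sizes.min() - 0.1
# floc pins the location parameter just below the smallest observation,
# so only shape and scale are estimated and the fitted support stays realistic.
shape, location, scale = gamma.fit(hail_sizes, floc=min_hail)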
djgagne/hagelslag
hagelslag/processing/TrackProcessing.py
TrackProcessor.calc_track_errors
def calc_track_errors(model_tracks, obs_tracks, track_pairings): """ Calculates spatial and temporal translation errors between matched forecast and observed tracks. Args: model_tracks: List of model track STObjects obs_tracks: List of observed track STObjects track_pairings: List of tuples pairing forecast and observed tracks. Returns: pandas DataFrame containing different track errors """ columns = ['obs_track_id', 'translation_error_x', 'translation_error_y', 'start_time_difference', 'end_time_difference', ] track_errors = pd.DataFrame(index=list(range(len(model_tracks))), columns=columns) for p, pair in enumerate(track_pairings): model_track = model_tracks[pair[0]] if type(pair[1]) in [int, np.int64]: obs_track = obs_tracks[pair[1]] else: obs_track = obs_tracks[pair[1][0]] model_com = model_track.center_of_mass(model_track.start_time) obs_com = obs_track.center_of_mass(obs_track.start_time) track_errors.loc[pair[0], 'obs_track_id'] = pair[1] if type(pair[1]) in [int, np.int64] else pair[1][0] track_errors.loc[pair[0], 'translation_error_x'] = model_com[0] - obs_com[0] track_errors.loc[pair[0], 'translation_error_y'] = model_com[1] - obs_com[1] track_errors.loc[pair[0], 'start_time_difference'] = model_track.start_time - obs_track.start_time track_errors.loc[pair[0], 'end_time_difference'] = model_track.end_time - obs_track.end_time return track_errors
python
def calc_track_errors(model_tracks, obs_tracks, track_pairings): """ Calculates spatial and temporal translation errors between matched forecast and observed tracks. Args: model_tracks: List of model track STObjects obs_tracks: List of observed track STObjects track_pairings: List of tuples pairing forecast and observed tracks. Returns: pandas DataFrame containing different track errors """ columns = ['obs_track_id', 'translation_error_x', 'translation_error_y', 'start_time_difference', 'end_time_difference', ] track_errors = pd.DataFrame(index=list(range(len(model_tracks))), columns=columns) for p, pair in enumerate(track_pairings): model_track = model_tracks[pair[0]] if type(pair[1]) in [int, np.int64]: obs_track = obs_tracks[pair[1]] else: obs_track = obs_tracks[pair[1][0]] model_com = model_track.center_of_mass(model_track.start_time) obs_com = obs_track.center_of_mass(obs_track.start_time) track_errors.loc[pair[0], 'obs_track_id'] = pair[1] if type(pair[1]) in [int, np.int64] else pair[1][0] track_errors.loc[pair[0], 'translation_error_x'] = model_com[0] - obs_com[0] track_errors.loc[pair[0], 'translation_error_y'] = model_com[1] - obs_com[1] track_errors.loc[pair[0], 'start_time_difference'] = model_track.start_time - obs_track.start_time track_errors.loc[pair[0], 'end_time_difference'] = model_track.end_time - obs_track.end_time return track_errors
[ "def", "calc_track_errors", "(", "model_tracks", ",", "obs_tracks", ",", "track_pairings", ")", ":", "columns", "=", "[", "'obs_track_id'", ",", "'translation_error_x'", ",", "'translation_error_y'", ",", "'start_time_difference'", ",", "'end_time_difference'", ",", "]", "track_errors", "=", "pd", ".", "DataFrame", "(", "index", "=", "list", "(", "range", "(", "len", "(", "model_tracks", ")", ")", ")", ",", "columns", "=", "columns", ")", "for", "p", ",", "pair", "in", "enumerate", "(", "track_pairings", ")", ":", "model_track", "=", "model_tracks", "[", "pair", "[", "0", "]", "]", "if", "type", "(", "pair", "[", "1", "]", ")", "in", "[", "int", ",", "np", ".", "int64", "]", ":", "obs_track", "=", "obs_tracks", "[", "pair", "[", "1", "]", "]", "else", ":", "obs_track", "=", "obs_tracks", "[", "pair", "[", "1", "]", "[", "0", "]", "]", "model_com", "=", "model_track", ".", "center_of_mass", "(", "model_track", ".", "start_time", ")", "obs_com", "=", "obs_track", ".", "center_of_mass", "(", "obs_track", ".", "start_time", ")", "track_errors", ".", "loc", "[", "pair", "[", "0", "]", ",", "'obs_track_id'", "]", "=", "pair", "[", "1", "]", "if", "type", "(", "pair", "[", "1", "]", ")", "in", "[", "int", ",", "np", ".", "int64", "]", "else", "pair", "[", "1", "]", "[", "0", "]", "track_errors", ".", "loc", "[", "pair", "[", "0", "]", ",", "'translation_error_x'", "]", "=", "model_com", "[", "0", "]", "-", "obs_com", "[", "0", "]", "track_errors", ".", "loc", "[", "pair", "[", "0", "]", ",", "'translation_error_y'", "]", "=", "model_com", "[", "1", "]", "-", "obs_com", "[", "1", "]", "track_errors", ".", "loc", "[", "pair", "[", "0", "]", ",", "'start_time_difference'", "]", "=", "model_track", ".", "start_time", "-", "obs_track", ".", "start_time", "track_errors", ".", "loc", "[", "pair", "[", "0", "]", ",", "'end_time_difference'", "]", "=", "model_track", ".", "end_time", "-", "obs_track", ".", "end_time", "return", "track_errors" ]
Calculates spatial and temporal translation errors between matched forecast and observed tracks. Args: model_tracks: List of model track STObjects obs_tracks: List of observed track STObjects track_pairings: List of tuples pairing forecast and observed tracks. Returns: pandas DataFrame containing different track errors
[ "Calculates", "spatial", "and", "temporal", "translation", "errors", "between", "matched", "forecast", "and", "observed", "tracks", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/TrackProcessing.py#L541-L575
train
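Since every error column in calc_track_errors is computed as model minus observed, the sign carries meaning. A hedged reading sketch (assuming the function is exposed as a staticmethod, as the missing self suggests; the pandas filtering is for illustration only):

track_errors = TrackProcessor.calc_track_errors(model_tracks, obs_tracks, track_pairings)
# Positive translation errors mean the forecast track starts on the +x/+y side
# of its matched observed track; a negative start_time_difference means the
# forecast storm initiates earlier than observed.
early = track_errors[track_errors["start_time_difference"] < 0]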
nion-software/nionswift
nion/swift/model/Connection.py
Connection.persistent_object_context_changed
def persistent_object_context_changed(self): """ Override from PersistentObject. """ super().persistent_object_context_changed() def change_registration(registered_object, unregistered_object): if registered_object and registered_object.uuid == self.parent_uuid: self.__parent = registered_object if self.persistent_object_context: self.__registration_listener = self.persistent_object_context.registration_event.listen(change_registration) self.__parent = self.persistent_object_context.get_registered_object(self.parent_uuid)
python
def persistent_object_context_changed(self): """ Override from PersistentObject. """ super().persistent_object_context_changed() def change_registration(registered_object, unregistered_object): if registered_object and registered_object.uuid == self.parent_uuid: self.__parent = registered_object if self.persistent_object_context: self.__registration_listener = self.persistent_object_context.registration_event.listen(change_registration) self.__parent = self.persistent_object_context.get_registered_object(self.parent_uuid)
[ "def", "persistent_object_context_changed", "(", "self", ")", ":", "super", "(", ")", ".", "persistent_object_context_changed", "(", ")", "def", "change_registration", "(", "registered_object", ",", "unregistered_object", ")", ":", "if", "registered_object", "and", "registered_object", ".", "uuid", "==", "self", ".", "parent_uuid", ":", "self", ".", "__parent", "=", "registered_object", "if", "self", ".", "persistent_object_context", ":", "self", ".", "__registration_listener", "=", "self", ".", "persistent_object_context", ".", "registration_event", ".", "listen", "(", "change_registration", ")", "self", ".", "__parent", "=", "self", ".", "persistent_object_context", ".", "get_registered_object", "(", "self", ".", "parent_uuid", ")" ]
Override from PersistentObject.
[ "Override", "from", "PersistentObject", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Connection.py#L86-L97
train
nion-software/nionswift
nion/swift/model/Connection.py
PropertyConnection.persistent_object_context_changed
def persistent_object_context_changed(self): """ Override from PersistentObject. """ super().persistent_object_context_changed() def register(): if self.__source is not None and self.__target is not None: assert not self.__binding self.__binding = Binding.PropertyBinding(self.__source, self.source_property) self.__binding.target_setter = self.__set_target_from_source # while reading, the data item in the display data channel will not be connected; # we still set its value here. when the data item becomes valid, it will update. self.__binding.update_target_direct(self.__binding.get_target_value()) def source_registered(source): self.__source = source register() def target_registered(target): self.__target = target def property_changed(target, property_name): if property_name == self.target_property: self.__set_source_from_target(getattr(target, property_name)) assert self.__target_property_changed_listener is None self.__target_property_changed_listener = target.property_changed_event.listen(functools.partial(property_changed, target)) register() def unregistered(item=None): if not item or item == self.__source: self.__source = None if not item or item == self.__target: self.__target = None if self.__binding: self.__binding.close() self.__binding = None if self.__target_property_changed_listener: self.__target_property_changed_listener.close() self.__target_property_changed_listener = None def change_registration(registered_object, unregistered_object): if registered_object and registered_object.uuid == self.source_uuid: source_registered(registered_object) if registered_object and registered_object.uuid == self.target_uuid: target_registered(registered_object) if unregistered_object and unregistered_object in (self._source, self._target): unregistered(unregistered_object) if self.persistent_object_context: self.__registration_listener = self.persistent_object_context.registration_event.listen(change_registration) source = self.persistent_object_context.get_registered_object(self.source_uuid) target = self.persistent_object_context.get_registered_object(self.target_uuid) if source: source_registered(source) if target: target_registered(target) else: unregistered()
python
def persistent_object_context_changed(self): """ Override from PersistentObject. """ super().persistent_object_context_changed() def register(): if self.__source is not None and self.__target is not None: assert not self.__binding self.__binding = Binding.PropertyBinding(self.__source, self.source_property) self.__binding.target_setter = self.__set_target_from_source # while reading, the data item in the display data channel will not be connected; # we still set its value here. when the data item becomes valid, it will update. self.__binding.update_target_direct(self.__binding.get_target_value()) def source_registered(source): self.__source = source register() def target_registered(target): self.__target = target def property_changed(target, property_name): if property_name == self.target_property: self.__set_source_from_target(getattr(target, property_name)) assert self.__target_property_changed_listener is None self.__target_property_changed_listener = target.property_changed_event.listen(functools.partial(property_changed, target)) register() def unregistered(item=None): if not item or item == self.__source: self.__source = None if not item or item == self.__target: self.__target = None if self.__binding: self.__binding.close() self.__binding = None if self.__target_property_changed_listener: self.__target_property_changed_listener.close() self.__target_property_changed_listener = None def change_registration(registered_object, unregistered_object): if registered_object and registered_object.uuid == self.source_uuid: source_registered(registered_object) if registered_object and registered_object.uuid == self.target_uuid: target_registered(registered_object) if unregistered_object and unregistered_object in (self._source, self._target): unregistered(unregistered_object) if self.persistent_object_context: self.__registration_listener = self.persistent_object_context.registration_event.listen(change_registration) source = self.persistent_object_context.get_registered_object(self.source_uuid) target = self.persistent_object_context.get_registered_object(self.target_uuid) if source: source_registered(source) if target: target_registered(target) else: unregistered()
[ "def", "persistent_object_context_changed", "(", "self", ")", ":", "super", "(", ")", ".", "persistent_object_context_changed", "(", ")", "def", "register", "(", ")", ":", "if", "self", ".", "__source", "is", "not", "None", "and", "self", ".", "__target", "is", "not", "None", ":", "assert", "not", "self", ".", "__binding", "self", ".", "__binding", "=", "Binding", ".", "PropertyBinding", "(", "self", ".", "__source", ",", "self", ".", "source_property", ")", "self", ".", "__binding", ".", "target_setter", "=", "self", ".", "__set_target_from_source", "# while reading, the data item in the display data channel will not be connected;", "# we still set its value here. when the data item becomes valid, it will update.", "self", ".", "__binding", ".", "update_target_direct", "(", "self", ".", "__binding", ".", "get_target_value", "(", ")", ")", "def", "source_registered", "(", "source", ")", ":", "self", ".", "__source", "=", "source", "register", "(", ")", "def", "target_registered", "(", "target", ")", ":", "self", ".", "__target", "=", "target", "def", "property_changed", "(", "target", ",", "property_name", ")", ":", "if", "property_name", "==", "self", ".", "target_property", ":", "self", ".", "__set_source_from_target", "(", "getattr", "(", "target", ",", "property_name", ")", ")", "assert", "self", ".", "__target_property_changed_listener", "is", "None", "self", ".", "__target_property_changed_listener", "=", "target", ".", "property_changed_event", ".", "listen", "(", "functools", ".", "partial", "(", "property_changed", ",", "target", ")", ")", "register", "(", ")", "def", "unregistered", "(", "item", "=", "None", ")", ":", "if", "not", "item", "or", "item", "==", "self", ".", "__source", ":", "self", ".", "__source", "=", "None", "if", "not", "item", "or", "item", "==", "self", ".", "__target", ":", "self", ".", "__target", "=", "None", "if", "self", ".", "__binding", ":", "self", ".", "__binding", ".", "close", "(", ")", "self", ".", "__binding", "=", "None", "if", "self", ".", "__target_property_changed_listener", ":", "self", ".", "__target_property_changed_listener", ".", "close", "(", ")", "self", ".", "__target_property_changed_listener", "=", "None", "def", "change_registration", "(", "registered_object", ",", "unregistered_object", ")", ":", "if", "registered_object", "and", "registered_object", ".", "uuid", "==", "self", ".", "source_uuid", ":", "source_registered", "(", "registered_object", ")", "if", "registered_object", "and", "registered_object", ".", "uuid", "==", "self", ".", "target_uuid", ":", "target_registered", "(", "registered_object", ")", "if", "unregistered_object", "and", "unregistered_object", "in", "(", "self", ".", "_source", ",", "self", ".", "_target", ")", ":", "unregistered", "(", "unregistered_object", ")", "if", "self", ".", "persistent_object_context", ":", "self", ".", "__registration_listener", "=", "self", ".", "persistent_object_context", ".", "registration_event", ".", "listen", "(", "change_registration", ")", "source", "=", "self", ".", "persistent_object_context", ".", "get_registered_object", "(", "self", ".", "source_uuid", ")", "target", "=", "self", ".", "persistent_object_context", ".", "get_registered_object", "(", "self", ".", "target_uuid", ")", "if", "source", ":", "source_registered", "(", "source", ")", "if", "target", ":", "target_registered", "(", "target", ")", "else", ":", "unregistered", "(", ")" ]
Override from PersistentObject.
[ "Override", "from", "PersistentObject", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Connection.py#L162-L219
train
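The forward/reverse wiring in PropertyConnection, a PropertyBinding from source to target plus a property_changed listener from target back to source, can be modeled in plain Python (toy classes, nothing here is Nion API):

class Toy:
    def __init__(self):
        self._value = 0
        self.listeners = []

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, v):
        if v != self._value:
            self._value = v
            for listener in self.listeners:
                listener(v)

source, target = Toy(), Toy()
# forward direction: like the PropertyBinding's target_setter
source.listeners.append(lambda v: setattr(target, "_value", v))
# reverse direction: like the target property_changed listener
target.listeners.append(lambda v: setattr(source, "_value", v))
source.value = 5
print(target.value)  # 5; writing _value directly avoids a feedback loop,
                     # analogous to update_target_direct in the real code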
nion-software/nionswift
nion/swift/model/Connection.py
IntervalListConnection.persistent_object_context_changed
def persistent_object_context_changed(self): """ Override from PersistentObject. """ super().persistent_object_context_changed() def detach(): for listener in self.__interval_mutated_listeners: listener.close() self.__interval_mutated_listeners = list() def reattach(): detach() interval_descriptors = list() if self.__source: for region in self.__source.graphics: if isinstance(region, Graphics.IntervalGraphic): interval_descriptor = {"interval": region.interval, "color": "#F00"} interval_descriptors.append(interval_descriptor) self.__interval_mutated_listeners.append(region.property_changed_event.listen(lambda k: reattach())) if self.__target: self.__target.interval_descriptors = interval_descriptors def item_inserted(key, value, before_index): if key == "graphics" and self.__target: reattach() def item_removed(key, value, index): if key == "graphics" and self.__target: reattach() def source_registered(source): self.__source = source self.__item_inserted_event_listener = self.__source.item_inserted_event.listen(item_inserted) self.__item_removed_event_listener = self.__source.item_removed_event.listen(item_removed) reattach() def target_registered(target): self.__target = target reattach() def unregistered(source=None): if self.__item_inserted_event_listener: self.__item_inserted_event_listener.close() self.__item_inserted_event_listener = None if self.__item_removed_event_listener: self.__item_removed_event_listener.close() self.__item_removed_event_listener = None if self.persistent_object_context: self.persistent_object_context.subscribe(self.source_uuid, source_registered, unregistered) self.persistent_object_context.subscribe(self.target_uuid, target_registered, unregistered) else: unregistered()
python
def persistent_object_context_changed(self): """ Override from PersistentObject. """ super().persistent_object_context_changed() def detach(): for listener in self.__interval_mutated_listeners: listener.close() self.__interval_mutated_listeners = list() def reattach(): detach() interval_descriptors = list() if self.__source: for region in self.__source.graphics: if isinstance(region, Graphics.IntervalGraphic): interval_descriptor = {"interval": region.interval, "color": "#F00"} interval_descriptors.append(interval_descriptor) self.__interval_mutated_listeners.append(region.property_changed_event.listen(lambda k: reattach())) if self.__target: self.__target.interval_descriptors = interval_descriptors def item_inserted(key, value, before_index): if key == "graphics" and self.__target: reattach() def item_removed(key, value, index): if key == "graphics" and self.__target: reattach() def source_registered(source): self.__source = source self.__item_inserted_event_listener = self.__source.item_inserted_event.listen(item_inserted) self.__item_removed_event_listener = self.__source.item_removed_event.listen(item_removed) reattach() def target_registered(target): self.__target = target reattach() def unregistered(source=None): if self.__item_inserted_event_listener: self.__item_inserted_event_listener.close() self.__item_inserted_event_listener = None if self.__item_removed_event_listener: self.__item_removed_event_listener.close() self.__item_removed_event_listener = None if self.persistent_object_context: self.persistent_object_context.subscribe(self.source_uuid, source_registered, unregistered) self.persistent_object_context.subscribe(self.target_uuid, target_registered, unregistered) else: unregistered()
[ "def", "persistent_object_context_changed", "(", "self", ")", ":", "super", "(", ")", ".", "persistent_object_context_changed", "(", ")", "def", "detach", "(", ")", ":", "for", "listener", "in", "self", ".", "__interval_mutated_listeners", ":", "listener", ".", "close", "(", ")", "self", ".", "__interval_mutated_listeners", "=", "list", "(", ")", "def", "reattach", "(", ")", ":", "detach", "(", ")", "interval_descriptors", "=", "list", "(", ")", "if", "self", ".", "__source", ":", "for", "region", "in", "self", ".", "__source", ".", "graphics", ":", "if", "isinstance", "(", "region", ",", "Graphics", ".", "IntervalGraphic", ")", ":", "interval_descriptor", "=", "{", "\"interval\"", ":", "region", ".", "interval", ",", "\"color\"", ":", "\"#F00\"", "}", "interval_descriptors", ".", "append", "(", "interval_descriptor", ")", "self", ".", "__interval_mutated_listeners", ".", "append", "(", "region", ".", "property_changed_event", ".", "listen", "(", "lambda", "k", ":", "reattach", "(", ")", ")", ")", "if", "self", ".", "__target", ":", "self", ".", "__target", ".", "interval_descriptors", "=", "interval_descriptors", "def", "item_inserted", "(", "key", ",", "value", ",", "before_index", ")", ":", "if", "key", "==", "\"graphics\"", "and", "self", ".", "__target", ":", "reattach", "(", ")", "def", "item_removed", "(", "key", ",", "value", ",", "index", ")", ":", "if", "key", "==", "\"graphics\"", "and", "self", ".", "__target", ":", "reattach", "(", ")", "def", "source_registered", "(", "source", ")", ":", "self", ".", "__source", "=", "source", "self", ".", "__item_inserted_event_listener", "=", "self", ".", "__source", ".", "item_inserted_event", ".", "listen", "(", "item_inserted", ")", "self", ".", "__item_removed_event_listener", "=", "self", ".", "__source", ".", "item_removed_event", ".", "listen", "(", "item_removed", ")", "reattach", "(", ")", "def", "target_registered", "(", "target", ")", ":", "self", ".", "__target", "=", "target", "reattach", "(", ")", "def", "unregistered", "(", "source", "=", "None", ")", ":", "if", "self", ".", "__item_inserted_event_listener", ":", "self", ".", "__item_inserted_event_listener", ".", "close", "(", ")", "self", ".", "__item_inserted_event_listener", "=", "None", "if", "self", ".", "__item_removed_event_listener", ":", "self", ".", "__item_removed_event_listener", ".", "close", "(", ")", "self", ".", "__item_removed_event_listener", "=", "None", "if", "self", ".", "persistent_object_context", ":", "self", ".", "persistent_object_context", ".", "subscribe", "(", "self", ".", "source_uuid", ",", "source_registered", ",", "unregistered", ")", "self", ".", "persistent_object_context", ".", "subscribe", "(", "self", ".", "target_uuid", ",", "target_registered", ",", "unregistered", ")", "else", ":", "unregistered", "(", ")" ]
Override from PersistentObject.
[ "Override", "from", "PersistentObject", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Connection.py#L247-L298
train
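What IntervalListConnection actually pushes to its target is just a list of small dicts; illustrative values follow (the interval endpoints are made up, while the keys and color come from the code above):

interval_descriptors = [
    {"interval": (0.2, 0.4), "color": "#F00"},  # one entry per IntervalGraphic
    {"interval": (0.6, 0.7), "color": "#F00"},
]
# target.interval_descriptors = interval_descriptors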
nion-software/nionswift
nion/swift/FilterPanel.py
FilterController.__display_for_tree_node
def __display_for_tree_node(self, tree_node): """ Return the text display for the given tree node. Based on number of keys associated with tree node. """ keys = tree_node.keys if len(keys) == 1: return "{0} ({1})".format(tree_node.keys[-1], tree_node.count) elif len(keys) == 2: months = (_("January"), _("February"), _("March"), _("April"), _("May"), _("June"), _("July"), _("August"), _("September"), _("October"), _("November"), _("December")) return "{0} ({1})".format(months[max(min(tree_node.keys[1]-1, 11), 0)], tree_node.count) else: weekdays = (_("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"), _("Friday"), _("Saturday"), _("Sunday")) date = datetime.date(tree_node.keys[0], tree_node.keys[1], tree_node.keys[2]) return "{0} - {1} ({2})".format(tree_node.keys[2], weekdays[date.weekday()], tree_node.count)
python
def __display_for_tree_node(self, tree_node): """ Return the text display for the given tree node. Based on number of keys associated with tree node. """ keys = tree_node.keys if len(keys) == 1: return "{0} ({1})".format(tree_node.keys[-1], tree_node.count) elif len(keys) == 2: months = (_("January"), _("February"), _("March"), _("April"), _("May"), _("June"), _("July"), _("August"), _("September"), _("October"), _("November"), _("December")) return "{0} ({1})".format(months[max(min(tree_node.keys[1]-1, 11), 0)], tree_node.count) else: weekdays = (_("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"), _("Friday"), _("Saturday"), _("Sunday")) date = datetime.date(tree_node.keys[0], tree_node.keys[1], tree_node.keys[2]) return "{0} - {1} ({2})".format(tree_node.keys[2], weekdays[date.weekday()], tree_node.count)
[ "def", "__display_for_tree_node", "(", "self", ",", "tree_node", ")", ":", "keys", "=", "tree_node", ".", "keys", "if", "len", "(", "keys", ")", "==", "1", ":", "return", "\"{0} ({1})\"", ".", "format", "(", "tree_node", ".", "keys", "[", "-", "1", "]", ",", "tree_node", ".", "count", ")", "elif", "len", "(", "keys", ")", "==", "2", ":", "months", "=", "(", "_", "(", "\"January\"", ")", ",", "_", "(", "\"February\"", ")", ",", "_", "(", "\"March\"", ")", ",", "_", "(", "\"April\"", ")", ",", "_", "(", "\"May\"", ")", ",", "_", "(", "\"June\"", ")", ",", "_", "(", "\"July\"", ")", ",", "_", "(", "\"August\"", ")", ",", "_", "(", "\"September\"", ")", ",", "_", "(", "\"October\"", ")", ",", "_", "(", "\"November\"", ")", ",", "_", "(", "\"December\"", ")", ")", "return", "\"{0} ({1})\"", ".", "format", "(", "months", "[", "max", "(", "min", "(", "tree_node", ".", "keys", "[", "1", "]", "-", "1", ",", "11", ")", ",", "0", ")", "]", ",", "tree_node", ".", "count", ")", "else", ":", "weekdays", "=", "(", "_", "(", "\"Monday\"", ")", ",", "_", "(", "\"Tuesday\"", ")", ",", "_", "(", "\"Wednesday\"", ")", ",", "_", "(", "\"Thursday\"", ")", ",", "_", "(", "\"Friday\"", ")", ",", "_", "(", "\"Saturday\"", ")", ",", "_", "(", "\"Sunday\"", ")", ")", "date", "=", "datetime", ".", "date", "(", "tree_node", ".", "keys", "[", "0", "]", ",", "tree_node", ".", "keys", "[", "1", "]", ",", "tree_node", ".", "keys", "[", "2", "]", ")", "return", "\"{0} - {1} ({2})\"", ".", "format", "(", "tree_node", ".", "keys", "[", "2", "]", ",", "weekdays", "[", "date", ".", "weekday", "(", ")", "]", ",", "tree_node", ".", "count", ")" ]
Return the text display for the given tree node. The display depends on the number of keys associated with the tree node: one key yields a year label, two keys a month name, and three keys a day-of-month with weekday, each followed by the node's count.
[ "Return", "the", "text", "display", "for", "the", "given", "tree", "node", ".", "Based", "on", "number", "of", "keys", "associated", "with", "tree", "node", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L121-L133
train
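The three display branches in __display_for_tree_node correspond to year, month, and day nodes. A quick trace with assumed keys and counts:

import datetime

# len(keys) == 1 -> "2023 (42)"          year node, keys == [2023]
# len(keys) == 2 -> "March (7)"          month node, keys == [2023, 3]
# len(keys) == 3 -> "14 - Tuesday (2)"   day node, keys == [2023, 3, 14]
date = datetime.date(2023, 3, 14)
print(date.weekday())  # 1, i.e. Tuesday in the Monday-first weekday tuple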
nion-software/nionswift
nion/swift/FilterPanel.py
FilterController.__insert_child
def __insert_child(self, parent_tree_node, index, tree_node): """ Called from the root tree node when a new node is inserted into tree. This method creates properties to represent the node for display and inserts it into the item model controller. """ # manage the item model parent_item = self.__mapping[id(parent_tree_node)] self.item_model_controller.begin_insert(index, index, parent_item.row, parent_item.id) properties = { "display": self.__display_for_tree_node(tree_node), "tree_node": tree_node # used for removal and other lookup } item = self.item_model_controller.create_item(properties) parent_item.insert_child(index, item) self.__mapping[id(tree_node)] = item self.item_model_controller.end_insert()
python
def __insert_child(self, parent_tree_node, index, tree_node): """ Called from the root tree node when a new node is inserted into tree. This method creates properties to represent the node for display and inserts it into the item model controller. """ # manage the item model parent_item = self.__mapping[id(parent_tree_node)] self.item_model_controller.begin_insert(index, index, parent_item.row, parent_item.id) properties = { "display": self.__display_for_tree_node(tree_node), "tree_node": tree_node # used for removal and other lookup } item = self.item_model_controller.create_item(properties) parent_item.insert_child(index, item) self.__mapping[id(tree_node)] = item self.item_model_controller.end_insert()
[ "def", "__insert_child", "(", "self", ",", "parent_tree_node", ",", "index", ",", "tree_node", ")", ":", "# manage the item model", "parent_item", "=", "self", ".", "__mapping", "[", "id", "(", "parent_tree_node", ")", "]", "self", ".", "item_model_controller", ".", "begin_insert", "(", "index", ",", "index", ",", "parent_item", ".", "row", ",", "parent_item", ".", "id", ")", "properties", "=", "{", "\"display\"", ":", "self", ".", "__display_for_tree_node", "(", "tree_node", ")", ",", "\"tree_node\"", ":", "tree_node", "# used for removal and other lookup", "}", "item", "=", "self", ".", "item_model_controller", ".", "create_item", "(", "properties", ")", "parent_item", ".", "insert_child", "(", "index", ",", "item", ")", "self", ".", "__mapping", "[", "id", "(", "tree_node", ")", "]", "=", "item", "self", ".", "item_model_controller", ".", "end_insert", "(", ")" ]
Called from the root tree node when a new node is inserted into tree. This method creates properties to represent the node for display and inserts it into the item model controller.
[ "Called", "from", "the", "root", "tree", "node", "when", "a", "new", "node", "is", "inserted", "into", "tree", ".", "This", "method", "creates", "properties", "to", "represent", "the", "node", "for", "display", "and", "inserts", "it", "into", "the", "item", "model", "controller", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L135-L150
train
nion-software/nionswift
nion/swift/FilterPanel.py
FilterController.__remove_child
def __remove_child(self, parent_tree_node, index): """ Called from the root tree node when a node is removed from the tree. This method removes it into the item model controller. """ # get parent and item parent_item = self.__mapping[id(parent_tree_node)] # manage the item model self.item_model_controller.begin_remove(index, index, parent_item.row, parent_item.id) child_item = parent_item.children[index] parent_item.remove_child(child_item) self.__mapping.pop(id(child_item.data["tree_node"])) self.item_model_controller.end_remove()
python
def __remove_child(self, parent_tree_node, index): """ Called from the root tree node when a node is removed from the tree. This method removes it into the item model controller. """ # get parent and item parent_item = self.__mapping[id(parent_tree_node)] # manage the item model self.item_model_controller.begin_remove(index, index, parent_item.row, parent_item.id) child_item = parent_item.children[index] parent_item.remove_child(child_item) self.__mapping.pop(id(child_item.data["tree_node"])) self.item_model_controller.end_remove()
[ "def", "__remove_child", "(", "self", ",", "parent_tree_node", ",", "index", ")", ":", "# get parent and item", "parent_item", "=", "self", ".", "__mapping", "[", "id", "(", "parent_tree_node", ")", "]", "# manage the item model", "self", ".", "item_model_controller", ".", "begin_remove", "(", "index", ",", "index", ",", "parent_item", ".", "row", ",", "parent_item", ".", "id", ")", "child_item", "=", "parent_item", ".", "children", "[", "index", "]", "parent_item", ".", "remove_child", "(", "child_item", ")", "self", ".", "__mapping", ".", "pop", "(", "id", "(", "child_item", ".", "data", "[", "\"tree_node\"", "]", ")", ")", "self", ".", "item_model_controller", ".", "end_remove", "(", ")" ]
Called from the root tree node when a node is removed from the tree. This method removes it from the item model controller.
[ "Called", "from", "the", "root", "tree", "node", "when", "a", "node", "is", "removed", "from", "the", "tree", ".", "This", "method", "removes", "it", "into", "the", "item", "model", "controller", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L152-L164
train
nion-software/nionswift
nion/swift/FilterPanel.py
FilterController.update_all_nodes
def update_all_nodes(self): """ Update all tree item displays if needed. Usually for count updates. """ item_model_controller = self.item_model_controller if item_model_controller: if self.__node_counts_dirty: for item in self.__mapping.values(): if "tree_node" in item.data: # don't update the root node tree_node = item.data["tree_node"] item.data["display"] = self.__display_for_tree_node(tree_node) item_model_controller.data_changed(item.row, item.parent.row, item.parent.id) self.__node_counts_dirty = False
python
def update_all_nodes(self): """ Update all tree item displays if needed. Usually for count updates. """ item_model_controller = self.item_model_controller if item_model_controller: if self.__node_counts_dirty: for item in self.__mapping.values(): if "tree_node" in item.data: # don't update the root node tree_node = item.data["tree_node"] item.data["display"] = self.__display_for_tree_node(tree_node) item_model_controller.data_changed(item.row, item.parent.row, item.parent.id) self.__node_counts_dirty = False
[ "def", "update_all_nodes", "(", "self", ")", ":", "item_model_controller", "=", "self", ".", "item_model_controller", "if", "item_model_controller", ":", "if", "self", ".", "__node_counts_dirty", ":", "for", "item", "in", "self", ".", "__mapping", ".", "values", "(", ")", ":", "if", "\"tree_node\"", "in", "item", ".", "data", ":", "# don't update the root node", "tree_node", "=", "item", ".", "data", "[", "\"tree_node\"", "]", "item", ".", "data", "[", "\"display\"", "]", "=", "self", ".", "__display_for_tree_node", "(", "tree_node", ")", "item_model_controller", ".", "data_changed", "(", "item", ".", "row", ",", "item", ".", "parent", ".", "row", ",", "item", ".", "parent", ".", "id", ")", "self", ".", "__node_counts_dirty", "=", "False" ]
Update all tree item displays if needed, usually for count updates.
[ "Update", "all", "tree", "item", "displays", "if", "needed", ".", "Usually", "for", "count", "updates", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L170-L180
train
nion-software/nionswift
nion/swift/FilterPanel.py
FilterController.date_browser_selection_changed
def date_browser_selection_changed(self, selected_indexes): """ Called to handle selection changes in the tree widget. This method should be connected to the on_selection_changed event. This method builds a list of keys represented by all selected items. It then provides date_filter to filter data items based on the list of keys. It then sets the filter into the document controller. :param selected_indexes: The selected indexes :type selected_indexes: list of ints """ partial_date_filters = list() for index, parent_row, parent_id in selected_indexes: item_model_controller = self.item_model_controller tree_node = item_model_controller.item_value("tree_node", index, parent_id) partial_date_filters.append(ListModel.PartialDateFilter("created_local", *tree_node.keys)) if len(partial_date_filters) > 0: self.__date_filter = ListModel.OrFilter(partial_date_filters) else: self.__date_filter = None self.__update_filter()
python
def date_browser_selection_changed(self, selected_indexes): """ Called to handle selection changes in the tree widget. This method should be connected to the on_selection_changed event. This method builds a list of keys represented by all selected items. It then provides date_filter to filter data items based on the list of keys. It then sets the filter into the document controller. :param selected_indexes: The selected indexes :type selected_indexes: list of ints """ partial_date_filters = list() for index, parent_row, parent_id in selected_indexes: item_model_controller = self.item_model_controller tree_node = item_model_controller.item_value("tree_node", index, parent_id) partial_date_filters.append(ListModel.PartialDateFilter("created_local", *tree_node.keys)) if len(partial_date_filters) > 0: self.__date_filter = ListModel.OrFilter(partial_date_filters) else: self.__date_filter = None self.__update_filter()
[ "def", "date_browser_selection_changed", "(", "self", ",", "selected_indexes", ")", ":", "partial_date_filters", "=", "list", "(", ")", "for", "index", ",", "parent_row", ",", "parent_id", "in", "selected_indexes", ":", "item_model_controller", "=", "self", ".", "item_model_controller", "tree_node", "=", "item_model_controller", ".", "item_value", "(", "\"tree_node\"", ",", "index", ",", "parent_id", ")", "partial_date_filters", ".", "append", "(", "ListModel", ".", "PartialDateFilter", "(", "\"created_local\"", ",", "*", "tree_node", ".", "keys", ")", ")", "if", "len", "(", "partial_date_filters", ")", ">", "0", ":", "self", ".", "__date_filter", "=", "ListModel", ".", "OrFilter", "(", "partial_date_filters", ")", "else", ":", "self", ".", "__date_filter", "=", "None", "self", ".", "__update_filter", "(", ")" ]
Called to handle selection changes in the tree widget. This method should be connected to the on_selection_changed event. This method builds a list of keys represented by all selected items. It then provides date_filter to filter data items based on the list of keys. It then sets the filter into the document controller. :param selected_indexes: The selected indexes :type selected_indexes: list of ints
[ "Called", "to", "handle", "selection", "changes", "in", "the", "tree", "widget", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L182-L205
train
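The filter built from a tree selection is an OR over one PartialDateFilter per selected node, and the keys are simply that node's year/month/day path. A sketch with assumed selections (ListModel is whatever module this panel imports under that name):

partial_date_filters = [
    ListModel.PartialDateFilter("created_local", 2023),         # a whole year selected
    ListModel.PartialDateFilter("created_local", 2022, 12, 5),  # a single day selected
]
date_filter = ListModel.OrFilter(partial_date_filters)  # matches either selection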
nion-software/nionswift
nion/swift/FilterPanel.py
FilterController.text_filter_changed
def text_filter_changed(self, text): """ Called to handle changes to the text filter. :param text: The text for the filter. """ text = text.strip() if text else None if text is not None: self.__text_filter = ListModel.TextFilter("text_for_filter", text) else: self.__text_filter = None self.__update_filter()
python
def text_filter_changed(self, text): """ Called to handle changes to the text filter. :param text: The text for the filter. """ text = text.strip() if text else None if text is not None: self.__text_filter = ListModel.TextFilter("text_for_filter", text) else: self.__text_filter = None self.__update_filter()
[ "def", "text_filter_changed", "(", "self", ",", "text", ")", ":", "text", "=", "text", ".", "strip", "(", ")", "if", "text", "else", "None", "if", "text", "is", "not", "None", ":", "self", ".", "__text_filter", "=", "ListModel", ".", "TextFilter", "(", "\"text_for_filter\"", ",", "text", ")", "else", ":", "self", ".", "__text_filter", "=", "None", "self", ".", "__update_filter", "(", ")" ]
Called to handle changes to the text filter. :param text: The text for the filter.
[ "Called", "to", "handle", "changes", "to", "the", "text", "filter", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L207-L220
train
nion-software/nionswift
nion/swift/FilterPanel.py
FilterController.__update_filter
def __update_filter(self): """ Create a combined filter. Set the resulting filter into the document controller. """ filters = list() if self.__date_filter: filters.append(self.__date_filter) if self.__text_filter: filters.append(self.__text_filter) self.document_controller.display_filter = ListModel.AndFilter(filters)
python
def __update_filter(self): """ Create a combined filter. Set the resulting filter into the document controller. """ filters = list() if self.__date_filter: filters.append(self.__date_filter) if self.__text_filter: filters.append(self.__text_filter) self.document_controller.display_filter = ListModel.AndFilter(filters)
[ "def", "__update_filter", "(", "self", ")", ":", "filters", "=", "list", "(", ")", "if", "self", ".", "__date_filter", ":", "filters", ".", "append", "(", "self", ".", "__date_filter", ")", "if", "self", ".", "__text_filter", ":", "filters", ".", "append", "(", "self", ".", "__text_filter", ")", "self", ".", "document_controller", ".", "display_filter", "=", "ListModel", ".", "AndFilter", "(", "filters", ")" ]
Create a combined filter. Set the resulting filter into the document controller.
[ "Create", "a", "combined", "filter", ".", "Set", "the", "resulting", "filter", "into", "the", "document", "controller", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L222-L231
train
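Composition-wise, __update_filter yields AND(date OR-filter, text filter): an item must fall within at least one selected date range and match the search text. A minimal restatement:

filters = []
if date_filter:
    filters.append(date_filter)
if text_filter:
    filters.append(text_filter)
combined = ListModel.AndFilter(filters)
# with an empty list this presumably passes everything (a vacuous AND),
# which is how clearing both filters restores the unfiltered view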
nion-software/nionswift
nion/swift/FilterPanel.py
TreeNode.__get_keys
def __get_keys(self): """ Return the keys associated with this node by adding its key and then adding parent keys recursively. """ keys = list() tree_node = self while tree_node is not None and tree_node.key is not None: keys.insert(0, tree_node.key) tree_node = tree_node.parent return keys
python
def __get_keys(self): """ Return the keys associated with this node by adding its key and then adding parent keys recursively. """ keys = list() tree_node = self while tree_node is not None and tree_node.key is not None: keys.insert(0, tree_node.key) tree_node = tree_node.parent return keys
[ "def", "__get_keys", "(", "self", ")", ":", "keys", "=", "list", "(", ")", "tree_node", "=", "self", "while", "tree_node", "is", "not", "None", "and", "tree_node", ".", "key", "is", "not", "None", ":", "keys", ".", "insert", "(", "0", ",", "tree_node", ".", "key", ")", "tree_node", "=", "tree_node", ".", "parent", "return", "keys" ]
Return the keys associated with this node by adding its key and then adding parent keys recursively.
[ "Return", "the", "keys", "associated", "with", "this", "node", "by", "adding", "its", "key", "and", "then", "adding", "parent", "keys", "recursively", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L348-L355
train
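The loop in __get_keys walks parent links until it reaches the root (whose key is None), prepending along the way. A stand-alone rendition, assuming nodes expose .key and .parent:

def keys_for(node):
    keys = []
    while node is not None and node.key is not None:
        keys.insert(0, node.key)
        node = node.parent
    return keys

# For a day node with ancestry root(None) -> 2023 -> 3 -> 14,
# keys_for(day_node) returns [2023, 3, 14].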
nion-software/nionswift
nion/swift/FilterPanel.py
TreeNode.insert_value
def insert_value(self, keys, value): """ Insert a value (data item) into this tree node and then its children. This will be called in response to a new data item being inserted into the document. Also updates the tree node's cumulative child count. """ self.count += 1 if not self.key: self.__value_reverse_mapping[value] = keys if len(keys) == 0: self.values.append(value) else: key = keys[0] index = bisect.bisect_left(self.children, TreeNode(key, reversed=self.reversed)) if index == len(self.children) or self.children[index].key != key: new_tree_node = TreeNode(key, list(), reversed=self.reversed) new_tree_node.child_inserted = self.child_inserted new_tree_node.child_removed = self.child_removed new_tree_node.tree_node_updated = self.tree_node_updated new_tree_node.__set_parent(self) self.children.insert(index, new_tree_node) if self.child_inserted: self.child_inserted(self, index, new_tree_node) child = self.children[index] child.insert_value(keys[1:], value) if self.tree_node_updated: self.tree_node_updated(child)
python
def insert_value(self, keys, value): """ Insert a value (data item) into this tree node and then its children. This will be called in response to a new data item being inserted into the document. Also updates the tree node's cumulative child count. """ self.count += 1 if not self.key: self.__value_reverse_mapping[value] = keys if len(keys) == 0: self.values.append(value) else: key = keys[0] index = bisect.bisect_left(self.children, TreeNode(key, reversed=self.reversed)) if index == len(self.children) or self.children[index].key != key: new_tree_node = TreeNode(key, list(), reversed=self.reversed) new_tree_node.child_inserted = self.child_inserted new_tree_node.child_removed = self.child_removed new_tree_node.tree_node_updated = self.tree_node_updated new_tree_node.__set_parent(self) self.children.insert(index, new_tree_node) if self.child_inserted: self.child_inserted(self, index, new_tree_node) child = self.children[index] child.insert_value(keys[1:], value) if self.tree_node_updated: self.tree_node_updated(child)
[ "def", "insert_value", "(", "self", ",", "keys", ",", "value", ")", ":", "self", ".", "count", "+=", "1", "if", "not", "self", ".", "key", ":", "self", ".", "__value_reverse_mapping", "[", "value", "]", "=", "keys", "if", "len", "(", "keys", ")", "==", "0", ":", "self", ".", "values", ".", "append", "(", "value", ")", "else", ":", "key", "=", "keys", "[", "0", "]", "index", "=", "bisect", ".", "bisect_left", "(", "self", ".", "children", ",", "TreeNode", "(", "key", ",", "reversed", "=", "self", ".", "reversed", ")", ")", "if", "index", "==", "len", "(", "self", ".", "children", ")", "or", "self", ".", "children", "[", "index", "]", ".", "key", "!=", "key", ":", "new_tree_node", "=", "TreeNode", "(", "key", ",", "list", "(", ")", ",", "reversed", "=", "self", ".", "reversed", ")", "new_tree_node", ".", "child_inserted", "=", "self", ".", "child_inserted", "new_tree_node", ".", "child_removed", "=", "self", ".", "child_removed", "new_tree_node", ".", "tree_node_updated", "=", "self", ".", "tree_node_updated", "new_tree_node", ".", "__set_parent", "(", "self", ")", "self", ".", "children", ".", "insert", "(", "index", ",", "new_tree_node", ")", "if", "self", ".", "child_inserted", ":", "self", ".", "child_inserted", "(", "self", ",", "index", ",", "new_tree_node", ")", "child", "=", "self", ".", "children", "[", "index", "]", "child", ".", "insert_value", "(", "keys", "[", "1", ":", "]", ",", "value", ")", "if", "self", ".", "tree_node_updated", ":", "self", ".", "tree_node_updated", "(", "child", ")" ]
Insert a value (data item) into this tree node and then its children. This will be called in response to a new data item being inserted into the document. Also updates the tree node's cumulative child count.
[ "Insert", "a", "value", "(", "data", "item", ")", "into", "this", "tree", "node", "and", "then", "its", "children", ".", "This", "will", "be", "called", "in", "response", "to", "a", "new", "data", "item", "being", "inserted", "into", "the", "document", ".", "Also", "updates", "the", "tree", "node", "s", "cumulative", "child", "count", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L358-L385
train
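A minimal standalone sketch of the bisect-ordered insertion pattern in insert_value above; the `Node` class is a hypothetical stand-in for TreeNode, with callbacks, parenting, and the reverse mapping omitted:

import bisect

class Node:
    # Comparable by key so bisect can search the sorted children list.
    def __init__(self, key):
        self.key = key
        self.count = 0
        self.children = []
        self.values = []

    def __lt__(self, other):
        return self.key < other.key

    def insert_value(self, keys, value):
        # Mirror the record: bump the cumulative count, then descend key by key,
        # creating a child node only when the key is not already present.
        self.count += 1
        if len(keys) == 0:
            self.values.append(value)
        else:
            key = keys[0]
            index = bisect.bisect_left(self.children, Node(key))
            if index == len(self.children) or self.children[index].key != key:
                self.children.insert(index, Node(key))
            self.children[index].insert_value(keys[1:], value)

root = Node(None)
root.insert_value(["2020", "01"], "data_item_a")
root.insert_value(["2020", "02"], "data_item_b")
print(root.count, [c.key for c in root.children[0].children])  # 2 ['01', '02']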
nion-software/nionswift
nion/swift/FilterPanel.py
TreeNode.remove_value
def remove_value(self, keys, value):
    """ Remove a value (data item) from this tree node and its children.

    Also updates the tree node's cumulative child count.
    """
    self.count -= 1
    if not self.key:
        keys = self.__value_reverse_mapping[value]
        del self.__value_reverse_mapping[value]
    if len(keys) == 0:
        self.values.remove(value)
    else:
        key = keys[0]
        index = bisect.bisect_left(self.children, TreeNode(key, reversed=self.reversed))
        assert index != len(self.children) and self.children[index].key == key
        self.children[index].remove_value(keys[1:], value)
        if self.tree_node_updated:
            self.tree_node_updated(self.children[index])
        if self.children[index].count == 0:
            del self.children[index]
            if self.child_removed:
                self.child_removed(self, index)
python
def remove_value(self, keys, value):
    """ Remove a value (data item) from this tree node and its children.

    Also updates the tree node's cumulative child count.
    """
    self.count -= 1
    if not self.key:
        keys = self.__value_reverse_mapping[value]
        del self.__value_reverse_mapping[value]
    if len(keys) == 0:
        self.values.remove(value)
    else:
        key = keys[0]
        index = bisect.bisect_left(self.children, TreeNode(key, reversed=self.reversed))
        assert index != len(self.children) and self.children[index].key == key
        self.children[index].remove_value(keys[1:], value)
        if self.tree_node_updated:
            self.tree_node_updated(self.children[index])
        if self.children[index].count == 0:
            del self.children[index]
            if self.child_removed:
                self.child_removed(self, index)
[ "def", "remove_value", "(", "self", ",", "keys", ",", "value", ")", ":", "self", ".", "count", "-=", "1", "if", "not", "self", ".", "key", ":", "keys", "=", "self", ".", "__value_reverse_mapping", "[", "value", "]", "del", "self", ".", "__value_reverse_mapping", "[", "value", "]", "if", "len", "(", "keys", ")", "==", "0", ":", "self", ".", "values", ".", "remove", "(", "value", ")", "else", ":", "key", "=", "keys", "[", "0", "]", "index", "=", "bisect", ".", "bisect_left", "(", "self", ".", "children", ",", "TreeNode", "(", "key", ",", "reversed", "=", "self", ".", "reversed", ")", ")", "assert", "index", "!=", "len", "(", "self", ".", "children", ")", "and", "self", ".", "children", "[", "index", "]", ".", "key", "==", "key", "self", ".", "children", "[", "index", "]", ".", "remove_value", "(", "keys", "[", "1", ":", "]", ",", "value", ")", "if", "self", ".", "tree_node_updated", ":", "self", ".", "tree_node_updated", "(", "self", ".", "children", "[", "index", "]", ")", "if", "self", ".", "children", "[", "index", "]", ".", "count", "==", "0", ":", "del", "self", ".", "children", "[", "index", "]", "if", "self", ".", "child_removed", ":", "self", ".", "child_removed", "(", "self", ",", "index", ")" ]
Remove a value (data item) from this tree node and its children. Also updates the tree node's cumulative child count.
[ "Remove", "a", "value", "(", "data", "item", ")", "from", "this", "tree", "node", "and", "its", "children", ".", "Also", "updates", "the", "tree", "node", "s", "cumulative", "child", "count", "." ]
d43693eaf057b8683b9638e575000f055fede452
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/FilterPanel.py#L387-L408
train
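A tiny illustrative sketch, with hypothetical names, of the root-level reverse mapping remove_value relies on: the key path is remembered at insert time so removal needs only the value:

# The root node keeps value -> key-path, so callers never re-derive the path.
value_reverse_mapping = {}

def remember(keys, value):
    value_reverse_mapping[value] = keys

def forget(value):
    # Recover and drop the key path, as remove_value does at the root node.
    return value_reverse_mapping.pop(value)

remember(("2020", "01"), "data_item_a")
print(forget("data_item_a"))  # ('2020', '01')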
djgagne/hagelslag
hagelslag/processing/tracker.py
label_storm_objects
def label_storm_objects(data, method, min_intensity, max_intensity, min_area=1, max_area=100,
                        max_range=1, increment=1, gaussian_sd=0):
    """
    From a 2D grid or time series of 2D grids, this method labels storm objects with either the
    Enhanced Watershed or Hysteresis methods.

    Args:
        data: the gridded data to be labeled. Should be a 2D numpy array in (y, x) coordinate order
            or a 3D numpy array in (time, y, x) coordinate order
        method: "ew" or "watershed" for Enhanced Watershed or "hyst" for hysteresis
        min_intensity: Minimum intensity threshold for gridpoints contained within any objects
        max_intensity: For watershed, any points above max_intensity are considered as the same value
            as max intensity. For hysteresis, all objects have to contain at least 1 pixel that
            equals or exceeds this value
        min_area: (default 1) The minimum area of any object in pixels.
        max_area: (default 100) The area threshold in pixels at which the enhanced watershed ends
            growth. Object area may exceed this threshold if the pixels at the last watershed level
            exceed the object area.
        max_range: Maximum difference between the maximum and minimum value in an enhanced watershed
            object before growth is stopped.
        increment: Discretization increment for the enhanced watershed
        gaussian_sd: Standard deviation of Gaussian filter applied to data

    Returns:
        label_grid: an ndarray with the same shape as data in which each pixel is labeled with a
            positive integer value.
    """
    if method.lower() in ["ew", "watershed"]:
        labeler = EnhancedWatershed(min_intensity, increment, max_intensity, max_area, max_range)
    else:
        labeler = Hysteresis(min_intensity, max_intensity)
    if len(data.shape) == 2:
        label_grid = labeler.label(gaussian_filter(data, gaussian_sd))
        label_grid[data < min_intensity] = 0
        if min_area > 1:
            label_grid = labeler.size_filter(label_grid, min_area)
    else:
        label_grid = np.zeros(data.shape, dtype=int)
        for t in range(data.shape[0]):
            label_grid[t] = labeler.label(gaussian_filter(data[t], gaussian_sd))
            label_grid[t][data[t] < min_intensity] = 0
            if min_area > 1:
                label_grid[t] = labeler.size_filter(label_grid[t], min_area)
    return label_grid
python
def label_storm_objects(data, method, min_intensity, max_intensity, min_area=1, max_area=100,
                        max_range=1, increment=1, gaussian_sd=0):
    """
    From a 2D grid or time series of 2D grids, this method labels storm objects with either the
    Enhanced Watershed or Hysteresis methods.

    Args:
        data: the gridded data to be labeled. Should be a 2D numpy array in (y, x) coordinate order
            or a 3D numpy array in (time, y, x) coordinate order
        method: "ew" or "watershed" for Enhanced Watershed or "hyst" for hysteresis
        min_intensity: Minimum intensity threshold for gridpoints contained within any objects
        max_intensity: For watershed, any points above max_intensity are considered as the same value
            as max intensity. For hysteresis, all objects have to contain at least 1 pixel that
            equals or exceeds this value
        min_area: (default 1) The minimum area of any object in pixels.
        max_area: (default 100) The area threshold in pixels at which the enhanced watershed ends
            growth. Object area may exceed this threshold if the pixels at the last watershed level
            exceed the object area.
        max_range: Maximum difference between the maximum and minimum value in an enhanced watershed
            object before growth is stopped.
        increment: Discretization increment for the enhanced watershed
        gaussian_sd: Standard deviation of Gaussian filter applied to data

    Returns:
        label_grid: an ndarray with the same shape as data in which each pixel is labeled with a
            positive integer value.
    """
    if method.lower() in ["ew", "watershed"]:
        labeler = EnhancedWatershed(min_intensity, increment, max_intensity, max_area, max_range)
    else:
        labeler = Hysteresis(min_intensity, max_intensity)
    if len(data.shape) == 2:
        label_grid = labeler.label(gaussian_filter(data, gaussian_sd))
        label_grid[data < min_intensity] = 0
        if min_area > 1:
            label_grid = labeler.size_filter(label_grid, min_area)
    else:
        label_grid = np.zeros(data.shape, dtype=int)
        for t in range(data.shape[0]):
            label_grid[t] = labeler.label(gaussian_filter(data[t], gaussian_sd))
            label_grid[t][data[t] < min_intensity] = 0
            if min_area > 1:
                label_grid[t] = labeler.size_filter(label_grid[t], min_area)
    return label_grid
[ "def", "label_storm_objects", "(", "data", ",", "method", ",", "min_intensity", ",", "max_intensity", ",", "min_area", "=", "1", ",", "max_area", "=", "100", ",", "max_range", "=", "1", ",", "increment", "=", "1", ",", "gaussian_sd", "=", "0", ")", ":", "if", "method", ".", "lower", "(", ")", "in", "[", "\"ew\"", ",", "\"watershed\"", "]", ":", "labeler", "=", "EnhancedWatershed", "(", "min_intensity", ",", "increment", ",", "max_intensity", ",", "max_area", ",", "max_range", ")", "else", ":", "labeler", "=", "Hysteresis", "(", "min_intensity", ",", "max_intensity", ")", "if", "len", "(", "data", ".", "shape", ")", "==", "2", ":", "label_grid", "=", "labeler", ".", "label", "(", "gaussian_filter", "(", "data", ",", "gaussian_sd", ")", ")", "label_grid", "[", "data", "<", "min_intensity", "]", "=", "0", "if", "min_area", ">", "1", ":", "label_grid", "=", "labeler", ".", "size_filter", "(", "label_grid", ",", "min_area", ")", "else", ":", "label_grid", "=", "np", ".", "zeros", "(", "data", ".", "shape", ",", "dtype", "=", "int", ")", "for", "t", "in", "range", "(", "data", ".", "shape", "[", "0", "]", ")", ":", "label_grid", "[", "t", "]", "=", "labeler", ".", "label", "(", "gaussian_filter", "(", "data", "[", "t", "]", ",", "gaussian_sd", ")", ")", "label_grid", "[", "t", "]", "[", "data", "[", "t", "]", "<", "min_intensity", "]", "=", "0", "if", "min_area", ">", "1", ":", "label_grid", "[", "t", "]", "=", "labeler", ".", "size_filter", "(", "label_grid", "[", "t", "]", ",", "min_area", ")", "return", "label_grid" ]
From a 2D grid or time series of 2D grids, this method labels storm objects with either the Enhanced Watershed or Hysteresis methods.

Args:
    data: the gridded data to be labeled. Should be a 2D numpy array in (y, x) coordinate order or a 3D numpy array in (time, y, x) coordinate order
    method: "ew" or "watershed" for Enhanced Watershed or "hyst" for hysteresis
    min_intensity: Minimum intensity threshold for gridpoints contained within any objects
    max_intensity: For watershed, any points above max_intensity are considered as the same value as max intensity. For hysteresis, all objects have to contain at least 1 pixel that equals or exceeds this value
    min_area: (default 1) The minimum area of any object in pixels.
    max_area: (default 100) The area threshold in pixels at which the enhanced watershed ends growth. Object area may exceed this threshold if the pixels at the last watershed level exceed the object area.
    max_range: Maximum difference between the maximum and minimum value in an enhanced watershed object before growth is stopped.
    increment: Discretization increment for the enhanced watershed
    gaussian_sd: Standard deviation of Gaussian filter applied to data

Returns:
    label_grid: an ndarray with the same shape as data in which each pixel is labeled with a positive integer value.
[ "From", "a", "2D", "grid", "or", "time", "series", "of", "2D", "grids", "this", "method", "labels", "storm", "objects", "with", "either", "the", "Enhanced", "Watershed", "or", "Hysteresis", "methods", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/tracker.py#L9-L48
train
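A hedged usage sketch for label_storm_objects; the synthetic Gaussian field and the 20/50 thresholds are illustrative assumptions, and the import requires the hagelslag package from the repo above:

import numpy as np
from hagelslag.processing.tracker import label_storm_objects

# Synthetic 2D intensity field: one Gaussian bump standing in for a storm.
y, x = np.mgrid[0:100, 0:100]
data = 60.0 * np.exp(-((x - 50) ** 2 + (y - 50) ** 2) / (2 * 8.0 ** 2))

# Hysteresis labeling: group pixels >= 20, keep objects that reach 50 somewhere.
labels = label_storm_objects(data, "hyst", min_intensity=20, max_intensity=50,
                             min_area=4, gaussian_sd=1)
print(labels.max())  # number of labeled objects; one is expected here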
djgagne/hagelslag
hagelslag/processing/tracker.py
extract_storm_objects
def extract_storm_objects(label_grid, data, x_grid, y_grid, times, dx=1, dt=1, obj_buffer=0):
    """
    After storms are labeled, this method extracts the storm objects from the grid and places them
    into STObjects. The STObjects contain intensity, location, and shape information about each storm
    at each timestep.

    Args:
        label_grid: 2D or 3D array output by label_storm_objects.
        data: 2D or 3D array used as input to label_storm_objects.
        x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
        y_grid: 2D array of y-coordinate data.
        times: List or array of time values, preferably as integers
        dx: grid spacing in same units as x_grid and y_grid.
        dt: period elapsed between times
        obj_buffer: number of extra pixels beyond bounding box of object to store in each STObject

    Returns:
        storm_objects: list of lists containing STObjects identified at each time.
    """
    storm_objects = []
    if len(label_grid.shape) == 3:
        ij_grid = np.indices(label_grid.shape[1:])
        for t, time in enumerate(times):
            storm_objects.append([])
            object_slices = list(find_objects(label_grid[t], label_grid[t].max()))
            if len(object_slices) > 0:
                for o, obj_slice in enumerate(object_slices):
                    if obj_buffer > 0:
                        obj_slice_buff = [slice(np.maximum(0, osl.start - obj_buffer),
                                                np.minimum(osl.stop + obj_buffer, label_grid.shape[l + 1]))
                                          for l, osl in enumerate(obj_slice)]
                    else:
                        obj_slice_buff = obj_slice
                    storm_objects[-1].append(STObject(data[t][obj_slice_buff],
                                                      np.where(label_grid[t][obj_slice_buff] == o + 1, 1, 0),
                                                      x_grid[obj_slice_buff],
                                                      y_grid[obj_slice_buff],
                                                      ij_grid[0][obj_slice_buff],
                                                      ij_grid[1][obj_slice_buff],
                                                      time,
                                                      time,
                                                      dx=dx,
                                                      step=dt))
                    if t > 0:
                        dims = storm_objects[-1][-1].timesteps[0].shape
                        storm_objects[-1][-1].estimate_motion(time, data[t - 1], dims[1], dims[0])
    else:
        ij_grid = np.indices(label_grid.shape)
        storm_objects.append([])
        object_slices = list(find_objects(label_grid, label_grid.max()))
        if len(object_slices) > 0:
            for o, obj_slice in enumerate(object_slices):
                if obj_buffer > 0:
                    obj_slice_buff = [slice(np.maximum(0, osl.start - obj_buffer),
                                            np.minimum(osl.stop + obj_buffer, label_grid.shape[l + 1]))
                                      for l, osl in enumerate(obj_slice)]
                else:
                    obj_slice_buff = obj_slice
                storm_objects[-1].append(STObject(data[obj_slice_buff],
                                                  np.where(label_grid[obj_slice_buff] == o + 1, 1, 0),
                                                  x_grid[obj_slice_buff],
                                                  y_grid[obj_slice_buff],
                                                  ij_grid[0][obj_slice_buff],
                                                  ij_grid[1][obj_slice_buff],
                                                  times,
                                                  times,
                                                  dx=dx,
                                                  step=dt))
    return storm_objects
python
def extract_storm_objects(label_grid, data, x_grid, y_grid, times, dx=1, dt=1, obj_buffer=0):
    """
    After storms are labeled, this method extracts the storm objects from the grid and places them
    into STObjects. The STObjects contain intensity, location, and shape information about each storm
    at each timestep.

    Args:
        label_grid: 2D or 3D array output by label_storm_objects.
        data: 2D or 3D array used as input to label_storm_objects.
        x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
        y_grid: 2D array of y-coordinate data.
        times: List or array of time values, preferably as integers
        dx: grid spacing in same units as x_grid and y_grid.
        dt: period elapsed between times
        obj_buffer: number of extra pixels beyond bounding box of object to store in each STObject

    Returns:
        storm_objects: list of lists containing STObjects identified at each time.
    """
    storm_objects = []
    if len(label_grid.shape) == 3:
        ij_grid = np.indices(label_grid.shape[1:])
        for t, time in enumerate(times):
            storm_objects.append([])
            object_slices = list(find_objects(label_grid[t], label_grid[t].max()))
            if len(object_slices) > 0:
                for o, obj_slice in enumerate(object_slices):
                    if obj_buffer > 0:
                        obj_slice_buff = [slice(np.maximum(0, osl.start - obj_buffer),
                                                np.minimum(osl.stop + obj_buffer, label_grid.shape[l + 1]))
                                          for l, osl in enumerate(obj_slice)]
                    else:
                        obj_slice_buff = obj_slice
                    storm_objects[-1].append(STObject(data[t][obj_slice_buff],
                                                      np.where(label_grid[t][obj_slice_buff] == o + 1, 1, 0),
                                                      x_grid[obj_slice_buff],
                                                      y_grid[obj_slice_buff],
                                                      ij_grid[0][obj_slice_buff],
                                                      ij_grid[1][obj_slice_buff],
                                                      time,
                                                      time,
                                                      dx=dx,
                                                      step=dt))
                    if t > 0:
                        dims = storm_objects[-1][-1].timesteps[0].shape
                        storm_objects[-1][-1].estimate_motion(time, data[t - 1], dims[1], dims[0])
    else:
        ij_grid = np.indices(label_grid.shape)
        storm_objects.append([])
        object_slices = list(find_objects(label_grid, label_grid.max()))
        if len(object_slices) > 0:
            for o, obj_slice in enumerate(object_slices):
                if obj_buffer > 0:
                    obj_slice_buff = [slice(np.maximum(0, osl.start - obj_buffer),
                                            np.minimum(osl.stop + obj_buffer, label_grid.shape[l + 1]))
                                      for l, osl in enumerate(obj_slice)]
                else:
                    obj_slice_buff = obj_slice
                storm_objects[-1].append(STObject(data[obj_slice_buff],
                                                  np.where(label_grid[obj_slice_buff] == o + 1, 1, 0),
                                                  x_grid[obj_slice_buff],
                                                  y_grid[obj_slice_buff],
                                                  ij_grid[0][obj_slice_buff],
                                                  ij_grid[1][obj_slice_buff],
                                                  times,
                                                  times,
                                                  dx=dx,
                                                  step=dt))
    return storm_objects
[ "def", "extract_storm_objects", "(", "label_grid", ",", "data", ",", "x_grid", ",", "y_grid", ",", "times", ",", "dx", "=", "1", ",", "dt", "=", "1", ",", "obj_buffer", "=", "0", ")", ":", "storm_objects", "=", "[", "]", "if", "len", "(", "label_grid", ".", "shape", ")", "==", "3", ":", "ij_grid", "=", "np", ".", "indices", "(", "label_grid", ".", "shape", "[", "1", ":", "]", ")", "for", "t", ",", "time", "in", "enumerate", "(", "times", ")", ":", "storm_objects", ".", "append", "(", "[", "]", ")", "object_slices", "=", "list", "(", "find_objects", "(", "label_grid", "[", "t", "]", ",", "label_grid", "[", "t", "]", ".", "max", "(", ")", ")", ")", "if", "len", "(", "object_slices", ")", ">", "0", ":", "for", "o", ",", "obj_slice", "in", "enumerate", "(", "object_slices", ")", ":", "if", "obj_buffer", ">", "0", ":", "obj_slice_buff", "=", "[", "slice", "(", "np", ".", "maximum", "(", "0", ",", "osl", ".", "start", "-", "obj_buffer", ")", ",", "np", ".", "minimum", "(", "osl", ".", "stop", "+", "obj_buffer", ",", "label_grid", ".", "shape", "[", "l", "+", "1", "]", ")", ")", "for", "l", ",", "osl", "in", "enumerate", "(", "obj_slice", ")", "]", "else", ":", "obj_slice_buff", "=", "obj_slice", "storm_objects", "[", "-", "1", "]", ".", "append", "(", "STObject", "(", "data", "[", "t", "]", "[", "obj_slice_buff", "]", ",", "np", ".", "where", "(", "label_grid", "[", "t", "]", "[", "obj_slice_buff", "]", "==", "o", "+", "1", ",", "1", ",", "0", ")", ",", "x_grid", "[", "obj_slice_buff", "]", ",", "y_grid", "[", "obj_slice_buff", "]", ",", "ij_grid", "[", "0", "]", "[", "obj_slice_buff", "]", ",", "ij_grid", "[", "1", "]", "[", "obj_slice_buff", "]", ",", "time", ",", "time", ",", "dx", "=", "dx", ",", "step", "=", "dt", ")", ")", "if", "t", ">", "0", ":", "dims", "=", "storm_objects", "[", "-", "1", "]", "[", "-", "1", "]", ".", "timesteps", "[", "0", "]", ".", "shape", "storm_objects", "[", "-", "1", "]", "[", "-", "1", "]", ".", "estimate_motion", "(", "time", ",", "data", "[", "t", "-", "1", "]", ",", "dims", "[", "1", "]", ",", "dims", "[", "0", "]", ")", "else", ":", "ij_grid", "=", "np", ".", "indices", "(", "label_grid", ".", "shape", ")", "storm_objects", ".", "append", "(", "[", "]", ")", "object_slices", "=", "list", "(", "find_objects", "(", "label_grid", ",", "label_grid", ".", "max", "(", ")", ")", ")", "if", "len", "(", "object_slices", ")", ">", "0", ":", "for", "o", ",", "obj_slice", "in", "enumerate", "(", "object_slices", ")", ":", "if", "obj_buffer", ">", "0", ":", "obj_slice_buff", "=", "[", "slice", "(", "np", ".", "maximum", "(", "0", ",", "osl", ".", "start", "-", "obj_buffer", ")", ",", "np", ".", "minimum", "(", "osl", ".", "stop", "+", "obj_buffer", ",", "label_grid", ".", "shape", "[", "l", "+", "1", "]", ")", ")", "for", "l", ",", "osl", "in", "enumerate", "(", "obj_slice", ")", "]", "else", ":", "obj_slice_buff", "=", "obj_slice", "storm_objects", "[", "-", "1", "]", ".", "append", "(", "STObject", "(", "data", "[", "obj_slice_buff", "]", ",", "np", ".", "where", "(", "label_grid", "[", "obj_slice_buff", "]", "==", "o", "+", "1", ",", "1", ",", "0", ")", ",", "x_grid", "[", "obj_slice_buff", "]", ",", "y_grid", "[", "obj_slice_buff", "]", ",", "ij_grid", "[", "0", "]", "[", "obj_slice_buff", "]", ",", "ij_grid", "[", "1", "]", "[", "obj_slice_buff", "]", ",", "times", ",", "times", ",", "dx", "=", "dx", ",", "step", "=", "dt", ")", ")", "return", "storm_objects" ]
After storms are labeled, this method extracts the storm objects from the grid and places them into STObjects. The STObjects contain intensity, location, and shape information about each storm at each timestep.

Args:
    label_grid: 2D or 3D array output by label_storm_objects.
    data: 2D or 3D array used as input to label_storm_objects.
    x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
    y_grid: 2D array of y-coordinate data.
    times: List or array of time values, preferably as integers
    dx: grid spacing in same units as x_grid and y_grid.
    dt: period elapsed between times
    obj_buffer: number of extra pixels beyond bounding box of object to store in each STObject

Returns:
    storm_objects: list of lists containing STObjects identified at each time.
[ "After", "storms", "are", "labeled", "this", "method", "extracts", "the", "storm", "objects", "from", "the", "grid", "and", "places", "them", "into", "STObjects", ".", "The", "STObjects", "contain", "intensity", "location", "and", "shape", "information", "about", "each", "storm", "at", "each", "timestep", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/tracker.py#L51-L118
train
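A hedged usage sketch chaining label_storm_objects into extract_storm_objects on a synthetic two-step time series; all field values and thresholds are illustrative assumptions:

import numpy as np
from hagelslag.processing.tracker import label_storm_objects, extract_storm_objects

# Two time steps of a storm drifting 3 grid points east per step.
y, x = np.mgrid[0:100, 0:100]
data = np.stack([60.0 * np.exp(-((x - 40 - 3 * t) ** 2 + (y - 60) ** 2) / (2 * 8.0 ** 2))
                 for t in range(2)])
times = np.arange(2)
labels = label_storm_objects(data, "hyst", 20, 50)
storms = extract_storm_objects(labels, data, x.astype(float), y.astype(float), times)
print([len(step) for step in storms])  # STObject count per time step, e.g. [1, 1]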
djgagne/hagelslag
hagelslag/processing/tracker.py
extract_storm_patches
def extract_storm_patches(label_grid, data, x_grid, y_grid, times, dx=1, dt=1, patch_radius=16):
    """
    After storms are labeled, this method extracts boxes of equal size centered on each storm from
    the grid and places them into STObjects. The STObjects contain intensity, location, and shape
    information about each storm at each timestep.

    Args:
        label_grid: 2D or 3D array output by label_storm_objects.
        data: 2D or 3D array used as input to label_storm_objects.
        x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
        y_grid: 2D array of y-coordinate data.
        times: List or array of time values, preferably as integers
        dx: grid spacing in same units as x_grid and y_grid.
        dt: period elapsed between times
        patch_radius: Number of grid points from center of mass to extract

    Returns:
        storm_objects: list of lists containing STObjects identified at each time.
    """
    storm_objects = []
    if len(label_grid.shape) == 3:
        ij_grid = np.indices(label_grid.shape[1:])
        for t, time in enumerate(times):
            storm_objects.append([])
            # object_slices = find_objects(label_grid[t], label_grid[t].max())
            centers = list(center_of_mass(data[t], labels=label_grid[t],
                                          index=np.arange(1, label_grid[t].max() + 1)))
            if len(centers) > 0:
                for o, center in enumerate(centers):
                    int_center = np.round(center).astype(int)
                    obj_slice_buff = [slice(int_center[0] - patch_radius, int_center[0] + patch_radius),
                                      slice(int_center[1] - patch_radius, int_center[1] + patch_radius)]
                    storm_objects[-1].append(STObject(data[t][obj_slice_buff],
                                                      np.where(label_grid[t][obj_slice_buff] == o + 1, 1, 0),
                                                      x_grid[obj_slice_buff],
                                                      y_grid[obj_slice_buff],
                                                      ij_grid[0][obj_slice_buff],
                                                      ij_grid[1][obj_slice_buff],
                                                      time,
                                                      time,
                                                      dx=dx,
                                                      step=dt))
                    if t > 0:
                        dims = storm_objects[-1][-1].timesteps[0].shape
                        storm_objects[-1][-1].estimate_motion(time, data[t - 1], dims[1], dims[0])
    else:
        ij_grid = np.indices(label_grid.shape)
        storm_objects.append([])
        centers = list(center_of_mass(data, labels=label_grid,
                                      index=np.arange(1, label_grid.max() + 1)))
        if len(centers) > 0:
            for o, center in enumerate(centers):
                int_center = np.round(center).astype(int)
                obj_slice_buff = (slice(int_center[0] - patch_radius, int_center[0] + patch_radius),
                                  slice(int_center[1] - patch_radius, int_center[1] + patch_radius))
                storm_objects[-1].append(STObject(data[obj_slice_buff],
                                                  np.where(label_grid[obj_slice_buff] == o + 1, 1, 0),
                                                  x_grid[obj_slice_buff],
                                                  y_grid[obj_slice_buff],
                                                  ij_grid[0][obj_slice_buff],
                                                  ij_grid[1][obj_slice_buff],
                                                  times[0],
                                                  times[0],
                                                  dx=dx,
                                                  step=dt))
    return storm_objects
python
def extract_storm_patches(label_grid, data, x_grid, y_grid, times, dx=1, dt=1, patch_radius=16):
    """
    After storms are labeled, this method extracts boxes of equal size centered on each storm from
    the grid and places them into STObjects. The STObjects contain intensity, location, and shape
    information about each storm at each timestep.

    Args:
        label_grid: 2D or 3D array output by label_storm_objects.
        data: 2D or 3D array used as input to label_storm_objects.
        x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
        y_grid: 2D array of y-coordinate data.
        times: List or array of time values, preferably as integers
        dx: grid spacing in same units as x_grid and y_grid.
        dt: period elapsed between times
        patch_radius: Number of grid points from center of mass to extract

    Returns:
        storm_objects: list of lists containing STObjects identified at each time.
    """
    storm_objects = []
    if len(label_grid.shape) == 3:
        ij_grid = np.indices(label_grid.shape[1:])
        for t, time in enumerate(times):
            storm_objects.append([])
            # object_slices = find_objects(label_grid[t], label_grid[t].max())
            centers = list(center_of_mass(data[t], labels=label_grid[t],
                                          index=np.arange(1, label_grid[t].max() + 1)))
            if len(centers) > 0:
                for o, center in enumerate(centers):
                    int_center = np.round(center).astype(int)
                    obj_slice_buff = [slice(int_center[0] - patch_radius, int_center[0] + patch_radius),
                                      slice(int_center[1] - patch_radius, int_center[1] + patch_radius)]
                    storm_objects[-1].append(STObject(data[t][obj_slice_buff],
                                                      np.where(label_grid[t][obj_slice_buff] == o + 1, 1, 0),
                                                      x_grid[obj_slice_buff],
                                                      y_grid[obj_slice_buff],
                                                      ij_grid[0][obj_slice_buff],
                                                      ij_grid[1][obj_slice_buff],
                                                      time,
                                                      time,
                                                      dx=dx,
                                                      step=dt))
                    if t > 0:
                        dims = storm_objects[-1][-1].timesteps[0].shape
                        storm_objects[-1][-1].estimate_motion(time, data[t - 1], dims[1], dims[0])
    else:
        ij_grid = np.indices(label_grid.shape)
        storm_objects.append([])
        centers = list(center_of_mass(data, labels=label_grid,
                                      index=np.arange(1, label_grid.max() + 1)))
        if len(centers) > 0:
            for o, center in enumerate(centers):
                int_center = np.round(center).astype(int)
                obj_slice_buff = (slice(int_center[0] - patch_radius, int_center[0] + patch_radius),
                                  slice(int_center[1] - patch_radius, int_center[1] + patch_radius))
                storm_objects[-1].append(STObject(data[obj_slice_buff],
                                                  np.where(label_grid[obj_slice_buff] == o + 1, 1, 0),
                                                  x_grid[obj_slice_buff],
                                                  y_grid[obj_slice_buff],
                                                  ij_grid[0][obj_slice_buff],
                                                  ij_grid[1][obj_slice_buff],
                                                  times[0],
                                                  times[0],
                                                  dx=dx,
                                                  step=dt))
    return storm_objects
[ "def", "extract_storm_patches", "(", "label_grid", ",", "data", ",", "x_grid", ",", "y_grid", ",", "times", ",", "dx", "=", "1", ",", "dt", "=", "1", ",", "patch_radius", "=", "16", ")", ":", "storm_objects", "=", "[", "]", "if", "len", "(", "label_grid", ".", "shape", ")", "==", "3", ":", "ij_grid", "=", "np", ".", "indices", "(", "label_grid", ".", "shape", "[", "1", ":", "]", ")", "for", "t", ",", "time", "in", "enumerate", "(", "times", ")", ":", "storm_objects", ".", "append", "(", "[", "]", ")", "# object_slices = find_objects(label_grid[t], label_grid[t].max())", "centers", "=", "list", "(", "center_of_mass", "(", "data", "[", "t", "]", ",", "labels", "=", "label_grid", "[", "t", "]", ",", "index", "=", "np", ".", "arange", "(", "1", ",", "label_grid", "[", "t", "]", ".", "max", "(", ")", "+", "1", ")", ")", ")", "if", "len", "(", "centers", ")", ">", "0", ":", "for", "o", ",", "center", "in", "enumerate", "(", "centers", ")", ":", "int_center", "=", "np", ".", "round", "(", "center", ")", ".", "astype", "(", "int", ")", "obj_slice_buff", "=", "[", "slice", "(", "int_center", "[", "0", "]", "-", "patch_radius", ",", "int_center", "[", "0", "]", "+", "patch_radius", ")", ",", "slice", "(", "int_center", "[", "1", "]", "-", "patch_radius", ",", "int_center", "[", "1", "]", "+", "patch_radius", ")", "]", "storm_objects", "[", "-", "1", "]", ".", "append", "(", "STObject", "(", "data", "[", "t", "]", "[", "obj_slice_buff", "]", ",", "np", ".", "where", "(", "label_grid", "[", "t", "]", "[", "obj_slice_buff", "]", "==", "o", "+", "1", ",", "1", ",", "0", ")", ",", "x_grid", "[", "obj_slice_buff", "]", ",", "y_grid", "[", "obj_slice_buff", "]", ",", "ij_grid", "[", "0", "]", "[", "obj_slice_buff", "]", ",", "ij_grid", "[", "1", "]", "[", "obj_slice_buff", "]", ",", "time", ",", "time", ",", "dx", "=", "dx", ",", "step", "=", "dt", ")", ")", "if", "t", ">", "0", ":", "dims", "=", "storm_objects", "[", "-", "1", "]", "[", "-", "1", "]", ".", "timesteps", "[", "0", "]", ".", "shape", "storm_objects", "[", "-", "1", "]", "[", "-", "1", "]", ".", "estimate_motion", "(", "time", ",", "data", "[", "t", "-", "1", "]", ",", "dims", "[", "1", "]", ",", "dims", "[", "0", "]", ")", "else", ":", "ij_grid", "=", "np", ".", "indices", "(", "label_grid", ".", "shape", ")", "storm_objects", ".", "append", "(", "[", "]", ")", "centers", "=", "list", "(", "center_of_mass", "(", "data", ",", "labels", "=", "label_grid", ",", "index", "=", "np", ".", "arange", "(", "1", ",", "label_grid", ".", "max", "(", ")", "+", "1", ")", ")", ")", "if", "len", "(", "centers", ")", ">", "0", ":", "for", "o", ",", "center", "in", "enumerate", "(", "centers", ")", ":", "int_center", "=", "np", ".", "round", "(", "center", ")", ".", "astype", "(", "int", ")", "obj_slice_buff", "=", "(", "slice", "(", "int_center", "[", "0", "]", "-", "patch_radius", ",", "int_center", "[", "0", "]", "+", "patch_radius", ")", ",", "slice", "(", "int_center", "[", "1", "]", "-", "patch_radius", ",", "int_center", "[", "1", "]", "+", "patch_radius", ")", ")", "storm_objects", "[", "-", "1", "]", ".", "append", "(", "STObject", "(", "data", "[", "obj_slice_buff", "]", ",", "np", ".", "where", "(", "label_grid", "[", "obj_slice_buff", "]", "==", "o", "+", "1", ",", "1", ",", "0", ")", ",", "x_grid", "[", "obj_slice_buff", "]", ",", "y_grid", "[", "obj_slice_buff", "]", ",", "ij_grid", "[", "0", "]", "[", "obj_slice_buff", "]", ",", "ij_grid", "[", "1", "]", "[", "obj_slice_buff", "]", ",", "times", "[", "0", "]", ",", "times", "[", "0", "]", 
",", "dx", "=", "dx", ",", "step", "=", "dt", ")", ")", "return", "storm_objects" ]
After storms are labeled, this method extracts boxes of equal size centered on each storm from the grid and places them into STObjects. The STObjects contain intensity, location, and shape information about each storm at each timestep.

Args:
    label_grid: 2D or 3D array output by label_storm_objects.
    data: 2D or 3D array used as input to label_storm_objects.
    x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
    y_grid: 2D array of y-coordinate data.
    times: List or array of time values, preferably as integers
    dx: grid spacing in same units as x_grid and y_grid.
    dt: period elapsed between times
    patch_radius: Number of grid points from center of mass to extract

Returns:
    storm_objects: list of lists containing STObjects identified at each time.
[ "After", "storms", "are", "labeled", "this", "method", "extracts", "boxes", "of", "equal", "size", "centered", "on", "each", "storm", "from", "the", "grid", "and", "places", "them", "into", "STObjects", ".", "The", "STObjects", "contain", "intensity", "location", "and", "shape", "information", "about", "each", "storm", "at", "each", "timestep", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/tracker.py#L121-L185
train
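A hedged usage sketch for extract_storm_patches under the same synthetic-data assumptions; it relies on the `timesteps` attribute that the record itself reads from STObject:

import numpy as np
from hagelslag.processing.tracker import label_storm_objects, extract_storm_patches

y, x = np.mgrid[0:100, 0:100]
data = np.stack([60.0 * np.exp(-((x - 50) ** 2 + (y - 50 - 2 * t) ** 2) / (2 * 6.0 ** 2))
                 for t in range(2)])
labels = label_storm_objects(data, "hyst", 20, 50)
patches = extract_storm_patches(labels, data, x.astype(float), y.astype(float),
                                np.arange(2), patch_radius=16)
# Unlike extract_storm_objects, every patch is a fixed 32x32 window centered
# on the storm's center of mass.
print(patches[0][0].timesteps[0].shape)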
djgagne/hagelslag
hagelslag/processing/tracker.py
track_storms
def track_storms(storm_objects, times, distance_components, distance_maxima, distance_weights,
                 tracked_objects=None):
    """
    Given the output of extract_storm_objects, this method tracks storms through time and merges
    individual STObjects into a set of tracks.

    Args:
        storm_objects: list of list of STObjects that have not been tracked.
        times: List of times associated with each set of STObjects
        distance_components: list of function objects that make up components of distance function
        distance_maxima: array of maximum values for each distance for normalization purposes
        distance_weights: weight given to each component of the distance function. Should add to 1.
        tracked_objects: List of STObjects that have already been tracked.

    Returns:
        tracked_objects:
    """
    obj_matcher = ObjectMatcher(distance_components, distance_weights, distance_maxima)
    if tracked_objects is None:
        tracked_objects = []
    for t, time in enumerate(times):
        past_time_objects = []
        for obj in tracked_objects:
            if obj.end_time == time - obj.step:
                past_time_objects.append(obj)
        if len(past_time_objects) == 0:
            tracked_objects.extend(storm_objects[t])
        elif len(past_time_objects) > 0 and len(storm_objects[t]) > 0:
            assignments = obj_matcher.match_objects(past_time_objects, storm_objects[t],
                                                    times[t - 1], times[t])
            unpaired = list(range(len(storm_objects[t])))
            for pair in assignments:
                past_time_objects[pair[0]].extend(storm_objects[t][pair[1]])
                unpaired.remove(pair[1])
            if len(unpaired) > 0:
                for up in unpaired:
                    tracked_objects.append(storm_objects[t][up])
    return tracked_objects
python
def track_storms(storm_objects, times, distance_components, distance_maxima, distance_weights,
                 tracked_objects=None):
    """
    Given the output of extract_storm_objects, this method tracks storms through time and merges
    individual STObjects into a set of tracks.

    Args:
        storm_objects: list of list of STObjects that have not been tracked.
        times: List of times associated with each set of STObjects
        distance_components: list of function objects that make up components of distance function
        distance_maxima: array of maximum values for each distance for normalization purposes
        distance_weights: weight given to each component of the distance function. Should add to 1.
        tracked_objects: List of STObjects that have already been tracked.

    Returns:
        tracked_objects:
    """
    obj_matcher = ObjectMatcher(distance_components, distance_weights, distance_maxima)
    if tracked_objects is None:
        tracked_objects = []
    for t, time in enumerate(times):
        past_time_objects = []
        for obj in tracked_objects:
            if obj.end_time == time - obj.step:
                past_time_objects.append(obj)
        if len(past_time_objects) == 0:
            tracked_objects.extend(storm_objects[t])
        elif len(past_time_objects) > 0 and len(storm_objects[t]) > 0:
            assignments = obj_matcher.match_objects(past_time_objects, storm_objects[t],
                                                    times[t - 1], times[t])
            unpaired = list(range(len(storm_objects[t])))
            for pair in assignments:
                past_time_objects[pair[0]].extend(storm_objects[t][pair[1]])
                unpaired.remove(pair[1])
            if len(unpaired) > 0:
                for up in unpaired:
                    tracked_objects.append(storm_objects[t][up])
    return tracked_objects
[ "def", "track_storms", "(", "storm_objects", ",", "times", ",", "distance_components", ",", "distance_maxima", ",", "distance_weights", ",", "tracked_objects", "=", "None", ")", ":", "obj_matcher", "=", "ObjectMatcher", "(", "distance_components", ",", "distance_weights", ",", "distance_maxima", ")", "if", "tracked_objects", "is", "None", ":", "tracked_objects", "=", "[", "]", "for", "t", ",", "time", "in", "enumerate", "(", "times", ")", ":", "past_time_objects", "=", "[", "]", "for", "obj", "in", "tracked_objects", ":", "if", "obj", ".", "end_time", "==", "time", "-", "obj", ".", "step", ":", "past_time_objects", ".", "append", "(", "obj", ")", "if", "len", "(", "past_time_objects", ")", "==", "0", ":", "tracked_objects", ".", "extend", "(", "storm_objects", "[", "t", "]", ")", "elif", "len", "(", "past_time_objects", ")", ">", "0", "and", "len", "(", "storm_objects", "[", "t", "]", ")", ">", "0", ":", "assignments", "=", "obj_matcher", ".", "match_objects", "(", "past_time_objects", ",", "storm_objects", "[", "t", "]", ",", "times", "[", "t", "-", "1", "]", ",", "times", "[", "t", "]", ")", "unpaired", "=", "list", "(", "range", "(", "len", "(", "storm_objects", "[", "t", "]", ")", ")", ")", "for", "pair", "in", "assignments", ":", "past_time_objects", "[", "pair", "[", "0", "]", "]", ".", "extend", "(", "storm_objects", "[", "t", "]", "[", "pair", "[", "1", "]", "]", ")", "unpaired", ".", "remove", "(", "pair", "[", "1", "]", ")", "if", "len", "(", "unpaired", ")", ">", "0", ":", "for", "up", "in", "unpaired", ":", "tracked_objects", ".", "append", "(", "storm_objects", "[", "t", "]", "[", "up", "]", ")", "return", "tracked_objects" ]
Given the output of extract_storm_objects, this method tracks storms through time and merges individual STObjects into a set of tracks.

Args:
    storm_objects: list of list of STObjects that have not been tracked.
    times: List of times associated with each set of STObjects
    distance_components: list of function objects that make up components of distance function
    distance_maxima: array of maximum values for each distance for normalization purposes
    distance_weights: weight given to each component of the distance function. Should add to 1.
    tracked_objects: List of STObjects that have already been tracked.

Returns:
    tracked_objects:
[ "Given", "the", "output", "of", "extract_storm_objects", "this", "method", "tracks", "storms", "through", "time", "and", "merges", "individual", "STObjects", "into", "a", "set", "of", "tracks", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/tracker.py#L188-L222
train
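A hedged end-to-end sketch for track_storms; the distance maxima and weights are illustrative assumptions, while the two component functions are the ones defined later in hagelslag/processing/ObjectMatcher.py:

import numpy as np
from hagelslag.processing.tracker import label_storm_objects, extract_storm_objects, track_storms
from hagelslag.processing.ObjectMatcher import shifted_centroid_distance, closest_distance

# Three time steps of a single storm drifting 5 grid points east per step.
y, x = np.mgrid[0:100, 0:100]
data = np.stack([60.0 * np.exp(-((x - 30 - 5 * t) ** 2 + (y - 50) ** 2) / (2 * 8.0 ** 2))
                 for t in range(3)])
times = np.arange(3)
labels = label_storm_objects(data, "hyst", 20, 50)
storms = extract_storm_objects(labels, data, x.astype(float), y.astype(float), times)
tracks = track_storms(storms, times,
                      [shifted_centroid_distance, closest_distance],
                      np.array([24.0, 12.0]),   # normalization maxima, grid units
                      np.array([0.7, 0.3]))     # component weights summing to 1
print(len(tracks))  # one continuous track expected for the single drifting storm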
djgagne/hagelslag
hagelslag/evaluation/MulticlassContingencyTable.py
MulticlassContingencyTable.peirce_skill_score
def peirce_skill_score(self):
    """
    Multiclass Peirce Skill Score (also Hanssen and Kuipers score, True Skill Score)
    """
    n = float(self.table.sum())
    nf = self.table.sum(axis=1)
    no = self.table.sum(axis=0)
    correct = float(self.table.trace())
    return (correct / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2)
python
def peirce_skill_score(self):
    """
    Multiclass Peirce Skill Score (also Hanssen and Kuipers score, True Skill Score)
    """
    n = float(self.table.sum())
    nf = self.table.sum(axis=1)
    no = self.table.sum(axis=0)
    correct = float(self.table.trace())
    return (correct / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2)
[ "def", "peirce_skill_score", "(", "self", ")", ":", "n", "=", "float", "(", "self", ".", "table", ".", "sum", "(", ")", ")", "nf", "=", "self", ".", "table", ".", "sum", "(", "axis", "=", "1", ")", "no", "=", "self", ".", "table", ".", "sum", "(", "axis", "=", "0", ")", "correct", "=", "float", "(", "self", ".", "table", ".", "trace", "(", ")", ")", "return", "(", "correct", "/", "n", "-", "(", "nf", "*", "no", ")", ".", "sum", "(", ")", "/", "n", "**", "2", ")", "/", "(", "1", "-", "(", "no", "*", "no", ")", ".", "sum", "(", ")", "/", "n", "**", "2", ")" ]
Multiclass Peirce Skill Score (also Hanssen and Kuipers score, True Skill Score)
[ "Multiclass", "Peirce", "Skill", "Score", "(", "also", "Hanssen", "and", "Kuipers", "score", "True", "Skill", "Score", ")" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/MulticlassContingencyTable.py#L35-L43
train
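A self-contained worked example of the Peirce Skill Score formula above on a made-up 3-class table; it redoes the arithmetic directly instead of importing the class:

import numpy as np

# Rows are forecast classes (axis=1 sums give nf), columns are observed classes
# (axis=0 sums give no), matching the record's convention; counts are illustrative.
table = np.array([[50, 10,  5],
                  [ 8, 40, 12],
                  [ 2,  5, 30]])
n = float(table.sum())
nf = table.sum(axis=1)
no = table.sum(axis=0)
correct = float(table.trace())
pss = (correct / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2)
print(round(pss, 3))  # about 0.603 for this table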
djgagne/hagelslag
hagelslag/evaluation/MulticlassContingencyTable.py
MulticlassContingencyTable.gerrity_score
def gerrity_score(self):
    """
    Gerrity Score, which weights each cell in the contingency table by its observed relative frequency.

    :return:
    """
    k = self.table.shape[0]
    n = float(self.table.sum())
    p_o = self.table.sum(axis=0) / n
    p_sum = np.cumsum(p_o)[:-1]
    a = (1.0 - p_sum) / p_sum
    s = np.zeros(self.table.shape, dtype=float)
    for (i, j) in np.ndindex(*s.shape):
        if i == j:
            s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:j]) + np.sum(a[j:k - 1]))
        elif i < j:
            s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:i]) - (j - i) + np.sum(a[j:k - 1]))
        else:
            s[i, j] = s[j, i]
    return np.sum(self.table / float(self.table.sum()) * s)
python
def gerrity_score(self):
    """
    Gerrity Score, which weights each cell in the contingency table by its observed relative frequency.

    :return:
    """
    k = self.table.shape[0]
    n = float(self.table.sum())
    p_o = self.table.sum(axis=0) / n
    p_sum = np.cumsum(p_o)[:-1]
    a = (1.0 - p_sum) / p_sum
    s = np.zeros(self.table.shape, dtype=float)
    for (i, j) in np.ndindex(*s.shape):
        if i == j:
            s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:j]) + np.sum(a[j:k - 1]))
        elif i < j:
            s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:i]) - (j - i) + np.sum(a[j:k - 1]))
        else:
            s[i, j] = s[j, i]
    return np.sum(self.table / float(self.table.sum()) * s)
[ "def", "gerrity_score", "(", "self", ")", ":", "k", "=", "self", ".", "table", ".", "shape", "[", "0", "]", "n", "=", "float", "(", "self", ".", "table", ".", "sum", "(", ")", ")", "p_o", "=", "self", ".", "table", ".", "sum", "(", "axis", "=", "0", ")", "/", "n", "p_sum", "=", "np", ".", "cumsum", "(", "p_o", ")", "[", ":", "-", "1", "]", "a", "=", "(", "1.0", "-", "p_sum", ")", "/", "p_sum", "s", "=", "np", ".", "zeros", "(", "self", ".", "table", ".", "shape", ",", "dtype", "=", "float", ")", "for", "(", "i", ",", "j", ")", "in", "np", ".", "ndindex", "(", "*", "s", ".", "shape", ")", ":", "if", "i", "==", "j", ":", "s", "[", "i", ",", "j", "]", "=", "1.0", "/", "(", "k", "-", "1.0", ")", "*", "(", "np", ".", "sum", "(", "1.0", "/", "a", "[", "0", ":", "j", "]", ")", "+", "np", ".", "sum", "(", "a", "[", "j", ":", "k", "-", "1", "]", ")", ")", "elif", "i", "<", "j", ":", "s", "[", "i", ",", "j", "]", "=", "1.0", "/", "(", "k", "-", "1.0", ")", "*", "(", "np", ".", "sum", "(", "1.0", "/", "a", "[", "0", ":", "i", "]", ")", "-", "(", "j", "-", "i", ")", "+", "np", ".", "sum", "(", "a", "[", "j", ":", "k", "-", "1", "]", ")", ")", "else", ":", "s", "[", "i", ",", "j", "]", "=", "s", "[", "j", ",", "i", "]", "return", "np", ".", "sum", "(", "self", ".", "table", "/", "float", "(", "self", ".", "table", ".", "sum", "(", ")", ")", "*", "s", ")" ]
Gerrity Score, which weights each cell in the contingency table by its observed relative frequency.

:return:
[ "Gerrity", "Score", "which", "weights", "each", "cell", "in", "the", "contingency", "table", "by", "its", "observed", "relative", "frequency", ".", ":", "return", ":" ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/evaluation/MulticlassContingencyTable.py#L45-L63
train
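The same made-up table pushed through the Gerrity scoring-matrix construction above, as a self-contained worked example:

import numpy as np

# Rebuild the Gerrity reward matrix s exactly as in the record, then take the
# frequency-weighted sum over the (illustrative) contingency table.
table = np.array([[50, 10,  5],
                  [ 8, 40, 12],
                  [ 2,  5, 30]], dtype=float)
k = table.shape[0]
n = table.sum()
p_o = table.sum(axis=0) / n          # observed class frequencies
p_sum = np.cumsum(p_o)[:-1]
a = (1.0 - p_sum) / p_sum            # odds ratios used to build the reward matrix
s = np.zeros(table.shape)
for i, j in np.ndindex(*s.shape):
    if i == j:
        s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:j]) + np.sum(a[j:k - 1]))
    elif i < j:
        s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:i]) - (j - i) + np.sum(a[j:k - 1]))
    else:
        s[i, j] = s[j, i]            # the matrix is symmetric; row j < i is already filled
print(round(np.sum(table / n * s), 3))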
djgagne/hagelslag
hagelslag/processing/ObjectMatcher.py
centroid_distance
def centroid_distance(item_a, time_a, item_b, time_b, max_value):
    """
    Euclidean distance between the centroids of item_a and item_b.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    ax, ay = item_a.center_of_mass(time_a)
    bx, by = item_b.center_of_mass(time_b)
    return np.minimum(np.sqrt((ax - bx) ** 2 + (ay - by) ** 2), max_value) / float(max_value)
python
def centroid_distance(item_a, time_a, item_b, time_b, max_value):
    """
    Euclidean distance between the centroids of item_a and item_b.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    ax, ay = item_a.center_of_mass(time_a)
    bx, by = item_b.center_of_mass(time_b)
    return np.minimum(np.sqrt((ax - bx) ** 2 + (ay - by) ** 2), max_value) / float(max_value)
[ "def", "centroid_distance", "(", "item_a", ",", "time_a", ",", "item_b", ",", "time_b", ",", "max_value", ")", ":", "ax", ",", "ay", "=", "item_a", ".", "center_of_mass", "(", "time_a", ")", "bx", ",", "by", "=", "item_b", ".", "center_of_mass", "(", "time_b", ")", "return", "np", ".", "minimum", "(", "np", ".", "sqrt", "(", "(", "ax", "-", "bx", ")", "**", "2", "+", "(", "ay", "-", "by", ")", "**", "2", ")", ",", "max_value", ")", "/", "float", "(", "max_value", ")" ]
Euclidean distance between the centroids of item_a and item_b.

Args:
    item_a: STObject from the first set in ObjectMatcher
    time_a: Time integer being evaluated
    item_b: STObject from the second set in ObjectMatcher
    time_b: Time integer being evaluated
    max_value: Maximum distance value used as scaling value and upper constraint.

Returns:
    Distance value between 0 and 1.
[ "Euclidean", "distance", "between", "the", "centroids", "of", "item_a", "and", "item_b", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L246-L262
train
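A self-contained sketch of the normalization used above, on plain coordinates instead of STObject centroids:

import numpy as np

def normalized_centroid_distance(ax, ay, bx, by, max_value):
    # Same capping and scaling as the record: anything at or beyond max_value maps to 1.
    return np.minimum(np.sqrt((ax - bx) ** 2 + (ay - by) ** 2), max_value) / float(max_value)

print(normalized_centroid_distance(10.0, 20.0, 13.0, 24.0, max_value=24.0))  # 5/24, about 0.208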
djgagne/hagelslag
hagelslag/processing/ObjectMatcher.py
shifted_centroid_distance
def shifted_centroid_distance(item_a, time_a, item_b, time_b, max_value):
    """
    Centroid distance with motion corrections.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    ax, ay = item_a.center_of_mass(time_a)
    bx, by = item_b.center_of_mass(time_b)
    if time_a < time_b:
        bx = bx - item_b.u
        by = by - item_b.v
    else:
        ax = ax - item_a.u
        ay = ay - item_a.v
    return np.minimum(np.sqrt((ax - bx) ** 2 + (ay - by) ** 2), max_value) / float(max_value)
python
def shifted_centroid_distance(item_a, time_a, item_b, time_b, max_value):
    """
    Centroid distance with motion corrections.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    ax, ay = item_a.center_of_mass(time_a)
    bx, by = item_b.center_of_mass(time_b)
    if time_a < time_b:
        bx = bx - item_b.u
        by = by - item_b.v
    else:
        ax = ax - item_a.u
        ay = ay - item_a.v
    return np.minimum(np.sqrt((ax - bx) ** 2 + (ay - by) ** 2), max_value) / float(max_value)
[ "def", "shifted_centroid_distance", "(", "item_a", ",", "time_a", ",", "item_b", ",", "time_b", ",", "max_value", ")", ":", "ax", ",", "ay", "=", "item_a", ".", "center_of_mass", "(", "time_a", ")", "bx", ",", "by", "=", "item_b", ".", "center_of_mass", "(", "time_b", ")", "if", "time_a", "<", "time_b", ":", "bx", "=", "bx", "-", "item_b", ".", "u", "by", "=", "by", "-", "item_b", ".", "v", "else", ":", "ax", "=", "ax", "-", "item_a", ".", "u", "ay", "=", "ay", "-", "item_a", ".", "v", "return", "np", ".", "minimum", "(", "np", ".", "sqrt", "(", "(", "ax", "-", "bx", ")", "**", "2", "+", "(", "ay", "-", "by", ")", "**", "2", ")", ",", "max_value", ")", "/", "float", "(", "max_value", ")" ]
Centroid distance with motion corrections.

Args:
    item_a: STObject from the first set in ObjectMatcher
    time_a: Time integer being evaluated
    item_b: STObject from the second set in ObjectMatcher
    time_b: Time integer being evaluated
    max_value: Maximum distance value used as scaling value and upper constraint.

Returns:
    Distance value between 0 and 1.
[ "Centroid", "distance", "with", "motion", "corrections", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L269-L291
train
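The motion correction above, sketched on plain coordinates; `u_b`/`v_b` stand in for the STObject's u and v motion estimates:

import numpy as np

def shifted_distance(ax, ay, bx, by, u_b, v_b, max_value):
    # time_a < time_b branch of the record: pull the later centroid back by its
    # motion estimate (u, v) before measuring the separation.
    bx, by = bx - u_b, by - v_b
    return np.minimum(np.sqrt((ax - bx) ** 2 + (ay - by) ** 2), max_value) / float(max_value)

# A storm that moved +5 in x: raw separation is 5, but 0 after the shift.
print(shifted_distance(10.0, 20.0, 15.0, 20.0, u_b=5.0, v_b=0.0, max_value=24.0))  # 0.0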
djgagne/hagelslag
hagelslag/processing/ObjectMatcher.py
closest_distance
def closest_distance(item_a, time_a, item_b, time_b, max_value):
    """
    Euclidean distance between the pixels in item_a and item_b closest to each other.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    return np.minimum(item_a.closest_distance(time_a, item_b, time_b), max_value) / float(max_value)
python
def closest_distance(item_a, time_a, item_b, time_b, max_value):
    """
    Euclidean distance between the pixels in item_a and item_b closest to each other.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    return np.minimum(item_a.closest_distance(time_a, item_b, time_b), max_value) / float(max_value)
[ "def", "closest_distance", "(", "item_a", ",", "time_a", ",", "item_b", ",", "time_b", ",", "max_value", ")", ":", "return", "np", ".", "minimum", "(", "item_a", ".", "closest_distance", "(", "time_a", ",", "item_b", ",", "time_b", ")", ",", "max_value", ")", "/", "float", "(", "max_value", ")" ]
Euclidean distance between the pixels in item_a and item_b closest to each other.

Args:
    item_a: STObject from the first set in ObjectMatcher
    time_a: Time integer being evaluated
    item_b: STObject from the second set in ObjectMatcher
    time_b: Time integer being evaluated
    max_value: Maximum distance value used as scaling value and upper constraint.

Returns:
    Distance value between 0 and 1.
[ "Euclidean", "distance", "between", "the", "pixels", "in", "item_a", "and", "item_b", "closest", "to", "each", "other", "." ]
6fb6c3df90bf4867e13a97d3460b14471d107df1
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L294-L308
train
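A self-contained sketch of the underlying geometry; STObject.closest_distance itself is not shown in this record, so the nearest-pixel-pair distance is recomputed here with scipy's cdist under that assumption:

import numpy as np
from scipy.spatial.distance import cdist

# Closest approach between two pixel sets, normalized the same way as the record.
pixels_a = np.array([[0.0, 0.0], [1.0, 0.0]])
pixels_b = np.array([[4.0, 3.0], [6.0, 8.0]])
raw = cdist(pixels_a, pixels_b).min()        # nearest pair of pixels
print(min(raw, 10.0) / 10.0)  # about 0.424; 0 means touching, 1 means >= max_value apart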