repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_ListComp
def visit_ListComp(self, node):
    '''
    A comprehension is not abstracted in any way.

    >>> from pythran import passmanager
    >>> pm = passmanager.PassManager('demo')
    >>> module = ast.parse('def foo(a, b): return [a for i in b]')
    >>> result = pm.gather(Aliases, module)
    >>> Aliases.dump(result, filter=ast.ListComp)
    [a for i in b] => ['<unbound-value>']
    '''
    # Record aliasing induced by each generator first: the element
    # expression may reference the generator targets.
    for comp in node.generators:
        self.visit_comprehension(comp)
    self.visit(node.elt)
    # The comprehension itself aliases nothing known.
    return self.add(node)
python
def visit_ListComp(self, node):
    '''
    A comprehension is not abstracted in any way.

    >>> from pythran import passmanager
    >>> pm = passmanager.PassManager('demo')
    >>> module = ast.parse('def foo(a, b): return [a for i in b]')
    >>> result = pm.gather(Aliases, module)
    >>> Aliases.dump(result, filter=ast.ListComp)
    [a for i in b] => ['<unbound-value>']
    '''
    # Record aliasing induced by each generator first: the element
    # expression may reference the generator targets.
    for comp in node.generators:
        self.visit_comprehension(comp)
    self.visit(node.elt)
    # The comprehension itself aliases nothing known.
    return self.add(node)
[ "def", "visit_ListComp", "(", "self", ",", "node", ")", ":", "for", "generator", "in", "node", ".", "generators", ":", "self", ".", "visit_comprehension", "(", "generator", ")", "self", ".", "visit", "(", "node", ".", "elt", ")", "return", "self", ".", "add", "(", "node", ")" ]
A comprehension is not abstracted in any way >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return [a for i in b]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.ListComp) [a for i in b] => ['<unbound-value>']
[ "A", "comprehension", "is", "not", "abstracted", "in", "any", "way" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L493-L507
train
232,700
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_FunctionDef
def visit_FunctionDef(self, node):
    '''
    Initialise aliasing default value before visiting.

    Add aliasing values for :
        - Pythonic
        - globals declarations
        - current function arguments

    After the body has been visited, if the function returns something,
    attach a ``return_alias`` callable on the node that maps actual call
    arguments to the alias set of the returned value.
    '''
    # Start from the intrinsic (Pythonic) aliases, then overlay global
    # declarations and the function's own arguments, each aliasing itself.
    self.aliases = IntrinsicAliases.copy()
    self.aliases.update((f.name, {f})
                        for f in self.global_declarations.values())
    self.aliases.update((arg.id, {arg})
                        for arg in node.args.args)
    self.generic_visit(node)
    if Aliases.RetId in self.aliases:
        # parametrize the expression: for one alias of the return value,
        # build a function that, given the actual call arguments, yields
        # the alias set as seen from the caller's side.
        def parametrize(exp):
            # constant(?) or global -> no change
            if isinstance(exp, (ast.Index, Intrinsic, ast.FunctionDef)):
                return lambda _: {exp}
            elif isinstance(exp, ContainerOf):
                # Alias to "some element of a container": parametrize the
                # containee and rewrap each result at the same index.
                pcontainee = parametrize(exp.containee)
                index = exp.index
                return lambda args: {
                    ContainerOf(pc, index)
                    for pc in pcontainee(args)
                }
            elif isinstance(exp, ast.Name):
                try:
                    # If the name is a formal parameter, the returned alias
                    # is the matching actual argument, or the corresponding
                    # default when the call site omits it.
                    w = node.args.args.index(exp)

                    def return_alias(args):
                        if w < len(args):
                            return {args[w]}
                        else:
                            return {node.args.defaults[w - len(args)]}
                    return return_alias
                except ValueError:
                    # Not a parameter: nothing usable from the caller's side.
                    return lambda _: self.get_unbound_value_set()
            elif isinstance(exp, ast.Subscript):
                # Parametrize value and slice independently, then take the
                # cross product of the resulting alias sets.
                values = parametrize(exp.value)
                slices = parametrize(exp.slice)
                return lambda args: {
                    ast.Subscript(value, slice, ast.Load())
                    for value in values(args)
                    for slice in slices(args)}
            else:
                return lambda _: self.get_unbound_value_set()

        # this is a little tricky: for each returned alias,
        # parametrize builds a function that, given a list of args,
        # returns the alias
        # then as we may have multiple returned alias, we compute the union
        # of these returned aliases
        return_aliases = [parametrize(ret_alias)
                          for ret_alias
                          in self.aliases[Aliases.RetId]]

        def merge_return_aliases(args):
            merged_return_aliases = set()
            for return_alias in return_aliases:
                merged_return_aliases.update(return_alias(args))
            return merged_return_aliases

        # Expose the computed alias function so call sites can query the
        # aliases of this function's return value.
        node.return_alias = merge_return_aliases
python
def visit_FunctionDef(self, node):
    '''
    Initialise aliasing default value before visiting.

    Add aliasing values for :
        - Pythonic
        - globals declarations
        - current function arguments

    After the body has been visited, if the function returns something,
    attach a ``return_alias`` callable on the node that maps actual call
    arguments to the alias set of the returned value.
    '''
    # Start from the intrinsic (Pythonic) aliases, then overlay global
    # declarations and the function's own arguments, each aliasing itself.
    self.aliases = IntrinsicAliases.copy()
    self.aliases.update((f.name, {f})
                        for f in self.global_declarations.values())
    self.aliases.update((arg.id, {arg})
                        for arg in node.args.args)
    self.generic_visit(node)
    if Aliases.RetId in self.aliases:
        # parametrize the expression: for one alias of the return value,
        # build a function that, given the actual call arguments, yields
        # the alias set as seen from the caller's side.
        def parametrize(exp):
            # constant(?) or global -> no change
            if isinstance(exp, (ast.Index, Intrinsic, ast.FunctionDef)):
                return lambda _: {exp}
            elif isinstance(exp, ContainerOf):
                # Alias to "some element of a container": parametrize the
                # containee and rewrap each result at the same index.
                pcontainee = parametrize(exp.containee)
                index = exp.index
                return lambda args: {
                    ContainerOf(pc, index)
                    for pc in pcontainee(args)
                }
            elif isinstance(exp, ast.Name):
                try:
                    # If the name is a formal parameter, the returned alias
                    # is the matching actual argument, or the corresponding
                    # default when the call site omits it.
                    w = node.args.args.index(exp)

                    def return_alias(args):
                        if w < len(args):
                            return {args[w]}
                        else:
                            return {node.args.defaults[w - len(args)]}
                    return return_alias
                except ValueError:
                    # Not a parameter: nothing usable from the caller's side.
                    return lambda _: self.get_unbound_value_set()
            elif isinstance(exp, ast.Subscript):
                # Parametrize value and slice independently, then take the
                # cross product of the resulting alias sets.
                values = parametrize(exp.value)
                slices = parametrize(exp.slice)
                return lambda args: {
                    ast.Subscript(value, slice, ast.Load())
                    for value in values(args)
                    for slice in slices(args)}
            else:
                return lambda _: self.get_unbound_value_set()

        # this is a little tricky: for each returned alias,
        # parametrize builds a function that, given a list of args,
        # returns the alias
        # then as we may have multiple returned alias, we compute the union
        # of these returned aliases
        return_aliases = [parametrize(ret_alias)
                          for ret_alias
                          in self.aliases[Aliases.RetId]]

        def merge_return_aliases(args):
            merged_return_aliases = set()
            for return_alias in return_aliases:
                merged_return_aliases.update(return_alias(args))
            return merged_return_aliases

        # Expose the computed alias function so call sites can query the
        # aliases of this function's return value.
        node.return_alias = merge_return_aliases
[ "def", "visit_FunctionDef", "(", "self", ",", "node", ")", ":", "self", ".", "aliases", "=", "IntrinsicAliases", ".", "copy", "(", ")", "self", ".", "aliases", ".", "update", "(", "(", "f", ".", "name", ",", "{", "f", "}", ")", "for", "f", "in", "self", ".", "global_declarations", ".", "values", "(", ")", ")", "self", ".", "aliases", ".", "update", "(", "(", "arg", ".", "id", ",", "{", "arg", "}", ")", "for", "arg", "in", "node", ".", "args", ".", "args", ")", "self", ".", "generic_visit", "(", "node", ")", "if", "Aliases", ".", "RetId", "in", "self", ".", "aliases", ":", "# parametrize the expression", "def", "parametrize", "(", "exp", ")", ":", "# constant(?) or global -> no change", "if", "isinstance", "(", "exp", ",", "(", "ast", ".", "Index", ",", "Intrinsic", ",", "ast", ".", "FunctionDef", ")", ")", ":", "return", "lambda", "_", ":", "{", "exp", "}", "elif", "isinstance", "(", "exp", ",", "ContainerOf", ")", ":", "pcontainee", "=", "parametrize", "(", "exp", ".", "containee", ")", "index", "=", "exp", ".", "index", "return", "lambda", "args", ":", "{", "ContainerOf", "(", "pc", ",", "index", ")", "for", "pc", "in", "pcontainee", "(", "args", ")", "}", "elif", "isinstance", "(", "exp", ",", "ast", ".", "Name", ")", ":", "try", ":", "w", "=", "node", ".", "args", ".", "args", ".", "index", "(", "exp", ")", "def", "return_alias", "(", "args", ")", ":", "if", "w", "<", "len", "(", "args", ")", ":", "return", "{", "args", "[", "w", "]", "}", "else", ":", "return", "{", "node", ".", "args", ".", "defaults", "[", "w", "-", "len", "(", "args", ")", "]", "}", "return", "return_alias", "except", "ValueError", ":", "return", "lambda", "_", ":", "self", ".", "get_unbound_value_set", "(", ")", "elif", "isinstance", "(", "exp", ",", "ast", ".", "Subscript", ")", ":", "values", "=", "parametrize", "(", "exp", ".", "value", ")", "slices", "=", "parametrize", "(", "exp", ".", "slice", ")", "return", "lambda", "args", ":", "{", "ast", ".", "Subscript", "(", "value", ",", 
"slice", ",", "ast", ".", "Load", "(", ")", ")", "for", "value", "in", "values", "(", "args", ")", "for", "slice", "in", "slices", "(", "args", ")", "}", "else", ":", "return", "lambda", "_", ":", "self", ".", "get_unbound_value_set", "(", ")", "# this is a little tricky: for each returned alias,", "# parametrize builds a function that, given a list of args,", "# returns the alias", "# then as we may have multiple returned alias, we compute the union", "# of these returned aliases", "return_aliases", "=", "[", "parametrize", "(", "ret_alias", ")", "for", "ret_alias", "in", "self", ".", "aliases", "[", "Aliases", ".", "RetId", "]", "]", "def", "merge_return_aliases", "(", "args", ")", ":", "merged_return_aliases", "=", "set", "(", ")", "for", "return_alias", "in", "return_aliases", ":", "merged_return_aliases", ".", "update", "(", "return_alias", "(", "args", ")", ")", "return", "merged_return_aliases", "node", ".", "return_alias", "=", "merge_return_aliases" ]
Initialise aliasing default value before visiting. Add aliasing values for : - Pythonic - globals declarations - current function arguments
[ "Initialise", "aliasing", "default", "value", "before", "visiting", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L532-L600
train
232,701
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_For
def visit_For(self, node):
    '''
    For loop creates aliasing between the target
    and the content of the iterator.

    >>> from pythran import passmanager
    >>> pm = passmanager.PassManager('demo')
    >>> module = ast.parse("""
    ... def foo(a):
    ...     for i in a:
    ...         {i}""")
    >>> result = pm.gather(Aliases, module)
    >>> Aliases.dump(result, filter=ast.Set)
    {i} => ['|i|']

    Not very useful, unless we know something about the iterated container

    >>> module = ast.parse("""
    ... def foo(a, b):
    ...     for i in [a, b]:
    ...         {i}""")
    >>> result = pm.gather(Aliases, module)
    >>> Aliases.dump(result, filter=ast.Set)
    {i} => ['|a|', '|b|']
    '''
    iter_aliases = self.visit(node.iter)
    if all(isinstance(x, ContainerOf) for x in iter_aliases):
        # Every alias of the iterable describes its contents, so the
        # loop target aliases those contained values directly.
        target_aliases = set()
        for iter_alias in iter_aliases:
            target_aliases.add(iter_alias.containee)
    else:
        # Opaque iterable: the target can only alias itself.
        target_aliases = {node.target}
    self.add(node.target, target_aliases)
    self.aliases[node.target.id] = self.result[node.target]
    # NOTE(review): the body is visited twice -- presumably a one-step
    # fixed point so aliases created late in the loop body propagate to
    # earlier statements on the second pass; confirm this is intentional
    # before removing the apparent duplicate call.
    self.generic_visit(node)
    self.generic_visit(node)
python
def visit_For(self, node):
    '''
    For loop creates aliasing between the target
    and the content of the iterator.

    >>> from pythran import passmanager
    >>> pm = passmanager.PassManager('demo')
    >>> module = ast.parse("""
    ... def foo(a):
    ...     for i in a:
    ...         {i}""")
    >>> result = pm.gather(Aliases, module)
    >>> Aliases.dump(result, filter=ast.Set)
    {i} => ['|i|']

    Not very useful, unless we know something about the iterated container

    >>> module = ast.parse("""
    ... def foo(a, b):
    ...     for i in [a, b]:
    ...         {i}""")
    >>> result = pm.gather(Aliases, module)
    >>> Aliases.dump(result, filter=ast.Set)
    {i} => ['|a|', '|b|']
    '''
    iter_aliases = self.visit(node.iter)
    if all(isinstance(x, ContainerOf) for x in iter_aliases):
        # Every alias of the iterable describes its contents, so the
        # loop target aliases those contained values directly.
        target_aliases = set()
        for iter_alias in iter_aliases:
            target_aliases.add(iter_alias.containee)
    else:
        # Opaque iterable: the target can only alias itself.
        target_aliases = {node.target}
    self.add(node.target, target_aliases)
    self.aliases[node.target.id] = self.result[node.target]
    # NOTE(review): the body is visited twice -- presumably a one-step
    # fixed point so aliases created late in the loop body propagate to
    # earlier statements on the second pass; confirm this is intentional
    # before removing the apparent duplicate call.
    self.generic_visit(node)
    self.generic_visit(node)
[ "def", "visit_For", "(", "self", ",", "node", ")", ":", "iter_aliases", "=", "self", ".", "visit", "(", "node", ".", "iter", ")", "if", "all", "(", "isinstance", "(", "x", ",", "ContainerOf", ")", "for", "x", "in", "iter_aliases", ")", ":", "target_aliases", "=", "set", "(", ")", "for", "iter_alias", "in", "iter_aliases", ":", "target_aliases", ".", "add", "(", "iter_alias", ".", "containee", ")", "else", ":", "target_aliases", "=", "{", "node", ".", "target", "}", "self", ".", "add", "(", "node", ".", "target", ",", "target_aliases", ")", "self", ".", "aliases", "[", "node", ".", "target", ".", "id", "]", "=", "self", ".", "result", "[", "node", ".", "target", "]", "self", ".", "generic_visit", "(", "node", ")", "self", ".", "generic_visit", "(", "node", ")" ]
For loop creates aliasing between the target and the content of the iterator >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse(""" ... def foo(a): ... for i in a: ... {i}""") >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {i} => ['|i|'] Not very useful, unless we know something about the iterated container >>> module = ast.parse(""" ... def foo(a, b): ... for i in [a, b]: ... {i}""") >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {i} => ['|a|', '|b|']
[ "For", "loop", "creates", "aliasing", "between", "the", "target", "and", "the", "content", "of", "the", "iterator" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L628-L666
train
232,702
serge-sans-paille/pythran
pythran/analyses/argument_read_once.py
ArgumentReadOnce.prepare
def prepare(self, node):
    """
    Initialise argument effects, as this analysis is inter-procedural.

    Default effect holders are registered for every user-defined global
    function and, recursively, for every Pythonic (intrinsic) function.
    """
    super(ArgumentReadOnce, self).prepare(node)

    # Register a default effect holder for each user-defined function.
    for decl in self.global_declarations.values():
        effects = ArgumentReadOnce.FunctionEffects(decl)
        self.node_to_functioneffect[decl] = effects
        self.result.add(effects)

    def register_intrinsics(namespace):
        """ Recursively save read once effect for Pythonic functions. """
        for entry in namespace.values():
            if isinstance(entry, dict):
                # Submodule: recurse into its members.
                register_intrinsics(entry)
            else:
                effects = ArgumentReadOnce.FunctionEffects(entry)
                self.node_to_functioneffect[entry] = effects
                self.result.add(effects)
                if isinstance(entry, intrinsic.Class):
                    # Class: its fields form a nested namespace.
                    register_intrinsics(entry.fields)

    for namespace in MODULES.values():
        register_intrinsics(namespace)
python
def prepare(self, node):
    """
    Initialise argument effects, as this analysis is inter-procedural.

    Default effect holders are registered for every user-defined global
    function and, recursively, for every Pythonic (intrinsic) function.
    """
    super(ArgumentReadOnce, self).prepare(node)

    # Register a default effect holder for each user-defined function.
    for decl in self.global_declarations.values():
        effects = ArgumentReadOnce.FunctionEffects(decl)
        self.node_to_functioneffect[decl] = effects
        self.result.add(effects)

    def register_intrinsics(namespace):
        """ Recursively save read once effect for Pythonic functions. """
        for entry in namespace.values():
            if isinstance(entry, dict):
                # Submodule: recurse into its members.
                register_intrinsics(entry)
            else:
                effects = ArgumentReadOnce.FunctionEffects(entry)
                self.node_to_functioneffect[entry] = effects
                self.result.add(effects)
                if isinstance(entry, intrinsic.Class):
                    # Class: its fields form a nested namespace.
                    register_intrinsics(entry.fields)

    for namespace in MODULES.values():
        register_intrinsics(namespace)
[ "def", "prepare", "(", "self", ",", "node", ")", ":", "super", "(", "ArgumentReadOnce", ",", "self", ")", ".", "prepare", "(", "node", ")", "# global functions init", "for", "n", "in", "self", ".", "global_declarations", ".", "values", "(", ")", ":", "fe", "=", "ArgumentReadOnce", ".", "FunctionEffects", "(", "n", ")", "self", ".", "node_to_functioneffect", "[", "n", "]", "=", "fe", "self", ".", "result", ".", "add", "(", "fe", ")", "# Pythonic functions init", "def", "save_effect", "(", "module", ")", ":", "\"\"\" Recursively save read once effect for Pythonic functions. \"\"\"", "for", "intr", "in", "module", ".", "values", "(", ")", ":", "if", "isinstance", "(", "intr", ",", "dict", ")", ":", "# Submodule case", "save_effect", "(", "intr", ")", "else", ":", "fe", "=", "ArgumentReadOnce", ".", "FunctionEffects", "(", "intr", ")", "self", ".", "node_to_functioneffect", "[", "intr", "]", "=", "fe", "self", ".", "result", ".", "add", "(", "fe", ")", "if", "isinstance", "(", "intr", ",", "intrinsic", ".", "Class", ")", ":", "# Class case", "save_effect", "(", "intr", ".", "fields", ")", "for", "module", "in", "MODULES", ".", "values", "(", ")", ":", "save_effect", "(", "module", ")" ]
Initialise arguments effects as this analysis in inter-procedural. Initialisation done for Pythonic functions and default values set for user defined functions.
[ "Initialise", "arguments", "effects", "as", "this", "analysis", "in", "inter", "-", "procedural", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/argument_read_once.py#L60-L88
train
232,703
astropy/regions
regions/io/ds9/write.py
ds9_objects_to_string
def ds9_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
    """
    Convert a `list` of `~regions.Region` to a DS9 region string.

    Parameters
    ----------
    regions : `list`
        List of `~regions.Region` objects
    coordsys : `str`, optional
        This overrides the coordinate system frame for all regions.
        Default is 'fk5'.
    fmt : `str`, optional
        A python string format defining the output precision.
        Default is .6f, which is accurate to 0.0036 arcseconds.
    radunit : `str`, optional
        This denotes the unit of the radius. Default is 'deg' (degrees).

    Returns
    -------
    region_string : `str`
        DS9 region string

    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from regions import CircleSkyRegion, ds9_objects_to_string
    >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
    >>> print(ds9_objects_to_string([reg_sky]))
    # Region file format: DS9 astropy/regions
    fk5
    circle(1.000007,2.000002,5.000000)
    """
    # Normalise to the intermediate shape list, then serialise as DS9.
    return to_shape_list(regions, coordsys).to_ds9(coordsys, fmt, radunit)
python
def ds9_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
    """
    Convert a `list` of `~regions.Region` to a DS9 region string.

    Parameters
    ----------
    regions : `list`
        List of `~regions.Region` objects
    coordsys : `str`, optional
        This overrides the coordinate system frame for all regions.
        Default is 'fk5'.
    fmt : `str`, optional
        A python string format defining the output precision.
        Default is .6f, which is accurate to 0.0036 arcseconds.
    radunit : `str`, optional
        This denotes the unit of the radius. Default is 'deg' (degrees).

    Returns
    -------
    region_string : `str`
        DS9 region string

    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from regions import CircleSkyRegion, ds9_objects_to_string
    >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
    >>> print(ds9_objects_to_string([reg_sky]))
    # Region file format: DS9 astropy/regions
    fk5
    circle(1.000007,2.000002,5.000000)
    """
    # Normalise to the intermediate shape list, then serialise as DS9.
    return to_shape_list(regions, coordsys).to_ds9(coordsys, fmt, radunit)
[ "def", "ds9_objects_to_string", "(", "regions", ",", "coordsys", "=", "'fk5'", ",", "fmt", "=", "'.6f'", ",", "radunit", "=", "'deg'", ")", ":", "shapelist", "=", "to_shape_list", "(", "regions", ",", "coordsys", ")", "return", "shapelist", ".", "to_ds9", "(", "coordsys", ",", "fmt", ",", "radunit", ")" ]
Converts a `list` of `~regions.Region` to DS9 region string. Parameters ---------- regions : `list` List of `~regions.Region` objects coordsys : `str`, optional This overrides the coordinate system frame for all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is 'deg'(degrees) Returns ------- region_string : `str` DS9 region string Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, ds9_objects_to_string >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> print(ds9_objects_to_string([reg_sky])) # Region file format: DS9 astropy/regions fk5 circle(1.000007,2.000002,5.000000)
[ "Converts", "a", "list", "of", "~regions", ".", "Region", "to", "DS9", "region", "string", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/write.py#L12-L46
train
232,704
astropy/regions
regions/io/ds9/write.py
write_ds9
def write_ds9(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
    """
    Convert a `list` of `~regions.Region` to a DS9 string and write to file.

    Parameters
    ----------
    regions : `list`
        List of `regions.Region` objects
    filename : `str`
        Filename in which the string is to be written.
    coordsys : `str`, optional #TODO
        Coordinate system that overrides the coordinate frames of all
        regions. Default is 'fk5'.
    fmt : `str`, optional
        A python string format defining the output precision.
        Default is .6f, which is accurate to 0.0036 arcseconds.
    radunit : `str`, optional
        This denotes the unit of the radius. Default is deg (degrees).

    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from regions import CircleSkyRegion, write_ds9
    >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
    >>> write_ds9([reg_sky], 'test_write.reg')
    >>> with open('test_write.reg') as f:
    ...     print(f.read())
    # Region file format: DS9 astropy/regions
    fk5
    circle(1.000007,2.000002,5.000000)
    """
    # Serialise first so that a conversion error never truncates the file.
    region_string = ds9_objects_to_string(regions, coordsys, fmt, radunit)
    with open(filename, 'w') as region_file:
        region_file.write(region_string)
python
def write_ds9(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
    """
    Convert a `list` of `~regions.Region` to a DS9 string and write to file.

    Parameters
    ----------
    regions : `list`
        List of `regions.Region` objects
    filename : `str`
        Filename in which the string is to be written.
    coordsys : `str`, optional #TODO
        Coordinate system that overrides the coordinate frames of all
        regions. Default is 'fk5'.
    fmt : `str`, optional
        A python string format defining the output precision.
        Default is .6f, which is accurate to 0.0036 arcseconds.
    radunit : `str`, optional
        This denotes the unit of the radius. Default is deg (degrees).

    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from regions import CircleSkyRegion, write_ds9
    >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
    >>> write_ds9([reg_sky], 'test_write.reg')
    >>> with open('test_write.reg') as f:
    ...     print(f.read())
    # Region file format: DS9 astropy/regions
    fk5
    circle(1.000007,2.000002,5.000000)
    """
    # Serialise first so that a conversion error never truncates the file.
    region_string = ds9_objects_to_string(regions, coordsys, fmt, radunit)
    with open(filename, 'w') as region_file:
        region_file.write(region_string)
[ "def", "write_ds9", "(", "regions", ",", "filename", ",", "coordsys", "=", "'fk5'", ",", "fmt", "=", "'.6f'", ",", "radunit", "=", "'deg'", ")", ":", "output", "=", "ds9_objects_to_string", "(", "regions", ",", "coordsys", ",", "fmt", ",", "radunit", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fh", ":", "fh", ".", "write", "(", "output", ")" ]
Converts a `list` of `~regions.Region` to DS9 string and write to file. Parameters ---------- regions : `list` List of `regions.Region` objects filename : `str` Filename in which the string is to be written. coordsys : `str`, optional #TODO Coordinate system that overrides the coordinate frames of all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, write_ds9 >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> write_ds9([reg_sky], 'test_write.reg') >>> with open('test_write.reg') as f: ... print(f.read()) # Region file format: DS9 astropy/regions fk5 circle(1.000007,2.000002,5.000000)
[ "Converts", "a", "list", "of", "~regions", ".", "Region", "to", "DS9", "string", "and", "write", "to", "file", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/write.py#L49-L83
train
232,705
astropy/regions
regions/io/crtf/write.py
crtf_objects_to_string
def crtf_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
    """
    Convert a `list` of `~regions.Region` to a CRTF region string.

    Parameters
    ----------
    regions : `list`
        List of `~regions.Region` objects
    coordsys : `str`, optional
        Astropy Coordinate system that overrides the coordinate system
        frame for all regions. Default is 'fk5'.
    fmt : `str`, optional
        A python string format defining the output precision.
        Default is .6f, which is accurate to 0.0036 arcseconds.
    radunit : `str`, optional
        This denotes the unit of the radius. Default is deg (degrees).

    Returns
    -------
    region_string : `str`
        CRTF region string

    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from regions import CircleSkyRegion, crtf_objects_to_string
    >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
    >>> print(crtf_objects_to_string([reg_sky]))
    #CRTF
    global coord=fk5
    +circle[[1.000007deg, 2.000002deg], 5.000000deg]
    """
    # Normalise to the intermediate shape list, then serialise as CRTF.
    return to_shape_list(regions, coordsys).to_crtf(coordsys, fmt, radunit)
python
def crtf_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
    """
    Convert a `list` of `~regions.Region` to a CRTF region string.

    Parameters
    ----------
    regions : `list`
        List of `~regions.Region` objects
    coordsys : `str`, optional
        Astropy Coordinate system that overrides the coordinate system
        frame for all regions. Default is 'fk5'.
    fmt : `str`, optional
        A python string format defining the output precision.
        Default is .6f, which is accurate to 0.0036 arcseconds.
    radunit : `str`, optional
        This denotes the unit of the radius. Default is deg (degrees).

    Returns
    -------
    region_string : `str`
        CRTF region string

    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from regions import CircleSkyRegion, crtf_objects_to_string
    >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
    >>> print(crtf_objects_to_string([reg_sky]))
    #CRTF
    global coord=fk5
    +circle[[1.000007deg, 2.000002deg], 5.000000deg]
    """
    # Normalise to the intermediate shape list, then serialise as CRTF.
    return to_shape_list(regions, coordsys).to_crtf(coordsys, fmt, radunit)
[ "def", "crtf_objects_to_string", "(", "regions", ",", "coordsys", "=", "'fk5'", ",", "fmt", "=", "'.6f'", ",", "radunit", "=", "'deg'", ")", ":", "shapelist", "=", "to_shape_list", "(", "regions", ",", "coordsys", ")", "return", "shapelist", ".", "to_crtf", "(", "coordsys", ",", "fmt", ",", "radunit", ")" ]
Converts a `list` of `~regions.Region` to CRTF region string. Parameters ---------- regions : `list` List of `~regions.Region` objects coordsys : `str`, optional Astropy Coordinate system that overrides the coordinate system frame for all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Returns ------- region_string : `str` CRTF region string Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, crtf_objects_to_string >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> print(crtf_objects_to_string([reg_sky])) #CRTF global coord=fk5 +circle[[1.000007deg, 2.000002deg], 5.000000deg]
[ "Converts", "a", "list", "of", "~regions", ".", "Region", "to", "CRTF", "region", "string", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/write.py#L12-L48
train
232,706
astropy/regions
regions/io/crtf/write.py
write_crtf
def write_crtf(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
    """
    Convert a `list` of `~regions.Region` to a CRTF string and write to file.

    Parameters
    ----------
    regions : `list`
        List of `~regions.Region` objects
    filename : `str`
        Filename in which the string is to be written.
        Default is 'new.crtf'
    coordsys : `str`, optional
        Astropy Coordinate system that overrides the coordinate frames of
        all regions. Default is 'fk5'.
    fmt : `str`, optional
        A python string format defining the output precision.
        Default is .6f, which is accurate to 0.0036 arcseconds.
    radunit : `str`, optional
        This denotes the unit of the radius. Default is deg (degrees).

    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from regions import CircleSkyRegion, write_crtf
    >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
    >>> write_crtf([reg_sky], 'test_write.crtf')
    >>> with open('test_write.crtf') as f:
    ...     print(f.read())
    #CRTF
    global coord=fk5
    +circle[[1.000007deg, 2.000002deg], 5.000000deg]
    """
    # Serialise first so that a conversion error never truncates the file.
    region_string = crtf_objects_to_string(regions, coordsys, fmt, radunit)
    with open(filename, 'w') as region_file:
        region_file.write(region_string)
python
def write_crtf(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
    """
    Convert a `list` of `~regions.Region` to a CRTF string and write to file.

    Parameters
    ----------
    regions : `list`
        List of `~regions.Region` objects
    filename : `str`
        Filename in which the string is to be written.
        Default is 'new.crtf'
    coordsys : `str`, optional
        Astropy Coordinate system that overrides the coordinate frames of
        all regions. Default is 'fk5'.
    fmt : `str`, optional
        A python string format defining the output precision.
        Default is .6f, which is accurate to 0.0036 arcseconds.
    radunit : `str`, optional
        This denotes the unit of the radius. Default is deg (degrees).

    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from regions import CircleSkyRegion, write_crtf
    >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
    >>> write_crtf([reg_sky], 'test_write.crtf')
    >>> with open('test_write.crtf') as f:
    ...     print(f.read())
    #CRTF
    global coord=fk5
    +circle[[1.000007deg, 2.000002deg], 5.000000deg]
    """
    # Serialise first so that a conversion error never truncates the file.
    region_string = crtf_objects_to_string(regions, coordsys, fmt, radunit)
    with open(filename, 'w') as region_file:
        region_file.write(region_string)
[ "def", "write_crtf", "(", "regions", ",", "filename", ",", "coordsys", "=", "'fk5'", ",", "fmt", "=", "'.6f'", ",", "radunit", "=", "'deg'", ")", ":", "output", "=", "crtf_objects_to_string", "(", "regions", ",", "coordsys", ",", "fmt", ",", "radunit", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fh", ":", "fh", ".", "write", "(", "output", ")" ]
Converts a `list` of `~regions.Region` to CRTF string and write to file. Parameters ---------- regions : `list` List of `~regions.Region` objects filename : `str` Filename in which the string is to be written. Default is 'new.crtf' coordsys : `str`, optional Astropy Coordinate system that overrides the coordinate frames of all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, write_crtf >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> write_crtf([reg_sky], 'test_write.crtf') >>> with open('test_write.crtf') as f: ... print(f.read()) #CRTF global coord=fk5 +circle[[1.000007deg, 2.000002deg], 5.000000deg]
[ "Converts", "a", "list", "of", "~regions", ".", "Region", "to", "CRTF", "string", "and", "write", "to", "file", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/write.py#L51-L86
train
232,707
astropy/regions
regions/shapes/rectangle.py
RectanglePixelRegion.corners
def corners(self): """ Return the x, y coordinate pairs that define the corners """ corners = [(-self.width/2, -self.height/2), ( self.width/2, -self.height/2), ( self.width/2, self.height/2), (-self.width/2, self.height/2), ] rotmat = [[np.cos(self.angle), np.sin(self.angle)], [-np.sin(self.angle), np.cos(self.angle)]] return np.dot(corners, rotmat) + np.array([self.center.x, self.center.y])
python
def corners(self): """ Return the x, y coordinate pairs that define the corners """ corners = [(-self.width/2, -self.height/2), ( self.width/2, -self.height/2), ( self.width/2, self.height/2), (-self.width/2, self.height/2), ] rotmat = [[np.cos(self.angle), np.sin(self.angle)], [-np.sin(self.angle), np.cos(self.angle)]] return np.dot(corners, rotmat) + np.array([self.center.x, self.center.y])
[ "def", "corners", "(", "self", ")", ":", "corners", "=", "[", "(", "-", "self", ".", "width", "/", "2", ",", "-", "self", ".", "height", "/", "2", ")", ",", "(", "self", ".", "width", "/", "2", ",", "-", "self", ".", "height", "/", "2", ")", ",", "(", "self", ".", "width", "/", "2", ",", "self", ".", "height", "/", "2", ")", ",", "(", "-", "self", ".", "width", "/", "2", ",", "self", ".", "height", "/", "2", ")", ",", "]", "rotmat", "=", "[", "[", "np", ".", "cos", "(", "self", ".", "angle", ")", ",", "np", ".", "sin", "(", "self", ".", "angle", ")", "]", ",", "[", "-", "np", ".", "sin", "(", "self", ".", "angle", ")", ",", "np", ".", "cos", "(", "self", ".", "angle", ")", "]", "]", "return", "np", ".", "dot", "(", "corners", ",", "rotmat", ")", "+", "np", ".", "array", "(", "[", "self", ".", "center", ".", "x", ",", "self", ".", "center", ".", "y", "]", ")" ]
Return the x, y coordinate pairs that define the corners
[ "Return", "the", "x", "y", "coordinate", "pairs", "that", "define", "the", "corners" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/shapes/rectangle.py#L202-L216
train
232,708
astropy/regions
regions/shapes/rectangle.py
RectanglePixelRegion.to_polygon
def to_polygon(self): """ Return a 4-cornered polygon equivalent to this rectangle """ x,y = self.corners.T vertices = PixCoord(x=x, y=y) return PolygonPixelRegion(vertices=vertices, meta=self.meta, visual=self.visual)
python
def to_polygon(self): """ Return a 4-cornered polygon equivalent to this rectangle """ x,y = self.corners.T vertices = PixCoord(x=x, y=y) return PolygonPixelRegion(vertices=vertices, meta=self.meta, visual=self.visual)
[ "def", "to_polygon", "(", "self", ")", ":", "x", ",", "y", "=", "self", ".", "corners", ".", "T", "vertices", "=", "PixCoord", "(", "x", "=", "x", ",", "y", "=", "y", ")", "return", "PolygonPixelRegion", "(", "vertices", "=", "vertices", ",", "meta", "=", "self", ".", "meta", ",", "visual", "=", "self", ".", "visual", ")" ]
Return a 4-cornered polygon equivalent to this rectangle
[ "Return", "a", "4", "-", "cornered", "polygon", "equivalent", "to", "this", "rectangle" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/shapes/rectangle.py#L218-L225
train
232,709
astropy/regions
regions/shapes/rectangle.py
RectanglePixelRegion._lower_left_xy
def _lower_left_xy(self): """ Compute lower left `xy` position. This is used for the conversion to matplotlib in ``as_artist`` Taken from http://photutils.readthedocs.io/en/latest/_modules/photutils/aperture/rectangle.html#RectangularAperture.plot """ hw = self.width / 2. hh = self.height / 2. sint = np.sin(self.angle) cost = np.cos(self.angle) dx = (hh * sint) - (hw * cost) dy = -(hh * cost) - (hw * sint) x = self.center.x + dx y = self.center.y + dy return x, y
python
def _lower_left_xy(self): """ Compute lower left `xy` position. This is used for the conversion to matplotlib in ``as_artist`` Taken from http://photutils.readthedocs.io/en/latest/_modules/photutils/aperture/rectangle.html#RectangularAperture.plot """ hw = self.width / 2. hh = self.height / 2. sint = np.sin(self.angle) cost = np.cos(self.angle) dx = (hh * sint) - (hw * cost) dy = -(hh * cost) - (hw * sint) x = self.center.x + dx y = self.center.y + dy return x, y
[ "def", "_lower_left_xy", "(", "self", ")", ":", "hw", "=", "self", ".", "width", "/", "2.", "hh", "=", "self", ".", "height", "/", "2.", "sint", "=", "np", ".", "sin", "(", "self", ".", "angle", ")", "cost", "=", "np", ".", "cos", "(", "self", ".", "angle", ")", "dx", "=", "(", "hh", "*", "sint", ")", "-", "(", "hw", "*", "cost", ")", "dy", "=", "-", "(", "hh", "*", "cost", ")", "-", "(", "hw", "*", "sint", ")", "x", "=", "self", ".", "center", ".", "x", "+", "dx", "y", "=", "self", ".", "center", ".", "y", "+", "dy", "return", "x", ",", "y" ]
Compute lower left `xy` position. This is used for the conversion to matplotlib in ``as_artist`` Taken from http://photutils.readthedocs.io/en/latest/_modules/photutils/aperture/rectangle.html#RectangularAperture.plot
[ "Compute", "lower", "left", "xy", "position", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/shapes/rectangle.py#L228-L244
train
232,710
astropy/regions
regions/core/compound.py
CompoundPixelRegion._make_annulus_path
def _make_annulus_path(patch_inner, patch_outer): """ Defines a matplotlib annulus path from two patches. This preserves the cubic Bezier curves (CURVE4) of the aperture paths. # This is borrowed from photutils aperture. """ import matplotlib.path as mpath path_inner = patch_inner.get_path() transform_inner = patch_inner.get_transform() path_inner = transform_inner.transform_path(path_inner) path_outer = patch_outer.get_path() transform_outer = patch_outer.get_transform() path_outer = transform_outer.transform_path(path_outer) verts_inner = path_inner.vertices[:-1][::-1] verts_inner = np.concatenate((verts_inner, [verts_inner[-1]])) verts = np.vstack((path_outer.vertices, verts_inner)) codes = np.hstack((path_outer.codes, path_inner.codes)) return mpath.Path(verts, codes)
python
def _make_annulus_path(patch_inner, patch_outer): """ Defines a matplotlib annulus path from two patches. This preserves the cubic Bezier curves (CURVE4) of the aperture paths. # This is borrowed from photutils aperture. """ import matplotlib.path as mpath path_inner = patch_inner.get_path() transform_inner = patch_inner.get_transform() path_inner = transform_inner.transform_path(path_inner) path_outer = patch_outer.get_path() transform_outer = patch_outer.get_transform() path_outer = transform_outer.transform_path(path_outer) verts_inner = path_inner.vertices[:-1][::-1] verts_inner = np.concatenate((verts_inner, [verts_inner[-1]])) verts = np.vstack((path_outer.vertices, verts_inner)) codes = np.hstack((path_outer.codes, path_inner.codes)) return mpath.Path(verts, codes)
[ "def", "_make_annulus_path", "(", "patch_inner", ",", "patch_outer", ")", ":", "import", "matplotlib", ".", "path", "as", "mpath", "path_inner", "=", "patch_inner", ".", "get_path", "(", ")", "transform_inner", "=", "patch_inner", ".", "get_transform", "(", ")", "path_inner", "=", "transform_inner", ".", "transform_path", "(", "path_inner", ")", "path_outer", "=", "patch_outer", ".", "get_path", "(", ")", "transform_outer", "=", "patch_outer", ".", "get_transform", "(", ")", "path_outer", "=", "transform_outer", ".", "transform_path", "(", "path_outer", ")", "verts_inner", "=", "path_inner", ".", "vertices", "[", ":", "-", "1", "]", "[", ":", ":", "-", "1", "]", "verts_inner", "=", "np", ".", "concatenate", "(", "(", "verts_inner", ",", "[", "verts_inner", "[", "-", "1", "]", "]", ")", ")", "verts", "=", "np", ".", "vstack", "(", "(", "path_outer", ".", "vertices", ",", "verts_inner", ")", ")", "codes", "=", "np", ".", "hstack", "(", "(", "path_outer", ".", "codes", ",", "path_inner", ".", "codes", ")", ")", "return", "mpath", ".", "Path", "(", "verts", ",", "codes", ")" ]
Defines a matplotlib annulus path from two patches. This preserves the cubic Bezier curves (CURVE4) of the aperture paths. # This is borrowed from photutils aperture.
[ "Defines", "a", "matplotlib", "annulus", "path", "from", "two", "patches", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/compound.py#L104-L130
train
232,711
astropy/regions
regions/io/fits/read.py
read_fits_region
def read_fits_region(filename, errors='strict'): """ Reads a FITS region file and scans for any fits regions table and converts them into `Region` objects. Parameters ---------- filename : str The file path errors : ``warn``, ``ignore``, ``strict`` The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `FITSRegionParserError`. ``warn`` will raise a `FITSRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : list Python list of `regions.Region` objects. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from regions import read_fits_region >>> file_read = get_pkg_data_filename('data/region.fits', ... package='regions.io.fits.tests') >>> regions = read_fits_region(file_read) """ regions = [] hdul = fits.open(filename) for hdu in hdul: if hdu.name == 'REGION': table = Table.read(hdu) wcs = WCS(hdu.header, keysel=['image', 'binary', 'pixel']) regions_list = FITSRegionParser(table, errors).shapes.to_regions() for reg in regions_list: regions.append(reg.to_sky(wcs)) return regions
python
def read_fits_region(filename, errors='strict'): """ Reads a FITS region file and scans for any fits regions table and converts them into `Region` objects. Parameters ---------- filename : str The file path errors : ``warn``, ``ignore``, ``strict`` The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `FITSRegionParserError`. ``warn`` will raise a `FITSRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : list Python list of `regions.Region` objects. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from regions import read_fits_region >>> file_read = get_pkg_data_filename('data/region.fits', ... package='regions.io.fits.tests') >>> regions = read_fits_region(file_read) """ regions = [] hdul = fits.open(filename) for hdu in hdul: if hdu.name == 'REGION': table = Table.read(hdu) wcs = WCS(hdu.header, keysel=['image', 'binary', 'pixel']) regions_list = FITSRegionParser(table, errors).shapes.to_regions() for reg in regions_list: regions.append(reg.to_sky(wcs)) return regions
[ "def", "read_fits_region", "(", "filename", ",", "errors", "=", "'strict'", ")", ":", "regions", "=", "[", "]", "hdul", "=", "fits", ".", "open", "(", "filename", ")", "for", "hdu", "in", "hdul", ":", "if", "hdu", ".", "name", "==", "'REGION'", ":", "table", "=", "Table", ".", "read", "(", "hdu", ")", "wcs", "=", "WCS", "(", "hdu", ".", "header", ",", "keysel", "=", "[", "'image'", ",", "'binary'", ",", "'pixel'", "]", ")", "regions_list", "=", "FITSRegionParser", "(", "table", ",", "errors", ")", ".", "shapes", ".", "to_regions", "(", ")", "for", "reg", "in", "regions_list", ":", "regions", ".", "append", "(", "reg", ".", "to_sky", "(", "wcs", ")", ")", "return", "regions" ]
Reads a FITS region file and scans for any fits regions table and converts them into `Region` objects. Parameters ---------- filename : str The file path errors : ``warn``, ``ignore``, ``strict`` The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `FITSRegionParserError`. ``warn`` will raise a `FITSRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : list Python list of `regions.Region` objects. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from regions import read_fits_region >>> file_read = get_pkg_data_filename('data/region.fits', ... package='regions.io.fits.tests') >>> regions = read_fits_region(file_read)
[ "Reads", "a", "FITS", "region", "file", "and", "scans", "for", "any", "fits", "regions", "table", "and", "converts", "them", "into", "Region", "objects", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/fits/read.py#L228-L270
train
232,712
astropy/regions
regions/io/core.py
to_shape_list
def to_shape_list(region_list, coordinate_system='fk5'): """ Converts a list of regions into a `regions.ShapeList` object. Parameters ---------- region_list: python list Lists of `regions.Region` objects format_type: str ('DS9' or 'CRTF') The format type of the Shape object. Default is 'DS9'. coordinate_system: str The astropy coordinate system frame in which all the coordinates present in the `region_list` will be converted. Default is 'fk5'. Returns ------- shape_list: `regions.ShapeList` object list of `regions.Shape` objects. """ shape_list = ShapeList() for region in region_list: coord = [] if isinstance(region, SkyRegion): reg_type = region.__class__.__name__[:-9].lower() else: reg_type = region.__class__.__name__[:-11].lower() for val in regions_attributes[reg_type]: coord.append(getattr(region, val)) if reg_type == 'polygon': coord = [x for x in region.vertices] if coordinate_system: coordsys = coordinate_system else: if isinstance(region, SkyRegion): coordsys = coord[0].name else: coordsys = 'image' frame = coordinates.frame_transform_graph.lookup_name(coordsys) new_coord = [] for val in coord: if isinstance(val, Angle) or isinstance(val, u.Quantity) or isinstance(val, numbers.Number): new_coord.append(val) elif isinstance(val, PixCoord): new_coord.append(u.Quantity(val.x, u.dimensionless_unscaled)) new_coord.append(u.Quantity(val.y, u.dimensionless_unscaled)) else: new_coord.append(Angle(val.transform_to(frame).spherical.lon)) new_coord.append(Angle(val.transform_to(frame).spherical.lat)) meta = dict(region.meta) meta.update(region.visual) if reg_type == 'text': meta['text'] = meta.get('text', meta.pop('label', '')) include = region.meta.pop('include', True) shape_list.append(Shape(coordsys, reg_type, new_coord, meta, False, include)) return shape_list
python
def to_shape_list(region_list, coordinate_system='fk5'): """ Converts a list of regions into a `regions.ShapeList` object. Parameters ---------- region_list: python list Lists of `regions.Region` objects format_type: str ('DS9' or 'CRTF') The format type of the Shape object. Default is 'DS9'. coordinate_system: str The astropy coordinate system frame in which all the coordinates present in the `region_list` will be converted. Default is 'fk5'. Returns ------- shape_list: `regions.ShapeList` object list of `regions.Shape` objects. """ shape_list = ShapeList() for region in region_list: coord = [] if isinstance(region, SkyRegion): reg_type = region.__class__.__name__[:-9].lower() else: reg_type = region.__class__.__name__[:-11].lower() for val in regions_attributes[reg_type]: coord.append(getattr(region, val)) if reg_type == 'polygon': coord = [x for x in region.vertices] if coordinate_system: coordsys = coordinate_system else: if isinstance(region, SkyRegion): coordsys = coord[0].name else: coordsys = 'image' frame = coordinates.frame_transform_graph.lookup_name(coordsys) new_coord = [] for val in coord: if isinstance(val, Angle) or isinstance(val, u.Quantity) or isinstance(val, numbers.Number): new_coord.append(val) elif isinstance(val, PixCoord): new_coord.append(u.Quantity(val.x, u.dimensionless_unscaled)) new_coord.append(u.Quantity(val.y, u.dimensionless_unscaled)) else: new_coord.append(Angle(val.transform_to(frame).spherical.lon)) new_coord.append(Angle(val.transform_to(frame).spherical.lat)) meta = dict(region.meta) meta.update(region.visual) if reg_type == 'text': meta['text'] = meta.get('text', meta.pop('label', '')) include = region.meta.pop('include', True) shape_list.append(Shape(coordsys, reg_type, new_coord, meta, False, include)) return shape_list
[ "def", "to_shape_list", "(", "region_list", ",", "coordinate_system", "=", "'fk5'", ")", ":", "shape_list", "=", "ShapeList", "(", ")", "for", "region", "in", "region_list", ":", "coord", "=", "[", "]", "if", "isinstance", "(", "region", ",", "SkyRegion", ")", ":", "reg_type", "=", "region", ".", "__class__", ".", "__name__", "[", ":", "-", "9", "]", ".", "lower", "(", ")", "else", ":", "reg_type", "=", "region", ".", "__class__", ".", "__name__", "[", ":", "-", "11", "]", ".", "lower", "(", ")", "for", "val", "in", "regions_attributes", "[", "reg_type", "]", ":", "coord", ".", "append", "(", "getattr", "(", "region", ",", "val", ")", ")", "if", "reg_type", "==", "'polygon'", ":", "coord", "=", "[", "x", "for", "x", "in", "region", ".", "vertices", "]", "if", "coordinate_system", ":", "coordsys", "=", "coordinate_system", "else", ":", "if", "isinstance", "(", "region", ",", "SkyRegion", ")", ":", "coordsys", "=", "coord", "[", "0", "]", ".", "name", "else", ":", "coordsys", "=", "'image'", "frame", "=", "coordinates", ".", "frame_transform_graph", ".", "lookup_name", "(", "coordsys", ")", "new_coord", "=", "[", "]", "for", "val", "in", "coord", ":", "if", "isinstance", "(", "val", ",", "Angle", ")", "or", "isinstance", "(", "val", ",", "u", ".", "Quantity", ")", "or", "isinstance", "(", "val", ",", "numbers", ".", "Number", ")", ":", "new_coord", ".", "append", "(", "val", ")", "elif", "isinstance", "(", "val", ",", "PixCoord", ")", ":", "new_coord", ".", "append", "(", "u", ".", "Quantity", "(", "val", ".", "x", ",", "u", ".", "dimensionless_unscaled", ")", ")", "new_coord", ".", "append", "(", "u", ".", "Quantity", "(", "val", ".", "y", ",", "u", ".", "dimensionless_unscaled", ")", ")", "else", ":", "new_coord", ".", "append", "(", "Angle", "(", "val", ".", "transform_to", "(", "frame", ")", ".", "spherical", ".", "lon", ")", ")", "new_coord", ".", "append", "(", "Angle", "(", "val", ".", "transform_to", "(", "frame", ")", ".", "spherical", ".", "lat", ")", 
")", "meta", "=", "dict", "(", "region", ".", "meta", ")", "meta", ".", "update", "(", "region", ".", "visual", ")", "if", "reg_type", "==", "'text'", ":", "meta", "[", "'text'", "]", "=", "meta", ".", "get", "(", "'text'", ",", "meta", ".", "pop", "(", "'label'", ",", "''", ")", ")", "include", "=", "region", ".", "meta", ".", "pop", "(", "'include'", ",", "True", ")", "shape_list", ".", "append", "(", "Shape", "(", "coordsys", ",", "reg_type", ",", "new_coord", ",", "meta", ",", "False", ",", "include", ")", ")", "return", "shape_list" ]
Converts a list of regions into a `regions.ShapeList` object. Parameters ---------- region_list: python list Lists of `regions.Region` objects format_type: str ('DS9' or 'CRTF') The format type of the Shape object. Default is 'DS9'. coordinate_system: str The astropy coordinate system frame in which all the coordinates present in the `region_list` will be converted. Default is 'fk5'. Returns ------- shape_list: `regions.ShapeList` object list of `regions.Shape` objects.
[ "Converts", "a", "list", "of", "regions", "into", "a", "regions", ".", "ShapeList", "object", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L670-L738
train
232,713
astropy/regions
regions/io/core.py
to_ds9_meta
def to_ds9_meta(shape_meta): """ Makes the meta data DS9 compatible by filtering and mapping the valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Shape` object Returns ------- meta : dict DS9 compatible meta dictionary """ # meta keys allowed in DS9. valid_keys = ['symbol', 'include', 'tag', 'line', 'comment', 'name', 'select', 'highlite', 'fixed', 'label', 'text', 'edit', 'move', 'rotate', 'delete', 'source', 'background'] # visual keys allowed in DS9 valid_keys += ['color', 'dash', 'linewidth', 'font', 'dashlist', 'fill', 'textangle', 'symsize'] # mapped to actual names in DS9 key_mappings = {'symbol': 'point', 'linewidth': 'width', 'label': 'text'} meta = _to_io_meta(shape_meta, valid_keys, key_mappings) if 'font' in meta: meta['font'] += " {0} {1} {2}".format(shape_meta.get('fontsize', 12), shape_meta.get('fontstyle', 'normal'), shape_meta.get('fontweight', 'roman')) return meta
python
def to_ds9_meta(shape_meta): """ Makes the meta data DS9 compatible by filtering and mapping the valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Shape` object Returns ------- meta : dict DS9 compatible meta dictionary """ # meta keys allowed in DS9. valid_keys = ['symbol', 'include', 'tag', 'line', 'comment', 'name', 'select', 'highlite', 'fixed', 'label', 'text', 'edit', 'move', 'rotate', 'delete', 'source', 'background'] # visual keys allowed in DS9 valid_keys += ['color', 'dash', 'linewidth', 'font', 'dashlist', 'fill', 'textangle', 'symsize'] # mapped to actual names in DS9 key_mappings = {'symbol': 'point', 'linewidth': 'width', 'label': 'text'} meta = _to_io_meta(shape_meta, valid_keys, key_mappings) if 'font' in meta: meta['font'] += " {0} {1} {2}".format(shape_meta.get('fontsize', 12), shape_meta.get('fontstyle', 'normal'), shape_meta.get('fontweight', 'roman')) return meta
[ "def", "to_ds9_meta", "(", "shape_meta", ")", ":", "# meta keys allowed in DS9.", "valid_keys", "=", "[", "'symbol'", ",", "'include'", ",", "'tag'", ",", "'line'", ",", "'comment'", ",", "'name'", ",", "'select'", ",", "'highlite'", ",", "'fixed'", ",", "'label'", ",", "'text'", ",", "'edit'", ",", "'move'", ",", "'rotate'", ",", "'delete'", ",", "'source'", ",", "'background'", "]", "# visual keys allowed in DS9", "valid_keys", "+=", "[", "'color'", ",", "'dash'", ",", "'linewidth'", ",", "'font'", ",", "'dashlist'", ",", "'fill'", ",", "'textangle'", ",", "'symsize'", "]", "# mapped to actual names in DS9", "key_mappings", "=", "{", "'symbol'", ":", "'point'", ",", "'linewidth'", ":", "'width'", ",", "'label'", ":", "'text'", "}", "meta", "=", "_to_io_meta", "(", "shape_meta", ",", "valid_keys", ",", "key_mappings", ")", "if", "'font'", "in", "meta", ":", "meta", "[", "'font'", "]", "+=", "\" {0} {1} {2}\"", ".", "format", "(", "shape_meta", ".", "get", "(", "'fontsize'", ",", "12", ")", ",", "shape_meta", ".", "get", "(", "'fontstyle'", ",", "'normal'", ")", ",", "shape_meta", ".", "get", "(", "'fontweight'", ",", "'roman'", ")", ")", "return", "meta" ]
Makes the meta data DS9 compatible by filtering and mapping the valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Shape` object Returns ------- meta : dict DS9 compatible meta dictionary
[ "Makes", "the", "meta", "data", "DS9", "compatible", "by", "filtering", "and", "mapping", "the", "valid", "keys" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L741-L775
train
232,714
astropy/regions
regions/io/core.py
_to_io_meta
def _to_io_meta(shape_meta, valid_keys, key_mappings): """ This is used to make meta data compatible with a specific io by filtering and mapping to it's valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Region` object valid_keys : python list Contains all the valid keys of a particular file format. key_mappings : python dict Maps to the actual name of the key in the format. Returns ------- meta : dict io compatible meta dictionary according to valid_keys and key_mappings """ meta = dict() for key in shape_meta: if key in valid_keys: meta[key_mappings.get(key, key)] = shape_meta[key] return meta
python
def _to_io_meta(shape_meta, valid_keys, key_mappings): """ This is used to make meta data compatible with a specific io by filtering and mapping to it's valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Region` object valid_keys : python list Contains all the valid keys of a particular file format. key_mappings : python dict Maps to the actual name of the key in the format. Returns ------- meta : dict io compatible meta dictionary according to valid_keys and key_mappings """ meta = dict() for key in shape_meta: if key in valid_keys: meta[key_mappings.get(key, key)] = shape_meta[key] return meta
[ "def", "_to_io_meta", "(", "shape_meta", ",", "valid_keys", ",", "key_mappings", ")", ":", "meta", "=", "dict", "(", ")", "for", "key", "in", "shape_meta", ":", "if", "key", "in", "valid_keys", ":", "meta", "[", "key_mappings", ".", "get", "(", "key", ",", "key", ")", "]", "=", "shape_meta", "[", "key", "]", "return", "meta" ]
This is used to make meta data compatible with a specific io by filtering and mapping to it's valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Region` object valid_keys : python list Contains all the valid keys of a particular file format. key_mappings : python dict Maps to the actual name of the key in the format. Returns ------- meta : dict io compatible meta dictionary according to valid_keys and key_mappings
[ "This", "is", "used", "to", "make", "meta", "data", "compatible", "with", "a", "specific", "io", "by", "filtering", "and", "mapping", "to", "it", "s", "valid", "keys" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L809-L835
train
232,715
astropy/regions
regions/io/core.py
Shape.convert_coords
def convert_coords(self): """ Process list of coordinates This mainly searches for tuple of coordinates in the coordinate list and creates a SkyCoord or PixCoord object from them if appropriate for a given region type. This involves again some coordinate transformation, so this step could be moved to the parsing process """ if self.coordsys in ['image', 'physical']: coords = self._convert_pix_coords() else: coords = self._convert_sky_coords() if self.region_type == 'line': coords = [coords[0][0], coords[0][1]] if self.region_type == 'text': coords.append(self.meta['text']) return coords
python
def convert_coords(self): """ Process list of coordinates This mainly searches for tuple of coordinates in the coordinate list and creates a SkyCoord or PixCoord object from them if appropriate for a given region type. This involves again some coordinate transformation, so this step could be moved to the parsing process """ if self.coordsys in ['image', 'physical']: coords = self._convert_pix_coords() else: coords = self._convert_sky_coords() if self.region_type == 'line': coords = [coords[0][0], coords[0][1]] if self.region_type == 'text': coords.append(self.meta['text']) return coords
[ "def", "convert_coords", "(", "self", ")", ":", "if", "self", ".", "coordsys", "in", "[", "'image'", ",", "'physical'", "]", ":", "coords", "=", "self", ".", "_convert_pix_coords", "(", ")", "else", ":", "coords", "=", "self", ".", "_convert_sky_coords", "(", ")", "if", "self", ".", "region_type", "==", "'line'", ":", "coords", "=", "[", "coords", "[", "0", "]", "[", "0", "]", ",", "coords", "[", "0", "]", "[", "1", "]", "]", "if", "self", ".", "region_type", "==", "'text'", ":", "coords", ".", "append", "(", "self", ".", "meta", "[", "'text'", "]", ")", "return", "coords" ]
Process list of coordinates This mainly searches for tuple of coordinates in the coordinate list and creates a SkyCoord or PixCoord object from them if appropriate for a given region type. This involves again some coordinate transformation, so this step could be moved to the parsing process
[ "Process", "list", "of", "coordinates" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L527-L547
train
232,716
astropy/regions
regions/io/core.py
Shape._convert_sky_coords
def _convert_sky_coords(self): """ Convert to sky coordinates """ parsed_angles = [(x, y) for x, y in zip(self.coord[:-1:2], self.coord[1::2]) if (isinstance(x, coordinates.Angle) and isinstance(y, coordinates.Angle)) ] frame = coordinates.frame_transform_graph.lookup_name(self.coordsys) lon, lat = zip(*parsed_angles) if hasattr(lon, '__len__') and hasattr(lat, '__len__') and len(lon) == 1 and len(lat) == 1: # force entries to be scalar if they are length-1 lon, lat = u.Quantity(lon[0]), u.Quantity(lat[0]) else: # otherwise, they are vector quantities lon, lat = u.Quantity(lon), u.Quantity(lat) sphcoords = coordinates.UnitSphericalRepresentation(lon, lat) coords = [SkyCoord(frame(sphcoords))] if self.region_type != 'polygon': coords += self.coord[len(coords * 2):] return coords
python
def _convert_sky_coords(self): """ Convert to sky coordinates """ parsed_angles = [(x, y) for x, y in zip(self.coord[:-1:2], self.coord[1::2]) if (isinstance(x, coordinates.Angle) and isinstance(y, coordinates.Angle)) ] frame = coordinates.frame_transform_graph.lookup_name(self.coordsys) lon, lat = zip(*parsed_angles) if hasattr(lon, '__len__') and hasattr(lat, '__len__') and len(lon) == 1 and len(lat) == 1: # force entries to be scalar if they are length-1 lon, lat = u.Quantity(lon[0]), u.Quantity(lat[0]) else: # otherwise, they are vector quantities lon, lat = u.Quantity(lon), u.Quantity(lat) sphcoords = coordinates.UnitSphericalRepresentation(lon, lat) coords = [SkyCoord(frame(sphcoords))] if self.region_type != 'polygon': coords += self.coord[len(coords * 2):] return coords
[ "def", "_convert_sky_coords", "(", "self", ")", ":", "parsed_angles", "=", "[", "(", "x", ",", "y", ")", "for", "x", ",", "y", "in", "zip", "(", "self", ".", "coord", "[", ":", "-", "1", ":", "2", "]", ",", "self", ".", "coord", "[", "1", ":", ":", "2", "]", ")", "if", "(", "isinstance", "(", "x", ",", "coordinates", ".", "Angle", ")", "and", "isinstance", "(", "y", ",", "coordinates", ".", "Angle", ")", ")", "]", "frame", "=", "coordinates", ".", "frame_transform_graph", ".", "lookup_name", "(", "self", ".", "coordsys", ")", "lon", ",", "lat", "=", "zip", "(", "*", "parsed_angles", ")", "if", "hasattr", "(", "lon", ",", "'__len__'", ")", "and", "hasattr", "(", "lat", ",", "'__len__'", ")", "and", "len", "(", "lon", ")", "==", "1", "and", "len", "(", "lat", ")", "==", "1", ":", "# force entries to be scalar if they are length-1", "lon", ",", "lat", "=", "u", ".", "Quantity", "(", "lon", "[", "0", "]", ")", ",", "u", ".", "Quantity", "(", "lat", "[", "0", "]", ")", "else", ":", "# otherwise, they are vector quantities", "lon", ",", "lat", "=", "u", ".", "Quantity", "(", "lon", ")", ",", "u", ".", "Quantity", "(", "lat", ")", "sphcoords", "=", "coordinates", ".", "UnitSphericalRepresentation", "(", "lon", ",", "lat", ")", "coords", "=", "[", "SkyCoord", "(", "frame", "(", "sphcoords", ")", ")", "]", "if", "self", ".", "region_type", "!=", "'polygon'", ":", "coords", "+=", "self", ".", "coord", "[", "len", "(", "coords", "*", "2", ")", ":", "]", "return", "coords" ]
Convert to sky coordinates
[ "Convert", "to", "sky", "coordinates" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L549-L572
train
232,717
astropy/regions
regions/io/core.py
Shape._convert_pix_coords
def _convert_pix_coords(self): """ Convert to pixel coordinates, `regions.PixCoord` """ if self.region_type in ['polygon', 'line']: # have to special-case polygon in the phys coord case # b/c can't typecheck when iterating as in sky coord case coords = [PixCoord(self.coord[0::2], self.coord[1::2])] else: temp = [_.value for _ in self.coord] coord = PixCoord(temp[0], temp[1]) coords = [coord] + temp[2:] # The angle remains as a quantity object. # Modulus check makes sure that it works for ellipse/rectangle annulus if self.region_type in ['ellipse', 'rectangle'] and len(coords) % 2 == 0: coords[-1] = self.coord[-1] return coords
python
def _convert_pix_coords(self): """ Convert to pixel coordinates, `regions.PixCoord` """ if self.region_type in ['polygon', 'line']: # have to special-case polygon in the phys coord case # b/c can't typecheck when iterating as in sky coord case coords = [PixCoord(self.coord[0::2], self.coord[1::2])] else: temp = [_.value for _ in self.coord] coord = PixCoord(temp[0], temp[1]) coords = [coord] + temp[2:] # The angle remains as a quantity object. # Modulus check makes sure that it works for ellipse/rectangle annulus if self.region_type in ['ellipse', 'rectangle'] and len(coords) % 2 == 0: coords[-1] = self.coord[-1] return coords
[ "def", "_convert_pix_coords", "(", "self", ")", ":", "if", "self", ".", "region_type", "in", "[", "'polygon'", ",", "'line'", "]", ":", "# have to special-case polygon in the phys coord case", "# b/c can't typecheck when iterating as in sky coord case", "coords", "=", "[", "PixCoord", "(", "self", ".", "coord", "[", "0", ":", ":", "2", "]", ",", "self", ".", "coord", "[", "1", ":", ":", "2", "]", ")", "]", "else", ":", "temp", "=", "[", "_", ".", "value", "for", "_", "in", "self", ".", "coord", "]", "coord", "=", "PixCoord", "(", "temp", "[", "0", "]", ",", "temp", "[", "1", "]", ")", "coords", "=", "[", "coord", "]", "+", "temp", "[", "2", ":", "]", "# The angle remains as a quantity object.", "# Modulus check makes sure that it works for ellipse/rectangle annulus", "if", "self", ".", "region_type", "in", "[", "'ellipse'", ",", "'rectangle'", "]", "and", "len", "(", "coords", ")", "%", "2", "==", "0", ":", "coords", "[", "-", "1", "]", "=", "self", ".", "coord", "[", "-", "1", "]", "return", "coords" ]
Convert to pixel coordinates, `regions.PixCoord`
[ "Convert", "to", "pixel", "coordinates", "regions", ".", "PixCoord" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L574-L592
train
232,718
astropy/regions
regions/io/core.py
Shape.to_region
def to_region(self): """ Converts to region, ``regions.Region`` object """ coords = self.convert_coords() log.debug(coords) viz_keywords = ['color', 'dash', 'dashlist', 'width', 'font', 'symsize', 'symbol', 'symsize', 'fontsize', 'fontstyle', 'usetex', 'labelpos', 'labeloff', 'linewidth', 'linestyle', 'point', 'textangle', 'fontweight'] if isinstance(coords[0], SkyCoord): reg = self.shape_to_sky_region[self.region_type](*coords) elif isinstance(coords[0], PixCoord): reg = self.shape_to_pixel_region[self.region_type](*coords) else: self._raise_error("No central coordinate") reg.visual = RegionVisual() reg.meta = RegionMeta() # both 'text' and 'label' should be set to the same value, where we # default to the 'text' value since that is the one used by ds9 regions label = self.meta.get('text', self.meta.get('label', "")) if label != '': reg.meta['label'] = label for key in self.meta: if key in viz_keywords: reg.visual[key] = self.meta[key] else: reg.meta[key] = self.meta[key] reg.meta['include'] = self.include return reg
python
def to_region(self): """ Converts to region, ``regions.Region`` object """ coords = self.convert_coords() log.debug(coords) viz_keywords = ['color', 'dash', 'dashlist', 'width', 'font', 'symsize', 'symbol', 'symsize', 'fontsize', 'fontstyle', 'usetex', 'labelpos', 'labeloff', 'linewidth', 'linestyle', 'point', 'textangle', 'fontweight'] if isinstance(coords[0], SkyCoord): reg = self.shape_to_sky_region[self.region_type](*coords) elif isinstance(coords[0], PixCoord): reg = self.shape_to_pixel_region[self.region_type](*coords) else: self._raise_error("No central coordinate") reg.visual = RegionVisual() reg.meta = RegionMeta() # both 'text' and 'label' should be set to the same value, where we # default to the 'text' value since that is the one used by ds9 regions label = self.meta.get('text', self.meta.get('label', "")) if label != '': reg.meta['label'] = label for key in self.meta: if key in viz_keywords: reg.visual[key] = self.meta[key] else: reg.meta[key] = self.meta[key] reg.meta['include'] = self.include return reg
[ "def", "to_region", "(", "self", ")", ":", "coords", "=", "self", ".", "convert_coords", "(", ")", "log", ".", "debug", "(", "coords", ")", "viz_keywords", "=", "[", "'color'", ",", "'dash'", ",", "'dashlist'", ",", "'width'", ",", "'font'", ",", "'symsize'", ",", "'symbol'", ",", "'symsize'", ",", "'fontsize'", ",", "'fontstyle'", ",", "'usetex'", ",", "'labelpos'", ",", "'labeloff'", ",", "'linewidth'", ",", "'linestyle'", ",", "'point'", ",", "'textangle'", ",", "'fontweight'", "]", "if", "isinstance", "(", "coords", "[", "0", "]", ",", "SkyCoord", ")", ":", "reg", "=", "self", ".", "shape_to_sky_region", "[", "self", ".", "region_type", "]", "(", "*", "coords", ")", "elif", "isinstance", "(", "coords", "[", "0", "]", ",", "PixCoord", ")", ":", "reg", "=", "self", ".", "shape_to_pixel_region", "[", "self", ".", "region_type", "]", "(", "*", "coords", ")", "else", ":", "self", ".", "_raise_error", "(", "\"No central coordinate\"", ")", "reg", ".", "visual", "=", "RegionVisual", "(", ")", "reg", ".", "meta", "=", "RegionMeta", "(", ")", "# both 'text' and 'label' should be set to the same value, where we", "# default to the 'text' value since that is the one used by ds9 regions", "label", "=", "self", ".", "meta", ".", "get", "(", "'text'", ",", "self", ".", "meta", ".", "get", "(", "'label'", ",", "\"\"", ")", ")", "if", "label", "!=", "''", ":", "reg", ".", "meta", "[", "'label'", "]", "=", "label", "for", "key", "in", "self", ".", "meta", ":", "if", "key", "in", "viz_keywords", ":", "reg", ".", "visual", "[", "key", "]", "=", "self", ".", "meta", "[", "key", "]", "else", ":", "reg", ".", "meta", "[", "key", "]", "=", "self", ".", "meta", "[", "key", "]", "reg", ".", "meta", "[", "'include'", "]", "=", "self", ".", "include", "return", "reg" ]
Converts to region, ``regions.Region`` object
[ "Converts", "to", "region", "regions", ".", "Region", "object" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L594-L628
train
232,719
astropy/regions
regions/io/core.py
Shape.check_crtf
def check_crtf(self): """ Checks for CRTF compatibility. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package" "supported by CRTF".format(self.region_type)) if self.coordsys not in valid_coordsys['CRTF']: raise ValueError("'{0}' is not a valid coordinate reference frame in " "astropy supported by CRTF".format(self.coordsys))
python
def check_crtf(self): """ Checks for CRTF compatibility. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package" "supported by CRTF".format(self.region_type)) if self.coordsys not in valid_coordsys['CRTF']: raise ValueError("'{0}' is not a valid coordinate reference frame in " "astropy supported by CRTF".format(self.coordsys))
[ "def", "check_crtf", "(", "self", ")", ":", "if", "self", ".", "region_type", "not", "in", "regions_attributes", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid region type in this package\"", "\"supported by CRTF\"", ".", "format", "(", "self", ".", "region_type", ")", ")", "if", "self", ".", "coordsys", "not", "in", "valid_coordsys", "[", "'CRTF'", "]", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid coordinate reference frame in \"", "\"astropy supported by CRTF\"", ".", "format", "(", "self", ".", "coordsys", ")", ")" ]
Checks for CRTF compatibility.
[ "Checks", "for", "CRTF", "compatibility", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L633-L643
train
232,720
astropy/regions
regions/io/core.py
Shape.check_ds9
def check_ds9(self): """ Checks for DS9 compatibility. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package" "supported by DS9".format(self.region_type)) if self.coordsys not in valid_coordsys['DS9']: raise ValueError("'{0}' is not a valid coordinate reference frame " "in astropy supported by DS9".format(self.coordsys))
python
def check_ds9(self): """ Checks for DS9 compatibility. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package" "supported by DS9".format(self.region_type)) if self.coordsys not in valid_coordsys['DS9']: raise ValueError("'{0}' is not a valid coordinate reference frame " "in astropy supported by DS9".format(self.coordsys))
[ "def", "check_ds9", "(", "self", ")", ":", "if", "self", ".", "region_type", "not", "in", "regions_attributes", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid region type in this package\"", "\"supported by DS9\"", ".", "format", "(", "self", ".", "region_type", ")", ")", "if", "self", ".", "coordsys", "not", "in", "valid_coordsys", "[", "'DS9'", "]", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid coordinate reference frame \"", "\"in astropy supported by DS9\"", ".", "format", "(", "self", ".", "coordsys", ")", ")" ]
Checks for DS9 compatibility.
[ "Checks", "for", "DS9", "compatibility", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L645-L655
train
232,721
astropy/regions
regions/io/core.py
Shape._validate
def _validate(self): """ Checks whether all the attributes of this object is valid. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package" .format(self.region_type)) if self.coordsys not in valid_coordsys['DS9'] + valid_coordsys['CRTF']: raise ValueError("'{0}' is not a valid coordinate reference frame " "in astropy".format(self.coordsys))
python
def _validate(self): """ Checks whether all the attributes of this object is valid. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package" .format(self.region_type)) if self.coordsys not in valid_coordsys['DS9'] + valid_coordsys['CRTF']: raise ValueError("'{0}' is not a valid coordinate reference frame " "in astropy".format(self.coordsys))
[ "def", "_validate", "(", "self", ")", ":", "if", "self", ".", "region_type", "not", "in", "regions_attributes", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid region type in this package\"", ".", "format", "(", "self", ".", "region_type", ")", ")", "if", "self", ".", "coordsys", "not", "in", "valid_coordsys", "[", "'DS9'", "]", "+", "valid_coordsys", "[", "'CRTF'", "]", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid coordinate reference frame \"", "\"in astropy\"", ".", "format", "(", "self", ".", "coordsys", ")", ")" ]
Checks whether all the attributes of this object is valid.
[ "Checks", "whether", "all", "the", "attributes", "of", "this", "object", "is", "valid", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L657-L667
train
232,722
astropy/regions
regions/io/crtf/read.py
read_crtf
def read_crtf(filename, errors='strict'): """ Reads a CRTF region file and returns a list of region objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.CRTFRegionParserError`. ``warn`` will raise a `~regions.CRTFRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python `list` of `~regions.Region` objects. Examples -------- >>> from regions import read_crtf >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/CRTFgeneral.crtf', package='regions.io.crtf.tests') >>> regs = read_crtf(file, errors='warn') >>> print(regs[0]) Region: CircleSkyRegion center: <SkyCoord (FK4: equinox=B1950.000, obstime=B1950.000): (ra, dec) in deg (273.1, -23.18333333)> radius: 2.3 arcsec >>> print(regs[0].meta) {'frame': 'BARY', 'corr': ['I', 'Q'], 'include': True, 'type': 'ann'} >>> print(regs[0].visual) {'color': 'blue'} """ with open(filename) as fh: if regex_begin.search(fh.readline()): region_string = fh.read() parser = CRTFParser(region_string, errors) return parser.shapes.to_regions() else: raise CRTFRegionParserError('Every CRTF Region must start with "#CRTF" ')
python
def read_crtf(filename, errors='strict'): """ Reads a CRTF region file and returns a list of region objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.CRTFRegionParserError`. ``warn`` will raise a `~regions.CRTFRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python `list` of `~regions.Region` objects. Examples -------- >>> from regions import read_crtf >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/CRTFgeneral.crtf', package='regions.io.crtf.tests') >>> regs = read_crtf(file, errors='warn') >>> print(regs[0]) Region: CircleSkyRegion center: <SkyCoord (FK4: equinox=B1950.000, obstime=B1950.000): (ra, dec) in deg (273.1, -23.18333333)> radius: 2.3 arcsec >>> print(regs[0].meta) {'frame': 'BARY', 'corr': ['I', 'Q'], 'include': True, 'type': 'ann'} >>> print(regs[0].visual) {'color': 'blue'} """ with open(filename) as fh: if regex_begin.search(fh.readline()): region_string = fh.read() parser = CRTFParser(region_string, errors) return parser.shapes.to_regions() else: raise CRTFRegionParserError('Every CRTF Region must start with "#CRTF" ')
[ "def", "read_crtf", "(", "filename", ",", "errors", "=", "'strict'", ")", ":", "with", "open", "(", "filename", ")", "as", "fh", ":", "if", "regex_begin", ".", "search", "(", "fh", ".", "readline", "(", ")", ")", ":", "region_string", "=", "fh", ".", "read", "(", ")", "parser", "=", "CRTFParser", "(", "region_string", ",", "errors", ")", "return", "parser", ".", "shapes", ".", "to_regions", "(", ")", "else", ":", "raise", "CRTFRegionParserError", "(", "'Every CRTF Region must start with \"#CRTF\" '", ")" ]
Reads a CRTF region file and returns a list of region objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.CRTFRegionParserError`. ``warn`` will raise a `~regions.CRTFRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python `list` of `~regions.Region` objects. Examples -------- >>> from regions import read_crtf >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/CRTFgeneral.crtf', package='regions.io.crtf.tests') >>> regs = read_crtf(file, errors='warn') >>> print(regs[0]) Region: CircleSkyRegion center: <SkyCoord (FK4: equinox=B1950.000, obstime=B1950.000): (ra, dec) in deg (273.1, -23.18333333)> radius: 2.3 arcsec >>> print(regs[0].meta) {'frame': 'BARY', 'corr': ['I', 'Q'], 'include': True, 'type': 'ann'} >>> print(regs[0].visual) {'color': 'blue'}
[ "Reads", "a", "CRTF", "region", "file", "and", "returns", "a", "list", "of", "region", "objects", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L43-L85
train
232,723
astropy/regions
regions/io/crtf/read.py
CRTFParser.parse_line
def parse_line(self, line): """ Parses a single line. """ # Skip blanks if line == '': return # Skip comments if regex_comment.search(line): return # Special case / header: parse global parameters into metadata global_parameters = regex_global.search(line) if global_parameters: self.parse_global_meta(global_parameters.group('parameters')) return # Tries to check the validity of the line. crtf_line = regex_line.search(line) if crtf_line: # Tries to parse the line. # Finds info about the region. region = regex_region.search(crtf_line.group('region')) type_ = region.group('type') or 'reg' include = region.group('include') or '+' region_type = region.group('regiontype').lower() if region_type in self.valid_definition: helper = CRTFRegionParser(self.global_meta, include, type_, region_type, *crtf_line.group('region', 'parameters')) self.shapes.append(helper.shape) else: self._raise_error("Not a valid CRTF Region type: '{0}'.".format(region_type)) else: self._raise_error("Not a valid CRTF line: '{0}'.".format(line)) return
python
def parse_line(self, line): """ Parses a single line. """ # Skip blanks if line == '': return # Skip comments if regex_comment.search(line): return # Special case / header: parse global parameters into metadata global_parameters = regex_global.search(line) if global_parameters: self.parse_global_meta(global_parameters.group('parameters')) return # Tries to check the validity of the line. crtf_line = regex_line.search(line) if crtf_line: # Tries to parse the line. # Finds info about the region. region = regex_region.search(crtf_line.group('region')) type_ = region.group('type') or 'reg' include = region.group('include') or '+' region_type = region.group('regiontype').lower() if region_type in self.valid_definition: helper = CRTFRegionParser(self.global_meta, include, type_, region_type, *crtf_line.group('region', 'parameters')) self.shapes.append(helper.shape) else: self._raise_error("Not a valid CRTF Region type: '{0}'.".format(region_type)) else: self._raise_error("Not a valid CRTF line: '{0}'.".format(line)) return
[ "def", "parse_line", "(", "self", ",", "line", ")", ":", "# Skip blanks", "if", "line", "==", "''", ":", "return", "# Skip comments", "if", "regex_comment", ".", "search", "(", "line", ")", ":", "return", "# Special case / header: parse global parameters into metadata", "global_parameters", "=", "regex_global", ".", "search", "(", "line", ")", "if", "global_parameters", ":", "self", ".", "parse_global_meta", "(", "global_parameters", ".", "group", "(", "'parameters'", ")", ")", "return", "# Tries to check the validity of the line.", "crtf_line", "=", "regex_line", ".", "search", "(", "line", ")", "if", "crtf_line", ":", "# Tries to parse the line.", "# Finds info about the region.", "region", "=", "regex_region", ".", "search", "(", "crtf_line", ".", "group", "(", "'region'", ")", ")", "type_", "=", "region", ".", "group", "(", "'type'", ")", "or", "'reg'", "include", "=", "region", ".", "group", "(", "'include'", ")", "or", "'+'", "region_type", "=", "region", ".", "group", "(", "'regiontype'", ")", ".", "lower", "(", ")", "if", "region_type", "in", "self", ".", "valid_definition", ":", "helper", "=", "CRTFRegionParser", "(", "self", ".", "global_meta", ",", "include", ",", "type_", ",", "region_type", ",", "*", "crtf_line", ".", "group", "(", "'region'", ",", "'parameters'", ")", ")", "self", ".", "shapes", ".", "append", "(", "helper", ".", "shape", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not a valid CRTF Region type: '{0}'.\"", ".", "format", "(", "region_type", ")", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not a valid CRTF line: '{0}'.\"", ".", "format", "(", "line", ")", ")", "return" ]
Parses a single line.
[ "Parses", "a", "single", "line", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L161-L199
train
232,724
astropy/regions
regions/io/crtf/read.py
CRTFRegionParser.parse
def parse(self): """ Starting point to parse the CRTF region string. """ self.convert_meta() self.coordsys = self.meta.get('coord', 'image').lower() self.set_coordsys() self.convert_coordinates() self.make_shape()
python
def parse(self): """ Starting point to parse the CRTF region string. """ self.convert_meta() self.coordsys = self.meta.get('coord', 'image').lower() self.set_coordsys() self.convert_coordinates() self.make_shape()
[ "def", "parse", "(", "self", ")", ":", "self", ".", "convert_meta", "(", ")", "self", ".", "coordsys", "=", "self", ".", "meta", ".", "get", "(", "'coord'", ",", "'image'", ")", ".", "lower", "(", ")", "self", ".", "set_coordsys", "(", ")", "self", ".", "convert_coordinates", "(", ")", "self", ".", "make_shape", "(", ")" ]
Starting point to parse the CRTF region string.
[ "Starting", "point", "to", "parse", "the", "CRTF", "region", "string", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L320-L329
train
232,725
astropy/regions
regions/io/crtf/read.py
CRTFRegionParser.set_coordsys
def set_coordsys(self): """ Mapping to astropy's coordinate system name # TODO: needs expert attention (Most reference systems are not mapped) """ if self.coordsys.lower() in self.coordsys_mapping: self.coordsys = self.coordsys_mapping[self.coordsys.lower()]
python
def set_coordsys(self): """ Mapping to astropy's coordinate system name # TODO: needs expert attention (Most reference systems are not mapped) """ if self.coordsys.lower() in self.coordsys_mapping: self.coordsys = self.coordsys_mapping[self.coordsys.lower()]
[ "def", "set_coordsys", "(", "self", ")", ":", "if", "self", ".", "coordsys", ".", "lower", "(", ")", "in", "self", ".", "coordsys_mapping", ":", "self", ".", "coordsys", "=", "self", ".", "coordsys_mapping", "[", "self", ".", "coordsys", ".", "lower", "(", ")", "]" ]
Mapping to astropy's coordinate system name # TODO: needs expert attention (Most reference systems are not mapped)
[ "Mapping", "to", "astropy", "s", "coordinate", "system", "name" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L331-L338
train
232,726
astropy/regions
regions/io/crtf/read.py
CRTFRegionParser.convert_coordinates
def convert_coordinates(self): """ Convert coordinate string to `~astropy.coordinates.Angle` or `~astropy.units.quantity.Quantity` objects """ coord_list_str = regex_coordinate.findall(self.reg_str) + regex_length.findall(self.reg_str) coord_list = [] if self.region_type == 'poly': if len(coord_list_str) < 4: self._raise_error('Not in proper format: {} polygon should have > 4 coordinates'.format(self.reg_str)) if coord_list_str[0] != coord_list_str[-1]: self._raise_error("Not in proper format: '{0}', " "In polygon, the last and first coordinates should be same".format(self.reg_str)) else: if len(coord_list_str) != len(self.language_spec[self.region_type]): self._raise_error("Not in proper format: '{0}', " "Does not contain expected number of parameters for the region '{1}'" .format(self.reg_str, self.region_type)) for attr_spec, val_str in zip(self.language_spec[self.region_type], coord_list_str): if attr_spec == 'c': if len(val_str) == 2 and val_str[1] != '': coord_list.append(CoordinateParser.parse_coordinate(val_str[0])) coord_list.append(CoordinateParser.parse_coordinate(val_str[1])) else: self._raise_error("Not in proper format: {0} should be a coordinate".format(val_str)) if attr_spec == 'pl': if len(val_str) == 2 and val_str[1] != '': coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[0])) coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[1])) else: self._raise_error("Not in proper format: {0} should be a pair of length".format(val_str)) if attr_spec == 'l': if isinstance(val_str, six.string_types): coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str)) else: self._raise_error("Not in proper format: {0} should be a single length".format(val_str)) if attr_spec == 's': if self.region_type == 'symbol': if val_str in valid_symbols: self.meta['symbol'] = val_str else: self._raise_error("Not in proper format: '{0}' should be a symbol".format(val_str)) elif self.region_type == 'text': 
self.meta['text'] = val_str[1:-1] self.coord = coord_list
python
def convert_coordinates(self): """ Convert coordinate string to `~astropy.coordinates.Angle` or `~astropy.units.quantity.Quantity` objects """ coord_list_str = regex_coordinate.findall(self.reg_str) + regex_length.findall(self.reg_str) coord_list = [] if self.region_type == 'poly': if len(coord_list_str) < 4: self._raise_error('Not in proper format: {} polygon should have > 4 coordinates'.format(self.reg_str)) if coord_list_str[0] != coord_list_str[-1]: self._raise_error("Not in proper format: '{0}', " "In polygon, the last and first coordinates should be same".format(self.reg_str)) else: if len(coord_list_str) != len(self.language_spec[self.region_type]): self._raise_error("Not in proper format: '{0}', " "Does not contain expected number of parameters for the region '{1}'" .format(self.reg_str, self.region_type)) for attr_spec, val_str in zip(self.language_spec[self.region_type], coord_list_str): if attr_spec == 'c': if len(val_str) == 2 and val_str[1] != '': coord_list.append(CoordinateParser.parse_coordinate(val_str[0])) coord_list.append(CoordinateParser.parse_coordinate(val_str[1])) else: self._raise_error("Not in proper format: {0} should be a coordinate".format(val_str)) if attr_spec == 'pl': if len(val_str) == 2 and val_str[1] != '': coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[0])) coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[1])) else: self._raise_error("Not in proper format: {0} should be a pair of length".format(val_str)) if attr_spec == 'l': if isinstance(val_str, six.string_types): coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str)) else: self._raise_error("Not in proper format: {0} should be a single length".format(val_str)) if attr_spec == 's': if self.region_type == 'symbol': if val_str in valid_symbols: self.meta['symbol'] = val_str else: self._raise_error("Not in proper format: '{0}' should be a symbol".format(val_str)) elif self.region_type == 'text': 
self.meta['text'] = val_str[1:-1] self.coord = coord_list
[ "def", "convert_coordinates", "(", "self", ")", ":", "coord_list_str", "=", "regex_coordinate", ".", "findall", "(", "self", ".", "reg_str", ")", "+", "regex_length", ".", "findall", "(", "self", ".", "reg_str", ")", "coord_list", "=", "[", "]", "if", "self", ".", "region_type", "==", "'poly'", ":", "if", "len", "(", "coord_list_str", ")", "<", "4", ":", "self", ".", "_raise_error", "(", "'Not in proper format: {} polygon should have > 4 coordinates'", ".", "format", "(", "self", ".", "reg_str", ")", ")", "if", "coord_list_str", "[", "0", "]", "!=", "coord_list_str", "[", "-", "1", "]", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: '{0}', \"", "\"In polygon, the last and first coordinates should be same\"", ".", "format", "(", "self", ".", "reg_str", ")", ")", "else", ":", "if", "len", "(", "coord_list_str", ")", "!=", "len", "(", "self", ".", "language_spec", "[", "self", ".", "region_type", "]", ")", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: '{0}', \"", "\"Does not contain expected number of parameters for the region '{1}'\"", ".", "format", "(", "self", ".", "reg_str", ",", "self", ".", "region_type", ")", ")", "for", "attr_spec", ",", "val_str", "in", "zip", "(", "self", ".", "language_spec", "[", "self", ".", "region_type", "]", ",", "coord_list_str", ")", ":", "if", "attr_spec", "==", "'c'", ":", "if", "len", "(", "val_str", ")", "==", "2", "and", "val_str", "[", "1", "]", "!=", "''", ":", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_coordinate", "(", "val_str", "[", "0", "]", ")", ")", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_coordinate", "(", "val_str", "[", "1", "]", ")", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: {0} should be a coordinate\"", ".", "format", "(", "val_str", ")", ")", "if", "attr_spec", "==", "'pl'", ":", "if", "len", "(", "val_str", ")", "==", "2", "and", "val_str", "[", "1", "]", "!=", "''", ":", "coord_list", ".", 
"append", "(", "CoordinateParser", ".", "parse_angular_length_quantity", "(", "val_str", "[", "0", "]", ")", ")", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_angular_length_quantity", "(", "val_str", "[", "1", "]", ")", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: {0} should be a pair of length\"", ".", "format", "(", "val_str", ")", ")", "if", "attr_spec", "==", "'l'", ":", "if", "isinstance", "(", "val_str", ",", "six", ".", "string_types", ")", ":", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_angular_length_quantity", "(", "val_str", ")", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: {0} should be a single length\"", ".", "format", "(", "val_str", ")", ")", "if", "attr_spec", "==", "'s'", ":", "if", "self", ".", "region_type", "==", "'symbol'", ":", "if", "val_str", "in", "valid_symbols", ":", "self", ".", "meta", "[", "'symbol'", "]", "=", "val_str", "else", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: '{0}' should be a symbol\"", ".", "format", "(", "val_str", ")", ")", "elif", "self", ".", "region_type", "==", "'text'", ":", "self", ".", "meta", "[", "'text'", "]", "=", "val_str", "[", "1", ":", "-", "1", "]", "self", ".", "coord", "=", "coord_list" ]
Convert coordinate string to `~astropy.coordinates.Angle` or `~astropy.units.quantity.Quantity` objects
[ "Convert", "coordinate", "string", "to", "~astropy", ".", "coordinates", ".", "Angle", "or", "~astropy", ".", "units", ".", "quantity", ".", "Quantity", "objects" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L340-L387
train
232,727
astropy/regions
regions/io/crtf/read.py
CRTFRegionParser.convert_meta
def convert_meta(self): """ Parses the meta_str to python dictionary and stores in ``meta`` attribute. """ if self.meta_str: self.meta_str = regex_meta.findall(self.meta_str + ',') if self.meta_str: for par in self.meta_str: if par[0] is not '': val1 = par[0] val2 = par[1] else: val1 = par[2] val2 = par[3] val1 = val1.strip() val2 = val2.strip() if val1 in CRTFParser.valid_global_keys or val1 == 'label': if val1 in ('range', 'corr', 'labeloff'): val2 = val2.split(',') val2 = [x.strip() for x in val2] self.meta[val1] = val2 else: self._raise_error("'{0}' is not a valid meta key".format(val1)) self.meta['include'] = self.include != '-' self.include = self.meta['include'] if 'range' in self.meta: self.meta['range'] = [u.Quantity(x) for x in self.meta['range']] self.meta['type'] = self.type_
python
def convert_meta(self): """ Parses the meta_str to python dictionary and stores in ``meta`` attribute. """ if self.meta_str: self.meta_str = regex_meta.findall(self.meta_str + ',') if self.meta_str: for par in self.meta_str: if par[0] is not '': val1 = par[0] val2 = par[1] else: val1 = par[2] val2 = par[3] val1 = val1.strip() val2 = val2.strip() if val1 in CRTFParser.valid_global_keys or val1 == 'label': if val1 in ('range', 'corr', 'labeloff'): val2 = val2.split(',') val2 = [x.strip() for x in val2] self.meta[val1] = val2 else: self._raise_error("'{0}' is not a valid meta key".format(val1)) self.meta['include'] = self.include != '-' self.include = self.meta['include'] if 'range' in self.meta: self.meta['range'] = [u.Quantity(x) for x in self.meta['range']] self.meta['type'] = self.type_
[ "def", "convert_meta", "(", "self", ")", ":", "if", "self", ".", "meta_str", ":", "self", ".", "meta_str", "=", "regex_meta", ".", "findall", "(", "self", ".", "meta_str", "+", "','", ")", "if", "self", ".", "meta_str", ":", "for", "par", "in", "self", ".", "meta_str", ":", "if", "par", "[", "0", "]", "is", "not", "''", ":", "val1", "=", "par", "[", "0", "]", "val2", "=", "par", "[", "1", "]", "else", ":", "val1", "=", "par", "[", "2", "]", "val2", "=", "par", "[", "3", "]", "val1", "=", "val1", ".", "strip", "(", ")", "val2", "=", "val2", ".", "strip", "(", ")", "if", "val1", "in", "CRTFParser", ".", "valid_global_keys", "or", "val1", "==", "'label'", ":", "if", "val1", "in", "(", "'range'", ",", "'corr'", ",", "'labeloff'", ")", ":", "val2", "=", "val2", ".", "split", "(", "','", ")", "val2", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "val2", "]", "self", ".", "meta", "[", "val1", "]", "=", "val2", "else", ":", "self", ".", "_raise_error", "(", "\"'{0}' is not a valid meta key\"", ".", "format", "(", "val1", ")", ")", "self", ".", "meta", "[", "'include'", "]", "=", "self", ".", "include", "!=", "'-'", "self", ".", "include", "=", "self", ".", "meta", "[", "'include'", "]", "if", "'range'", "in", "self", ".", "meta", ":", "self", ".", "meta", "[", "'range'", "]", "=", "[", "u", ".", "Quantity", "(", "x", ")", "for", "x", "in", "self", ".", "meta", "[", "'range'", "]", "]", "self", ".", "meta", "[", "'type'", "]", "=", "self", ".", "type_" ]
Parses the meta_str to python dictionary and stores in ``meta`` attribute.
[ "Parses", "the", "meta_str", "to", "python", "dictionary", "and", "stores", "in", "meta", "attribute", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L389-L419
train
232,728
astropy/regions
regions/io/fits/write.py
fits_region_objects_to_table
def fits_region_objects_to_table(regions): """ Converts list of regions to FITS region table. Parameters ---------- regions : list List of `regions.Region` objects Returns ------- region_string : `~astropy.table.Table` FITS region table Examples -------- >>> from regions import CirclePixelRegion, PixCoord >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> table = fits_region_objects_to_table([reg_pixel]) >>> print(table) X [1] Y [1] SHAPE R [4] ROTANG COMPONENT pix pix pix deg ----- ----- ------ ---------- ------ --------- 1.0 2.0 circle 5.0 .. 0.0 0 1 """ for reg in regions: if isinstance(reg, SkyRegion): raise TypeError('Every region must be a pixel region'.format(reg)) shape_list = to_shape_list(regions, coordinate_system='image') return shape_list.to_fits()
python
def fits_region_objects_to_table(regions): """ Converts list of regions to FITS region table. Parameters ---------- regions : list List of `regions.Region` objects Returns ------- region_string : `~astropy.table.Table` FITS region table Examples -------- >>> from regions import CirclePixelRegion, PixCoord >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> table = fits_region_objects_to_table([reg_pixel]) >>> print(table) X [1] Y [1] SHAPE R [4] ROTANG COMPONENT pix pix pix deg ----- ----- ------ ---------- ------ --------- 1.0 2.0 circle 5.0 .. 0.0 0 1 """ for reg in regions: if isinstance(reg, SkyRegion): raise TypeError('Every region must be a pixel region'.format(reg)) shape_list = to_shape_list(regions, coordinate_system='image') return shape_list.to_fits()
[ "def", "fits_region_objects_to_table", "(", "regions", ")", ":", "for", "reg", "in", "regions", ":", "if", "isinstance", "(", "reg", ",", "SkyRegion", ")", ":", "raise", "TypeError", "(", "'Every region must be a pixel region'", ".", "format", "(", "reg", ")", ")", "shape_list", "=", "to_shape_list", "(", "regions", ",", "coordinate_system", "=", "'image'", ")", "return", "shape_list", ".", "to_fits", "(", ")" ]
Converts list of regions to FITS region table. Parameters ---------- regions : list List of `regions.Region` objects Returns ------- region_string : `~astropy.table.Table` FITS region table Examples -------- >>> from regions import CirclePixelRegion, PixCoord >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> table = fits_region_objects_to_table([reg_pixel]) >>> print(table) X [1] Y [1] SHAPE R [4] ROTANG COMPONENT pix pix pix deg ----- ----- ------ ---------- ------ --------- 1.0 2.0 circle 5.0 .. 0.0 0 1
[ "Converts", "list", "of", "regions", "to", "FITS", "region", "table", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/fits/write.py#L15-L47
train
232,729
astropy/regions
regions/io/fits/write.py
write_fits_region
def write_fits_region(filename, regions, header=None): """ Converts list of regions to FITS region table and write to a file. Parameters ---------- filename: str Filename in which the table is to be written. Default is 'new.fits' regions: list List of `regions.Region` objects header: `~astropy.io.fits.header.Header` object The FITS header. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from astropy.io import fits >>> file_sample = get_pkg_data_filename('data/fits_region.fits', package='regions.io.fits.tests') >>> from regions import CirclePixelRegion, PixCoord, write_fits_region >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> hdul = fits.open(file_sample) >>> write_fits_region('region_output.fits', regions=[reg_pixel], header=hdul[1].header) """ output = fits_region_objects_to_table(regions) bin_table = fits.BinTableHDU(data=output, header=header) bin_table.writeto(filename)
python
def write_fits_region(filename, regions, header=None): """ Converts list of regions to FITS region table and write to a file. Parameters ---------- filename: str Filename in which the table is to be written. Default is 'new.fits' regions: list List of `regions.Region` objects header: `~astropy.io.fits.header.Header` object The FITS header. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from astropy.io import fits >>> file_sample = get_pkg_data_filename('data/fits_region.fits', package='regions.io.fits.tests') >>> from regions import CirclePixelRegion, PixCoord, write_fits_region >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> hdul = fits.open(file_sample) >>> write_fits_region('region_output.fits', regions=[reg_pixel], header=hdul[1].header) """ output = fits_region_objects_to_table(regions) bin_table = fits.BinTableHDU(data=output, header=header) bin_table.writeto(filename)
[ "def", "write_fits_region", "(", "filename", ",", "regions", ",", "header", "=", "None", ")", ":", "output", "=", "fits_region_objects_to_table", "(", "regions", ")", "bin_table", "=", "fits", ".", "BinTableHDU", "(", "data", "=", "output", ",", "header", "=", "header", ")", "bin_table", ".", "writeto", "(", "filename", ")" ]
Converts list of regions to FITS region table and write to a file. Parameters ---------- filename: str Filename in which the table is to be written. Default is 'new.fits' regions: list List of `regions.Region` objects header: `~astropy.io.fits.header.Header` object The FITS header. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from astropy.io import fits >>> file_sample = get_pkg_data_filename('data/fits_region.fits', package='regions.io.fits.tests') >>> from regions import CirclePixelRegion, PixCoord, write_fits_region >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> hdul = fits.open(file_sample) >>> write_fits_region('region_output.fits', regions=[reg_pixel], header=hdul[1].header)
[ "Converts", "list", "of", "regions", "to", "FITS", "region", "table", "and", "write", "to", "a", "file", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/fits/write.py#L50-L78
train
232,730
astropy/regions
regions/_utils/examples.py
make_example_dataset
def make_example_dataset(data='simulated', config=None): """Make example dataset. This is a factory function for ``ExampleDataset`` objects. The following config options are available (default values shown): * ``crval = 0, 0`` * ``crpix = 180, 90`` * ``cdelt = -1, 1`` * ``shape = 180, 360`` * ``ctype = 'GLON-AIT', 'GLAT-AIT'`` Parameters ---------- data : {'simulated', 'fermi'} Which dataset to use config : dict or None Configuration options Returns ------- dataset : ``ExampleDataset`` Example dataset object Examples -------- Make an example dataset: >>> from regions import make_example_dataset >>> config = dict(crpix=(18, 9), cdelt=(-10, 10), shape=(18, 36)) >>> dataset = make_example_dataset(data='simulated', config=config) Access properties of the ``dataset`` object: >>> dataset.source_table >>> dataset.event_table >>> ExampleDataset.wcs >>> ExampleDataset.image >>> ExampleDataset.hdu_list """ if data == 'simulated': return ExampleDatasetSimulated(config=config) elif data == 'fermi': return ExampleDatasetFermi(config=config) else: raise ValueError('Invalid selection data: {}'.format(data))
python
def make_example_dataset(data='simulated', config=None): """Make example dataset. This is a factory function for ``ExampleDataset`` objects. The following config options are available (default values shown): * ``crval = 0, 0`` * ``crpix = 180, 90`` * ``cdelt = -1, 1`` * ``shape = 180, 360`` * ``ctype = 'GLON-AIT', 'GLAT-AIT'`` Parameters ---------- data : {'simulated', 'fermi'} Which dataset to use config : dict or None Configuration options Returns ------- dataset : ``ExampleDataset`` Example dataset object Examples -------- Make an example dataset: >>> from regions import make_example_dataset >>> config = dict(crpix=(18, 9), cdelt=(-10, 10), shape=(18, 36)) >>> dataset = make_example_dataset(data='simulated', config=config) Access properties of the ``dataset`` object: >>> dataset.source_table >>> dataset.event_table >>> ExampleDataset.wcs >>> ExampleDataset.image >>> ExampleDataset.hdu_list """ if data == 'simulated': return ExampleDatasetSimulated(config=config) elif data == 'fermi': return ExampleDatasetFermi(config=config) else: raise ValueError('Invalid selection data: {}'.format(data))
[ "def", "make_example_dataset", "(", "data", "=", "'simulated'", ",", "config", "=", "None", ")", ":", "if", "data", "==", "'simulated'", ":", "return", "ExampleDatasetSimulated", "(", "config", "=", "config", ")", "elif", "data", "==", "'fermi'", ":", "return", "ExampleDatasetFermi", "(", "config", "=", "config", ")", "else", ":", "raise", "ValueError", "(", "'Invalid selection data: {}'", ".", "format", "(", "data", ")", ")" ]
Make example dataset. This is a factory function for ``ExampleDataset`` objects. The following config options are available (default values shown): * ``crval = 0, 0`` * ``crpix = 180, 90`` * ``cdelt = -1, 1`` * ``shape = 180, 360`` * ``ctype = 'GLON-AIT', 'GLAT-AIT'`` Parameters ---------- data : {'simulated', 'fermi'} Which dataset to use config : dict or None Configuration options Returns ------- dataset : ``ExampleDataset`` Example dataset object Examples -------- Make an example dataset: >>> from regions import make_example_dataset >>> config = dict(crpix=(18, 9), cdelt=(-10, 10), shape=(18, 36)) >>> dataset = make_example_dataset(data='simulated', config=config) Access properties of the ``dataset`` object: >>> dataset.source_table >>> dataset.event_table >>> ExampleDataset.wcs >>> ExampleDataset.image >>> ExampleDataset.hdu_list
[ "Make", "example", "dataset", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/_utils/examples.py#L17-L64
train
232,731
astropy/regions
regions/_utils/examples.py
_table_to_bintable
def _table_to_bintable(table): """Convert `~astropy.table.Table` to `astropy.io.fits.BinTable`.""" data = table.as_array() header = fits.Header() header.update(table.meta) name = table.meta.pop('name', None) return fits.BinTableHDU(data, header, name=name)
python
def _table_to_bintable(table): """Convert `~astropy.table.Table` to `astropy.io.fits.BinTable`.""" data = table.as_array() header = fits.Header() header.update(table.meta) name = table.meta.pop('name', None) return fits.BinTableHDU(data, header, name=name)
[ "def", "_table_to_bintable", "(", "table", ")", ":", "data", "=", "table", ".", "as_array", "(", ")", "header", "=", "fits", ".", "Header", "(", ")", "header", ".", "update", "(", "table", ".", "meta", ")", "name", "=", "table", ".", "meta", ".", "pop", "(", "'name'", ",", "None", ")", "return", "fits", ".", "BinTableHDU", "(", "data", ",", "header", ",", "name", "=", "name", ")" ]
Convert `~astropy.table.Table` to `astropy.io.fits.BinTable`.
[ "Convert", "~astropy", ".", "table", ".", "Table", "to", "astropy", ".", "io", ".", "fits", ".", "BinTable", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/_utils/examples.py#L223-L229
train
232,732
astropy/regions
regions/io/ds9/read.py
read_ds9
def read_ds9(filename, errors='strict'): """ Read a DS9 region file in as a `list` of `~regions.Region` objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.DS9RegionParserError`. ``warn`` will raise a `~regions.DS9RegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python list of `~regions.Region` objects. Examples -------- >>> from regions import read_ds9 >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/physical_reference.reg', package='regions.io.ds9.tests') >>> regs = read_ds9(file, errors='warn') >>> print(regs[0]) Region: CirclePixelRegion center: PixCoord(x=330.0, y=1090.0) radius: 40.0 >>> print(regs[0].meta) {'label': 'Circle', 'select': '1', 'highlite': '1', 'fixed': '0', 'edit': '1', 'move': '1', 'delete': '1', 'source': '1', 'tag': ['{foo}', '{foo bar}'], 'include': True} >>> print(regs[0].visual) {'dashlist': '8 3', 'dash': '0', 'color': 'pink', 'linewidth': '3', 'font': 'times', 'fontsize': '10', 'fontstyle': 'normal', 'fontweight': 'roman'} """ with open(filename) as fh: region_string = fh.read() parser = DS9Parser(region_string, errors=errors) return parser.shapes.to_regions()
python
def read_ds9(filename, errors='strict'): """ Read a DS9 region file in as a `list` of `~regions.Region` objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.DS9RegionParserError`. ``warn`` will raise a `~regions.DS9RegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python list of `~regions.Region` objects. Examples -------- >>> from regions import read_ds9 >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/physical_reference.reg', package='regions.io.ds9.tests') >>> regs = read_ds9(file, errors='warn') >>> print(regs[0]) Region: CirclePixelRegion center: PixCoord(x=330.0, y=1090.0) radius: 40.0 >>> print(regs[0].meta) {'label': 'Circle', 'select': '1', 'highlite': '1', 'fixed': '0', 'edit': '1', 'move': '1', 'delete': '1', 'source': '1', 'tag': ['{foo}', '{foo bar}'], 'include': True} >>> print(regs[0].visual) {'dashlist': '8 3', 'dash': '0', 'color': 'pink', 'linewidth': '3', 'font': 'times', 'fontsize': '10', 'fontstyle': 'normal', 'fontweight': 'roman'} """ with open(filename) as fh: region_string = fh.read() parser = DS9Parser(region_string, errors=errors) return parser.shapes.to_regions()
[ "def", "read_ds9", "(", "filename", ",", "errors", "=", "'strict'", ")", ":", "with", "open", "(", "filename", ")", "as", "fh", ":", "region_string", "=", "fh", ".", "read", "(", ")", "parser", "=", "DS9Parser", "(", "region_string", ",", "errors", "=", "errors", ")", "return", "parser", ".", "shapes", ".", "to_regions", "(", ")" ]
Read a DS9 region file in as a `list` of `~regions.Region` objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.DS9RegionParserError`. ``warn`` will raise a `~regions.DS9RegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python list of `~regions.Region` objects. Examples -------- >>> from regions import read_ds9 >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/physical_reference.reg', package='regions.io.ds9.tests') >>> regs = read_ds9(file, errors='warn') >>> print(regs[0]) Region: CirclePixelRegion center: PixCoord(x=330.0, y=1090.0) radius: 40.0 >>> print(regs[0].meta) {'label': 'Circle', 'select': '1', 'highlite': '1', 'fixed': '0', 'edit': '1', 'move': '1', 'delete': '1', 'source': '1', 'tag': ['{foo}', '{foo bar}'], 'include': True} >>> print(regs[0].visual) {'dashlist': '8 3', 'dash': '0', 'color': 'pink', 'linewidth': '3', 'font': 'times', 'fontsize': '10', 'fontstyle': 'normal', 'fontweight': 'roman'}
[ "Read", "a", "DS9", "region", "file", "in", "as", "a", "list", "of", "~regions", ".", "Region", "objects", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L38-L77
train
232,733
astropy/regions
regions/io/ds9/read.py
DS9Parser.set_coordsys
def set_coordsys(self, coordsys): """ Transform coordinate system # TODO: needs expert attention """ if coordsys in self.coordsys_mapping: self.coordsys = self.coordsys_mapping[coordsys] else: self.coordsys = coordsys
python
def set_coordsys(self, coordsys): """ Transform coordinate system # TODO: needs expert attention """ if coordsys in self.coordsys_mapping: self.coordsys = self.coordsys_mapping[coordsys] else: self.coordsys = coordsys
[ "def", "set_coordsys", "(", "self", ",", "coordsys", ")", ":", "if", "coordsys", "in", "self", ".", "coordsys_mapping", ":", "self", ".", "coordsys", "=", "self", ".", "coordsys_mapping", "[", "coordsys", "]", "else", ":", "self", ".", "coordsys", "=", "coordsys" ]
Transform coordinate system # TODO: needs expert attention
[ "Transform", "coordinate", "system" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L215-L224
train
232,734
astropy/regions
regions/io/ds9/read.py
DS9Parser.run
def run(self): """ Run all steps """ for line_ in self.region_string.split('\n'): for line in line_.split(";"): self.parse_line(line) log.debug('Global state: {}'.format(self))
python
def run(self): """ Run all steps """ for line_ in self.region_string.split('\n'): for line in line_.split(";"): self.parse_line(line) log.debug('Global state: {}'.format(self))
[ "def", "run", "(", "self", ")", ":", "for", "line_", "in", "self", ".", "region_string", ".", "split", "(", "'\\n'", ")", ":", "for", "line", "in", "line_", ".", "split", "(", "\";\"", ")", ":", "self", ".", "parse_line", "(", "line", ")", "log", ".", "debug", "(", "'Global state: {}'", ".", "format", "(", "self", ")", ")" ]
Run all steps
[ "Run", "all", "steps" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L226-L233
train
232,735
astropy/regions
regions/io/ds9/read.py
DS9Parser.parse_meta
def parse_meta(meta_str): """ Parse the metadata for a single ds9 region string. Parameters ---------- meta_str : `str` Meta string, the metadata is everything after the close-paren of the region coordinate specification. All metadata is specified as key=value pairs separated by whitespace, but sometimes the values can also be whitespace separated. Returns ------- meta : `~collections.OrderedDict` Dictionary containing the meta data """ keys_vals = [(x, y) for x, _, y in regex_meta.findall(meta_str.strip())] extra_text = regex_meta.split(meta_str.strip())[-1] result = OrderedDict() for key, val in keys_vals: # regex can include trailing whitespace or inverted commas # remove it val = val.strip().strip("'").strip('"') if key == 'text': val = val.lstrip("{").rstrip("}") if key in result: if key == 'tag': result[key].append(val) else: raise ValueError("Duplicate key {0} found".format(key)) else: if key == 'tag': result[key] = [val] else: result[key] = val if extra_text: result['comment'] = extra_text return result
python
def parse_meta(meta_str): """ Parse the metadata for a single ds9 region string. Parameters ---------- meta_str : `str` Meta string, the metadata is everything after the close-paren of the region coordinate specification. All metadata is specified as key=value pairs separated by whitespace, but sometimes the values can also be whitespace separated. Returns ------- meta : `~collections.OrderedDict` Dictionary containing the meta data """ keys_vals = [(x, y) for x, _, y in regex_meta.findall(meta_str.strip())] extra_text = regex_meta.split(meta_str.strip())[-1] result = OrderedDict() for key, val in keys_vals: # regex can include trailing whitespace or inverted commas # remove it val = val.strip().strip("'").strip('"') if key == 'text': val = val.lstrip("{").rstrip("}") if key in result: if key == 'tag': result[key].append(val) else: raise ValueError("Duplicate key {0} found".format(key)) else: if key == 'tag': result[key] = [val] else: result[key] = val if extra_text: result['comment'] = extra_text return result
[ "def", "parse_meta", "(", "meta_str", ")", ":", "keys_vals", "=", "[", "(", "x", ",", "y", ")", "for", "x", ",", "_", ",", "y", "in", "regex_meta", ".", "findall", "(", "meta_str", ".", "strip", "(", ")", ")", "]", "extra_text", "=", "regex_meta", ".", "split", "(", "meta_str", ".", "strip", "(", ")", ")", "[", "-", "1", "]", "result", "=", "OrderedDict", "(", ")", "for", "key", ",", "val", "in", "keys_vals", ":", "# regex can include trailing whitespace or inverted commas", "# remove it", "val", "=", "val", ".", "strip", "(", ")", ".", "strip", "(", "\"'\"", ")", ".", "strip", "(", "'\"'", ")", "if", "key", "==", "'text'", ":", "val", "=", "val", ".", "lstrip", "(", "\"{\"", ")", ".", "rstrip", "(", "\"}\"", ")", "if", "key", "in", "result", ":", "if", "key", "==", "'tag'", ":", "result", "[", "key", "]", ".", "append", "(", "val", ")", "else", ":", "raise", "ValueError", "(", "\"Duplicate key {0} found\"", ".", "format", "(", "key", ")", ")", "else", ":", "if", "key", "==", "'tag'", ":", "result", "[", "key", "]", "=", "[", "val", "]", "else", ":", "result", "[", "key", "]", "=", "val", "if", "extra_text", ":", "result", "[", "'comment'", "]", "=", "extra_text", "return", "result" ]
Parse the metadata for a single ds9 region string. Parameters ---------- meta_str : `str` Meta string, the metadata is everything after the close-paren of the region coordinate specification. All metadata is specified as key=value pairs separated by whitespace, but sometimes the values can also be whitespace separated. Returns ------- meta : `~collections.OrderedDict` Dictionary containing the meta data
[ "Parse", "the", "metadata", "for", "a", "single", "ds9", "region", "string", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L288-L327
train
232,736
astropy/regions
regions/io/ds9/read.py
DS9Parser.parse_region
def parse_region(self, include, region_type, region_end, line): """ Extract a Shape from a region string """ if self.coordsys is None: raise DS9RegionParserError("No coordinate system specified and a" " region has been found.") else: helper = DS9RegionParser(coordsys=self.coordsys, include=include, region_type=region_type, region_end=region_end, global_meta=self.global_meta, line=line) helper.parse() self.shapes.append(helper.shape)
python
def parse_region(self, include, region_type, region_end, line): """ Extract a Shape from a region string """ if self.coordsys is None: raise DS9RegionParserError("No coordinate system specified and a" " region has been found.") else: helper = DS9RegionParser(coordsys=self.coordsys, include=include, region_type=region_type, region_end=region_end, global_meta=self.global_meta, line=line) helper.parse() self.shapes.append(helper.shape)
[ "def", "parse_region", "(", "self", ",", "include", ",", "region_type", ",", "region_end", ",", "line", ")", ":", "if", "self", ".", "coordsys", "is", "None", ":", "raise", "DS9RegionParserError", "(", "\"No coordinate system specified and a\"", "\" region has been found.\"", ")", "else", ":", "helper", "=", "DS9RegionParser", "(", "coordsys", "=", "self", ".", "coordsys", ",", "include", "=", "include", ",", "region_type", "=", "region_type", ",", "region_end", "=", "region_end", ",", "global_meta", "=", "self", ".", "global_meta", ",", "line", "=", "line", ")", "helper", ".", "parse", "(", ")", "self", ".", "shapes", ".", "append", "(", "helper", ".", "shape", ")" ]
Extract a Shape from a region string
[ "Extract", "a", "Shape", "from", "a", "region", "string" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L329-L344
train
232,737
astropy/regions
regions/io/ds9/read.py
DS9RegionParser.parse
def parse(self): """ Convert line to shape object """ log.debug(self) self.parse_composite() self.split_line() self.convert_coordinates() self.convert_meta() self.make_shape() log.debug(self)
python
def parse(self): """ Convert line to shape object """ log.debug(self) self.parse_composite() self.split_line() self.convert_coordinates() self.convert_meta() self.make_shape() log.debug(self)
[ "def", "parse", "(", "self", ")", ":", "log", ".", "debug", "(", "self", ")", "self", ".", "parse_composite", "(", ")", "self", ".", "split_line", "(", ")", "self", ".", "convert_coordinates", "(", ")", "self", ".", "convert_meta", "(", ")", "self", ".", "make_shape", "(", ")", "log", ".", "debug", "(", "self", ")" ]
Convert line to shape object
[ "Convert", "line", "to", "shape", "object" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L431-L442
train
232,738
astropy/regions
regions/io/ds9/read.py
DS9RegionParser.split_line
def split_line(self): """ Split line into coordinates and meta string """ # coordinate of the # symbol or end of the line (-1) if not found hash_or_end = self.line.find("#") temp = self.line[self.region_end:hash_or_end].strip(" |") self.coord_str = regex_paren.sub("", temp) # don't want any meta_str if there is no metadata found if hash_or_end >= 0: self.meta_str = self.line[hash_or_end:] else: self.meta_str = ""
python
def split_line(self): """ Split line into coordinates and meta string """ # coordinate of the # symbol or end of the line (-1) if not found hash_or_end = self.line.find("#") temp = self.line[self.region_end:hash_or_end].strip(" |") self.coord_str = regex_paren.sub("", temp) # don't want any meta_str if there is no metadata found if hash_or_end >= 0: self.meta_str = self.line[hash_or_end:] else: self.meta_str = ""
[ "def", "split_line", "(", "self", ")", ":", "# coordinate of the # symbol or end of the line (-1) if not found", "hash_or_end", "=", "self", ".", "line", ".", "find", "(", "\"#\"", ")", "temp", "=", "self", ".", "line", "[", "self", ".", "region_end", ":", "hash_or_end", "]", ".", "strip", "(", "\" |\"", ")", "self", ".", "coord_str", "=", "regex_paren", ".", "sub", "(", "\"\"", ",", "temp", ")", "# don't want any meta_str if there is no metadata found", "if", "hash_or_end", ">=", "0", ":", "self", ".", "meta_str", "=", "self", ".", "line", "[", "hash_or_end", ":", "]", "else", ":", "self", ".", "meta_str", "=", "\"\"" ]
Split line into coordinates and meta string
[ "Split", "line", "into", "coordinates", "and", "meta", "string" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L450-L463
train
232,739
astropy/regions
regions/io/ds9/read.py
DS9RegionParser.convert_coordinates
def convert_coordinates(self): """ Convert coordinate string to objects """ coord_list = [] # strip out "null" elements, i.e. ''. It might be possible to eliminate # these some other way, i.e. with regex directly, but I don't know how. # We need to copy in order not to burn up the iterators elements = [x for x in regex_splitter.split(self.coord_str) if x] element_parsers = self.language_spec[self.region_type] for ii, (element, element_parser) in enumerate(zip(elements, element_parsers)): if element_parser is coordinate: unit = self.coordinate_units[self.coordsys][ii % 2] coord_list.append(element_parser(element, unit)) elif self.coordinate_units[self.coordsys][0] is u.dimensionless_unscaled: coord_list.append(element_parser(element, unit=u.dimensionless_unscaled)) else: coord_list.append(element_parser(element)) if self.region_type in ['ellipse', 'box'] and len(coord_list) % 2 == 1: coord_list[-1] = CoordinateParser.parse_angular_length_quantity(elements[len(coord_list)-1]) # Reset iterator for ellipse and annulus # Note that this cannot be done with copy.deepcopy on python2 if self.region_type in ['ellipse', 'annulus']: self.language_spec[self.region_type] = itertools.chain( (coordinate, coordinate), itertools.cycle((radius,))) self.coord = coord_list
python
def convert_coordinates(self): """ Convert coordinate string to objects """ coord_list = [] # strip out "null" elements, i.e. ''. It might be possible to eliminate # these some other way, i.e. with regex directly, but I don't know how. # We need to copy in order not to burn up the iterators elements = [x for x in regex_splitter.split(self.coord_str) if x] element_parsers = self.language_spec[self.region_type] for ii, (element, element_parser) in enumerate(zip(elements, element_parsers)): if element_parser is coordinate: unit = self.coordinate_units[self.coordsys][ii % 2] coord_list.append(element_parser(element, unit)) elif self.coordinate_units[self.coordsys][0] is u.dimensionless_unscaled: coord_list.append(element_parser(element, unit=u.dimensionless_unscaled)) else: coord_list.append(element_parser(element)) if self.region_type in ['ellipse', 'box'] and len(coord_list) % 2 == 1: coord_list[-1] = CoordinateParser.parse_angular_length_quantity(elements[len(coord_list)-1]) # Reset iterator for ellipse and annulus # Note that this cannot be done with copy.deepcopy on python2 if self.region_type in ['ellipse', 'annulus']: self.language_spec[self.region_type] = itertools.chain( (coordinate, coordinate), itertools.cycle((radius,))) self.coord = coord_list
[ "def", "convert_coordinates", "(", "self", ")", ":", "coord_list", "=", "[", "]", "# strip out \"null\" elements, i.e. ''. It might be possible to eliminate", "# these some other way, i.e. with regex directly, but I don't know how.", "# We need to copy in order not to burn up the iterators", "elements", "=", "[", "x", "for", "x", "in", "regex_splitter", ".", "split", "(", "self", ".", "coord_str", ")", "if", "x", "]", "element_parsers", "=", "self", ".", "language_spec", "[", "self", ".", "region_type", "]", "for", "ii", ",", "(", "element", ",", "element_parser", ")", "in", "enumerate", "(", "zip", "(", "elements", ",", "element_parsers", ")", ")", ":", "if", "element_parser", "is", "coordinate", ":", "unit", "=", "self", ".", "coordinate_units", "[", "self", ".", "coordsys", "]", "[", "ii", "%", "2", "]", "coord_list", ".", "append", "(", "element_parser", "(", "element", ",", "unit", ")", ")", "elif", "self", ".", "coordinate_units", "[", "self", ".", "coordsys", "]", "[", "0", "]", "is", "u", ".", "dimensionless_unscaled", ":", "coord_list", ".", "append", "(", "element_parser", "(", "element", ",", "unit", "=", "u", ".", "dimensionless_unscaled", ")", ")", "else", ":", "coord_list", ".", "append", "(", "element_parser", "(", "element", ")", ")", "if", "self", ".", "region_type", "in", "[", "'ellipse'", ",", "'box'", "]", "and", "len", "(", "coord_list", ")", "%", "2", "==", "1", ":", "coord_list", "[", "-", "1", "]", "=", "CoordinateParser", ".", "parse_angular_length_quantity", "(", "elements", "[", "len", "(", "coord_list", ")", "-", "1", "]", ")", "# Reset iterator for ellipse and annulus", "# Note that this cannot be done with copy.deepcopy on python2", "if", "self", ".", "region_type", "in", "[", "'ellipse'", ",", "'annulus'", "]", ":", "self", ".", "language_spec", "[", "self", ".", "region_type", "]", "=", "itertools", ".", "chain", "(", "(", "coordinate", ",", "coordinate", ")", ",", "itertools", ".", "cycle", "(", "(", "radius", ",", ")", ")", ")", 
"self", ".", "coord", "=", "coord_list" ]
Convert coordinate string to objects
[ "Convert", "coordinate", "string", "to", "objects" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L465-L494
train
232,740
astropy/regions
regions/io/ds9/read.py
DS9RegionParser.convert_meta
def convert_meta(self): """ Convert meta string to dict """ meta_ = DS9Parser.parse_meta(self.meta_str) self.meta = copy.deepcopy(self.global_meta) self.meta.update(meta_) # the 'include' is not part of the metadata string; # it is pre-parsed as part of the shape type and should always # override the global one self.include = self.meta.get('include', True) if self.include == '' else self.include != '-' self.meta['include'] = self.include
python
def convert_meta(self): """ Convert meta string to dict """ meta_ = DS9Parser.parse_meta(self.meta_str) self.meta = copy.deepcopy(self.global_meta) self.meta.update(meta_) # the 'include' is not part of the metadata string; # it is pre-parsed as part of the shape type and should always # override the global one self.include = self.meta.get('include', True) if self.include == '' else self.include != '-' self.meta['include'] = self.include
[ "def", "convert_meta", "(", "self", ")", ":", "meta_", "=", "DS9Parser", ".", "parse_meta", "(", "self", ".", "meta_str", ")", "self", ".", "meta", "=", "copy", ".", "deepcopy", "(", "self", ".", "global_meta", ")", "self", ".", "meta", ".", "update", "(", "meta_", ")", "# the 'include' is not part of the metadata string;", "# it is pre-parsed as part of the shape type and should always", "# override the global one", "self", ".", "include", "=", "self", ".", "meta", ".", "get", "(", "'include'", ",", "True", ")", "if", "self", ".", "include", "==", "''", "else", "self", ".", "include", "!=", "'-'", "self", ".", "meta", "[", "'include'", "]", "=", "self", ".", "include" ]
Convert meta string to dict
[ "Convert", "meta", "string", "to", "dict" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L496-L507
train
232,741
astropy/regions
regions/core/pixcoord.py
PixCoord._validate
def _validate(val, name, expected='any'): """Validate that a given object is an appropriate `PixCoord`. This is used for input validation throughout the regions package, especially in the `__init__` method of pixel region classes. Parameters ---------- val : `PixCoord` The object to check name : str Parameter name (used for error messages) expected : {'any', 'scalar', 'not scalar'} What kind of PixCoord to check for Returns ------- val : `PixCoord` The input object (at the moment unmodified, might do fix-ups here later) """ if not isinstance(val, PixCoord): raise TypeError('{} must be a PixCoord'.format(name)) if expected == 'any': pass elif expected == 'scalar': if not val.isscalar: raise ValueError('{} must be a scalar PixCoord'.format(name)) elif expected == 'not scalar': if val.isscalar: raise ValueError('{} must be a non-scalar PixCoord'.format(name)) else: raise ValueError('Invalid argument for `expected`: {}'.format(expected)) return val
python
def _validate(val, name, expected='any'): """Validate that a given object is an appropriate `PixCoord`. This is used for input validation throughout the regions package, especially in the `__init__` method of pixel region classes. Parameters ---------- val : `PixCoord` The object to check name : str Parameter name (used for error messages) expected : {'any', 'scalar', 'not scalar'} What kind of PixCoord to check for Returns ------- val : `PixCoord` The input object (at the moment unmodified, might do fix-ups here later) """ if not isinstance(val, PixCoord): raise TypeError('{} must be a PixCoord'.format(name)) if expected == 'any': pass elif expected == 'scalar': if not val.isscalar: raise ValueError('{} must be a scalar PixCoord'.format(name)) elif expected == 'not scalar': if val.isscalar: raise ValueError('{} must be a non-scalar PixCoord'.format(name)) else: raise ValueError('Invalid argument for `expected`: {}'.format(expected)) return val
[ "def", "_validate", "(", "val", ",", "name", ",", "expected", "=", "'any'", ")", ":", "if", "not", "isinstance", "(", "val", ",", "PixCoord", ")", ":", "raise", "TypeError", "(", "'{} must be a PixCoord'", ".", "format", "(", "name", ")", ")", "if", "expected", "==", "'any'", ":", "pass", "elif", "expected", "==", "'scalar'", ":", "if", "not", "val", ".", "isscalar", ":", "raise", "ValueError", "(", "'{} must be a scalar PixCoord'", ".", "format", "(", "name", ")", ")", "elif", "expected", "==", "'not scalar'", ":", "if", "val", ".", "isscalar", ":", "raise", "ValueError", "(", "'{} must be a non-scalar PixCoord'", ".", "format", "(", "name", ")", ")", "else", ":", "raise", "ValueError", "(", "'Invalid argument for `expected`: {}'", ".", "format", "(", "expected", ")", ")", "return", "val" ]
Validate that a given object is an appropriate `PixCoord`. This is used for input validation throughout the regions package, especially in the `__init__` method of pixel region classes. Parameters ---------- val : `PixCoord` The object to check name : str Parameter name (used for error messages) expected : {'any', 'scalar', 'not scalar'} What kind of PixCoord to check for Returns ------- val : `PixCoord` The input object (at the moment unmodified, might do fix-ups here later)
[ "Validate", "that", "a", "given", "object", "is", "an", "appropriate", "PixCoord", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/pixcoord.py#L45-L79
train
232,742
astropy/regions
regions/core/pixcoord.py
PixCoord.to_sky
def to_sky(self, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE): """Convert this `PixCoord` to `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.from_pixel`. See parameter description there. """ return SkyCoord.from_pixel( xp=self.x, yp=self.y, wcs=wcs, origin=origin, mode=mode, )
python
def to_sky(self, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE): """Convert this `PixCoord` to `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.from_pixel`. See parameter description there. """ return SkyCoord.from_pixel( xp=self.x, yp=self.y, wcs=wcs, origin=origin, mode=mode, )
[ "def", "to_sky", "(", "self", ",", "wcs", ",", "origin", "=", "_DEFAULT_WCS_ORIGIN", ",", "mode", "=", "_DEFAULT_WCS_MODE", ")", ":", "return", "SkyCoord", ".", "from_pixel", "(", "xp", "=", "self", ".", "x", ",", "yp", "=", "self", ".", "y", ",", "wcs", "=", "wcs", ",", "origin", "=", "origin", ",", "mode", "=", "mode", ",", ")" ]
Convert this `PixCoord` to `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.from_pixel`. See parameter description there.
[ "Convert", "this", "PixCoord", "to", "~astropy", ".", "coordinates", ".", "SkyCoord", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/pixcoord.py#L123-L132
train
232,743
astropy/regions
regions/core/pixcoord.py
PixCoord.from_sky
def from_sky(cls, skycoord, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE): """Create `PixCoord` from `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.to_pixel`. See parameter description there. """ x, y = skycoord.to_pixel(wcs=wcs, origin=origin, mode=mode) return cls(x=x, y=y)
python
def from_sky(cls, skycoord, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE): """Create `PixCoord` from `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.to_pixel`. See parameter description there. """ x, y = skycoord.to_pixel(wcs=wcs, origin=origin, mode=mode) return cls(x=x, y=y)
[ "def", "from_sky", "(", "cls", ",", "skycoord", ",", "wcs", ",", "origin", "=", "_DEFAULT_WCS_ORIGIN", ",", "mode", "=", "_DEFAULT_WCS_MODE", ")", ":", "x", ",", "y", "=", "skycoord", ".", "to_pixel", "(", "wcs", "=", "wcs", ",", "origin", "=", "origin", ",", "mode", "=", "mode", ")", "return", "cls", "(", "x", "=", "x", ",", "y", "=", "y", ")" ]
Create `PixCoord` from `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.to_pixel`. See parameter description there.
[ "Create", "PixCoord", "from", "~astropy", ".", "coordinates", ".", "SkyCoord", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/pixcoord.py#L135-L142
train
232,744
astropy/regions
regions/core/pixcoord.py
PixCoord.separation
def separation(self, other): r"""Separation to another pixel coordinate. This is the two-dimensional cartesian separation :math:`d` with .. math:: d = \sqrt{(x_1 - x_2) ^ 2 + (y_1 - y_2) ^ 2} Parameters ---------- other : `PixCoord` Other pixel coordinate Returns ------- separation : `numpy.array` Separation in pixels """ dx = other.x - self.x dy = other.y - self.y return np.hypot(dx, dy)
python
def separation(self, other): r"""Separation to another pixel coordinate. This is the two-dimensional cartesian separation :math:`d` with .. math:: d = \sqrt{(x_1 - x_2) ^ 2 + (y_1 - y_2) ^ 2} Parameters ---------- other : `PixCoord` Other pixel coordinate Returns ------- separation : `numpy.array` Separation in pixels """ dx = other.x - self.x dy = other.y - self.y return np.hypot(dx, dy)
[ "def", "separation", "(", "self", ",", "other", ")", ":", "dx", "=", "other", ".", "x", "-", "self", ".", "x", "dy", "=", "other", ".", "y", "-", "self", ".", "y", "return", "np", ".", "hypot", "(", "dx", ",", "dy", ")" ]
r"""Separation to another pixel coordinate. This is the two-dimensional cartesian separation :math:`d` with .. math:: d = \sqrt{(x_1 - x_2) ^ 2 + (y_1 - y_2) ^ 2} Parameters ---------- other : `PixCoord` Other pixel coordinate Returns ------- separation : `numpy.array` Separation in pixels
[ "r", "Separation", "to", "another", "pixel", "coordinate", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/pixcoord.py#L144-L164
train
232,745
astropy/regions
regions/_utils/wcs_helpers.py
skycoord_to_pixel_scale_angle
def skycoord_to_pixel_scale_angle(skycoord, wcs, small_offset=1 * u.arcsec): """ Convert a set of SkyCoord coordinates into pixel coordinates, pixel scales, and position angles. Parameters ---------- skycoord : `~astropy.coordinates.SkyCoord` Sky coordinates wcs : `~astropy.wcs.WCS` The WCS transformation to use small_offset : `~astropy.units.Quantity` A small offset to use to compute the angle Returns ------- pixcoord : `~regions.PixCoord` Pixel coordinates scale : float The pixel scale at each location, in degrees/pixel angle : `~astropy.units.Quantity` The position angle of the celestial coordinate system in pixel space. """ # Convert to pixel coordinates x, y = skycoord_to_pixel(skycoord, wcs, mode=skycoord_to_pixel_mode) pixcoord = PixCoord(x=x, y=y) # We take a point directly 'above' (in latitude) the position requested # and convert it to pixel coordinates, then we use that to figure out the # scale and position angle of the coordinate system at the location of # the points. # Find the coordinates as a representation object r_old = skycoord.represent_as('unitspherical') # Add a a small perturbation in the latitude direction (since longitude # is more difficult because it is not directly an angle). dlat = small_offset r_new = UnitSphericalRepresentation(r_old.lon, r_old.lat + dlat) coords_offset = skycoord.realize_frame(r_new) # Find pixel coordinates of offset coordinates x_offset, y_offset = skycoord_to_pixel(coords_offset, wcs, mode=skycoord_to_pixel_mode) # Find vector dx = x_offset - x dy = y_offset - y # Find the length of the vector scale = np.hypot(dx, dy) / dlat.to('degree').value # Find the position angle angle = np.arctan2(dy, dx) * u.radian return pixcoord, scale, angle
python
def skycoord_to_pixel_scale_angle(skycoord, wcs, small_offset=1 * u.arcsec): """ Convert a set of SkyCoord coordinates into pixel coordinates, pixel scales, and position angles. Parameters ---------- skycoord : `~astropy.coordinates.SkyCoord` Sky coordinates wcs : `~astropy.wcs.WCS` The WCS transformation to use small_offset : `~astropy.units.Quantity` A small offset to use to compute the angle Returns ------- pixcoord : `~regions.PixCoord` Pixel coordinates scale : float The pixel scale at each location, in degrees/pixel angle : `~astropy.units.Quantity` The position angle of the celestial coordinate system in pixel space. """ # Convert to pixel coordinates x, y = skycoord_to_pixel(skycoord, wcs, mode=skycoord_to_pixel_mode) pixcoord = PixCoord(x=x, y=y) # We take a point directly 'above' (in latitude) the position requested # and convert it to pixel coordinates, then we use that to figure out the # scale and position angle of the coordinate system at the location of # the points. # Find the coordinates as a representation object r_old = skycoord.represent_as('unitspherical') # Add a a small perturbation in the latitude direction (since longitude # is more difficult because it is not directly an angle). dlat = small_offset r_new = UnitSphericalRepresentation(r_old.lon, r_old.lat + dlat) coords_offset = skycoord.realize_frame(r_new) # Find pixel coordinates of offset coordinates x_offset, y_offset = skycoord_to_pixel(coords_offset, wcs, mode=skycoord_to_pixel_mode) # Find vector dx = x_offset - x dy = y_offset - y # Find the length of the vector scale = np.hypot(dx, dy) / dlat.to('degree').value # Find the position angle angle = np.arctan2(dy, dx) * u.radian return pixcoord, scale, angle
[ "def", "skycoord_to_pixel_scale_angle", "(", "skycoord", ",", "wcs", ",", "small_offset", "=", "1", "*", "u", ".", "arcsec", ")", ":", "# Convert to pixel coordinates", "x", ",", "y", "=", "skycoord_to_pixel", "(", "skycoord", ",", "wcs", ",", "mode", "=", "skycoord_to_pixel_mode", ")", "pixcoord", "=", "PixCoord", "(", "x", "=", "x", ",", "y", "=", "y", ")", "# We take a point directly 'above' (in latitude) the position requested", "# and convert it to pixel coordinates, then we use that to figure out the", "# scale and position angle of the coordinate system at the location of", "# the points.", "# Find the coordinates as a representation object", "r_old", "=", "skycoord", ".", "represent_as", "(", "'unitspherical'", ")", "# Add a a small perturbation in the latitude direction (since longitude", "# is more difficult because it is not directly an angle).", "dlat", "=", "small_offset", "r_new", "=", "UnitSphericalRepresentation", "(", "r_old", ".", "lon", ",", "r_old", ".", "lat", "+", "dlat", ")", "coords_offset", "=", "skycoord", ".", "realize_frame", "(", "r_new", ")", "# Find pixel coordinates of offset coordinates", "x_offset", ",", "y_offset", "=", "skycoord_to_pixel", "(", "coords_offset", ",", "wcs", ",", "mode", "=", "skycoord_to_pixel_mode", ")", "# Find vector", "dx", "=", "x_offset", "-", "x", "dy", "=", "y_offset", "-", "y", "# Find the length of the vector", "scale", "=", "np", ".", "hypot", "(", "dx", ",", "dy", ")", "/", "dlat", ".", "to", "(", "'degree'", ")", ".", "value", "# Find the position angle", "angle", "=", "np", ".", "arctan2", "(", "dy", ",", "dx", ")", "*", "u", ".", "radian", "return", "pixcoord", ",", "scale", ",", "angle" ]
Convert a set of SkyCoord coordinates into pixel coordinates, pixel scales, and position angles. Parameters ---------- skycoord : `~astropy.coordinates.SkyCoord` Sky coordinates wcs : `~astropy.wcs.WCS` The WCS transformation to use small_offset : `~astropy.units.Quantity` A small offset to use to compute the angle Returns ------- pixcoord : `~regions.PixCoord` Pixel coordinates scale : float The pixel scale at each location, in degrees/pixel angle : `~astropy.units.Quantity` The position angle of the celestial coordinate system in pixel space.
[ "Convert", "a", "set", "of", "SkyCoord", "coordinates", "into", "pixel", "coordinates", "pixel", "scales", "and", "position", "angles", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/_utils/wcs_helpers.py#L13-L69
train
232,746
astropy/regions
regions/_utils/wcs_helpers.py
assert_angle
def assert_angle(name, q): """ Check that ``q`` is an angular `~astropy.units.Quantity`. """ if isinstance(q, u.Quantity): if q.unit.physical_type == 'angle': pass else: raise ValueError("{0} should have angular units".format(name)) else: raise TypeError("{0} should be a Quantity instance".format(name))
python
def assert_angle(name, q): """ Check that ``q`` is an angular `~astropy.units.Quantity`. """ if isinstance(q, u.Quantity): if q.unit.physical_type == 'angle': pass else: raise ValueError("{0} should have angular units".format(name)) else: raise TypeError("{0} should be a Quantity instance".format(name))
[ "def", "assert_angle", "(", "name", ",", "q", ")", ":", "if", "isinstance", "(", "q", ",", "u", ".", "Quantity", ")", ":", "if", "q", ".", "unit", ".", "physical_type", "==", "'angle'", ":", "pass", "else", ":", "raise", "ValueError", "(", "\"{0} should have angular units\"", ".", "format", "(", "name", ")", ")", "else", ":", "raise", "TypeError", "(", "\"{0} should be a Quantity instance\"", ".", "format", "(", "name", ")", ")" ]
Check that ``q`` is an angular `~astropy.units.Quantity`.
[ "Check", "that", "q", "is", "an", "angular", "~astropy", ".", "units", ".", "Quantity", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/_utils/wcs_helpers.py#L86-L96
train
232,747
astropy/regions
ah_bootstrap.py
_silence
def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr
python
def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr
[ "def", "_silence", "(", ")", ":", "old_stdout", "=", "sys", ".", "stdout", "old_stderr", "=", "sys", ".", "stderr", "sys", ".", "stdout", "=", "_DummyFile", "(", ")", "sys", ".", "stderr", "=", "_DummyFile", "(", ")", "exception_occurred", "=", "False", "try", ":", "yield", "except", ":", "exception_occurred", "=", "True", "# Go ahead and clean up so that exception handling can work normally", "sys", ".", "stdout", "=", "old_stdout", "sys", ".", "stderr", "=", "old_stderr", "raise", "if", "not", "exception_occurred", ":", "sys", ".", "stdout", "=", "old_stdout", "sys", ".", "stderr", "=", "old_stderr" ]
A context manager that silences sys.stdout and sys.stderr.
[ "A", "context", "manager", "that", "silences", "sys", ".", "stdout", "and", "sys", ".", "stderr", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L914-L933
train
232,748
astropy/regions
ah_bootstrap.py
use_astropy_helpers
def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. 
offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. """ global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run()
python
def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. 
offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. """ global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run()
[ "def", "use_astropy_helpers", "(", "*", "*", "kwargs", ")", ":", "global", "BOOTSTRAPPER", "config", "=", "BOOTSTRAPPER", ".", "config", "config", ".", "update", "(", "*", "*", "kwargs", ")", "# Create a new bootstrapper with the updated configuration and run it", "BOOTSTRAPPER", "=", "_Bootstrapper", "(", "*", "*", "config", ")", "BOOTSTRAPPER", ".", "run", "(", ")" ]
Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. 
offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`.
[ "Ensure", "that", "the", "astropy_helpers", "module", "is", "available", "and", "is", "importable", ".", "This", "supports", "automatic", "submodule", "initialization", "if", "astropy_helpers", "is", "included", "in", "a", "project", "as", "a", "git", "submodule", "or", "will", "download", "it", "from", "PyPI", "if", "necessary", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L959-L1022
train
232,749
astropy/regions
ah_bootstrap.py
_Bootstrapper.config
def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname))
python
def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname))
[ "def", "config", "(", "self", ")", ":", "return", "dict", "(", "(", "optname", ",", "getattr", "(", "self", ",", "optname", ")", ")", "for", "optname", ",", "_", "in", "CFG_OPTIONS", "if", "hasattr", "(", "self", ",", "optname", ")", ")" ]
A `dict` containing the options this `_Bootstrapper` was configured with.
[ "A", "dict", "containing", "the", "options", "this", "_Bootstrapper", "was", "configured", "with", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L393-L400
train
232,750
astropy/regions
ah_bootstrap.py
_Bootstrapper.get_local_directory_dist
def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. """ if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist
python
def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. """ if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist
[ "def", "get_local_directory_dist", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "path", ")", ":", "return", "log", ".", "info", "(", "'Attempting to import astropy_helpers from {0} {1!r}'", ".", "format", "(", "'submodule'", "if", "self", ".", "is_submodule", "else", "'directory'", ",", "self", ".", "path", ")", ")", "dist", "=", "self", ".", "_directory_import", "(", ")", "if", "dist", "is", "None", ":", "log", ".", "warn", "(", "'The requested path {0!r} for importing {1} does not '", "'exist, or does not contain a copy of the {1} '", "'package.'", ".", "format", "(", "self", ".", "path", ",", "PACKAGE_NAME", ")", ")", "elif", "self", ".", "auto_upgrade", "and", "not", "self", ".", "is_submodule", ":", "# A version of astropy-helpers was found on the available path, but", "# check to see if a bugfix release is available on PyPI", "upgrade", "=", "self", ".", "_do_upgrade", "(", "dist", ")", "if", "upgrade", "is", "not", "None", ":", "dist", "=", "upgrade", "return", "dist" ]
Handle importing a vendored package from a subdirectory of the source distribution.
[ "Handle", "importing", "a", "vendored", "package", "from", "a", "subdirectory", "of", "the", "source", "distribution", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L402-L429
train
232,751
astropy/regions
ah_bootstrap.py
_Bootstrapper.get_local_file_dist
def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. """ if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist
python
def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. """ if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist
[ "def", "get_local_file_dist", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "self", ".", "path", ")", ":", "return", "log", ".", "info", "(", "'Attempting to unpack and import astropy_helpers from '", "'{0!r}'", ".", "format", "(", "self", ".", "path", ")", ")", "try", ":", "dist", "=", "self", ".", "_do_download", "(", "find_links", "=", "[", "self", ".", "path", "]", ")", "except", "Exception", "as", "e", ":", "if", "DEBUG", ":", "raise", "log", ".", "warn", "(", "'Failed to import {0} from the specified archive {1!r}: '", "'{2}'", ".", "format", "(", "PACKAGE_NAME", ",", "self", ".", "path", ",", "str", "(", "e", ")", ")", ")", "dist", "=", "None", "if", "dist", "is", "not", "None", "and", "self", ".", "auto_upgrade", ":", "# A version of astropy-helpers was found on the available path, but", "# check to see if a bugfix release is available on PyPI", "upgrade", "=", "self", ".", "_do_upgrade", "(", "dist", ")", "if", "upgrade", "is", "not", "None", ":", "dist", "=", "upgrade", "return", "dist" ]
Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive.
[ "Handle", "importing", "from", "a", "source", "archive", ";", "this", "also", "uses", "setup_requires", "but", "points", "easy_install", "directly", "to", "the", "source", "archive", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L431-L461
train
232,752
astropy/regions
ah_bootstrap.py
_Bootstrapper._directory_import
def _directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): # We use subprocess instead of run_setup from setuptools to # avoid segmentation faults - see the following for more details: # https://github.com/cython/cython/issues/2104 sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path) for dist in pkg_resources.find_distributions(path, True): # There should be only one... return dist return dist
python
def _directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): # We use subprocess instead of run_setup from setuptools to # avoid segmentation faults - see the following for more details: # https://github.com/cython/cython/issues/2104 sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path) for dist in pkg_resources.find_distributions(path, True): # There should be only one... return dist return dist
[ "def", "_directory_import", "(", "self", ")", ":", "# Return True on success, False on failure but download is allowed, and", "# otherwise raise SystemExit", "path", "=", "os", ".", "path", ".", "abspath", "(", "self", ".", "path", ")", "# Use an empty WorkingSet rather than the man", "# pkg_resources.working_set, since on older versions of setuptools this", "# will invoke a VersionConflict when trying to install an upgrade", "ws", "=", "pkg_resources", ".", "WorkingSet", "(", "[", "]", ")", "ws", ".", "add_entry", "(", "path", ")", "dist", "=", "ws", ".", "by_key", ".", "get", "(", "DIST_NAME", ")", "if", "dist", "is", "None", ":", "# We didn't find an egg-info/dist-info in the given path, but if a", "# setup.py exists we can generate it", "setup_py", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'setup.py'", ")", "if", "os", ".", "path", ".", "isfile", "(", "setup_py", ")", ":", "# We use subprocess instead of run_setup from setuptools to", "# avoid segmentation faults - see the following for more details:", "# https://github.com/cython/cython/issues/2104", "sp", ".", "check_output", "(", "[", "sys", ".", "executable", ",", "'setup.py'", ",", "'egg_info'", "]", ",", "cwd", "=", "path", ")", "for", "dist", "in", "pkg_resources", ".", "find_distributions", "(", "path", ",", "True", ")", ":", "# There should be only one...", "return", "dist", "return", "dist" ]
Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise.
[ "Import", "astropy_helpers", "from", "the", "given", "path", "which", "will", "be", "added", "to", "sys", ".", "path", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L486-L519
train
232,753
astropy/regions
ah_bootstrap.py
_Bootstrapper._check_submodule
def _check_submodule(self): """ Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details. """ if (self.path is None or (os.path.exists(self.path) and not os.path.isdir(self.path))): return False if self.use_git: return self._check_submodule_using_git() else: return self._check_submodule_no_git()
python
def _check_submodule(self): """ Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details. """ if (self.path is None or (os.path.exists(self.path) and not os.path.isdir(self.path))): return False if self.use_git: return self._check_submodule_using_git() else: return self._check_submodule_no_git()
[ "def", "_check_submodule", "(", "self", ")", ":", "if", "(", "self", ".", "path", "is", "None", "or", "(", "os", ".", "path", ".", "exists", "(", "self", ".", "path", ")", "and", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "path", ")", ")", ")", ":", "return", "False", "if", "self", ".", "use_git", ":", "return", "self", ".", "_check_submodule_using_git", "(", ")", "else", ":", "return", "self", ".", "_check_submodule_no_git", "(", ")" ]
Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details.
[ "Check", "if", "the", "given", "path", "is", "a", "git", "submodule", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L607-L622
train
232,754
EconForge/dolo
dolo/numeric/tensor.py
sdot
def sdot( U, V ): ''' Computes the tensorproduct reducing last dimensoin of U with first dimension of V. For matrices, it is equal to regular matrix product. ''' nu = U.ndim #nv = V.ndim return np.tensordot( U, V, axes=(nu-1,0) )
python
def sdot( U, V ): ''' Computes the tensorproduct reducing last dimensoin of U with first dimension of V. For matrices, it is equal to regular matrix product. ''' nu = U.ndim #nv = V.ndim return np.tensordot( U, V, axes=(nu-1,0) )
[ "def", "sdot", "(", "U", ",", "V", ")", ":", "nu", "=", "U", ".", "ndim", "#nv = V.ndim", "return", "np", ".", "tensordot", "(", "U", ",", "V", ",", "axes", "=", "(", "nu", "-", "1", ",", "0", ")", ")" ]
Computes the tensorproduct reducing last dimensoin of U with first dimension of V. For matrices, it is equal to regular matrix product.
[ "Computes", "the", "tensorproduct", "reducing", "last", "dimensoin", "of", "U", "with", "first", "dimension", "of", "V", ".", "For", "matrices", "it", "is", "equal", "to", "regular", "matrix", "product", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/tensor.py#L44-L51
train
232,755
EconForge/dolo
dolo/numeric/interpolation/smolyak.py
SmolyakBasic.set_values
def set_values(self,x): """ Updates self.theta parameter. No returns values""" x = numpy.atleast_2d(x) x = x.real # ahem C_inv = self.__C_inv__ theta = numpy.dot( x, C_inv ) self.theta = theta return theta
python
def set_values(self,x): """ Updates self.theta parameter. No returns values""" x = numpy.atleast_2d(x) x = x.real # ahem C_inv = self.__C_inv__ theta = numpy.dot( x, C_inv ) self.theta = theta return theta
[ "def", "set_values", "(", "self", ",", "x", ")", ":", "x", "=", "numpy", ".", "atleast_2d", "(", "x", ")", "x", "=", "x", ".", "real", "# ahem", "C_inv", "=", "self", ".", "__C_inv__", "theta", "=", "numpy", ".", "dot", "(", "x", ",", "C_inv", ")", "self", ".", "theta", "=", "theta", "return", "theta" ]
Updates self.theta parameter. No returns values
[ "Updates", "self", ".", "theta", "parameter", ".", "No", "returns", "values" ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/interpolation/smolyak.py#L256-L267
train
232,756
EconForge/dolo
dolo/numeric/discretization/discretization.py
tauchen
def tauchen(N, mu, rho, sigma, m=2): """ Approximate an AR1 process by a finite markov chain using Tauchen's method. :param N: scalar, number of nodes for Z :param mu: scalar, unconditional mean of process :param rho: scalar :param sigma: scalar, std. dev. of epsilons :param m: max +- std. devs. :returns: Z, N*1 vector, nodes for Z. Zprob, N*N matrix, transition probabilities SJB: This is a port of Martin Floden's 1996 Matlab code to implement Tauchen 1986 Economic Letters method The following comments are Floden's. Finds a Markov chain whose sample paths approximate those of the AR(1) process z(t+1) = (1-rho)*mu + rho * z(t) + eps(t+1) where eps are normal with stddev sigma. """ Z = np.zeros((N,1)) Zprob = np.zeros((N,N)) a = (1-rho)*mu Z[-1] = m * math.sqrt(sigma**2 / (1 - (rho**2))) Z[0] = -1 * Z[-1] zstep = (Z[-1] - Z[0]) / (N - 1) for i in range(1,N): Z[i] = Z[0] + zstep * (i) Z = Z + a / (1-rho) for j in range(0,N): for k in range(0,N): if k == 0: Zprob[j,k] = sp.stats.norm.cdf((Z[0] - a - rho * Z[j] + zstep / 2) / sigma) elif k == (N-1): Zprob[j,k] = 1 - sp.stats.norm.cdf((Z[-1] - a - rho * Z[j] - zstep / 2) / sigma) else: up = sp.stats.norm.cdf((Z[k] - a - rho * Z[j] + zstep / 2) / sigma) down = sp.stats.norm.cdf( (Z[k] - a - rho * Z[j] - zstep / 2) / sigma) Zprob[j,k] = up - down return( (Z, Zprob) )
python
def tauchen(N, mu, rho, sigma, m=2): """ Approximate an AR1 process by a finite markov chain using Tauchen's method. :param N: scalar, number of nodes for Z :param mu: scalar, unconditional mean of process :param rho: scalar :param sigma: scalar, std. dev. of epsilons :param m: max +- std. devs. :returns: Z, N*1 vector, nodes for Z. Zprob, N*N matrix, transition probabilities SJB: This is a port of Martin Floden's 1996 Matlab code to implement Tauchen 1986 Economic Letters method The following comments are Floden's. Finds a Markov chain whose sample paths approximate those of the AR(1) process z(t+1) = (1-rho)*mu + rho * z(t) + eps(t+1) where eps are normal with stddev sigma. """ Z = np.zeros((N,1)) Zprob = np.zeros((N,N)) a = (1-rho)*mu Z[-1] = m * math.sqrt(sigma**2 / (1 - (rho**2))) Z[0] = -1 * Z[-1] zstep = (Z[-1] - Z[0]) / (N - 1) for i in range(1,N): Z[i] = Z[0] + zstep * (i) Z = Z + a / (1-rho) for j in range(0,N): for k in range(0,N): if k == 0: Zprob[j,k] = sp.stats.norm.cdf((Z[0] - a - rho * Z[j] + zstep / 2) / sigma) elif k == (N-1): Zprob[j,k] = 1 - sp.stats.norm.cdf((Z[-1] - a - rho * Z[j] - zstep / 2) / sigma) else: up = sp.stats.norm.cdf((Z[k] - a - rho * Z[j] + zstep / 2) / sigma) down = sp.stats.norm.cdf( (Z[k] - a - rho * Z[j] - zstep / 2) / sigma) Zprob[j,k] = up - down return( (Z, Zprob) )
[ "def", "tauchen", "(", "N", ",", "mu", ",", "rho", ",", "sigma", ",", "m", "=", "2", ")", ":", "Z", "=", "np", ".", "zeros", "(", "(", "N", ",", "1", ")", ")", "Zprob", "=", "np", ".", "zeros", "(", "(", "N", ",", "N", ")", ")", "a", "=", "(", "1", "-", "rho", ")", "*", "mu", "Z", "[", "-", "1", "]", "=", "m", "*", "math", ".", "sqrt", "(", "sigma", "**", "2", "/", "(", "1", "-", "(", "rho", "**", "2", ")", ")", ")", "Z", "[", "0", "]", "=", "-", "1", "*", "Z", "[", "-", "1", "]", "zstep", "=", "(", "Z", "[", "-", "1", "]", "-", "Z", "[", "0", "]", ")", "/", "(", "N", "-", "1", ")", "for", "i", "in", "range", "(", "1", ",", "N", ")", ":", "Z", "[", "i", "]", "=", "Z", "[", "0", "]", "+", "zstep", "*", "(", "i", ")", "Z", "=", "Z", "+", "a", "/", "(", "1", "-", "rho", ")", "for", "j", "in", "range", "(", "0", ",", "N", ")", ":", "for", "k", "in", "range", "(", "0", ",", "N", ")", ":", "if", "k", "==", "0", ":", "Zprob", "[", "j", ",", "k", "]", "=", "sp", ".", "stats", ".", "norm", ".", "cdf", "(", "(", "Z", "[", "0", "]", "-", "a", "-", "rho", "*", "Z", "[", "j", "]", "+", "zstep", "/", "2", ")", "/", "sigma", ")", "elif", "k", "==", "(", "N", "-", "1", ")", ":", "Zprob", "[", "j", ",", "k", "]", "=", "1", "-", "sp", ".", "stats", ".", "norm", ".", "cdf", "(", "(", "Z", "[", "-", "1", "]", "-", "a", "-", "rho", "*", "Z", "[", "j", "]", "-", "zstep", "/", "2", ")", "/", "sigma", ")", "else", ":", "up", "=", "sp", ".", "stats", ".", "norm", ".", "cdf", "(", "(", "Z", "[", "k", "]", "-", "a", "-", "rho", "*", "Z", "[", "j", "]", "+", "zstep", "/", "2", ")", "/", "sigma", ")", "down", "=", "sp", ".", "stats", ".", "norm", ".", "cdf", "(", "(", "Z", "[", "k", "]", "-", "a", "-", "rho", "*", "Z", "[", "j", "]", "-", "zstep", "/", "2", ")", "/", "sigma", ")", "Zprob", "[", "j", ",", "k", "]", "=", "up", "-", "down", "return", "(", "(", "Z", ",", "Zprob", ")", ")" ]
Approximate an AR1 process by a finite markov chain using Tauchen's method. :param N: scalar, number of nodes for Z :param mu: scalar, unconditional mean of process :param rho: scalar :param sigma: scalar, std. dev. of epsilons :param m: max +- std. devs. :returns: Z, N*1 vector, nodes for Z. Zprob, N*N matrix, transition probabilities SJB: This is a port of Martin Floden's 1996 Matlab code to implement Tauchen 1986 Economic Letters method The following comments are Floden's. Finds a Markov chain whose sample paths approximate those of the AR(1) process z(t+1) = (1-rho)*mu + rho * z(t) + eps(t+1) where eps are normal with stddev sigma.
[ "Approximate", "an", "AR1", "process", "by", "a", "finite", "markov", "chain", "using", "Tauchen", "s", "method", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/discretization/discretization.py#L13-L50
train
232,757
EconForge/dolo
dolo/numeric/discretization/discretization.py
rouwenhorst
def rouwenhorst(rho, sigma, N): """ Approximate an AR1 process by a finite markov chain using Rouwenhorst's method. :param rho: autocorrelation of the AR1 process :param sigma: conditional standard deviation of the AR1 process :param N: number of states :return [nodes, P]: equally spaced nodes and transition matrix """ from numpy import sqrt, linspace, array,zeros sigma = float(sigma) if N == 1: nodes = array([0.0]) transitions = array([[1.0]]) return [nodes, transitions] p = (rho+1)/2 q = p nu = sqrt( (N-1)/(1-rho**2) )*sigma nodes = linspace( -nu, nu, N) sig_a = sigma n = 1 # mat0 = array( [[1]] ) mat0 = array([[p,1-p],[1-q,q]]) if N == 2: return [nodes,mat0] for n in range(3,N+1): mat = zeros( (n,n) ) mat_A = mat.copy() mat_B = mat.copy() mat_C = mat.copy() mat_D = mat.copy() mat_A[:-1,:-1] = mat0 mat_B[:-1,1:] = mat0 mat_C[1:,:-1] = mat0 mat_D[1:,1:] = mat0 mat0 = p*mat_A + (1-p)*mat_B + (1-q)*mat_C + q*mat_D mat0[1:-1,:] = mat0[1:-1,:]/2 P = mat0 return [nodes, P]
python
def rouwenhorst(rho, sigma, N): """ Approximate an AR1 process by a finite markov chain using Rouwenhorst's method. :param rho: autocorrelation of the AR1 process :param sigma: conditional standard deviation of the AR1 process :param N: number of states :return [nodes, P]: equally spaced nodes and transition matrix """ from numpy import sqrt, linspace, array,zeros sigma = float(sigma) if N == 1: nodes = array([0.0]) transitions = array([[1.0]]) return [nodes, transitions] p = (rho+1)/2 q = p nu = sqrt( (N-1)/(1-rho**2) )*sigma nodes = linspace( -nu, nu, N) sig_a = sigma n = 1 # mat0 = array( [[1]] ) mat0 = array([[p,1-p],[1-q,q]]) if N == 2: return [nodes,mat0] for n in range(3,N+1): mat = zeros( (n,n) ) mat_A = mat.copy() mat_B = mat.copy() mat_C = mat.copy() mat_D = mat.copy() mat_A[:-1,:-1] = mat0 mat_B[:-1,1:] = mat0 mat_C[1:,:-1] = mat0 mat_D[1:,1:] = mat0 mat0 = p*mat_A + (1-p)*mat_B + (1-q)*mat_C + q*mat_D mat0[1:-1,:] = mat0[1:-1,:]/2 P = mat0 return [nodes, P]
[ "def", "rouwenhorst", "(", "rho", ",", "sigma", ",", "N", ")", ":", "from", "numpy", "import", "sqrt", ",", "linspace", ",", "array", ",", "zeros", "sigma", "=", "float", "(", "sigma", ")", "if", "N", "==", "1", ":", "nodes", "=", "array", "(", "[", "0.0", "]", ")", "transitions", "=", "array", "(", "[", "[", "1.0", "]", "]", ")", "return", "[", "nodes", ",", "transitions", "]", "p", "=", "(", "rho", "+", "1", ")", "/", "2", "q", "=", "p", "nu", "=", "sqrt", "(", "(", "N", "-", "1", ")", "/", "(", "1", "-", "rho", "**", "2", ")", ")", "*", "sigma", "nodes", "=", "linspace", "(", "-", "nu", ",", "nu", ",", "N", ")", "sig_a", "=", "sigma", "n", "=", "1", "# mat0 = array( [[1]] )", "mat0", "=", "array", "(", "[", "[", "p", ",", "1", "-", "p", "]", ",", "[", "1", "-", "q", ",", "q", "]", "]", ")", "if", "N", "==", "2", ":", "return", "[", "nodes", ",", "mat0", "]", "for", "n", "in", "range", "(", "3", ",", "N", "+", "1", ")", ":", "mat", "=", "zeros", "(", "(", "n", ",", "n", ")", ")", "mat_A", "=", "mat", ".", "copy", "(", ")", "mat_B", "=", "mat", ".", "copy", "(", ")", "mat_C", "=", "mat", ".", "copy", "(", ")", "mat_D", "=", "mat", ".", "copy", "(", ")", "mat_A", "[", ":", "-", "1", ",", ":", "-", "1", "]", "=", "mat0", "mat_B", "[", ":", "-", "1", ",", "1", ":", "]", "=", "mat0", "mat_C", "[", "1", ":", ",", ":", "-", "1", "]", "=", "mat0", "mat_D", "[", "1", ":", ",", "1", ":", "]", "=", "mat0", "mat0", "=", "p", "*", "mat_A", "+", "(", "1", "-", "p", ")", "*", "mat_B", "+", "(", "1", "-", "q", ")", "*", "mat_C", "+", "q", "*", "mat_D", "mat0", "[", "1", ":", "-", "1", ",", ":", "]", "=", "mat0", "[", "1", ":", "-", "1", ",", ":", "]", "/", "2", "P", "=", "mat0", "return", "[", "nodes", ",", "P", "]" ]
Approximate an AR1 process by a finite markov chain using Rouwenhorst's method. :param rho: autocorrelation of the AR1 process :param sigma: conditional standard deviation of the AR1 process :param N: number of states :return [nodes, P]: equally spaced nodes and transition matrix
[ "Approximate", "an", "AR1", "process", "by", "a", "finite", "markov", "chain", "using", "Rouwenhorst", "s", "method", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/discretization/discretization.py#L53-L97
train
232,758
EconForge/dolo
dolo/numeric/discretization/discretization.py
tensor_markov
def tensor_markov( *args ): """Computes the product of two independent markov chains. :param m1: a tuple containing the nodes and the transition matrix of the first chain :param m2: a tuple containing the nodes and the transition matrix of the second chain :return: a tuple containing the nodes and the transition matrix of the product chain """ if len(args) > 2: m1 = args[0] m2 = args[1] tail = args[2:] prod = tensor_markov(m1,m2) return tensor_markov( prod, tail ) elif len(args) == 2: m1,m2 = args n1, t1 = m1 n2, t2 = m2 n1 = np.array(n1, dtype=float) n2 = np.array(n2, dtype=float) t1 = np.array(t1, dtype=float) t2 = np.array(t2, dtype=float) assert(n1.shape[0] == t1.shape[0] == t1.shape[1]) assert(n2.shape[0] == t2.shape[0] == t2.shape[1]) t = np.kron(t1, t2) p = t1.shape[0] q = t2.shape[0] np.tile( n2, (1,p)) # n = np.row_stack([ # np.repeat(n1, q, axis=1), # np.tile( n2, (1,p)) # ]) n = np.column_stack([ np.repeat(n1, q, axis=0), np.tile( n2, (p,1)) ]) return [n,t] else: raise Exception("Incorrect number of arguments. Expected at least 2. Found {}.".format(len(args)))
python
def tensor_markov( *args ): """Computes the product of two independent markov chains. :param m1: a tuple containing the nodes and the transition matrix of the first chain :param m2: a tuple containing the nodes and the transition matrix of the second chain :return: a tuple containing the nodes and the transition matrix of the product chain """ if len(args) > 2: m1 = args[0] m2 = args[1] tail = args[2:] prod = tensor_markov(m1,m2) return tensor_markov( prod, tail ) elif len(args) == 2: m1,m2 = args n1, t1 = m1 n2, t2 = m2 n1 = np.array(n1, dtype=float) n2 = np.array(n2, dtype=float) t1 = np.array(t1, dtype=float) t2 = np.array(t2, dtype=float) assert(n1.shape[0] == t1.shape[0] == t1.shape[1]) assert(n2.shape[0] == t2.shape[0] == t2.shape[1]) t = np.kron(t1, t2) p = t1.shape[0] q = t2.shape[0] np.tile( n2, (1,p)) # n = np.row_stack([ # np.repeat(n1, q, axis=1), # np.tile( n2, (1,p)) # ]) n = np.column_stack([ np.repeat(n1, q, axis=0), np.tile( n2, (p,1)) ]) return [n,t] else: raise Exception("Incorrect number of arguments. Expected at least 2. Found {}.".format(len(args)))
[ "def", "tensor_markov", "(", "*", "args", ")", ":", "if", "len", "(", "args", ")", ">", "2", ":", "m1", "=", "args", "[", "0", "]", "m2", "=", "args", "[", "1", "]", "tail", "=", "args", "[", "2", ":", "]", "prod", "=", "tensor_markov", "(", "m1", ",", "m2", ")", "return", "tensor_markov", "(", "prod", ",", "tail", ")", "elif", "len", "(", "args", ")", "==", "2", ":", "m1", ",", "m2", "=", "args", "n1", ",", "t1", "=", "m1", "n2", ",", "t2", "=", "m2", "n1", "=", "np", ".", "array", "(", "n1", ",", "dtype", "=", "float", ")", "n2", "=", "np", ".", "array", "(", "n2", ",", "dtype", "=", "float", ")", "t1", "=", "np", ".", "array", "(", "t1", ",", "dtype", "=", "float", ")", "t2", "=", "np", ".", "array", "(", "t2", ",", "dtype", "=", "float", ")", "assert", "(", "n1", ".", "shape", "[", "0", "]", "==", "t1", ".", "shape", "[", "0", "]", "==", "t1", ".", "shape", "[", "1", "]", ")", "assert", "(", "n2", ".", "shape", "[", "0", "]", "==", "t2", ".", "shape", "[", "0", "]", "==", "t2", ".", "shape", "[", "1", "]", ")", "t", "=", "np", ".", "kron", "(", "t1", ",", "t2", ")", "p", "=", "t1", ".", "shape", "[", "0", "]", "q", "=", "t2", ".", "shape", "[", "0", "]", "np", ".", "tile", "(", "n2", ",", "(", "1", ",", "p", ")", ")", "# n = np.row_stack([", "# np.repeat(n1, q, axis=1),", "# np.tile( n2, (1,p))", "# ])", "n", "=", "np", ".", "column_stack", "(", "[", "np", ".", "repeat", "(", "n1", ",", "q", ",", "axis", "=", "0", ")", ",", "np", ".", "tile", "(", "n2", ",", "(", "p", ",", "1", ")", ")", "]", ")", "return", "[", "n", ",", "t", "]", "else", ":", "raise", "Exception", "(", "\"Incorrect number of arguments. Expected at least 2. Found {}.\"", ".", "format", "(", "len", "(", "args", ")", ")", ")" ]
Computes the product of two independent markov chains. :param m1: a tuple containing the nodes and the transition matrix of the first chain :param m2: a tuple containing the nodes and the transition matrix of the second chain :return: a tuple containing the nodes and the transition matrix of the product chain
[ "Computes", "the", "product", "of", "two", "independent", "markov", "chains", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/discretization/discretization.py#L155-L201
train
232,759
EconForge/dolo
trash/dolo/misc/modfile.py
dynare_import
def dynare_import(filename,full_output=False, debug=False): '''Imports model defined in specified file''' import os basename = os.path.basename(filename) fname = re.compile('(.*)\.(.*)').match(basename).group(1) f = open(filename) txt = f.read() model = parse_dynare_text(txt,full_output=full_output, debug=debug) model.name = fname return model
python
def dynare_import(filename,full_output=False, debug=False): '''Imports model defined in specified file''' import os basename = os.path.basename(filename) fname = re.compile('(.*)\.(.*)').match(basename).group(1) f = open(filename) txt = f.read() model = parse_dynare_text(txt,full_output=full_output, debug=debug) model.name = fname return model
[ "def", "dynare_import", "(", "filename", ",", "full_output", "=", "False", ",", "debug", "=", "False", ")", ":", "import", "os", "basename", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "fname", "=", "re", ".", "compile", "(", "'(.*)\\.(.*)'", ")", ".", "match", "(", "basename", ")", ".", "group", "(", "1", ")", "f", "=", "open", "(", "filename", ")", "txt", "=", "f", ".", "read", "(", ")", "model", "=", "parse_dynare_text", "(", "txt", ",", "full_output", "=", "full_output", ",", "debug", "=", "debug", ")", "model", ".", "name", "=", "fname", "return", "model" ]
Imports model defined in specified file
[ "Imports", "model", "defined", "in", "specified", "file" ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/trash/dolo/misc/modfile.py#L311-L320
train
232,760
EconForge/dolo
dolo/algos/perfect_foresight.py
_shocks_to_epsilons
def _shocks_to_epsilons(model, shocks, T): """ Helper function to support input argument `shocks` being one of many different data types. Will always return a `T, n_e` matrix. """ n_e = len(model.calibration['exogenous']) # if we have a DataFrame, convert it to a dict and rely on the method below if isinstance(shocks, pd.DataFrame): shocks = {k: shocks[k].tolist() for k in shocks.columns} # handle case where shocks might be a dict. Be careful to handle case where # value arrays are not the same length if isinstance(shocks, dict): epsilons = np.zeros((T + 1, n_e)) for (i, k) in enumerate(model.symbols["exogenous"]): if k in shocks: this_shock = shocks[k] epsilons[:len(this_shock), i] = this_shock epsilons[len(this_shock):, i] = this_shock[-1] else: # otherwise set to value in calibration epsilons[:, i] = model.calibration["exogenous"][i] return epsilons # read from calibration if not given if shocks is None: shocks = model.calibration["exogenous"] # now we just assume that shocks is array-like and try using the output of # np.asarray(shocks) shocks = np.asarray(shocks) shocks = shocks.reshape((-1, n_e)) # until last period, exogenous shock takes its last value epsilons = np.zeros((T + 1, n_e)) epsilons[:(shocks.shape[0] - 1), :] = shocks[1:, :] epsilons[(shocks.shape[0] - 1):, :] = shocks[-1:, :] return epsilons
python
def _shocks_to_epsilons(model, shocks, T): """ Helper function to support input argument `shocks` being one of many different data types. Will always return a `T, n_e` matrix. """ n_e = len(model.calibration['exogenous']) # if we have a DataFrame, convert it to a dict and rely on the method below if isinstance(shocks, pd.DataFrame): shocks = {k: shocks[k].tolist() for k in shocks.columns} # handle case where shocks might be a dict. Be careful to handle case where # value arrays are not the same length if isinstance(shocks, dict): epsilons = np.zeros((T + 1, n_e)) for (i, k) in enumerate(model.symbols["exogenous"]): if k in shocks: this_shock = shocks[k] epsilons[:len(this_shock), i] = this_shock epsilons[len(this_shock):, i] = this_shock[-1] else: # otherwise set to value in calibration epsilons[:, i] = model.calibration["exogenous"][i] return epsilons # read from calibration if not given if shocks is None: shocks = model.calibration["exogenous"] # now we just assume that shocks is array-like and try using the output of # np.asarray(shocks) shocks = np.asarray(shocks) shocks = shocks.reshape((-1, n_e)) # until last period, exogenous shock takes its last value epsilons = np.zeros((T + 1, n_e)) epsilons[:(shocks.shape[0] - 1), :] = shocks[1:, :] epsilons[(shocks.shape[0] - 1):, :] = shocks[-1:, :] return epsilons
[ "def", "_shocks_to_epsilons", "(", "model", ",", "shocks", ",", "T", ")", ":", "n_e", "=", "len", "(", "model", ".", "calibration", "[", "'exogenous'", "]", ")", "# if we have a DataFrame, convert it to a dict and rely on the method below", "if", "isinstance", "(", "shocks", ",", "pd", ".", "DataFrame", ")", ":", "shocks", "=", "{", "k", ":", "shocks", "[", "k", "]", ".", "tolist", "(", ")", "for", "k", "in", "shocks", ".", "columns", "}", "# handle case where shocks might be a dict. Be careful to handle case where", "# value arrays are not the same length", "if", "isinstance", "(", "shocks", ",", "dict", ")", ":", "epsilons", "=", "np", ".", "zeros", "(", "(", "T", "+", "1", ",", "n_e", ")", ")", "for", "(", "i", ",", "k", ")", "in", "enumerate", "(", "model", ".", "symbols", "[", "\"exogenous\"", "]", ")", ":", "if", "k", "in", "shocks", ":", "this_shock", "=", "shocks", "[", "k", "]", "epsilons", "[", ":", "len", "(", "this_shock", ")", ",", "i", "]", "=", "this_shock", "epsilons", "[", "len", "(", "this_shock", ")", ":", ",", "i", "]", "=", "this_shock", "[", "-", "1", "]", "else", ":", "# otherwise set to value in calibration", "epsilons", "[", ":", ",", "i", "]", "=", "model", ".", "calibration", "[", "\"exogenous\"", "]", "[", "i", "]", "return", "epsilons", "# read from calibration if not given", "if", "shocks", "is", "None", ":", "shocks", "=", "model", ".", "calibration", "[", "\"exogenous\"", "]", "# now we just assume that shocks is array-like and try using the output of", "# np.asarray(shocks)", "shocks", "=", "np", ".", "asarray", "(", "shocks", ")", "shocks", "=", "shocks", ".", "reshape", "(", "(", "-", "1", ",", "n_e", ")", ")", "# until last period, exogenous shock takes its last value", "epsilons", "=", "np", ".", "zeros", "(", "(", "T", "+", "1", ",", "n_e", ")", ")", "epsilons", "[", ":", "(", "shocks", ".", "shape", "[", "0", "]", "-", "1", ")", ",", ":", "]", "=", "shocks", "[", "1", ":", ",", ":", "]", "epsilons", "[", "(", "shocks", ".", 
"shape", "[", "0", "]", "-", "1", ")", ":", ",", ":", "]", "=", "shocks", "[", "-", "1", ":", ",", ":", "]", "return", "epsilons" ]
Helper function to support input argument `shocks` being one of many different data types. Will always return a `T, n_e` matrix.
[ "Helper", "function", "to", "support", "input", "argument", "shocks", "being", "one", "of", "many", "different", "data", "types", ".", "Will", "always", "return", "a", "T", "n_e", "matrix", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/algos/perfect_foresight.py#L9-L49
train
232,761
EconForge/dolo
trash/dolo/misc/symbolic_interactive.py
clear_all
def clear_all(): """ Clears all parameters, variables, and shocks defined previously """ frame = inspect.currentframe().f_back try: if frame.f_globals.get('variables_order'): # we should avoid to declare symbols twice ! del frame.f_globals['variables_order'] if frame.f_globals.get('parameters_order'): # we should avoid to declare symbols twice ! del frame.f_globals['parameters_order'] finally: del frame
python
def clear_all(): """ Clears all parameters, variables, and shocks defined previously """ frame = inspect.currentframe().f_back try: if frame.f_globals.get('variables_order'): # we should avoid to declare symbols twice ! del frame.f_globals['variables_order'] if frame.f_globals.get('parameters_order'): # we should avoid to declare symbols twice ! del frame.f_globals['parameters_order'] finally: del frame
[ "def", "clear_all", "(", ")", ":", "frame", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", "try", ":", "if", "frame", ".", "f_globals", ".", "get", "(", "'variables_order'", ")", ":", "# we should avoid to declare symbols twice !", "del", "frame", ".", "f_globals", "[", "'variables_order'", "]", "if", "frame", ".", "f_globals", ".", "get", "(", "'parameters_order'", ")", ":", "# we should avoid to declare symbols twice !", "del", "frame", ".", "f_globals", "[", "'parameters_order'", "]", "finally", ":", "del", "frame" ]
Clears all parameters, variables, and shocks defined previously
[ "Clears", "all", "parameters", "variables", "and", "shocks", "defined", "previously" ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/trash/dolo/misc/symbolic_interactive.py#L319-L333
train
232,762
EconForge/dolo
trash/dolo/algos/dtcscc/nonlinearsystem.py
nonlinear_system
def nonlinear_system(model, initial_dr=None, maxit=10, tol=1e-8, grid={}, distribution={}, verbose=True): ''' Finds a global solution for ``model`` by solving one large system of equations using a simple newton algorithm. Parameters ---------- model: NumericModel "dtcscc" model to be solved verbose: boolean if True, display iterations initial_dr: decision rule initial guess for the decision rule maxit: int maximum number of iterationsd tol: tolerance criterium for successive approximations grid: grid options distribution: distribution options Returns ------- decision rule : approximated solution ''' if verbose: headline = '|{0:^4} | {1:10} | {2:8} |' headline = headline.format('N', ' Error', 'Time') stars = '-'*len(headline) print(stars) print(headline) print(stars) # format string for within loop fmt_str = '|{0:4} | {1:10.3e} | {2:8.3f} |' f = model.functions['arbitrage'] g = model.functions['transition'] p = model.calibration['parameters'] distrib = model.get_distribution(**distribution) nodes, weights = distrib.discretize() approx = model.get_grid(**grid) ms = create_interpolator(approx, approx.interpolation) grid = ms.grid if initial_dr is None: dr = approximate_controls(model) else: dr = initial_dr ms.set_values(dr(grid)) x = dr(grid) x0 = x.copy() it = 0 err = 10 a0 = x0.copy().reshape((x0.shape[0]*x0.shape[1],)) a = a0.copy() while err > tol and it < maxit: it += 1 t1 = time.time() r, da = residuals(f, g, grid, a.reshape(x0.shape), ms, nodes, weights, p, diff=True)[:2] r = r.flatten() err = abs(r).max() t2 = time.time() if verbose: print(fmt_str.format(it, err, t2-t1)) if err > tol: a -= scipy.sparse.linalg.spsolve(da, r) if verbose: print(stars) return ms
python
def nonlinear_system(model, initial_dr=None, maxit=10, tol=1e-8, grid={}, distribution={}, verbose=True): ''' Finds a global solution for ``model`` by solving one large system of equations using a simple newton algorithm. Parameters ---------- model: NumericModel "dtcscc" model to be solved verbose: boolean if True, display iterations initial_dr: decision rule initial guess for the decision rule maxit: int maximum number of iterationsd tol: tolerance criterium for successive approximations grid: grid options distribution: distribution options Returns ------- decision rule : approximated solution ''' if verbose: headline = '|{0:^4} | {1:10} | {2:8} |' headline = headline.format('N', ' Error', 'Time') stars = '-'*len(headline) print(stars) print(headline) print(stars) # format string for within loop fmt_str = '|{0:4} | {1:10.3e} | {2:8.3f} |' f = model.functions['arbitrage'] g = model.functions['transition'] p = model.calibration['parameters'] distrib = model.get_distribution(**distribution) nodes, weights = distrib.discretize() approx = model.get_grid(**grid) ms = create_interpolator(approx, approx.interpolation) grid = ms.grid if initial_dr is None: dr = approximate_controls(model) else: dr = initial_dr ms.set_values(dr(grid)) x = dr(grid) x0 = x.copy() it = 0 err = 10 a0 = x0.copy().reshape((x0.shape[0]*x0.shape[1],)) a = a0.copy() while err > tol and it < maxit: it += 1 t1 = time.time() r, da = residuals(f, g, grid, a.reshape(x0.shape), ms, nodes, weights, p, diff=True)[:2] r = r.flatten() err = abs(r).max() t2 = time.time() if verbose: print(fmt_str.format(it, err, t2-t1)) if err > tol: a -= scipy.sparse.linalg.spsolve(da, r) if verbose: print(stars) return ms
[ "def", "nonlinear_system", "(", "model", ",", "initial_dr", "=", "None", ",", "maxit", "=", "10", ",", "tol", "=", "1e-8", ",", "grid", "=", "{", "}", ",", "distribution", "=", "{", "}", ",", "verbose", "=", "True", ")", ":", "if", "verbose", ":", "headline", "=", "'|{0:^4} | {1:10} | {2:8} |'", "headline", "=", "headline", ".", "format", "(", "'N'", ",", "' Error'", ",", "'Time'", ")", "stars", "=", "'-'", "*", "len", "(", "headline", ")", "print", "(", "stars", ")", "print", "(", "headline", ")", "print", "(", "stars", ")", "# format string for within loop", "fmt_str", "=", "'|{0:4} | {1:10.3e} | {2:8.3f} |'", "f", "=", "model", ".", "functions", "[", "'arbitrage'", "]", "g", "=", "model", ".", "functions", "[", "'transition'", "]", "p", "=", "model", ".", "calibration", "[", "'parameters'", "]", "distrib", "=", "model", ".", "get_distribution", "(", "*", "*", "distribution", ")", "nodes", ",", "weights", "=", "distrib", ".", "discretize", "(", ")", "approx", "=", "model", ".", "get_grid", "(", "*", "*", "grid", ")", "ms", "=", "create_interpolator", "(", "approx", ",", "approx", ".", "interpolation", ")", "grid", "=", "ms", ".", "grid", "if", "initial_dr", "is", "None", ":", "dr", "=", "approximate_controls", "(", "model", ")", "else", ":", "dr", "=", "initial_dr", "ms", ".", "set_values", "(", "dr", "(", "grid", ")", ")", "x", "=", "dr", "(", "grid", ")", "x0", "=", "x", ".", "copy", "(", ")", "it", "=", "0", "err", "=", "10", "a0", "=", "x0", ".", "copy", "(", ")", ".", "reshape", "(", "(", "x0", ".", "shape", "[", "0", "]", "*", "x0", ".", "shape", "[", "1", "]", ",", ")", ")", "a", "=", "a0", ".", "copy", "(", ")", "while", "err", ">", "tol", "and", "it", "<", "maxit", ":", "it", "+=", "1", "t1", "=", "time", ".", "time", "(", ")", "r", ",", "da", "=", "residuals", "(", "f", ",", "g", ",", "grid", ",", "a", ".", "reshape", "(", "x0", ".", "shape", ")", ",", "ms", ",", "nodes", ",", "weights", ",", "p", ",", "diff", "=", "True", ")", "[", ":", "2", "]", 
"r", "=", "r", ".", "flatten", "(", ")", "err", "=", "abs", "(", "r", ")", ".", "max", "(", ")", "t2", "=", "time", ".", "time", "(", ")", "if", "verbose", ":", "print", "(", "fmt_str", ".", "format", "(", "it", ",", "err", ",", "t2", "-", "t1", ")", ")", "if", "err", ">", "tol", ":", "a", "-=", "scipy", ".", "sparse", ".", "linalg", ".", "spsolve", "(", "da", ",", "r", ")", "if", "verbose", ":", "print", "(", "stars", ")", "return", "ms" ]
Finds a global solution for ``model`` by solving one large system of equations using a simple newton algorithm. Parameters ---------- model: NumericModel "dtcscc" model to be solved verbose: boolean if True, display iterations initial_dr: decision rule initial guess for the decision rule maxit: int maximum number of iterationsd tol: tolerance criterium for successive approximations grid: grid options distribution: distribution options Returns ------- decision rule : approximated solution
[ "Finds", "a", "global", "solution", "for", "model", "by", "solving", "one", "large", "system", "of", "equations", "using", "a", "simple", "newton", "algorithm", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/trash/dolo/algos/dtcscc/nonlinearsystem.py#L10-L97
train
232,763
EconForge/dolo
dolo/numeric/discretization/quadrature.py
gauss_hermite_nodes
def gauss_hermite_nodes(orders, sigma, mu=None): ''' Computes the weights and nodes for Gauss Hermite quadrature. Parameters ---------- orders : int, list, array The order of integration used in the quadrature routine sigma : array-like If one dimensional, the variance of the normal distribution being approximated. If multidimensional, the variance-covariance matrix of the multivariate normal process being approximated. Returns ------- x : array Quadrature nodes w : array Quadrature weights ''' if isinstance(orders, int): orders = [orders] import numpy if mu is None: mu = numpy.array( [0]*sigma.shape[0] ) herms = [hermgauss(i) for i in orders] points = [ h[0]*numpy.sqrt(2) for h in herms] weights = [ h[1]/numpy.sqrt( numpy.pi) for h in herms] if len(orders) == 1: # Note: if sigma is 2D, x will always be 2D, even if sigma is only 1x1. # print(points.shape) x = numpy.array(points[0])*numpy.sqrt(float(sigma)) if sigma.ndim==2: x = x[:,None] w = weights[0] return [x,w] else: x = cartesian( points).T from functools import reduce w = reduce( numpy.kron, weights) zero_columns = numpy.where(sigma.sum(axis=0)==0)[0] for i in zero_columns: sigma[i,i] = 1.0 C = numpy.linalg.cholesky(sigma) x = numpy.dot(C, x) + mu[:,numpy.newaxis] x = numpy.ascontiguousarray(x.T) for i in zero_columns: x[:,i] =0 return [x,w]
python
def gauss_hermite_nodes(orders, sigma, mu=None): ''' Computes the weights and nodes for Gauss Hermite quadrature. Parameters ---------- orders : int, list, array The order of integration used in the quadrature routine sigma : array-like If one dimensional, the variance of the normal distribution being approximated. If multidimensional, the variance-covariance matrix of the multivariate normal process being approximated. Returns ------- x : array Quadrature nodes w : array Quadrature weights ''' if isinstance(orders, int): orders = [orders] import numpy if mu is None: mu = numpy.array( [0]*sigma.shape[0] ) herms = [hermgauss(i) for i in orders] points = [ h[0]*numpy.sqrt(2) for h in herms] weights = [ h[1]/numpy.sqrt( numpy.pi) for h in herms] if len(orders) == 1: # Note: if sigma is 2D, x will always be 2D, even if sigma is only 1x1. # print(points.shape) x = numpy.array(points[0])*numpy.sqrt(float(sigma)) if sigma.ndim==2: x = x[:,None] w = weights[0] return [x,w] else: x = cartesian( points).T from functools import reduce w = reduce( numpy.kron, weights) zero_columns = numpy.where(sigma.sum(axis=0)==0)[0] for i in zero_columns: sigma[i,i] = 1.0 C = numpy.linalg.cholesky(sigma) x = numpy.dot(C, x) + mu[:,numpy.newaxis] x = numpy.ascontiguousarray(x.T) for i in zero_columns: x[:,i] =0 return [x,w]
[ "def", "gauss_hermite_nodes", "(", "orders", ",", "sigma", ",", "mu", "=", "None", ")", ":", "if", "isinstance", "(", "orders", ",", "int", ")", ":", "orders", "=", "[", "orders", "]", "import", "numpy", "if", "mu", "is", "None", ":", "mu", "=", "numpy", ".", "array", "(", "[", "0", "]", "*", "sigma", ".", "shape", "[", "0", "]", ")", "herms", "=", "[", "hermgauss", "(", "i", ")", "for", "i", "in", "orders", "]", "points", "=", "[", "h", "[", "0", "]", "*", "numpy", ".", "sqrt", "(", "2", ")", "for", "h", "in", "herms", "]", "weights", "=", "[", "h", "[", "1", "]", "/", "numpy", ".", "sqrt", "(", "numpy", ".", "pi", ")", "for", "h", "in", "herms", "]", "if", "len", "(", "orders", ")", "==", "1", ":", "# Note: if sigma is 2D, x will always be 2D, even if sigma is only 1x1.", "# print(points.shape)", "x", "=", "numpy", ".", "array", "(", "points", "[", "0", "]", ")", "*", "numpy", ".", "sqrt", "(", "float", "(", "sigma", ")", ")", "if", "sigma", ".", "ndim", "==", "2", ":", "x", "=", "x", "[", ":", ",", "None", "]", "w", "=", "weights", "[", "0", "]", "return", "[", "x", ",", "w", "]", "else", ":", "x", "=", "cartesian", "(", "points", ")", ".", "T", "from", "functools", "import", "reduce", "w", "=", "reduce", "(", "numpy", ".", "kron", ",", "weights", ")", "zero_columns", "=", "numpy", ".", "where", "(", "sigma", ".", "sum", "(", "axis", "=", "0", ")", "==", "0", ")", "[", "0", "]", "for", "i", "in", "zero_columns", ":", "sigma", "[", "i", ",", "i", "]", "=", "1.0", "C", "=", "numpy", ".", "linalg", ".", "cholesky", "(", "sigma", ")", "x", "=", "numpy", ".", "dot", "(", "C", ",", "x", ")", "+", "mu", "[", ":", ",", "numpy", ".", "newaxis", "]", "x", "=", "numpy", ".", "ascontiguousarray", "(", "x", ".", "T", ")", "for", "i", "in", "zero_columns", ":", "x", "[", ":", ",", "i", "]", "=", "0", "return", "[", "x", ",", "w", "]" ]
Computes the weights and nodes for Gauss Hermite quadrature. Parameters ---------- orders : int, list, array The order of integration used in the quadrature routine sigma : array-like If one dimensional, the variance of the normal distribution being approximated. If multidimensional, the variance-covariance matrix of the multivariate normal process being approximated. Returns ------- x : array Quadrature nodes w : array Quadrature weights
[ "Computes", "the", "weights", "and", "nodes", "for", "Gauss", "Hermite", "quadrature", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/discretization/quadrature.py#L59-L122
train
232,764
EconForge/dolo
dolo/numeric/optimize/newton.py
newton
def newton(f, x, verbose=False, tol=1e-6, maxit=5, jactype='serial'): """Solve nonlinear system using safeguarded Newton iterations Parameters ---------- Return ------ """ if verbose: print = lambda txt: old_print(txt) else: print = lambda txt: None it = 0 error = 10 converged = False maxbacksteps = 30 x0 = x if jactype == 'sparse': from scipy.sparse.linalg import spsolve as solve elif jactype == 'full': from numpy.linalg import solve else: solve = serial_solve while it<maxit and not converged: [v,dv] = f(x) # TODO: rewrite starting here # print("Time to evaluate {}".format(ss-tt)0) error_0 = abs(v).max() if error_0 < tol: if verbose: print("> System was solved after iteration {}. Residual={}".format(it,error_0)) converged = True else: it += 1 dx = solve(dv, v) # norm_dx = abs(dx).max() for bck in range(maxbacksteps): xx = x - dx*(2**(-bck)) vm = f(xx)[0] err = abs(vm).max() if err < error_0: break x = xx if verbose: print("\t> {} | {} | {}".format(it, err, bck)) if not converged: import warnings warnings.warn("Did not converge") return [x, it]
python
def newton(f, x, verbose=False, tol=1e-6, maxit=5, jactype='serial'): """Solve nonlinear system using safeguarded Newton iterations Parameters ---------- Return ------ """ if verbose: print = lambda txt: old_print(txt) else: print = lambda txt: None it = 0 error = 10 converged = False maxbacksteps = 30 x0 = x if jactype == 'sparse': from scipy.sparse.linalg import spsolve as solve elif jactype == 'full': from numpy.linalg import solve else: solve = serial_solve while it<maxit and not converged: [v,dv] = f(x) # TODO: rewrite starting here # print("Time to evaluate {}".format(ss-tt)0) error_0 = abs(v).max() if error_0 < tol: if verbose: print("> System was solved after iteration {}. Residual={}".format(it,error_0)) converged = True else: it += 1 dx = solve(dv, v) # norm_dx = abs(dx).max() for bck in range(maxbacksteps): xx = x - dx*(2**(-bck)) vm = f(xx)[0] err = abs(vm).max() if err < error_0: break x = xx if verbose: print("\t> {} | {} | {}".format(it, err, bck)) if not converged: import warnings warnings.warn("Did not converge") return [x, it]
[ "def", "newton", "(", "f", ",", "x", ",", "verbose", "=", "False", ",", "tol", "=", "1e-6", ",", "maxit", "=", "5", ",", "jactype", "=", "'serial'", ")", ":", "if", "verbose", ":", "print", "=", "lambda", "txt", ":", "old_print", "(", "txt", ")", "else", ":", "print", "=", "lambda", "txt", ":", "None", "it", "=", "0", "error", "=", "10", "converged", "=", "False", "maxbacksteps", "=", "30", "x0", "=", "x", "if", "jactype", "==", "'sparse'", ":", "from", "scipy", ".", "sparse", ".", "linalg", "import", "spsolve", "as", "solve", "elif", "jactype", "==", "'full'", ":", "from", "numpy", ".", "linalg", "import", "solve", "else", ":", "solve", "=", "serial_solve", "while", "it", "<", "maxit", "and", "not", "converged", ":", "[", "v", ",", "dv", "]", "=", "f", "(", "x", ")", "# TODO: rewrite starting here", "# print(\"Time to evaluate {}\".format(ss-tt)0)", "error_0", "=", "abs", "(", "v", ")", ".", "max", "(", ")", "if", "error_0", "<", "tol", ":", "if", "verbose", ":", "print", "(", "\"> System was solved after iteration {}. Residual={}\"", ".", "format", "(", "it", ",", "error_0", ")", ")", "converged", "=", "True", "else", ":", "it", "+=", "1", "dx", "=", "solve", "(", "dv", ",", "v", ")", "# norm_dx = abs(dx).max()", "for", "bck", "in", "range", "(", "maxbacksteps", ")", ":", "xx", "=", "x", "-", "dx", "*", "(", "2", "**", "(", "-", "bck", ")", ")", "vm", "=", "f", "(", "xx", ")", "[", "0", "]", "err", "=", "abs", "(", "vm", ")", ".", "max", "(", ")", "if", "err", "<", "error_0", ":", "break", "x", "=", "xx", "if", "verbose", ":", "print", "(", "\"\\t> {} | {} | {}\"", ".", "format", "(", "it", ",", "err", ",", "bck", ")", ")", "if", "not", "converged", ":", "import", "warnings", "warnings", ".", "warn", "(", "\"Did not converge\"", ")", "return", "[", "x", ",", "it", "]" ]
Solve nonlinear system using safeguarded Newton iterations Parameters ---------- Return ------
[ "Solve", "nonlinear", "system", "using", "safeguarded", "Newton", "iterations" ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/optimize/newton.py#L81-L151
train
232,765
EconForge/dolo
dolo/numeric/extern/qz.py
qzordered
def qzordered(A,B,crit=1.0): "Eigenvalues bigger than crit are sorted in the top-left." TOL = 1e-10 def select(alpha, beta): return alpha**2>crit*beta**2 [S,T,alpha,beta,U,V] = ordqz(A,B,output='real',sort=select) eigval = abs(numpy.diag(S)/numpy.diag(T)) return [S,T,U,V,eigval]
python
def qzordered(A,B,crit=1.0): "Eigenvalues bigger than crit are sorted in the top-left." TOL = 1e-10 def select(alpha, beta): return alpha**2>crit*beta**2 [S,T,alpha,beta,U,V] = ordqz(A,B,output='real',sort=select) eigval = abs(numpy.diag(S)/numpy.diag(T)) return [S,T,U,V,eigval]
[ "def", "qzordered", "(", "A", ",", "B", ",", "crit", "=", "1.0", ")", ":", "TOL", "=", "1e-10", "def", "select", "(", "alpha", ",", "beta", ")", ":", "return", "alpha", "**", "2", ">", "crit", "*", "beta", "**", "2", "[", "S", ",", "T", ",", "alpha", ",", "beta", ",", "U", ",", "V", "]", "=", "ordqz", "(", "A", ",", "B", ",", "output", "=", "'real'", ",", "sort", "=", "select", ")", "eigval", "=", "abs", "(", "numpy", ".", "diag", "(", "S", ")", "/", "numpy", ".", "diag", "(", "T", ")", ")", "return", "[", "S", ",", "T", ",", "U", ",", "V", ",", "eigval", "]" ]
Eigenvalues bigger than crit are sorted in the top-left.
[ "Eigenvalues", "bigger", "than", "crit", "are", "sorted", "in", "the", "top", "-", "left", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/extern/qz.py#L6-L18
train
232,766
EconForge/dolo
dolo/numeric/extern/qz.py
ordqz
def ordqz(A, B, sort='lhp', output='real', overwrite_a=False, overwrite_b=False, check_finite=True): """ QZ decomposition for a pair of matrices with reordering. .. versionadded:: 0.17.0 Parameters ---------- A : (N, N) array_like 2d array to decompose B : (N, N) array_like 2d array to decompose sort : {callable, 'lhp', 'rhp', 'iuc', 'ouc'}, optional Specifies whether the upper eigenvalues should be sorted. A callable may be passed that, given a eigenvalue, returns a boolean denoting whether the eigenvalue should be sorted to the top-left (True). For real matrix pairs, the sort function takes three real arguments (alphar, alphai, beta). The eigenvalue ``x = (alphar + alphai*1j)/beta``. For complex matrix pairs or output='complex', the sort function takes two complex arguments (alpha, beta). The eigenvalue ``x = (alpha/beta)``. Alternatively, string parameters may be used: - 'lhp' Left-hand plane (x.real < 0.0) - 'rhp' Right-hand plane (x.real > 0.0) - 'iuc' Inside the unit circle (x*x.conjugate() < 1.0) - 'ouc' Outside the unit circle (x*x.conjugate() > 1.0) output : str {'real','complex'}, optional Construct the real or complex QZ decomposition for real matrices. Default is 'real'. overwrite_a : bool, optional If True, the contents of A are overwritten. overwrite_b : bool, optional If True, the contents of B are overwritten. check_finite : bool, optional If true checks the elements of `A` and `B` are finite numbers. If false does no checking and passes matrix through to underlying algorithm. Returns ------- AA : (N, N) ndarray Generalized Schur form of A. BB : (N, N) ndarray Generalized Schur form of B. alpha : (N,) ndarray alpha = alphar + alphai * 1j. See notes. beta : (N,) ndarray See notes. Q : (N, N) ndarray The left Schur vectors. Z : (N, N) ndarray The right Schur vectors. Notes ----- On exit, ``(ALPHAR(j) + ALPHAI(j)*i)/BETA(j), j=1,...,N``, will be the generalized eigenvalues. 
``ALPHAR(j) + ALPHAI(j)*i`` and ``BETA(j),j=1,...,N`` are the diagonals of the complex Schur form (S,T) that would result if the 2-by-2 diagonal blocks of the real generalized Schur form of (A,B) were further reduced to triangular form using complex unitary transformations. If ALPHAI(j) is zero, then the j-th eigenvalue is real; if positive, then the ``j``-th and ``(j+1)``-st eigenvalues are a complex conjugate pair, with ``ALPHAI(j+1)`` negative. See also -------- qz """ import warnings import numpy as np from numpy import asarray_chkfinite from scipy.linalg.misc import LinAlgError, _datacopied from scipy.linalg.lapack import get_lapack_funcs from scipy._lib.six import callable from scipy.linalg._decomp_qz import _qz, _select_function #NOTE: should users be able to set these? lwork = None result, typ = _qz(A, B, output=output, lwork=lwork, sort=None, overwrite_a=overwrite_a, overwrite_b=overwrite_b, check_finite=check_finite) AA, BB, Q, Z = result[0], result[1], result[-4], result[-3] if typ not in 'cz': alpha, beta = result[3] + result[4]*1.j, result[5] else: alpha, beta = result[3], result[4] sfunction = _select_function(sort) select = sfunction(alpha, beta) tgsen, = get_lapack_funcs(('tgsen',), (AA, BB)) if lwork is None or lwork == -1: result = tgsen(select, AA, BB, Q, Z, lwork=-1) lwork = result[-3][0].real.astype(np.int) # looks like wrong value passed to ZTGSYL if not lwork += 1 liwork = None if liwork is None or liwork == -1: result = tgsen(select, AA, BB, Q, Z, liwork=-1) liwork = result[-2][0] result = tgsen(select, AA, BB, Q, Z, lwork=lwork, liwork=liwork) info = result[-1] if info < 0: raise ValueError("Illegal value in argument %d of tgsen" % -info) elif info == 1: raise ValueError("Reordering of (A, B) failed because the transformed" " matrix pair (A, B) would be too far from " "generalized Schur form; the problem is very " "ill-conditioned. (A, B) may have been partially " "reorded. 
If requested, 0 is returned in DIF(*), " "PL, and PR.") # for real results has a, b, alphar, alphai, beta, q, z, m, pl, pr, dif, # work, iwork, info if typ in ['f', 'd']: alpha = result[2] + result[3] * 1.j return (result[0], result[1], alpha, result[4], result[5], result[6]) # for complex results has a, b, alpha, beta, q, z, m, pl, pr, dif, work, # iwork, info else: return result[0], result[1], result[2], result[3], result[4], result[5]
python
def ordqz(A, B, sort='lhp', output='real', overwrite_a=False, overwrite_b=False, check_finite=True): """ QZ decomposition for a pair of matrices with reordering. .. versionadded:: 0.17.0 Parameters ---------- A : (N, N) array_like 2d array to decompose B : (N, N) array_like 2d array to decompose sort : {callable, 'lhp', 'rhp', 'iuc', 'ouc'}, optional Specifies whether the upper eigenvalues should be sorted. A callable may be passed that, given a eigenvalue, returns a boolean denoting whether the eigenvalue should be sorted to the top-left (True). For real matrix pairs, the sort function takes three real arguments (alphar, alphai, beta). The eigenvalue ``x = (alphar + alphai*1j)/beta``. For complex matrix pairs or output='complex', the sort function takes two complex arguments (alpha, beta). The eigenvalue ``x = (alpha/beta)``. Alternatively, string parameters may be used: - 'lhp' Left-hand plane (x.real < 0.0) - 'rhp' Right-hand plane (x.real > 0.0) - 'iuc' Inside the unit circle (x*x.conjugate() < 1.0) - 'ouc' Outside the unit circle (x*x.conjugate() > 1.0) output : str {'real','complex'}, optional Construct the real or complex QZ decomposition for real matrices. Default is 'real'. overwrite_a : bool, optional If True, the contents of A are overwritten. overwrite_b : bool, optional If True, the contents of B are overwritten. check_finite : bool, optional If true checks the elements of `A` and `B` are finite numbers. If false does no checking and passes matrix through to underlying algorithm. Returns ------- AA : (N, N) ndarray Generalized Schur form of A. BB : (N, N) ndarray Generalized Schur form of B. alpha : (N,) ndarray alpha = alphar + alphai * 1j. See notes. beta : (N,) ndarray See notes. Q : (N, N) ndarray The left Schur vectors. Z : (N, N) ndarray The right Schur vectors. Notes ----- On exit, ``(ALPHAR(j) + ALPHAI(j)*i)/BETA(j), j=1,...,N``, will be the generalized eigenvalues. 
``ALPHAR(j) + ALPHAI(j)*i`` and ``BETA(j),j=1,...,N`` are the diagonals of the complex Schur form (S,T) that would result if the 2-by-2 diagonal blocks of the real generalized Schur form of (A,B) were further reduced to triangular form using complex unitary transformations. If ALPHAI(j) is zero, then the j-th eigenvalue is real; if positive, then the ``j``-th and ``(j+1)``-st eigenvalues are a complex conjugate pair, with ``ALPHAI(j+1)`` negative. See also -------- qz """ import warnings import numpy as np from numpy import asarray_chkfinite from scipy.linalg.misc import LinAlgError, _datacopied from scipy.linalg.lapack import get_lapack_funcs from scipy._lib.six import callable from scipy.linalg._decomp_qz import _qz, _select_function #NOTE: should users be able to set these? lwork = None result, typ = _qz(A, B, output=output, lwork=lwork, sort=None, overwrite_a=overwrite_a, overwrite_b=overwrite_b, check_finite=check_finite) AA, BB, Q, Z = result[0], result[1], result[-4], result[-3] if typ not in 'cz': alpha, beta = result[3] + result[4]*1.j, result[5] else: alpha, beta = result[3], result[4] sfunction = _select_function(sort) select = sfunction(alpha, beta) tgsen, = get_lapack_funcs(('tgsen',), (AA, BB)) if lwork is None or lwork == -1: result = tgsen(select, AA, BB, Q, Z, lwork=-1) lwork = result[-3][0].real.astype(np.int) # looks like wrong value passed to ZTGSYL if not lwork += 1 liwork = None if liwork is None or liwork == -1: result = tgsen(select, AA, BB, Q, Z, liwork=-1) liwork = result[-2][0] result = tgsen(select, AA, BB, Q, Z, lwork=lwork, liwork=liwork) info = result[-1] if info < 0: raise ValueError("Illegal value in argument %d of tgsen" % -info) elif info == 1: raise ValueError("Reordering of (A, B) failed because the transformed" " matrix pair (A, B) would be too far from " "generalized Schur form; the problem is very " "ill-conditioned. (A, B) may have been partially " "reorded. 
If requested, 0 is returned in DIF(*), " "PL, and PR.") # for real results has a, b, alphar, alphai, beta, q, z, m, pl, pr, dif, # work, iwork, info if typ in ['f', 'd']: alpha = result[2] + result[3] * 1.j return (result[0], result[1], alpha, result[4], result[5], result[6]) # for complex results has a, b, alpha, beta, q, z, m, pl, pr, dif, work, # iwork, info else: return result[0], result[1], result[2], result[3], result[4], result[5]
[ "def", "ordqz", "(", "A", ",", "B", ",", "sort", "=", "'lhp'", ",", "output", "=", "'real'", ",", "overwrite_a", "=", "False", ",", "overwrite_b", "=", "False", ",", "check_finite", "=", "True", ")", ":", "import", "warnings", "import", "numpy", "as", "np", "from", "numpy", "import", "asarray_chkfinite", "from", "scipy", ".", "linalg", ".", "misc", "import", "LinAlgError", ",", "_datacopied", "from", "scipy", ".", "linalg", ".", "lapack", "import", "get_lapack_funcs", "from", "scipy", ".", "_lib", ".", "six", "import", "callable", "from", "scipy", ".", "linalg", ".", "_decomp_qz", "import", "_qz", ",", "_select_function", "#NOTE: should users be able to set these?", "lwork", "=", "None", "result", ",", "typ", "=", "_qz", "(", "A", ",", "B", ",", "output", "=", "output", ",", "lwork", "=", "lwork", ",", "sort", "=", "None", ",", "overwrite_a", "=", "overwrite_a", ",", "overwrite_b", "=", "overwrite_b", ",", "check_finite", "=", "check_finite", ")", "AA", ",", "BB", ",", "Q", ",", "Z", "=", "result", "[", "0", "]", ",", "result", "[", "1", "]", ",", "result", "[", "-", "4", "]", ",", "result", "[", "-", "3", "]", "if", "typ", "not", "in", "'cz'", ":", "alpha", ",", "beta", "=", "result", "[", "3", "]", "+", "result", "[", "4", "]", "*", "1.j", ",", "result", "[", "5", "]", "else", ":", "alpha", ",", "beta", "=", "result", "[", "3", "]", ",", "result", "[", "4", "]", "sfunction", "=", "_select_function", "(", "sort", ")", "select", "=", "sfunction", "(", "alpha", ",", "beta", ")", "tgsen", ",", "=", "get_lapack_funcs", "(", "(", "'tgsen'", ",", ")", ",", "(", "AA", ",", "BB", ")", ")", "if", "lwork", "is", "None", "or", "lwork", "==", "-", "1", ":", "result", "=", "tgsen", "(", "select", ",", "AA", ",", "BB", ",", "Q", ",", "Z", ",", "lwork", "=", "-", "1", ")", "lwork", "=", "result", "[", "-", "3", "]", "[", "0", "]", ".", "real", ".", "astype", "(", "np", ".", "int", ")", "# looks like wrong value passed to ZTGSYL if not", "lwork", "+=", "1", "liwork", "=", 
"None", "if", "liwork", "is", "None", "or", "liwork", "==", "-", "1", ":", "result", "=", "tgsen", "(", "select", ",", "AA", ",", "BB", ",", "Q", ",", "Z", ",", "liwork", "=", "-", "1", ")", "liwork", "=", "result", "[", "-", "2", "]", "[", "0", "]", "result", "=", "tgsen", "(", "select", ",", "AA", ",", "BB", ",", "Q", ",", "Z", ",", "lwork", "=", "lwork", ",", "liwork", "=", "liwork", ")", "info", "=", "result", "[", "-", "1", "]", "if", "info", "<", "0", ":", "raise", "ValueError", "(", "\"Illegal value in argument %d of tgsen\"", "%", "-", "info", ")", "elif", "info", "==", "1", ":", "raise", "ValueError", "(", "\"Reordering of (A, B) failed because the transformed\"", "\" matrix pair (A, B) would be too far from \"", "\"generalized Schur form; the problem is very \"", "\"ill-conditioned. (A, B) may have been partially \"", "\"reorded. If requested, 0 is returned in DIF(*), \"", "\"PL, and PR.\"", ")", "# for real results has a, b, alphar, alphai, beta, q, z, m, pl, pr, dif,", "# work, iwork, info", "if", "typ", "in", "[", "'f'", ",", "'d'", "]", ":", "alpha", "=", "result", "[", "2", "]", "+", "result", "[", "3", "]", "*", "1.j", "return", "(", "result", "[", "0", "]", ",", "result", "[", "1", "]", ",", "alpha", ",", "result", "[", "4", "]", ",", "result", "[", "5", "]", ",", "result", "[", "6", "]", ")", "# for complex results has a, b, alpha, beta, q, z, m, pl, pr, dif, work,", "# iwork, info", "else", ":", "return", "result", "[", "0", "]", ",", "result", "[", "1", "]", ",", "result", "[", "2", "]", ",", "result", "[", "3", "]", ",", "result", "[", "4", "]", ",", "result", "[", "5", "]" ]
QZ decomposition for a pair of matrices with reordering. .. versionadded:: 0.17.0 Parameters ---------- A : (N, N) array_like 2d array to decompose B : (N, N) array_like 2d array to decompose sort : {callable, 'lhp', 'rhp', 'iuc', 'ouc'}, optional Specifies whether the upper eigenvalues should be sorted. A callable may be passed that, given a eigenvalue, returns a boolean denoting whether the eigenvalue should be sorted to the top-left (True). For real matrix pairs, the sort function takes three real arguments (alphar, alphai, beta). The eigenvalue ``x = (alphar + alphai*1j)/beta``. For complex matrix pairs or output='complex', the sort function takes two complex arguments (alpha, beta). The eigenvalue ``x = (alpha/beta)``. Alternatively, string parameters may be used: - 'lhp' Left-hand plane (x.real < 0.0) - 'rhp' Right-hand plane (x.real > 0.0) - 'iuc' Inside the unit circle (x*x.conjugate() < 1.0) - 'ouc' Outside the unit circle (x*x.conjugate() > 1.0) output : str {'real','complex'}, optional Construct the real or complex QZ decomposition for real matrices. Default is 'real'. overwrite_a : bool, optional If True, the contents of A are overwritten. overwrite_b : bool, optional If True, the contents of B are overwritten. check_finite : bool, optional If true checks the elements of `A` and `B` are finite numbers. If false does no checking and passes matrix through to underlying algorithm. Returns ------- AA : (N, N) ndarray Generalized Schur form of A. BB : (N, N) ndarray Generalized Schur form of B. alpha : (N,) ndarray alpha = alphar + alphai * 1j. See notes. beta : (N,) ndarray See notes. Q : (N, N) ndarray The left Schur vectors. Z : (N, N) ndarray The right Schur vectors. Notes ----- On exit, ``(ALPHAR(j) + ALPHAI(j)*i)/BETA(j), j=1,...,N``, will be the generalized eigenvalues. 
``ALPHAR(j) + ALPHAI(j)*i`` and ``BETA(j),j=1,...,N`` are the diagonals of the complex Schur form (S,T) that would result if the 2-by-2 diagonal blocks of the real generalized Schur form of (A,B) were further reduced to triangular form using complex unitary transformations. If ALPHAI(j) is zero, then the j-th eigenvalue is real; if positive, then the ``j``-th and ``(j+1)``-st eigenvalues are a complex conjugate pair, with ``ALPHAI(j+1)`` negative. See also -------- qz
[ "QZ", "decomposition", "for", "a", "pair", "of", "matrices", "with", "reordering", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/extern/qz.py#L21-L154
train
232,767
EconForge/dolo
trash/dolo/algos/dtcscc/time_iteration_2.py
parameterized_expectations_direct
def parameterized_expectations_direct(model, verbose=False, initial_dr=None, pert_order=1, grid={}, distribution={}, maxit=100, tol=1e-8): ''' Finds a global solution for ``model`` using parameterized expectations function. Requires the model to be written with controls as a direct function of the model objects. The algorithm iterates on the expectations function in the arbitrage equation. It follows the discussion in section 9.9 of Miranda and Fackler (2002). Parameters ---------- model : NumericModel "dtcscc" model to be solved verbose : boolean if True, display iterations initial_dr : decision rule initial guess for the decision rule pert_order : {1} if no initial guess is supplied, the perturbation solution at order ``pert_order`` is used as initial guess grid: grid options distribution: distribution options maxit: maximum number of iterations tol: tolerance criterium for successive approximations Returns ------- decision rule : approximated solution ''' t1 = time.time() g = model.functions['transition'] d = model.functions['direct_response'] h = model.functions['expectation'] parms = model.calibration['parameters'] if initial_dr is None: if pert_order == 1: initial_dr = approximate_controls(model) if pert_order > 1: raise Exception("Perturbation order > 1 not supported (yet).") approx = model.get_grid(**grid) grid = approx.grid interp_type = approx.interpolation dr = create_interpolator(approx, interp_type) expect = create_interpolator(approx, interp_type) distrib = model.get_distribution(**distribution) nodes, weights = distrib.discretize() N = grid.shape[0] z = np.zeros((N, len(model.symbols['expectations']))) x_0 = initial_dr(grid) x_0 = x_0.real # just in case ... 
h_0 = h(grid, x_0, parms) it = 0 err = 10 err_0 = 10 if verbose: headline = '|{0:^4} | {1:10} | {2:8} | {3:8} |' headline = headline.format('N', ' Error', 'Gain', 'Time') stars = '-'*len(headline) print(stars) print(headline) print(stars) # format string for within loop fmt_str = '|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} |' while err > tol and it <= maxit: it += 1 t_start = time.time() # dr.set_values(x_0) expect.set_values(h_0) z[...] = 0 for i in range(weights.shape[0]): e = nodes[i, :] S = g(grid, x_0, e, parms) # evaluate expectation over the future state z += weights[i]*expect(S) # TODO: check that control is admissible new_x = d(grid, z, parms) new_h = h(grid, new_x, parms) # update error err = (abs(new_h - h_0).max()) # Update guess for decision rule and expectations function x_0 = new_x h_0 = new_h # print error information if `verbose` err_SA = err/err_0 err_0 = err t_finish = time.time() elapsed = t_finish - t_start if verbose: print(fmt_str.format(it, err, err_SA, elapsed)) if it == maxit: import warnings warnings.warn(UserWarning("Maximum number of iterations reached")) # compute final fime and do final printout if `verbose` t2 = time.time() if verbose: print(stars) print('Elapsed: {} seconds.'.format(t2 - t1)) print(stars) # Interpolation for the decision rule dr.set_values(x_0) return dr
python
def parameterized_expectations_direct(model, verbose=False, initial_dr=None, pert_order=1, grid={}, distribution={}, maxit=100, tol=1e-8): ''' Finds a global solution for ``model`` using parameterized expectations function. Requires the model to be written with controls as a direct function of the model objects. The algorithm iterates on the expectations function in the arbitrage equation. It follows the discussion in section 9.9 of Miranda and Fackler (2002). Parameters ---------- model : NumericModel "dtcscc" model to be solved verbose : boolean if True, display iterations initial_dr : decision rule initial guess for the decision rule pert_order : {1} if no initial guess is supplied, the perturbation solution at order ``pert_order`` is used as initial guess grid: grid options distribution: distribution options maxit: maximum number of iterations tol: tolerance criterium for successive approximations Returns ------- decision rule : approximated solution ''' t1 = time.time() g = model.functions['transition'] d = model.functions['direct_response'] h = model.functions['expectation'] parms = model.calibration['parameters'] if initial_dr is None: if pert_order == 1: initial_dr = approximate_controls(model) if pert_order > 1: raise Exception("Perturbation order > 1 not supported (yet).") approx = model.get_grid(**grid) grid = approx.grid interp_type = approx.interpolation dr = create_interpolator(approx, interp_type) expect = create_interpolator(approx, interp_type) distrib = model.get_distribution(**distribution) nodes, weights = distrib.discretize() N = grid.shape[0] z = np.zeros((N, len(model.symbols['expectations']))) x_0 = initial_dr(grid) x_0 = x_0.real # just in case ... 
h_0 = h(grid, x_0, parms) it = 0 err = 10 err_0 = 10 if verbose: headline = '|{0:^4} | {1:10} | {2:8} | {3:8} |' headline = headline.format('N', ' Error', 'Gain', 'Time') stars = '-'*len(headline) print(stars) print(headline) print(stars) # format string for within loop fmt_str = '|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} |' while err > tol and it <= maxit: it += 1 t_start = time.time() # dr.set_values(x_0) expect.set_values(h_0) z[...] = 0 for i in range(weights.shape[0]): e = nodes[i, :] S = g(grid, x_0, e, parms) # evaluate expectation over the future state z += weights[i]*expect(S) # TODO: check that control is admissible new_x = d(grid, z, parms) new_h = h(grid, new_x, parms) # update error err = (abs(new_h - h_0).max()) # Update guess for decision rule and expectations function x_0 = new_x h_0 = new_h # print error information if `verbose` err_SA = err/err_0 err_0 = err t_finish = time.time() elapsed = t_finish - t_start if verbose: print(fmt_str.format(it, err, err_SA, elapsed)) if it == maxit: import warnings warnings.warn(UserWarning("Maximum number of iterations reached")) # compute final fime and do final printout if `verbose` t2 = time.time() if verbose: print(stars) print('Elapsed: {} seconds.'.format(t2 - t1)) print(stars) # Interpolation for the decision rule dr.set_values(x_0) return dr
[ "def", "parameterized_expectations_direct", "(", "model", ",", "verbose", "=", "False", ",", "initial_dr", "=", "None", ",", "pert_order", "=", "1", ",", "grid", "=", "{", "}", ",", "distribution", "=", "{", "}", ",", "maxit", "=", "100", ",", "tol", "=", "1e-8", ")", ":", "t1", "=", "time", ".", "time", "(", ")", "g", "=", "model", ".", "functions", "[", "'transition'", "]", "d", "=", "model", ".", "functions", "[", "'direct_response'", "]", "h", "=", "model", ".", "functions", "[", "'expectation'", "]", "parms", "=", "model", ".", "calibration", "[", "'parameters'", "]", "if", "initial_dr", "is", "None", ":", "if", "pert_order", "==", "1", ":", "initial_dr", "=", "approximate_controls", "(", "model", ")", "if", "pert_order", ">", "1", ":", "raise", "Exception", "(", "\"Perturbation order > 1 not supported (yet).\"", ")", "approx", "=", "model", ".", "get_grid", "(", "*", "*", "grid", ")", "grid", "=", "approx", ".", "grid", "interp_type", "=", "approx", ".", "interpolation", "dr", "=", "create_interpolator", "(", "approx", ",", "interp_type", ")", "expect", "=", "create_interpolator", "(", "approx", ",", "interp_type", ")", "distrib", "=", "model", ".", "get_distribution", "(", "*", "*", "distribution", ")", "nodes", ",", "weights", "=", "distrib", ".", "discretize", "(", ")", "N", "=", "grid", ".", "shape", "[", "0", "]", "z", "=", "np", ".", "zeros", "(", "(", "N", ",", "len", "(", "model", ".", "symbols", "[", "'expectations'", "]", ")", ")", ")", "x_0", "=", "initial_dr", "(", "grid", ")", "x_0", "=", "x_0", ".", "real", "# just in case ...", "h_0", "=", "h", "(", "grid", ",", "x_0", ",", "parms", ")", "it", "=", "0", "err", "=", "10", "err_0", "=", "10", "if", "verbose", ":", "headline", "=", "'|{0:^4} | {1:10} | {2:8} | {3:8} |'", "headline", "=", "headline", ".", "format", "(", "'N'", ",", "' Error'", ",", "'Gain'", ",", "'Time'", ")", "stars", "=", "'-'", "*", "len", "(", "headline", ")", "print", "(", "stars", ")", "print", "(", "headline", ")", 
"print", "(", "stars", ")", "# format string for within loop", "fmt_str", "=", "'|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} |'", "while", "err", ">", "tol", "and", "it", "<=", "maxit", ":", "it", "+=", "1", "t_start", "=", "time", ".", "time", "(", ")", "# dr.set_values(x_0)", "expect", ".", "set_values", "(", "h_0", ")", "z", "[", "...", "]", "=", "0", "for", "i", "in", "range", "(", "weights", ".", "shape", "[", "0", "]", ")", ":", "e", "=", "nodes", "[", "i", ",", ":", "]", "S", "=", "g", "(", "grid", ",", "x_0", ",", "e", ",", "parms", ")", "# evaluate expectation over the future state", "z", "+=", "weights", "[", "i", "]", "*", "expect", "(", "S", ")", "# TODO: check that control is admissible", "new_x", "=", "d", "(", "grid", ",", "z", ",", "parms", ")", "new_h", "=", "h", "(", "grid", ",", "new_x", ",", "parms", ")", "# update error", "err", "=", "(", "abs", "(", "new_h", "-", "h_0", ")", ".", "max", "(", ")", ")", "# Update guess for decision rule and expectations function", "x_0", "=", "new_x", "h_0", "=", "new_h", "# print error information if `verbose`", "err_SA", "=", "err", "/", "err_0", "err_0", "=", "err", "t_finish", "=", "time", ".", "time", "(", ")", "elapsed", "=", "t_finish", "-", "t_start", "if", "verbose", ":", "print", "(", "fmt_str", ".", "format", "(", "it", ",", "err", ",", "err_SA", ",", "elapsed", ")", ")", "if", "it", "==", "maxit", ":", "import", "warnings", "warnings", ".", "warn", "(", "UserWarning", "(", "\"Maximum number of iterations reached\"", ")", ")", "# compute final fime and do final printout if `verbose`", "t2", "=", "time", ".", "time", "(", ")", "if", "verbose", ":", "print", "(", "stars", ")", "print", "(", "'Elapsed: {} seconds.'", ".", "format", "(", "t2", "-", "t1", ")", ")", "print", "(", "stars", ")", "# Interpolation for the decision rule", "dr", ".", "set_values", "(", "x_0", ")", "return", "dr" ]
Finds a global solution for ``model`` using parameterized expectations function. Requires the model to be written with controls as a direct function of the model objects. The algorithm iterates on the expectations function in the arbitrage equation. It follows the discussion in section 9.9 of Miranda and Fackler (2002). Parameters ---------- model : NumericModel "dtcscc" model to be solved verbose : boolean if True, display iterations initial_dr : decision rule initial guess for the decision rule pert_order : {1} if no initial guess is supplied, the perturbation solution at order ``pert_order`` is used as initial guess grid: grid options distribution: distribution options maxit: maximum number of iterations tol: tolerance criterium for successive approximations Returns ------- decision rule : approximated solution
[ "Finds", "a", "global", "solution", "for", "model", "using", "parameterized", "expectations", "function", ".", "Requires", "the", "model", "to", "be", "written", "with", "controls", "as", "a", "direct", "function", "of", "the", "model", "objects", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/trash/dolo/algos/dtcscc/time_iteration_2.py#L186-L312
train
232,768
EconForge/dolo
dolo/compiler/misc.py
numdiff
def numdiff(fun, args): """Vectorized numerical differentiation""" # vectorized version epsilon = 1e-8 args = list(args) v0 = fun(*args) N = v0.shape[0] l_v = len(v0) dvs = [] for i, a in enumerate(args): l_a = (a).shape[1] dv = numpy.zeros((N, l_v, l_a)) nargs = list(args) #.copy() for j in range(l_a): xx = args[i].copy() xx[:, j] += epsilon nargs[i] = xx dv[:, :, j] = (fun(*nargs) - v0) / epsilon dvs.append(dv) return [v0] + dvs
python
def numdiff(fun, args): """Vectorized numerical differentiation""" # vectorized version epsilon = 1e-8 args = list(args) v0 = fun(*args) N = v0.shape[0] l_v = len(v0) dvs = [] for i, a in enumerate(args): l_a = (a).shape[1] dv = numpy.zeros((N, l_v, l_a)) nargs = list(args) #.copy() for j in range(l_a): xx = args[i].copy() xx[:, j] += epsilon nargs[i] = xx dv[:, :, j] = (fun(*nargs) - v0) / epsilon dvs.append(dv) return [v0] + dvs
[ "def", "numdiff", "(", "fun", ",", "args", ")", ":", "# vectorized version", "epsilon", "=", "1e-8", "args", "=", "list", "(", "args", ")", "v0", "=", "fun", "(", "*", "args", ")", "N", "=", "v0", ".", "shape", "[", "0", "]", "l_v", "=", "len", "(", "v0", ")", "dvs", "=", "[", "]", "for", "i", ",", "a", "in", "enumerate", "(", "args", ")", ":", "l_a", "=", "(", "a", ")", ".", "shape", "[", "1", "]", "dv", "=", "numpy", ".", "zeros", "(", "(", "N", ",", "l_v", ",", "l_a", ")", ")", "nargs", "=", "list", "(", "args", ")", "#.copy()", "for", "j", "in", "range", "(", "l_a", ")", ":", "xx", "=", "args", "[", "i", "]", ".", "copy", "(", ")", "xx", "[", ":", ",", "j", "]", "+=", "epsilon", "nargs", "[", "i", "]", "=", "xx", "dv", "[", ":", ",", ":", ",", "j", "]", "=", "(", "fun", "(", "*", "nargs", ")", "-", "v0", ")", "/", "epsilon", "dvs", ".", "append", "(", "dv", ")", "return", "[", "v0", "]", "+", "dvs" ]
Vectorized numerical differentiation
[ "Vectorized", "numerical", "differentiation" ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/compiler/misc.py#L97-L118
train
232,769
EconForge/dolo
dolo/numeric/filters.py
bandpass_filter
def bandpass_filter(data, k, w1, w2): """ This function will apply a bandpass filter to data. It will be kth order and will select the band between w1 and w2. Parameters ---------- data: array, dtype=float The data you wish to filter k: number, int The order of approximation for the filter. A max value for this isdata.size/2 w1: number, float This is the lower bound for which frequencies will pass through. w2: number, float This is the upper bound for which frequencies will pass through. Returns ------- y: array, dtype=float The filtered data. """ data = np.asarray(data) low_w = np.pi * 2 / w2 high_w = np.pi * 2 / w1 bweights = np.zeros(2 * k + 1) bweights[k] = (high_w - low_w) / np.pi j = np.arange(1, int(k) + 1) weights = 1 / (np.pi * j) * (sin(high_w * j) - sin(low_w * j)) bweights[k + j] = weights bweights[:k] = weights[::-1] bweights -= bweights.mean() return fftconvolve(bweights, data, mode='valid')
python
def bandpass_filter(data, k, w1, w2): """ This function will apply a bandpass filter to data. It will be kth order and will select the band between w1 and w2. Parameters ---------- data: array, dtype=float The data you wish to filter k: number, int The order of approximation for the filter. A max value for this isdata.size/2 w1: number, float This is the lower bound for which frequencies will pass through. w2: number, float This is the upper bound for which frequencies will pass through. Returns ------- y: array, dtype=float The filtered data. """ data = np.asarray(data) low_w = np.pi * 2 / w2 high_w = np.pi * 2 / w1 bweights = np.zeros(2 * k + 1) bweights[k] = (high_w - low_w) / np.pi j = np.arange(1, int(k) + 1) weights = 1 / (np.pi * j) * (sin(high_w * j) - sin(low_w * j)) bweights[k + j] = weights bweights[:k] = weights[::-1] bweights -= bweights.mean() return fftconvolve(bweights, data, mode='valid')
[ "def", "bandpass_filter", "(", "data", ",", "k", ",", "w1", ",", "w2", ")", ":", "data", "=", "np", ".", "asarray", "(", "data", ")", "low_w", "=", "np", ".", "pi", "*", "2", "/", "w2", "high_w", "=", "np", ".", "pi", "*", "2", "/", "w1", "bweights", "=", "np", ".", "zeros", "(", "2", "*", "k", "+", "1", ")", "bweights", "[", "k", "]", "=", "(", "high_w", "-", "low_w", ")", "/", "np", ".", "pi", "j", "=", "np", ".", "arange", "(", "1", ",", "int", "(", "k", ")", "+", "1", ")", "weights", "=", "1", "/", "(", "np", ".", "pi", "*", "j", ")", "*", "(", "sin", "(", "high_w", "*", "j", ")", "-", "sin", "(", "low_w", "*", "j", ")", ")", "bweights", "[", "k", "+", "j", "]", "=", "weights", "bweights", "[", ":", "k", "]", "=", "weights", "[", ":", ":", "-", "1", "]", "bweights", "-=", "bweights", ".", "mean", "(", ")", "return", "fftconvolve", "(", "bweights", ",", "data", ",", "mode", "=", "'valid'", ")" ]
This function will apply a bandpass filter to data. It will be kth order and will select the band between w1 and w2. Parameters ---------- data: array, dtype=float The data you wish to filter k: number, int The order of approximation for the filter. A max value for this isdata.size/2 w1: number, float This is the lower bound for which frequencies will pass through. w2: number, float This is the upper bound for which frequencies will pass through. Returns ------- y: array, dtype=float The filtered data.
[ "This", "function", "will", "apply", "a", "bandpass", "filter", "to", "data", ".", "It", "will", "be", "kth", "order", "and", "will", "select", "the", "band", "between", "w1", "and", "w2", "." ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/numeric/filters.py#L83-L119
train
232,770
EconForge/dolo
dolo/misc/dprint.py
dprint
def dprint(s): '''Prints `s` with additional debugging informations''' import inspect frameinfo = inspect.stack()[1] callerframe = frameinfo.frame d = callerframe.f_locals if (isinstance(s,str)): val = eval(s, d) else: val = s cc = frameinfo.code_context[0] import re regex = re.compile("dprint\((.*)\)") res = regex.search(cc) s = res.group(1) text = '' text += bcolors.OKBLUE + "At <{}>\n".format(str(frameinfo)) + bcolors.ENDC text += bcolors.WARNING + "{}: ".format(s) + bcolors.ENDC text += str(val) text += str() print(text)
python
def dprint(s): '''Prints `s` with additional debugging informations''' import inspect frameinfo = inspect.stack()[1] callerframe = frameinfo.frame d = callerframe.f_locals if (isinstance(s,str)): val = eval(s, d) else: val = s cc = frameinfo.code_context[0] import re regex = re.compile("dprint\((.*)\)") res = regex.search(cc) s = res.group(1) text = '' text += bcolors.OKBLUE + "At <{}>\n".format(str(frameinfo)) + bcolors.ENDC text += bcolors.WARNING + "{}: ".format(s) + bcolors.ENDC text += str(val) text += str() print(text)
[ "def", "dprint", "(", "s", ")", ":", "import", "inspect", "frameinfo", "=", "inspect", ".", "stack", "(", ")", "[", "1", "]", "callerframe", "=", "frameinfo", ".", "frame", "d", "=", "callerframe", ".", "f_locals", "if", "(", "isinstance", "(", "s", ",", "str", ")", ")", ":", "val", "=", "eval", "(", "s", ",", "d", ")", "else", ":", "val", "=", "s", "cc", "=", "frameinfo", ".", "code_context", "[", "0", "]", "import", "re", "regex", "=", "re", ".", "compile", "(", "\"dprint\\((.*)\\)\"", ")", "res", "=", "regex", ".", "search", "(", "cc", ")", "s", "=", "res", ".", "group", "(", "1", ")", "text", "=", "''", "text", "+=", "bcolors", ".", "OKBLUE", "+", "\"At <{}>\\n\"", ".", "format", "(", "str", "(", "frameinfo", ")", ")", "+", "bcolors", ".", "ENDC", "text", "+=", "bcolors", ".", "WARNING", "+", "\"{}: \"", ".", "format", "(", "s", ")", "+", "bcolors", ".", "ENDC", "text", "+=", "str", "(", "val", ")", "text", "+=", "str", "(", ")", "print", "(", "text", ")" ]
Prints `s` with additional debugging informations
[ "Prints", "s", "with", "additional", "debugging", "informations" ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/misc/dprint.py#L21-L46
train
232,771
EconForge/dolo
dolo/compiler/function_compiler_sympy.py
non_decreasing_series
def non_decreasing_series(n, size): '''Lists all combinations of 0,...,n-1 in increasing order''' if size == 1: return [[a] for a in range(n)] else: lc = non_decreasing_series(n, size-1) ll = [] for l in lc: last = l[-1] for i in range(last, n): e = l + [i] ll.append(e) return ll
python
def non_decreasing_series(n, size): '''Lists all combinations of 0,...,n-1 in increasing order''' if size == 1: return [[a] for a in range(n)] else: lc = non_decreasing_series(n, size-1) ll = [] for l in lc: last = l[-1] for i in range(last, n): e = l + [i] ll.append(e) return ll
[ "def", "non_decreasing_series", "(", "n", ",", "size", ")", ":", "if", "size", "==", "1", ":", "return", "[", "[", "a", "]", "for", "a", "in", "range", "(", "n", ")", "]", "else", ":", "lc", "=", "non_decreasing_series", "(", "n", ",", "size", "-", "1", ")", "ll", "=", "[", "]", "for", "l", "in", "lc", ":", "last", "=", "l", "[", "-", "1", "]", "for", "i", "in", "range", "(", "last", ",", "n", ")", ":", "e", "=", "l", "+", "[", "i", "]", "ll", ".", "append", "(", "e", ")", "return", "ll" ]
Lists all combinations of 0,...,n-1 in increasing order
[ "Lists", "all", "combinations", "of", "0", "...", "n", "-", "1", "in", "increasing", "order" ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/compiler/function_compiler_sympy.py#L13-L26
train
232,772
EconForge/dolo
dolo/compiler/function_compiler_sympy.py
higher_order_diff
def higher_order_diff(eqs, syms, order=2): '''Takes higher order derivatives of a list of equations w.r.t a list of paramters''' import numpy eqs = list([sympy.sympify(eq) for eq in eqs]) syms = list([sympy.sympify(s) for s in syms]) neq = len(eqs) p = len(syms) D = [numpy.array(eqs)] orders = [] for i in range(1,order+1): par = D[i-1] mat = numpy.empty([neq] + [p]*i, dtype=object) #.append( numpy.zeros(orders)) for ind in non_decreasing_series(p,i): ind_parent = ind[:-1] k = ind[-1] for line in range(neq): ii = [line] + ind iid = [line] + ind_parent eeq = par[ tuple(iid) ] mat[tuple(ii)] = eeq.diff(syms[k]) D.append(mat) return D
python
def higher_order_diff(eqs, syms, order=2): '''Takes higher order derivatives of a list of equations w.r.t a list of paramters''' import numpy eqs = list([sympy.sympify(eq) for eq in eqs]) syms = list([sympy.sympify(s) for s in syms]) neq = len(eqs) p = len(syms) D = [numpy.array(eqs)] orders = [] for i in range(1,order+1): par = D[i-1] mat = numpy.empty([neq] + [p]*i, dtype=object) #.append( numpy.zeros(orders)) for ind in non_decreasing_series(p,i): ind_parent = ind[:-1] k = ind[-1] for line in range(neq): ii = [line] + ind iid = [line] + ind_parent eeq = par[ tuple(iid) ] mat[tuple(ii)] = eeq.diff(syms[k]) D.append(mat) return D
[ "def", "higher_order_diff", "(", "eqs", ",", "syms", ",", "order", "=", "2", ")", ":", "import", "numpy", "eqs", "=", "list", "(", "[", "sympy", ".", "sympify", "(", "eq", ")", "for", "eq", "in", "eqs", "]", ")", "syms", "=", "list", "(", "[", "sympy", ".", "sympify", "(", "s", ")", "for", "s", "in", "syms", "]", ")", "neq", "=", "len", "(", "eqs", ")", "p", "=", "len", "(", "syms", ")", "D", "=", "[", "numpy", ".", "array", "(", "eqs", ")", "]", "orders", "=", "[", "]", "for", "i", "in", "range", "(", "1", ",", "order", "+", "1", ")", ":", "par", "=", "D", "[", "i", "-", "1", "]", "mat", "=", "numpy", ".", "empty", "(", "[", "neq", "]", "+", "[", "p", "]", "*", "i", ",", "dtype", "=", "object", ")", "#.append( numpy.zeros(orders))", "for", "ind", "in", "non_decreasing_series", "(", "p", ",", "i", ")", ":", "ind_parent", "=", "ind", "[", ":", "-", "1", "]", "k", "=", "ind", "[", "-", "1", "]", "for", "line", "in", "range", "(", "neq", ")", ":", "ii", "=", "[", "line", "]", "+", "ind", "iid", "=", "[", "line", "]", "+", "ind_parent", "eeq", "=", "par", "[", "tuple", "(", "iid", ")", "]", "mat", "[", "tuple", "(", "ii", ")", "]", "=", "eeq", ".", "diff", "(", "syms", "[", "k", "]", ")", "D", ".", "append", "(", "mat", ")", "return", "D" ]
Takes higher order derivatives of a list of equations w.r.t a list of paramters
[ "Takes", "higher", "order", "derivatives", "of", "a", "list", "of", "equations", "w", ".", "r", ".", "t", "a", "list", "of", "paramters" ]
d91ddf148b009bf79852d9aec70f3a1877e0f79a
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/compiler/function_compiler_sympy.py#L28-L60
train
232,773
pokerregion/poker
poker/website/pocketfives.py
get_ranked_players
def get_ranked_players(): """Get the list of the first 100 ranked players.""" rankings_page = requests.get(RANKINGS_URL) root = etree.HTML(rankings_page.text) player_rows = root.xpath('//div[@id="ranked"]//tr') for row in player_rows[1:]: player_row = row.xpath('td[@class!="country"]//text()') yield _Player( name=player_row[1], country=row[1][0].get('title'), triple_crowns=player_row[3], monthly_win=player_row[4], biggest_cash=player_row[5], plb_score=player_row[6], biggest_score=player_row[7], average_score=player_row[8], previous_rank=player_row[9], )
python
def get_ranked_players(): """Get the list of the first 100 ranked players.""" rankings_page = requests.get(RANKINGS_URL) root = etree.HTML(rankings_page.text) player_rows = root.xpath('//div[@id="ranked"]//tr') for row in player_rows[1:]: player_row = row.xpath('td[@class!="country"]//text()') yield _Player( name=player_row[1], country=row[1][0].get('title'), triple_crowns=player_row[3], monthly_win=player_row[4], biggest_cash=player_row[5], plb_score=player_row[6], biggest_score=player_row[7], average_score=player_row[8], previous_rank=player_row[9], )
[ "def", "get_ranked_players", "(", ")", ":", "rankings_page", "=", "requests", ".", "get", "(", "RANKINGS_URL", ")", "root", "=", "etree", ".", "HTML", "(", "rankings_page", ".", "text", ")", "player_rows", "=", "root", ".", "xpath", "(", "'//div[@id=\"ranked\"]//tr'", ")", "for", "row", "in", "player_rows", "[", "1", ":", "]", ":", "player_row", "=", "row", ".", "xpath", "(", "'td[@class!=\"country\"]//text()'", ")", "yield", "_Player", "(", "name", "=", "player_row", "[", "1", "]", ",", "country", "=", "row", "[", "1", "]", "[", "0", "]", ".", "get", "(", "'title'", ")", ",", "triple_crowns", "=", "player_row", "[", "3", "]", ",", "monthly_win", "=", "player_row", "[", "4", "]", ",", "biggest_cash", "=", "player_row", "[", "5", "]", ",", "plb_score", "=", "player_row", "[", "6", "]", ",", "biggest_score", "=", "player_row", "[", "7", "]", ",", "average_score", "=", "player_row", "[", "8", "]", ",", "previous_rank", "=", "player_row", "[", "9", "]", ",", ")" ]
Get the list of the first 100 ranked players.
[ "Get", "the", "list", "of", "the", "first", "100", "ranked", "players", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/website/pocketfives.py#L31-L50
train
232,774
pokerregion/poker
poker/card.py
Rank.difference
def difference(cls, first, second): """Tells the numerical difference between two ranks.""" # so we always get a Rank instance even if string were passed in first, second = cls(first), cls(second) rank_list = list(cls) return abs(rank_list.index(first) - rank_list.index(second))
python
def difference(cls, first, second): """Tells the numerical difference between two ranks.""" # so we always get a Rank instance even if string were passed in first, second = cls(first), cls(second) rank_list = list(cls) return abs(rank_list.index(first) - rank_list.index(second))
[ "def", "difference", "(", "cls", ",", "first", ",", "second", ")", ":", "# so we always get a Rank instance even if string were passed in", "first", ",", "second", "=", "cls", "(", "first", ")", ",", "cls", "(", "second", ")", "rank_list", "=", "list", "(", "cls", ")", "return", "abs", "(", "rank_list", ".", "index", "(", "first", ")", "-", "rank_list", ".", "index", "(", "second", ")", ")" ]
Tells the numerical difference between two ranks.
[ "Tells", "the", "numerical", "difference", "between", "two", "ranks", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/card.py#L42-L48
train
232,775
pokerregion/poker
poker/card.py
_CardMeta.make_random
def make_random(cls): """Returns a random Card instance.""" self = object.__new__(cls) self.rank = Rank.make_random() self.suit = Suit.make_random() return self
python
def make_random(cls): """Returns a random Card instance.""" self = object.__new__(cls) self.rank = Rank.make_random() self.suit = Suit.make_random() return self
[ "def", "make_random", "(", "cls", ")", ":", "self", "=", "object", ".", "__new__", "(", "cls", ")", "self", ".", "rank", "=", "Rank", ".", "make_random", "(", ")", "self", ".", "suit", "=", "Suit", ".", "make_random", "(", ")", "return", "self" ]
Returns a random Card instance.
[ "Returns", "a", "random", "Card", "instance", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/card.py#L64-L69
train
232,776
pokerregion/poker
poker/commands.py
twoplustwo_player
def twoplustwo_player(username): """Get profile information about a Two plus Two Forum member given the username.""" from .website.twoplustwo import ForumMember, AmbiguousUserNameError, UserNotFoundError try: member = ForumMember(username) except UserNotFoundError: raise click.ClickException('User "%s" not found!' % username) except AmbiguousUserNameError as e: click.echo('Got multiple users with similar names!', err=True) for ind, user in enumerate(e.users): click.echo('{}. {}'.format(ind + 1, user.name), err=True) number = click.prompt('Which would you like to see [{}-{}]'.format(1, len(e.users)), prompt_suffix='? ', type=click.IntRange(1, len(e.users)), err=True) userid = e.users[int(number) - 1].id member = ForumMember.from_userid(userid) click.echo(err=True) # empty line after input _print_header('Two plus two forum member') _print_values( ('Username', member.username), ('Forum id', member.id), ('Location', member.location), ('Total posts', member.total_posts), ('Posts per day', member.posts_per_day), ('Rank', member.rank), ('Last activity', member.last_activity), ('Join date', member.join_date), ('Usergroups', member.public_usergroups), ('Profile picture', member.profile_picture), ('Avatar', member.avatar), )
python
def twoplustwo_player(username): """Get profile information about a Two plus Two Forum member given the username.""" from .website.twoplustwo import ForumMember, AmbiguousUserNameError, UserNotFoundError try: member = ForumMember(username) except UserNotFoundError: raise click.ClickException('User "%s" not found!' % username) except AmbiguousUserNameError as e: click.echo('Got multiple users with similar names!', err=True) for ind, user in enumerate(e.users): click.echo('{}. {}'.format(ind + 1, user.name), err=True) number = click.prompt('Which would you like to see [{}-{}]'.format(1, len(e.users)), prompt_suffix='? ', type=click.IntRange(1, len(e.users)), err=True) userid = e.users[int(number) - 1].id member = ForumMember.from_userid(userid) click.echo(err=True) # empty line after input _print_header('Two plus two forum member') _print_values( ('Username', member.username), ('Forum id', member.id), ('Location', member.location), ('Total posts', member.total_posts), ('Posts per day', member.posts_per_day), ('Rank', member.rank), ('Last activity', member.last_activity), ('Join date', member.join_date), ('Usergroups', member.public_usergroups), ('Profile picture', member.profile_picture), ('Avatar', member.avatar), )
[ "def", "twoplustwo_player", "(", "username", ")", ":", "from", ".", "website", ".", "twoplustwo", "import", "ForumMember", ",", "AmbiguousUserNameError", ",", "UserNotFoundError", "try", ":", "member", "=", "ForumMember", "(", "username", ")", "except", "UserNotFoundError", ":", "raise", "click", ".", "ClickException", "(", "'User \"%s\" not found!'", "%", "username", ")", "except", "AmbiguousUserNameError", "as", "e", ":", "click", ".", "echo", "(", "'Got multiple users with similar names!'", ",", "err", "=", "True", ")", "for", "ind", ",", "user", "in", "enumerate", "(", "e", ".", "users", ")", ":", "click", ".", "echo", "(", "'{}. {}'", ".", "format", "(", "ind", "+", "1", ",", "user", ".", "name", ")", ",", "err", "=", "True", ")", "number", "=", "click", ".", "prompt", "(", "'Which would you like to see [{}-{}]'", ".", "format", "(", "1", ",", "len", "(", "e", ".", "users", ")", ")", ",", "prompt_suffix", "=", "'? '", ",", "type", "=", "click", ".", "IntRange", "(", "1", ",", "len", "(", "e", ".", "users", ")", ")", ",", "err", "=", "True", ")", "userid", "=", "e", ".", "users", "[", "int", "(", "number", ")", "-", "1", "]", ".", "id", "member", "=", "ForumMember", ".", "from_userid", "(", "userid", ")", "click", ".", "echo", "(", "err", "=", "True", ")", "# empty line after input", "_print_header", "(", "'Two plus two forum member'", ")", "_print_values", "(", "(", "'Username'", ",", "member", ".", "username", ")", ",", "(", "'Forum id'", ",", "member", ".", "id", ")", ",", "(", "'Location'", ",", "member", ".", "location", ")", ",", "(", "'Total posts'", ",", "member", ".", "total_posts", ")", ",", "(", "'Posts per day'", ",", "member", ".", "posts_per_day", ")", ",", "(", "'Rank'", ",", "member", ".", "rank", ")", ",", "(", "'Last activity'", ",", "member", ".", "last_activity", ")", ",", "(", "'Join date'", ",", "member", ".", "join_date", ")", ",", "(", "'Usergroups'", ",", "member", ".", "public_usergroups", ")", ",", "(", "'Profile picture'", ",", 
"member", ".", "profile_picture", ")", ",", "(", "'Avatar'", ",", "member", ".", "avatar", ")", ",", ")" ]
Get profile information about a Two plus Two Forum member given the username.
[ "Get", "profile", "information", "about", "a", "Two", "plus", "Two", "Forum", "member", "given", "the", "username", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/commands.py#L59-L95
train
232,777
pokerregion/poker
poker/commands.py
p5list
def p5list(num): """List pocketfives ranked players, max 100 if no NUM, or NUM if specified.""" from .website.pocketfives import get_ranked_players format_str = '{:>4.4} {!s:<15.13}{!s:<18.15}{!s:<9.6}{!s:<10.7}'\ '{!s:<14.11}{!s:<12.9}{!s:<12.9}{!s:<12.9}{!s:<4.4}' click.echo(format_str.format( 'Rank' , 'Player name', 'Country', 'Triple', 'Monthly', 'Biggest cash', 'PLB score', 'Biggest s', 'Average s', 'Prev' )) # just generate the appropriate number of underlines and cut them with format_str underlines = ['-' * 20] * 10 click.echo(format_str.format(*underlines)) for ind, player in enumerate(get_ranked_players()): click.echo(format_str.format(str(ind + 1) + '.', *player)) if ind == num - 1: break
python
def p5list(num): """List pocketfives ranked players, max 100 if no NUM, or NUM if specified.""" from .website.pocketfives import get_ranked_players format_str = '{:>4.4} {!s:<15.13}{!s:<18.15}{!s:<9.6}{!s:<10.7}'\ '{!s:<14.11}{!s:<12.9}{!s:<12.9}{!s:<12.9}{!s:<4.4}' click.echo(format_str.format( 'Rank' , 'Player name', 'Country', 'Triple', 'Monthly', 'Biggest cash', 'PLB score', 'Biggest s', 'Average s', 'Prev' )) # just generate the appropriate number of underlines and cut them with format_str underlines = ['-' * 20] * 10 click.echo(format_str.format(*underlines)) for ind, player in enumerate(get_ranked_players()): click.echo(format_str.format(str(ind + 1) + '.', *player)) if ind == num - 1: break
[ "def", "p5list", "(", "num", ")", ":", "from", ".", "website", ".", "pocketfives", "import", "get_ranked_players", "format_str", "=", "'{:>4.4} {!s:<15.13}{!s:<18.15}{!s:<9.6}{!s:<10.7}'", "'{!s:<14.11}{!s:<12.9}{!s:<12.9}{!s:<12.9}{!s:<4.4}'", "click", ".", "echo", "(", "format_str", ".", "format", "(", "'Rank'", ",", "'Player name'", ",", "'Country'", ",", "'Triple'", ",", "'Monthly'", ",", "'Biggest cash'", ",", "'PLB score'", ",", "'Biggest s'", ",", "'Average s'", ",", "'Prev'", ")", ")", "# just generate the appropriate number of underlines and cut them with format_str", "underlines", "=", "[", "'-'", "*", "20", "]", "*", "10", "click", ".", "echo", "(", "format_str", ".", "format", "(", "*", "underlines", ")", ")", "for", "ind", ",", "player", "in", "enumerate", "(", "get_ranked_players", "(", ")", ")", ":", "click", ".", "echo", "(", "format_str", ".", "format", "(", "str", "(", "ind", "+", "1", ")", "+", "'.'", ",", "*", "player", ")", ")", "if", "ind", "==", "num", "-", "1", ":", "break" ]
List pocketfives ranked players, max 100 if no NUM, or NUM if specified.
[ "List", "pocketfives", "ranked", "players", "max", "100", "if", "no", "NUM", "or", "NUM", "if", "specified", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/commands.py#L100-L119
train
232,778
pokerregion/poker
poker/commands.py
psstatus
def psstatus(): """Shows PokerStars status such as number of players, tournaments.""" from .website.pokerstars import get_status _print_header('PokerStars status') status = get_status() _print_values( ('Info updated', status.updated), ('Tables', status.tables), ('Players', status.players), ('Active tournaments', status.active_tournaments), ('Total tournaments', status.total_tournaments), ('Clubs', status.clubs), ('Club members', status.club_members), ) site_format_str = '{0.id:<12} {0.tables:<7,} {0.players:<8,} {0.active_tournaments:,}' click.echo('\nSite Tables Players Tournaments') click.echo('----------- ------ ------- -----------') for site in status.sites: click.echo(site_format_str.format(site))
python
def psstatus(): """Shows PokerStars status such as number of players, tournaments.""" from .website.pokerstars import get_status _print_header('PokerStars status') status = get_status() _print_values( ('Info updated', status.updated), ('Tables', status.tables), ('Players', status.players), ('Active tournaments', status.active_tournaments), ('Total tournaments', status.total_tournaments), ('Clubs', status.clubs), ('Club members', status.club_members), ) site_format_str = '{0.id:<12} {0.tables:<7,} {0.players:<8,} {0.active_tournaments:,}' click.echo('\nSite Tables Players Tournaments') click.echo('----------- ------ ------- -----------') for site in status.sites: click.echo(site_format_str.format(site))
[ "def", "psstatus", "(", ")", ":", "from", ".", "website", ".", "pokerstars", "import", "get_status", "_print_header", "(", "'PokerStars status'", ")", "status", "=", "get_status", "(", ")", "_print_values", "(", "(", "'Info updated'", ",", "status", ".", "updated", ")", ",", "(", "'Tables'", ",", "status", ".", "tables", ")", ",", "(", "'Players'", ",", "status", ".", "players", ")", ",", "(", "'Active tournaments'", ",", "status", ".", "active_tournaments", ")", ",", "(", "'Total tournaments'", ",", "status", ".", "total_tournaments", ")", ",", "(", "'Clubs'", ",", "status", ".", "clubs", ")", ",", "(", "'Club members'", ",", "status", ".", "club_members", ")", ",", ")", "site_format_str", "=", "'{0.id:<12} {0.tables:<7,} {0.players:<8,} {0.active_tournaments:,}'", "click", ".", "echo", "(", "'\\nSite Tables Players Tournaments'", ")", "click", ".", "echo", "(", "'----------- ------ ------- -----------'", ")", "for", "site", "in", "status", ".", "sites", ":", "click", ".", "echo", "(", "site_format_str", ".", "format", "(", "site", ")", ")" ]
Shows PokerStars status such as number of players, tournaments.
[ "Shows", "PokerStars", "status", "such", "as", "number", "of", "players", "tournaments", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/commands.py#L123-L145
train
232,779
pokerregion/poker
poker/room/pokerstars.py
Notes.notes
def notes(self): """Tuple of notes..""" return tuple(self._get_note_data(note) for note in self.root.iter('note'))
python
def notes(self): """Tuple of notes..""" return tuple(self._get_note_data(note) for note in self.root.iter('note'))
[ "def", "notes", "(", "self", ")", ":", "return", "tuple", "(", "self", ".", "_get_note_data", "(", "note", ")", "for", "note", "in", "self", ".", "root", ".", "iter", "(", "'note'", ")", ")" ]
Tuple of notes..
[ "Tuple", "of", "notes", ".." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L335-L337
train
232,780
pokerregion/poker
poker/room/pokerstars.py
Notes.labels
def labels(self): """Tuple of labels.""" return tuple(_Label(label.get('id'), label.get('color'), label.text) for label in self.root.iter('label'))
python
def labels(self): """Tuple of labels.""" return tuple(_Label(label.get('id'), label.get('color'), label.text) for label in self.root.iter('label'))
[ "def", "labels", "(", "self", ")", ":", "return", "tuple", "(", "_Label", "(", "label", ".", "get", "(", "'id'", ")", ",", "label", ".", "get", "(", "'color'", ")", ",", "label", ".", "text", ")", "for", "label", "in", "self", ".", "root", ".", "iter", "(", "'label'", ")", ")" ]
Tuple of labels.
[ "Tuple", "of", "labels", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L340-L343
train
232,781
pokerregion/poker
poker/room/pokerstars.py
Notes.add_note
def add_note(self, player, text, label=None, update=None): """Add a note to the xml. If update param is None, it will be the current time.""" if label is not None and (label not in self.label_names): raise LabelNotFoundError('Invalid label: {}'.format(label)) if update is None: update = datetime.utcnow() # converted to timestamp, rounded to ones update = update.strftime('%s') label_id = self._get_label_id(label) new_note = etree.Element('note', player=player, label=label_id, update=update) new_note.text = text self.root.append(new_note)
python
def add_note(self, player, text, label=None, update=None): """Add a note to the xml. If update param is None, it will be the current time.""" if label is not None and (label not in self.label_names): raise LabelNotFoundError('Invalid label: {}'.format(label)) if update is None: update = datetime.utcnow() # converted to timestamp, rounded to ones update = update.strftime('%s') label_id = self._get_label_id(label) new_note = etree.Element('note', player=player, label=label_id, update=update) new_note.text = text self.root.append(new_note)
[ "def", "add_note", "(", "self", ",", "player", ",", "text", ",", "label", "=", "None", ",", "update", "=", "None", ")", ":", "if", "label", "is", "not", "None", "and", "(", "label", "not", "in", "self", ".", "label_names", ")", ":", "raise", "LabelNotFoundError", "(", "'Invalid label: {}'", ".", "format", "(", "label", ")", ")", "if", "update", "is", "None", ":", "update", "=", "datetime", ".", "utcnow", "(", ")", "# converted to timestamp, rounded to ones", "update", "=", "update", ".", "strftime", "(", "'%s'", ")", "label_id", "=", "self", ".", "_get_label_id", "(", "label", ")", "new_note", "=", "etree", ".", "Element", "(", "'note'", ",", "player", "=", "player", ",", "label", "=", "label_id", ",", "update", "=", "update", ")", "new_note", ".", "text", "=", "text", "self", ".", "root", ".", "append", "(", "new_note", ")" ]
Add a note to the xml. If update param is None, it will be the current time.
[ "Add", "a", "note", "to", "the", "xml", ".", "If", "update", "param", "is", "None", "it", "will", "be", "the", "current", "time", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L354-L365
train
232,782
pokerregion/poker
poker/room/pokerstars.py
Notes.append_note
def append_note(self, player, text): """Append text to an already existing note.""" note = self._find_note(player) note.text += text
python
def append_note(self, player, text): """Append text to an already existing note.""" note = self._find_note(player) note.text += text
[ "def", "append_note", "(", "self", ",", "player", ",", "text", ")", ":", "note", "=", "self", ".", "_find_note", "(", "player", ")", "note", ".", "text", "+=", "text" ]
Append text to an already existing note.
[ "Append", "text", "to", "an", "already", "existing", "note", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L367-L370
train
232,783
pokerregion/poker
poker/room/pokerstars.py
Notes.prepend_note
def prepend_note(self, player, text): """Prepend text to an already existing note.""" note = self._find_note(player) note.text = text + note.text
python
def prepend_note(self, player, text): """Prepend text to an already existing note.""" note = self._find_note(player) note.text = text + note.text
[ "def", "prepend_note", "(", "self", ",", "player", ",", "text", ")", ":", "note", "=", "self", ".", "_find_note", "(", "player", ")", "note", ".", "text", "=", "text", "+", "note", ".", "text" ]
Prepend text to an already existing note.
[ "Prepend", "text", "to", "an", "already", "existing", "note", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L372-L375
train
232,784
pokerregion/poker
poker/room/pokerstars.py
Notes.get_label
def get_label(self, name): """Find the label by name.""" label_tag = self._find_label(name) return _Label(label_tag.get('id'), label_tag.get('color'), label_tag.text)
python
def get_label(self, name): """Find the label by name.""" label_tag = self._find_label(name) return _Label(label_tag.get('id'), label_tag.get('color'), label_tag.text)
[ "def", "get_label", "(", "self", ",", "name", ")", ":", "label_tag", "=", "self", ".", "_find_label", "(", "name", ")", "return", "_Label", "(", "label_tag", ".", "get", "(", "'id'", ")", ",", "label_tag", ".", "get", "(", "'color'", ")", ",", "label_tag", ".", "text", ")" ]
Find the label by name.
[ "Find", "the", "label", "by", "name", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L412-L415
train
232,785
pokerregion/poker
poker/room/pokerstars.py
Notes.add_label
def add_label(self, name, color): """Add a new label. It's id will automatically be calculated.""" color_upper = color.upper() if not self._color_re.match(color_upper): raise ValueError('Invalid color: {}'.format(color)) labels_tag = self.root[0] last_id = int(labels_tag[-1].get('id')) new_id = str(last_id + 1) new_label = etree.Element('label', id=new_id, color=color_upper) new_label.text = name labels_tag.append(new_label)
python
def add_label(self, name, color): """Add a new label. It's id will automatically be calculated.""" color_upper = color.upper() if not self._color_re.match(color_upper): raise ValueError('Invalid color: {}'.format(color)) labels_tag = self.root[0] last_id = int(labels_tag[-1].get('id')) new_id = str(last_id + 1) new_label = etree.Element('label', id=new_id, color=color_upper) new_label.text = name labels_tag.append(new_label)
[ "def", "add_label", "(", "self", ",", "name", ",", "color", ")", ":", "color_upper", "=", "color", ".", "upper", "(", ")", "if", "not", "self", ".", "_color_re", ".", "match", "(", "color_upper", ")", ":", "raise", "ValueError", "(", "'Invalid color: {}'", ".", "format", "(", "color", ")", ")", "labels_tag", "=", "self", ".", "root", "[", "0", "]", "last_id", "=", "int", "(", "labels_tag", "[", "-", "1", "]", ".", "get", "(", "'id'", ")", ")", "new_id", "=", "str", "(", "last_id", "+", "1", ")", "new_label", "=", "etree", ".", "Element", "(", "'label'", ",", "id", "=", "new_id", ",", "color", "=", "color_upper", ")", "new_label", ".", "text", "=", "name", "labels_tag", ".", "append", "(", "new_label", ")" ]
Add a new label. It's id will automatically be calculated.
[ "Add", "a", "new", "label", ".", "It", "s", "id", "will", "automatically", "be", "calculated", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L417-L430
train
232,786
pokerregion/poker
poker/room/pokerstars.py
Notes.del_label
def del_label(self, name): """Delete a label by name.""" labels_tag = self.root[0] labels_tag.remove(self._find_label(name))
python
def del_label(self, name): """Delete a label by name.""" labels_tag = self.root[0] labels_tag.remove(self._find_label(name))
[ "def", "del_label", "(", "self", ",", "name", ")", ":", "labels_tag", "=", "self", ".", "root", "[", "0", "]", "labels_tag", ".", "remove", "(", "self", ".", "_find_label", "(", "name", ")", ")" ]
Delete a label by name.
[ "Delete", "a", "label", "by", "name", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L432-L435
train
232,787
pokerregion/poker
poker/room/pokerstars.py
Notes.save
def save(self, filename): """Save the note XML to a file.""" with open(filename, 'w') as fp: fp.write(str(self))
python
def save(self, filename): """Save the note XML to a file.""" with open(filename, 'w') as fp: fp.write(str(self))
[ "def", "save", "(", "self", ",", "filename", ")", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fp", ":", "fp", ".", "write", "(", "str", "(", "self", ")", ")" ]
Save the note XML to a file.
[ "Save", "the", "note", "XML", "to", "a", "file", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/room/pokerstars.py#L447-L450
train
232,788
pokerregion/poker
poker/handhistory.py
_BaseHandHistory.board
def board(self): """Calculates board from flop, turn and river.""" board = [] if self.flop: board.extend(self.flop.cards) if self.turn: board.append(self.turn) if self.river: board.append(self.river) return tuple(board) if board else None
python
def board(self): """Calculates board from flop, turn and river.""" board = [] if self.flop: board.extend(self.flop.cards) if self.turn: board.append(self.turn) if self.river: board.append(self.river) return tuple(board) if board else None
[ "def", "board", "(", "self", ")", ":", "board", "=", "[", "]", "if", "self", ".", "flop", ":", "board", ".", "extend", "(", "self", ".", "flop", ".", "cards", ")", "if", "self", ".", "turn", ":", "board", ".", "append", "(", "self", ".", "turn", ")", "if", "self", ".", "river", ":", "board", ".", "append", "(", "self", ".", "river", ")", "return", "tuple", "(", "board", ")", "if", "board", "else", "None" ]
Calculates board from flop, turn and river.
[ "Calculates", "board", "from", "flop", "turn", "and", "river", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/handhistory.py#L167-L176
train
232,789
pokerregion/poker
poker/handhistory.py
_BaseHandHistory._parse_date
def _parse_date(self, date_string): """Parse the date_string and return a datetime object as UTC.""" date = datetime.strptime(date_string, self._DATE_FORMAT) self.date = self._TZ.localize(date).astimezone(pytz.UTC)
python
def _parse_date(self, date_string): """Parse the date_string and return a datetime object as UTC.""" date = datetime.strptime(date_string, self._DATE_FORMAT) self.date = self._TZ.localize(date).astimezone(pytz.UTC)
[ "def", "_parse_date", "(", "self", ",", "date_string", ")", ":", "date", "=", "datetime", ".", "strptime", "(", "date_string", ",", "self", ".", "_DATE_FORMAT", ")", "self", ".", "date", "=", "self", ".", "_TZ", ".", "localize", "(", "date", ")", ".", "astimezone", "(", "pytz", ".", "UTC", ")" ]
Parse the date_string and return a datetime object as UTC.
[ "Parse", "the", "date_string", "and", "return", "a", "datetime", "object", "as", "UTC", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/handhistory.py#L178-L181
train
232,790
pokerregion/poker
poker/handhistory.py
_SplittableHandHistoryMixin._split_raw
def _split_raw(self): """Split hand history by sections.""" self._splitted = self._split_re.split(self.raw) # search split locations (basically empty strings) self._sections = [ind for ind, elem in enumerate(self._splitted) if not elem]
python
def _split_raw(self): """Split hand history by sections.""" self._splitted = self._split_re.split(self.raw) # search split locations (basically empty strings) self._sections = [ind for ind, elem in enumerate(self._splitted) if not elem]
[ "def", "_split_raw", "(", "self", ")", ":", "self", ".", "_splitted", "=", "self", ".", "_split_re", ".", "split", "(", "self", ".", "raw", ")", "# search split locations (basically empty strings)", "self", ".", "_sections", "=", "[", "ind", "for", "ind", ",", "elem", "in", "enumerate", "(", "self", ".", "_splitted", ")", "if", "not", "elem", "]" ]
Split hand history by sections.
[ "Split", "hand", "history", "by", "sections", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/handhistory.py#L201-L206
train
232,791
pokerregion/poker
poker/website/twoplustwo.py
ForumMember._get_timezone
def _get_timezone(self, root): """Find timezone informatation on bottom of the page.""" tz_str = root.xpath('//div[@class="smallfont" and @align="center"]')[0].text hours = int(self._tz_re.search(tz_str).group(1)) return tzoffset(tz_str, hours * 60)
python
def _get_timezone(self, root): """Find timezone informatation on bottom of the page.""" tz_str = root.xpath('//div[@class="smallfont" and @align="center"]')[0].text hours = int(self._tz_re.search(tz_str).group(1)) return tzoffset(tz_str, hours * 60)
[ "def", "_get_timezone", "(", "self", ",", "root", ")", ":", "tz_str", "=", "root", ".", "xpath", "(", "'//div[@class=\"smallfont\" and @align=\"center\"]'", ")", "[", "0", "]", ".", "text", "hours", "=", "int", "(", "self", ".", "_tz_re", ".", "search", "(", "tz_str", ")", ".", "group", "(", "1", ")", ")", "return", "tzoffset", "(", "tz_str", ",", "hours", "*", "60", ")" ]
Find timezone informatation on bottom of the page.
[ "Find", "timezone", "informatation", "on", "bottom", "of", "the", "page", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/website/twoplustwo.py#L125-L129
train
232,792
pokerregion/poker
poker/website/pokerstars.py
get_current_tournaments
def get_current_tournaments(): """Get the next 200 tournaments from pokerstars.""" schedule_page = requests.get(TOURNAMENTS_XML_URL) root = etree.XML(schedule_page.content) for tour in root.iter('{*}tournament'): yield _Tournament( start_date=tour.findtext('{*}start_date'), name=tour.findtext('{*}name'), game=tour.findtext('{*}game'), buyin=tour.findtext('{*}buy_in_fee'), players=tour.get('players') )
python
def get_current_tournaments(): """Get the next 200 tournaments from pokerstars.""" schedule_page = requests.get(TOURNAMENTS_XML_URL) root = etree.XML(schedule_page.content) for tour in root.iter('{*}tournament'): yield _Tournament( start_date=tour.findtext('{*}start_date'), name=tour.findtext('{*}name'), game=tour.findtext('{*}game'), buyin=tour.findtext('{*}buy_in_fee'), players=tour.get('players') )
[ "def", "get_current_tournaments", "(", ")", ":", "schedule_page", "=", "requests", ".", "get", "(", "TOURNAMENTS_XML_URL", ")", "root", "=", "etree", ".", "XML", "(", "schedule_page", ".", "content", ")", "for", "tour", "in", "root", ".", "iter", "(", "'{*}tournament'", ")", ":", "yield", "_Tournament", "(", "start_date", "=", "tour", ".", "findtext", "(", "'{*}start_date'", ")", ",", "name", "=", "tour", ".", "findtext", "(", "'{*}name'", ")", ",", "game", "=", "tour", ".", "findtext", "(", "'{*}game'", ")", ",", "buyin", "=", "tour", ".", "findtext", "(", "'{*}buy_in_fee'", ")", ",", "players", "=", "tour", ".", "get", "(", "'players'", ")", ")" ]
Get the next 200 tournaments from pokerstars.
[ "Get", "the", "next", "200", "tournaments", "from", "pokerstars", "." ]
2d8cf208fdf2b26bdc935972dcbe7a983a9e9768
https://github.com/pokerregion/poker/blob/2d8cf208fdf2b26bdc935972dcbe7a983a9e9768/poker/website/pokerstars.py#L29-L42
train
232,793
RKrahl/pytest-dependency
setup.py
_filter_file
def _filter_file(src, dest, subst): """Copy src to dest doing substitutions on the fly. """ substre = re.compile(r'\$(%s)' % '|'.join(subst.keys())) def repl(m): return subst[m.group(1)] with open(src, "rt") as sf, open(dest, "wt") as df: while True: l = sf.readline() if not l: break df.write(re.sub(substre, repl, l))
python
def _filter_file(src, dest, subst): """Copy src to dest doing substitutions on the fly. """ substre = re.compile(r'\$(%s)' % '|'.join(subst.keys())) def repl(m): return subst[m.group(1)] with open(src, "rt") as sf, open(dest, "wt") as df: while True: l = sf.readline() if not l: break df.write(re.sub(substre, repl, l))
[ "def", "_filter_file", "(", "src", ",", "dest", ",", "subst", ")", ":", "substre", "=", "re", ".", "compile", "(", "r'\\$(%s)'", "%", "'|'", ".", "join", "(", "subst", ".", "keys", "(", ")", ")", ")", "def", "repl", "(", "m", ")", ":", "return", "subst", "[", "m", ".", "group", "(", "1", ")", "]", "with", "open", "(", "src", ",", "\"rt\"", ")", "as", "sf", ",", "open", "(", "dest", ",", "\"wt\"", ")", "as", "df", ":", "while", "True", ":", "l", "=", "sf", ".", "readline", "(", ")", "if", "not", "l", ":", "break", "df", ".", "write", "(", "re", ".", "sub", "(", "substre", ",", "repl", ",", "l", ")", ")" ]
Copy src to dest doing substitutions on the fly.
[ "Copy", "src", "to", "dest", "doing", "substitutions", "on", "the", "fly", "." ]
7b7c10818266ec4b05c36c341cf84f05d7ab53ce
https://github.com/RKrahl/pytest-dependency/blob/7b7c10818266ec4b05c36c341cf84f05d7ab53ce/setup.py#L18-L29
train
232,794
profusion/sgqlc
sgqlc/endpoint/base.py
BaseEndpoint._fixup_graphql_error
def _fixup_graphql_error(self, data): '''Given a possible GraphQL error payload, make sure it's in shape. This will ensure the given ``data`` is in the shape: .. code-block:: json {"errors": [{"message": "some string"}]} If ``errors`` is not an array, it will be made into a single element array, with the object in that format, with its string representation being the message. If an element of the ``errors`` array is not in the format, then it's converted to the format, with its string representation being the message. The input object is not changed, a copy is made if needed. :return: the given ``data`` formatted to the correct shape, a copy is made and returned if any fix up was needed. :rtype: dict ''' original_data = data errors = data.get('errors') original_errors = errors if not isinstance(errors, list): self.logger.warning('data["errors"] is not a list! Fix up data=%r', data) data = data.copy() data['errors'] = [{'message': str(errors)}] return data for i, error in enumerate(errors): if not isinstance(error, dict): self.logger.warning('Error #%d: is not a dict: %r. Fix up!', i, error) if data is original_data: data = data.copy() if errors is original_errors: errors = errors.copy() data['errors'] = errors errors[i] = {'message': str(error)} continue message = error.get('message') if not isinstance(message, str): if data is original_data: data = data.copy() if errors is original_errors: errors = errors.copy() data['errors'] = errors message = str(error) if message is None else str(message) error = error.copy() error['message'] = message errors[i] = error return data
python
def _fixup_graphql_error(self, data): '''Given a possible GraphQL error payload, make sure it's in shape. This will ensure the given ``data`` is in the shape: .. code-block:: json {"errors": [{"message": "some string"}]} If ``errors`` is not an array, it will be made into a single element array, with the object in that format, with its string representation being the message. If an element of the ``errors`` array is not in the format, then it's converted to the format, with its string representation being the message. The input object is not changed, a copy is made if needed. :return: the given ``data`` formatted to the correct shape, a copy is made and returned if any fix up was needed. :rtype: dict ''' original_data = data errors = data.get('errors') original_errors = errors if not isinstance(errors, list): self.logger.warning('data["errors"] is not a list! Fix up data=%r', data) data = data.copy() data['errors'] = [{'message': str(errors)}] return data for i, error in enumerate(errors): if not isinstance(error, dict): self.logger.warning('Error #%d: is not a dict: %r. Fix up!', i, error) if data is original_data: data = data.copy() if errors is original_errors: errors = errors.copy() data['errors'] = errors errors[i] = {'message': str(error)} continue message = error.get('message') if not isinstance(message, str): if data is original_data: data = data.copy() if errors is original_errors: errors = errors.copy() data['errors'] = errors message = str(error) if message is None else str(message) error = error.copy() error['message'] = message errors[i] = error return data
[ "def", "_fixup_graphql_error", "(", "self", ",", "data", ")", ":", "original_data", "=", "data", "errors", "=", "data", ".", "get", "(", "'errors'", ")", "original_errors", "=", "errors", "if", "not", "isinstance", "(", "errors", ",", "list", ")", ":", "self", ".", "logger", ".", "warning", "(", "'data[\"errors\"] is not a list! Fix up data=%r'", ",", "data", ")", "data", "=", "data", ".", "copy", "(", ")", "data", "[", "'errors'", "]", "=", "[", "{", "'message'", ":", "str", "(", "errors", ")", "}", "]", "return", "data", "for", "i", ",", "error", "in", "enumerate", "(", "errors", ")", ":", "if", "not", "isinstance", "(", "error", ",", "dict", ")", ":", "self", ".", "logger", ".", "warning", "(", "'Error #%d: is not a dict: %r. Fix up!'", ",", "i", ",", "error", ")", "if", "data", "is", "original_data", ":", "data", "=", "data", ".", "copy", "(", ")", "if", "errors", "is", "original_errors", ":", "errors", "=", "errors", ".", "copy", "(", ")", "data", "[", "'errors'", "]", "=", "errors", "errors", "[", "i", "]", "=", "{", "'message'", ":", "str", "(", "error", ")", "}", "continue", "message", "=", "error", ".", "get", "(", "'message'", ")", "if", "not", "isinstance", "(", "message", ",", "str", ")", ":", "if", "data", "is", "original_data", ":", "data", "=", "data", ".", "copy", "(", ")", "if", "errors", "is", "original_errors", ":", "errors", "=", "errors", ".", "copy", "(", ")", "data", "[", "'errors'", "]", "=", "errors", "message", "=", "str", "(", "error", ")", "if", "message", "is", "None", "else", "str", "(", "message", ")", "error", "=", "error", ".", "copy", "(", ")", "error", "[", "'message'", "]", "=", "message", "errors", "[", "i", "]", "=", "error", "return", "data" ]
Given a possible GraphQL error payload, make sure it's in shape. This will ensure the given ``data`` is in the shape: .. code-block:: json {"errors": [{"message": "some string"}]} If ``errors`` is not an array, it will be made into a single element array, with the object in that format, with its string representation being the message. If an element of the ``errors`` array is not in the format, then it's converted to the format, with its string representation being the message. The input object is not changed, a copy is made if needed. :return: the given ``data`` formatted to the correct shape, a copy is made and returned if any fix up was needed. :rtype: dict
[ "Given", "a", "possible", "GraphQL", "error", "payload", "make", "sure", "it", "s", "in", "shape", "." ]
684afb059c93f142150043cafac09b7fd52bfa27
https://github.com/profusion/sgqlc/blob/684afb059c93f142150043cafac09b7fd52bfa27/sgqlc/endpoint/base.py#L104-L163
train
232,795
profusion/sgqlc
sgqlc/endpoint/base.py
BaseEndpoint.snippet
def snippet(code, locations, sep=' | ', colmark=('-', '^'), context=5): '''Given a code and list of locations, convert to snippet lines. return will include line number, a separator (``sep``), then line contents. At most ``context`` lines are shown before each location line. After each location line, the column is marked using ``colmark``. The first character is repeated up to column, the second character is used only once. :return: list of lines of sources or column markups. :rtype: list ''' if not locations: return [] lines = code.split('\n') offset = int(len(lines) / 10) + 1 linenofmt = '%{}d'.format(offset) s = [] for loc in locations: line = max(0, loc.get('line', 1) - 1) column = max(0, loc.get('column', 1) - 1) start_line = max(0, line - context) for i, ln in enumerate(lines[start_line:line + 1], start_line): s.append('{}{}{}'.format(linenofmt % i, sep, ln)) s.append('{}{}{}'.format(' ' * (offset + len(sep)), colmark[0] * column, colmark[1])) return s
python
def snippet(code, locations, sep=' | ', colmark=('-', '^'), context=5): '''Given a code and list of locations, convert to snippet lines. return will include line number, a separator (``sep``), then line contents. At most ``context`` lines are shown before each location line. After each location line, the column is marked using ``colmark``. The first character is repeated up to column, the second character is used only once. :return: list of lines of sources or column markups. :rtype: list ''' if not locations: return [] lines = code.split('\n') offset = int(len(lines) / 10) + 1 linenofmt = '%{}d'.format(offset) s = [] for loc in locations: line = max(0, loc.get('line', 1) - 1) column = max(0, loc.get('column', 1) - 1) start_line = max(0, line - context) for i, ln in enumerate(lines[start_line:line + 1], start_line): s.append('{}{}{}'.format(linenofmt % i, sep, ln)) s.append('{}{}{}'.format(' ' * (offset + len(sep)), colmark[0] * column, colmark[1])) return s
[ "def", "snippet", "(", "code", ",", "locations", ",", "sep", "=", "' | '", ",", "colmark", "=", "(", "'-'", ",", "'^'", ")", ",", "context", "=", "5", ")", ":", "if", "not", "locations", ":", "return", "[", "]", "lines", "=", "code", ".", "split", "(", "'\\n'", ")", "offset", "=", "int", "(", "len", "(", "lines", ")", "/", "10", ")", "+", "1", "linenofmt", "=", "'%{}d'", ".", "format", "(", "offset", ")", "s", "=", "[", "]", "for", "loc", "in", "locations", ":", "line", "=", "max", "(", "0", ",", "loc", ".", "get", "(", "'line'", ",", "1", ")", "-", "1", ")", "column", "=", "max", "(", "0", ",", "loc", ".", "get", "(", "'column'", ",", "1", ")", "-", "1", ")", "start_line", "=", "max", "(", "0", ",", "line", "-", "context", ")", "for", "i", ",", "ln", "in", "enumerate", "(", "lines", "[", "start_line", ":", "line", "+", "1", "]", ",", "start_line", ")", ":", "s", ".", "append", "(", "'{}{}{}'", ".", "format", "(", "linenofmt", "%", "i", ",", "sep", ",", "ln", ")", ")", "s", ".", "append", "(", "'{}{}{}'", ".", "format", "(", "' '", "*", "(", "offset", "+", "len", "(", "sep", ")", ")", ",", "colmark", "[", "0", "]", "*", "column", ",", "colmark", "[", "1", "]", ")", ")", "return", "s" ]
Given a code and list of locations, convert to snippet lines. return will include line number, a separator (``sep``), then line contents. At most ``context`` lines are shown before each location line. After each location line, the column is marked using ``colmark``. The first character is repeated up to column, the second character is used only once. :return: list of lines of sources or column markups. :rtype: list
[ "Given", "a", "code", "and", "list", "of", "locations", "convert", "to", "snippet", "lines", "." ]
684afb059c93f142150043cafac09b7fd52bfa27
https://github.com/profusion/sgqlc/blob/684afb059c93f142150043cafac09b7fd52bfa27/sgqlc/endpoint/base.py#L206-L236
train
232,796
profusion/sgqlc
sgqlc/types/__init__.py
_create_non_null_wrapper
def _create_non_null_wrapper(name, t): 'creates type wrapper for non-null of given type' def __new__(cls, json_data, selection_list=None): if json_data is None: raise ValueError(name + ' received null value') return t(json_data, selection_list) def __to_graphql_input__(value, indent=0, indent_string=' '): return t.__to_graphql_input__(value, indent, indent_string) return type(name, (t,), { '__new__': __new__, '_%s__auto_register' % name: False, '__to_graphql_input__': __to_graphql_input__, })
python
def _create_non_null_wrapper(name, t): 'creates type wrapper for non-null of given type' def __new__(cls, json_data, selection_list=None): if json_data is None: raise ValueError(name + ' received null value') return t(json_data, selection_list) def __to_graphql_input__(value, indent=0, indent_string=' '): return t.__to_graphql_input__(value, indent, indent_string) return type(name, (t,), { '__new__': __new__, '_%s__auto_register' % name: False, '__to_graphql_input__': __to_graphql_input__, })
[ "def", "_create_non_null_wrapper", "(", "name", ",", "t", ")", ":", "def", "__new__", "(", "cls", ",", "json_data", ",", "selection_list", "=", "None", ")", ":", "if", "json_data", "is", "None", ":", "raise", "ValueError", "(", "name", "+", "' received null value'", ")", "return", "t", "(", "json_data", ",", "selection_list", ")", "def", "__to_graphql_input__", "(", "value", ",", "indent", "=", "0", ",", "indent_string", "=", "' '", ")", ":", "return", "t", ".", "__to_graphql_input__", "(", "value", ",", "indent", ",", "indent_string", ")", "return", "type", "(", "name", ",", "(", "t", ",", ")", ",", "{", "'__new__'", ":", "__new__", ",", "'_%s__auto_register'", "%", "name", ":", "False", ",", "'__to_graphql_input__'", ":", "__to_graphql_input__", ",", "}", ")" ]
creates type wrapper for non-null of given type
[ "creates", "type", "wrapper", "for", "non", "-", "null", "of", "given", "type" ]
684afb059c93f142150043cafac09b7fd52bfa27
https://github.com/profusion/sgqlc/blob/684afb059c93f142150043cafac09b7fd52bfa27/sgqlc/types/__init__.py#L869-L883
train
232,797
profusion/sgqlc
sgqlc/types/__init__.py
_create_list_of_wrapper
def _create_list_of_wrapper(name, t): 'creates type wrapper for list of given type' def __new__(cls, json_data, selection_list=None): if json_data is None: return None return [t(v, selection_list) for v in json_data] def __to_graphql_input__(value, indent=0, indent_string=' '): r = [] for v in value: r.append(t.__to_graphql_input__(v, indent, indent_string)) return '[' + ', '.join(r) + ']' def __to_json_value__(value): if value is None: return None return [t.__to_json_value__(v) for v in value] return type(name, (t,), { '__new__': __new__, '_%s__auto_register' % name: False, '__to_graphql_input__': __to_graphql_input__, '__to_json_value__': __to_json_value__, })
python
def _create_list_of_wrapper(name, t): 'creates type wrapper for list of given type' def __new__(cls, json_data, selection_list=None): if json_data is None: return None return [t(v, selection_list) for v in json_data] def __to_graphql_input__(value, indent=0, indent_string=' '): r = [] for v in value: r.append(t.__to_graphql_input__(v, indent, indent_string)) return '[' + ', '.join(r) + ']' def __to_json_value__(value): if value is None: return None return [t.__to_json_value__(v) for v in value] return type(name, (t,), { '__new__': __new__, '_%s__auto_register' % name: False, '__to_graphql_input__': __to_graphql_input__, '__to_json_value__': __to_json_value__, })
[ "def", "_create_list_of_wrapper", "(", "name", ",", "t", ")", ":", "def", "__new__", "(", "cls", ",", "json_data", ",", "selection_list", "=", "None", ")", ":", "if", "json_data", "is", "None", ":", "return", "None", "return", "[", "t", "(", "v", ",", "selection_list", ")", "for", "v", "in", "json_data", "]", "def", "__to_graphql_input__", "(", "value", ",", "indent", "=", "0", ",", "indent_string", "=", "' '", ")", ":", "r", "=", "[", "]", "for", "v", "in", "value", ":", "r", ".", "append", "(", "t", ".", "__to_graphql_input__", "(", "v", ",", "indent", ",", "indent_string", ")", ")", "return", "'['", "+", "', '", ".", "join", "(", "r", ")", "+", "']'", "def", "__to_json_value__", "(", "value", ")", ":", "if", "value", "is", "None", ":", "return", "None", "return", "[", "t", ".", "__to_json_value__", "(", "v", ")", "for", "v", "in", "value", "]", "return", "type", "(", "name", ",", "(", "t", ",", ")", ",", "{", "'__new__'", ":", "__new__", ",", "'_%s__auto_register'", "%", "name", ":", "False", ",", "'__to_graphql_input__'", ":", "__to_graphql_input__", ",", "'__to_json_value__'", ":", "__to_json_value__", ",", "}", ")" ]
creates type wrapper for list of given type
[ "creates", "type", "wrapper", "for", "list", "of", "given", "type" ]
684afb059c93f142150043cafac09b7fd52bfa27
https://github.com/profusion/sgqlc/blob/684afb059c93f142150043cafac09b7fd52bfa27/sgqlc/types/__init__.py#L886-L909
train
232,798
profusion/sgqlc
sgqlc/endpoint/http.py
add_query_to_url
def add_query_to_url(url, extra_query): '''Adds an extra query to URL, returning the new URL. Extra query may be a dict or a list as returned by :func:`urllib.parse.parse_qsl()` and :func:`urllib.parse.parse_qs()`. ''' split = urllib.parse.urlsplit(url) merged_query = urllib.parse.parse_qsl(split.query) if isinstance(extra_query, dict): for k, v in extra_query.items(): if not isinstance(v, (tuple, list)): merged_query.append((k, v)) else: for cv in v: merged_query.append((k, cv)) else: merged_query.extend(extra_query) merged_split = urllib.parse.SplitResult( split.scheme, split.netloc, split.path, urllib.parse.urlencode(merged_query), split.fragment, ) return merged_split.geturl()
python
def add_query_to_url(url, extra_query): '''Adds an extra query to URL, returning the new URL. Extra query may be a dict or a list as returned by :func:`urllib.parse.parse_qsl()` and :func:`urllib.parse.parse_qs()`. ''' split = urllib.parse.urlsplit(url) merged_query = urllib.parse.parse_qsl(split.query) if isinstance(extra_query, dict): for k, v in extra_query.items(): if not isinstance(v, (tuple, list)): merged_query.append((k, v)) else: for cv in v: merged_query.append((k, cv)) else: merged_query.extend(extra_query) merged_split = urllib.parse.SplitResult( split.scheme, split.netloc, split.path, urllib.parse.urlencode(merged_query), split.fragment, ) return merged_split.geturl()
[ "def", "add_query_to_url", "(", "url", ",", "extra_query", ")", ":", "split", "=", "urllib", ".", "parse", ".", "urlsplit", "(", "url", ")", "merged_query", "=", "urllib", ".", "parse", ".", "parse_qsl", "(", "split", ".", "query", ")", "if", "isinstance", "(", "extra_query", ",", "dict", ")", ":", "for", "k", ",", "v", "in", "extra_query", ".", "items", "(", ")", ":", "if", "not", "isinstance", "(", "v", ",", "(", "tuple", ",", "list", ")", ")", ":", "merged_query", ".", "append", "(", "(", "k", ",", "v", ")", ")", "else", ":", "for", "cv", "in", "v", ":", "merged_query", ".", "append", "(", "(", "k", ",", "cv", ")", ")", "else", ":", "merged_query", ".", "extend", "(", "extra_query", ")", "merged_split", "=", "urllib", ".", "parse", ".", "SplitResult", "(", "split", ".", "scheme", ",", "split", ".", "netloc", ",", "split", ".", "path", ",", "urllib", ".", "parse", ".", "urlencode", "(", "merged_query", ")", ",", "split", ".", "fragment", ",", ")", "return", "merged_split", ".", "geturl", "(", ")" ]
Adds an extra query to URL, returning the new URL. Extra query may be a dict or a list as returned by :func:`urllib.parse.parse_qsl()` and :func:`urllib.parse.parse_qs()`.
[ "Adds", "an", "extra", "query", "to", "URL", "returning", "the", "new", "URL", "." ]
684afb059c93f142150043cafac09b7fd52bfa27
https://github.com/profusion/sgqlc/blob/684afb059c93f142150043cafac09b7fd52bfa27/sgqlc/endpoint/http.py#L33-L59
train
232,799