repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
guaix-ucm/pyemir
emirdrp/processing/wavecal/islitlet_progress.py
islitlet_progress
def islitlet_progress(islitlet, islitlet_max):
    """Print a one-character progress marker for a loop over slitlets.

    Every tenth slitlet prints its tens digit; any other slitlet prints
    a dot.  After the last slitlet a newline is emitted.  The stream is
    flushed on every call so progress is visible immediately.

    Parameters
    ----------
    islitlet : int
        Current slitlet number.
    islitlet_max : int
        Maximum slitlet number.

    """
    marker = str(islitlet // 10) if islitlet % 10 == 0 else '.'
    sys.stdout.write(marker)
    if islitlet == islitlet_max:
        sys.stdout.write('\n')
    sys.stdout.flush()
python
def islitlet_progress(islitlet, islitlet_max): """Auxiliary function to print out progress in loop of slitlets. Parameters ---------- islitlet : int Current slitlet number. islitlet_max : int Maximum slitlet number. """ if islitlet % 10 == 0: cout = str(islitlet // 10) else: cout = '.' sys.stdout.write(cout) if islitlet == islitlet_max: sys.stdout.write('\n') sys.stdout.flush()
[ "def", "islitlet_progress", "(", "islitlet", ",", "islitlet_max", ")", ":", "if", "islitlet", "%", "10", "==", "0", ":", "cout", "=", "str", "(", "islitlet", "//", "10", ")", "else", ":", "cout", "=", "'.'", "sys", ".", "stdout", ".", "write", "(", ...
Auxiliary function to print out progress in loop of slitlets. Parameters ---------- islitlet : int Current slitlet number. islitlet_max : int Maximum slitlet number.
[ "Auxiliary", "function", "to", "print", "out", "progress", "in", "loop", "of", "slitlets", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/islitlet_progress.py#L27-L45
train
49,200
guaix-ucm/pyemir
emirdrp/core/correctors.py
get_corrector_f
def get_corrector_f(rinput, meta, ins, datamodel):
    """Corrector for intensity flat.

    Builds a ``FlatFieldCorrector`` from the master intensity flat
    attached to *rinput*, logging a warning when the flat contains
    negative or non-finite values.
    """
    from emirdrp.processing.flatfield import FlatFieldCorrector

    flat_info = meta['master_flat']
    with rinput.master_flat.open() as hdul:
        _logger.info('loading intensity flat')
        _logger.debug('flat info: %s', flat_info)
        mflat = hdul[0].data
        # Sanity checks: flag negative pixels and NaN/inf in the flat
        below_zero = mflat < 0
        non_finite = ~numpy.isfinite(mflat)
        if numpy.any(below_zero):
            _logger.warning('flat has %d values below 0', below_zero.sum())
        if numpy.any(non_finite):
            _logger.warning('flat has %d NaN', non_finite.sum())
        flat_corrector = FlatFieldCorrector(
            mflat,
            datamodel=datamodel,
            calibid=datamodel.get_imgid(hdul),
        )
    return flat_corrector
python
def get_corrector_f(rinput, meta, ins, datamodel): """Corrector for intensity flat""" from emirdrp.processing.flatfield import FlatFieldCorrector flat_info = meta['master_flat'] with rinput.master_flat.open() as hdul: _logger.info('loading intensity flat') _logger.debug('flat info: %s', flat_info) mflat = hdul[0].data # Check NaN and Ceros mask1 = mflat < 0 mask2 = ~numpy.isfinite(mflat) if numpy.any(mask1): _logger.warning('flat has %d values below 0', mask1.sum()) if numpy.any(mask2): _logger.warning('flat has %d NaN', mask2.sum()) flat_corrector = FlatFieldCorrector(mflat, datamodel=datamodel, calibid=datamodel.get_imgid(hdul)) return flat_corrector
[ "def", "get_corrector_f", "(", "rinput", ",", "meta", ",", "ins", ",", "datamodel", ")", ":", "from", "emirdrp", ".", "processing", ".", "flatfield", "import", "FlatFieldCorrector", "flat_info", "=", "meta", "[", "'master_flat'", "]", "with", "rinput", ".", ...
Corrector for intensity flat
[ "Corrector", "for", "intensity", "flat" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/core/correctors.py#L94-L113
train
49,201
guaix-ucm/pyemir
emirdrp/decorators.py
loginfo
def loginfo(method):
    """Decorator that logs the contents of a Recipe Input.

    Before delegating to *method*, every stored attribute of the recipe
    input is inspected: a plain data frame is logged directly, and each
    frame of an observation result is logged individually, all at debug
    level.  Anything else is silently ignored.
    """
    def loginfo_method(self, rinput):
        for attr_name in rinput.__class__.stored():
            value = getattr(rinput, attr_name)
            if isinstance(value, DataFrame):
                self.logger.debug("DataFrame %s",
                                  info.gather_info_dframe(value))
            elif isinstance(value, ObservationResult):
                for frame in value.images:
                    self.logger.debug("OB DataFrame %s",
                                      info.gather_info_dframe(frame))
        return method(self, rinput)
    return loginfo_method
python
def loginfo(method): """Log the contents of Recipe Input""" def loginfo_method(self, rinput): klass = rinput.__class__ for key in klass.stored(): val = getattr(rinput, key) if isinstance(val, DataFrame): self.logger.debug("DataFrame %s", info.gather_info_dframe(val)) elif isinstance(val, ObservationResult): for f in val.images: self.logger.debug("OB DataFrame %s" , info.gather_info_dframe(f)) else: pass result = method(self, rinput) return result return loginfo_method
[ "def", "loginfo", "(", "method", ")", ":", "def", "loginfo_method", "(", "self", ",", "rinput", ")", ":", "klass", "=", "rinput", ".", "__class__", "for", "key", "in", "klass", ".", "stored", "(", ")", ":", "val", "=", "getattr", "(", "rinput", ",", ...
Log the contents of Recipe Input
[ "Log", "the", "contents", "of", "Recipe", "Input" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/decorators.py#L28-L50
train
49,202
BeyondTheClouds/enoslib
enoslib/host.py
Host.to_host
def to_host(self):
    """Copy or coerce to a Host."""
    # Rebuild a fresh Host carrying the same connection information.
    attrs = dict(alias=self.alias,
                 user=self.user,
                 keyfile=self.keyfile,
                 port=self.port,
                 extra=self.extra)
    return Host(self.address, **attrs)
python
def to_host(self): """Copy or coerce to a Host.""" return Host(self.address, alias=self.alias, user=self.user, keyfile=self.keyfile, port=self.port, extra=self.extra)
[ "def", "to_host", "(", "self", ")", ":", "return", "Host", "(", "self", ".", "address", ",", "alias", "=", "self", ".", "alias", ",", "user", "=", "self", ".", "user", ",", "keyfile", "=", "self", ".", "keyfile", ",", "port", "=", "self", ".", "p...
Copy or coerce to a Host.
[ "Copy", "or", "coerce", "to", "a", "Host", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/host.py#L27-L34
train
49,203
Jaymon/prom
prom/cli/generate.py
get_table_info
def get_table_info(*table_names):
    """Yield information for each matching table.

    For every table found, yields a 3-tuple
    ``(table_name, interface, fields)`` where ``interface`` is the
    Interface the table exists in and ``fields`` are its field
    definitions.  When no *table_names* are given, every table of every
    configured interface is yielded.

    :param *table_names: the tables you are searching for
    """
    if table_names:
        # look each requested table up in every configured interface
        for table_name in table_names:
            for _, inter in get_interfaces().items():
                if inter.has_table(table_name):
                    yield table_name, inter, inter.get_fields(table_name)
    else:
        # no explicit names: enumerate every table of every interface
        for _, inter in get_interfaces().items():
            for table_name in inter.get_tables():
                yield table_name, inter, inter.get_fields(table_name)
python
def get_table_info(*table_names): """Returns a dict with table_name keys mapped to the Interface that table exists in :param *table_names: the tables you are searching for """ ret = {} if table_names: for table_name in table_names: for name, inter in get_interfaces().items(): if inter.has_table(table_name): yield table_name, inter, inter.get_fields(table_name) else: for name, inter in get_interfaces().items(): table_names = inter.get_tables() for table_name in table_names: yield table_name, inter, inter.get_fields(table_name)
[ "def", "get_table_info", "(", "*", "table_names", ")", ":", "ret", "=", "{", "}", "if", "table_names", ":", "for", "table_name", "in", "table_names", ":", "for", "name", ",", "inter", "in", "get_interfaces", "(", ")", ".", "items", "(", ")", ":", "if",...
Returns a dict with table_name keys mapped to the Interface that table exists in :param *table_names: the tables you are searching for
[ "Returns", "a", "dict", "with", "table_name", "keys", "mapped", "to", "the", "Interface", "that", "table", "exists", "in" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/generate.py#L12-L28
train
49,204
Jaymon/prom
prom/cli/generate.py
main_generate
def main_generate(table_names, stream):
    """Write valid prom python code for tables that already exist in a
    database.

    This is really handy when you want to bootstrap an existing database
    to work with prom and don't want to manually create Orm objects for
    the tables you want to use: let `generate` do it for you.
    """
    with stream.open() as fp:
        # module preamble shared by every generated Orm
        for header in ("from datetime import datetime, date",
                       "from decimal import Decimal",
                       "from prom import Orm, Field"):
            fp.write_line(header)
        fp.write_newlines()

        for table_name, inter, fields in get_table_info(*table_names):
            class_name = table_name.title().replace("_", "")
            fp.write_line("class {}(Orm):".format(class_name))
            fp.write_line("    table_name = '{}'".format(table_name))

            conn_name = inter.connection_config.name
            if conn_name:
                fp.write_line("    connection_name = '{}'".format(conn_name))
            fp.write_newlines()

            magic_field_names = {"_id", "_created", "_updated"}

            # the primary key field is emitted first when present
            if "_id" in fields:
                fp.write_line(get_field_def("_id", fields.pop("_id")))
                magic_field_names.discard("_id")

            for field_name, field_d in fields.items():
                fp.write_line(get_field_def(field_name, field_d))

            # disable any magic field the table does not actually define
            for magic_field_name in magic_field_names:
                if magic_field_name not in fields:
                    fp.write_line("    {} = None".format(magic_field_name))

            fp.write_newlines(2)
python
def main_generate(table_names, stream): """This will print out valid prom python code for given tables that already exist in a database. This is really handy when you want to bootstrap an existing database to work with prom and don't want to manually create Orm objects for the tables you want to use, let `generate` do it for you """ with stream.open() as fp: fp.write_line("from datetime import datetime, date") fp.write_line("from decimal import Decimal") fp.write_line("from prom import Orm, Field") fp.write_newlines() for table_name, inter, fields in get_table_info(*table_names): fp.write_line("class {}(Orm):".format(table_name.title().replace("_", ""))) fp.write_line(" table_name = '{}'".format(table_name)) if inter.connection_config.name: fp.write_line(" connection_name = '{}'".format(inter.connection_config.name)) fp.write_newlines() magic_field_names = set(["_id", "_created", "_updated"]) if "_id" in fields: fp.write_line(get_field_def("_id", fields.pop("_id"))) magic_field_names.discard("_id") for field_name, field_d in fields.items(): fp.write_line(get_field_def(field_name, field_d)) for magic_field_name in magic_field_names: if magic_field_name not in fields: fp.write_line(" {} = None".format(magic_field_name)) fp.write_newlines(2)
[ "def", "main_generate", "(", "table_names", ",", "stream", ")", ":", "with", "stream", ".", "open", "(", ")", "as", "fp", ":", "fp", ".", "write_line", "(", "\"from datetime import datetime, date\"", ")", "fp", ".", "write_line", "(", "\"from decimal import Deci...
This will print out valid prom python code for given tables that already exist in a database. This is really handy when you want to bootstrap an existing database to work with prom and don't want to manually create Orm objects for the tables you want to use, let `generate` do it for you
[ "This", "will", "print", "out", "valid", "prom", "python", "code", "for", "given", "tables", "that", "already", "exist", "in", "a", "database", "." ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/generate.py#L50-L84
train
49,205
guaix-ucm/pyemir
emirdrp/instrument/distortions.py
exvp
def exvp(pos_x, pos_y):
    """Convert virtual pixel to real pixel.

    Applies the radial distortion of the instrument around the detector
    center, working in polar coordinates.
    """
    pos_x = numpy.asarray(pos_x)
    pos_y = numpy.asarray(pos_y)
    # work in polar coordinates around the detector center
    center_x, center_y = 1024.5, 1024.5
    delta_x = pos_x - center_x
    delta_y = pos_y - center_y
    rho = numpy.hypot(delta_x, delta_y)
    theta = numpy.arctan2(delta_y, delta_x)
    # radial distortion polynomial evaluated on the angular radius
    r = EMIR_PLATESCALE_RADS * rho
    scale = 1 + 14606.7 * r**2 + 1739716115.1 * r**4
    nx1 = scale * rho * numpy.cos(theta) + center_x
    ny1 = scale * rho * numpy.sin(theta) + center_y
    return nx1, ny1
python
def exvp(pos_x, pos_y): """Convert virtual pixel to real pixel""" pos_x = numpy.asarray(pos_x) pos_y = numpy.asarray(pos_y) # convert virtual pixel to real pixel # convert world coordinate to pixel center = [1024.5, 1024.5] cf = EMIR_PLATESCALE_RADS pos_base_x = pos_x - center[0] pos_base_y = pos_y - center[1] ra = numpy.hypot(pos_base_x, pos_base_y) thet = numpy.arctan2(pos_base_y, pos_base_x) r = cf * ra rr1 = 1 + 14606.7 * r**2 + 1739716115.1 * r**4 nx1 = rr1 * ra * numpy.cos(thet) + center[0] ny1 = rr1 * ra * numpy.sin(thet) + center[1] return nx1, ny1
[ "def", "exvp", "(", "pos_x", ",", "pos_y", ")", ":", "pos_x", "=", "numpy", ".", "asarray", "(", "pos_x", ")", "pos_y", "=", "numpy", ".", "asarray", "(", "pos_y", ")", "# convert virtual pixel to real pixel", "# convert world coordinate to pixel", "center", "="...
Convert virtual pixel to real pixel
[ "Convert", "virtual", "pixel", "to", "real", "pixel" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/distortions.py#L26-L45
train
49,206
IdentityPython/oidcendpoint
src/oidcendpoint/oidc/registration.py
match_sp_sep
def match_sp_sep(first, second):
    """
    Verify that all the values in 'first' appear in 'second'.
    The values can either be in the form of lists or as space separated
    items.

    :param first:
    :param second:
    :return: True/False
    """
    def _as_sets(value):
        # a list yields one set per element (each element split on
        # spaces); a bare string yields one singleton set per word
        if isinstance(value, list):
            return [set(item.split(" ")) for item in value]
        return [{item} for item in value.split(" ")]

    one = _as_sets(first)
    other = _as_sets(second)
    # every set built from 'first' must appear among those from 'second'
    return all(item in other for item in one)
python
def match_sp_sep(first, second): """ Verify that all the values in 'first' appear in 'second'. The values can either be in the form of lists or as space separated items. :param first: :param second: :return: True/False """ if isinstance(first, list): one = [set(v.split(" ")) for v in first] else: one = [{v} for v in first.split(" ")] if isinstance(second, list): other = [set(v.split(" ")) for v in second] else: other = [{v} for v in second.split(" ")] # all values in one must appear in other if any(rt not in other for rt in one): return False return True
[ "def", "match_sp_sep", "(", "first", ",", "second", ")", ":", "if", "isinstance", "(", "first", ",", "list", ")", ":", "one", "=", "[", "set", "(", "v", ".", "split", "(", "\" \"", ")", ")", "for", "v", "in", "first", "]", "else", ":", "one", "...
Verify that all the values in 'first' appear in 'second'. The values can either be in the form of lists or as space separated items. :param first: :param second: :return: True/False
[ "Verify", "that", "all", "the", "values", "in", "first", "appear", "in", "second", ".", "The", "values", "can", "either", "be", "in", "the", "form", "of", "lists", "or", "as", "space", "separated", "items", "." ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/oidc/registration.py#L57-L80
train
49,207
IdentityPython/oidcendpoint
src/oidcendpoint/oidc/registration.py
Registration._verify_sector_identifier
def _verify_sector_identifier(self, request):
    """
    Verify `sector_identifier_uri` is reachable and that it contains
    `redirect_uri`s.

    :param request: Provider registration request
    :return: si_redirects, sector_id
    :raises: InvalidSectorIdentifier
    """
    si_url = request["sector_identifier_uri"]
    try:
        # fetch the sector identifier document over HTTP
        res = self.endpoint_context.httpc.get(si_url)
    except Exception as err:
        # any transport failure is logged, then treated as "unreadable"
        logger.error(err)
        res = None

    if not res:
        raise InvalidSectorIdentifier("Couldn't read from sector_identifier_uri")

    logger.debug("sector_identifier_uri => %s", sanitize(res.text))

    try:
        # the response body must be a JSON document (list of redirect URIs)
        si_redirects = json.loads(res.text)
    except ValueError:
        raise InvalidSectorIdentifier(
            "Error deserializing sector_identifier_uri content")

    if "redirect_uris" in request:
        logger.debug("redirect_uris: %s", request["redirect_uris"])
        # every redirect_uri in the request must be listed in the document
        for uri in request["redirect_uris"]:
            if uri not in si_redirects:
                raise InvalidSectorIdentifier(
                    "redirect_uri missing from sector_identifiers")

    return si_redirects, si_url
python
def _verify_sector_identifier(self, request): """ Verify `sector_identifier_uri` is reachable and that it contains `redirect_uri`s. :param request: Provider registration request :return: si_redirects, sector_id :raises: InvalidSectorIdentifier """ si_url = request["sector_identifier_uri"] try: res = self.endpoint_context.httpc.get(si_url) except Exception as err: logger.error(err) res = None if not res: raise InvalidSectorIdentifier("Couldn't read from sector_identifier_uri") logger.debug("sector_identifier_uri => %s", sanitize(res.text)) try: si_redirects = json.loads(res.text) except ValueError: raise InvalidSectorIdentifier( "Error deserializing sector_identifier_uri content") if "redirect_uris" in request: logger.debug("redirect_uris: %s", request["redirect_uris"]) for uri in request["redirect_uris"]: if uri not in si_redirects: raise InvalidSectorIdentifier( "redirect_uri missing from sector_identifiers") return si_redirects, si_url
[ "def", "_verify_sector_identifier", "(", "self", ",", "request", ")", ":", "si_url", "=", "request", "[", "\"sector_identifier_uri\"", "]", "try", ":", "res", "=", "self", ".", "endpoint_context", ".", "httpc", ".", "get", "(", "si_url", ")", "except", "Exce...
Verify `sector_identifier_uri` is reachable and that it contains `redirect_uri`s. :param request: Provider registration request :return: si_redirects, sector_id :raises: InvalidSectorIdentifier
[ "Verify", "sector_identifier_uri", "is", "reachable", "and", "that", "it", "contains", "redirect_uri", "s", "." ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/oidc/registration.py#L288-L322
train
49,208
guaix-ucm/pyemir
emirdrp/util/sexcatalog.py
SExtractorfile.read
def read(self):
    """
    Read the file until EOF and return a list of dictionaries.
    """
    # accumulate rows until readline() yields a falsy end-of-file marker
    rows = []
    while True:
        row = self.readline()
        if not row:
            break
        rows.append(row)
    return rows
python
def read(self): """ Read the file until EOF and return a list of dictionaries. """ __result = [] __ll = self.readline() while __ll: __result.append(__ll) __ll = self.readline() return list(__result)
[ "def", "read", "(", "self", ")", ":", "__result", "=", "[", "]", "__ll", "=", "self", ".", "readline", "(", ")", "while", "__ll", ":", "__result", ".", "append", "(", "__ll", ")", "__ll", "=", "self", ".", "readline", "(", ")", "return", "list", ...
Read the file until EOF and return a list of dictionaries.
[ "Read", "the", "file", "until", "EOF", "and", "return", "a", "list", "of", "dictionaries", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/util/sexcatalog.py#L761-L772
train
49,209
guaix-ucm/pyemir
emirdrp/util/sexcatalog.py
SExtractorfile.close
def close(self):
    """
    Close the SExtractor file.
    """
    # close the underlying file object only if it is present and not
    # already closed, then record the closed state on the wrapper
    if self._file:
        if not self._file.closed:
            self._file.close()
        self.closed = True
python
def close(self): """ Close the SExtractor file. """ if self._file: if not(self._file.closed): self._file.close() self.closed = True
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_file", ":", "if", "not", "(", "self", ".", "_file", ".", "closed", ")", ":", "self", ".", "_file", ".", "close", "(", ")", "self", ".", "closed", "=", "True" ]
Close the SExtractor file.
[ "Close", "the", "SExtractor", "file", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/util/sexcatalog.py#L777-L784
train
49,210
BeyondTheClouds/enoslib
enoslib/infra/enos_g5k/driver.py
get_driver
def get_driver(configuration):
    """Build an instance of the driver to interact with G5K."""
    resources = configuration["resources"]
    machines = resources["machines"]
    networks = resources["networks"]

    oargrid_jobids = configuration.get("oargrid_jobids")
    if oargrid_jobids:
        # jobids supplied: reuse the already-submitted oargrid jobs
        logger.debug("Loading the OargridStaticDriver")
        return OargridStaticDriver(oargrid_jobids)

    # no jobids: build a driver that will submit a new reservation
    job_name = configuration.get("job_name", DEFAULT_JOB_NAME)
    walltime = configuration.get("walltime", DEFAULT_WALLTIME)
    job_type = configuration.get("job_type", JOB_TYPE_DEPLOY)
    reservation_date = configuration.get("reservation", False)
    # NOTE(msimonin): some time ago asimonet proposes to auto-detect
    # the queues and it was quiet convenient
    # see https://github.com/BeyondTheClouds/enos/pull/62
    queue = configuration.get("queue", None)
    logger.debug("Loading the OargridDynamicDriver")
    return OargridDynamicDriver(
        job_name,
        walltime,
        job_type,
        reservation_date,
        queue,
        machines,
        networks,
    )
python
def get_driver(configuration): """Build an instance of the driver to interact with G5K """ resources = configuration["resources"] machines = resources["machines"] networks = resources["networks"] oargrid_jobids = configuration.get("oargrid_jobids") if oargrid_jobids: logger.debug("Loading the OargridStaticDriver") return OargridStaticDriver(oargrid_jobids) else: job_name = configuration.get("job_name", DEFAULT_JOB_NAME) walltime = configuration.get("walltime", DEFAULT_WALLTIME) job_type = configuration.get("job_type", JOB_TYPE_DEPLOY) reservation_date = configuration.get("reservation", False) # NOTE(msimonin): some time ago asimonet proposes to auto-detect # the queues and it was quiet convenient # see https://github.com/BeyondTheClouds/enos/pull/62 queue = configuration.get("queue", None) logger.debug("Loading the OargridDynamicDriver") return OargridDynamicDriver( job_name, walltime, job_type, reservation_date, queue, machines, networks )
[ "def", "get_driver", "(", "configuration", ")", ":", "resources", "=", "configuration", "[", "\"resources\"", "]", "machines", "=", "resources", "[", "\"machines\"", "]", "networks", "=", "resources", "[", "\"networks\"", "]", "oargrid_jobids", "=", "configuration...
Build an instance of the driver to interact with G5K
[ "Build", "an", "instance", "of", "the", "driver", "to", "interact", "with", "G5K" ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/driver.py#L18-L48
train
49,211
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
ver_dec_content
def ver_dec_content(parts, sign_key=None, enc_key=None, sign_alg='SHA256'):
    """
    Verifies the value of a cookie

    :param parts: The parts of the payload
    :param sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param sign_alg: Which signing algorithm was used
    :return: A tuple with basic information and a timestamp
    """
    if parts is None:
        return None
    elif len(parts) == 3:
        # 3 parts: signed-only payload -> verify the cookie signature
        timestamp, load, b64_mac = parts
        mac = base64.b64decode(b64_mac)
        verifier = HMACSigner(algorithm=sign_alg)
        # MAC covers the concatenation of payload and timestamp
        if verifier.verify(load.encode('utf-8') + timestamp.encode('utf-8'),
                           mac, sign_key.key):
            return load, timestamp
        else:
            raise VerificationError()
    elif len(parts) == 4:
        # 4 parts: AES-GCM encrypted payload (timestamp, iv, ciphertext, tag)
        # parts[0] is the timestamp; kept here but re-read from the
        # decrypted payload below
        b_timestamp = parts[0]
        iv = base64.b64decode(parts[1])
        ciphertext = base64.b64decode(parts[2])
        tag = base64.b64decode(parts[3])

        decrypter = AES_GCMEncrypter(key=enc_key.key)
        msg = decrypter.decrypt(ciphertext, iv, tag=tag)
        # decrypted payload is length-value packed: [load, timestamp(, mac)]
        p = lv_unpack(msg.decode('utf-8'))
        load = p[0]
        timestamp = p[1]
        if len(p) == 3:
            # an inner MAC is also present: verify it before trusting
            verifier = HMACSigner(algorithm=sign_alg)
            if verifier.verify(load.encode('utf-8')
                               + timestamp.encode('utf-8'),
                               base64.b64decode(p[2]), sign_key.key):
                return load, timestamp
        else:
            # encryption alone authenticates (GCM tag already checked)
            return load, timestamp
    # unrecognized number of parts, or inner MAC verification failed
    return None
python
def ver_dec_content(parts, sign_key=None, enc_key=None, sign_alg='SHA256'): """ Verifies the value of a cookie :param parts: The parts of the payload :param sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param sign_alg: Which signing algorithm to was used :return: A tuple with basic information and a timestamp """ if parts is None: return None elif len(parts) == 3: # verify the cookie signature timestamp, load, b64_mac = parts mac = base64.b64decode(b64_mac) verifier = HMACSigner(algorithm=sign_alg) if verifier.verify(load.encode('utf-8') + timestamp.encode('utf-8'), mac, sign_key.key): return load, timestamp else: raise VerificationError() elif len(parts) == 4: b_timestamp = parts[0] iv = base64.b64decode(parts[1]) ciphertext = base64.b64decode(parts[2]) tag = base64.b64decode(parts[3]) decrypter = AES_GCMEncrypter(key=enc_key.key) msg = decrypter.decrypt(ciphertext, iv, tag=tag) p = lv_unpack(msg.decode('utf-8')) load = p[0] timestamp = p[1] if len(p) == 3: verifier = HMACSigner(algorithm=sign_alg) if verifier.verify(load.encode('utf-8') + timestamp.encode('utf-8'), base64.b64decode(p[2]), sign_key.key): return load, timestamp else: return load, timestamp return None
[ "def", "ver_dec_content", "(", "parts", ",", "sign_key", "=", "None", ",", "enc_key", "=", "None", ",", "sign_alg", "=", "'SHA256'", ")", ":", "if", "parts", "is", "None", ":", "return", "None", "elif", "len", "(", "parts", ")", "==", "3", ":", "# ve...
Verifies the value of a cookie :param parts: The parts of the payload :param sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param sign_alg: Which signing algorithm to was used :return: A tuple with basic information and a timestamp
[ "Verifies", "the", "value", "of", "a", "cookie" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L88-L129
train
49,212
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
make_cookie_content
def make_cookie_content(name, load, sign_key, domain=None, path=None,
                        timestamp="", enc_key=None, max_age=0,
                        sign_alg='SHA256'):
    """
    Create and return a cookies content

    If you only provide a `seed`, a HMAC gets added to the cookies value
    and this is checked, when the cookie is parsed again.

    If you provide both `seed` and `enc_key`, the cookie gets protected
    by using AEAD encryption. This provides both a MAC over the whole cookie
    and encrypts the `load` in a single step.

    The `seed` and `enc_key` parameters should be byte strings of at least
    16 bytes length each. Those are used as cryptographic keys.

    :param name: Cookie name
    :type name: text
    :param load: Cookie load
    :type load: text
    :param sign_key: A sign_key key for payload signing
    :type sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param domain: The domain of the cookie
    :param path: The path specification for the cookie
    :param timestamp: A time stamp
    :type timestamp: text
    :param enc_key: The key to use for payload encryption.
    :type enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param max_age: The time in seconds for when a cookie will be deleted
    :type max_age: int
    :return: A SimpleCookie instance
    """
    if not timestamp:
        timestamp = str(int(time.time()))

    # sign (and optionally encrypt) the payload together with the timestamp
    payload = sign_enc_payload(load, timestamp, sign_key=sign_key,
                               enc_key=enc_key, sign_alg=sign_alg)

    morsel = {"value": payload}
    if path is not None:
        morsel["path"] = path
    if domain is not None:
        morsel["domain"] = domain
    morsel['httponly'] = True
    if max_age:
        # translate the relative max_age into an absolute expiry date
        morsel["expires"] = in_a_while(seconds=max_age)

    return {name: morsel}
python
def make_cookie_content(name, load, sign_key, domain=None, path=None, timestamp="", enc_key=None, max_age=0, sign_alg='SHA256'): """ Create and return a cookies content If you only provide a `seed`, a HMAC gets added to the cookies value and this is checked, when the cookie is parsed again. If you provide both `seed` and `enc_key`, the cookie gets protected by using AEAD encryption. This provides both a MAC over the whole cookie and encrypts the `load` in a single step. The `seed` and `enc_key` parameters should be byte strings of at least 16 bytes length each. Those are used as cryptographic keys. :param name: Cookie name :type name: text :param load: Cookie load :type load: text :param sign_key: A sign_key key for payload signing :type sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param domain: The domain of the cookie :param path: The path specification for the cookie :param timestamp: A time stamp :type timestamp: text :param enc_key: The key to use for payload encryption. :type enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param max_age: The time in seconds for when a cookie will be deleted :type max_age: int :return: A SimpleCookie instance """ if not timestamp: timestamp = str(int(time.time())) _cookie_value = sign_enc_payload(load, timestamp, sign_key=sign_key, enc_key=enc_key, sign_alg=sign_alg) content = {name: {"value": _cookie_value}} if path is not None: content[name]["path"] = path if domain is not None: content[name]["domain"] = domain content[name]['httponly'] = True if max_age: content[name]["expires"] = in_a_while(seconds=max_age) return content
[ "def", "make_cookie_content", "(", "name", ",", "load", ",", "sign_key", ",", "domain", "=", "None", ",", "path", "=", "None", ",", "timestamp", "=", "\"\"", ",", "enc_key", "=", "None", ",", "max_age", "=", "0", ",", "sign_alg", "=", "'SHA256'", ")", ...
Create and return a cookies content If you only provide a `seed`, a HMAC gets added to the cookies value and this is checked, when the cookie is parsed again. If you provide both `seed` and `enc_key`, the cookie gets protected by using AEAD encryption. This provides both a MAC over the whole cookie and encrypts the `load` in a single step. The `seed` and `enc_key` parameters should be byte strings of at least 16 bytes length each. Those are used as cryptographic keys. :param name: Cookie name :type name: text :param load: Cookie load :type load: text :param sign_key: A sign_key key for payload signing :type sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param domain: The domain of the cookie :param path: The path specification for the cookie :param timestamp: A time stamp :type timestamp: text :param enc_key: The key to use for payload encryption. :type enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param max_age: The time in seconds for when a cookie will be deleted :type max_age: int :return: A SimpleCookie instance
[ "Create", "and", "return", "a", "cookies", "content" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L132-L181
train
49,213
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
cookie_parts
def cookie_parts(name, kaka):
    """
    Give me the parts of the cookie payload

    :param name: A name of a cookie object
    :param kaka: The cookie
    :return: A list of parts or None if there is no cookie object with the
        given name
    """
    morsel = SimpleCookie(as_unicode(kaka)).get(name)
    if not morsel:
        return None
    # the payload is '|'-separated inside the cookie value
    return morsel.value.split("|")
python
def cookie_parts(name, kaka): """ Give me the parts of the cookie payload :param name: A name of a cookie object :param kaka: The cookie :return: A list of parts or None if there is no cookie object with the given name """ cookie_obj = SimpleCookie(as_unicode(kaka)) morsel = cookie_obj.get(name) if morsel: return morsel.value.split("|") else: return None
[ "def", "cookie_parts", "(", "name", ",", "kaka", ")", ":", "cookie_obj", "=", "SimpleCookie", "(", "as_unicode", "(", "kaka", ")", ")", "morsel", "=", "cookie_obj", ".", "get", "(", "name", ")", "if", "morsel", ":", "return", "morsel", ".", "value", "....
Give me the parts of the cookie payload :param name: A name of a cookie object :param kaka: The cookie :return: A list of parts or None if there is no cookie object with the given name
[ "Give", "me", "the", "parts", "of", "the", "cookie", "payload" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L201-L215
train
49,214
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
CookieDealer.delete_cookie
def delete_cookie(self, cookie_name=None): """ Create a cookie that will immediately expire when it hits the other side. :param cookie_name: Name of the cookie :return: A tuple to be added to headers """ if cookie_name is None: cookie_name = self.default_value['name'] return self.create_cookie("", "", cookie_name=cookie_name, kill=True)
python
def delete_cookie(self, cookie_name=None): """ Create a cookie that will immediately expire when it hits the other side. :param cookie_name: Name of the cookie :return: A tuple to be added to headers """ if cookie_name is None: cookie_name = self.default_value['name'] return self.create_cookie("", "", cookie_name=cookie_name, kill=True)
[ "def", "delete_cookie", "(", "self", ",", "cookie_name", "=", "None", ")", ":", "if", "cookie_name", "is", "None", ":", "cookie_name", "=", "self", ".", "default_value", "[", "'name'", "]", "return", "self", ".", "create_cookie", "(", "\"\"", ",", "\"\"", ...
Create a cookie that will immediately expire when it hits the other side. :param cookie_name: Name of the cookie :return: A tuple to be added to headers
[ "Create", "a", "cookie", "that", "will", "immediately", "expire", "when", "it", "hits", "the", "other", "side", "." ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L286-L297
train
49,215
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
CookieDealer.get_cookie_value
def get_cookie_value(self, cookie=None, cookie_name=None): """ Return information stored in a Cookie :param cookie: A cookie instance :param cookie_name: The name of the cookie I'm looking for :return: tuple (value, timestamp, type) """ if cookie_name is None: cookie_name = self.default_value['name'] if cookie is None or cookie_name is None: return None else: try: info, timestamp = parse_cookie(cookie_name, self.sign_key, cookie, self.enc_key, self.sign_alg) except (TypeError, AssertionError): return None else: value, _ts, typ = info.split("::") if timestamp == _ts: return value, _ts, typ return None
python
def get_cookie_value(self, cookie=None, cookie_name=None): """ Return information stored in a Cookie :param cookie: A cookie instance :param cookie_name: The name of the cookie I'm looking for :return: tuple (value, timestamp, type) """ if cookie_name is None: cookie_name = self.default_value['name'] if cookie is None or cookie_name is None: return None else: try: info, timestamp = parse_cookie(cookie_name, self.sign_key, cookie, self.enc_key, self.sign_alg) except (TypeError, AssertionError): return None else: value, _ts, typ = info.split("::") if timestamp == _ts: return value, _ts, typ return None
[ "def", "get_cookie_value", "(", "self", ",", "cookie", "=", "None", ",", "cookie_name", "=", "None", ")", ":", "if", "cookie_name", "is", "None", ":", "cookie_name", "=", "self", ".", "default_value", "[", "'name'", "]", "if", "cookie", "is", "None", "or...
Return information stored in a Cookie :param cookie: A cookie instance :param cookie_name: The name of the cookie I'm looking for :return: tuple (value, timestamp, type)
[ "Return", "information", "stored", "in", "a", "Cookie" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L344-L367
train
49,216
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
CookieDealer.append_cookie
def append_cookie(self, cookie, name, payload, typ, domain=None, path=None, timestamp="", max_age=0): """ Adds a cookie to a SimpleCookie instance :param cookie: :param name: :param payload: :param typ: :param domain: :param path: :param timestamp: :param max_age: :return: """ timestamp = str(int(time.time())) # create cookie payload try: _payload = "::".join([payload, timestamp, typ]) except TypeError: _payload = "::".join([payload[0], timestamp, typ]) content = make_cookie_content(name, _payload, self.sign_key, domain=domain, path=path, timestamp=timestamp, enc_key=self.enc_key, max_age=max_age, sign_alg=self.sign_alg) for name, args in content.items(): cookie[name] = args['value'] for key, value in args.items(): if key == 'value': continue cookie[name][key] = value return cookie
python
def append_cookie(self, cookie, name, payload, typ, domain=None, path=None, timestamp="", max_age=0): """ Adds a cookie to a SimpleCookie instance :param cookie: :param name: :param payload: :param typ: :param domain: :param path: :param timestamp: :param max_age: :return: """ timestamp = str(int(time.time())) # create cookie payload try: _payload = "::".join([payload, timestamp, typ]) except TypeError: _payload = "::".join([payload[0], timestamp, typ]) content = make_cookie_content(name, _payload, self.sign_key, domain=domain, path=path, timestamp=timestamp, enc_key=self.enc_key, max_age=max_age, sign_alg=self.sign_alg) for name, args in content.items(): cookie[name] = args['value'] for key, value in args.items(): if key == 'value': continue cookie[name][key] = value return cookie
[ "def", "append_cookie", "(", "self", ",", "cookie", ",", "name", ",", "payload", ",", "typ", ",", "domain", "=", "None", ",", "path", "=", "None", ",", "timestamp", "=", "\"\"", ",", "max_age", "=", "0", ")", ":", "timestamp", "=", "str", "(", "int...
Adds a cookie to a SimpleCookie instance :param cookie: :param name: :param payload: :param typ: :param domain: :param path: :param timestamp: :param max_age: :return:
[ "Adds", "a", "cookie", "to", "a", "SimpleCookie", "instance" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L369-L404
train
49,217
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_ac_power
def f_ac_power(inverter, v_mp, p_mp): """ Calculate AC power :param inverter: :param v_mp: :param p_mp: :return: AC power [W] """ return pvlib.pvsystem.snlinverter(v_mp, p_mp, inverter).flatten()
python
def f_ac_power(inverter, v_mp, p_mp): """ Calculate AC power :param inverter: :param v_mp: :param p_mp: :return: AC power [W] """ return pvlib.pvsystem.snlinverter(v_mp, p_mp, inverter).flatten()
[ "def", "f_ac_power", "(", "inverter", ",", "v_mp", ",", "p_mp", ")", ":", "return", "pvlib", ".", "pvsystem", ".", "snlinverter", "(", "v_mp", ",", "p_mp", ",", "inverter", ")", ".", "flatten", "(", ")" ]
Calculate AC power :param inverter: :param v_mp: :param p_mp: :return: AC power [W]
[ "Calculate", "AC", "power" ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L10-L19
train
49,218
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_dc_power
def f_dc_power(effective_irradiance, cell_temp, module): """ Calculate DC power using Sandia Performance model :param effective_irradiance: effective irradiance [suns] :param cell_temp: PV cell temperature [degC] :param module: PV module dictionary or pandas data frame :returns: i_sc, i_mp, v_oc, v_mp, p_mp """ dc = pvlib.pvsystem.sapm(effective_irradiance, cell_temp, module) fields = ('i_sc', 'i_mp', 'v_oc', 'v_mp', 'p_mp') return tuple(dc[field] for field in fields)
python
def f_dc_power(effective_irradiance, cell_temp, module): """ Calculate DC power using Sandia Performance model :param effective_irradiance: effective irradiance [suns] :param cell_temp: PV cell temperature [degC] :param module: PV module dictionary or pandas data frame :returns: i_sc, i_mp, v_oc, v_mp, p_mp """ dc = pvlib.pvsystem.sapm(effective_irradiance, cell_temp, module) fields = ('i_sc', 'i_mp', 'v_oc', 'v_mp', 'p_mp') return tuple(dc[field] for field in fields)
[ "def", "f_dc_power", "(", "effective_irradiance", ",", "cell_temp", ",", "module", ")", ":", "dc", "=", "pvlib", ".", "pvsystem", ".", "sapm", "(", "effective_irradiance", ",", "cell_temp", ",", "module", ")", "fields", "=", "(", "'i_sc'", ",", "'i_mp'", "...
Calculate DC power using Sandia Performance model :param effective_irradiance: effective irradiance [suns] :param cell_temp: PV cell temperature [degC] :param module: PV module dictionary or pandas data frame :returns: i_sc, i_mp, v_oc, v_mp, p_mp
[ "Calculate", "DC", "power", "using", "Sandia", "Performance", "model" ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L22-L33
train
49,219
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_effective_irradiance
def f_effective_irradiance(poa_direct, poa_diffuse, am_abs, aoi, module): """ Calculate effective irradiance for Sandia Performance model :param poa_direct: plane of array direct irradiance [W/m**2] :param poa_diffuse: plane of array diffuse irradiance [W/m**2] :param am_abs: absolute air mass [dimensionless] :param aoi: angle of incidence [degrees] :param module: PV module dictionary or pandas data frame :return: effective irradiance (Ee) [suns] """ Ee = pvlib.pvsystem.sapm_effective_irradiance(poa_direct, poa_diffuse, am_abs, aoi, module) return Ee.reshape(1, -1)
python
def f_effective_irradiance(poa_direct, poa_diffuse, am_abs, aoi, module): """ Calculate effective irradiance for Sandia Performance model :param poa_direct: plane of array direct irradiance [W/m**2] :param poa_diffuse: plane of array diffuse irradiance [W/m**2] :param am_abs: absolute air mass [dimensionless] :param aoi: angle of incidence [degrees] :param module: PV module dictionary or pandas data frame :return: effective irradiance (Ee) [suns] """ Ee = pvlib.pvsystem.sapm_effective_irradiance(poa_direct, poa_diffuse, am_abs, aoi, module) return Ee.reshape(1, -1)
[ "def", "f_effective_irradiance", "(", "poa_direct", ",", "poa_diffuse", ",", "am_abs", ",", "aoi", ",", "module", ")", ":", "Ee", "=", "pvlib", ".", "pvsystem", ".", "sapm_effective_irradiance", "(", "poa_direct", ",", "poa_diffuse", ",", "am_abs", ",", "aoi",...
Calculate effective irradiance for Sandia Performance model :param poa_direct: plane of array direct irradiance [W/m**2] :param poa_diffuse: plane of array diffuse irradiance [W/m**2] :param am_abs: absolute air mass [dimensionless] :param aoi: angle of incidence [degrees] :param module: PV module dictionary or pandas data frame :return: effective irradiance (Ee) [suns]
[ "Calculate", "effective", "irradiance", "for", "Sandia", "Performance", "model" ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L36-L49
train
49,220
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_cell_temp
def f_cell_temp(poa_global, wind_speed, air_temp): """ Calculate cell temperature. :param poa_global: plane of array global irradiance [W/m**2] :param wind_speed: wind speed [m/s] :param air_temp: ambient dry bulb air temperature [degC] :return: cell temperature [degC] """ temps = pvlib.pvsystem.sapm_celltemp(poa_global, wind_speed, air_temp) return temps['temp_cell'].values, temps['temp_module'].values
python
def f_cell_temp(poa_global, wind_speed, air_temp): """ Calculate cell temperature. :param poa_global: plane of array global irradiance [W/m**2] :param wind_speed: wind speed [m/s] :param air_temp: ambient dry bulb air temperature [degC] :return: cell temperature [degC] """ temps = pvlib.pvsystem.sapm_celltemp(poa_global, wind_speed, air_temp) return temps['temp_cell'].values, temps['temp_module'].values
[ "def", "f_cell_temp", "(", "poa_global", ",", "wind_speed", ",", "air_temp", ")", ":", "temps", "=", "pvlib", ".", "pvsystem", ".", "sapm_celltemp", "(", "poa_global", ",", "wind_speed", ",", "air_temp", ")", "return", "temps", "[", "'temp_cell'", "]", ".", ...
Calculate cell temperature. :param poa_global: plane of array global irradiance [W/m**2] :param wind_speed: wind speed [m/s] :param air_temp: ambient dry bulb air temperature [degC] :return: cell temperature [degC]
[ "Calculate", "cell", "temperature", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L52-L62
train
49,221
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_aoi
def f_aoi(surface_tilt, surface_azimuth, solar_zenith, solar_azimuth): """ Calculate angle of incidence :param surface_tilt: :param surface_azimuth: :param solar_zenith: :param solar_azimuth: :return: angle of incidence [deg] """ return pvlib.irradiance.aoi(surface_tilt, surface_azimuth, solar_zenith, solar_azimuth)
python
def f_aoi(surface_tilt, surface_azimuth, solar_zenith, solar_azimuth): """ Calculate angle of incidence :param surface_tilt: :param surface_azimuth: :param solar_zenith: :param solar_azimuth: :return: angle of incidence [deg] """ return pvlib.irradiance.aoi(surface_tilt, surface_azimuth, solar_zenith, solar_azimuth)
[ "def", "f_aoi", "(", "surface_tilt", ",", "surface_azimuth", ",", "solar_zenith", ",", "solar_azimuth", ")", ":", "return", "pvlib", ".", "irradiance", ".", "aoi", "(", "surface_tilt", ",", "surface_azimuth", ",", "solar_zenith", ",", "solar_azimuth", ")" ]
Calculate angle of incidence :param surface_tilt: :param surface_azimuth: :param solar_zenith: :param solar_azimuth: :return: angle of incidence [deg]
[ "Calculate", "angle", "of", "incidence" ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L65-L76
train
49,222
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
find_position
def find_position(edges, prow, bstart, bend, total=5): """Find a EMIR CSU bar position in a edge image. Parameters ========== edges; ndarray, a 2d image with 1 where is a border, 0 otherwise prow: int, reference 'row' of the bars bstart: int, minimum 'x' position of a bar (0-based) bend: int maximum 'x' position of a bar (0 based) total: int number of rows to check near `prow` Return ====== list of (x, y) centroids """ nt = total // 2 # This bar is too near the border if prow-nt < 0 or prow + nt >= edges.shape[0]: return [] s2edges = edges[prow-nt:prow+nt+1, bstart:bend] structure = morph.generate_binary_structure(2,2) # 8 way conection har, num_f = mes.label(s2edges, structure=structure) cen_of_mass = mes.center_of_mass(s2edges, labels=har, index=range(1, num_f + 1)) # center_of_mass returns y, x coordinates cen_of_mass_off = [(x + bstart, prow-nt + y) for y,x in cen_of_mass] return cen_of_mass_off
python
def find_position(edges, prow, bstart, bend, total=5): """Find a EMIR CSU bar position in a edge image. Parameters ========== edges; ndarray, a 2d image with 1 where is a border, 0 otherwise prow: int, reference 'row' of the bars bstart: int, minimum 'x' position of a bar (0-based) bend: int maximum 'x' position of a bar (0 based) total: int number of rows to check near `prow` Return ====== list of (x, y) centroids """ nt = total // 2 # This bar is too near the border if prow-nt < 0 or prow + nt >= edges.shape[0]: return [] s2edges = edges[prow-nt:prow+nt+1, bstart:bend] structure = morph.generate_binary_structure(2,2) # 8 way conection har, num_f = mes.label(s2edges, structure=structure) cen_of_mass = mes.center_of_mass(s2edges, labels=har, index=range(1, num_f + 1)) # center_of_mass returns y, x coordinates cen_of_mass_off = [(x + bstart, prow-nt + y) for y,x in cen_of_mass] return cen_of_mass_off
[ "def", "find_position", "(", "edges", ",", "prow", ",", "bstart", ",", "bend", ",", "total", "=", "5", ")", ":", "nt", "=", "total", "//", "2", "# This bar is too near the border", "if", "prow", "-", "nt", "<", "0", "or", "prow", "+", "nt", ">=", "ed...
Find a EMIR CSU bar position in a edge image. Parameters ========== edges; ndarray, a 2d image with 1 where is a border, 0 otherwise prow: int, reference 'row' of the bars bstart: int, minimum 'x' position of a bar (0-based) bend: int maximum 'x' position of a bar (0 based) total: int number of rows to check near `prow` Return ====== list of (x, y) centroids
[ "Find", "a", "EMIR", "CSU", "bar", "position", "in", "a", "edge", "image", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L38-L77
train
49,223
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
calc_fwhm
def calc_fwhm(img, region, fexpand=3, axis=0): """Compute the FWHM in the direction given by axis""" # We compute know the FWHM of the slit # Given the computed position of the slit # Expand 'fexpand' pixels around # and cut an slice in the median filtered image xpregion = expand_region(region, fexpand, fexpand) cslit = img[xpregion] # Collapse it pslit = cslit.mean(axis=axis) # Estimate the background as a flat line # starting in pslit[0] and ending in pslit[-1] x2 = len(pslit) y1, y2 = pslit[0], pslit[-1] mslope = (y2-y1) / x2 # background estimation backstim = mslope*numpy.arange(x2) + y1 # We subtract background qslit = pslit-backstim # and find the pixel of the maximum pidx = numpy.argmax(qslit) peak, fwhm = fmod.compute_fwhm_1d_simple(qslit, pidx) return fwhm
python
def calc_fwhm(img, region, fexpand=3, axis=0): """Compute the FWHM in the direction given by axis""" # We compute know the FWHM of the slit # Given the computed position of the slit # Expand 'fexpand' pixels around # and cut an slice in the median filtered image xpregion = expand_region(region, fexpand, fexpand) cslit = img[xpregion] # Collapse it pslit = cslit.mean(axis=axis) # Estimate the background as a flat line # starting in pslit[0] and ending in pslit[-1] x2 = len(pslit) y1, y2 = pslit[0], pslit[-1] mslope = (y2-y1) / x2 # background estimation backstim = mslope*numpy.arange(x2) + y1 # We subtract background qslit = pslit-backstim # and find the pixel of the maximum pidx = numpy.argmax(qslit) peak, fwhm = fmod.compute_fwhm_1d_simple(qslit, pidx) return fwhm
[ "def", "calc_fwhm", "(", "img", ",", "region", ",", "fexpand", "=", "3", ",", "axis", "=", "0", ")", ":", "# We compute know the FWHM of the slit", "# Given the computed position of the slit", "# Expand 'fexpand' pixels around", "# and cut an slice in the median filtered image"...
Compute the FWHM in the direction given by axis
[ "Compute", "the", "FWHM", "in", "the", "direction", "given", "by", "axis" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L80-L107
train
49,224
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
simple_prot
def simple_prot(x, start): """Find the first peak to the right of start""" # start must b >= 1 for i in range(start,len(x)-1): a,b,c = x[i-1], x[i], x[i+1] if b - a > 0 and b -c >= 0: return i else: return None
python
def simple_prot(x, start): """Find the first peak to the right of start""" # start must b >= 1 for i in range(start,len(x)-1): a,b,c = x[i-1], x[i], x[i+1] if b - a > 0 and b -c >= 0: return i else: return None
[ "def", "simple_prot", "(", "x", ",", "start", ")", ":", "# start must b >= 1", "for", "i", "in", "range", "(", "start", ",", "len", "(", "x", ")", "-", "1", ")", ":", "a", ",", "b", ",", "c", "=", "x", "[", "i", "-", "1", "]", ",", "x", "["...
Find the first peak to the right of start
[ "Find", "the", "first", "peak", "to", "the", "right", "of", "start" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L110-L120
train
49,225
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
position_half_h
def position_half_h(pslit, cpix, backw=4): """Find the position where the value is half of the peak""" # Find the first peak to the right of cpix next_peak = simple_prot(pslit, cpix) if next_peak is None: raise ValueError dis_peak = next_peak - cpix wpos2 = cpix - dis_peak wpos1 = wpos2 - backw # Compute background in a window of width backw # in a position simetrical to the peak # around cpix left_background = pslit[wpos1:wpos2].min() # height of the peak height = pslit[next_peak] - left_background half_height = left_background + 0.5 * height # Position at halg peak, linear interpolation vv = pslit[wpos1:next_peak+1] - half_height res1, = numpy.nonzero(numpy.diff(vv > 0)) i1 = res1[0] xint = wpos1 + i1 + (0 - vv[i1]) / (vv[i1+1] - vv[i1]) return xint, next_peak, wpos1, wpos2, left_background, half_height
python
def position_half_h(pslit, cpix, backw=4): """Find the position where the value is half of the peak""" # Find the first peak to the right of cpix next_peak = simple_prot(pslit, cpix) if next_peak is None: raise ValueError dis_peak = next_peak - cpix wpos2 = cpix - dis_peak wpos1 = wpos2 - backw # Compute background in a window of width backw # in a position simetrical to the peak # around cpix left_background = pslit[wpos1:wpos2].min() # height of the peak height = pslit[next_peak] - left_background half_height = left_background + 0.5 * height # Position at halg peak, linear interpolation vv = pslit[wpos1:next_peak+1] - half_height res1, = numpy.nonzero(numpy.diff(vv > 0)) i1 = res1[0] xint = wpos1 + i1 + (0 - vv[i1]) / (vv[i1+1] - vv[i1]) return xint, next_peak, wpos1, wpos2, left_background, half_height
[ "def", "position_half_h", "(", "pslit", ",", "cpix", ",", "backw", "=", "4", ")", ":", "# Find the first peak to the right of cpix", "next_peak", "=", "simple_prot", "(", "pslit", ",", "cpix", ")", "if", "next_peak", "is", "None", ":", "raise", "ValueError", "...
Find the position where the value is half of the peak
[ "Find", "the", "position", "where", "the", "value", "is", "half", "of", "the", "peak" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L123-L155
train
49,226
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
locate_bar_l
def locate_bar_l(icut, epos): """Fine position of the left CSU bar""" def swap_coor(x): return x def swap_line(tab): return tab return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line )
python
def locate_bar_l(icut, epos): """Fine position of the left CSU bar""" def swap_coor(x): return x def swap_line(tab): return tab return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line )
[ "def", "locate_bar_l", "(", "icut", ",", "epos", ")", ":", "def", "swap_coor", "(", "x", ")", ":", "return", "x", "def", "swap_line", "(", "tab", ")", ":", "return", "tab", "return", "_locate_bar_gen", "(", "icut", ",", "epos", ",", "transform1", "=", ...
Fine position of the left CSU bar
[ "Fine", "position", "of", "the", "left", "CSU", "bar" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L158-L169
train
49,227
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
locate_bar_r
def locate_bar_r(icut, epos): """Fine position of the right CSU bar""" sm = len(icut) def swap_coor(x): return sm - 1 - x def swap_line(tab): return tab[::-1] return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line)
python
def locate_bar_r(icut, epos): """Fine position of the right CSU bar""" sm = len(icut) def swap_coor(x): return sm - 1 - x def swap_line(tab): return tab[::-1] return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line)
[ "def", "locate_bar_r", "(", "icut", ",", "epos", ")", ":", "sm", "=", "len", "(", "icut", ")", "def", "swap_coor", "(", "x", ")", ":", "return", "sm", "-", "1", "-", "x", "def", "swap_line", "(", "tab", ")", ":", "return", "tab", "[", ":", ":",...
Fine position of the right CSU bar
[ "Fine", "position", "of", "the", "right", "CSU", "bar" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L172-L183
train
49,228
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
_locate_bar_gen
def _locate_bar_gen(icut, epos, transform1, transform2): """Generic function for the fine position of the CSU""" epos_pix = coor_to_pix_1d(epos) # transform -> epos_pix_s = transform1(epos_pix) icut2 = transform2(icut) # try: res = position_half_h(icut2, epos_pix_s) xint_s, next_peak_s, wpos1_s, wpos2_s, background_level, half_height = res # xint = transform1(xint_s) # epos_f = xint error = 0 except ValueError: error = 2 epos_f = epos return epos_pix, epos_f, error
python
def _locate_bar_gen(icut, epos, transform1, transform2): """Generic function for the fine position of the CSU""" epos_pix = coor_to_pix_1d(epos) # transform -> epos_pix_s = transform1(epos_pix) icut2 = transform2(icut) # try: res = position_half_h(icut2, epos_pix_s) xint_s, next_peak_s, wpos1_s, wpos2_s, background_level, half_height = res # xint = transform1(xint_s) # epos_f = xint error = 0 except ValueError: error = 2 epos_f = epos return epos_pix, epos_f, error
[ "def", "_locate_bar_gen", "(", "icut", ",", "epos", ",", "transform1", ",", "transform2", ")", ":", "epos_pix", "=", "coor_to_pix_1d", "(", "epos", ")", "# transform ->", "epos_pix_s", "=", "transform1", "(", "epos_pix", ")", "icut2", "=", "transform2", "(", ...
Generic function for the fine position of the CSU
[ "Generic", "function", "for", "the", "fine", "position", "of", "the", "CSU" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L186-L211
train
49,229
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
overlap
def overlap(intv1, intv2): """Overlaping of two intervals""" return max(0, min(intv1[1], intv2[1]) - max(intv1[0], intv2[0]))
python
def overlap(intv1, intv2): """Overlaping of two intervals""" return max(0, min(intv1[1], intv2[1]) - max(intv1[0], intv2[0]))
[ "def", "overlap", "(", "intv1", ",", "intv2", ")", ":", "return", "max", "(", "0", ",", "min", "(", "intv1", "[", "1", "]", ",", "intv2", "[", "1", "]", ")", "-", "max", "(", "intv1", "[", "0", "]", ",", "intv2", "[", "0", "]", ")", ")" ]
Overlaping of two intervals
[ "Overlaping", "of", "two", "intervals" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L430-L432
train
49,230
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
exvp_scalar
def exvp_scalar(x, y, x0, y0, c2, c4, theta0, ff): """Convert virtual pixel to real pixel. Parameters ---------- x : array-like of floats X coordinate (pixel). y : array-like of floats Y coordinate (pixel). x0 : float X coordinate of reference pixel, in units of 1E3. y0 : float Y coordinate of reference pixel, in units of 1E3. c2 : float Coefficient corresponding to the term r**2 in distortion equation, in units of 1E4. c4 : float Coefficient corresponding to the term r**4 in distortion equation, in units of 1E9 theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats Distorted coordinates. """ # plate scale: 0.1944 arcsec/pixel # conversion factor (in radian/pixel) factor = 0.1944 * np.pi/(180.0*3600) # distance from image center (pixels) r_pix = np.sqrt((x - x0*1000)**2 + (y - y0*1000)**2) # distance from imagen center (radians) r_rad = factor * r_pix # radial distortion: this number is 1.0 for r=0 and increases # slightly (reaching values around 1.033) for r~sqrt(2)*1024 # (the distance to the corner of the detector measured from the # center) rdist = (1 + c2 * 1.0E4 * r_rad**2 + c4 * 1.0E9 * r_rad**4) # angle measured from the Y axis towards the X axis theta = np.arctan((x - x0*1000)/(y - y0*1000)) if y < y0*1000: theta = theta - np.pi # distorted coordinates xdist = (rdist * r_pix * np.sin(theta+theta0)) + x0*1000 ydist = (ff * rdist * r_pix * np.cos(theta+theta0)) + y0*1000 return xdist, ydist
python
def exvp_scalar(x, y, x0, y0, c2, c4, theta0, ff): """Convert virtual pixel to real pixel. Parameters ---------- x : array-like of floats X coordinate (pixel). y : array-like of floats Y coordinate (pixel). x0 : float X coordinate of reference pixel, in units of 1E3. y0 : float Y coordinate of reference pixel, in units of 1E3. c2 : float Coefficient corresponding to the term r**2 in distortion equation, in units of 1E4. c4 : float Coefficient corresponding to the term r**4 in distortion equation, in units of 1E9 theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats Distorted coordinates. """ # plate scale: 0.1944 arcsec/pixel # conversion factor (in radian/pixel) factor = 0.1944 * np.pi/(180.0*3600) # distance from image center (pixels) r_pix = np.sqrt((x - x0*1000)**2 + (y - y0*1000)**2) # distance from imagen center (radians) r_rad = factor * r_pix # radial distortion: this number is 1.0 for r=0 and increases # slightly (reaching values around 1.033) for r~sqrt(2)*1024 # (the distance to the corner of the detector measured from the # center) rdist = (1 + c2 * 1.0E4 * r_rad**2 + c4 * 1.0E9 * r_rad**4) # angle measured from the Y axis towards the X axis theta = np.arctan((x - x0*1000)/(y - y0*1000)) if y < y0*1000: theta = theta - np.pi # distorted coordinates xdist = (rdist * r_pix * np.sin(theta+theta0)) + x0*1000 ydist = (ff * rdist * r_pix * np.cos(theta+theta0)) + y0*1000 return xdist, ydist
[ "def", "exvp_scalar", "(", "x", ",", "y", ",", "x0", ",", "y0", ",", "c2", ",", "c4", ",", "theta0", ",", "ff", ")", ":", "# plate scale: 0.1944 arcsec/pixel", "# conversion factor (in radian/pixel)", "factor", "=", "0.1944", "*", "np", ".", "pi", "/", "("...
Convert virtual pixel to real pixel. Parameters ---------- x : array-like of floats X coordinate (pixel). y : array-like of floats Y coordinate (pixel). x0 : float X coordinate of reference pixel, in units of 1E3. y0 : float Y coordinate of reference pixel, in units of 1E3. c2 : float Coefficient corresponding to the term r**2 in distortion equation, in units of 1E4. c4 : float Coefficient corresponding to the term r**4 in distortion equation, in units of 1E9 theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats Distorted coordinates.
[ "Convert", "virtual", "pixel", "to", "real", "pixel", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L200-L253
train
49,231
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
expected_distorted_boundaries
def expected_distorted_boundaries(islitlet, csu_bar_slit_center, borderlist, params, parmodel, numpts, deg, debugplot=0): """Return expected SpectrumTrail instances associated to a given slitlet. Several SpectrumTrail objects can be computed for the considered slitlet. The parameter borderlist is a list of floats, ranging from 0 to 1, indicating the spatial location of the spectrum trail within the slitlet: 0 means the lower boundary and 1 corresponds to the upper bounday. Any value in (0,1) will provide the spectrum trail located in between accordingly. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. borderlist : list of floats Each float provides the fractional vertical location of the spectrum trail relative to the lower boundary. In other words, 0.0 corresponds to the lower boundary, 1.0 to the upper boundary, and any number in the interval (0,1) will be a spectral trail in between. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. numpts : int Number of points in which the X-range interval is subdivided before fitting the returned polynomial(s). deg : int Degree of the fitted polynomial. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- list_spectrails : list of SpectrumTrail objects List containing the fitted spectrum trails. 
""" c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline = \ return_params(islitlet, csu_bar_slit_center, params, parmodel) xp = np.linspace(1, EMIR_NAXIS1, numpts) slit_dist = (slit_height * 10) + slit_gap # undistorted (constant) y-coordinate of the lower and upper boundaries ybottom = y_baseline * 100 + (islitlet - 1) * slit_dist ytop = ybottom + (slit_height * 10) list_spectrails = [] for borderval in borderlist: yvalue = ybottom + borderval * (ytop - ybottom) # undistorted boundary yp_value = np.ones(numpts) * yvalue # distorted boundary xdist, ydist = exvp(xp, yp_value, x0=x0, y0=y0, c2=c2, c4=c4, theta0=theta0, ff=ff) spectrail = SpectrumTrail() # declare SpectrumTrail instance spectrail.fit(x=xdist, y=ydist, deg=deg, debugplot=debugplot) list_spectrails.append(spectrail) return list_spectrails
python
def expected_distorted_boundaries(islitlet, csu_bar_slit_center, borderlist, params, parmodel, numpts, deg, debugplot=0): """Return expected SpectrumTrail instances associated to a given slitlet. Several SpectrumTrail objects can be computed for the considered slitlet. The parameter borderlist is a list of floats, ranging from 0 to 1, indicating the spatial location of the spectrum trail within the slitlet: 0 means the lower boundary and 1 corresponds to the upper bounday. Any value in (0,1) will provide the spectrum trail located in between accordingly. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. borderlist : list of floats Each float provides the fractional vertical location of the spectrum trail relative to the lower boundary. In other words, 0.0 corresponds to the lower boundary, 1.0 to the upper boundary, and any number in the interval (0,1) will be a spectral trail in between. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. numpts : int Number of points in which the X-range interval is subdivided before fitting the returned polynomial(s). deg : int Degree of the fitted polynomial. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- list_spectrails : list of SpectrumTrail objects List containing the fitted spectrum trails. 
""" c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline = \ return_params(islitlet, csu_bar_slit_center, params, parmodel) xp = np.linspace(1, EMIR_NAXIS1, numpts) slit_dist = (slit_height * 10) + slit_gap # undistorted (constant) y-coordinate of the lower and upper boundaries ybottom = y_baseline * 100 + (islitlet - 1) * slit_dist ytop = ybottom + (slit_height * 10) list_spectrails = [] for borderval in borderlist: yvalue = ybottom + borderval * (ytop - ybottom) # undistorted boundary yp_value = np.ones(numpts) * yvalue # distorted boundary xdist, ydist = exvp(xp, yp_value, x0=x0, y0=y0, c2=c2, c4=c4, theta0=theta0, ff=ff) spectrail = SpectrumTrail() # declare SpectrumTrail instance spectrail.fit(x=xdist, y=ydist, deg=deg, debugplot=debugplot) list_spectrails.append(spectrail) return list_spectrails
[ "def", "expected_distorted_boundaries", "(", "islitlet", ",", "csu_bar_slit_center", ",", "borderlist", ",", "params", ",", "parmodel", ",", "numpts", ",", "deg", ",", "debugplot", "=", "0", ")", ":", "c2", ",", "c4", ",", "ff", ",", "slit_gap", ",", "slit...
Return expected SpectrumTrail instances associated to a given slitlet. Several SpectrumTrail objects can be computed for the considered slitlet. The parameter borderlist is a list of floats, ranging from 0 to 1, indicating the spatial location of the spectrum trail within the slitlet: 0 means the lower boundary and 1 corresponds to the upper bounday. Any value in (0,1) will provide the spectrum trail located in between accordingly. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. borderlist : list of floats Each float provides the fractional vertical location of the spectrum trail relative to the lower boundary. In other words, 0.0 corresponds to the lower boundary, 1.0 to the upper boundary, and any number in the interval (0,1) will be a spectral trail in between. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. numpts : int Number of points in which the X-range interval is subdivided before fitting the returned polynomial(s). deg : int Degree of the fitted polynomial. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- list_spectrails : list of SpectrumTrail objects List containing the fitted spectrum trails.
[ "Return", "expected", "SpectrumTrail", "instances", "associated", "to", "a", "given", "slitlet", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L441-L509
train
49,232
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
fun_residuals
def fun_residuals(params, parmodel, bounddict, shrinking_factor, numresolution, islitmin, islitmax, debugplot): """Function to be minimised. Parameters ---------- params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. bounddict : JSON structure Structure employed to store bounddict information. shrinking_factor : float Fraction of the detected X range (specrtral) to be employed in the fit. This must be a number verifying 0 < shrinking_factor <= 1. The resulting interval will be centered within the original one. numresolution : int Number of points in which the X-range interval is subdivided before computing the residuals. islitmin : int Minimum slitlet number. islitmax : int Maximum slitlet number. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- global_residual : float Squared root of the averaged sum of squared residuals. 
""" global FUNCTION_EVALUATIONS global_residual = 0.0 nsummed = 0 read_slitlets = list(bounddict['contents'].keys()) # read_slitlets.sort() # this is not really necessary for tmp_slitlet in read_slitlets: islitlet = int(tmp_slitlet[7:]) if islitmin <= islitlet <= islitmax: read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) # read_dateobs.sort() # this is not really necessary for tmp_dateobs in read_dateobs: tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] csu_bar_slit_center = tmp_dict['csu_bar_slit_center'] # expected boundaries using provided parameters list_spectrails = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0, 1], params, parmodel, numpts=numresolution, deg=5, debugplot=0 ) poly_lower_expected = list_spectrails[0].poly_funct poly_upper_expected = list_spectrails[1].poly_funct # measured lower boundary poly_lower_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_lower'] ) xmin_lower_bound = tmp_dict['boundary_xmin_lower'] xmax_lower_bound = tmp_dict['boundary_xmax_lower'] dx = (xmax_lower_bound - xmin_lower_bound) * \ (1 - shrinking_factor) / 2 xdum_lower = np.linspace(xmin_lower_bound + dx, xmax_lower_bound - dx, num=numresolution) # distance between expected and measured polynomials poly_diff = poly_lower_expected - poly_lower_measured global_residual += np.sum(poly_diff(xdum_lower)**2) nsummed += numresolution # measured upper boundary poly_upper_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_upper'] ) xmin_upper_bound = tmp_dict['boundary_xmin_upper'] xmax_upper_bound = tmp_dict['boundary_xmax_upper'] dx = (xmax_lower_bound - xmin_lower_bound) * \ (1 - shrinking_factor) / 2 xdum_upper = np.linspace(xmin_upper_bound + dx, xmax_upper_bound - dx, num=numresolution) # distance between expected and measured polynomials poly_diff = poly_upper_expected - poly_upper_measured global_residual += np.sum(poly_diff(xdum_upper)**2) nsummed += numresolution if nsummed > 0: global_residual = 
np.sqrt(global_residual/nsummed) if debugplot >= 10: FUNCTION_EVALUATIONS += 1 print('-' * 79) print('>>> Number of function evaluations:', FUNCTION_EVALUATIONS) print('>>> global residual...............:', global_residual) params.pretty_print() return global_residual
python
def fun_residuals(params, parmodel, bounddict, shrinking_factor, numresolution, islitmin, islitmax, debugplot): """Function to be minimised. Parameters ---------- params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. bounddict : JSON structure Structure employed to store bounddict information. shrinking_factor : float Fraction of the detected X range (specrtral) to be employed in the fit. This must be a number verifying 0 < shrinking_factor <= 1. The resulting interval will be centered within the original one. numresolution : int Number of points in which the X-range interval is subdivided before computing the residuals. islitmin : int Minimum slitlet number. islitmax : int Maximum slitlet number. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- global_residual : float Squared root of the averaged sum of squared residuals. 
""" global FUNCTION_EVALUATIONS global_residual = 0.0 nsummed = 0 read_slitlets = list(bounddict['contents'].keys()) # read_slitlets.sort() # this is not really necessary for tmp_slitlet in read_slitlets: islitlet = int(tmp_slitlet[7:]) if islitmin <= islitlet <= islitmax: read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) # read_dateobs.sort() # this is not really necessary for tmp_dateobs in read_dateobs: tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] csu_bar_slit_center = tmp_dict['csu_bar_slit_center'] # expected boundaries using provided parameters list_spectrails = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0, 1], params, parmodel, numpts=numresolution, deg=5, debugplot=0 ) poly_lower_expected = list_spectrails[0].poly_funct poly_upper_expected = list_spectrails[1].poly_funct # measured lower boundary poly_lower_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_lower'] ) xmin_lower_bound = tmp_dict['boundary_xmin_lower'] xmax_lower_bound = tmp_dict['boundary_xmax_lower'] dx = (xmax_lower_bound - xmin_lower_bound) * \ (1 - shrinking_factor) / 2 xdum_lower = np.linspace(xmin_lower_bound + dx, xmax_lower_bound - dx, num=numresolution) # distance between expected and measured polynomials poly_diff = poly_lower_expected - poly_lower_measured global_residual += np.sum(poly_diff(xdum_lower)**2) nsummed += numresolution # measured upper boundary poly_upper_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_upper'] ) xmin_upper_bound = tmp_dict['boundary_xmin_upper'] xmax_upper_bound = tmp_dict['boundary_xmax_upper'] dx = (xmax_lower_bound - xmin_lower_bound) * \ (1 - shrinking_factor) / 2 xdum_upper = np.linspace(xmin_upper_bound + dx, xmax_upper_bound - dx, num=numresolution) # distance between expected and measured polynomials poly_diff = poly_upper_expected - poly_upper_measured global_residual += np.sum(poly_diff(xdum_upper)**2) nsummed += numresolution if nsummed > 0: global_residual = 
np.sqrt(global_residual/nsummed) if debugplot >= 10: FUNCTION_EVALUATIONS += 1 print('-' * 79) print('>>> Number of function evaluations:', FUNCTION_EVALUATIONS) print('>>> global residual...............:', global_residual) params.pretty_print() return global_residual
[ "def", "fun_residuals", "(", "params", ",", "parmodel", ",", "bounddict", ",", "shrinking_factor", ",", "numresolution", ",", "islitmin", ",", "islitmax", ",", "debugplot", ")", ":", "global", "FUNCTION_EVALUATIONS", "global_residual", "=", "0.0", "nsummed", "=", ...
Function to be minimised. Parameters ---------- params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. bounddict : JSON structure Structure employed to store bounddict information. shrinking_factor : float Fraction of the detected X range (specrtral) to be employed in the fit. This must be a number verifying 0 < shrinking_factor <= 1. The resulting interval will be centered within the original one. numresolution : int Number of points in which the X-range interval is subdivided before computing the residuals. islitmin : int Minimum slitlet number. islitmax : int Maximum slitlet number. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- global_residual : float Squared root of the averaged sum of squared residuals.
[ "Function", "to", "be", "minimised", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L578-L677
train
49,233
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
overplot_boundaries_from_bounddict
def overplot_boundaries_from_bounddict(ax, bounddict, micolors, linetype='-'): """Overplot boundaries on current plot. Parameters ---------- ax : matplotlib axes Current plot axes. bounddict : JSON structure Structure employed to store bounddict information. micolors : list of char List with two characters corresponding to alternating colors for odd and even slitlets. linetype : str Line type. """ for islitlet in range(1, EMIR_NBARS + 1): tmpcolor = micolors[islitlet % 2] tmp_slitlet = 'slitlet' + str(islitlet).zfill(2) if tmp_slitlet in bounddict['contents'].keys(): read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) read_dateobs.sort() for tmp_dateobs in read_dateobs: tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] # lower boundary pol_lower_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_lower'] ) xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1) ydum = pol_lower_measured(xdum) ax.plot(xdum, ydum, tmpcolor + linetype) pol_upper_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_upper'] ) ydum = pol_upper_measured(xdum) ax.plot(xdum, ydum, tmpcolor + linetype)
python
def overplot_boundaries_from_bounddict(ax, bounddict, micolors, linetype='-'): """Overplot boundaries on current plot. Parameters ---------- ax : matplotlib axes Current plot axes. bounddict : JSON structure Structure employed to store bounddict information. micolors : list of char List with two characters corresponding to alternating colors for odd and even slitlets. linetype : str Line type. """ for islitlet in range(1, EMIR_NBARS + 1): tmpcolor = micolors[islitlet % 2] tmp_slitlet = 'slitlet' + str(islitlet).zfill(2) if tmp_slitlet in bounddict['contents'].keys(): read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) read_dateobs.sort() for tmp_dateobs in read_dateobs: tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] # lower boundary pol_lower_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_lower'] ) xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1) ydum = pol_lower_measured(xdum) ax.plot(xdum, ydum, tmpcolor + linetype) pol_upper_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_upper'] ) ydum = pol_upper_measured(xdum) ax.plot(xdum, ydum, tmpcolor + linetype)
[ "def", "overplot_boundaries_from_bounddict", "(", "ax", ",", "bounddict", ",", "micolors", ",", "linetype", "=", "'-'", ")", ":", "for", "islitlet", "in", "range", "(", "1", ",", "EMIR_NBARS", "+", "1", ")", ":", "tmpcolor", "=", "micolors", "[", "islitlet...
Overplot boundaries on current plot. Parameters ---------- ax : matplotlib axes Current plot axes. bounddict : JSON structure Structure employed to store bounddict information. micolors : list of char List with two characters corresponding to alternating colors for odd and even slitlets. linetype : str Line type.
[ "Overplot", "boundaries", "on", "current", "plot", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L680-L716
train
49,234
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
overplot_boundaries_from_params
def overplot_boundaries_from_params(ax, params, parmodel, list_islitlet, list_csu_bar_slit_center, micolors=('m', 'c'), linetype='--', labels=True, alpha_fill=None, global_offset_x_pix=0, global_offset_y_pix=0): """Overplot boundaries computed from fitted parameters. Parameters ---------- ax : matplotlib axes Current plot axes. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. list_islitlet : list of integers Slitlet numbers to be considered. longslits. list_csu_bar_slit_center : list of floats CSU bar slit centers of the considered slitlets. micolors : Python list List with two characters corresponding to alternating colors for odd and even slitlets. linetype : str Line type. labels : bool If True, display slilet label alpha_fill : float or None Alpha factor to be employed to fill slitlet region. global_integer_offset_x_pix : int or float Global offset in the X direction to be applied after computing the expected location. global_offset_y_pix : int or float Global offset in the Y direction to be applied after computing the expected location. Returns ------- list_pol_lower_boundaries : python list List of numpy.polynomial.Polynomial instances with the lower polynomial boundaries computed for the requested slitlets. list_pol_upper_boundaries : python list List of numpy.polynomial.Polynomial instances with the upper polynomial boundaries computed for the requested slitlets. 
""" # duplicate to shorten the variable names xoff = float(global_offset_x_pix) yoff = float(global_offset_y_pix) list_pol_lower_boundaries = [] list_pol_upper_boundaries = [] for islitlet, csu_bar_slit_center in \ zip(list_islitlet, list_csu_bar_slit_center): tmpcolor = micolors[islitlet % 2] pol_lower_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct list_pol_lower_boundaries.append(pol_lower_expected) pol_upper_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [1], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct list_pol_upper_boundaries.append(pol_upper_expected) xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1) ydum1 = pol_lower_expected(xdum) ax.plot(xdum + xoff, ydum1 + yoff, tmpcolor + linetype) ydum2 = pol_upper_expected(xdum) ax.plot(xdum + xoff, ydum2 + yoff, tmpcolor + linetype) if alpha_fill is not None: ax.fill_between(xdum + xoff, ydum1 + yoff, ydum2 + yoff, facecolor=tmpcolor, alpha=alpha_fill) if labels: # slitlet label yc_lower = pol_lower_expected(EMIR_NAXIS1 / 2 + 0.5) yc_upper = pol_upper_expected(EMIR_NAXIS1 / 2 + 0.5) xcsu = EMIR_NAXIS1 * csu_bar_slit_center / 341.5 ax.text(xcsu + xoff, (yc_lower + yc_upper) / 2 + yoff, str(islitlet), fontsize=10, va='center', ha='center', bbox=dict(boxstyle="round,pad=0.1", fc="white", ec="grey"), color=tmpcolor, fontweight='bold', backgroundcolor='white') # return lists with boundaries return list_pol_lower_boundaries, list_pol_upper_boundaries
python
def overplot_boundaries_from_params(ax, params, parmodel, list_islitlet, list_csu_bar_slit_center, micolors=('m', 'c'), linetype='--', labels=True, alpha_fill=None, global_offset_x_pix=0, global_offset_y_pix=0): """Overplot boundaries computed from fitted parameters. Parameters ---------- ax : matplotlib axes Current plot axes. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. list_islitlet : list of integers Slitlet numbers to be considered. longslits. list_csu_bar_slit_center : list of floats CSU bar slit centers of the considered slitlets. micolors : Python list List with two characters corresponding to alternating colors for odd and even slitlets. linetype : str Line type. labels : bool If True, display slilet label alpha_fill : float or None Alpha factor to be employed to fill slitlet region. global_integer_offset_x_pix : int or float Global offset in the X direction to be applied after computing the expected location. global_offset_y_pix : int or float Global offset in the Y direction to be applied after computing the expected location. Returns ------- list_pol_lower_boundaries : python list List of numpy.polynomial.Polynomial instances with the lower polynomial boundaries computed for the requested slitlets. list_pol_upper_boundaries : python list List of numpy.polynomial.Polynomial instances with the upper polynomial boundaries computed for the requested slitlets. 
""" # duplicate to shorten the variable names xoff = float(global_offset_x_pix) yoff = float(global_offset_y_pix) list_pol_lower_boundaries = [] list_pol_upper_boundaries = [] for islitlet, csu_bar_slit_center in \ zip(list_islitlet, list_csu_bar_slit_center): tmpcolor = micolors[islitlet % 2] pol_lower_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct list_pol_lower_boundaries.append(pol_lower_expected) pol_upper_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [1], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct list_pol_upper_boundaries.append(pol_upper_expected) xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1) ydum1 = pol_lower_expected(xdum) ax.plot(xdum + xoff, ydum1 + yoff, tmpcolor + linetype) ydum2 = pol_upper_expected(xdum) ax.plot(xdum + xoff, ydum2 + yoff, tmpcolor + linetype) if alpha_fill is not None: ax.fill_between(xdum + xoff, ydum1 + yoff, ydum2 + yoff, facecolor=tmpcolor, alpha=alpha_fill) if labels: # slitlet label yc_lower = pol_lower_expected(EMIR_NAXIS1 / 2 + 0.5) yc_upper = pol_upper_expected(EMIR_NAXIS1 / 2 + 0.5) xcsu = EMIR_NAXIS1 * csu_bar_slit_center / 341.5 ax.text(xcsu + xoff, (yc_lower + yc_upper) / 2 + yoff, str(islitlet), fontsize=10, va='center', ha='center', bbox=dict(boxstyle="round,pad=0.1", fc="white", ec="grey"), color=tmpcolor, fontweight='bold', backgroundcolor='white') # return lists with boundaries return list_pol_lower_boundaries, list_pol_upper_boundaries
[ "def", "overplot_boundaries_from_params", "(", "ax", ",", "params", ",", "parmodel", ",", "list_islitlet", ",", "list_csu_bar_slit_center", ",", "micolors", "=", "(", "'m'", ",", "'c'", ")", ",", "linetype", "=", "'--'", ",", "labels", "=", "True", ",", "alp...
Overplot boundaries computed from fitted parameters. Parameters ---------- ax : matplotlib axes Current plot axes. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. list_islitlet : list of integers Slitlet numbers to be considered. longslits. list_csu_bar_slit_center : list of floats CSU bar slit centers of the considered slitlets. micolors : Python list List with two characters corresponding to alternating colors for odd and even slitlets. linetype : str Line type. labels : bool If True, display slilet label alpha_fill : float or None Alpha factor to be employed to fill slitlet region. global_integer_offset_x_pix : int or float Global offset in the X direction to be applied after computing the expected location. global_offset_y_pix : int or float Global offset in the Y direction to be applied after computing the expected location. Returns ------- list_pol_lower_boundaries : python list List of numpy.polynomial.Polynomial instances with the lower polynomial boundaries computed for the requested slitlets. list_pol_upper_boundaries : python list List of numpy.polynomial.Polynomial instances with the upper polynomial boundaries computed for the requested slitlets.
[ "Overplot", "boundaries", "computed", "from", "fitted", "parameters", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L719-L811
train
49,235
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
bound_params_from_dict
def bound_params_from_dict(bound_param_dict): """Define `~lmfit.parameter.Parameters` object from dictionary. Parameters ---------- bound_param_dict : dictionary Dictionary containing the JSON contents of a boundary parameter file. Returns ------- params : :class:`~lmfit.parameter.Parameters` Parameters object. """ params = Parameters() for mainpar in EXPECTED_PARAMETER_LIST: if mainpar not in bound_param_dict['contents'].keys(): raise ValueError('Parameter ' + mainpar + ' not found!') if bound_param_dict['meta_info']['parmodel'] == "longslit": dumdict = bound_param_dict['contents'][mainpar] params.add(mainpar, value=dumdict["value"], vary=dumdict["vary"]) elif bound_param_dict['meta_info']['parmodel'] == 'multislit': for subpar in ['a0s', 'a1s', 'a2s']: if subpar not in bound_param_dict['contents'][mainpar].keys(): raise ValueError('Subparameter ' + subpar + ' not found' + ' under parameter ' + mainpar) cpar = mainpar + '_' + subpar dumdict = bound_param_dict['contents'][mainpar][subpar] params.add(cpar, value=dumdict["value"], vary=dumdict["vary"]) else: print('parmodel: ', bound_param_dict['meta_info']['parmodel']) raise ValueError('Unexpected parmodel') return params
python
def bound_params_from_dict(bound_param_dict): """Define `~lmfit.parameter.Parameters` object from dictionary. Parameters ---------- bound_param_dict : dictionary Dictionary containing the JSON contents of a boundary parameter file. Returns ------- params : :class:`~lmfit.parameter.Parameters` Parameters object. """ params = Parameters() for mainpar in EXPECTED_PARAMETER_LIST: if mainpar not in bound_param_dict['contents'].keys(): raise ValueError('Parameter ' + mainpar + ' not found!') if bound_param_dict['meta_info']['parmodel'] == "longslit": dumdict = bound_param_dict['contents'][mainpar] params.add(mainpar, value=dumdict["value"], vary=dumdict["vary"]) elif bound_param_dict['meta_info']['parmodel'] == 'multislit': for subpar in ['a0s', 'a1s', 'a2s']: if subpar not in bound_param_dict['contents'][mainpar].keys(): raise ValueError('Subparameter ' + subpar + ' not found' + ' under parameter ' + mainpar) cpar = mainpar + '_' + subpar dumdict = bound_param_dict['contents'][mainpar][subpar] params.add(cpar, value=dumdict["value"], vary=dumdict["vary"]) else: print('parmodel: ', bound_param_dict['meta_info']['parmodel']) raise ValueError('Unexpected parmodel') return params
[ "def", "bound_params_from_dict", "(", "bound_param_dict", ")", ":", "params", "=", "Parameters", "(", ")", "for", "mainpar", "in", "EXPECTED_PARAMETER_LIST", ":", "if", "mainpar", "not", "in", "bound_param_dict", "[", "'contents'", "]", ".", "keys", "(", ")", ...
Define `~lmfit.parameter.Parameters` object from dictionary. Parameters ---------- bound_param_dict : dictionary Dictionary containing the JSON contents of a boundary parameter file. Returns ------- params : :class:`~lmfit.parameter.Parameters` Parameters object.
[ "Define", "~lmfit", ".", "parameter", ".", "Parameters", "object", "from", "dictionary", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L1237-L1273
train
49,236
Jaymon/prom
prom/interface/base.py
Connection.transaction_start
def transaction_start(self, name): """ start a transaction this will increment transaction semaphore and pass it to _transaction_start() """ if not name: raise ValueError("Transaction name cannot be empty") #uid = id(self) self.transaction_count += 1 logger.debug("{}. Start transaction {}".format(self.transaction_count, name)) if self.transaction_count == 1: self._transaction_start() else: self._transaction_started(name) return self.transaction_count
python
def transaction_start(self, name): """ start a transaction this will increment transaction semaphore and pass it to _transaction_start() """ if not name: raise ValueError("Transaction name cannot be empty") #uid = id(self) self.transaction_count += 1 logger.debug("{}. Start transaction {}".format(self.transaction_count, name)) if self.transaction_count == 1: self._transaction_start() else: self._transaction_started(name) return self.transaction_count
[ "def", "transaction_start", "(", "self", ",", "name", ")", ":", "if", "not", "name", ":", "raise", "ValueError", "(", "\"Transaction name cannot be empty\"", ")", "#uid = id(self)", "self", ".", "transaction_count", "+=", "1", "logger", ".", "debug", "(", "\"{}....
start a transaction this will increment transaction semaphore and pass it to _transaction_start()
[ "start", "a", "transaction" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L43-L60
train
49,237
Jaymon/prom
prom/interface/base.py
Connection.transaction_fail
def transaction_fail(self, name): """ rollback a transaction if currently in one e -- Exception() -- if passed in, bubble up the exception by re-raising it """ if not name: raise ValueError("Transaction name cannot be empty") if self.transaction_count > 0: logger.debug("{}. Failing transaction {}".format(self.transaction_count, name)) if self.transaction_count == 1: self._transaction_fail() else: self._transaction_failing(name) self.transaction_count -= 1
python
def transaction_fail(self, name): """ rollback a transaction if currently in one e -- Exception() -- if passed in, bubble up the exception by re-raising it """ if not name: raise ValueError("Transaction name cannot be empty") if self.transaction_count > 0: logger.debug("{}. Failing transaction {}".format(self.transaction_count, name)) if self.transaction_count == 1: self._transaction_fail() else: self._transaction_failing(name) self.transaction_count -= 1
[ "def", "transaction_fail", "(", "self", ",", "name", ")", ":", "if", "not", "name", ":", "raise", "ValueError", "(", "\"Transaction name cannot be empty\"", ")", "if", "self", ".", "transaction_count", ">", "0", ":", "logger", ".", "debug", "(", "\"{}. Failing...
rollback a transaction if currently in one e -- Exception() -- if passed in, bubble up the exception by re-raising it
[ "rollback", "a", "transaction", "if", "currently", "in", "one" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L79-L95
train
49,238
Jaymon/prom
prom/interface/base.py
Interface.connect
def connect(self, connection_config=None, *args, **kwargs): """ connect to the interface this will set the raw db connection to self.connection *args -- anything you want that will help the db connect **kwargs -- anything you want that the backend db connection will need to actually connect """ if self.connected: return self.connected if connection_config: self.connection_config = connection_config self.connected = True try: self._connect(self.connection_config) except Exception as e: self.connected = False self.raise_error(e) self.log("Connected {}", self.connection_config.interface_name) return self.connected
python
def connect(self, connection_config=None, *args, **kwargs): """ connect to the interface this will set the raw db connection to self.connection *args -- anything you want that will help the db connect **kwargs -- anything you want that the backend db connection will need to actually connect """ if self.connected: return self.connected if connection_config: self.connection_config = connection_config self.connected = True try: self._connect(self.connection_config) except Exception as e: self.connected = False self.raise_error(e) self.log("Connected {}", self.connection_config.interface_name) return self.connected
[ "def", "connect", "(", "self", ",", "connection_config", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "connected", ":", "return", "self", ".", "connected", "if", "connection_config", ":", "self", ".", "connection_c...
connect to the interface this will set the raw db connection to self.connection *args -- anything you want that will help the db connect **kwargs -- anything you want that the backend db connection will need to actually connect
[ "connect", "to", "the", "interface" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L153-L175
train
49,239
Jaymon/prom
prom/interface/base.py
Interface.close
def close(self): """close an open connection""" if not self.connected: return True self._close() self.connected = False self.log("Closed Connection {}", self.connection_config.interface_name) return True
python
def close(self): """close an open connection""" if not self.connected: return True self._close() self.connected = False self.log("Closed Connection {}", self.connection_config.interface_name) return True
[ "def", "close", "(", "self", ")", ":", "if", "not", "self", ".", "connected", ":", "return", "True", "self", ".", "_close", "(", ")", "self", ".", "connected", "=", "False", "self", ".", "log", "(", "\"Closed Connection {}\"", ",", "self", ".", "connec...
close an open connection
[ "close", "an", "open", "connection" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L206-L213
train
49,240
Jaymon/prom
prom/interface/base.py
Interface.query
def query(self, query_str, *query_args, **query_options): """ run a raw query on the db query_str -- string -- the query to run *query_args -- if the query_str is a formatting string, pass the values in this **query_options -- any query options can be passed in by using key=val syntax """ with self.connection(**query_options) as connection: query_options['connection'] = connection return self._query(query_str, query_args, **query_options)
python
def query(self, query_str, *query_args, **query_options): """ run a raw query on the db query_str -- string -- the query to run *query_args -- if the query_str is a formatting string, pass the values in this **query_options -- any query options can be passed in by using key=val syntax """ with self.connection(**query_options) as connection: query_options['connection'] = connection return self._query(query_str, query_args, **query_options)
[ "def", "query", "(", "self", ",", "query_str", ",", "*", "query_args", ",", "*", "*", "query_options", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "query_options", ")", "as", "connection", ":", "query_options", "[", "'connection'", "]", "...
run a raw query on the db query_str -- string -- the query to run *query_args -- if the query_str is a formatting string, pass the values in this **query_options -- any query options can be passed in by using key=val syntax
[ "run", "a", "raw", "query", "on", "the", "db" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L217-L227
train
49,241
Jaymon/prom
prom/interface/base.py
Interface.set_table
def set_table(self, schema, **kwargs): """ add the table to the db schema -- Schema() -- contains all the information about the table """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection if self.has_table(str(schema), **kwargs): return True try: with self.transaction(**kwargs): self._set_table(schema, **kwargs) for index_name, index in schema.indexes.items(): self.set_index( schema, name=index.name, fields=index.fields, connection=connection, **index.options ) except InterfaceError: # check to see if this table now exists, it might have been created # in another thread if not self.has_table(schema, **kwargs): raise
python
def set_table(self, schema, **kwargs): """ add the table to the db schema -- Schema() -- contains all the information about the table """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection if self.has_table(str(schema), **kwargs): return True try: with self.transaction(**kwargs): self._set_table(schema, **kwargs) for index_name, index in schema.indexes.items(): self.set_index( schema, name=index.name, fields=index.fields, connection=connection, **index.options ) except InterfaceError: # check to see if this table now exists, it might have been created # in another thread if not self.has_table(schema, **kwargs): raise
[ "def", "set_table", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "if", "self", ".", "ha...
add the table to the db schema -- Schema() -- contains all the information about the table
[ "add", "the", "table", "to", "the", "db" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L255-L282
train
49,242
Jaymon/prom
prom/interface/base.py
Interface.has_table
def has_table(self, table_name, **kwargs): """ check to see if a table is in the db table_name -- string -- the table to check return -- boolean -- True if the table exists, false otherwise """ with self.connection(kwargs.get('connection', None)) as connection: kwargs['connection'] = connection tables = self.get_tables(table_name, **kwargs) return len(tables) > 0
python
def has_table(self, table_name, **kwargs): """ check to see if a table is in the db table_name -- string -- the table to check return -- boolean -- True if the table exists, false otherwise """ with self.connection(kwargs.get('connection', None)) as connection: kwargs['connection'] = connection tables = self.get_tables(table_name, **kwargs) return len(tables) > 0
[ "def", "has_table", "(", "self", ",", "table_name", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "kwargs", ".", "get", "(", "'connection'", ",", "None", ")", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]"...
check to see if a table is in the db table_name -- string -- the table to check return -- boolean -- True if the table exists, false otherwise
[ "check", "to", "see", "if", "a", "table", "is", "in", "the", "db" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L286-L296
train
49,243
Jaymon/prom
prom/interface/base.py
Interface.get_tables
def get_tables(self, table_name="", **kwargs): """ get all the tables of the currently connected db table_name -- string -- if you would like to filter the tables list to only include matches with this name return -- list -- a list of table names """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection return self._get_tables(str(table_name), **kwargs)
python
def get_tables(self, table_name="", **kwargs): """ get all the tables of the currently connected db table_name -- string -- if you would like to filter the tables list to only include matches with this name return -- list -- a list of table names """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection return self._get_tables(str(table_name), **kwargs)
[ "def", "get_tables", "(", "self", ",", "table_name", "=", "\"\"", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "retur...
get all the tables of the currently connected db table_name -- string -- if you would like to filter the tables list to only include matches with this name return -- list -- a list of table names
[ "get", "all", "the", "tables", "of", "the", "currently", "connected", "db" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L298-L307
train
49,244
Jaymon/prom
prom/interface/base.py
Interface.delete_table
def delete_table(self, schema, **kwargs): """ remove a table matching schema from the db schema -- Schema() """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection if not self.has_table(str(schema), **kwargs): return True with self.transaction(**kwargs): self._delete_table(schema, **kwargs) return True
python
def delete_table(self, schema, **kwargs): """ remove a table matching schema from the db schema -- Schema() """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection if not self.has_table(str(schema), **kwargs): return True with self.transaction(**kwargs): self._delete_table(schema, **kwargs) return True
[ "def", "delete_table", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "if", "not", "self", ...
remove a table matching schema from the db schema -- Schema()
[ "remove", "a", "table", "matching", "schema", "from", "the", "db" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L311-L323
train
49,245
Jaymon/prom
prom/interface/base.py
Interface.delete_tables
def delete_tables(self, **kwargs): """ removes all the tables from the db this is, obviously, very bad if you didn't mean to call this, because of that, you have to pass in disable_protection=True, if it doesn't get that passed in, it won't run this method """ if not kwargs.get('disable_protection', False): raise ValueError('In order to delete all the tables, pass in disable_protection=True') with self.connection(**kwargs) as connection: kwargs['connection'] = connection self._delete_tables(**kwargs)
python
def delete_tables(self, **kwargs): """ removes all the tables from the db this is, obviously, very bad if you didn't mean to call this, because of that, you have to pass in disable_protection=True, if it doesn't get that passed in, it won't run this method """ if not kwargs.get('disable_protection', False): raise ValueError('In order to delete all the tables, pass in disable_protection=True') with self.connection(**kwargs) as connection: kwargs['connection'] = connection self._delete_tables(**kwargs)
[ "def", "delete_tables", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "kwargs", ".", "get", "(", "'disable_protection'", ",", "False", ")", ":", "raise", "ValueError", "(", "'In order to delete all the tables, pass in disable_protection=True'", ")", ...
removes all the tables from the db this is, obviously, very bad if you didn't mean to call this, because of that, you have to pass in disable_protection=True, if it doesn't get that passed in, it won't run this method
[ "removes", "all", "the", "tables", "from", "the", "db" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L327-L340
train
49,246
Jaymon/prom
prom/interface/base.py
Interface.get_indexes
def get_indexes(self, schema, **kwargs): """ get all the indexes schema -- Schema() return -- dict -- the indexes in {indexname: fields} format """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection return self._get_indexes(schema, **kwargs)
python
def get_indexes(self, schema, **kwargs): """ get all the indexes schema -- Schema() return -- dict -- the indexes in {indexname: fields} format """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection return self._get_indexes(schema, **kwargs)
[ "def", "get_indexes", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "return", "self", ".",...
get all the indexes schema -- Schema() return -- dict -- the indexes in {indexname: fields} format
[ "get", "all", "the", "indexes" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L351-L361
train
49,247
Jaymon/prom
prom/interface/base.py
Interface.set_index
def set_index(self, schema, name, fields, **index_options): """ add an index to the table schema -- Schema() name -- string -- the name of the index fields -- array -- the fields the index should be on **index_options -- dict -- any index options that might be useful to create the index """ with self.transaction(**index_options) as connection: index_options['connection'] = connection self._set_index(schema, name, fields, **index_options) return True
python
def set_index(self, schema, name, fields, **index_options): """ add an index to the table schema -- Schema() name -- string -- the name of the index fields -- array -- the fields the index should be on **index_options -- dict -- any index options that might be useful to create the index """ with self.transaction(**index_options) as connection: index_options['connection'] = connection self._set_index(schema, name, fields, **index_options) return True
[ "def", "set_index", "(", "self", ",", "schema", ",", "name", ",", "fields", ",", "*", "*", "index_options", ")", ":", "with", "self", ".", "transaction", "(", "*", "*", "index_options", ")", "as", "connection", ":", "index_options", "[", "'connection'", ...
add an index to the table schema -- Schema() name -- string -- the name of the index fields -- array -- the fields the index should be on **index_options -- dict -- any index options that might be useful to create the index
[ "add", "an", "index", "to", "the", "table" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L365-L378
train
49,248
Jaymon/prom
prom/interface/base.py
Interface.insert
def insert(self, schema, fields, **kwargs): """ Persist d into the db schema -- Schema() fields -- dict -- the values to persist return -- int -- the primary key of the row just inserted """ r = 0 with self.connection(**kwargs) as connection: kwargs['connection'] = connection try: with self.transaction(**kwargs): r = self._insert(schema, fields, **kwargs) except Exception as e: exc_info = sys.exc_info() if self.handle_error(schema, e, **kwargs): r = self._insert(schema, fields, **kwargs) else: self.raise_error(e, exc_info) return r
python
def insert(self, schema, fields, **kwargs): """ Persist d into the db schema -- Schema() fields -- dict -- the values to persist return -- int -- the primary key of the row just inserted """ r = 0 with self.connection(**kwargs) as connection: kwargs['connection'] = connection try: with self.transaction(**kwargs): r = self._insert(schema, fields, **kwargs) except Exception as e: exc_info = sys.exc_info() if self.handle_error(schema, e, **kwargs): r = self._insert(schema, fields, **kwargs) else: self.raise_error(e, exc_info) return r
[ "def", "insert", "(", "self", ",", "schema", ",", "fields", ",", "*", "*", "kwargs", ")", ":", "r", "=", "0", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connec...
Persist d into the db schema -- Schema() fields -- dict -- the values to persist return -- int -- the primary key of the row just inserted
[ "Persist", "d", "into", "the", "db" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L384-L408
train
49,249
Jaymon/prom
prom/interface/base.py
Interface.update
def update(self, schema, fields, query, **kwargs): """ Persist the query.fields into the db that match query.fields_where schema -- Schema() fields -- dict -- the values to persist query -- Query() -- will be used to create the where clause return -- int -- how many rows where updated """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection try: with self.transaction(**kwargs): r = self._update(schema, fields, query, **kwargs) except Exception as e: exc_info = sys.exc_info() if self.handle_error(schema, e, **kwargs): r = self._update(schema, fields, query, **kwargs) else: self.raise_error(e, exc_info) return r
python
def update(self, schema, fields, query, **kwargs): """ Persist the query.fields into the db that match query.fields_where schema -- Schema() fields -- dict -- the values to persist query -- Query() -- will be used to create the where clause return -- int -- how many rows where updated """ with self.connection(**kwargs) as connection: kwargs['connection'] = connection try: with self.transaction(**kwargs): r = self._update(schema, fields, query, **kwargs) except Exception as e: exc_info = sys.exc_info() if self.handle_error(schema, e, **kwargs): r = self._update(schema, fields, query, **kwargs) else: self.raise_error(e, exc_info) return r
[ "def", "update", "(", "self", ",", "schema", ",", "fields", ",", "query", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connectio...
Persist the query.fields into the db that match query.fields_where schema -- Schema() fields -- dict -- the values to persist query -- Query() -- will be used to create the where clause return -- int -- how many rows where updated
[ "Persist", "the", "query", ".", "fields", "into", "the", "db", "that", "match", "query", ".", "fields_where" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L413-L436
train
49,250
Jaymon/prom
prom/interface/base.py
Interface._get_query
def _get_query(self, callback, schema, query=None, *args, **kwargs): """this is just a common wrapper around all the get queries since they are all really similar in how they execute""" if not query: query = Query() ret = None with self.connection(**kwargs) as connection: kwargs['connection'] = connection try: if connection.in_transaction(): # we wrap SELECT queries in a transaction if we are in a transaction because # it could cause data loss if it failed by causing the db to discard # anything in the current transaction if the query isn't wrapped, # go ahead, ask me how I know this with self.transaction(**kwargs): ret = callback(schema, query, *args, **kwargs) else: ret = callback(schema, query, *args, **kwargs) except Exception as e: exc_info = sys.exc_info() if self.handle_error(schema, e, **kwargs): ret = callback(schema, query, *args, **kwargs) else: self.raise_error(e, exc_info) return ret
python
def _get_query(self, callback, schema, query=None, *args, **kwargs): """this is just a common wrapper around all the get queries since they are all really similar in how they execute""" if not query: query = Query() ret = None with self.connection(**kwargs) as connection: kwargs['connection'] = connection try: if connection.in_transaction(): # we wrap SELECT queries in a transaction if we are in a transaction because # it could cause data loss if it failed by causing the db to discard # anything in the current transaction if the query isn't wrapped, # go ahead, ask me how I know this with self.transaction(**kwargs): ret = callback(schema, query, *args, **kwargs) else: ret = callback(schema, query, *args, **kwargs) except Exception as e: exc_info = sys.exc_info() if self.handle_error(schema, e, **kwargs): ret = callback(schema, query, *args, **kwargs) else: self.raise_error(e, exc_info) return ret
[ "def", "_get_query", "(", "self", ",", "callback", ",", "schema", ",", "query", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "query", ":", "query", "=", "Query", "(", ")", "ret", "=", "None", "with", "self", ".",...
this is just a common wrapper around all the get queries since they are all really similar in how they execute
[ "this", "is", "just", "a", "common", "wrapper", "around", "all", "the", "get", "queries", "since", "they", "are", "all", "really", "similar", "in", "how", "they", "execute" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L441-L468
train
49,251
Jaymon/prom
prom/interface/base.py
Interface.get_one
def get_one(self, schema, query=None, **kwargs): """ get one row from the db matching filters set in query schema -- Schema() query -- Query() return -- dict -- the matching row """ ret = self._get_query(self._get_one, schema, query, **kwargs) if not ret: ret = {} return ret
python
def get_one(self, schema, query=None, **kwargs): """ get one row from the db matching filters set in query schema -- Schema() query -- Query() return -- dict -- the matching row """ ret = self._get_query(self._get_one, schema, query, **kwargs) if not ret: ret = {} return ret
[ "def", "get_one", "(", "self", ",", "schema", ",", "query", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "self", ".", "_get_query", "(", "self", ".", "_get_one", ",", "schema", ",", "query", ",", "*", "*", "kwargs", ")", "if", "not...
get one row from the db matching filters set in query schema -- Schema() query -- Query() return -- dict -- the matching row
[ "get", "one", "row", "from", "the", "db", "matching", "filters", "set", "in", "query" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L470-L481
train
49,252
Jaymon/prom
prom/interface/base.py
Interface.get
def get(self, schema, query=None, **kwargs): """ get matching rows from the db matching filters set in query schema -- Schema() query -- Query() return -- list -- a list of matching dicts """ ret = self._get_query(self._get, schema, query, **kwargs) if not ret: ret = [] return ret
python
def get(self, schema, query=None, **kwargs): """ get matching rows from the db matching filters set in query schema -- Schema() query -- Query() return -- list -- a list of matching dicts """ ret = self._get_query(self._get, schema, query, **kwargs) if not ret: ret = [] return ret
[ "def", "get", "(", "self", ",", "schema", ",", "query", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "self", ".", "_get_query", "(", "self", ".", "_get", ",", "schema", ",", "query", ",", "*", "*", "kwargs", ")", "if", "not", "re...
get matching rows from the db matching filters set in query schema -- Schema() query -- Query() return -- list -- a list of matching dicts
[ "get", "matching", "rows", "from", "the", "db", "matching", "filters", "set", "in", "query" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L485-L496
train
49,253
Jaymon/prom
prom/interface/base.py
Interface.log
def log(self, format_str, *format_args, **log_options): """ wrapper around the module's logger format_str -- string -- the message to log *format_args -- list -- if format_str is a string containing {}, then format_str.format(*format_args) is ran **log_options -- level -- something like logging.DEBUG """ if isinstance(format_str, Exception): logger.exception(format_str, *format_args) else: log_level = log_options.get('level', logging.DEBUG) if logger.isEnabledFor(log_level): if format_args: logger.log(log_level, format_str.format(*format_args)) else: logger.log(log_level, format_str)
python
def log(self, format_str, *format_args, **log_options): """ wrapper around the module's logger format_str -- string -- the message to log *format_args -- list -- if format_str is a string containing {}, then format_str.format(*format_args) is ran **log_options -- level -- something like logging.DEBUG """ if isinstance(format_str, Exception): logger.exception(format_str, *format_args) else: log_level = log_options.get('level', logging.DEBUG) if logger.isEnabledFor(log_level): if format_args: logger.log(log_level, format_str.format(*format_args)) else: logger.log(log_level, format_str)
[ "def", "log", "(", "self", ",", "format_str", ",", "*", "format_args", ",", "*", "*", "log_options", ")", ":", "if", "isinstance", "(", "format_str", ",", "Exception", ")", ":", "logger", ".", "exception", "(", "format_str", ",", "*", "format_args", ")",...
wrapper around the module's logger format_str -- string -- the message to log *format_args -- list -- if format_str is a string containing {}, then format_str.format(*format_args) is ran **log_options -- level -- something like logging.DEBUG
[ "wrapper", "around", "the", "module", "s", "logger" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L536-L553
train
49,254
Jaymon/prom
prom/interface/base.py
Interface.raise_error
def raise_error(self, e, exc_info=None): """this is just a wrapper to make the passed in exception an InterfaceError""" if not exc_info: exc_info = sys.exc_info() if not isinstance(e, InterfaceError): # allow python's built in errors to filter up through # https://docs.python.org/2/library/exceptions.html #if not hasattr(exceptions, e.__class__.__name__): if not hasattr(builtins, e.__class__.__name__): e = self._create_error(e, exc_info) reraise(e.__class__, e, exc_info[2])
python
def raise_error(self, e, exc_info=None): """this is just a wrapper to make the passed in exception an InterfaceError""" if not exc_info: exc_info = sys.exc_info() if not isinstance(e, InterfaceError): # allow python's built in errors to filter up through # https://docs.python.org/2/library/exceptions.html #if not hasattr(exceptions, e.__class__.__name__): if not hasattr(builtins, e.__class__.__name__): e = self._create_error(e, exc_info) reraise(e.__class__, e, exc_info[2])
[ "def", "raise_error", "(", "self", ",", "e", ",", "exc_info", "=", "None", ")", ":", "if", "not", "exc_info", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "if", "not", "isinstance", "(", "e", ",", "InterfaceError", ")", ":", "# allow python'...
this is just a wrapper to make the passed in exception an InterfaceError
[ "this", "is", "just", "a", "wrapper", "to", "make", "the", "passed", "in", "exception", "an", "InterfaceError" ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L555-L567
train
49,255
Jaymon/prom
prom/interface/base.py
SQLInterface._set_all_tables
def _set_all_tables(self, schema, **kwargs): """ You can run into a problem when you are trying to set a table and it has a foreign key to a table that doesn't exist, so this method will go through all fk refs and make sure the tables exist """ with self.transaction(**kwargs) as connection: kwargs['connection'] = connection # go through and make sure all foreign key referenced tables exist for field_name, field_val in schema.fields.items(): s = field_val.schema if s: self._set_all_tables(s, **kwargs) # now that we know all fk tables exist, create this table self.set_table(schema, **kwargs) return True
python
def _set_all_tables(self, schema, **kwargs): """ You can run into a problem when you are trying to set a table and it has a foreign key to a table that doesn't exist, so this method will go through all fk refs and make sure the tables exist """ with self.transaction(**kwargs) as connection: kwargs['connection'] = connection # go through and make sure all foreign key referenced tables exist for field_name, field_val in schema.fields.items(): s = field_val.schema if s: self._set_all_tables(s, **kwargs) # now that we know all fk tables exist, create this table self.set_table(schema, **kwargs) return True
[ "def", "_set_all_tables", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "# go through and mak...
You can run into a problem when you are trying to set a table and it has a foreign key to a table that doesn't exist, so this method will go through all fk refs and make sure the tables exist
[ "You", "can", "run", "into", "a", "problem", "when", "you", "are", "trying", "to", "set", "a", "table", "and", "it", "has", "a", "foreign", "key", "to", "a", "table", "that", "doesn", "t", "exist", "so", "this", "method", "will", "go", "through", "al...
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L856-L873
train
49,256
Jaymon/prom
prom/interface/base.py
SQLInterface._set_all_fields
def _set_all_fields(self, schema, **kwargs): """ this will add fields that don't exist in the table if they can be set to NULL, the reason they have to be NULL is adding fields to Postgres that can be NULL is really light, but if they have a default value, then it can be costly """ current_fields = self.get_fields(schema, **kwargs) for field_name, field in schema.fields.items(): if field_name not in current_fields: if field.required: raise ValueError('Cannot safely add {} on the fly because it is required'.format(field_name)) else: query_str = [] query_str.append('ALTER TABLE') query_str.append(' {}'.format(schema)) query_str.append('ADD COLUMN') query_str.append(' {}'.format(self.get_field_SQL(field_name, field))) query_str = os.linesep.join(query_str) self.query(query_str, ignore_result=True, **kwargs) return True
python
def _set_all_fields(self, schema, **kwargs): """ this will add fields that don't exist in the table if they can be set to NULL, the reason they have to be NULL is adding fields to Postgres that can be NULL is really light, but if they have a default value, then it can be costly """ current_fields = self.get_fields(schema, **kwargs) for field_name, field in schema.fields.items(): if field_name not in current_fields: if field.required: raise ValueError('Cannot safely add {} on the fly because it is required'.format(field_name)) else: query_str = [] query_str.append('ALTER TABLE') query_str.append(' {}'.format(schema)) query_str.append('ADD COLUMN') query_str.append(' {}'.format(self.get_field_SQL(field_name, field))) query_str = os.linesep.join(query_str) self.query(query_str, ignore_result=True, **kwargs) return True
[ "def", "_set_all_fields", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "current_fields", "=", "self", ".", "get_fields", "(", "schema", ",", "*", "*", "kwargs", ")", "for", "field_name", ",", "field", "in", "schema", ".", "fields", "....
this will add fields that don't exist in the table if they can be set to NULL, the reason they have to be NULL is adding fields to Postgres that can be NULL is really light, but if they have a default value, then it can be costly
[ "this", "will", "add", "fields", "that", "don", "t", "exist", "in", "the", "table", "if", "they", "can", "be", "set", "to", "NULL", "the", "reason", "they", "have", "to", "be", "NULL", "is", "adding", "fields", "to", "Postgres", "that", "can", "be", ...
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L912-L933
train
49,257
thorgate/tg-utils
tg_utils/files.py
random_path
def random_path(instance, filename): """ Random path generator for uploads, specify this for upload_to= argument of FileFields """ # Split the uuid into two parts so that we won't run into subdirectory count limits. First part has 3 hex chars, # thus 4k possible values. uuid_hex = get_uuid() return os.path.join(uuid_hex[:3], uuid_hex[3:], filename)
python
def random_path(instance, filename): """ Random path generator for uploads, specify this for upload_to= argument of FileFields """ # Split the uuid into two parts so that we won't run into subdirectory count limits. First part has 3 hex chars, # thus 4k possible values. uuid_hex = get_uuid() return os.path.join(uuid_hex[:3], uuid_hex[3:], filename)
[ "def", "random_path", "(", "instance", ",", "filename", ")", ":", "# Split the uuid into two parts so that we won't run into subdirectory count limits. First part has 3 hex chars,", "# thus 4k possible values.", "uuid_hex", "=", "get_uuid", "(", ")", "return", "os", ".", "path",...
Random path generator for uploads, specify this for upload_to= argument of FileFields
[ "Random", "path", "generator", "for", "uploads", "specify", "this", "for", "upload_to", "=", "argument", "of", "FileFields" ]
81e404e837334b241686d9159cc3eb44de509a88
https://github.com/thorgate/tg-utils/blob/81e404e837334b241686d9159cc3eb44de509a88/tg_utils/files.py#L6-L12
train
49,258
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
initialize
def initialize(signal_number=DEFAULT_TIMER_SIGNAL_NUMBER, update_period_s=DEFAULT_UPDATE_PERIOD_S): """Initialize metrics, must be invoked at least once prior to invoking any other method.""" global initialized if initialized: return initialized = True uwsgi.add_timer(signal_number, update_period_s) uwsgi.register_signal(signal_number, MULE, emit)
python
def initialize(signal_number=DEFAULT_TIMER_SIGNAL_NUMBER, update_period_s=DEFAULT_UPDATE_PERIOD_S): """Initialize metrics, must be invoked at least once prior to invoking any other method.""" global initialized if initialized: return initialized = True uwsgi.add_timer(signal_number, update_period_s) uwsgi.register_signal(signal_number, MULE, emit)
[ "def", "initialize", "(", "signal_number", "=", "DEFAULT_TIMER_SIGNAL_NUMBER", ",", "update_period_s", "=", "DEFAULT_UPDATE_PERIOD_S", ")", ":", "global", "initialized", "if", "initialized", ":", "return", "initialized", "=", "True", "uwsgi", ".", "add_timer", "(", ...
Initialize metrics, must be invoked at least once prior to invoking any other method.
[ "Initialize", "metrics", "must", "be", "invoked", "at", "least", "once", "prior", "to", "invoking", "any", "other", "method", "." ]
534966fd461ff711aecd1e3d4caaafdc23ac33f0
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L91-L100
train
49,259
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
emit
def emit(_): """Serialize metrics to the memory mapped buffer.""" if not initialized: raise NotInitialized view = { 'version': __version__, 'counters': {}, 'gauges': {}, 'histograms': {}, 'meters': {}, 'timers': {}, } for (ty, module, name), metric in six.iteritems(all_metrics): view[ty]['%s.%s' % (module, name)] = metric.view() marshalled_view = marshal.dumps(view) if len(marshalled_view) > MAX_MARSHALLED_VIEW_SIZE: log.warn( 'Marshalled length too large, got %d, max %d. ' 'Try recording fewer metrics or increasing ' 'MAX_MARSHALLED_VIEW_SIZE' % (len(marshalled_view), MAX_MARSHALLED_VIEW_SIZE)) return marshalled_metrics_mmap.seek(0) try: # Reading and writing to/from an mmap'ed buffer is not guaranteed # to be atomic, so we must serialize access to it. uwsgi.lock() marshalled_metrics_mmap.write(marshalled_view) finally: uwsgi.unlock()
python
def emit(_): """Serialize metrics to the memory mapped buffer.""" if not initialized: raise NotInitialized view = { 'version': __version__, 'counters': {}, 'gauges': {}, 'histograms': {}, 'meters': {}, 'timers': {}, } for (ty, module, name), metric in six.iteritems(all_metrics): view[ty]['%s.%s' % (module, name)] = metric.view() marshalled_view = marshal.dumps(view) if len(marshalled_view) > MAX_MARSHALLED_VIEW_SIZE: log.warn( 'Marshalled length too large, got %d, max %d. ' 'Try recording fewer metrics or increasing ' 'MAX_MARSHALLED_VIEW_SIZE' % (len(marshalled_view), MAX_MARSHALLED_VIEW_SIZE)) return marshalled_metrics_mmap.seek(0) try: # Reading and writing to/from an mmap'ed buffer is not guaranteed # to be atomic, so we must serialize access to it. uwsgi.lock() marshalled_metrics_mmap.write(marshalled_view) finally: uwsgi.unlock()
[ "def", "emit", "(", "_", ")", ":", "if", "not", "initialized", ":", "raise", "NotInitialized", "view", "=", "{", "'version'", ":", "__version__", ",", "'counters'", ":", "{", "}", ",", "'gauges'", ":", "{", "}", ",", "'histograms'", ":", "{", "}", ",...
Serialize metrics to the memory mapped buffer.
[ "Serialize", "metrics", "to", "the", "memory", "mapped", "buffer", "." ]
534966fd461ff711aecd1e3d4caaafdc23ac33f0
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L110-L142
train
49,260
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
view
def view(): """Get a dictionary representation of current metrics.""" if not initialized: raise NotInitialized marshalled_metrics_mmap.seek(0) try: uwsgi.lock() marshalled_view = marshalled_metrics_mmap.read( MAX_MARSHALLED_VIEW_SIZE) finally: uwsgi.unlock() return marshal.loads(marshalled_view)
python
def view(): """Get a dictionary representation of current metrics.""" if not initialized: raise NotInitialized marshalled_metrics_mmap.seek(0) try: uwsgi.lock() marshalled_view = marshalled_metrics_mmap.read( MAX_MARSHALLED_VIEW_SIZE) finally: uwsgi.unlock() return marshal.loads(marshalled_view)
[ "def", "view", "(", ")", ":", "if", "not", "initialized", ":", "raise", "NotInitialized", "marshalled_metrics_mmap", ".", "seek", "(", "0", ")", "try", ":", "uwsgi", ".", "lock", "(", ")", "marshalled_view", "=", "marshalled_metrics_mmap", ".", "read", "(", ...
Get a dictionary representation of current metrics.
[ "Get", "a", "dictionary", "representation", "of", "current", "metrics", "." ]
534966fd461ff711aecd1e3d4caaafdc23ac33f0
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L145-L157
train
49,261
guaix-ucm/pyemir
emirdrp/products.py
ProcessedImageProduct.convert_out
def convert_out(self, obj): """Write EMIRUUID header on reduction""" newobj = super(ProcessedImageProduct, self).convert_out(obj) if newobj: hdulist = newobj.open() hdr = hdulist[0].header if 'EMIRUUID' not in hdr: hdr['EMIRUUID'] = str(uuid.uuid1()) return newobj
python
def convert_out(self, obj): """Write EMIRUUID header on reduction""" newobj = super(ProcessedImageProduct, self).convert_out(obj) if newobj: hdulist = newobj.open() hdr = hdulist[0].header if 'EMIRUUID' not in hdr: hdr['EMIRUUID'] = str(uuid.uuid1()) return newobj
[ "def", "convert_out", "(", "self", ",", "obj", ")", ":", "newobj", "=", "super", "(", "ProcessedImageProduct", ",", "self", ")", ".", "convert_out", "(", "obj", ")", "if", "newobj", ":", "hdulist", "=", "newobj", ".", "open", "(", ")", "hdr", "=", "h...
Write EMIRUUID header on reduction
[ "Write", "EMIRUUID", "header", "on", "reduction" ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/products.py#L97-L105
train
49,262
IdentityPython/oidcendpoint
src/oidcendpoint/client_authn.py
verify_client
def verify_client(endpoint_context, request, authorization_info): """ Initiated Guessing ! :param endpoint_context: SrvInfo instance :param request: The request :param authorization_info: Client authentication information :return: dictionary containing client id, client authentication method and possibly access token. """ if not authorization_info: if 'client_id' in request and 'client_secret' in request: auth_info = ClientSecretPost(endpoint_context).verify(request) auth_info['method'] = 'client_secret_post' elif 'client_assertion' in request: auth_info = JWSAuthnMethod(endpoint_context).verify(request) # If symmetric key was used # auth_method = 'client_secret_jwt' # If asymmetric key was used auth_info['method'] = 'private_key_jwt' elif 'access_token' in request: auth_info = BearerBody(endpoint_context).verify(request) auth_info['method'] = 'bearer_body' else: raise UnknownOrNoAuthnMethod() else: if authorization_info.startswith('Basic '): auth_info = ClientSecretBasic(endpoint_context).verify( request, authorization_info) auth_info['method'] = 'client_secret_basic' elif authorization_info.startswith('Bearer '): auth_info = BearerHeader(endpoint_context).verify( request, authorization_info) auth_info['method'] = 'bearer_header' else: raise UnknownOrNoAuthnMethod(authorization_info) try: client_id = auth_info['client_id'] except KeyError: client_id = '' try: _token = auth_info['token'] except KeyError: logger.warning('Unknown client ID') else: sinfo = endpoint_context.sdb[_token] auth_info['client_id'] = sinfo['authn_req']['client_id'] else: try: _cinfo = endpoint_context.cdb[client_id] except KeyError: raise ValueError('Unknown Client ID') else: if isinstance(_cinfo,str): try: _cinfo = endpoint_context.cdb[_cinfo] except KeyError: raise ValueError('Unknown Client ID') try: valid_client_info(_cinfo) except KeyError: logger.warning('Client registration has timed out') raise ValueError('Not valid client') else: # check that the expected authz method was used try: 
endpoint_context.cdb[client_id]['auth_method'][ request.__class__.__name__] = auth_info['method'] except KeyError: try: endpoint_context.cdb[client_id]['auth_method'] = { request.__class__.__name__: auth_info['method']} except KeyError: pass return auth_info
python
def verify_client(endpoint_context, request, authorization_info): """ Initiated Guessing ! :param endpoint_context: SrvInfo instance :param request: The request :param authorization_info: Client authentication information :return: dictionary containing client id, client authentication method and possibly access token. """ if not authorization_info: if 'client_id' in request and 'client_secret' in request: auth_info = ClientSecretPost(endpoint_context).verify(request) auth_info['method'] = 'client_secret_post' elif 'client_assertion' in request: auth_info = JWSAuthnMethod(endpoint_context).verify(request) # If symmetric key was used # auth_method = 'client_secret_jwt' # If asymmetric key was used auth_info['method'] = 'private_key_jwt' elif 'access_token' in request: auth_info = BearerBody(endpoint_context).verify(request) auth_info['method'] = 'bearer_body' else: raise UnknownOrNoAuthnMethod() else: if authorization_info.startswith('Basic '): auth_info = ClientSecretBasic(endpoint_context).verify( request, authorization_info) auth_info['method'] = 'client_secret_basic' elif authorization_info.startswith('Bearer '): auth_info = BearerHeader(endpoint_context).verify( request, authorization_info) auth_info['method'] = 'bearer_header' else: raise UnknownOrNoAuthnMethod(authorization_info) try: client_id = auth_info['client_id'] except KeyError: client_id = '' try: _token = auth_info['token'] except KeyError: logger.warning('Unknown client ID') else: sinfo = endpoint_context.sdb[_token] auth_info['client_id'] = sinfo['authn_req']['client_id'] else: try: _cinfo = endpoint_context.cdb[client_id] except KeyError: raise ValueError('Unknown Client ID') else: if isinstance(_cinfo,str): try: _cinfo = endpoint_context.cdb[_cinfo] except KeyError: raise ValueError('Unknown Client ID') try: valid_client_info(_cinfo) except KeyError: logger.warning('Client registration has timed out') raise ValueError('Not valid client') else: # check that the expected authz method was used try: 
endpoint_context.cdb[client_id]['auth_method'][ request.__class__.__name__] = auth_info['method'] except KeyError: try: endpoint_context.cdb[client_id]['auth_method'] = { request.__class__.__name__: auth_info['method']} except KeyError: pass return auth_info
[ "def", "verify_client", "(", "endpoint_context", ",", "request", ",", "authorization_info", ")", ":", "if", "not", "authorization_info", ":", "if", "'client_id'", "in", "request", "and", "'client_secret'", "in", "request", ":", "auth_info", "=", "ClientSecretPost", ...
Initiated Guessing ! :param endpoint_context: SrvInfo instance :param request: The request :param authorization_info: Client authentication information :return: dictionary containing client id, client authentication method and possibly access token.
[ "Initiated", "Guessing", "!" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/client_authn.py#L194-L272
train
49,263
IdentityPython/oidcendpoint
src/oidcendpoint/oidc/refresh_token.py
RefreshAccessToken._post_parse_request
def _post_parse_request(self, request, client_id='', **kwargs): """ This is where clients come to refresh their access tokens :param request: The request :param authn: Authentication info, comes from HTTP header :returns: """ request = RefreshAccessTokenRequest(**request.to_dict()) try: keyjar = self.endpoint_context.keyjar except AttributeError: keyjar = "" request.verify(keyjar=keyjar, opponent_id=client_id) if "client_id" not in request: # Optional for refresh access token request request["client_id"] = client_id logger.debug("%s: %s" % (request.__class__.__name__, sanitize(request))) return request
python
def _post_parse_request(self, request, client_id='', **kwargs): """ This is where clients come to refresh their access tokens :param request: The request :param authn: Authentication info, comes from HTTP header :returns: """ request = RefreshAccessTokenRequest(**request.to_dict()) try: keyjar = self.endpoint_context.keyjar except AttributeError: keyjar = "" request.verify(keyjar=keyjar, opponent_id=client_id) if "client_id" not in request: # Optional for refresh access token request request["client_id"] = client_id logger.debug("%s: %s" % (request.__class__.__name__, sanitize(request))) return request
[ "def", "_post_parse_request", "(", "self", ",", "request", ",", "client_id", "=", "''", ",", "*", "*", "kwargs", ")", ":", "request", "=", "RefreshAccessTokenRequest", "(", "*", "*", "request", ".", "to_dict", "(", ")", ")", "try", ":", "keyjar", "=", ...
This is where clients come to refresh their access tokens :param request: The request :param authn: Authentication info, comes from HTTP header :returns:
[ "This", "is", "where", "clients", "come", "to", "refresh", "their", "access", "tokens" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/oidc/refresh_token.py#L77-L100
train
49,264
volafiled/python-volapi
volapi/utils.py
random_id
def random_id(length): """Generates a random ID of given length""" def char(): """Generate single random char""" return random.choice(string.ascii_letters + string.digits) return "".join(char() for _ in range(length))
python
def random_id(length): """Generates a random ID of given length""" def char(): """Generate single random char""" return random.choice(string.ascii_letters + string.digits) return "".join(char() for _ in range(length))
[ "def", "random_id", "(", "length", ")", ":", "def", "char", "(", ")", ":", "\"\"\"Generate single random char\"\"\"", "return", "random", ".", "choice", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", "return", "\"\"", ".", "join", "(...
Generates a random ID of given length
[ "Generates", "a", "random", "ID", "of", "given", "length" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/utils.py#L39-L47
train
49,265
volafiled/python-volapi
volapi/utils.py
delayed_close
def delayed_close(closable): """Delay close until this contextmanager dies""" close = getattr(closable, "close", None) if close: # we do not want the library to close file in case we need to # resume, hence make close a no-op # pylint: disable=unused-argument def replacement_close(*args, **kw): """ No op """ pass # pylint: enable=unused-argument setattr(closable, "close", replacement_close) try: yield closable finally: if close: setattr(closable, "close", close) closable.close()
python
def delayed_close(closable): """Delay close until this contextmanager dies""" close = getattr(closable, "close", None) if close: # we do not want the library to close file in case we need to # resume, hence make close a no-op # pylint: disable=unused-argument def replacement_close(*args, **kw): """ No op """ pass # pylint: enable=unused-argument setattr(closable, "close", replacement_close) try: yield closable finally: if close: setattr(closable, "close", close) closable.close()
[ "def", "delayed_close", "(", "closable", ")", ":", "close", "=", "getattr", "(", "closable", ",", "\"close\"", ",", "None", ")", "if", "close", ":", "# we do not want the library to close file in case we need to", "# resume, hence make close a no-op", "# pylint: disable=unu...
Delay close until this contextmanager dies
[ "Delay", "close", "until", "this", "contextmanager", "dies" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/utils.py#L63-L82
train
49,266
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.map_sid2uid
def map_sid2uid(self, sid, uid): """ Store the connection between a Session ID and a User ID :param sid: Session ID :param uid: User ID """ self.set('sid2uid', sid, uid) self.set('uid2sid', uid, sid)
python
def map_sid2uid(self, sid, uid): """ Store the connection between a Session ID and a User ID :param sid: Session ID :param uid: User ID """ self.set('sid2uid', sid, uid) self.set('uid2sid', uid, sid)
[ "def", "map_sid2uid", "(", "self", ",", "sid", ",", "uid", ")", ":", "self", ".", "set", "(", "'sid2uid'", ",", "sid", ",", "uid", ")", "self", ".", "set", "(", "'uid2sid'", ",", "uid", ",", "sid", ")" ]
Store the connection between a Session ID and a User ID :param sid: Session ID :param uid: User ID
[ "Store", "the", "connection", "between", "a", "Session", "ID", "and", "a", "User", "ID" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L52-L60
train
49,267
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.map_sid2sub
def map_sid2sub(self, sid, sub): """ Store the connection between a Session ID and a subject ID. :param sid: Session ID :param sub: subject ID """ self.set('sid2sub', sid, sub) self.set('sub2sid', sub, sid)
python
def map_sid2sub(self, sid, sub): """ Store the connection between a Session ID and a subject ID. :param sid: Session ID :param sub: subject ID """ self.set('sid2sub', sid, sub) self.set('sub2sid', sub, sid)
[ "def", "map_sid2sub", "(", "self", ",", "sid", ",", "sub", ")", ":", "self", ".", "set", "(", "'sid2sub'", ",", "sid", ",", "sub", ")", "self", ".", "set", "(", "'sub2sid'", ",", "sub", ",", "sid", ")" ]
Store the connection between a Session ID and a subject ID. :param sid: Session ID :param sub: subject ID
[ "Store", "the", "connection", "between", "a", "Session", "ID", "and", "a", "subject", "ID", "." ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L62-L70
train
49,268
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.get_subs_by_uid
def get_subs_by_uid(self, uid): """ Find all subject identifiers that is connected to a User ID. :param uid: A User ID :return: A set of subject identifiers """ res = set() for sid in self.get('uid2sid', uid): res |= set(self.get('sid2sub', sid)) return res
python
def get_subs_by_uid(self, uid): """ Find all subject identifiers that is connected to a User ID. :param uid: A User ID :return: A set of subject identifiers """ res = set() for sid in self.get('uid2sid', uid): res |= set(self.get('sid2sub', sid)) return res
[ "def", "get_subs_by_uid", "(", "self", ",", "uid", ")", ":", "res", "=", "set", "(", ")", "for", "sid", "in", "self", ".", "get", "(", "'uid2sid'", ",", "uid", ")", ":", "res", "|=", "set", "(", "self", ".", "get", "(", "'sid2sub'", ",", "sid", ...
Find all subject identifiers that is connected to a User ID. :param uid: A User ID :return: A set of subject identifiers
[ "Find", "all", "subject", "identifiers", "that", "is", "connected", "to", "a", "User", "ID", "." ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L104-L114
train
49,269
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.remove_session_id
def remove_session_id(self, sid): """ Remove all references to a specific Session ID :param sid: A Session ID """ for uid in self.get('sid2uid', sid): self.remove('uid2sid', uid, sid) self.delete('sid2uid', sid) for sub in self.get('sid2sub', sid): self.remove('sub2sid', sub, sid) self.delete('sid2sub', sid)
python
def remove_session_id(self, sid): """ Remove all references to a specific Session ID :param sid: A Session ID """ for uid in self.get('sid2uid', sid): self.remove('uid2sid', uid, sid) self.delete('sid2uid', sid) for sub in self.get('sid2sub', sid): self.remove('sub2sid', sub, sid) self.delete('sid2sub', sid)
[ "def", "remove_session_id", "(", "self", ",", "sid", ")", ":", "for", "uid", "in", "self", ".", "get", "(", "'sid2uid'", ",", "sid", ")", ":", "self", ".", "remove", "(", "'uid2sid'", ",", "uid", ",", "sid", ")", "self", ".", "delete", "(", "'sid2u...
Remove all references to a specific Session ID :param sid: A Session ID
[ "Remove", "all", "references", "to", "a", "specific", "Session", "ID" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L136-L148
train
49,270
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.remove_uid
def remove_uid(self, uid): """ Remove all references to a specific User ID :param uid: A User ID """ for sid in self.get('uid2sid', uid): self.remove('sid2uid', sid, uid) self.delete('uid2sid', uid)
python
def remove_uid(self, uid): """ Remove all references to a specific User ID :param uid: A User ID """ for sid in self.get('uid2sid', uid): self.remove('sid2uid', sid, uid) self.delete('uid2sid', uid)
[ "def", "remove_uid", "(", "self", ",", "uid", ")", ":", "for", "sid", "in", "self", ".", "get", "(", "'uid2sid'", ",", "uid", ")", ":", "self", ".", "remove", "(", "'sid2uid'", ",", "sid", ",", "uid", ")", "self", ".", "delete", "(", "'uid2sid'", ...
Remove all references to a specific User ID :param uid: A User ID
[ "Remove", "all", "references", "to", "a", "specific", "User", "ID" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L150-L158
train
49,271
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.remove_sub
def remove_sub(self, sub): """ Remove all references to a specific Subject ID :param sub: A Subject ID """ for _sid in self.get('sub2sid', sub): self.remove('sid2sub', _sid, sub) self.delete('sub2sid', sub)
python
def remove_sub(self, sub): """ Remove all references to a specific Subject ID :param sub: A Subject ID """ for _sid in self.get('sub2sid', sub): self.remove('sid2sub', _sid, sub) self.delete('sub2sid', sub)
[ "def", "remove_sub", "(", "self", ",", "sub", ")", ":", "for", "_sid", "in", "self", ".", "get", "(", "'sub2sid'", ",", "sub", ")", ":", "self", ".", "remove", "(", "'sid2sub'", ",", "_sid", ",", "sub", ")", "self", ".", "delete", "(", "'sub2sid'",...
Remove all references to a specific Subject ID :param sub: A Subject ID
[ "Remove", "all", "references", "to", "a", "specific", "Subject", "ID" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L160-L168
train
49,272
BeyondTheClouds/enoslib
docs/tutorials/using-tasks/step2.py
up
def up(force=True, env=None, **kwargs): "Starts a new experiment" inventory = os.path.join(os.getcwd(), "hosts") conf = Configuration.from_dictionnary(provider_conf) provider = Enos_vagrant(conf) roles, networks = provider.init() check_networks(roles, networks) env["roles"] = roles env["networks"] = networks
python
def up(force=True, env=None, **kwargs): "Starts a new experiment" inventory = os.path.join(os.getcwd(), "hosts") conf = Configuration.from_dictionnary(provider_conf) provider = Enos_vagrant(conf) roles, networks = provider.init() check_networks(roles, networks) env["roles"] = roles env["networks"] = networks
[ "def", "up", "(", "force", "=", "True", ",", "env", "=", "None", ",", "*", "*", "kwargs", ")", ":", "inventory", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "\"hosts\"", ")", "conf", "=", "Configuration", ".", ...
Starts a new experiment
[ "Starts", "a", "new", "experiment" ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/docs/tutorials/using-tasks/step2.py#L32-L40
train
49,273
BreakingBytes/simkit
simkit/core/__init__.py
convert_args
def convert_args(test_fcn, *test_args): """ Decorator to be using in formulas to convert ``test_args`` depending on the ``test_fcn``. :param test_fcn: A test function that converts arguments. :type test_fcn: function :param test_args: Names of args to convert using ``test_fcn``. :type test_args: str The following test functions are available. * :func:`dimensionless_to_index` Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') def f_max_T(Tcell24, dawn_idx, eve_idx): idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) return Tcell24[idx], idx """ def wrapper(origfcn): @functools.wraps(origfcn) def newfcn(*args, **kwargs): argspec = getargspec(origfcn) # use ``inspect`` to get arg names kwargs.update(zip(argspec.args, args)) # convert args to kw # loop over test args for a in test_args: # convert a if it's in args if a in argspec.args: kwargs[a] = test_fcn(kwargs[a]) # update kwargs # call original function with converted args return origfcn(**kwargs) # return wrapped function return newfcn # return the wrapper function that consumes the original function return wrapper
python
def convert_args(test_fcn, *test_args): """ Decorator to be using in formulas to convert ``test_args`` depending on the ``test_fcn``. :param test_fcn: A test function that converts arguments. :type test_fcn: function :param test_args: Names of args to convert using ``test_fcn``. :type test_args: str The following test functions are available. * :func:`dimensionless_to_index` Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') def f_max_T(Tcell24, dawn_idx, eve_idx): idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) return Tcell24[idx], idx """ def wrapper(origfcn): @functools.wraps(origfcn) def newfcn(*args, **kwargs): argspec = getargspec(origfcn) # use ``inspect`` to get arg names kwargs.update(zip(argspec.args, args)) # convert args to kw # loop over test args for a in test_args: # convert a if it's in args if a in argspec.args: kwargs[a] = test_fcn(kwargs[a]) # update kwargs # call original function with converted args return origfcn(**kwargs) # return wrapped function return newfcn # return the wrapper function that consumes the original function return wrapper
[ "def", "convert_args", "(", "test_fcn", ",", "*", "test_args", ")", ":", "def", "wrapper", "(", "origfcn", ")", ":", "@", "functools", ".", "wraps", "(", "origfcn", ")", "def", "newfcn", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "argspec",...
Decorator to be using in formulas to convert ``test_args`` depending on the ``test_fcn``. :param test_fcn: A test function that converts arguments. :type test_fcn: function :param test_args: Names of args to convert using ``test_fcn``. :type test_args: str The following test functions are available. * :func:`dimensionless_to_index` Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') def f_max_T(Tcell24, dawn_idx, eve_idx): idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) return Tcell24[idx], idx
[ "Decorator", "to", "be", "using", "in", "formulas", "to", "convert", "test_args", "depending", "on", "the", "test_fcn", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L157-L192
train
49,274
BreakingBytes/simkit
simkit/core/__init__.py
get_public_attributes
def get_public_attributes(cls, as_list=True): """ Return class attributes that are neither private nor magic. :param cls: class :param as_list: [True] set to False to return generator :return: only public attributes of class """ attrs = (a for a in dir(cls) if not a.startswith('_')) if as_list: return list(attrs) return attrs
python
def get_public_attributes(cls, as_list=True): """ Return class attributes that are neither private nor magic. :param cls: class :param as_list: [True] set to False to return generator :return: only public attributes of class """ attrs = (a for a in dir(cls) if not a.startswith('_')) if as_list: return list(attrs) return attrs
[ "def", "get_public_attributes", "(", "cls", ",", "as_list", "=", "True", ")", ":", "attrs", "=", "(", "a", "for", "a", "in", "dir", "(", "cls", ")", "if", "not", "a", ".", "startswith", "(", "'_'", ")", ")", "if", "as_list", ":", "return", "list", ...
Return class attributes that are neither private nor magic. :param cls: class :param as_list: [True] set to False to return generator :return: only public attributes of class
[ "Return", "class", "attributes", "that", "are", "neither", "private", "nor", "magic", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L226-L237
train
49,275
BreakingBytes/simkit
simkit/core/__init__.py
Registry.register
def register(self, newitems, *args, **kwargs): """ Register newitems in registry. :param newitems: New items to add to registry. When registering new items, keys are not allowed to override existing keys in the registry. :type newitems: mapping :param args: Positional arguments with meta data corresponding to order of meta names class attributes :param kwargs: Maps of corresponding meta for new keys. Each set of meta keys must be a subset of the new item keys. :raises: :exc:`~simkit.core.exceptions.DuplicateRegItemError`, :exc:`~simkit.core.exceptions.MismatchRegMetaKeysError` """ newkeys = newitems.viewkeys() # set of the new item keys if any(self.viewkeys() & newkeys): # duplicates raise DuplicateRegItemError(self.viewkeys() & newkeys) self.update(newitems) # register new item # update meta fields kwargs.update(zip(self.meta_names, args)) for k, v in kwargs.iteritems(): meta = getattr(self, k) # get the meta attribute if v: if not v.viewkeys() <= newkeys: raise MismatchRegMetaKeysError(newkeys - v.viewkeys()) meta.update(v)
python
def register(self, newitems, *args, **kwargs): """ Register newitems in registry. :param newitems: New items to add to registry. When registering new items, keys are not allowed to override existing keys in the registry. :type newitems: mapping :param args: Positional arguments with meta data corresponding to order of meta names class attributes :param kwargs: Maps of corresponding meta for new keys. Each set of meta keys must be a subset of the new item keys. :raises: :exc:`~simkit.core.exceptions.DuplicateRegItemError`, :exc:`~simkit.core.exceptions.MismatchRegMetaKeysError` """ newkeys = newitems.viewkeys() # set of the new item keys if any(self.viewkeys() & newkeys): # duplicates raise DuplicateRegItemError(self.viewkeys() & newkeys) self.update(newitems) # register new item # update meta fields kwargs.update(zip(self.meta_names, args)) for k, v in kwargs.iteritems(): meta = getattr(self, k) # get the meta attribute if v: if not v.viewkeys() <= newkeys: raise MismatchRegMetaKeysError(newkeys - v.viewkeys()) meta.update(v)
[ "def", "register", "(", "self", ",", "newitems", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "newkeys", "=", "newitems", ".", "viewkeys", "(", ")", "# set of the new item keys", "if", "any", "(", "self", ".", "viewkeys", "(", ")", "&", "newkey...
Register newitems in registry. :param newitems: New items to add to registry. When registering new items, keys are not allowed to override existing keys in the registry. :type newitems: mapping :param args: Positional arguments with meta data corresponding to order of meta names class attributes :param kwargs: Maps of corresponding meta for new keys. Each set of meta keys must be a subset of the new item keys. :raises: :exc:`~simkit.core.exceptions.DuplicateRegItemError`, :exc:`~simkit.core.exceptions.MismatchRegMetaKeysError`
[ "Register", "newitems", "in", "registry", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L103-L130
train
49,276
BreakingBytes/simkit
simkit/core/__init__.py
Registry.unregister
def unregister(self, items): """ Remove items from registry. :param items: """ items = _listify(items) # get all members of Registry except private, special or class meta_names = (m for m in vars(self).iterkeys() if (not m.startswith('_') and m not in dir(Registry))) # check that meta names matches # FIXME: this is so lame. replace this with something more robust for m in meta_names: if m not in self.meta_names: raise AttributeError('Meta name %s not listed.') # pop items from Registry and from meta for it in items: if it in self: self.pop(it) for m in (getattr(self, m_) for m_ in self.meta_names): if it in m: m.pop(it)
python
def unregister(self, items): """ Remove items from registry. :param items: """ items = _listify(items) # get all members of Registry except private, special or class meta_names = (m for m in vars(self).iterkeys() if (not m.startswith('_') and m not in dir(Registry))) # check that meta names matches # FIXME: this is so lame. replace this with something more robust for m in meta_names: if m not in self.meta_names: raise AttributeError('Meta name %s not listed.') # pop items from Registry and from meta for it in items: if it in self: self.pop(it) for m in (getattr(self, m_) for m_ in self.meta_names): if it in m: m.pop(it)
[ "def", "unregister", "(", "self", ",", "items", ")", ":", "items", "=", "_listify", "(", "items", ")", "# get all members of Registry except private, special or class", "meta_names", "=", "(", "m", "for", "m", "in", "vars", "(", "self", ")", ".", "iterkeys", "...
Remove items from registry. :param items:
[ "Remove", "items", "from", "registry", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L132-L153
train
49,277
BreakingBytes/simkit
simkit/core/__init__.py
SimKitJSONEncoder.default
def default(self, o): """ JSONEncoder default method that converts NumPy arrays and quantities objects to lists. """ if isinstance(o, Q_): return o.magnitude elif isinstance(o, np.ndarray): return o.tolist() else: # raise TypeError if not serializable return super(SimKitJSONEncoder, self).default(o)
python
def default(self, o): """ JSONEncoder default method that converts NumPy arrays and quantities objects to lists. """ if isinstance(o, Q_): return o.magnitude elif isinstance(o, np.ndarray): return o.tolist() else: # raise TypeError if not serializable return super(SimKitJSONEncoder, self).default(o)
[ "def", "default", "(", "self", ",", "o", ")", ":", "if", "isinstance", "(", "o", ",", "Q_", ")", ":", "return", "o", ".", "magnitude", "elif", "isinstance", "(", "o", ",", "np", ".", "ndarray", ")", ":", "return", "o", ".", "tolist", "(", ")", ...
JSONEncoder default method that converts NumPy arrays and quantities objects to lists.
[ "JSONEncoder", "default", "method", "that", "converts", "NumPy", "arrays", "and", "quantities", "objects", "to", "lists", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L212-L223
train
49,278
BreakingBytes/simkit
simkit/core/__init__.py
CommonBase.set_meta
def set_meta(mcs, bases, attr): """ Get all of the ``Meta`` classes from bases and combine them with this class. Pops or creates ``Meta`` from attributes, combines all bases, adds ``_meta`` to attributes with all meta :param bases: bases of this class :param attr: class attributes :return: attributes with ``Meta`` class from combined parents """ # pop the meta class from the attributes meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {})) # get a list of the meta public class attributes meta_attrs = get_public_attributes(meta) # check all bases for meta for base in bases: base_meta = getattr(base, mcs._meta_cls, None) # skip if base has no meta if base_meta is None: continue # loop over base meta for a in get_public_attributes(base_meta, as_list=False): # skip if already in meta if a in meta_attrs: continue # copy meta-option attribute from base setattr(meta, a, getattr(base_meta, a)) attr[mcs._meta_attr] = meta # set _meta combined from bases return attr
python
def set_meta(mcs, bases, attr): """ Get all of the ``Meta`` classes from bases and combine them with this class. Pops or creates ``Meta`` from attributes, combines all bases, adds ``_meta`` to attributes with all meta :param bases: bases of this class :param attr: class attributes :return: attributes with ``Meta`` class from combined parents """ # pop the meta class from the attributes meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {})) # get a list of the meta public class attributes meta_attrs = get_public_attributes(meta) # check all bases for meta for base in bases: base_meta = getattr(base, mcs._meta_cls, None) # skip if base has no meta if base_meta is None: continue # loop over base meta for a in get_public_attributes(base_meta, as_list=False): # skip if already in meta if a in meta_attrs: continue # copy meta-option attribute from base setattr(meta, a, getattr(base_meta, a)) attr[mcs._meta_attr] = meta # set _meta combined from bases return attr
[ "def", "set_meta", "(", "mcs", ",", "bases", ",", "attr", ")", ":", "# pop the meta class from the attributes", "meta", "=", "attr", ".", "pop", "(", "mcs", ".", "_meta_cls", ",", "types", ".", "ClassType", "(", "mcs", ".", "_meta_cls", ",", "(", ")", ",...
Get all of the ``Meta`` classes from bases and combine them with this class. Pops or creates ``Meta`` from attributes, combines all bases, adds ``_meta`` to attributes with all meta :param bases: bases of this class :param attr: class attributes :return: attributes with ``Meta`` class from combined parents
[ "Get", "all", "of", "the", "Meta", "classes", "from", "bases", "and", "combine", "them", "with", "this", "class", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L267-L297
train
49,279
IdentityPython/oidcendpoint
src/oidcendpoint/user_authn/user.py
factory
def factory(cls, **kwargs): """ Factory method that can be used to easily instantiate a class instance :param cls: The name of the class :param kwargs: Keyword arguments :return: An instance of the class or None if the name doesn't match any known class. """ for name, obj in inspect.getmembers(sys.modules[__name__]): if inspect.isclass(obj) and issubclass(obj, UserAuthnMethod): try: if obj.__name__ == cls: return obj(**kwargs) except AttributeError: pass
python
def factory(cls, **kwargs): """ Factory method that can be used to easily instantiate a class instance :param cls: The name of the class :param kwargs: Keyword arguments :return: An instance of the class or None if the name doesn't match any known class. """ for name, obj in inspect.getmembers(sys.modules[__name__]): if inspect.isclass(obj) and issubclass(obj, UserAuthnMethod): try: if obj.__name__ == cls: return obj(**kwargs) except AttributeError: pass
[ "def", "factory", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "for", "name", ",", "obj", "in", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ")", ":", "if", "inspect", ".", "isclass", "(", "obj", ")", "and", "...
Factory method that can be used to easily instantiate a class instance :param cls: The name of the class :param kwargs: Keyword arguments :return: An instance of the class or None if the name doesn't match any known class.
[ "Factory", "method", "that", "can", "be", "used", "to", "easily", "instantiate", "a", "class", "instance" ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/user_authn/user.py#L301-L316
train
49,280
BreakingBytes/simkit
simkit/core/formulas.py
FormulaRegistry.register
def register(self, new_formulas, *args, **kwargs): """ Register formula and meta data. * ``islinear`` - ``True`` if formula is linear, ``False`` if non-linear. * ``args`` - position of arguments * ``units`` - units of returns and arguments as pair of tuples * ``isconstant`` - constant arguments not included in covariance :param new_formulas: new formulas to add to registry. """ kwargs.update(zip(self.meta_names, args)) # call super method, meta must be passed as kwargs! super(FormulaRegistry, self).register(new_formulas, **kwargs)
python
def register(self, new_formulas, *args, **kwargs): """ Register formula and meta data. * ``islinear`` - ``True`` if formula is linear, ``False`` if non-linear. * ``args`` - position of arguments * ``units`` - units of returns and arguments as pair of tuples * ``isconstant`` - constant arguments not included in covariance :param new_formulas: new formulas to add to registry. """ kwargs.update(zip(self.meta_names, args)) # call super method, meta must be passed as kwargs! super(FormulaRegistry, self).register(new_formulas, **kwargs)
[ "def", "register", "(", "self", ",", "new_formulas", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "update", "(", "zip", "(", "self", ".", "meta_names", ",", "args", ")", ")", "# call super method, meta must be passed as kwargs!", "supe...
Register formula and meta data. * ``islinear`` - ``True`` if formula is linear, ``False`` if non-linear. * ``args`` - position of arguments * ``units`` - units of returns and arguments as pair of tuples * ``isconstant`` - constant arguments not included in covariance :param new_formulas: new formulas to add to registry.
[ "Register", "formula", "and", "meta", "data", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/formulas.py#L35-L48
train
49,281
BeyondTheClouds/enoslib
enoslib/infra/enos_chameleonbaremetal/provider.py
create_blazar_client
def create_blazar_client(config, session): """Check the reservation, creates a new one if nescessary.""" return blazar_client.Client(session=session, service_type="reservation", region_name=os.environ["OS_REGION_NAME"])
python
def create_blazar_client(config, session): """Check the reservation, creates a new one if nescessary.""" return blazar_client.Client(session=session, service_type="reservation", region_name=os.environ["OS_REGION_NAME"])
[ "def", "create_blazar_client", "(", "config", ",", "session", ")", ":", "return", "blazar_client", ".", "Client", "(", "session", "=", "session", ",", "service_type", "=", "\"reservation\"", ",", "region_name", "=", "os", ".", "environ", "[", "\"OS_REGION_NAME\"...
Check the reservation, creates a new one if nescessary.
[ "Check", "the", "reservation", "creates", "a", "new", "one", "if", "nescessary", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_chameleonbaremetal/provider.py#L46-L50
train
49,282
Jaymon/prom
prom/decorators.py
reconnecting
def reconnecting(count=None, backoff=None): """this is a very specific decorator meant to be used on Interface classes. It will attempt to reconnect if the connection is closed and run the same method again. TODO -- I think this will have issues with transactions using passed in connections, ie, you pass in a transacting connection to the insert() method and that connection gets dropped, this will reconnect but the transaction will be hosed. count -- integer -- how many attempts to run the method, defaults to 3 backoff -- float -- how long to sleep on failure, defaults to 1.0 """ # we get trixxy here so we can manipulate these values in the wrapped function, # this is one of the first times I wish we were on Python 3 # http://stackoverflow.com/a/9264845/5006 reconn_params = { "count": count, "backoff": backoff } def retry_decorator(func): @wraps(func) def wrapper(self, *args, **kwargs): count = reconn_params["count"] backoff = reconn_params["backoff"] if count is None: count = self.connection_config.options.get('reconnect_attempts', 3) if backoff is None: backoff = self.connection_config.options.get('reconnect_backoff', 1.0) count = int(count) backoff = float(backoff) for attempt in range(1, count + 1): try: backoff_seconds = float(attempt - 1) * backoff if backoff_seconds: logger.debug("sleeping {} seconds before attempt {}".format( backoff_seconds, attempt )) time.sleep(backoff_seconds) return func(self, *args, **kwargs) except InterfaceError as e: e_msg = str(e.e) # TODO -- this gets us by SQLite and Postgres, but might not # work in the future, so this needs to be a tad more robust if "closed" in e_msg.lower(): if attempt == count: logger.debug("all {} attempts failed".format(count)) raise else: logger.debug("attempt {}/{} failed, retrying".format( attempt, count )) else: raise return wrapper return retry_decorator
python
def reconnecting(count=None, backoff=None): """this is a very specific decorator meant to be used on Interface classes. It will attempt to reconnect if the connection is closed and run the same method again. TODO -- I think this will have issues with transactions using passed in connections, ie, you pass in a transacting connection to the insert() method and that connection gets dropped, this will reconnect but the transaction will be hosed. count -- integer -- how many attempts to run the method, defaults to 3 backoff -- float -- how long to sleep on failure, defaults to 1.0 """ # we get trixxy here so we can manipulate these values in the wrapped function, # this is one of the first times I wish we were on Python 3 # http://stackoverflow.com/a/9264845/5006 reconn_params = { "count": count, "backoff": backoff } def retry_decorator(func): @wraps(func) def wrapper(self, *args, **kwargs): count = reconn_params["count"] backoff = reconn_params["backoff"] if count is None: count = self.connection_config.options.get('reconnect_attempts', 3) if backoff is None: backoff = self.connection_config.options.get('reconnect_backoff', 1.0) count = int(count) backoff = float(backoff) for attempt in range(1, count + 1): try: backoff_seconds = float(attempt - 1) * backoff if backoff_seconds: logger.debug("sleeping {} seconds before attempt {}".format( backoff_seconds, attempt )) time.sleep(backoff_seconds) return func(self, *args, **kwargs) except InterfaceError as e: e_msg = str(e.e) # TODO -- this gets us by SQLite and Postgres, but might not # work in the future, so this needs to be a tad more robust if "closed" in e_msg.lower(): if attempt == count: logger.debug("all {} attempts failed".format(count)) raise else: logger.debug("attempt {}/{} failed, retrying".format( attempt, count )) else: raise return wrapper return retry_decorator
[ "def", "reconnecting", "(", "count", "=", "None", ",", "backoff", "=", "None", ")", ":", "# we get trixxy here so we can manipulate these values in the wrapped function,", "# this is one of the first times I wish we were on Python 3", "# http://stackoverflow.com/a/9264845/5006", "recon...
this is a very specific decorator meant to be used on Interface classes. It will attempt to reconnect if the connection is closed and run the same method again. TODO -- I think this will have issues with transactions using passed in connections, ie, you pass in a transacting connection to the insert() method and that connection gets dropped, this will reconnect but the transaction will be hosed. count -- integer -- how many attempts to run the method, defaults to 3 backoff -- float -- how long to sleep on failure, defaults to 1.0
[ "this", "is", "a", "very", "specific", "decorator", "meant", "to", "be", "used", "on", "Interface", "classes", ".", "It", "will", "attempt", "to", "reconnect", "if", "the", "connection", "is", "closed", "and", "run", "the", "same", "method", "again", "." ]
b7ad2c259eca198da03e1e4bc7d95014c168c361
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/decorators.py#L13-L81
train
49,283
volafiled/python-volapi
volapi/multipart.py
escape_header
def escape_header(val): """Escapes a value so that it can be used in a mime header""" if val is None: return None try: return quote(val, encoding="ascii", safe="/ ") except ValueError: return "utf-8''" + quote(val, encoding="utf-8", safe="/ ")
python
def escape_header(val): """Escapes a value so that it can be used in a mime header""" if val is None: return None try: return quote(val, encoding="ascii", safe="/ ") except ValueError: return "utf-8''" + quote(val, encoding="utf-8", safe="/ ")
[ "def", "escape_header", "(", "val", ")", ":", "if", "val", "is", "None", ":", "return", "None", "try", ":", "return", "quote", "(", "val", ",", "encoding", "=", "\"ascii\"", ",", "safe", "=", "\"/ \"", ")", "except", "ValueError", ":", "return", "\"utf...
Escapes a value so that it can be used in a mime header
[ "Escapes", "a", "value", "so", "that", "it", "can", "be", "used", "in", "a", "mime", "header" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/multipart.py#L24-L32
train
49,284
volafiled/python-volapi
volapi/multipart.py
make_streams
def make_streams(name, value, boundary, encoding): """Generates one or more streams for each name, value pair""" filename = None mime = None # user passed in a special dict. if isinstance(value, collections.Mapping) and "name" in value and "value" in value: filename = value["name"] try: mime = value["mime"] except KeyError: pass value = value["value"] if not filename: filename = getattr(value, "name", None) if filename: filename = os.path.split(filename)[1] mime = mime or "application/octet-stream" name, filename, mime = [escape_header(v) for v in (name, filename, mime)] stream = BytesIO() stream.write("--{}\r\n".format(boundary).encode(encoding)) if not filename: stream.write( 'Content-Disposition: form-data; name="{}"\r\n'.format(name).encode( encoding ) ) else: stream.write( 'Content-Disposition: form-data; name="{}"; filename="{}"\r\n'.format( name, filename ).encode(encoding) ) if mime: stream.write("Content-Type: {}\r\n".format(mime).encode(encoding)) stream.write(b"\r\n") if hasattr(value, "read"): stream.seek(0) return stream, value, BytesIO("\r\n".encode(encoding)) # not a file-like object, encode headers and value in one go value = value if isinstance(value, (str, bytes)) else json.dumps(value) if isinstance(value, bytes): stream.write(value) else: stream.write(value.encode(encoding)) stream.write(b"\r\n") stream.seek(0) return (stream,)
python
def make_streams(name, value, boundary, encoding): """Generates one or more streams for each name, value pair""" filename = None mime = None # user passed in a special dict. if isinstance(value, collections.Mapping) and "name" in value and "value" in value: filename = value["name"] try: mime = value["mime"] except KeyError: pass value = value["value"] if not filename: filename = getattr(value, "name", None) if filename: filename = os.path.split(filename)[1] mime = mime or "application/octet-stream" name, filename, mime = [escape_header(v) for v in (name, filename, mime)] stream = BytesIO() stream.write("--{}\r\n".format(boundary).encode(encoding)) if not filename: stream.write( 'Content-Disposition: form-data; name="{}"\r\n'.format(name).encode( encoding ) ) else: stream.write( 'Content-Disposition: form-data; name="{}"; filename="{}"\r\n'.format( name, filename ).encode(encoding) ) if mime: stream.write("Content-Type: {}\r\n".format(mime).encode(encoding)) stream.write(b"\r\n") if hasattr(value, "read"): stream.seek(0) return stream, value, BytesIO("\r\n".encode(encoding)) # not a file-like object, encode headers and value in one go value = value if isinstance(value, (str, bytes)) else json.dumps(value) if isinstance(value, bytes): stream.write(value) else: stream.write(value.encode(encoding)) stream.write(b"\r\n") stream.seek(0) return (stream,)
[ "def", "make_streams", "(", "name", ",", "value", ",", "boundary", ",", "encoding", ")", ":", "filename", "=", "None", "mime", "=", "None", "# user passed in a special dict.", "if", "isinstance", "(", "value", ",", "collections", ".", "Mapping", ")", "and", ...
Generates one or more streams for each name, value pair
[ "Generates", "one", "or", "more", "streams", "for", "each", "name", "value", "pair" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/multipart.py#L35-L89
train
49,285
volafiled/python-volapi
volapi/multipart.py
Data.len
def len(self): """Length of the data stream""" # The len property is needed for requests. # requests checks __len__, then len # Since we cannot implement __len__ because python 32-bit uses 32-bit # sizes, we implement this instead. def stream_len(stream): """Stream length""" cur = stream.tell() try: stream.seek(0, 2) return stream.tell() - cur finally: stream.seek(cur) return sum(stream_len(s) for s in self.streams)
python
def len(self): """Length of the data stream""" # The len property is needed for requests. # requests checks __len__, then len # Since we cannot implement __len__ because python 32-bit uses 32-bit # sizes, we implement this instead. def stream_len(stream): """Stream length""" cur = stream.tell() try: stream.seek(0, 2) return stream.tell() - cur finally: stream.seek(cur) return sum(stream_len(s) for s in self.streams)
[ "def", "len", "(", "self", ")", ":", "# The len property is needed for requests.", "# requests checks __len__, then len", "# Since we cannot implement __len__ because python 32-bit uses 32-bit", "# sizes, we implement this instead.", "def", "stream_len", "(", "stream", ")", ":", "\"\...
Length of the data stream
[ "Length", "of", "the", "data", "stream" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/multipart.py#L132-L147
train
49,286
volafiled/python-volapi
volapi/multipart.py
Data.headers
def headers(self): """All headers needed to make a request""" return { "Content-Type": ("multipart/form-data; boundary={}".format(self.boundary)), "Content-Length": str(self.len), "Content-Encoding": self.encoding, }
python
def headers(self): """All headers needed to make a request""" return { "Content-Type": ("multipart/form-data; boundary={}".format(self.boundary)), "Content-Length": str(self.len), "Content-Encoding": self.encoding, }
[ "def", "headers", "(", "self", ")", ":", "return", "{", "\"Content-Type\"", ":", "(", "\"multipart/form-data; boundary={}\"", ".", "format", "(", "self", ".", "boundary", ")", ")", ",", "\"Content-Length\"", ":", "str", "(", "self", ".", "len", ")", ",", "...
All headers needed to make a request
[ "All", "headers", "needed", "to", "make", "a", "request" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/multipart.py#L150-L156
train
49,287
BeyondTheClouds/enoslib
enoslib/infra/utils.py
mk_pools
def mk_pools(things, keyfnc=lambda x: x): "Indexes a thing by the keyfnc to construct pools of things." pools = {} sthings = sorted(things, key=keyfnc) for key, thingz in groupby(sthings, key=keyfnc): pools.setdefault(key, []).extend(list(thingz)) return pools
python
def mk_pools(things, keyfnc=lambda x: x): "Indexes a thing by the keyfnc to construct pools of things." pools = {} sthings = sorted(things, key=keyfnc) for key, thingz in groupby(sthings, key=keyfnc): pools.setdefault(key, []).extend(list(thingz)) return pools
[ "def", "mk_pools", "(", "things", ",", "keyfnc", "=", "lambda", "x", ":", "x", ")", ":", "pools", "=", "{", "}", "sthings", "=", "sorted", "(", "things", ",", "key", "=", "keyfnc", ")", "for", "key", ",", "thingz", "in", "groupby", "(", "sthings", ...
Indexes a thing by the keyfnc to construct pools of things.
[ "Indexes", "a", "thing", "by", "the", "keyfnc", "to", "construct", "pools", "of", "things", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/utils.py#L6-L12
train
49,288
BeyondTheClouds/enoslib
enoslib/infra/utils.py
pick_things
def pick_things(pools, key, n): "Picks a maximum of n things in a dict of indexed pool of things." pool = pools.get(key) if not pool: return [] things = pool[:n] del pool[:n] return things
python
def pick_things(pools, key, n): "Picks a maximum of n things in a dict of indexed pool of things." pool = pools.get(key) if not pool: return [] things = pool[:n] del pool[:n] return things
[ "def", "pick_things", "(", "pools", ",", "key", ",", "n", ")", ":", "pool", "=", "pools", ".", "get", "(", "key", ")", "if", "not", "pool", ":", "return", "[", "]", "things", "=", "pool", "[", ":", "n", "]", "del", "pool", "[", ":", "n", "]",...
Picks a maximum of n things in a dict of indexed pool of things.
[ "Picks", "a", "maximum", "of", "n", "things", "in", "a", "dict", "of", "indexed", "pool", "of", "things", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/utils.py#L15-L22
train
49,289
volafiled/python-volapi
docs/examples/parrot.py
listen
def listen(room): """Open a volafile room and start listening to it""" def onmessage(m): """Print the new message and respond to it.""" print(m) if m.admin or m.nick == r.user.name: return if "parrot" in m.msg.lower(): r.post_chat("ayy lmao") elif m.msg.lower() in ("lol", "lel", "kek"): r.post_chat("*kok") else: r.post_chat(re.sub(r"\blain\b", "purpleadmin", m.msg, re.I)) with Room(room) as r: r.user.change_nick("DumbParrot") r.add_listener("chat", onmessage) r.listen()
python
def listen(room): """Open a volafile room and start listening to it""" def onmessage(m): """Print the new message and respond to it.""" print(m) if m.admin or m.nick == r.user.name: return if "parrot" in m.msg.lower(): r.post_chat("ayy lmao") elif m.msg.lower() in ("lol", "lel", "kek"): r.post_chat("*kok") else: r.post_chat(re.sub(r"\blain\b", "purpleadmin", m.msg, re.I)) with Room(room) as r: r.user.change_nick("DumbParrot") r.add_listener("chat", onmessage) r.listen()
[ "def", "listen", "(", "room", ")", ":", "def", "onmessage", "(", "m", ")", ":", "\"\"\"Print the new message and respond to it.\"\"\"", "print", "(", "m", ")", "if", "m", ".", "admin", "or", "m", ".", "nick", "==", "r", ".", "user", ".", "name", ":", "...
Open a volafile room and start listening to it
[ "Open", "a", "volafile", "room", "and", "start", "listening", "to", "it" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/docs/examples/parrot.py#L11-L28
train
49,290
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
average_dtu_configurations
def average_dtu_configurations(list_of_objects): """Return DtuConfiguration instance with averaged values. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values. """ result = DtuConfiguration() if len(list_of_objects) == 0: return result list_of_members = result.__dict__.keys() # compute average of all the members of the class for member in list_of_members: result.__dict__[member] = np.mean( [tmp_dtu.__dict__[member] for tmp_dtu in list_of_objects] ) return result
python
def average_dtu_configurations(list_of_objects): """Return DtuConfiguration instance with averaged values. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values. """ result = DtuConfiguration() if len(list_of_objects) == 0: return result list_of_members = result.__dict__.keys() # compute average of all the members of the class for member in list_of_members: result.__dict__[member] = np.mean( [tmp_dtu.__dict__[member] for tmp_dtu in list_of_objects] ) return result
[ "def", "average_dtu_configurations", "(", "list_of_objects", ")", ":", "result", "=", "DtuConfiguration", "(", ")", "if", "len", "(", "list_of_objects", ")", "==", "0", ":", "return", "result", "list_of_members", "=", "result", ".", "__dict__", ".", "keys", "(...
Return DtuConfiguration instance with averaged values. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values.
[ "Return", "DtuConfiguration", "instance", "with", "averaged", "values", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L208-L236
train
49,291
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
maxdiff_dtu_configurations
def maxdiff_dtu_configurations(list_of_objects): """Return DtuConfiguration instance with maximum differences. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values. """ result = DtuConfiguration() if len(list_of_objects) == 0: return result list_of_members = result.__dict__.keys() # compute maximum difference for each member for member in list_of_members: tmp_array = np.array( [tmp_dtu.__dict__[member] for tmp_dtu in list_of_objects] ) minval = tmp_array.min() maxval = tmp_array.max() result.__dict__[member] = maxval - minval return result
python
def maxdiff_dtu_configurations(list_of_objects): """Return DtuConfiguration instance with maximum differences. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values. """ result = DtuConfiguration() if len(list_of_objects) == 0: return result list_of_members = result.__dict__.keys() # compute maximum difference for each member for member in list_of_members: tmp_array = np.array( [tmp_dtu.__dict__[member] for tmp_dtu in list_of_objects] ) minval = tmp_array.min() maxval = tmp_array.max() result.__dict__[member] = maxval - minval return result
[ "def", "maxdiff_dtu_configurations", "(", "list_of_objects", ")", ":", "result", "=", "DtuConfiguration", "(", ")", "if", "len", "(", "list_of_objects", ")", "==", "0", ":", "return", "result", "list_of_members", "=", "result", ".", "__dict__", ".", "keys", "(...
Return DtuConfiguration instance with maximum differences. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values.
[ "Return", "DtuConfiguration", "instance", "with", "maximum", "differences", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L239-L270
train
49,292
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.define_from_fits
def define_from_fits(cls, fitsobj, extnum=0): """Define class object from header information in FITS file. Parameters ---------- fitsobj: file object FITS file whose header contains the DTU information needed to initialise the members of this class. extnum : int Extension number (first extension is 0) """ # read input FITS file with fits.open(fitsobj) as hdulist: image_header = hdulist[extnum].header return cls.define_from_header(image_header)
python
def define_from_fits(cls, fitsobj, extnum=0): """Define class object from header information in FITS file. Parameters ---------- fitsobj: file object FITS file whose header contains the DTU information needed to initialise the members of this class. extnum : int Extension number (first extension is 0) """ # read input FITS file with fits.open(fitsobj) as hdulist: image_header = hdulist[extnum].header return cls.define_from_header(image_header)
[ "def", "define_from_fits", "(", "cls", ",", "fitsobj", ",", "extnum", "=", "0", ")", ":", "# read input FITS file", "with", "fits", ".", "open", "(", "fitsobj", ")", "as", "hdulist", ":", "image_header", "=", "hdulist", "[", "extnum", "]", ".", "header", ...
Define class object from header information in FITS file. Parameters ---------- fitsobj: file object FITS file whose header contains the DTU information needed to initialise the members of this class. extnum : int Extension number (first extension is 0)
[ "Define", "class", "object", "from", "header", "information", "in", "FITS", "file", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L94-L110
train
49,293
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.define_from_dictionary
def define_from_dictionary(cls, inputdict): """Define class object from dictionary. Parameters ---------- inputdict : dictionary like object Dictionary like object defining each member of the class. """ self = DtuConfiguration() for item in self.__dict__: self.__dict__[item] = inputdict[item] return self
python
def define_from_dictionary(cls, inputdict): """Define class object from dictionary. Parameters ---------- inputdict : dictionary like object Dictionary like object defining each member of the class. """ self = DtuConfiguration() for item in self.__dict__: self.__dict__[item] = inputdict[item] return self
[ "def", "define_from_dictionary", "(", "cls", ",", "inputdict", ")", ":", "self", "=", "DtuConfiguration", "(", ")", "for", "item", "in", "self", ".", "__dict__", ":", "self", ".", "__dict__", "[", "item", "]", "=", "inputdict", "[", "item", "]", "return"...
Define class object from dictionary. Parameters ---------- inputdict : dictionary like object Dictionary like object defining each member of the class.
[ "Define", "class", "object", "from", "dictionary", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L126-L139
train
49,294
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.define_from_values
def define_from_values(cls, xdtu, ydtu, zdtu, xdtu_0, ydtu_0, zdtu_0): """Define class object from from provided values. Parameters ---------- xdtu : float XDTU fits keyword value. ydtu : float YDTU fits keyword value. zdtu : float ZDTU fits keyword value. xdtu_0 : float XDTU_0 fits keyword value. ydtu_0 : float YDTU_0 fits keyword value. zdtu_0 : float ZDTU_0 fits keyword value. """ self = DtuConfiguration() # define DTU variables self.xdtu = xdtu self.ydtu = ydtu self.zdtu = zdtu self.xdtu_0 = xdtu_0 self.ydtu_0 = ydtu_0 self.zdtu_0 = zdtu_0 return self
python
def define_from_values(cls, xdtu, ydtu, zdtu, xdtu_0, ydtu_0, zdtu_0): """Define class object from from provided values. Parameters ---------- xdtu : float XDTU fits keyword value. ydtu : float YDTU fits keyword value. zdtu : float ZDTU fits keyword value. xdtu_0 : float XDTU_0 fits keyword value. ydtu_0 : float YDTU_0 fits keyword value. zdtu_0 : float ZDTU_0 fits keyword value. """ self = DtuConfiguration() # define DTU variables self.xdtu = xdtu self.ydtu = ydtu self.zdtu = zdtu self.xdtu_0 = xdtu_0 self.ydtu_0 = ydtu_0 self.zdtu_0 = zdtu_0 return self
[ "def", "define_from_values", "(", "cls", ",", "xdtu", ",", "ydtu", ",", "zdtu", ",", "xdtu_0", ",", "ydtu_0", ",", "zdtu_0", ")", ":", "self", "=", "DtuConfiguration", "(", ")", "# define DTU variables", "self", ".", "xdtu", "=", "xdtu", "self", ".", "yd...
Define class object from from provided values. Parameters ---------- xdtu : float XDTU fits keyword value. ydtu : float YDTU fits keyword value. zdtu : float ZDTU fits keyword value. xdtu_0 : float XDTU_0 fits keyword value. ydtu_0 : float YDTU_0 fits keyword value. zdtu_0 : float ZDTU_0 fits keyword value.
[ "Define", "class", "object", "from", "from", "provided", "values", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L142-L170
train
49,295
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.closeto
def closeto(self, other, abserror): """Check that all the members are equal within provided absolute error. Parameters ---------- other : DtuConfiguration object DTU configuration instance to be compared with self. abserror : float Absolute maximum allowed error. Returns ------- result : bool True is all members are within the specified maximum absolute error """ result = \ (abs(self.xdtu - other.xdtu) <= abserror) and \ (abs(self.ydtu - other.ydtu) <= abserror) and \ (abs(self.zdtu - other.zdtu) <= abserror) and \ (abs(self.xdtu_0 - other.xdtu_0) <= abserror) and \ (abs(self.ydtu_0 - other.ydtu_0) <= abserror) and \ (abs(self.zdtu_0 - other.zdtu_0) <= abserror) return result
python
def closeto(self, other, abserror): """Check that all the members are equal within provided absolute error. Parameters ---------- other : DtuConfiguration object DTU configuration instance to be compared with self. abserror : float Absolute maximum allowed error. Returns ------- result : bool True is all members are within the specified maximum absolute error """ result = \ (abs(self.xdtu - other.xdtu) <= abserror) and \ (abs(self.ydtu - other.ydtu) <= abserror) and \ (abs(self.zdtu - other.zdtu) <= abserror) and \ (abs(self.xdtu_0 - other.xdtu_0) <= abserror) and \ (abs(self.ydtu_0 - other.ydtu_0) <= abserror) and \ (abs(self.zdtu_0 - other.zdtu_0) <= abserror) return result
[ "def", "closeto", "(", "self", ",", "other", ",", "abserror", ")", ":", "result", "=", "(", "abs", "(", "self", ".", "xdtu", "-", "other", ".", "xdtu", ")", "<=", "abserror", ")", "and", "(", "abs", "(", "self", ".", "ydtu", "-", "other", ".", ...
Check that all the members are equal within provided absolute error. Parameters ---------- other : DtuConfiguration object DTU configuration instance to be compared with self. abserror : float Absolute maximum allowed error. Returns ------- result : bool True is all members are within the specified maximum absolute error
[ "Check", "that", "all", "the", "members", "are", "equal", "within", "provided", "absolute", "error", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L172-L197
train
49,296
BeyondTheClouds/enoslib
enoslib/infra/enos_vagrant/provider.py
Enos_vagrant.init
def init(self, force_deploy=False): """Reserve and deploys the vagrant boxes. Args: force_deploy (bool): True iff new machines should be started """ machines = self.provider_conf.machines networks = self.provider_conf.networks _networks = [] for network in networks: ipnet = IPNetwork(network.cidr) _networks.append({ "netpool": list(ipnet)[10:-10], "cidr": network.cidr, "roles": network.roles, "gateway": ipnet.ip }) vagrant_machines = [] vagrant_roles = {} j = 0 for machine in machines: for _ in range(machine.number): vagrant_machine = { "name": "enos-%s" % j, "cpu": machine.flavour_desc["core"], "mem": machine.flavour_desc["mem"], "ips": [n["netpool"].pop() for n in _networks], } vagrant_machines.append(vagrant_machine) # Assign the machines to the right roles for role in machine.roles: vagrant_roles.setdefault(role, []).append(vagrant_machine) j = j + 1 logger.debug(vagrant_roles) loader = FileSystemLoader(searchpath=TEMPLATE_DIR) env = Environment(loader=loader, autoescape=True) template = env.get_template('Vagrantfile.j2') vagrantfile = template.render(machines=vagrant_machines, provider_conf=self.provider_conf) vagrantfile_path = os.path.join(os.getcwd(), "Vagrantfile") with open(vagrantfile_path, 'w') as f: f.write(vagrantfile) # Build env for Vagrant with a copy of env variables (needed by # subprocess opened by vagrant v_env = dict(os.environ) v_env['VAGRANT_DEFAULT_PROVIDER'] = self.provider_conf.backend v = vagrant.Vagrant(root=os.getcwd(), quiet_stdout=False, quiet_stderr=False, env=v_env) if force_deploy: v.destroy() v.up() v.provision() roles = {} for role, machines in vagrant_roles.items(): for machine in machines: keyfile = v.keyfile(vm_name=machine['name']) port = v.port(vm_name=machine['name']) address = v.hostname(vm_name=machine['name']) roles.setdefault(role, []).append( Host(address, alias=machine['name'], user=self.provider_conf.user, port=port, keyfile=keyfile)) networks = [{ 'cidr': str(n["cidr"]), 'start': str(n["netpool"][0]), 'end': 
str(n["netpool"][-1]), 'dns': '8.8.8.8', 'gateway': str(n["gateway"]), 'roles': n["roles"] } for n in _networks] logger.debug(roles) logger.debug(networks) return (roles, networks)
python
def init(self, force_deploy=False): """Reserve and deploys the vagrant boxes. Args: force_deploy (bool): True iff new machines should be started """ machines = self.provider_conf.machines networks = self.provider_conf.networks _networks = [] for network in networks: ipnet = IPNetwork(network.cidr) _networks.append({ "netpool": list(ipnet)[10:-10], "cidr": network.cidr, "roles": network.roles, "gateway": ipnet.ip }) vagrant_machines = [] vagrant_roles = {} j = 0 for machine in machines: for _ in range(machine.number): vagrant_machine = { "name": "enos-%s" % j, "cpu": machine.flavour_desc["core"], "mem": machine.flavour_desc["mem"], "ips": [n["netpool"].pop() for n in _networks], } vagrant_machines.append(vagrant_machine) # Assign the machines to the right roles for role in machine.roles: vagrant_roles.setdefault(role, []).append(vagrant_machine) j = j + 1 logger.debug(vagrant_roles) loader = FileSystemLoader(searchpath=TEMPLATE_DIR) env = Environment(loader=loader, autoescape=True) template = env.get_template('Vagrantfile.j2') vagrantfile = template.render(machines=vagrant_machines, provider_conf=self.provider_conf) vagrantfile_path = os.path.join(os.getcwd(), "Vagrantfile") with open(vagrantfile_path, 'w') as f: f.write(vagrantfile) # Build env for Vagrant with a copy of env variables (needed by # subprocess opened by vagrant v_env = dict(os.environ) v_env['VAGRANT_DEFAULT_PROVIDER'] = self.provider_conf.backend v = vagrant.Vagrant(root=os.getcwd(), quiet_stdout=False, quiet_stderr=False, env=v_env) if force_deploy: v.destroy() v.up() v.provision() roles = {} for role, machines in vagrant_roles.items(): for machine in machines: keyfile = v.keyfile(vm_name=machine['name']) port = v.port(vm_name=machine['name']) address = v.hostname(vm_name=machine['name']) roles.setdefault(role, []).append( Host(address, alias=machine['name'], user=self.provider_conf.user, port=port, keyfile=keyfile)) networks = [{ 'cidr': str(n["cidr"]), 'start': str(n["netpool"][0]), 'end': 
str(n["netpool"][-1]), 'dns': '8.8.8.8', 'gateway': str(n["gateway"]), 'roles': n["roles"] } for n in _networks] logger.debug(roles) logger.debug(networks) return (roles, networks)
[ "def", "init", "(", "self", ",", "force_deploy", "=", "False", ")", ":", "machines", "=", "self", ".", "provider_conf", ".", "machines", "networks", "=", "self", ".", "provider_conf", ".", "networks", "_networks", "=", "[", "]", "for", "network", "in", "...
Reserve and deploys the vagrant boxes. Args: force_deploy (bool): True iff new machines should be started
[ "Reserve", "and", "deploys", "the", "vagrant", "boxes", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_vagrant/provider.py#L22-L105
train
49,297
BeyondTheClouds/enoslib
enoslib/infra/enos_vagrant/provider.py
Enos_vagrant.destroy
def destroy(self): """Destroy all vagrant box involved in the deployment.""" v = vagrant.Vagrant(root=os.getcwd(), quiet_stdout=False, quiet_stderr=True) v.destroy()
python
def destroy(self): """Destroy all vagrant box involved in the deployment.""" v = vagrant.Vagrant(root=os.getcwd(), quiet_stdout=False, quiet_stderr=True) v.destroy()
[ "def", "destroy", "(", "self", ")", ":", "v", "=", "vagrant", ".", "Vagrant", "(", "root", "=", "os", ".", "getcwd", "(", ")", ",", "quiet_stdout", "=", "False", ",", "quiet_stderr", "=", "True", ")", "v", ".", "destroy", "(", ")" ]
Destroy all vagrant box involved in the deployment.
[ "Destroy", "all", "vagrant", "box", "involved", "in", "the", "deployment", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_vagrant/provider.py#L107-L112
train
49,298
Yelp/uwsgi_metrics
uwsgi_metrics/ewma.py
EWMA.tick
def tick(self): """Mark the passage of time and decay the current rate accordingly.""" instant_rate = self.count / float(self.tick_interval_s) self.count = 0 if self.initialized: self.rate += (self.alpha * (instant_rate - self.rate)) else: self.rate = instant_rate self.initialized = True
python
def tick(self): """Mark the passage of time and decay the current rate accordingly.""" instant_rate = self.count / float(self.tick_interval_s) self.count = 0 if self.initialized: self.rate += (self.alpha * (instant_rate - self.rate)) else: self.rate = instant_rate self.initialized = True
[ "def", "tick", "(", "self", ")", ":", "instant_rate", "=", "self", ".", "count", "/", "float", "(", "self", ".", "tick_interval_s", ")", "self", ".", "count", "=", "0", "if", "self", ".", "initialized", ":", "self", ".", "rate", "+=", "(", "self", ...
Mark the passage of time and decay the current rate accordingly.
[ "Mark", "the", "passage", "of", "time", "and", "decay", "the", "current", "rate", "accordingly", "." ]
534966fd461ff711aecd1e3d4caaafdc23ac33f0
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/ewma.py#L66-L74
train
49,299