repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
openxc/openxc-python
openxc/controllers/base.py
Controller.write_translated
def write_translated(self, name, value, event=None): """Send a translated write request to the VI. """ data = {'name': name} if value is not None: data['value'] = self._massage_write_value(value) if event is not None: data['event'] = self._massage_write_value(event); message = self.streamer.serialize_for_stream(data) bytes_written = self.write_bytes(message) assert bytes_written == len(message) return bytes_written
python
def write_translated(self, name, value, event=None): """Send a translated write request to the VI. """ data = {'name': name} if value is not None: data['value'] = self._massage_write_value(value) if event is not None: data['event'] = self._massage_write_value(event); message = self.streamer.serialize_for_stream(data) bytes_written = self.write_bytes(message) assert bytes_written == len(message) return bytes_written
[ "def", "write_translated", "(", "self", ",", "name", ",", "value", ",", "event", "=", "None", ")", ":", "data", "=", "{", "'name'", ":", "name", "}", "if", "value", "is", "not", "None", ":", "data", "[", "'value'", "]", "=", "self", ".", "_massage_...
Send a translated write request to the VI.
[ "Send", "a", "translated", "write", "request", "to", "the", "VI", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/controllers/base.py#L401-L412
train
38,400
openxc/openxc-python
openxc/controllers/base.py
Controller.write_raw
def write_raw(self, message_id, data, bus=None, frame_format=None): """Send a raw write request to the VI. """ if not isinstance(message_id, numbers.Number): try: message_id = int(message_id, 0) except ValueError: raise ValueError("ID must be numerical") data = {'id': message_id, 'data': data} if bus is not None: data['bus'] = bus if frame_format is not None: data['frame_format'] = frame_format message = self.streamer.serialize_for_stream(data) bytes_written = self.write_bytes(message) assert bytes_written == len(message) return bytes_written
python
def write_raw(self, message_id, data, bus=None, frame_format=None): """Send a raw write request to the VI. """ if not isinstance(message_id, numbers.Number): try: message_id = int(message_id, 0) except ValueError: raise ValueError("ID must be numerical") data = {'id': message_id, 'data': data} if bus is not None: data['bus'] = bus if frame_format is not None: data['frame_format'] = frame_format message = self.streamer.serialize_for_stream(data) bytes_written = self.write_bytes(message) assert bytes_written == len(message) return bytes_written
[ "def", "write_raw", "(", "self", ",", "message_id", ",", "data", ",", "bus", "=", "None", ",", "frame_format", "=", "None", ")", ":", "if", "not", "isinstance", "(", "message_id", ",", "numbers", ".", "Number", ")", ":", "try", ":", "message_id", "=", ...
Send a raw write request to the VI.
[ "Send", "a", "raw", "write", "request", "to", "the", "VI", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/controllers/base.py#L414-L430
train
38,401
openxc/openxc-python
openxc/controllers/base.py
Controller._massage_write_value
def _massage_write_value(cls, value): """Convert string values from command-line arguments into first-order Python boolean and float objects, if applicable. """ if not isinstance(value, numbers.Number): if value == "true": value = True elif value == "false": value = False elif value[0] == '"' and value[-1] == '"': value = value[1:-1] else: try: value = float(value) except ValueError: pass return value
python
def _massage_write_value(cls, value): """Convert string values from command-line arguments into first-order Python boolean and float objects, if applicable. """ if not isinstance(value, numbers.Number): if value == "true": value = True elif value == "false": value = False elif value[0] == '"' and value[-1] == '"': value = value[1:-1] else: try: value = float(value) except ValueError: pass return value
[ "def", "_massage_write_value", "(", "cls", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "numbers", ".", "Number", ")", ":", "if", "value", "==", "\"true\"", ":", "value", "=", "True", "elif", "value", "==", "\"false\"", ":", "v...
Convert string values from command-line arguments into first-order Python boolean and float objects, if applicable.
[ "Convert", "string", "values", "from", "command", "-", "line", "arguments", "into", "first", "-", "order", "Python", "boolean", "and", "float", "objects", "if", "applicable", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/controllers/base.py#L440-L456
train
38,402
openxc/openxc-python
openxc/formats/json.py
JsonFormatter._validate
def _validate(cls, message): """Confirm the validitiy of a given dict as an OpenXC message. Returns: ``True`` if the message contains at least a ``name`` and ``value``. """ valid = False if(('name' in message and 'value' in message) or ('id' in message and 'data' in message)): valid = True return valid
python
def _validate(cls, message): """Confirm the validitiy of a given dict as an OpenXC message. Returns: ``True`` if the message contains at least a ``name`` and ``value``. """ valid = False if(('name' in message and 'value' in message) or ('id' in message and 'data' in message)): valid = True return valid
[ "def", "_validate", "(", "cls", ",", "message", ")", ":", "valid", "=", "False", "if", "(", "(", "'name'", "in", "message", "and", "'value'", "in", "message", ")", "or", "(", "'id'", "in", "message", "and", "'data'", "in", "message", ")", ")", ":", ...
Confirm the validitiy of a given dict as an OpenXC message. Returns: ``True`` if the message contains at least a ``name`` and ``value``.
[ "Confirm", "the", "validitiy", "of", "a", "given", "dict", "as", "an", "OpenXC", "message", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/formats/json.py#L42-L52
train
38,403
openxc/openxc-python
openxc/measurements.py
Measurement.value
def value(self, new_value): """Set the value of this measurement. Raises: AttributeError: if the new value isn't of the correct units. """ if self.unit != units.Undefined and new_value.unit != self.unit: raise AttributeError("%s must be in %s" % ( self.__class__, self.unit)) self._value = new_value
python
def value(self, new_value): """Set the value of this measurement. Raises: AttributeError: if the new value isn't of the correct units. """ if self.unit != units.Undefined and new_value.unit != self.unit: raise AttributeError("%s must be in %s" % ( self.__class__, self.unit)) self._value = new_value
[ "def", "value", "(", "self", ",", "new_value", ")", ":", "if", "self", ".", "unit", "!=", "units", ".", "Undefined", "and", "new_value", ".", "unit", "!=", "self", ".", "unit", ":", "raise", "AttributeError", "(", "\"%s must be in %s\"", "%", "(", "self"...
Set the value of this measurement. Raises: AttributeError: if the new value isn't of the correct units.
[ "Set", "the", "value", "of", "this", "measurement", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/measurements.py#L66-L75
train
38,404
openxc/openxc-python
openxc/measurements.py
Measurement.from_dict
def from_dict(cls, data): """Create a new Measurement subclass instance using the given dict. If Measurement.name_from_class was previously called with this data's associated Measurement sub-class in Python, the returned object will be an instance of that sub-class. If the measurement name in ``data`` is unrecognized, the returned object will be of the generic ``Measurement`` type. Args: data (dict): the data for the new measurement, including at least a name and value. """ args = [] if 'id' in data and 'data' in data: measurement_class = CanMessage args.append("Bus %s: 0x%x" % (data.get('bus', '?'), data['id'])) args.append(data['data']) # TODO grab bus else: measurement_class = cls._class_from_name(data['name']) if measurement_class == Measurement: args.append(data['name']) args.append(data['value']) return measurement_class(*args, event=data.get('event', None), override_unit=True)
python
def from_dict(cls, data): """Create a new Measurement subclass instance using the given dict. If Measurement.name_from_class was previously called with this data's associated Measurement sub-class in Python, the returned object will be an instance of that sub-class. If the measurement name in ``data`` is unrecognized, the returned object will be of the generic ``Measurement`` type. Args: data (dict): the data for the new measurement, including at least a name and value. """ args = [] if 'id' in data and 'data' in data: measurement_class = CanMessage args.append("Bus %s: 0x%x" % (data.get('bus', '?'), data['id'])) args.append(data['data']) # TODO grab bus else: measurement_class = cls._class_from_name(data['name']) if measurement_class == Measurement: args.append(data['name']) args.append(data['value']) return measurement_class(*args, event=data.get('event', None), override_unit=True)
[ "def", "from_dict", "(", "cls", ",", "data", ")", ":", "args", "=", "[", "]", "if", "'id'", "in", "data", "and", "'data'", "in", "data", ":", "measurement_class", "=", "CanMessage", "args", ".", "append", "(", "\"Bus %s: 0x%x\"", "%", "(", "data", ".",...
Create a new Measurement subclass instance using the given dict. If Measurement.name_from_class was previously called with this data's associated Measurement sub-class in Python, the returned object will be an instance of that sub-class. If the measurement name in ``data`` is unrecognized, the returned object will be of the generic ``Measurement`` type. Args: data (dict): the data for the new measurement, including at least a name and value.
[ "Create", "a", "new", "Measurement", "subclass", "instance", "using", "the", "given", "dict", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/measurements.py#L78-L106
train
38,405
openxc/openxc-python
openxc/measurements.py
Measurement.name_from_class
def name_from_class(cls, measurement_class): """For a given measurement class, return its generic name. The given class is expected to have a ``name`` attribute, otherwise this function will raise an execption. The point of using this method instead of just trying to grab that attribute in the application is to cache measurement name to class mappings for future use. Returns: the generic OpenXC name for a measurement class. Raise: UnrecognizedMeasurementError: if the class does not have a valid generic name """ if not getattr(cls, '_measurements_initialized', False): cls._measurement_map = dict((m.name, m) for m in all_measurements()) cls._measurements_initialized = True try: name = getattr(measurement_class, 'name') except AttributeError: raise UnrecognizedMeasurementError("No 'name' attribute in %s" % measurement_class) else: cls._measurement_map[name] = measurement_class return name
python
def name_from_class(cls, measurement_class): """For a given measurement class, return its generic name. The given class is expected to have a ``name`` attribute, otherwise this function will raise an execption. The point of using this method instead of just trying to grab that attribute in the application is to cache measurement name to class mappings for future use. Returns: the generic OpenXC name for a measurement class. Raise: UnrecognizedMeasurementError: if the class does not have a valid generic name """ if not getattr(cls, '_measurements_initialized', False): cls._measurement_map = dict((m.name, m) for m in all_measurements()) cls._measurements_initialized = True try: name = getattr(measurement_class, 'name') except AttributeError: raise UnrecognizedMeasurementError("No 'name' attribute in %s" % measurement_class) else: cls._measurement_map[name] = measurement_class return name
[ "def", "name_from_class", "(", "cls", ",", "measurement_class", ")", ":", "if", "not", "getattr", "(", "cls", ",", "'_measurements_initialized'", ",", "False", ")", ":", "cls", ".", "_measurement_map", "=", "dict", "(", "(", "m", ".", "name", ",", "m", "...
For a given measurement class, return its generic name. The given class is expected to have a ``name`` attribute, otherwise this function will raise an execption. The point of using this method instead of just trying to grab that attribute in the application is to cache measurement name to class mappings for future use. Returns: the generic OpenXC name for a measurement class. Raise: UnrecognizedMeasurementError: if the class does not have a valid generic name
[ "For", "a", "given", "measurement", "class", "return", "its", "generic", "name", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/measurements.py#L109-L135
train
38,406
openxc/openxc-python
openxc/sources/trace.py
TraceDataSource._store_timestamp
def _store_timestamp(self, timestamp): """If not already saved, cache the first timestamp in the active trace file on the instance. """ if getattr(self, 'first_timestamp', None) is None: self.first_timestamp = timestamp LOG.debug("Storing %d as the first timestamp of the trace file %s", self.first_timestamp, self.filename)
python
def _store_timestamp(self, timestamp): """If not already saved, cache the first timestamp in the active trace file on the instance. """ if getattr(self, 'first_timestamp', None) is None: self.first_timestamp = timestamp LOG.debug("Storing %d as the first timestamp of the trace file %s", self.first_timestamp, self.filename)
[ "def", "_store_timestamp", "(", "self", ",", "timestamp", ")", ":", "if", "getattr", "(", "self", ",", "'first_timestamp'", ",", "None", ")", "is", "None", ":", "self", ".", "first_timestamp", "=", "timestamp", "LOG", ".", "debug", "(", "\"Storing %d as the ...
If not already saved, cache the first timestamp in the active trace file on the instance.
[ "If", "not", "already", "saved", "cache", "the", "first", "timestamp", "in", "the", "active", "trace", "file", "on", "the", "instance", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/sources/trace.py#L45-L52
train
38,407
openxc/openxc-python
openxc/sources/trace.py
TraceDataSource.read
def read(self): """Read a line of data from the input source at a time.""" line = self.trace_file.readline() if line == '': if self.loop: self._reopen_file() else: self.trace_file.close() self.trace_file = None raise DataSourceError() message = JsonFormatter.deserialize(line) timestamp = message.get('timestamp', None) if self.realtime and timestamp is not None: self._store_timestamp(timestamp) self._wait(self.starting_time, self.first_timestamp, timestamp) return line + "\x00"
python
def read(self): """Read a line of data from the input source at a time.""" line = self.trace_file.readline() if line == '': if self.loop: self._reopen_file() else: self.trace_file.close() self.trace_file = None raise DataSourceError() message = JsonFormatter.deserialize(line) timestamp = message.get('timestamp', None) if self.realtime and timestamp is not None: self._store_timestamp(timestamp) self._wait(self.starting_time, self.first_timestamp, timestamp) return line + "\x00"
[ "def", "read", "(", "self", ")", ":", "line", "=", "self", ".", "trace_file", ".", "readline", "(", ")", "if", "line", "==", "''", ":", "if", "self", ".", "loop", ":", "self", ".", "_reopen_file", "(", ")", "else", ":", "self", ".", "trace_file", ...
Read a line of data from the input source at a time.
[ "Read", "a", "line", "of", "data", "from", "the", "input", "source", "at", "a", "time", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/sources/trace.py#L54-L70
train
38,408
openxc/openxc-python
openxc/sources/trace.py
TraceDataSource._open_file
def _open_file(filename): """Attempt to open the the file at ``filename`` for reading. Raises: DataSourceError, if the file cannot be opened. """ if filename is None: raise DataSourceError("Trace filename is not defined") try: trace_file = open(filename, "r") except IOError as e: raise DataSourceError("Unable to open trace file %s" % filename, e) else: LOG.debug("Opened trace file %s", filename) return trace_file
python
def _open_file(filename): """Attempt to open the the file at ``filename`` for reading. Raises: DataSourceError, if the file cannot be opened. """ if filename is None: raise DataSourceError("Trace filename is not defined") try: trace_file = open(filename, "r") except IOError as e: raise DataSourceError("Unable to open trace file %s" % filename, e) else: LOG.debug("Opened trace file %s", filename) return trace_file
[ "def", "_open_file", "(", "filename", ")", ":", "if", "filename", "is", "None", ":", "raise", "DataSourceError", "(", "\"Trace filename is not defined\"", ")", "try", ":", "trace_file", "=", "open", "(", "filename", ",", "\"r\"", ")", "except", "IOError", "as"...
Attempt to open the the file at ``filename`` for reading. Raises: DataSourceError, if the file cannot be opened.
[ "Attempt", "to", "open", "the", "the", "file", "at", "filename", "for", "reading", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/sources/trace.py#L73-L88
train
38,409
openxc/openxc-python
openxc/sources/trace.py
TraceDataSource._wait
def _wait(starting_time, first_timestamp, timestamp): """Given that the first timestamp in the trace file is ``first_timestamp`` and we started playing back the file at ``starting_time``, block until the current ``timestamp`` should occur. """ target_time = starting_time + (timestamp - first_timestamp) time.sleep(max(target_time - time.time(), 0))
python
def _wait(starting_time, first_timestamp, timestamp): """Given that the first timestamp in the trace file is ``first_timestamp`` and we started playing back the file at ``starting_time``, block until the current ``timestamp`` should occur. """ target_time = starting_time + (timestamp - first_timestamp) time.sleep(max(target_time - time.time(), 0))
[ "def", "_wait", "(", "starting_time", ",", "first_timestamp", ",", "timestamp", ")", ":", "target_time", "=", "starting_time", "+", "(", "timestamp", "-", "first_timestamp", ")", "time", ".", "sleep", "(", "max", "(", "target_time", "-", "time", ".", "time",...
Given that the first timestamp in the trace file is ``first_timestamp`` and we started playing back the file at ``starting_time``, block until the current ``timestamp`` should occur.
[ "Given", "that", "the", "first", "timestamp", "in", "the", "trace", "file", "is", "first_timestamp", "and", "we", "started", "playing", "back", "the", "file", "at", "starting_time", "block", "until", "the", "current", "timestamp", "should", "occur", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/sources/trace.py#L91-L97
train
38,410
openxc/openxc-python
openxc/generator/xml_to_json.py
XMLBackedSignal.from_xml_node
def from_xml_node(cls, node): """Construct a Signal instance from an XML node exported from a Vector CANoe .dbc file.""" return cls(name=node.find("Name").text, bit_position=int(node.find("Bitposition").text), bit_size=int(node.find("Bitsize").text), factor=float(node.find("Factor").text), offset=float(node.find("Offset").text), min_value=float(node.find("Minimum").text), max_value=float(node.find("Maximum").text))
python
def from_xml_node(cls, node): """Construct a Signal instance from an XML node exported from a Vector CANoe .dbc file.""" return cls(name=node.find("Name").text, bit_position=int(node.find("Bitposition").text), bit_size=int(node.find("Bitsize").text), factor=float(node.find("Factor").text), offset=float(node.find("Offset").text), min_value=float(node.find("Minimum").text), max_value=float(node.find("Maximum").text))
[ "def", "from_xml_node", "(", "cls", ",", "node", ")", ":", "return", "cls", "(", "name", "=", "node", ".", "find", "(", "\"Name\"", ")", ".", "text", ",", "bit_position", "=", "int", "(", "node", ".", "find", "(", "\"Bitposition\"", ")", ".", "text",...
Construct a Signal instance from an XML node exported from a Vector CANoe .dbc file.
[ "Construct", "a", "Signal", "instance", "from", "an", "XML", "node", "exported", "from", "a", "Vector", "CANoe", ".", "dbc", "file", "." ]
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/generator/xml_to_json.py#L62-L71
train
38,411
openxc/openxc-python
openxc/sources/base.py
SourceLogger.run
def run(self): """Continuously read data from the source and attempt to parse a valid message from the buffer of bytes. When a message is parsed, passes it off to the callback if one is set. """ message_buffer = b"" while self.running: try: message_buffer += self.source.read_logs() except DataSourceError as e: if self.running: LOG.warn("Can't read logs from data source -- stopping: %s", e) break except NotImplementedError as e: LOG.info("%s doesn't support logging" % self) break while True: if "\x00" not in message_buffer: break record, _, remainder = message_buffer.partition(b"\x00") self.record(record) message_buffer = remainder
python
def run(self): """Continuously read data from the source and attempt to parse a valid message from the buffer of bytes. When a message is parsed, passes it off to the callback if one is set. """ message_buffer = b"" while self.running: try: message_buffer += self.source.read_logs() except DataSourceError as e: if self.running: LOG.warn("Can't read logs from data source -- stopping: %s", e) break except NotImplementedError as e: LOG.info("%s doesn't support logging" % self) break while True: if "\x00" not in message_buffer: break record, _, remainder = message_buffer.partition(b"\x00") self.record(record) message_buffer = remainder
[ "def", "run", "(", "self", ")", ":", "message_buffer", "=", "b\"\"", "while", "self", ".", "running", ":", "try", ":", "message_buffer", "+=", "self", ".", "source", ".", "read_logs", "(", ")", "except", "DataSourceError", "as", "e", ":", "if", "self", ...
Continuously read data from the source and attempt to parse a valid message from the buffer of bytes. When a message is parsed, passes it off to the callback if one is set.
[ "Continuously", "read", "data", "from", "the", "source", "and", "attempt", "to", "parse", "a", "valid", "message", "from", "the", "buffer", "of", "bytes", ".", "When", "a", "message", "is", "parsed", "passes", "it", "off", "to", "the", "callback", "if", ...
4becb4a6310bd658c125195ef6ffea4deaf7d7e7
https://github.com/openxc/openxc-python/blob/4becb4a6310bd658c125195ef6ffea4deaf7d7e7/openxc/sources/base.py#L141-L163
train
38,412
dfm/celerite
celerite/celerite.py
GP.compute
def compute(self, t, yerr=1.123e-12, check_sorted=True, A=None, U=None, V=None): """ Compute the extended form of the covariance matrix and factorize Args: x (array[n]): The independent coordinates of the data points. This array must be _sorted_ in ascending order. yerr (Optional[float or array[n]]): The measurement uncertainties for the data points at coordinates ``x``. These values will be added in quadrature to the diagonal of the covariance matrix. (default: ``1.123e-12``) check_sorted (bool): If ``True``, ``x`` will be checked to make sure that it is properly sorted. If ``False``, the coordinates will be assumed to be in the correct order. Raises: ValueError: For un-sorted data or mismatched dimensions. solver.LinAlgError: For non-positive definite matrices. """ t = np.atleast_1d(t) if check_sorted and np.any(np.diff(t) < 0.0): raise ValueError("the input coordinates must be sorted") if check_sorted and len(t.shape) > 1: raise ValueError("dimension mismatch") self._t = t self._yerr = np.empty_like(self._t) self._yerr[:] = yerr (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.kernel.coefficients self._A = np.empty(0) if A is None else A self._U = np.empty((0, 0)) if U is None else U self._V = np.empty((0, 0)) if V is None else V self.solver.compute( self.kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, self._A, self._U, self._V, t, self._yerr**2 ) self.dirty = False
python
def compute(self, t, yerr=1.123e-12, check_sorted=True, A=None, U=None, V=None): """ Compute the extended form of the covariance matrix and factorize Args: x (array[n]): The independent coordinates of the data points. This array must be _sorted_ in ascending order. yerr (Optional[float or array[n]]): The measurement uncertainties for the data points at coordinates ``x``. These values will be added in quadrature to the diagonal of the covariance matrix. (default: ``1.123e-12``) check_sorted (bool): If ``True``, ``x`` will be checked to make sure that it is properly sorted. If ``False``, the coordinates will be assumed to be in the correct order. Raises: ValueError: For un-sorted data or mismatched dimensions. solver.LinAlgError: For non-positive definite matrices. """ t = np.atleast_1d(t) if check_sorted and np.any(np.diff(t) < 0.0): raise ValueError("the input coordinates must be sorted") if check_sorted and len(t.shape) > 1: raise ValueError("dimension mismatch") self._t = t self._yerr = np.empty_like(self._t) self._yerr[:] = yerr (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.kernel.coefficients self._A = np.empty(0) if A is None else A self._U = np.empty((0, 0)) if U is None else U self._V = np.empty((0, 0)) if V is None else V self.solver.compute( self.kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, self._A, self._U, self._V, t, self._yerr**2 ) self.dirty = False
[ "def", "compute", "(", "self", ",", "t", ",", "yerr", "=", "1.123e-12", ",", "check_sorted", "=", "True", ",", "A", "=", "None", ",", "U", "=", "None", ",", "V", "=", "None", ")", ":", "t", "=", "np", ".", "atleast_1d", "(", "t", ")", "if", "...
Compute the extended form of the covariance matrix and factorize Args: x (array[n]): The independent coordinates of the data points. This array must be _sorted_ in ascending order. yerr (Optional[float or array[n]]): The measurement uncertainties for the data points at coordinates ``x``. These values will be added in quadrature to the diagonal of the covariance matrix. (default: ``1.123e-12``) check_sorted (bool): If ``True``, ``x`` will be checked to make sure that it is properly sorted. If ``False``, the coordinates will be assumed to be in the correct order. Raises: ValueError: For un-sorted data or mismatched dimensions. solver.LinAlgError: For non-positive definite matrices.
[ "Compute", "the", "extended", "form", "of", "the", "covariance", "matrix", "and", "factorize" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L97-L139
train
38,413
dfm/celerite
celerite/celerite.py
GP.log_likelihood
def log_likelihood(self, y, _const=math.log(2.0*math.pi), quiet=False): """ Compute the marginalized likelihood of the GP model The factorized matrix from the previous call to :func:`GP.compute` is used so ``compute`` must be called first. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. quiet (bool): If true, return ``-numpy.inf`` for non-positive definite matrices instead of throwing an error. Returns: float: The marginalized likelihood of the GP model. Raises: ValueError: For mismatched dimensions. solver.LinAlgError: For non-positive definite matrices. """ y = self._process_input(y) resid = y - self.mean.get_value(self._t) try: self._recompute() except solver.LinAlgError: if quiet: return -np.inf raise if len(y.shape) > 1: raise ValueError("dimension mismatch") logdet = self.solver.log_determinant() if not np.isfinite(logdet): return -np.inf loglike = -0.5*(self.solver.dot_solve(resid)+logdet+len(y)*_const) if not np.isfinite(loglike): return -np.inf return loglike
python
def log_likelihood(self, y, _const=math.log(2.0*math.pi), quiet=False): """ Compute the marginalized likelihood of the GP model The factorized matrix from the previous call to :func:`GP.compute` is used so ``compute`` must be called first. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. quiet (bool): If true, return ``-numpy.inf`` for non-positive definite matrices instead of throwing an error. Returns: float: The marginalized likelihood of the GP model. Raises: ValueError: For mismatched dimensions. solver.LinAlgError: For non-positive definite matrices. """ y = self._process_input(y) resid = y - self.mean.get_value(self._t) try: self._recompute() except solver.LinAlgError: if quiet: return -np.inf raise if len(y.shape) > 1: raise ValueError("dimension mismatch") logdet = self.solver.log_determinant() if not np.isfinite(logdet): return -np.inf loglike = -0.5*(self.solver.dot_solve(resid)+logdet+len(y)*_const) if not np.isfinite(loglike): return -np.inf return loglike
[ "def", "log_likelihood", "(", "self", ",", "y", ",", "_const", "=", "math", ".", "log", "(", "2.0", "*", "math", ".", "pi", ")", ",", "quiet", "=", "False", ")", ":", "y", "=", "self", ".", "_process_input", "(", "y", ")", "resid", "=", "y", "-...
Compute the marginalized likelihood of the GP model The factorized matrix from the previous call to :func:`GP.compute` is used so ``compute`` must be called first. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. quiet (bool): If true, return ``-numpy.inf`` for non-positive definite matrices instead of throwing an error. Returns: float: The marginalized likelihood of the GP model. Raises: ValueError: For mismatched dimensions. solver.LinAlgError: For non-positive definite matrices.
[ "Compute", "the", "marginalized", "likelihood", "of", "the", "GP", "model" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L155-L192
train
38,414
dfm/celerite
celerite/celerite.py
GP.grad_log_likelihood
def grad_log_likelihood(self, y, quiet=False): """ Compute the gradient of the marginalized likelihood The factorized matrix from the previous call to :func:`GP.compute` is used so ``compute`` must be called first. The gradient is taken with respect to the parameters returned by :func:`GP.get_parameter_vector`. This function requires the `autograd <https://github.com/HIPS/autograd>`_ package. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. quiet (bool): If true, return ``-numpy.inf`` and a gradient vector of zeros for non-positive definite matrices instead of throwing an error. Returns: The gradient of marginalized likelihood with respect to the parameter vector. Raises: ValueError: For mismatched dimensions. solver.LinAlgError: For non-positive definite matrices. """ if not solver.has_autodiff(): raise RuntimeError("celerite must be compiled with autodiff " "support to use the gradient methods") if not self.kernel.vector_size: return self.log_likelihood(y, quiet=quiet), np.empty(0) y = self._process_input(y) if len(y.shape) > 1: raise ValueError("dimension mismatch") resid = y - self.mean.get_value(self._t) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.kernel.coefficients try: val, grad = self.solver.grad_log_likelihood( self.kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, self._A, self._U, self._V, self._t, resid, self._yerr**2 ) except solver.LinAlgError: if quiet: return -np.inf, np.zeros(self.vector_size) raise if self.kernel._has_coeffs: coeffs_jac = self.kernel.get_coeffs_jacobian() full_grad = np.dot(coeffs_jac, grad[1:]) else: full_grad = np.zeros(self.kernel.vector_size) if self.kernel._has_jitter: jitter_jac = self.kernel.get_jitter_jacobian() full_grad += jitter_jac * grad[0] if self.mean.vector_size: self._recompute() alpha = self.solver.solve(resid) g = self.mean.get_gradient(self._t) 
full_grad = np.append(full_grad, np.dot(g, alpha)) return val, full_grad
python
def grad_log_likelihood(self, y, quiet=False): """ Compute the gradient of the marginalized likelihood The factorized matrix from the previous call to :func:`GP.compute` is used so ``compute`` must be called first. The gradient is taken with respect to the parameters returned by :func:`GP.get_parameter_vector`. This function requires the `autograd <https://github.com/HIPS/autograd>`_ package. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. quiet (bool): If true, return ``-numpy.inf`` and a gradient vector of zeros for non-positive definite matrices instead of throwing an error. Returns: The gradient of marginalized likelihood with respect to the parameter vector. Raises: ValueError: For mismatched dimensions. solver.LinAlgError: For non-positive definite matrices. """ if not solver.has_autodiff(): raise RuntimeError("celerite must be compiled with autodiff " "support to use the gradient methods") if not self.kernel.vector_size: return self.log_likelihood(y, quiet=quiet), np.empty(0) y = self._process_input(y) if len(y.shape) > 1: raise ValueError("dimension mismatch") resid = y - self.mean.get_value(self._t) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.kernel.coefficients try: val, grad = self.solver.grad_log_likelihood( self.kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, self._A, self._U, self._V, self._t, resid, self._yerr**2 ) except solver.LinAlgError: if quiet: return -np.inf, np.zeros(self.vector_size) raise if self.kernel._has_coeffs: coeffs_jac = self.kernel.get_coeffs_jacobian() full_grad = np.dot(coeffs_jac, grad[1:]) else: full_grad = np.zeros(self.kernel.vector_size) if self.kernel._has_jitter: jitter_jac = self.kernel.get_jitter_jacobian() full_grad += jitter_jac * grad[0] if self.mean.vector_size: self._recompute() alpha = self.solver.solve(resid) g = self.mean.get_gradient(self._t) 
full_grad = np.append(full_grad, np.dot(g, alpha)) return val, full_grad
[ "def", "grad_log_likelihood", "(", "self", ",", "y", ",", "quiet", "=", "False", ")", ":", "if", "not", "solver", ".", "has_autodiff", "(", ")", ":", "raise", "RuntimeError", "(", "\"celerite must be compiled with autodiff \"", "\"support to use the gradient methods\"...
Compute the gradient of the marginalized likelihood The factorized matrix from the previous call to :func:`GP.compute` is used so ``compute`` must be called first. The gradient is taken with respect to the parameters returned by :func:`GP.get_parameter_vector`. This function requires the `autograd <https://github.com/HIPS/autograd>`_ package. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. quiet (bool): If true, return ``-numpy.inf`` and a gradient vector of zeros for non-positive definite matrices instead of throwing an error. Returns: The gradient of marginalized likelihood with respect to the parameter vector. Raises: ValueError: For mismatched dimensions. solver.LinAlgError: For non-positive definite matrices.
[ "Compute", "the", "gradient", "of", "the", "marginalized", "likelihood" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L194-L263
train
38,415
dfm/celerite
celerite/celerite.py
GP.apply_inverse
def apply_inverse(self, y): """ Apply the inverse of the covariance matrix to a vector or matrix Solve ``K.x = y`` for ``x`` where ``K`` is the covariance matrix of the GP with the white noise and ``yerr`` components included on the diagonal. Args: y (array[n] or array[n, nrhs]): The vector or matrix ``y`` described above. Returns: array[n] or array[n, nrhs]: The solution to the linear system. This will have the same shape as ``y``. Raises: ValueError: For mismatched dimensions. """ self._recompute() return self.solver.solve(self._process_input(y))
python
def apply_inverse(self, y): """ Apply the inverse of the covariance matrix to a vector or matrix Solve ``K.x = y`` for ``x`` where ``K`` is the covariance matrix of the GP with the white noise and ``yerr`` components included on the diagonal. Args: y (array[n] or array[n, nrhs]): The vector or matrix ``y`` described above. Returns: array[n] or array[n, nrhs]: The solution to the linear system. This will have the same shape as ``y``. Raises: ValueError: For mismatched dimensions. """ self._recompute() return self.solver.solve(self._process_input(y))
[ "def", "apply_inverse", "(", "self", ",", "y", ")", ":", "self", ".", "_recompute", "(", ")", "return", "self", ".", "solver", ".", "solve", "(", "self", ".", "_process_input", "(", "y", ")", ")" ]
Apply the inverse of the covariance matrix to a vector or matrix Solve ``K.x = y`` for ``x`` where ``K`` is the covariance matrix of the GP with the white noise and ``yerr`` components included on the diagonal. Args: y (array[n] or array[n, nrhs]): The vector or matrix ``y`` described above. Returns: array[n] or array[n, nrhs]: The solution to the linear system. This will have the same shape as ``y``. Raises: ValueError: For mismatched dimensions.
[ "Apply", "the", "inverse", "of", "the", "covariance", "matrix", "to", "a", "vector", "or", "matrix" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L265-L286
train
38,416
dfm/celerite
celerite/celerite.py
GP.dot
def dot(self, y, t=None, A=None, U=None, V=None, kernel=None, check_sorted=True): """ Dot the covariance matrix into a vector or matrix Compute ``K.y`` where ``K`` is the covariance matrix of the GP without the white noise or ``yerr`` values on the diagonal. Args: y (array[n] or array[n, nrhs]): The vector or matrix ``y`` described above. kernel (Optional[terms.Term]): A different kernel can optionally be provided to compute the matrix ``K`` from a different kernel than the ``kernel`` property on this object. Returns: array[n] or array[n, nrhs]: The dot product ``K.y`` as described above. This will have the same shape as ``y``. Raises: ValueError: For mismatched dimensions. """ if kernel is None: kernel = self.kernel if t is not None: t = np.atleast_1d(t) if check_sorted and np.any(np.diff(t) < 0.0): raise ValueError("the input coordinates must be sorted") if check_sorted and len(t.shape) > 1: raise ValueError("dimension mismatch") A = np.empty(0) if A is None else A U = np.empty((0, 0)) if U is None else U V = np.empty((0, 0)) if V is None else V else: if not self.computed: raise RuntimeError("you must call 'compute' first") t = self._t A = self._A U = self._U V = self._V (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = kernel.coefficients return self.solver.dot( kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, A, U, V, t, np.ascontiguousarray(y, dtype=float) )
python
def dot(self, y, t=None, A=None, U=None, V=None, kernel=None, check_sorted=True): """ Dot the covariance matrix into a vector or matrix Compute ``K.y`` where ``K`` is the covariance matrix of the GP without the white noise or ``yerr`` values on the diagonal. Args: y (array[n] or array[n, nrhs]): The vector or matrix ``y`` described above. kernel (Optional[terms.Term]): A different kernel can optionally be provided to compute the matrix ``K`` from a different kernel than the ``kernel`` property on this object. Returns: array[n] or array[n, nrhs]: The dot product ``K.y`` as described above. This will have the same shape as ``y``. Raises: ValueError: For mismatched dimensions. """ if kernel is None: kernel = self.kernel if t is not None: t = np.atleast_1d(t) if check_sorted and np.any(np.diff(t) < 0.0): raise ValueError("the input coordinates must be sorted") if check_sorted and len(t.shape) > 1: raise ValueError("dimension mismatch") A = np.empty(0) if A is None else A U = np.empty((0, 0)) if U is None else U V = np.empty((0, 0)) if V is None else V else: if not self.computed: raise RuntimeError("you must call 'compute' first") t = self._t A = self._A U = self._U V = self._V (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = kernel.coefficients return self.solver.dot( kernel.jitter, alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, A, U, V, t, np.ascontiguousarray(y, dtype=float) )
[ "def", "dot", "(", "self", ",", "y", ",", "t", "=", "None", ",", "A", "=", "None", ",", "U", "=", "None", ",", "V", "=", "None", ",", "kernel", "=", "None", ",", "check_sorted", "=", "True", ")", ":", "if", "kernel", "is", "None", ":", "kerne...
Dot the covariance matrix into a vector or matrix Compute ``K.y`` where ``K`` is the covariance matrix of the GP without the white noise or ``yerr`` values on the diagonal. Args: y (array[n] or array[n, nrhs]): The vector or matrix ``y`` described above. kernel (Optional[terms.Term]): A different kernel can optionally be provided to compute the matrix ``K`` from a different kernel than the ``kernel`` property on this object. Returns: array[n] or array[n, nrhs]: The dot product ``K.y`` as described above. This will have the same shape as ``y``. Raises: ValueError: For mismatched dimensions.
[ "Dot", "the", "covariance", "matrix", "into", "a", "vector", "or", "matrix" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L288-L341
train
38,417
dfm/celerite
celerite/celerite.py
GP.predict
def predict(self, y, t=None, return_cov=True, return_var=False): """ Compute the conditional predictive distribution of the model You must call :func:`GP.compute` before this method. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. t (Optional[array[ntest]]): The independent coordinates where the prediction should be made. If this is omitted the coordinates will be assumed to be ``x`` from :func:`GP.compute` and an efficient method will be used to compute the prediction. return_cov (Optional[bool]): If ``True``, the full covariance matrix is computed and returned. Otherwise, only the mean prediction is computed. (default: ``True``) return_var (Optional[bool]): If ``True``, only return the diagonal of the predictive covariance; this will be faster to compute than the full covariance matrix. This overrides ``return_cov`` so, if both are set to ``True``, only the diagonal is computed. (default: ``False``) Returns: ``mu``, ``(mu, cov)``, or ``(mu, var)`` depending on the values of ``return_cov`` and ``return_var``. These output values are: (a) **mu** ``(ntest,)``: mean of the predictive distribution, (b) **cov** ``(ntest, ntest)``: the predictive covariance matrix, and (c) **var** ``(ntest,)``: the diagonal elements of ``cov``. Raises: ValueError: For mismatched dimensions. """ y = self._process_input(y) if len(y.shape) > 1: raise ValueError("dimension mismatch") if t is None: xs = self._t else: xs = np.ascontiguousarray(t, dtype=float) if len(xs.shape) > 1: raise ValueError("dimension mismatch") # Make sure that the model is computed self._recompute() # Compute the predictive mean. 
resid = y - self.mean.get_value(self._t) if t is None: alpha = self.solver.solve(resid).flatten() alpha = resid - (self._yerr**2 + self.kernel.jitter) * alpha elif not len(self._A): alpha = self.solver.predict(resid, xs) else: Kxs = self.get_matrix(xs, self._t) alpha = np.dot(Kxs, alpha) mu = self.mean.get_value(xs) + alpha if not (return_var or return_cov): return mu # Predictive variance. Kxs = self.get_matrix(xs, self._t) KxsT = np.ascontiguousarray(Kxs.T, dtype=np.float64) if return_var: var = -np.sum(KxsT*self.apply_inverse(KxsT), axis=0) var += self.kernel.get_value(0.0) return mu, var # Predictive covariance cov = self.kernel.get_value(xs[:, None] - xs[None, :]) cov -= np.dot(Kxs, self.apply_inverse(KxsT)) return mu, cov
python
def predict(self, y, t=None, return_cov=True, return_var=False): """ Compute the conditional predictive distribution of the model You must call :func:`GP.compute` before this method. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. t (Optional[array[ntest]]): The independent coordinates where the prediction should be made. If this is omitted the coordinates will be assumed to be ``x`` from :func:`GP.compute` and an efficient method will be used to compute the prediction. return_cov (Optional[bool]): If ``True``, the full covariance matrix is computed and returned. Otherwise, only the mean prediction is computed. (default: ``True``) return_var (Optional[bool]): If ``True``, only return the diagonal of the predictive covariance; this will be faster to compute than the full covariance matrix. This overrides ``return_cov`` so, if both are set to ``True``, only the diagonal is computed. (default: ``False``) Returns: ``mu``, ``(mu, cov)``, or ``(mu, var)`` depending on the values of ``return_cov`` and ``return_var``. These output values are: (a) **mu** ``(ntest,)``: mean of the predictive distribution, (b) **cov** ``(ntest, ntest)``: the predictive covariance matrix, and (c) **var** ``(ntest,)``: the diagonal elements of ``cov``. Raises: ValueError: For mismatched dimensions. """ y = self._process_input(y) if len(y.shape) > 1: raise ValueError("dimension mismatch") if t is None: xs = self._t else: xs = np.ascontiguousarray(t, dtype=float) if len(xs.shape) > 1: raise ValueError("dimension mismatch") # Make sure that the model is computed self._recompute() # Compute the predictive mean. 
resid = y - self.mean.get_value(self._t) if t is None: alpha = self.solver.solve(resid).flatten() alpha = resid - (self._yerr**2 + self.kernel.jitter) * alpha elif not len(self._A): alpha = self.solver.predict(resid, xs) else: Kxs = self.get_matrix(xs, self._t) alpha = np.dot(Kxs, alpha) mu = self.mean.get_value(xs) + alpha if not (return_var or return_cov): return mu # Predictive variance. Kxs = self.get_matrix(xs, self._t) KxsT = np.ascontiguousarray(Kxs.T, dtype=np.float64) if return_var: var = -np.sum(KxsT*self.apply_inverse(KxsT), axis=0) var += self.kernel.get_value(0.0) return mu, var # Predictive covariance cov = self.kernel.get_value(xs[:, None] - xs[None, :]) cov -= np.dot(Kxs, self.apply_inverse(KxsT)) return mu, cov
[ "def", "predict", "(", "self", ",", "y", ",", "t", "=", "None", ",", "return_cov", "=", "True", ",", "return_var", "=", "False", ")", ":", "y", "=", "self", ".", "_process_input", "(", "y", ")", "if", "len", "(", "y", ".", "shape", ")", ">", "1...
Compute the conditional predictive distribution of the model You must call :func:`GP.compute` before this method. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. t (Optional[array[ntest]]): The independent coordinates where the prediction should be made. If this is omitted the coordinates will be assumed to be ``x`` from :func:`GP.compute` and an efficient method will be used to compute the prediction. return_cov (Optional[bool]): If ``True``, the full covariance matrix is computed and returned. Otherwise, only the mean prediction is computed. (default: ``True``) return_var (Optional[bool]): If ``True``, only return the diagonal of the predictive covariance; this will be faster to compute than the full covariance matrix. This overrides ``return_cov`` so, if both are set to ``True``, only the diagonal is computed. (default: ``False``) Returns: ``mu``, ``(mu, cov)``, or ``(mu, var)`` depending on the values of ``return_cov`` and ``return_var``. These output values are: (a) **mu** ``(ntest,)``: mean of the predictive distribution, (b) **cov** ``(ntest, ntest)``: the predictive covariance matrix, and (c) **var** ``(ntest,)``: the diagonal elements of ``cov``. Raises: ValueError: For mismatched dimensions.
[ "Compute", "the", "conditional", "predictive", "distribution", "of", "the", "model" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L343-L418
train
38,418
dfm/celerite
celerite/celerite.py
GP.get_matrix
def get_matrix(self, x1=None, x2=None, include_diagonal=None, include_general=None): """ Get the covariance matrix at given independent coordinates Args: x1 (Optional[array[n1]]): The first set of independent coordinates. If this is omitted, ``x1`` will be assumed to be equal to ``x`` from a previous call to :func:`GP.compute`. x2 (Optional[array[n2]]): The second set of independent coordinates. If this is omitted, ``x2`` will be assumed to be ``x1``. include_diagonal (Optional[bool]): Should the white noise and ``yerr`` terms be included on the diagonal? (default: ``False``) """ if x1 is None and x2 is None: if self._t is None or not self.computed: raise RuntimeError("you must call 'compute' first") K = self.kernel.get_value(self._t[:, None] - self._t[None, :]) if include_diagonal is None or include_diagonal: K[np.diag_indices_from(K)] += ( self._yerr**2 + self.kernel.jitter ) if (include_general is None or include_general) and len(self._A): K[np.diag_indices_from(K)] += self._A K += np.tril(np.dot(self._U.T, self._V), -1) K += np.triu(np.dot(self._V.T, self._U), 1) return K incl = False x1 = np.ascontiguousarray(x1, dtype=float) if x2 is None: x2 = x1 incl = include_diagonal is not None and include_diagonal K = self.kernel.get_value(x1[:, None] - x2[None, :]) if incl: K[np.diag_indices_from(K)] += self.kernel.jitter return K
python
def get_matrix(self, x1=None, x2=None, include_diagonal=None, include_general=None): """ Get the covariance matrix at given independent coordinates Args: x1 (Optional[array[n1]]): The first set of independent coordinates. If this is omitted, ``x1`` will be assumed to be equal to ``x`` from a previous call to :func:`GP.compute`. x2 (Optional[array[n2]]): The second set of independent coordinates. If this is omitted, ``x2`` will be assumed to be ``x1``. include_diagonal (Optional[bool]): Should the white noise and ``yerr`` terms be included on the diagonal? (default: ``False``) """ if x1 is None and x2 is None: if self._t is None or not self.computed: raise RuntimeError("you must call 'compute' first") K = self.kernel.get_value(self._t[:, None] - self._t[None, :]) if include_diagonal is None or include_diagonal: K[np.diag_indices_from(K)] += ( self._yerr**2 + self.kernel.jitter ) if (include_general is None or include_general) and len(self._A): K[np.diag_indices_from(K)] += self._A K += np.tril(np.dot(self._U.T, self._V), -1) K += np.triu(np.dot(self._V.T, self._U), 1) return K incl = False x1 = np.ascontiguousarray(x1, dtype=float) if x2 is None: x2 = x1 incl = include_diagonal is not None and include_diagonal K = self.kernel.get_value(x1[:, None] - x2[None, :]) if incl: K[np.diag_indices_from(K)] += self.kernel.jitter return K
[ "def", "get_matrix", "(", "self", ",", "x1", "=", "None", ",", "x2", "=", "None", ",", "include_diagonal", "=", "None", ",", "include_general", "=", "None", ")", ":", "if", "x1", "is", "None", "and", "x2", "is", "None", ":", "if", "self", ".", "_t"...
Get the covariance matrix at given independent coordinates Args: x1 (Optional[array[n1]]): The first set of independent coordinates. If this is omitted, ``x1`` will be assumed to be equal to ``x`` from a previous call to :func:`GP.compute`. x2 (Optional[array[n2]]): The second set of independent coordinates. If this is omitted, ``x2`` will be assumed to be ``x1``. include_diagonal (Optional[bool]): Should the white noise and ``yerr`` terms be included on the diagonal? (default: ``False``)
[ "Get", "the", "covariance", "matrix", "at", "given", "independent", "coordinates" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L420-L459
train
38,419
dfm/celerite
celerite/celerite.py
GP.sample
def sample(self, size=None): """ Sample from the prior distribution over datasets Args: size (Optional[int]): The number of samples to draw. Returns: array[n] or array[size, n]: The samples from the prior distribution over datasets. """ self._recompute() if size is None: n = np.random.randn(len(self._t)) else: n = np.random.randn(len(self._t), size) n = self.solver.dot_L(n) if size is None: return self.mean.get_value(self._t) + n[:, 0] return self.mean.get_value(self._t)[None, :] + n.T
python
def sample(self, size=None): """ Sample from the prior distribution over datasets Args: size (Optional[int]): The number of samples to draw. Returns: array[n] or array[size, n]: The samples from the prior distribution over datasets. """ self._recompute() if size is None: n = np.random.randn(len(self._t)) else: n = np.random.randn(len(self._t), size) n = self.solver.dot_L(n) if size is None: return self.mean.get_value(self._t) + n[:, 0] return self.mean.get_value(self._t)[None, :] + n.T
[ "def", "sample", "(", "self", ",", "size", "=", "None", ")", ":", "self", ".", "_recompute", "(", ")", "if", "size", "is", "None", ":", "n", "=", "np", ".", "random", ".", "randn", "(", "len", "(", "self", ".", "_t", ")", ")", "else", ":", "n...
Sample from the prior distribution over datasets Args: size (Optional[int]): The number of samples to draw. Returns: array[n] or array[size, n]: The samples from the prior distribution over datasets.
[ "Sample", "from", "the", "prior", "distribution", "over", "datasets" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L461-L481
train
38,420
dfm/celerite
celerite/terms.py
Term.get_value
def get_value(self, tau): """ Compute the value of the term for an array of lags Args: tau (array[...]): An array of lags where the term should be evaluated. Returns: The value of the term for each ``tau``. This will have the same shape as ``tau``. """ tau = np.asarray(tau) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.coefficients k = get_kernel_value( alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, tau.flatten(), ) return np.asarray(k).reshape(tau.shape)
python
def get_value(self, tau): """ Compute the value of the term for an array of lags Args: tau (array[...]): An array of lags where the term should be evaluated. Returns: The value of the term for each ``tau``. This will have the same shape as ``tau``. """ tau = np.asarray(tau) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.coefficients k = get_kernel_value( alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, tau.flatten(), ) return np.asarray(k).reshape(tau.shape)
[ "def", "get_value", "(", "self", ",", "tau", ")", ":", "tau", "=", "np", ".", "asarray", "(", "tau", ")", "(", "alpha_real", ",", "beta_real", ",", "alpha_complex_real", ",", "alpha_complex_imag", ",", "beta_complex_real", ",", "beta_complex_imag", ")", "=",...
Compute the value of the term for an array of lags Args: tau (array[...]): An array of lags where the term should be evaluated. Returns: The value of the term for each ``tau``. This will have the same shape as ``tau``.
[ "Compute", "the", "value", "of", "the", "term", "for", "an", "array", "of", "lags" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/terms.py#L43-L65
train
38,421
dfm/celerite
celerite/terms.py
Term.get_psd
def get_psd(self, omega): """ Compute the PSD of the term for an array of angular frequencies Args: omega (array[...]): An array of frequencies where the PSD should be evaluated. Returns: The value of the PSD for each ``omega``. This will have the same shape as ``omega``. """ w = np.asarray(omega) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.coefficients p = get_psd_value( alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, w.flatten(), ) return p.reshape(w.shape)
python
def get_psd(self, omega): """ Compute the PSD of the term for an array of angular frequencies Args: omega (array[...]): An array of frequencies where the PSD should be evaluated. Returns: The value of the PSD for each ``omega``. This will have the same shape as ``omega``. """ w = np.asarray(omega) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.coefficients p = get_psd_value( alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, w.flatten(), ) return p.reshape(w.shape)
[ "def", "get_psd", "(", "self", ",", "omega", ")", ":", "w", "=", "np", ".", "asarray", "(", "omega", ")", "(", "alpha_real", ",", "beta_real", ",", "alpha_complex_real", ",", "alpha_complex_imag", ",", "beta_complex_real", ",", "beta_complex_imag", ")", "=",...
Compute the PSD of the term for an array of angular frequencies Args: omega (array[...]): An array of frequencies where the PSD should be evaluated. Returns: The value of the PSD for each ``omega``. This will have the same shape as ``omega``.
[ "Compute", "the", "PSD", "of", "the", "term", "for", "an", "array", "of", "angular", "frequencies" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/terms.py#L67-L89
train
38,422
dfm/celerite
celerite/terms.py
Term.coefficients
def coefficients(self): """ All of the coefficient arrays This property is the concatenation of the results from :func:`terms.Term.get_real_coefficients` and :func:`terms.Term.get_complex_coefficients` but it will always return a tuple of length 6, even if ``alpha_complex_imag`` was omitted from ``get_complex_coefficients``. Returns: (array[j_real], array[j_real], array[j_complex], array[j_complex], array[j_complex], array[j_complex]): ``alpha_real``, ``beta_real``, ``alpha_complex_real``, ``alpha_complex_imag``, ``beta_complex_real``, and ``beta_complex_imag`` as described above. Raises: ValueError: For invalid dimensions for the coefficients. """ vector = self.get_parameter_vector(include_frozen=True) pars = self.get_all_coefficients(vector) if len(pars) != 6: raise ValueError("there must be 6 coefficient blocks") if any(len(p.shape) != 1 for p in pars): raise ValueError("coefficient blocks must be 1D") if len(pars[0]) != len(pars[1]): raise ValueError("coefficient blocks must have the same shape") if any(len(pars[2]) != len(p) for p in pars[3:]): raise ValueError("coefficient blocks must have the same shape") return pars
python
def coefficients(self): """ All of the coefficient arrays This property is the concatenation of the results from :func:`terms.Term.get_real_coefficients` and :func:`terms.Term.get_complex_coefficients` but it will always return a tuple of length 6, even if ``alpha_complex_imag`` was omitted from ``get_complex_coefficients``. Returns: (array[j_real], array[j_real], array[j_complex], array[j_complex], array[j_complex], array[j_complex]): ``alpha_real``, ``beta_real``, ``alpha_complex_real``, ``alpha_complex_imag``, ``beta_complex_real``, and ``beta_complex_imag`` as described above. Raises: ValueError: For invalid dimensions for the coefficients. """ vector = self.get_parameter_vector(include_frozen=True) pars = self.get_all_coefficients(vector) if len(pars) != 6: raise ValueError("there must be 6 coefficient blocks") if any(len(p.shape) != 1 for p in pars): raise ValueError("coefficient blocks must be 1D") if len(pars[0]) != len(pars[1]): raise ValueError("coefficient blocks must have the same shape") if any(len(pars[2]) != len(p) for p in pars[3:]): raise ValueError("coefficient blocks must have the same shape") return pars
[ "def", "coefficients", "(", "self", ")", ":", "vector", "=", "self", ".", "get_parameter_vector", "(", "include_frozen", "=", "True", ")", "pars", "=", "self", ".", "get_all_coefficients", "(", "vector", ")", "if", "len", "(", "pars", ")", "!=", "6", ":"...
All of the coefficient arrays This property is the concatenation of the results from :func:`terms.Term.get_real_coefficients` and :func:`terms.Term.get_complex_coefficients` but it will always return a tuple of length 6, even if ``alpha_complex_imag`` was omitted from ``get_complex_coefficients``. Returns: (array[j_real], array[j_real], array[j_complex], array[j_complex], array[j_complex], array[j_complex]): ``alpha_real``, ``beta_real``, ``alpha_complex_real``, ``alpha_complex_imag``, ``beta_complex_real``, and ``beta_complex_imag`` as described above. Raises: ValueError: For invalid dimensions for the coefficients.
[ "All", "of", "the", "coefficient", "arrays" ]
ad3f471f06b18d233f3dab71bb1c20a316173cae
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/terms.py#L157-L188
train
38,423
shoyer/h5netcdf
h5netcdf/core.py
Group._determine_current_dimension_size
def _determine_current_dimension_size(self, dim_name, max_size): """ Helper method to determine the current size of a dimension. """ # Limited dimension. if self.dimensions[dim_name] is not None: return max_size def _find_dim(h5group, dim): if dim not in h5group: return _find_dim(h5group.parent, dim) return h5group[dim] dim_variable = _find_dim(self._h5group, dim_name) if "REFERENCE_LIST" not in dim_variable.attrs: return max_size root = self._h5group["/"] for ref, _ in dim_variable.attrs["REFERENCE_LIST"]: var = root[ref] for i, var_d in enumerate(var.dims): name = _name_from_dimension(var_d) if name == dim_name: max_size = max(var.shape[i], max_size) return max_size
python
def _determine_current_dimension_size(self, dim_name, max_size): """ Helper method to determine the current size of a dimension. """ # Limited dimension. if self.dimensions[dim_name] is not None: return max_size def _find_dim(h5group, dim): if dim not in h5group: return _find_dim(h5group.parent, dim) return h5group[dim] dim_variable = _find_dim(self._h5group, dim_name) if "REFERENCE_LIST" not in dim_variable.attrs: return max_size root = self._h5group["/"] for ref, _ in dim_variable.attrs["REFERENCE_LIST"]: var = root[ref] for i, var_d in enumerate(var.dims): name = _name_from_dimension(var_d) if name == dim_name: max_size = max(var.shape[i], max_size) return max_size
[ "def", "_determine_current_dimension_size", "(", "self", ",", "dim_name", ",", "max_size", ")", ":", "# Limited dimension.", "if", "self", ".", "dimensions", "[", "dim_name", "]", "is", "not", "None", ":", "return", "max_size", "def", "_find_dim", "(", "h5group"...
Helper method to determine the current size of a dimension.
[ "Helper", "method", "to", "determine", "the", "current", "size", "of", "a", "dimension", "." ]
3ae35cd58297281a1dc69c46fb0b315a0007ac2b
https://github.com/shoyer/h5netcdf/blob/3ae35cd58297281a1dc69c46fb0b315a0007ac2b/h5netcdf/core.py#L273-L300
train
38,424
shoyer/h5netcdf
h5netcdf/core.py
Group._create_dim_scales
def _create_dim_scales(self): """Create all necessary HDF5 dimension scale.""" dim_order = self._dim_order.maps[0] for dim in sorted(dim_order, key=lambda d: dim_order[d]): if dim not in self._h5group: size = self._current_dim_sizes[dim] kwargs = {} if self._dim_sizes[dim] is None: kwargs["maxshape"] = (None,) self._h5group.create_dataset( name=dim, shape=(size,), dtype='S1', **kwargs) h5ds = self._h5group[dim] h5ds.attrs['_Netcdf4Dimid'] = dim_order[dim] if len(h5ds.shape) > 1: dims = self._variables[dim].dimensions coord_ids = np.array([dim_order[d] for d in dims], 'int32') h5ds.attrs['_Netcdf4Coordinates'] = coord_ids scale_name = dim if dim in self.variables else NOT_A_VARIABLE h5ds.dims.create_scale(h5ds, scale_name) for subgroup in self.groups.values(): subgroup._create_dim_scales()
python
def _create_dim_scales(self): """Create all necessary HDF5 dimension scale.""" dim_order = self._dim_order.maps[0] for dim in sorted(dim_order, key=lambda d: dim_order[d]): if dim not in self._h5group: size = self._current_dim_sizes[dim] kwargs = {} if self._dim_sizes[dim] is None: kwargs["maxshape"] = (None,) self._h5group.create_dataset( name=dim, shape=(size,), dtype='S1', **kwargs) h5ds = self._h5group[dim] h5ds.attrs['_Netcdf4Dimid'] = dim_order[dim] if len(h5ds.shape) > 1: dims = self._variables[dim].dimensions coord_ids = np.array([dim_order[d] for d in dims], 'int32') h5ds.attrs['_Netcdf4Coordinates'] = coord_ids scale_name = dim if dim in self.variables else NOT_A_VARIABLE h5ds.dims.create_scale(h5ds, scale_name) for subgroup in self.groups.values(): subgroup._create_dim_scales()
[ "def", "_create_dim_scales", "(", "self", ")", ":", "dim_order", "=", "self", ".", "_dim_order", ".", "maps", "[", "0", "]", "for", "dim", "in", "sorted", "(", "dim_order", ",", "key", "=", "lambda", "d", ":", "dim_order", "[", "d", "]", ")", ":", ...
Create all necessary HDF5 dimension scale.
[ "Create", "all", "necessary", "HDF5", "dimension", "scale", "." ]
3ae35cd58297281a1dc69c46fb0b315a0007ac2b
https://github.com/shoyer/h5netcdf/blob/3ae35cd58297281a1dc69c46fb0b315a0007ac2b/h5netcdf/core.py#L459-L483
train
38,425
shoyer/h5netcdf
h5netcdf/core.py
Group._attach_dim_scales
def _attach_dim_scales(self): """Attach dimension scales to all variables.""" for name, var in self.variables.items(): if name not in self.dimensions: for n, dim in enumerate(var.dimensions): var._h5ds.dims[n].attach_scale(self._all_h5groups[dim]) for subgroup in self.groups.values(): subgroup._attach_dim_scales()
python
def _attach_dim_scales(self): """Attach dimension scales to all variables.""" for name, var in self.variables.items(): if name not in self.dimensions: for n, dim in enumerate(var.dimensions): var._h5ds.dims[n].attach_scale(self._all_h5groups[dim]) for subgroup in self.groups.values(): subgroup._attach_dim_scales()
[ "def", "_attach_dim_scales", "(", "self", ")", ":", "for", "name", ",", "var", "in", "self", ".", "variables", ".", "items", "(", ")", ":", "if", "name", "not", "in", "self", ".", "dimensions", ":", "for", "n", ",", "dim", "in", "enumerate", "(", "...
Attach dimension scales to all variables.
[ "Attach", "dimension", "scales", "to", "all", "variables", "." ]
3ae35cd58297281a1dc69c46fb0b315a0007ac2b
https://github.com/shoyer/h5netcdf/blob/3ae35cd58297281a1dc69c46fb0b315a0007ac2b/h5netcdf/core.py#L485-L493
train
38,426
shoyer/h5netcdf
h5netcdf/core.py
Group._detach_dim_scale
def _detach_dim_scale(self, name): """Detach the dimension scale corresponding to a dimension name.""" for var in self.variables.values(): for n, dim in enumerate(var.dimensions): if dim == name: var._h5ds.dims[n].detach_scale(self._all_h5groups[dim]) for subgroup in self.groups.values(): if dim not in subgroup._h5group: subgroup._detach_dim_scale(name)
python
def _detach_dim_scale(self, name): """Detach the dimension scale corresponding to a dimension name.""" for var in self.variables.values(): for n, dim in enumerate(var.dimensions): if dim == name: var._h5ds.dims[n].detach_scale(self._all_h5groups[dim]) for subgroup in self.groups.values(): if dim not in subgroup._h5group: subgroup._detach_dim_scale(name)
[ "def", "_detach_dim_scale", "(", "self", ",", "name", ")", ":", "for", "var", "in", "self", ".", "variables", ".", "values", "(", ")", ":", "for", "n", ",", "dim", "in", "enumerate", "(", "var", ".", "dimensions", ")", ":", "if", "dim", "==", "name...
Detach the dimension scale corresponding to a dimension name.
[ "Detach", "the", "dimension", "scale", "corresponding", "to", "a", "dimension", "name", "." ]
3ae35cd58297281a1dc69c46fb0b315a0007ac2b
https://github.com/shoyer/h5netcdf/blob/3ae35cd58297281a1dc69c46fb0b315a0007ac2b/h5netcdf/core.py#L495-L504
train
38,427
shoyer/h5netcdf
h5netcdf/core.py
Group.resize_dimension
def resize_dimension(self, dimension, size): """ Resize a dimension to a certain size. It will pad with the underlying HDF5 data sets' fill values (usually zero) where necessary. """ if self.dimensions[dimension] is not None: raise ValueError("Dimension '%s' is not unlimited and thus " "cannot be resized." % dimension) # Resize the dimension. self._current_dim_sizes[dimension] = size for var in self.variables.values(): new_shape = list(var.shape) for i, d in enumerate(var.dimensions): if d == dimension: new_shape[i] = size new_shape = tuple(new_shape) if new_shape != var.shape: var._h5ds.resize(new_shape) # Recurse as dimensions are visible to this group and all child groups. for i in self.groups.values(): i.resize_dimension(dimension, size)
python
def resize_dimension(self, dimension, size): """ Resize a dimension to a certain size. It will pad with the underlying HDF5 data sets' fill values (usually zero) where necessary. """ if self.dimensions[dimension] is not None: raise ValueError("Dimension '%s' is not unlimited and thus " "cannot be resized." % dimension) # Resize the dimension. self._current_dim_sizes[dimension] = size for var in self.variables.values(): new_shape = list(var.shape) for i, d in enumerate(var.dimensions): if d == dimension: new_shape[i] = size new_shape = tuple(new_shape) if new_shape != var.shape: var._h5ds.resize(new_shape) # Recurse as dimensions are visible to this group and all child groups. for i in self.groups.values(): i.resize_dimension(dimension, size)
[ "def", "resize_dimension", "(", "self", ",", "dimension", ",", "size", ")", ":", "if", "self", ".", "dimensions", "[", "dimension", "]", "is", "not", "None", ":", "raise", "ValueError", "(", "\"Dimension '%s' is not unlimited and thus \"", "\"cannot be resized.\"", ...
Resize a dimension to a certain size. It will pad with the underlying HDF5 data sets' fill values (usually zero) where necessary.
[ "Resize", "a", "dimension", "to", "a", "certain", "size", "." ]
3ae35cd58297281a1dc69c46fb0b315a0007ac2b
https://github.com/shoyer/h5netcdf/blob/3ae35cd58297281a1dc69c46fb0b315a0007ac2b/h5netcdf/core.py#L550-L575
train
38,428
vijayvarma392/surfinBH
surfinBH/_utils.py
alignVec_quat
def alignVec_quat(vec): """Returns a unit quaternion that will align vec with the z-axis""" alpha = np.arctan2(vec[1], vec[0]) beta = np.arccos(vec[2]) gamma = -alpha*vec[2] cb = np.cos(0.5*beta) sb = np.sin(0.5*beta) return np.array([cb*np.cos(0.5*(alpha + gamma)), sb*np.sin(0.5*(gamma - alpha)), sb*np.cos(0.5*(gamma - alpha)), cb*np.sin(0.5*(alpha + gamma))])
python
def alignVec_quat(vec): """Returns a unit quaternion that will align vec with the z-axis""" alpha = np.arctan2(vec[1], vec[0]) beta = np.arccos(vec[2]) gamma = -alpha*vec[2] cb = np.cos(0.5*beta) sb = np.sin(0.5*beta) return np.array([cb*np.cos(0.5*(alpha + gamma)), sb*np.sin(0.5*(gamma - alpha)), sb*np.cos(0.5*(gamma - alpha)), cb*np.sin(0.5*(alpha + gamma))])
[ "def", "alignVec_quat", "(", "vec", ")", ":", "alpha", "=", "np", ".", "arctan2", "(", "vec", "[", "1", "]", ",", "vec", "[", "0", "]", ")", "beta", "=", "np", ".", "arccos", "(", "vec", "[", "2", "]", ")", "gamma", "=", "-", "alpha", "*", ...
Returns a unit quaternion that will align vec with the z-axis
[ "Returns", "a", "unit", "quaternion", "that", "will", "align", "vec", "with", "the", "z", "-", "axis" ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_utils.py#L21-L31
train
38,429
vijayvarma392/surfinBH
surfinBH/_utils.py
rotate_in_plane
def rotate_in_plane(chi, phase): """For transforming spins between the coprecessing and coorbital frames""" v = chi.T sp = np.sin(phase) cp = np.cos(phase) res = 1.*v res[0] = v[0]*cp + v[1]*sp res[1] = v[1]*cp - v[0]*sp return res.T
python
def rotate_in_plane(chi, phase): """For transforming spins between the coprecessing and coorbital frames""" v = chi.T sp = np.sin(phase) cp = np.cos(phase) res = 1.*v res[0] = v[0]*cp + v[1]*sp res[1] = v[1]*cp - v[0]*sp return res.T
[ "def", "rotate_in_plane", "(", "chi", ",", "phase", ")", ":", "v", "=", "chi", ".", "T", "sp", "=", "np", ".", "sin", "(", "phase", ")", "cp", "=", "np", ".", "cos", "(", "phase", ")", "res", "=", "1.", "*", "v", "res", "[", "0", "]", "=", ...
For transforming spins between the coprecessing and coorbital frames
[ "For", "transforming", "spins", "between", "the", "coprecessing", "and", "coorbital", "frames" ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_utils.py#L54-L62
train
38,430
vijayvarma392/surfinBH
surfinBH/_utils.py
transform_error_coorb_to_inertial
def transform_error_coorb_to_inertial(vec_coorb, vec_err_coorb, orbPhase, quat_copr): """ Transform error in a vector from the coorbital frame to the inertial frame. Generates distributions in the coorbital frame, transforms them to inertial frame and returns 1-simga widths in the inertial frame. """ # for reproducibility np.random.seed(0) # Get distribution in coorbital frame dist_coorb = np.array([np.random.normal(m, s, 1000) for m,s in zip(vec_coorb, vec_err_coorb)]).T # Transform distribution to coprecessing frame dist_copr = rotate_in_plane(dist_coorb, -orbPhase) # Transform distribution to inertial frame dist_inertial = transformTimeDependentVector( np.array([quat_copr for _ in dist_copr]).T, dist_copr.T).T # Get 1sigma width in inertial frame vec_err_inertial = np.std(dist_inertial, axis=0) return vec_err_inertial
python
def transform_error_coorb_to_inertial(vec_coorb, vec_err_coorb, orbPhase, quat_copr): """ Transform error in a vector from the coorbital frame to the inertial frame. Generates distributions in the coorbital frame, transforms them to inertial frame and returns 1-simga widths in the inertial frame. """ # for reproducibility np.random.seed(0) # Get distribution in coorbital frame dist_coorb = np.array([np.random.normal(m, s, 1000) for m,s in zip(vec_coorb, vec_err_coorb)]).T # Transform distribution to coprecessing frame dist_copr = rotate_in_plane(dist_coorb, -orbPhase) # Transform distribution to inertial frame dist_inertial = transformTimeDependentVector( np.array([quat_copr for _ in dist_copr]).T, dist_copr.T).T # Get 1sigma width in inertial frame vec_err_inertial = np.std(dist_inertial, axis=0) return vec_err_inertial
[ "def", "transform_error_coorb_to_inertial", "(", "vec_coorb", ",", "vec_err_coorb", ",", "orbPhase", ",", "quat_copr", ")", ":", "# for reproducibility", "np", ".", "random", ".", "seed", "(", "0", ")", "# Get distribution in coorbital frame", "dist_coorb", "=", "np",...
Transform error in a vector from the coorbital frame to the inertial frame. Generates distributions in the coorbital frame, transforms them to inertial frame and returns 1-simga widths in the inertial frame.
[ "Transform", "error", "in", "a", "vector", "from", "the", "coorbital", "frame", "to", "the", "inertial", "frame", ".", "Generates", "distributions", "in", "the", "coorbital", "frame", "transforms", "them", "to", "inertial", "frame", "and", "returns", "1", "-",...
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_utils.py#L81-L105
train
38,431
vijayvarma392/surfinBH
surfinBH/_fit_evaluators/fit_7dq2.py
Fit7dq2._extra_regression_kwargs
def _extra_regression_kwargs(self): """ List of additional kwargs to use in regression tests. """ # larger than default sometimes needed when extrapolating omega_switch_test = 0.019 extra_args = [] extra_args.append({ 'omega0': 5e-3, 'PN_approximant': 'SpinTaylorT4', 'PN_dt': 0.1, 'PN_spin_order': 7, 'PN_phase_order': 7, 'omega_switch': omega_switch_test, }) extra_args.append({ 'omega0': 6e-3, 'PN_approximant': 'SpinTaylorT1', 'PN_dt': 0.5, 'PN_spin_order': 5, 'PN_phase_order': 7, 'omega_switch': omega_switch_test, }) extra_args.append({ 'omega0': 7e-3, 'PN_approximant': 'SpinTaylorT2', 'PN_dt': 1, 'PN_spin_order': 7, 'PN_phase_order': 5, 'omega_switch': omega_switch_test, }) # These should be pure NRSur7dq2 extra_args.append({'omega0': 3e-2}) extra_args.append({'omega0': 5e-2}) return extra_args
python
def _extra_regression_kwargs(self): """ List of additional kwargs to use in regression tests. """ # larger than default sometimes needed when extrapolating omega_switch_test = 0.019 extra_args = [] extra_args.append({ 'omega0': 5e-3, 'PN_approximant': 'SpinTaylorT4', 'PN_dt': 0.1, 'PN_spin_order': 7, 'PN_phase_order': 7, 'omega_switch': omega_switch_test, }) extra_args.append({ 'omega0': 6e-3, 'PN_approximant': 'SpinTaylorT1', 'PN_dt': 0.5, 'PN_spin_order': 5, 'PN_phase_order': 7, 'omega_switch': omega_switch_test, }) extra_args.append({ 'omega0': 7e-3, 'PN_approximant': 'SpinTaylorT2', 'PN_dt': 1, 'PN_spin_order': 7, 'PN_phase_order': 5, 'omega_switch': omega_switch_test, }) # These should be pure NRSur7dq2 extra_args.append({'omega0': 3e-2}) extra_args.append({'omega0': 5e-2}) return extra_args
[ "def", "_extra_regression_kwargs", "(", "self", ")", ":", "# larger than default sometimes needed when extrapolating", "omega_switch_test", "=", "0.019", "extra_args", "=", "[", "]", "extra_args", ".", "append", "(", "{", "'omega0'", ":", "5e-3", ",", "'PN_approximant'"...
List of additional kwargs to use in regression tests.
[ "List", "of", "additional", "kwargs", "to", "use", "in", "regression", "tests", "." ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_fit_evaluators/fit_7dq2.py#L159-L199
train
38,432
vijayvarma392/surfinBH
surfinBH/_fit_evaluators/fit_7dq2.py
Fit7dq2._evolve_spins
def _evolve_spins(self, q, chiA0, chiB0, omega0, PN_approximant, PN_dt, PN_spin0, PN_phase0, omega0_nrsur): """ Evolves spins of the component BHs from an initial orbital frequency = omega0 until t=-100 M from the peak of the waveform. If omega0 < omega0_nrsur, use PN to evolve the spins until orbital frequency = omega0. Then evolves further with the NRSur7dq2 waveform model until t=-100M from the peak. Assumes chiA0 and chiB0 are defined in the inertial frame defined at orbital frequency = omega0 as: The z-axis is along the Newtonian orbital angular momentum when the PN orbital frequency = omega0. The x-axis is along the line of separation from the smaller BH to the larger BH at this frequency. The y-axis completes the triad. Returns spins in the coorbital frame at t=-100M, as well as the coprecessing frame quaternion and orbital phase in the coprecessing frame at this time. """ if omega0 < omega0_nrsur: # If omega0 is below the NRSur7dq2 start frequency, we use PN # to evolve the spins until orbital frequency = omega0_nrsur. # Note that we update omega0_nrsur here with the PN # frequency that was closest to the input omega0_nrsur. chiA0_nrsur_copr, chiB0_nrsur_copr, quat0_nrsur_copr, \ phi0_nrsur, omega0_nrsur \ = evolve_pn_spins(q, chiA0, chiB0, omega0, omega0_nrsur, approximant=PN_approximant, dt=PN_dt, spinO=PN_spin0, phaseO=PN_phase0) else: # If omega0>= omega0_nrsur, we evolve spins directly with NRSur7dq2 # waveform model. We set the coprecessing frame quaternion to # identity and orbital phase to 0 at omega=omega0, hence the # coprecessing frame is the same as the inertial frame here. 
# Note that we update omega0_nrsur here and set it to omega0 chiA0_nrsur_copr, chiB0_nrsur_copr, quat0_nrsur_copr, \ phi0_nrsur, omega0_nrsur \ = chiA0, chiB0, [1,0,0,0], 0, omega0 # Load NRSur7dq2 if needed if self.nrsur is None: self._load_NRSur7dq2() # evaluate NRSur7dq2 dynamics # We set allow_extrapolation=True always since we test param limits # independently quat, orbphase, chiA_copr, chiB_copr = self.nrsur.get_dynamics(q, chiA0_nrsur_copr, chiB0_nrsur_copr, init_quat=quat0_nrsur_copr, init_phase=phi0_nrsur, omega_ref=omega0_nrsur, allow_extrapolation=True) # get data at time node where remnant fits are done fitnode_time = -100 nodeIdx = np.argmin(np.abs(self.nrsur.tds - fitnode_time)) quat_fitnode = quat.T[nodeIdx] orbphase_fitnode = orbphase[nodeIdx] # get coorbital frame spins at the time node chiA_coorb_fitnode = utils.rotate_in_plane(chiA_copr[nodeIdx], orbphase_fitnode) chiB_coorb_fitnode = utils.rotate_in_plane(chiB_copr[nodeIdx], orbphase_fitnode) return chiA_coorb_fitnode, chiB_coorb_fitnode, quat_fitnode, \ orbphase_fitnode
python
def _evolve_spins(self, q, chiA0, chiB0, omega0, PN_approximant, PN_dt, PN_spin0, PN_phase0, omega0_nrsur): """ Evolves spins of the component BHs from an initial orbital frequency = omega0 until t=-100 M from the peak of the waveform. If omega0 < omega0_nrsur, use PN to evolve the spins until orbital frequency = omega0. Then evolves further with the NRSur7dq2 waveform model until t=-100M from the peak. Assumes chiA0 and chiB0 are defined in the inertial frame defined at orbital frequency = omega0 as: The z-axis is along the Newtonian orbital angular momentum when the PN orbital frequency = omega0. The x-axis is along the line of separation from the smaller BH to the larger BH at this frequency. The y-axis completes the triad. Returns spins in the coorbital frame at t=-100M, as well as the coprecessing frame quaternion and orbital phase in the coprecessing frame at this time. """ if omega0 < omega0_nrsur: # If omega0 is below the NRSur7dq2 start frequency, we use PN # to evolve the spins until orbital frequency = omega0_nrsur. # Note that we update omega0_nrsur here with the PN # frequency that was closest to the input omega0_nrsur. chiA0_nrsur_copr, chiB0_nrsur_copr, quat0_nrsur_copr, \ phi0_nrsur, omega0_nrsur \ = evolve_pn_spins(q, chiA0, chiB0, omega0, omega0_nrsur, approximant=PN_approximant, dt=PN_dt, spinO=PN_spin0, phaseO=PN_phase0) else: # If omega0>= omega0_nrsur, we evolve spins directly with NRSur7dq2 # waveform model. We set the coprecessing frame quaternion to # identity and orbital phase to 0 at omega=omega0, hence the # coprecessing frame is the same as the inertial frame here. 
# Note that we update omega0_nrsur here and set it to omega0 chiA0_nrsur_copr, chiB0_nrsur_copr, quat0_nrsur_copr, \ phi0_nrsur, omega0_nrsur \ = chiA0, chiB0, [1,0,0,0], 0, omega0 # Load NRSur7dq2 if needed if self.nrsur is None: self._load_NRSur7dq2() # evaluate NRSur7dq2 dynamics # We set allow_extrapolation=True always since we test param limits # independently quat, orbphase, chiA_copr, chiB_copr = self.nrsur.get_dynamics(q, chiA0_nrsur_copr, chiB0_nrsur_copr, init_quat=quat0_nrsur_copr, init_phase=phi0_nrsur, omega_ref=omega0_nrsur, allow_extrapolation=True) # get data at time node where remnant fits are done fitnode_time = -100 nodeIdx = np.argmin(np.abs(self.nrsur.tds - fitnode_time)) quat_fitnode = quat.T[nodeIdx] orbphase_fitnode = orbphase[nodeIdx] # get coorbital frame spins at the time node chiA_coorb_fitnode = utils.rotate_in_plane(chiA_copr[nodeIdx], orbphase_fitnode) chiB_coorb_fitnode = utils.rotate_in_plane(chiB_copr[nodeIdx], orbphase_fitnode) return chiA_coorb_fitnode, chiB_coorb_fitnode, quat_fitnode, \ orbphase_fitnode
[ "def", "_evolve_spins", "(", "self", ",", "q", ",", "chiA0", ",", "chiB0", ",", "omega0", ",", "PN_approximant", ",", "PN_dt", ",", "PN_spin0", ",", "PN_phase0", ",", "omega0_nrsur", ")", ":", "if", "omega0", "<", "omega0_nrsur", ":", "# If omega0 is below t...
Evolves spins of the component BHs from an initial orbital frequency = omega0 until t=-100 M from the peak of the waveform. If omega0 < omega0_nrsur, use PN to evolve the spins until orbital frequency = omega0. Then evolves further with the NRSur7dq2 waveform model until t=-100M from the peak. Assumes chiA0 and chiB0 are defined in the inertial frame defined at orbital frequency = omega0 as: The z-axis is along the Newtonian orbital angular momentum when the PN orbital frequency = omega0. The x-axis is along the line of separation from the smaller BH to the larger BH at this frequency. The y-axis completes the triad. Returns spins in the coorbital frame at t=-100M, as well as the coprecessing frame quaternion and orbital phase in the coprecessing frame at this time.
[ "Evolves", "spins", "of", "the", "component", "BHs", "from", "an", "initial", "orbital", "frequency", "=", "omega0", "until", "t", "=", "-", "100", "M", "from", "the", "peak", "of", "the", "waveform", ".", "If", "omega0", "<", "omega0_nrsur", "use", "PN"...
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_fit_evaluators/fit_7dq2.py#L226-L295
train
38,433
vijayvarma392/surfinBH
surfinBH/_fit_evaluators/fit_7dq2.py
Fit7dq2._eval_wrapper
def _eval_wrapper(self, fit_key, q, chiA, chiB, **kwargs): """Evaluates the surfinBH7dq2 model. """ chiA = np.array(chiA) chiB = np.array(chiB) # Warn/Exit if extrapolating allow_extrap = kwargs.pop('allow_extrap', False) self._check_param_limits(q, chiA, chiB, allow_extrap) omega0 = kwargs.pop('omega0', None) PN_approximant = kwargs.pop('PN_approximant', 'SpinTaylorT4') PN_dt = kwargs.pop('PN_dt', 0.1) PN_spin_order = kwargs.pop('PN_spin_order', 7) PN_phase_order = kwargs.pop('PN_phase_order', 7) omega_switch = kwargs.pop('omega_switch', 0.018) self._check_unused_kwargs(kwargs) if omega0 is None: # If omega0 is given, assume chiA, chiB are the coorbital frame # spins at t=-100 M. x = np.concatenate(([q], chiA, chiB)) else: # If omega0 is given, evolve the spins from omega0 # to t = -100 M from the peak. chiA_coorb_fitnode, chiB_coorb_fitnode, quat_fitnode, \ orbphase_fitnode \ = self._evolve_spins(q, chiA, chiB, omega0, PN_approximant, PN_dt, PN_spin_order, PN_phase_order, omega_switch) # x should contain coorbital frame spins at t=-100M x = np.concatenate(([q], chiA_coorb_fitnode, chiB_coorb_fitnode)) def eval_vector_fit(x, fit_key): res = self._evaluate_fits(x, fit_key) fit_val = res.T[0] fit_err = res.T[1] if omega0 is not None: # If spins were given in inertial frame at omega0, # transform vectors and errors back to the same frame. 
fit_val = utils.transform_vector_coorb_to_inertial(fit_val, orbphase_fitnode, quat_fitnode) fit_err = utils.transform_error_coorb_to_inertial(fit_val, fit_err, orbphase_fitnode, quat_fitnode) return fit_val, fit_err if fit_key == 'mf' or fit_key == 'all': mf, mf_err = self._evaluate_fits(x, 'mf') if fit_key == 'mf': return mf, mf_err if fit_key == 'chif' or fit_key == 'all': chif, chif_err = eval_vector_fit(x, 'chif') if fit_key == 'chif': return chif, chif_err if fit_key == 'vf' or fit_key == 'all': vf, vf_err = eval_vector_fit(x, 'vf') if fit_key == 'vf': return vf, vf_err if fit_key == 'all': return mf, chif, vf, mf_err, chif_err, vf_err
python
def _eval_wrapper(self, fit_key, q, chiA, chiB, **kwargs): """Evaluates the surfinBH7dq2 model. """ chiA = np.array(chiA) chiB = np.array(chiB) # Warn/Exit if extrapolating allow_extrap = kwargs.pop('allow_extrap', False) self._check_param_limits(q, chiA, chiB, allow_extrap) omega0 = kwargs.pop('omega0', None) PN_approximant = kwargs.pop('PN_approximant', 'SpinTaylorT4') PN_dt = kwargs.pop('PN_dt', 0.1) PN_spin_order = kwargs.pop('PN_spin_order', 7) PN_phase_order = kwargs.pop('PN_phase_order', 7) omega_switch = kwargs.pop('omega_switch', 0.018) self._check_unused_kwargs(kwargs) if omega0 is None: # If omega0 is given, assume chiA, chiB are the coorbital frame # spins at t=-100 M. x = np.concatenate(([q], chiA, chiB)) else: # If omega0 is given, evolve the spins from omega0 # to t = -100 M from the peak. chiA_coorb_fitnode, chiB_coorb_fitnode, quat_fitnode, \ orbphase_fitnode \ = self._evolve_spins(q, chiA, chiB, omega0, PN_approximant, PN_dt, PN_spin_order, PN_phase_order, omega_switch) # x should contain coorbital frame spins at t=-100M x = np.concatenate(([q], chiA_coorb_fitnode, chiB_coorb_fitnode)) def eval_vector_fit(x, fit_key): res = self._evaluate_fits(x, fit_key) fit_val = res.T[0] fit_err = res.T[1] if omega0 is not None: # If spins were given in inertial frame at omega0, # transform vectors and errors back to the same frame. 
fit_val = utils.transform_vector_coorb_to_inertial(fit_val, orbphase_fitnode, quat_fitnode) fit_err = utils.transform_error_coorb_to_inertial(fit_val, fit_err, orbphase_fitnode, quat_fitnode) return fit_val, fit_err if fit_key == 'mf' or fit_key == 'all': mf, mf_err = self._evaluate_fits(x, 'mf') if fit_key == 'mf': return mf, mf_err if fit_key == 'chif' or fit_key == 'all': chif, chif_err = eval_vector_fit(x, 'chif') if fit_key == 'chif': return chif, chif_err if fit_key == 'vf' or fit_key == 'all': vf, vf_err = eval_vector_fit(x, 'vf') if fit_key == 'vf': return vf, vf_err if fit_key == 'all': return mf, chif, vf, mf_err, chif_err, vf_err
[ "def", "_eval_wrapper", "(", "self", ",", "fit_key", ",", "q", ",", "chiA", ",", "chiB", ",", "*", "*", "kwargs", ")", ":", "chiA", "=", "np", ".", "array", "(", "chiA", ")", "chiB", "=", "np", ".", "array", "(", "chiB", ")", "# Warn/Exit if extrap...
Evaluates the surfinBH7dq2 model.
[ "Evaluates", "the", "surfinBH7dq2", "model", "." ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_fit_evaluators/fit_7dq2.py#L298-L363
train
38,434
vijayvarma392/surfinBH
surfinBH/_loadFits.py
LoadFits
def LoadFits(name): """ Loads data for a fit. If data is not available, downloads it before loading. """ if name not in fits_collection.keys(): raise Exception('Invalid fit name : %s'%name) else: testPath = DataPath() + '/' + fits_collection[name].data_url.split('/')[-1] if (not os.path.isfile(testPath)): DownloadData(name) fit = fits_collection[name].fit_class(name.split('surfinBH')[-1]) print('Loaded %s fit.'%name) return fit
python
def LoadFits(name): """ Loads data for a fit. If data is not available, downloads it before loading. """ if name not in fits_collection.keys(): raise Exception('Invalid fit name : %s'%name) else: testPath = DataPath() + '/' + fits_collection[name].data_url.split('/')[-1] if (not os.path.isfile(testPath)): DownloadData(name) fit = fits_collection[name].fit_class(name.split('surfinBH')[-1]) print('Loaded %s fit.'%name) return fit
[ "def", "LoadFits", "(", "name", ")", ":", "if", "name", "not", "in", "fits_collection", ".", "keys", "(", ")", ":", "raise", "Exception", "(", "'Invalid fit name : %s'", "%", "name", ")", "else", ":", "testPath", "=", "DataPath", "(", ")", "+", "'/'", ...
Loads data for a fit. If data is not available, downloads it before loading.
[ "Loads", "data", "for", "a", "fit", ".", "If", "data", "is", "not", "available", "downloads", "it", "before", "loading", "." ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_loadFits.py#L25-L37
train
38,435
vijayvarma392/surfinBH
surfinBH/_fit_evaluators/fit_3dq8.py
Fit3dq8._eval_wrapper
def _eval_wrapper(self, fit_key, q, chiA, chiB, **kwargs): """ Evaluates the surfinBH3dq8 model. """ chiA = np.array(chiA) chiB = np.array(chiB) # Warn/Exit if extrapolating allow_extrap = kwargs.pop('allow_extrap', False) self._check_param_limits(q, chiA, chiB, allow_extrap) self._check_unused_kwargs(kwargs) x = [q, chiA[2], chiB[2]] if fit_key == 'mf' or fit_key == 'all': mf, mf_err = self._evaluate_fits(x, 'mf') if fit_key == 'mf': return mf, mf_err if fit_key == 'chif' or fit_key == 'all': chifz, chifz_err = self._evaluate_fits(x, 'chifz') chif = np.array([0,0,chifz]) chif_err = np.array([0,0,chifz_err]) if fit_key == 'chif': return chif, chif_err if fit_key == 'vf' or fit_key == 'all': vfx, vfx_err = self._evaluate_fits(x, 'vfx') vfy, vfy_err = self._evaluate_fits(x, 'vfy') vf = np.array([vfx, vfy, 0]) vf_err = np.array([vfx_err, vfy_err, 0]) if fit_key == 'vf': return vf, vf_err if fit_key == 'all': return mf, chif, vf, mf_err, chif_err, vf_err
python
def _eval_wrapper(self, fit_key, q, chiA, chiB, **kwargs): """ Evaluates the surfinBH3dq8 model. """ chiA = np.array(chiA) chiB = np.array(chiB) # Warn/Exit if extrapolating allow_extrap = kwargs.pop('allow_extrap', False) self._check_param_limits(q, chiA, chiB, allow_extrap) self._check_unused_kwargs(kwargs) x = [q, chiA[2], chiB[2]] if fit_key == 'mf' or fit_key == 'all': mf, mf_err = self._evaluate_fits(x, 'mf') if fit_key == 'mf': return mf, mf_err if fit_key == 'chif' or fit_key == 'all': chifz, chifz_err = self._evaluate_fits(x, 'chifz') chif = np.array([0,0,chifz]) chif_err = np.array([0,0,chifz_err]) if fit_key == 'chif': return chif, chif_err if fit_key == 'vf' or fit_key == 'all': vfx, vfx_err = self._evaluate_fits(x, 'vfx') vfy, vfy_err = self._evaluate_fits(x, 'vfy') vf = np.array([vfx, vfy, 0]) vf_err = np.array([vfx_err, vfy_err, 0]) if fit_key == 'vf': return vf, vf_err if fit_key == 'all': return mf, chif, vf, mf_err, chif_err, vf_err
[ "def", "_eval_wrapper", "(", "self", ",", "fit_key", ",", "q", ",", "chiA", ",", "chiB", ",", "*", "*", "kwargs", ")", ":", "chiA", "=", "np", ".", "array", "(", "chiA", ")", "chiB", "=", "np", ".", "array", "(", "chiB", ")", "# Warn/Exit if extrap...
Evaluates the surfinBH3dq8 model.
[ "Evaluates", "the", "surfinBH3dq8", "model", "." ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_fit_evaluators/fit_3dq8.py#L112-L143
train
38,436
vijayvarma392/surfinBH
surfinBH/_lal_spin_evolution.py
evolve_pn_spins
def evolve_pn_spins(q, chiA0, chiB0, omega0, omegaTimesM_final, approximant='SpinTaylorT4', dt=0.1, spinO=7, phaseO=7): """ Evolves PN spins from a starting orbital frequency and spins to a final frequency. Inputs: q: Mass ratio (q>=1) chiA0: Dimless spin of BhA at initial freq. chiB0: Dimless spin of BhB at initial freq. omega0: Initial orbital frequency in dimless units. omegaTimesM_final: Final orbital frequency in dimless units. approximant: 'SpinTaylorT1/T2/T4'. Default: 'SpinTaylorT4'. dt: Dimless step time for evolution. Default: 0.1 . spinO: Twice PN order of spin effects. Default: 5 . phaseO: Twice PN order in phase. Default: 8 . Outputs (all are time series): chiA_end_copr: Spin of BhA at final frequency, in coprecessing frame. chiB_end_copr: Spin of BhB at final frequency, in coprecessing frame. q_copr_end: Coprecessing frame quaternion at final frequency. phi_end: Orbital phase in the coprecessing frame at final frequency. omegaTimesM_end Dimensionless final frequency. Should agree with omegaTimesM_final. The inertial frame is assumed to be aligned to the coorbital frame at orbital frequency = omega0. chiA0 and chiB0 are the inertial/coorbital frame spins at omega0. """ omega, phi, chiA, chiB, lNhat, e1 = lal_spin_evloution_wrapper(approximant, q, omega0, chiA0, chiB0, dt, spinO, phaseO) # Compute omega, inertial spins, angular momentum direction and orbital # phase when omega = omegaTimesM_final end_idx = np.argmin(np.abs(omega - omegaTimesM_final)) omegaTimesM_end = omega[end_idx] chiA_end = chiA[end_idx] chiB_end = chiB[end_idx] lNhat_end = lNhat[end_idx] phi_end = phi[end_idx] # Align the z-direction along orbital angular momentum direction # at end_idx. This moves us in to the coprecessing frame. 
q_copr_end = _utils.alignVec_quat(lNhat_end) chiA_end_copr = _utils.transformTimeDependentVector( np.array([q_copr_end]).T, np.array([chiA_end]).T, inverse=1).T[0] chiB_end_copr = _utils.transformTimeDependentVector( np.array([q_copr_end]).T, np.array([chiB_end]).T, inverse=1).T[0] return chiA_end_copr, chiB_end_copr, q_copr_end, phi_end, omegaTimesM_end
python
def evolve_pn_spins(q, chiA0, chiB0, omega0, omegaTimesM_final, approximant='SpinTaylorT4', dt=0.1, spinO=7, phaseO=7): """ Evolves PN spins from a starting orbital frequency and spins to a final frequency. Inputs: q: Mass ratio (q>=1) chiA0: Dimless spin of BhA at initial freq. chiB0: Dimless spin of BhB at initial freq. omega0: Initial orbital frequency in dimless units. omegaTimesM_final: Final orbital frequency in dimless units. approximant: 'SpinTaylorT1/T2/T4'. Default: 'SpinTaylorT4'. dt: Dimless step time for evolution. Default: 0.1 . spinO: Twice PN order of spin effects. Default: 5 . phaseO: Twice PN order in phase. Default: 8 . Outputs (all are time series): chiA_end_copr: Spin of BhA at final frequency, in coprecessing frame. chiB_end_copr: Spin of BhB at final frequency, in coprecessing frame. q_copr_end: Coprecessing frame quaternion at final frequency. phi_end: Orbital phase in the coprecessing frame at final frequency. omegaTimesM_end Dimensionless final frequency. Should agree with omegaTimesM_final. The inertial frame is assumed to be aligned to the coorbital frame at orbital frequency = omega0. chiA0 and chiB0 are the inertial/coorbital frame spins at omega0. """ omega, phi, chiA, chiB, lNhat, e1 = lal_spin_evloution_wrapper(approximant, q, omega0, chiA0, chiB0, dt, spinO, phaseO) # Compute omega, inertial spins, angular momentum direction and orbital # phase when omega = omegaTimesM_final end_idx = np.argmin(np.abs(omega - omegaTimesM_final)) omegaTimesM_end = omega[end_idx] chiA_end = chiA[end_idx] chiB_end = chiB[end_idx] lNhat_end = lNhat[end_idx] phi_end = phi[end_idx] # Align the z-direction along orbital angular momentum direction # at end_idx. This moves us in to the coprecessing frame. 
q_copr_end = _utils.alignVec_quat(lNhat_end) chiA_end_copr = _utils.transformTimeDependentVector( np.array([q_copr_end]).T, np.array([chiA_end]).T, inverse=1).T[0] chiB_end_copr = _utils.transformTimeDependentVector( np.array([q_copr_end]).T, np.array([chiB_end]).T, inverse=1).T[0] return chiA_end_copr, chiB_end_copr, q_copr_end, phi_end, omegaTimesM_end
[ "def", "evolve_pn_spins", "(", "q", ",", "chiA0", ",", "chiB0", ",", "omega0", ",", "omegaTimesM_final", ",", "approximant", "=", "'SpinTaylorT4'", ",", "dt", "=", "0.1", ",", "spinO", "=", "7", ",", "phaseO", "=", "7", ")", ":", "omega", ",", "phi", ...
Evolves PN spins from a starting orbital frequency and spins to a final frequency. Inputs: q: Mass ratio (q>=1) chiA0: Dimless spin of BhA at initial freq. chiB0: Dimless spin of BhB at initial freq. omega0: Initial orbital frequency in dimless units. omegaTimesM_final: Final orbital frequency in dimless units. approximant: 'SpinTaylorT1/T2/T4'. Default: 'SpinTaylorT4'. dt: Dimless step time for evolution. Default: 0.1 . spinO: Twice PN order of spin effects. Default: 5 . phaseO: Twice PN order in phase. Default: 8 . Outputs (all are time series): chiA_end_copr: Spin of BhA at final frequency, in coprecessing frame. chiB_end_copr: Spin of BhB at final frequency, in coprecessing frame. q_copr_end: Coprecessing frame quaternion at final frequency. phi_end: Orbital phase in the coprecessing frame at final frequency. omegaTimesM_end Dimensionless final frequency. Should agree with omegaTimesM_final. The inertial frame is assumed to be aligned to the coorbital frame at orbital frequency = omega0. chiA0 and chiB0 are the inertial/coorbital frame spins at omega0.
[ "Evolves", "PN", "spins", "from", "a", "starting", "orbital", "frequency", "and", "spins", "to", "a", "final", "frequency", "." ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_lal_spin_evolution.py#L158-L211
train
38,437
crossbario/zlmdb
zlmdb/_pmap.py
qual
def qual(obj): """ Return fully qualified name of a class. """ return u'{}.{}'.format(obj.__class__.__module__, obj.__class__.__name__)
python
def qual(obj): """ Return fully qualified name of a class. """ return u'{}.{}'.format(obj.__class__.__module__, obj.__class__.__name__)
[ "def", "qual", "(", "obj", ")", ":", "return", "u'{}.{}'", ".", "format", "(", "obj", ".", "__class__", ".", "__module__", ",", "obj", ".", "__class__", ".", "__name__", ")" ]
Return fully qualified name of a class.
[ "Return", "fully", "qualified", "name", "of", "a", "class", "." ]
577e8ce9314484f1fd5092fb4eef70221bb1d030
https://github.com/crossbario/zlmdb/blob/577e8ce9314484f1fd5092fb4eef70221bb1d030/zlmdb/_pmap.py#L265-L269
train
38,438
crossbario/zlmdb
zlmdb/_pmap.py
PersistentMap.count
def count(self, txn, prefix=None): """ Count number of records in the persistent map. When no prefix is given, the total number of records is returned. When a prefix is given, only the number of records with keys that have this prefix are counted. :param txn: The transaction in which to run. :type txn: :class:`zlmdb.Transaction` :param prefix: The key prefix of records to count. :type prefix: object :returns: The number of records. :rtype: int """ key_from = struct.pack('>H', self._slot) if prefix: key_from += self._serialize_key(prefix) kfl = len(key_from) cnt = 0 cursor = txn._txn.cursor() has_more = cursor.set_range(key_from) while has_more: _key = cursor.key() _prefix = _key[:kfl] if _prefix != key_from: break cnt += 1 has_more = cursor.next() return cnt
python
def count(self, txn, prefix=None): """ Count number of records in the persistent map. When no prefix is given, the total number of records is returned. When a prefix is given, only the number of records with keys that have this prefix are counted. :param txn: The transaction in which to run. :type txn: :class:`zlmdb.Transaction` :param prefix: The key prefix of records to count. :type prefix: object :returns: The number of records. :rtype: int """ key_from = struct.pack('>H', self._slot) if prefix: key_from += self._serialize_key(prefix) kfl = len(key_from) cnt = 0 cursor = txn._txn.cursor() has_more = cursor.set_range(key_from) while has_more: _key = cursor.key() _prefix = _key[:kfl] if _prefix != key_from: break cnt += 1 has_more = cursor.next() return cnt
[ "def", "count", "(", "self", ",", "txn", ",", "prefix", "=", "None", ")", ":", "key_from", "=", "struct", ".", "pack", "(", "'>H'", ",", "self", ".", "_slot", ")", "if", "prefix", ":", "key_from", "+=", "self", ".", "_serialize_key", "(", "prefix", ...
Count number of records in the persistent map. When no prefix is given, the total number of records is returned. When a prefix is given, only the number of records with keys that have this prefix are counted. :param txn: The transaction in which to run. :type txn: :class:`zlmdb.Transaction` :param prefix: The key prefix of records to count. :type prefix: object :returns: The number of records. :rtype: int
[ "Count", "number", "of", "records", "in", "the", "persistent", "map", ".", "When", "no", "prefix", "is", "given", "the", "total", "number", "of", "records", "is", "returned", ".", "When", "a", "prefix", "is", "given", "only", "the", "number", "of", "reco...
577e8ce9314484f1fd5092fb4eef70221bb1d030
https://github.com/crossbario/zlmdb/blob/577e8ce9314484f1fd5092fb4eef70221bb1d030/zlmdb/_pmap.py#L513-L545
train
38,439
crossbario/zlmdb
zlmdb/_pmap.py
PersistentMap.count_range
def count_range(self, txn, from_key, to_key): """ Counter number of records in the perstistent map with keys within the given range. :param txn: The transaction in which to run. :type txn: :class:`zlmdb.Transaction` :param from_key: Count records starting and including from this key. :type from_key: object :param to_key: End counting records before this key. :type to_key: object :returns: The number of records. :rtype: int """ key_from = struct.pack('>H', self._slot) + self._serialize_key(from_key) to_key = struct.pack('>H', self._slot) + self._serialize_key(to_key) cnt = 0 cursor = txn._txn.cursor() has_more = cursor.set_range(key_from) while has_more: if cursor.key() >= to_key: break cnt += 1 has_more = cursor.next() return cnt
python
def count_range(self, txn, from_key, to_key): """ Counter number of records in the perstistent map with keys within the given range. :param txn: The transaction in which to run. :type txn: :class:`zlmdb.Transaction` :param from_key: Count records starting and including from this key. :type from_key: object :param to_key: End counting records before this key. :type to_key: object :returns: The number of records. :rtype: int """ key_from = struct.pack('>H', self._slot) + self._serialize_key(from_key) to_key = struct.pack('>H', self._slot) + self._serialize_key(to_key) cnt = 0 cursor = txn._txn.cursor() has_more = cursor.set_range(key_from) while has_more: if cursor.key() >= to_key: break cnt += 1 has_more = cursor.next() return cnt
[ "def", "count_range", "(", "self", ",", "txn", ",", "from_key", ",", "to_key", ")", ":", "key_from", "=", "struct", ".", "pack", "(", "'>H'", ",", "self", ".", "_slot", ")", "+", "self", ".", "_serialize_key", "(", "from_key", ")", "to_key", "=", "st...
Counter number of records in the perstistent map with keys within the given range. :param txn: The transaction in which to run. :type txn: :class:`zlmdb.Transaction` :param from_key: Count records starting and including from this key. :type from_key: object :param to_key: End counting records before this key. :type to_key: object :returns: The number of records. :rtype: int
[ "Counter", "number", "of", "records", "in", "the", "perstistent", "map", "with", "keys", "within", "the", "given", "range", "." ]
577e8ce9314484f1fd5092fb4eef70221bb1d030
https://github.com/crossbario/zlmdb/blob/577e8ce9314484f1fd5092fb4eef70221bb1d030/zlmdb/_pmap.py#L547-L576
train
38,440
vijayvarma392/surfinBH
surfinBH/surfinBH.py
SurFinBH._read_dict
def _read_dict(self, f): """ Converts h5 groups to dictionaries """ d = {} for k, item in f.items(): if type(item) == h5py._hl.dataset.Dataset: v = item.value if type(v) == np.string_: v = str(v) if type(v) == str and v == "NONE": d[k] = None elif type(v) == str and v == "EMPTYARR": d[k] = np.array([]) elif isinstance(v, bytes): d[k] = v.decode('utf-8') else: d[k] = v elif k[:5] == "DICT_": d[k[5:]] = self._read_dict(item) elif k[:5] == "LIST_": tmpD = self._read_dict(item) d[k[5:]] = [tmpD[str(i)] for i in range(len(tmpD))] return d
python
def _read_dict(self, f): """ Converts h5 groups to dictionaries """ d = {} for k, item in f.items(): if type(item) == h5py._hl.dataset.Dataset: v = item.value if type(v) == np.string_: v = str(v) if type(v) == str and v == "NONE": d[k] = None elif type(v) == str and v == "EMPTYARR": d[k] = np.array([]) elif isinstance(v, bytes): d[k] = v.decode('utf-8') else: d[k] = v elif k[:5] == "DICT_": d[k[5:]] = self._read_dict(item) elif k[:5] == "LIST_": tmpD = self._read_dict(item) d[k[5:]] = [tmpD[str(i)] for i in range(len(tmpD))] return d
[ "def", "_read_dict", "(", "self", ",", "f", ")", ":", "d", "=", "{", "}", "for", "k", ",", "item", "in", "f", ".", "items", "(", ")", ":", "if", "type", "(", "item", ")", "==", "h5py", ".", "_hl", ".", "dataset", ".", "Dataset", ":", "v", "...
Converts h5 groups to dictionaries
[ "Converts", "h5", "groups", "to", "dictionaries" ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/surfinBH.py#L77-L100
train
38,441
vijayvarma392/surfinBH
surfinBH/surfinBH.py
SurFinBH._load_scalar_fit
def _load_scalar_fit(self, fit_key=None, h5file=None, fit_data=None): """ Loads a single fit """ if (fit_key is None) ^ (h5file is None): raise ValueError("Either specify both fit_key and h5file, or" " neither") if not ((fit_key is None) ^ (fit_data is None)): raise ValueError("Specify exactly one of fit_key and fit_data.") if fit_data is None: fit_data = self._read_dict(h5file[fit_key]) if 'fitType' in fit_data.keys() and fit_data['fitType'] == 'GPR': fit = _eval_pysur.evaluate_fit.getGPRFitAndErrorEvaluator(fit_data) else: fit = _eval_pysur.evaluate_fit.getFitEvaluator(fit_data) return fit
python
def _load_scalar_fit(self, fit_key=None, h5file=None, fit_data=None): """ Loads a single fit """ if (fit_key is None) ^ (h5file is None): raise ValueError("Either specify both fit_key and h5file, or" " neither") if not ((fit_key is None) ^ (fit_data is None)): raise ValueError("Specify exactly one of fit_key and fit_data.") if fit_data is None: fit_data = self._read_dict(h5file[fit_key]) if 'fitType' in fit_data.keys() and fit_data['fitType'] == 'GPR': fit = _eval_pysur.evaluate_fit.getGPRFitAndErrorEvaluator(fit_data) else: fit = _eval_pysur.evaluate_fit.getFitEvaluator(fit_data) return fit
[ "def", "_load_scalar_fit", "(", "self", ",", "fit_key", "=", "None", ",", "h5file", "=", "None", ",", "fit_data", "=", "None", ")", ":", "if", "(", "fit_key", "is", "None", ")", "^", "(", "h5file", "is", "None", ")", ":", "raise", "ValueError", "(", ...
Loads a single fit
[ "Loads", "a", "single", "fit" ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/surfinBH.py#L103-L121
train
38,442
vijayvarma392/surfinBH
surfinBH/surfinBH.py
SurFinBH._load_vector_fit
def _load_vector_fit(self, fit_key, h5file): """ Loads a vector of fits """ vector_fit = [] for i in range(len(h5file[fit_key].keys())): fit_data = self._read_dict(h5file[fit_key]['comp_%d'%i]) vector_fit.append(self._load_scalar_fit(fit_data=fit_data)) return vector_fit
python
def _load_vector_fit(self, fit_key, h5file): """ Loads a vector of fits """ vector_fit = [] for i in range(len(h5file[fit_key].keys())): fit_data = self._read_dict(h5file[fit_key]['comp_%d'%i]) vector_fit.append(self._load_scalar_fit(fit_data=fit_data)) return vector_fit
[ "def", "_load_vector_fit", "(", "self", ",", "fit_key", ",", "h5file", ")", ":", "vector_fit", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "h5file", "[", "fit_key", "]", ".", "keys", "(", ")", ")", ")", ":", "fit_data", "=", "self", ...
Loads a vector of fits
[ "Loads", "a", "vector", "of", "fits" ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/surfinBH.py#L124-L131
train
38,443
vijayvarma392/surfinBH
surfinBH/surfinBH.py
SurFinBH._check_unused_kwargs
def _check_unused_kwargs(self, kwargs): """ Call this at the end of call module to check if all the kwargs have been used. Assumes kwargs were extracted using pop. """ if len(kwargs.keys()) != 0: unused = "" for k in kwargs.keys(): unused += "'%s', "%k if unused[-2:] == ", ": # get rid of trailing comma unused = unused[:-2] raise Exception('Unused keys in kwargs: %s'%unused)
python
def _check_unused_kwargs(self, kwargs): """ Call this at the end of call module to check if all the kwargs have been used. Assumes kwargs were extracted using pop. """ if len(kwargs.keys()) != 0: unused = "" for k in kwargs.keys(): unused += "'%s', "%k if unused[-2:] == ", ": # get rid of trailing comma unused = unused[:-2] raise Exception('Unused keys in kwargs: %s'%unused)
[ "def", "_check_unused_kwargs", "(", "self", ",", "kwargs", ")", ":", "if", "len", "(", "kwargs", ".", "keys", "(", ")", ")", "!=", "0", ":", "unused", "=", "\"\"", "for", "k", "in", "kwargs", ".", "keys", "(", ")", ":", "unused", "+=", "\"'%s', \""...
Call this at the end of call module to check if all the kwargs have been used. Assumes kwargs were extracted using pop.
[ "Call", "this", "at", "the", "end", "of", "call", "module", "to", "check", "if", "all", "the", "kwargs", "have", "been", "used", ".", "Assumes", "kwargs", "were", "extracted", "using", "pop", "." ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/surfinBH.py#L149-L159
train
38,444
vijayvarma392/surfinBH
surfinBH/surfinBH.py
SurFinBH._check_param_limits
def _check_param_limits(self, q, chiA, chiB, allow_extrap): """ Checks that params are within allowed range of paramters. Raises a warning if outside self.soft_param_lims limits and raises an error if outside self.hard_param_lims. If allow_extrap=True, skips these checks. """ if q < 1: raise ValueError('Mass ratio should be >= 1.') chiAmag = np.sqrt(np.sum(chiA**2)) chiBmag = np.sqrt(np.sum(chiB**2)) if chiAmag > 1 + 1e-14: raise ValueError('Spin magnitude of BhA > 1.') if chiBmag > 1 + 1e-14: raise ValueError('Spin magnitude of BhB > 1.') if self.aligned_spin_only: if np.sqrt(np.sum(chiA[:2]**2)) > 1e-14: raise ValueError('The x & y components of chiA should be zero.') if np.sqrt(np.sum(chiB[:2]**2)) > 1e-14: raise ValueError('The x & y components of chiB should be zero.') # Do not check param limits if allow_extrap=True if allow_extrap: return if q > self.hard_param_lims['q']+ 1e-14: raise ValueError('Mass ratio outside allowed range.') elif q > self.soft_param_lims['q']: warnings.warn('Mass ratio outside training range.') if chiAmag > self.hard_param_lims['chiAmag']+ 1e-14: raise ValueError('Spin magnitude of BhA outside allowed range.') elif chiAmag > self.soft_param_lims['chiAmag']: warnings.warn('Spin magnitude of BhA outside training range.') if chiBmag > self.hard_param_lims['chiBmag']+ 1e-14: raise ValueError('Spin magnitude of BhB outside allowed range.') elif chiBmag > self.soft_param_lims['chiBmag']: warnings.warn('Spin magnitude of BhB outside training range.')
python
def _check_param_limits(self, q, chiA, chiB, allow_extrap): """ Checks that params are within allowed range of paramters. Raises a warning if outside self.soft_param_lims limits and raises an error if outside self.hard_param_lims. If allow_extrap=True, skips these checks. """ if q < 1: raise ValueError('Mass ratio should be >= 1.') chiAmag = np.sqrt(np.sum(chiA**2)) chiBmag = np.sqrt(np.sum(chiB**2)) if chiAmag > 1 + 1e-14: raise ValueError('Spin magnitude of BhA > 1.') if chiBmag > 1 + 1e-14: raise ValueError('Spin magnitude of BhB > 1.') if self.aligned_spin_only: if np.sqrt(np.sum(chiA[:2]**2)) > 1e-14: raise ValueError('The x & y components of chiA should be zero.') if np.sqrt(np.sum(chiB[:2]**2)) > 1e-14: raise ValueError('The x & y components of chiB should be zero.') # Do not check param limits if allow_extrap=True if allow_extrap: return if q > self.hard_param_lims['q']+ 1e-14: raise ValueError('Mass ratio outside allowed range.') elif q > self.soft_param_lims['q']: warnings.warn('Mass ratio outside training range.') if chiAmag > self.hard_param_lims['chiAmag']+ 1e-14: raise ValueError('Spin magnitude of BhA outside allowed range.') elif chiAmag > self.soft_param_lims['chiAmag']: warnings.warn('Spin magnitude of BhA outside training range.') if chiBmag > self.hard_param_lims['chiBmag']+ 1e-14: raise ValueError('Spin magnitude of BhB outside allowed range.') elif chiBmag > self.soft_param_lims['chiBmag']: warnings.warn('Spin magnitude of BhB outside training range.')
[ "def", "_check_param_limits", "(", "self", ",", "q", ",", "chiA", ",", "chiB", ",", "allow_extrap", ")", ":", "if", "q", "<", "1", ":", "raise", "ValueError", "(", "'Mass ratio should be >= 1.'", ")", "chiAmag", "=", "np", ".", "sqrt", "(", "np", ".", ...
Checks that params are within allowed range of paramters. Raises a warning if outside self.soft_param_lims limits and raises an error if outside self.hard_param_lims. If allow_extrap=True, skips these checks.
[ "Checks", "that", "params", "are", "within", "allowed", "range", "of", "paramters", ".", "Raises", "a", "warning", "if", "outside", "self", ".", "soft_param_lims", "limits", "and", "raises", "an", "error", "if", "outside", "self", ".", "hard_param_lims", ".", ...
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/surfinBH.py#L162-L202
train
38,445
crossbario/zlmdb
zlmdb/_schema.py
Schema.slot
def slot(self, slot_index, marshal=None, unmarshal=None, build=None, cast=None, compress=False): """ Decorator for use on classes derived from zlmdb.PersistentMap. The decorator define slots in a LMDB database schema based on persistent maps, and slot configuration. :param slot_index: :param marshal: :param unmarshal: :param build: :param cast: :param compress: :return: """ def decorate(o): assert isinstance(o, PersistentMap) name = o.__class__.__name__ assert slot_index not in self._index_to_slot assert name not in self._name_to_slot o._zlmdb_slot = slot_index o._zlmdb_marshal = marshal o._zlmdb_unmarshal = unmarshal o._zlmdb_build = build o._zlmdb_cast = cast o._zlmdb_compress = compress _slot = Slot(slot_index, name, o) self._index_to_slot[slot_index] = _slot self._name_to_slot[name] = _slot return o return decorate
python
def slot(self, slot_index, marshal=None, unmarshal=None, build=None, cast=None, compress=False): """ Decorator for use on classes derived from zlmdb.PersistentMap. The decorator define slots in a LMDB database schema based on persistent maps, and slot configuration. :param slot_index: :param marshal: :param unmarshal: :param build: :param cast: :param compress: :return: """ def decorate(o): assert isinstance(o, PersistentMap) name = o.__class__.__name__ assert slot_index not in self._index_to_slot assert name not in self._name_to_slot o._zlmdb_slot = slot_index o._zlmdb_marshal = marshal o._zlmdb_unmarshal = unmarshal o._zlmdb_build = build o._zlmdb_cast = cast o._zlmdb_compress = compress _slot = Slot(slot_index, name, o) self._index_to_slot[slot_index] = _slot self._name_to_slot[name] = _slot return o return decorate
[ "def", "slot", "(", "self", ",", "slot_index", ",", "marshal", "=", "None", ",", "unmarshal", "=", "None", ",", "build", "=", "None", ",", "cast", "=", "None", ",", "compress", "=", "False", ")", ":", "def", "decorate", "(", "o", ")", ":", "assert"...
Decorator for use on classes derived from zlmdb.PersistentMap. The decorator define slots in a LMDB database schema based on persistent maps, and slot configuration. :param slot_index: :param marshal: :param unmarshal: :param build: :param cast: :param compress: :return:
[ "Decorator", "for", "use", "on", "classes", "derived", "from", "zlmdb", ".", "PersistentMap", ".", "The", "decorator", "define", "slots", "in", "a", "LMDB", "database", "schema", "based", "on", "persistent", "maps", "and", "slot", "configuration", "." ]
577e8ce9314484f1fd5092fb4eef70221bb1d030
https://github.com/crossbario/zlmdb/blob/577e8ce9314484f1fd5092fb4eef70221bb1d030/zlmdb/_schema.py#L101-L131
train
38,446
vijayvarma392/surfinBH
surfinBH/_dataPath.py
DataPath
def DataPath(): """ Return the default path for fit data h5 files""" return os.path.abspath('%s/data'%(os.path.dirname( \ os.path.realpath(__file__))))
python
def DataPath(): """ Return the default path for fit data h5 files""" return os.path.abspath('%s/data'%(os.path.dirname( \ os.path.realpath(__file__))))
[ "def", "DataPath", "(", ")", ":", "return", "os", ".", "path", ".", "abspath", "(", "'%s/data'", "%", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", ")", ")" ]
Return the default path for fit data h5 files
[ "Return", "the", "default", "path", "for", "fit", "data", "h5", "files" ]
9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb
https://github.com/vijayvarma392/surfinBH/blob/9f2d25d00f894ee2ce9ffbb02f4e4a41fa7989eb/surfinBH/_dataPath.py#L4-L7
train
38,447
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
init_services
def init_services(service_definitions, service_context, state_db, client_authn_factory=None): """ Initiates a set of services :param service_definitions: A dictionary cotaining service definitions :param service_context: A reference to the service context, this is the same for all service instances. :param state_db: A reference to the state database. Shared by all the services. :param client_authn_factory: A list of methods the services can use to authenticate the client to a service. :return: A dictionary, with service name as key and the service instance as value. """ service = {} for service_name, service_configuration in service_definitions.items(): try: kwargs = service_configuration['kwargs'] except KeyError: kwargs = {} kwargs.update({'service_context': service_context, 'state_db': state_db, 'client_authn_factory': client_authn_factory}) if isinstance(service_configuration['class'], str): _srv = util.importer(service_configuration['class'])(**kwargs) else: _srv = service_configuration['class'](**kwargs) try: service[_srv.service_name] = _srv except AttributeError: raise ValueError("Could not load '{}'".format(service_name)) return service
python
def init_services(service_definitions, service_context, state_db, client_authn_factory=None): """ Initiates a set of services :param service_definitions: A dictionary cotaining service definitions :param service_context: A reference to the service context, this is the same for all service instances. :param state_db: A reference to the state database. Shared by all the services. :param client_authn_factory: A list of methods the services can use to authenticate the client to a service. :return: A dictionary, with service name as key and the service instance as value. """ service = {} for service_name, service_configuration in service_definitions.items(): try: kwargs = service_configuration['kwargs'] except KeyError: kwargs = {} kwargs.update({'service_context': service_context, 'state_db': state_db, 'client_authn_factory': client_authn_factory}) if isinstance(service_configuration['class'], str): _srv = util.importer(service_configuration['class'])(**kwargs) else: _srv = service_configuration['class'](**kwargs) try: service[_srv.service_name] = _srv except AttributeError: raise ValueError("Could not load '{}'".format(service_name)) return service
[ "def", "init_services", "(", "service_definitions", ",", "service_context", ",", "state_db", ",", "client_authn_factory", "=", "None", ")", ":", "service", "=", "{", "}", "for", "service_name", ",", "service_configuration", "in", "service_definitions", ".", "items",...
Initiates a set of services :param service_definitions: A dictionary cotaining service definitions :param service_context: A reference to the service context, this is the same for all service instances. :param state_db: A reference to the state database. Shared by all the services. :param client_authn_factory: A list of methods the services can use to authenticate the client to a service. :return: A dictionary, with service name as key and the service instance as value.
[ "Initiates", "a", "set", "of", "services" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L514-L550
train
38,448
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.gather_request_args
def gather_request_args(self, **kwargs): """ Go through the attributes that the message class can contain and add values if they are missing but exists in the client info or when there are default values. :param kwargs: Initial set of attributes. :return: Possibly augmented set of attributes """ ar_args = kwargs.copy() # Go through the list of claims defined for the message class # there are a couple of places where informtation can be found # access them in the order of priority # 1. A keyword argument # 2. configured set of default attribute values # 3. default attribute values defined in the OIDC standard document for prop in self.msg_type.c_param.keys(): if prop in ar_args: continue else: try: ar_args[prop] = getattr(self.service_context, prop) except AttributeError: try: ar_args[prop] = self.conf['request_args'][prop] except KeyError: try: ar_args[prop] = self.service_context.register_args[ prop] except KeyError: try: ar_args[prop] = self.default_request_args[prop] except KeyError: pass return ar_args
python
def gather_request_args(self, **kwargs): """ Go through the attributes that the message class can contain and add values if they are missing but exists in the client info or when there are default values. :param kwargs: Initial set of attributes. :return: Possibly augmented set of attributes """ ar_args = kwargs.copy() # Go through the list of claims defined for the message class # there are a couple of places where informtation can be found # access them in the order of priority # 1. A keyword argument # 2. configured set of default attribute values # 3. default attribute values defined in the OIDC standard document for prop in self.msg_type.c_param.keys(): if prop in ar_args: continue else: try: ar_args[prop] = getattr(self.service_context, prop) except AttributeError: try: ar_args[prop] = self.conf['request_args'][prop] except KeyError: try: ar_args[prop] = self.service_context.register_args[ prop] except KeyError: try: ar_args[prop] = self.default_request_args[prop] except KeyError: pass return ar_args
[ "def", "gather_request_args", "(", "self", ",", "*", "*", "kwargs", ")", ":", "ar_args", "=", "kwargs", ".", "copy", "(", ")", "# Go through the list of claims defined for the message class", "# there are a couple of places where informtation can be found", "# access them in th...
Go through the attributes that the message class can contain and add values if they are missing but exists in the client info or when there are default values. :param kwargs: Initial set of attributes. :return: Possibly augmented set of attributes
[ "Go", "through", "the", "attributes", "that", "the", "message", "class", "can", "contain", "and", "add", "values", "if", "they", "are", "missing", "but", "exists", "in", "the", "client", "info", "or", "when", "there", "are", "default", "values", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L68-L104
train
38,449
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.method_args
def method_args(self, context, **kwargs): """ Collect the set of arguments that should be used by a set of methods :param context: Which service we're working for :param kwargs: A set of keyword arguments that are added at run-time. :return: A set of keyword arguments """ try: _args = self.conf[context].copy() except KeyError: _args = kwargs else: _args.update(kwargs) return _args
python
def method_args(self, context, **kwargs): """ Collect the set of arguments that should be used by a set of methods :param context: Which service we're working for :param kwargs: A set of keyword arguments that are added at run-time. :return: A set of keyword arguments """ try: _args = self.conf[context].copy() except KeyError: _args = kwargs else: _args.update(kwargs) return _args
[ "def", "method_args", "(", "self", ",", "context", ",", "*", "*", "kwargs", ")", ":", "try", ":", "_args", "=", "self", ".", "conf", "[", "context", "]", ".", "copy", "(", ")", "except", "KeyError", ":", "_args", "=", "kwargs", "else", ":", "_args"...
Collect the set of arguments that should be used by a set of methods :param context: Which service we're working for :param kwargs: A set of keyword arguments that are added at run-time. :return: A set of keyword arguments
[ "Collect", "the", "set", "of", "arguments", "that", "should", "be", "used", "by", "a", "set", "of", "methods" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L106-L120
train
38,450
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.do_pre_construct
def do_pre_construct(self, request_args, **kwargs): """ Will run the pre_construct methods one by one in the order given. :param request_args: Request arguments :param kwargs: Extra key word arguments :return: A tuple of request_args and post_args. post_args are to be used by the post_construct methods. """ _args = self.method_args('pre_construct', **kwargs) post_args = {} for meth in self.pre_construct: request_args, _post_args = meth(request_args, service=self, **_args) post_args.update(_post_args) return request_args, post_args
python
def do_pre_construct(self, request_args, **kwargs): """ Will run the pre_construct methods one by one in the order given. :param request_args: Request arguments :param kwargs: Extra key word arguments :return: A tuple of request_args and post_args. post_args are to be used by the post_construct methods. """ _args = self.method_args('pre_construct', **kwargs) post_args = {} for meth in self.pre_construct: request_args, _post_args = meth(request_args, service=self, **_args) post_args.update(_post_args) return request_args, post_args
[ "def", "do_pre_construct", "(", "self", ",", "request_args", ",", "*", "*", "kwargs", ")", ":", "_args", "=", "self", ".", "method_args", "(", "'pre_construct'", ",", "*", "*", "kwargs", ")", "post_args", "=", "{", "}", "for", "meth", "in", "self", "."...
Will run the pre_construct methods one by one in the order given. :param request_args: Request arguments :param kwargs: Extra key word arguments :return: A tuple of request_args and post_args. post_args are to be used by the post_construct methods.
[ "Will", "run", "the", "pre_construct", "methods", "one", "by", "one", "in", "the", "order", "given", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L122-L138
train
38,451
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.do_post_construct
def do_post_construct(self, request_args, **kwargs): """ Will run the post_construct methods one at the time in order. :param request_args: Request arguments :param kwargs: Arguments used by the post_construct method :return: Possible modified set of request arguments. """ _args = self.method_args('post_construct', **kwargs) for meth in self.post_construct: request_args = meth(request_args, service=self, **_args) return request_args
python
def do_post_construct(self, request_args, **kwargs): """ Will run the post_construct methods one at the time in order. :param request_args: Request arguments :param kwargs: Arguments used by the post_construct method :return: Possible modified set of request arguments. """ _args = self.method_args('post_construct', **kwargs) for meth in self.post_construct: request_args = meth(request_args, service=self, **_args) return request_args
[ "def", "do_post_construct", "(", "self", ",", "request_args", ",", "*", "*", "kwargs", ")", ":", "_args", "=", "self", ".", "method_args", "(", "'post_construct'", ",", "*", "*", "kwargs", ")", "for", "meth", "in", "self", ".", "post_construct", ":", "re...
Will run the post_construct methods one at the time in order. :param request_args: Request arguments :param kwargs: Arguments used by the post_construct method :return: Possible modified set of request arguments.
[ "Will", "run", "the", "post_construct", "methods", "one", "at", "the", "time", "in", "order", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L140-L153
train
38,452
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.construct
def construct(self, request_args=None, **kwargs): """ Instantiate the request as a message class instance with attribute values gathered in a pre_construct method or in the gather_request_args method. :param request_args: :param kwargs: extra keyword arguments :return: message class instance """ if request_args is None: request_args = {} # run the pre_construct methods. Will return a possibly new # set of request arguments but also a set of arguments to # be used by the post_construct methods. request_args, post_args = self.do_pre_construct(request_args, **kwargs) # If 'state' appears among the keyword argument and is not # expected to appear in the request, remove it. if 'state' in self.msg_type.c_param and 'state' in kwargs: # Don't overwrite something put there by the constructor if 'state' not in request_args: request_args['state'] = kwargs['state'] # logger.debug("request_args: %s" % sanitize(request_args)) _args = self.gather_request_args(**request_args) # logger.debug("kwargs: %s" % sanitize(kwargs)) # initiate the request as in an instance of the self.msg_type # message type request = self.msg_type(**_args) return self.do_post_construct(request, **post_args)
python
def construct(self, request_args=None, **kwargs): """ Instantiate the request as a message class instance with attribute values gathered in a pre_construct method or in the gather_request_args method. :param request_args: :param kwargs: extra keyword arguments :return: message class instance """ if request_args is None: request_args = {} # run the pre_construct methods. Will return a possibly new # set of request arguments but also a set of arguments to # be used by the post_construct methods. request_args, post_args = self.do_pre_construct(request_args, **kwargs) # If 'state' appears among the keyword argument and is not # expected to appear in the request, remove it. if 'state' in self.msg_type.c_param and 'state' in kwargs: # Don't overwrite something put there by the constructor if 'state' not in request_args: request_args['state'] = kwargs['state'] # logger.debug("request_args: %s" % sanitize(request_args)) _args = self.gather_request_args(**request_args) # logger.debug("kwargs: %s" % sanitize(kwargs)) # initiate the request as in an instance of the self.msg_type # message type request = self.msg_type(**_args) return self.do_post_construct(request, **post_args)
[ "def", "construct", "(", "self", ",", "request_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "request_args", "is", "None", ":", "request_args", "=", "{", "}", "# run the pre_construct methods. Will return a possibly new", "# set of request arguments but...
Instantiate the request as a message class instance with attribute values gathered in a pre_construct method or in the gather_request_args method. :param request_args: :param kwargs: extra keyword arguments :return: message class instance
[ "Instantiate", "the", "request", "as", "a", "message", "class", "instance", "with", "attribute", "values", "gathered", "in", "a", "pre_construct", "method", "or", "in", "the", "gather_request_args", "method", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L165-L199
train
38,453
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.init_authentication_method
def init_authentication_method(self, request, authn_method, http_args=None, **kwargs): """ Will run the proper client authentication method. Each such method will place the necessary information in the necessary place. A method may modify the request. :param request: The request, a Message class instance :param authn_method: Client authentication method :param http_args: HTTP header arguments :param kwargs: Extra keyword arguments :return: Extended set of HTTP header arguments """ if http_args is None: http_args = {} if authn_method: logger.debug('Client authn method: {}'.format(authn_method)) return self.client_authn_factory(authn_method).construct( request, self, http_args=http_args, **kwargs) else: return http_args
python
def init_authentication_method(self, request, authn_method, http_args=None, **kwargs): """ Will run the proper client authentication method. Each such method will place the necessary information in the necessary place. A method may modify the request. :param request: The request, a Message class instance :param authn_method: Client authentication method :param http_args: HTTP header arguments :param kwargs: Extra keyword arguments :return: Extended set of HTTP header arguments """ if http_args is None: http_args = {} if authn_method: logger.debug('Client authn method: {}'.format(authn_method)) return self.client_authn_factory(authn_method).construct( request, self, http_args=http_args, **kwargs) else: return http_args
[ "def", "init_authentication_method", "(", "self", ",", "request", ",", "authn_method", ",", "http_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "http_args", "is", "None", ":", "http_args", "=", "{", "}", "if", "authn_method", ":", "logger", ...
Will run the proper client authentication method. Each such method will place the necessary information in the necessary place. A method may modify the request. :param request: The request, a Message class instance :param authn_method: Client authentication method :param http_args: HTTP header arguments :param kwargs: Extra keyword arguments :return: Extended set of HTTP header arguments
[ "Will", "run", "the", "proper", "client", "authentication", "method", ".", "Each", "such", "method", "will", "place", "the", "necessary", "information", "in", "the", "necessary", "place", ".", "A", "method", "may", "modify", "the", "request", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L201-L222
train
38,454
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.construct_request
def construct_request(self, request_args=None, **kwargs): """ The method where everything is setup for sending the request. The request information is gathered and the where and how of sending the request is decided. :param request_args: Initial request arguments :param kwargs: Extra keyword arguments :return: A dictionary with the keys 'url' and possibly 'body', 'kwargs', 'request' and 'ht_args'. """ if request_args is None: request_args = {} # remove arguments that should not be included in the request # _args = dict( # [(k, v) for k, v in kwargs.items() if v and k not in SPECIAL_ARGS]) return self.construct(request_args, **kwargs)
python
def construct_request(self, request_args=None, **kwargs): """ The method where everything is setup for sending the request. The request information is gathered and the where and how of sending the request is decided. :param request_args: Initial request arguments :param kwargs: Extra keyword arguments :return: A dictionary with the keys 'url' and possibly 'body', 'kwargs', 'request' and 'ht_args'. """ if request_args is None: request_args = {} # remove arguments that should not be included in the request # _args = dict( # [(k, v) for k, v in kwargs.items() if v and k not in SPECIAL_ARGS]) return self.construct(request_args, **kwargs)
[ "def", "construct_request", "(", "self", ",", "request_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "request_args", "is", "None", ":", "request_args", "=", "{", "}", "# remove arguments that should not be included in the request", "# _args = dict(", ...
The method where everything is setup for sending the request. The request information is gathered and the where and how of sending the request is decided. :param request_args: Initial request arguments :param kwargs: Extra keyword arguments :return: A dictionary with the keys 'url' and possibly 'body', 'kwargs', 'request' and 'ht_args'.
[ "The", "method", "where", "everything", "is", "setup", "for", "sending", "the", "request", ".", "The", "request", "information", "is", "gathered", "and", "the", "where", "and", "how", "of", "sending", "the", "request", "is", "decided", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L224-L242
train
38,455
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.get_endpoint
def get_endpoint(self): """ Find the service endpoint :return: The service endpoint (a URL) """ if self.endpoint: return self.endpoint else: return self.service_context.provider_info[self.endpoint_name]
python
def get_endpoint(self): """ Find the service endpoint :return: The service endpoint (a URL) """ if self.endpoint: return self.endpoint else: return self.service_context.provider_info[self.endpoint_name]
[ "def", "get_endpoint", "(", "self", ")", ":", "if", "self", ".", "endpoint", ":", "return", "self", ".", "endpoint", "else", ":", "return", "self", ".", "service_context", ".", "provider_info", "[", "self", ".", "endpoint_name", "]" ]
Find the service endpoint :return: The service endpoint (a URL)
[ "Find", "the", "service", "endpoint" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L244-L253
train
38,456
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.get_authn_header
def get_authn_header(self, request, authn_method, **kwargs): """ Construct an authorization specification to be sent in the HTTP header. :param request: The service request :param authn_method: Which authentication/authorization method to use :param kwargs: Extra keyword arguments :return: A set of keyword arguments to be sent in the HTTP header. """ headers = {} # If I should deal with client authentication if authn_method: h_arg = self.init_authentication_method(request, authn_method, **kwargs) try: headers = h_arg['headers'] except KeyError: pass return headers
python
def get_authn_header(self, request, authn_method, **kwargs): """ Construct an authorization specification to be sent in the HTTP header. :param request: The service request :param authn_method: Which authentication/authorization method to use :param kwargs: Extra keyword arguments :return: A set of keyword arguments to be sent in the HTTP header. """ headers = {} # If I should deal with client authentication if authn_method: h_arg = self.init_authentication_method(request, authn_method, **kwargs) try: headers = h_arg['headers'] except KeyError: pass return headers
[ "def", "get_authn_header", "(", "self", ",", "request", ",", "authn_method", ",", "*", "*", "kwargs", ")", ":", "headers", "=", "{", "}", "# If I should deal with client authentication", "if", "authn_method", ":", "h_arg", "=", "self", ".", "init_authentication_me...
Construct an authorization specification to be sent in the HTTP header. :param request: The service request :param authn_method: Which authentication/authorization method to use :param kwargs: Extra keyword arguments :return: A set of keyword arguments to be sent in the HTTP header.
[ "Construct", "an", "authorization", "specification", "to", "be", "sent", "in", "the", "HTTP", "header", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L255-L275
train
38,457
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.get_request_parameters
def get_request_parameters(self, request_body_type="", method="", authn_method='', request_args=None, http_args=None, **kwargs): """ Builds the request message and constructs the HTTP headers. This is the starting point for a pipeline that will: - construct the request message - add/remove information to/from the request message in the way a specific client authentication method requires. - gather a set of HTTP headers like Content-type and Authorization. - serialize the request message into the necessary format (JSON, urlencoded, signed JWT) :param request_body_type: Which serialization to use for the HTTP body :param method: HTTP method used. :param authn_method: Client authentication method :param request_args: Message arguments :param http_args: Initial HTTP header arguments :param kwargs: extra keyword arguments :return: Dictionary with the necessary information for the HTTP request """ if not method: method = self.http_method if not authn_method: authn_method = self.get_authn_method() if not request_body_type: request_body_type = self.request_body_type request = self.construct_request(request_args=request_args, **kwargs) _info = {'method': method} _args = kwargs.copy() if self.service_context.issuer: _args['iss'] = self.service_context.issuer # Client authentication by usage of the Authorization HTTP header # or by modifying the request object _headers = self.get_authn_header(request, authn_method, authn_endpoint=self.endpoint_name, **_args) # Find out where to send this request try: endpoint_url = kwargs['endpoint'] except KeyError: endpoint_url = self.get_endpoint() _info['url'] = get_http_url(endpoint_url, request, method=method) # If there is to be a body part if method == 'POST': # How should it be serialized if request_body_type == 'urlencoded': content_type = URL_ENCODED elif request_body_type in ['jws', 'jwe', 'jose']: content_type = JOSE_ENCODED else: # request_body_type == 'json' content_type = JSON_ENCODED _info['body'] = get_http_body(request, 
content_type) _headers.update({'Content-Type': content_type}) if _headers: _info['headers'] = _headers return _info
python
def get_request_parameters(self, request_body_type="", method="", authn_method='', request_args=None, http_args=None, **kwargs): """ Builds the request message and constructs the HTTP headers. This is the starting point for a pipeline that will: - construct the request message - add/remove information to/from the request message in the way a specific client authentication method requires. - gather a set of HTTP headers like Content-type and Authorization. - serialize the request message into the necessary format (JSON, urlencoded, signed JWT) :param request_body_type: Which serialization to use for the HTTP body :param method: HTTP method used. :param authn_method: Client authentication method :param request_args: Message arguments :param http_args: Initial HTTP header arguments :param kwargs: extra keyword arguments :return: Dictionary with the necessary information for the HTTP request """ if not method: method = self.http_method if not authn_method: authn_method = self.get_authn_method() if not request_body_type: request_body_type = self.request_body_type request = self.construct_request(request_args=request_args, **kwargs) _info = {'method': method} _args = kwargs.copy() if self.service_context.issuer: _args['iss'] = self.service_context.issuer # Client authentication by usage of the Authorization HTTP header # or by modifying the request object _headers = self.get_authn_header(request, authn_method, authn_endpoint=self.endpoint_name, **_args) # Find out where to send this request try: endpoint_url = kwargs['endpoint'] except KeyError: endpoint_url = self.get_endpoint() _info['url'] = get_http_url(endpoint_url, request, method=method) # If there is to be a body part if method == 'POST': # How should it be serialized if request_body_type == 'urlencoded': content_type = URL_ENCODED elif request_body_type in ['jws', 'jwe', 'jose']: content_type = JOSE_ENCODED else: # request_body_type == 'json' content_type = JSON_ENCODED _info['body'] = get_http_body(request, 
content_type) _headers.update({'Content-Type': content_type}) if _headers: _info['headers'] = _headers return _info
[ "def", "get_request_parameters", "(", "self", ",", "request_body_type", "=", "\"\"", ",", "method", "=", "\"\"", ",", "authn_method", "=", "''", ",", "request_args", "=", "None", ",", "http_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "no...
Builds the request message and constructs the HTTP headers. This is the starting point for a pipeline that will: - construct the request message - add/remove information to/from the request message in the way a specific client authentication method requires. - gather a set of HTTP headers like Content-type and Authorization. - serialize the request message into the necessary format (JSON, urlencoded, signed JWT) :param request_body_type: Which serialization to use for the HTTP body :param method: HTTP method used. :param authn_method: Client authentication method :param request_args: Message arguments :param http_args: Initial HTTP header arguments :param kwargs: extra keyword arguments :return: Dictionary with the necessary information for the HTTP request
[ "Builds", "the", "request", "message", "and", "constructs", "the", "HTTP", "headers", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L286-L355
train
38,458
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.get_urlinfo
def get_urlinfo(info): """ Pick out the fragment or query part from a URL. :param info: A URL possibly containing a query or a fragment part :return: the query/fragment part """ # If info is a whole URL pick out the query or fragment part if '?' in info or '#' in info: parts = urlparse(info) scheme, netloc, path, params, query, fragment = parts[:6] # either query of fragment if query: info = query else: info = fragment return info
python
def get_urlinfo(info): """ Pick out the fragment or query part from a URL. :param info: A URL possibly containing a query or a fragment part :return: the query/fragment part """ # If info is a whole URL pick out the query or fragment part if '?' in info or '#' in info: parts = urlparse(info) scheme, netloc, path, params, query, fragment = parts[:6] # either query of fragment if query: info = query else: info = fragment return info
[ "def", "get_urlinfo", "(", "info", ")", ":", "# If info is a whole URL pick out the query or fragment part", "if", "'?'", "in", "info", "or", "'#'", "in", "info", ":", "parts", "=", "urlparse", "(", "info", ")", "scheme", ",", "netloc", ",", "path", ",", "para...
Pick out the fragment or query part from a URL. :param info: A URL possibly containing a query or a fragment part :return: the query/fragment part
[ "Pick", "out", "the", "fragment", "or", "query", "part", "from", "a", "URL", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L360-L376
train
38,459
openid/JWTConnect-Python-OidcService
src/oidcservice/service.py
Service.get_conf_attr
def get_conf_attr(self, attr, default=None): """ Get the value of a attribute in the configuration :param attr: The attribute :param default: If the attribute doesn't appear in the configuration return this value :return: The value of attribute in the configuration or the default value """ if attr in self.conf: return self.conf[attr] else: return default
python
def get_conf_attr(self, attr, default=None): """ Get the value of a attribute in the configuration :param attr: The attribute :param default: If the attribute doesn't appear in the configuration return this value :return: The value of attribute in the configuration or the default value """ if attr in self.conf: return self.conf[attr] else: return default
[ "def", "get_conf_attr", "(", "self", ",", "attr", ",", "default", "=", "None", ")", ":", "if", "attr", "in", "self", ".", "conf", ":", "return", "self", ".", "conf", "[", "attr", "]", "else", ":", "return", "default" ]
Get the value of a attribute in the configuration :param attr: The attribute :param default: If the attribute doesn't appear in the configuration return this value :return: The value of attribute in the configuration or the default value
[ "Get", "the", "value", "of", "a", "attribute", "in", "the", "configuration" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service.py#L498-L511
train
38,460
openid/JWTConnect-Python-OidcService
src/oidcservice/oauth2/authorization.py
Authorization.post_parse_response
def post_parse_response(self, response, **kwargs): """ Add scope claim to response, from the request, if not present in the response :param response: The response :param kwargs: Extra Keyword arguments :return: A possibly augmented response """ if "scope" not in response: try: _key = kwargs['state'] except KeyError: pass else: if _key: item = self.get_item(oauth2.AuthorizationRequest, 'auth_request', _key) try: response["scope"] = item["scope"] except KeyError: pass return response
python
def post_parse_response(self, response, **kwargs): """ Add scope claim to response, from the request, if not present in the response :param response: The response :param kwargs: Extra Keyword arguments :return: A possibly augmented response """ if "scope" not in response: try: _key = kwargs['state'] except KeyError: pass else: if _key: item = self.get_item(oauth2.AuthorizationRequest, 'auth_request', _key) try: response["scope"] = item["scope"] except KeyError: pass return response
[ "def", "post_parse_response", "(", "self", ",", "response", ",", "*", "*", "kwargs", ")", ":", "if", "\"scope\"", "not", "in", "response", ":", "try", ":", "_key", "=", "kwargs", "[", "'state'", "]", "except", "KeyError", ":", "pass", "else", ":", "if"...
Add scope claim to response, from the request, if not present in the response :param response: The response :param kwargs: Extra Keyword arguments :return: A possibly augmented response
[ "Add", "scope", "claim", "to", "response", "from", "the", "request", "if", "not", "present", "in", "the", "response" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/oauth2/authorization.py#L54-L77
train
38,461
openid/JWTConnect-Python-OidcService
src/oidcservice/oidc/end_session.py
EndSession.get_id_token_hint
def get_id_token_hint(self, request_args=None, **kwargs): """ Add id_token_hint to request :param request_args: :param kwargs: :return: """ request_args = self.multiple_extend_request_args( request_args, kwargs['state'], ['id_token'], ['auth_response', 'token_response', 'refresh_token_response'], orig=True ) try: request_args['id_token_hint'] = request_args['id_token'] except KeyError: pass else: del request_args['id_token'] return request_args, {}
python
def get_id_token_hint(self, request_args=None, **kwargs): """ Add id_token_hint to request :param request_args: :param kwargs: :return: """ request_args = self.multiple_extend_request_args( request_args, kwargs['state'], ['id_token'], ['auth_response', 'token_response', 'refresh_token_response'], orig=True ) try: request_args['id_token_hint'] = request_args['id_token'] except KeyError: pass else: del request_args['id_token'] return request_args, {}
[ "def", "get_id_token_hint", "(", "self", ",", "request_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "request_args", "=", "self", ".", "multiple_extend_request_args", "(", "request_args", ",", "kwargs", "[", "'state'", "]", ",", "[", "'id_token'", "...
Add id_token_hint to request :param request_args: :param kwargs: :return:
[ "Add", "id_token_hint", "to", "request" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/oidc/end_session.py#L32-L53
train
38,462
openid/JWTConnect-Python-OidcService
src/oidcservice/state_interface.py
StateInterface.get_state
def get_state(self, key): """ Get the state connected to a given key. :param key: Key into the state database :return: A :py:class:´oidcservice.state_interface.State` instance """ _data = self.state_db.get(key) if not _data: raise KeyError(key) else: return State().from_json(_data)
python
def get_state(self, key): """ Get the state connected to a given key. :param key: Key into the state database :return: A :py:class:´oidcservice.state_interface.State` instance """ _data = self.state_db.get(key) if not _data: raise KeyError(key) else: return State().from_json(_data)
[ "def", "get_state", "(", "self", ",", "key", ")", ":", "_data", "=", "self", ".", "state_db", ".", "get", "(", "key", ")", "if", "not", "_data", ":", "raise", "KeyError", "(", "key", ")", "else", ":", "return", "State", "(", ")", ".", "from_json", ...
Get the state connected to a given key. :param key: Key into the state database :return: A :py:class:´oidcservice.state_interface.State` instance
[ "Get", "the", "state", "connected", "to", "a", "given", "key", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/state_interface.py#L56-L67
train
38,463
openid/JWTConnect-Python-OidcService
src/oidcservice/state_interface.py
StateInterface.store_item
def store_item(self, item, item_type, key): """ Store a service response. :param item: The item as a :py:class:`oidcmsg.message.Message` subclass instance or a JSON document. :param item_type: The type of request or response :param key: The key under which the information should be stored in the state database """ try: _state = self.get_state(key) except KeyError: _state = State() try: _state[item_type] = item.to_json() except AttributeError: _state[item_type] = item self.state_db.set(key, _state.to_json())
python
def store_item(self, item, item_type, key): """ Store a service response. :param item: The item as a :py:class:`oidcmsg.message.Message` subclass instance or a JSON document. :param item_type: The type of request or response :param key: The key under which the information should be stored in the state database """ try: _state = self.get_state(key) except KeyError: _state = State() try: _state[item_type] = item.to_json() except AttributeError: _state[item_type] = item self.state_db.set(key, _state.to_json())
[ "def", "store_item", "(", "self", ",", "item", ",", "item_type", ",", "key", ")", ":", "try", ":", "_state", "=", "self", ".", "get_state", "(", "key", ")", "except", "KeyError", ":", "_state", "=", "State", "(", ")", "try", ":", "_state", "[", "it...
Store a service response. :param item: The item as a :py:class:`oidcmsg.message.Message` subclass instance or a JSON document. :param item_type: The type of request or response :param key: The key under which the information should be stored in the state database
[ "Store", "a", "service", "response", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/state_interface.py#L69-L89
train
38,464
openid/JWTConnect-Python-OidcService
src/oidcservice/state_interface.py
StateInterface.get_iss
def get_iss(self, key): """ Get the Issuer ID :param key: Key to the information in the state database :return: The issuer ID """ _state = self.get_state(key) if not _state: raise KeyError(key) return _state['iss']
python
def get_iss(self, key): """ Get the Issuer ID :param key: Key to the information in the state database :return: The issuer ID """ _state = self.get_state(key) if not _state: raise KeyError(key) return _state['iss']
[ "def", "get_iss", "(", "self", ",", "key", ")", ":", "_state", "=", "self", ".", "get_state", "(", "key", ")", "if", "not", "_state", ":", "raise", "KeyError", "(", "key", ")", "return", "_state", "[", "'iss'", "]" ]
Get the Issuer ID :param key: Key to the information in the state database :return: The issuer ID
[ "Get", "the", "Issuer", "ID" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/state_interface.py#L91-L101
train
38,465
openid/JWTConnect-Python-OidcService
src/oidcservice/state_interface.py
StateInterface.extend_request_args
def extend_request_args(self, args, item_cls, item_type, key, parameters, orig=False): """ Add a set of parameters and their value to a set of request arguments. :param args: A dictionary :param item_cls: The :py:class:`oidcmsg.message.Message` subclass that describes the item :param item_type: The type of item, this is one of the parameter names in the :py:class:`oidcservice.state_interface.State` class. :param key: The key to the information in the database :param parameters: A list of parameters who's values this method will return. :param orig: Where the value of a claim is a signed JWT return that. :return: A dictionary with keys from the list of parameters and values being the values of those parameters in the item. If the parameter does not a appear in the item it will not appear in the returned dictionary. """ try: item = self.get_item(item_cls, item_type, key) except KeyError: pass else: for parameter in parameters: if orig: try: args[parameter] = item[parameter] except KeyError: pass else: try: args[parameter] = item[verified_claim_name(parameter)] except KeyError: try: args[parameter] = item[parameter] except KeyError: pass return args
python
def extend_request_args(self, args, item_cls, item_type, key, parameters, orig=False): """ Add a set of parameters and their value to a set of request arguments. :param args: A dictionary :param item_cls: The :py:class:`oidcmsg.message.Message` subclass that describes the item :param item_type: The type of item, this is one of the parameter names in the :py:class:`oidcservice.state_interface.State` class. :param key: The key to the information in the database :param parameters: A list of parameters who's values this method will return. :param orig: Where the value of a claim is a signed JWT return that. :return: A dictionary with keys from the list of parameters and values being the values of those parameters in the item. If the parameter does not a appear in the item it will not appear in the returned dictionary. """ try: item = self.get_item(item_cls, item_type, key) except KeyError: pass else: for parameter in parameters: if orig: try: args[parameter] = item[parameter] except KeyError: pass else: try: args[parameter] = item[verified_claim_name(parameter)] except KeyError: try: args[parameter] = item[parameter] except KeyError: pass return args
[ "def", "extend_request_args", "(", "self", ",", "args", ",", "item_cls", ",", "item_type", ",", "key", ",", "parameters", ",", "orig", "=", "False", ")", ":", "try", ":", "item", "=", "self", ".", "get_item", "(", "item_cls", ",", "item_type", ",", "ke...
Add a set of parameters and their value to a set of request arguments. :param args: A dictionary :param item_cls: The :py:class:`oidcmsg.message.Message` subclass that describes the item :param item_type: The type of item, this is one of the parameter names in the :py:class:`oidcservice.state_interface.State` class. :param key: The key to the information in the database :param parameters: A list of parameters who's values this method will return. :param orig: Where the value of a claim is a signed JWT return that. :return: A dictionary with keys from the list of parameters and values being the values of those parameters in the item. If the parameter does not a appear in the item it will not appear in the returned dictionary.
[ "Add", "a", "set", "of", "parameters", "and", "their", "value", "to", "a", "set", "of", "request", "arguments", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/state_interface.py#L120-L160
train
38,466
openid/JWTConnect-Python-OidcService
src/oidcservice/state_interface.py
StateInterface.get_state_by_X
def get_state_by_X(self, x, xtyp): """ Find the state value by providing the x value. Will raise an exception if the x value is absent from the state data base. :param x: The x value :return: The state value """ _state = self.state_db.get(KEY_PATTERN[xtyp].format(x)) if _state: return _state else: raise KeyError('Unknown {}: "{}"'.format(xtyp, x))
python
def get_state_by_X(self, x, xtyp): """ Find the state value by providing the x value. Will raise an exception if the x value is absent from the state data base. :param x: The x value :return: The state value """ _state = self.state_db.get(KEY_PATTERN[xtyp].format(x)) if _state: return _state else: raise KeyError('Unknown {}: "{}"'.format(xtyp, x))
[ "def", "get_state_by_X", "(", "self", ",", "x", ",", "xtyp", ")", ":", "_state", "=", "self", ".", "state_db", ".", "get", "(", "KEY_PATTERN", "[", "xtyp", "]", ".", "format", "(", "x", ")", ")", "if", "_state", ":", "return", "_state", "else", ":"...
Find the state value by providing the x value. Will raise an exception if the x value is absent from the state data base. :param x: The x value :return: The state value
[ "Find", "the", "state", "value", "by", "providing", "the", "x", "value", ".", "Will", "raise", "an", "exception", "if", "the", "x", "value", "is", "absent", "from", "the", "state", "data", "base", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/state_interface.py#L226-L239
train
38,467
openid/JWTConnect-Python-OidcService
src/oidcservice/service_context.py
ServiceContext.import_keys
def import_keys(self, keyspec): """ The client needs it's own set of keys. It can either dynamically create them or load them from local storage. This method can also fetch other entities keys provided the URL points to a JWKS. :param keyspec: """ for where, spec in keyspec.items(): if where == 'file': for typ, files in spec.items(): if typ == 'rsa': for fil in files: _key = RSAKey( key=import_private_rsa_key_from_file(fil), use='sig') _kb = KeyBundle() _kb.append(_key) self.keyjar.add_kb('', _kb) elif where == 'url': for iss, url in spec.items(): kb = KeyBundle(source=url) self.keyjar.add_kb(iss, kb)
python
def import_keys(self, keyspec): """ The client needs it's own set of keys. It can either dynamically create them or load them from local storage. This method can also fetch other entities keys provided the URL points to a JWKS. :param keyspec: """ for where, spec in keyspec.items(): if where == 'file': for typ, files in spec.items(): if typ == 'rsa': for fil in files: _key = RSAKey( key=import_private_rsa_key_from_file(fil), use='sig') _kb = KeyBundle() _kb.append(_key) self.keyjar.add_kb('', _kb) elif where == 'url': for iss, url in spec.items(): kb = KeyBundle(source=url) self.keyjar.add_kb(iss, kb)
[ "def", "import_keys", "(", "self", ",", "keyspec", ")", ":", "for", "where", ",", "spec", "in", "keyspec", ".", "items", "(", ")", ":", "if", "where", "==", "'file'", ":", "for", "typ", ",", "files", "in", "spec", ".", "items", "(", ")", ":", "if...
The client needs it's own set of keys. It can either dynamically create them or load them from local storage. This method can also fetch other entities keys provided the URL points to a JWKS. :param keyspec:
[ "The", "client", "needs", "it", "s", "own", "set", "of", "keys", ".", "It", "can", "either", "dynamically", "create", "them", "or", "load", "them", "from", "local", "storage", ".", "This", "method", "can", "also", "fetch", "other", "entities", "keys", "p...
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/service_context.py#L200-L223
train
38,468
openid/JWTConnect-Python-OidcService
src/oidcservice/client_auth.py
assertion_jwt
def assertion_jwt(client_id, keys, audience, algorithm, lifetime=600): """ Create a signed Json Web Token containing some information. :param client_id: The Client ID :param keys: Signing keys :param audience: Who is the receivers for this assertion :param algorithm: Signing algorithm :param lifetime: The lifetime of the signed Json Web Token :return: A Signed Json Web Token """ _now = utc_time_sans_frac() at = AuthnToken(iss=client_id, sub=client_id, aud=audience, jti=rndstr(32), exp=_now + lifetime, iat=_now) logger.debug('AuthnToken: {}'.format(at.to_dict())) return at.to_jwt(key=keys, algorithm=algorithm)
python
def assertion_jwt(client_id, keys, audience, algorithm, lifetime=600): """ Create a signed Json Web Token containing some information. :param client_id: The Client ID :param keys: Signing keys :param audience: Who is the receivers for this assertion :param algorithm: Signing algorithm :param lifetime: The lifetime of the signed Json Web Token :return: A Signed Json Web Token """ _now = utc_time_sans_frac() at = AuthnToken(iss=client_id, sub=client_id, aud=audience, jti=rndstr(32), exp=_now + lifetime, iat=_now) logger.debug('AuthnToken: {}'.format(at.to_dict())) return at.to_jwt(key=keys, algorithm=algorithm)
[ "def", "assertion_jwt", "(", "client_id", ",", "keys", ",", "audience", ",", "algorithm", ",", "lifetime", "=", "600", ")", ":", "_now", "=", "utc_time_sans_frac", "(", ")", "at", "=", "AuthnToken", "(", "iss", "=", "client_id", ",", "sub", "=", "client_...
Create a signed Json Web Token containing some information. :param client_id: The Client ID :param keys: Signing keys :param audience: Who is the receivers for this assertion :param algorithm: Signing algorithm :param lifetime: The lifetime of the signed Json Web Token :return: A Signed Json Web Token
[ "Create", "a", "signed", "Json", "Web", "Token", "containing", "some", "information", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/client_auth.py#L36-L53
train
38,469
openid/JWTConnect-Python-OidcService
src/oidcservice/client_auth.py
valid_service_context
def valid_service_context(service_context, when=0): """ Check if the client_secret has expired :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :param when: A time stamp against which the expiration time is to be checked :return: True if the client_secret is still valid """ eta = getattr(service_context, 'client_secret_expires_at', 0) now = when or utc_time_sans_frac() if eta != 0 and eta < now: return False return True
python
def valid_service_context(service_context, when=0): """ Check if the client_secret has expired :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :param when: A time stamp against which the expiration time is to be checked :return: True if the client_secret is still valid """ eta = getattr(service_context, 'client_secret_expires_at', 0) now = when or utc_time_sans_frac() if eta != 0 and eta < now: return False return True
[ "def", "valid_service_context", "(", "service_context", ",", "when", "=", "0", ")", ":", "eta", "=", "getattr", "(", "service_context", ",", "'client_secret_expires_at'", ",", "0", ")", "now", "=", "when", "or", "utc_time_sans_frac", "(", ")", "if", "eta", "...
Check if the client_secret has expired :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :param when: A time stamp against which the expiration time is to be checked :return: True if the client_secret is still valid
[ "Check", "if", "the", "client_secret", "has", "expired" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/client_auth.py#L491-L504
train
38,470
openid/JWTConnect-Python-OidcService
src/oidcservice/client_auth.py
ClientSecretBasic.construct
def construct(self, request, service=None, http_args=None, **kwargs): """ Construct a dictionary to be added to the HTTP request headers :param request: The request :param service: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :return: dictionary of HTTP arguments """ if http_args is None: http_args = {} if "headers" not in http_args: http_args["headers"] = {} # get the username (client_id) and the password (client_secret) try: passwd = kwargs["password"] except KeyError: try: passwd = request["client_secret"] except KeyError: passwd = service.service_context.client_secret try: user = kwargs["user"] except KeyError: user = service.service_context.client_id # The credential is username and password concatenated with a ':' # in between and then base 64 encoded becomes the authentication # token. credentials = "{}:{}".format(quote_plus(user), quote_plus(passwd)) authz = base64.urlsafe_b64encode(credentials.encode("utf-8")).decode( "utf-8") http_args["headers"]["Authorization"] = "Basic {}".format(authz) # If client_secret was part of the request message instance remove it try: del request["client_secret"] except (KeyError, TypeError): pass # If we're doing an access token request with an authorization code # then we should add client_id to the request if it's not already # there if isinstance(request, AccessTokenRequest) and request[ 'grant_type'] == 'authorization_code': if 'client_id' not in request: try: request['client_id'] = service.service_context.client_id except AttributeError: pass else: # remove client_id if not required by the request definition try: _req = request.c_param["client_id"][VREQUIRED] except (KeyError, AttributeError): _req = False # if it's not required remove it if not _req: try: del request["client_id"] except KeyError: pass return http_args
python
def construct(self, request, service=None, http_args=None, **kwargs): """ Construct a dictionary to be added to the HTTP request headers :param request: The request :param service: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :return: dictionary of HTTP arguments """ if http_args is None: http_args = {} if "headers" not in http_args: http_args["headers"] = {} # get the username (client_id) and the password (client_secret) try: passwd = kwargs["password"] except KeyError: try: passwd = request["client_secret"] except KeyError: passwd = service.service_context.client_secret try: user = kwargs["user"] except KeyError: user = service.service_context.client_id # The credential is username and password concatenated with a ':' # in between and then base 64 encoded becomes the authentication # token. credentials = "{}:{}".format(quote_plus(user), quote_plus(passwd)) authz = base64.urlsafe_b64encode(credentials.encode("utf-8")).decode( "utf-8") http_args["headers"]["Authorization"] = "Basic {}".format(authz) # If client_secret was part of the request message instance remove it try: del request["client_secret"] except (KeyError, TypeError): pass # If we're doing an access token request with an authorization code # then we should add client_id to the request if it's not already # there if isinstance(request, AccessTokenRequest) and request[ 'grant_type'] == 'authorization_code': if 'client_id' not in request: try: request['client_id'] = service.service_context.client_id except AttributeError: pass else: # remove client_id if not required by the request definition try: _req = request.c_param["client_id"][VREQUIRED] except (KeyError, AttributeError): _req = False # if it's not required remove it if not _req: try: del request["client_id"] except KeyError: pass return http_args
[ "def", "construct", "(", "self", ",", "request", ",", "service", "=", "None", ",", "http_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "http_args", "is", "None", ":", "http_args", "=", "{", "}", "if", "\"headers\"", "not", "in", "http_...
Construct a dictionary to be added to the HTTP request headers :param request: The request :param service: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :return: dictionary of HTTP arguments
[ "Construct", "a", "dictionary", "to", "be", "added", "to", "the", "HTTP", "request", "headers" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/client_auth.py#L82-L151
train
38,471
openid/JWTConnect-Python-OidcService
src/oidcservice/client_auth.py
BearerBody.construct
def construct(self, request, service=None, http_args=None, **kwargs): """ Will add a token to the request if not present :param request: The request :param service_context: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :param kwargs: extra keyword arguments :return: A possibly modified dictionary with HTTP arguments. """ _acc_token = '' for _token_type in ['access_token', 'refresh_token']: _acc_token = find_token(request, _token_type, service, **kwargs) if _acc_token: break if not _acc_token: raise KeyError('No access or refresh token available') else: request["access_token"] = _acc_token return http_args
python
def construct(self, request, service=None, http_args=None, **kwargs): """ Will add a token to the request if not present :param request: The request :param service_context: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :param kwargs: extra keyword arguments :return: A possibly modified dictionary with HTTP arguments. """ _acc_token = '' for _token_type in ['access_token', 'refresh_token']: _acc_token = find_token(request, _token_type, service, **kwargs) if _acc_token: break if not _acc_token: raise KeyError('No access or refresh token available') else: request["access_token"] = _acc_token return http_args
[ "def", "construct", "(", "self", ",", "request", ",", "service", "=", "None", ",", "http_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "_acc_token", "=", "''", "for", "_token_type", "in", "[", "'access_token'", ",", "'refresh_token'", "]", ":", ...
Will add a token to the request if not present :param request: The request :param service_context: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :param kwargs: extra keyword arguments :return: A possibly modified dictionary with HTTP arguments.
[ "Will", "add", "a", "token", "to", "the", "request", "if", "not", "present" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/client_auth.py#L264-L287
train
38,472
openid/JWTConnect-Python-OidcService
src/oidcservice/client_auth.py
JWSAuthnMethod.choose_algorithm
def choose_algorithm(self, context, **kwargs): """ Pick signing algorithm :param context: Signing context :param kwargs: extra keyword arguments :return: Name of a signing algorithm """ try: algorithm = kwargs["algorithm"] except KeyError: # different contexts uses different signing algorithms algorithm = DEF_SIGN_ALG[context] if not algorithm: raise AuthnFailure("Missing algorithm specification") return algorithm
python
def choose_algorithm(self, context, **kwargs): """ Pick signing algorithm :param context: Signing context :param kwargs: extra keyword arguments :return: Name of a signing algorithm """ try: algorithm = kwargs["algorithm"] except KeyError: # different contexts uses different signing algorithms algorithm = DEF_SIGN_ALG[context] if not algorithm: raise AuthnFailure("Missing algorithm specification") return algorithm
[ "def", "choose_algorithm", "(", "self", ",", "context", ",", "*", "*", "kwargs", ")", ":", "try", ":", "algorithm", "=", "kwargs", "[", "\"algorithm\"", "]", "except", "KeyError", ":", "# different contexts uses different signing algorithms", "algorithm", "=", "DE...
Pick signing algorithm :param context: Signing context :param kwargs: extra keyword arguments :return: Name of a signing algorithm
[ "Pick", "signing", "algorithm" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/client_auth.py#L314-L329
train
38,473
openid/JWTConnect-Python-OidcService
src/oidcservice/client_auth.py
JWSAuthnMethod.get_signing_key
def get_signing_key(self, algorithm, service_context): """ Pick signing key based on signing algorithm to be used :param algorithm: Signing algorithm :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :return: A key """ return service_context.keyjar.get_signing_key( alg2keytype(algorithm), alg=algorithm)
python
def get_signing_key(self, algorithm, service_context): """ Pick signing key based on signing algorithm to be used :param algorithm: Signing algorithm :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :return: A key """ return service_context.keyjar.get_signing_key( alg2keytype(algorithm), alg=algorithm)
[ "def", "get_signing_key", "(", "self", ",", "algorithm", ",", "service_context", ")", ":", "return", "service_context", ".", "keyjar", ".", "get_signing_key", "(", "alg2keytype", "(", "algorithm", ")", ",", "alg", "=", "algorithm", ")" ]
Pick signing key based on signing algorithm to be used :param algorithm: Signing algorithm :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :return: A key
[ "Pick", "signing", "key", "based", "on", "signing", "algorithm", "to", "be", "used" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/client_auth.py#L331-L341
train
38,474
openid/JWTConnect-Python-OidcService
src/oidcservice/client_auth.py
JWSAuthnMethod.get_key_by_kid
def get_key_by_kid(self, kid, algorithm, service_context): """ Pick a key that matches a given key ID and signing algorithm. :param kid: Key ID :param algorithm: Signing algorithm :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :return: A matching key """ _key = service_context.keyjar.get_key_by_kid(kid) if _key: ktype = alg2keytype(algorithm) if _key.kty != ktype: raise NoMatchingKey("Wrong key type") else: return _key else: raise NoMatchingKey("No key with kid:%s" % kid)
python
def get_key_by_kid(self, kid, algorithm, service_context): """ Pick a key that matches a given key ID and signing algorithm. :param kid: Key ID :param algorithm: Signing algorithm :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :return: A matching key """ _key = service_context.keyjar.get_key_by_kid(kid) if _key: ktype = alg2keytype(algorithm) if _key.kty != ktype: raise NoMatchingKey("Wrong key type") else: return _key else: raise NoMatchingKey("No key with kid:%s" % kid)
[ "def", "get_key_by_kid", "(", "self", ",", "kid", ",", "algorithm", ",", "service_context", ")", ":", "_key", "=", "service_context", ".", "keyjar", ".", "get_key_by_kid", "(", "kid", ")", "if", "_key", ":", "ktype", "=", "alg2keytype", "(", "algorithm", "...
Pick a key that matches a given key ID and signing algorithm. :param kid: Key ID :param algorithm: Signing algorithm :param service_context: A :py:class:`oidcservice.service_context.ServiceContext` instance :return: A matching key
[ "Pick", "a", "key", "that", "matches", "a", "given", "key", "ID", "and", "signing", "algorithm", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/client_auth.py#L343-L361
train
38,475
openid/JWTConnect-Python-OidcService
src/oidcservice/client_auth.py
JWSAuthnMethod.construct
def construct(self, request, service=None, http_args=None, **kwargs): """ Constructs a client assertion and signs it with a key. The request is modified as a side effect. :param request: The request :param service: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :param kwargs: Extra arguments :return: Constructed HTTP arguments, in this case none """ if 'client_assertion' in kwargs: request["client_assertion"] = kwargs['client_assertion'] if 'client_assertion_type' in kwargs: request[ 'client_assertion_type'] = kwargs['client_assertion_type'] else: request["client_assertion_type"] = JWT_BEARER elif 'client_assertion' in request: if 'client_assertion_type' not in request: request["client_assertion_type"] = JWT_BEARER else: algorithm = None _context = service.service_context # audience for the signed JWT depends on which endpoint # we're talking to. if kwargs['authn_endpoint'] in ['token_endpoint']: try: algorithm = _context.behaviour[ 'token_endpoint_auth_signing_alg'] except (KeyError, AttributeError): pass audience = _context.provider_info['token_endpoint'] else: audience = _context.provider_info['issuer'] if not algorithm: algorithm = self.choose_algorithm(**kwargs) ktype = alg2keytype(algorithm) try: if 'kid' in kwargs: signing_key = [self.get_key_by_kid(kwargs["kid"], algorithm, _context)] elif ktype in _context.kid["sig"]: try: signing_key = [self.get_key_by_kid( _context.kid["sig"][ktype], algorithm, _context)] except KeyError: signing_key = self.get_signing_key(algorithm, _context) else: signing_key = self.get_signing_key(algorithm, _context) except NoMatchingKey as err: logger.error("%s" % sanitize(err)) raise try: _args = {'lifetime': kwargs['lifetime']} except KeyError: _args = {} # construct the signed JWT with the assertions and add # it as value to the 'client_assertion' claim of the request request["client_assertion"] = assertion_jwt( _context.client_id, signing_key, audience, algorithm, **_args) 
request["client_assertion_type"] = JWT_BEARER try: del request["client_secret"] except KeyError: pass # If client_id is not required to be present, remove it. if not request.c_param["client_id"][VREQUIRED]: try: del request["client_id"] except KeyError: pass return {}
python
def construct(self, request, service=None, http_args=None, **kwargs): """ Constructs a client assertion and signs it with a key. The request is modified as a side effect. :param request: The request :param service: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :param kwargs: Extra arguments :return: Constructed HTTP arguments, in this case none """ if 'client_assertion' in kwargs: request["client_assertion"] = kwargs['client_assertion'] if 'client_assertion_type' in kwargs: request[ 'client_assertion_type'] = kwargs['client_assertion_type'] else: request["client_assertion_type"] = JWT_BEARER elif 'client_assertion' in request: if 'client_assertion_type' not in request: request["client_assertion_type"] = JWT_BEARER else: algorithm = None _context = service.service_context # audience for the signed JWT depends on which endpoint # we're talking to. if kwargs['authn_endpoint'] in ['token_endpoint']: try: algorithm = _context.behaviour[ 'token_endpoint_auth_signing_alg'] except (KeyError, AttributeError): pass audience = _context.provider_info['token_endpoint'] else: audience = _context.provider_info['issuer'] if not algorithm: algorithm = self.choose_algorithm(**kwargs) ktype = alg2keytype(algorithm) try: if 'kid' in kwargs: signing_key = [self.get_key_by_kid(kwargs["kid"], algorithm, _context)] elif ktype in _context.kid["sig"]: try: signing_key = [self.get_key_by_kid( _context.kid["sig"][ktype], algorithm, _context)] except KeyError: signing_key = self.get_signing_key(algorithm, _context) else: signing_key = self.get_signing_key(algorithm, _context) except NoMatchingKey as err: logger.error("%s" % sanitize(err)) raise try: _args = {'lifetime': kwargs['lifetime']} except KeyError: _args = {} # construct the signed JWT with the assertions and add # it as value to the 'client_assertion' claim of the request request["client_assertion"] = assertion_jwt( _context.client_id, signing_key, audience, algorithm, **_args) 
request["client_assertion_type"] = JWT_BEARER try: del request["client_secret"] except KeyError: pass # If client_id is not required to be present, remove it. if not request.c_param["client_id"][VREQUIRED]: try: del request["client_id"] except KeyError: pass return {}
[ "def", "construct", "(", "self", ",", "request", ",", "service", "=", "None", ",", "http_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "'client_assertion'", "in", "kwargs", ":", "request", "[", "\"client_assertion\"", "]", "=", "kwargs", "...
Constructs a client assertion and signs it with a key. The request is modified as a side effect. :param request: The request :param service: A :py:class:`oidcservice.service.Service` instance :param http_args: HTTP arguments :param kwargs: Extra arguments :return: Constructed HTTP arguments, in this case none
[ "Constructs", "a", "client", "assertion", "and", "signs", "it", "with", "a", "key", ".", "The", "request", "is", "modified", "as", "a", "side", "effect", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/client_auth.py#L363-L445
train
38,476
openid/JWTConnect-Python-OidcService
src/oidcservice/oauth2/provider_info_discovery.py
ProviderInfoDiscovery.get_endpoint
def get_endpoint(self): """ Find the issuer ID and from it construct the service endpoint :return: Service endpoint """ try: _iss = self.service_context.issuer except AttributeError: _iss = self.endpoint if _iss.endswith('/'): return OIDCONF_PATTERN.format(_iss[:-1]) else: return OIDCONF_PATTERN.format(_iss)
python
def get_endpoint(self): """ Find the issuer ID and from it construct the service endpoint :return: Service endpoint """ try: _iss = self.service_context.issuer except AttributeError: _iss = self.endpoint if _iss.endswith('/'): return OIDCONF_PATTERN.format(_iss[:-1]) else: return OIDCONF_PATTERN.format(_iss)
[ "def", "get_endpoint", "(", "self", ")", ":", "try", ":", "_iss", "=", "self", ".", "service_context", ".", "issuer", "except", "AttributeError", ":", "_iss", "=", "self", ".", "endpoint", "if", "_iss", ".", "endswith", "(", "'/'", ")", ":", "return", ...
Find the issuer ID and from it construct the service endpoint :return: Service endpoint
[ "Find", "the", "issuer", "ID", "and", "from", "it", "construct", "the", "service", "endpoint" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/oauth2/provider_info_discovery.py#L28-L42
train
38,477
openid/JWTConnect-Python-OidcService
src/oidcservice/oauth2/provider_info_discovery.py
ProviderInfoDiscovery._update_service_context
def _update_service_context(self, resp, **kwargs): """ Deal with Provider Config Response. Based on the provider info response a set of parameters in different places needs to be set. :param resp: The provider info response :param service_context: Information collected/used by services """ issuer = self.service_context.issuer # Verify that the issuer value received is the same as the # url that was used as service endpoint (without the .well-known part) if "issuer" in resp: _pcr_issuer = resp["issuer"] if resp["issuer"].endswith("/"): if issuer.endswith("/"): _issuer = issuer else: _issuer = issuer + "/" else: if issuer.endswith("/"): _issuer = issuer[:-1] else: _issuer = issuer # In some cases we can live with the two URLs not being # the same. But this is an excepted that has to be explicit try: self.service_context.allow['issuer_mismatch'] except KeyError: if _issuer != _pcr_issuer: raise OidcServiceError( "provider info issuer mismatch '%s' != '%s'" % ( _issuer, _pcr_issuer)) else: # No prior knowledge _pcr_issuer = issuer self.service_context.issuer = _pcr_issuer self.service_context.provider_info = resp # If there are services defined set the service endpoint to be # the URLs specified in the provider information. try: _srvs = self.service_context.service except AttributeError: pass else: if self.service_context.service: for key, val in resp.items(): # All service endpoint parameters in the provider info has # a name ending in '_endpoint' so I can look specifically # for those if key.endswith("_endpoint"): for _srv in self.service_context.service.values(): # Every service has an endpoint_name assigned # when initiated. This name *MUST* match the # endpoint names used in the provider info if _srv.endpoint_name == key: _srv.endpoint = val # If I already have a Key Jar then I'll add then provider keys to # that. Otherwise a new Key Jar is minted try: kj = self.service_context.keyjar except KeyError: kj = KeyJar() # Load the keys. 
Note that this only means that the key specification # is loaded not necessarily that any keys are fetched. if 'jwks_uri' in resp: kj.load_keys(_pcr_issuer, jwks_uri=resp['jwks_uri']) elif 'jwks' in resp: kj.load_keys(_pcr_issuer, jwks=resp['jwks']) self.service_context.keyjar = kj
python
def _update_service_context(self, resp, **kwargs): """ Deal with Provider Config Response. Based on the provider info response a set of parameters in different places needs to be set. :param resp: The provider info response :param service_context: Information collected/used by services """ issuer = self.service_context.issuer # Verify that the issuer value received is the same as the # url that was used as service endpoint (without the .well-known part) if "issuer" in resp: _pcr_issuer = resp["issuer"] if resp["issuer"].endswith("/"): if issuer.endswith("/"): _issuer = issuer else: _issuer = issuer + "/" else: if issuer.endswith("/"): _issuer = issuer[:-1] else: _issuer = issuer # In some cases we can live with the two URLs not being # the same. But this is an excepted that has to be explicit try: self.service_context.allow['issuer_mismatch'] except KeyError: if _issuer != _pcr_issuer: raise OidcServiceError( "provider info issuer mismatch '%s' != '%s'" % ( _issuer, _pcr_issuer)) else: # No prior knowledge _pcr_issuer = issuer self.service_context.issuer = _pcr_issuer self.service_context.provider_info = resp # If there are services defined set the service endpoint to be # the URLs specified in the provider information. try: _srvs = self.service_context.service except AttributeError: pass else: if self.service_context.service: for key, val in resp.items(): # All service endpoint parameters in the provider info has # a name ending in '_endpoint' so I can look specifically # for those if key.endswith("_endpoint"): for _srv in self.service_context.service.values(): # Every service has an endpoint_name assigned # when initiated. This name *MUST* match the # endpoint names used in the provider info if _srv.endpoint_name == key: _srv.endpoint = val # If I already have a Key Jar then I'll add then provider keys to # that. Otherwise a new Key Jar is minted try: kj = self.service_context.keyjar except KeyError: kj = KeyJar() # Load the keys. 
Note that this only means that the key specification # is loaded not necessarily that any keys are fetched. if 'jwks_uri' in resp: kj.load_keys(_pcr_issuer, jwks_uri=resp['jwks_uri']) elif 'jwks' in resp: kj.load_keys(_pcr_issuer, jwks=resp['jwks']) self.service_context.keyjar = kj
[ "def", "_update_service_context", "(", "self", ",", "resp", ",", "*", "*", "kwargs", ")", ":", "issuer", "=", "self", ".", "service_context", ".", "issuer", "# Verify that the issuer value received is the same as the", "# url that was used as service endpoint (without the .we...
Deal with Provider Config Response. Based on the provider info response a set of parameters in different places needs to be set. :param resp: The provider info response :param service_context: Information collected/used by services
[ "Deal", "with", "Provider", "Config", "Response", ".", "Based", "on", "the", "provider", "info", "response", "a", "set", "of", "parameters", "in", "different", "places", "needs", "to", "be", "set", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/oauth2/provider_info_discovery.py#L54-L129
train
38,478
openid/JWTConnect-Python-OidcService
src/oidcservice/util.py
get_http_url
def get_http_url(url, req, method='GET'): """ Add a query part representing the request to a url that may already contain a query part. Only done if the HTTP method used is 'GET' or 'DELETE'. :param url: The URL :param req: The request as a :py:class:`oidcmsg.message.Message` instance :param method: The HTTP method :return: A possibly modified URL """ if method in ["GET", "DELETE"]: if req.keys(): _req = req.copy() comp = urlsplit(str(url)) if comp.query: _req.update(parse_qs(comp.query)) _query = str(_req.to_urlencoded()) return urlunsplit((comp.scheme, comp.netloc, comp.path, _query, comp.fragment)) else: return url else: return url
python
def get_http_url(url, req, method='GET'): """ Add a query part representing the request to a url that may already contain a query part. Only done if the HTTP method used is 'GET' or 'DELETE'. :param url: The URL :param req: The request as a :py:class:`oidcmsg.message.Message` instance :param method: The HTTP method :return: A possibly modified URL """ if method in ["GET", "DELETE"]: if req.keys(): _req = req.copy() comp = urlsplit(str(url)) if comp.query: _req.update(parse_qs(comp.query)) _query = str(_req.to_urlencoded()) return urlunsplit((comp.scheme, comp.netloc, comp.path, _query, comp.fragment)) else: return url else: return url
[ "def", "get_http_url", "(", "url", ",", "req", ",", "method", "=", "'GET'", ")", ":", "if", "method", "in", "[", "\"GET\"", ",", "\"DELETE\"", "]", ":", "if", "req", ".", "keys", "(", ")", ":", "_req", "=", "req", ".", "copy", "(", ")", "comp", ...
Add a query part representing the request to a url that may already contain a query part. Only done if the HTTP method used is 'GET' or 'DELETE'. :param url: The URL :param req: The request as a :py:class:`oidcmsg.message.Message` instance :param method: The HTTP method :return: A possibly modified URL
[ "Add", "a", "query", "part", "representing", "the", "request", "to", "a", "url", "that", "may", "already", "contain", "a", "query", "part", ".", "Only", "done", "if", "the", "HTTP", "method", "used", "is", "GET", "or", "DELETE", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/util.py#L19-L42
train
38,479
openid/JWTConnect-Python-OidcService
src/oidcservice/util.py
get_http_body
def get_http_body(req, content_type=URL_ENCODED): """ Get the message into the format that should be places in the body part of a HTTP request. :param req: The service request as a :py:class:`oidcmsg.message.Message` instance :param content_type: The format of the body part. :return: The correctly formatet service request. """ if URL_ENCODED in content_type: return req.to_urlencoded() elif JSON_ENCODED in content_type: return req.to_json() elif JOSE_ENCODED in content_type: return req # already packaged else: raise UnSupported( "Unsupported content type: '%s'" % content_type)
python
def get_http_body(req, content_type=URL_ENCODED): """ Get the message into the format that should be places in the body part of a HTTP request. :param req: The service request as a :py:class:`oidcmsg.message.Message` instance :param content_type: The format of the body part. :return: The correctly formatet service request. """ if URL_ENCODED in content_type: return req.to_urlencoded() elif JSON_ENCODED in content_type: return req.to_json() elif JOSE_ENCODED in content_type: return req # already packaged else: raise UnSupported( "Unsupported content type: '%s'" % content_type)
[ "def", "get_http_body", "(", "req", ",", "content_type", "=", "URL_ENCODED", ")", ":", "if", "URL_ENCODED", "in", "content_type", ":", "return", "req", ".", "to_urlencoded", "(", ")", "elif", "JSON_ENCODED", "in", "content_type", ":", "return", "req", ".", "...
Get the message into the format that should be places in the body part of a HTTP request. :param req: The service request as a :py:class:`oidcmsg.message.Message` instance :param content_type: The format of the body part. :return: The correctly formatet service request.
[ "Get", "the", "message", "into", "the", "format", "that", "should", "be", "places", "in", "the", "body", "part", "of", "a", "HTTP", "request", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/util.py#L45-L63
train
38,480
openid/JWTConnect-Python-OidcService
src/oidcservice/util.py
importer
def importer(name): """Import by name""" c1, c2 = modsplit(name) module = importlib.import_module(c1) return getattr(module, c2)
python
def importer(name): """Import by name""" c1, c2 = modsplit(name) module = importlib.import_module(c1) return getattr(module, c2)
[ "def", "importer", "(", "name", ")", ":", "c1", ",", "c2", "=", "modsplit", "(", "name", ")", "module", "=", "importlib", ".", "import_module", "(", "c1", ")", "return", "getattr", "(", "module", ",", "c2", ")" ]
Import by name
[ "Import", "by", "name" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/util.py#L86-L90
train
38,481
openid/JWTConnect-Python-OidcService
src/oidcservice/__init__.py
rndstr
def rndstr(size=16): """ Returns a string of random ascii characters or digits :param size: The length of the string :return: string """ _basech = string.ascii_letters + string.digits return "".join([rnd.choice(_basech) for _ in range(size)])
python
def rndstr(size=16): """ Returns a string of random ascii characters or digits :param size: The length of the string :return: string """ _basech = string.ascii_letters + string.digits return "".join([rnd.choice(_basech) for _ in range(size)])
[ "def", "rndstr", "(", "size", "=", "16", ")", ":", "_basech", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", "return", "\"\"", ".", "join", "(", "[", "rnd", ".", "choice", "(", "_basech", ")", "for", "_", "in", "range", "(", "si...
Returns a string of random ascii characters or digits :param size: The length of the string :return: string
[ "Returns", "a", "string", "of", "random", "ascii", "characters", "or", "digits" ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/__init__.py#L35-L43
train
38,482
openid/JWTConnect-Python-OidcService
src/oidcservice/oidc/provider_info_discovery.py
add_redirect_uris
def add_redirect_uris(request_args, service=None, **kwargs): """ Add redirect_uris to the request arguments. :param request_args: Incomming request arguments :param service: A link to the service :param kwargs: Possible extra keyword arguments :return: A possibly augmented set of request arguments. """ _context = service.service_context if "redirect_uris" not in request_args: # Callbacks is a dictionary with callback type 'code', 'implicit', # 'form_post' as keys. try: _cbs = _context.callbacks except AttributeError: request_args['redirect_uris'] = _context.redirect_uris else: # Filter out local additions. _uris = [v for k, v in _cbs.items() if not k.startswith('__')] request_args['redirect_uris'] = _uris return request_args, {}
python
def add_redirect_uris(request_args, service=None, **kwargs): """ Add redirect_uris to the request arguments. :param request_args: Incomming request arguments :param service: A link to the service :param kwargs: Possible extra keyword arguments :return: A possibly augmented set of request arguments. """ _context = service.service_context if "redirect_uris" not in request_args: # Callbacks is a dictionary with callback type 'code', 'implicit', # 'form_post' as keys. try: _cbs = _context.callbacks except AttributeError: request_args['redirect_uris'] = _context.redirect_uris else: # Filter out local additions. _uris = [v for k, v in _cbs.items() if not k.startswith('__')] request_args['redirect_uris'] = _uris return request_args, {}
[ "def", "add_redirect_uris", "(", "request_args", ",", "service", "=", "None", ",", "*", "*", "kwargs", ")", ":", "_context", "=", "service", ".", "service_context", "if", "\"redirect_uris\"", "not", "in", "request_args", ":", "# Callbacks is a dictionary with callba...
Add redirect_uris to the request arguments. :param request_args: Incomming request arguments :param service: A link to the service :param kwargs: Possible extra keyword arguments :return: A possibly augmented set of request arguments.
[ "Add", "redirect_uris", "to", "the", "request", "arguments", "." ]
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/oidc/provider_info_discovery.py#L48-L70
train
38,483
openid/JWTConnect-Python-OidcService
src/oidcservice/oidc/provider_info_discovery.py
ProviderInfoDiscovery.match_preferences
def match_preferences(self, pcr=None, issuer=None): """ Match the clients preferences against what the provider can do. This is to prepare for later client registration and or what functionality the client actually will use. In the client configuration the client preferences are expressed. These are then compared with the Provider Configuration information. If the Provider has left some claims out, defaults specified in the standard will be used. :param pcr: Provider configuration response if available :param issuer: The issuer identifier """ if not pcr: pcr = self.service_context.provider_info regreq = oidc.RegistrationRequest for _pref, _prov in PREFERENCE2PROVIDER.items(): try: vals = self.service_context.client_preferences[_pref] except KeyError: continue try: _pvals = pcr[_prov] except KeyError: try: # If the provider have not specified use what the # standard says is mandatory if at all. _pvals = PROVIDER_DEFAULT[_pref] except KeyError: logger.info( 'No info from provider on {} and no default'.format( _pref)) _pvals = vals if isinstance(vals, str): if vals in _pvals: self.service_context.behaviour[_pref] = vals else: try: vtyp = regreq.c_param[_pref] except KeyError: # Allow non standard claims if isinstance(vals, list): self.service_context.behaviour[_pref] = [ v for v in vals if v in _pvals] elif vals in _pvals: self.service_context.behaviour[_pref] = vals else: if isinstance(vtyp[0], list): self.service_context.behaviour[_pref] = [] for val in vals: if val in _pvals: self.service_context.behaviour[_pref].append( val) else: for val in vals: if val in _pvals: self.service_context.behaviour[_pref] = val break if _pref not in self.service_context.behaviour: raise ConfigurationError( "OP couldn't match preference:%s" % _pref, pcr) for key, val in self.service_context.client_preferences.items(): if key in self.service_context.behaviour: continue try: vtyp = regreq.c_param[key] if isinstance(vtyp[0], list): pass elif isinstance(val, list) and not isinstance(val, 
str): val = val[0] except KeyError: pass if key not in PREFERENCE2PROVIDER: self.service_context.behaviour[key] = val logger.debug( 'service_context behaviour: {}'.format( self.service_context.behaviour))
python
def match_preferences(self, pcr=None, issuer=None): """ Match the clients preferences against what the provider can do. This is to prepare for later client registration and or what functionality the client actually will use. In the client configuration the client preferences are expressed. These are then compared with the Provider Configuration information. If the Provider has left some claims out, defaults specified in the standard will be used. :param pcr: Provider configuration response if available :param issuer: The issuer identifier """ if not pcr: pcr = self.service_context.provider_info regreq = oidc.RegistrationRequest for _pref, _prov in PREFERENCE2PROVIDER.items(): try: vals = self.service_context.client_preferences[_pref] except KeyError: continue try: _pvals = pcr[_prov] except KeyError: try: # If the provider have not specified use what the # standard says is mandatory if at all. _pvals = PROVIDER_DEFAULT[_pref] except KeyError: logger.info( 'No info from provider on {} and no default'.format( _pref)) _pvals = vals if isinstance(vals, str): if vals in _pvals: self.service_context.behaviour[_pref] = vals else: try: vtyp = regreq.c_param[_pref] except KeyError: # Allow non standard claims if isinstance(vals, list): self.service_context.behaviour[_pref] = [ v for v in vals if v in _pvals] elif vals in _pvals: self.service_context.behaviour[_pref] = vals else: if isinstance(vtyp[0], list): self.service_context.behaviour[_pref] = [] for val in vals: if val in _pvals: self.service_context.behaviour[_pref].append( val) else: for val in vals: if val in _pvals: self.service_context.behaviour[_pref] = val break if _pref not in self.service_context.behaviour: raise ConfigurationError( "OP couldn't match preference:%s" % _pref, pcr) for key, val in self.service_context.client_preferences.items(): if key in self.service_context.behaviour: continue try: vtyp = regreq.c_param[key] if isinstance(vtyp[0], list): pass elif isinstance(val, list) and not isinstance(val, 
str): val = val[0] except KeyError: pass if key not in PREFERENCE2PROVIDER: self.service_context.behaviour[key] = val logger.debug( 'service_context behaviour: {}'.format( self.service_context.behaviour))
[ "def", "match_preferences", "(", "self", ",", "pcr", "=", "None", ",", "issuer", "=", "None", ")", ":", "if", "not", "pcr", ":", "pcr", "=", "self", ".", "service_context", ".", "provider_info", "regreq", "=", "oidc", ".", "RegistrationRequest", "for", "...
Match the clients preferences against what the provider can do. This is to prepare for later client registration and or what functionality the client actually will use. In the client configuration the client preferences are expressed. These are then compared with the Provider Configuration information. If the Provider has left some claims out, defaults specified in the standard will be used. :param pcr: Provider configuration response if available :param issuer: The issuer identifier
[ "Match", "the", "clients", "preferences", "against", "what", "the", "provider", "can", "do", ".", "This", "is", "to", "prepare", "for", "later", "client", "registration", "and", "or", "what", "functionality", "the", "client", "actually", "will", "use", ".", ...
759ab7adef30a7e3b9d75475e2971433b9613788
https://github.com/openid/JWTConnect-Python-OidcService/blob/759ab7adef30a7e3b9d75475e2971433b9613788/src/oidcservice/oidc/provider_info_discovery.py#L93-L178
train
38,484
libindic/soundex
libindic/soundex/__init__.py
Soundex.soundexCode
def soundexCode(self, char): '''Return the soundex code for given character :param char: Character whose soundex code is needed :return: Returns soundex code if character is found in charmap else returns 0 ''' lang = get_language(char) try: if lang == "en_US": return _soundex_map["soundex_en"][charmap[lang].index(char)] else: return _soundex_map["soundex"][charmap[lang].index(char)] except: # Case of exception KeyError because we don't have soundex # mapping for the character pass return 0
python
def soundexCode(self, char): '''Return the soundex code for given character :param char: Character whose soundex code is needed :return: Returns soundex code if character is found in charmap else returns 0 ''' lang = get_language(char) try: if lang == "en_US": return _soundex_map["soundex_en"][charmap[lang].index(char)] else: return _soundex_map["soundex"][charmap[lang].index(char)] except: # Case of exception KeyError because we don't have soundex # mapping for the character pass return 0
[ "def", "soundexCode", "(", "self", ",", "char", ")", ":", "lang", "=", "get_language", "(", "char", ")", "try", ":", "if", "lang", "==", "\"en_US\"", ":", "return", "_soundex_map", "[", "\"soundex_en\"", "]", "[", "charmap", "[", "lang", "]", ".", "ind...
Return the soundex code for given character :param char: Character whose soundex code is needed :return: Returns soundex code if character is found in charmap else returns 0
[ "Return", "the", "soundex", "code", "for", "given", "character" ]
5d41126b9a017ecee0ae71902388c7bf47722907
https://github.com/libindic/soundex/blob/5d41126b9a017ecee0ae71902388c7bf47722907/libindic/soundex/__init__.py#L45-L65
train
38,485
libindic/soundex
libindic/soundex/__init__.py
Soundex.soundex
def soundex(self, name, length=8): '''Calculate soundex of given string This function calculates soundex for Indian language string as well as English string. This function is exposed as service method for JSONRPC in SILPA framework. :param name: String whose Soundex value to be calculated :param length: Length of final Soundex string, if soundex caculated is more than this it will be truncated to length. :return: Soundex string of `name' ''' sndx = [] fc = name[0] # translate alpha chars in name to soundex digits for c in name[1:].lower(): d = str(self.soundexCode(c)) # remove all 0s from the soundex code if d == '0': continue # duplicate consecutive soundex digits are skipped if len(sndx) == 0: sndx.append(d) elif d != sndx[-1]: sndx.append(d) # append first character to result sndx.insert(0, fc) if get_language(name[0]) == 'en_US': # Don't padd return ''.join(sndx) if len(sndx) < length: sndx.extend(repeat('0', length)) return ''.join(sndx[:length]) return ''.join(sndx[:length])
python
def soundex(self, name, length=8): '''Calculate soundex of given string This function calculates soundex for Indian language string as well as English string. This function is exposed as service method for JSONRPC in SILPA framework. :param name: String whose Soundex value to be calculated :param length: Length of final Soundex string, if soundex caculated is more than this it will be truncated to length. :return: Soundex string of `name' ''' sndx = [] fc = name[0] # translate alpha chars in name to soundex digits for c in name[1:].lower(): d = str(self.soundexCode(c)) # remove all 0s from the soundex code if d == '0': continue # duplicate consecutive soundex digits are skipped if len(sndx) == 0: sndx.append(d) elif d != sndx[-1]: sndx.append(d) # append first character to result sndx.insert(0, fc) if get_language(name[0]) == 'en_US': # Don't padd return ''.join(sndx) if len(sndx) < length: sndx.extend(repeat('0', length)) return ''.join(sndx[:length]) return ''.join(sndx[:length])
[ "def", "soundex", "(", "self", ",", "name", ",", "length", "=", "8", ")", ":", "sndx", "=", "[", "]", "fc", "=", "name", "[", "0", "]", "# translate alpha chars in name to soundex digits", "for", "c", "in", "name", "[", "1", ":", "]", ".", "lower", "...
Calculate soundex of given string This function calculates soundex for Indian language string as well as English string. This function is exposed as service method for JSONRPC in SILPA framework. :param name: String whose Soundex value to be calculated :param length: Length of final Soundex string, if soundex caculated is more than this it will be truncated to length. :return: Soundex string of `name'
[ "Calculate", "soundex", "of", "given", "string" ]
5d41126b9a017ecee0ae71902388c7bf47722907
https://github.com/libindic/soundex/blob/5d41126b9a017ecee0ae71902388c7bf47722907/libindic/soundex/__init__.py#L68-L111
train
38,486
libindic/soundex
libindic/soundex/__init__.py
Soundex.compare
def compare(self, string1, string2): '''Compare soundex of given strings This function checks if 2 given strings are phonetically sounds same by doing soundex code comparison :param string1: First string for comparison :param string2: Second string for comparison :return: Returns 0 if both strings are same, 1 if strings sound phonetically same and from same language, 2 if strings are phonetically same and from different languages. Returns -1 if strings are not equal. We can't perform English cross language comparision if English string is passed as one function will return -1. ''' # do a quick check if string1 == string2: return 0 string1_lang = get_language(string1[0]) string2_lang = get_language(string2[0]) if (string1_lang == 'en_US' and string2_lang != 'en_US') or \ (string1_lang != 'en_US' and string2_lang == 'en_US'): # Can't Soundex compare English and Indic string return -1 soundex1 = self.soundex(string1) soundex2 = self.soundex(string2) if soundex1[1:] == soundex2[1:]: # Strings sound phonetically same if string1_lang == string2_lang: # They are from same language return 1 else: # Different language return 2 # Strings are not same return -1
python
def compare(self, string1, string2): '''Compare soundex of given strings This function checks if 2 given strings are phonetically sounds same by doing soundex code comparison :param string1: First string for comparison :param string2: Second string for comparison :return: Returns 0 if both strings are same, 1 if strings sound phonetically same and from same language, 2 if strings are phonetically same and from different languages. Returns -1 if strings are not equal. We can't perform English cross language comparision if English string is passed as one function will return -1. ''' # do a quick check if string1 == string2: return 0 string1_lang = get_language(string1[0]) string2_lang = get_language(string2[0]) if (string1_lang == 'en_US' and string2_lang != 'en_US') or \ (string1_lang != 'en_US' and string2_lang == 'en_US'): # Can't Soundex compare English and Indic string return -1 soundex1 = self.soundex(string1) soundex2 = self.soundex(string2) if soundex1[1:] == soundex2[1:]: # Strings sound phonetically same if string1_lang == string2_lang: # They are from same language return 1 else: # Different language return 2 # Strings are not same return -1
[ "def", "compare", "(", "self", ",", "string1", ",", "string2", ")", ":", "# do a quick check", "if", "string1", "==", "string2", ":", "return", "0", "string1_lang", "=", "get_language", "(", "string1", "[", "0", "]", ")", "string2_lang", "=", "get_language",...
Compare soundex of given strings This function checks if 2 given strings are phonetically sounds same by doing soundex code comparison :param string1: First string for comparison :param string2: Second string for comparison :return: Returns 0 if both strings are same, 1 if strings sound phonetically same and from same language, 2 if strings are phonetically same and from different languages. Returns -1 if strings are not equal. We can't perform English cross language comparision if English string is passed as one function will return -1.
[ "Compare", "soundex", "of", "given", "strings" ]
5d41126b9a017ecee0ae71902388c7bf47722907
https://github.com/libindic/soundex/blob/5d41126b9a017ecee0ae71902388c7bf47722907/libindic/soundex/__init__.py#L114-L155
train
38,487
skoczen/django-seo-js
django_seo_js/backends/prerender.py
PrerenderIO.get_response_for_url
def get_response_for_url(self, url): """ Accepts a fully-qualified url. Returns an HttpResponse, passing through all headers and the status code. """ if not url or "//" not in url: raise ValueError("Missing or invalid url: %s" % url) render_url = self.BASE_URL + url headers = { 'X-Prerender-Token': self.token, } r = self.session.get(render_url, headers=headers, allow_redirects=False) assert r.status_code < 500 return self.build_django_response_from_requests_response(r)
python
def get_response_for_url(self, url): """ Accepts a fully-qualified url. Returns an HttpResponse, passing through all headers and the status code. """ if not url or "//" not in url: raise ValueError("Missing or invalid url: %s" % url) render_url = self.BASE_URL + url headers = { 'X-Prerender-Token': self.token, } r = self.session.get(render_url, headers=headers, allow_redirects=False) assert r.status_code < 500 return self.build_django_response_from_requests_response(r)
[ "def", "get_response_for_url", "(", "self", ",", "url", ")", ":", "if", "not", "url", "or", "\"//\"", "not", "in", "url", ":", "raise", "ValueError", "(", "\"Missing or invalid url: %s\"", "%", "url", ")", "render_url", "=", "self", ".", "BASE_URL", "+", "...
Accepts a fully-qualified url. Returns an HttpResponse, passing through all headers and the status code.
[ "Accepts", "a", "fully", "-", "qualified", "url", ".", "Returns", "an", "HttpResponse", "passing", "through", "all", "headers", "and", "the", "status", "code", "." ]
7613a168deec9e7f11e5d845176d85a3e2c5cc55
https://github.com/skoczen/django-seo-js/blob/7613a168deec9e7f11e5d845176d85a3e2c5cc55/django_seo_js/backends/prerender.py#L19-L35
train
38,488
skoczen/django-seo-js
django_seo_js/backends/prerender.py
PrerenderIO.update_url
def update_url(self, url=None, regex=None): """ Accepts a fully-qualified url, or regex. Returns True if successful, False if not successful. """ if not url and not regex: raise ValueError("Neither a url or regex was provided to update_url.") headers = { 'X-Prerender-Token': self.token, 'Content-Type': 'application/json', } data = { 'prerenderToken': settings.PRERENDER_TOKEN, } if url: data["url"] = url if regex: data["regex"] = regex r = self.session.post(self.RECACHE_URL, headers=headers, data=data) return r.status_code < 500
python
def update_url(self, url=None, regex=None): """ Accepts a fully-qualified url, or regex. Returns True if successful, False if not successful. """ if not url and not regex: raise ValueError("Neither a url or regex was provided to update_url.") headers = { 'X-Prerender-Token': self.token, 'Content-Type': 'application/json', } data = { 'prerenderToken': settings.PRERENDER_TOKEN, } if url: data["url"] = url if regex: data["regex"] = regex r = self.session.post(self.RECACHE_URL, headers=headers, data=data) return r.status_code < 500
[ "def", "update_url", "(", "self", ",", "url", "=", "None", ",", "regex", "=", "None", ")", ":", "if", "not", "url", "and", "not", "regex", ":", "raise", "ValueError", "(", "\"Neither a url or regex was provided to update_url.\"", ")", "headers", "=", "{", "'...
Accepts a fully-qualified url, or regex. Returns True if successful, False if not successful.
[ "Accepts", "a", "fully", "-", "qualified", "url", "or", "regex", ".", "Returns", "True", "if", "successful", "False", "if", "not", "successful", "." ]
7613a168deec9e7f11e5d845176d85a3e2c5cc55
https://github.com/skoczen/django-seo-js/blob/7613a168deec9e7f11e5d845176d85a3e2c5cc55/django_seo_js/backends/prerender.py#L37-L59
train
38,489
skoczen/django-seo-js
django_seo_js/backends/prerender.py
PrerenderHosted.update_url
def update_url(self, url=None):
    """Ask the self-hosted prerender service to recache a single page.

    Accepts a fully-qualified url.
    Returns True if successful, False if not successful.

    Raises:
        ValueError: if ``url`` is not given.
    """
    if not url:
        # This hosted backend only recaches by url — unlike PrerenderIO
        # there is no regex option, so say exactly what is missing.
        raise ValueError("A url was not provided to update_url.")

    post_url = "%s%s" % (self.BASE_URL, url)
    r = self.session.post(post_url)
    # Some responses carry the status as a string; normalize before comparing.
    return int(r.status_code) < 500
python
def update_url(self, url=None): """ Accepts a fully-qualified url. Returns True if successful, False if not successful. """ if not url: raise ValueError("Neither a url or regex was provided to update_url.") post_url = "%s%s" % (self.BASE_URL, url) r = self.session.post(post_url) return int(r.status_code) < 500
[ "def", "update_url", "(", "self", ",", "url", "=", "None", ")", ":", "if", "not", "url", ":", "raise", "ValueError", "(", "\"Neither a url or regex was provided to update_url.\"", ")", "post_url", "=", "\"%s%s\"", "%", "(", "self", ".", "BASE_URL", ",", "url",...
Accepts a fully-qualified url. Returns True if successful, False if not successful.
[ "Accepts", "a", "fully", "-", "qualified", "url", ".", "Returns", "True", "if", "successful", "False", "if", "not", "successful", "." ]
7613a168deec9e7f11e5d845176d85a3e2c5cc55
https://github.com/skoczen/django-seo-js/blob/7613a168deec9e7f11e5d845176d85a3e2c5cc55/django_seo_js/backends/prerender.py#L80-L89
train
38,490
lock8/django-rest-framework-jwt-refresh-token
refreshtoken/serializers.py
RefreshTokenSerializer.create
def create(self, validated_data):
    """Override ``create`` to provide a user via request.user by default.

    This is required since the read_only ``user`` field is not included
    by default anymore since
    https://github.com/encode/django-rest-framework/pull/5886.
    """
    user_supplied = 'user' in validated_data
    if not user_supplied:
        # Fall back to the authenticated requester from the serializer
        # context; only touch the request when no user was passed in.
        validated_data['user'] = self.context['request'].user
    return super(RefreshTokenSerializer, self).create(validated_data)
python
def create(self, validated_data): """Override ``create`` to provide a user via request.user by default. This is required since the read_only ``user`` field is not included by default anymore since https://github.com/encode/django-rest-framework/pull/5886. """ if 'user' not in validated_data: validated_data['user'] = self.context['request'].user return super(RefreshTokenSerializer, self).create(validated_data)
[ "def", "create", "(", "self", ",", "validated_data", ")", ":", "if", "'user'", "not", "in", "validated_data", ":", "validated_data", "[", "'user'", "]", "=", "self", ".", "context", "[", "'request'", "]", ".", "user", "return", "super", "(", "RefreshTokenS...
Override ``create`` to provide a user via request.user by default. This is required since the read_only ``user`` field is not included by default anymore since https://github.com/encode/django-rest-framework/pull/5886.
[ "Override", "create", "to", "provide", "a", "user", "via", "request", ".", "user", "by", "default", "." ]
9d46478ac1536982e4dc9dec7599fbce30fef91a
https://github.com/lock8/django-rest-framework-jwt-refresh-token/blob/9d46478ac1536982e4dc9dec7599fbce30fef91a/refreshtoken/serializers.py#L22-L31
train
38,491
lock8/django-rest-framework-jwt-refresh-token
refreshtoken/permissions.py
IsOwnerOrAdmin.has_object_permission
def has_object_permission(self, request, view, obj):
    """
    Allow staff or superusers, and the owner of the object itself.
    """
    requester = request.user
    # Anonymous requesters are rejected outright.
    if not requester.is_authenticated:
        return False
    # Staff and superusers may act on any object.
    if requester.is_staff or requester.is_superuser:
        return True
    # Otherwise only the object's owner is allowed.
    return requester == obj.user
python
def has_object_permission(self, request, view, obj): """ Allow staff or superusers, and the owner of the object itself. """ user = request.user if not user.is_authenticated: return False elif user.is_staff or user.is_superuser: return True return user == obj.user
[ "def", "has_object_permission", "(", "self", ",", "request", ",", "view", ",", "obj", ")", ":", "user", "=", "request", ".", "user", "if", "not", "user", ".", "is_authenticated", ":", "return", "False", "elif", "user", ".", "is_staff", "or", "user", ".",...
Allow staff or superusers, and the owner of the object itself.
[ "Allow", "staff", "or", "superusers", "and", "the", "owner", "of", "the", "object", "itself", "." ]
9d46478ac1536982e4dc9dec7599fbce30fef91a
https://github.com/lock8/django-rest-framework-jwt-refresh-token/blob/9d46478ac1536982e4dc9dec7599fbce30fef91a/refreshtoken/permissions.py#L12-L21
train
38,492
Aula13/poloniex
poloniex/concurrency.py
Semaphore.clear
def clear(self):
    """Reset the semaphore to its original bind limit.

    Release the semaphore of all of its bounds, setting the internal
    counter back to its original bind limit, and notify an equivalent
    amount of waiting threads that they can run.
    """
    with self._cond:
        # Wake exactly as many waiters as slots being restored.
        restored = self._initial - self._value
        self._value = self._initial
        self._cond.notify(restored)
python
def clear(self): """Release the semaphore of all of its bounds, setting the internal counter back to its original bind limit. Notify an equivalent amount of threads that they can run.""" with self._cond: to_notify = self._initial - self._value self._value = self._initial self._cond.notify(to_notify)
[ "def", "clear", "(", "self", ")", ":", "with", "self", ".", "_cond", ":", "to_notify", "=", "self", ".", "_initial", "-", "self", ".", "_value", "self", ".", "_value", "=", "self", ".", "_initial", "self", ".", "_cond", ".", "notify", "(", "to_notify...
Release the semaphore of all of its bounds, setting the internal counter back to its original bind limit. Notify an equivalent amount of threads that they can run.
[ "Release", "the", "semaphore", "of", "all", "of", "its", "bounds", "setting", "the", "internal", "counter", "back", "to", "its", "original", "bind", "limit", ".", "Notify", "an", "equivalent", "amount", "of", "threads", "that", "they", "can", "run", "." ]
a5bfc91e766e220bf77f5e3a1b131f095913e714
https://github.com/Aula13/poloniex/blob/a5bfc91e766e220bf77f5e3a1b131f095913e714/poloniex/concurrency.py#L96-L103
train
38,493
Aula13/poloniex
poloniex/poloniex.py
_api_wrapper
def _api_wrapper(fn):
    """API function decorator that performs rate limiting and error
    checking.

    The wrapped function must return a requests-style response; the
    wrapper decodes it as JSON (with :class:`_AutoCastDict`), converts
    ``datetime.date`` parameters to unix timestamps, drops parameters
    whose value is None, and raises :class:`PoloniexCommandException`
    on Poloniex-reported errors.
    """

    def _convert(value):
        # Poloniex expects dates as unix timestamps.
        if isinstance(value, _datetime.date):
            return value.strftime('%s')
        return value

    @_six.wraps(fn)
    def _fn(self, command, **params):
        # Lazily start the rate-limit reset timer exactly once.
        with self.startup_lock:
            if self.timer.ident is None:
                self.timer.daemon = True  # setDaemon() is deprecated
                self.timer.start()

        # sanitize the params by removing the None values
        params = dict((key, _convert(value))
                      for key, value in _six.iteritems(params)
                      if value is not None)

        self.semaphore.acquire()
        resp = fn(self, command, **params)
        try:
            respdata = resp.json(object_hook=_AutoCastDict)
        except ValueError:
            # Narrowed from a bare ``except``: resp.json raises ValueError
            # (json.JSONDecodeError is a subclass) on a bad body.  Prefer
            # the HTTP error if there is one, else report the decode failure.
            resp.raise_for_status()
            raise ValueError('No JSON object could be decoded')

        # check for 'error' then check for status due to Poloniex
        # inconsistency
        if 'error' in respdata:
            raise PoloniexCommandException(respdata['error'])
        resp.raise_for_status()
        return respdata

    return _fn
python
def _api_wrapper(fn): """API function decorator that performs rate limiting and error checking.""" def _convert(value): if isinstance(value, _datetime.date): return value.strftime('%s') return value @_six.wraps(fn) def _fn(self, command, **params): # sanitize the params by removing the None values with self.startup_lock: if self.timer.ident is None: self.timer.setDaemon(True) self.timer.start() params = dict((key, _convert(value)) for key, value in _six.iteritems(params) if value is not None) self.semaphore.acquire() resp = fn(self, command, **params) try: respdata = resp.json(object_hook=_AutoCastDict) except: # use more specific error if available or fallback to ValueError resp.raise_for_status() raise Exception('No JSON object could be decoded') # check for 'error' then check for status due to Poloniex inconsistency if 'error' in respdata: raise PoloniexCommandException(respdata['error']) resp.raise_for_status() return respdata return _fn
[ "def", "_api_wrapper", "(", "fn", ")", ":", "def", "_convert", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "_datetime", ".", "date", ")", ":", "return", "value", ".", "strftime", "(", "'%s'", ")", "return", "value", "@", "_six", "....
API function decorator that performs rate limiting and error checking.
[ "API", "function", "decorator", "that", "performs", "rate", "limiting", "and", "error", "checking", "." ]
a5bfc91e766e220bf77f5e3a1b131f095913e714
https://github.com/Aula13/poloniex/blob/a5bfc91e766e220bf77f5e3a1b131f095913e714/poloniex/poloniex.py#L20-L55
train
38,494
Aula13/poloniex
poloniex/poloniex.py
PoloniexPublic.returnOrderBook
def returnOrderBook(self, currencyPair='all', depth='50'):
    """Returns the order book for a given market, as well as a sequence
    number for use with the Push API and an indicator specifying whether
    the market is frozen. You may set currencyPair to "all" to get the
    order books of all markets."""
    query = {'currencyPair': currencyPair, 'depth': depth}
    return self._public('returnOrderBook', **query)
python
def returnOrderBook(self, currencyPair='all', depth='50'): """Returns the order book for a given market, as well as a sequence number for use with the Push API and an indicator specifying whether the market is frozen. You may set currencyPair to "all" to get the order books of all markets.""" return self._public('returnOrderBook', currencyPair=currencyPair, depth=depth)
[ "def", "returnOrderBook", "(", "self", ",", "currencyPair", "=", "'all'", ",", "depth", "=", "'50'", ")", ":", "return", "self", ".", "_public", "(", "'returnOrderBook'", ",", "currencyPair", "=", "currencyPair", ",", "depth", "=", "depth", ")" ]
Returns the order book for a given market, as well as a sequence number for use with the Push API and an indicator specifying whether the market is frozen. You may set currencyPair to "all" to get the order books of all markets.
[ "Returns", "the", "order", "book", "for", "a", "given", "market", "as", "well", "as", "a", "sequence", "number", "for", "use", "with", "the", "Push", "API", "and", "an", "indicator", "specifying", "whether", "the", "market", "is", "frozen", ".", "You", "...
a5bfc91e766e220bf77f5e3a1b131f095913e714
https://github.com/Aula13/poloniex/blob/a5bfc91e766e220bf77f5e3a1b131f095913e714/poloniex/poloniex.py#L95-L101
train
38,495
Aula13/poloniex
poloniex/poloniex.py
Poloniex.returnDepositsWithdrawals
def returnDepositsWithdrawals(self, start=0, end=2**32-1):
    """Returns your deposit and withdrawal history within a range,
    specified by the "start" and "end" POST parameters, both of which
    should be given as UNIX timestamps."""
    window = {'start': start, 'end': end}
    return self._private('returnDepositsWithdrawals', **window)
python
def returnDepositsWithdrawals(self, start=0, end=2**32-1): """Returns your deposit and withdrawal history within a range, specified by the "start" and "end" POST parameters, both of which should be given as UNIX timestamps.""" return self._private('returnDepositsWithdrawals', start=start, end=end)
[ "def", "returnDepositsWithdrawals", "(", "self", ",", "start", "=", "0", ",", "end", "=", "2", "**", "32", "-", "1", ")", ":", "return", "self", ".", "_private", "(", "'returnDepositsWithdrawals'", ",", "start", "=", "start", ",", "end", "=", "end", ")...
Returns your deposit and withdrawal history within a range, specified by the "start" and "end" POST parameters, both of which should be given as UNIX timestamps.
[ "Returns", "your", "deposit", "and", "withdrawal", "history", "within", "a", "range", "specified", "by", "the", "start", "and", "end", "POST", "parameters", "both", "of", "which", "should", "be", "given", "as", "UNIX", "timestamps", "." ]
a5bfc91e766e220bf77f5e3a1b131f095913e714
https://github.com/Aula13/poloniex/blob/a5bfc91e766e220bf77f5e3a1b131f095913e714/poloniex/poloniex.py#L203-L207
train
38,496
Aula13/poloniex
poloniex/poloniex.py
Poloniex.buy
def buy(self, currencyPair, rate, amount, fillOrKill=None,
        immediateOrCancel=None, postOnly=None):
    """Places a limit buy order in a given market. Required POST
    parameters are "currencyPair", "rate", and "amount". If successful,
    the method will return the order number. You may optionally set
    "fillOrKill", "immediateOrCancel", "postOnly" to 1. A fill-or-kill
    order will either fill in its entirety or be completely aborted. An
    immediate-or-cancel order can be partially or completely filled, but
    any portion of the order that cannot be filled immediately will be
    canceled rather than left on the order book. A post-only order will
    only be placed if no portion of it fills immediately; this
    guarantees you will never pay the taker fee on any part of the order
    that fills."""
    order = {
        'currencyPair': currencyPair,
        'rate': rate,
        'amount': amount,
        'fillOrKill': fillOrKill,
        'immediateOrCancel': immediateOrCancel,
        'postOnly': postOnly,
    }
    return self._private('buy', **order)
python
def buy(self, currencyPair, rate, amount, fillOrKill=None, immediateOrCancel=None, postOnly=None): """Places a limit buy order in a given market. Required POST parameters are "currencyPair", "rate", and "amount". If successful, the method will return the order number. You may optionally set "fillOrKill", "immediateOrCancel", "postOnly" to 1. A fill-or-kill order will either fill in its entirety or be completely aborted. An immediate-or-cancel order can be partially or completely filled, but any portion of the order that cannot be filled immediately will be canceled rather than left on the order book. A post-only order will only be placed if no portion of it fills immediately; this guarantees you will never pay the taker fee on any part of the order that fills.""" return self._private('buy', currencyPair=currencyPair, rate=rate, amount=amount, fillOrKill=fillOrKill, immediateOrCancel=immediateOrCancel, postOnly=postOnly)
[ "def", "buy", "(", "self", ",", "currencyPair", ",", "rate", ",", "amount", ",", "fillOrKill", "=", "None", ",", "immediateOrCancel", "=", "None", ",", "postOnly", "=", "None", ")", ":", "return", "self", ".", "_private", "(", "'buy'", ",", "currencyPair...
Places a limit buy order in a given market. Required POST parameters are "currencyPair", "rate", and "amount". If successful, the method will return the order number. You may optionally set "fillOrKill", "immediateOrCancel", "postOnly" to 1. A fill-or-kill order will either fill in its entirety or be completely aborted. An immediate-or-cancel order can be partially or completely filled, but any portion of the order that cannot be filled immediately will be canceled rather than left on the order book. A post-only order will only be placed if no portion of it fills immediately; this guarantees you will never pay the taker fee on any part of the order that fills.
[ "Places", "a", "limit", "buy", "order", "in", "a", "given", "market", ".", "Required", "POST", "parameters", "are", "currencyPair", "rate", "and", "amount", ".", "If", "successful", "the", "method", "will", "return", "the", "order", "number", ".", "You", "...
a5bfc91e766e220bf77f5e3a1b131f095913e714
https://github.com/Aula13/poloniex/blob/a5bfc91e766e220bf77f5e3a1b131f095913e714/poloniex/poloniex.py#L250-L266
train
38,497
Aula13/poloniex
poloniex/poloniex.py
Poloniex.moveOrder
def moveOrder(self, orderNumber, rate, amount=None, postOnly=None,
              immediateOrCancel=None):
    """Cancels an order and places a new one of the same type in a
    single atomic transaction, meaning either both operations will
    succeed or both will fail. Required POST parameters are
    "orderNumber" and "rate"; you may optionally specify "amount" if you
    wish to change the amount of the new order. "postOnly" or
    "immediateOrCancel" may be specified for exchange orders, but will
    have no effect on margin orders.
    """
    replacement = {
        'orderNumber': orderNumber,
        'rate': rate,
        'amount': amount,
        'postOnly': postOnly,
        'immediateOrCancel': immediateOrCancel,
    }
    return self._private('moveOrder', **replacement)
python
def moveOrder(self, orderNumber, rate, amount=None, postOnly=None, immediateOrCancel=None): """Cancels an order and places a new one of the same type in a single atomic transaction, meaning either both operations will succeed or both will fail. Required POST parameters are "orderNumber" and "rate"; you may optionally specify "amount" if you wish to change the amount of the new order. "postOnly" or "immediateOrCancel" may be specified for exchange orders, but will have no effect on margin orders. """ return self._private('moveOrder', orderNumber=orderNumber, rate=rate, amount=amount, postOnly=postOnly, immediateOrCancel=immediateOrCancel)
[ "def", "moveOrder", "(", "self", ",", "orderNumber", ",", "rate", ",", "amount", "=", "None", ",", "postOnly", "=", "None", ",", "immediateOrCancel", "=", "None", ")", ":", "return", "self", ".", "_private", "(", "'moveOrder'", ",", "orderNumber", "=", "...
Cancels an order and places a new one of the same type in a single atomic transaction, meaning either both operations will succeed or both will fail. Required POST parameters are "orderNumber" and "rate"; you may optionally specify "amount" if you wish to change the amount of the new order. "postOnly" or "immediateOrCancel" may be specified for exchange orders, but will have no effect on margin orders.
[ "Cancels", "an", "order", "and", "places", "a", "new", "one", "of", "the", "same", "type", "in", "a", "single", "atomic", "transaction", "meaning", "either", "both", "operations", "will", "succeed", "or", "both", "will", "fail", ".", "Required", "POST", "p...
a5bfc91e766e220bf77f5e3a1b131f095913e714
https://github.com/Aula13/poloniex/blob/a5bfc91e766e220bf77f5e3a1b131f095913e714/poloniex/poloniex.py#L282-L292
train
38,498
Aula13/poloniex
poloniex/poloniex.py
Poloniex.withdraw
def withdraw(self, currency, amount, address, paymentId=None):
    """Immediately places a withdrawal for a given currency, with no
    email confirmation. In order to use this method, the withdrawal
    privilege must be enabled for your API key. Required POST parameters
    are "currency", "amount", and "address". For XMR withdrawals, you
    may optionally specify "paymentId"."""
    request = {
        'currency': currency,
        'amount': amount,
        'address': address,
        'paymentId': paymentId,
    }
    return self._private('withdraw', **request)
python
def withdraw(self, currency, amount, address, paymentId=None): """Immediately places a withdrawal for a given currency, with no email confirmation. In order to use this method, the withdrawal privilege must be enabled for your API key. Required POST parameters are "currency", "amount", and "address". For XMR withdrawals, you may optionally specify "paymentId".""" return self._private('withdraw', currency=currency, amount=amount, address=address, paymentId=paymentId)
[ "def", "withdraw", "(", "self", ",", "currency", ",", "amount", ",", "address", ",", "paymentId", "=", "None", ")", ":", "return", "self", ".", "_private", "(", "'withdraw'", ",", "currency", "=", "currency", ",", "amount", "=", "amount", ",", "address",...
Immediately places a withdrawal for a given currency, with no email confirmation. In order to use this method, the withdrawal privilege must be enabled for your API key. Required POST parameters are "currency", "amount", and "address". For XMR withdrawals, you may optionally specify "paymentId".
[ "Immediately", "places", "a", "withdrawal", "for", "a", "given", "currency", "with", "no", "email", "confirmation", ".", "In", "order", "to", "use", "this", "method", "the", "withdrawal", "privilege", "must", "be", "enabled", "for", "your", "API", "key", "."...
a5bfc91e766e220bf77f5e3a1b131f095913e714
https://github.com/Aula13/poloniex/blob/a5bfc91e766e220bf77f5e3a1b131f095913e714/poloniex/poloniex.py#L294-L301
train
38,499