Dataset preview header (reconstructed): columns are docstring (string, lengths 52–499), function (string, lengths 67–35.2k) and __index_level_0__ (int64, values 52.6k–1.16M).
Checks a namespace for the existence of a specific key Args: namespace (str): Namespace to check in key (str): Name of the key to check for Returns: `True` if key exists in the namespace, else `False`
def key_exists(self, namespace, key): return namespace in self.__data and key in self.__data[namespace]
331,519
Return the value of a key/namespace pair Args: key (str): Key to return namespace (str): Namespace of the key default (:obj:`Any`): Optional default value to return, if key was not found as_object (bool): If `True` returns the object as a :py:obj:`ConfigItem` object instead of its primitive type Returns: Requested value if found, else default value or `None`
def get(self, key, namespace='default', default=None, as_object=False): if namespace in self.__data and key in self.__data[namespace]: if as_object: return db.ConfigItem.find_one( ConfigItem.namespace_prefix == namespace, ConfigItem.key == key ) return self.__data[namespace][key] else: return default
331,520
Set (create/update) a configuration item Args: namespace (`str`): Namespace for the item key (`str`): Key of the item value (`Any`): Value of the item; must be one of `DBCChoice`, `DBCString`, `DBCFloat`, `DBCInt`, `DBCArray`, `DBCJSON` or `bool` description (`str`): Description of the configuration item Returns: `None`
def set(self, namespace, key, value, description=None): if isinstance(value, DBCChoice): vtype = 'choice' elif isinstance(value, DBCString): vtype = 'string' elif isinstance(value, DBCFloat): vtype = 'float' elif isinstance(value, DBCInt): vtype = 'int' elif isinstance(value, DBCArray): vtype = 'array' elif isinstance(value, DBCJSON): vtype = 'json' elif isinstance(value, bool): vtype = 'bool' else: raise ValueError('Invalid config item type: {}'.format(type(value))) if namespace in self.__data and key in self.__data[namespace]: itm = db.ConfigItem.find_one( ConfigItem.namespace_prefix == namespace, ConfigItem.key == key ) if not itm: raise KeyError(key) itm.value = value itm.type = vtype if description: itm.description = description else: itm = ConfigItem() itm.key = key itm.value = value itm.type = vtype itm.description = description itm.namespace_prefix = namespace db.session.add(itm) db.session.commit() if namespace in self.__data: self.__data[namespace][key] = value else: self.__data[namespace] = {key: value}
331,521
Remove a configuration item from the database Args: namespace (`str`): Namespace of the config item key (`str`): Key to delete Returns: `None`
def delete(self, namespace, key): if self.key_exists(namespace, key): obj = db.ConfigItem.find_one( ConfigItem.namespace_prefix == namespace, ConfigItem.key == key ) del self.__data[namespace][key] db.session.delete(obj) db.session.commit() else: raise KeyError('{}/{}'.format(namespace, key))
331,522
Method to send a notification. A plugin may use only part of the information, but all fields are required. Args: subsystem (`str`): Name of the subsystem originating the notification recipient (`str`): Recipient email address subject (`str`): Subject / title of the notification body_html (`str`): HTML formatted version of the message body_text (`str`): Text formatted version of the message Returns: `None`
def notify(self, subsystem, recipient, subject, body_html, body_text): if not re.match(RGX_EMAIL_VALIDATION_PATTERN, recipient, re.I): raise ValueError('Invalid recipient provided') email = Email() email.timestamp = datetime.now() email.subsystem = subsystem email.sender = self.sender email.recipients = recipient email.subject = subject email.uuid = uuid.uuid4() email.message_html = body_html email.message_text = body_text method = dbconfig.get('method', NS_EMAIL, 'ses') try: if method == 'ses': self.__send_ses_email([recipient], subject, body_html, body_text) elif method == 'smtp': self.__send_smtp_email([recipient], subject, body_html, body_text) else: raise ValueError('Invalid email method: {}'.format(method)) db.session.add(email) db.session.commit() except Exception as ex: raise EmailSendError(ex)
331,535
Send an email using SES Args: recipients (`list` of `str`): List of recipient email addresses subject (str): Subject of the email body_html (str): HTML body of the email body_text (str): Text body of the email Returns: `None`
def __send_ses_email(self, recipients, subject, body_html, body_text): source_arn = dbconfig.get('source_arn', NS_EMAIL) return_arn = dbconfig.get('return_path_arn', NS_EMAIL) session = get_local_aws_session() ses = session.client('ses', region_name=dbconfig.get('ses_region', NS_EMAIL, 'us-west-2')) body = {} if body_html: body['Html'] = { 'Data': body_html } if body_text: body['Text'] = { 'Data': body_text } ses_options = { 'Source': self.sender, 'Destination': { 'ToAddresses': recipients }, 'Message': { 'Subject': { 'Data': subject }, 'Body': body } } # Set SES options if needed if source_arn and return_arn: ses_options.update({ 'SourceArn': source_arn, 'ReturnPathArn': return_arn }) ses.send_email(**ses_options)
331,536
Send an email using SMTP Args: recipients (`list` of `str`): List of recipient email addresses subject (str): Subject of the email html_body (str): HTML body of the email text_body (str): Text body of the email Returns: `None`
def __send_smtp_email(self, recipients, subject, html_body, text_body): smtp = smtplib.SMTP( dbconfig.get('smtp_server', NS_EMAIL, 'localhost'), dbconfig.get('smtp_port', NS_EMAIL, 25) ) source_arn = dbconfig.get('source_arn', NS_EMAIL) return_arn = dbconfig.get('return_path_arn', NS_EMAIL) from_arn = dbconfig.get('from_arn', NS_EMAIL) msg = MIMEMultipart('alternative') # Set SES options if needed if source_arn and from_arn and return_arn: msg['X-SES-SOURCE-ARN'] = source_arn msg['X-SES-FROM-ARN'] = from_arn msg['X-SES-RETURN-PATH-ARN'] = return_arn msg['Subject'] = subject msg['To'] = ','.join(recipients) msg['From'] = self.sender # Check body types to avoid exceptions if html_body: html_part = MIMEText(html_body, 'html') msg.attach(html_part) if text_body: text_part = MIMEText(text_body, 'plain') msg.attach(text_part) # TLS if needed if dbconfig.get('smtp_tls', NS_EMAIL, False): smtp.starttls() # Login if needed username = dbconfig.get('smtp_username', NS_EMAIL) password = dbconfig.get('smtp_password', NS_EMAIL) if username and password: smtp.login(username, password) smtp.sendmail(self.sender, recipients, msg.as_string()) smtp.quit()
331,537
Default object encoder function Args: obj (:obj:`Any`): Object to be serialized Returns: JSON string
def default(self, obj): if isinstance(obj, datetime): return obj.isoformat() if issubclass(obj.__class__, Enum.__class__): return obj.value to_json = getattr(obj, 'to_json', None) if to_json: out = obj.to_json() if issubclass(obj.__class__, Model): out.update({'__type': obj.__class__.__name__}) return out return JSONEncoder.default(self, obj)
331,538
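As an illustration of the encoder row above, here is a minimal, self-contained sketch of the same pattern: a json.JSONEncoder subclass that serializes datetime values via isoformat() and defers everything else to the base class. The Model/to_json handling from the original is omitted.

import json
from datetime import datetime
from json import JSONEncoder

class DateTimeEncoder(JSONEncoder):
    def default(self, obj):
        # Serialize datetimes as ISO-8601 strings, as the encoder above does
        if isinstance(obj, datetime):
            return obj.isoformat()
        # Defer to the base class for unsupported types (raises TypeError)
        return JSONEncoder.default(self, obj)

print(json.dumps({'now': datetime(2017, 8, 15, 18, 24, 31)}, cls=DateTimeEncoder))
# -> {"now": "2017-08-15T18:24:31"}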
Initialize the class, overriding the object hook. The `object_hook` argument is ignored and replaced with this class's own `object_hook`; the remaining arguments are passed through to `json.JSONDecoder` unchanged. Args: object_hook: Ignored; overridden internally parse_float: Passed through to `json.JSONDecoder` parse_int: Passed through to `json.JSONDecoder` parse_constant: Passed through to `json.JSONDecoder` strict: Passed through to `json.JSONDecoder` object_pairs_hook: Passed through to `json.JSONDecoder`
def __init__(self, *, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, strict=True, object_pairs_hook=None): try: super().__init__( object_hook=self.object_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, strict=strict, object_pairs_hook=object_pairs_hook ) except Exception: log.exception('Failed loading JSON data')
331,539
Checks to see if the `__type`-hinting field is available in the object being de-serialized. If present, and the class referenced has a `from_json` function, it will return the generated object, else a standard dict will be returned Args: obj: Object to be deserialized Returns: Deserialized object or regular Python object
def object_hook(obj): try: if '__type' in obj: obj_type = obj['__type'] cls = getattr(cloud_inquisitor.schema, obj_type) if hasattr(cls, 'from_json'): return cls.from_json(obj) key, value = next(iter(obj.items())) if key == ' t': return tuple(value) elif key == ' u': return uuid.UUID(value) elif key == ' b': return b64decode(value) elif key == ' m': return Markup(value) elif key == ' d': return parse_date(value) return obj except Exception: log.exception('Error during data deserialization')
331,540
Marks a function / method as deprecated. Takes one argument, a message to be logged with information on future usage of the function or alternative methods to call. Args: msg (str): Deprecation message to be logged Returns: `callable`
def deprecated(msg): def decorator(func): @wraps(func) def wrapper(*args, **kwargs): logging.getLogger(__name__).warning(msg) return func(*args, **kwargs) return wrapper return decorator
331,552
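A short usage sketch for the deprecated() decorator above, assuming functools.wraps and logging are imported as in the original module; old_sum is a hypothetical function used only for illustration.

import logging
logging.basicConfig(level=logging.WARNING)

@deprecated('old_sum() will be removed, use sum() instead')
def old_sum(a, b):
    return a + b

old_sum(1, 2)  # logs the deprecation message via logging.getLogger(__name__), then returns 3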
Evaluate a value for truthiness >>> is_truthy('Yes') True >>> is_truthy('False') False >>> is_truthy(1) True Args: value (Any): Value to evaluate default (bool): Optional default value, if the input does not match the true or false values Returns: True if a truthy value is passed, else False
def is_truthy(value, default=False): if value is None: return False if isinstance(value, bool): return value if isinstance(value, int): return value > 0 trues = ('1', 'true', 'y', 'yes', 'ok') falses = ('', '0', 'false', 'n', 'none', 'no') if value.lower().strip() in falses: return False elif value.lower().strip() in trues: return True else: if default: return default else: raise ValueError('Invalid argument given to truthy: {0}'.format(value))
331,553
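A few illustrative calls for is_truthy() above, matching its documented behavior:

assert is_truthy('Yes') is True                   # in the trues tuple
assert is_truthy('0') is False                    # in the falses tuple
assert is_truthy(None) is False
assert is_truthy(5) is True                       # positive ints are truthy
assert is_truthy('maybe', default=True) is True   # unrecognized value falls back to the default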
Return a Jinja2 template by filename Args: template (str): Name of the template to return Returns: A Jinja2 Template object
def get_template(template): from cloud_inquisitor.database import db tmpl = db.Template.find_one(template_name=template) if not tmpl: raise InquisitorError('No such template found: {}'.format(template)) tmplenv = Environment(loader=BaseLoader, autoescape=True) tmplenv.filters['json_loads'] = json.loads tmplenv.filters['slack_quote_join'] = lambda data: ', '.join('`{}`'.format(x) for x in data) return tmplenv.from_string(tmpl.template)
331,555
Parse a domain name to gather the bucket name and region for an S3 bucket. Returns a tuple (bucket_name, bucket_region) if a valid domain name, else `None` >>> parse_bucket_info('www.riotgames.com.br.s3-website-us-west-2.amazonaws.com') ('www.riotgames.com.br', 'us-west-2') Args: domain (`str`): Domain name to parse Returns: `tuple` of (`str`, `str`) if the domain matches, else `None`
def parse_bucket_info(domain): match = RGX_BUCKET.match(domain) if match: data = match.groupdict() return data['bucket'], data['region'] or 'us-east-1'
331,556
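RGX_BUCKET is not shown in this row; the following is a hypothetical stand-in pattern with the named groups the function expects (bucket, region), written only to make the example above runnable. The project's real pattern may differ.

import re

# Hypothetical stand-in for the project's RGX_BUCKET constant
RGX_BUCKET = re.compile(
    r'^(?P<bucket>.+?)\.s3(?:-website)?[.-](?:(?P<region>[a-z0-9-]+)\.)?amazonaws\.com$'
)

assert parse_bucket_info('www.riotgames.com.br.s3-website-us-west-2.amazonaws.com') == \
    ('www.riotgames.com.br', 'us-west-2')
assert parse_bucket_info('mybucket.s3.amazonaws.com') == ('mybucket', 'us-east-1')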
Convert a datetime object from local to UTC format >>> import datetime >>> d = datetime.datetime(2017, 8, 15, 18, 24, 31) >>> to_utc_date(d) datetime.datetime(2017, 8, 16, 1, 24, 31) Args: date (`datetime`): Input datetime object Returns: `datetime`
def to_utc_date(date): return datetime.utcfromtimestamp(float(date.strftime('%s'))).replace(tzinfo=None) if date else None
331,557
Generate a cryptographically secure random string to use for passwords Args: length (int): Length of password, defaults to 32 characters Returns: Randomly generated string
def generate_password(length=32): return ''.join(random.SystemRandom().choice(string.ascii_letters + '!@#$+.,') for _ in range(length))
331,558
Generate a new JWT token, with optional extra information. Any data provided in `**kwargs` will be added into the token object for auth specific usage Args: user (:obj:`User`): User object to generate token for authsys (str): The auth system for which the token was generated **kwargs (dict): Any optional items to add to the token Returns: Encoded JWT token
def generate_jwt_token(user, authsys, **kwargs): # Local import to prevent app startup failures from cloud_inquisitor.config import dbconfig token = { 'auth_system': authsys, 'exp': time.time() + dbconfig.get('session_expire_time'), 'roles': [role.name for role in user.roles] } if kwargs: token.update(**kwargs) enc = jwt.encode(token, get_jwt_key_data(), algorithm='HS512') return enc.decode()
331,559
Check if the user meets the role requirements. If `match_all` is `True`, all the provided roles must apply Args: user (:obj:`User`): User object required_roles (`list` of `str`): List of roles that the user must have applied match_all (`bool`): If `True`, all the required_roles must be applied to the user, else any one match will return `True` Returns: `bool`
def has_access(user, required_roles, match_all=True): # Admins have access to everything if ROLE_ADMIN in user.roles: return True if isinstance(required_roles, str): if required_roles in user.roles: return True return False # If we received a list of roles to match against if match_all: for role in required_roles: if role not in user.roles: return False return True else: for role in required_roles: if role in user.roles: return True return False
331,561
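A usage sketch for has_access() above, with a namedtuple standing in for the real User model and assuming ROLE_ADMIN is a plain string constant not held by this user:

from collections import namedtuple

User = namedtuple('User', 'roles')  # stand-in for the real User model

user = User(roles=['Auditor', 'ReadOnly'])
assert has_access(user, ['Auditor', 'ReadOnly'])              # match_all: both roles present
assert has_access(user, ['Auditor', 'NOC'], match_all=False)  # any one match suffices
assert not has_access(user, 'NOC')                            # single-role string form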
Merge an arbitrary number of lists into a single list and dedupe it Args: *args: Two or more lists Returns: A deduped, merged list of all the provided lists
def merge_lists(*args): out = {} for contacts in filter(None, args): for contact in contacts: out[contact.value] = contact return list(out.values())
331,562
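merge_lists() above dedupes on each contact's .value attribute; a minimal sketch with a namedtuple standing in for NotificationContact:

from collections import namedtuple

Contact = namedtuple('Contact', 'type value')  # stand-in for NotificationContact

a = [Contact('email', 'ops@example.com'), Contact('email', 'dev@example.com')]
b = [Contact('email', 'ops@example.com')]  # duplicates the first entry in a
merged = merge_lists(a, b, None)           # None arguments are filtered out
assert [c.value for c in merged] == ['ops@example.com', 'dev@example.com']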
Parse a string as a date. If the string fails to parse, `None` will be returned instead >>> parse_date('2017-08-15T18:24:31') datetime.datetime(2017, 8, 15, 18, 24, 31) Args: date_string (`str`): Date in string format to parse ignoretz (`bool`): If set ``True``, ignore time zones and return a naive :class:`datetime` object. Returns: `datetime`, `None`
def parse_date(date_string, ignoretz=True): try: return parser.parse(date_string, ignoretz=ignoretz) except TypeError: return None
331,564
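parse_date() above relies on dateutil's parser (assumed imported in the module); for example:

assert parse_date('2017-08-15T18:24:31').hour == 18
assert parse_date(None) is None  # the TypeError from dateutil is swallowed, None returned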
Returns a flattened version of a list. Courtesy of https://stackoverflow.com/a/12472564 Args: data (`tuple` or `list`): Input data Returns: `list`
def flatten(data): if not data: return data if type(data[0]) in (list, tuple): return list(flatten(data[0])) + list(flatten(data[1:])) return list(data[:1]) + list(flatten(data[1:]))
331,567
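A quick demonstration of flatten() above on arbitrarily nested lists and tuples:

assert flatten([1, [2, (3, 4)], [[5]]]) == [1, 2, 3, 4, 5]
assert flatten([]) == []  # empty (falsy) input is returned unchanged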
Method to send a notification. A plugin may use only part of the information, but all fields are required. Args: subsystem (`str`): Name of the subsystem originating the notification recipients (`list` of :obj:`NotificationContact`): List of recipients subject (`str`): Subject / title of the notification body_html (`str`): HTML formatted version of the message body_text (`str`): Text formatted version of the message Returns: `None`
def send_notification(*, subsystem, recipients, subject, body_html, body_text): from cloud_inquisitor import CINQ_PLUGINS if not body_html and not body_text: raise ValueError('body_html or body_text must be provided') # Make sure that we don't have any duplicate recipients recipients = list(set(recipients)) notifiers = map(lambda plugin: plugin.load(), CINQ_PLUGINS['cloud_inquisitor.plugins.notifiers']['plugins']) for cls in filter(lambda x: x.enabled(), notifiers): for recipient in recipients: if isinstance(recipient, NotificationContact): if recipient.type == cls.notifier_type: try: notifier = cls() notifier.notify(subsystem, recipient.value, subject, body_html, body_text) except Exception: log.exception('Failed sending notification for {}/{}'.format( recipient.type, recipient.value )) else: log.warning('Unexpected recipient {}'.format(recipient))
331,568
Return the difference between two strings Will return a human-readable difference between two strings. See https://docs.python.org/3/library/difflib.html#difflib.Differ for more information about the output format Args: a (str): Original string b (str): New string Returns: `str`
def diff(a, b): return ''.join( Differ().compare( a.splitlines(keepends=True), b.splitlines(keepends=True) ) )
331,569
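diff() above wraps difflib.Differ; unchanged lines are prefixed with two spaces, removals with '- ' and additions with '+ '. For example:

print(diff('alpha\nbeta\n', 'alpha\ngamma\n'))
#   alpha
# - beta
# + gamma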
Update the cinq-frontend-latest.tar.gz redirect Args: bucket_name (str): Name of the bucket to upload to version (str): Override build version. Defaults to using SCM based versioning (git tags)
def update_latest(bucket_name, version): bucket = get_bucket_resource(bucket_name) if version: new_ver = os.path.join('release', TARBALL_FORMAT.format(version)) if not s3_file_exists(bucket, new_ver): log.error('Target file does not exist') return else: new_ver = max(x.key for x in bucket.objects.filter(Prefix='release')) bucket.put_object( Body=b'', Key=LATEST_TARBALL, WebsiteRedirectLocation='/{}'.format(new_ver) ) log.info('Updated {} to point to {}'.format(LATEST_TARBALL, new_ver))
331,597
Build and upload a new tarball Args: bucket_name (str): Name of the bucket to upload to version (str): Override build version. Defaults to using SCM based versioning (git tags) force (bool): Overwrite existing files in S3, if present verbose (bool): Verbose output
def build(bucket_name, version, force, verbose): if verbose: log.setLevel('DEBUG') if not version: version = setuptools_scm.get_version() release = "dev" if "dev" in version else "release" tarball = TARBALL_FORMAT.format(version) tarball_path = os.path.join(tempfile.gettempdir(), tarball) s3_key = os.path.join(release, tarball) try: run('npm i') run('./node_modules/.bin/gulp build.prod') except ExecutionError: log.exception('Failed executing command') return log.debug('Creating archive') tar = tarfile.open(tarball_path, "w:gz") for root, dirnames, filenames in os.walk('dist'): for f in filenames: tar.add(os.path.join(root, f), recursive=False, filter=strip_path) tar.close() log.debug('Uploading {} to s3://{}/{}'.format(tarball, bucket_name, s3_key)) try: bucket = get_bucket_resource(bucket_name) if s3_file_exists(bucket, s3_key) and not force: log.error('File already exists in S3, use --force to overwrite') return bucket.upload_file(tarball_path, os.path.join(release, tarball)) except ClientError: log.exception('AWS API failure')
331,598
Returns the class object identified by `issue_id` Args: issue_id (str): Unique EC2 Instance ID to load from database Returns: EC2 Instance object if found, else None
def get(cls, issue_id): res = Issue.get(issue_id, IssueType.get(cls.issue_type).issue_type_id) return cls(res) if res else None
331,607
Updates the object information based on live data, if there were any changes made. Any changes will be automatically applied to the object, but will not be automatically persisted. You must manually call `db.session.add(instance)` on the object. Args: data (`dict`): Resource data fetched from the AWS API Returns: `bool`: `True` if any property was changed
def update(self, data): updated = False if 'missing_tags' in data: updated |= self.set_property('missing_tags', data['missing_tags']) if 'notes' in data: updated |= self.set_property('notes', data['notes']) if 'state' in data: updated |= self.set_property('state', data['state']) if 'last_alert' in data: updated |= self.set_property('last_alert', data['last_alert']) if updated: now = datetime.now() self.set_property('last_change', now) return updated
331,611
Updates the object information based on live data, if there were any changes made. Any changes will be automatically applied to the object, but will not be automatically persisted. You must manually call `db.session.add(instance)` on the object. Args: data (`dict`): Resource data fetched from the AWS API Returns: `bool`: `True` if any property was changed
def update(self, data): # If the instance was terminated, remove it updated = False if 'state' in data: updated = self.set_property('state', data['state']) if 'end' in data: updated |= self.set_property('end', data['end']) if 'last_alert' in data: updated |= self.set_property('last_alert', data['last_alert']) return updated
331,613
Updates the object information based on live data, if there were any changes made. Any changes will be automatically applied to the object, but will not be automatically persisted. You must manually call `db.session.add(instance)` on the object. Args: data (`dict`): Resource data fetched from the AWS API Returns: `bool`: `True` if any property was changed
def update(self, data): # If the instance was terminated, remove it updated = self.set_property('state', data['state']) updated |= self.set_property('notes', sorted(data['notes'] or [])) updated |= self.set_property('last_notice', data['last_notice']) if updated: self.set_property('last_change', datetime.now()) return updated
331,614
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
def delete(filething): t = OggFLAC(filething) filething.fileobj.seek(0) t.delete(filething)
331,626
Remove tags from a file. Args: delete_v1 (bool): delete any ID3v1 tag delete_v2 (bool): delete any ID3v2 tag Raises: mutagen.MutagenError: In case deleting failed
def delete(filething, delete_v1=True, delete_v2=True): f = filething.fileobj if delete_v1: tag, offset = find_id3v1(f) if tag is not None: f.seek(offset, 2) f.truncate() # technically an insize=0 tag is invalid, but we delete it anyway # (primarily because we used to write it) if delete_v2: f.seek(0, 0) idata = f.read(10) try: id3, vmaj, vrev, flags, insize = struct.unpack('>3sBBB4s', idata) except struct.error: pass else: insize = BitPaddedInt(insize) if id3 == b'ID3' and insize >= 0: delete_bytes(f, insize + 10, 0)
331,631
delete(filething=None, delete_v1=True, delete_v2=True) Remove tags from a file. Args: filething (filething): A filename or `None` to use the one used when loading. delete_v1 (bool): delete any ID3v1 tag delete_v2 (bool): delete any ID3v2 tag If no filename is given, the one most recently loaded is used.
def delete(self, filething=None, delete_v1=True, delete_v2=True): delete(filething, delete_v1, delete_v2) self.clear()
331,637
Add an empty ID3 tag to the file. Args: ID3 (ID3): An ID3 subclass to use, or `None` to use the one that was used when loading. A custom tag reader may be used instead of the default `ID3` object, e.g. a `mutagen.easyid3.EasyID3` reader.
def add_tags(self, ID3=None): if ID3 is None: ID3 = self.ID3 if self.tags is None: self.ID3 = ID3 self.tags = ID3() else: raise error("an ID3 tag already exists")
331,638
Returns a possibly valid _ADTSStream or None. Args: max_bytes (int): maximum bytes to read
def find_stream(cls, fileobj, max_bytes): r = BitReader(fileobj) stream = cls(r) if stream.sync(max_bytes): stream.offset = (r.get_position() - 12) // 8 return stream
331,656
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
def delete(filething): t = OggSpeex(filething) filething.fileobj.seek(0) t.delete(filething)
331,669
Remove tags from a file. Args: filething (filething) Raises: mutagen.MutagenError
def delete(filething): dsf_file = DSFFile(filething.fileobj) if dsf_file.dsd_chunk.offset_metdata_chunk != 0: id3_location = dsf_file.dsd_chunk.offset_metdata_chunk dsf_file.dsd_chunk.offset_metdata_chunk = 0 dsf_file.dsd_chunk.write() filething.fileobj.seek(id3_location) filething.fileobj.truncate()
331,671
Verifies that the passed fileobj is a file like object which we can use. Args: writable (bool): verify that the file object is writable as well Raises: ValueError: In case the object is not a file object that is readable (or writable if required) or is not opened in bytes mode.
def verify_fileobj(fileobj, writable=False): try: data = fileobj.read(0) except Exception: if not hasattr(fileobj, "read"): raise ValueError("%r not a valid file object" % fileobj) raise ValueError("Can't read from file object %r" % fileobj) if not isinstance(data, bytes): raise ValueError( "file object %r not opened in binary mode" % fileobj) if writable: try: fileobj.write(b"") except Exception: if not hasattr(fileobj, "write"): raise ValueError("%r not a valid file object" % fileobj) raise ValueError("Can't write to file object %r" % fileobj)
331,763
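verify_fileobj() above accepts binary file objects and rejects text-mode ones; a quick sketch:

from io import BytesIO, StringIO

verify_fileobj(BytesIO(b'data'))           # readable, bytes mode: passes
verify_fileobj(BytesIO(), writable=True)   # writable check also passes
try:
    verify_fileobj(StringIO('text'))       # text mode: read() returns str, not bytes
except ValueError as e:
    print(e)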
A decorator for functions taking a `filething` as a first argument. Passes a FileThing instance as the first argument to the wrapped function. Args: method (bool): If the wrapped function is a method writable (bool): If a filename is passed, opens the file read/write; if passed a file object, verifies that it is writable. create (bool): If passed a filename that does not exist, will create a new empty file.
def loadfile(method=True, writable=False, create=False): def convert_file_args(args, kwargs): filething = args[0] if args else None filename = kwargs.pop("filename", None) fileobj = kwargs.pop("fileobj", None) return filething, filename, fileobj, args[1:], kwargs def wrap(func): @wraps(func) def wrapper(self, *args, **kwargs): filething, filename, fileobj, args, kwargs = \ convert_file_args(args, kwargs) with _openfile(self, filething, filename, fileobj, writable, create) as h: return func(self, h, *args, **kwargs) @wraps(func) def wrapper_func(*args, **kwargs): filething, filename, fileobj, args, kwargs = \ convert_file_args(args, kwargs) with _openfile(None, filething, filename, fileobj, writable, create) as h: return func(h, *args, **kwargs) return wrapper if method else wrapper_func return wrap
331,765
A decorator for reraising exceptions with a different type. Mostly useful for IOError. Args: exc_src (type): The source exception type exc_dest (type): The target exception type.
def convert_error(exc_src, exc_dest): def wrap(func): @wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except exc_dest: raise except exc_src as err: reraise(exc_dest, err, sys.exc_info()[2]) return wrapper return wrap
331,766
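A usage sketch for convert_error() above, re-raising IOError from a hypothetical helper as a custom exception type; it assumes the surrounding module's reraise helper is available, as in the original code.

class MyError(Exception):
    pass

@convert_error(IOError, MyError)
def read_bytes(path):
    with open(path, 'rb') as f:
        return f.read()

try:
    read_bytes('/no/such/file')  # FileNotFoundError is a subclass of IOError
except MyError as e:
    print('re-raised as MyError:', e)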
yields a FileThing Args: filething: Either a file name, a file object or None filename: Either a file name or None fileobj: Either a file object or None writable (bool): if the file should be opened writable create (bool): if the file should be created if it doesn't exist; implies writable Raises: MutagenError: In case opening the file failed TypeError: in case neither a file name nor a file object is passed
def _openfile(instance, filething, filename, fileobj, writable, create): assert not create or writable # to allow stacked context managers, just pass the result through if isinstance(filething, FileThing): filename = filething.filename fileobj = filething.fileobj filething = None if filething is not None: if is_fileobj(filething): fileobj = filething elif hasattr(filething, "__fspath__"): filename = filething.__fspath__() if not isinstance(filename, (bytes, text_type)): raise TypeError("expected __fspath__() to return a filename") else: filename = filething if instance is not None: # XXX: take "not writable" as loading the file.. if not writable: instance.filename = filename elif filename is None: filename = getattr(instance, "filename", None) if fileobj is not None: verify_fileobj(fileobj, writable=writable) yield FileThing(fileobj, filename, filename or fileobj_name(fileobj)) elif filename is not None: verify_filename(filename) inmemory_fileobj = False try: fileobj = open(filename, "rb+" if writable else "rb") except IOError as e: if writable and e.errno == errno.EOPNOTSUPP: # Some file systems (gvfs over fuse) don't support opening # files read/write. To make things still work read the whole # file into an in-memory file like object and write it back # later. # https://github.com/quodlibet/mutagen/issues/300 try: with open(filename, "rb") as fileobj: fileobj = BytesIO(fileobj.read()) except IOError as e2: raise MutagenError(e2) inmemory_fileobj = True elif create and e.errno == errno.ENOENT: assert writable try: fileobj = open(filename, "wb+") except IOError as e2: raise MutagenError(e2) else: raise MutagenError(e) with fileobj as fileobj: yield FileThing(fileobj, filename, filename) if inmemory_fileobj: assert writable data = fileobj.getvalue() try: with open(filename, "wb") as fileobj: fileobj.write(data) except IOError as e: raise MutagenError(e) else: raise TypeError("Missing filename or fileobj argument")
331,767
A decorator for creating an int enum class. Makes the values a subclass of the type and implements repr/str. The new class will be a subclass of int. Args: cls (type): The class to convert to an enum Returns: type: A new class :: @enum class Foo(object): FOO = 1 BAR = 2
def enum(cls): assert cls.__bases__ == (object,) d = dict(cls.__dict__) new_type = type(cls.__name__, (int,), d) new_type.__module__ = cls.__module__ map_ = {} for key, value in iteritems(d): if key.upper() == key and isinstance(value, integer_types): value_instance = new_type(value) setattr(new_type, key, value_instance) map_[value] = key def str_(self): if self in map_: return "%s.%s" % (type(self).__name__, map_[self]) return "%d" % int(self) def repr_(self): if self in map_: return "<%s.%s: %d>" % (type(self).__name__, map_[self], int(self)) return "%d" % int(self) setattr(new_type, "__repr__", repr_) setattr(new_type, "__str__", str_) return new_type
331,769
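The @enum decorator above turns an int-valued class into an int subclass with readable str/repr; for example:

@enum
class Color(object):
    RED = 1
    GREEN = 2

assert Color.RED == 1                        # values are still ints
assert str(Color.RED) == 'Color.RED'
assert repr(Color.GREEN) == '<Color.GREEN: 2>'
assert isinstance(Color.RED, Color)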
A decorator for creating an int flags class. Makes the values a subclass of the type and implements repr/str. The new class will be a subclass of int. Args: cls (type): The class to convert to a flags class Returns: type: A new class :: @flags class Foo(object): FOO = 1 BAR = 2
def flags(cls): assert cls.__bases__ == (object,) d = dict(cls.__dict__) new_type = type(cls.__name__, (int,), d) new_type.__module__ = cls.__module__ map_ = {} for key, value in iteritems(d): if key.upper() == key and isinstance(value, integer_types): value_instance = new_type(value) setattr(new_type, key, value_instance) map_[value] = key def str_(self): value = int(self) matches = [] for k, v in map_.items(): if value & k: matches.append("%s.%s" % (type(self).__name__, v)) value &= ~k if value != 0 or not matches: matches.append(text_type(value)) return " | ".join(matches) def repr_(self): return "<%s: %d>" % (str(self), int(self)) setattr(new_type, "__repr__", repr_) setattr(new_type, "__str__", str_) return new_type
331,770
Returns the size of the file. The position when passed in will be preserved if no error occurs. Args: fileobj (fileobj) Returns: int: The size of the file Raises: IOError
def get_size(fileobj): old_pos = fileobj.tell() try: fileobj.seek(0, 2) return fileobj.tell() finally: fileobj.seek(old_pos, 0)
331,772
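get_size() above reports the file size while preserving the current position:

from io import BytesIO

buf = BytesIO(b'hello')
buf.seek(2)
assert get_size(buf) == 5
assert buf.tell() == 2  # position restored by the finally block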
Like fileobj.read but raises IOError if not all requested data is returned. If you want to distinguish IOError and the EOS case, better handle the error yourself instead of using this. Args: fileobj (fileobj) size (int): amount of bytes to read Raises: IOError: In case read fails or not enough data is read
def read_full(fileobj, size): if size < 0: raise ValueError("size must not be negative") data = fileobj.read(size) if len(data) != size: raise IOError return data
331,773
Like fileobj.seek(-offset, 2), but will not try to go beyond the start Needed since file objects from BytesIO will not raise IOError and file objects from open() will raise IOError if going to a negative offset. To make things easier for custom implementations, instead of allowing both behaviors, we just don't do it. Args: fileobj (fileobj) offset (int): how many bytes away from the end backwards to seek to Raises: IOError
def seek_end(fileobj, offset): if offset < 0: raise ValueError if get_size(fileobj) < offset: fileobj.seek(0, 0) else: fileobj.seek(-offset, 2)
331,774
Resize a file by `diff`. New space will be filled with zeros. Args: fobj (fileobj) diff (int): amount of size to change Raises: IOError
def resize_file(fobj, diff, BUFFER_SIZE=2 ** 16): fobj.seek(0, 2) filesize = fobj.tell() if diff < 0: if filesize + diff < 0: raise ValueError # truncate flushes internally fobj.truncate(filesize + diff) elif diff > 0: try: while diff: addsize = min(BUFFER_SIZE, diff) fobj.write(b"\x00" * addsize) diff -= addsize fobj.flush() except IOError as e: if e.errno == errno.ENOSPC: # To reduce the chance of corrupt files in case of missing # space try to revert the file expansion back. Of course # in reality every in-file-write can also fail due to COW etc. # Note: IOError gets also raised in flush() due to buffering fobj.truncate(filesize) raise
331,776
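A small demonstration of resize_file() above using an in-memory file object:

from io import BytesIO

buf = BytesIO(b'abc')
resize_file(buf, 5)    # grow: five zero bytes appended
assert buf.getvalue() == b'abc' + b'\x00' * 5
resize_file(buf, -6)   # shrink: truncated back to two bytes
assert buf.getvalue() == b'ab'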
Moves data around using read()/write(). Args: fileobj (fileobj) dest (int): The destination offset src (int): The source offset count (int): The amount of data to move Raises: IOError: In case an operation on the fileobj fails ValueError: In case invalid parameters were given
def fallback_move(fobj, dest, src, count, BUFFER_SIZE=2 ** 16): if dest < 0 or src < 0 or count < 0: raise ValueError fobj.seek(0, 2) filesize = fobj.tell() if max(dest, src) + count > filesize: raise ValueError("area outside of file") if src > dest: moved = 0 while count - moved: this_move = min(BUFFER_SIZE, count - moved) fobj.seek(src + moved) buf = fobj.read(this_move) fobj.seek(dest + moved) fobj.write(buf) moved += this_move fobj.flush() else: while count: this_move = min(BUFFER_SIZE, count) fobj.seek(src + count - this_move) buf = fobj.read(this_move) fobj.seek(count + dest - this_move) fobj.write(buf) count -= this_move fobj.flush()
331,777
Insert size bytes of empty space starting at offset. fobj must be an open file object, open rb+ or equivalent. Mutagen tries to use mmap to resize the file, but falls back to a significantly slower method if mmap fails. Args: fobj (fileobj) size (int): The amount of space to insert offset (int): The offset at which to insert the space Raises: IOError
def insert_bytes(fobj, size, offset, BUFFER_SIZE=2 ** 16): if size < 0 or offset < 0: raise ValueError fobj.seek(0, 2) filesize = fobj.tell() movesize = filesize - offset if movesize < 0: raise ValueError resize_file(fobj, size, BUFFER_SIZE) if mmap is not None: try: mmap_move(fobj, offset + size, offset, movesize) except mmap.error: fallback_move(fobj, offset + size, offset, movesize, BUFFER_SIZE) else: fallback_move(fobj, offset + size, offset, movesize, BUFFER_SIZE)
331,778
Resize an area in a file by adding to or deleting from the end of it. Does nothing if no resizing is needed. Args: fobj (fileobj) old_size (int): The current size of the area starting at offset new_size (int): The new size of the area offset (int): The start of the area Raises: IOError
def resize_bytes(fobj, old_size, new_size, offset): if new_size < old_size: delete_size = old_size - new_size delete_at = offset + new_size delete_bytes(fobj, delete_size, delete_at) elif new_size > old_size: insert_size = new_size - old_size insert_at = offset + old_size insert_bytes(fobj, insert_size, insert_at)
331,779
Like text.encode(encoding) but always returns little endian/big endian BOMs instead of the system one. Args: text (text) encoding (str) errors (str) le (boolean): if little endian Returns: bytes Raises: UnicodeEncodeError LookupError
def encode_endian(text, encoding, errors="strict", le=True): encoding = codecs.lookup(encoding).name if encoding == "utf-16": if le: return codecs.BOM_UTF16_LE + text.encode("utf-16-le", errors) else: return codecs.BOM_UTF16_BE + text.encode("utf-16-be", errors) elif encoding == "utf-32": if le: return codecs.BOM_UTF32_LE + text.encode("utf-32-le", errors) else: return codecs.BOM_UTF32_BE + text.encode("utf-32-be", errors) else: return text.encode(encoding, errors)
331,781
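encode_endian() above pins the BOM for UTF-16/UTF-32 regardless of the platform's native byte order; other encodings pass straight through:

import codecs

assert encode_endian(u'abc', 'utf-16', le=False).startswith(codecs.BOM_UTF16_BE)
assert encode_endian(u'abc', 'utf-16').startswith(codecs.BOM_UTF16_LE)
assert encode_endian(u'abc', 'ascii') == b'abc'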
(Windows only) Returns a file name for a file handle. Args: handle (winapi.HANDLE) Returns: `text` or `None` if no file name could be retrieved.
def _get_file_name_for_handle(handle): assert is_win assert handle != winapi.INVALID_HANDLE_VALUE size = winapi.FILE_NAME_INFO.FileName.offset + \ winapi.MAX_PATH * ctypes.sizeof(winapi.WCHAR) buf = ctypes.create_string_buffer(size) if winapi.GetFileInformationByHandleEx is None: # Windows XP return None status = winapi.GetFileInformationByHandleEx( handle, winapi.FileNameInfo, buf, size) if status == 0: return None name_info = ctypes.cast( buf, ctypes.POINTER(winapi.FILE_NAME_INFO)).contents offset = winapi.FILE_NAME_INFO.FileName.offset data = buf[offset:offset + name_info.FileNameLength] return bytes2fsn(data, "utf-16-le")
331,801
Returns whether the output device is capable of interpreting ANSI escape codes when :func:`print_` is used. Args: fd (int): file descriptor (e.g. ``sys.stdout.fileno()``) Returns: `bool`
def supports_ansi_escape_codes(fd): if os.isatty(fd): return True if not is_win: return False # Check for cygwin/msys terminal handle = winapi._get_osfhandle(fd) if handle == winapi.INVALID_HANDLE_VALUE: return False if winapi.GetFileType(handle) != winapi.FILE_TYPE_PIPE: return False file_name = _get_file_name_for_handle(handle) match = re.match( "^\\\\(cygwin|msys)-[a-z0-9]+-pty[0-9]+-(from|to)-master$", file_name) return match is not None
331,802
load(filething) Load file information from a filename. Args: filething (filething) Raises: mutagen.MutagenError
def load(self, filething): fileobj = filething.fileobj try: self.info = self._Info(fileobj) self.tags = self._Tags(fileobj, self.info) self.info._post_tags(fileobj) except (error, IOError) as e: reraise(self._Error, e, sys.exc_info()[2]) except EOFError: raise self._Error("no appropriate stream found")
331,814
delete(filething=None) Remove tags from a file. If no filename is given, the one most recently loaded is used. Args: filething (filething) Raises: mutagen.MutagenError
def delete(self, filething=None): fileobj = filething.fileobj self.tags.clear() # TODO: we should delegate the deletion to the subclass and not through # _inject. try: try: self.tags._inject(fileobj, lambda x: 0) except error as e: reraise(self._Error, e, sys.exc_info()[2]) except EOFError: raise self._Error("no appropriate stream found") except IOError as e: reraise(self._Error, e, sys.exc_info()[2])
331,815
save(filething=None, padding=None) Save a tag to a file. If no filename is given, the one most recently loaded is used. Args: filething (filething) padding (:obj:`mutagen.PaddingFunction`) Raises: mutagen.MutagenError
def save(self, filething=None, padding=None): try: self.tags._inject(filething.fileobj, padding) except (IOError, error) as e: reraise(self._Error, e, sys.exc_info()[2]) except EOFError: raise self._Error("no appropriate stream found")
331,816
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
def delete(filething): t = OggOpus(filething) filething.fileobj.seek(0) t.delete(filething)
331,819
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
def delete(filething): t = MP4(filething) filething.fileobj.seek(0) t.delete(filething)
331,829
Remove tags from a file. Args: filething (filething) Raises: mutagen.MutagenError
def delete(filething): f = FLAC(filething) filething.fileobj.seek(0) f.delete(filething)
331,906
Save metadata blocks to a file. Args: filething (filething) deleteid3 (bool): delete id3 tags while at it padding (:obj:`mutagen.PaddingFunction`) If no filename is given, the one most recently loaded is used.
def save(self, filething=None, deleteid3=False, padding=None): self._save(filething, self.metadata_blocks, deleteid3, padding)
331,943
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
def delete(filething): try: t = APEv2(filething) except APENoHeaderError: return filething.fileobj.seek(0) t.delete(filething)
331,994
load(filething) Args: filething (filething) Raises: mutagen.MutagenError
def load(self, filething): fileobj = filething.fileobj self.info = ASFInfo() self.tags = ASFTags() self._tags = {} self._header = HeaderObject.parse_full(self, fileobj) for guid in [ContentDescriptionObject.GUID, ExtendedContentDescriptionObject.GUID, MetadataObject.GUID, MetadataLibraryObject.GUID]: self.tags.extend(self._tags.pop(guid, [])) assert not self._tags
332,040
save(filething=None, padding=None) Save tag changes back to the loaded file. Args: filething (filething) padding (:obj:`mutagen.PaddingFunction`) Raises: mutagen.MutagenError
def save(self, filething=None, padding=None): # Move attributes to the right objects self.to_content_description = {} self.to_extended_content_description = {} self.to_metadata = {} self.to_metadata_library = [] for name, value in self.tags: library_only = (value.data_size() > 0xFFFF or value.TYPE == GUID) can_cont_desc = value.TYPE == UNICODE if library_only or value.language is not None: self.to_metadata_library.append((name, value)) elif value.stream is not None: if name not in self.to_metadata: self.to_metadata[name] = value else: self.to_metadata_library.append((name, value)) elif name in ContentDescriptionObject.NAMES: if name not in self.to_content_description and can_cont_desc: self.to_content_description[name] = value else: self.to_metadata_library.append((name, value)) else: if name not in self.to_extended_content_description: self.to_extended_content_description[name] = value else: self.to_metadata_library.append((name, value)) # Add missing objects header = self._header if header.get_child(ContentDescriptionObject.GUID) is None: header.objects.append(ContentDescriptionObject()) if header.get_child(ExtendedContentDescriptionObject.GUID) is None: header.objects.append(ExtendedContentDescriptionObject()) header_ext = header.get_child(HeaderExtensionObject.GUID) if header_ext is None: header_ext = HeaderExtensionObject() header.objects.append(header_ext) if header_ext.get_child(MetadataObject.GUID) is None: header_ext.objects.append(MetadataObject()) if header_ext.get_child(MetadataLibraryObject.GUID) is None: header_ext.objects.append(MetadataLibraryObject()) fileobj = filething.fileobj # Render to file old_size = header.parse_size(fileobj)[0] data = header.render_full(self, fileobj, old_size, padding) size = len(data) resize_bytes(fileobj, old_size, size, 0) fileobj.seek(0) fileobj.write(data)
332,041
delete(filething=None) Args: filething (filething) Raises: mutagen.MutagenError
def delete(self, filething=None): self.tags.clear() self.save(filething, padding=lambda x: 0)
332,042
Gives a guess about the encoder settings used. Returns an empty string if unknown. The guess is mostly correct when the file was encoded with the default options (-V --preset --alt-preset --abr -b etc.) and no other fancy options. Args: major (int) minor (int) Returns: text
def guess_settings(self, major, minor): version = major, minor if self.vbr_method == 2: if version in ((3, 90), (3, 91), (3, 92)) and self.encoding_flags: if self.bitrate < 255: return u"--alt-preset %d" % self.bitrate else: return u"--alt-preset %d+" % self.bitrate if self.preset_used != 0: return u"--preset %d" % self.preset_used elif self.bitrate < 255: return u"--abr %d" % self.bitrate else: return u"--abr %d+" % self.bitrate elif self.vbr_method == 1: if self.preset_used == 0: if self.bitrate < 255: return u"-b %d" % self.bitrate else: return u"-b 255+" elif self.preset_used == 1003: return u"--preset insane" return u"-b %d" % self.preset_used elif version in ((3, 90), (3, 91), (3, 92)): preset_key = (self.vbr_quality, self.quality, self.vbr_method, self.lowpass_filter, self.ath_type) if preset_key == (1, 2, 4, 19500, 3): return u"--preset r3mix" if preset_key == (2, 2, 3, 19000, 4): return u"--alt-preset standard" if preset_key == (2, 2, 3, 19500, 2): return u"--alt-preset extreme" if self.vbr_method == 3: return u"-V %s" % self.vbr_quality elif self.vbr_method in (4, 5): return u"-V %s --vbr-new" % self.vbr_quality elif version in ((3, 93), (3, 94), (3, 95), (3, 96), (3, 97)): if self.preset_used == 1001: return u"--preset standard" elif self.preset_used == 1002: return u"--preset extreme" elif self.preset_used == 1004: return u"--preset fast standard" elif self.preset_used == 1005: return u"--preset fast extreme" elif self.preset_used == 1006: return u"--preset medium" elif self.preset_used == 1007: return u"--preset fast medium" if self.vbr_method == 3: return u"-V %s" % self.vbr_quality elif self.vbr_method in (4, 5): return u"-V %s --vbr-new" % self.vbr_quality elif version == (3, 98): if self.vbr_method == 3: return u"-V %s --vbr-old" % self.vbr_quality elif self.vbr_method in (4, 5): return u"-V %s" % self.vbr_quality elif version >= (3, 99): if self.vbr_method == 3: return u"-V %s --vbr-old" % self.vbr_quality elif self.vbr_method in (4, 5): p = self.vbr_quality adjust_key = (p, self.bitrate, self.lowpass_filter) # https://sourceforge.net/p/lame/bugs/455/ p = { (5, 32, 0): 7, (5, 8, 0): 8, (6, 8, 0): 9, }.get(adjust_key, p) return u"-V %s" % p return u""
332,070
Like `os.getenv` but returns unicode under Windows + Python 2 Args: key (pathlike): The env var to get value (object): The value to return if the env var does not exist Returns: `fsnative` or `object`: The env var or the passed value if it doesn't exist
def getenv(key, value=None): key = path2fsn(key) if is_win and PY2: return environ.get(key, value) return os.getenv(key, value)
332,081
Like `os.unsetenv` but takes unicode under Windows + Python 2 Args: key (pathlike): The env var to unset
def unsetenv(key): key = path2fsn(key) if is_win: # python 3 has no unsetenv under Windows -> use our ctypes one as well try: del_windows_env_var(key) except WindowsError: pass else: os.unsetenv(key)
332,082
Like `os.putenv` but takes unicode under Windows + Python 2 Args: key (pathlike): The env var to get value (pathlike): The value to set Raises: ValueError
def putenv(key, value): key = path2fsn(key) value = path2fsn(value) if is_win and PY2: try: set_windows_env_var(key, value) except WindowsError: # py3 + win fails here raise ValueError else: try: os.putenv(key, value) except OSError: # win + py3 raise here for invalid keys which is probably a bug. # ValueError seems better raise ValueError
332,083
Delete frames of the given type and add frames in 'values'. Args: key (text): key for frames to delete values (list[Frame]): frames to add
def setall(self, key, values): self.delall(key) for tag in values: self[tag.HashKey] = tag
332,108
Add a frame. Args: frame (Frame): the frame to add strict (bool): if this should raise in case it can't be added and frames shouldn't be merged.
def _add(self, frame, strict): if not isinstance(frame, Frame): raise TypeError("%r not a Frame instance" % frame) orig_frame = frame frame = frame._upgrade_frame() if frame is None: if not strict: return raise TypeError( "Can't upgrade %r frame" % type(orig_frame).__name__) hash_key = frame.HashKey if strict or hash_key not in self: self[hash_key] = frame return # Try to merge frames, or change the new one. Since changing # the new one can lead to new conflicts, try until everything is # either merged or added. while True: old_frame = self[hash_key] new_frame = old_frame._merge_frame(frame) new_hash = new_frame.HashKey if new_hash == hash_key: self[hash_key] = new_frame break else: assert new_frame is frame if new_hash not in self: self[new_hash] = new_frame break hash_key = new_hash
332,110
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
def delete(filething): t = OggTheora(filething) filething.fileobj.seek(0) t.delete(filething)
332,116
Parse a Vorbis comment from a file-like object. Arguments: errors (str): 'strict', 'replace', or 'ignore'. This affects Unicode decoding and how other malformed content is interpreted. framing (bool): if true, fail if a framing bit is not present. Framing bits are required by the Vorbis comment specification, but are not used in FLAC Vorbis comment blocks.
def load(self, fileobj, errors='replace', framing=True): try: vendor_length = cdata.uint_le(fileobj.read(4)) self.vendor = fileobj.read(vendor_length).decode('utf-8', errors) count = cdata.uint_le(fileobj.read(4)) for i in xrange(count): length = cdata.uint_le(fileobj.read(4)) try: string = fileobj.read(length).decode('utf-8', errors) except (OverflowError, MemoryError): raise error("cannot read %d bytes, too large" % length) try: tag, value = string.split('=', 1) except ValueError as err: if errors == "ignore": continue elif errors == "replace": tag, value = u"unknown%d" % i, string else: reraise(VorbisEncodingError, err, sys.exc_info()[2]) try: tag = tag.encode('ascii', errors) except UnicodeEncodeError: raise VorbisEncodingError("invalid tag name %r" % tag) else: # string keys in py3k if PY3: tag = tag.decode("ascii") if is_valid_key(tag): self.append((tag, value)) if framing and not bytearray(fileobj.read(1))[0] & 0x01: raise VorbisUnsetFrameError("framing bit was unset") except (cdata.error, TypeError): raise error("file is not a valid Vorbis comment")
332,127
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
def delete(filething): t = OggVorbis(filething) filething.fileobj.seek(0) t.delete(filething)
332,141
Checks if the blocks in the RDD match the expected types. Parameters: rdd (splearn.BlockRDD): The RDD to check expected_dtype ({type, list of types, tuple of types, dict of types}): Expected type(s). If the RDD is a DictRDD the parameter type is restricted to dict. Returns: accept (bool): `True` if the types match.
def check_rdd_dtype(rdd, expected_dtype): if not isinstance(rdd, BlockRDD): raise TypeError("Expected {0} for parameter rdd, got {1}." .format(BlockRDD, type(rdd))) if isinstance(rdd, DictRDD): if not isinstance(expected_dtype, dict): raise TypeError('Expected {0} for parameter ' 'expected_dtype, got {1}.' .format(dict, type(expected_dtype))) accept = True types = dict(list(zip(rdd.columns, rdd.dtype))) for key, values in expected_dtype.items(): if not isinstance(values, (tuple, list)): values = [values] accept = accept and types[key] in values return accept if not isinstance(expected_dtype, (tuple, list)): expected_dtype = [expected_dtype] return rdd.dtype in expected_dtype
332,992
Constructor. Args: channel: A grpc.Channel.
def __init__(self, channel): self.Range = channel.unary_unary( '/etcdserverpb.KV/Range', request_serializer=rpc__pb2.RangeRequest.SerializeToString, response_deserializer=rpc__pb2.RangeResponse.FromString, ) self.Put = channel.unary_unary( '/etcdserverpb.KV/Put', request_serializer=rpc__pb2.PutRequest.SerializeToString, response_deserializer=rpc__pb2.PutResponse.FromString, ) self.DeleteRange = channel.unary_unary( '/etcdserverpb.KV/DeleteRange', request_serializer=rpc__pb2.DeleteRangeRequest.SerializeToString, response_deserializer=rpc__pb2.DeleteRangeResponse.FromString, ) self.Txn = channel.unary_unary( '/etcdserverpb.KV/Txn', request_serializer=rpc__pb2.TxnRequest.SerializeToString, response_deserializer=rpc__pb2.TxnResponse.FromString, ) self.Compact = channel.unary_unary( '/etcdserverpb.KV/Compact', request_serializer=rpc__pb2.CompactionRequest.SerializeToString, response_deserializer=rpc__pb2.CompactionResponse.FromString, )
333,872
Constructor. Args: channel: A grpc.Channel.
def __init__(self, channel): self.Watch = channel.stream_stream( '/etcdserverpb.Watch/Watch', request_serializer=rpc__pb2.WatchRequest.SerializeToString, response_deserializer=rpc__pb2.WatchResponse.FromString, )
333,873
Constructor. Args: channel: A grpc.Channel.
def __init__(self, channel): self.LeaseGrant = channel.unary_unary( '/etcdserverpb.Lease/LeaseGrant', request_serializer=rpc__pb2.LeaseGrantRequest.SerializeToString, response_deserializer=rpc__pb2.LeaseGrantResponse.FromString, ) self.LeaseRevoke = channel.unary_unary( '/etcdserverpb.Lease/LeaseRevoke', request_serializer=rpc__pb2.LeaseRevokeRequest.SerializeToString, response_deserializer=rpc__pb2.LeaseRevokeResponse.FromString, ) self.LeaseKeepAlive = channel.stream_stream( '/etcdserverpb.Lease/LeaseKeepAlive', request_serializer=rpc__pb2.LeaseKeepAliveRequest.SerializeToString, response_deserializer=rpc__pb2.LeaseKeepAliveResponse.FromString, ) self.LeaseTimeToLive = channel.unary_unary( '/etcdserverpb.Lease/LeaseTimeToLive', request_serializer=rpc__pb2.LeaseTimeToLiveRequest.SerializeToString, response_deserializer=rpc__pb2.LeaseTimeToLiveResponse.FromString, ) self.LeaseLeases = channel.unary_unary( '/etcdserverpb.Lease/LeaseLeases', request_serializer=rpc__pb2.LeaseLeasesRequest.SerializeToString, response_deserializer=rpc__pb2.LeaseLeasesResponse.FromString, )
333,874
Constructor. Args: channel: A grpc.Channel.
def __init__(self, channel): self.MemberAdd = channel.unary_unary( '/etcdserverpb.Cluster/MemberAdd', request_serializer=rpc__pb2.MemberAddRequest.SerializeToString, response_deserializer=rpc__pb2.MemberAddResponse.FromString, ) self.MemberRemove = channel.unary_unary( '/etcdserverpb.Cluster/MemberRemove', request_serializer=rpc__pb2.MemberRemoveRequest.SerializeToString, response_deserializer=rpc__pb2.MemberRemoveResponse.FromString, ) self.MemberUpdate = channel.unary_unary( '/etcdserverpb.Cluster/MemberUpdate', request_serializer=rpc__pb2.MemberUpdateRequest.SerializeToString, response_deserializer=rpc__pb2.MemberUpdateResponse.FromString, ) self.MemberList = channel.unary_unary( '/etcdserverpb.Cluster/MemberList', request_serializer=rpc__pb2.MemberListRequest.SerializeToString, response_deserializer=rpc__pb2.MemberListResponse.FromString, )
333,875
Constructor. Args: channel: A grpc.Channel.
def __init__(self, channel): self.Alarm = channel.unary_unary( '/etcdserverpb.Maintenance/Alarm', request_serializer=rpc__pb2.AlarmRequest.SerializeToString, response_deserializer=rpc__pb2.AlarmResponse.FromString, ) self.Status = channel.unary_unary( '/etcdserverpb.Maintenance/Status', request_serializer=rpc__pb2.StatusRequest.SerializeToString, response_deserializer=rpc__pb2.StatusResponse.FromString, ) self.Defragment = channel.unary_unary( '/etcdserverpb.Maintenance/Defragment', request_serializer=rpc__pb2.DefragmentRequest.SerializeToString, response_deserializer=rpc__pb2.DefragmentResponse.FromString, ) self.Hash = channel.unary_unary( '/etcdserverpb.Maintenance/Hash', request_serializer=rpc__pb2.HashRequest.SerializeToString, response_deserializer=rpc__pb2.HashResponse.FromString, ) self.HashKV = channel.unary_unary( '/etcdserverpb.Maintenance/HashKV', request_serializer=rpc__pb2.HashKVRequest.SerializeToString, response_deserializer=rpc__pb2.HashKVResponse.FromString, ) self.Snapshot = channel.unary_stream( '/etcdserverpb.Maintenance/Snapshot', request_serializer=rpc__pb2.SnapshotRequest.SerializeToString, response_deserializer=rpc__pb2.SnapshotResponse.FromString, ) self.MoveLeader = channel.unary_unary( '/etcdserverpb.Maintenance/MoveLeader', request_serializer=rpc__pb2.MoveLeaderRequest.SerializeToString, response_deserializer=rpc__pb2.MoveLeaderResponse.FromString, )
333,876
Constructor. Args: channel: A grpc.Channel.
def __init__(self, channel): self.AuthEnable = channel.unary_unary( '/etcdserverpb.Auth/AuthEnable', request_serializer=rpc__pb2.AuthEnableRequest.SerializeToString, response_deserializer=rpc__pb2.AuthEnableResponse.FromString, ) self.AuthDisable = channel.unary_unary( '/etcdserverpb.Auth/AuthDisable', request_serializer=rpc__pb2.AuthDisableRequest.SerializeToString, response_deserializer=rpc__pb2.AuthDisableResponse.FromString, ) self.Authenticate = channel.unary_unary( '/etcdserverpb.Auth/Authenticate', request_serializer=rpc__pb2.AuthenticateRequest.SerializeToString, response_deserializer=rpc__pb2.AuthenticateResponse.FromString, ) self.UserAdd = channel.unary_unary( '/etcdserverpb.Auth/UserAdd', request_serializer=rpc__pb2.AuthUserAddRequest.SerializeToString, response_deserializer=rpc__pb2.AuthUserAddResponse.FromString, ) self.UserGet = channel.unary_unary( '/etcdserverpb.Auth/UserGet', request_serializer=rpc__pb2.AuthUserGetRequest.SerializeToString, response_deserializer=rpc__pb2.AuthUserGetResponse.FromString, ) self.UserList = channel.unary_unary( '/etcdserverpb.Auth/UserList', request_serializer=rpc__pb2.AuthUserListRequest.SerializeToString, response_deserializer=rpc__pb2.AuthUserListResponse.FromString, ) self.UserDelete = channel.unary_unary( '/etcdserverpb.Auth/UserDelete', request_serializer=rpc__pb2.AuthUserDeleteRequest.SerializeToString, response_deserializer=rpc__pb2.AuthUserDeleteResponse.FromString, ) self.UserChangePassword = channel.unary_unary( '/etcdserverpb.Auth/UserChangePassword', request_serializer=rpc__pb2.AuthUserChangePasswordRequest.SerializeToString, response_deserializer=rpc__pb2.AuthUserChangePasswordResponse.FromString, ) self.UserGrantRole = channel.unary_unary( '/etcdserverpb.Auth/UserGrantRole', request_serializer=rpc__pb2.AuthUserGrantRoleRequest.SerializeToString, response_deserializer=rpc__pb2.AuthUserGrantRoleResponse.FromString, ) self.UserRevokeRole = channel.unary_unary( '/etcdserverpb.Auth/UserRevokeRole', request_serializer=rpc__pb2.AuthUserRevokeRoleRequest.SerializeToString, response_deserializer=rpc__pb2.AuthUserRevokeRoleResponse.FromString, ) self.RoleAdd = channel.unary_unary( '/etcdserverpb.Auth/RoleAdd', request_serializer=rpc__pb2.AuthRoleAddRequest.SerializeToString, response_deserializer=rpc__pb2.AuthRoleAddResponse.FromString, ) self.RoleGet = channel.unary_unary( '/etcdserverpb.Auth/RoleGet', request_serializer=rpc__pb2.AuthRoleGetRequest.SerializeToString, response_deserializer=rpc__pb2.AuthRoleGetResponse.FromString, ) self.RoleList = channel.unary_unary( '/etcdserverpb.Auth/RoleList', request_serializer=rpc__pb2.AuthRoleListRequest.SerializeToString, response_deserializer=rpc__pb2.AuthRoleListResponse.FromString, ) self.RoleDelete = channel.unary_unary( '/etcdserverpb.Auth/RoleDelete', request_serializer=rpc__pb2.AuthRoleDeleteRequest.SerializeToString, response_deserializer=rpc__pb2.AuthRoleDeleteResponse.FromString, ) self.RoleGrantPermission = channel.unary_unary( '/etcdserverpb.Auth/RoleGrantPermission', request_serializer=rpc__pb2.AuthRoleGrantPermissionRequest.SerializeToString, response_deserializer=rpc__pb2.AuthRoleGrantPermissionResponse.FromString, ) self.RoleRevokePermission = channel.unary_unary( '/etcdserverpb.Auth/RoleRevokePermission', request_serializer=rpc__pb2.AuthRoleRevokePermissionRequest.SerializeToString, response_deserializer=rpc__pb2.AuthRoleRevokePermissionResponse.FromString, )
333,877
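A minimal connection sketch for the generated stubs above. The stub class names (MaintenanceStub, AuthStub) follow grpc's codegen naming convention and are assumed here, as is a locally reachable etcd endpoint:

import grpc

# Hypothetical endpoint; 2379 is etcd's conventional client port.
# The stub class name is assumed from grpc's generated-code convention.
channel = grpc.insecure_channel('localhost:2379')
maintenance = MaintenanceStub(channel)
response = maintenance.Status(rpc__pb2.StatusRequest())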
Add a row to the table

Arguments:
    row - row of data, should be a list with as many elements as the
        table has fields

def add_row(self, row):
    if self._field_names and len(row) != len(self._field_names):
        raise Exception(
            "Row has incorrect number of values, (actual) %d!=%d (expected)"
            % (len(row), len(self._field_names)))
    self._rows.append(list(row))
335,219
Display a nice table with occurrences and action buttons.

Arguments:
    start - hour at which the day starts
    end - hour at which the day ends
    increment - size of a time slot (in minutes)

def daily_table(context, day, start=8, end=20, increment=30):
    user = context['request'].user
    addable = CHECK_EVENT_PERM_FUNC(None, user)
    if 'calendar' in context:
        addable = addable and CHECK_CALENDAR_PERM_FUNC(context['calendar'], user)
    context['addable'] = addable
    day_part = day.get_time_slot(day.start + datetime.timedelta(hours=start),
                                 day.start + datetime.timedelta(hours=end))
    # get slots to display on the left
    slots = _cook_slots(day_part, increment)
    context['slots'] = slots
    return context
336,022
Prepare slots to be displayed on the left hand side and calculate
dimensions (in px) for each slot.

Arguments:
    period - time period for the whole series
    increment - slot size in minutes

def _cook_slots(period, increment):
    tdiff = datetime.timedelta(minutes=increment)
    num = int((period.end - period.start).total_seconds()) // int(tdiff.total_seconds())
    s = period.start
    slots = []
    for i in range(num):
        sl = period.get_time_slot(s, s + tdiff)
        slots.append(sl)
        s = s + tdiff
    return slots
336,031
Return a new :class:`PaillierPublicKey` and :class:`PaillierPrivateKey`.

Add the private key to *private_keyring* if given.

Args:
    private_keyring (PaillierPrivateKeyring): a
        :class:`PaillierPrivateKeyring` on which to store the private key.
    n_length: key size in bits.

Returns:
    tuple: The generated :class:`PaillierPublicKey` and
    :class:`PaillierPrivateKey`

def generate_paillier_keypair(private_keyring=None, n_length=DEFAULT_KEYSIZE):
    p = q = n = None
    n_len = 0
    while n_len != n_length:
        p = getprimeover(n_length // 2)
        q = p
        while q == p:
            q = getprimeover(n_length // 2)
        n = p * q
        n_len = n.bit_length()

    public_key = PaillierPublicKey(n)
    private_key = PaillierPrivateKey(public_key, p, q)

    if private_keyring is not None:
        private_keyring.add(private_key)

    return public_key, private_key
337,126
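A round-trip sketch for the generator above, assuming phe-style encrypt()/decrypt() methods on the returned key objects (they are part of the surrounding library, not defined in this snippet):

# Hypothetical usage; encrypt()/decrypt() are assumed from the library.
public_key, private_key = generate_paillier_keypair(n_length=2048)
ciphertext = public_key.encrypt(42)
assert private_key.decrypt(ciphertext) == 42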
Paillier encrypt an encoded value.

Args:
    encoding: The EncodedNumber instance.
    r_value (int): obfuscator for the ciphertext; by default (i.e. if
        *r_value* is None), a random value is used.

Returns:
    EncryptedNumber: An encryption of *value*.

def encrypt_encoded(self, encoding, r_value):
    # If r_value is None, obfuscate in a call to .obfuscate() (below)
    obfuscator = r_value or 1
    ciphertext = self.raw_encrypt(encoding.encoding, r_value=obfuscator)
    encrypted_number = EncryptedNumber(self, ciphertext, encoding.exponent)
    if r_value is None:
        encrypted_number.obfuscate()
    return encrypted_number
337,130
Decrypt raw ciphertext and return raw plaintext.

Args:
    ciphertext (int): (usually from :meth:`EncryptedNumber.ciphertext()`)
        that is to be Paillier decrypted.

Returns:
    int: Paillier decryption of ciphertext. This is a positive integer
    < :attr:`public_key.n`.

Raises:
    TypeError: if ciphertext is not an int.

def raw_decrypt(self, ciphertext):
    if not isinstance(ciphertext, int):
        raise TypeError('Expected ciphertext to be an int, not: %s' %
                        type(ciphertext))
    decrypt_to_p = self.l_function(powmod(ciphertext, self.p-1, self.psquare), self.p) * self.hp % self.p
    decrypt_to_q = self.l_function(powmod(ciphertext, self.q-1, self.qsquare), self.q) * self.hq % self.q
    return self.crt(decrypt_to_p, decrypt_to_q)
337,134
The Chinese Remainder Theorem as needed for decryption.

Returns the solution modulo n=pq.

Args:
    mp (int): the solution modulo p.
    mq (int): the solution modulo q.

def crt(self, mp, mq):
    u = (mq - mp) * self.p_inverse % self.q
    return mp + (u * self.p)
337,136
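A standalone toy run of the recombination step above, with hypothetical moduli p=5 and q=7 in place of real key halves:

# Find x mod 35 with x ≡ 3 (mod 5) and x ≡ 4 (mod 7); the answer is 18.
p, q = 5, 7
p_inverse = pow(p, -1, q)  # modular inverse of p mod q; needs Python 3.8+
mp, mq = 3, 4
u = (mq - mp) * p_inverse % q
x = mp + u * p
assert x == 18 and x % p == mp and x % q == mq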
Add a key to the keyring.

Args:
    private_key (PaillierPrivateKey): a key to add to this keyring.

def add(self, private_key):
    if not isinstance(private_key, PaillierPrivateKey):
        raise TypeError("private_key should be of type PaillierPrivateKey, "
                        "not %s" % type(private_key))
    self.__keyring[private_key.public_key] = private_key
337,139
Returns E(a + b), given self=E(a) and b.

Args:
    scalar: an int or float b, to be added to `self`.

Returns:
    EncryptedNumber: E(a + b), calculated by encrypting b and taking the
    product of E(a) and E(b) modulo :attr:`~PaillierPublicKey.n` ** 2.

Raises:
    ValueError: if scalar is out of range or precision.

def _add_scalar(self, scalar):
    encoded = EncodedNumber.encode(self.public_key, scalar,
                                   max_exponent=self.exponent)
    return self._add_encoded(encoded)
337,147
Returns E(a + b), given self=E(a) and b.

Args:
    encoded (EncodedNumber): an :class:`EncodedNumber` to be added
        to `self`.

Returns:
    EncryptedNumber: E(a + b), calculated by encrypting b and taking the
    product of E(a) and E(b) modulo :attr:`~PaillierPublicKey.n` ** 2.

Raises:
    ValueError: if scalar is out of range or precision.

def _add_encoded(self, encoded):
    if self.public_key != encoded.public_key:
        raise ValueError("Attempted to add numbers encoded against "
                         "different public keys!")

    # In order to add two numbers, their exponents must match.
    a, b = self, encoded
    if a.exponent > b.exponent:
        a = self.decrease_exponent_to(b.exponent)
    elif a.exponent < b.exponent:
        b = b.decrease_exponent_to(a.exponent)

    # Don't bother to salt/obfuscate in a basic operation, do it
    # just before leaving the computer.
    encrypted_scalar = a.public_key.raw_encrypt(b.encoding, 1)
    sum_ciphertext = a._raw_add(a.ciphertext(False), encrypted_scalar)
    return EncryptedNumber(a.public_key, sum_ciphertext, a.exponent)
337,148
Returns E(a + b) given E(a) and E(b).

Args:
    other (EncryptedNumber): an `EncryptedNumber` to add to self.

Returns:
    EncryptedNumber: E(a + b), calculated by taking the product of E(a)
    and E(b) modulo :attr:`~PaillierPublicKey.n` ** 2.

Raises:
    ValueError: if numbers were encrypted against different keys.

def _add_encrypted(self, other):
    if self.public_key != other.public_key:
        raise ValueError("Attempted to add numbers encrypted against "
                         "different public keys!")

    # In order to add two numbers, their exponents must match.
    a, b = self, other
    if a.exponent > b.exponent:
        a = self.decrease_exponent_to(b.exponent)
    elif a.exponent < b.exponent:
        b = b.decrease_exponent_to(a.exponent)

    sum_ciphertext = a._raw_add(a.ciphertext(False), b.ciphertext(False))
    return EncryptedNumber(a.public_key, sum_ciphertext, a.exponent)
337,149
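The private helpers above are normally reached through operator overloading; a minimal sketch, assuming a phe-style `__add__` that dispatches on the operand type:

# Hypothetical dispatch: scalar operands route through _add_scalar,
# EncryptedNumber operands through _add_encrypted.
public_key, private_key = generate_paillier_keypair()
e_a = public_key.encrypt(10)
e_b = public_key.encrypt(32)
assert private_key.decrypt(e_a + 5) == 15     # scalar path
assert private_key.decrypt(e_a + e_b) == 42   # encrypted path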
Run the Miller-Rabin test on n with at most k iterations

Arguments:
    n (int): number whose primality is to be tested
    k (int): maximum number of iterations to run

Returns:
    bool: If n is prime, then True is returned. Otherwise, False is
    returned, except with probability less than 4**-k.

See <https://en.wikipedia.org/wiki/Miller%E2%80%93Rabin_primality_test>

def miller_rabin(n, k):
    assert n > 3

    # find r and d such that n-1 = 2^r × d
    d = n-1
    r = 0
    while d % 2 == 0:
        d //= 2
        r += 1
    assert n-1 == d * 2**r
    assert d % 2 == 1

    for _ in range(k):  # each iteration divides risk of false prime by 4
        a = random.randint(2, n-2)  # choose a random witness

        x = pow(a, d, n)
        if x == 1 or x == n-1:
            continue  # go to next witness

        for _ in range(1, r):
            x = x*x % n
            if x == n-1:
                break  # go to next witness
        else:
            return False
    return True
337,168
Test whether n is probably prime

See <https://en.wikipedia.org/wiki/Primality_test#Probabilistic_tests>

Arguments:
    n (int): the number to be tested
    mr_rounds (int, optional): number of Miller-Rabin iterations to run;
        defaults to 25 iterations, which is what the GMP library uses

Returns:
    bool: when this function returns False, `n` is composite (not prime);
    when it returns True, `n` is prime with overwhelming probability

def is_prime(n, mr_rounds=25):
    # as an optimization we quickly detect small primes using the list above
    if n <= first_primes[-1]:
        return n in first_primes
    # for small divisors (relatively frequent), euclidean division is best
    for p in first_primes:
        if n % p == 0:
            return False
    # the actual generic test; give a false prime with probability 2⁻⁵⁰
    return miller_rabin(n, mr_rounds)
337,169
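A quick self-contained check of the two tests above; `first_primes` is assumed to be a module-level list of small primes (here, the primes below 50):

import random  # miller_rabin draws its witnesses from here

first_primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47]

assert is_prime(2**61 - 1)      # a Mersenne prime
assert not is_prime(2**61 + 1)  # divisible by 3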
Browse reviews for an application

Args:
    packageName (str): app unique ID.
    filterByDevice (bool): filter results for current device
    sort (int): sorting criteria (values are unknown)
    nb_results (int): max number of reviews to return
    offset (int): return reviews starting from an offset value

Returns:
    list of dict objects, one per review, containing the protobuf data
    returned from the api

def reviews(self, packageName, filterByDevice=False, sort=2,
            nb_results=None, offset=None):
    # TODO: select the number of reviews to return
    path = REVIEWS_URL + "?doc={}&sort={}".format(
        requests.utils.quote(packageName), sort)
    if nb_results is not None:
        path += "&n={}".format(nb_results)
    if offset is not None:
        path += "&o={}".format(offset)
    if filterByDevice:
        path += "&dfil=1"
    data = self.executeRequestApi2(path)
    output = []
    for review in data.payload.reviewResponse.getResponse.review:
        output.append(utils.parseProtobufObj(review))
    return output
339,232
Wraps a regularizer in a parameter-function.

Args:
    name: The name scope for this regularizer.
    regularization_fn: A function with signature:
        fn(variable) -> loss `Tensor` or `None`.
    name_filter: A regex that will be used to filter variables by name.

Returns:
    A parameter modification function that adds the loss to the
    REGULARIZATION_LOSSES graph key.

def regularizer(name, regularization_fn, name_filter='weights'):
    regex = re.compile(name_filter)

    def fn(var_name, variable, phase):
        if phase is pt.Phase.train and regex.search(var_name):
            with tf.name_scope(None, name, [variable]):
                loss = regularization_fn(variable)
                if loss is not None:
                    tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, loss)
        return variable
    return fn
339,341
Composes multiple modification functions in order.

Args:
    *parameter_functions: The functions to compose.

Returns:
    A parameter modification function that consists of applying all the
    provided functions.

def compose(*parameter_functions):
    def composed_fn(var_name, variable, phase):
        for fn in parameter_functions:
            variable = fn(var_name, variable, phase)
        return variable
    return composed_fn
339,344
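A sketch of combining the two helpers above in a TF1/prettytensor setting; the penalty weight, scope name, and variable name are illustrative only, not part of the library:

import re
import tensorflow as tf
import prettytensor as pt

# Penalize every variable whose name matches 'weights' with 1e-4 * L2,
# leaving the variable itself unchanged.
l2 = regularizer('l2_penalty', lambda v: 1e-4 * tf.nn.l2_loss(v))
modifier = compose(l2)

var = tf.Variable(tf.ones([3, 3]), name='layer1/weights')
var = modifier('layer1/weights', var, pt.Phase.train)
reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)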
Calculates the sum of absolute errors between y and target.

Args:
    y: the calculated values.
    target: the desired values.
    name: the name for this op, defaults to l1_regression

Returns:
    A tensorflow op.

def l1_regression_loss(y, target, name=None):
    with tf.name_scope(name, 'l1_regression', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        return reduce_batch_sum(tf.abs(y - target), name=scope)
339,347
Calculates the sum of squared errors between y and target.

Args:
    y: the calculated values.
    target: the desired values.
    name: the name for this op, defaults to l2_regression

Returns:
    A tensorflow op.

def l2_regression_sq_loss(y, target, name=None):
    with tf.name_scope(name, 'l2_regression_sq', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        return reduce_batch_sum(tf.square(y - target), name=scope)
339,348
Calculates the square root of the SSE between y and target.

Args:
    y: the calculated values.
    target: the desired values.
    name: the name for this op, defaults to l2_regression

Returns:
    A tensorflow op.

def l2_regression_loss(y, target, name=None):
    with tf.name_scope(name, 'l2_regression', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        return tf.sqrt(l2_regression_sq_loss(y, target, name=scope))
339,350
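For reference, the three regression losses above compute the following, with reduce_batch_sum summing residuals over each example in the batch:

\mathcal{L}_{1} = \sum_i \lvert y_i - t_i \rvert, \qquad
\mathcal{L}_{2}^{\mathrm{sq}} = \sum_i (y_i - t_i)^2, \qquad
\mathcal{L}_{2} = \sqrt{\mathcal{L}_{2}^{\mathrm{sq}}}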
Calculates the binary cross entropy between sigmoid(x) and target.

Expects unscaled logits. Do not pass in results of sigmoid operation.

Args:
    x: the calculated pre-sigmoid values
    target: the desired values.
    name: the name for this op, defaults to binary_cross_entropy_with_logits

Returns:
    -(target * -softplus(-x) + (1-target) * (-x - softplus(-x)))

Raises:
    ValueError: If shapes are incompatible.

def binary_cross_entropy_loss_with_logits(x, target, name=None):
    with tf.name_scope(name, 'binary_cross_entropy_with_logits',
                       [x, target]) as scope:
        x.get_shape().assert_is_compatible_with(target.get_shape())
        neg_softplus = -tf.nn.softplus(-x)
        return -tf.add(tf.multiply(target, neg_softplus),
                       tf.multiply(1 - target, -x + neg_softplus),
                       name=scope)
339,351
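The expression in the Returns clause is the standard cross entropy rewritten with softplus, using \log\sigma(x) = -\mathrm{softplus}(-x) and \log(1-\sigma(x)) = -x - \mathrm{softplus}(-x):

-\bigl(t\,\log\sigma(x) + (1-t)\,\log(1-\sigma(x))\bigr)
  = -\bigl(t \cdot (-\mathrm{softplus}(-x)) + (1-t)\,(-x - \mathrm{softplus}(-x))\bigr)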
Cosine distance between t1 and t2; epsilon lower-bounds the normalizer,
which caps the gradient of the square root.

Args:
    t1: A tensor
    t2: A tensor that can be multiplied by t1.
    epsilon: A lower bound value for the squared-norm product used as
        the normalizer.
    name: Optional name for this op.

Returns:
    The cos distance between t1 and t2.

def cos_distance(t1, t2, epsilon=1e-12, name=None):
    with tf.name_scope(name, 'cos_distance', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        x_inv_norm = tf.rsqrt(tf.maximum(length_squared(t1) * length_squared(t2),
                                         epsilon))
        return tf.subtract(1.0, dot_product(t1, t2) * x_inv_norm, name=scope)
339,352
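In closed form, the op above computes

d_{\cos}(t_1, t_2) = 1 - \frac{\langle t_1, t_2 \rangle}{\max\bigl(\lVert t_1 \rVert\,\lVert t_2 \rVert,\ \sqrt{\epsilon}\bigr)}

so the epsilon floor keeps the rsqrt (and hence the gradient) finite when either input is near zero.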